diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e319d63f00c..5b2db410381 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,39 +1,4 @@ -megatron/core/ @NVIDIA/core-adlr @NVIDIA/core-nemo - -megatron/core/models/gpt/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/gpt - -megatron/core/models/multimodal/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/multi-modal - -megatron/core/models/mamba/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/hybrid-mamba -megatron/core/ssm/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/hybrid-mamba - -megatron/core/datasets/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/datasets - -megatron/core/distributed/fsdp/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/megatron-fsdp - -megatron/core/transformer/fsdp_dtensor_checkpoint.py @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/megatron-fsdp - -megatron/core/dist_checkpointing/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/dist-checkpointing - -megatron/core/optimizer/distrib_optimizer/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/dist-optimizer - -megatron/core/inference/modelopt_support @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/quantization-and-inference - -megatron/core/datasets/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/datasets - -megatron/core/pipeline_parallel/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/pipeline-parallelism - -megatron/core/transformer/ @NVIDIA/core-adlr @NVIDIA/core-nemo - -megatron/core/transformer/moe/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/mixture-of-experts-adlr @NVIDIA/mixture-of-experts-devtech - -megatron/core/inference/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/inference - -megatron/core/parallel_state.py @NVIDIA/core-adlr @NVIDIA/core-nemo - -megatron/core/post_training/ @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/post-training - -megatron/post_training/ @NVIDIA/post-training +* @NVIDIA/core-nemo @NVIDIA/core-devtech megatron/core/transformer/cuda_graphs.py @NVIDIA/core-adlr @NVIDIA/core-nemo @NVIDIA/cuda-graphs @@ -41,19 +6,9 @@ 
megatron/core/transformer/cuda_graphs.py @NVIDIA/core-adlr @NVIDIA/core-nemo @NV .github/ @NVIDIA/ci .gitlab-ci.yml @NVIDIA/ci docker/ @NVIDIA/ci +tests/unit_tests/run_ci_test.sh @NVIDIA/ci +tests/test_utils/python_scripts/ tests/functional_tests/python_test_utils/ @NVIDIA/ci tests/functional_tests/shell_test_utils/ @NVIDIA/ci -tests/test_utils/recipes/ @NVIDIA/ci -tests/unit_tests/run_ci_test.sh @NVIDIA/ci - -# API Backwards Compatibility Check -scripts/check_api_backwards_compatibility.py @NVIDIA/ci @pablo-garay -scripts/README_API_COMPAT.md @NVIDIA/ci @pablo-garay -.github/workflows/check_api_backwards_compatibility_workflow.yml @NVIDIA/ci @pablo-garay -docs/api-backwards-compatibility-check.md @NVIDIA/ci @pablo-garay -tests/unit_tests/test_api_backwards_compat_setup.py @NVIDIA/ci @pablo-garay - -megatron/rl/ @NVIDIA/reinforcement-learning -examples/rl/ @NVIDIA/reinforcement-learning -test/unit_tests/test_rl_utils.py @NVIDIA/reinforcement-learning -train_rl.py @NVIDIA/reinforcement-learning +pyproject.toml @NVIDIA/ci +uv.lock @NVIDIA/ci diff --git a/.github/actions/action.yml b/.github/actions/action.yml index f3e42e5843d..decaa5ff3f8 100644 --- a/.github/actions/action.yml +++ b/.github/actions/action.yml @@ -48,7 +48,9 @@ inputs: is_ci_workload: description: "Is CI workload" required: true - + is_merge_group: + description: "Is merge group" + required: true runs: using: "composite" steps: @@ -56,38 +58,6 @@ runs: shell: bash -x -e -u -o pipefail {0} run: echo "node_name=$NODE_NAME" | tee -a "$GITHUB_OUTPUT" - - name: GPU Sanity Check - shell: bash -x -e -u -o pipefail {0} - run: | - echo "Starting GPU Sanity Check..." - - # 1. Check for active Compute Processes - # query-compute-apps returns a list of PIDs using the GPU. If empty, we are good. - OPEN_PROCESSES=$(docker run --rm --gpus all ubuntu nvidia-smi --query-compute-apps=pid,process_name --format=csv,noheader) - - if [ -n "$OPEN_PROCESSES" ]; then - echo "::error::❌ GPU is not clean! 
Found active processes:" - echo "$OPEN_PROCESSES" - else - echo "✅ No active compute processes found." - fi - - # 2. Check VRAM Usage (Optional but recommended) - # We allow a small buffer (e.g., < 300MiB) for driver overhead/Xorg, - # though on headless K8s nodes this should be very close to 0. - - MEMORY_USAGES=$(docker run --rm --gpus all ubuntu nvidia-smi --query-gpu=memory.used --format=csv,noheader,nounits) - - # Check each GPU visible to the container - for MEMORY in $MEMORY_USAGES; do - if [ "$MEMORY" -gt 300 ]; then - echo "::error::❌ GPU VRAM usage is suspiciously high: ${MEMORY} MiB" - fi - done - - echo "✅ GPU Memory is clean (all < 300 MiB)." - echo "Ready to start workflow." - - name: Checkout repository uses: actions/checkout@v2 @@ -117,8 +87,10 @@ runs: export PYTHONPATH=$(pwd) export NEMORUN_HOME=$(pwd) export NCCL_DEBUG=INFO - pip install --no-cache-dir uv - uv sync --only-group test + pip install --no-cache-dir "uv<0.9.29" + uv venv .venv + uv cache clean + uv sync --no-cache --only-group test uv run python tests/test_utils/python_scripts/launch_nemo_run_workload.py \ --scope unit-tests \ --model unit-tests \ @@ -177,7 +149,12 @@ runs: #!/bin/bash set -euxo pipefail - if [ "${{ steps.has-run-tests-label.outputs.main }}" == "true" ]; then + if [ "${{ inputs.is_merge_group }}" == "true" ]; then + ARGS=( + --scope mr-github + --n-repeat 1 + ) + elif [ "${{ steps.has-run-tests-label.outputs.main }}" == "true" ]; then ARGS=( --scope mr-github --enable-lightweight-mode @@ -197,8 +174,10 @@ runs: export PYTHONPATH=$(pwd) export NEMORUN_HOME=$(pwd) - pip install --no-cache-dir uv - uv sync --only-group test + pip install --no-cache-dir "uv<0.9.29" + uv venv .venv + uv cache clean + uv sync --no-cache --only-group test uv run python tests/test_utils/python_scripts/launch_nemo_run_workload.py \ ${ARGS[@]} \ --model ${{ inputs.model }} \ diff --git a/.github/copy-pr-bot.yaml b/.github/copy-pr-bot.yaml index f43437d19c0..d9ece17bd35 100644 --- 
a/.github/copy-pr-bot.yaml +++ b/.github/copy-pr-bot.yaml @@ -1,4 +1,4 @@ enabled: true auto_sync_draft: false auto_sync_ready: true -trustees_override: ["AAnoosheh", "ArEsKay3", "Autumn1998", "BestJuly", "BoxiangW", "ChenhanYu", "FDecaYed", "HaochenYuan", "ISEEKYAN", "JRD971000", "Phlip79", "QiZhangNV", "ShriyaRishab", "Victarry", "Wohox", "ZhiyuLi-Nvidia", "ahmadki", "aklife97", "ananthsub", "asolergi-nv", "buptzyb", "chtruong814", "cspades", "cuichenx", "deepakn94", "dimapihtar", "duncanriach", "erhoo82", "ericharper", "fanshiqing", "frsun-nvda", "gautham-kollu", "gdengk", "guyueh1", "hxbai", "jalbericiola", "janEbert", "jaredcasper", "jenchen13", "jiemingz", "jingqiny-99", "jkamalu", "jon-barker", "jstjohn", "kanz-nv", "kevalmorabia97", "ko3n1g", "kunlunl", "kvareddy", "kwyss-nvidia", "layalir", "lhb8125", "lmcafee-nvidia", "maanug-nv", "mathemakitten", "matthieule", "mehraakash", "mkhona-nvidia", "parthmannan", "prajwal1210", "pthombre", "rogerwaleffe", "sanandaraj5597", "sancha", "santhnm2", "sbak5", "shanmugamr1992", "shifangx", "shjwudp", "sidsingh-nvidia", "skyw", "sudhakarsingh27", "tdene", "theothermike", "thomasdhc", "trintamaki", "tylerpoon", "wdykas", "xiaoyao0115", "xuwchen", "yanring", "yaox12", "yaoyu-33", "yashaswikarnati", "yeyu-nvidia", "yobibyte", "youngeunkwon0405", "yuzhongw-nvidia", "zhongbozhu"] +trustees_override: ["AAnoosheh", "ArEsKay3", "Autumn1998", "BestJuly", "BoxiangW", "ChenhanYu", "FDecaYed", "HaochenYuan", "ISEEKYAN", "JRD971000", "Phlip79", "QiZhangNV", "RPrenger", "ShriyaRishab", "Victarry", "Wohox", "ZhiyuLi-Nvidia", "ahmadki", "aklife97", "ananthsub", "asolergi-nv", "buptzyb", "chtruong814", "cspades", "cuichenx", "deepakn94", "dimapihtar", "dingqingy-nv", "duncanriach", "erhoo82", "ericharper", "fanshiqing", "frsun-nvda", "gautham-kollu", "gdengk", "guyueh1", "hxbai", "ilml", "jalbericiola", "janEbert", "jaredcasper", "jenchen13", "jiemingz", "jingqiny-99", "jkamalu", "jon-barker", "jstjohn", "kanz-nv", "kevalmorabia97", 
"ko3n1g", "kunlunl", "kvareddy", "kwyss-nvidia", "layalir", "lhb8125", "lmcafee-nvidia", "maanug-nv", "mathemakitten", "matthieule", "mehraakash", "mkhona-nvidia", "parthmannan", "prajwal1210", "pthombre", "rogerwaleffe", "sajadn", "sanandaraj5597", "sancha", "santhnm2", "sbak5", "shanmugamr1992", "sharathts", "shengf-nv", "shifangx", "shjwudp", "sidsingh-nvidia", "skyw", "sudhakarsingh27", "tdene", "theothermike", "thomasdhc", "trintamaki", "tylerpoon", "wdykas", "xiaoyao0115", "xuwchen", "yanring", "yaox12", "yaoyu-33", "yashaswikarnati", "yeyu-nvidia", "yobibyte", "youngeunkwon0405", "yueshen2016", "yuzhongw-nvidia", "zhongbozhu"] diff --git a/.github/oncall_schedule.json b/.github/oncall_schedule.json index 5fa49e966bc..58fcf0ddbbc 100644 --- a/.github/oncall_schedule.json +++ b/.github/oncall_schedule.json @@ -1,18 +1,6 @@ [ - { - "user": "dimapihtar", - "date": "2026-01-28" - }, - { - "user": "gautham-kollu", - "date": "2026-02-04" - }, { "user": "janEbert", - "date": "2026-02-11" - }, - { - "user": "Phlip79", "date": "2026-02-18" }, { @@ -46,5 +34,17 @@ { "user": "BoxiangW", "date": "2026-04-15" + }, + { + "user": "Phlip79", + "date": "2026-04-22" + }, + { + "user": "asolergi-nv", + "date": "2026-04-29" + }, + { + "user": "dimapihtar", + "date": "2026-05-06" } ] diff --git a/.github/scripts/readme.sh b/.github/scripts/readme.sh new file mode 100644 index 00000000000..216d5224a28 --- /dev/null +++ b/.github/scripts/readme.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +cat << 'EOF' +╔══════════════════════════════════════════════════════════════════════╗ +║ ║ +║ ███╗ ███╗██████╗ ██████╗ ██╗██████╗ ██████╗ ███████╗ ║ +║ ████╗ ████║██╔══██╗██╔══██╗██║██╔══██╗██╔════╝ ██╔════╝ ║ +║ ██╔████╔██║██████╔╝██████╔╝██║██║ ██║██║ ███╗█████╗ ║ +║ ██║╚██╔╝██║██╔══██╗██╔══██╗██║██║ ██║██║ ██║██╔══╝ ║ +║ ██║ ╚═╝ ██║██████╔╝██║ ██║██║██████╔╝╚██████╔╝███████╗ ║ +║ ╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═╝╚═╝╚═════╝ ╚═════╝ ╚══════╝ ║ +║ ║ +║ H O W T O : M B R I D G E T E S T I N G ║ 
+╚══════════════════════════════════════════════════════════════════════╝ + + MBridge unit tests run automatically on every PR. To also trigger + functional tests, attach the label and re-run the workflow step. + + ┌─────────────────────────────────────────────────────────────────┐ + │ DEFAULT │ Unit tests run on every PR (no action needed) │ + ├─────────────────────────────────────────────────────────────────┤ + │ │ + │ Every PR ──► cicd-mbridge-testing ──► unit tests only │ + │ │ + └─────────────────────────────────────────────────────────────────┘ + + ┌─────────────────────────────────────────────────────────────────┐ + │ STEP 1 │ Attach the label to your PR (for functional tests) │ + ├─────────────────────────────────────────────────────────────────┤ + │ │ + │ PR Labels ──► [ + Add label ] ──► "Run MBridge tests" │ + │ │ + └─────────────────────────────────────────────────────────────────┘ + + ┌─────────────────────────────────────────────────────────────────┐ + │ STEP 2 │ Re-run this workflow step │ + ├─────────────────────────────────────────────────────────────────┤ + │ │ + │ Actions ──► [ Re-run jobs ] ──► Re-run failed jobs │ + │ │ + └─────────────────────────────────────────────────────────────────┘ + + ┌─────────────────────────────────────────────────────────────────┐ + │ RESULT │ Unit + functional tests run! │ + ├─────────────────────────────────────────────────────────────────┤ + │ │ + │ cicd-mbridge-testing ◄── unit + functional tests │ + │ │ + │ Tests run against MBridge using the merge commit │ + │ SHA of your pull request. │ + │ │ + └─────────────────────────────────────────────────────────────────┘ + + ┌────────────────────────────────────┐ + │ Label present? NO → unit │ + │ Label present? YES → unit + │ + │ functional│ + └────────────────────────────────────┘ + + NOTE: The label must be present BEFORE the re-run is triggered. + The CI checks for "Run MBridge tests" at runtime. 
+ + NOTE: All MBridge test results are optional — failures do not + block merging your PR. +EOF diff --git a/.github/workflows/_build_test_publish_wheel.yml b/.github/workflows/_build_test_publish_wheel.yml index 9e9062827de..0b71577b587 100644 --- a/.github/workflows/_build_test_publish_wheel.yml +++ b/.github/workflows/_build_test_publish_wheel.yml @@ -17,8 +17,6 @@ on: type: boolean default: true secrets: - TWINE_USERNAME: - required: true TWINE_PASSWORD: required: true @@ -147,7 +145,6 @@ jobs: needs: [build-and-test-wheels] runs-on: ubuntu-latest if: inputs.no-publish == false - environment: ${{ (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/r')) && 'main' || 'public' }} strategy: fail-fast: false matrix: @@ -170,7 +167,7 @@ jobs: - name: Publish wheels env: - TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }} + TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} TWINE_REPOSITORY: ${{ (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/r')) && 'pypi' || 'testpypi' }} PLATFORM: ${{ matrix.PLATFORM }} diff --git a/.github/workflows/_release_library.yml b/.github/workflows/_release_library.yml index d39ee505c2a..684dacc27aa 100644 --- a/.github/workflows/_release_library.yml +++ b/.github/workflows/_release_library.yml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: 'Release' +name: "Release" defaults: run: @@ -38,13 +38,24 @@ on: description: Create a GitHub release type: boolean default: true + gh-release-use-changelog-builder: + required: false + description: Use release-changelog-builder-action to dynamically build changelog + type: boolean + default: true + gh-release-changelog-config: + required: false + description: Path to changelog builder configuration file + type: string + default: ".github/workflows/config/changelog-config.json" + gh-release-from-tag: + required: false + description: Starting tag for changelog builder (leave empty for auto-detect) + type: string + default: "" secrets: - TWINE_USERNAME: - required: true TWINE_PASSWORD: required: true - SLACK_WEBHOOK_ADMIN: - required: true SLACK_WEBHOOK: required: true PAT: @@ -62,12 +73,10 @@ jobs: ref: ${{ inputs.release-ref }} no-publish: true secrets: - TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }} TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} bump-next-version: runs-on: ubuntu-latest - environment: main # ${{ inputs.dry-run == true && 'public' || 'main' }} needs: build-test-publish-wheels-dry-run if: | ( @@ -90,8 +99,8 @@ jobs: - name: Bump version MCore id: bump-version-mcore env: - SRC_DIR: '' - PYPROJECT_NAME: 'megatron.core' + SRC_DIR: "" + PYPROJECT_NAME: "megatron.core" run: | set +u cd ${{ github.run_id }} @@ -129,8 +138,8 @@ jobs: - name: Bump version MFSDP id: bump-version-mfsdp env: - SRC_DIR: 'megatron/core/distributed/fsdp/src/' - PYPROJECT_NAME: 'megatron_fsdp' + SRC_DIR: "megatron/core/distributed/fsdp/src/" + PYPROJECT_NAME: "megatron_fsdp" run: | set +u @@ -323,7 +332,6 @@ jobs: create-gh-release: needs: [build-test-publish-wheels, bump-next-version] runs-on: ubuntu-latest - environment: ${{ inputs.dry-run == true && 'public' || 'main' }} if: | ( success() || !failure() @@ -345,12 +353,51 @@ jobs: ref: ${{ inputs.release-ref }} token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }} + - name: Determine fromTag for changelog + id: 
determine-from-tag + if: inputs.gh-release-use-changelog-builder == true + run: | + cd ${{ github.run_id }} + + # If gh-release-from-tag is provided, use it + if [[ -n "${{ inputs.gh-release-from-tag }}" ]]; then + FROM_TAG="${{ inputs.gh-release-from-tag }}" + echo "Using provided fromTag: $FROM_TAG" + else + # Get the most recent tag + FROM_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "") + if [[ -z "$FROM_TAG" ]]; then + echo "No previous tags found, leaving fromTag empty" + else + echo "Auto-detected most recent tag: $FROM_TAG" + fi + fi + + echo "from-tag=$FROM_TAG" >> $GITHUB_OUTPUT + + - name: Build Changelog + id: build-changelog + if: inputs.gh-release-use-changelog-builder == true + uses: mikepenz/release-changelog-builder-action@v6.1.0 + env: + GITHUB_TOKEN: ${{ secrets.PAT || secrets.GITHUB_TOKEN }} + with: + configuration: ${{ github.run_id }}/${{ inputs.gh-release-changelog-config }} + owner: ${{ github.repository_owner }} + repo: ${{ github.event.repository.name }} + ignorePreReleases: "false" + failOnError: "false" + fromTag: ${{ steps.determine-from-tag.outputs.from-tag }} + toTag: ${{ inputs.release-ref }} + mode: ${{ inputs.gh-release-changelog-mode }} + - name: Create release id: version-number env: SHA: ${{ inputs.release-ref }} GH_TOKEN: ${{ secrets.PAT }} IS_DRY_RUN: ${{ inputs.dry-run }} + BUILT_CHANGELOG: ${{ steps.build-changelog.outputs.changelog }} run: | cd ${{ github.run_id }} @@ -359,7 +406,10 @@ jobs: IS_PRERELEASE=$([[ "$IS_RELEASE_CANDIDATE" == "true" || "$IS_ALPHA" == "true" ]] && echo "true" || echo "false") NAME="NVIDIA $PROJECT_NAME ${VERSION}" - if [[ "$IS_RELEASE_CANDIDATE" == "true" ]]; then + # Use built changelog if available, otherwise fall back to CHANGELOG.md + if [[ -n "$BUILT_CHANGELOG" ]]; then + CHANGELOG="$BUILT_CHANGELOG" + elif [[ "$IS_RELEASE_CANDIDATE" == "true" ]]; then DATE=$(date +"%Y-%m-%d") CHANGELOG="Prerelease: $NAME ($DATE)" else @@ -402,10 +452,19 @@ jobs: eval "$CMD" fi + publish-docs: + 
needs: [bump-next-version, create-gh-release] + uses: ./.github/workflows/release-docs.yml + with: + dry-run: ${{ inputs.dry-run }} + publish-as-latest: true + docs-version-override: ${{ needs.bump-next-version.outputs.release-version }} + build-docs-ref: ${{ inputs.release-ref }} + secrets: inherit + notify: needs: [build-test-publish-wheels, create-gh-release] runs-on: ubuntu-latest - environment: ${{ inputs.dry-run == true && 'public' || 'main' }} env: GH_URL: https://github.com/${{ github.repository }}/releases/tag/v${{ needs.build-test-publish-wheels.outputs.version }} PYPI_URL: https://${{ inputs.dry-run == true && 'test.' || '' }}pypi.org/project/${{ needs.build-test-publish-wheels.outputs.pypi-name }}/${{ needs.build-test-publish-wheels.outputs.version }}/ diff --git a/.github/workflows/_update_dependencies.yml b/.github/workflows/_update_dependencies.yml index 063b966b5de..0a5fb47605f 100644 --- a/.github/workflows/_update_dependencies.yml +++ b/.github/workflows/_update_dependencies.yml @@ -9,12 +9,6 @@ on: secrets: PAT: required: true - AZURE_CLIENT_ID: - required: true - AZURE_TENANT_ID: - required: true - AZURE_SUBSCRIPTION_ID: - required: true SSH_KEY: required: true SSH_PWD: @@ -32,26 +26,12 @@ jobs: run: echo "date=$(date +%F)" | tee -a "$GITHUB_OUTPUT" update-lockfile: - environment: nemo-ci runs-on: linux-amd64-cpu16 needs: [pre-flight] env: SOURCE_BRANCH: ${{ needs.pre-flight.outputs.bump-branch }} TARGET_BRANCH: ${{ inputs.target-branch }} steps: - - name: Install Azure CLI - run: curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash - - - name: Azure Login - uses: azure/login@v2 - with: - client-id: ${{ secrets.AZURE_CLIENT_ID }} - tenant-id: ${{ secrets.AZURE_TENANT_ID }} - subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} - - - name: Azure ACR Login - run: az acr login --name nemoci - - name: Checkout repo uses: actions/checkout@v4 with: @@ -96,7 +76,6 @@ jobs: create-pr: needs: [update-lockfile, pre-flight] runs-on: ubuntu-latest - 
environment: main env: SOURCE_BRANCH: ${{ needs.pre-flight.outputs.bump-branch }} TARGET_BRANCH: ${{ inputs.target-branch }} diff --git a/.github/workflows/auto-assign-milestone.yml b/.github/workflows/auto-assign-milestone.yml index 8153728f9fd..b972329bac1 100644 --- a/.github/workflows/auto-assign-milestone.yml +++ b/.github/workflows/auto-assign-milestone.yml @@ -13,7 +13,6 @@ permissions: jobs: assign-milestone: runs-on: ubuntu-latest - environment: nemo-ci if: github.repository == 'NVIDIA/Megatron-LM' steps: - name: Get PR info diff --git a/.github/workflows/auto-reminder-bot.yml b/.github/workflows/auto-reminder-bot.yml index c3aa8169b50..37e6e5498e3 100644 --- a/.github/workflows/auto-reminder-bot.yml +++ b/.github/workflows/auto-reminder-bot.yml @@ -9,7 +9,6 @@ on: jobs: run-script: - environment: main name: Run Auto Reminder Bot runs-on: ubuntu-latest if: github.repository == 'NVIDIA/Megatron-LM' @@ -28,7 +27,7 @@ jobs: - name: Run Auto Reminder Bot run: | - export SLACK_TOKEN=${{ secrets.SLACK_TOKEN }} - export SLACK_WEBHOOK_URL=${{ secrets.SLACK_WEBHOOK_URL }} + export SLACK_TOKEN=${{ secrets.SLACK_BOT_TOKEN }} + export SLACK_WEBHOOK_URL=${{ secrets.SLACK_REVIEW_REMINDER_CHANNEL_WEBHOOK }} export GH_TOKEN=${{ secrets.PAT }} python tests/test_utils/python_scripts/auto_reminder_github.py diff --git a/.github/workflows/auto-update-copy-pr-bot.yml b/.github/workflows/auto-update-copy-pr-bot.yml index 5f6f1ade9e8..3358a747f34 100644 --- a/.github/workflows/auto-update-copy-pr-bot.yml +++ b/.github/workflows/auto-update-copy-pr-bot.yml @@ -3,12 +3,11 @@ name: Auto Update Copy PR Bot on: workflow_dispatch: schedule: - - cron: '0 0 * * *' + - cron: "0 0 * * *" jobs: auto-update-copy-pr-bot: runs-on: ubuntu-latest - environment: nemo-ci if: github.repository == 'NVIDIA/Megatron-LM' steps: - name: Checkout code diff --git a/.github/workflows/build-test-publish-wheel.yml b/.github/workflows/build-test-publish-wheel.yml index 2b2ea3dfc85..00711b50806 100644 --- 
a/.github/workflows/build-test-publish-wheel.yml +++ b/.github/workflows/build-test-publish-wheel.yml @@ -18,8 +18,8 @@ on: push: branches: - main - - 'pull-request/[0-9]+' - - 'deploy-release/*' + - "pull-request/[0-9]+" + - "deploy-release/*" merge_group: types: [checks_requested] @@ -33,7 +33,7 @@ permissions: jobs: pre-flight: - uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_cicd_preflight.yml@v0.65.5 + uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_cicd_preflight.yml@v0.73.2 if: github.repository == 'NVIDIA/Megatron-LM' build-test-publish-wheels: @@ -42,8 +42,7 @@ jobs: with: no-publish: true secrets: - TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }} - TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} + TWINE_PASSWORD: ${{ (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/r')) && secrets.SVC_PYPI_TOKEN || secrets.SVC_PYPI_TEST_TOKEN }} build-test-publish-wheel-summary: needs: [pre-flight, build-test-publish-wheels] @@ -65,7 +64,7 @@ jobs: env: GH_TOKEN: ${{ github.token }} GITHUB_RUN_ID: ${{ github.run_id }} - SKIPPING_IS_ALLOWED: ${{ needs.pre-flight.outputs.docs_only == 'true' || needs.pre-flight.outputs.is_deployment_workflow == 'true' || needs.pre-flight.outputs.is_merge_group == 'true' || needs.pre-flight.outputs.is_ci_workload == 'true' || github.ref != 'refs/heads/main' }} + SKIPPING_IS_ALLOWED: true run: | FAILED_JOBS=$(gh run view $GITHUB_RUN_ID --json jobs --jq '[.jobs[] | select(.status == "completed" and .conclusion != "success")] | length') || echo 0 diff --git a/.github/workflows/check_api_backwards_compatibility_workflow.yml b/.github/workflows/check_api_backwards_compatibility_workflow.yml index f4fcd4c3713..4ba0ed2780c 100644 --- a/.github/workflows/check_api_backwards_compatibility_workflow.yml +++ b/.github/workflows/check_api_backwards_compatibility_workflow.yml @@ -28,7 +28,7 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 0 - + - name: Check if relevant files changed id: check_files run: | @@ -46,11 +46,11 
@@ jobs: else # For push events, use merge-base to find common ancestor # This ensures we only detect changes actually made in this PR branch, - # not changes that happened in main after the branch was created - BASE_SHA=$(git merge-base origin/main HEAD 2>/dev/null || echo "") + # not changes that happened in dev after the branch was created + BASE_SHA=$(git merge-base origin/dev HEAD 2>/dev/null || echo "") if [ -z "$BASE_SHA" ]; then - # Fallback for pull-request/* branches targeting dev - BASE_SHA=$(git merge-base origin/dev HEAD 2>/dev/null || echo "") + # Fallback for branches targeting main + BASE_SHA=$(git merge-base origin/main HEAD 2>/dev/null || echo "") fi echo "Push event - comparing against merge-base: $BASE_SHA" fi @@ -83,7 +83,7 @@ jobs: if: needs.pre-flight.outputs.should_skip != 'true' name: "OPTIONAL: Check API Backward Compatibility" runs-on: ubuntu-latest - + # ============================================================================ # Configuration Parameters (modify here) # ============================================================================ @@ -91,24 +91,24 @@ jobs: # Default baseline for automatic PR checks # Can be: branch name (e.g., 'main'), commit hash, or tag # Will be resolved to commit hash during execution - DEFAULT_BASELINE: '5ab481cb45efc72add12f8ba0378e849b3d2bc50' + DEFAULT_BASELINE: 'ed804b49860201e7103ce0f9c1129a330a384a65' # Tag pattern for auto-detection (e.g., 'core_r*', 'core_v*') TAG_PATTERN: 'core_v*' # Tag regex filter (e.g., '^core_v[0-9]+\.[0-9]+\.[0-9]+$' for stable versions only) TAG_REGEX_FILTER: '^core_v[0-9]+\.[0-9]+\.[0-9]+$' # ============================================================================ - + steps: - name: Checkout code uses: actions/checkout@v4 with: - fetch-depth: 0 # Need full history to access baseline ref - + fetch-depth: 0 # Need full history to access baseline ref + - name: Set up Python uses: actions/setup-python@v5 with: python-version: '3.12' - + - name: Install griffe run: | 
python -m pip install --upgrade pip @@ -116,7 +116,7 @@ jobs: python -c "import griffe; print('Griffe installed successfully')" python -c "from griffe import Object; print('Object import successful')" || echo "Object import from griffe failed" python -c "from griffe.dataclasses import Object; print('Object import from dataclasses successful')" || echo "Object import from dataclasses failed" - + - name: Determine baseline reference id: baseline run: | @@ -134,13 +134,13 @@ jobs: # BASELINE_REF="${{ env.DEFAULT_BASELINE }}" # fi fi - + # Resolve baseline to commit hash (works for branches, tags, or commit hashes) BASELINE_HASH=$(git rev-parse "$BASELINE_REF") - + echo "baseline=$BASELINE_HASH" >> $GITHUB_OUTPUT echo "Using baseline: $BASELINE_REF (resolved to commit: $BASELINE_HASH)" - + - name: Run compatibility check id: compat_check run: | @@ -148,13 +148,13 @@ jobs: python scripts/check_api_backwards_compatibility.py \ --baseline ${{ steps.baseline.outputs.baseline }} \ --verbose 2>&1 | tee compat_check_output.txt - + # Capture exit code EXIT_CODE=${PIPESTATUS[0]} echo "exit_code=$EXIT_CODE" >> $GITHUB_OUTPUT exit $EXIT_CODE continue-on-error: true - + - name: Fail job if breaking changes detected if: steps.compat_check.outcome == 'failure' run: | @@ -233,10 +233,10 @@ jobs: echo "🔧 Checker script: scripts/check_api_backwards_compatibility.py" echo "❓ Questions? Check the docs or ask in #megatron-core" echo "" - + echo "::error::Breaking API changes detected. Please review the output above and choose a resolution strategy." 
exit 1 - + - name: Success message if: steps.compat_check.outcome == 'success' run: | @@ -271,4 +271,3 @@ jobs: gh run view $GITHUB_RUN_ID --json jobs --jq '.jobs[] | select(.status == "completed" and .conclusion != "success" and .name != "OPTIONAL: API Backward Compatibility Check Summary") | .name' exit 1 fi - diff --git a/.github/workflows/cherry-pick-release-commit.yml b/.github/workflows/cherry-pick-release-commit.yml index 882b3f5b268..9da305f07e6 100644 --- a/.github/workflows/cherry-pick-release-commit.yml +++ b/.github/workflows/cherry-pick-release-commit.yml @@ -26,5 +26,5 @@ jobs: target-branches-pattern: 'core_(*dev_)?r[0-9]+\.[0-9]+\.[0-9]+' secrets: PAT: ${{ secrets.PAT }} - SLACK_WEBHOOK_ADMIN: ${{ secrets.SLACK_WEBHOOK_ADMIN }} - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} + SLACK_WEBHOOK_ADMIN: ${{ secrets.SLACK_TEAM_GROUP_ID }} + SLACK_WEBHOOK: ${{ secrets.SLACK_CI_CHANNEL_WEBHOOK }} diff --git a/.github/workflows/cicd-approve-test-queue.yml b/.github/workflows/cicd-approve-test-queue.yml index f34657eb509..2cba41eafb8 100644 --- a/.github/workflows/cicd-approve-test-queue.yml +++ b/.github/workflows/cicd-approve-test-queue.yml @@ -181,8 +181,8 @@ jobs: steps: - name: Notify env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} - SLACK_WEBHOOK_ADMIN: + SLACK_WEBHOOK: ${{ secrets.SLACK_CI_CHANNEL_WEBHOOK }} + SLACK_WEBHOOK_ADMIN: GITHUB_RUN_ID: ${{ github.run_id }} GITHUB_REPOSITORY: ${{ github.repository }} run: | diff --git a/.github/workflows/cicd-main.yml b/.github/workflows/cicd-main.yml index ad26a5ba0f6..cc108bc66d0 100644 --- a/.github/workflows/cicd-main.yml +++ b/.github/workflows/cicd-main.yml @@ -18,8 +18,6 @@ on: - cron: 0 0 * * * push: branches: - - dev - - main - "pull-request/[0-9]+" - "deploy-release/*" merge_group: @@ -27,7 +25,7 @@ on: workflow_dispatch: concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-${{ github.event.label.name || 'main' }}-${{ github.event_name }} + group: ${{ 
github.workflow }}-${{ github.head_ref || github.ref || github.event.pull_request.number }} cancel-in-progress: true permissions: @@ -77,8 +75,8 @@ jobs: IS_MERGE_GROUP: ${{ github.event_name == 'merge_group' }} SCHEDULED_JOB: ${{ github.event_name == 'schedule' }} run: | - # Skip SSO check for scheduled jobs, main branch, or merge groups - if [ "${{ env.SCHEDULED_JOB }}" == "true" ] || [ "${IS_MAIN_BRANCH}" == "true" ] || [ "${IS_MERGE_GROUP}" == "true" ]; then + # Skip SSO check for scheduled jobs, main branch, dev branch, or merge groups + if [ "${{ env.SCHEDULED_JOB }}" == "true" ] || [ "${IS_MAIN_BRANCH}" == "true" ] || [ "${IS_DEV_BRANCH}" == "true" ] || [ "${IS_MERGE_GROUP}" == "true" ]; then echo "is_maintainer=true" | tee -a $GITHUB_OUTPUT exit 0 fi @@ -131,7 +129,7 @@ jobs: pre-flight: needs: [is-not-external-contributor] if: github.repository == 'NVIDIA/Megatron-LM' - uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_cicd_preflight.yml@v0.65.10 + uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_cicd_preflight.yml@v0.73.2 linting: runs-on: ubuntu-latest @@ -179,10 +177,11 @@ jobs: cicd-wait-in-queue: runs-on: ubuntu-latest needs: [pre-flight, linting] - environment: ${{ needs.pre-flight.outputs.is_merge_group == 'true' && 'merge-gate' || 'test' }} + environment: "test" if: | !(needs.pre-flight.outputs.is_ci_workload == 'true' || needs.pre-flight.outputs.is_deployment_workflow == 'true' + || needs.pre-flight.outputs.is_merge_group == 'true' || needs.pre-flight.outputs.docs_only == 'true') steps: - name: Running CI tests @@ -190,16 +189,128 @@ jobs: echo "Running CI tests" echo "is_merge_group: ${{ needs.pre-flight.outputs.is_merge_group }}" + cicd-parse-downstream-testing: + runs-on: ubuntu-latest + needs: + - pre-flight + - cicd-wait-in-queue + if: | + needs.pre-flight.result != 'cancelled' + && needs.cicd-wait-in-queue.result != 'cancelled' + && ( + success() + || needs.pre-flight.outputs.is_ci_workload == 'true' + || 
needs.pre-flight.outputs.force_run_all == 'true' + || needs.pre-flight.outputs.is_merge_group == 'true' + ) + && !cancelled() + outputs: + mbridge-test-suite: ${{ steps.select-mbridge-test-suite.outputs.main }} + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Get PR info + id: get-pr-info + if: startsWith(github.ref, 'refs/heads/pull-request/') + uses: nv-gha-runners/get-pr-info@main + + - name: Select MBridge test suite + id: select-mbridge-test-suite + env: + GH_TOKEN: ${{ secrets.PAT }} + run: | + PR_NUMBER=${{ fromJSON(steps.get-pr-info.outputs.pr-info || '{}').number }} + TEST_SUITE=$(gh pr view $PR_NUMBER --json labels | jq -r 'if [.labels[].name] | any(. == "Run MBridge tests") then "all" else "unit-only" end') + echo "main=$TEST_SUITE" | tee -a $GITHUB_OUTPUT + + - name: How-To + run: bash .github/scripts/readme.sh + + cicd-mbridge-testing: + runs-on: ubuntu-latest + needs: + - pre-flight + - cicd-wait-in-queue + - cicd-parse-downstream-testing + if: | + needs.pre-flight.result != 'cancelled' + && needs.cicd-wait-in-queue.result != 'cancelled' + && needs.cicd-parse-downstream-testing.result != 'cancelled' + && ( + success() + || needs.pre-flight.outputs.is_ci_workload == 'true' + || needs.pre-flight.outputs.force_run_all == 'true' + || needs.pre-flight.outputs.is_merge_group == 'true' + ) + && !cancelled() + steps: + - name: Get PR info + id: get-pr-info + if: startsWith(github.ref, 'refs/heads/pull-request/') + uses: nv-gha-runners/get-pr-info@main + + - name: Checkout MBridge and create testing branch + uses: actions/checkout@v4 + with: + ref: main + repository: NVIDIA-NeMo/Megatron-Bridge + path: megatron-bridge + token: ${{ secrets.PAT }} + + - name: Create testing branch + run: | + cd megatron-bridge + git fetch origin main + git checkout -b mcore-testing-${{ fromJSON(steps.get-pr-info.outputs.pr-info || '{}').number }} origin/main + git push origin mcore-testing-${{ fromJSON(steps.get-pr-info.outputs.pr-info || '{}').number }} 
--force + + - name: Get merge commit sha + shell: bash -x -e -u -o pipefail {0} + id: sha + env: + IS_PR: ${{ startsWith(github.ref, 'refs/heads/pull-request/') }} + IS_MERGE_GROUP: ${{ github.event_name == 'merge_group' }} + run: | + if [[ "$IS_PR" == "true" ]]; then + SHA=${{ fromJSON(steps.get-pr-info.outputs.pr-info || '{}').merge_commit_sha }} + elif [[ "$IS_MERGE_GROUP" == "true" ]]; then + SHA=${{ github.event.merge_group.head_sha }} + else + SHA=${GITHUB_SHA} + fi + echo "main=${SHA}" | tee -a "$GITHUB_OUTPUT" + + - name: Trigger MBridge tests + uses: convictional/trigger-workflow-and-wait@v1.6.5 + with: + owner: NVIDIA-NeMo + repo: Megatron-Bridge + workflow_file_name: cicd-main.yml + github_token: ${{ secrets.PAT }} + ref: mcore-testing-${{ fromJSON(steps.get-pr-info.outputs.pr-info || '{}').number }} + wait_interval: 60 + propagate_failure: true + client_payload: | + { + "mcore_ref": "${{ steps.sha.outputs.main }}", + "test_suite": "${{ needs.cicd-parse-downstream-testing.outputs.mbridge-test-suite }}", + "triggered_by": "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" + } + cicd-container-build: needs: [is-not-external-contributor, pre-flight, cicd-wait-in-queue] runs-on: ${{ needs.is-not-external-contributor.outputs.selected_runner }} if: | - ( + needs.is-not-external-contributor.result != 'cancelled' + && needs.pre-flight.result != 'cancelled' + && needs.cicd-wait-in-queue.result != 'cancelled' + && ( success() || needs.pre-flight.outputs.is_ci_workload == 'true' + || needs.pre-flight.outputs.is_merge_group == 'true' || needs.pre-flight.outputs.force_run_all == 'true' ) - && needs.pre-flight.outputs.is_merge_group == 'false' && !cancelled() steps: - name: Get PR info @@ -212,9 +323,12 @@ jobs: id: sha env: IS_PR: ${{ startsWith(github.ref, 'refs/heads/pull-request/') }} + IS_MERGE_GROUP: ${{ github.event_name == 'merge_group' }} run: | if [[ "$IS_PR" == "true" ]]; then SHA=${{ fromJSON(steps.get-pr-info.outputs.pr-info 
|| '{}').merge_commit_sha }} + elif [[ "$IS_MERGE_GROUP" == "true" ]]; then + SHA=${{ github.event.merge_group.head_sha }} else SHA=${GITHUB_SHA} fi @@ -332,12 +446,15 @@ jobs: - cicd-wait-in-queue - cicd-container-build if: | - ( + needs.pre-flight.result != 'cancelled' + && needs.cicd-wait-in-queue.result != 'cancelled' + && needs.cicd-container-build.result != 'cancelled' + && ( success() || needs.pre-flight.outputs.is_ci_workload == 'true' || needs.pre-flight.outputs.force_run_all == 'true' + || needs.pre-flight.outputs.is_merge_group == 'true' ) - && needs.pre-flight.outputs.is_merge_group == 'false' && !cancelled() steps: - name: Checkout @@ -363,12 +480,17 @@ jobs: timeout-minutes: 60 name: "${{ matrix.bucket }} - latest" if: | - ( + needs.is-not-external-contributor.result != 'cancelled' + && needs.pre-flight.result != 'cancelled' + && needs.cicd-wait-in-queue.result != 'cancelled' + && needs.cicd-container-build.result != 'cancelled' + && needs.cicd-parse-unit-tests.result != 'cancelled' + && ( success() || needs.pre-flight.outputs.is_ci_workload == 'true' || needs.pre-flight.outputs.force_run_all == 'true' + || needs.pre-flight.outputs.is_merge_group == 'true' ) - && needs.pre-flight.outputs.is_merge_group == 'false' && !cancelled() env: PIP_DISABLE_PIP_VERSION_CHECK: 1 @@ -390,14 +512,17 @@ jobs: cicd-parse-integration-tests: runs-on: ubuntu-latest - timeout-minutes: 60 needs: - pre-flight - cicd-wait-in-queue - cicd-container-build - cicd-unit-tests-latest if: | - ( + needs.pre-flight.result != 'cancelled' + && needs.cicd-wait-in-queue.result != 'cancelled' + && needs.cicd-container-build.result != 'cancelled' + && needs.cicd-unit-tests-latest.result != 'cancelled' + && ( success() || needs.pre-flight.outputs.is_ci_workload == 'true' || needs.pre-flight.outputs.force_run_all == 'true' @@ -439,7 +564,7 @@ jobs: id: main env: HAS_RUN_TESTS_LABEL: ${{ steps.has-run-tests-label.outputs.main }} - HAS_RUN_FUNCTIONAL_TESTS_LABEL: ${{ 
steps.has-run-functional-tests-label.outputs.main }} + HAS_RUN_FUNCTIONAL_TESTS_LABEL: ${{ steps.has-run-functional-tests-label.outputs.main == 'true' || needs.pre-flight.outputs.is_merge_group == 'true' }} run: | export PYTHONPATH=$(pwd) @@ -480,6 +605,7 @@ jobs: echo "integration-tests=$(cat integration-tests.json)" | tee -a "$GITHUB_OUTPUT" cicd-integration-tests-latest: + timeout-minutes: 60 strategy: fail-fast: false matrix: @@ -497,12 +623,17 @@ jobs: PIP_NO_PYTHON_VERSION_WARNING: 1 PIP_ROOT_USER_ACTION: ignore if: | - ( + needs.is-not-external-contributor.result != 'cancelled' + && needs.pre-flight.result != 'cancelled' + && needs.cicd-wait-in-queue.result != 'cancelled' + && needs.cicd-parse-integration-tests.result != 'cancelled' + && needs.cicd-unit-tests-latest.result != 'cancelled' + && ( success() || needs.pre-flight.outputs.is_ci_workload == 'true' || needs.pre-flight.outputs.force_run_all == 'true' + || needs.pre-flight.outputs.is_merge_group == 'true' ) - && needs.pre-flight.outputs.is_merge_group == 'false' && !cancelled() steps: - name: Checkout @@ -518,6 +649,7 @@ jobs: PAT: ${{ secrets.PAT }} container-image: ${{ env.container-registry }}/megatron-lm:${{ github.sha }} is_ci_workload: ${{ needs.pre-flight.outputs.is_ci_workload }} + is_merge_group: ${{ needs.pre-flight.outputs.is_merge_group }} Nemo_CICD_Test: needs: @@ -548,8 +680,8 @@ jobs: GITHUB_RUN_ID: ${{ github.run_id }} SKIPPING_IS_ALLOWED: ${{ needs.pre-flight.outputs.docs_only == 'true' || needs.pre-flight.outputs.is_deployment_workflow == 'true' || needs.pre-flight.outputs.is_merge_group == 'true' || needs.pre-flight.outputs.is_ci_workload == 'true' }} run: | - FAILED_JOBS=$(gh run view $GITHUB_RUN_ID --json jobs --jq '[.jobs[] | select(.status == "completed" and .conclusion == "failure")] | length') || echo 0 - SKIPPED_JOBS=$(gh run view $GITHUB_RUN_ID --json jobs --jq '[.jobs[] | select(.status == "completed" and .conclusion == "skipped")] | length') || echo 0 + FAILED_JOBS=$(gh run 
view $GITHUB_RUN_ID --json jobs --jq '[.jobs[] | select(.status == "completed" and .conclusion == "failure" and .name != "merge-queue-notification")] | length') || echo 0 + SKIPPED_JOBS=$(gh run view $GITHUB_RUN_ID --json jobs --jq '[.jobs[] | select(.status == "completed" and .conclusion == "skipped" and .name != "merge-queue-notification")] | length') || echo 0 if [ "${FAILED_JOBS:-0}" -eq 0 ] && ([ "${SKIPPED_JOBS:-0}" -eq 0 ] || [ "$SKIPPING_IS_ALLOWED" == "true" ]); then echo "✅ All previous jobs completed successfully" @@ -641,6 +773,34 @@ jobs: .coverage include-hidden-files: true + merge-queue-notification: + runs-on: ubuntu-latest + if: github.event_name == 'merge_group' + permissions: + pull-requests: write + steps: + - name: Extract PR number from merge group + id: get-pr-number + run: | + # Extract PR number from merge group head_ref (format: refs/heads/gh-readonly-queue/main/pr--) + PR_NUMBER=$(echo "${{ github.event.merge_group.head_ref }}" | sed -n 's/.*\/pr-\([0-9]*\)-.*/\1/p') + echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT + + - name: Comment on PR with action run URL + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.PAT }} + script: | + const prNumber = ${{ steps.get-pr-number.outputs.pr_number }}; + const runUrl = `https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}`; + + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: prNumber, + body: `🔄 Merge queue validation started!\n\nYou can track the progress here: ${runUrl}` + }); + cleanup-taint-node: runs-on: ${{ needs.is-not-external-contributor.outputs.selected_runner }} needs: diff --git a/.github/workflows/config/changelog-config.json b/.github/workflows/config/changelog-config.json new file mode 100644 index 00000000000..e640b90a0f3 --- /dev/null +++ b/.github/workflows/config/changelog-config.json @@ -0,0 +1,24 @@ +{ + "categories": [], + "ignore_labels": [ + "ignore" + ], + "sort": 
"ASC", + "template": "\n${{CHANGELOG}}\n\n
Changelog Details\n\n${{UNCATEGORIZED}}\n
\n", + "pr_template": "- ${{TITLE}} by @${{AUTHOR}} :: PR: #${{NUMBER}}", + "commit_template": "- ${{TITLE}} by @${{AUTHOR}}", + "empty_template": "${{OWNER}}\n${{REPO}}\n${{FROM_TAG}}\n${{TO_TAG}}", + "duplicate_filter": { + "pattern": ".+", + "on_property": "title", + "method": "match" + }, + "transformers": [], + "max_tags_to_fetch": 100, + "max_pull_requests": 500, + "max_back_track_time_days": 365, + "exclude_merge_branches": [], + "tag_resolver": { + "method": "semver" + } +} diff --git a/.github/workflows/copyright-check.yml b/.github/workflows/copyright-check.yml index 9bbb7a1f201..a7f51cd8a0e 100644 --- a/.github/workflows/copyright-check.yml +++ b/.github/workflows/copyright-check.yml @@ -17,14 +17,14 @@ name: Copyright check on: push: branches: - - 'pull-request/[0-9]+' - - 'deploy-release/*' + - "pull-request/[0-9]+" + - "deploy-release/*" merge_group: types: [checks_requested] jobs: pre-flight: - uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_cicd_preflight.yml@v0.65.10 + uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_cicd_preflight.yml@v0.73.2 if: github.repository == 'NVIDIA/Megatron-LM' copyright-check: diff --git a/.github/workflows/dependabot.yml b/.github/workflows/dependabot.yml index 9dc1e6ac5a9..6aa16ba0a0c 100644 --- a/.github/workflows/dependabot.yml +++ b/.github/workflows/dependabot.yml @@ -11,7 +11,6 @@ permissions: jobs: get-release-branch-names: runs-on: ubuntu-latest - environment: nemo-ci outputs: mcore: ${{ steps.get-branch.outputs.mcore_release_branch }} if: github.repository == 'NVIDIA/Megatron-LM' @@ -41,9 +40,6 @@ jobs: target-branch: ${{ matrix.target-branch }} secrets: PAT: ${{ secrets.PAT }} - AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} - AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} - AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }} SSH_KEY: ${{ secrets.SSH_KEY }} SSH_PWD: ${{ secrets.SSH_PWD }} @@ -54,8 +50,8 @@ jobs: steps: - name: Notify env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} - 
SLACK_WEBHOOK_ADMIN: + SLACK_WEBHOOK: ${{ secrets.SLACK_CI_CHANNEL_WEBHOOK }} + SLACK_WEBHOOK_ADMIN: GITHUB_RUN_ID: ${{ github.run_id }} GITHUB_REPOSITORY: ${{ github.repository }} run: | diff --git a/.github/workflows/install-test.yml b/.github/workflows/install-test.yml index ece9184ee94..5a0abb8596d 100644 --- a/.github/workflows/install-test.yml +++ b/.github/workflows/install-test.yml @@ -22,14 +22,14 @@ on: branches: - dev - main - - 'pull-request/[0-9]+' - - 'deploy-release/*' + - "pull-request/[0-9]+" + - "deploy-release/*" merge_group: types: [checks_requested] jobs: pre-flight: - uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_cicd_preflight.yml@v0.65.5 + uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_cicd_preflight.yml@v0.73.2 if: github.repository == 'NVIDIA/Megatron-LM' pip-test-pytorch: @@ -43,11 +43,10 @@ jobs: name: Pip - Python${{ matrix.python-version }} - AMD64/Linux - NGC PyTorch container: image: nvcr.io/nvidia/pytorch:25.05-py3 - environment: nemo-ci strategy: fail-fast: false matrix: - python-version: ['3.12'] + python-version: ["3.12"] steps: - name: Checkout repository uses: actions/checkout@v4 @@ -89,11 +88,10 @@ jobs: name: UV - Python${{ matrix.python-version }} - AMD64/Linux - NGC PyTorch container: image: nvcr.io/nvidia/pytorch:25.05-py3 - environment: nemo-ci strategy: fail-fast: false matrix: - python-version: ['3.12'] + python-version: ["3.12"] steps: - name: Checkout repository uses: actions/checkout@v4 diff --git a/.github/workflows/mirror-to-main.yml b/.github/workflows/mirror-to-main.yml new file mode 100644 index 00000000000..cb77851942b --- /dev/null +++ b/.github/workflows/mirror-to-main.yml @@ -0,0 +1,129 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +name: Mirror Dev to Main + +on: + push: + branches: + - "pull-request/[0-9]+" + +jobs: + cherry-pick-to-main: + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + token: ${{ secrets.PAT }} + + - name: Get PR info + id: get-pr-info + uses: nv-gha-runners/get-pr-info@main + + - name: Configure Git + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "GitHub Actions Bot" + + - name: Cherry-pick to main + env: + GH_TOKEN: ${{ secrets.PAT }} + run: | + set -x + + PR_NUMBER=${{ fromJSON(steps.get-pr-info.outputs.pr-info || '{}').number }} + BASE_REF="${{ fromJSON(steps.get-pr-info.outputs.pr-info).base.ref }}" + HAS_MIRROR_MAIN_LABEL=$(gh pr view $PR_NUMBER --json labels | jq '[.labels[].name] | any(. == "mirror-to-main")' || echo "false") + TARGET_BRANCH="cherry-pick-$PR_NUMBER-into-main" + + # Skip if not labeled with mirror-to-main + if [ "$HAS_MIRROR_MAIN_LABEL" != "true" ]; then + echo "PR is not labeled with mirror-to-main, will not mirror to main." + exit 0 + fi + + # Skip if not targeting dev + if [ "$BASE_REF" != "dev" ]; then + echo "PR is not targeting dev, will not mirror to main." + exit 0 + fi + + # Check if target branch already exists + if git ls-remote --heads origin "refs/heads/$TARGET_BRANCH" | grep -q .; then + echo "Target branch already exists, will not cherry-pick again." 
+ exit 0 + fi + + # Get PR details + PR_AUTHOR="${{ fromJSON(steps.get-pr-info.outputs.pr-info).user.login }}" + PR_TITLE="${{ fromJSON(steps.get-pr-info.outputs.pr-info).title }}" + SOURCE_BRANCH="${{ fromJSON(steps.get-pr-info.outputs.pr-info).head.ref }}" + SOURCE_REPO="${{ fromJSON(steps.get-pr-info.outputs.pr-info).head.repo.full_name }}" + + # Fetch all branches + git fetch origin dev + + # Handle forks vs same repo + if [ "$SOURCE_REPO" = "${{ github.repository }}" ]; then + git fetch origin "$SOURCE_BRANCH" + git checkout "$SOURCE_BRANCH" + else + git fetch "https://github.com/$SOURCE_REPO.git" "$SOURCE_BRANCH" + git checkout FETCH_HEAD + fi + + # Find commit range to cherry-pick + START_COMMIT=$(git merge-base origin/dev HEAD) + END_COMMIT=$(git rev-parse HEAD) + + # Create cherry-pick branch from main + git fetch origin main + git checkout main + git checkout -b "$TARGET_BRANCH" + + # Cherry-pick commits + if ! git cherry-pick "$START_COMMIT..$END_COMMIT"; then + # Comment on the original PR about the failure + COMMENT_BODY=$(cat <<'EOF' + ❌ **Cherry-pick to main failed** + + The cherry-pick encountered conflicts and could not be completed automatically. + + **Next steps:** + 1. Manually create a PR with these changes to main + 2. Resolve any conflicts + EOF + ) + + gh pr comment $PR_NUMBER --body "$COMMENT_BODY" + exit 1 + fi + + # Push branch + git push -u origin "$TARGET_BRANCH" + + # Create PR to main + gh pr create \ + --base main \ + --head "$TARGET_BRANCH" \ + --title "cp: \`$PR_TITLE ($PR_NUMBER)\` into \`main\`" \ + --body "[🤖]: Hi @$PR_AUTHOR 👋

We've cherry-picked \`$PR_TITLE (#$PR_NUMBER)\` into \`main\` for you! 🚀

Please review and approve this cherry-pick at your convenience!" \ + --label "cherry-pick" \ + --reviewer "$PR_AUTHOR" + diff --git a/.github/workflows/multi-approval-bot.yml b/.github/workflows/multi-approval-bot.yml deleted file mode 100644 index e8507605aa7..00000000000 --- a/.github/workflows/multi-approval-bot.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: "Codeowners Approval Workflow" - -on: - push: - branches: - - "pull-request/[0-9]+" - merge_group: - types: [checks_requested] - -jobs: - pre-flight: - uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_cicd_preflight.yml@v0.65.5 - if: github.repository == 'NVIDIA/Megatron-LM' - - codeowners-approval: - needs: [pre-flight] - runs-on: ubuntu-latest - environment: nemo-ci - if: | - !(needs.pre-flight.outputs.docs_only == 'true' - || needs.pre-flight.outputs.is_merge_group == 'true' - || needs.pre-flight.outputs.is_deployment_workflow == 'true') - steps: - - name: Get PR info - id: get-pr-info - if: startsWith(github.ref, 'refs/heads/pull-request/') - uses: nv-gha-runners/get-pr-info@main - - - name: Checkout action - uses: actions/checkout@v3 - with: - repository: noamelf/codeowner-multi-approval-action - ref: v0.1 - path: codeowner-multi-approval-action - - - name: Check Codeowners Approval - uses: ./codeowner-multi-approval-action - with: - pr-number: ${{ fromJSON(steps.get-pr-info.outputs.pr-info || '{}').number }} - repo-name: ${{ github.repository }} - github-token: ${{ secrets.PAT }} - - multi-approval-bot-summary: - needs: [pre-flight, codeowners-approval] - if: | - ( - needs.pre-flight.outputs.docs_only == 'true' - || needs.pre-flight.outputs.is_merge_group == 'true' - || needs.pre-flight.outputs.is_deployment_workflow == 'true' - || always() - ) - && github.repository == 'NVIDIA/Megatron-LM' - && !cancelled() - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Result - env: - GH_TOKEN: ${{ github.token }} - GITHUB_RUN_ID: ${{ github.run_id }} - 
SKIPPING_IS_ALLOWED: ${{ needs.pre-flight.outputs.docs_only == 'true' || needs.pre-flight.outputs.is_deployment_workflow == 'true' || needs.pre-flight.outputs.is_merge_group == 'true' || needs.pre-flight.outputs.is_ci_workload == 'true' }} - run: | - FAILED_JOBS=$(gh run view $GITHUB_RUN_ID --json jobs --jq '[.jobs[] | select(.status == "completed" and .conclusion != "success")] | length') || echo 0 - - if [ "${FAILED_JOBS:-0}" -eq 0 ] || [ "$SKIPPING_IS_ALLOWED" == "true" ]; then - echo "✅ All previous jobs completed successfully" - exit 0 - else - echo "❌ Found $FAILED_JOBS failed job(s)" - # Show which jobs failed - gh run view $GITHUB_RUN_ID --json jobs --jq '.jobs[] | select(.status == "completed" and .conclusion != "success") | .name' - exit 1 - fi diff --git a/.github/workflows/oncall-rotation.yml b/.github/workflows/oncall-rotation.yml index 46a45810ad1..a621be7f652 100644 --- a/.github/workflows/oncall-rotation.yml +++ b/.github/workflows/oncall-rotation.yml @@ -17,7 +17,7 @@ name: Oncall Rotation on: schedule: # Runs at 09:00 UTC every Wednesday - - cron: '0 9 * * 3' + - cron: "0 9 * * 3" workflow_dispatch: permissions: @@ -25,7 +25,6 @@ permissions: jobs: rotate-schedule: - environment: main runs-on: ubuntu-latest steps: - name: Checkout code @@ -36,7 +35,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: '3.10' + python-version: "3.10" - name: Rotate Schedule env: @@ -45,7 +44,10 @@ jobs: # Slack token for updating the Slack usergroup SLACK_TOKEN: ${{ secrets.ONCALL_SLACK_TOKEN }} run: | - pip install --no-cache-dir uv + pip install --no-cache-dir "uv<0.9.29" + uv venv .venv + uv cache clean + uv sync --no-cache uv run --with slack-sdk python .github/scripts/oncall_manager.py rotate - name: Commit and Push changes @@ -56,4 +58,3 @@ jobs: git commit -m "chore: rotate oncall schedule" || echo "No changes to commit" git pull --rebase git push origin HEAD:main - diff --git a/.github/workflows/release-docs.yml 
b/.github/workflows/release-docs.yml index d15ea74f052..b393a69c745 100644 --- a/.github/workflows/release-docs.yml +++ b/.github/workflows/release-docs.yml @@ -20,23 +20,52 @@ on: required: true type: boolean default: true - version-number: - description: Version number to release this as (use `latest` for main branch) - required: true + publish-as-latest: + description: Publish as Latest stable version. + required: false + type: boolean + default: true + docs-version-override: + description: Docs version if commit is not tagged + required: false type: string + default: "" notify-emails: description: Email addresses to send the notification to. Format as "me@me.com,you@you.com". + required: false + type: string + workflow_call: + inputs: + dry-run: + description: Whether to run the workflow in dry-run mode required: true + type: boolean + default: true + publish-as-latest: + description: Publish as Latest stable version. + required: false + type: boolean + default: true + docs-version-override: + description: Docs version if commit is not tagged + required: false + type: string + default: "" + notify-emails: + description: Email addresses to send the notification to. Format as "me@me.com,you@you.com". 
+ required: false type: string - aws-region: - description: AWS region + build-docs-ref: + description: Reference to build the docs from required: false type: string - default: us-east-1 + default: ${{ github.sha }} jobs: build-docs: uses: NVIDIA-NeMo/FW-CI-templates/.github/workflows/_build_docs.yml@v0.67.0 + with: + ref: ${{ inputs.build-docs-ref }} publish-docs: runs-on: ubuntu-latest @@ -45,7 +74,7 @@ jobs: - uses: actions/checkout@v6 with: repository: NVIDIA-NeMo/FW-CI-templates - ref: v0.67.2 + ref: v0.72.0 path: FW-CI-templates - uses: ./FW-CI-templates/.github/actions/publish-docs @@ -59,10 +88,11 @@ jobs: artifacts-name: docs-html artifacts-path: _build/html emails-csv: ${{ inputs.notify-emails && format('{0},{1}', vars.docs_release_emails, inputs.notify-emails) || vars.docs_release_emails }} - overwrite-latest-on-tag: false + overwrite-latest-on-tag: ${{ inputs.publish-as-latest }} + docs-version-override: ${{ inputs.docs-version-override }} run-on-version-tag-only: ${{ github.ref_name != 'main' }} request-name: megatron-core-publish-docs-${{ github.run_id }} - aws-region: ${{ inputs.aws-region }} + aws-region: ${{ vars.DOCS_AWS_REGION }} aws-role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/release-freeze.yml b/.github/workflows/release-freeze.yml index 82f26168bd6..dc4bad0a9a7 100644 --- a/.github/workflows/release-freeze.yml +++ b/.github/workflows/release-freeze.yml @@ -42,5 +42,5 @@ jobs: freeze-commit: ${{ inputs.freeze-commit }} dry-run: ${{ inputs.dry-run }} secrets: - SLACK_WEBHOOK: ${{ secrets.SLACK_RELEASE_ENDPOINT }} - SLACK_WEBHOOK_ADMIN: ${{ secrets.SLACK_WEBHOOK_ADMIN }} + SLACK_WEBHOOK: ${{ secrets.SLACK_MAIN_CHANNEL_WEBHOOK }} + SLACK_WEBHOOK_ADMIN: ${{ secrets.SLACK_TEAM_GROUP_ID }} diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index aa04408689b..647e6af2379 100644 
--- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -name: 'Release Megatron-Core' +name: "Release Megatron-Core" on: workflow_dispatch: @@ -30,6 +30,16 @@ on: required: true default: true type: boolean + generate-changelog: + description: Generate changelog + required: false + default: true + type: boolean + publish-docs: + description: Publish docs + required: false + default: true + type: boolean version-bump-branch: description: Branch for version bump required: true @@ -47,9 +57,9 @@ jobs: dry-run: ${{ inputs.dry-run || false }} version-bump-branch: ${{ inputs.version-bump-branch || github.ref_name }} create-gh-release: ${{ inputs.create-gh-release || true }} + gh-release-use-changelog-builder: ${{ inputs.generate-changelog }} + publish-docs: ${{ inputs.publish-docs }} secrets: - TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }} - TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} - SLACK_WEBHOOK_ADMIN: ${{ secrets.SLACK_WEBHOOK_ADMIN }} - SLACK_WEBHOOK: ${{ secrets.SLACK_RELEASE_ENDPOINT }} + TWINE_PASSWORD: ${{ (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/r')) && secrets.SVC_PYPI_TOKEN || secrets.SVC_PYPI_TEST_TOKEN }} + SLACK_WEBHOOK: ${{ (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/r')) && secrets.SLACK_MAIN_CHANNEL_WEBHOOK || secrets.SLACK_CI_CHANNEL_WEBHOOK }} PAT: ${{ secrets.PAT }} diff --git a/.github/workflows/sync-team-usergroups.yml b/.github/workflows/sync-team-usergroups.yml index 8b08182dceb..6db5127d9a0 100644 --- a/.github/workflows/sync-team-usergroups.yml +++ b/.github/workflows/sync-team-usergroups.yml @@ -19,7 +19,6 @@ on: jobs: sync-usergroups: - environment: main runs-on: ubuntu-latest steps: - name: Checkout code @@ -28,12 +27,15 @@ jobs: - name: Set up Python uses: 
actions/setup-python@v4 with: - python-version: '3.10' + python-version: "3.10" - name: Sync Teams to User Groups env: GH_TOKEN: ${{ secrets.NVIDIA_MCORE_ONCALL_TOKEN || secrets.PAT || secrets.GITHUB_TOKEN }} SLACK_TOKEN: ${{ secrets.ONCALL_SLACK_TOKEN }} run: | - pip install --no-cache-dir uv + pip install --no-cache-dir "uv<0.9.29" + uv venv .venv + uv cache clean + uv sync --no-cache uv run --with slack-sdk python .github/scripts/sync_team_usergroups.py diff --git a/.github/workflows/trigger-mbridge-tests.yml b/.github/workflows/trigger-mbridge-tests.yml index b1a3aa0089d..282818460bb 100644 --- a/.github/workflows/trigger-mbridge-tests.yml +++ b/.github/workflows/trigger-mbridge-tests.yml @@ -2,182 +2,32 @@ # SPDX-License-Identifier: Apache-2.0 name: Trigger MBridge Tests -# Remote testing of MBridge from MCore -# Triggers MBridge CI tests with current MCore commit to verify backward compatibility - on: - # Manual trigger only workflow_dispatch: inputs: mbridge_ref: - description: 'MBridge branch/ref to trigger' + description: "MBridge branch/ref to trigger" required: false type: string - default: 'main' - run_cicd_main: - description: 'Run cicd-main.yml (full CI/CD)' - required: false - type: boolean - default: true - run_install_test: - description: 'Run install-test.yml (quick install check)' - required: false - type: boolean - default: true - test_suite: - description: 'Test suite to run (for cicd-main)' - required: false - type: choice - options: - - 'all' - - 'unit-only' - - 'functional-only' - default: 'all' + default: "main" jobs: - # First job: Get MCore commit info (shared by all matrix jobs) - get-mcore-info: + trigger-mbridge-tests: runs-on: ubuntu-latest - outputs: - sha: ${{ steps.mcore_info.outputs.sha }} - short_sha: ${{ steps.mcore_info.outputs.short_sha }} - branch: ${{ steps.mcore_info.outputs.branch }} - repo_url: ${{ steps.mcore_info.outputs.repo_url }} steps: - - name: Checkout MCore - uses: actions/checkout@v4 + - name: Trigger MBridge 
tests + uses: convictional/trigger-workflow-and-wait@v1.6.5 with: - fetch-depth: 0 - - - name: Get MCore commit info - id: mcore_info - run: | - echo "sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT - echo "short_sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT - echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT - - # Get repo URL from origin remote, fallback to constructing from github context - REPO_URL=$(git remote get-url origin 2>/dev/null || echo "${{ github.server_url }}/${{ github.repository }}.git") - echo "repo_url=${REPO_URL}" >> $GITHUB_OUTPUT - - echo "📦 MCore commit: $(git rev-parse --short HEAD)" - echo "🌿 Branch: ${GITHUB_REF#refs/heads/}" - echo "📍 Repo: ${REPO_URL}" - - # Matrix job: Trigger and monitor MBridge workflows in parallel - trigger-and-monitor: - needs: [get-mcore-info] - runs-on: ubuntu-latest - continue-on-error: true # Don't fail workflow if monitoring times out - strategy: - fail-fast: false # Continue other matrix jobs even if one fails - matrix: - include: - - workflow: install-test.yml - name: Install Test - - workflow: cicd-main.yml - name: CI/CD Main - - name: ${{ matrix.name }} - - steps: - - name: Check if workflow should run - id: should_run - run: | - if [[ "${{ matrix.workflow }}" == "install-test.yml" && "${{ inputs.run_install_test }}" == "true" ]]; then - echo "run=true" >> $GITHUB_OUTPUT - elif [[ "${{ matrix.workflow }}" == "cicd-main.yml" && "${{ inputs.run_cicd_main }}" == "true" ]]; then - echo "run=true" >> $GITHUB_OUTPUT - else - echo "run=false" >> $GITHUB_OUTPUT - echo "⏭️ Skipping ${{ matrix.workflow }} (not enabled)" - fi - - - name: Trigger ${{ matrix.workflow }} - if: steps.should_run.outputs.run == 'true' - id: trigger - env: - GH_TOKEN: ${{ secrets.PAT }} - run: | - echo "🚀 Triggering ${{ matrix.workflow }} | MCore: ${{ needs.get-mcore-info.outputs.short_sha }} | MBridge: ${{ inputs.mbridge_ref }}" - - gh workflow run ${{ matrix.workflow }} \ - --repo NVIDIA-NeMo/Megatron-Bridge --ref ${{ 
inputs.mbridge_ref }} \ - --field mcore_commit=${{ needs.get-mcore-info.outputs.sha }} \ - --field mcore_branch=${{ needs.get-mcore-info.outputs.branch }} \ - --field mcore_repo=${{ needs.get-mcore-info.outputs.repo_url }} \ - --field test_suite=${{ inputs.test_suite }} \ - --field triggered_by=mcore-ci - - - name: Get run ID - if: steps.should_run.outputs.run == 'true' - id: get_run_id - env: - GH_TOKEN: ${{ secrets.PAT }} - run: | - sleep 10 # Wait for run to appear - RUN_ID=$(gh run list \ - --repo NVIDIA-NeMo/Megatron-Bridge \ - --workflow=${{ matrix.workflow }} \ - --limit 5 \ - --json databaseId,createdAt \ - --jq "sort_by(.createdAt) | reverse | .[0] | .databaseId") - - echo "run_id=${RUN_ID}" >> $GITHUB_OUTPUT - echo "📋 Run ID: ${RUN_ID}" - - cat >> $GITHUB_STEP_SUMMARY << EOF - ## 🔄 ${{ matrix.name }} Triggered - - **MCore:** \`${{ needs.get-mcore-info.outputs.short_sha }}\` | **MBridge:** \`${{ inputs.mbridge_ref }}\` | **Suite:** \`${{ inputs.test_suite }}\` - - - 🔄 [${{ matrix.workflow }}](https://github.com/NVIDIA-NeMo/Megatron-Bridge/actions/runs/${RUN_ID}) - Running... 
- - ⏳ Monitoring every 5 minutes until completion - - > **Note:** Tests run without approval when triggered from MCore - EOF - - - name: Monitor workflow - if: steps.should_run.outputs.run == 'true' - id: monitor - continue-on-error: true - env: - GH_TOKEN: ${{ secrets.PAT }} - run: | - RUN_ID="${{ steps.get_run_id.outputs.run_id }}" - echo "📊 Monitoring ${{ matrix.workflow }} (Run ID: ${RUN_ID})" - - gh run watch ${RUN_ID} --repo NVIDIA-NeMo/Megatron-Bridge --exit-status - - CONCLUSION=$(gh run view ${RUN_ID} --repo NVIDIA-NeMo/Megatron-Bridge --json conclusion --jq -r .conclusion) - echo "workflow_status=${CONCLUSION}" >> $GITHUB_ENV - echo "✅ Completed: ${CONCLUSION}" - - - name: Report results - if: always() && steps.should_run.outputs.run == 'true' - run: | - CONCLUSION="${{ env.workflow_status || 'unknown' }}" - RUN_ID="${{ steps.get_run_id.outputs.run_id }}" - - case "$CONCLUSION" in - "success") ICON="✅"; MSG="passed" ;; - "failure") ICON="❌"; MSG="failed"; EXIT_CODE=1 ;; - "cancelled") ICON="🚫"; MSG="cancelled"; EXIT_CODE=0 ;; - *) ICON="⏳"; MSG="still running or timed out"; EXIT_CODE=0 ;; - esac - - cat >> $GITHUB_STEP_SUMMARY << EOF - ## 📊 ${{ matrix.name }} Results - - ### ${ICON} ${{ matrix.workflow }} - **Status:** \`${CONCLUSION}\` - - [View full results →](https://github.com/NVIDIA-NeMo/Megatron-Bridge/actions/runs/${RUN_ID}) - - --- - *Triggered from MCore \`${{ needs.get-mcore-info.outputs.short_sha }}\`* - EOF - - echo "${ICON} ${{ matrix.name }} ${MSG}" - exit ${EXIT_CODE:-0} - + owner: NVIDIA-NeMo + repo: Megatron-Bridge + workflow_file_name: cicd-main.yml + github_token: ${{ secrets.PAT }} + ref: ${{ inputs.mbridge_ref }} + wait_interval: 60 + propagate_failure: true + client_payload: | + { + "mcore_ref": "${{ github.sha }}", + "test_suite": "all", + "triggered_by": "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" + } diff --git a/.gitlab/stages/00.pre.yml b/.gitlab/stages/00.pre.yml index 
d3ac804e599..ff9e4e5178b 100644 --- a/.gitlab/stages/00.pre.yml +++ b/.gitlab/stages/00.pre.yml @@ -68,6 +68,7 @@ pre:create_ci_branches_dev: - branch: ci-dev-rebuild-mcore-nemo-image - branch: ci-dev-mr - branch: ci-dev-nightly + - branch: ci-dev-weekly - branch: ci-dev-upgrade-dependencies tags: - arch/amd64 diff --git a/.gitlab/stages/04.functional-tests.yml b/.gitlab/stages/04.functional-tests.yml index 1591373e9a6..d32ff86a344 100644 --- a/.gitlab/stages/04.functional-tests.yml +++ b/.gitlab/stages/04.functional-tests.yml @@ -211,7 +211,7 @@ functional:run_nemo: inherit: variables: true variables: - MCORE_COMMIT: $CI_COMMIT_SHA + MCORE_MR_COMMIT: $CI_COMMIT_SHA TEST_NEMO2_MODULE: 'True' ALLOW_FAILURE_DEPENDENCY: 'True' TESTS_TO_RUN_ON_THIS_COMMIT: nightly @@ -250,7 +250,7 @@ functional:x_notify: - export RO_API_TOKEN=${PROJECT_ACCESS_TOKEN_MCORE} - export GITLAB_ENDPOINT - export CONTEXT=$FUNCTIONAL_TEST_SCOPE - - export TAG_TEAM=$([[ "$CI_COMMIT_BRANCH" == "main" ]] && echo "1" || "0") + - export TAG_TEAM=$([[ "$CI_COMMIT_BRANCH" == "main" || "$CI_COMMIT_BRANCH" == "dev" ]] && echo "1" || "0") - export TEAM_SLUG=$SLACK_ADMIN - | python tests/test_utils/python_scripts/notify.py \ @@ -264,7 +264,7 @@ functional:x_notify: paths: - scripts rules: - - if: ($CI_PIPELINE_SOURCE == "schedule" || $CI_COMMIT_BRANCH == "main") && $FUNCTIONAL_TEST == "yes" + - if: ($CI_PIPELINE_SOURCE == "schedule" || $CI_COMMIT_BRANCH == "main" || $CI_COMMIT_BRANCH == "dev") && $FUNCTIONAL_TEST == "yes" when: always - when: never diff --git a/README.md b/README.md index 6fa300a6d4d..abb581c9b34 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@
-Megatron-LM and Megatron Core -============================= +Megatron-LM & Megatron Core +===========================

GPU-optimized library for training transformer models at scale

@@ -11,135 +11,66 @@ Megatron-LM and Megatron Core
-## About +> ## 🚨 **DEVELOPMENT BRANCH** +> ⚠️ **EXPERIMENTAL FEATURES** - This is the **dev branch** with experimental features. +> +> **→ For releases and comprehensive documentation, visit the [main branch](https://github.com/NVIDIA/Megatron-LM)** -**Megatron-Core (MCore)**: Composable library with GPU-optimized building blocks for custom training frameworks. -You can install this library using pip or use it within the Megatron-LM GitHub repository. +## ⚡ Quickstart -**Megatron-LM**: Reference implementation that includes end-to-end examples utilizing Megatron Core. +```bash +# Clone the dev branch +git clone -b dev https://github.com/NVIDIA/Megatron-LM.git +cd Megatron-LM -**Megatron-Bridge**: Training library with bidirectional Hugging Face ↔ Megatron checkpoint conversion, flexible training loops, and example model training recipes. - -For more information, refer to [Megatron Bridge](https://github.com/NVIDIA-NeMo/Megatron-Bridge). - - -## Quick Start - -Install Megatron Core with pip: - -1. Install Megatron Core with required dependencies: - - ```bash - pip install --no-build-isolation megatron-core[mlm,dev] - ``` - -2. Clone repository for examples: - - ```bash - git clone https://github.com/NVIDIA/Megatron-LM.git - cd Megatron-LM - pip install --no-build-isolation .[mlm,dev] - ``` - - -# Latest News - -- **[2025/12]** 🎉 **Megatron Core development has moved to GitHub!** All development and CI now happens in the open. We welcome community contributions. -- **[2025/10]** **[Megatron Dev Branch](https://github.com/NVIDIA/Megatron-LM/tree/dev)** - early access branch with experimental features. -- **[2025/10]** **[Megatron Bridge](https://github.com/NVIDIA-NeMo/Megatron-Bridge)** - Bidirectional converter for interoperability between Hugging Face and Megatron checkpoints, featuring production-ready recipes for popular models. 
-- **[2025/08]** **[MoE Q3-Q4 2025 Roadmap](https://github.com/NVIDIA/Megatron-LM/issues/1729)** - Comprehensive roadmap for MoE features including DeepSeek-V3, Qwen3, advanced parallelism strategies, FP8 optimizations, and Blackwell performance enhancements. -- **[2025/08]** **[GPT-OSS Model](https://github.com/NVIDIA/Megatron-LM/issues/1739)** - Advanced features including YaRN RoPE scaling, attention sinks, and custom activation functions are being integrated into Megatron Core. -- **[2025/06]** **[Megatron MoE Model Zoo](https://github.com/yanring/Megatron-MoE-ModelZoo)** - Best practices and optimized configurations for training DeepSeek-V3, Mixtral, and Qwen3 MoE models with performance benchmarking and checkpoint conversion tools. -- **[2025/05]** Megatron Core v0.11.0 brings new capabilities for multi-data center LLM training ([blog](https://developer.nvidia.com/blog/turbocharge-llm-training-across-long-haul-data-center-networks-with-nvidia-nemo-framework/)). - -
-Previous News - -- **[2024/07]** Megatron Core v0.7 improves scalability and training resiliency and adds support for multimodal training ([blog](https://developer.nvidia.com/blog/train-generative-ai-models-more-efficiently-with-new-nvidia-Megatron-Core-functionalities/)). -- **[2024/06]** Megatron Core added supports for Mamba-based models. Check out our paper [An Empirical Study of Mamba-based Language Models](https://arxiv.org/pdf/2406.07887) and [code example](https://github.com/NVIDIA/Megatron-LM/tree/ssm/examples/mamba). -- **[2024/01 Announcement]** NVIDIA has released the core capabilities in **Megatron-LM** into [**Megatron Core**](https://github.com/NVIDIA/Megatron-LM/tree/main/megatron/core) in this repository. Megatron Core expands upon Megatron-LM's GPU-optimized techniques with more cutting-edge innovations on system-level optimizations, featuring composable and modular APIs. Explore the [Megatron Core intro](#Megatron Core) for more details. - -
- - - - -# Project Structure - -``` -Megatron-LM/ -├── megatron/ -│ ├── core/ # Megatron Core (kernels, parallelism, building blocks) -│ │ ├── models/ # Transformer models -│ │ ├── transformer/ # Transformer building blocks -│ │ ├── tensor_parallel/ # Tensor parallelism -│ │ ├── pipeline_parallel/ # Pipeline parallelism -│ │ ├── distributed/ # Distributed training (FSDP, DDP) -│ │ ├── optimizer/ # Optimizers -│ │ ├── datasets/ # Dataset loaders -│ │ ├── inference/ # Inference engines -│ │ └── export/ # Model export (e.g. TensorRT-LLM) -│ ├── training/ # Training scripts -│ ├── inference/ # Inference server -│ ├── legacy/ # Legacy components -│ └── post_training/ # Post-training (RLHF, etc.) -├── examples/ # Ready-to-use training examples -├── tools/ # Utility tools -├── tests/ # Comprehensive test suite -└── docs/ # Documentation +# Install from source with dev dependencies (includes transformer_engine) +pip install -e .[mlm,dev] ``` +
+Table of Contents +**Getting Started** +- [⚡ Quick Start](#-quick-start) +- [🧠 Dev Branch Philosophy](#-dev-branch-philosophy) +- [📊 Performance & Benchmarking](#-performance--benchmarking) +- [👥 Community & Support](#-community--support) -# Performance Benchmarking - -For our latest performance benchmarking results, please refer to [NVIDIA NeMo Framework Performance Summary](https://docs.nvidia.com/nemo/megatron-bridge/latest/performance-summary.html). - -Our codebase efficiently trains models from 2B to 462B parameters across thousands of GPUs, achieving up to **47% Model FLOP Utilization (MFU)** on H100 clusters. - -![Model table](images/model_table.png) - -**Benchmark Configuration:** - -- **Vocabulary size**: 131,072 tokens -- **Sequence length**: 4096 tokens -- **Model scaling**: Varied hidden size, attention heads, and layers to achieve target parameter counts -- **Communication optimizations**: Fine-grained overlapping with DP (`--overlap-grad-reduce`, `--overlap-param-gather`), TP (`--tp-comm-overlap`), and PP (enabled by default) - -**Key Results:** - -- **6144 H100 GPUs**: Successfully benchmarked 462B parameter model training -- **Superlinear scaling**: MFU increases from 41% to 47-48% with model size -- **End-to-end measurement**: Throughputs include all operations (data loading, optimizer steps, communication, logging) -- **Production ready**: Full training pipeline with checkpointing and fault tolerance -- *Note: Performance results measured without training to convergence* +**For Complete Documentation** → [Main Branch](https://github.com/NVIDIA/Megatron-LM) | [Official Docs](https://docs.nvidia.com/Megatron-Core/) -## Weak Scaling Results +
-Our weak scaled results show superlinear scaling (MFU increases from 41% for the smallest model considered to 47-48% for the largest models); this is because larger GEMMs have higher arithmetic intensity and are consequently more efficient to execute. -![Weak scaling](images/weak_scaling.png) -## Strong Scaling Results -We also strong scaled the standard GPT-3 model (our version has slightly more than 175 billion parameters due to larger vocabulary size) from 96 H100 GPUs to 4608 GPUs, using the same batch size of 1152 sequences throughout. Communication becomes more exposed at larger scale, leading to a reduction in MFU from 47% to 42%. -![Strong scaling](images/strong_scaling.png) +## Dev Branch Philosophy +### Fast Iteration +- **Streamlined Review**: 1 code owner + 1 dev approver (can delegate review) + CI/CD +### Feature Lifecycle (Coming Soon) +- **6-Month Timeline**: Experimental features must graduate to stable or be deprecated +- **Migration Support**: Assistance provided for feature transitions +### Stability Expectations +- **Experimental Nature**: Features may change or be removed as development progresses +- **Testing**: All features will pass convergence and performance validation before inclusion +- **Support**: Dev branch issues should include `[DEV]` prefix +## Performance & Benchmarking -# Resources +- 🚀 [2025/11] [Optimizing DeepSeek-V3 Training Performance on NVIDIA GB200 NVL72](docs/discussions/deepseek-v3-gb200-optimization/deepseek-v3-gb200-optimization.md). +- ⚡ [2025/11] [A Guide to Reproduce DeepSeek-V3 Pre-training Performance on GB200](docs/discussions/deepseek-v3-gb200-optimization/deepseek-v3-gb200-reproduce-guide.md). -## Getting Help +## Community & Support +### Getting Help - 📖 **[Documentation](https://docs.nvidia.com/Megatron-Core/)** - Official documentation - 🐛 **[Issues](https://github.com/NVIDIA/Megatron-LM/issues)** - Bug reports and feature requests -## Contributing - +### Contributing We ❤️ contributions! 
Ways to contribute: - 🐛 **Report bugs** - Help us improve reliability @@ -149,10 +80,7 @@ We ❤️ contributions! Ways to contribute: **→ [Contributing Guide](./CONTRIBUTING.md)** -## Citation - -If you use Megatron in your research or project, we appreciate that you use the following citations: - +### Citation ```bibtex @article{megatron-lm, title={Megatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism}, diff --git a/docker/Dockerfile.ci.dev b/docker/Dockerfile.ci.dev index bb9ca5fbe9a..fa214deeea5 100644 --- a/docker/Dockerfile.ci.dev +++ b/docker/Dockerfile.ci.dev @@ -88,7 +88,7 @@ RUN --mount=type=secret,id=JET_INDEX_URLS bash -ex <<"EOF" JET_INDEX_URLS=$(cat /run/secrets/JET_INDEX_URLS) python -m venv /opt/jet /opt/jet/bin/pip install --no-cache-dir $JET_INDEX_URLS \ - jet-api==$JET_API_VERSION + "jet-api==$JET_API_VERSION" "setuptools<82.0.0" EOF RUN --mount=type=secret,id=JET_INDEX_URLS \ diff --git a/docs/api-guide/fine_grained_activation_offloading.md b/docs/api-guide/fine_grained_activation_offloading.md new file mode 100644 index 00000000000..53211d1d06c --- /dev/null +++ b/docs/api-guide/fine_grained_activation_offloading.md @@ -0,0 +1,31 @@ +# Fine-grained Activation Offloading (collaborated with rednote) + +Memory capacity is more and more important with the rising of extreme sparse MoE models like DeepSeek-V3 and Qwen3-235B. Fine-grained recomputing reduces the memory footprint at the cost of extra recomputation, while offloading could utilize the host-device bandwidth to achieve nearly zero-overhead. Fine-grained Activation Offloading targets at offloading the activation at the granularity of specific modules, so that we can calibrate the amount of offloading activation to maximize the training throughput. 
 + +Currently, the supported offloading modules are `"attn_norm", "core_attn", "attn_proj", "mlp_norm", "expert_fc1", "moe_act"`, which could work with fine-grained recomputation to release almost all activations of a transformer layer. + +**Features** +* Support PP=1/PP/Interleaved PP +* Compatible with fine-grained recomputation +* Support FP8 +* Support MTP +* Support mixed dense & moe layer +* Support A2A Overlap +* Support CUDA Graph + * (Temporary) the CUDA graph scope cannot contain the offloading modules + +**Usage** +```bash +# Enable fine-grained activation offloading +--fine-grained-activation-offloading + +# Specify which modules are going to offload their inputs +# Choices: "attn_norm", "core_attn", "attn_proj", "mlp_norm", "expert_fc1", "moe_act". +--offload-modules expert_fc1 +``` +**Compatible with Fine-grained Recomputation** +- For modules with minor perf overhead like layernorm or moe_act, use recomputing to reduce memory footprint; +- For other modules, use offloading to reduce memory footprint; +- Make sure the offloading/reloading could be overlapped with computing; + +![Fine-grained Activation Offloading and Fine-grained Recomputation](../../images/fine_grained_activation_offloading/offloading_and_recomputing.png) diff --git a/docs/discussions/deepseek-v3-gb200-optimization/deepseek-v3-gb200-reproduce-guide.md new file mode 100644 index 00000000000..61bd8289c66 --- /dev/null +++ b/docs/discussions/deepseek-v3-gb200-optimization/deepseek-v3-gb200-reproduce-guide.md @@ -0,0 +1,354 @@ +# A Guide to Reproduce DeepSeek-V3 Pre-training Performance on GB200 + +## 1.
Dockerfile + +Requirements: +- Transformer Engine: We recommend using commit [d2945c6](https://github.com/NVIDIA/TransformerEngine/commit/d2945c6a571e3978677614d1fe08779966a5a4ef) with PR [2146](https://github.com/NVIDIA/TransformerEngine/pull/2146) and [2150](https://github.com/NVIDIA/TransformerEngine/pull/2150). You could prepare the branch by yourself, or use this [branch](https://github.com/hxbai/TransformerEngine/commits/dev_20251024/) based on TE v2.9 plus the above three commits/PRs. +- cuDNN: v9.14 is required. +- HybridEP: Install it from [here](https://github.com/deepseek-ai/DeepEP/commits/3f601f7ac1c062c46502646ff04c535013bfca00). + +Dockerfile for reference. + +```dockerfile +FROM nvcr.io/nvidia/pytorch:25.09-py3 AS base + +ENV SHELL=/bin/bash + +# ========================= +# Install system packages +# ========================= +RUN rm -rf /opt/megatron-lm && \ + apt-get update && \ + apt-get install -y sudo gdb bash-builtins git zsh autojump tmux curl gettext libfabric-dev && \ + wget https://github.com/mikefarah/yq/releases/download/v4.27.5/yq_linux_arm64 -O /usr/bin/yq && \ + chmod +x /usr/bin/yq + +# ========================= +# Install Python packages +# ========================= +# NOTE: `unset PIP_CONSTRAINT` to install packages that do not meet the default constraint in the base image. +# Some package requirements and related versions are from +# https://github.com/NVIDIA/Megatron-LM/blob/core_v0.12.0/Dockerfile.linting. +# https://github.com/NVIDIA/Megatron-LM/blob/core_v0.12.0/requirements_mlm.txt. +# https://github.com/NVIDIA/Megatron-LM/blob/core_v0.12.0/requirements_ci.txt. 
+RUN unset PIP_CONSTRAINT && pip install --no-cache-dir debugpy dm-tree torch_tb_profiler einops wandb \ + sentencepiece tokenizers transformers torchvision ftfy modelcards datasets tqdm pydantic \ + nvidia-pytriton py-spy yapf darker \ + tiktoken flask-restful \ + nltk wrapt pytest pytest_asyncio pytest-cov pytest_mock pytest-random-order \ + black==24.4.2 isort==5.13.2 flake8==7.1.0 pylint==3.2.6 coverage mypy \ + setuptools==69.5.1 + +# ========================= +# Install cudnn 9.14.0.64 for correct mxfp8 quantization and layernorm fusion +# ========================= +RUN apt-get update && \ + wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2404/sbsa/cuda-keyring_1.1-1_all.deb && \ + dpkg -i cuda-keyring_1.1-1_all.deb && \ + apt-get update && \ + apt-get -y install libcudnn9-cuda-13 + +# ========================= +# Install latest TE +# Use a specific commit instead of main to make it more stable. +# This is based on release_v2.9 branch and contains some CPU and quantization optimizations. +# ========================= +ARG COMMIT="7dd3914726abb79bc99ff5a5db1449458ed64151" +ARG TE="git+https://github.com/hxbai/TransformerEngine.git@${COMMIT}" +RUN pip install nvidia-mathdx==25.1.1 && \ + unset PIP_CONSTRAINT && \ + NVTE_CUDA_ARCHS="100" NVTE_BUILD_THREADS_PER_JOB=8 NVTE_FRAMEWORK=pytorch pip install --no-build-isolation --no-cache-dir $TE + +# ========================= +# Install HybridEP +# ========================= +WORKDIR /home/ +RUN git clone --branch hybrid-ep https://github.com/deepseek-ai/DeepEP.git && \ + cd DeepEP && git checkout 3f601f7ac1c062c46502646ff04c535013bfca00 && \ + TORCH_CUDA_ARCH_LIST="10.0" pip install --no-build-isolation . + +# ========================= +# Clean cache +# ========================= +RUN rm -rf /root/.cache /tmp/* +``` + +> [!Tip] +> +> If you prefer to use CUDA 12.9, please change the base container to `nvcr.io/nvidia/pytorch:25.06-py3` and the cuDNN to be installed to `libcudnn9-cuda-12`. + +## 2. 
Megatron-Core + +We recommend using the [dev branch](https://github.com/NVIDIA/Megatron-LM/tree/dev) after PR [1917](https://github.com/NVIDIA/Megatron-LM/pull/1917). + +```bash +git clone https://github.com/NVIDIA/Megatron-LM.git && \ +cd Megatron-LM && +git checkout effebd81f410bc6566fffee6c320b6f8f762e06d +``` + +## 3. Cluster Configuration + +Since we're using EP 32 on NVL72, it's important to make sure + +> [!Important] +> **Every 32 GB200 GPUs (8 nodes) are in the same NVL domain (or rack)**. + +Usually you can make it via your cluster workload manager. Taking Slurm as an example, you could pass `--segment 8` to the sbatch command to ensure that every segment of 8 nodes will be scheduled to a rack. + +## 4. Training scripts + +### Environment variables + +```bash +CUDA_DEVICE_MAX_CONNECTIONS=1 +NVTE_FWD_LAYERNORM_SM_MARGIN=0 +NVTE_BWD_LAYERNORM_SM_MARGIN=0 +NVLINK_DOMAIN_SIZE=72 +NVTE_ALLOW_NONDETERMINISTIC_ALGO=1 +PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True +NCCL_NVLS_ENABLE=0 +NVTE_FUSED_ATTN=1 +NVTE_NORM_FWD_USE_CUDNN=1 +NVTE_NORM_BWD_USE_CUDNN=1 +PYTHONWARNINGS=ignore +NCCL_DEBUG=VERSION +NCCL_GRAPH_REGISTER=0 +``` + +### bindpcie + +Download [bindpcie](https://github.com/NVIDIA/mlperf-common/blob/main/client/bindpcie) to your workdir, make it executable, + +```bash +wget https://raw.githubusercontent.com/NVIDIA/mlperf-common/refs/heads/main/client/bindpcie && +chmod 755 bindpcie +``` + +and then + +> [!Important] +> **Place it at the beginning of your launch command in every process.** + +Taking Slurm as an example, your script should look like + +```bash +#!/bin/bash + +#SBATCH [... sbatch args] + +srun [... srun args] /path/to/bindpcie /path/to/pretrain_gpt.py [... mcore arguments] +``` + +This is a very important step on GB200. 
 + +### Launch script + +```bash +/path/to/bindpcie \ +/path/to/megatron-lm/pretrain_gpt.py \ +--distributed-timeout-minutes 60 \ +--tensor-model-parallel-size 1 \ +--pipeline-model-parallel-size 8 \ +--expert-model-parallel-size 32 \ +--context-parallel-size 1 \ +--expert-tensor-parallel-size 1 \ +--use-distributed-optimizer \ +--overlap-grad-reduce \ +--overlap-param-gather \ +--use-mcore-models \ +--sequence-parallel \ +--use-flash-attn \ +--disable-bias-linear \ +--micro-batch-size 1 \ +--global-batch-size 2048 \ +--train-samples 585937500 \ +--exit-duration-in-mins 220 \ +--no-save-optim \ +--no-check-for-nan-in-loss-and-grad \ +--cross-entropy-loss-fusion \ +--cross-entropy-fusion-impl te \ +--manual-gc \ +--manual-gc-interval 10 \ +--enable-experimental \ +--transformer-impl transformer_engine \ +--seq-length 4096 \ +--data-cache-path /path/to/data_cache \ +--tokenizer-type HuggingFaceTokenizer \ +--tokenizer-model unsloth/DeepSeek-V3 \ +--data-path /path/to/data \ +--split 99,1,0 \ +--no-mmap-bin-files \ +--no-create-attention-mask-in-dataloader \ +--num-workers 6 \ +--num-layers 61 \ +--hidden-size 7168 \ +--ffn-hidden-size 18432 \ +--num-attention-heads 128 \ +--kv-channels 128 \ +--max-position-embeddings 4096 \ +--position-embedding-type rope \ +--rotary-base 10000 \ +--make-vocab-size-divisible-by 3232 \ +--normalization RMSNorm \ +--norm-epsilon 1e-6 \ +--swiglu \ +--untie-embeddings-and-output-weights \ +--multi-latent-attention \ +--attention-dropout 0.0 \ +--hidden-dropout 0.0 \ +--clip-grad 1.0 \ +--weight-decay 0.1 \ +--qk-layernorm \ +--lr-decay-samples 584765624 \ +--lr-warmup-samples 1536000 \ +--lr-warmup-init 3.9e-7 \ +--lr 3.9e-6 \ +--min-lr 3.9e-7 \ +--lr-decay-style cosine \ +--adam-beta1 0.9 \ +--adam-beta2 0.95 \ +--num-experts 256 \ +--moe-layer-freq '([0]*3+[1]*58)' \ +--moe-ffn-hidden-size 2048 \ +--moe-shared-expert-intermediate-size 2048 \ +--moe-router-load-balancing-type seq_aux_loss \ +--moe-router-topk 8 \ +--moe-grouped-gemm \
+--moe-aux-loss-coeff 1e-4 \ +--moe-router-group-topk 4 \ +--moe-router-num-groups 8 \ +--moe-router-pre-softmax \ +--moe-router-padding-for-quantization \ +--moe-router-topk-scaling-factor 2.5 \ +--moe-router-score-function sigmoid \ +--moe-router-enable-expert-bias \ +--moe-router-bias-update-rate 1e-3 \ +--moe-router-dtype fp32 \ +--moe-permute-fusion \ +--moe-router-fusion \ +--q-lora-rank 1536 \ +--kv-lora-rank 512 \ +--qk-head-dim 128 \ +--qk-pos-emb-head-dim 64 \ +--v-head-dim 128 \ +--rotary-scaling-factor 40 \ +--mscale 1.0 \ +--mscale-all-dim 1.0 \ +--eval-iters 32 \ +--eval-interval 200 \ +--no-load-optim \ +--no-load-rng \ +--auto-detect-ckpt-format \ +--load None \ +--save /path/to/checkpoints \ +--save-interval 500 \ +--dist-ckpt-strictness log_all \ +--init-method-std 0.02 \ +--log-timers-to-tensorboard \ +--log-memory-to-tensorboard \ +--log-validation-ppl-to-tensorboard \ +--log-throughput \ +--log-interval 1 \ +--logging-level 40 \ +--tensorboard-dir /path/to/tensorboard \ +--wandb-project deepseek-v3-benchmarking-v0.15 \ +--wandb-exp-name DeepSeek-V3-TP1PP8EP32CP1VPP4-MBS1GBS2048-v0.15 \ +--bf16 \ +--enable-experimental \ +--recompute-granularity selective \ +--recompute-modules moe_act mlp \ +--cuda-graph-impl transformer_engine \ +--cuda-graph-scope attn moe_router moe_preprocess \ +--te-rng-tracker \ +--pipeline-model-parallel-layout "Et|(tt|)*30L" \ +--moe-router-force-load-balancing \ +--moe-token-dispatcher-type flex \ +--moe-flex-dispatcher-backend hybridep \ +--moe-hybridep-num-sms 32 \ +--fp8-recipe mxfp8 \ +--fp8-format e4m3 \ +--fp8-param-gather \ +--reuse-grad-buf-for-mxfp8-param-ag \ +--use-precision-aware-optimizer \ +--main-grads-dtype fp32 \ +--main-params-dtype fp32 \ +--exp-avg-dtype bf16 \ +--exp-avg-sq-dtype bf16 \ +``` + +### Explanation of arguments + +The following arguments indicate key optimizations. 
+ +- Pipeline parallel layout + +```bash +--pipeline-model-parallel-layout "Et|(tt|)*30L" +``` + +`E` stands for embedding, `t` for transformer layer, `L` for Loss. So it's interpreted as a total of 32 stages, where the first stage is Embedding + 1 transformer layer, the last stage is Loss, and the middle 30 stages are 2 transformer layers. + +- Fine-grained recompute + +```bash +--recompute-granularity selective \ +--recompute-modules moe_act mlp \ +``` + +- Partial CUDA Graphs + +```bash +--cuda-graph-impl transformer_engine \ +--cuda-graph-scope attn moe_router moe_preprocess \ +--te-rng-tracker \ +``` + +- Force load balancing for performance benchmark + +```bash +--moe-router-force-load-balancing \ +``` + +- HybridEP + +```bash +--moe-token-dispatcher-type flex \ +--moe-flex-dispatcher-backend hybridep \ +--moe-hybridep-num-sms 32 \ +``` + +- MXFP8 recipe + +```bash +--fp8-recipe mxfp8 \ +--fp8-format e4m3 \ +--fp8-param-gather \ +--reuse-grad-buf-for-mxfp8-param-ag \ +``` + +- BF16 optimizer states + +```bash +--use-precision-aware-optimizer \ +--main-grads-dtype fp32 \ +--main-params-dtype fp32 \ +--exp-avg-dtype bf16 \ +--exp-avg-sq-dtype bf16 \ +``` + +- Kernel fusions + +```bash +--cross-entropy-loss-fusion \ +--cross-entropy-fusion-impl te \ +--moe-permute-fusion \ +--moe-router-fusion \ +``` + +- Manual GC to make ranks better synchronized + +```bash +--manual-gc \ +--manual-gc-interval 10 \ +``` diff --git a/docs/discussions/deepseek-v3-gb200-optimization/images/image1.png b/docs/discussions/deepseek-v3-gb200-optimization/images/image1.png new file mode 100644 index 00000000000..6e4dad685c4 Binary files /dev/null and b/docs/discussions/deepseek-v3-gb200-optimization/images/image1.png differ diff --git a/docs/discussions/deepseek-v3-gb200-optimization/images/image2.png b/docs/discussions/deepseek-v3-gb200-optimization/images/image2.png new file mode 100644 index 00000000000..920e3c57f94 Binary files /dev/null and 
b/docs/discussions/deepseek-v3-gb200-optimization/images/image2.png differ diff --git a/docs/discussions/deepseek-v3-gb200-optimization/images/image3.png b/docs/discussions/deepseek-v3-gb200-optimization/images/image3.png new file mode 100644 index 00000000000..f606dbfb744 Binary files /dev/null and b/docs/discussions/deepseek-v3-gb200-optimization/images/image3.png differ diff --git a/docs/discussions/deepseek-v3-gb200-optimization/images/image4.png b/docs/discussions/deepseek-v3-gb200-optimization/images/image4.png new file mode 100644 index 00000000000..04239401edd Binary files /dev/null and b/docs/discussions/deepseek-v3-gb200-optimization/images/image4.png differ diff --git a/docs/discussions/deepseek-v3-gb200-optimization/images/image5.png b/docs/discussions/deepseek-v3-gb200-optimization/images/image5.png new file mode 100644 index 00000000000..0128fc7ae45 Binary files /dev/null and b/docs/discussions/deepseek-v3-gb200-optimization/images/image5.png differ diff --git a/docs/discussions/deepseek-v3-gb200-optimization/images/image6.png b/docs/discussions/deepseek-v3-gb200-optimization/images/image6.png new file mode 100644 index 00000000000..cb2ed2eb9ad Binary files /dev/null and b/docs/discussions/deepseek-v3-gb200-optimization/images/image6.png differ diff --git a/docs/discussions/deepseek-v3-gb200-optimization/images/image7.png b/docs/discussions/deepseek-v3-gb200-optimization/images/image7.png new file mode 100644 index 00000000000..325d0fd4f52 Binary files /dev/null and b/docs/discussions/deepseek-v3-gb200-optimization/images/image7.png differ diff --git a/docs/source/api-guide/router_replay.md b/docs/source/api-guide/router_replay.md new file mode 100644 index 00000000000..334a29c78a6 --- /dev/null +++ b/docs/source/api-guide/router_replay.md @@ -0,0 +1,176 @@ +# Design Document: MoE Router Replay Feature + +### 1. 
Overview + +This document provides a detailed description of the "Router Replay" feature implemented within the Megatron-LM Core for Mixture-of-Experts (MoE) models. + +This feature is designed to enhance determinism and analyzability in MoE model training and inference. It enables the model to load routing decisions from a predefined file and enforce their use during the forward pass, thereby bypassing the real-time routing computation. + +### 2. Motivation + +* **Determinism & Reproducibility**: In distributed training, MoE routing decisions can exhibit minor variations due to factors like floating-point precision. By replaying a fixed routing table, the MoE computation path is guaranteed to be identical across runs, which facilitates debugging and reproducing experimental results. +* **Performance Profiling**: The router's own computation (e.g., logits calculation, top-k selection) incurs overhead. In replay mode, this part of the computation can be completely skipped, allowing for more precise isolation and profiling of performance bottlenecks within the Expert Layers themselves. +* **Debugging Aid**: When issues arise in the model, fixing the routing decisions helps to isolate variables, making it easier to determine whether the problem lies with the routing mechanism or the expert computations. + +### 3. Design and Architecture + +The design follows the principles of being non-intrusive and on-demand, with the core idea of activating the replay logic only when explicitly requested by the user. + +* **Core Components**: + * `RouterReplay` (located in `megatron/core/transformer/moe/router_replay.py`): A utility class for replaying MoE routing decisions. When enabled via the `enable_routing_replay` flag, a separate instance of `RouterReplay` is created for each MoE layer's router. Each instance is responsible for loading routing data and providing the deterministic routing decisions for its corresponding layer during the forward pass. 
+ * `enable_routing_replay` (located in `megatron/core/transformer/transformer_config.py`): A boolean global configuration flag that serves as the sole entry point for enabling this feature. + +* **Workflow**: + The feature supports different modes, such as recording and replaying, controlled by a `RouterReplayAction`. + + 1. **Enabling the Feature**: The user sets `enable_routing_replay` to `True` in the model configuration. + 2. **Initialization**: When `enable_routing_replay` is true, each `TopKRouter` creates its own `RouterReplay` instance. + 3. **Mode Configuration**: The user must programmatically set the desired router replay action (e.g., `record`, `forward_replay`, `backward_replay`) on the `RouterReplay` instances. + 4. **Execution Flow (within a mini-batch)**: + * **Forward Pass**: + * For each micro-batch, the `topk_routing_with_score_function` checks the `router_replay_action`. + * **In `record` mode**: The dynamically computed `top-k` expert indices are captured and stored. + * **In `forward_replay` mode**: The function retrieves pre-loaded expert indices from `target_topk_idx`. These indices are used for the forward computation and are also appended to the `replay_backward_list` to prepare for the backward pass. + * **Backward Pass**: + * For each micro-batch (processed in reverse order in pipeline parallelism), the `router_replay_action` is checked again. + * **In `backward_replay` mode**: The function retrieves the expert indices for the corresponding micro-batch by popping them from the `replay_backward_list`. This mode is intended for training recomputation (e.g., activation checkpointing and pipeline recompute) so the same routing decisions are used during recompute/backward as in forward, ensuring determinism and correctness. + +### 4. Implementation Details + +The implementation cleanly separates the replay logic from the router's core computation. 
+ +* **`megatron/core/transformer/transformer_config.py`**: + * Adds the configuration option `enable_routing_replay: bool = False`. + +* **`megatron/core/transformer/moe/moe_utils.py`**: + * Introduces the `RouterReplay` class to manage the state for recording and replaying routing decisions for a single MoE layer. + * `target_topk_idx`: An attribute holding the expert indices for the current micro-batch during forward replay mode. + * `recorded_topk_idx`: An attribute for storing the computed expert indices when in record mode. + * `replay_backward_list`: A list that accumulates the `top-k` indices used during the forward passes of a mini-batch. This list is consumed in FIFO order during the backward pass to ensure correctness under pipeline parallelism. + * `set_target_indices()`: A method to load the replay indices into `target_topk_idx` for the forward pass. + * `record_indices()`: A method to save the computed indices. + * The `topk_routing_with_score_function` is modified to contain the core logic. It checks the `router_replay_action` on the `router_replay` instance and accordingly performs one of the following actions: computes and records indices, replays indices from `target_topk_idx` (for forward), replays indices from `replay_backward_list` (for backward), or falls through to the default dynamic routing. + +#### Training recompute usage +- During forward replay, `set_target_indices()` prepares `replay_backward_list` so each micro-batch’s indices are available for recomputation. +- During recompute/backward, set action to `REPLAY_BACKWARD` so indices are consumed in FIFO order to mirror the forward sequence. + +### 5. Usage Guide + +1. **Enable & Instantiate** + - Create one `RouterReplay` instance per MoE router layer when building the model. + - Optionally use the global helpers to set/clear actions across all layers. +2. **Record Routing Decisions** + - Set action: `RouterReplay.set_global_router_replay_action(RouterReplayAction.RECORD)`. 
+ - Run the model; retrieve per-layer indices via `RouterReplay.get_recorded_data()` and persist. +3. **Forward Replay** + - Load indices and distribute: `RouterReplay.set_replay_data(list_of_tensors)`. + - Set action: `RouterReplay.set_global_router_replay_action(RouterReplayAction.REPLAY_FORWARD)`. + - Run the model; dynamic top‑k is bypassed and target indices are used. +4. **Backward Replay** + - For training recomputation (activation checkpointing or pipeline recompute), set action: `REPLAY_BACKWARD` during recomputation. + - Per micro‑batch indices are consumed from `replay_backward_list` in FIFO order. +5. **Cleanup** + - Use `RouterReplay.clear_global_indices()`, `RouterReplay.clear_global_router_replay_action()`, and `RouterReplay.clear_global_router_replay_instances()` to restore default behavior and prevent memory leaks. + +#### Quick usage with `topk_routing_with_score_function` + +```python +import torch +from megatron.core.transformer.moe.router_replay import RouterReplay, RouterReplayAction +from megatron.core.transformer.moe.moe_utils import topk_routing_with_score_function + +rr = RouterReplay() + +# Record +RouterReplay.set_global_router_replay_action(RouterReplayAction.RECORD) +logits = torch.randn(8, 16) +probs_rec, routing_map_rec = topk_routing_with_score_function( + logits=logits, topk=2, use_pre_softmax=False, score_function="softmax", router_replay=rr, +) +recorded = rr.get_recorded_indices() +torch.save(recorded, "/tmp/replay.pt") + +# Forward replay +rr.clear_router_replay_action() +rr.set_router_replay_action(RouterReplayAction.REPLAY_FORWARD) +target = torch.load("/tmp/replay.pt") +rr.set_target_indices(target) +probs_rep, routing_map_rep = topk_routing_with_score_function( + logits=logits, topk=2, use_pre_softmax=False, score_function="softmax", router_replay=rr, +) + +RouterReplay.clear_global_router_replay_action() +RouterReplay.clear_global_indices() +RouterReplay.clear_global_router_replay_instances() +``` + +### 6. 
Minimal Demo + +Here is a minimal code example showing how to use RouterReplay for recording and replaying: + +```python +import torch +import torch.distributed as dist +from megatron.core.transformer.transformer_config import TransformerConfig +from megatron.core.transformer.moe.router import TopKRouter +from megatron.core.transformer.moe.router_replay import RouterReplay, RouterReplayAction + + +# Initialize distributed training +if not dist.is_initialized(): + dist.init_process_group(backend="nccl") + +# Create a transformer config with RouterReplay enabled +config = TransformerConfig( + num_experts=8, + expert_model_parallel_size=1, + num_top_k=2, + enable_routing_replay=True +) + +# Create a TopKRouter instance +router = TopKRouter(config) + +# Generate sample input (batch_size, sequence_length, hidden_size) +logits = torch.randn(16, 32, 8).to(torch.cuda.current_device()) + +# ----------------- +# 1. Recording Mode +# ----------------- +print("=== Recording Mode ===") +# Set global router replay action to RECORD +RouterReplay.set_global_router_replay_action(RouterReplayAction.RECORD) + +# Perform routing +routing_output = router.forward(logits) +print(f"Recorded top-k indices shape: {routing_output.top_k_idx.shape}") + +# ----------------- +# 2. 
Forward Replay Mode +# ----------------- +print("\n=== Forward Replay Mode ===") +# Save recorded indices to a file +torch.save(routing_output.top_k_idx, "/tmp/replay.pt") + +# Load indices from file and set as target for replay +replay_indices = torch.load("/tmp/replay.pt") +for router_instance in RouterReplay.global_router_replay_instances: + router_instance.target_topk_idx = replay_indices + +# Set global router replay action to REPLAY_FORWARD +RouterReplay.set_global_router_replay_action(RouterReplayAction.REPLAY_FORWARD) + +# Perform routing again - this will use the replayed indices +replay_routing_output = router.forward(logits) +print(f"Replayed top-k indices shape: {replay_routing_output.top_k_idx.shape}") +print(f"Are indices the same? {torch.equal(routing_output.top_k_idx, replay_routing_output.top_k_idx)}") + + +# Clean up +RouterReplay.clear_global_router_replay_action() +RouterReplay.clear_global_indices() +RouterReplay.clear_global_router_replay_instances() +if dist.is_initialized(): + dist.destroy_process_group() +``` diff --git a/examples/rl/environments/countdown/countdown.py b/examples/rl/environments/countdown/countdown.py index c5ad57bb72d..acfabc46681 100644 --- a/examples/rl/environments/countdown/countdown.py +++ b/examples/rl/environments/countdown/countdown.py @@ -1,3 +1,4 @@ +# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
# This file is adapted from code in https://github.com/Jiayi-Pan/TinyZero import re diff --git a/gpt_builders.py b/gpt_builders.py index dfe41f7b88e..0be64edaab6 100644 --- a/gpt_builders.py +++ b/gpt_builders.py @@ -11,6 +11,7 @@ ) from megatron.core.models.gpt.experimental_attention_variant_module_specs import ( get_transformer_block_with_experimental_attention_variant_spec, + get_transformer_layer_with_experimental_attention_variant_spec, ) from megatron.core.models.gpt.heterogeneous.heterogeneous_layer_specs import ( get_gpt_heterogeneous_layer_spec, @@ -71,23 +72,29 @@ def gpt_builder(args, pre_process, post_process, vp_stage=None, config=None, pg_ mtp_block_spec = None if args.mtp_num_layers is not None: assert not (config.transformer_impl == "inference_optimized") - if ( - hasattr(transformer_layer_spec, 'layer_specs') - and len(transformer_layer_spec.layer_specs) == 0 - ): - # Get the decoder layer spec explicitly if no decoder layer in the last stage, - # Only happens with block spec (TransformerBlockSubmodules) when using MoE. - transformer_layer_spec_for_mtp = _get_transformer_layer_spec(use_te, config) + # Get GPT decoder layer specs for the model. 
+ if args.spec is not None: + mtp_transformer_layer_spec = import_module(args.spec) else: # Define the decoder block spec - decoder_layer_specs = get_gpt_decoder_layer_specs( - config, use_transformer_engine=use_te, normalization=args.normalization, qk_l2_norm=args.qk_l2_norm, vp_stage=vp_stage - ) - transformer_layer_spec_for_mtp = decoder_layer_specs[-1] + if args.experimental_attention_variant is not None: + decoder_layer_specs = ( + get_transformer_layer_with_experimental_attention_variant_spec( + config=config + ) + ) + else: + decoder_layer_specs = get_gpt_decoder_layer_specs( + config, + use_transformer_engine=use_te, + normalization=args.normalization, + qk_l2_norm=args.qk_l2_norm, + ) + mtp_transformer_layer_spec = decoder_layer_specs[-1] # Use spec of the last layer in decoder block as spec of the transformer layer in MTP mtp_block_spec = get_gpt_mtp_block_spec( config, - transformer_layer_spec_for_mtp, + mtp_transformer_layer_spec, use_transformer_engine=use_te, vp_stage=vp_stage, ) @@ -138,6 +145,7 @@ def _get_transformer_layer_spec(use_te, config): use_kitchen=config.use_kitchen, use_kitchen_attention=config.use_kitchen_attention, kitchen_attention_backend=config.kitchen_attention_backend, + fallback_to_eager_attn=config.fallback_to_eager_attn, ) elif config.transformer_impl == "inference_optimized": return get_gpt_layer_with_inference_spec( diff --git a/hello_world b/hello_world new file mode 100644 index 00000000000..e69de29bb2d diff --git a/megatron/core/datasets/blended_megatron_dataset_builder.py b/megatron/core/datasets/blended_megatron_dataset_builder.py index f728fe10d03..6cb75ab5104 100644 --- a/megatron/core/datasets/blended_megatron_dataset_builder.py +++ b/megatron/core/datasets/blended_megatron_dataset_builder.py @@ -48,11 +48,13 @@ def __init__( sizes: List[int], is_built_on_rank: Callable, config: BlendedMegatronDatasetConfig, + vp_stage: Optional[int] = None, ): self.cls = cls self.sizes = sizes self.is_built_on_rank = is_built_on_rank 
self.config = config + self.vp_stage = vp_stage log_single_rank( logger, diff --git a/megatron/core/distributed/distributed_data_parallel_config.py b/megatron/core/distributed/distributed_data_parallel_config.py index eaec971c79c..7e2d19e5ce9 100644 --- a/megatron/core/distributed/distributed_data_parallel_config.py +++ b/megatron/core/distributed/distributed_data_parallel_config.py @@ -124,6 +124,14 @@ class DistributedDataParallelConfig: This option will be automatically set to True when nccl_ub=True. """ + fsdp_all_gather_in_start_param_sync: bool = True + """ + If True, use all-gather during the initial Megatron-FSDP parameter + synchronization step. This can increase overlap between the first + parameter all-gather and computation, helping to better hide the + initial communication cost. + """ + outer_dp_sharding_strategy: str = 'no_shard' """ Sharding strategy for outer data parallel group in Hybrid Sharded Data Parallel (HSDP) mode. diff --git a/megatron/core/distributed/fsdp/mcore_fsdp_adapter.py b/megatron/core/distributed/fsdp/mcore_fsdp_adapter.py index d6384e70488..5bf543fdc5c 100644 --- a/megatron/core/distributed/fsdp/mcore_fsdp_adapter.py +++ b/megatron/core/distributed/fsdp/mcore_fsdp_adapter.py @@ -212,6 +212,13 @@ def _init_dist_index(self, pg_collection): hybrid_fsdp_group = parallel_state.get_data_parallel_group( with_context_parallel=True, partial_data_parallel=False ) + expt_dp_group = parallel_state.get_expert_data_parallel_group( + partial_expert_data_parallel=True + ) + hybrid_fsdp_expt_group = parallel_state.get_expert_data_parallel_group( + partial_expert_data_parallel=False + ) + ep_group = parallel_state.get_expert_model_parallel_group() else: dp_cp_group = parallel_state.get_data_parallel_group( with_context_parallel=True, partial_data_parallel=False @@ -227,6 +234,10 @@ def _init_dist_index(self, pg_collection): dp_cp_group = pg_collection.intra_dp_cp outer_fsdp_group = pg_collection.inter_dist_opt hybrid_fsdp_group = 
pg_collection.dp_cp + # This has not been tested yet. + expt_dp_group = getattr(pg_collection, 'intra_expt_dp', None) + hybrid_fsdp_expt_group = getattr(pg_collection, 'expt_dp', None) + ep_group = getattr(pg_collection, 'ep', None) else: dp_cp_group = pg_collection.dp_cp outer_fsdp_group = None @@ -243,6 +254,18 @@ def _init_dist_index(self, pg_collection): expt_tp_group = single_rank_group if enable_hsdp: + if expt_dp_group is not None: + expt_mesh = _get_hsdp_tp_mesh( + outer_fsdp_group, expt_dp_group, expt_tp_group, ep_size=ep_group.size() + ) + expt_device_mesh = DeviceMesh.from_group( + [outer_fsdp_group, expt_dp_group, expt_tp_group], + device_type="cuda", + mesh=expt_mesh.tolist(), + mesh_dim_names=["outer_fsdp_dp", "dp_cp", "tp"], + ) + else: + expt_device_mesh = None mesh = _get_hsdp_tp_mesh(outer_fsdp_group, dp_cp_group, tp_group) dist_index = FSDPDistributedIndex( hsdp_outer_dp_shard=self.ddp_config.outer_dp_sharding_strategy != "no_shard", @@ -256,6 +279,8 @@ def _init_dist_index(self, pg_collection): dp_shard_dim="dp_cp", tp_dim="tp", hybrid_fsdp_group=hybrid_fsdp_group, + hybrid_fsdp_expt_group=hybrid_fsdp_expt_group, + expt_device_mesh=expt_device_mesh, ) else: if ep_group is not None: @@ -308,22 +333,24 @@ def sync_rng_states_across_tp_group(self): _load_rng_state_dict(broadcast_list[0]) -def _get_hsdp_tp_mesh(outer_fsdp_dp_group, dp_cp_group, tp_group): +def _get_hsdp_tp_mesh(outer_fsdp_dp_group, dp_cp_group, tp_group, ep_size=1): assert HAVE_EINOPS, "einops is not installed. Please install it with `pip install einops`." 
world_size = dist.get_world_size() mesh = einops.rearrange( torch.arange(world_size), - "(outer_fsdp_dp fsdp tp) -> outer_fsdp_dp fsdp tp", + "(outer_fsdp_dp fsdp ep tp) -> ep outer_fsdp_dp fsdp tp", outer_fsdp_dp=outer_fsdp_dp_group.size(), tp=tp_group.size(), + ep=ep_size, ) mesh_fsdp_ranks = einops.rearrange( mesh, - 'outer_fsdp_dp fsdp tp -> (outer_fsdp_dp tp) fsdp', + 'ep outer_fsdp_dp fsdp tp -> (outer_fsdp_dp ep tp) fsdp', tp=tp_group.size(), fsdp=dp_cp_group.size(), + ep=ep_size, ) fsdp_group_ranks = dist.get_process_group_ranks(dp_cp_group) assert _check_mesh_ranks_and_group_ranks_are_consistent(mesh_fsdp_ranks, fsdp_group_ranks), ( @@ -333,7 +360,7 @@ def _get_hsdp_tp_mesh(outer_fsdp_dp_group, dp_cp_group, tp_group): mesh_tp_ranks = einops.rearrange( mesh, - 'outer_fsdp_dp fsdp tp -> (outer_fsdp_dp fsdp) tp', + 'ep outer_fsdp_dp fsdp tp -> (outer_fsdp_dp fsdp ep) tp', tp=tp_group.size(), fsdp=dp_cp_group.size(), ) @@ -345,9 +372,10 @@ def _get_hsdp_tp_mesh(outer_fsdp_dp_group, dp_cp_group, tp_group): mesh_outer_fsdp_dp_ranks = einops.rearrange( mesh, - 'outer_fsdp_dp fsdp tp -> (fsdp tp) outer_fsdp_dp', + 'ep outer_fsdp_dp fsdp tp -> (fsdp ep tp) outer_fsdp_dp', tp=tp_group.size(), fsdp=dp_cp_group.size(), + ep=ep_size, ) outer_fsdp_dp_group_ranks = dist.get_process_group_ranks(outer_fsdp_dp_group) assert _check_mesh_ranks_and_group_ranks_are_consistent( @@ -357,7 +385,21 @@ def _get_hsdp_tp_mesh(outer_fsdp_dp_group, dp_cp_group, tp_group): f"do not match the ranks in the Outer FSDP DP group {outer_fsdp_dp_group_ranks}." ) - return mesh + # Exclude the expert parallel dimension + rank = dist.get_rank() + dp_tp_meshes = [per_ep_mesh for per_ep_mesh in mesh if rank in per_ep_mesh.reshape(-1).tolist()] + assert ( + len(dp_tp_meshes) == 1 + ), f"[Megatron-FSDP] Current rank {rank} is not unique in the mesh ranks {mesh.tolist()}." 
+ assert ( + len(dp_tp_meshes[0].reshape(-1).tolist()) + == outer_fsdp_dp_group.size() * dp_cp_group.size() * tp_group.size() + ), ( + f"[Megatron-FSDP] DP-TP mesh size {len(dp_tp_meshes[0].reshape(-1).tolist())} " + f"does not match the expected size" + f"{outer_fsdp_dp_group.size() * dp_cp_group.size() * tp_group.size()}." + ) + return dp_tp_meshes[0] def _get_dp_tp_mesh(dp_cp_group, tp_group, ep_size=1): diff --git a/megatron/core/distributed/fsdp/src/README.md b/megatron/core/distributed/fsdp/src/README.md index bc4cdaa078e..75cb7c45613 100644 --- a/megatron/core/distributed/fsdp/src/README.md +++ b/megatron/core/distributed/fsdp/src/README.md @@ -156,12 +156,13 @@ device_mesh[("dp_outer", "dp_shard", "cp")]._flatten("hsdp") hsdp_group = device_mesh["hsdp"].get_group() # Initialize DeviceMesh for expert parallel (EP) modules when using FSDP + EP. -expt_device_mesh = DeviceMesh.from_group( - [expt_dp_group, expt_tp_group], - device_type="cuda", - mesh=expt_mesh.tolist(), - mesh_dim_names=["dp_shard_cp", "tp"], +expert_device_mesh = torch.distributed.device_mesh.init_device_mesh( + "cuda", + mesh_shape=(dp_outer_size, expt_dp_shard_size, expt_tp_size), + mesh_dim_names=("dp_outer", "dp_shard_cp", "tp"), ) +expert_device_mesh[("dp_outer", "dp_shard_cp")].flatten("hsdp") +hsdp_expt_group = expert_device_mesh["hsdp"].get_group() ``` ### Convert models into fully-sharded `MegatronFSDP` models with `fully_shard_model`. @@ -186,6 +187,8 @@ model = fully_shard_model( tp_dim="tp", # Only required when using HSDP. Otherwise, set this to None. hybrid_fsdp_group=hsdp_group, + # Only required when using HSDP + EP. Otherwise, set this to None. + hybrid_fsdp_expt_group=hsdp_expt_group, # Only required for FSDP + EP. Otherwise, set this to None. 
expt_device_mesh=expt_device_mesh, # FSDP Sharding Strategy: no_shard (0) / optim (1) / optim_grads (2) / optim_grads_params (3) @@ -295,6 +298,7 @@ Megatron-FSDP's `fully_shard_*` API has a comprehensive set of arguments for fin - `tp_dim` is the name of the sub-mesh used for tensor parallelism (TP), which is required for `(FSDP, TP)`-strided sharding when using Megatron-LM or Torch-native `DTensor` TP. - For more information about tensor parallelism, refer to: [Megatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism](https://arxiv.org/abs/1909.08053). - `hybrid_fsdp_group` is the `ProcessGroup` which contains all ranks in the flattened `dp_shard_dim` and `dp_outer_dim` sub-meshes utilized to specify the `(DP-Outer, DP-Shard)` sharded mesh coordinates for the weight and gradient buffers. Required for HSDP. + - `hybrid_fsdp_expt_group` defines the data-parallel communication group for expert parameters. It is required only when combining HSDP with expert parallelism (EP); otherwise set it to `None`. - `expt_device_mesh` is another [`torch.distributed.DeviceMesh`](https://docs.pytorch.org/docs/stable/distributed.html#devicemesh) tailored for the expert parallel (EP) modules in `MegatronFSDP`. - `dp_shard_dim` is the name of the sub-mesh required for FSDP sharding of the EP modules, enabling expert data parallelism (EDP). - `tp_dim` is the name of the sub-mesh used for expert tensor parallelism (ETP), which is required for `(FSDP, ETP)`-strided sharding when using Megatron-LM or Torch-native `DTensor` ETP.
diff --git a/megatron/core/distributed/fsdp/src/megatron_fsdp/distributed_data_parallel_config.py b/megatron/core/distributed/fsdp/src/megatron_fsdp/distributed_data_parallel_config.py index f0c817e1f80..32c0ffde2ad 100644 --- a/megatron/core/distributed/fsdp/src/megatron_fsdp/distributed_data_parallel_config.py +++ b/megatron/core/distributed/fsdp/src/megatron_fsdp/distributed_data_parallel_config.py @@ -119,6 +119,14 @@ class DistributedDataParallelConfig: This option will be automatically set to True when nccl_ub=True. """ + fsdp_all_gather_in_start_param_sync: bool = True + """ + If True, use all-gather during the initial Megatron-FSDP parameter + synchronization step. This can increase overlap between the first + parameter all-gather and computation, helping to better hide the + initial communication cost. + """ + outer_dp_sharding_strategy: str = 'no_shard' """ Sharding strategy for outer data parallel group in Hybrid Sharded Data Parallel (HSDP) mode. diff --git a/megatron/core/distributed/fsdp/src/megatron_fsdp/fully_shard.py b/megatron/core/distributed/fsdp/src/megatron_fsdp/fully_shard.py index df210f15f05..7a118a8424b 100644 --- a/megatron/core/distributed/fsdp/src/megatron_fsdp/fully_shard.py +++ b/megatron/core/distributed/fsdp/src/megatron_fsdp/fully_shard.py @@ -77,6 +77,7 @@ def fully_shard_model( dp_outer_dim: Optional[str] = None, tp_dim: Optional[str] = None, hybrid_fsdp_group: Optional[torch.distributed.ProcessGroup] = None, + hybrid_fsdp_expt_group: Optional[torch.distributed.ProcessGroup] = None, expt_device_mesh: Optional[DeviceMesh] = None, fsdp_unit_modules: Optional[Sequence[Type[torch.nn.Module]] | Sequence[str]] = None, zero_dp_strategy: str | int = 3, @@ -352,6 +353,8 @@ class that schedules the sharding lifecycle of the model parameters and gradient tp_dim=tp_dim, # Only required for HSDP. hybrid_fsdp_group=hybrid_fsdp_group, + # Only required for HSDP + EP. 
+ hybrid_fsdp_expt_group=hybrid_fsdp_expt_group, # Access to flattened DP rank assignments for HSDP. hsdp_outer_dp_shard=_outer_fsdp_sharding, # Only required for Megatron-FSDP + EP. @@ -521,6 +524,7 @@ def fully_shard( dp_outer_dim: Optional[str] = None, tp_dim: Optional[str] = None, hybrid_fsdp_group: Optional[torch.distributed.ProcessGroup] = None, + hybrid_fsdp_expt_group: Optional[torch.distributed.ProcessGroup] = None, expt_device_mesh: Optional[DeviceMesh] = None, fsdp_unit_modules: Optional[Sequence[Type[torch.nn.Module]] | Sequence[str]] = None, zero_dp_strategy: str | int = 3, @@ -568,6 +572,7 @@ def fully_shard( dp_outer_dim=dp_outer_dim, tp_dim=tp_dim, hybrid_fsdp_group=hybrid_fsdp_group, + hybrid_fsdp_expt_group=hybrid_fsdp_expt_group, expt_device_mesh=expt_device_mesh, fsdp_unit_modules=fsdp_unit_modules, zero_dp_strategy=zero_dp_strategy, diff --git a/megatron/core/distributed/fsdp/src/megatron_fsdp/megatron_fsdp.py b/megatron/core/distributed/fsdp/src/megatron_fsdp/megatron_fsdp.py index bd13e76379e..671487a30eb 100644 --- a/megatron/core/distributed/fsdp/src/megatron_fsdp/megatron_fsdp.py +++ b/megatron/core/distributed/fsdp/src/megatron_fsdp/megatron_fsdp.py @@ -1000,9 +1000,11 @@ def _register_pre_backward_param_unshard_hook(module): with_kwargs=True, ) ) - grad_acc_param_list = list(module.parameters()) + grad_acc_param_list = [p for p in module.parameters() if p.requires_grad] else: - grad_acc_param_list = list(module.parameters(recurse=False)) + grad_acc_param_list = [ + p for p in module.parameters(recurse=False) if p.requires_grad + ] for param in grad_acc_param_list: self.grad_acc_hooks[f"grad_acc and reduce for {self.param_to_name[param]}"] = ( @@ -1116,10 +1118,11 @@ def start_param_sync(self, *unused, force_sync: bool = False, force_dispatch: bo if not force_sync and self.ddp_config.overlap_param_gather: # All-gather the first bucket before the forward pass. 
- first_param = list(self.module.parameters())[0] - self.all_gather_and_wait_parameters_ready( - params=[first_param], prefetch=True, wait_bucket_ready=False - ) + if self.ddp_config.fsdp_all_gather_in_start_param_sync: + first_param = list(self.module.parameters())[0] + self.all_gather_and_wait_parameters_ready( + params=[first_param], prefetch=True, wait_bucket_ready=False + ) else: self.synchronize_param_gather() for bucket_id in range(self.all_gather_pipeline.num_buckets): diff --git a/megatron/core/distributed/fsdp/src/megatron_fsdp/mixed_precision.py b/megatron/core/distributed/fsdp/src/megatron_fsdp/mixed_precision.py index 177e3b1caa2..d2797d98079 100644 --- a/megatron/core/distributed/fsdp/src/megatron_fsdp/mixed_precision.py +++ b/megatron/core/distributed/fsdp/src/megatron_fsdp/mixed_precision.py @@ -57,7 +57,6 @@ QUANTIZED_MODEL_INIT_CLASS = fp8_model_init else: QUANTIZED_MODEL_INIT_CLASS = nullcontext - # Detect the FP8 tensor class try: from transformer_engine.pytorch.tensor import QuantizedTensor diff --git a/megatron/core/distributed/fsdp/src/megatron_fsdp/param_and_grad_buffer.py b/megatron/core/distributed/fsdp/src/megatron_fsdp/param_and_grad_buffer.py index 3ec117ebd9e..aabdd010ed9 100644 --- a/megatron/core/distributed/fsdp/src/megatron_fsdp/param_and_grad_buffer.py +++ b/megatron/core/distributed/fsdp/src/megatron_fsdp/param_and_grad_buffer.py @@ -1914,7 +1914,9 @@ def _init_each_parameter_group_buffers(self, meta_device_init_fp8_params): hsdp_buf_dp_group = self.dist_index.get_fsdp_group( is_expert_parallel=group.is_expert_param ) - main_buf_extra_kwargs["dp_rank"] = self.dist_index.get_logical_hybrid_fsdp_rank() + main_buf_extra_kwargs["dp_rank"] = self.dist_index.get_logical_hybrid_fsdp_rank( + is_expert_parallel=group.is_expert_param + ) else: main_buf_dp_group = self.dist_index.get_fsdp_group( is_expert_parallel=group.is_expert_param @@ -2431,6 +2433,8 @@ def _reset_parameters(self, old_params, new_params): 
self.param_to_direct_module[new_param] = self.param_to_direct_module[old_param] del self.param_to_direct_module[old_param] + new_param.requires_grad_(old_param.requires_grad) + for tp_attr in ["_mcore_tp", "_tp_partition_dim", "_tp_duplicated"]: if getattr(old_param, tp_attr, None) is not None: setattr(new_param, tp_attr, getattr(old_param, tp_attr)) @@ -2794,7 +2798,6 @@ def _batch_quantize_blockwise_fp8_params( if is_blockwise_float8tensor(param): fp8_params.append(param) if model_param.numel() == 0: - # Empty parameter. shard_fp32_from_fp8.append(None) shard_offsets_in_fp8.append(None) shard_model_params.append([None, None]) diff --git a/megatron/core/distributed/fsdp/src/megatron_fsdp/utils.py b/megatron/core/distributed/fsdp/src/megatron_fsdp/utils.py index d5fbc91fcf8..ad29956e1b0 100644 --- a/megatron/core/distributed/fsdp/src/megatron_fsdp/utils.py +++ b/megatron/core/distributed/fsdp/src/megatron_fsdp/utils.py @@ -450,6 +450,7 @@ def __init__( dp_outer_dim: Optional[str] = None, tp_dim: Optional[str] = None, hybrid_fsdp_group: Optional[torch.distributed.ProcessGroup] = None, + hybrid_fsdp_expt_group: Optional[torch.distributed.ProcessGroup] = None, hsdp_outer_dp_shard: bool = False, expt_device_mesh: Optional[DeviceMesh] = None, ): @@ -464,6 +465,9 @@ def __init__( hybrid_fsdp_group (Optional[torch.distributed.ProcessGroup]): The process group for hybrid FSDP communication, which is the flattened combination of the dp_outer and dp_shard process groups. + hybrid_fsdp_expt_group (Optional[torch.distributed.ProcessGroup]): The + process group for hybrid FSDP expert communication, which is the flattened + combination of the expert dp_outer and expert dp_shard process groups. hsdp_outer_dp_shard (bool): Whether to have outer DP group sharding in hybrid FSDP. 
Specifying outer sharding will lift the bucket sharding coordinate system to flattened ranks of (dp_shard, dp_outer) instead of @@ -509,6 +513,7 @@ def __init__( # Save a reference to the overall HSDP process group, which is the flattened # combination of the outer-FSDP and FSDP process groups. self.hybrid_fsdp_group = hybrid_fsdp_group + self.hybrid_fsdp_expt_group = hybrid_fsdp_expt_group # Retrieve the expert parallel process groups from the DeviceMesh. self.expt_fsdp_group = ( @@ -518,6 +523,13 @@ def __init__( else None ) + self.expt_outer_fsdp_group = ( + self.expt_device_mesh[self.dp_outer_dim].get_group() + if self.expt_device_mesh is not None + and contains_submesh(self.expt_device_mesh, self.dp_outer_dim) + else None + ) + """ Megatron-FSDP is responsible for storing all required DeviceMesh as per best practices recommended by the DeviceMesh API. @@ -558,6 +570,8 @@ def register_submesh(device_mesh, submesh, is_expert_parallel): register_submesh(self.expt_device_mesh, tp_submesh, True) register_submesh(self.expt_device_mesh, fsdp_tp_submesh, True) register_submesh(self.expt_device_mesh, fsdp_submesh, True) + register_submesh(self.expt_device_mesh, hsdp_submesh, True) + register_submesh(self.expt_device_mesh, hsdp_tp_submesh, True) # Validate FSDP arguments. 
if self.fsdp_group is None: @@ -629,6 +643,8 @@ def get_submesh( def get_dp_group(self, is_expert_parallel: bool = False) -> ProcessGroup: """Get the data parallel process group.""" if is_expert_parallel: + if self.use_hybrid_fsdp: + return self.hybrid_fsdp_expt_group return self.expt_fsdp_group if self.use_hybrid_fsdp: return self.hybrid_fsdp_group @@ -644,10 +660,12 @@ def get_fsdp_group( return self.fsdp_group_ag return self.fsdp_group - def get_outer_fsdp_group(self) -> ProcessGroup: + def get_outer_fsdp_group(self, is_expert_parallel: bool = False) -> ProcessGroup: """Get the outer-FSDP process group.""" if not self.use_hybrid_fsdp: return None + if is_expert_parallel: + return self.expt_outer_fsdp_group return self.outer_fsdp_group def get_root_mesh(self, is_expert_parallel: bool = False) -> DeviceMesh: @@ -659,7 +677,7 @@ def get_root_mesh(self, is_expert_parallel: bool = False) -> DeviceMesh: return self.expt_device_mesh return self.device_mesh - def get_logical_hybrid_fsdp_rank(self): + def get_logical_hybrid_fsdp_rank(self, is_expert_parallel: bool = False): """ Returns the logical rank of the current process within the full-shard hybrid FSDP group. @@ -679,20 +697,28 @@ def get_logical_hybrid_fsdp_rank(self): self.hsdp_outer_dp_shard ), "get_logical_hybrid_fsdp_rank is only valid when full-shard hybrid FSDP is enabled." 
- if not hasattr(self, "_hybrid_fsdp_group_ranks"): - dp_world_size = self.get_dp_group().size() + _hybrid_fsdp_group_name = ( + "_hybrid_fsdp_group_ranks" + if not is_expert_parallel + else "_hybrid_fsdp_expt_group_ranks" + ) + + if not hasattr(self, _hybrid_fsdp_group_name): + dp_world_size = self.get_dp_group(is_expert_parallel).size() # Reorder the flat ranks: (outer_dp, inner_dp) -> (inner_dp, outer_dp) mesh = einops.rearrange( torch.arange(dp_world_size), "(outer_dp inner_dp) -> (inner_dp outer_dp)", - outer_dp=self.outer_fsdp_group.size(), - inner_dp=self.fsdp_group.size(), + outer_dp=self.get_outer_fsdp_group(is_expert_parallel).size(), + inner_dp=self.get_fsdp_group(is_expert_parallel).size(), ) - self._hybrid_fsdp_group_ranks = mesh.tolist() + setattr(self, _hybrid_fsdp_group_name, mesh.tolist()) # Find the index for the current rank in the hybrid group - return self._hybrid_fsdp_group_ranks.index(self.hybrid_fsdp_group.rank()) + return getattr(self, _hybrid_fsdp_group_name).index( + self.get_dp_group(is_expert_parallel).rank() + ) class GlobalMemoryBuffer: diff --git a/megatron/core/distributed/param_and_grad_buffer.py b/megatron/core/distributed/param_and_grad_buffer.py index b9480533d7a..088374fbf13 100644 --- a/megatron/core/distributed/param_and_grad_buffer.py +++ b/megatron/core/distributed/param_and_grad_buffer.py @@ -79,6 +79,8 @@ class _ParamAndGradBucket: communication. Its application is twofold: it facilitates the averaging of gradients and the scaling of gradients in the context of the Mixture of Experts (MoE) model. bucket_id: Index of bucket in buffer. + param_index_map: Mapping from param to (start, end, bucket_id) in the global buffer. + Used to derive bucket-local offsets for param_to_index. 
""" def __init__( @@ -90,6 +92,7 @@ def __init__( numel_unpadded: int, gradient_scaling_factor: float, bucket_id: int, + param_index_map: Dict[torch.nn.Parameter, tuple], ): self.params_list = params self.params = set(params) @@ -103,11 +106,11 @@ def __init__( self.numel_unpadded = numel_unpadded self.gradient_scaling_factor = gradient_scaling_factor self.bucket_id = bucket_id + # Derive bucket-local param offsets from the global param_index_map. self.param_to_index = {} - offset = 0 for param in params: - self.param_to_index[param] = (offset, offset + param.numel()) - offset += param.numel() + global_start, global_end, _ = param_index_map[param] + self.param_to_index[param] = (global_start - offset, global_end - offset) class _ParamAndGradBucketGroup: @@ -762,6 +765,12 @@ def _does_param_require_new_bucket(param): group=self.data_parallel_group, symmetric=not self.ddp_config.disable_symmetric_registration, ) + # Since nccl communicator group is created lazily, we need to perform a warmup call to + # initialize NCCL comm buffers for this dp_group before doing buffer registration. + torch.distributed.barrier() + tmp_warmup_tensor = torch.zeros([1], device="cuda") + torch.distributed.all_reduce(tmp_warmup_tensor, group=self.data_parallel_group) + torch.distributed.barrier() else: # If nccl_ub is False, mem_alloc_context is nullcontext. 
mem_alloc_context = nullcontext @@ -942,6 +951,7 @@ def _new_bucket( numel_unpadded=numel_unpadded, gradient_scaling_factor=self.gradient_scaling_factor, bucket_id=bucket_id, + param_index_map=self.param_index_map, ) for bucket_param in bucket_params: assert bucket_param not in self.param_to_bucket diff --git a/megatron/core/extensions/transformer_engine_spec_provider.py b/megatron/core/extensions/transformer_engine_spec_provider.py index a071959bfc9..6f8947078b9 100644 --- a/megatron/core/extensions/transformer_engine_spec_provider.py +++ b/megatron/core/extensions/transformer_engine_spec_provider.py @@ -17,6 +17,7 @@ from megatron.core.fusions.fused_layer_norm import FusedLayerNorm from megatron.core.models.backends import BackendSpecProvider from megatron.core.tensor_parallel.layers import ColumnParallelLinear, RowParallelLinear +from megatron.core.transformer.dot_product_attention import DotProductAttention from megatron.core.transformer.mlp import MLPSubmodules from megatron.core.transformer.moe.experts import GroupedMLP, SequentialMLP, TEGroupedMLP from megatron.core.utils import get_te_version, is_te_min_version @@ -25,6 +26,10 @@ class TESpecProvider(BackendSpecProvider): """A protocol for providing the submodules used in Spec building.""" + def __init__(self, fallback_to_eager_attn: bool = False): + super().__init__() + self.fallback_to_eager_attn = fallback_to_eager_attn + def linear(self) -> type: """Which linear module TE backend uses""" return TELinear @@ -56,6 +61,8 @@ def layer_norm(self, rms_norm: bool = False, for_qk: bool = False) -> type: def core_attention(self) -> type: """Which module to use for attention""" + if self.fallback_to_eager_attn: + return DotProductAttention return TEDotProductAttention def grouped_mlp_modules( diff --git a/megatron/core/fp4_utils.py b/megatron/core/fp4_utils.py index 42e7f875826..95368d7c2b7 100644 --- a/megatron/core/fp4_utils.py +++ b/megatron/core/fp4_utils.py @@ -96,7 +96,9 @@ def get_fp4_recipe(config: 
TransformerConfig): if is_te_min_version("2.7.0.dev0"): if config.fp4_recipe == Fp4Recipe.nvfp4: try: - fp4_recipe = transformer_engine.common.recipe.NVFP4BlockScaling() + fp4_recipe = transformer_engine.common.recipe.NVFP4BlockScaling( + fp8_dpa=config.fp8_dot_product_attention + ) except AttributeError: raise ValueError( """NVFP4BlockScaling recipe is not available in this version of diff --git a/megatron/core/fusions/fused_linear_cross_entropy.py b/megatron/core/fusions/fused_linear_cross_entropy.py new file mode 100644 index 00000000000..b533fef7aa3 --- /dev/null +++ b/megatron/core/fusions/fused_linear_cross_entropy.py @@ -0,0 +1,242 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. + +""" +Linear Cross Entropy API +Fuse cross entropy with linear layer. +""" + +import typing +from functools import lru_cache + +import torch + + +class Platform: + """ + Singleton class for targeted GPU platform. + """ + + _instance: typing.Optional["Platform"] = None + + def __new__(cls) -> "Platform": + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__(self) -> None: + if getattr(self, "_initialized", False): + return + + assert torch.cuda.is_available(), "CUDA is not available" + device = torch.cuda.current_device() + cc = torch.cuda.get_device_capability(device) + + if cc[0] == 10: + from .linear_cross_entropy.blackwell import entry as gpu_entry + + self.forward_func: typing.Callable[..., typing.Any] = gpu_entry.forward + self.backward_func: typing.Callable[..., typing.Any] = gpu_entry.backward + else: + raise ValueError(f"Unsupported architecture: {cc[0]}") + + self._initialized = True + + +@lru_cache(maxsize=1) +def _get_platform() -> Platform: + """ + Helper function to lazy initialize the platform. 
+ """ + return Platform() + + +class LinearCrossEntropy(torch.autograd.Function): + """ + This class implements a custom autograd function for linear and cross entropy, + whose equivalent logic in PyTorch is: + ```python + def torch_entropy(hidden, weight, labels): + logits = torch.matmul(hidden, weight) + logprobs = torch.nn.functional.cross_entropy(logits, labels) + return logprobs + ``` + """ + + @staticmethod + def forward( + ctx, + hidden: torch.Tensor, + weight: torch.Tensor, + labels: torch.Tensor, + tp_group: typing.Optional[torch.distributed.ProcessGroup] = None, + reduction: typing.Literal["none", "sum", "mean"] = "mean", + ignore_index: int = -100, + sequence_parallel: bool = False, + ) -> torch.Tensor: + """ + The forward pass of the Linear Cross Entropy. + If tp_group is not None, the weight tensor to each TP rank should be + (global_vocab_size // world_size, dim). + Note that each of the ranks should get equal shards along the vocab_size dimension. + + Args: + @param hidden: the input tensor with shape (num_tokens, dim) + @param weight: the lm_head weight tensor with shape (local_vocab_size, dim) + @param labels: the labels tensor with shape (num_tokens,) + @param tp_group: the distributed process group for TP. + @param reduction: Default to "mean", and can be one of "none", "sum", "mean". + @param ignore_index: The index to ignore. Default to -100. + @param sequence_parallel: Whether to use sequence parallel. Default to False. 
+ Returns: + @return: logprobs with shape + - either (num_tokens,) when reduction is "none" + - or (1,) when reduction is "mean" or "sum" + + tp_group is None ----------------------------------> DP + B + A C + tp_group is not None & sequence_parallel is False -> TP + B0 B1 + A C0 C1 + tp_group is not None & sequence_parallel is True --> SP + B0 B1 + A0 C0 XX + A1 XX C1 + + When tp_group is not None, the weight tensor will be split along the vocab_size + dimension, which means each rank will get equal shards along the global_vocab_size + dimension. Specifically, the weight tensor to each rank will be (local_vocab_size, dim). + And there is an assumption that each rank will get the same local_vocab_size. + + When sequence_parallel is True, the hidden tensor will be split along the + sequence length dimension, which means each rank will get equal shards along + the sequence length dimension. Specifically, the hidden tensor to each rank + will be (local_num_tokens, dim). And there is an assumption that each rank + will get the same local_num_tokens. + + In TP forward pass, the hidden tensor and label tensor shall be identical + among all TP ranks, and it's user's responsibility to ensure the hidden tensor + is identical among all TP ranks. Then this operation will produce identical + logprobs among all TP ranks. + + In TP backward pass, the gradient of the logprobs shall be identical among all + TP ranks, and it's user's responsibility to ensure the gradient of the logprobs + is identical among all TP ranks. Then this operation will produce distinct gradients + for the local weight tensor, and identical gradients for the hidden tensor. + + ```python + # ------------ forward pass ------------ # + hidden = tp_group.broadcast(hidden, src=0) # handled by framework + labels = tp_group.broadcast(labels, src=0) # handled by framework + logprobs = linear_cross_entropy(...) 
+ # each rank will get the same logprobs + + # ------------ backward pass ------------ # + g_logprobs = tp_group.broadcast(g_logprobs, src=0) # handled by framework + d_hidden, d_weight = torch.autograd.grad(...) + # each rank will get the same d_hidden, + # and distinct d_weight for local weight shard + ``` + + In SP forward pass, the hidden tensor shall be split along the sequence length dimension, + and the label tensor shall be identical among all TP ranks. + Then this operation will produce identical logprobs among all TP ranks. + + In SP backward pass, the gradient of the logprobs shall be identical among all TP ranks, + Then this operation will produce distinct gradients for the local hidden tensor + and local weight tensor. + ```python + # ------------ forward pass ------------ # + hidden = global_hidden[tp_rank] # handled by framework + labels = tp_group.broadcast(labels, src=0) # handled by framework + logprobs = linear_cross_entropy(...) + # each rank will get the same logprobs + + # ------------ backward pass ------------ # + g_logprobs = tp_group.broadcast(g_logprobs, src=0) # handled by framework + d_hidden, d_weight = torch.autograd.grad(...) 
+ # each rank will get distinct local d_hidden and d_weight + ``` + """ + with torch.cuda.nvtx.range("LinearCrossEntropy-forward"): + ( + logprobs, + _maximum, + _acc, + _num_valid_tokens, + tp_rank, + tp_world_size, + global_hidden, + ) = _get_platform().forward_func( + hidden, weight, labels, tp_group, reduction, ignore_index, sequence_parallel + ) + ctx.save_for_backward(global_hidden, weight, labels, _maximum, _acc, _num_valid_tokens) + ctx.tp_group = tp_group + ctx.ignore_index = ignore_index + ctx.reduction = reduction + ctx.tp_rank = tp_rank + ctx.tp_world_size = tp_world_size + ctx.sequence_parallel = sequence_parallel + + return logprobs + + @staticmethod + def backward( + ctx, dlogprobs: torch.Tensor + ) -> typing.Tuple[torch.Tensor, torch.Tensor, None, None, None, None, None]: + """ + The backward pass of the Linear Cross Entropy. + Args: + dlogprobs (torch.Tensor): The gradient of the cross entropy, with shape + - either (num_tokens,) when reduction is "none" + - or (1,) when reduction is "mean" or "sum" + Returns: + dhidden (torch.Tensor): The gradient of the hidden. + dweight (torch.Tensor): The gradient of the weight. 
+ """ + with torch.cuda.nvtx.range("LinearCrossEntropy-backward"): + (global_hidden, weight, labels, _maximum, _accu, _num_valid_tokens) = ctx.saved_tensors + + tp_group = ctx.tp_group + ignore_index = ctx.ignore_index + reduction = ctx.reduction + tp_rank = ctx.tp_rank + tp_world_size = ctx.tp_world_size + sequence_parallel = ctx.sequence_parallel + + d_hidden, d_weight = _get_platform().backward_func( + dlogprobs, + global_hidden, + weight, + labels, + _maximum, + _accu, + _num_valid_tokens, + reduction, + ignore_index, + tp_group, + tp_rank, + tp_world_size, + sequence_parallel, + ) + + return d_hidden, d_weight, None, None, None, None, None + + +def linear_cross_entropy( + hidden: torch.Tensor, + weight: torch.Tensor, + labels: torch.Tensor, + tp_group: typing.Optional[torch.distributed.ProcessGroup] = None, + reduction: typing.Literal["none", "sum", "mean"] = "mean", + ignore_index: int = -100, + sequence_parallel: bool = False, +) -> torch.Tensor: + """ + helper function for linear cross entropy. + """ + _impl = LinearCrossEntropy.apply + return _impl(hidden, weight, labels, tp_group, reduction, ignore_index, sequence_parallel) + + +__all__ = ["linear_cross_entropy", "LinearCrossEntropy"] diff --git a/megatron/core/fusions/linear_cross_entropy/__init__.py b/megatron/core/fusions/linear_cross_entropy/__init__.py new file mode 100644 index 00000000000..b9a9591fa69 --- /dev/null +++ b/megatron/core/fusions/linear_cross_entropy/__init__.py @@ -0,0 +1 @@ +# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. diff --git a/megatron/core/fusions/linear_cross_entropy/blackwell/__init__.py b/megatron/core/fusions/linear_cross_entropy/blackwell/__init__.py new file mode 100644 index 00000000000..b9a9591fa69 --- /dev/null +++ b/megatron/core/fusions/linear_cross_entropy/blackwell/__init__.py @@ -0,0 +1 @@ +# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
diff --git a/megatron/core/fusions/linear_cross_entropy/blackwell/bwd_partial_dlogits.py b/megatron/core/fusions/linear_cross_entropy/blackwell/bwd_partial_dlogits.py new file mode 100644 index 00000000000..3178e8c6909 --- /dev/null +++ b/megatron/core/fusions/linear_cross_entropy/blackwell/bwd_partial_dlogits.py @@ -0,0 +1,667 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. + +import logging +from typing import Optional, Tuple, Type + +try: + import cuda.bindings.driver as cuda # type: ignore + import cutlass + import cutlass.cute as cute + import cutlass.pipeline as pipeline # type: ignore + import cutlass.utils as utils # type: ignore + import cutlass.utils.blackwell_helpers as sm100_utils # type: ignore + from cutlass.cute.nvgpu import cpasync, tcgen05 + + SM100_TMEM_CAPACITY_COLUMNS: int = 512 + + def make_thread_cooperative_group(size: int, alignment: Optional[int] = None): + """ + Create a thread cooperative group. + """ + return pipeline.CooperativeGroup( + pipeline.Agent.Thread, size, alignment=alignment if alignment is not None else size + ) + + class BwdPartialDlogits: + """ + This class implements the backward kernel for partial d_logits. 
+ """ + + def __init__( + self, + reduction: int, + acc_dtype: Type[cutlass.Numeric] = cutlass.Float32, + use_2cta_instrs: bool = False, + mma_tiler_mn: Tuple[int, int] = (128, 256), + vocab_per_split: int = 512, + ): + self.REDUCTION: cutlass.Constexpr[cutlass.Int32] = cutlass.const_expr(reduction) + self.acc_dtype = acc_dtype + self.use_2cta_instrs = use_2cta_instrs + self.mma_tiler = (*mma_tiler_mn, 1) + self.vocab_per_split = vocab_per_split + + self.cta_group = tcgen05.CtaGroup.TWO if self.use_2cta_instrs else tcgen05.CtaGroup.ONE + self.cluster_shape_mn = (2, 1) if self.use_2cta_instrs else (1, 1) + + self.smem_capacity = utils.get_smem_capacity_in_bytes("sm_100") + + self.threads_per_warp: int = 32 + + self.epi_warp_ids = (0, 1, 2, 3) + self.load_warp_ids = 4 + self.mma_warp_ids = 5 + self.empty_warp_ids = (6, 7) + + self.threads_per_cta: int = self.threads_per_warp * len( + (*self.epi_warp_ids, self.load_warp_ids, self.mma_warp_ids, *self.empty_warp_ids) + ) + self.cta_sync_barrier = pipeline.NamedBarrier( + barrier_id=1, num_threads=self.threads_per_cta + ) + + self.buffer_align_bytes: int = 1024 + self.num_regs_other: int = 32 + self.num_regs_epi: int = 192 + + def _compute_grid( + self, + problem_mnk: Tuple[int, int, int], + cluster_shape_mn: Tuple[int, int], + cta_tiler: Tuple[int, int, int], + ) -> Tuple[int, int, int]: + cluster_shape_mnk = (*cluster_shape_mn, 1) + + grid = cute.round_up( + ( + cute.ceil_div(problem_mnk[0], cta_tiler[0]), + cute.ceil_div(self.vocab_per_split, cta_tiler[1]), + 1, + ), + cluster_shape_mnk, + ) + return grid + + def _compute_stages( + self, + tiled_mma: cute.TiledMma, + mma_tiler: Tuple[int, int, int], + a_dtype: Type[cutlass.Numeric], + b_dtype: Type[cutlass.Numeric], + ): + num_acc_stage = 1 + num_ab_stage = 4 + num_epi_stage_per_tile = 4 + return num_acc_stage, num_ab_stage, num_epi_stage_per_tile + + def _setup_attributes( + self, + tiled_mma: cute.TiledMma, + a_dtype: Type[cutlass.Numeric], + b_dtype: 
Type[cutlass.Numeric], + ): + self.cluster_shape_mnk = (*self.cluster_shape_mn, 1) + self.cluster_layout_vmnk = cute.tiled_divide( + cute.make_layout(self.cluster_shape_mnk), (tiled_mma.thr_id.shape,) + ) + + mma_inst_shape_k = cute.size(tiled_mma.shape_mnk, mode=[2]) + # it requires k-mode to be 128B aligned + mma_inst_tile_k: int = 4 + self.mma_tiler = ( + self.mma_tiler[0], + self.mma_tiler[1], + mma_inst_shape_k * mma_inst_tile_k, + ) + + self.num_acc_stage, self.num_ab_stage, self.num_epi_stage_per_tile = ( + self._compute_stages(tiled_mma, self.mma_tiler, a_dtype, b_dtype) + ) + self.tmem_alloc_cols = self.num_acc_stage * self.mma_tiler[1] + assert self.tmem_alloc_cols <= SM100_TMEM_CAPACITY_COLUMNS + + self.cta_tile_shape_mnk = ( + self.mma_tiler[0] // cute.size(tiled_mma.thr_id.shape), + self.mma_tiler[1], + self.mma_tiler[2], + ) + + @cute.kernel + def kernel( + self, + split_idx: cutlass.Int32, + tiled_mma: cute.TiledMma, + tma_atom_a: cute.CopyAtom, + mA: cute.Tensor, + tma_atom_b: cute.CopyAtom, + mB: cute.Tensor, + mLabels: cute.Tensor, + mDlogprobs: cute.Tensor, + mMaximum: cute.Tensor, + mAccu: cute.Tensor, + mDlogits_partial: cute.Tensor, + scalarNumValidTokens: cute.Pointer, + ignore_index: cutlass.Int64, + a_smem_layout_staged: cute.ComposedLayout, + b_smem_layout_staged: cute.ComposedLayout, + cluster_layout_vmnk: cute.Layout, + problem_mnk: Tuple[int, int, int], + rank: cutlass.Int32, + ) -> None: + """ + The backward kernel for partial d_logits. 
+ """ + warp_idx = cute.arch.make_warp_uniform(cute.arch.warp_idx()) + tidx, _, _ = cute.arch.thread_idx() + bidx, bidy, _ = cute.arch.block_idx() + # FIXME: block swizzling applied here + pidm, pidn = bidx, bidy + + # FIXME: if 2 CTAs, modify here + cta_rank_in_cluster = 0 + block_in_cluster_coord_vmnk = cluster_layout_vmnk.get_flat_coord(cta_rank_in_cluster) + + # prefetch tma descriptors + if warp_idx == self.load_warp_ids: + cute.nvgpu.cpasync.prefetch_descriptor(tma_atom_a) + cute.nvgpu.cpasync.prefetch_descriptor(tma_atom_b) + + smem = utils.SmemAllocator() + storage = smem.allocate(self.shared_storage) + + ab_pipeline = pipeline.PipelineTmaUmma.create( + num_stages=self.num_ab_stage, + producer_group=make_thread_cooperative_group(len([self.load_warp_ids])), + consumer_group=make_thread_cooperative_group(len([self.mma_warp_ids])), + tx_count=self.tma_copy_ab_bytes, + barrier_storage=storage.load_ab_mbar_ptr.data_ptr(), + ) + ab_producer_state = pipeline.make_pipeline_state( + pipeline.PipelineUserType.Producer, self.num_ab_stage + ) + ab_consumer_state = pipeline.make_pipeline_state( + pipeline.PipelineUserType.Consumer, self.num_ab_stage + ) + + mma_pipeline = pipeline.PipelineUmmaAsync.create( + num_stages=self.num_acc_stage, + producer_group=make_thread_cooperative_group(len([self.mma_warp_ids])), + consumer_group=make_thread_cooperative_group( + self.threads_per_warp * len(self.epi_warp_ids) + ), + barrier_storage=storage.mma_mbar_ptr.data_ptr(), + ) + mma_producer_state = pipeline.make_pipeline_state( + pipeline.PipelineUserType.Producer, self.num_acc_stage + ) + mma_consumer_state = pipeline.make_pipeline_state( + pipeline.PipelineUserType.Consumer, self.num_acc_stage + ) + + tmem_dealloc_mbar_ptr = storage.tmem_dealloc_mbar_ptr.data_ptr() + if warp_idx == self.empty_warp_ids[0]: + with cute.arch.elect_one(): + cute.arch.mbarrier_init( + tmem_dealloc_mbar_ptr, self.threads_per_warp * len(self.epi_warp_ids) + ) + cute.arch.mbarrier_init_fence() + + # 
-------- tensor partition ------------ # + # swizzle o [(tileM, tileK), loopM, loopK, stage] + sA = storage.sA.get_tensor( + a_smem_layout_staged.outer, swizzle=a_smem_layout_staged.inner + ) + # swizzle o [(tileN, tileK), loopN, loopK, stage] + sB = storage.sB.get_tensor( + b_smem_layout_staged.outer, swizzle=b_smem_layout_staged.inner + ) + + # FIXME: if 2 CTAs, modify here + thr_mma = tiled_mma.get_slice(0) + # [MMA, loopM, loopK, stage] + tCsA = thr_mma.make_fragment_A(sA) + # [MMA, loopN, loopK, stage] + tCsB = thr_mma.make_fragment_B(sB) + + # [tileM, tileK, loopK] + gA = cute.local_tile( + mA, (self.cta_tile_shape_mnk[0], self.cta_tile_shape_mnk[2]), (pidm, None) + ) + # [vocab_per_split, dim] + mB_n = cute.local_tile( + mB, (self.vocab_per_split, cute.size(mB.layout.shape, mode=[1])), (split_idx, 0) + ) + # [tileN, tileK, loopK] + gB = cute.local_tile( + mB_n, (self.cta_tile_shape_mnk[1], self.cta_tile_shape_mnk[2]), (pidn, None) + ) + + a_cta_layout = cute.make_layout(cute.slice_(cluster_layout_vmnk, (0, 0, None, 0)).shape) + # just to make sure SMEM and GMEM tensor has the same size in the first rank + tCgA = thr_mma.partition_A(gA) + tCgB = thr_mma.partition_B(gB) + # [CPY, stage] & [CPY, loopK] + tTMAsA, tTMAgA = cpasync.tma_partition( + tma_atom_a, + block_in_cluster_coord_vmnk[2], # cta_coord, + a_cta_layout, + cute.group_modes(sA, 0, 3), + cute.group_modes(tCgA, 0, 3), + ) + b_cta_layout = cute.make_layout(cute.slice_(cluster_layout_vmnk, (0, None, 0, 0)).shape) + # [CPY, stage] & [CPY, loopK] + tTMAsB, tTMAgB = cpasync.tma_partition( + tma_atom_b, + block_in_cluster_coord_vmnk[1], # cta_coord + b_cta_layout, + cute.group_modes(sB, 0, 3), + cute.group_modes(tCgB, 0, 3), + ) + + # ------ Allocate TMEM ------ # + tmem_holding_buf = storage.tmem_holding_buf + if warp_idx == self.empty_warp_ids[0]: + cute.arch.alloc_tmem( + self.tmem_alloc_cols, tmem_holding_buf, is_two_cta=self.use_2cta_instrs + ) + self.cta_sync_barrier.arrive_and_wait() + tmem_ptr = 
cute.arch.retrieve_tmem_ptr( + self.acc_dtype, alignment=16, ptr_to_buffer_holding_addr=tmem_holding_buf + ) + + tmem_shape = (128, self.tmem_alloc_cols) + acc_shape = thr_mma.partition_shape_C(tmem_shape) + tCtC_fake = thr_mma.make_fragment_C(acc_shape) + # [(tileM, tileN), loopM, loopN] + tCtC = cute.make_tensor(tmem_ptr, tCtC_fake.layout) + + # ------ Empty ------ # + if warp_idx in self.empty_warp_ids: + cute.arch.warpgroup_reg_dealloc(self.num_regs_other) + + # ------ Load ------ # + if warp_idx == self.load_warp_ids: + cute.arch.warpgroup_reg_dealloc(self.num_regs_other) + + for k in cutlass.range(cute.size(gA, mode=[2])): + ab_pipeline.producer_acquire(ab_producer_state) + cute.copy( + tma_atom_a, + tTMAgA[(None, k)], + tTMAsA[(None, ab_producer_state.index)], + tma_bar_ptr=ab_pipeline.producer_get_barrier(ab_producer_state), + ) + cute.copy( + tma_atom_b, + tTMAgB[(None, k)], + tTMAsB[(None, ab_producer_state.index)], + tma_bar_ptr=ab_pipeline.producer_get_barrier(ab_producer_state), + ) + ab_pipeline.producer_commit(ab_producer_state) + ab_producer_state.advance() + + # ------ MMA ------ # + if warp_idx == self.mma_warp_ids: + cute.arch.warpgroup_reg_dealloc(self.num_regs_other) + + tiled_mma.set(tcgen05.Field.ACCUMULATE, False) + mma_pipeline.producer_acquire(mma_producer_state) + + for k in cutlass.range(cute.size(gA, mode=[2])): + ab_pipeline.consumer_wait(ab_consumer_state) + + for kblock_idx in cutlass.range(cute.size(tCsA, mode=[2]), unroll_full=True): + cute.gemm( + tiled_mma, + cute.append_ones(tCtC[(None, None, mma_producer_state.index)]), + tCsA[(None, None, kblock_idx, ab_consumer_state.index)], + tCsB[(None, None, kblock_idx, ab_consumer_state.index)], + cute.append_ones(tCtC[(None, None, mma_producer_state.index)]), + ) + tiled_mma.set(tcgen05.Field.ACCUMULATE, True) + + ab_pipeline.consumer_release(ab_consumer_state) + ab_consumer_state.advance() + + mma_pipeline.producer_commit(mma_producer_state) + mma_producer_state.advance() + + # ------ 
EPI ------ # + if warp_idx in self.epi_warp_ids: + cute.arch.warpgroup_reg_alloc(self.num_regs_epi) + + copy_atom_t2r = sm100_utils.get_tmem_load_op( + self.cta_tile_shape_mnk, + utils.LayoutEnum.ROW_MAJOR, + self.acc_dtype, + self.acc_dtype, + (self.epi_tile[0], self.epi_tile[1] // self.num_epi_stage_per_tile), + self.use_2cta_instrs, + ) + # [tileM, subTileN, loopM, CntSubTileN, loopN] + tAcc_epi = cute.flat_divide( + tCtC[((None, None), 0, None)], + (self.epi_tile[0], self.epi_tile[1] // self.num_epi_stage_per_tile), + ) + tiled_copy_t2r = tcgen05.make_tmem_copy( + copy_atom_t2r, tAcc_epi[(None, None, 0, 0, 0)] + ) + thr_copy_t2r = tiled_copy_t2r.get_slice(tidx) + tTMEM_load_tAcc = thr_copy_t2r.partition_S(tAcc_epi) + tTMEM_load_tAcc = cute.group_modes( + tTMEM_load_tAcc, 3, cute.rank(tTMEM_load_tAcc) - 1 + ) + + # predicates + cAcc = cute.make_identity_tensor(self.mma_tiler[:2]) + tCcAcc = thr_mma.partition_C(cAcc) + tCcAcc_epi = cute.flat_divide( + tCcAcc[((None, None), 0, None)], + (self.epi_tile[0], self.epi_tile[1] // self.num_epi_stage_per_tile), + ) + tTMEM_load_cAcc = thr_copy_t2r.partition_D(tCcAcc_epi) + tTMEM_load_cAcc_shape = cute.select(tTMEM_load_cAcc.shape, mode=[0, 1, 2]) + tTMEM_load_rAcc = cute.make_fragment(tTMEM_load_cAcc_shape, self.acc_dtype) + + copy_atom_g2r_int64 = cute.make_copy_atom( + cute.nvgpu.CopyUniversalOp(), mLabels.element_type + ) + copy_atom_g2r_fp32 = cute.make_copy_atom( + cute.nvgpu.CopyUniversalOp(), mDlogprobs.element_type + ) + epilogue_thread_layout = cute.make_layout((128, 1), stride=(1, 1)) + tiled_copy_g2r_int64 = cute.make_tiled_copy_tv( + copy_atom_g2r_int64, epilogue_thread_layout, cute.make_layout((1, 1)) + ) + tiled_copy_g2r_fp32 = cute.make_tiled_copy_tv( + copy_atom_g2r_fp32, epilogue_thread_layout, cute.make_layout((1, 1)) + ) + thr_copy_g2r_int64 = tiled_copy_g2r_int64.get_slice(tidx) + thr_copy_g2r_fp32 = tiled_copy_g2r_fp32.get_slice(tidx) + + # [tileM] + gLabels = cute.local_tile(mLabels, 
(self.epi_tile[0],), (pidm,)) + gMaximum = cute.local_tile(mMaximum, (self.epi_tile[0],), (pidm,)) + gAccu = cute.local_tile(mAccu, (self.epi_tile[0],), (pidm,)) + + # slice along M direction + tMCAcc = thr_copy_g2r_int64.partition_S(cAcc)[(None, None, 0)] + # [(1, 1), 1] + tMCAcc_mask = cute.make_fragment(tMCAcc.shape, cutlass.Boolean) + # to align shape with gMax and gAccu + tMCAcc_mask = cute.append_ones(tMCAcc_mask) + tMCAcc_mask[0] = cute.elem_less( + pidm * self.epi_tile[0] + tidx, cute.size(mA, mode=[0]) + ) + # [(1, 1), 1, 1] + tMgLabels = thr_copy_g2r_int64.partition_S(cute.append_ones(gLabels)) + tMrLabels = cute.make_fragment(tMgLabels.shape, tMgLabels.element_type) + cute.copy(tiled_copy_g2r_int64, tMgLabels, tMrLabels, pred=tMCAcc_mask) + tMgMaximum = thr_copy_g2r_fp32.partition_S(cute.append_ones(gMaximum)) + tMrMaximum = cute.make_fragment(tMgMaximum.layout, tMgMaximum.element_type) + cute.copy(tiled_copy_g2r_fp32, tMgMaximum, tMrMaximum, pred=tMCAcc_mask) + tMgAccu = thr_copy_g2r_fp32.partition_S(cute.append_ones(gAccu)) + tMrAccu = cute.make_fragment(tMgAccu.layout, tMgAccu.element_type) + cute.copy(tiled_copy_g2r_fp32, tMgAccu, tMrAccu, pred=tMCAcc_mask) + + tMrDlogprobs = cute.make_fragment(tMgAccu.layout, mDlogprobs.element_type) + if cutlass.const_expr(self.REDUCTION == 2): + # mean reduction + num_valid_tokens = cute.make_tensor(scalarNumValidTokens, layout=(1,)) + tMrDlogprobs[0] = mDlogprobs[0] / num_valid_tokens[0].to(cutlass.Float32) + elif cutlass.const_expr(self.REDUCTION == 1): + # sum reduction + tMrDlogprobs[0] = mDlogprobs[0] + else: + # no reduction + gDlogprobs = cute.local_tile(mDlogprobs, (self.epi_tile[0],), (pidm,)) + tMgDlogprobs = thr_copy_g2r_fp32.partition_S(cute.append_ones(gDlogprobs)) + cute.copy(tiled_copy_g2r_fp32, tMgDlogprobs, tMrDlogprobs, pred=tMCAcc_mask) + + tMrAccu[0] = cute.arch.rcp_approx(tMrAccu[0]) + tMrDlogprobs[0] *= tMrLabels[0] != ignore_index + tMr_d_acc_exp_logits = tMrDlogprobs[0] * tMrAccu[0] + + # 
------ Partial output ------ # + # [tileM, tileN] + gDlogits_partial = cute.local_tile( + mDlogits_partial, (self.epi_tile[0], self.epi_tile[1]), (pidm, pidn) + ) + # blackwell supports STG.256 + copy_atom_r2g = cute.make_copy_atom( + cute.nvgpu.CopyUniversalOp(), + gDlogits_partial.element_type, + num_bits_per_copy=256, + ) + tiled_copy_r2g = cute.make_tiled_copy_tv( + copy_atom_r2g, epilogue_thread_layout, copy_atom_r2g.layout_dst_tv + ) + thr_copy_r2g = tiled_copy_r2g.get_slice(tidx) + + # [CPY, loopM, loopN] + tR2GCAcc = thr_copy_r2g.partition_S(cAcc) + tR2GCAcc_pred = cute.make_fragment(tR2GCAcc.shape, cutlass.Boolean) + for elem in cutlass.range(cute.size(tR2GCAcc_pred, mode=[0])): + for row in cutlass.range(cute.size(tR2GCAcc_pred, mode=[1])): + for col in cutlass.range(cute.size(tR2GCAcc_pred, mode=[2])): + tR2GCAcc_pred[elem, row, col] = cute.elem_less( + pidm * self.epi_tile[0] + tR2GCAcc[elem, row, col][0], + problem_mnk[0], + ) and cute.elem_less( + split_idx * self.vocab_per_split + + pidn * self.epi_tile[1] + + tR2GCAcc[elem, row, col][1], + problem_mnk[1], + ) + + tR2GgDlogits = thr_copy_r2g.partition_D(gDlogits_partial) + + # for type conversion + dLogits_half = cute.make_fragment(tTMEM_load_rAcc.shape, tR2GgDlogits.element_type) + dLogits_half = cute.tiled_divide( + dLogits_half, (cute.size(tR2GgDlogits, mode=[0]), 1) + ) + dLogits_half = cute.group_modes(dLogits_half, 2, cute.rank(dLogits_half)) + + mma_pipeline.consumer_wait(mma_consumer_state) + + block_vocab_left_idx: cutlass.Int64 = ( + split_idx * self.vocab_per_split + pidn * self.epi_tile[1] + ) + block_vocab_right_idx: cutlass.Int64 = min( + split_idx * self.vocab_per_split + (pidn + 1) * self.epi_tile[1], + min((split_idx + 1) * self.vocab_per_split, problem_mnk[1]), + ) + num_n_subtiles: cutlass.Int64 = cute.ceil_div( + (block_vocab_right_idx - block_vocab_left_idx), + cute.size(tTMEM_load_rAcc, mode=[0]), + ) + for n_subtile in cutlass.range(num_n_subtiles): + cute.copy( + 
tiled_copy_t2r, + tTMEM_load_tAcc[(None, None, None, n_subtile, mma_consumer_state.index)], + tTMEM_load_rAcc, + ) + + for idx in cutlass.range( + cute.size(tTMEM_load_rAcc, mode=[0]), unroll_full=True + ): + # exp_logits + tTMEM_load_rAcc[idx] = cute.exp(tTMEM_load_rAcc[idx] - tMrMaximum[0]) + + position: cutlass.Int64 = ( + rank * problem_mnk[1] + + split_idx * self.vocab_per_split + + pidn * self.epi_tile[1] + + n_subtile * cute.size(tTMEM_load_rAcc, mode=[0]) + + idx + ) + mask: cutlass.Boolean = ( + position == tMrLabels[0] and tMrLabels[0] != ignore_index + ) + # d_logits + tTMEM_load_rAcc[idx] *= tMr_d_acc_exp_logits + tTMEM_load_rAcc[idx] += mask * -tMrDlogprobs[0] + dLogits_half[idx] = tTMEM_load_rAcc[idx].to(dLogits_half.element_type) + + for idx in cutlass.range(cute.size(dLogits_half, mode=[1]), unroll_full=True): + copy_id = n_subtile * cute.size(dLogits_half, mode=[1]) + idx + cute.copy( + tiled_copy_r2g, + dLogits_half[(None, idx, None)], + tR2GgDlogits[(None, None, copy_id)], + pred=tR2GCAcc_pred[((0, None), None, copy_id)], + ) + + mma_pipeline.consumer_release(mma_consumer_state) + mma_consumer_state.advance() + + # ------ Deallocate TMEM ------ # + self.cta_sync_barrier.arrive_and_wait() + if warp_idx == self.empty_warp_ids[0]: + cute.arch.relinquish_tmem_alloc_permit() + cute.arch.dealloc_tmem( + tmem_ptr, self.tmem_alloc_cols, is_two_cta=self.use_2cta_instrs + ) + + @cute.jit + def __call__( + self, + split_idx: cutlass.Int32, + hidden: cute.Tensor, + weight: cute.Tensor, + labels: cute.Tensor, + dlogprobs: cute.Tensor, + maximum: cute.Tensor, + accu: cute.Tensor, + dlogits_partial: cute.Tensor, + scalarNumValidTokens: cute.Pointer, + ignore_index: cutlass.Int64, + rank: cutlass.Int32, + stream: cuda.CUstream, + ) -> None: + a_dtype: Type[cutlass.Numeric] = hidden.element_type + b_dtype: Type[cutlass.Numeric] = weight.element_type + + if cutlass.const_expr(hidden.element_type != weight.element_type): + raise RuntimeError( + f"data type don't 
match: {hidden.element_type} v.s. {weight.element_type}" + ) + if cutlass.const_expr(hidden.element_type not in [cutlass.Float16, cutlass.BFloat16]): + raise RuntimeError("hidden can only be FP16 or BF16") + if cutlass.const_expr(hidden.layout.shape[1] != weight.layout.shape[1]): + raise RuntimeError("K dimension doesn't match") + + problem_mnk = (hidden.layout.shape[0], weight.layout.shape[0], hidden.layout.shape[1]) + if cutlass.const_expr((problem_mnk[2] * a_dtype.width // 8) % 16 != 0): + raise RuntimeError(f"K dimension is not 16B aligned: {problem_mnk[2]}") + if cutlass.const_expr((problem_mnk[2] * b_dtype.width // 8) % 128 != 0): + raise RuntimeError(f"N dimension is not 128B aligned: {problem_mnk[1]}") + + grid = self._compute_grid( + problem_mnk=problem_mnk, + cluster_shape_mn=self.cluster_shape_mn, + cta_tiler=self.mma_tiler, + ) + + a_major_mode = utils.LayoutEnum.from_tensor(hidden).mma_major_mode() + b_major_mode = utils.LayoutEnum.from_tensor(weight).mma_major_mode() + + tiled_mma = sm100_utils.make_trivial_tiled_mma( + a_dtype, + a_major_mode, + b_major_mode, + self.acc_dtype, + self.cta_group, + self.mma_tiler[:2], + ) + self._setup_attributes(tiled_mma, a_dtype, b_dtype) + + self.epi_tile = self.cta_tile_shape_mnk[:2] + + # Swizzle o [(tileM, tileK), loopM, loopK, stage] + a_smem_layout_staged = sm100_utils.make_smem_layout_a( + tiled_mma, self.mma_tiler, a_dtype, self.num_ab_stage + ) + # Swizzle o [(tileN, tileK), loopN, loopK, stage] + b_smem_layout_staged = sm100_utils.make_smem_layout_b( + tiled_mma, self.mma_tiler, b_dtype, self.num_ab_stage + ) + tma_load_op = cpasync.CopyBulkTensorTileG2SOp(self.cta_group) + tma_store_op = cpasync.CopyBulkTensorTileS2GOp() + + # Swizzle o [(tileM, tileK), loopM, loopK] + a_smem_layout = cute.select(a_smem_layout_staged, mode=[0, 1, 2]) + tma_atom_a, tma_tensor_a = cute.nvgpu.make_tiled_tma_atom_A( + tma_load_op, + hidden, + a_smem_layout, + self.mma_tiler, + tiled_mma, + self.cluster_layout_vmnk.shape, + ) 
+ # Swizzle o [(tileN, tileK), loopN, loopK] + b_smem_layout = cute.select(b_smem_layout_staged, mode=[0, 1, 2]) + tma_atom_b, tma_tensor_b = cute.nvgpu.make_tiled_tma_atom_B( + tma_load_op, + weight, + b_smem_layout, + self.mma_tiler, + tiled_mma, + self.cluster_layout_vmnk.shape, + ) + a_copy_size = cute.size_in_bytes(a_dtype, a_smem_layout) + b_copy_size = cute.size_in_bytes(b_dtype, b_smem_layout) + self.tma_copy_ab_bytes = a_copy_size + b_copy_size + + @cute.struct + class SharedStorage: + """ + The shared storage for the backward kernel. + """ + + load_ab_mbar_ptr: cute.struct.MemRange[cutlass.Int64, self.num_ab_stage * 2] + mma_mbar_ptr: cute.struct.MemRange[cutlass.Int64, self.num_acc_stage * 2] + + tmem_dealloc_mbar_ptr: cute.struct.MemRange[cutlass.Int64, 1] + tmem_holding_buf: cutlass.Int32 + + sA: cute.struct.Align[ + cute.struct.MemRange[a_dtype, cute.cosize(a_smem_layout_staged)], + self.buffer_align_bytes, + ] + sB: cute.struct.Align[ + cute.struct.MemRange[b_dtype, cute.cosize(b_smem_layout_staged)], + self.buffer_align_bytes, + ] + + self.shared_storage = SharedStorage + + self.kernel( + split_idx, + tiled_mma, + tma_atom_a, + tma_tensor_a, + tma_atom_b, + tma_tensor_b, + labels, + dlogprobs, + maximum, + accu, + dlogits_partial, + scalarNumValidTokens, + ignore_index, + a_smem_layout_staged, + b_smem_layout_staged, + self.cluster_layout_vmnk, + problem_mnk, + rank, + ).launch( + grid=grid, + block=[self.threads_per_cta, 1, 1], + cluster=self.cluster_shape_mnk, + stream=stream, + ) + +except ImportError: + logging.warning("Cutlass or CUDA bindings not found. BwdPartialDlogits will not be available.") diff --git a/megatron/core/fusions/linear_cross_entropy/blackwell/entry.py b/megatron/core/fusions/linear_cross_entropy/blackwell/entry.py new file mode 100644 index 00000000000..07e018b51ff --- /dev/null +++ b/megatron/core/fusions/linear_cross_entropy/blackwell/entry.py @@ -0,0 +1,480 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. 
+ +import logging +import os +import typing +from dataclasses import dataclass, field +from functools import lru_cache + +try: + import cuda.bindings.driver as cuda # type: ignore + import cutlass + import cutlass.cute as cute + import torch + import torch.distributed as dist + import triton # type: ignore + from cutlass.cute.runtime import from_dlpack + + import megatron.core.fusions.linear_cross_entropy.utils as utils + from megatron.core.fusions.linear_cross_entropy.blackwell import ( + bwd_partial_dlogits as bwd_partial_dlogits, + ) + from megatron.core.fusions.linear_cross_entropy.blackwell import fwd_mainloop as fwd_mainloop + from megatron.core.fusions.linear_cross_entropy.blackwell import triton as triton_kernels + + @dataclass + class FwdConfig: + """ + The configuration for the forward pass. + """ + + _dedicated_stream: torch.cuda.Stream = field(default_factory=torch.cuda.Stream) + _dedicated_events: typing.List[torch.cuda.Event] = field(default_factory=list) + _initialized: bool = field(default=False) + _fwd_mainloop_kernels: typing.Dict[str, cute.kernel] = field(default_factory=dict) + _vocab_per_split: int = field( + default=int(os.environ.get("LCE_FWD_VOCAB_SPLIT_SIZE", 512 * 6)) + ) + + @dataclass + class BwdConfig: + """ + The configuration for the backward pass. + """ + + _bwd_kernel: typing.Dict[str, cute.kernel] = field(default_factory=dict) + _vocab_per_split: int = field( + default=int(os.environ.get("LCE_BWD_VOCAB_SPLIT_SIZE", 512 * 6)) + ) + _backward_method: utils.BackwardMethodEnum = field( + default=utils.BackwardMethodEnum.kDlogitsSplitN + ) + + @lru_cache(maxsize=1) + def _get_fwd_config() -> FwdConfig: + """ + Helper function to lazy initialize the forward configuration. + """ + return FwdConfig() + + @lru_cache(maxsize=1) + def _get_bwd_config() -> BwdConfig: + """ + Helper function to lazy initialize the backward configuration. 
+ """ + return BwdConfig() + + def forward( + hidden: torch.Tensor, + weight: torch.Tensor, + labels: torch.Tensor, + tp_group: typing.Optional[torch.distributed.ProcessGroup] = None, + reduction: typing.Literal["none", "sum", "mean"] = "mean", + ignore_index: int = -100, + sequence_parallel: bool = False, + ) -> typing.Tuple[ + torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, int, int, torch.Tensor + ]: + """ + forward host function + """ + tp_rank = 0 if tp_group is None else torch.distributed.get_rank(tp_group) + tp_world_size = 1 if tp_group is None else torch.distributed.get_world_size(tp_group) + in_tp_mode = (tp_group is not None) and (tp_world_size > 1) + + assert hidden.is_cuda and weight.is_cuda and labels.is_cuda + assert weight.device == hidden.device and labels.device == hidden.device + + # hidden could be [batch, seqlen, dim] or [seqlen, batch, dim] or [tokens, dim] + assert hidden.dim() == 2 or hidden.dim() == 3 + # weight must be [vocab_size, dim] + assert weight.dim() == 2 + # labels could be [batch, seqlen] or [seqlen, batch] or [tokens] + assert (hidden.dim() == 2 and labels.dim() == 1) or ( + hidden.dim() == 3 and labels.dim() == 2 + ) + assert hidden.is_contiguous() and weight.is_contiguous() and labels.is_contiguous() + + hidden_view = hidden.view(-1, hidden.shape[-1]) + labels_view = labels.view(-1) + + assert ( + sequence_parallel and hidden_view.shape[0] * tp_world_size == labels_view.shape[0] + ) or (not sequence_parallel and hidden_view.shape[0] == labels_view.shape[0]) + assert hidden_view.shape[1] == weight.shape[1] + + global_hidden = hidden + if in_tp_mode and sequence_parallel: + partial_hidden_shape = hidden.shape + global_hidden_shape = ( + partial_hidden_shape[0] * tp_world_size, + *partial_hidden_shape[1:], + ) + global_hidden = torch.empty( + global_hidden_shape, dtype=hidden.dtype, device=hidden.device + ) + dist.all_gather_into_tensor(global_hidden, hidden, group=tp_group) + assert global_hidden.is_contiguous() + 
hidden_view = global_hidden.view(-1, global_hidden.shape[-1]) + + num_tokens, dim = hidden_view.shape + vocab_size, _ = weight.shape + + if not _get_fwd_config()._initialized: + _get_fwd_config()._dedicated_stream = torch.cuda.Stream(hidden.device) + _get_fwd_config()._dedicated_events = [torch.cuda.Event() for _ in range(2)] + _get_fwd_config()._initialized = True + + REDUCTION = utils.str_to_reduction_enum(reduction) + # declare logprobs + if REDUCTION == utils.EntropyReductionEnum.kNone: + logprobs = torch.empty((num_tokens,), device=hidden.device, dtype=torch.float32) + if in_tp_mode: + logprobs.zero_() + else: + logprobs = torch.zeros((), device=hidden.device, dtype=torch.float32) + # declare auxiliary tensors + maximum = torch.empty((num_tokens,), device=hidden.device, dtype=torch.float32) + accumulate = torch.empty_like(maximum, dtype=torch.float32) + num_valid_tokens = torch.empty((), device=hidden.device, dtype=torch.int64) + assert ( + maximum.is_contiguous() + and accumulate.is_contiguous() + and num_valid_tokens.is_contiguous() + ) + # declare intermediate tensors + # NOTE: this is a parameter for tuning + num_splits = ( + vocab_size + _get_fwd_config()._vocab_per_split - 1 + ) // _get_fwd_config()._vocab_per_split + _max = torch.empty((num_tokens, num_splits), device=hidden.device, dtype=torch.float32) + _accu = torch.empty((num_tokens, num_splits), device=hidden.device, dtype=torch.float32) + if REDUCTION == utils.EntropyReductionEnum.kNone: + _logprobs = logprobs + else: + _logprobs = torch.empty((num_tokens,), device=hidden.device, dtype=torch.float32) + if in_tp_mode: + _logprobs.zero_() + assert _max.is_contiguous() and _accu.is_contiguous() and _logprobs.is_contiguous() + + triton_kernels.get_num_valid_tokens[(1,)]( + num_tokens, ignore_index, labels_view, labels_view.stride(0), num_valid_tokens + ) + + # need to compile the kernel for the first time + hidden_packed = from_dlpack( + hidden_view.detach(), assumed_align=16 + 
).mark_compact_shape_dynamic(mode=0) + weight_packed = from_dlpack(weight.detach(), assumed_align=16) + labels_packed = from_dlpack( + labels_view.detach(), assumed_align=8 + ).mark_compact_shape_dynamic(mode=0) + logprobs_packed = from_dlpack(_logprobs, assumed_align=16).mark_compact_shape_dynamic( + mode=0 + ) + _max_packed = from_dlpack(_max, assumed_align=8).mark_compact_shape_dynamic( + mode=0, stride_order=(0, 1) + ) + _accu_packed = from_dlpack(_accu, assumed_align=8).mark_compact_shape_dynamic( + mode=0, stride_order=(0, 1) + ) + cuda_stream = cuda.CUstream(torch.cuda.current_stream().cuda_stream) + + # VocabSize and Dim are fixed for a given model, + # only the number of tokens can vary + key = f"vocab_size:{vocab_size}+dim:{dim}+dtype:{hidden_view.dtype}" + if _get_fwd_config()._fwd_mainloop_kernels.get(key) is None: + fwd_mainloop_kernel = fwd_mainloop.FwdMainLoop( + vocab_per_split=_get_fwd_config()._vocab_per_split + ) + fwd_mainloop_compiled_kernel = cute.compile( + fwd_mainloop_kernel, + hidden_packed, + weight_packed, + labels_packed, + logprobs_packed, + _max_packed, + _accu_packed, + ignore_index, + tp_rank, + cuda_stream, + ) + _get_fwd_config()._fwd_mainloop_kernels[key] = fwd_mainloop_compiled_kernel + else: + fwd_mainloop_compiled_kernel = _get_fwd_config()._fwd_mainloop_kernels[key] + fwd_mainloop_compiled_kernel( + hidden_packed, + weight_packed, + labels_packed, + logprobs_packed, + _max_packed, + _accu_packed, + ignore_index, + tp_rank, + cuda_stream, + ) + + if not in_tp_mode: + + def grid(meta): + return (triton.cdiv(num_tokens, meta["BLOCK_SIZE_M"]),) + + triton_kernels.forward_dp_epilogue[grid]( + num_tokens, + num_splits, + ignore_index, + labels_view, + labels_view.stride(0), + num_valid_tokens, + _max, + _max.stride(0), + _max.stride(1), + _accu, + _accu.stride(0), + _accu.stride(1), + maximum, + maximum.stride(0), + accumulate, + maximum.stride(0), + _logprobs, + _logprobs.stride(0), + logprobs, + 
triton.language.constexpr(REDUCTION.value), + ) + else: + _max_backup = _max.clone() + dist.all_reduce(_max, op=dist.ReduceOp.MAX, group=tp_group) + + torch.cuda.current_stream().record_event(_get_fwd_config()._dedicated_events[0]) + with torch.cuda.stream(_get_fwd_config()._dedicated_stream): + _get_fwd_config()._dedicated_stream.wait_event( + _get_fwd_config()._dedicated_events[0] + ) + dist.all_reduce(_logprobs, op=dist.ReduceOp.SUM, group=tp_group) + _get_fwd_config()._dedicated_stream.record_event( + _get_fwd_config()._dedicated_events[1] + ) + + def grid(meta): + return (triton.cdiv(num_tokens, meta["BLOCK_SIZE_M"]),) + + triton_kernels.forward_tp_epilogue[grid]( + num_tokens, + num_splits, + _max, + _max.stride(0), + _max.stride(1), + _max_backup, + _max_backup.stride(0), + _max_backup.stride(1), + _accu, + _accu.stride(0), + _accu.stride(1), + maximum, + maximum.stride(0), + accumulate, + maximum.stride(0), + ) + # reduce accumulate + dist.all_reduce(accumulate, op=dist.ReduceOp.SUM, group=tp_group) + + # update logprobs + torch.cuda.current_stream().wait_event(_get_fwd_config()._dedicated_events[1]) + triton_kernels.forward_tp_epilogue_update_logprobs[grid]( + num_tokens, + ignore_index, + num_valid_tokens, + labels_view, + labels_view.stride(0), + _logprobs, + _logprobs.stride(0), + maximum, + maximum.stride(0), + accumulate, + accumulate.stride(0), + logprobs, + REDUCTION.value, + ) + + return ( + logprobs, + maximum, + accumulate, + num_valid_tokens, + tp_rank, + tp_world_size, + global_hidden, + ) + + def backward( + dlogprobs: torch.Tensor, + global_hidden: torch.Tensor, + weight: torch.Tensor, + labels: torch.Tensor, + maximum: torch.Tensor, + accu: torch.Tensor, + num_valid_tokens: torch.Tensor, + reduction: typing.Literal["none", "sum", "mean"] = "mean", + ignore_index: int = -100, + tp_group: typing.Optional[dist.ProcessGroup] = None, + tp_rank: int = 0, + tp_world_size: int = 1, + sequence_parallel: bool = False, + ) -> typing.Tuple[torch.Tensor, 
torch.Tensor]: + """ + backward host function + """ + in_tp_mode = (tp_group is not None) and (tp_world_size > 1) + + hidden_view = global_hidden.view(-1, global_hidden.shape[-1]) + labels_view = labels.view(-1) + + num_tokens, dim = hidden_view.shape + vocab_size, _ = weight.shape + + REDUCTION = utils.str_to_reduction_enum(reduction) + dlogprobs_view = dlogprobs.view(-1) + assert ( + REDUCTION == utils.EntropyReductionEnum.kNone and dlogprobs.shape == (num_tokens,) + ) or (REDUCTION != utils.EntropyReductionEnum.kNone and dlogprobs.dim() == 0) + assert dlogprobs.is_contiguous() and dlogprobs.is_cuda + + assert ( + num_valid_tokens.dim() == 0 + and num_valid_tokens.is_cuda + and num_valid_tokens.dtype == torch.int64 + ) + + # Allocate d_hidden in float32 for better numerical stability + d_hidden = torch.empty_like(global_hidden, dtype=torch.float32) + d_weight = torch.empty_like(weight) + assert d_hidden.is_contiguous() and d_weight.is_contiguous() + + # FIXME: implement different backward methods + _backward_method = _get_bwd_config()._backward_method + if _backward_method == utils.BackwardMethodEnum.kDlogitsSplitN: + vocab_per_split = _get_bwd_config()._vocab_per_split + num_splits = (vocab_size + vocab_per_split - 1) // vocab_per_split + + _d_logits = torch.empty( + (num_tokens, vocab_per_split), + device=global_hidden.device, + dtype=global_hidden.dtype, + ) + + hidden_packed = from_dlpack( + hidden_view.detach(), assumed_align=16 + ).mark_compact_shape_dynamic(mode=0) + weight_packed = from_dlpack(weight.detach(), assumed_align=16) + labels_packed = from_dlpack( + labels_view.detach(), assumed_align=8 + ).mark_compact_shape_dynamic(mode=0) + dlogprobs_packed = from_dlpack( + dlogprobs_view.detach(), assumed_align=8 + ).mark_compact_shape_dynamic(mode=0) + maximum_packed = from_dlpack( + maximum.detach(), assumed_align=8 + ).mark_compact_shape_dynamic(mode=0) + accu_packed = from_dlpack(accu.detach(), assumed_align=8).mark_compact_shape_dynamic( + mode=0 + ) + 
dlogits_packed = from_dlpack(_d_logits, assumed_align=32).mark_compact_shape_dynamic( + mode=0 + ) + scalarNumValidTokens_packed = cute.runtime.make_ptr( + cutlass.Int64, num_valid_tokens.data_ptr(), cute.AddressSpace.gmem, assumed_align=8 + ) + + stream = cuda.CUstream(torch.cuda.current_stream().cuda_stream) + + key = ( + f"vocab_size:{vocab_size}+dim:{dim}+reduction:{REDUCTION}+dtype:{hidden_view.dtype}" + ) + if _get_bwd_config()._bwd_kernel.get(key) is None: + bwd_kernel = bwd_partial_dlogits.BwdPartialDlogits( + reduction=REDUCTION.value, vocab_per_split=vocab_per_split + ) + bwd_kernel_compiled = cute.compile( + bwd_kernel, + 0, # split_idx + hidden_packed, + weight_packed, + labels_packed, + dlogprobs_packed, + maximum_packed, + accu_packed, + dlogits_packed, + scalarNumValidTokens_packed, + ignore_index, + tp_rank, + stream, + ) + _get_bwd_config()._bwd_kernel[key] = bwd_kernel_compiled + else: + bwd_kernel_compiled = _get_bwd_config()._bwd_kernel.get(key) + + for split_idx in range(num_splits): + bwd_kernel_compiled( + split_idx, + hidden_packed, + weight_packed, + labels_packed, + dlogprobs_packed, + maximum_packed, + accu_packed, + dlogits_packed, + scalarNumValidTokens_packed, + ignore_index, + tp_rank, + stream, + ) + # remove padding areas + # cublas can handle non-contiguous tensors + # therefore, we do not need to contiguous the tensor + vocab_right_bound = ( + min((split_idx + 1) * vocab_per_split, vocab_size) - split_idx * vocab_per_split + ) + valid_d_logits = _d_logits[:, :vocab_right_bound] + + _delta_hidden = torch.mm( + valid_d_logits, + weight[split_idx * vocab_per_split : (split_idx + 1) * vocab_per_split, :], + out_dtype=torch.float32, + ).view_as(d_hidden) + if split_idx == 0: + d_hidden.copy_(_delta_hidden) + else: + d_hidden.add_(_delta_hidden) + torch.matmul( + valid_d_logits.T, + hidden_view, + out=d_weight[ + split_idx * vocab_per_split : (split_idx + 1) * vocab_per_split, : + ], + ) + else: + raise NotImplementedError(f"Unsupported 
backward method: {_backward_method}") + + if in_tp_mode: + dist.all_reduce(d_hidden, op=dist.ReduceOp.SUM, group=tp_group) + if sequence_parallel: + partial_hidden_shape = ( + global_hidden.shape[0] // tp_world_size, + *global_hidden.shape[1:], + ) + partial_num_tokens = num_tokens // tp_world_size + d_hidden = d_hidden.view(-1, d_hidden.shape[-1])[ + tp_rank * partial_num_tokens : (tp_rank + 1) * partial_num_tokens, : + ] + d_hidden = d_hidden.view(partial_hidden_shape).clone() + + # convert d_hidden to the original dtype + d_hidden = d_hidden.type_as(global_hidden) + + return d_hidden, d_weight + +except ImportError: + logging.warning( + "Cutlass or CUDA bindings not found. LinearCrossEntropy Blackwell entry " + "points will not be available." + ) diff --git a/megatron/core/fusions/linear_cross_entropy/blackwell/fwd_mainloop.py b/megatron/core/fusions/linear_cross_entropy/blackwell/fwd_mainloop.py new file mode 100644 index 00000000000..93f5b9523e7 --- /dev/null +++ b/megatron/core/fusions/linear_cross_entropy/blackwell/fwd_mainloop.py @@ -0,0 +1,693 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. + +""" +Implementations of the fusion lm_head(Linear) + Cross-Entropy kernel +""" + +import logging +from typing import Tuple, Type + +try: + import cuda.bindings.driver as cuda # type: ignore + import cutlass + import cutlass.cute as cute + import cutlass.pipeline as pipeline # type: ignore + import cutlass.utils as utils # type: ignore + import cutlass.utils.blackwell_helpers as sm100_utils # type: ignore + from cutlass.cute.nvgpu import cpasync, tcgen05 + + SM100_TMEM_CAPACITY_COLUMNS: int = 512 + + def make_thread_cooperative_group(size: int): + """ + Create a thread cooperative group. + """ + return pipeline.CooperativeGroup(pipeline.Agent.Thread, size, alignment=size) + + class FwdMainLoop: + """ + This class implements the mainloop for forward process. + + Traits stored as attributes. 
+
+        :param acc_dtype: data type used for the MMA accumulator (defaults to Float32).
+        """
+
+        def __init__(
+            self,
+            acc_dtype: Type[cutlass.Numeric] = cutlass.Float32,
+            use_2cta_instrs: bool = False,
+            mma_tiler_mn: Tuple[int, int] = (128, 256),
+            vocab_per_split: int = 512,
+        ):
+            """
+            Configuration including:
+            - MMA instruction settings
+            - Cluster Shape
+            """
+            self.acc_dtype: Type[cutlass.Numeric] = acc_dtype
+            self.use_2cta_instrs = use_2cta_instrs
+            # This is the shape covered by tiledMMA, not just single MMA instruction
+            self.mma_tiler = (*mma_tiler_mn, 1)
+            self.cta_tiler = (self.mma_tiler[0], vocab_per_split, self.mma_tiler[2])
+            self.vocab_per_split = vocab_per_split
+
+            self.cta_group = tcgen05.CtaGroup.TWO if self.use_2cta_instrs else tcgen05.CtaGroup.ONE
+            self.cluster_shape_mn = (2, 1) if self.use_2cta_instrs else (1, 1)
+
+            self.occupancy = 1
+            # query SMEM capacity
+            self.smem_capacity = utils.get_smem_capacity_in_bytes("sm_100")
+
+            # the maximum columns per MMA is 256, and there is only one GEMM, so we can fully
+            # assign TMEM for that GEMM of different tiles.
+ # so 512 = 2 * 256 + + self.threads_per_warp: int = 32 + # 1 warp for loading, 1 warp for issuing MMA, 1 WG for storing + self.epi_warp_ids = (0, 1, 2, 3) + self.load_warp_ids = 4 + self.mma_warp_ids = 5 + self.empty_warp_ids = (6, 7) + + self.threads_per_cta: int = self.threads_per_warp * len( + (*self.epi_warp_ids, self.load_warp_ids, self.mma_warp_ids, *self.empty_warp_ids) + ) + + self.cta_sync_barrier = pipeline.NamedBarrier( + barrier_id=1, num_threads=self.threads_per_cta + ) + self.tmem_alloc_barrier = pipeline.NamedBarrier( + barrier_id=2, num_threads=self.threads_per_cta + ) + + self.buffer_align_bytes: int = 1024 + self.num_regs_other: int = 32 + self.num_regs_epi: int = 192 + + def _compute_stages( + self, + tiled_mma: cute.TiledMma, + mma_tiler: Tuple[int, int, int], + a_dtype: Type[cutlass.Numeric], + b_dtype: Type[cutlass.Numeric], + ): + a_smem_layout_stage_one = sm100_utils.make_smem_layout_a( + tiled_mma, mma_tiler, a_dtype, 1 # only single stage + ) + b_smem_layout_stage_one = sm100_utils.make_smem_layout_b( + tiled_mma, mma_tiler, b_dtype, 1 + ) + a_bytes_per_stage = cute.size_in_bytes(a_dtype, a_smem_layout_stage_one) + b_bytes_per_stage = cute.size_in_bytes(b_dtype, b_smem_layout_stage_one) + num_acc_stage = 2 + num_a_stage = 4 + num_b_stage = 4 + num_epi_stage_per_tile = 4 + + return num_acc_stage, num_a_stage, num_b_stage, num_epi_stage_per_tile + + def _setup_attributes( + self, + tiled_mma: cute.TiledMma, + a_dtype: Type[cutlass.Numeric], + b_dtype: Type[cutlass.Numeric], + ): + self.cluster_shape_mnk = (*self.cluster_shape_mn, 1) + self.cluster_layout_vmnk = cute.tiled_divide( + cute.make_layout(self.cluster_shape_mnk), (tiled_mma.thr_id.shape,) + ) + + # this is fixed for dense MMA, k=16 + mma_inst_shape_k = cute.size(tiled_mma.shape_mnk, mode=[2]) + # 16*4 = 64; 64 * sizeof(FP16) = 128Bytes + mma_inst_tile_k: int = 4 + self.mma_tiler = ( + self.mma_tiler[0], + self.mma_tiler[1], + mma_inst_shape_k * mma_inst_tile_k, + ) + + 
self.num_acc_stage, self.num_a_stage, self.num_b_stage, self.num_epi_stage_per_tile = ( + self._compute_stages(tiled_mma, self.mma_tiler, a_dtype, b_dtype) + ) + self.tmem_alloc_cols = self.num_acc_stage * self.mma_tiler[1] + assert self.tmem_alloc_cols <= SM100_TMEM_CAPACITY_COLUMNS + + self.cta_tile_shape_mnk = ( + self.mma_tiler[0] // cute.size(tiled_mma.thr_id.shape), + self.mma_tiler[1], + self.mma_tiler[2], + ) + + @cute.kernel + def kernel( + self, + tiled_mma: cute.TiledMma, + tma_atom_a: cute.CopyAtom, + mA: cute.Tensor, + tma_atom_b: cute.CopyAtom, + mB: cute.Tensor, + mLabels: cute.Tensor, + mMax: cute.Tensor, + mAccu: cute.Tensor, + mLogprobs: cute.Tensor, + a_smem_layout_staged: cute.ComposedLayout, + b_smem_layout_staged: cute.ComposedLayout, + cluster_layout_vmnk: cute.Layout, + problem_mnk: Tuple[int, int, int], + ignore_index: cutlass.Int64, + rank: cutlass.Int32, + ): + """ + The forward kernel for the mainloop. + """ + warp_idx = cute.arch.make_warp_uniform(cute.arch.warp_idx()) + tidx, _, _ = cute.arch.thread_idx() + bidx, bidy, _ = cute.arch.block_idx() + # FIXME: block swizzling applied here + pidm, pidn = bidx, bidy + + # prefetch tma descriptors + if warp_idx == self.load_warp_ids: + cute.nvgpu.cpasync.prefetch_descriptor(tma_atom_a) + cute.nvgpu.cpasync.prefetch_descriptor(tma_atom_b) + + # declare SMEM + smem = utils.SmemAllocator() + storage = smem.allocate(self.shared_storage) + + ab_pipeline = pipeline.PipelineTmaUmma.create( + num_stages=self.num_a_stage, + producer_group=make_thread_cooperative_group(len([self.load_warp_ids])), + consumer_group=make_thread_cooperative_group(len([self.mma_warp_ids])), + tx_count=self.tma_copy_a_bytes + self.tma_copy_b_bytes, + barrier_storage=storage.load_ab_mbar_ptr.data_ptr(), + ) + ab_producer_state = pipeline.make_pipeline_state( + pipeline.PipelineUserType.Producer, self.num_a_stage + ) + ab_consumer_state = pipeline.make_pipeline_state( + pipeline.PipelineUserType.Consumer, self.num_a_stage + ) + 
+ mma_pipeline = pipeline.PipelineUmmaAsync.create( + num_stages=self.num_acc_stage, + producer_group=make_thread_cooperative_group(len([self.mma_warp_ids])), + consumer_group=make_thread_cooperative_group( + self.threads_per_warp * len(self.epi_warp_ids) + ), + barrier_storage=storage.mma_mbar_ptr.data_ptr(), + ) + mma_producer_state = pipeline.make_pipeline_state( + pipeline.PipelineUserType.Producer, self.num_acc_stage + ) + mma_consumer_state = pipeline.make_pipeline_state( + pipeline.PipelineUserType.Consumer, self.num_acc_stage + ) + + tmem_dealloc_mbar_ptr = storage.tmem_dealloc_mbar_ptr.data_ptr() + if warp_idx == self.empty_warp_ids[0]: + with cute.arch.elect_one(): + cute.arch.mbarrier_init( + tmem_dealloc_mbar_ptr, self.threads_per_warp * len(self.epi_warp_ids) + ) + cute.arch.mbarrier_init_fence() + + # -------- SMEM partition ------------ # + # swizzle o [(tileM, tileK), loopM, loopK, Stage] + sA = storage.sA.get_tensor( + a_smem_layout_staged.outer, swizzle=a_smem_layout_staged.inner + ) + # swizzle o [(tileN, tileK), loopN, loopK, stage] + sB = storage.sB.get_tensor( + b_smem_layout_staged.outer, swizzle=b_smem_layout_staged.inner + ) + + # FIXME: if 2 CTAs, modify here + thr_mma = tiled_mma.get_slice(0) + # [MMA, loopM, loopK, stage] + tCsA = thr_mma.make_fragment_A(sA) + # [MMA, loopN, loopK, stage] + tCsB = thr_mma.make_fragment_B(sB) + + # ---------- GMEM partition ----------- # + # [tileM, tileK, loopK] + gA = cute.local_tile(mA, (self.mma_tiler[0], self.mma_tiler[2]), (pidm, None)) + + # [vocab_size_per_split, dim] + mB_n = cute.local_tile( + mB, (self.vocab_per_split, cute.size(mB.layout.shape, mode=[1])), (pidn, 0) + ) + + # [tileN, tileK, loopN, loopK] + gB = cute.local_tile(mB_n, (self.mma_tiler[1], self.mma_tiler[2]), (None, None)) + + # [MMA, tileCntM, tileCntK, loopK] + tCgA = thr_mma.partition_A(gA) + # [MMA, tileCntN, tileCntK, loopN, loopK] + tCgB = thr_mma.partition_B(gB) + + a_cta_layout = 
cute.make_layout(cute.slice_(cluster_layout_vmnk, (0, 0, None, 0)).shape) + # FIXME: if 2 CTAs, modify here + cta_rank_in_cluster = 0 + block_in_cluster_coord_vmnk = cluster_layout_vmnk.get_flat_coord(cta_rank_in_cluster) + tTMAsA, tTMAgA = cpasync.tma_partition( + tma_atom_a, + block_in_cluster_coord_vmnk[2], # cta_coord, + a_cta_layout, + cute.group_modes(sA, 0, 3), # SMEM tensor + cute.group_modes(tCgA, 0, 3), # GMEM tensor + ) + b_cta_layout = cute.make_layout(cute.slice_(cluster_layout_vmnk, (0, None, 0, 0)).shape) + tTMAsB, tTMAgB = cpasync.tma_partition( + tma_atom_b, + block_in_cluster_coord_vmnk[1], # cta_coord + b_cta_layout, + cute.group_modes(sB, 0, 3), + cute.group_modes(tCgB, 0, 3), + ) + + # Allocate TMEM + tmem_holding_buf = storage.tmem_holding_buf + if warp_idx == self.empty_warp_ids[0]: + cute.arch.alloc_tmem( + self.tmem_alloc_cols, tmem_holding_buf, is_two_cta=self.use_2cta_instrs + ) + self.cta_sync_barrier.arrive_and_wait() + tmem_ptr = cute.arch.retrieve_tmem_ptr( + self.acc_dtype, alignment=16, ptr_to_buffer_holding_addr=tmem_holding_buf + ) + + # [(tileM, tileN), loopM, loopN] + tmem_shape = (128, self.tmem_alloc_cols) + acc_shape = thr_mma.partition_shape_C(tmem_shape) + tCtC_fake = thr_mma.make_fragment_C(acc_shape) + tCtC = cute.make_tensor(tmem_ptr, tCtC_fake.layout) + + block_vocab_left_idx: cutlass.Int64 = pidn * self.vocab_per_split + block_vocab_right_idx: cutlass.Int64 = min( + (pidn + 1) * self.vocab_per_split, problem_mnk[1] + ) + num_n_tiles: cutlass.Int64 = cute.ceil_div( + (block_vocab_right_idx - block_vocab_left_idx), self.mma_tiler[1] + ) + + # /////// + # empty + # /////// + if warp_idx in self.empty_warp_ids: + cute.arch.warpgroup_reg_dealloc(self.num_regs_other) + + # /////// + # load + # /////// + if warp_idx == self.load_warp_ids: + cute.arch.warpgroup_reg_dealloc(self.num_regs_other) + + for n in cutlass.range(num_n_tiles): + for k in cutlass.range(cute.size(gA, mode=[2])): + 
ab_pipeline.producer_acquire(ab_producer_state) + cute.copy( + tma_atom_a, + tTMAgA[(None, k)], + tTMAsA[(None, ab_producer_state.index)], + tma_bar_ptr=ab_pipeline.producer_get_barrier(ab_producer_state), + ) + cute.copy( + tma_atom_b, + tTMAgB[(None, n, k)], + tTMAsB[(None, ab_producer_state.index)], + tma_bar_ptr=ab_pipeline.producer_get_barrier(ab_producer_state), + ) + ab_pipeline.producer_commit(ab_producer_state) + ab_producer_state.advance() + + # /////// + # mma + # /////// + if warp_idx == self.mma_warp_ids: + cute.arch.warpgroup_reg_dealloc(self.num_regs_other) + + for n in cutlass.range(num_n_tiles): + # disable accumulate for the first tile + tiled_mma.set(tcgen05.Field.ACCUMULATE, False) + mma_pipeline.producer_acquire(mma_producer_state) + + for k in cutlass.range(cute.size(gA, mode=[2])): + ab_pipeline.consumer_wait(ab_consumer_state) + + for kblock_idx in cutlass.range( + cute.size(tCsA, mode=[2]), unroll_full=True + ): + cute.gemm( + tiled_mma, + cute.append_ones(tCtC[(None, None, mma_producer_state.index)]), + tCsA[(None, None, kblock_idx, ab_consumer_state.index)], + tCsB[(None, None, kblock_idx, ab_consumer_state.index)], + cute.append_ones(tCtC[(None, None, mma_producer_state.index)]), + ) + # enable accumulate for the next tile + tiled_mma.set(tcgen05.Field.ACCUMULATE, True) + + ab_pipeline.consumer_release(ab_consumer_state) + ab_consumer_state.advance() + + mma_pipeline.producer_commit(mma_producer_state) + mma_producer_state.advance() + + # ////////// + # epilogue + # ////////// + if warp_idx in self.epi_warp_ids: + cute.arch.warpgroup_reg_alloc(self.num_regs_epi) + + # epilog TMEM copy and partition + copy_atom_t2r = sm100_utils.get_tmem_load_op( + self.cta_tile_shape_mnk, + utils.LayoutEnum.ROW_MAJOR, # This is hard-coded + self.acc_dtype, + self.acc_dtype, + (self.epi_tile[0], self.epi_tile[1] // self.num_epi_stage_per_tile), + self.use_2cta_instrs, + ) + # [tileM, subTileN, loopM, CntSubTileN, loopN] + tAcc_epi = cute.flat_divide( + 
tCtC[((None, None), 0, None)], + (self.epi_tile[0], self.epi_tile[1] // self.num_epi_stage_per_tile), + ) + tiled_copy_t2r = tcgen05.make_tmem_copy( + copy_atom_t2r, tAcc_epi[(None, None, 0, 0, 0)] + ) + thr_copy_t2r = tiled_copy_t2r.get_slice(tidx) + tTMEM_load_tAcc = thr_copy_t2r.partition_S(tAcc_epi) + # [(pattern), loopM, loopN, CntTileM, CntTileN] + tTMEM_load_tAcc = cute.group_modes( + tTMEM_load_tAcc, 3, cute.rank(tTMEM_load_tAcc) - 1 + ) + + cAcc = cute.make_identity_tensor(self.mma_tiler[:2]) + tCcAcc = thr_mma.partition_C(cAcc) + # [tileM, subTileN, loopM, CntSubTileN, CntTileN] + tCcAcc_epi = cute.flat_divide( + tCcAcc[((None, None), 0, None)], + (self.epi_tile[0], self.epi_tile[1] // self.num_epi_stage_per_tile), + ) + tTMEM_load_cAcc = thr_copy_t2r.partition_D(tCcAcc_epi) + tTMEM_load_cAcc_shape = cute.select(tTMEM_load_cAcc.shape, mode=[0, 1, 2]) + + # epilogue layouts + epilogue_thread_layout = cute.make_layout((128, 1)) + copy_atom_g2r = cute.make_copy_atom( + cute.nvgpu.CopyUniversalOp(), mLabels.element_type + ) + tiled_copy_g2r = cute.make_tiled_copy( + copy_atom_g2r, epilogue_thread_layout, (128, 1) + ) + thr_copy_g2r = tiled_copy_g2r.get_slice(tidx) + + copy_atom_r2g = cute.make_copy_atom(cute.nvgpu.CopyUniversalOp(), cutlass.Float32) + tiled_copy_r2g = cute.make_tiled_copy( + copy_atom_r2g, epilogue_thread_layout, (128, 1) + ) + thr_copy_r2g = tiled_copy_r2g.get_slice(tidx) + + # auxiliary tensors + # [tileM] + gLabels = cute.local_tile(mLabels, (self.epi_tile[0],), (pidm,)) + + tLabelsCAcc = thr_copy_g2r.partition_S(cAcc)[(None, None, 0)] + tLabelsCAcc_mask = cute.make_fragment(tLabelsCAcc.shape, cutlass.Boolean) + # [(1, 1), 1] + tLabelsCAcc_mask[0] = cute.elem_less(pidm * self.epi_tile[0] + tidx, problem_mnk[0]) + # to align shape with gMax and gAccu + tLabelsCAcc_mask = cute.append_ones(tLabelsCAcc_mask) + + # [(1, 1), 1, 1] + tLabelsgLabels = thr_copy_g2r.partition_S(cute.append_ones(gLabels)) + tLabelsrLabels = cute.make_fragment( + 
tLabelsgLabels.shape, tLabelsgLabels.element_type + ) + cute.copy(tiled_copy_g2r, tLabelsgLabels, tLabelsrLabels, pred=tLabelsCAcc_mask) + valid_mask: cutlass.Boolean = ( + tLabelsrLabels[0] != ignore_index + ) and tLabelsCAcc_mask[0] + + # [tileM, 1] + gMax = cute.local_tile(mMax, (self.epi_tile[0], 1), (pidm, pidn)) + # [(CPYM, CPYN), loopM, loopN] + tR2GgMax = thr_copy_r2g.partition_D(gMax) + tR2GrMax = cute.make_fragment(tR2GgMax.shape, tR2GgMax.element_type) + tR2GrMax.fill(-1e30) + + # [tileM, 1] + gAccu = cute.local_tile(mAccu, (self.epi_tile[0], 1), (pidm, pidn)) + # [(CPYM, CPYN), loopM, loopN] + tR2GgAccu = thr_copy_r2g.partition_D(gAccu) + tR2GrAccu = cute.make_fragment(tR2GgAccu.shape, tR2GgAccu.element_type) + tR2GrAccu.fill(0.0) + + # [tileM, 1] + gLogprobs = cute.append_ones( + cute.local_tile(mLogprobs, (self.epi_tile[0],), (pidm,)) + ) + # [(CPYM, CPYN), loopM, loopN] + tR2GgLogprobs = thr_copy_r2g.partition_D(gLogprobs) + tR2GrLogprobs = cute.make_fragment(tR2GgLogprobs.shape, tR2GgLogprobs.element_type) + tR2GrLogprobs.fill(0.0) + + # [(tileN // num_epi_stage_per_tile, 1), 1, 1] + tTMEM_load_rAcc = cute.make_fragment(tTMEM_load_cAcc_shape, self.acc_dtype) + + for n in cutlass.range(num_n_tiles): + mma_pipeline.consumer_wait(mma_consumer_state) + + left: cutlass.Int64 = block_vocab_left_idx + n * self.epi_tile[1] + right: cutlass.Int64 = min( + (n + 1) * self.epi_tile[1] + block_vocab_left_idx, block_vocab_right_idx + ) + num_n_subtiles: cutlass.Int64 = cute.ceil_div( + (right - left), cute.size(tTMEM_load_rAcc, mode=[0]) + ) + for n_subtile in cutlass.range(num_n_subtiles): + cute.copy( + tiled_copy_t2r, + tTMEM_load_tAcc[ + (None, None, None, n_subtile, mma_consumer_state.index) + ], + tTMEM_load_rAcc, + ) + + for idx in cutlass.range( + cute.size(tTMEM_load_rAcc, mode=[0]), unroll_full=True + ): + local_position: cutlass.Int64 = ( + n * self.epi_tile[1] + + n_subtile * cute.size(tTMEM_load_rAcc, mode=[0]) + + idx + ) + if (block_vocab_left_idx 
+ local_position) < block_vocab_right_idx: + _max_old = tR2GrMax[0] + tR2GrMax[0] = cute.arch.fmax(tR2GrMax[0], tTMEM_load_rAcc[idx]) + exp_logits = cute.exp(tTMEM_load_rAcc[idx] - tR2GrMax[0]) + coeff = cute.exp(_max_old - tR2GrMax[0]) + tR2GrAccu[0] = coeff * tR2GrAccu[0] + exp_logits + + position: cutlass.Int64 = ( + rank * problem_mnk[1] + + pidn * self.vocab_per_split + + local_position + ) + mask: cutlass.Boolean = valid_mask and ( + position == tLabelsrLabels[0] + ) + tR2GrLogprobs[0] += mask * tTMEM_load_rAcc[idx] + + mma_pipeline.consumer_release(mma_consumer_state) + mma_consumer_state.advance() + + cute.copy(tiled_copy_r2g, tR2GrMax, tR2GgMax, pred=tLabelsCAcc_mask) + cute.copy(tiled_copy_r2g, tR2GrAccu, tR2GgAccu, pred=tLabelsCAcc_mask) + + vocab_left_idx: cutlass.Int64 = rank * problem_mnk[1] + pidn * self.vocab_per_split + vocab_right_idx: cutlass.Int64 = rank * problem_mnk[1] + min( + (pidn + 1) * self.vocab_per_split, problem_mnk[1] + ) + valid: cutlass.Boolean = ( + tLabelsrLabels[0] >= vocab_left_idx and tLabelsrLabels[0] < vocab_right_idx + ) + tLabelsCAcc_mask[0] &= valid + + cute.copy(tiled_copy_r2g, tR2GrLogprobs, tR2GgLogprobs, pred=tLabelsCAcc_mask) + + # Dealloc TMEM + self.cta_sync_barrier.arrive_and_wait() + if warp_idx == self.empty_warp_ids[0]: + cute.arch.relinquish_tmem_alloc_permit() + cute.arch.dealloc_tmem( + tmem_ptr, self.tmem_alloc_cols, is_two_cta=self.use_2cta_instrs + ) + + @staticmethod + def _compute_grid( + problem_mnk: Tuple[int, int, int], + cluster_shape_mn: Tuple[int, int], + cta_tiler: Tuple[int, int, int], + num_splits: int, + ) -> Tuple[int, int, int]: + + cluster_shape = (*cluster_shape_mn, 1) + + grid = cute.round_up( + (cute.ceil_div(problem_mnk[0], cta_tiler[0]), num_splits, 1), cluster_shape + ) + return grid + + @cute.jit + def __call__( + self, + hidden: cute.Tensor, + weight: cute.Tensor, + labels: cute.Tensor, + _logprobs: cute.Tensor, + _max: cute.Tensor, + _accu: cute.Tensor, + ignore_index: 
cutlass.Int64, + rank: cutlass.Int32, + stream: cuda.CUstream, + ) -> None: + a_dtype: Type[cutlass.Numeric] = hidden.element_type + b_dtype: Type[cutlass.Numeric] = weight.element_type + + if cutlass.const_expr(hidden.element_type != weight.element_type): + raise RuntimeError( + f"data type don't match: {hidden.element_type} v.s. {weight.element_type}" + ) + if cutlass.const_expr(hidden.element_type not in [cutlass.Float16, cutlass.BFloat16]): + raise RuntimeError("hidden can only be FP16 or BF16") + if cutlass.const_expr(hidden.layout.shape[1] != weight.layout.shape[1]): + raise RuntimeError("K dimension doesn't match") + + problem_mnk = (hidden.layout.shape[0], weight.layout.shape[0], hidden.layout.shape[1]) + if cutlass.const_expr((problem_mnk[2] * a_dtype.width // 8) % 16 != 0): + raise RuntimeError(f"K dimension is not 16B aligned: {problem_mnk[2]}") + + num_splits = cute.ceil_div(problem_mnk[1], self.vocab_per_split) + + grid = self._compute_grid( + problem_mnk=problem_mnk, + cluster_shape_mn=self.cluster_shape_mn, + cta_tiler=self.cta_tiler, + num_splits=num_splits, + ) + a_major_mode = utils.LayoutEnum.from_tensor(hidden).mma_major_mode() + b_major_mode = utils.LayoutEnum.from_tensor(weight).mma_major_mode() + + tiled_mma = sm100_utils.make_trivial_tiled_mma( + a_dtype, + a_major_mode, + b_major_mode, + self.acc_dtype, + self.cta_group, + self.mma_tiler[:2], + ) + + self._setup_attributes(tiled_mma, a_dtype, b_dtype) + if cutlass.const_expr((problem_mnk[2] * a_dtype.width // 8) % 128 != 0): + raise RuntimeError(f"K dimension is not 128B aligned: {problem_mnk[2]}") + + self.epi_tile = self.mma_tiler[:2] + + # Swizzle o [(tileM, tileK), loopM, loopK, stage] + a_smem_layout_staged = sm100_utils.make_smem_layout_a( + tiled_mma, self.mma_tiler, a_dtype, self.num_a_stage + ) + # Swizzle o [(tileN, tileK), loopN, loopK, stage] + b_smem_layout_staged = sm100_utils.make_smem_layout_b( + tiled_mma, self.mma_tiler, b_dtype, self.num_b_stage + ) + + # TMA loading + 
tma_load_op = cpasync.CopyBulkTensorTileG2SOp(self.cta_group)
+            tma_store_op = cpasync.CopyBulkTensorTileS2GOp()
+
+            # Swizzle o [(tileM, tileK), loopM, loopK]
+            a_smem_layout = cute.select(a_smem_layout_staged, mode=[0, 1, 2])
+            # create tma copy atom for hidden,
+            # and the corresponding tma descriptor tensor
+            tma_atom_a, tma_desc_a = cute.nvgpu.make_tiled_tma_atom_A(
+                tma_load_op,
+                hidden,  # gmem_tensor
+                a_smem_layout,  # SMEM layout
+                self.mma_tiler,  # MMA tiler
+                tiled_mma,  # TiledMMA
+                self.cluster_layout_vmnk.shape,  # cluster_shape_vmnk
+            )
+            # Swizzle o [(tileN, tileK), loopN, loopK]
+            b_smem_layout = cute.select(b_smem_layout_staged, mode=[0, 1, 2])
+            tma_atom_b, tma_desc_b = cute.nvgpu.make_tiled_tma_atom_B(
+                tma_load_op,
+                weight,  # gmem_tensor
+                b_smem_layout,  # SMEM layout
+                self.mma_tiler,  # MMA tiler
+                tiled_mma,  # TiledMMA
+                self.cluster_layout_vmnk.shape,  # cluster_shape_vmnk
+            )
+            a_copy_size = cute.size_in_bytes(a_dtype, a_smem_layout)
+            b_copy_size = cute.size_in_bytes(b_dtype, b_smem_layout)
+            self.tma_copy_a_bytes = a_copy_size
+            self.tma_copy_b_bytes = b_copy_size
+
+            assert self.num_a_stage == self.num_b_stage
+
+            @cute.struct
+            class SharedStorage:
+                """
+                The shared storage for the forward kernel.
+ """ + + # pipeline barriers, 2 = producer + consumer + load_ab_mbar_ptr: cute.struct.MemRange[cutlass.Int64, self.num_a_stage * 2] + mma_mbar_ptr: cute.struct.MemRange[cutlass.Int64, self.num_acc_stage * 2] + tmem_dealloc_mbar_ptr: cute.struct.MemRange[cutlass.Int64, 1] + # tmem holding buffer + tmem_holding_buf: cutlass.Int32 + # SMEM tensors + sA: cute.struct.Align[ + cute.struct.MemRange[a_dtype, cute.cosize(a_smem_layout_staged)], + self.buffer_align_bytes, + ] + sB: cute.struct.Align[ + cute.struct.MemRange[b_dtype, cute.cosize(b_smem_layout_staged)], + self.buffer_align_bytes, + ] + + self.shared_storage = SharedStorage + + # launch kernel + self.kernel( + tiled_mma, + tma_atom_a, + tma_desc_a, + tma_atom_b, + tma_desc_b, + labels, + _max, + _accu, + _logprobs, + a_smem_layout_staged, + b_smem_layout_staged, + self.cluster_layout_vmnk, + problem_mnk, + ignore_index, + rank, + ).launch( + grid=grid, + block=[self.threads_per_cta, 1, 1], + cluster=self.cluster_shape_mnk, + stream=stream, + ) + return None + +except ImportError: + logging.warning("Cutlass or CUDA Python bindings not found. FwdMainLoop will not be available.") diff --git a/megatron/core/fusions/linear_cross_entropy/blackwell/triton.py b/megatron/core/fusions/linear_cross_entropy/blackwell/triton.py new file mode 100644 index 00000000000..e025cc046f4 --- /dev/null +++ b/megatron/core/fusions/linear_cross_entropy/blackwell/triton.py @@ -0,0 +1,248 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. 
+ +import triton # type: ignore +import triton.language as tl # type: ignore + +# NOTE: tl.pointer_type() is not available in Triton 3.3.0 + + +@triton.autotune( + configs=[ + triton.Config({"BLOCK_SIZE_M": 1024}, num_stages=3, num_warps=32), + triton.Config({"BLOCK_SIZE_M": 2048}, num_stages=3, num_warps=32), + ], + key=["num_tokens"], +) +@triton.jit +def get_num_valid_tokens( + num_tokens: tl.int64, + ignore_index: tl.int64, + labels_ptr, #: tl.pointer_type(tl.int64), + stride_labels: tl.int64, + num_valid_tokens_ptr, #: tl.pointer_type(tl.int64), + BLOCK_SIZE_M: tl.constexpr, +): + """ + Calculate the number of valid tokens in the labels tensor. + """ + num_pid_m: tl.int64 = tl.cdiv(num_tokens, BLOCK_SIZE_M) + + num_valid_tokens: tl.int64 = tl.zeros((), dtype=tl.int64) + for m in range(0, num_pid_m): + offs_am = m * BLOCK_SIZE_M + tl.arange(0, BLOCK_SIZE_M) + + labels = tl.load( + labels_ptr + offs_am * stride_labels, mask=offs_am < num_tokens, other=ignore_index + ) + + valid_labels_mask = labels != ignore_index + num_valid_tokens += (tl.sum(valid_labels_mask.to(tl.int32), axis=0)).to(tl.int64) + tl.store(num_valid_tokens_ptr, num_valid_tokens) + + +@triton.autotune( + configs=[triton.Config({"BLOCK_SIZE_M": 16, "BLOCK_SIZE_N": 64})], + key=["num_tokens", "num_splits"], +) +@triton.jit +def forward_dp_epilogue( + num_tokens: tl.int64, + num_splits: tl.int64, # TODO: maybe this could be a constexpr + ignore_index: tl.int64, + labels_ptr, #: tl.pointer_type(tl.int64), + stride_labels: tl.int64, + num_valid_tokens_ptr, #: tl.pointer_type(tl.int64), + max_ptr, #: tl.pointer_type(tl.float32), + stride_max_m: tl.int64, + stride_max_n: tl.int64, + accu_ptr, #: tl.pointer_type(tl.float32), + stride_accu_m: tl.int64, + stride_accu_n: tl.int64, + global_max_ptr, #: tl.pointer_type(tl.float32), + stride_global_max: tl.int64, + global_accu_ptr, #: tl.pointer_type(tl.float32), + stride_global_accu: tl.int64, + global_logprobs_ptr, #: tl.pointer_type(tl.float32), + 
stride_global_logprobs: tl.int64, + global_logprobs_scalar_ptr, #: tl.pointer_type(tl.float32), + REDUCTION: tl.constexpr, + BLOCK_SIZE_M: tl.constexpr, + BLOCK_SIZE_N: tl.constexpr, +): + """ + forward epilogue in dp + """ + pid_m = tl.program_id(axis=0) + + offs_m = pid_m * BLOCK_SIZE_M + tl.arange(0, BLOCK_SIZE_M) + global_max = tl.zeros((BLOCK_SIZE_M,), dtype=tl.float32) + global_accu = tl.zeros((BLOCK_SIZE_M,), dtype=tl.float32) + + for pid_n in range(0, tl.cdiv(num_splits, BLOCK_SIZE_N)): + offs_n = pid_n * BLOCK_SIZE_N + tl.arange(0, BLOCK_SIZE_N) + + _max = tl.load( + max_ptr + offs_m[:, None] * stride_max_m + offs_n[None, :] * stride_max_n, + mask=(offs_m[:, None] < num_tokens) & (offs_n[None, :] < num_splits), + other=0.0, + ) + _accu = tl.load( + accu_ptr + offs_m[:, None] * stride_accu_m + offs_n[None, :] * stride_accu_n, + mask=(offs_m[:, None] < num_tokens) & (offs_n[None, :] < num_splits), + other=0.0, + ) + + # local reduction + _max_old = global_max + _local_max = tl.max(_max, axis=1, return_indices=False) + global_max = tl.maximum(global_max, _local_max) + + _scale = tl.exp(_max - global_max[:, None]) + _coeff = tl.exp(_max_old - global_max) + global_accu = _coeff * global_accu + tl.sum(_scale * _accu, axis=1) + + # store maximum + tl.store(global_max_ptr + offs_m * stride_global_max, global_max, mask=offs_m < num_tokens) + # store accumulate + tl.store(global_accu_ptr + offs_m * stride_global_accu, global_accu, mask=offs_m < num_tokens) + # update logprobs + labels = tl.load( + labels_ptr + offs_m * stride_labels, mask=offs_m < num_tokens, other=ignore_index + ) + global_logprobs_ptrs = global_logprobs_ptr + offs_m * stride_global_logprobs + global_logprobs = tl.load(global_logprobs_ptrs, mask=offs_m < num_tokens) + global_logprobs = global_max + tl.log(global_accu) - global_logprobs + label_mask = labels != ignore_index + global_logprobs = tl.where(label_mask, global_logprobs, 0.0) + + if REDUCTION == 0: # no-reduction + 
tl.store(global_logprobs_ptrs, global_logprobs, mask=offs_m < num_tokens) + elif REDUCTION == 1: # sum + global_logprobs_scalar = tl.sum(global_logprobs, axis=0) + tl.atomic_add(global_logprobs_scalar_ptr, global_logprobs_scalar) + elif REDUCTION == 2: # mean + num_valid_tokens = tl.load(num_valid_tokens_ptr) + global_logprobs_scalar = tl.fdiv( + tl.sum(global_logprobs, axis=0), num_valid_tokens.to(tl.float32) + ) + tl.atomic_add(global_logprobs_scalar_ptr, global_logprobs_scalar) + + +@triton.autotune( + configs=[triton.Config({"BLOCK_SIZE_M": 16, "BLOCK_SIZE_N": 64})], + key=["num_tokens", "num_splits"], +) +@triton.jit +def forward_tp_epilogue( + num_tokens: tl.int64, + num_splits: tl.int64, + reduced_max_ptr, #: tl.pointer_type(tl.float32), + stride_reduced_max_m: tl.int64, + stride_reduced_max_n: tl.int64, + original_max_ptr, #: tl.pointer_type(tl.float32), + stride_original_max_m: tl.int64, + stride_original_max_n: tl.int64, + accu_ptr, #: tl.pointer_type(tl.float32), + stride_accu_m: tl.int64, + stride_accu_n: tl.int64, + global_max_ptr, #: tl.pointer_type(tl.float32), + stride_global_max: tl.int64, + global_accu_ptr, #: tl.pointer_type(tl.float32), + stride_global_accu: tl.int64, + BLOCK_SIZE_M: tl.constexpr, + BLOCK_SIZE_N: tl.constexpr, +): + """ + forward epilogue in tp + """ + pid_m = tl.program_id(axis=0) + + offs_m = pid_m * BLOCK_SIZE_M + tl.arange(0, BLOCK_SIZE_M) + + global_max = tl.zeros((BLOCK_SIZE_M,), dtype=tl.float32) + global_accu = tl.zeros((BLOCK_SIZE_M,), dtype=tl.float32) + + for pid_n in range(0, tl.cdiv(num_splits, BLOCK_SIZE_N)): + offs_n = pid_n * BLOCK_SIZE_N + tl.arange(0, BLOCK_SIZE_N) + + _reduced_max = tl.load( + reduced_max_ptr + + offs_m[:, None] * stride_reduced_max_m + + offs_n[None, :] * stride_reduced_max_n, + mask=(offs_m[:, None] < num_tokens) & (offs_n[None, :] < num_splits), + other=0.0, + ) + _original_max = tl.load( + original_max_ptr + + offs_m[:, None] * stride_original_max_m + + offs_n[None, :] * 
stride_original_max_n, + mask=(offs_m[:, None] < num_tokens) & (offs_n[None, :] < num_splits), + other=0.0, + ) + _accu = tl.load( + accu_ptr + offs_m[:, None] * stride_accu_m + offs_n[None, :] * stride_accu_n, + mask=(offs_m[:, None] < num_tokens) & (offs_n[None, :] < num_splits), + other=0.0, + ) + + # local reduction + _max_old = global_max + _local_max = tl.max(_reduced_max, axis=1) + global_max = tl.maximum(global_max, _local_max) + + # update accumulate + _coeff = tl.exp(_max_old - global_max) + _scale = tl.exp(_original_max - global_max[:, None]) + global_accu = _coeff * global_accu + tl.sum(_scale * _accu, axis=1) + + # store + tl.store(global_max_ptr + offs_m * stride_global_max, global_max, mask=offs_m < num_tokens) + tl.store(global_accu_ptr + offs_m * stride_global_accu, global_accu, mask=offs_m < num_tokens) + + +@triton.autotune(configs=[triton.Config({"BLOCK_SIZE_M": 16})], key=["num_tokens"]) +@triton.jit +def forward_tp_epilogue_update_logprobs( + num_tokens: tl.int64, + ignore_index: tl.int64, + num_valid_tokens_ptr, #: tl.pointer_type(tl.int64), + labels_ptr, #: tl.pointer_type(tl.int64), + stride_labels: tl.int64, + logprobs_ptr, #: tl.pointer_type(tl.float32), + stride_logprobs: tl.int64, + maximum_ptr, #: tl.pointer_type(tl.float32), + stride_maximum: tl.int64, + accumulate_ptr, #: tl.pointer_type(tl.float32), + stride_accumulate: tl.int64, + logprobs_scalar_ptr, #: tl.pointer_type(tl.float32), + REDUCTION: tl.constexpr, + BLOCK_SIZE_M: tl.constexpr, +): + """ + update logprobs in tp + """ + pid_m = tl.program_id(axis=0) + + offs_m = pid_m * BLOCK_SIZE_M + tl.arange(0, BLOCK_SIZE_M) + + logprobs = tl.load(logprobs_ptr + offs_m * stride_logprobs, mask=offs_m < num_tokens) + maximum = tl.load(maximum_ptr + offs_m * stride_maximum, mask=offs_m < num_tokens) + accumulate = tl.load(accumulate_ptr + offs_m * stride_accumulate, mask=offs_m < num_tokens) + + labels = tl.load( + labels_ptr + offs_m * stride_labels, mask=offs_m < num_tokens, 
other=ignore_index + ) + label_mask = labels != ignore_index + + logprobs = maximum + tl.log(accumulate) - logprobs + logprobs = tl.where(label_mask, logprobs, 0.0) + + if REDUCTION == 0: # no-reduction + tl.store(logprobs_ptr + offs_m * stride_logprobs, logprobs, mask=offs_m < num_tokens) + elif REDUCTION == 1: # sum + logprobs_scalar = tl.sum(logprobs, axis=0) + tl.atomic_add(logprobs_scalar_ptr, logprobs_scalar) + elif REDUCTION == 2: # mean + num_valid_tokens = tl.load(num_valid_tokens_ptr) + logprobs_scalar = tl.fdiv(tl.sum(logprobs, axis=0), num_valid_tokens.to(tl.float32)) + tl.atomic_add(logprobs_scalar_ptr, logprobs_scalar) diff --git a/megatron/core/fusions/linear_cross_entropy/utils.py b/megatron/core/fusions/linear_cross_entropy/utils.py new file mode 100644 index 00000000000..d077d64ab17 --- /dev/null +++ b/megatron/core/fusions/linear_cross_entropy/utils.py @@ -0,0 +1,43 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. + +import typing +from enum import Enum + + +class EntropyReductionEnum(Enum): + """ + Enum for the reduction method of cross entropy. + """ + + kNone = 0 + kSum = 1 + kMean = 2 + + +def str_to_reduction_enum(reduction: typing.Literal["none", "sum", "mean"]) -> EntropyReductionEnum: + """ + str -> EntropyReductionEnum + """ + _enum = EntropyReductionEnum.kNone + if reduction == "none": + _enum = EntropyReductionEnum.kNone + elif reduction == "sum": + _enum = EntropyReductionEnum.kSum + elif reduction == "mean": + _enum = EntropyReductionEnum.kMean + else: + raise ValueError(f"Invalid reduction: {reduction}") + return _enum + + +class BackwardMethodEnum(Enum): + """ + Enum for the backward method of linear cross entropy. 
+ """ + + # two separate kernels for d_hidden and d_weight, respectively + kTwoKernels = 0 + # calculate partial d_logits along its N dimension + kDlogitsSplitN = 1 + # fuse d_hidden and d_weight into a single kernel + kFused = 2 diff --git a/megatron/core/model_parallel_config.py b/megatron/core/model_parallel_config.py index 3c6ff04d3b0..e30cb0e1d1a 100644 --- a/megatron/core/model_parallel_config.py +++ b/megatron/core/model_parallel_config.py @@ -6,8 +6,11 @@ import torch +from megatron.core.utils import experimental_api + @dataclass +@experimental_api class ModelParallelConfig: """Base configuration for Megatron Core @@ -243,9 +246,14 @@ class ModelParallelConfig: Defaults to False. """ - cross_entropy_fusion_impl: Literal['native', 'te'] = 'native' - """If 'native', MCore based CE loss fusion is used, if 'te', Parallel CE loss - from Transformer Engine library is used. Defaults to 'native'. + cross_entropy_fusion_impl: Literal['native', 'te', 'linear'] = 'native' + """ + Specifies the implementation of cross-entropy loss fusion. + + Options: + - 'native': Uses MCore-based cross-entropy loss fusion (default). + - 'te': Uses the parallel cross-entropy loss implementation from the Transformer Engine library. + - 'linear': Uses a linear-cross-entropy fusion approach. 
""" tp_comm_overlap_disable_qkv: bool = False diff --git a/megatron/core/models/common/embeddings/yarn_rotary_pos_embedding.py b/megatron/core/models/common/embeddings/yarn_rotary_pos_embedding.py index 166ef9b41e7..bc5a9c5fa3f 100644 --- a/megatron/core/models/common/embeddings/yarn_rotary_pos_embedding.py +++ b/megatron/core/models/common/embeddings/yarn_rotary_pos_embedding.py @@ -186,13 +186,13 @@ def forward( emb = get_pos_emb_on_this_cp_rank(emb, 0, cp_group) return emb, _mscale - def _set_cos_sin_cache(self, seq_len, offset, dtype, packed_seq=False): + def _set_cos_sin_cache(self, seq_len, offset, dtype, packed_seq=False, cp_group=None): self.max_seq_len_cached = seq_len self.offset_cached = offset self.dtype_cached = dtype self.packed_seq_cached = packed_seq - emb, _mscale = self.forward(seq_len, offset, packed_seq) + emb, _mscale = self.forward(seq_len, offset, packed_seq=packed_seq, cp_group=cp_group) self.register_buffer( "cos_cached", (emb.cos() * _mscale).to(dtype).contiguous(), persistent=False ) @@ -201,7 +201,7 @@ def _set_cos_sin_cache(self, seq_len, offset, dtype, packed_seq=False): ) def get_cached_cos_sin( - self, seq_len, offset=0, dtype=torch.get_default_dtype(), packed_seq=False + self, seq_len, offset=0, dtype=torch.get_default_dtype(), packed_seq=False, cp_group=None ): """Get cached cos and sin values.""" if ( @@ -210,7 +210,7 @@ def get_cached_cos_sin( or dtype != self.dtype_cached or packed_seq != self.packed_seq_cached ): - self._set_cos_sin_cache(seq_len, offset, dtype, packed_seq) + self._set_cos_sin_cache(seq_len, offset, dtype, packed_seq, cp_group) return (self.cos_cached[:seq_len, ...], self.sin_cached[:seq_len, ...]) diff --git a/megatron/core/models/common/language_module/language_module.py b/megatron/core/models/common/language_module/language_module.py index b0fa6126b63..259bb716a93 100644 --- a/megatron/core/models/common/language_module/language_module.py +++ b/megatron/core/models/common/language_module/language_module.py 
@@ -312,7 +312,7 @@ def tie_embeddings_and_output_weights_state_dict( sharded_state_dict: ShardedStateDict, output_layer_weight_key: str, first_stage_word_emb_key: str, - metadata: dict = {}, + metadata: Optional[dict] = None, ) -> None: """Ties the embedding and output weights in a given sharded state dict. @@ -322,9 +322,11 @@ def tie_embeddings_and_output_weights_state_dict( This entry will be replaced with a tied version first_stage_word_emb_key (str): this must be the same as the ShardedTensor.key of the first stage word embeddings. + metadata (Optional[Dict]): metadata controlling sharded state dict creation. Returns: None, acts in-place """ + metadata = ensure_metadata_has_dp_cp_group(metadata) if not self.post_process: # No output layer assert output_layer_weight_key not in sharded_state_dict, sharded_state_dict.keys() diff --git a/megatron/core/models/common/model_chunk_schedule_plan.py b/megatron/core/models/common/model_chunk_schedule_plan.py index 033e8e808f9..f451942ffc2 100644 --- a/megatron/core/models/common/model_chunk_schedule_plan.py +++ b/megatron/core/models/common/model_chunk_schedule_plan.py @@ -14,6 +14,7 @@ get_comm_stream, get_comp_stream, ) +from megatron.core.transformer.enums import CudaGraphScope class ModelChunkState: @@ -122,14 +123,13 @@ def _build_callable_nodes(self, event, comp_stream, comm_stream, extra_args): # get flags for latter use is_mtp = isinstance(self.layer, MultiTokenPredictionLayer) - is_moe = ( - isinstance(self.layer.transformer_layer.mlp, MoELayer) - if is_mtp - else isinstance(self.layer.mlp, MoELayer) - ) + transformer_layer = self.layer.transformer_layer if is_mtp else self.layer + is_moe = isinstance(transformer_layer.mlp, MoELayer) + num_local_experts = transformer_layer.mlp.num_local_experts if is_moe else None extra_args["config"] = self.layer.config extra_args["is_moe"] = is_moe + extra_args["num_local_experts"] = num_local_experts extra_args["delay_wgrad_compute"] = self.layer.config.delay_wgrad_compute 
extra_args["is_mtp"] = is_mtp @@ -173,6 +173,11 @@ def create_node(stream, module, name): else: self.mtp_post_process = NoopScheduleNode() + # mlp and combine may receive dgrad from attn, which is managed by cuda graph. + if CudaGraphScope.attn in self.config.cuda_graph_scope: + self.mlp.manual_grads_release = False + self.moe_combine.manual_grads_release = False + def get_fp8_context(self): """ Get the fp8 context for the transformer layer. diff --git a/megatron/core/models/gpt/experimental_attention_variant_module_specs.py b/megatron/core/models/gpt/experimental_attention_variant_module_specs.py index a7cc7cc0a55..3051cf6e960 100644 --- a/megatron/core/models/gpt/experimental_attention_variant_module_specs.py +++ b/megatron/core/models/gpt/experimental_attention_variant_module_specs.py @@ -149,12 +149,12 @@ def get_experimental_attention_variant_module_spec( ########## -def get_transformer_block_with_experimental_attention_variant_spec( - config: TransformerConfig, vp_stage: Optional[int] = None, pp_rank: Optional[int] = None -) -> TransformerBlockSubmodules: - """Build transformer block spec with experimental attention variants (e.g., linear attention). +def get_transformer_layer_with_experimental_attention_variant_spec( + config: TransformerConfig, backend: BackendSpecProvider = None +) -> List[ModuleSpec]: + """Build transformer layer specs with experimental attention variants (e.g., linear attention). - This function constructs a heterogeneous transformer block that supports mixing different + This function is for constructing a heterogeneous transformer that supports mixing different attention mechanisms (experimental vs standard) and MLP types (MoE vs dense) across layers. **Note that, this API is a experimental API in the short term, and might be deprecated in the future. In the long run, we will move to a new design that better support hybrid models.** @@ -170,22 +170,19 @@ def get_transformer_block_with_experimental_attention_variant_spec( 2. 
Per-Layer Spec Construction: Iterates through layers, constructing transformer layer specs based on attention and MLP patterns. - 3. Pipeline Slicing: Extracts layer specs for the current pipeline stage. - Args: config: Transformer configuration containing model hyperparameters and feature flags. - vp_stage: Virtual pipeline stage index for interleaved pipeline parallelism. - pp_rank: Pipeline model parallel rank. Returns: - TransformerBlockSubmodules containing per-layer specs and final layer norm. + List[ModuleSpec] containing per-layer specs. Note: Currently only supports transformer_engine backend. Kitchen backend can be used as a wrapper with TE fallback for unsupported operations. """ - backend = _get_backend_spec_provider(config=config) + if backend is None: + backend = _get_backend_spec_provider(config=config) # Get attention patterns and specs experimental_attention_pattern = [0] * config.num_layers @@ -257,6 +254,42 @@ def get_transformer_block_with_experimental_attention_variant_spec( ) ) + return layer_specs + + +def get_transformer_block_with_experimental_attention_variant_spec( + config: TransformerConfig, vp_stage: Optional[int] = None, pp_rank: Optional[int] = None +) -> TransformerBlockSubmodules: + """Build transformer block spec with experimental attention variants (e.g., linear attention). + + This function constructs a heterogeneous transformer block that supports mixing different + attention mechanisms (experimental vs standard) and MLP types (MoE vs dense) across layers. + **Note that, this API is a experimental API in the short term, and might be deprecated in the + future. In the long run, we will move to a new design that better support hybrid models.** + + Constructing transformer layer specs by + `get_transformer_layer_with_experimental_attention_variant_spec` and then slicing the + layer specs to only include the layers that are built in this pipeline stage. 
+ + Args: + config: Transformer configuration containing model hyperparameters and feature flags. + vp_stage: Virtual pipeline stage index for interleaved pipeline parallelism. + pp_rank: Pipeline model parallel rank. + + Returns: + TransformerBlockSubmodules containing per-layer specs and final layer norm. + + Note: + Currently only supports transformer_engine backend. Kitchen backend can be used as a + wrapper with TE fallback for unsupported operations. + """ + + backend = _get_backend_spec_provider(config=config) + + layer_specs = get_transformer_layer_with_experimental_attention_variant_spec( + config=config, backend=backend + ) + # Slice the layer specs to only include the layers that are built in this pipeline stage. if config.pipeline_model_parallel_layout is not None: local_layer_ids = config.pipeline_model_parallel_layout.get_layer_id_list( @@ -270,6 +303,7 @@ def get_transformer_block_with_experimental_attention_variant_spec( layer_specs = [layer_specs[layer_id] for layer_id in local_layer_ids] # Get GPT decoder block spec + rms_norm = config.normalization == "RMSNorm" gpt_decoder_block_spec = TransformerBlockSubmodules( layer_specs=layer_specs, layer_norm=backend.layer_norm(rms_norm=rms_norm, for_qk=False) ) @@ -359,7 +393,7 @@ def _get_backend_spec_provider(config: TransformerConfig) -> BackendSpecProvider ) backend: BackendSpecProvider = ( KitchenSpecProvider( - fallback=TESpecProvider(), + fallback=TESpecProvider(fallback_to_eager_attn=config.fallback_to_eager_attn), use_kitchen_attention=config.use_kitchen_attention, kitchen_attention_backend=config.kitchen_attention_backend, ) @@ -396,6 +430,7 @@ def _get_self_attention_module_spec( qk_l2_norm=config.qk_l2_norm, use_kitchen=config.use_kitchen, use_te_activation_func=config.use_te_activation_func, + fallback_to_eager_attn=config.fallback_to_eager_attn, use_kitchen_attention=config.use_kitchen_attention, kitchen_attention_backend=config.kitchen_attention_backend, ) diff --git 
a/megatron/core/models/gpt/fine_grained_callables.py b/megatron/core/models/gpt/fine_grained_callables.py index 7cee9d2973c..e77cfb71871 100644 --- a/megatron/core/models/gpt/fine_grained_callables.py +++ b/megatron/core/models/gpt/fine_grained_callables.py @@ -43,13 +43,14 @@ def wrapped_func(*args, **kwarg): @internal_api -def should_free_input(name, is_moe, config): +def should_free_input(name, is_moe, config, num_local_experts): """Determine if the node should free its input memory. Args: name: Node name is_moe: Whether it's a MoE model config: TransformerConfig object + num_local_experts: Number of local experts in MoE module Returns: bool: Whether to free input memory @@ -70,8 +71,19 @@ def should_free_input(name, is_moe, config): # when and how to free the input memory. # The input and output of A2A are not needed anymore after the forward pass, # so we can free the input memory after the forward pass. + + # When low precision fp8/4 is enabled, the casted tensors are saved and the + # original bf16 tensors are safe to be freed. + free_mlp = config.fp8 is not None or config.fp4 is not None + if not free_mlp: + # AlltoAll dispatcher with local_num_experts=1 and HybridEP both use identity + # operation for `dispatch_postprocess`, hence the mlp inputs will be directly + # passed to GroupedGemm and should be saved for backward pass. + free_mlp = num_local_experts > 1 or config.moe_token_dispatcher_type != "alltoall" + free_mlp = free_mlp and not enable_hybridep + free_input_nodes = { - "mlp": not enable_hybridep, + "mlp": free_mlp, "moe_combine": True, # For non-DeepEP and non-HybridEP dispatcher mode, the input is the un-dispatched tokens # and probs before dispatch A2A and it's not needed anymore after the forward pass @@ -256,7 +268,8 @@ def __init__( config = extra_args.get("config", None) assert config is not None, "model config must be passed to TransformerLayerNode." 
is_moe = extra_args.get("is_moe", False) - free_input = should_free_input(name, is_moe, config) + num_local_experts = extra_args.get("num_local_experts", None) + free_input = should_free_input(name, is_moe, config, num_local_experts) self.delay_wgrad_compute = extra_args.get("delay_wgrad_compute", False) super().__init__( @@ -316,9 +329,11 @@ def backward_dw(self): """Computes the weight gradients for the transformer layer node.""" if not self.delay_wgrad_compute: return - with torch.cuda.nvtx.range(f"{self.name} wgrad"): + with torch.cuda.stream(self.stream): + torch.cuda.nvtx.range_push(f"{self.name} wgrad") for module in self.bwd_dw_callables: module.backward_dw() + torch.cuda.nvtx.range_pop() # the output grad memory is last used in wgrad compute, should be safe to release. assert self.delay_grads_release, "output grad memory should be valid before wgrad." @@ -514,6 +529,10 @@ def submodule_dispatch_forward( token_dispatcher._comm_manager.token_probs = probs dispatched_tokens, dispatched_probs = layer.mlp.dispatch(local_tokens, probs) + + # `dispatched_probs` is needed by backward pass of swiglu, therefore it's + # passed to moe_forward within `layer_state` to avoid the free_input process + # of the input tensors. node.layer_state.dispatched_probs = node.detach(dispatched_probs) return dispatched_tokens @@ -531,13 +550,16 @@ def submodule_moe_forward(node: ScheduleNode, dispatched_tokens: torch.Tensor): expert_output, _ = layer.mlp.routed_experts_compute(dispatched_tokens, dispatched_probs) + # For HybridEP, tokens_per_expert is generated on comm stream, as the input to + # `routed_experts_compute`, a ref is needed to prevent it from being freed. 
+ if enable_hybridep: + tokens_per_expert = token_dispatcher._comm_manager.get_number_of_tokens_per_expert() + node.layer_state.tokens_per_expert = tokens_per_expert + if layer.recompute_pre_mlp_layernorm: # discard the output of the pre-mlp layernorm and register the recompute # as a gradient hook of expert_output layer.pre_mlp_norm_checkpoint.discard_output_and_register_recompute(expert_output) - # release tensor reference after use - node.layer_state.dispatched_probs = None - node.layer_state.pre_mlp_layernorm_output = None return expert_output @@ -572,11 +594,14 @@ def submodule_combine_forward(node: ScheduleNode, output: torch.Tensor): inp=hidden_states, requires_grad=hidden_states.requires_grad, keep_graph=True ) - # Need to record residual to comm stream, since it's created on comp stream + # Need to record tensors created on comp stream to comm stream node.layer_state.residual.record_stream(torch.cuda.current_stream()) + if shared_expert_output is not None: + shared_expert_output.record_stream(torch.cuda.current_stream()) # release tensor reference after use node.layer_state.residual = None + node.layer_state.shared_expert_output = None # final layer norm from decoder final_layernorm = node.chunk_state.model.decoder.final_layernorm diff --git a/megatron/core/models/gpt/gpt_layer_specs.py b/megatron/core/models/gpt/gpt_layer_specs.py index 49501ee54eb..5bb479ad3ea 100755 --- a/megatron/core/models/gpt/gpt_layer_specs.py +++ b/megatron/core/models/gpt/gpt_layer_specs.py @@ -40,7 +40,7 @@ from megatron.core.utils import is_te_min_version try: - import transformer_engine as te # type: ignore[import-untyped] # pylint: disable=unused-import + import transformer_engine as te # pylint: disable=unused-import from megatron.core.extensions.transformer_engine import TEFusedMLP, TENorm from megatron.core.extensions.transformer_engine_spec_provider import TESpecProvider @@ -56,7 +56,7 @@ HAVE_KITCHEN = False try: - import apex # type: ignore[import-untyped] # pylint: 
disable=unused-import + import apex # pylint: disable=unused-import from megatron.core.fusions.fused_layer_norm import FusedLayerNorm @@ -179,6 +179,7 @@ def get_gpt_layer_with_transformer_engine_spec( use_te_op_fuser: Optional[bool] = False, use_kitchen: bool = False, use_te_activation_func: bool = False, + fallback_to_eager_attn: bool = False, use_kitchen_attention: bool = False, kitchen_attention_backend: str = "sdpa", ) -> ModuleSpec: @@ -210,7 +211,7 @@ def get_gpt_layer_with_transformer_engine_spec( if use_kitchen: assert HAVE_KITCHEN backend: BackendSpecProvider = KitchenSpecProvider( - fallback=TESpecProvider(), + fallback=TESpecProvider(fallback_to_eager_attn=fallback_to_eager_attn), use_kitchen_attention=use_kitchen_attention, kitchen_attention_backend=kitchen_attention_backend, ) @@ -219,7 +220,7 @@ def get_gpt_layer_with_transformer_engine_spec( if use_te_activation_func: raise AssertionError("use_te_activation_func not compatible with using kitchen.") else: - backend = TESpecProvider() + backend = TESpecProvider(fallback_to_eager_attn=fallback_to_eager_attn) mlp = get_mlp_module_spec_for_backend( backend=backend, @@ -519,12 +520,14 @@ def get_gpt_decoder_layer_specs( use_transformer_engine: bool, normalization: Optional[str] = None, qk_l2_norm: Optional[bool] = False, - vp_stage: Optional[int] = None, - pp_rank: Optional[int] = None, ) -> TransformerBlockSubmodules: """GPT block spec.""" + assert config.experimental_attention_variant is None, ( + "Experimental attention variant is not supported with get_gpt_decoder_layer_specs, " + f"but got {config.experimental_attention_variant=}." 
+ ) + if use_transformer_engine: - layer_norm_impl = TENorm dense_layer_spec = get_gpt_layer_with_transformer_engine_spec( num_experts=None, moe_grouped_gemm=False, @@ -534,8 +537,6 @@ def get_gpt_decoder_layer_specs( qk_l2_norm=qk_l2_norm, use_kitchen=config.use_kitchen, use_te_activation_func=config.use_te_activation_func, - use_kitchen_attention=config.use_kitchen_attention, - kitchen_attention_backend=config.kitchen_attention_backend, ) moe_layer_spec = get_gpt_layer_with_transformer_engine_spec( num_experts=config.num_moe_experts, @@ -546,11 +547,8 @@ def get_gpt_decoder_layer_specs( qk_l2_norm=qk_l2_norm, use_kitchen=config.use_kitchen, use_te_activation_func=config.use_te_activation_func, - use_kitchen_attention=config.use_kitchen_attention, - kitchen_attention_backend=config.kitchen_attention_backend, ) else: - layer_norm_impl = LNImpl dense_layer_spec = get_gpt_layer_local_spec( num_experts=None, moe_grouped_gemm=False, @@ -560,8 +558,6 @@ def get_gpt_decoder_layer_specs( normalization=normalization, qk_l2_norm=qk_l2_norm, use_kitchen=config.use_kitchen, - use_kitchen_attention=config.use_kitchen_attention, - kitchen_attention_backend=config.kitchen_attention_backend, ) moe_layer_spec = get_gpt_layer_local_spec( num_experts=config.num_moe_experts, @@ -572,8 +568,6 @@ def get_gpt_decoder_layer_specs( normalization=normalization, qk_l2_norm=qk_l2_norm, use_kitchen=config.use_kitchen, - use_kitchen_attention=config.use_kitchen_attention, - kitchen_attention_backend=config.kitchen_attention_backend, ) # Parse config.moe_layer_freq to determine the pattern of expert/dense layers. @@ -621,13 +615,16 @@ def get_gpt_decoder_block_spec( layer_specs = get_gpt_decoder_layer_specs( config, use_transformer_engine, normalization, qk_l2_norm ) + # Slice the layer specs to only include the layers that are built in this pipeline stage. 
# Note: MCore layer_number starts at 1 num_layers_to_build = get_num_layers_to_build(config, vp_stage=vp_stage, pp_rank=pp_rank) if config.pipeline_model_parallel_layout is not None: layout = config.pipeline_model_parallel_layout - assert isinstance(layout, PipelineParallelLayerLayout) + assert isinstance( + layout, PipelineParallelLayerLayout + ), f"Invalid pipeline model parallel layout: {layout}" local_layer_specs = [ layer_specs[layer_id] for layer_id in layout.get_layer_id_list( @@ -638,11 +635,11 @@ def get_gpt_decoder_block_spec( offset = get_transformer_layer_offset(config, vp_stage=vp_stage, pp_rank=pp_rank) local_layer_specs = layer_specs[offset : offset + num_layers_to_build] + # Block spec. if use_transformer_engine: layer_norm_impl = TENorm else: layer_norm_impl = LNImpl - # Block spec. block_spec = TransformerBlockSubmodules( layer_specs=local_layer_specs, layer_norm=layer_norm_impl ) @@ -659,22 +656,17 @@ def get_gpt_mtp_block_spec( ) -> MultiTokenPredictionBlockSubmodules: """GPT Multi-Token Prediction (MTP) block spec.""" if use_transformer_engine: - backend: BackendSpecProvider = ( - KitchenSpecProvider( - fallback=TESpecProvider(), + if config.use_kitchen: + backend: BackendSpecProvider = KitchenSpecProvider( + fallback=TESpecProvider(fallback_to_eager_attn=config.fallback_to_eager_attn), use_kitchen_attention=config.use_kitchen_attention, kitchen_attention_backend=config.kitchen_attention_backend, ) - if config.use_kitchen - else TESpecProvider() - ) + else: + backend = TESpecProvider(fallback_to_eager_attn=config.fallback_to_eager_attn) else: backend = ( - KitchenSpecProvider( - fallback=LocalSpecProvider(), - use_kitchen_attention=config.use_kitchen_attention, - kitchen_attention_backend=config.kitchen_attention_backend, - ) + KitchenSpecProvider(fallback=LocalSpecProvider()) if config.use_kitchen else LocalSpecProvider() ) diff --git a/megatron/core/models/gpt/gpt_model.py b/megatron/core/models/gpt/gpt_model.py index e287344c13d..5b31ddedf13 
100644 --- a/megatron/core/models/gpt/gpt_model.py +++ b/megatron/core/models/gpt/gpt_model.py @@ -25,6 +25,7 @@ from megatron.core.quantization.utils import get_quant_config_or_none from megatron.core.tensor_parallel import gather_from_sequence_parallel_region from megatron.core.transformer.enums import CudaGraphScope, ModelType +from megatron.core.transformer.linear_cross_entropy import LinearCrossEntropyModule from megatron.core.transformer.multi_token_prediction import ( MTPLossAutoScaler, MTPLossLoggingHelper, @@ -146,6 +147,11 @@ def __init__( self.mtp_block_spec = mtp_block_spec self.mtp_process = mtp_block_spec is not None + self.fuse_linear_cross_entropy = ( + self.config.cross_entropy_loss_fusion + and self.config.cross_entropy_fusion_impl == "linear" + ) + if self.pre_process or self.mtp_process: self.embedding = LanguageModelEmbedding( config=self.config, @@ -238,7 +244,7 @@ def __init__( self.embedding_activation_buffer = None self.grad_output_buffer = None - self.output_layer = tensor_parallel.ColumnParallelLinear( + self.output_layer = LinearCrossEntropyModule( config.hidden_size, self.vocab_size, config=config, @@ -444,7 +450,7 @@ def _preprocess( # return this extra tensor # this is for backwards compatibility with # legacy unit tests, which break if you - # return a 6 tuple instead of 5. + # return a 7 tuple instead of 6. preproc_output += (rotary_pos_cos_sin,) return preproc_output @@ -616,12 +622,6 @@ def _postprocess( # if loss_mask is not provided, use all ones as loss_mask loss_mask = torch.ones_like(mtp_labels) for mtp_layer_number in range(self.config.mtp_num_layers): - # output - mtp_logits, _ = self.output_layer( - hidden_states_list[mtp_layer_number + 1], - weight=output_weight, - runtime_gather_output=runtime_gather_output, - ) # Calc loss for the current Multi-Token Prediction (MTP) layers. 
mtp_labels, _ = roll_tensor( mtp_labels, @@ -637,7 +637,23 @@ def _postprocess( cp_group=self.cp_group, packed_seq_params=packed_seq_params, ) - mtp_loss = self.compute_language_model_loss(mtp_labels, mtp_logits) + + # Compute mtp loss without storing logits to save memory. + output_layer_kwargs = dict( + input_=hidden_states_list[mtp_layer_number + 1], + weight=output_weight, + runtime_gather_output=runtime_gather_output, + ) + if self.fuse_linear_cross_entropy: + mtp_loss = self.output_layer( + output_cross_entropy_loss=self.fuse_linear_cross_entropy, + labels=mtp_labels, + **output_layer_kwargs, + ) + else: + mtp_logits, _ = self.output_layer(**output_layer_kwargs) + mtp_loss = self.compute_language_model_loss(mtp_labels, mtp_logits) + mtp_loss = loss_mask * mtp_loss if self.training: # TODO(shifangx): remove the use of parallel_state here @@ -682,9 +698,12 @@ def _postprocess( hidden_states.squeeze(1).unsqueeze(0) ).unsqueeze(1) - logits, _ = self.output_layer( - hidden_states, weight=output_weight, runtime_gather_output=runtime_gather_output - ) + if has_config_logger_enabled(self.config) or labels is None: + logits, _ = self.output_layer( + hidden_states, weight=output_weight, runtime_gather_output=runtime_gather_output + ) + else: + logits = None # Restore sequence parallel execution to the output layer if necessary. 
if sequence_parallel_override: @@ -711,7 +730,18 @@ def _postprocess( # [s b h] => [b s h] return logits.transpose(0, 1).contiguous() - loss = self.compute_language_model_loss(labels, logits) + output_layer_kwargs = dict( + input_=hidden_states, weight=output_weight, runtime_gather_output=runtime_gather_output + ) + if self.fuse_linear_cross_entropy: + loss = self.output_layer( + output_cross_entropy_loss=self.fuse_linear_cross_entropy, + labels=labels, + **output_layer_kwargs, + ) + else: + logits, _ = self.output_layer(**output_layer_kwargs) + loss = self.compute_language_model_loss(labels, logits) return loss diff --git a/megatron/core/models/mamba/mamba_model.py b/megatron/core/models/mamba/mamba_model.py index 8d45e1d0147..cf1002a5426 100644 --- a/megatron/core/models/mamba/mamba_model.py +++ b/megatron/core/models/mamba/mamba_model.py @@ -4,7 +4,6 @@ from torch import Tensor -from megatron.core import tensor_parallel from megatron.core.config_logger import has_config_logger_enabled, log_config_to_disk from megatron.core.inference.contexts import BaseInferenceContext from megatron.core.models.common.embeddings.language_model_embedding import LanguageModelEmbedding @@ -16,6 +15,7 @@ from megatron.core.tensor_parallel import gather_from_sequence_parallel_region from megatron.core.transformer import TransformerConfig from megatron.core.transformer.enums import ModelType +from megatron.core.transformer.linear_cross_entropy import LinearCrossEntropyModule from megatron.core.transformer.spec_utils import ModuleSpec, build_module from megatron.core.utils import ( WrappedTensor, @@ -102,6 +102,11 @@ def __init__( # TODO: remove this dependency ? 
self.model_type = ModelType.encoder_or_decoder + self.fuse_linear_cross_entropy = ( + self.config.cross_entropy_loss_fusion + and self.config.cross_entropy_fusion_impl == "linear" + ) + if self.pre_process: self.embedding = LanguageModelEmbedding( config=self.config, @@ -136,7 +141,7 @@ def __init__( # Output if post_process: - self.output_layer = tensor_parallel.ColumnParallelLinear( + self.output_layer = LinearCrossEntropyModule( config.hidden_size, self.vocab_size, config=config, @@ -288,9 +293,10 @@ def forward( hidden_states.squeeze(1).unsqueeze(0) ).unsqueeze(1) - logits, _ = self.output_layer( - hidden_states, weight=output_weight, runtime_gather_output=runtime_gather_output - ) + if labels is None: + logits, _ = self.output_layer( + hidden_states, weight=output_weight, runtime_gather_output=runtime_gather_output + ) # Restore sequence parallel execution to the output layer if necessary. if sequence_parallel_override: @@ -305,6 +311,17 @@ def forward( # [s b h] => [b s h] return logits.transpose(0, 1).contiguous() - loss = self.compute_language_model_loss(labels, logits) + output_layer_kwargs = dict( + input_=hidden_states, weight=output_weight, runtime_gather_output=runtime_gather_output + ) + if self.fuse_linear_cross_entropy: + loss = self.output_layer( + output_cross_entropy_loss=self.fuse_linear_cross_entropy, + labels=labels, + **output_layer_kwargs, + ) + else: + logits, _ = self.output_layer(**output_layer_kwargs) + loss = self.compute_language_model_loss(labels, logits) return loss diff --git a/megatron/core/optimizer/cpu_offloading/optimizer_state_offloader.py b/megatron/core/optimizer/cpu_offloading/optimizer_state_offloader.py new file mode 100644 index 00000000000..81fd116c8ba --- /dev/null +++ b/megatron/core/optimizer/cpu_offloading/optimizer_state_offloader.py @@ -0,0 +1,315 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. 
+ +"""Optimizer state offloading class.""" + +from typing import TYPE_CHECKING, Dict, List, Tuple + +import torch + +if TYPE_CHECKING: + from megatron.core.optimizer.distrib_optimizer import DistributedOptimizer + + +class OptimizerStateOffloader: + """ + Manages offloading of optimizer states and master weights to CPU. + Used with DistributedOptimizer to reduce GPU memory usage. + + Supports overlapped D2H/H2D transfers using CUDA streams. + + Master weights can be stored in two locations: + - In adam optimizer state (when use_precision_aware_optimizer_no_fp8_or_ds_fp8 is True) + - In mcore's shard_fp32_from_float16_groups + """ + + OPTIMIZER_STATE_KEYS = ('exp_avg', 'exp_avg_sq') + MASTER_WEIGHT_KEY = 'master_param' + + def __init__(self, distrib_optimizer: "DistributedOptimizer"): + """ + Args: + distrib_optimizer: The DistributedOptimizer to offload states and master weights from. + """ + self.dist_optimizer = distrib_optimizer + self.adam_optimizer = distrib_optimizer.optimizer + + # Only support TE FusedAdam optimizer for now. 
+ try: + from transformer_engine.pytorch.optimizers import FusedAdam + + assert isinstance(self.adam_optimizer, FusedAdam), ( + f"OptimizerStateOffloader requires TE FusedAdam optimizer, " + f"but got {type(self.adam_optimizer).__name__}" + ) + except ImportError: + raise ImportError( + "OptimizerStateOffloader requires transformer_engine.pytorch.optimizers.FusedAdam" + ) + + # Check if master weights are stored in adam optimizer state + self.optimizer_contains_master_weights = self.adam_optimizer.master_weights + + # CUDA streams for async transfers + self._d2h_stream = torch.cuda.Stream() + self._h2d_stream = torch.cuda.Stream() + + # CPU buffers for optimizer states: {param: {key: cpu_tensor}} + self._opt_state_cpu_buffers: Dict[torch.Tensor, Dict[str, torch.Tensor]] = {} + + # CPU buffers for mcore master weights, matching the structure of source groups + # List[List[cpu_tensor]] + self._shard_fp32_from_float16_cpu_buffers: List[List[torch.Tensor]] = [] + + # State tracking + self._offloaded = False + self._offloaded_state_keys: Tuple[str, ...] = () + self._offloaded_mcore_master_weights = False + + # Track whether optimizer states (exp_avg, exp_avg_sq) have been initialized. + # These are lazily initialized by FusedAdam during the first optimizer.step(). + # Master weights (shard_fp32_from_float16_groups) are available from the start. + self._optimizer_states_initialized = False + + def mark_optimizer_states_initialized(self): + """ + Mark that optimizer states (exp_avg, exp_avg_sq) are now available. + Should be called after the first optimizer.step() completes. + """ + self._optimizer_states_initialized = True + + def _get_state_keys_to_offload( + self, offload_optimizer_states: bool, offload_master_weights: bool + ) -> Tuple[str, ...]: + """Get the state keys in FusedAdam to offload based on configuration.""" + keys = [] + # Skip optimizer states offloading if they haven't been initialized yet. 
+ # Optimizer states are lazily initialized by FusedAdam during the first optimizer.step(). + if self._optimizer_states_initialized: + if offload_optimizer_states: + keys.extend(self.OPTIMIZER_STATE_KEYS) + if offload_master_weights and self.optimizer_contains_master_weights: + keys.append(self.MASTER_WEIGHT_KEY) + return tuple(keys) + + def _ensure_state_cpu_buffer( + self, param: torch.Tensor, state_key: str, gpu_tensor: torch.Tensor, pin_memory: bool = True + ) -> torch.Tensor: + """Get or create a CPU buffer for a state tensor.""" + if param not in self._opt_state_cpu_buffers: + self._opt_state_cpu_buffers[param] = {} + + if state_key not in self._opt_state_cpu_buffers[param]: + cpu_buffer = torch.empty( + gpu_tensor.size(), + dtype=gpu_tensor.dtype, + layout=gpu_tensor.layout, + device='cpu', + pin_memory=pin_memory, + ) + self._opt_state_cpu_buffers[param][state_key] = cpu_buffer + + return self._opt_state_cpu_buffers[param][state_key] + + def _offload_shard_groups( + self, + shard_groups: List[List[torch.Tensor]], + cpu_buffers: List[List[torch.Tensor]], + pin_memory: bool = True, + ): + """Offload a shard group to CPU buffers.""" + # Initialize CPU buffers on first call + if len(cpu_buffers) == 0: + for group in shard_groups: + group_buffers = [] + for gpu_tensor in group: + cpu_buffer = torch.empty( + gpu_tensor.size(), + dtype=gpu_tensor.dtype, + layout=gpu_tensor.layout, + device='cpu', + pin_memory=pin_memory, + ) + group_buffers.append(cpu_buffer) + cpu_buffers.append(group_buffers) + + # Copy D2H + for group_idx, group in enumerate(shard_groups): + for param_idx, gpu_tensor in enumerate(group): + cpu_buffer = cpu_buffers[group_idx][param_idx] + cpu_buffer.copy_(gpu_tensor, non_blocking=pin_memory) + gpu_tensor.record_stream(self._d2h_stream) + + def _offload_states( + self, + offload_optimizer_states: bool, + offload_master_weights: bool, + use_pin_memory: bool = True, + ): + """Offload optimizer states and/or master weights to CPU.""" + # Offload 
states from adam optimizer + self._offloaded_state_keys = self._get_state_keys_to_offload( + offload_optimizer_states, offload_master_weights + ) + states = self.adam_optimizer.state + + for param, param_state in states.items(): + for state_key in self._offloaded_state_keys: + if state_key not in param_state: + continue + + gpu_tensor = param_state[state_key] + if not isinstance(gpu_tensor, torch.Tensor) or not gpu_tensor.is_cuda: + continue + + cpu_buffer = self._ensure_state_cpu_buffer( + param, state_key, gpu_tensor, use_pin_memory + ) + cpu_buffer.copy_(gpu_tensor, non_blocking=use_pin_memory) + gpu_tensor.record_stream(self._d2h_stream) + + # Offload mcore master weights if not in optimizer state + if offload_master_weights and not self.optimizer_contains_master_weights: + self._offload_shard_groups( + self.dist_optimizer.shard_fp32_from_float16_groups, + self._shard_fp32_from_float16_cpu_buffers, + use_pin_memory, + ) + self._offloaded_mcore_master_weights = True + + def _release_states(self): + """Replace optimizer state GPU tensors with CPU tensors to free GPU memory.""" + states = self.adam_optimizer.state + + for param, param_state in states.items(): + if param not in self._opt_state_cpu_buffers: + continue + + for state_key in self._offloaded_state_keys: + if state_key not in self._opt_state_cpu_buffers[param]: + continue + + param_state[state_key].untyped_storage().resize_(0) + + if self._offloaded_mcore_master_weights: + for group in self.dist_optimizer.shard_fp32_from_float16_groups: + for gpu_tensor in group: + gpu_tensor.untyped_storage().resize_(0) + + def _reload_shard_groups( + self, + shard_groups: List[List[torch.Tensor]], + cpu_buffers: List[List[torch.Tensor]], + is_allocate_stage: bool, + ): + """Reload shard groups from CPU to GPU.""" + for group_idx, group in enumerate(shard_groups): + for param_idx, _ in enumerate(group): + cpu_buffer = cpu_buffers[group_idx][param_idx] + if is_allocate_stage: + 
shard_groups[group_idx][param_idx].untyped_storage().resize_( + cpu_buffer.untyped_storage().size() + ) + else: + shard_groups[group_idx][param_idx].copy_( + cpu_buffer, non_blocking=cpu_buffer.is_pinned() + ) + + def _reload_states(self, is_allocate_stage: bool): + """ + Reload optimizer states and/or master weights from CPU to GPU. + + If is_allocate_stage is True, only allocate GPU memory for the states and master weights, + but do not copy the data from CPU to GPU. Otherwise, copy the data from CPU to GPU. + The two processes are separated to make sure that the GPU memory is allocated on the + default stream to avoid fragmentation. + """ + # Reload states to adam optimizer + states = self.adam_optimizer.state + + for param, param_state in states.items(): + if param not in self._opt_state_cpu_buffers: + continue + + for state_key in self._offloaded_state_keys: + if state_key not in self._opt_state_cpu_buffers[param]: + continue + + cpu_buffer = self._opt_state_cpu_buffers[param][state_key] + if is_allocate_stage: + param_state[state_key].untyped_storage().resize_( + cpu_buffer.untyped_storage().size() + ) + else: + param_state[state_key].copy_(cpu_buffer, non_blocking=cpu_buffer.is_pinned()) + + # Reload mcore master weights if not in optimizer state + if self._offloaded_mcore_master_weights: + self._reload_shard_groups( + self.dist_optimizer.shard_fp32_from_float16_groups, + self._shard_fp32_from_float16_cpu_buffers, + is_allocate_stage, + ) + + def offload(self, offload_optimizer_states: bool = True, offload_master_weights: bool = True): + """ + Offload optimizer states and/or master weights to CPU. + Starts async D2H transfer that can overlap with other operations. + + Args: + offload_optimizer_states: Whether to offload exp_avg, exp_avg_sq. + offload_master_weights: Whether to offload master weights. + """ + if not offload_optimizer_states and not offload_master_weights: + return + + # Wait for current stream finishing updating the optimizer states. 
+ self._d2h_stream.wait_stream(torch.cuda.current_stream()) + + with torch.cuda.stream(self._d2h_stream): + self._offload_states(offload_optimizer_states, offload_master_weights) + + self._offloaded = True + + def release_gpu_memory(self): + """ + Release GPU memory for optimizer states and master weights after D2H copy completes. + + This is separated from offload() to allow delayed GPU memory release, + which is needed for mxfp8 + overlap_param_gather case where master weights + must remain on GPU until after _copy_main_params_to_param_buffer() is called. + """ + if not self._offloaded: + return + + self._release_states() + + def reload(self): + """ + Reload optimizer states and/or master weights from CPU to GPU. + Call before optimizer.step() to ensure states are on GPU. + """ + if not self._offloaded: + return + + # Allocate GPU memory on the current stream to avoid fragmentation. + self._reload_states(is_allocate_stage=True) + + self._h2d_stream.wait_stream(self._d2h_stream) + self._h2d_stream.wait_stream(torch.cuda.current_stream()) + + # Reload states on the h2d stream to overlap with other operations. + with torch.cuda.stream(self._h2d_stream): + self._reload_states(is_allocate_stage=False) + + self._offloaded_state_keys = () + self._offloaded_mcore_master_weights = False + self._offloaded = False + + def sync_before_step(self): + """ + Wait for H2D reload to complete before optimizer.step(). + Must be called to ensure states are on GPU before optimizer uses them. + + This is separated from reload() to make it possible to move the reload ahead of time. 
+ """ + torch.cuda.current_stream().wait_stream(self._h2d_stream) diff --git a/megatron/core/optimizer/distrib_optimizer.py b/megatron/core/optimizer/distrib_optimizer.py index e2b1b0dbd73..a4364f5e92d 100644 --- a/megatron/core/optimizer/distrib_optimizer.py +++ b/megatron/core/optimizer/distrib_optimizer.py @@ -52,6 +52,7 @@ from ..fp8_utils import dequantize_fp8_tensor, is_float8tensor, quantize_param_shard from ..transformer.fsdp_dtensor_checkpoint import handle_experts_in_state_dict from ..transformer.module import MegatronModule +from .cpu_offloading.optimizer_state_offloader import OptimizerStateOffloader from .grad_scaler import MegatronGradScaler from .optimizer import MixedPrecisionOptimizer, _zero_grad_group_helper, param_group_identifier_keys from .optimizer_config import OptimizerConfig @@ -519,6 +520,8 @@ def __init__( "due to checkpointing requirements." ) + self._state_offloader: Optional[OptimizerStateOffloader] = None + # when freezing sub-models we have no real optimizer # but still need a stub DistributedOptimizer class if optimizer is None: @@ -607,6 +610,9 @@ def __init__( self.optimizer.param_groups = [g["orig_group"] for g in self.opt_group_ranges] self.optimizer.load_state_dict(self.optimizer.state_dict()) + if self.config.offload_optimizer_states: + self._state_offloader = OptimizerStateOffloader(self) + def _get_model_param_range_map(self, param: torch.nn.Parameter): """ Given a model param, get the index sub-range of the param that this @@ -2522,7 +2528,7 @@ def _build_model_param_to_state_dict_param_map(self, state_dict): for name, model_param in model_chunk.named_parameters(): while name.startswith("module."): name = name[len("module.") :] - matched_keys = [k for k in names_in_state_dict if name in k] + matched_keys = [k for k in names_in_state_dict if k.endswith(name)] assert ( len(matched_keys) == 1 ), f"Parameter {name} has {len(matched_keys)} matches in state dict" @@ -2595,6 +2601,8 @@ def step_with_ready_grads(self) -> bool: 
Under the hood, either launch synchronous param all-gathers or get ready to launch asynchorous all-gathers that get overlapped with the next forward pass. """ + if self._state_offloader is not None: + self._state_offloader.sync_before_step() update_successful = super().step_with_ready_grads() timers = self.config.timers @@ -2615,4 +2623,22 @@ def step_with_ready_grads(self) -> bool: if timers is not None: timers('params-all-gather').stop() + if self._state_offloader is not None: + self._state_offloader.mark_optimizer_states_initialized() + return update_successful + + def offload_states(self): + """Offload states to CPU.""" + if self._state_offloader is not None: + self._state_offloader.offload() + + def reload_offloaded_states(self): + """Start async reload of offloaded states.""" + if self._state_offloader is not None: + self._state_offloader.reload() + + def release_offloaded_gpu_states(self): + """Release GPU memory after D2H completes. For delayed release case.""" + if self._state_offloader is not None: + self._state_offloader.release_gpu_memory() diff --git a/megatron/core/optimizer/optimizer_config.py b/megatron/core/optimizer/optimizer_config.py index 2d3e3ca08e0..94163102eb3 100644 --- a/megatron/core/optimizer/optimizer_config.py +++ b/megatron/core/optimizer/optimizer_config.py @@ -142,7 +142,6 @@ class OptimizerConfig: ############## # General ############## - lr: Optional[float] = None """Initial learning rate. Depending on decay style and initial warmup, the learning rate at each iteration would be different. @@ -325,6 +324,12 @@ class OptimizerConfig: pin_cpu_params: bool = True """If True, pin the optimizer parameters to CPU memory.""" + offload_optimizer_states: bool = False + """ + If True, offload optimizer states to CPU after each optimizer step and + reload them before the next optimizer step. 
+ """ + ################ # Miscellaneous ################ diff --git a/megatron/core/optimizer/qk_clip.py b/megatron/core/optimizer/qk_clip.py index 72127f94712..26b5787cd50 100644 --- a/megatron/core/optimizer/qk_clip.py +++ b/megatron/core/optimizer/qk_clip.py @@ -22,6 +22,11 @@ def clip_qk(model, log_max_only=False) -> float: for model_chunk in model: for transformer_layer in model_chunk.module.module.decoder.layers: if hasattr(transformer_layer.self_attention, 'clip_qk'): + if ( + transformer_layer.self_attention.core_attention.current_max_attn_logits + is None + ): + continue torch.distributed.all_reduce( transformer_layer.self_attention.core_attention.current_max_attn_logits, op=torch.distributed.ReduceOp.MAX, diff --git a/megatron/core/pipeline_parallel/fine_grained_activation_offload.py b/megatron/core/pipeline_parallel/fine_grained_activation_offload.py index 9fbc657d574..1d2545b682d 100644 --- a/megatron/core/pipeline_parallel/fine_grained_activation_offload.py +++ b/megatron/core/pipeline_parallel/fine_grained_activation_offload.py @@ -654,6 +654,9 @@ def pop_forward_chunk(self, name=None): while not self._is_warmup and ( self._cur_forward_chunk is None or self._cur_forward_chunk.finish_all_groups(name) ): + if self._cached_chunks_index_forward >= len(self._cached_chunks_forward): + self._cur_forward_chunk = None + break self._cur_forward_chunk = self._cached_chunks_forward[self._cached_chunks_index_forward] self._cached_chunks_index_forward += 1 debug_rank(f"new cur_forward_chunk {self._cur_forward_chunk}") @@ -1253,7 +1256,6 @@ def forward_record(event: torch.cuda.Event) -> None: torch.cuda.current_stream().record_event(event) torch.cuda.current_stream().wait_stream(d2h_stream) - @staticmethod def reset(): """Reset the chunk handler.""" PipelineOffloadManager.get_instance().reset() diff --git a/megatron/core/pipeline_parallel/schedules.py b/megatron/core/pipeline_parallel/schedules.py index edca62be375..f15dcd1400b 100644 --- 
a/megatron/core/pipeline_parallel/schedules.py +++ b/megatron/core/pipeline_parallel/schedules.py @@ -2,6 +2,7 @@ import contextlib from functools import partial +from itertools import zip_longest from typing import Callable, Iterator, List, Optional, Union import torch @@ -289,6 +290,8 @@ def forward_step_calc_loss( if config.calculate_per_token_loss: MoEAuxLossAutoScaler.set_loss_scale(loss_scale) else: + # See https://github.com/NVIDIA/Megatron-LM/pull/2217 for detailed explanation + # of scaling by cp_group_size MoEAuxLossAutoScaler.set_loss_scale(loss_scale * cp_group_size / num_microbatches) # Set the loss scale for Multi-Token Prediction (MTP) loss. @@ -825,6 +828,136 @@ def get_schedule_table(num_microbatches, num_model_chunks, microbatch_group_size return schedule_table +def convert_schedule_table_to_order(num_warmup_microbatches, num_model_chunks, schedule_table): + """Convert a tunable schedule lookup table to the te.make_graphed_callables() accepted + order format. For example, the tunable schedule table for PP2 N3M5 with VP2 is as below: + virtual_microbatch_id | 0 1 2 3 4 5 6 7 8 9 + microbatch_id | 0 1 2 0 1 2 3 4 3 4 + model_chunk_id | 0 0 0 1 1 1 0 0 1 1 + + Then the forward backward separated order is: + forward | 1 1 1 2 2 2 1 1 2 2 + backward | -2 -2 -2 -1 -1 -1 -2 -2 -1 -1 + + If num_warmup_microbatches is 5, the output order is: + 1 1 1 2 2 2 -2 1 -2 1 -2 2 -1 2 -1 -1 -2 -2 -1 -1 + """ + _, model_chunk_id_table = zip(*schedule_table) + forward_order = [chunk_id + 1 for chunk_id in model_chunk_id_table] + backward_order = [chunk_id - num_model_chunks for chunk_id in model_chunk_id_table] + order = forward_order[:num_warmup_microbatches] + for i in range(num_warmup_microbatches, len(forward_order)): + order.append(forward_order[i]) + order.append(backward_order[i - num_warmup_microbatches]) + if num_warmup_microbatches > 0: + order.extend(backward_order[-num_warmup_microbatches:]) + return order + + +def 
get_overlap_moe_expert_parallel_comm_order(order, num_layers_per_chunk, capture_wgrad_graph): + """ + This functions gets the order for overlap_moe_expert_parallel_comm schedule for the original + chunk-wise order list. Each chunk is transformered to chunks with only 1 layer so that + layers between 2 chunks can now overlap with each other while following the graph order. + If capture_wgrad_graph is True, the wgrad backward graph is also added to the order by + decreasing the layer id by 0.5. + + Args: + order (List[int]): The original chunk-wise order list. Positive values represent forward + passes for chunks, negative values represent backward passes. The absolute value + indicates the chunk ID (1-indexed). + num_layers_per_chunk (List[int]): Number of graphable layers in each chunk. The length + of this list equals the number of chunks. + capture_wgrad_graph (bool): If True, weight gradient computation graphs are added to the + order by appending entries with layer_id - 0.5. + + Returns: + Tuple[List[float], List[Optional[List[int]]]]: A tuple containing: + - new_order: The layer-wise order list where each chunk is expanded to individual + layers. Positive values are forward passes, negative values are backward passes. + Values with .5 suffix indicate weight gradient computations. + - chunk_id_list: A list parallel to new_order. For forward passes, contains + [chunk_id, layer_index_within_chunk]. For backward passes, contains None. 
+ + Example: + original_order: [1, 2, -2, 1, -1, -1] + num_layers_per_chunk: [1, 2] + capture_wgrad_graph=True: + new_order: [1, 2, 3, 1, -3, -3.5, -2, -2.5, -1, -1.5, -1, -1.5] + chunk_id_list: [[0, 0], [1, 0], [1, 1], [0, 0], None, + None, None, None, None, None, None, None] + capture_wgrad_graph=False: + new_order: [1, 2, 3, 1, -3, -2, -1, -1] + chunk_id_list: [[0, 0], [1, 0], [1, 1], [0, 0], None, None, None, None] + """ + + def _add_order(new_order, chunk_id_list, c_id, layer_id, is_wgrad=False, index=None): + if is_wgrad: + new_order.append(layer_id - 0.5) + else: + new_order.append(layer_id) + if c_id > 0: + chunk_id_list.append([abs(c_id) - 1, index]) + else: + chunk_id_list.append(None) + + new_order = [] + chunk_id_list = [] + add_order = partial(_add_order, new_order, chunk_id_list) + first_backward_idx, last_forward_idx = None, None + for idx, c_id in enumerate(order): + if first_backward_idx is None and c_id < 0: + first_backward_idx = idx + if c_id > 0: + last_forward_idx = idx + + def get_layer_range(c_id): + num_layers = num_layers_per_chunk[abs(c_id) - 1] + num_layers_previous_chunks = sum(num_layers_per_chunk[: abs(c_id) - 1]) + if c_id > 0: + return list( + range(num_layers_previous_chunks + 1, num_layers_previous_chunks + num_layers + 1) + ) + return list(range(-num_layers_previous_chunks - num_layers, -num_layers_previous_chunks)) + + # warmup stage + for c_id in order[:first_backward_idx]: + layer_range = get_layer_range(c_id) + new_order += layer_range + chunk_id_list.extend([abs(c_id) - 1, i] for i in range(len(layer_range))) + + # 1f1b overlap stage + if first_backward_idx < last_forward_idx: + for c_id_b, c_id_f in zip( + order[first_backward_idx : last_forward_idx + 1 : 2], + order[first_backward_idx + 1 : last_forward_idx + 1 : 2], + ): + layer_range_f = get_layer_range(c_id_f) + layer_range_b = get_layer_range(c_id_b) + index = 0 + for l_b, l_f in zip_longest(layer_range_b, layer_range_f, fillvalue=0): + # always forward graph before 
backward graph + if l_f != 0: + add_order(c_id_f, l_f, index=index) + if l_b != 0: + add_order(c_id_b, l_b) + if capture_wgrad_graph and index < len(layer_range_b) - 1: + add_order(c_id_b, l_b, is_wgrad=True) + index += 1 + # last wgrad backward + if capture_wgrad_graph and layer_range_b: + add_order(c_id_b, layer_range_b[-1], is_wgrad=True) + + # cool down stage, backward graphs only + for c_id in order[last_forward_idx + 1 :]: + for l_b in get_layer_range(c_id): + add_order(c_id, l_b) + if capture_wgrad_graph: + add_order(c_id, l_b, is_wgrad=True) + + return new_order, chunk_id_list + + def forward_backward_pipelining_with_interleaving( *, forward_step_func, diff --git a/megatron/core/pipeline_parallel/utils.py b/megatron/core/pipeline_parallel/utils.py index 03c5f01f443..8f6b25eec32 100644 --- a/megatron/core/pipeline_parallel/utils.py +++ b/megatron/core/pipeline_parallel/utils.py @@ -116,16 +116,6 @@ def set_ideal_affinity_for_current_gpu(): ) -@contextmanager -def stream_acquire_context(stream, event): - """Stream acquire context""" - event.wait(stream) - try: - yield - finally: - event.record(stream) - - class NoopScheduleNode: """A placeholder node in the computation graph that simply passes through inputs and outputs. 
@@ -208,26 +198,21 @@ def forward(self, inputs=()): return self._forward(*inputs) def _forward(self, *inputs): - with stream_acquire_context(self.stream, self.event): - torch.cuda.nvtx.range_push(f"{self.name} forward") - with torch.cuda.stream(self.stream): - self.inputs = [make_viewless(e).detach() if e is not None else None for e in inputs] - for i, input in enumerate(self.inputs): - if input is not None: - input.requires_grad = inputs[i].requires_grad + with self.stream_acquire_context(f"{self.name} forward"): + self.inputs = [make_viewless(e).detach() if e is not None else None for e in inputs] + for i, input in enumerate(self.inputs): + if input is not None: + input.requires_grad = inputs[i].requires_grad - data = tuple(self.inputs) - data = self.forward_func(*data) + data = tuple(self.inputs) + data = self.forward_func(*data) - if not isinstance(data, tuple): - data = make_viewless(data) - else: - data = tuple( - [make_viewless(e) if isinstance(e, torch.Tensor) else e for e in data] - ) + if not isinstance(data, tuple): + data = make_viewless(data) + else: + data = tuple([make_viewless(e) if isinstance(e, torch.Tensor) else e for e in data]) - self.output = data - torch.cuda.nvtx.range_pop() + self.output = data # Immediately frees input tensors after they are used for nodes # where inputs are no longer needed after computation. 
@@ -250,18 +235,15 @@ def backward(self, output_grad): return self._backward(*output_grad) def _backward(self, *output_grad): - with stream_acquire_context(self.stream, self.event): - torch.cuda.nvtx.range_push(f"{self.name} backward") - with torch.cuda.stream(self.stream): - outputs = self.output - if not isinstance(outputs, tuple): - outputs = (outputs,) - assert len(outputs) == len(output_grad), ( - f"{len(outputs)} of {type(outputs[0])} is not equal to " - f"{len(output_grad)} of {type(output_grad[0])}" - ) - output_grad = self.backward_func(outputs, output_grad) - torch.cuda.nvtx.range_pop() + with self.stream_acquire_context(f"{self.name} backward"): + outputs = self.output + if not isinstance(outputs, tuple): + outputs = (outputs,) + assert len(outputs) == len(output_grad), ( + f"{len(outputs)} of {type(outputs[0])} is not equal to " + f"{len(output_grad)} of {type(output_grad[0])}" + ) + output_grad = self.backward_func(outputs, output_grad) # output_grad maybe from another stream if output_grad: @@ -288,6 +270,30 @@ def get_grad(self): grad = grad[0] return grad + @contextmanager + def stream_acquire_context(self, name=None): + """Stream acquire context that handles event synchronization, + NVTX profiling, and stream context. + + This context manager consolidates: + 1. Event wait/record for synchronization between streams + 2. NVTX range for profiling (if name is provided) + 3. 
torch.cuda.stream context for execution on the specified stream + + Args: + name: Optional name for NVTX range profiling + """ + self.event.wait(self.stream) + if name: + torch.cuda.nvtx.range_push(name) + try: + with torch.cuda.stream(self.stream): + yield + finally: + if name: + torch.cuda.nvtx.range_pop() + self.event.record(self.stream) + def _release_state(self): """Clear the state of the node""" self.inputs = None diff --git a/megatron/core/ssm/gated_delta_net.py b/megatron/core/ssm/gated_delta_net.py index 70e749724dc..d347697bae6 100644 --- a/megatron/core/ssm/gated_delta_net.py +++ b/megatron/core/ssm/gated_delta_net.py @@ -21,6 +21,12 @@ from megatron.core.jit import jit_fuser from megatron.core.packed_seq_params import PackedSeqParams from megatron.core.process_groups_config import ProcessGroupCollection +from megatron.core.ssm.mamba_context_parallel import ( + _all_to_all_cp2hp, + _all_to_all_hp2cp, + _redo_attention_load_balancing, + _undo_attention_load_balancing, +) from megatron.core.tensor_parallel import get_cuda_rng_tracker from megatron.core.transformer import TransformerConfig from megatron.core.transformer.identity_op import IdentityOp @@ -33,25 +39,19 @@ ) from megatron.core.utils import deprecate_inference_params, nvtx_range_pop, nvtx_range_push -# TODO: Implement GatedDeltaNetContextParallel -# from .gated_delta_net_context_parallel import GatedDeltaNetContextParallel - try: + from fla.modules.convolution import causal_conv1d from fla.modules.l2norm import l2norm from fla.ops.gated_delta_rule import chunk_gated_delta_rule HAVE_FLA = True except ImportError: + causal_conv1d = None + l2norm = None chunk_gated_delta_rule = None HAVE_FLA = False -try: - from causal_conv1d import causal_conv1d_fn -except ImportError: - causal_conv1d_fn = None - causal_conv1d_update = None - logger = logging.getLogger(__name__) @@ -85,6 +85,7 @@ def __init__( use_qk_l2norm: bool = True, A_init_range: Tuple[float, float] = (1, 16), pg_collection: 
ProcessGroupCollection = None, + **kwargs, ): """ Args: @@ -117,6 +118,7 @@ def __init__( self.use_qk_l2norm = use_qk_l2norm assert pg_collection is not None, "pg_collection must be provided for GatedDeltaNet" self.pg_collection = pg_collection + self.cp_size = self.pg_collection.cp.size() self.tp_size = self.pg_collection.tp.size() self.sp_size = self.tp_size if config.sequence_parallel else 1 @@ -132,6 +134,8 @@ def __init__( self.num_value_heads = config.linear_num_value_heads self.qk_dim = self.key_head_dim * self.num_key_heads self.v_dim = self.value_head_dim * self.num_value_heads + self.qk_dim_local_tp = self.qk_dim // self.tp_size + self.v_dim_local_tp = self.v_dim // self.tp_size # Input projection (hidden_states -> q, k, v, gate, beta, alpha) # TODO: for now, output gate is forced for GDN. @@ -198,6 +202,11 @@ def __init__( ) setattr(self.A_log, "tensor_model_parallel", True) + if self.config.deterministic_mode: + self.gated_delta_rule = torch_chunk_gated_delta_rule + else: + self.gated_delta_rule = chunk_gated_delta_rule + # Output layernorm before projection self.out_norm = build_module( submodules.out_norm, @@ -220,8 +229,6 @@ def __init__( tp_group=self.pg_collection.tp, ) - # TODO: support CP - self.reset_parameters() def reset_parameters(self): @@ -250,9 +257,7 @@ def forward( self, hidden_states: Tensor, attention_mask: Tensor, - key_value_states: Optional[Tensor] = None, inference_context: Optional[BaseInferenceContext] = None, - attention_bias: Optional[Tensor] = None, packed_seq_params: Optional[PackedSeqParams] = None, sequence_len_offset: Optional[int] = None, *, @@ -265,10 +270,8 @@ def forward( Args: hidden_states (Tensor): Hidden states. attention_mask (Tensor): Attention mask. - key_value_states (Optional[Tensor]): Key/value states (for cross attention). inference_context (Optional[BaseInferenceContext]): Inference context that manages KV cache. - attention_bias (Optional[Tensor]): Attention bias. 
packed_seq_params (Optional[PackedSeqparams]): Parameters used for THD format. sequence_len_offset (Optional[int]): Sequence length offset used for inference CUDA graphs. @@ -282,7 +285,7 @@ def forward( inference_context = deprecate_inference_params(inference_context, inference_params) seq_len, batch, _ = hidden_states.shape - seq_len = seq_len * self.sp_size + seq_len = seq_len * self.sp_size * self.cp_size if inference_context is not None: assert ( @@ -293,14 +296,76 @@ def forward( raise NotImplementedError("GDN does not support inference for now.") if packed_seq_params is not None: - # TODO: support packed sequence - raise NotImplementedError("GDN does not support packed sequence for now.") + assert batch == 1, "Packed sequence expects batch dimension to be 1" + assert ( + not self.config.deterministic_mode + ), "Packed sequence does not support deterministic mode." + + # Prefer cu_seqlens_q_padded if available, otherwise use cu_seqlens_q + if packed_seq_params.cu_seqlens_q_padded is not None: + cu_seqlens_q = packed_seq_params.cu_seqlens_q_padded + else: + cu_seqlens_q = packed_seq_params.cu_seqlens_q + # Prefer cu_seqlens_kv_padded if available, otherwise use cu_seqlens_kv + if packed_seq_params.cu_seqlens_kv_padded is not None: + cu_seqlens_kv = packed_seq_params.cu_seqlens_kv_padded + else: + cu_seqlens_kv = packed_seq_params.cu_seqlens_kv + assert torch.equal(cu_seqlens_q, cu_seqlens_kv), ( + "Currently only support cu_seqlens_q equals to cu_seqlens_kv, " + f"but got {cu_seqlens_q=} and {cu_seqlens_kv=}" + ) + num_packed_seqs = cu_seqlens_q.shape[0] - 1 + assert num_packed_seqs > 0, ( + "Number of packed sequences must be greater than 0, " + f"but got {cu_seqlens_q=} and {cu_seqlens_kv=}" + ) + else: + cu_seqlens_q = None + cu_seqlens_kv = None # Input projection nvtx_range_push(suffix="in_proj") qkvzba, _ = self.in_proj(hidden_states) nvtx_range_pop(suffix="in_proj") + # CP All to All: CP to HP + if packed_seq_params is not None: + unpacked_qkvzba = 
_unpack_sequence(qkvzba, cu_seqlens_q // self.cp_size, dim=0) + outputs = [] + for qkvzba_i in unpacked_qkvzba: + qkvzba_i = tensor_a2a_cp2hp( + qkvzba_i, + seq_dim=0, + head_dim=-1, + cp_group=self.pg_collection.cp, + split_sections=[ + self.qk_dim_local_tp, + self.qk_dim_local_tp, + self.v_dim_local_tp, + self.v_dim_local_tp, + self.num_value_heads // self.tp_size, + self.num_value_heads // self.tp_size, + ], + ) + outputs.append(qkvzba_i) + qkvzba = torch.cat(outputs, dim=0) + else: + qkvzba = tensor_a2a_cp2hp( + qkvzba, + seq_dim=0, + head_dim=-1, + cp_group=self.pg_collection.cp, + split_sections=[ + self.qk_dim_local_tp, + self.qk_dim_local_tp, + self.v_dim_local_tp, + self.v_dim_local_tp, + self.num_value_heads // self.tp_size, + self.num_value_heads // self.tp_size, + ], + ) + # Transpose: s b x --> b s x # From sbhd to bshd format qkvzba = qkvzba.transpose(0, 1) @@ -309,10 +374,10 @@ def forward( qkv, gate, beta, alpha = torch.split( qkvzba, [ - (self.qk_dim * 2 + self.v_dim) // self.tp_size, - self.v_dim // self.tp_size, - self.num_value_heads // self.tp_size, - self.num_value_heads // self.tp_size, + (self.qk_dim_local_tp * 2 + self.v_dim_local_tp) // self.cp_size, + self.v_dim_local_tp // self.cp_size, + self.num_value_heads // self.tp_size // self.cp_size, + self.num_value_heads // self.tp_size // self.cp_size, ], dim=-1, ) @@ -321,33 +386,75 @@ def forward( alpha = alpha.reshape(batch, seq_len, -1) # Convolution on qkv - qkv = qkv.transpose(1, 2).contiguous() # b, s, d -> b, d, s nvtx_range_push(suffix="conv1d") - if (causal_conv1d_fn is None) or self.config.deterministic_mode: - qkv = self.act_fn(self.conv1d(qkv)[..., :seq_len]) + seq_len = qkv.shape[1] + qkv_channels_split_sections = [ + self.qk_dim_local_tp, + self.qk_dim_local_tp, + self.v_dim_local_tp, + ] + conv1d_weight = get_parameter_local_cp( + self.conv1d.weight, + dim=0, + cp_group=self.pg_collection.cp, + split_sections=qkv_channels_split_sections, + ) + conv1d_bias = ( + 
get_parameter_local_cp( + self.conv1d.bias, + dim=0, + cp_group=self.pg_collection.cp, + split_sections=qkv_channels_split_sections, + ) + if self.conv_bias + else None + ) + if self.config.deterministic_mode: + qkv = qkv.transpose(1, 2).contiguous() # b, s, d -> b, d, s + conv_out = F.conv1d( + input=qkv, # Torch-native only accept [b, d, s] format input + weight=conv1d_weight, + bias=conv1d_bias, + stride=self.conv1d.stride, + padding=self.conv1d.padding, + dilation=self.conv1d.dilation, + groups=self.conv_dim_local_tp // self.cp_size, + ) + qkv = self.act_fn(conv_out[..., :seq_len]) + qkv = qkv.transpose(1, 2) # b, d, s -> b, s, d else: assert self.activation in ["silu", "swish"] - qkv = causal_conv1d_fn( - x=qkv, - weight=self.conv1d.weight.squeeze(1), # d, 1, w -> d, w - bias=self.conv1d.bias, + qkv, _ = causal_conv1d( + x=qkv, # FLA conv1d accepts [b, s, d] format input + weight=conv1d_weight.squeeze(1), # d, 1, w -> d, w + bias=conv1d_bias, activation=self.activation, + initial_state=None, + output_final_state=False, + cu_seqlens=cu_seqlens_q, ) nvtx_range_pop(suffix="conv1d") - # Split qkv into query, key, and value - qkv = qkv.transpose(1, 2) # b, d, s -> b, s, d - query, key, value = torch.split( + + # Split qkv into query_key, and value + query_key, value = torch.split( qkv, - [self.qk_dim // self.tp_size, self.qk_dim // self.tp_size, self.v_dim // self.tp_size], + [2 * self.qk_dim_local_tp // self.cp_size, self.v_dim_local_tp // self.cp_size], dim=-1, ) - query = query.reshape(batch, seq_len, -1, self.key_head_dim) - key = key.reshape(batch, seq_len, -1, self.key_head_dim) + query_key = query_key.reshape(batch, seq_len, -1, self.key_head_dim) value = value.reshape(batch, seq_len, -1, self.value_head_dim) # Apply L2 norm to query and key if self.use_qk_l2norm: - query = l2norm(query.contiguous()) - key = l2norm(key.contiguous()) + query_key = l2norm(query_key.contiguous()) + # Split query and key. 
+ query, key = torch.split( + query_key, + [ + self.qk_dim_local_tp // self.key_head_dim // self.cp_size, + self.qk_dim_local_tp // self.key_head_dim // self.cp_size, + ], + dim=2, + ) if self.num_value_heads // self.num_key_heads > 1: query = query.repeat_interleave(self.num_value_heads // self.num_key_heads, dim=2) key = key.repeat_interleave(self.num_value_heads // self.num_key_heads, dim=2) @@ -362,33 +469,26 @@ def forward( # Calculate g and beta nvtx_range_push(suffix="g_and_beta") - g = -self.A_log.exp() * F.softplus(alpha.float() + self.dt_bias) # In fp32 + A_log_local_cp = get_parameter_local_cp(self.A_log, dim=0, cp_group=self.pg_collection.cp) + dt_bias_local_cp = get_parameter_local_cp( + self.dt_bias, dim=0, cp_group=self.pg_collection.cp + ) + g = -A_log_local_cp.exp() * F.softplus(alpha.float() + dt_bias_local_cp) # In fp32 beta = beta.sigmoid() nvtx_range_pop(suffix="g_and_beta") nvtx_range_push(suffix="gated_delta_rule") - if self.config.deterministic_mode: - core_attn_out, last_recurrent_state = torch_chunk_gated_delta_rule( - query, - key, - value, - g=g, - beta=beta, - initial_state=None, - output_final_state=False, - use_qk_l2norm_in_kernel=False, - ) - else: - core_attn_out, last_recurrent_state = chunk_gated_delta_rule( - query, - key, - value, - g=g, - beta=beta, - initial_state=None, - output_final_state=False, - use_qk_l2norm_in_kernel=False, - ) + core_attn_out, last_recurrent_state = self.gated_delta_rule( + query, + key, + value, + g=g, + beta=beta, + initial_state=None, + output_final_state=False, + use_qk_l2norm_in_kernel=False, + cu_seqlens=cu_seqlens_q, + ) nvtx_range_pop(suffix="gated_delta_rule") # RMSNorm @@ -401,6 +501,21 @@ def forward( norm_out = norm_out.reshape(batch, seq_len, -1) norm_out = norm_out.transpose(0, 1).contiguous() + # CP all to all: HP to CP + if packed_seq_params is not None: + unpacked_norm_out = _unpack_sequence(norm_out, cu_seqlens_q, dim=0) + outputs = [] + for norm_out_i in unpacked_norm_out: + 
norm_out_i = tensor_a2a_hp2cp( + norm_out_i, seq_dim=0, head_dim=-1, cp_group=self.pg_collection.cp + ) + outputs.append(norm_out_i) + norm_out = torch.cat(outputs, dim=0) + else: + norm_out = tensor_a2a_hp2cp( + norm_out, seq_dim=0, head_dim=-1, cp_group=self.pg_collection.cp + ) + # Output projection nvtx_range_push(suffix="out_proj") out, out_bias = self.out_proj(norm_out) @@ -474,10 +589,10 @@ def sharded_state_dict(self, prefix="", sharded_offsets=(), metadata=None, tp_gr sharded_state_dict[f"{prefix}in_proj.weight"] = _split_tensor_factory( sharded_state_dict[f"{prefix}in_proj.weight"], [ - self.qk_dim // self.tp_size, - self.qk_dim // self.tp_size, - self.v_dim // self.tp_size, - self.v_dim // self.tp_size, + self.qk_dim_local_tp, + self.qk_dim_local_tp, + self.v_dim_local_tp, + self.v_dim_local_tp, self.num_value_heads // self.tp_size, self.num_value_heads // self.tp_size, ], @@ -497,18 +612,41 @@ def sharded_state_dict(self, prefix="", sharded_offsets=(), metadata=None, tp_gr for conv_layer_name in conv_layer_name_list: sharded_state_dict[f"{prefix}{conv_layer_name}"] = _split_tensor_factory( sharded_state_dict[f"{prefix}{conv_layer_name}"], - [ - self.qk_dim // self.tp_size, - self.qk_dim // self.tp_size, - self.v_dim // self.tp_size, - ], + [self.qk_dim_local_tp, self.qk_dim_local_tp, self.v_dim_local_tp], ["query", "key", "value"], 0, ) return sharded_state_dict + def backward_dw(self): + """Execute weight gradient computation for all linear layers.""" + self._backward_in_proj() + self._backward_out_proj() + + def _backward_in_proj(self): + """Computes weight gradients of input projection layer.""" + self.in_proj.backward_dw() + + def _backward_out_proj(self): + """Computes weight gradients of output projection layer.""" + self.out_proj.backward_dw() + +def _unpack_sequence(x, cu_seqlens, dim=1): + unpacked_x = [] + num_seqs = cu_seqlens.shape[0] - 1 + for i in range(num_seqs): + idx_start = cu_seqlens[i].item() + idx_end = cu_seqlens[i + 1].item() + 
chunked_index = [slice(None)] * dim + [slice(idx_start, idx_end)] + unpacked_x.append(x[tuple(chunked_index)]) + return unpacked_x + + +#################### +# Sharded state dict utilities +#################### def _split_tensor_factory( orig_sh_ten: ShardedTensor, split_sections: List[int], split_names: List[str], split_dim: int ) -> ShardedTensorFactory: @@ -569,6 +707,184 @@ def sh_ten_merge_fn(sub_state_dict): ) +#################### +# Context parallel utilities +#################### +def get_parameter_local_cp( + param: torch.Tensor, + dim: int, + cp_group: torch.distributed.ProcessGroup, + split_sections: Optional[List[int]] = None, +) -> torch.Tensor: + """Get the local parameter for the current context parallel rank. + + Args: + param (torch.Tensor): The entire parameter to get the local parameter for. + dim (int): The dimension to split the parameter along. Usually the dimension of head. + cp_group (torch.distributed.ProcessGroup): The context parallel group. + split_sections (Optional[List[int]]): If not None, + first split the parameter along the dimension dim into sections, + then get the local hidden parallel weights separately, + finally concatenate the local hidden parallel weights along the dimension dim. + + Returns: + torch.Tensor: The local parameter for the current context parallel rank. + """ + + cp_size = cp_group.size() + cp_rank = cp_group.rank() + + # No need to split if CP size is 1. + if cp_size == 1: + return param + + # Split first if needed. + if split_sections is not None: + inputs = torch.split(param, split_sections, dim=dim) + outputs = [] + for p in inputs: + p = get_parameter_local_cp(p, dim, cp_group) + outputs.append(p) + return torch.cat(outputs, dim=dim) + + # Slice the parameter. 
+ slices = [slice(None)] * param.dim() + dim_size = param.size(dim=dim) + slices[dim] = slice(cp_rank * dim_size // cp_size, (cp_rank + 1) * dim_size // cp_size) + param = param[slices] + return param + + +def tensor_a2a_cp2hp( + tensor: torch.Tensor, + seq_dim: int, + head_dim: int, + cp_group: torch.distributed.ProcessGroup, + split_sections: Optional[List[int]] = None, + undo_attention_load_balancing: bool = True, +): + """All-to-all context parallel to hidden parallel. + + Args: + tensor (torch.Tensor): The tensor to all-to-all. + Currently only support (seq_len, batch, head_dim) shaped tensor. + seq_dim (int): The dimension of sequence length. Currently only supports seq_dim == 0. + head_dim (int): The dimension of head. Currently only supports head_dim == -1 or 2. + cp_group (torch.distributed.ProcessGroup): The context parallel group. + split_sections (Optional[List[int]]): If not None, split the tensor along the dimension + head_dim into sections first, then do all-to-all for each section separately, + finally concatenate the separated tensors along the dimension head_dim. + undo_attention_load_balancing (bool): Whether to undo the attention load balancing of CP. + + Returns: + torch.Tensor: The all-to-all tensor. + """ + + cp_size = cp_group.size() + + # No need to all-to-all if CP size is 1. + if cp_size == 1: + return tensor + + # Limitations of mamba_context_parallel._all_to_all_cp2hp. + assert seq_dim == 0, f"tensor_a2a_cp2hp only supports seq_dim == 0 for now, but got {seq_dim=}" + assert ( + head_dim == -1 or head_dim == 2 + ), f"tensor_a2a_cp2hp only supports head_dim == -1 or 2 for now, but got {head_dim=}" + assert ( + tensor.dim() == 3 + ), f"tensor_a2a_cp2hp only supports 3-d input tensor for now, but got {tensor.dim()=}" + + # Split first if needed. 
+ if split_sections is not None: + inputs = torch.split(tensor, split_sections, dim=head_dim) + outputs = [] + for x in inputs: + x = tensor_a2a_cp2hp( + x, + seq_dim=seq_dim, + head_dim=head_dim, + cp_group=cp_group, + undo_attention_load_balancing=False, + ) + outputs.append(x) + tensor = torch.cat(outputs, dim=head_dim) + else: + tensor = _all_to_all_cp2hp(tensor, cp_group) + + # Undo attention load balancing last if needed. + if undo_attention_load_balancing: + tensor = _undo_attention_load_balancing(tensor, cp_size) + return tensor + + +def tensor_a2a_hp2cp( + tensor: torch.Tensor, + seq_dim: int, + head_dim: int, + cp_group: torch.distributed.ProcessGroup, + split_sections: Optional[List[int]] = None, + redo_attention_load_balancing: bool = True, +): + """All-to-all hidden parallel to context parallel. + + Args: + tensor (torch.Tensor): The tensor to all-to-all. + Currently only support (seq_len, batch, head_dim) shaped tensor. + seq_dim (int): The dimension of sequence length. Currently only supports seq_dim == 0. + head_dim (int): The dimension of head. Currently only supports head_dim == -1 or 2. + cp_group (torch.distributed.ProcessGroup): The context parallel group. + split_sections (Optional[List[int]]): If not None, first split the tensor along the + dimension head_dim into sections, then do all-to-all for each section separately, + finally concatenate the separated tensors along the dimension head_dim. + redo_attention_load_balancing (bool): Whether to redo the attention load balancing of HP. + + Returns: + torch.Tensor: The all-to-all tensor. + """ + + cp_size = cp_group.size() + + # No need to all-to-all if CP size is 1. + if cp_size == 1: + return tensor + + # Limitations of mamba_context_parallel._all_to_all_hp2cp. 
+ assert seq_dim == 0, f"tensor_a2a_cp2hp only supports seq_dim == 0 for now, but got {seq_dim=}" + assert ( + head_dim == -1 or head_dim == 2 + ), f"tensor_a2a_cp2hp only supports head_dim == -1 or 2 for now, but got {head_dim=}" + assert ( + tensor.dim() == 3 + ), f"tensor_a2a_cp2hp only supports 3-d input tensor for now, but got {tensor.dim()=}" + + # Redo attention load balancing first if needed. + if redo_attention_load_balancing: + tensor = _redo_attention_load_balancing(tensor, cp_size) + + # Split first if needed. + if split_sections is not None: + inputs = torch.split(tensor, split_sections, dim=head_dim) + outputs = [] + for x in inputs: + x = tensor_a2a_hp2cp( + x, + seq_dim=seq_dim, + head_dim=head_dim, + cp_group=cp_group, + redo_attention_load_balancing=False, + ) + outputs.append(x) + tensor = torch.cat(outputs, dim=head_dim) + else: + tensor = _all_to_all_hp2cp(tensor, cp_group) + + return tensor + + +#################### +# Torch native gated delta rule +#################### def torch_chunk_gated_delta_rule( query, key, @@ -579,6 +895,7 @@ def torch_chunk_gated_delta_rule( initial_state=None, output_final_state=False, use_qk_l2norm_in_kernel=False, + cu_seqlens=None, ): # pylint: disable=line-too-long ''' @@ -588,6 +905,10 @@ def torch_chunk_gated_delta_rule( Reference: https://github.com/huggingface/transformers/blob/144c8ce2809a2e21914017652700e1ecb450501e/src/transformers/models/qwen3_next/modeling_qwen3_next.py#L470-L547 ''' + assert cu_seqlens is None, ( + "cu_seqlens is not supported for torch_chunk_gated_delta_rule for now." 
+ ) + initial_dtype = query.dtype if use_qk_l2norm_in_kernel: query = l2norm(query, dim=-1, eps=1e-6) diff --git a/megatron/core/ssm/mamba_layer.py b/megatron/core/ssm/mamba_layer.py index ac6e8b5bf40..f86c7f53e1a 100644 --- a/megatron/core/ssm/mamba_layer.py +++ b/megatron/core/ssm/mamba_layer.py @@ -193,6 +193,9 @@ def _should_call_local_cudagraph(self, *args, **kwargs): and kwargs.get('attention_mask') is None and kwargs.get('inference_context') is not None ): - using_cuda_graph = kwargs['inference_context'].using_cuda_graph_this_step() - return using_cuda_graph + if hasattr(kwargs['inference_context'], "using_cuda_graph_this_step"): + return kwargs['inference_context'].using_cuda_graph_this_step() + else: + # static + return kwargs['inference_context'].is_decode_only() return False diff --git a/megatron/core/ssm/mamba_mixer.py b/megatron/core/ssm/mamba_mixer.py index cc71cdc32f6..1910c96cb11 100644 --- a/megatron/core/ssm/mamba_mixer.py +++ b/megatron/core/ssm/mamba_mixer.py @@ -315,18 +315,26 @@ def __init__( self.act = nn.SiLU() with get_cuda_rng_tracker().fork(): - # Initialize dt bias so that F.softplus(dt_bias) is between dt_min and dt_max - dt = torch.exp( - torch.rand( + if self.config.perform_initialization: + # Initialize dt bias so that F.softplus(dt_bias) is between dt_min and dt_max + dt = torch.exp( + torch.rand( + self.nheads_local_tp, + device=torch.cuda.current_device(), + dtype=config.params_dtype, + ) + * (math.log(dt_max) - math.log(dt_min)) + + math.log(dt_min) + ).clamp(min=dt_init_floor) + # Inverse of softplus: https://github.com/pytorch/pytorch/issues/72759 + inv_dt = dt + torch.log(-torch.expm1(-dt)) + else: + inv_dt = torch.empty( self.nheads_local_tp, device=torch.cuda.current_device(), dtype=config.params_dtype, ) - * (math.log(dt_max) - math.log(dt_min)) - + math.log(dt_min) - ).clamp(min=dt_init_floor) - # Inverse of softplus: https://github.com/pytorch/pytorch/issues/72759 - inv_dt = dt + torch.log(-torch.expm1(-dt)) + 
self.dt_bias = nn.Parameter(inv_dt) setattr(self.dt_bias, "tensor_model_parallel", True) diff --git a/megatron/core/tensor_parallel/layers.py b/megatron/core/tensor_parallel/layers.py index 69f442eb2d4..b2b254dec32 100644 --- a/megatron/core/tensor_parallel/layers.py +++ b/megatron/core/tensor_parallel/layers.py @@ -253,6 +253,10 @@ def __init__( rank=get_pg_rank(self.tp_group), world_size=get_pg_size(self.tp_group), ) + else: + set_tensor_model_parallel_attributes( + tensor=self.weight, is_parallel=True, dim=0, stride=1 + ) else: self.weight = Parameter( torch.empty( @@ -264,6 +268,10 @@ def __init__( ) if config.perform_initialization: _initialize_affine_weight_gpu(self.weight, init_method, partition_dim=0, stride=1) + else: + set_tensor_model_parallel_attributes( + tensor=self.weight, is_parallel=True, dim=0, stride=1 + ) def forward(self, input_): """Forward. @@ -865,6 +873,10 @@ def __init__( rank=rank, world_size=world_size, ) + else: + set_tensor_model_parallel_attributes( + tensor=self.weight, is_parallel=True, dim=0, stride=stride + ) else: self.weight = Parameter( torch.empty( @@ -882,6 +894,10 @@ def __init__( stride=stride, is_expert=self.is_expert, ) + else: + set_tensor_model_parallel_attributes( + tensor=self.weight, is_parallel=True, dim=0, stride=stride + ) setattr(self.weight, "allreduce", not (self.is_expert and self.expert_parallel)) else: @@ -1189,6 +1205,10 @@ def __init__( rank=rank, world_size=world_size, ) + else: + set_tensor_model_parallel_attributes( + tensor=self.weight, is_parallel=True, dim=1, stride=stride + ) else: self.weight = Parameter( torch.empty( @@ -1206,6 +1226,10 @@ def __init__( stride=stride, is_expert=self.is_expert, ) + else: + set_tensor_model_parallel_attributes( + tensor=self.weight, is_parallel=True, dim=1, stride=stride + ) setattr(self.weight, "allreduce", not (self.is_expert and self.expert_parallel)) if bias: diff --git a/megatron/core/tensor_parallel/random.py b/megatron/core/tensor_parallel/random.py index 
bf00717ab6c..bc11c9fbb61 100644 --- a/megatron/core/tensor_parallel/random.py +++ b/megatron/core/tensor_parallel/random.py @@ -1,4 +1,4 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. # Parts of the code here are adapted from PyTorch # repo: https://github.com/pytorch/pytorch diff --git a/megatron/core/transformer/attention.py b/megatron/core/transformer/attention.py index 2200b558225..bc5e4e2ee0d 100644 --- a/megatron/core/transformer/attention.py +++ b/megatron/core/transformer/attention.py @@ -307,6 +307,7 @@ def __init__( self.key_hidden_size = self.hidden_size_per_attention_head self.val_hidden_size = self.hidden_size_per_attention_head + # TODO: This is built twice when using MLA, should be refactored. if self.config.num_query_groups < world_size: # TE throws an assertion error if num_kv_heads / num_query_groups # is not divisible by TP size. @@ -970,12 +971,15 @@ def forward( self.k_layernorm is None or isinstance(self.k_layernorm, IdentityOp), ] ) + output_gate = self.config.attention_output_gate # Check if fused_single_qkv_rope is requested but either unavailable or not # supported for the current use case. if self.attention_type != "cross": assert not ( self.config.fused_single_qkv_rope and split_qkv ), "fused_single_qkv_rope requested but not available/supported for the config." + if output_gate: + assert split_qkv, "output_gate is not supported for unsplit mixed_qkv tensor." 
with off_interface(self.offload_qkv_linear, hidden_states, "qkv_linear") as hidden_states: qkv_output = self.get_query_key_value_tensors( @@ -1186,6 +1190,7 @@ def forward( core_attn_out = off_interface.group_commit( core_attn_out, name="core_attn", forced_released_tensors=[query, key, value] ) + if packed_seq_params is not None and packed_seq_params.qkv_format == 'thd': # reshape to same output shape as unpacked case # (t, np, hn) -> (t, b=1, h=np*hn) @@ -1670,6 +1675,8 @@ def get_query_key_value_tensors( Derives `query` tensor from `hidden_states`, and `key`/`value` tensors from `key_value_states`. """ + assert not output_gate, "Output gate is not supported in cross attention for now." + assert split_qkv, "split_qkv must be True for CrossAttention" assert not output_gate, "Output gate is not supported in cross attention for now." diff --git a/megatron/core/transformer/dot_product_attention.py b/megatron/core/transformer/dot_product_attention.py index 69039e0bfd0..26622839c14 100644 --- a/megatron/core/transformer/dot_product_attention.py +++ b/megatron/core/transformer/dot_product_attention.py @@ -1,4 +1,4 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
import math @@ -12,6 +12,9 @@ from megatron.core.fusions.fused_softmax import FusedScaleMaskSoftmax from megatron.core.packed_seq_params import PackedSeqParams from megatron.core.process_groups_config import ProcessGroupCollection +from megatron.core.transformer.dot_product_attention_context_parallel import ( + AttentionFuncionWithContextParallel, +) from megatron.core.transformer.enums import AttnMaskType from megatron.core.transformer.module import MegatronModule from megatron.core.transformer.transformer_config import TransformerConfig @@ -54,9 +57,12 @@ def __init__( self.config: TransformerConfig = config - assert ( - self.config.context_parallel_size == 1 - ), "Context parallelism is only supported by TEDotProductAttention!" + if self.config.context_parallel_size > 1: + assert attention_dropout is None and self.config.attention_dropout == 0.0, ( + f'DotProductAttention with context parallelism does not support attention dropout,' + f' but got {self.config.context_parallel_size=},' + f' {attention_dropout=}, and {self.config.attention_dropout=}.' 
+ ) self.layer_number = max(1, layer_number) self.attn_mask_type = attn_mask_type @@ -174,6 +180,19 @@ def forward( self.num_attention_heads_per_partition // self.num_query_groups_per_partition, dim=2 ) + if self.config.context_parallel_size > 1: + output = AttentionFuncionWithContextParallel.apply( + query, + key, + value, + attention_mask, + self.config.attention_dropout, + self.softmax_scale, + parallel_state.get_context_parallel_group(), + ) + output = output.view(query.shape[0], query.shape[1], self.hidden_size_per_partition) + return output + # [b, np, sq, sk] output_size = (query.size(1), query.size(2), query.size(0), key.size(0)) diff --git a/megatron/core/transformer/dot_product_attention_context_parallel.py b/megatron/core/transformer/dot_product_attention_context_parallel.py new file mode 100644 index 00000000000..aaf08d40ade --- /dev/null +++ b/megatron/core/transformer/dot_product_attention_context_parallel.py @@ -0,0 +1,345 @@ +# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + +# Some of this code was adopted from https://github.com/zhuzilin/ring-flash-attention/ +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import torch +from torch.nn import functional as F + +try: + import einops + + HAVE_EINOPS = True +except ImportError: + HAVE_EINOPS = False + + +@torch.no_grad +def eager_attn_fwd(q, k, v, attn_bias, sinks, scale, dropout): + """Forward pass for eager attention""" + + # Rearrange query, key, value to (b, h, s, d) + b, sq, h, d = q.shape + sk = k.shape[1] + _q = einops.rearrange(q, 'b s h d -> b h s d') + _k = einops.rearrange(k, 'b s h d -> b h d s') + _v = einops.rearrange(v, 'b s h d -> b h s d') + + # Compute attention weights + attn_w = torch.matmul(_q, _k) * scale + attn_w = attn_w + attn_bias + + # Add sinks to attention weights + if sinks is None: + logits = attn_w + else: + _sinks = sinks.reshape(1, h, 1, 1).expand(b, -1, sq, 1) + logits = torch.cat([attn_w, _sinks], dim=-1) + + # Compute attention scores + probs = F.softmax(logits, dim=-1, dtype=logits.dtype) + if sinks is None: + attn_w = probs + else: + attn_w = probs[..., :-1] # Drop the sink + + # Compute attention output + attn_output = torch.matmul(attn_w, _v) + attn_output = einops.rearrange(attn_output, 'b h s d -> b s h d') + attn_output = attn_output.contiguous() + + return attn_output, probs + + +@torch.no_grad +def eager_attn_bwd(q, k, v, attn_bias, sinks, scale, dropout, attn_output, probs, grad_output): + """Backward pass for eager attention""" + + # Rearrange query, key, value to (b, h, s, d) + b, sq, h, d = q.shape + sk = k.shape[1] + _q_T = einops.rearrange(q, 'b s h d -> b h d s') + _k_T = einops.rearrange(k, 'b s h d -> b h s d') + _v_T = einops.rearrange(v, ' b s h d -> b h d s') + + # Backward pass for score @ value + if sinks is None: + attn_w = probs + else: + attn_w = probs[..., :-1] # Drop the sink + grad_output = einops.rearrange(grad_output, 'b s h d -> b h s d') + attn_w_T = einops.rearrange(attn_w, ' b h sq sk -> b h sk sq') + grad__v = torch.matmul(attn_w_T, grad_output) + grad_attn_w = torch.matmul(grad_output, _v_T) + + # Backward pass for softmax + if sinks is None: + 
grad_probs = grad_attn_w + else: + dummy = torch.zeros((b, h, sq, 1), device=q.device, dtype=q.dtype) + grad_probs = torch.cat([grad_attn_w, dummy], dim=3) + del grad_attn_w + grad_logits = torch._softmax_backward_data( + grad_probs, probs, -1, probs.dtype + ) # [b, h, sq, sk+1] + + # Backward pass for adding sinks + if sinks is None: + grad_sinks = None + grad_attn_w = grad_logits + else: + grad__sinks = grad_logits[:, :, :, -1] # [b, h, sq] + grad_sinks = einops.rearrange(grad__sinks, 'b h s -> h (b s)').sum(-1) + grad_attn_w = grad_logits[:, :, :, :-1].contiguous() # [b, h, sq, sk] + + # Backward pass for q @ K^T + grad_attn_w *= scale + grad__q = torch.matmul(grad_attn_w, _k_T) + grad__k = torch.matmul(_q_T, grad_attn_w) + + # Rearrange grads to (b, s, h, d) + grad_v = einops.rearrange(grad__v, 'b h s d -> b s h d') + grad_k = einops.rearrange(grad__k, 'b h d s -> b s h d') + grad_q = einops.rearrange(grad__q, 'b h s d -> b s h d') + return grad_q, grad_k, grad_v, grad_sinks + + +class AllGatherComm: + """All gather communication with async operations""" + + def __init__(self, group=None) -> None: + self.group = group + self.handles = [] + + def all_gather(self, output_tensor: torch.Tensor, input_tensor: torch.Tensor): + '''All gather the input tensor to the output tensor''' + + if self.group is None: + output_tensor.copy_(input_tensor) + else: + handle = torch.distributed.all_gather_into_tensor( + output_tensor, input_tensor, group=self.group, async_op=True + ) + self.handles.append(handle) + + def wait(self): + '''Wait for all gather operations to complete''' + + if self.group is not None: + for handle in self.handles: + handle.wait() + self.handles = [] + + +def to_zz_mask_attn_bias(attention_mask, cp_size, nheads, nheads_k, heads_k_stride, device, dtype): + '''Convert the attention mask to the attention bias''' + + if cp_size == 1: + zz_mask = attention_mask + else: + chunked = attention_mask.chunk(dim=3, chunks=cp_size * 2) + zz_mask = [_x for _p in 
zip(chunked[:cp_size], reversed(chunked[cp_size:])) for _x in _p] + zz_mask = torch.cat(zz_mask, dim=3) + attn_bias = torch.zeros(zz_mask.shape, device=device, dtype=dtype) + attn_bias.masked_fill_(zz_mask, float('-inf')) + attn_bias = attn_bias.expand(-1, heads_k_stride * (nheads // nheads_k), -1, -1) + return attn_bias + + +class AttentionFuncionWithContextParallel(torch.autograd.Function): + """Native attention function with context parallelism.""" + + @staticmethod + def forward(ctx, q, k, v, attention_mask, attention_dropout, softmax_scale, pg): + '''Forward pass for the native attention function with context parallelism''' + + # Assert einops exists + if not HAVE_EINOPS: + raise ImportError("einops is required by the attention CP but cannot be imported.") + + # Initialize communication group and constants + cp_size = 1 + if pg is not None: + cp_size = torch.distributed.get_world_size(pg) + comm = AllGatherComm(group=pg) + nheads = q.shape[2] + nheads_k = k.shape[2] + heads_k_stride = 1 + assert nheads % nheads_k == 0 and nheads_k % heads_k_stride == 0 + outs = [] + probs = [] + + # Initialize KV buffers + kv_buffer = torch.empty( + (2, k.shape[0] * cp_size, k.shape[1], heads_k_stride, k.shape[3]), + dtype=k.dtype, + device=k.device, + ) + kv_buffer_copy = torch.empty_like(kv_buffer) + + # All-gather first chunk of KV buffers + k_0 = k[:, :, :heads_k_stride].contiguous() + v_0 = v[:, :, :heads_k_stride].contiguous() + comm.all_gather(kv_buffer_copy[0], k_0) + comm.all_gather(kv_buffer_copy[1], v_0) + + # Prepare attention bias + assert ( + attention_mask is not None + ), "Attention mask is required for the native attention function with context parallelism" + attn_bias = to_zz_mask_attn_bias( + attention_mask, cp_size, nheads, nheads_k, heads_k_stride, q.device, q.dtype + ) + + # Iterate over heads + for i in range(0, nheads_k, heads_k_stride): + # Wait for previous all-gather to complete + comm.wait() + kv_buffer, kv_buffer_copy = kv_buffer_copy, kv_buffer + 
# All-gather the next portion of KV buffers if not the last iteration + if i < nheads_k - heads_k_stride: + kvsl = i + heads_k_stride + kvsr = kvsl + heads_k_stride + send_k = k[:, :, kvsl:kvsr].contiguous() + send_v = v[:, :, kvsl:kvsr].contiguous() + comm.all_gather(kv_buffer_copy[0], send_k) + comm.all_gather(kv_buffer_copy[1], send_v) + + # Prepare query, key, value for attention + q_i = q[:, :, i * nheads // nheads_k : (i + heads_k_stride) * nheads // nheads_k] + k_i = kv_buffer[0] + v_i = kv_buffer[1] + + # Rearrange query, key, value to (b, s, h, d) + q_i = einops.rearrange(q_i, 's b h d -> b s h d') + k_i = einops.rearrange(k_i, 's b h d -> b s h d') + v_i = einops.rearrange(v_i, 's b h d -> b s h d') + + # Forward pass + out_i, probs_i = eager_attn_fwd( + q_i, k_i, v_i, attn_bias, None, softmax_scale, attention_dropout + ) + outs.append(out_i) + probs.append(probs_i) + + # Concatenate outputs and rearrange to (s, b, h, d) + out = torch.cat(outs, dim=2) + out = einops.rearrange(out, 'b s h d -> s b h d') + + # Save contexts for backward pass + ctx.save_for_backward(q, k, v, attention_mask, *outs, *probs) + ctx.dropout = attention_dropout + ctx.scale = softmax_scale + ctx.heads_k_stride = heads_k_stride # TODO make it configurable + ctx.pg = pg + + return out + + @staticmethod + def backward(ctx, dout): + '''Backward pass for the native attention function with context parallelism''' + + # Initialize or resume constants and communication group + q, k, v, attention_mask, *rest = ctx.saved_tensors + nheads = q.shape[2] + nheads_k = k.shape[2] + heads_k_stride = ctx.heads_k_stride + assert nheads_k % heads_k_stride == 0 + outs = rest[: nheads_k // heads_k_stride] + probs = rest[nheads_k // heads_k_stride :] + pg = ctx.pg + cp_size = 1 + if pg is not None: + cp_size = torch.distributed.get_world_size(pg) + comm = AllGatherComm(group=pg) + + # Initialize KV buffers + kv_buffer = torch.empty( + (2, k.shape[0] * cp_size, k.shape[1], heads_k_stride, k.shape[3]), + 
dtype=k.dtype, + device=k.device, + ) + kv_buffer_copy = torch.empty_like(kv_buffer) + + # All-gather first chunk of KV buffers + dq = [] + dk = [] + dv = [] + k_0 = k[:, :, :heads_k_stride].contiguous() + v_0 = v[:, :, :heads_k_stride].contiguous() + comm.all_gather(kv_buffer_copy[0], k_0) + comm.all_gather(kv_buffer_copy[1], v_0) + + # Prepare attention bias + attn_bias = to_zz_mask_attn_bias( + attention_mask, cp_size, nheads, nheads_k, heads_k_stride, q.device, q.dtype + ) + + # Iterate over heads + for i in range(0, nheads_k, heads_k_stride): + # Slice query and output for this iteration + q_slice = slice(i * nheads // nheads_k, (i + heads_k_stride) * nheads // nheads_k) + q_i = q[:, :, q_slice] + dout_i = dout[:, :, q_slice] + + # Wait for previous all-gather to complete + comm.wait() + kv_buffer, kv_buffer_copy = kv_buffer_copy, kv_buffer + + # All-gather the next portion of KV buffers if not the last iteration + if i < nheads_k - heads_k_stride: + kvsl = i + heads_k_stride + kvsr = kvsl + heads_k_stride + send_k = k[:, :, kvsl:kvsr].contiguous() + send_v = v[:, :, kvsl:kvsr].contiguous() + comm.all_gather(kv_buffer_copy[0], send_k) + comm.all_gather(kv_buffer_copy[1], send_v) + + # Prepare key, value for attention + k_i = kv_buffer[0] + v_i = kv_buffer[1] + + # Rearrange query, key, value to (b, s, h, d) + q_i = einops.rearrange(q_i, 's b h d -> b s h d') + k_i = einops.rearrange(k_i, 's b h d -> b s h d') + v_i = einops.rearrange(v_i, 's b h d -> b s h d') + dout_i = einops.rearrange(dout_i, 's b h d -> b s h d') + + # Backward pass + dq_i, _dk_i, _dv_i, _ = eager_attn_bwd( + q_i, k_i, v_i, attn_bias, None, ctx.scale, ctx.dropout, outs[i], probs[i], dout_i + ) + + # Rearrange gradients to (s, b, h, d) + dq_i = einops.rearrange(dq_i, 'b s h d -> s b h d') + _dk_i = einops.rearrange(_dk_i, 'b s h d -> s b h d') + _dv_i = einops.rearrange(_dv_i, 'b s h d -> s b h d') + if pg is None: + dk_i = _dk_i + dv_i = _dv_i + else: + # Reduce-scatter gradients if CP > 1 
+ dk_i = torch.zeros( + (k_i.shape[1] // cp_size, k_i.shape[0], k_i.shape[2], k_i.shape[3]), + device=k_i.device, + dtype=k_i.dtype, + ) + dv_i = torch.zeros( + (v_i.shape[1] // cp_size, v_i.shape[0], v_i.shape[2], v_i.shape[3]), + device=v_i.device, + dtype=v_i.dtype, + ) + torch.distributed.reduce_scatter_tensor(dk_i, _dk_i, group=pg) + torch.distributed.reduce_scatter_tensor(dv_i, _dv_i, group=pg) + + # Collect gradients + dq.append(dq_i) + dk.append(dk_i) + dv.append(dv_i) + + # Concatenate gradients and return + dq = torch.cat(dq, dim=2) + dk = torch.cat(dk, dim=2) + dv = torch.cat(dv, dim=2) + return dq, dk, dv, None, None, None, None diff --git a/megatron/core/transformer/experimental_attention_variant/absorbed_mla.py b/megatron/core/transformer/experimental_attention_variant/absorbed_mla.py new file mode 100644 index 00000000000..b56add7302e --- /dev/null +++ b/megatron/core/transformer/experimental_attention_variant/absorbed_mla.py @@ -0,0 +1,961 @@ +# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + +""" +Absorbed Multi-Latent Attention implementation. + +This module implements MLA with matrix absorption: +- Absorbs K's up projection into Q: Q' = Q @ K_up_proj^T +- Applies V's up projection after core attention +- Core attention operates in MQA form with KV being single-head. + +The absorption is mathematically equivalent to standard MLA but enables MQA-style attention which +can be more efficient for certain attention variants. 
+""" + +import math +from dataclasses import dataclass +from typing import NoReturn, Optional, Union + +import torch + +from megatron.core import tensor_parallel +from megatron.core.models.common.embeddings import ( + RotaryEmbedding, + YarnRotaryEmbedding, + _yarn_get_mscale, + apply_rotary_pos_emb, +) +from megatron.core.process_groups_config import ProcessGroupCollection +from megatron.core.tensor_parallel.layers import ColumnParallelLinear +from megatron.core.tensor_parallel.mappings import ( + gather_from_sequence_parallel_region, + gather_from_tensor_model_parallel_region, + scatter_to_sequence_parallel_region, +) +from megatron.core.transformer.attention import Attention +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.spec_utils import ModuleSpec, build_module +from megatron.core.transformer.transformer_config import MLATransformerConfig +from megatron.core.utils import deprecate_inference_params, get_pg_size + +try: + from megatron.core.fusions.fused_mla_yarn_rope_apply import ( + fused_apply_mla_rope_for_kv, + fused_apply_mla_rope_for_q, + ) +except ImportError: + fused_apply_mla_rope_for_kv = None + fused_apply_mla_rope_for_q = None + +try: + from megatron.core.extensions.transformer_engine import ( + TEColumnParallelLinear, + TELinear, + set_save_original_input, + ) + from megatron.core.post_training.modelopt.layers import Linear + + HAVE_TE = True +except ImportError: + TEColumnParallelLinear, TELinear, Linear, set_save_original_input = None, None, None, None + HAVE_TE = False + + +@dataclass +class AbsorbedMLASelfAttentionSubmodules: + """ + Configuration class for specifying the submodules of absorbed multi-latent self-attention. 
+ """ + + linear_q_proj: Union[ModuleSpec, type] = None + linear_q_down_proj: Union[ModuleSpec, type] = None + linear_q_up_proj: Union[ModuleSpec, type] = None + linear_kv_down_proj: Union[ModuleSpec, type] = None + linear_k_up_proj: Union[ModuleSpec, type] = None + linear_v_up_proj: Union[ModuleSpec, type] = None + core_attention: Union[ModuleSpec, type] = None + linear_proj: Union[ModuleSpec, type] = None + q_layernorm: Union[ModuleSpec, type] = None + kv_layernorm: Union[ModuleSpec, type] = None + + +class AbsorbedMLASelfAttention(Attention): + """Multi-latent self-attention layer with matrix absorption. + + This layer takes input with shape [s, b, h] and returns output of the same shape. + + Compared to standard MLA, this class implements matrix absorption: + - K's up projection is applied to the query before core attention, not to the compressed KV. + - V's up projection is applied to the output of core attention, not to the compressed KV. + - Core attention operates in MQA form with KV being single-head. + + The absorption is mathematically equivalent to standard MLA but enables MQA-style attention + computation which can be more efficient for certain attention variants. 
+ """ + + def __init__( + self, + config: MLATransformerConfig, + submodules: AbsorbedMLASelfAttentionSubmodules, + layer_number: int, + attn_mask_type=AttnMaskType.padding, + cp_comm_type: Optional[str] = None, + pg_collection: ProcessGroupCollection = None, + ): + if pg_collection is None: + pg_collection = ProcessGroupCollection.use_mpu_process_groups() + + super().__init__( + config=config, + submodules=submodules, + layer_number=layer_number, + attn_mask_type=attn_mask_type, + attention_type="self", + pg_collection=pg_collection, + ) + + assert not config.add_bias_linear, "add_bias_linear is not supported for AbsorbedMLA" + assert not ( + config.tensor_model_parallel_size > 1 and not config.sequence_parallel + ), "AbsorbedMLA requires sequence_parallel when tensor_model_parallel_size > 1" + + self.query_projection_size = self.config.v_head_dim * self.config.num_attention_heads + self.q_head_dim = self.config.qk_head_dim + self.config.qk_pos_emb_head_dim + + # Inference is currently not supported. 
+ self.key_hidden_size = None + self.val_hidden_size = None + + self.recompute_up_proj = ( + self.config.recompute_granularity == 'selective' + and "mla_up_proj" in self.config.recompute_modules + ) + self.qkv_up_checkpoint = None + + mscale = _yarn_get_mscale(self.config.rotary_scaling_factor, self.config.mscale_all_dim) + self.softmax_scale = mscale * mscale / math.sqrt(self.q_head_dim) + self.cache_mla_latents = self.config.cache_mla_latents + assert not self.cache_mla_latents, "cache_mla_latents is not supported for AbsorbedMLA" + + if self.config.rope_type == "rope": + self.rotary_pos_emb = RotaryEmbedding( + self.config.qk_pos_emb_head_dim, + rotary_percent=self.config.rotary_percent, + rotary_base=self.config.rotary_base, + cp_group=self.pg_collection.cp, + ) + elif self.config.rope_type == "yarn": + self.rotary_pos_emb = YarnRotaryEmbedding( + self.config.qk_pos_emb_head_dim, + rotary_base=self.config.rotary_base, + scaling_factor=self.config.rotary_scaling_factor, + original_max_position_embeddings=self.config.original_max_position_embeddings, + beta_fast=self.config.beta_fast, + beta_slow=self.config.beta_slow, + mscale=self.config.mscale, + mscale_all_dim=self.config.mscale_all_dim, + cp_group=self.pg_collection.cp, + ) + else: + raise ValueError( + f"Unsupported RoPE type: {self.config.rope_type}, supported types are " + "'rope' and 'yarn'" + ) + + self.core_attention = build_module( + submodules.core_attention, + config=self.config, + layer_number=self.layer_number, + attn_mask_type=self.attn_mask_type, + attention_type="self", + softmax_scale=self.softmax_scale, + k_channels=self.config.kv_lora_rank + self.config.qk_pos_emb_head_dim, + v_channels=self.config.kv_lora_rank, + cp_comm_type=cp_comm_type, + pg_collection=self.pg_collection, + ) + + if self.config.q_lora_rank is None: + # Not projecting query + self.linear_q_proj = build_module( + submodules.linear_q_proj, + self.config.hidden_size, + self.config.num_attention_heads * self.q_head_dim, + 
config=self.config, + init_method=self.config.init_method, + gather_output=False, + bias=False, + skip_bias_add=False, + is_expert=False, + tp_comm_buffer_name='q_proj', + ) + else: + q_down_proj_kwargs = {} + if submodules.linear_q_down_proj in [TELinear]: + q_down_proj_kwargs['parallel_mode'] = 'duplicated' + elif submodules.linear_q_down_proj in [ + Linear, + TEColumnParallelLinear, + ColumnParallelLinear, + ]: + q_down_proj_kwargs['gather_output'] = False + else: + raise ValueError(f"Unsupported linear_q_down_proj: {submodules.linear_q_down_proj}") + + self.linear_q_down_proj = build_module( + submodules.linear_q_down_proj, + self.config.hidden_size, + self.config.q_lora_rank, + config=self.config, + init_method=self.config.init_method, + bias=False, + skip_bias_add=False, + is_expert=False, + tp_comm_buffer_name='q_down_proj', + skip_weight_param_allocation=False, + tp_group=( + pg_collection.tp + if q_down_proj_kwargs.get('parallel_mode') != 'duplicated' + else None + ), + **q_down_proj_kwargs, + ) + + self.linear_q_up_proj = build_module( + submodules.linear_q_up_proj, + self.config.q_lora_rank, + self.config.num_attention_heads * self.q_head_dim, + config=self.config, + init_method=self.config.init_method, + gather_output=False, + bias=False, + skip_bias_add=False, + is_expert=False, + tp_comm_buffer_name='q_up_proj', + tp_group=pg_collection.tp, + ) + + kv_down_proj_kwargs = {} + if submodules.linear_kv_down_proj in [TELinear]: + kv_down_proj_kwargs['parallel_mode'] = 'duplicated' + elif submodules.linear_kv_down_proj in [ + Linear, + TEColumnParallelLinear, + ColumnParallelLinear, + ]: + kv_down_proj_kwargs['gather_output'] = False + else: + raise ValueError(f"Unsupported linear_kv_down_proj: {submodules.linear_kv_down_proj}") + + self.linear_kv_down_proj = build_module( + submodules.linear_kv_down_proj, + self.config.hidden_size, + self.config.kv_lora_rank + self.config.qk_pos_emb_head_dim, + config=self.config, + init_method=self.config.init_method, + 
bias=False, + skip_bias_add=False, + is_expert=False, + tp_comm_buffer_name='kv_down_proj', + skip_weight_param_allocation=False, + tp_group=( + pg_collection.tp + if kv_down_proj_kwargs.get('parallel_mode') != 'duplicated' + else None + ), + **kv_down_proj_kwargs, + ) + + # Build separate K and V up projections + self.linear_k_up_proj = build_module( + submodules.linear_k_up_proj, + self.config.kv_lora_rank, + self.config.num_attention_heads * self.config.qk_head_dim, + config=self.config, + init_method=self.config.init_method, + gather_output=False, + bias=False, + skip_bias_add=False, + is_expert=False, + tp_comm_buffer_name='k_up_proj', + tp_group=pg_collection.tp, + ) + self.linear_v_up_proj = build_module( + submodules.linear_v_up_proj, + self.config.kv_lora_rank, + self.config.num_attention_heads * self.config.v_head_dim, + config=self.config, + init_method=self.config.init_method, + gather_output=False, + bias=False, + skip_bias_add=False, + is_expert=False, + tp_comm_buffer_name='v_up_proj', + tp_group=pg_collection.tp, + ) + + if self.config.q_lora_rank is not None: + self.q_layernorm = build_module( + submodules.q_layernorm, + hidden_size=self.config.q_lora_rank, + config=self.config, + eps=self.config.layernorm_epsilon, + ) + + self.kv_layernorm = build_module( + submodules.kv_layernorm, + hidden_size=self.config.kv_lora_rank, + config=self.config, + eps=self.config.layernorm_epsilon, + ) + + # Output. 
+ self.linear_proj = build_module( + submodules.linear_proj, + self.query_projection_size, + self.config.hidden_size, + config=self.config, + init_method=self.config.output_layer_init_method, + bias=self.config.add_bias_linear, + input_is_parallel=True, + skip_bias_add=True, + is_expert=False, + tp_comm_buffer_name='proj', + tp_group=self.pg_collection.tp, + ) + + if ( + HAVE_TE + and isinstance(self.linear_proj, TELinear) + and ( + ( + self.config.fp8 + and self.config.fp8_recipe != 'delayed' + and is_te_min_version("2.6.0dev0") + ) + or (self.config.fp4 and is_te_min_version("2.7.0.dev0")) + ) + ): + # For fp8/fp4 training, the output of the fused core_attn is saved by itself, and + # linear_proj also saves the quantized tensor of this output. Here we set the + # linear_proj to save the original input tensors to avoid the extra memory usage of + # the quantized tensor. + set_save_original_input(self.linear_proj) + + def get_query_key_value_tensors( + self, + hidden_states, + key_value_states=None, + packed_seq_params=None, + inference_context=None, + *, + inference_params=None, + ): + """ + Derives absorbed q, compressed q, and compressed kv tensors from `hidden_states`. + """ + # s = sequence length, b = batch size, h = hidden size + assert ( + hidden_states.ndim == 3 + ), f"hidden_states should be 3D, [s, b, h], got {hidden_states.ndim}D" + if packed_seq_params is not None: + assert ( + packed_seq_params.local_cp_size is None + ), "dynamic context parallel is not supported with MLA yet and is planned for future. \ + Please disable dynamic context parallel." 
+ + inference_context = deprecate_inference_params(inference_context, inference_params) + + # ========================================= + # Prepare RoPE and seqlen related params + # ========================================= + rotary_seq_len = self.rotary_pos_emb.get_rotary_seq_len( + inference_context, None, hidden_states, self.config, packed_seq_params + ) + + mscale = 1.0 + rotary_pos_cos = None + rotary_pos_sin = None + packed_seq = packed_seq_params is not None and packed_seq_params.qkv_format == 'thd' + if self.config.rope_type == "rope": + rotary_pos_emb = self.rotary_pos_emb(rotary_seq_len, packed_seq=packed_seq) + else: + if self.config.apply_rope_fusion: + rotary_pos_cos, rotary_pos_sin = self.rotary_pos_emb.get_cached_cos_sin( + rotary_seq_len, dtype=hidden_states.dtype, packed_seq=packed_seq + ) + rotary_pos_emb = None + assert inference_context is None, "Inference with MLA RoPE fusion is not supported" + assert ( + fused_apply_mla_rope_for_q is not None + and fused_apply_mla_rope_for_kv is not None + ), "Fused MLA RoPE apply is not imported successfully" + else: + rotary_pos_emb, mscale = self.rotary_pos_emb(rotary_seq_len, packed_seq=packed_seq) + + if packed_seq_params is not None and packed_seq_params.qkv_format == 'thd': + if packed_seq_params.cu_seqlens_q_padded is not None: + cu_seqlens_q = packed_seq_params.cu_seqlens_q_padded + else: + cu_seqlens_q = packed_seq_params.cu_seqlens_q + if packed_seq_params.cu_seqlens_kv_padded is not None: + cu_seqlens_kv = packed_seq_params.cu_seqlens_kv_padded + else: + cu_seqlens_kv = packed_seq_params.cu_seqlens_kv + else: + cu_seqlens_q = cu_seqlens_kv = None + + # ========================================= + # Q down projection + # ========================================= + if self.config.q_lora_rank is not None: + # if linear_q_down_proj is ColumnParallelLinear: + # q_compressed: [s, b, q_lora_rank / TP] + # elif linear_q_down_proj is Linear: + # q_compressed: [s / TP, b, q_lora_rank] + q_compressed, _ = 
self.linear_q_down_proj(hidden_states) + + # When output is sharded (ColumnParallelLinear), two things are needed to be + # identical to a normal Linear. + # 1. Manually gather output to restore output dim q_lora_rank; + # 2. Scatter sequence back to s / TP if sequence-parallel since it was + # gathered by ColumnParallelLinear. + if q_compressed.size(-1) != self.config.q_lora_rank: + q_compressed = gather_from_tensor_model_parallel_region(q_compressed) + if self.config.sequence_parallel: + q_compressed = scatter_to_sequence_parallel_region(q_compressed) + else: + q_compressed = hidden_states + + # ========================================= + # KV down projection + # ========================================= + # if linear_kv_down_proj is ColumnParallelLinear: + # kv_combined: [s, b, (kv_lora_rank + qk_pos_emb_head_dim) / TP] + # elif linear_kv_down_proj is Linear: + # kv_combined: [s / TP, b, (kv_lora_rank + qk_pos_emb_head_dim)] + kv_combined, _ = self.linear_kv_down_proj(hidden_states) + if kv_combined.size(-1) != self.config.kv_lora_rank + self.config.qk_pos_emb_head_dim: + # kv_combined: [s, b, (kv_lora_rank + qk_pos_emb_head_dim)] + kv_combined = gather_from_tensor_model_parallel_region(kv_combined) + # kv_compressed:[s, b, kv_lora_rank], k_pos_emb: [s, b, qk_pos_emb_head_dim] + kv_compressed, k_pos_emb = torch.split( + kv_combined, [self.config.kv_lora_rank, self.config.qk_pos_emb_head_dim], dim=-1 + ) + if self.config.sequence_parallel: + # kv_compressed:[s / TP, b, kv_lora_rank] + kv_compressed = scatter_to_sequence_parallel_region(kv_compressed) + else: + # kv_compressed:[s / TP, b, kv_lora_rank], k_pos_emb: [s / TP, b, qk_pos_emb_head_dim] + kv_compressed, k_pos_emb = torch.split( + kv_combined, [self.config.kv_lora_rank, self.config.qk_pos_emb_head_dim], dim=-1 + ) + if get_pg_size(self.tp_group) > 1 and self.config.sequence_parallel: + # k_pos_emb: [s, b, qk_pos_emb_head_dim] + k_pos_emb = gather_from_sequence_parallel_region(k_pos_emb, 
group=self.tp_group) + + if packed_seq_params is not None: + assert q_compressed.ndim == 3 and q_compressed.size(1) == 1 + assert kv_compressed.ndim == 3 and kv_compressed.size(1) == 1 + assert k_pos_emb.ndim == 3 and k_pos_emb.size(1) == 1 + # If sequence packing, TE expect [t, h, d] shaped qkv input. + # In Megatron-Core, the qkv shape is [t, 1, h, d]. + # So we need to reshape qkv from [t, 1, h, d] to [t, h, d]. + q_compressed = q_compressed.squeeze(1) + kv_compressed = kv_compressed.squeeze(1) + k_pos_emb = k_pos_emb.squeeze(1) + + # ========================================= + # Apply norm + # ========================================= + if self.config.q_lora_rank is not None: + # q_compressed: [num_tokens, q_lora_rank] + q_compressed = self.q_layernorm(q_compressed) + + kv_compressed = self.kv_layernorm(kv_compressed) + # Because we won't apply V up projection to the compressed KV, so we need to gather it + # manually. + if get_pg_size(self.tp_group) > 1 and self.config.sequence_parallel: + kv_compressed = gather_from_sequence_parallel_region(kv_compressed, group=self.tp_group) + + # ========================================= + # QKV up projection and RoPE apply + # ========================================= + + def qkv_up_proj_and_rope_apply(q_compressed, kv_compressed, k_pos_emb, rotary_pos_emb): + """ + Apply the up projection and RoPE to the query and key. + When sequence packing enabled, the input tensors adopt a packed shape of [t, ...]; + otherwise, they maintain the unpacked shape [s, b, ...]. In subsequent code comments, + we uniformly use [num_tokens, ...] to denote [s, b, ...] or [t, ...] for two cases. 
+ """ + if self.config.q_lora_rank is not None: + # q_compressed: [num_tokens, q_lora_rank] + # q: [num_tokens, n * (qk_head_dim + qk_pos_emb_head_dim)] + q, _ = self.linear_q_up_proj(q_compressed) + else: + # q_compressed: [num_tokens, hidden_size] + # q: [num_tokens, n * (qk_head_dim + qk_pos_emb_head_dim)] + q, _ = self.linear_q_proj(q_compressed) + + # q: [num_tokens, n, q_head_dim] + q = q.view(*q.size()[:-1], self.num_attention_heads_per_partition, self.q_head_dim) + + # [num_tokens, kv_lora_rank] -> [num_tokens, 1, kv_lora_rank] + kv_compressed = torch.unsqueeze(kv_compressed, -2) + # [num_tokens, qk_pos_emb_head_dim] -> [num_tokens, 1, qk_pos_emb_head_dim] + k_pos_emb = torch.unsqueeze(k_pos_emb, -2) + + # Prepare k_up_weight for absorption + # k_up_weight: linear_k_up_proj.weight viewed as [n, qk_head_dim, kv_lora_rank] + assert self.linear_k_up_proj.weight.size(0) == ( + self.num_attention_heads_per_partition * self.config.qk_head_dim + ) + assert self.linear_k_up_proj.weight.size(1) == self.config.kv_lora_rank + k_up_weight = self.linear_k_up_proj.weight.view( + self.num_attention_heads_per_partition, + self.config.qk_head_dim, + self.config.kv_lora_rank, + ) + + if self.config.apply_rope_fusion: + # q_no_pe: [num_tokens, n, qk_head_dim] + # q_pos_emb: [num_tokens, n, qk_pos_emb_head_dim] + q_no_pe, q_pos_emb = torch.split( + q, [self.config.qk_head_dim, self.config.qk_pos_emb_head_dim], dim=-1 + ) + + # Absorb k_up_weight into q_no_pe + # q_absorbed: [num_tokens, n, kv_lora_rank] + q_absorbed = torch.einsum("...nd,ndk->...nk", q_no_pe, k_up_weight) + q_absorbed = q_absorbed.contiguous() + assert q_absorbed.ndim == q.ndim + assert q_absorbed.shape[:-1] == q.shape[:-1] + assert q_absorbed.size(-1) == self.config.kv_lora_rank + + # q_absorbed: [num_tokens, n, (kv_lora_rank + qk_pos_emb_head_dim)] + q_absorbed = torch.cat([q_absorbed, q_pos_emb], dim=-1) + # kv_compressed: [num_tokens, 1, (kv_lora_rank + qk_pos_emb_head_dim)] + kv_compressed = 
torch.cat([kv_compressed, k_pos_emb], dim=-1) + + cp_rank = self.pg_collection.cp.rank() + cp_size = self.pg_collection.cp.size() + q_absorbed = fused_apply_mla_rope_for_q( + q_absorbed, + rotary_pos_cos, + rotary_pos_sin, + self.config.kv_lora_rank, + self.config.qk_pos_emb_head_dim, + cu_seqlens_q, + cp_rank, + cp_size, + ) + kv_compressed = fused_apply_mla_rope_for_q( + kv_compressed, + rotary_pos_cos, + rotary_pos_sin, + self.config.kv_lora_rank, + self.config.qk_pos_emb_head_dim, + cu_seqlens_kv, + cp_rank, + cp_size, + ) + else: + q_len = q.size()[0] + if inference_context is not None: + # add offset to the sequence start for inference + sequence_start = inference_context.sequence_len_offset + sequence_end = sequence_start + q_len + rotary_pos_emb = rotary_pos_emb[sequence_start:sequence_end] + elif packed_seq_params is None or self.config.context_parallel_size == 1: + # Shorten rotary_pos_emb to the sequence length when inference_params + # is not provided. This makes sure we can run forward directly with + # any sequence length. During training, the sequence length is always + # the full rotary_pos_emb length, except for sequence packing + CP. + # When sequence packing and context parallel are both enabled, the + # position embedding will not split rotary_pos_emb, so it may exceed + # the sequence length on this CP rank, but we need the full rotary_pos_emb + # to cover the full sequence, so we do not shorten it here. 
+ rotary_pos_emb = rotary_pos_emb[0:q_len] + + # q_no_pe: [num_tokens, n, qk_head_dim] + # q_pos_emb: [num_tokens, n, qk_pos_emb_head_dim] + q_no_pe, q_pos_emb = torch.split( + q, [self.config.qk_head_dim, self.config.qk_pos_emb_head_dim], dim=-1 + ) + + # Absorb k_up_weight into q_no_pe + # q_absorbed: [num_tokens, n, kv_lora_rank] + q_absorbed = torch.einsum("...nd,ndk->...nk", q_no_pe, k_up_weight) + q_absorbed = q_absorbed.contiguous() + assert q_absorbed.ndim == q.ndim + assert q_absorbed.shape[:-1] == q.shape[:-1] + assert q_absorbed.size(-1) == self.config.kv_lora_rank + + # Apply RoPE to q_pos_emb: [num_tokens, n, qk_pos_emb_head_dim] + q_pos_emb = apply_rotary_pos_emb( + q_pos_emb, + rotary_pos_emb, + config=self.config, + cu_seqlens=cu_seqlens_q, + mscale=mscale, + cp_group=self.pg_collection.cp, + ) + # k_pos_emb:[num_tokens, 1, qk_pos_emb_head_dim] + k_pos_emb = apply_rotary_pos_emb( + k_pos_emb, + rotary_pos_emb, + config=self.config, + cu_seqlens=cu_seqlens_kv, + mscale=mscale, + cp_group=self.pg_collection.cp, + ) + + # query: [num_tokens, n, (kv_lora_rank + qk_pos_emb_head_dim)] + q_absorbed = torch.cat([q_absorbed, q_pos_emb], dim=-1) + # key: [num_tokens, 1, (kv_lora_rank + qk_pos_emb_head_dim)] + kv_compressed = torch.cat([kv_compressed, k_pos_emb], dim=-1) + + assert q_absorbed.is_contiguous() + assert kv_compressed.is_contiguous() + + return q_absorbed, kv_compressed + + if self.recompute_up_proj: + quantization = self.config.fp8 or self.config.fp4 + assert not quantization, "FP8/FP4 is not supported for AbsorbedMLA" + self.qkv_up_checkpoint = tensor_parallel.CheckpointWithoutOutput(fp8=quantization) + q_absorbed, kv_compressed = self.qkv_up_checkpoint.checkpoint( + qkv_up_proj_and_rope_apply, q_compressed, kv_compressed, k_pos_emb, rotary_pos_emb + ) + else: + assert not self.cache_mla_latents, "cache_mla_latents is not supported for AbsorbedMLA" + q_absorbed, kv_compressed = qkv_up_proj_and_rope_apply( + q_compressed, kv_compressed, 
k_pos_emb, rotary_pos_emb + ) + + return q_absorbed, kv_compressed, q_compressed + + def _checkpointed_attention_forward( + self, + q_absorbed, + k_compressed, + v_compressed, + hidden_states, + q_compressed, + attention_mask, + rotary_pos_emb=None, + attn_mask_type=None, + attention_bias=None, + packed_seq_params=None, + ): + """Forward method with selective activation checkpointing.""" + + def custom_forward(*inputs): + q_absorbed = inputs[0] + k_compressed = inputs[1] + v_compressed = inputs[2] + hidden_states = inputs[3] + q_compressed = inputs[4] + attention_mask = inputs[5] + attn_mask_type = inputs[7] + attention_bias = inputs[8] + packed_seq_params = inputs[9] + attn_mask_type = AttnMaskType(attn_mask_type.item()) + output_ = self.core_attention( + q_absorbed, + k_compressed, + v_compressed, + hidden_states, + q_compressed, + attention_mask, + attn_mask_type=attn_mask_type, + attention_bias=attention_bias, + packed_seq_params=packed_seq_params, + ) + return output_ + + if attn_mask_type is None: + attn_mask_type = self.attn_mask_type + attn_mask_type = torch.tensor([attn_mask_type.value], dtype=torch.int) + hidden_states = tensor_parallel.checkpoint( + custom_forward, + False, + q_absorbed, + k_compressed, + v_compressed, + hidden_states, + q_compressed, + attention_mask, + rotary_pos_emb, + attn_mask_type, + attention_bias, + packed_seq_params, + ) + + return hidden_states + + def forward( + self, + hidden_states, + attention_mask, + key_value_states=None, + inference_context=None, + rotary_pos_emb=None, + rotary_pos_cos=None, + rotary_pos_sin=None, + rotary_pos_cos_sin=None, + attention_bias=None, + packed_seq_params=None, + sequence_len_offset=None, + *, + inference_params=None, + ): + """Forward pass for multi-latent attention with matrix absorption""" + assert rotary_pos_emb is None, "Rotary position embeddings should not be passed into MLA." + assert attention_bias is None, "Attention bias should not be passed into MLA." 
+ assert ( + rotary_pos_cos is None and rotary_pos_sin is None + ), "MLA does not support Flash Decoding" + assert not rotary_pos_cos_sin, "Flash-infer rope has not been tested with MLA." + assert not ( + self.training and self.cache_mla_latents + ), "cache_mla_latents conflicts with training." + assert ( + inference_context is None and inference_params is None + ), "Inference is not supported for AbsorbedMLA" + + # ===================== + # Query, Key, and Value + # ===================== + q_absorbed, kv_compressed, q_compressed = self.get_query_key_value_tensors( + hidden_states, key_value_states, packed_seq_params, inference_context=inference_context + ) + + assert q_absorbed.is_contiguous() + assert q_compressed.is_contiguous() + assert kv_compressed.is_contiguous() + + # ================================== + # Core attention computation + # ================================== + if self.checkpoint_core_attention and self.training: + core_attn_out = self._checkpointed_attention_forward( + q_absorbed, + kv_compressed, + None, + hidden_states, + q_compressed, + attention_mask, + packed_seq_params=packed_seq_params, + ) + else: + core_attn_out = self.core_attention( + q_absorbed, + kv_compressed, + None, + hidden_states, + q_compressed, + attention_mask, + packed_seq_params=packed_seq_params, + attn_mask_type=self.attn_mask_type, + ) + + # ================================== + # Apply V up projection + # ================================== + assert self.linear_v_up_proj.weight.size(0) == ( + self.num_attention_heads_per_partition * self.config.v_head_dim + ) + assert self.linear_v_up_proj.weight.size(1) == self.config.kv_lora_rank + v_up_weight = self.linear_v_up_proj.weight.view( + self.num_attention_heads_per_partition, self.config.v_head_dim, self.config.kv_lora_rank + ) + core_attn_out = core_attn_out.view( + *core_attn_out.shape[:-1], + self.num_attention_heads_per_partition, + self.config.kv_lora_rank, + ) + core_attn_out = torch.einsum("...nc,ndc->...nd", 
core_attn_out, v_up_weight) + core_attn_out = core_attn_out.contiguous() + core_attn_out = core_attn_out.view(*core_attn_out.shape[:-2], -1) + + if packed_seq_params is not None and packed_seq_params.qkv_format == 'thd': + core_attn_out = core_attn_out.unsqueeze(1) + + assert core_attn_out.ndim == hidden_states.ndim + assert core_attn_out.shape[0] == ( + hidden_states.shape[0] * self.config.tensor_model_parallel_size + ), ( + f"{core_attn_out.shape[0]} != " + f"{hidden_states.shape[0]} * " + f"{self.config.tensor_model_parallel_size}" + ) + assert core_attn_out.shape[1:-1] == hidden_states.shape[1:-1] + assert core_attn_out.size(-1) == ( + self.config.v_head_dim * self.num_attention_heads_per_partition + ) + + if self.recompute_up_proj: + assert self.qkv_up_checkpoint is not None + self.qkv_up_checkpoint.discard_output_and_register_recompute(core_attn_out) + self.qkv_up_checkpoint = None + + # ================= + # Output. [sq, b, h] + # ================= + output, bias = self.linear_proj(core_attn_out) + + return output, bias + + def backward_dw(self) -> NoReturn: + """Execute weight gradient computation.""" + self._backward_kv_proj() + self._backward_q_proj() + self._backward_output_proj() + + def _backward_kv_proj(self): + """Computes weight gradients of KV projection layers.""" + self.linear_k_up_proj.backward_dw() + self.linear_v_up_proj.backward_dw() + self.linear_kv_down_proj.backward_dw() + + def _backward_q_proj(self): + """Computes weight gradients of Q projection layers.""" + if self.config.q_lora_rank is None: + self.linear_q_proj.backward_dw() + else: + self.linear_q_down_proj.backward_dw() + self.linear_q_up_proj.backward_dw() + + def _backward_output_proj(self): + """Computes weight gradients of output projection layer.""" + self.linear_proj.backward_dw() + + def set_for_recompute_input_layernorm(self): + """Set the attention layer for recompute input_layernorm. 
Only needed for fp8/fp4.""" + from megatron.core.extensions.transformer_engine import set_save_original_input + + if self.config.q_lora_rank is not None: + set_save_original_input(self.linear_q_down_proj) + set_save_original_input(self.linear_kv_down_proj) + + def clip_qk(self): + """ + QK Clipping is a technique to clip the query and key attention logits to prevent the + attention logits from exploding. Per MuonClip usage, we update the weight by calling this + function after Muon optimizer step. + """ + raise NotImplementedError("clip_qk is not implemented for AbsorbedMLA") + + def _combine_kv_weights(self, k_weight, v_weight): + """Combine separate K and V weights into MLA's interleaved format. + + MLA's linear_kv_up_proj weight layout (per head interleaved): + [head0_K, head0_V, head1_K, head1_V, ...] + + AbsorbedMLA's separate weights layout: + K: [head0_K, head1_K, ...] + V: [head0_V, head1_V, ...] + + This method interleaves K and V per head to match MLA's format. + + Args: + k_weight: [num_heads_per_partition * qk_head_dim, kv_lora_rank] + v_weight: [num_heads_per_partition * v_head_dim, kv_lora_rank] + + Returns: + combined: [num_heads_per_partition * (qk_head_dim + v_head_dim), kv_lora_rank] + """ + n = self.num_attention_heads_per_partition + qk_dim = self.config.qk_head_dim + v_dim = self.config.v_head_dim + lora_rank = self.config.kv_lora_rank + + # Reshape to per-head format + k_per_head = k_weight.view(n, qk_dim, lora_rank) + v_per_head = v_weight.view(n, v_dim, lora_rank) + + # Concatenate K and V for each head along dim=1 + # Result: [n, qk_dim + v_dim, lora_rank] + combined_per_head = torch.cat([k_per_head, v_per_head], dim=1) + + # Reshape back to linear weight format + combined_weight = combined_per_head.view(n * (qk_dim + v_dim), lora_rank) + + return combined_weight + + def _split_kv_weights(self, combined_weight): + """Split MLA's interleaved KV weight into separate K and V weights. 
+ + MLA's linear_kv_up_proj weight layout (per head interleaved): + [head0_K, head0_V, head1_K, head1_V, ...] + + This method extracts K and V into separate tensors: + K: [head0_K, head1_K, ...] + V: [head0_V, head1_V, ...] + + Args: + combined_weight: [num_heads_per_partition * (qk_head_dim + v_head_dim), kv_lora_rank] + + Returns: + k_weight: [num_heads_per_partition * qk_head_dim, kv_lora_rank] + v_weight: [num_heads_per_partition * v_head_dim, kv_lora_rank] + """ + n = self.num_attention_heads_per_partition + qk_dim = self.config.qk_head_dim + v_dim = self.config.v_head_dim + lora_rank = self.config.kv_lora_rank + + # Reshape to per-head format + combined_per_head = combined_weight.view(n, qk_dim + v_dim, lora_rank) + + # Split K and V for each head (slicing creates non-contiguous views) + k_per_head = combined_per_head[:, :qk_dim, :] # [n, qk_dim, lora_rank] + v_per_head = combined_per_head[:, qk_dim:, :] # [n, v_dim, lora_rank] + + # Make contiguous and reshape back to linear weight format + k_weight = k_per_head.contiguous().view(n * qk_dim, lora_rank) + v_weight = v_per_head.contiguous().view(n * v_dim, lora_rank) + + return k_weight, v_weight + + def _load_from_state_dict(self, state_dict, prefix, *args, **kwargs): + """Handle loading from checkpoints with combined KV up projection weights. + + This method splits the combined 'linear_kv_up_proj.weight' (which has per-head + interleaved K and V) into separate 'linear_k_up_proj.weight' and 'linear_v_up_proj.weight'. 
+ """ + combined_key = f'{prefix}linear_kv_up_proj.weight' + k_up_key = f'{prefix}linear_k_up_proj.weight' + v_up_key = f'{prefix}linear_v_up_proj.weight' + + # Split combined KV weights into separate K and V + if combined_key in state_dict: + combined_weight = state_dict[combined_key] + + # Split with proper per-head de-interleaving + k_weight, v_weight = self._split_kv_weights(combined_weight) + + state_dict[k_up_key] = k_weight + state_dict[v_up_key] = v_weight + + del state_dict[combined_key] + + combined_extra_state_key = f'{prefix}linear_kv_up_proj._extra_state' + k_up_extra_state_key = f'{prefix}linear_k_up_proj._extra_state' + v_up_extra_state_key = f'{prefix}linear_v_up_proj._extra_state' + + if combined_extra_state_key in state_dict: + combined_extra_state = state_dict[combined_extra_state_key] + + assert isinstance(combined_extra_state, torch.Tensor) + # Now we can only handle the case where the extra state is empty. + assert combined_extra_state.numel() == 0 + + state_dict[k_up_extra_state_key] = combined_extra_state.clone() + state_dict[v_up_extra_state_key] = combined_extra_state.clone() + + del state_dict[combined_extra_state_key] + + super()._load_from_state_dict(state_dict, prefix, *args, **kwargs) diff --git a/megatron/core/transformer/experimental_attention_variant/dsa.py b/megatron/core/transformer/experimental_attention_variant/dsa.py index 88b4713dc60..3734db7043f 100644 --- a/megatron/core/transformer/experimental_attention_variant/dsa.py +++ b/megatron/core/transformer/experimental_attention_variant/dsa.py @@ -252,6 +252,330 @@ def compute_dsa_indexer_loss( return indexer_loss +def _compute_index_scores(q: torch.Tensor, weights: torch.Tensor, k: torch.Tensor) -> torch.Tensor: + """ + Perform index score using BF16 precision. + + Reference: + https://github.com/deepseek-ai/DeepSeek-V3.2-Exp/blob/main/inference/kernel.py#L254-L274 + This is a BF16 implementation of the `fp8_index` logic: + 1. Compute attention scores: q @ k^T; + 2. 
Apply ReLU activation; + 3. Weight by attention weights; + 4. Sum across attention heads. + + Args: + q: BF16 [seqlen_q, batch, index_n_heads, index_head_dim], the query tensor. + weights: BF16 [seqlen_q, batch, index_n_heads], the attention weights. + k: BF16 [seqlen_k, batch, index_head_dim], the key tensor. + + Returns: + index_scores: FP32 [batch, seqlen_q, seqlen_k], the index scores. + """ + # Compute attention scores: q @ k^T + # [seqlen_q, batch, index_n_heads, index_head_dim] @ [seqlen_k, batch, index_head_dim]^T + # -> [seqlen_q, batch, index_n_heads, seqlen_k] + index_scores = torch.einsum('sbhd,tbd->sbht', q.float(), k.float()) + + # Apply ReLU activation. + index_scores = torch.relu(index_scores) + + # Weight each head by attention weights. + # [seqlen_q, batch, index_n_heads, seqlen_k] * [seqlen_q, batch, index_n_heads, 1] + # -> [seqlen_q, batch, index_n_heads, seqlen_k] + index_scores = index_scores * weights.unsqueeze(-1) + + # Sum across attention heads. + # [seqlen_q, batch, index_n_heads, seqlen_k] -> [seqlen_q, batch, seqlen_k] + index_scores = index_scores.sum(dim=2) + + # Transpose to [batch, seqlen_q, seqlen_k]. 
+ index_scores = index_scores.transpose(0, 1) + + return index_scores + + +def fused_qk_topk_naive( + q: torch.Tensor, + k: torch.Tensor, + weights: torch.Tensor, + index_topk: int, + mask: Optional[torch.Tensor] = None, +): + """Naive implementation of QK Topk.""" + seqlen = q.size(0) + # ========================================= + # Compute index scores + # ========================================= + # [batch, seqlen, seqlen] + index_scores = _compute_index_scores(q, weights, k) + if mask is not None: + assert mask.dtype == index_scores.dtype, "Mask dtype must match index scores dtype" + index_scores = index_scores + mask + + # ========================================= + # Select top-k indices + # ========================================= + topk_k = min(index_topk, seqlen) + # [batch, seqlen, index_topk] + topk_indices = index_scores.topk(topk_k, dim=-1)[1] + + return index_scores, topk_indices + + +def fwd_fused_indexer_loss_naive( + q, weights, k, query, key, topk, softmax_scale, loss_coeff, mask, sparse_loss, pg_collection +): + """Naive implementation of forward pass for indexer loss.""" + index_scores, topk_indices = fused_qk_topk_naive(q, k, weights, topk, mask) + + indexer_loss = compute_dsa_indexer_loss( + index_scores, + topk_indices, + query, + key, + softmax_scale, + loss_coeff, + sparse_loss, + pg_collection, + ) + + return topk_indices, indexer_loss + + +def bwd_fused_indexer_loss_naive( + q, + weights, + k, + query, + key, + topk_indices, + softmax_scale, + loss_coeff, + sparse_loss, + grad_loss, + pg_collection, +): + """Naive implementation of backward pass for indexer loss.""" + index_scores = _compute_index_scores(q, weights, k) # [B, Sq, Sk] + + sq, b, np, hn = query.size() + sk = key.size(0) + + # [sq, b, np, hn] -> [b, np, sq, hn] -> [b * np, sq, hn] + query_reshaped = query.permute(1, 2, 0, 3).reshape(b * np, sq, hn) + # [sk, b, np, hn] -> [b, np, hn, sk] -> [b * np, hn, sk] + key_reshaped = key.permute(1, 2, 3, 0).reshape(b * np, hn, sk) + 
# Compute attention scores [b * np, sq, sk] + attention_scores = torch.bmm(query_reshaped.float(), key_reshaped.float()) * softmax_scale + # Free reshaped tensors - no longer needed after bmm + del query_reshaped, key_reshaped + + # Reshape to [b, np, sq, sk] + attention_scores = attention_scores.reshape(b, np, sq, sk) + + # causal_mask [sq, sk] + causal_mask = torch.triu( + torch.full((sq, sk), float('-inf'), dtype=torch.float32, device=attention_scores.device), + diagonal=1, + ) + # index_mask [b, sq, sk] + index_mask = torch.full( + (b, sq, sk), float("-inf"), dtype=torch.float32, device=causal_mask.device + ).scatter_(-1, topk_indices, 0) + + # Apply causal mask to both attention and index scores + # [b, np, sq, skv] + [1, 1, sq, skv] -> [b, np, sq, skv] + attention_scores = attention_scores + causal_mask.view(1, 1, sq, sk) + # [b, sq, sk] + [1, sq, sk] -> [b, sq, sk] + index_scores = index_scores + causal_mask.unsqueeze(0) + # Free causal_mask - no longer needed + del causal_mask + + if sparse_loss: + # [b, np, sq, sk] + [b, 1, sq, sk] -> [b, np, sq, sk] + attention_scores = attention_scores + index_mask.view(b, 1, sq, sk) + # [b, sq, sk] + [b, sq, sk] -> [b, sq, sk] + index_scores = index_scores + index_mask + + # Compute softmax for both + attention_scores_softmax = torch.nn.functional.softmax( + attention_scores, dim=-1, dtype=torch.float32 + ) + # Free attention_scores immediately + del attention_scores + + index_scores_softmax = torch.nn.functional.softmax(index_scores, dim=-1, dtype=torch.float32) + # Free index_scores - no longer needed after softmax + del index_scores + + # Sum attention scores across heads: [b, np, sq, sk] -> [b, sq, sk] + attention_scores_sum = attention_scores_softmax.sum(dim=1) + # Free attention_scores_softmax + del attention_scores_softmax + + if pg_collection.tp.size() > 1: + # attention scores are scattered to TP ranks in head dimension. 
+ torch.distributed.all_reduce(attention_scores_sum.contiguous(), group=pg_collection.tp) + + # L1 normalize + attention_scores_normalized = attention_scores_sum / attention_scores_sum.sum( + dim=-1, keepdim=True + ) + # Free attention_scores_sum - no longer needed after normalization + del attention_scores_sum + + # Backward through loss = kl_div * loss_coeff + # where kl_div = kl_per_element.sum(dim=-1).mean() + grad_kl_div = grad_loss * loss_coeff # scalar + + # Backward through mean: distribute gradient equally + grad_kl_per_row = grad_kl_div / (b * sq) # scalar value for each row + + # Backward through sum(dim=-1): broadcast back to [b, sq, sk] + # Each element in a row contributes to the sum, so gradient is same for all + grad_kl_per_element = grad_kl_per_row.view(1, 1, 1).expand(b, sq, sk) + + # Backward through kl_per_element = target * (log(target) - log(index)) + # ∂kl/∂index_softmax = -target / index_softmax + grad_index_scores_softmax = ( + -attention_scores_normalized / (index_scores_softmax + 1e-10) * grad_kl_per_element + ) + # Free attention_scores_normalized - no longer needed + del attention_scores_normalized + + # Backward through softmax: ∂L/∂x = softmax * (∂L/∂softmax - sum(∂L/∂softmax * softmax)) + sum_grad = (grad_index_scores_softmax * index_scores_softmax).sum(dim=-1, keepdim=True) + grad_index_scores_logits = index_scores_softmax * (grad_index_scores_softmax - sum_grad) + # Free intermediate tensors + del index_scores_softmax, grad_index_scores_softmax, sum_grad + + # Zero out gradients for masked positions + # Create a mask for valid (non-masked) positions + # Causal mask: position (i, j) is valid if j <= i + causal_valid_mask = torch.tril( + torch.ones((sq, sk), device=q.device, dtype=torch.bool) + ) # [sq, sk] + if sparse_loss: + # Also apply index mask - only topk positions are valid + index_valid_mask = index_mask == 0 # [b, sq, sk] + del index_mask # Free index_mask immediately after use + valid_mask = causal_valid_mask.unsqueeze(0) 
& index_valid_mask # [b, sq, sk] + del index_valid_mask + else: + del index_mask # Free index_mask even if not used for sparse_loss + valid_mask = causal_valid_mask.unsqueeze(0).expand(b, sq, sk) # [b, sq, sk] + del causal_valid_mask + + grad_index_scores_logits = grad_index_scores_logits * valid_mask.float() + del valid_mask + + # Transpose from [b, sq, sk] to [sq, b, sk] + grad_index_scores = grad_index_scores_logits.transpose(0, 1) # [sq, b, sk] + del grad_index_scores_logits + + # Backward through sum over heads: expand gradient + grad_weighted_scores = grad_index_scores.unsqueeze(2) # [sq, b, 1, sk] + del grad_index_scores + + # Compute forward values needed for backward + scores = torch.einsum('sbhd,tbd->sbht', q.float(), k.float()) # [sq, b, h, sk] + # Compute relu_mask before relu (saves memory vs keeping both scores and relu output) + relu_mask = scores > 0 + scores_after_relu = torch.relu(scores) + del scores + + # Backward through multiplication by weights: index_scores_per_head * weights + # ∂L/∂weights = grad * relu_scores (sum over sk) + grad_weights = (grad_weighted_scores * scores_after_relu).sum(dim=-1) # [sq, b, h] + + # ∂L/∂relu_scores = grad * weights + grad_scores_after_relu = grad_weighted_scores * weights.unsqueeze(-1) # [sq, b, h, sk] + del grad_weighted_scores, scores_after_relu + + # Backward through ReLU + grad_scores = grad_scores_after_relu * relu_mask.float() # [sq, b, h, sk] + del grad_scores_after_relu, relu_mask + + # Backward through einsum 'sbhd,tbd->sbht' + # ∂L/∂q = einsum('sbht,tbd->sbhd', grad_scores, k) + grad_q = torch.einsum('sbht,tbd->sbhd', grad_scores, k.float()) # [sq, b, h, d] + # ∂L/∂k = einsum('sbht,sbhd->tbd', grad_scores, q) + grad_k = torch.einsum('sbht,sbhd->tbd', grad_scores, q.float()) # [sk, b, d] + del grad_scores + + return grad_q.to(q.dtype), grad_weights.to(weights.dtype), grad_k.to(k.dtype) + + +class FusedDSAIndexerLoss(torch.autograd.Function): + """Fused implementation of DSA Indexer Loss.""" + + 
@staticmethod + def forward( + ctx, + q, + weights, + k, + query, + key, + softmax_scale, + topk, + loss_coeff, + mask, + sparse_loss, + pg_collection, + ): + """ + Fused forward: index_scores never materialized in full. + """ + topk_indices, loss = fwd_fused_indexer_loss_naive( + q, + weights, + k, + query, + key, + topk, + softmax_scale, + loss_coeff, + mask, + sparse_loss, + pg_collection, + ) + + # Save for backward (recomputation strategy) + ctx.save_for_backward(q, weights, k, query, key, topk_indices) + ctx.softmax_scale = softmax_scale + ctx.loss_coeff = loss_coeff + ctx.sparse_loss = sparse_loss + ctx.pg_collection = pg_collection + + return topk_indices, loss + + @staticmethod + def backward(ctx, grad_topk_indices, grad_loss): + """ + Backward: Recompute what we need. + """ + q, weights, k, query, key, topk_indices = ctx.saved_tensors + + grad_q, grad_weights, grad_k = bwd_fused_indexer_loss_naive( + q, + weights, + k, + query, + key, + topk_indices, + ctx.softmax_scale, + ctx.loss_coeff, + ctx.sparse_loss, + grad_loss, + ctx.pg_collection, + ) + + # query and key are detached in forward, so return None for their gradients + return grad_q, grad_weights, grad_k, None, None, None, None, None, None, None, None + + class DSAIndexerLossAutoScaler(torch.autograd.Function): """An AutoScaler that triggers the backward pass and scales the grad for indexer loss. @@ -471,74 +795,10 @@ def _apply_rope(self, x: torch.Tensor, rotary_pos_emb: torch.Tensor, mscale: flo x = torch.cat([x_nope, x_pe], dim=-1) return x - def _compute_index_scores( - self, q: torch.Tensor, weights: torch.Tensor, k: torch.Tensor - ) -> torch.Tensor: - """ - Perform index score using BF16 precision. - - Reference: - https://github.com/deepseek-ai/DeepSeek-V3.2-Exp/blob/main/inference/kernel.py#L254-L274 - This is a BF16 implementation of the `fp8_index` logic: - 1. Compute attention scores: q @ k^T; - 2. Apply ReLU activation; - 3. Weight by attention weights; - 4. Sum across attention heads. 
- - Args: - q: BF16 [seqlen_q, batch, index_n_heads, index_head_dim], the query tensor. - weights: BF16 [seqlen_q, batch, index_n_heads], the attention weights. - k: BF16 [seqlen_k, batch, index_head_dim], the key tensor. - - Returns: - index_scores: FP32 [batch, seqlen_q, seqlen_k], the index scores. - """ - # Compute attention scores: q @ k^T - # [seqlen_q, batch, index_n_heads, index_head_dim] @ [seqlen_k, batch, index_head_dim]^T - # -> [seqlen_q, batch, index_n_heads, seqlen_k] - index_scores = torch.einsum('sbhd,tbd->sbht', q.float(), k.float()) - - # Apply ReLU activation. - index_scores = torch.relu(index_scores) - - # Weight each head by attention weights. - # [seqlen_q, batch, index_n_heads, seqlen_k] * [seqlen_q, batch, index_n_heads, 1] - # -> [seqlen_q, batch, index_n_heads, seqlen_k] - index_scores = index_scores * weights.unsqueeze(-1) - - # Sum across attention heads. - # [seqlen_q, batch, index_n_heads, seqlen_k] -> [seqlen_q, batch, seqlen_k] - index_scores = index_scores.sum(dim=2) - - # Transpose to [batch, seqlen_q, seqlen_k]. - index_scores = index_scores.transpose(0, 1) - - return index_scores - - def forward_with_scores( - self, - x: torch.Tensor, - qr: torch.Tensor, - mask: Optional[torch.Tensor] = None, - packed_seq_params: Optional[PackedSeqParams] = None, + def forward_before_topk( + self, x: torch.Tensor, qr: torch.Tensor, packed_seq_params: Optional[PackedSeqParams] = None ) -> Tuple[torch.Tensor, torch.Tensor]: - """ - Forward pass for DSA Indexer that returns both index scores and top-k indices. - - This is used when KL loss is enabled to compare indexer scores with true attention scores. - - Args: - x: hidden states [seqlen, batch, hidden_size]. - qr: Low-rank query tensor [seqlen, batch, q_lora_rank]. - mask: Attention mask [batch, seqlen, seqlen]. - packed_seq_params: Packed sequence parameters for variable length sequences. - - Returns: - index_scores: Index scores [batch, seqlen, seqlen]. 
- topk_indices: Top-k indices [batch, seqlen, index_topk]. - """ - assert packed_seq_params is None, "Packed sequence is not supported for DSAttention" - + """All computations before topk.""" # ========================================= # Prepare RoPE params # ========================================= @@ -592,23 +852,45 @@ def forward_with_scores( k = rotate_activation(k) # ========================================= - # Compute index scores + # Prepare weights for index scores # ========================================= # [seqlen, batch, hidden_size] -> [seqlen, batch, index_n_heads] weights, _ = self.linear_weights_proj(x) weights = weights * (self.index_n_heads**-0.5) * self.softmax_scale - # [batch, seqlen, seqlen] - index_scores = self._compute_index_scores(q, weights, k) - if mask is not None: - assert mask.dtype == index_scores.dtype, "Mask dtype must match index scores dtype" - index_scores = index_scores + mask - # ========================================= - # Select top-k indices - # ========================================= - topk_k = min(self.index_topk, seqlen) - # [batch, seqlen, index_topk] - topk_indices = index_scores.topk(topk_k, dim=-1)[1] + return q, k, weights + + def forward_with_scores( + self, + x: torch.Tensor, + qr: torch.Tensor, + mask: Optional[torch.Tensor] = None, + packed_seq_params: Optional[PackedSeqParams] = None, + ) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Forward pass for DSA Indexer that returns both index scores and top-k indices. + + This is used when KL loss is enabled to compare indexer scores with true attention scores. + + Args: + x: hidden states [seqlen, batch, hidden_size]. + qr: Low-rank query tensor [seqlen, batch, q_lora_rank]. + mask: Attention mask [batch, seqlen, seqlen]. + packed_seq_params: Packed sequence parameters for variable length sequences. + + Returns: + index_scores: Index scores [batch, seqlen, seqlen]. + topk_indices: Top-k indices [batch, seqlen, index_topk]. 
+ """ + assert packed_seq_params is None, "Packed sequence is not supported for DSAttention" + + # [seqlen, batch, index_n_heads * index_head_dim] + # [seqlen, batch, index_head_dim] + # [seqlen, batch, index_n_heads] + q, k, weights = self.forward_before_topk(x, qr, packed_seq_params) + + # [batch, seqlen, seqlen], [batch, seqlen, index_topk] + index_scores, topk_indices = fused_qk_topk_naive(q, k, weights, self.index_topk, mask) return index_scores, topk_indices @@ -781,31 +1063,27 @@ def forward( mask, float('-inf') ) - # =================================== - # Get index scores and top-k indices - # =================================== - index_scores, topk_indices = self.indexer.forward_with_scores( - x, qr, mask=float_mask, packed_seq_params=packed_seq_params - ) - - # =================================== - # Run sparse attention kernel - # =================================== - output = unfused_dsa_fn(query, key, value, topk_indices, self.softmax_scale) - - # =================================== - # Attach indexer loss - # =================================== if self.training and torch.is_grad_enabled(): - # Compute KL divergence loss between indexer scores and true attention scores + # =================================== + # Prepare inputs for indexer loss + # =================================== + q, k, weights = self.indexer.forward_before_topk(x, qr, packed_seq_params) indexer_loss_coeff = getattr(self.config, 'dsa_indexer_loss_coeff', 0.0) - indexer_loss = compute_dsa_indexer_loss( - index_scores, - topk_indices, + + # =================================== + # Attach indexer topk and loss + # =================================== + # Compute KL divergence loss between indexer scores and true attention scores + topk_indices, indexer_loss = FusedDSAIndexerLoss.apply( + q, + weights, + k, query.detach(), key.detach(), self.softmax_scale, + self.indexer.index_topk, indexer_loss_coeff, + float_mask, getattr(self.config, "dsa_indexer_use_sparse_loss", False), 
self.indexer.pg_collection, ) @@ -816,7 +1094,26 @@ def forward( layer_number=self.layer_number, num_layers=self.config.num_layers, ) + + # =================================== + # Run sparse attention kernel + # =================================== + output = unfused_dsa_fn(query, key, value, topk_indices, self.softmax_scale) + # Attach loss to output output = DSAIndexerLossAutoScaler.apply(output, indexer_loss) + else: + # =================================== + # Get index scores and top-k indices + # =================================== + _, topk_indices = self.indexer.forward_with_scores( + x, qr, mask=float_mask, packed_seq_params=packed_seq_params + ) + + # =================================== + # Run sparse attention kernel + # =================================== + output = unfused_dsa_fn(query, key, value, topk_indices, self.softmax_scale) + return output diff --git a/megatron/core/transformer/fsdp_dtensor_checkpoint.py b/megatron/core/transformer/fsdp_dtensor_checkpoint.py index f7a938aff2a..4dbc6623506 100644 --- a/megatron/core/transformer/fsdp_dtensor_checkpoint.py +++ b/megatron/core/transformer/fsdp_dtensor_checkpoint.py @@ -45,6 +45,7 @@ from megatron.core import parallel_state from megatron.core.tensor_parallel.layers import copy_tensor_model_parallel_attributes from megatron.core.transformer.transformer_layer import TransformerLayer +from megatron.core.utils import get_attr_wrapped_model def get_ep_layer_offset(num_experts: int | None = None) -> int: @@ -65,6 +66,19 @@ def get_ep_layer_offset(num_experts: int | None = None) -> int: return local_expert_offset +def get_total_num_experts(num_experts: int | None = None) -> int: + """ + Get the total number of experts for the current model. + + Args: + num_experts: Total number of experts in the model. If None, returns 0. + + Returns: + The total number of experts. + """ + return num_experts if num_experts else 0 + + def get_expert_index_from_key(key): """Extract expert index from various expert key formats. 
@@ -101,7 +115,7 @@ def handle_experts_in_state_dict(state_dict, num_experts: int | None = None): The processed state dictionary with rewritten expert keys. """ local_expert_start = get_ep_layer_offset(num_experts) - local_expert_end = num_experts if num_experts else 0 + local_expert_end = get_total_num_experts(num_experts) def should_keep_expert_key(expert_index): """Determine if this rank should keep this expert key based on expert index""" @@ -183,7 +197,10 @@ def handle_swiglu_in_state_dict(model, model_state_dict, optimizer_state_dict): assert HAVE_MEGATRON_FSDP, "This function requires Megatron-FSDP to be installed." # Extract num_experts from model config for expert parameter processing - num_experts = model.config.num_moe_experts if hasattr(model, 'config') else None + model_config = get_attr_wrapped_model(model, "config", allow_none=True) + num_experts = ( + getattr(model_config, 'num_moe_experts', None) if model_config is not None else None + ) def intersection(s1, s2): # Only works for step=1 diff --git a/megatron/core/transformer/linear_cross_entropy.py b/megatron/core/transformer/linear_cross_entropy.py new file mode 100644 index 00000000000..e7afe326e1c --- /dev/null +++ b/megatron/core/transformer/linear_cross_entropy.py @@ -0,0 +1,76 @@ +# Copyright (c) 2026, NVIDIA CORPORATION. All rights reserved. + +from typing import Literal, Optional, Tuple, Union + +import torch + +from megatron.core import tensor_parallel +from megatron.core.fusions.fused_linear_cross_entropy import linear_cross_entropy + + +class LinearCrossEntropyModule(tensor_parallel.ColumnParallelLinear): + """ + A module that combines a ColumnParallelLinear layer with fused + linear + cross-entropy loss computation over a tensor-parallel vocabulary. 
+ """ + + def forward( + self, + input_: torch.Tensor, + weight: Optional[torch.Tensor] = None, + runtime_gather_output: Optional[bool] = None, + output_cross_entropy_loss: bool = False, + labels: Optional[torch.Tensor] = None, + reduction: Literal["none", "sum", "mean"] = "none", + ignore_index: int = -100, + ) -> Union[torch.Tensor, Tuple[torch.Tensor, Optional[torch.Tensor]]]: + """Run either the plain ColumnParallelLinear or fused linear+cross-entropy.""" + if output_cross_entropy_loss: + assert labels is not None, "labels cannot be None when outputting cross-entropy loss." + return self._compute_linear_and_cross_entropy_loss( + hidden=input_, + weight=weight if weight is not None else self.weight, + labels=labels, + reduction=reduction, + ignore_index=ignore_index, + ) + + # Fall back to standard ColumnParallelLinear forward. + # ColumnParallelLinear.forward returns (output, bias) or just output + # depending on configuration, so keep the return type as Tensor. + return super().forward(input_, weight, runtime_gather_output) + + def _compute_linear_and_cross_entropy_loss( + self, + hidden: torch.Tensor, + weight: torch.Tensor, + labels: Optional[torch.Tensor] = None, + reduction: Literal["none", "sum", "mean"] = "none", + ignore_index: int = -100, + ) -> torch.Tensor: + """Compute fused linear + cross-entropy over tensor-parallel vocab.""" + assert self.config.cross_entropy_loss_fusion, "Cross-entropy loss fusion must be enabled." + assert self.config.cross_entropy_fusion_impl == "linear", ( + "Cross-entropy loss fusion implementation must be 'linear' to use " + "_compute_linear_and_cross_entropy_loss." + ) + assert weight is not None, "weight cannot be None when using fused linear cross entropy." + assert labels is not None, "labels cannot be None when using fused linear cross entropy." 
+ + # [b s] => [s b] + labels = labels.transpose(0, 1).contiguous() + loss = linear_cross_entropy( + hidden, + weight, + labels, + sequence_parallel=self.sequence_parallel, + reduction=reduction, + ignore_index=ignore_index, + tp_group=self.tp_group, + ) + # If reduction != "none" this will be a scalar; for "none" it should + # match [s, b] and can be reshaped back to [b, s]. + if reduction == "none": + loss = loss.view_as(labels).transpose(0, 1).contiguous() + + return loss diff --git a/megatron/core/transformer/moe/experts.py b/megatron/core/transformer/moe/experts.py index 62fb7a148c8..d8e75342226 100644 --- a/megatron/core/transformer/moe/experts.py +++ b/megatron/core/transformer/moe/experts.py @@ -31,6 +31,7 @@ from megatron.core.tensor_parallel.layers import ( _initialize_affine_weight_cpu, _initialize_affine_weight_gpu, + set_tensor_model_parallel_attributes, ) from megatron.core.tensor_parallel.utils import divide from megatron.core.transformer.mlp import MLP, MLPSubmodules, apply_swiglu_sharded_factory @@ -176,6 +177,14 @@ def activation_func_with_probs(x, probs): rank=tp_rank, world_size=tp_size, ) + else: + # Ensure TP attrs are set even when not initializing + set_tensor_model_parallel_attributes( + tensor=self.weight1, is_parallel=True, dim=1, stride=1 + ) + set_tensor_model_parallel_attributes( + tensor=self.weight2, is_parallel=True, dim=0, stride=1 + ) else: self.weight1 = Parameter( torch.empty( @@ -200,6 +209,14 @@ def activation_func_with_probs(x, probs): _initialize_affine_weight_gpu( self.weight2, config.output_layer_init_method, partition_dim=0, is_expert=True ) + else: + # Ensure TP attrs are set even when not initializing + set_tensor_model_parallel_attributes( + tensor=self.weight1, is_parallel=True, dim=1, stride=1 + ) + set_tensor_model_parallel_attributes( + tensor=self.weight2, is_parallel=True, dim=0, stride=1 + ) setattr(self.weight1, 'allreduce', not self.expert_parallel) setattr(self.weight2, 'allreduce', not self.expert_parallel) 
diff --git a/megatron/core/transformer/moe/moe_utils.py b/megatron/core/transformer/moe/moe_utils.py index 65d8fed1015..4250f764948 100644 --- a/megatron/core/transformer/moe/moe_utils.py +++ b/megatron/core/transformer/moe/moe_utils.py @@ -1,5 +1,4 @@ # Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. - import functools import math from dataclasses import dataclass @@ -11,7 +10,11 @@ from megatron.core.fp4_utils import get_fp4_align_size from megatron.core.fp8_utils import get_fp8_align_size from megatron.core.process_groups_config import ProcessGroupCollection -from megatron.core.tensor_parallel import get_cuda_rng_tracker, get_expert_parallel_rng_tracker_name +from megatron.core.tensor_parallel import ( + get_cuda_rng_tracker, + get_data_parallel_rng_tracker_name, + get_expert_parallel_rng_tracker_name, +) from megatron.core.tensor_parallel.mappings import reduce_from_tensor_model_parallel_region from megatron.core.transformer.cuda_graphs import is_graph_capturing from megatron.core.transformer.enums import CudaGraphScope @@ -952,6 +955,11 @@ def reduce_aux_losses_tracker_across_ranks( torch.distributed.all_reduce( values, group=tracker[name]['avg_group'], op=torch.distributed.ReduceOp.AVG ) + # Average aux losses across data parallel ranks. + # The `global_load_balancing_loss` already uses `tp_dp_cp_group` in `reduce_group`, + # so we don't need to reduce it again. Others use `tp_cp_group` in `reduce_group`. + if name != "global_load_balancing_loss": + torch.distributed.all_reduce(values, group=dp_group, op=torch.distributed.ReduceOp.AVG) def track_moe_metrics( @@ -991,13 +999,20 @@ def track_moe_metrics( """ # Aux loss logging tracker = get_moe_layer_wise_logging_tracker() - # Initialize the tracker if force_initialize is True + # Initialize the tracker if force_initialize is True. + # The values tensor size must match what the router creates in save_to_aux_losses_tracker, + # which uses (num_layers + mtp_num_layers). 
This is important for PP ranks that have no + # MoE layers (so the tracker is empty and force_initialize creates the entry); their tensor + # size must match ranks that do have MoE layers, otherwise all_reduce across PP will hang. + tracker_num_layers = num_layers + if mtp_num_layers is not None: + tracker_num_layers += mtp_num_layers if force_initialize: if track_names is not None: for key in track_names: if key not in tracker: tracker[key] = {} - tracker[key]["values"] = torch.zeros(num_layers, device="cuda") + tracker[key]["values"] = torch.zeros(tracker_num_layers, device="cuda") tracker[key]["reduce_group"] = None tracker[key]["avg_group"] = None tracker[key]["reduce_group_has_dp"] = False @@ -1159,6 +1174,54 @@ def apply_random_logits(logits: torch.Tensor) -> torch.Tensor: return RandomSTE.apply(logits) +@internal_api +class RandomSTEShared(torch.autograd.Function): + """ + STE that generates random values with shared seed across all ranks. + When std < 0, caches and reuses values per layer. + """ + + _cache = {} + + @staticmethod + def forward(ctx, logits, std, layer_number): + """Forward pass: apply random bias to logits.""" + # Check cache if reuse mode (negative std) + if std < 0 and layer_number in RandomSTEShared._cache: + return logits + RandomSTEShared._cache[layer_number] + + # Generate random bias with shared seed across all ranks + with get_cuda_rng_tracker().fork(get_data_parallel_rng_tracker_name()): + bias = torch.empty(logits.shape[-1], device=logits.device, dtype=logits.dtype).normal_( + std=abs(std) + ) + + # Cache if reuse mode + if std < 0 and layer_number is not None: + RandomSTEShared._cache[layer_number] = bias + + return logits + bias + + @staticmethod + def backward(ctx, grad_output): + """Backward pass: pass through gradients.""" + return grad_output, None, None + + +def apply_biased_logits(logits, std, layer_number=None): + """ + Apply random bias to logits. All ranks get the same random values. 
+ + Args: + logits: Input logits tensor [num_tokens, num_experts] + std: Standard deviation for random bias. If negative, generate once + per layer and reuse (using abs(std) as actual std). + layer_number: Layer number for caching when std is negative. + """ + logits = apply_random_logits(logits) + return RandomSTEShared.apply(logits, std, layer_number) + + class RouterGatingLinearFunction(torch.autograd.Function): """ Autograd function for router gating linear. diff --git a/megatron/core/transformer/moe/router.py b/megatron/core/transformer/moe/router.py index 4be97401748..a2f3e90bd0f 100644 --- a/megatron/core/transformer/moe/router.py +++ b/megatron/core/transformer/moe/router.py @@ -10,6 +10,7 @@ from megatron.core.transformer.moe.moe_utils import ( MoEAuxLossAutoScaler, ProcessGroupCollection, + apply_biased_logits, apply_random_logits, apply_router_token_dropping, compute_routing_scores_for_aux_loss, @@ -302,6 +303,7 @@ def _apply_aux_loss( moe_aux_loss_coeff=aux_loss_coeff, fused=self.config.moe_router_fusion, ) + probs = self.attach_and_log_load_balancing_loss( probs, aux_loss_coeff, @@ -388,7 +390,6 @@ def _apply_global_aux_loss( topk=self.topk, ) ) - self.global_tokens_per_expert += global_tokens_per_expert self.ga_steps += 1 averated_tokens_per_expert = self.global_tokens_per_expert / self.ga_steps @@ -402,6 +403,7 @@ def _apply_global_aux_loss( moe_aux_loss_coeff=global_aux_loss_coeff, fused=self.config.moe_router_fusion, ) + probs = self.attach_and_log_load_balancing_loss( probs, global_aux_loss_coeff, @@ -656,6 +658,12 @@ def forward(self, input: torch.Tensor, padding_mask: Optional[torch.Tensor] = No # Apply force load balancing with random logits for benchmark logits = apply_random_logits(logits) + if self.config.moe_router_force_biased is not None: + # Apply biased logits with shared random bias across all ranks + logits = apply_biased_logits( + logits, self.config.moe_router_force_biased, self.layer_number + ) + probs, routing_map = 
self.routing(logits, padding_mask=padding_mask) return probs, routing_map diff --git a/megatron/core/transformer/moe/token_dispatcher.py b/megatron/core/transformer/moe/token_dispatcher.py index f2e26c63cf5..327dbc8a382 100644 --- a/megatron/core/transformer/moe/token_dispatcher.py +++ b/megatron/core/transformer/moe/token_dispatcher.py @@ -37,6 +37,8 @@ from megatron.core.transformer.moe.shared_experts import SharedExpertMLP from megatron.core.transformer.transformer_config import TransformerConfig +logger = logging.getLogger(__name__) + """ We use the following notation throughout this file: H: hidden size B: micro batch size diff --git a/megatron/core/transformer/multi_token_prediction.py b/megatron/core/transformer/multi_token_prediction.py index 2edb652bfc6..b0476155ad9 100755 --- a/megatron/core/transformer/multi_token_prediction.py +++ b/megatron/core/transformer/multi_token_prediction.py @@ -1,4 +1,4 @@ -# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. import warnings from contextlib import nullcontext diff --git a/megatron/core/transformer/spec_utils.py b/megatron/core/transformer/spec_utils.py index 09058084181..5639737d6c8 100644 --- a/megatron/core/transformer/spec_utils.py +++ b/megatron/core/transformer/spec_utils.py @@ -7,6 +7,8 @@ logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__) + @dataclass class ModuleSpec: diff --git a/megatron/core/transformer/transformer_block.py b/megatron/core/transformer/transformer_block.py index 831b5546d53..f222a2c3a6b 100755 --- a/megatron/core/transformer/transformer_block.py +++ b/megatron/core/transformer/transformer_block.py @@ -390,7 +390,6 @@ def build_layer(layer_spec, layer_number): def has_final_layernorm_in_this_stage(self): """ Check if this vpp stage contains the final layernorm. 
- Note: Final layernorm now has been moved from the post-process stage to the last decoder layer by using this function. diff --git a/megatron/core/transformer/transformer_config.py b/megatron/core/transformer/transformer_config.py index eaae585905e..dce438520aa 100644 --- a/megatron/core/transformer/transformer_config.py +++ b/megatron/core/transformer/transformer_config.py @@ -11,6 +11,7 @@ from megatron.core.quantization.quant_config import RecipeConfig from megatron.core.transformer.enums import AttnBackend, CudaGraphScope from megatron.core.transformer.pipeline_parallel_layer_layout import PipelineParallelLayerLayout +from megatron.core.utils import experimental_api from ..fusions.fused_bias_geglu import quick_gelu from ..model_parallel_config import ModelParallelConfig @@ -31,6 +32,7 @@ @dataclass +@experimental_api class TransformerConfig(ModelParallelConfig): """Configuration object for megatron-core transformers. @@ -272,6 +274,9 @@ class TransformerConfig(ModelParallelConfig): #################### # linear attention #################### + linear_attention_type: Optional[str] = None + """Type of linear attention to use. + Deprecated. Use experimental_attention_variant instead.""" linear_attention_freq: Optional[Union[int, List[int]]] = None """Frequency between LA (linear attention) layers and SDPA (scaled dot-product attention) layers. @@ -544,7 +549,7 @@ class TransformerConfig(ModelParallelConfig): in the hidden_states gradient.""" moe_shared_expert_gate: bool = False - """Enable gate for shared expert. Only effective when + """Enable gate for shared expert. Only effective when moe-shared-expert-intermediate-size is set.""" moe_shared_expert_overlap: bool = False @@ -646,6 +651,13 @@ class TransformerConfig(ModelParallelConfig): """[Experimental] Force load balancing with random logits for MoE router, supports naive topk and group-limited topk. 
This is an experimental feature and only for benchmark.""" + moe_router_force_biased: Optional[float] = None + """[Experimental] Apply random expert bias in normal distribution with specified std + to router logits. Shared seed across all ranks ensures identical bias. + If positive, generates new random bias each forward pass. + If negative, generates bias once per layer and reuses it (abs value is std). + This is an experimental feature for benchmarking purposes.""" + moe_grouped_gemm: bool = False """When there are multiple experts per rank, compress multiple local (potentially small) gemms in a single kernel launch to improve the utilization and performance by leveraging the Grouped @@ -883,6 +895,9 @@ class TransformerConfig(ModelParallelConfig): """Transformer implementation to use. Options are 'transformer_engine' for Transformer Engine and 'local' for MCore.""" + fallback_to_eager_attn: bool = False + """Whether to fallback to eager attention in TE implementation. + Suggested for when desired features are not available in TE implementation.""" ##################################### # Fine-grained Activation Offloading ##################################### @@ -945,45 +960,56 @@ def __post_init__(self): f"tensor_model_parallel_size ({self.tensor_model_parallel_size})." ) - if self.experimental_attention_variant == "gated_delta_net": + if self.linear_attention_type is not None: + warnings.warn( + "linear_attention_type is deprecated, " + "use experimental_attention_variant instead." + ) + self.experimental_attention_variant = self.linear_attention_type + self.linear_attention_type = None + + if self.experimental_attention_variant in ["gated_delta_net"]: assert ( self.linear_attention_freq is not None - ), f"linear_attention_freq must be set for linear gated_delta_net." + ), f"linear_attention_freq must be set for linear attention." 
- # Check required parameters - assert ( - self.linear_conv_kernel_dim is not None - ), "linear_conv_kernel_dim must be set for gated delta net." - assert ( - self.linear_key_head_dim is not None - ), "linear_key_head_dim must be set for gated delta net." - assert ( - self.linear_value_head_dim is not None - ), "linear_value_head_dim must be set for gated delta net." - assert ( - self.linear_num_key_heads is not None - ), "linear_num_key_heads must be set for gated delta net." - assert ( - self.linear_num_value_heads is not None - ), "linear_num_value_heads must be set for gated delta net." - assert self.linear_num_value_heads % self.linear_num_key_heads == 0, ( - f"linear_num_value_heads ({self.linear_num_value_heads}) must be a multiple of " - f"linear_num_key_heads ({self.linear_num_key_heads})." - ) + if self.experimental_attention_variant == "gated_delta_net": + # Check required parameters + assert ( + self.linear_conv_kernel_dim is not None + ), "linear_conv_kernel_dim must be set for gated delta net." + assert ( + self.linear_key_head_dim is not None + ), "linear_key_head_dim must be set for gated delta net." + assert ( + self.linear_value_head_dim is not None + ), "linear_value_head_dim must be set for gated delta net." + assert ( + self.linear_num_key_heads is not None + ), "linear_num_key_heads must be set for gated delta net." + assert ( + self.linear_num_value_heads is not None + ), "linear_num_value_heads must be set for gated delta net." + assert self.linear_num_value_heads % self.linear_num_key_heads == 0, ( + f"linear_num_value_heads ({self.linear_num_value_heads}) must be a multiple of " + f"linear_num_key_heads ({self.linear_num_key_heads})." + ) - # Check tensor parallelism compatibility - assert ( - self.linear_num_key_heads % self.tensor_model_parallel_size == 0 - ), "linear_num_key_heads must be a multiple of tensor_model_parallel_size." 
+ # Check tensor parallelism compatibility + tp_cp_size = self.tensor_model_parallel_size * self.context_parallel_size + assert self.linear_num_key_heads % tp_cp_size == 0, ( + f"{self.linear_num_key_heads=} must be a multiple of " + f"({self.tensor_model_parallel_size=} * {self.context_parallel_size=})." + ) + assert self.linear_num_value_heads % tp_cp_size == 0, ( + f"{self.linear_num_value_heads=} must be a multiple of " + f"({self.tensor_model_parallel_size=} * {self.context_parallel_size=})." + ) + elif self.experimental_attention_variant == "dsa": assert ( - self.linear_num_value_heads % self.tensor_model_parallel_size == 0 - ), "linear_num_value_heads must be a multiple of tensor_model_parallel_size." - - # Do not support yet, but coming soon. - assert self.context_parallel_size == 1, ( - f"Gated delta net does not support context parallel for now," - f" but got {self.context_parallel_size=}." - ) + self.context_parallel_size == 1 + ), "Currently context parallelism is not supported by DSAttention!" + assert not self.apply_rope_fusion, "RoPE fusion is not supported for DSAttention" if self.fp8: # cannot support first last layer bf16 with delayed scaling @@ -1977,6 +2003,25 @@ def __post_init__(self): f"the number of layers ({self.num_layers})" ) + if self.fallback_to_eager_attn: + assert self.transformer_impl == "transformer_engine", ( + f"fallback_to_eager_attn is only available with transformer_engine implementation," + f" but got {self.transformer_impl=}." 
+ ) + + if self.fallback_to_eager_attn or self.transformer_impl == "local": + if self.context_parallel_size > 1 and self.cp_comm_type is not None: + all_cp_comm_types_are_all_gather = ( + all(item == "all_gather" for item in self.cp_comm_type) + if isinstance(self.cp_comm_type, list) + else self.cp_comm_type == "all_gather" + ) + if not all_cp_comm_types_are_all_gather: + raise ValueError( + f"fallback_to_eager_attn only supports all_gather communication type " + f"for context parallelism, but got {self.cp_comm_type=} instead." + ) + if self.transformer_impl == "inference_optimized": assert self.normalization == "RMSNorm" assert not self.layernorm_zero_centered_gamma @@ -1984,12 +2029,6 @@ def __post_init__(self): assert not self.add_qkv_bias assert not self.use_kitchen - if self.experimental_attention_variant == "dsa": - assert ( - self.context_parallel_size == 1 - ), "Currently context parallelism is not supported by DSAttention!" - assert not self.apply_rope_fusion, "RoPE fusion is not supported for DSAttention" - if self.inference_fuse_tp_communication: assert self.transformer_impl == "inference_optimized", ( "inference_fuse_tp_communication is only supported " @@ -2003,6 +2042,7 @@ def __post_init__(self): @dataclass +@experimental_api class MLATransformerConfig(TransformerConfig): """Configuration object for megatron-core Multi-Latent Attention (MLA) transformers. diff --git a/megatron/core/transformer/transformer_layer.py b/megatron/core/transformer/transformer_layer.py index a5eaec92866..12c24868473 100644 --- a/megatron/core/transformer/transformer_layer.py +++ b/megatron/core/transformer/transformer_layer.py @@ -1203,6 +1203,15 @@ def _should_call_local_cudagraph(self, *args, **kwargs): return True return False + def backward_dw_cudagraph(self, microbatch_idx): + """ + CUDA Graph backward weight gradient computation for this layer. 
+ """ + cg_index = microbatch_idx % len(self.cuda_graphs) + if not hasattr(self.cuda_graphs[cg_index], 'backward_dw'): + return + self.cuda_graphs[cg_index].backward_dw() + def __call__(self, *args, **kwargs): if self._should_call_local_cudagraph(*args, **kwargs): # Inference mode. diff --git a/megatron/core/utils.py b/megatron/core/utils.py index d7b702f25ec..036589b209a 100644 --- a/megatron/core/utils.py +++ b/megatron/core/utils.py @@ -46,6 +46,7 @@ from megatron.core import config from megatron.core._rank_utils import log_single_rank from megatron.core.package_info import __version__ as mcore_version +from megatron.core.packed_seq_params import PackedSeqParams try: from torch.distributed._tensor import DTensor @@ -965,6 +966,12 @@ def make_tp_sharded_tensor_for_checkpoint( # Pop group parameters from kwargs tp_group = kwargs.pop('tp_group', None) dp_cp_group = kwargs.pop('dp_cp_group', None) + # If there are any additional kwargs left, surface them for visibility + # (these will be forwarded to ShardedTensor.from_rank_offsets). + if kwargs: + logger.warning( + "make_tp_sharded_tensor_for_checkpoint received extra kwargs: %s", list(kwargs.keys()) + ) prepend_axis_num = len(prepend_offsets) @@ -1030,6 +1037,12 @@ def make_sharded_tensor_for_checkpoint(tensor, key, prepend_offsets=(), replica_ # Pop group parameters from kwargs tp_group = kwargs.pop('tp_group', None) dp_cp_group = kwargs.pop('dp_cp_group', None) + # If there are any additional kwargs left, surface them for visibility + # (these will be forwarded to ShardedTensor.from_rank_offsets). 
+ if kwargs: + logger.warning( + "make_sharded_tensor_for_checkpoint received extra kwargs: %s", list(kwargs.keys()) + ) prepend_axis_num = len(prepend_offsets) @@ -2081,8 +2094,8 @@ def get_thd_batch_on_this_cp_rank( max_seqlen_kv=int(max_seqlen[0].item()), ) - cp_size = get_context_parallel_world_size() if cp_size is None else cp_size - cp_rank = get_context_parallel_rank() if cp_rank is None else cp_rank + cp_size = parallel_state.get_context_parallel_world_size() if cp_size is None else cp_size + cp_rank = parallel_state.get_context_parallel_rank() if cp_rank is None else cp_rank if cp_size > 1: # slice batch along sequence dimension for context parallelism assert tex is not None and is_te_min_version("1.10.0"), ( "Please update Transformer Engine to >= 1.10 to use " diff --git a/megatron/training/arguments.py b/megatron/training/arguments.py index 5246f44d206..1af066a8207 100644 --- a/megatron/training/arguments.py +++ b/megatron/training/arguments.py @@ -689,14 +689,13 @@ def validate_args(args, defaults={}): assert args.ckpt_format == "fsdp_dtensor", \ "Megatron FSDP only supports fsdp_dtensor checkpoint format" - + + args.reuse_grad_buf_for_mxfp8_param_ag = False + if args.fsdp_manual_registration: assert args.use_megatron_fsdp, "FSDP manual registration is only supported with Megatron FSDP" assert args.nccl_ub, "FSDP manual registration is only supported with nccl-ub option" - if args.use_megatron_fsdp: - args.reuse_grad_buf_for_mxfp8_param_ag = False - # Parameters dtype. args.params_dtype = torch.float if args.fp16: @@ -1176,15 +1175,15 @@ def validate_args(args, defaults={}): # Muon optimizer check if 'muon' in args.optimizer: - # TODO: remove these checks once we support them assert not args.overlap_grad_reduce, "Muon optimizer does not support overlap grad reduce for now." assert not args.overlap_param_gather, "Muon optimizer does not support overlap param gather for now." 
- assert not args.use_distributed_optimizer, "Muon optimizer does not support distributed optimizer for now." assert not args.use_torch_fsdp2, "Muon optimizer does not support Torch-FSDP2 for now." assert not args.use_megatron_fsdp, "Muon optimizer does not support Megatron-FSDP for now." assert args.ckpt_format in ["torch", "torch_dist"], "Muon optimizer supports torch and torch_dist checkpoint format." + assert args.experimental_attention_variant is None, "Muon optimizer does not support attention variant for now." + assert not args.attention_output_gate, "Muon optimizer does not support attention output gate for now." # Optimizer CPU offload check if args.optimizer_cpu_offload: @@ -1197,6 +1196,11 @@ def validate_args(args, defaults={}): "must be used in conjunction with `--fp8-recipe delayed`." ) + if args.offload_optimizer_states: + assert args.use_distributed_optimizer, "offload_optimizer_states is only supported with distributed optimizer" + assert args.optimizer == 'adam', "offload_optimizer_states is only supported with adam optimizer" + assert not args.use_megatron_fsdp, "offload_optimizer_states does not support Megatron-FSDP for now." + if args.non_persistent_ckpt_type == "local": assert args.non_persistent_local_ckpt_dir is not None, "Tried to use local checkpointing without specifying --local-ckpt-dir!" if args.replication: @@ -1213,6 +1217,25 @@ def validate_args(args, defaults={}): assert is_te_min_version("2.8.0"), ( "overlap_grad_reduce is only supported with TE >= 2.8.0 when enabling delay_wgrad_compute" ) + wgrad_in_graph_scope = CudaGraphScope.attn in args.cuda_graph_scope or ( + CudaGraphScope.moe_router in args.cuda_graph_scope + and args.moe_shared_expert_intermediate_size is not None + and not args.moe_shared_expert_overlap + ) + if wgrad_in_graph_scope: + assert is_te_min_version( + "2.12.0" + ), "CUDA graph with delay_wgrad_compute requires TE version >= 2.12.0." 
+ assert args.gradient_accumulation_fusion, ( + 'CUDA graph with delay_wgrad_compute requires gradient_accumulation_fusion ' + 'to be enabled. This is because the default gradient accumulation does not ' + 'use static memory addresses, which breaks CUDA graph requirements.' + ) + if CudaGraphScope.attn in args.cuda_graph_scope: + assert ( + not args.add_bias_linear and not args.add_qkv_bias + ), "CUDA graph with delay_wgrad_compute doesn't support attn bias for now." + if not args.gradient_accumulation_fusion: assert is_te_min_version("2.7.0"), ( "disabling gradient_accumulation_fusion is only supported with TE >= 2.7.0 " @@ -2074,6 +2097,14 @@ def _add_training_args(parser): help='Disable pinning of CPU memory for gradients.') group.add_argument('--no-pin-cpu-params', action='store_false', dest='pin_cpu_params', help='Disable pinning of CPU memory for parameters.') + group.add_argument('--offload-optimizer-states', + action='store_true', + dest='offload_optimizer_states', + help='Offload optimizer states to CPU after each optimizer step and ' + 'reload them before the next optimizer step. ' + 'Only support TE FusedAdam optimizer.' 
+ 'Note that this still uses pure GPU optimizer instead of ' + 'HybridDeviceOptimizer for --optimizer-cpu-offload.') group.add_argument('--dataloader-type', type=str, default=None, choices=['single', 'cyclic', 'external'], help='Single pass vs multiple pass data loader') diff --git a/megatron/training/checkpointing.py b/megatron/training/checkpointing.py index a3d307f1e30..7c87eca191a 100644 --- a/megatron/training/checkpointing.py +++ b/megatron/training/checkpointing.py @@ -561,7 +561,7 @@ def save_checkpoint(iteration, model, optimizer, opt_param_scheduler, num_floati ensure_directory_exists(optim_checkpoint_name) if not optimizer.is_stub_optimizer: optimizer.save_parameter_state(optim_checkpoint_name) - + # LayerWiseDistributedOptimizer save optimizer state to file on different ranks if getattr(args, "optimizer", "adam").startswith("dist_") and args.ckpt_format == 'torch': dp_rank = mpu.get_data_parallel_rank() diff --git a/megatron/training/tokenizer/tokenizer.py b/megatron/training/tokenizer/tokenizer.py index 33340a5e978..17df57ddacb 100644 --- a/megatron/training/tokenizer/tokenizer.py +++ b/megatron/training/tokenizer/tokenizer.py @@ -48,7 +48,7 @@ def build_tokenizer(args, **kwargs): tokenizer = _GPTSentencePieceTokenizer(args.tokenizer_model) elif args.tokenizer_type == 'HuggingFaceTokenizer': tokenizer = _HuggingFaceTokenizer( - args.tokenizer_model, trust_remote_code = args.trust_remote_code, **kwargs, + args.tokenizer_model, trust_remote_code=args.trust_remote_code, **kwargs ) elif args.tokenizer_type == 'Llama2Tokenizer': assert args.tokenizer_model is not None @@ -78,11 +78,7 @@ def build_tokenizer(args, **kwargs): kwargs = dict() if args.tokenizer_prompt_format == "nvlm-yi-34b": - kwargs = { - "from_slow": True, - "legacy": False, - "add_bos_token": True, - } + kwargs = {"from_slow": True, "legacy": False, "add_bos_token": True} # Currently, only HuggingFace tokenizers are supported. 
underlying_tokenizer = transformers.AutoTokenizer.from_pretrained( @@ -97,10 +93,7 @@ def build_tokenizer(args, **kwargs): args.force_system_message, ) elif args.tokenizer_type == "SFTTokenizer": - tokenizer = SFTTokenizer( - args.tokenizer_model, - args.sft_tokenizer_prompt_format, - ) + tokenizer = SFTTokenizer(args.tokenizer_model, args.sft_tokenizer_prompt_format) elif args.tokenizer_type == 'NullMultimodalTokenizer': assert args.vocab_size is not None tokenizer = _NullMultimodalTokenizer(args.vocab_size) @@ -144,7 +137,7 @@ def __init__(self, pretrained_model_name_or_path, trust_remote_code=False, **kwa self._tokenizer = transformers.AutoTokenizer.from_pretrained( pretrained_model_name_or_path=pretrained_model_name_or_path, trust_remote_code=trust_remote_code, - **kwargs + **kwargs, ) self._vocab = self._tokenizer.get_vocab() self._inv_vocab = {token_id: token for token, token_id in self._vocab.items()} @@ -367,6 +360,10 @@ def detokenize(self, token_ids): def eod(self): return self.eod_id + @property + def eos(self): + return self.eod_id + class _SentencePieceTokenizer(MegatronLegacyTokenizer): """SentencePieceTokenizer-Megatron wrapper""" @@ -573,6 +570,10 @@ def mask(self): def eod(self): return self._eos_id + @property + def eos(self): + return self._eos_id + @property def additional_special_tokens_ids(self): return None @@ -623,6 +624,10 @@ def mask(self): def eod(self): return self.eos_id + @property + def eos(self): + return self.eos_id + @property def additional_special_tokens_ids(self): return None @@ -747,7 +752,7 @@ def bos(self) -> int: @property def eos(self) -> int: return self._eos_id - + @property def pad(self) -> int: return self._pad_id @@ -858,19 +863,30 @@ def mask(self): def eod(self): return self._eod_id + @property + def eos(self): + return self._eod_id + @property def additional_special_tokens_ids(self): return None + class _NullMultimodalTokenizer(MegatronLegacyTokenizer): def __init__(self, vocab_size, image_token=None, 
image_token_id=None): super().__init__(None, vocab_size=vocab_size) self._vocab_size_without_eod = int(vocab_size) self._eod_id = self._vocab_size_without_eod - from megatron.core.models.multimodal.llava_model import DEFAULT_IMAGE_TOKEN_INDEX, IMAGE_TOKEN + from megatron.core.models.multimodal.llava_model import ( + DEFAULT_IMAGE_TOKEN_INDEX, + IMAGE_TOKEN, + ) + self._image_token = image_token if image_token is not None else IMAGE_TOKEN - self._image_token_id = image_token_id if image_token_id is not None else DEFAULT_IMAGE_TOKEN_INDEX + self._image_token_id = ( + image_token_id if image_token_id is not None else DEFAULT_IMAGE_TOKEN_INDEX + ) def tokenize(self, text): return [int(x) for x in text.split(' ')] @@ -887,7 +903,9 @@ def offsets(self, ids: list[int], text: str) -> list[int]: return offsets def convert_tokens_to_ids(self, tokens): - ids = [(int(t) if t != self._image_token else self._image_token_id) for t in tokens.split(' ')] + ids = [ + (int(t) if t != self._image_token else self._image_token_id) for t in tokens.split(' ') + ] return ids if len(ids) > 1 else ids[0] @property @@ -918,6 +936,10 @@ def mask(self): def eod(self): return self._eod_id + @property + def eos(self): + return self._eod_id + @property def additional_special_tokens_ids(self): return None diff --git a/megatron/training/training.py b/megatron/training/training.py index 500d30b9e73..e9736ac085c 100644 --- a/megatron/training/training.py +++ b/megatron/training/training.py @@ -782,7 +782,9 @@ def pretrain( set_ideal_affinity_for_current_gpu ) set_ideal_affinity_for_current_gpu() - + if args.batch_invariant_mode: + print_rank_0("Enabling batch invariant mode globally", flush=True) + enable_batch_invariant_mode() if args.log_progress: append_to_progress_log("Starting job") @@ -1610,6 +1612,12 @@ def train_step(forward_step_func, data_iterator, model, optimizer, opt_param_sch save_wgrads_in_this_iteration = (args.save_wgrads_interval is not None and (iteration + 1) % 
args.save_wgrads_interval == 0) while rerun_state_machine.should_run_forward_backward(data_iterator): + # Offload optimizer states to CPU if enabled. + if args.offload_optimizer_states: + for optim_instance in optimizer.chained_optimizers: + if isinstance(optim_instance, DistributedOptimizer): + optim_instance.offload_states() + # Set grad to zero. for model_chunk in model: model_chunk.zero_grad_buffer() @@ -1645,6 +1653,14 @@ def train_step(forward_step_func, data_iterator, model, optimizer, opt_param_sch if isinstance(optim_instance, DistributedOptimizer): optim_instance._copy_main_params_to_param_buffer() + # Release GPU memory for offloaded optimizer states. + # This needs to be done after _copy_main_params_to_param_buffer(). + # Separate offload and release to allow early D2H transfer to overlap with other operations. + if args.offload_optimizer_states: + for optim_instance in optimizer.chained_optimizers: + if isinstance(optim_instance, DistributedOptimizer): + optim_instance.release_offloaded_gpu_states() + # Forward pass. if save_dgrads_in_this_iteration: enable_dgrad_logging(model, args.save) @@ -1990,7 +2006,6 @@ def training_log( MTPLossLoggingHelper.track_mtp_metrics( mtp_loss_scale, iteration, writer, wandb_writer, total_loss_dict ) - # Track sparse attention indexer loss. if args.dsa_indexer_loss_coeff is not None and args.dsa_indexer_loss_coeff > 0: indexer_loss_scale = 1 / get_num_microbatches() @@ -2001,7 +2016,6 @@ def training_log( wandb_writer=wandb_writer, total_loss_dict=total_loss_dict, ) - # Dump memory snapshot and print metrics to stdout. 
if iteration % args.log_interval == 0 or is_first_iteration: if args.record_memory_history and (is_last_rank() or torch.distributed.get_backend() == 'fake'): @@ -2179,7 +2193,8 @@ def save_checkpoint_and_time( # Stop timer to get accurate train interval time and exclude checkpointing duration timers('interval-time').stop() - energy_monitor.pause() + if args.log_energy: + energy_monitor.pause() # Extra barrier is added to make sure all ranks report the max time. timer_key = 'save-checkpoint-non-persistent' if non_persistent_ckpt else 'save-checkpoint' @@ -2232,7 +2247,9 @@ def save_checkpoint_and_time( ) # Recover timing - energy_monitor.resume() + if args.log_energy: + energy_monitor.resume() + timers('interval-time', log_level=0).start(barrier=True) @@ -2560,7 +2577,21 @@ def train( config.param_sync_func = [model_chunk.start_param_sync for model_chunk in model] if len(model) == 1: config.param_sync_func = config.param_sync_func[0] - config.finalize_model_grads_func = finalize_model_grads + + # Wrap finalize_model_grads to reload offloaded optimizer states before grad finalization. + # This allows H2D transfer to overlap with grad all-reduce. 
+ if args.offload_optimizer_states: + + def finalize_model_grads_with_state_reload(*fmg_args, **fmg_kwargs): + # Reload offloaded states for all DistributedOptimizer instances + for optim_instance in optimizer.chained_optimizers: + if isinstance(optim_instance, DistributedOptimizer): + optim_instance.reload_offloaded_states() + return finalize_model_grads(*fmg_args, **fmg_kwargs) + + config.finalize_model_grads_func = finalize_model_grads_with_state_reload + else: + config.finalize_model_grads_func = finalize_model_grads if args.log_energy: energy_monitor.setup() @@ -3314,18 +3345,20 @@ def get_train_valid_test_num_samples(): return (train_samples_in_current_phase, eval_samples, test_samples) -def build_train_valid_test_datasets(build_train_valid_test_datasets_provider, train_valid_test_num_samples=None): +def build_train_valid_test_datasets(build_train_valid_test_datasets_provider, train_valid_test_num_samples=None, vp_stage=None): """Build pretraining datasets.""" if train_valid_test_num_samples is None: train_valid_test_num_samples = get_train_valid_test_num_samples() - print_rank_0(' > datasets target sizes (minimum size):') print_rank_0(' train: {}'.format(train_valid_test_num_samples[0])) print_rank_0(' validation: {}'.format(train_valid_test_num_samples[1])) print_rank_0(' test: {}'.format(train_valid_test_num_samples[2])) - return build_train_valid_test_datasets_provider(train_valid_test_num_samples) + if vp_stage is not None: + return build_train_valid_test_datasets_provider(train_valid_test_num_samples, vp_stage=vp_stage) + else: + return build_train_valid_test_datasets_provider(train_valid_test_num_samples) -def build_train_valid_test_data_loaders(build_train_valid_test_datasets_provider): +def build_train_valid_test_data_loaders(build_train_valid_test_datasets_provider, vp_stage=None): """Build pretraining data loaders.""" args = get_args() @@ -3372,7 +3405,10 @@ def build_train_valid_test_data_loaders(build_train_valid_test_datasets_provider else: # 
Build datasets. - train_ds, valid_ds, test_ds = build_train_valid_test_datasets(build_train_valid_test_datasets_provider) + train_ds, valid_ds, test_ds = build_train_valid_test_datasets( + build_train_valid_test_datasets_provider, + vp_stage=vp_stage, + ) valid_ds = [valid_ds] if not isinstance(valid_ds, list) else valid_ds if args.skip_train: train_dataloader = None @@ -3412,14 +3448,15 @@ def build_train_valid_test_data_loaders(build_train_valid_test_datasets_provider return train_dataloader, valid_dataloaders, test_dataloader -def build_train_valid_test_data_iterators(build_train_valid_test_datasets_provider): +def build_train_valid_test_data_iterators(build_train_valid_test_datasets_provider, vp_stage=None): """Build pretraining data iterators.""" args = get_args() # Build loaders. train_dataloader, valid_dataloaders, test_dataloader = build_train_valid_test_data_loaders( - build_train_valid_test_datasets_provider + build_train_valid_test_datasets_provider, + vp_stage=vp_stage ) # Build iterators. 
diff --git a/megatron/training/utils.py b/megatron/training/utils.py index d9681728467..7709f651391 100644 --- a/megatron/training/utils.py +++ b/megatron/training/utils.py @@ -39,6 +39,7 @@ from megatron.core.utils import ( get_batch_on_this_cp_rank, get_data_parallel_group_if_dtensor, + is_torch_min_version, to_local_if_dtensor, unwrap_model, ) @@ -284,7 +285,8 @@ def report_memory(name): string += f" | max allocated: {torch.cuda.max_memory_allocated() / mega_bytes:.2f}" string += f" | reserved: {torch.cuda.memory_reserved() / mega_bytes:.2f}" string += f" | max reserved: {torch.cuda.max_memory_reserved() / mega_bytes:.2f}" - if args.log_device_memory_used: + if args.log_device_memory_used and is_torch_min_version("2.6.0"): + # device usage is not supported in torch < 2.6.0 string += f" | total device memory used: {torch.cuda.device_memory_used() / mega_bytes:.2f}" if mpu.get_data_parallel_rank() == 0: print("[Rank {}] {}".format(torch.distributed.get_rank(), string), flush=True) diff --git a/pretrain_gpt.py b/pretrain_gpt.py index 8eff08d24b2..a2d956a3266 100644 --- a/pretrain_gpt.py +++ b/pretrain_gpt.py @@ -30,6 +30,7 @@ from megatron.core.rerun_state_machine import get_rerun_state_machine from megatron.core.utils import get_attr_wrapped_model, get_thd_batch_on_this_cp_rank, get_batch_on_this_hybrid_cp_rank, StragglerDetector from megatron.core.tokenizers.text.utils.build_tokenizer import build_tokenizer +from megatron.core.transformer.multi_token_prediction import mtp_on_this_rank, get_mtp_ranks from megatron.training import ( get_args, get_timers, @@ -39,6 +40,7 @@ print_rank_0, set_startup_timestamps, ) +from megatron.training.arguments import core_transformer_config_from_args from megatron.training.datasets.sft_dataset import SFTDataset from megatron.core.transformer.multi_token_prediction import mtp_on_this_rank, get_mtp_ranks from megatron.training.arguments import core_transformer_config_from_args diff --git 
a/tests/functional_tests/shell_test_utils/run_ci_test.sh b/tests/functional_tests/shell_test_utils/run_ci_test.sh index 3d47e591749..fa3ed2f4db9 100644 --- a/tests/functional_tests/shell_test_utils/run_ci_test.sh +++ b/tests/functional_tests/shell_test_utils/run_ci_test.sh @@ -149,6 +149,10 @@ for i in $(seq 1 $N_REPEAT); do # First run never loads from a checkpoint export RUN_NUMBER=1 + DIR=$(dirname "$_TENSORBOARD_PATH") + FILE=$(basename "$_TENSORBOARD_PATH") + export TENSORBOARD_PATH=$DIR/$i/$FILE + mkdir -p $(dirname $TENSORBOARD_PATH) export REPEAT=$i export CHECKPOINT_SAVE_PATH=$_CHECKPOINT_SAVE_PATH export TRAINING_EXIT_CODE=0 diff --git a/tests/functional_tests/test_cases/bert/bert_release/model_config.yaml b/tests/functional_tests/test_cases/bert/bert_release/model_config.yaml index ab5558fa7d2..546926fc66c 100644 --- a/tests/functional_tests/test_cases/bert/bert_release/model_config.yaml +++ b/tests/functional_tests/test_cases/bert/bert_release/model_config.yaml @@ -27,7 +27,7 @@ MODEL_ARGS: --pipeline-model-parallel-size: 8 # Data args --data-path: ${DATA_BLEND} - --vocab-file: ${DATA_PATH}/text/the_pile/bert_shard00/vocab.txt + --vocab-file: ${DATA_PATH}/vocab.txt --split: 949,50,1 --data-cache-path: ${DATA_CACHE_PATH} # EVAL_AND_LOGGING_ARGS @@ -45,6 +45,7 @@ MODEL_ARGS: --log-params-norm: true --log-validation-ppl-to-tensorboard: true --wandb-project: megatron-core-release-runs + --wandb-entity: adlr --wandb-exp-name: ${WANDB_EXPERIMENT} --attention-backend: unfused --exit-interval: 20000 diff --git a/tests/functional_tests/test_cases/ci_base_config.yml b/tests/functional_tests/test_cases/ci_base_config.yml new file mode 100644 index 00000000000..739f343da9d --- /dev/null +++ b/tests/functional_tests/test_cases/ci_base_config.yml @@ -0,0 +1,14 @@ +MODEL_ARGS: + # Add logging args + --log-timers-to-tensorboard: true + --log-memory-to-tensorboard: true + --log-num-zeros-in-grad: true + --log-params-norm: true + --log-validation-ppl-to-tensorboard: true 
+ --log-throughput: true + --log-interval: 1 + --logging-level: 40 + --tensorboard-dir: ${TENSORBOARD_PATH} + # Add checkpointing args + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/model_config.yaml index 44f9de33775..692e3882e02 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release/model_config.yaml @@ -84,6 +84,7 @@ MODEL_ARGS: --log-interval: 100 --tensorboard-dir: ${TENSORBOARD_PATH} --wandb-project: megatron-core-release-runs + --wandb-entity: adlr --wandb-exp-name: ${WANDB_EXPERIMENT} # Add mixed precision args --bf16: true diff --git a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release_sm/golden_values_dev_dgx_h100.json b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release_sm/golden_values_dev_dgx_h100.json index 02bcf7fe698..523227bf433 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release_sm/golden_values_dev_dgx_h100.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release_sm/golden_values_dev_dgx_h100.json @@ -1 +1,10242 @@ -{"lm loss": {"start_step": 1, "end_step": 13000, "step_interval": 5, "values": {"1": 12.98419, "5": 12.93858, "10": 12.06404, "15": 11.97882, "20": 10.53588, "25": 10.11952, "30": 9.7286, "35": 9.44173, "40": 9.2373, "45": 9.03763, "50": 8.85277, "55": 8.64259, "60": 8.60098, "65": 8.50179, "70": 8.41326, "75": 8.31346, "80": 8.16921, "85": 8.09253, "90": 7.97894, "95": 7.91859, "100": 7.82704, "105": 7.71191, "110": 7.62418, "115": 7.52685, "120": 7.48107, "125": 7.48004, "130": 7.33364, "135": 7.26758, "140": 7.23146, "145": 7.04647, "150": 7.17621, "155": 7.00383, "160": 6.89968, "165": 6.91293, "170": 6.84228, "175": 6.85916, "180": 6.81429, "185": 6.7203, "190": 6.66124, "195": 6.59364, "200": 6.64046, "205": 6.64305, 
"210": 6.5179, "215": 6.51519, "220": 6.51027, "225": 6.46653, "230": 6.47574, "235": 6.42409, "240": 6.36976, "245": 6.3778, "250": 6.29868, "255": 6.43438, "260": 6.34377, "265": 6.28803, "270": 6.23364, "275": 6.26123, "280": 6.19076, "285": 6.19886, "290": 6.15022, "295": 6.12619, "300": 6.11141, "305": 6.01886, "310": 6.08556, "315": 6.07169, "320": 5.99243, "325": 5.93189, "330": 5.99792, "335": 6.0145, "340": 5.93453, "345": 5.92339, "350": 5.87179, "355": 5.84258, "360": 5.85866, "365": 5.81752, "370": 5.80407, "375": 5.80516, "380": 5.85848, "385": 5.78993, "390": 5.81141, "395": 5.68051, "400": 5.66121, "405": 5.68906, "410": 5.66202, "415": 5.70461, "420": 5.63851, "425": 5.66062, "430": 5.62802, "435": 5.56913, "440": 5.62147, "445": 5.52803, "450": 5.58428, "455": 5.5123, "460": 5.49325, "465": 5.56828, "470": 5.54845, "475": 5.49678, "480": 5.46247, "485": 5.49185, "490": 5.47566, "495": 5.47856, "500": 5.42533, "505": 5.38883, "510": 5.44319, "515": 5.42148, "520": 5.47608, "525": 5.31477, "530": 5.33216, "535": 5.36, "540": 5.33276, "545": 5.41314, "550": 5.37099, "555": 5.23374, "560": 5.32665, "565": 5.27809, "570": 5.25324, "575": 5.28184, "580": 5.23593, "585": 5.21762, "590": 5.22346, "595": 5.22561, "600": 5.26751, "605": 5.22896, "610": 5.2012, "615": 5.18737, "620": 5.19543, "625": 5.19655, "630": 5.14985, "635": 5.12452, "640": 5.09298, "645": 5.13279, "650": 5.14481, "655": 5.11963, "660": 5.0475, "665": 5.1142, "670": 5.04119, "675": 5.01723, "680": 5.05635, "685": 5.00678, "690": 5.01633, "695": 4.96228, "700": 4.97301, "705": 4.95571, "710": 4.97305, "715": 4.87719, "720": 4.85764, "725": 4.80769, "730": 4.84352, "735": 4.82916, "740": 4.8644, "745": 4.74895, "750": 4.75764, "755": 4.8023, "760": 4.78257, "765": 4.76428, "770": 4.69615, "775": 4.69212, "780": 4.684, "785": 4.7405, "790": 4.67498, "795": 4.64675, "800": 4.61184, "805": 4.61203, "810": 4.65393, "815": 4.60253, "820": 4.62914, "825": 4.58486, "830": 4.57946, "835": 
4.56275, "840": 4.48603, "845": 4.50743, "850": 4.45704, "855": 4.51258, "860": 4.43583, "865": 4.52116, "870": 4.47717, "875": 4.38345, "880": 4.41849, "885": 4.38985, "890": 4.43389, "895": 4.42652, "900": 4.39808, "905": 4.32618, "910": 4.35391, "915": 4.34171, "920": 4.38377, "925": 4.38353, "930": 4.30961, "935": 4.29021, "940": 4.36235, "945": 4.31265, "950": 4.35051, "955": 4.28087, "960": 4.19218, "965": 4.27537, "970": 4.26236, "975": 4.24397, "980": 4.22146, "985": 4.17523, "990": 4.13237, "995": 4.18304, "1000": 4.2308, "1005": 4.18908, "1010": 4.17127, "1015": 4.13273, "1020": 4.15962, "1025": 4.22451, "1030": 4.1247, "1035": 4.10028, "1040": 4.13078, "1045": 4.11598, "1050": 4.15985, "1055": 4.0973, "1060": 4.1011, "1065": 4.06848, "1070": 4.05654, "1075": 4.07977, "1080": 4.07117, "1085": 4.06586, "1090": 4.02567, "1095": 4.09923, "1100": 4.06572, "1105": 4.071, "1110": 4.03046, "1115": 4.00398, "1120": 4.00356, "1125": 4.00318, "1130": 4.04694, "1135": 4.01101, "1140": 4.00143, "1145": 3.93633, "1150": 4.03948, "1155": 4.00619, "1160": 3.9851, "1165": 3.86882, "1170": 3.92684, "1175": 3.93038, "1180": 3.95878, "1185": 3.96184, "1190": 3.93034, "1195": 3.94125, "1200": 3.89932, "1205": 3.87266, "1210": 3.97831, "1215": 3.84899, "1220": 3.87263, "1225": 3.81413, "1230": 3.92846, "1235": 3.89312, "1240": 3.87537, "1245": 3.80079, "1250": 3.82885, "1255": 3.8493, "1260": 3.8874, "1265": 3.79336, "1270": 3.88167, "1275": 3.8455, "1280": 3.82143, "1285": 3.85735, "1290": 3.87159, "1295": 3.84912, "1300": 3.81711, "1305": 3.82495, "1310": 3.82551, "1315": 3.81192, "1320": 3.81405, "1325": 3.72789, "1330": 3.79529, "1335": 3.76889, "1340": 3.76123, "1345": 3.74704, "1350": 3.73516, "1355": 3.76572, "1360": 3.73919, "1365": 3.73441, "1370": 3.73284, "1375": 3.74202, "1380": 3.74909, "1385": 3.7541, "1390": 3.66502, "1395": 3.73883, "1400": 3.72777, "1405": 3.66183, "1410": 3.67095, "1415": 3.64909, "1420": 3.7012, "1425": 3.71121, "1430": 3.6707, "1435": 
3.65988, "1440": 3.63587, "1445": 3.67497, "1450": 3.67892, "1455": 3.65003, "1460": 3.64827, "1465": 3.69085, "1470": 3.63258, "1475": 3.6929, "1480": 3.65433, "1485": 3.65353, "1490": 3.61514, "1495": 3.60996, "1500": 3.65344, "1505": 3.69268, "1510": 3.55544, "1515": 3.60731, "1520": 3.63573, "1525": 3.59446, "1530": 3.59436, "1535": 3.59439, "1540": 3.59806, "1545": 3.59944, "1550": 3.56476, "1555": 3.56897, "1560": 3.61095, "1565": 3.62486, "1570": 3.58908, "1575": 3.54769, "1580": 3.59493, "1585": 3.58838, "1590": 3.47011, "1595": 3.51425, "1600": 3.50212, "1605": 3.55557, "1610": 3.56864, "1615": 3.50712, "1620": 3.52052, "1625": 3.47381, "1630": 3.50275, "1635": 3.55197, "1640": 3.51791, "1645": 3.54635, "1650": 3.49031, "1655": 3.4704, "1660": 3.52131, "1665": 3.45881, "1670": 3.51236, "1675": 3.50045, "1680": 3.46681, "1685": 3.49078, "1690": 3.48353, "1695": 3.49776, "1700": 3.46185, "1705": 3.40868, "1710": 3.48728, "1715": 3.49675, "1720": 3.43132, "1725": 3.44122, "1730": 3.42961, "1735": 3.4644, "1740": 3.45584, "1745": 3.43534, "1750": 3.41748, "1755": 3.42246, "1760": 3.37607, "1765": 3.42468, "1770": 3.43261, "1775": 3.379, "1780": 3.4355, "1785": 3.42306, "1790": 3.37807, "1795": 3.40594, "1800": 3.34684, "1805": 3.39025, "1810": 3.34654, "1815": 3.4221, "1820": 3.41309, "1825": 3.38102, "1830": 3.32801, "1835": 3.42581, "1840": 3.38756, "1845": 3.42483, "1850": 3.39575, "1855": 3.37642, "1860": 3.35084, "1865": 3.38387, "1870": 3.29854, "1875": 3.45603, "1880": 3.34133, "1885": 3.36396, "1890": 3.34216, "1895": 3.39809, "1900": 3.36794, "1905": 3.30189, "1910": 3.32844, "1915": 3.3151, "1920": 3.36278, "1925": 3.33318, "1930": 3.31578, "1935": 3.31145, "1940": 3.36438, "1945": 3.26186, "1950": 3.40063, "1955": 3.30708, "1960": 3.31486, "1965": 3.28405, "1970": 3.29999, "1975": 3.33744, "1980": 3.34165, "1985": 3.23762, "1990": 3.32593, "1995": 3.28362, "2000": 3.27303, "2005": 3.26618, "2010": 3.28661, "2015": 3.22715, "2020": 3.27479, "2025": 
3.27135, "2030": 3.27231, "2035": 3.29709, "2040": 3.26992, "2045": 3.23395, "2050": 3.27144, "2055": 3.32457, "2060": 3.28203, "2065": 3.24424, "2070": 3.30416, "2075": 3.24361, "2080": 3.22085, "2085": 3.30421, "2090": 3.15415, "2095": 3.29493, "2100": 3.23056, "2105": 3.19586, "2110": 3.20308, "2115": 3.24137, "2120": 3.18045, "2125": 3.21484, "2130": 3.22503, "2135": 3.27763, "2140": 3.1948, "2145": 3.21466, "2150": 3.20867, "2155": 3.2365, "2160": 3.20615, "2165": 3.25296, "2170": 3.23909, "2175": 3.17384, "2180": 3.22049, "2185": 3.25755, "2190": 3.24863, "2195": 3.15892, "2200": 3.2045, "2205": 3.17192, "2210": 3.12378, "2215": 3.19067, "2220": 3.19593, "2225": 3.18949, "2230": 3.13791, "2235": 3.19063, "2240": 3.21593, "2245": 3.1871, "2250": 3.21088, "2255": 3.1585, "2260": 3.14105, "2265": 3.23218, "2270": 3.21271, "2275": 3.1566, "2280": 3.17614, "2285": 3.16486, "2290": 3.17692, "2295": 3.20197, "2300": 3.13847, "2305": 3.15963, "2310": 3.12614, "2315": 3.06316, "2320": 3.11512, "2325": 3.17382, "2330": 3.12397, "2335": 3.12649, "2340": 3.17195, "2345": 3.12462, "2350": 3.129, "2355": 3.11726, "2360": 3.16031, "2365": 3.09266, "2370": 3.15197, "2375": 3.13019, "2380": 3.11082, "2385": 3.09359, "2390": 3.09567, "2395": 3.09807, "2400": 3.09966, "2405": 3.10436, "2410": 3.09007, "2415": 3.09491, "2420": 3.08537, "2425": 3.07877, "2430": 3.08079, "2435": 3.06761, "2440": 3.08574, "2445": 3.05747, "2450": 3.12167, "2455": 3.15832, "2460": 3.08596, "2465": 3.07656, "2470": 3.03663, "2475": 3.06421, "2480": 3.10252, "2485": 3.06485, "2490": 3.06573, "2495": 3.08845, "2500": 3.05671, "2505": 3.105, "2510": 3.12399, "2515": 3.0532, "2520": 3.07806, "2525": 3.02426, "2530": 3.04842, "2535": 3.09401, "2540": 3.07984, "2545": 3.05538, "2550": 3.00469, "2555": 3.07001, "2560": 3.04403, "2565": 3.12, "2570": 3.00976, "2575": 3.0601, "2580": 3.08548, "2585": 3.02156, "2590": 3.06606, "2595": 2.99925, "2600": 3.0841, "2605": 3.06879, "2610": 3.05401, "2615": 3.06935, 
"2620": 2.99191, "2625": 3.01384, "2630": 3.03627, "2635": 3.05041, "2640": 3.01088, "2645": 3.05612, "2650": 3.02233, "2655": 2.98756, "2660": 3.01604, "2665": 3.03817, "2670": 2.98547, "2675": 2.97442, "2680": 3.00378, "2685": 3.00171, "2690": 2.99912, "2695": 2.99265, "2700": 3.03079, "2705": 2.98376, "2710": 2.97975, "2715": 2.96047, "2720": 3.02663, "2725": 2.99565, "2730": 3.05827, "2735": 3.04913, "2740": 3.02027, "2745": 3.02502, "2750": 3.02065, "2755": 2.96792, "2760": 2.99447, "2765": 3.00785, "2770": 2.98958, "2775": 2.99278, "2780": 3.02294, "2785": 2.95383, "2790": 2.96474, "2795": 2.95595, "2800": 2.98985, "2805": 2.94051, "2810": 2.99046, "2815": 2.95976, "2820": 3.0756, "2825": 3.03639, "2830": 3.01855, "2835": 2.92175, "2840": 2.92574, "2845": 2.96102, "2850": 2.96997, "2855": 2.96207, "2860": 2.94977, "2865": 2.91535, "2870": 2.99202, "2875": 2.92084, "2880": 2.96303, "2885": 2.91779, "2890": 2.98572, "2895": 2.93253, "2900": 2.95289, "2905": 3.00499, "2910": 2.92994, "2915": 2.94325, "2920": 2.95516, "2925": 2.94427, "2930": 2.95621, "2935": 2.94005, "2940": 2.94552, "2945": 2.9075, "2950": 2.97913, "2955": 2.91177, "2960": 2.97029, "2965": 2.87292, "2970": 2.96107, "2975": 2.99603, "2980": 2.94257, "2985": 3.04155, "2990": 2.93897, "2995": 2.87114, "3000": 2.9422, "3005": 2.89655, "3010": 2.93538, "3015": 2.91032, "3020": 2.91995, "3025": 2.91883, "3030": 2.92686, "3035": 2.95815, "3040": 2.9312, "3045": 2.83504, "3050": 2.8988, "3055": 2.89613, "3060": 2.92461, "3065": 2.92459, "3070": 2.88159, "3075": 2.86953, "3080": 2.9243, "3085": 2.90325, "3090": 2.91754, "3095": 2.92816, "3100": 2.86703, "3105": 2.92918, "3110": 2.90236, "3115": 2.94681, "3120": 2.95312, "3125": 2.86217, "3130": 2.93048, "3135": 2.92489, "3140": 2.87699, "3145": 2.91715, "3150": 2.85701, "3155": 2.8442, "3160": 2.83887, "3165": 2.84564, "3170": 2.89213, "3175": 2.90452, "3180": 2.85788, "3185": 2.89571, "3190": 2.90627, "3195": 2.92723, "3200": 2.92789, "3205": 2.85912, 
"3210": 2.86987, "3215": 2.91563, "3220": 2.87374, "3225": 2.86935, "3230": 2.815, "3235": 2.87434, "3240": 2.8734, "3245": 2.90299, "3250": 2.86289, "3255": 2.8503, "3260": 2.85959, "3265": 2.86936, "3270": 2.85223, "3275": 2.86681, "3280": 2.79974, "3285": 2.81123, "3290": 2.86928, "3295": 2.92038, "3300": 2.87938, "3305": 2.86113, "3310": 2.85785, "3315": 2.80615, "3320": 2.8258, "3325": 2.82403, "3330": 2.82839, "3335": 2.8465, "3340": 2.82742, "3345": 2.84707, "3350": 2.84121, "3355": 2.85933, "3360": 2.79899, "3365": 2.85514, "3370": 2.84537, "3375": 2.84431, "3380": 2.84971, "3385": 2.87822, "3390": 2.8682, "3395": 2.81029, "3400": 2.78472, "3405": 2.82816, "3410": 2.84591, "3415": 2.86134, "3420": 2.82742, "3425": 2.81129, "3430": 2.82982, "3435": 2.8906, "3440": 2.81795, "3445": 2.86973, "3450": 2.81507, "3455": 2.7888, "3460": 2.8203, "3465": 2.84691, "3470": 2.83475, "3475": 2.7758, "3480": 2.84173, "3485": 2.82043, "3490": 2.8933, "3495": 2.84901, "3500": 2.84084, "3505": 2.82498, "3510": 2.81285, "3515": 2.83554, "3520": 2.77982, "3525": 2.80208, "3530": 2.84998, "3535": 2.78436, "3540": 2.83771, "3545": 2.81048, "3550": 2.79684, "3555": 2.8181, "3560": 2.82828, "3565": 2.82912, "3570": 2.80393, "3575": 2.80372, "3580": 2.82073, "3585": 2.83541, "3590": 2.8298, "3595": 2.77921, "3600": 2.74897, "3605": 2.79083, "3610": 2.8488, "3615": 2.75492, "3620": 2.80351, "3625": 2.88338, "3630": 2.77314, "3635": 2.78625, "3640": 2.78253, "3645": 2.76926, "3650": 2.80301, "3655": 2.81469, "3660": 2.76524, "3665": 2.7858, "3670": 2.77457, "3675": 2.77711, "3680": 2.80733, "3685": 2.80194, "3690": 2.8055, "3695": 2.81135, "3700": 2.78752, "3705": 2.78408, "3710": 2.75166, "3715": 2.80201, "3720": 2.79409, "3725": 2.78884, "3730": 2.84096, "3735": 2.80547, "3740": 2.74952, "3745": 2.78986, "3750": 2.8022, "3755": 2.79576, "3760": 2.75757, "3765": 2.75586, "3770": 2.75989, "3775": 2.76991, "3780": 2.76383, "3785": 2.7793, "3790": 2.74202, "3795": 2.79511, "3800": 
2.80269, "3805": 2.75159, "3810": 2.80354, "3815": 2.76482, "3820": 2.78758, "3825": 2.73331, "3830": 2.74563, "3835": 2.81464, "3840": 2.72812, "3845": 2.71424, "3850": 2.77453, "3855": 2.71774, "3860": 2.80173, "3865": 2.75469, "3870": 2.77531, "3875": 2.75779, "3880": 2.78968, "3885": 2.78424, "3890": 2.74541, "3895": 2.79804, "3900": 2.76127, "3905": 2.72353, "3910": 2.74147, "3915": 2.75183, "3920": 2.79462, "3925": 2.77792, "3930": 2.70759, "3935": 2.73982, "3940": 2.75131, "3945": 2.74267, "3950": 2.725, "3955": 2.77958, "3960": 2.75991, "3965": 2.74216, "3970": 2.75653, "3975": 2.72552, "3980": 2.73817, "3985": 2.75045, "3990": 2.69347, "3995": 2.78059, "4000": 2.73558, "4005": 2.7658, "4010": 2.70885, "4015": 2.72538, "4020": 2.74956, "4025": 2.733, "4030": 2.65924, "4035": 2.69455, "4040": 2.74652, "4045": 2.74857, "4050": 2.78817, "4055": 2.7239, "4060": 2.71419, "4065": 2.6515, "4070": 2.80691, "4075": 2.75748, "4080": 2.71884, "4085": 2.74977, "4090": 2.67836, "4095": 2.69073, "4100": 2.7114, "4105": 2.73822, "4110": 2.72956, "4115": 2.70127, "4120": 2.73267, "4125": 2.70389, "4130": 2.69553, "4135": 2.6893, "4140": 2.68057, "4145": 2.77973, "4150": 2.70801, "4155": 2.73792, "4160": 2.76329, "4165": 2.72099, "4170": 2.67438, "4175": 2.71828, "4180": 2.72666, "4185": 2.72916, "4190": 2.73806, "4195": 2.70222, "4200": 2.71066, "4205": 2.73922, "4210": 2.67309, "4215": 2.66565, "4220": 2.65928, "4225": 2.70242, "4230": 2.71409, "4235": 2.7326, "4240": 2.70413, "4245": 2.69658, "4250": 2.71363, "4255": 2.64837, "4260": 2.7266, "4265": 2.73863, "4270": 2.72157, "4275": 2.68943, "4280": 2.70233, "4285": 2.73114, "4290": 2.68765, "4295": 2.69223, "4300": 2.69956, "4305": 2.70313, "4310": 2.73003, "4315": 2.71191, "4320": 2.69906, "4325": 2.70557, "4330": 2.7106, "4335": 2.69172, "4340": 2.6976, "4345": 2.72675, "4350": 2.67431, "4355": 2.69349, "4360": 2.71041, "4365": 2.78314, "4370": 2.73369, "4375": 2.74431, "4380": 2.71504, "4385": 2.69901, "4390": 
2.70191, "4395": 2.75058, "4400": 2.66501, "4405": 2.66954, "4410": 2.68278, "4415": 2.70442, "4420": 2.7077, "4425": 2.72158, "4430": 2.69277, "4435": 2.68035, "4440": 2.69127, "4445": 2.67896, "4450": 2.65272, "4455": 2.69044, "4460": 2.70302, "4465": 2.70631, "4470": 2.6731, "4475": 2.68761, "4480": 2.65626, "4485": 2.69968, "4490": 2.65279, "4495": 2.70894, "4500": 2.70235, "4505": 2.69674, "4510": 2.64893, "4515": 2.70162, "4520": 2.66802, "4525": 2.66816, "4530": 2.6736, "4535": 2.67113, "4540": 2.70729, "4545": 2.65603, "4550": 2.70177, "4555": 2.68009, "4560": 2.65895, "4565": 2.63898, "4570": 2.6402, "4575": 2.66692, "4580": 2.68858, "4585": 2.68157, "4590": 2.61727, "4595": 2.66336, "4600": 2.67638, "4605": 2.68094, "4610": 2.66704, "4615": 2.66234, "4620": 2.65727, "4625": 2.68721, "4630": 2.6742, "4635": 2.64708, "4640": 2.69575, "4645": 2.64774, "4650": 2.7018, "4655": 2.70661, "4660": 2.67838, "4665": 2.68918, "4670": 2.67556, "4675": 2.68422, "4680": 2.66596, "4685": 2.65737, "4690": 2.70912, "4695": 2.65528, "4700": 2.67174, "4705": 2.65146, "4710": 2.68366, "4715": 2.64964, "4720": 2.72485, "4725": 2.62902, "4730": 2.65213, "4735": 2.68926, "4740": 2.64614, "4745": 2.65553, "4750": 2.65737, "4755": 2.65793, "4760": 2.66632, "4765": 2.64489, "4770": 2.62202, "4775": 2.65194, "4780": 2.65519, "4785": 2.68655, "4790": 2.65201, "4795": 2.67305, "4800": 2.62427, "4805": 2.64099, "4810": 2.65942, "4815": 2.65033, "4820": 2.6668, "4825": 2.65019, "4830": 2.6151, "4835": 2.64832, "4840": 2.65513, "4845": 2.6348, "4850": 2.62482, "4855": 2.60337, "4860": 2.65757, "4865": 2.62536, "4870": 2.63972, "4875": 2.61897, "4880": 2.62576, "4885": 2.62632, "4890": 2.67912, "4895": 2.65961, "4900": 2.618, "4905": 2.61823, "4910": 2.63845, "4915": 2.61463, "4920": 2.65397, "4925": 2.64838, "4930": 2.57129, "4935": 2.65193, "4940": 2.63034, "4945": 2.63777, "4950": 2.62825, "4955": 2.61794, "4960": 2.61856, "4965": 2.65951, "4970": 2.6008, "4975": 2.65676, "4980": 
2.62049, "4985": 2.63225, "4990": 2.65645, "4995": 2.58184, "5000": 2.6621, "5005": 2.6658, "5010": 2.68112, "5015": 2.63396, "5020": 2.64091, "5025": 2.68726, "5030": 2.64362, "5035": 2.61873, "5040": 2.62248, "5045": 2.60699, "5050": 2.62641, "5055": 2.65014, "5060": 2.64375, "5065": 2.68893, "5070": 2.60617, "5075": 2.61421, "5080": 2.61231, "5085": 2.60499, "5090": 2.59441, "5095": 2.65086, "5100": 2.64984, "5105": 2.61053, "5110": 2.66408, "5115": 2.62171, "5120": 2.67055, "5125": 2.6309, "5130": 2.615, "5135": 2.61462, "5140": 2.57424, "5145": 2.62966, "5150": 2.63646, "5155": 2.61887, "5160": 2.66278, "5165": 2.58409, "5170": 2.59136, "5175": 2.62185, "5180": 2.60659, "5185": 2.62099, "5190": 2.6266, "5195": 2.67047, "5200": 2.5968, "5205": 2.60868, "5210": 2.60701, "5215": 2.64792, "5220": 2.58826, "5225": 2.55166, "5230": 2.6359, "5235": 2.61417, "5240": 2.62802, "5245": 2.64006, "5250": 2.61297, "5255": 2.62612, "5260": 2.5619, "5265": 2.59802, "5270": 2.58865, "5275": 2.61781, "5280": 2.61032, "5285": 2.60442, "5290": 2.63245, "5295": 2.62071, "5300": 2.57979, "5305": 2.59834, "5310": 2.60591, "5315": 2.5881, "5320": 2.61539, "5325": 2.64615, "5330": 2.6015, "5335": 2.58439, "5340": 2.56291, "5345": 2.65819, "5350": 2.62526, "5355": 2.57953, "5360": 2.59528, "5365": 2.62373, "5370": 2.61518, "5375": 2.63002, "5380": 2.58083, "5385": 2.56502, "5390": 2.58666, "5395": 2.61597, "5400": 2.60909, "5405": 2.54774, "5410": 2.61298, "5415": 2.59619, "5420": 2.61443, "5425": 2.62678, "5430": 2.62674, "5435": 2.57707, "5440": 2.58734, "5445": 2.633, "5450": 2.6473, "5455": 2.61252, "5460": 2.59272, "5465": 2.60502, "5470": 2.60189, "5475": 2.62728, "5480": 2.58753, "5485": 2.59002, "5490": 2.57733, "5495": 2.57075, "5500": 2.56937, "5505": 2.61715, "5510": 2.62664, "5515": 2.58137, "5520": 2.55697, "5525": 2.5859, "5530": 2.66433, "5535": 2.62339, "5540": 2.57109, "5545": 2.59633, "5550": 2.54936, "5555": 2.57342, "5560": 2.56447, "5565": 2.60758, "5570": 2.65168, 
"5575": 2.63138, "5580": 2.57564, "5585": 2.59822, "5590": 2.56185, "5595": 2.58521, "5600": 2.55512, "5605": 2.59879, "5610": 2.58291, "5615": 2.58198, "5620": 2.58123, "5625": 2.55147, "5630": 2.57081, "5635": 2.63484, "5640": 2.59425, "5645": 2.56995, "5650": 2.58004, "5655": 2.54766, "5660": 2.55881, "5665": 2.58604, "5670": 2.56686, "5675": 2.60728, "5680": 2.52861, "5685": 2.56813, "5690": 2.6039, "5695": 2.55782, "5700": 2.59695, "5705": 2.596, "5710": 2.57921, "5715": 2.58424, "5720": 2.53643, "5725": 2.6038, "5730": 2.57366, "5735": 2.61087, "5740": 2.59519, "5745": 2.56, "5750": 2.54216, "5755": 2.55997, "5760": 2.62481, "5765": 2.56328, "5770": 2.5429, "5775": 2.58373, "5780": 2.57701, "5785": 2.53911, "5790": 2.56461, "5795": 2.60179, "5800": 2.54494, "5805": 2.53531, "5810": 2.55658, "5815": 2.52456, "5820": 2.59694, "5825": 2.50599, "5830": 2.49558, "5835": 2.59597, "5840": 2.53979, "5845": 2.5528, "5850": 2.61315, "5855": 2.5102, "5860": 2.56169, "5865": 2.51778, "5870": 2.57574, "5875": 2.60723, "5880": 2.58596, "5885": 2.56757, "5890": 2.58608, "5895": 2.55562, "5900": 2.61651, "5905": 2.55716, "5910": 2.59828, "5915": 2.61008, "5920": 2.58733, "5925": 2.55324, "5930": 2.57568, "5935": 2.55168, "5940": 2.57131, "5945": 2.5204, "5950": 2.55562, "5955": 2.586, "5960": 2.56741, "5965": 2.62046, "5970": 2.55594, "5975": 2.58503, "5980": 2.55843, "5985": 2.56032, "5990": 2.55653, "5995": 2.55873, "6000": 2.55658, "6005": 2.51961, "6010": 2.5612, "6015": 2.52607, "6020": 2.53453, "6025": 2.55768, "6030": 2.6046, "6035": 2.54228, "6040": 2.54868, "6045": 2.49077, "6050": 2.5963, "6055": 2.5204, "6060": 2.54409, "6065": 2.52518, "6070": 2.52918, "6075": 2.5364, "6080": 2.53607, "6085": 2.59714, "6090": 2.57034, "6095": 2.53592, "6100": 2.5428, "6105": 2.52487, "6110": 2.55483, "6115": 2.58495, "6120": 2.55695, "6125": 2.53683, "6130": 2.47322, "6135": 2.5563, "6140": 2.55589, "6145": 2.55739, "6150": 2.52565, "6155": 2.50872, "6160": 2.54299, "6165": 
2.57304, "6170": 2.54638, "6175": 2.60079, "6180": 2.51196, "6185": 2.55194, "6190": 2.49345, "6195": 2.57854, "6200": 2.55164, "6205": 2.5377, "6210": 2.52088, "6215": 2.51358, "6220": 2.56539, "6225": 2.51406, "6230": 2.51072, "6235": 2.56268, "6240": 2.55115, "6245": 2.52327, "6250": 2.53069, "6255": 2.57365, "6260": 2.52537, "6265": 2.57441, "6270": 2.52397, "6275": 2.56565, "6280": 2.52297, "6285": 2.5207, "6290": 2.51982, "6295": 2.50722, "6300": 2.55559, "6305": 2.52486, "6310": 2.51259, "6315": 2.53731, "6320": 2.4894, "6325": 2.59818, "6330": 2.555, "6335": 2.51085, "6340": 2.51313, "6345": 2.55702, "6350": 2.556, "6355": 2.52448, "6360": 2.52293, "6365": 2.48409, "6370": 2.53563, "6375": 2.49779, "6380": 2.56282, "6385": 2.58189, "6390": 2.50441, "6395": 2.55121, "6400": 2.5086, "6405": 2.5278, "6410": 2.51466, "6415": 2.52482, "6420": 2.54258, "6425": 2.53509, "6430": 2.57978, "6435": 2.54444, "6440": 2.53907, "6445": 2.53125, "6450": 2.53474, "6455": 2.52399, "6460": 2.51849, "6465": 2.56225, "6470": 2.52104, "6475": 2.52654, "6480": 2.48826, "6485": 2.52861, "6490": 2.50978, "6495": 2.49978, "6500": 2.52402, "6505": 2.49432, "6510": 2.54199, "6515": 2.5101, "6520": 2.51003, "6525": 2.49503, "6530": 2.54392, "6535": 2.53282, "6540": 2.53291, "6545": 2.56194, "6550": 2.50127, "6555": 2.55627, "6560": 2.51016, "6565": 2.52281, "6570": 2.58445, "6575": 2.52324, "6580": 2.49815, "6585": 2.50823, "6590": 2.5097, "6595": 2.49807, "6600": 2.49539, "6605": 2.54253, "6610": 2.4797, "6615": 2.56766, "6620": 2.53402, "6625": 2.51202, "6630": 2.51431, "6635": 2.47464, "6640": 2.54106, "6645": 2.59681, "6650": 2.51024, "6655": 2.4983, "6660": 2.57419, "6665": 2.52156, "6670": 2.5674, "6675": 2.46861, "6680": 2.54697, "6685": 2.53564, "6690": 2.51427, "6695": 2.48573, "6700": 2.52463, "6705": 2.52218, "6710": 2.49347, "6715": 2.51687, "6720": 2.50996, "6725": 2.52089, "6730": 2.52013, "6735": 2.4825, "6740": 2.51535, "6745": 2.49672, "6750": 2.55754, "6755": 2.47484, 
"6760": 2.54212, "6765": 2.48878, "6770": 2.51847, "6775": 2.50828, "6780": 2.53878, "6785": 2.47177, "6790": 2.54553, "6795": 2.49868, "6800": 2.52671, "6805": 2.51099, "6810": 2.50296, "6815": 2.52064, "6820": 2.48696, "6825": 2.5071, "6830": 2.54063, "6835": 2.50678, "6840": 2.50885, "6845": 2.52492, "6850": 2.47583, "6855": 2.512, "6860": 2.50239, "6865": 2.49001, "6870": 2.55392, "6875": 2.47561, "6880": 2.55072, "6885": 2.47892, "6890": 2.54905, "6895": 2.50384, "6900": 2.49072, "6905": 2.51205, "6910": 2.5215, "6915": 2.51823, "6920": 2.5328, "6925": 2.54741, "6930": 2.49289, "6935": 2.521, "6940": 2.50604, "6945": 2.46237, "6950": 2.48628, "6955": 2.5288, "6960": 2.51952, "6965": 2.49196, "6970": 2.47065, "6975": 2.52409, "6980": 2.45258, "6985": 2.51631, "6990": 2.52932, "6995": 2.46179, "7000": 2.49172, "7005": 2.47011, "7010": 2.47632, "7015": 2.51983, "7020": 2.46705, "7025": 2.45424, "7030": 2.48487, "7035": 2.47988, "7040": 2.50783, "7045": 2.52359, "7050": 2.52831, "7055": 2.44161, "7060": 2.47409, "7065": 2.48138, "7070": 2.48981, "7075": 2.49452, "7080": 2.53479, "7085": 2.48717, "7090": 2.47618, "7095": 2.4999, "7100": 2.51585, "7105": 2.4884, "7110": 2.487, "7115": 2.50558, "7120": 2.47286, "7125": 2.46376, "7130": 2.48693, "7135": 2.51456, "7140": 2.50032, "7145": 2.49769, "7150": 2.51016, "7155": 2.50401, "7160": 2.47274, "7165": 2.45638, "7170": 2.50459, "7175": 2.50355, "7180": 2.50497, "7185": 2.48172, "7190": 2.46296, "7195": 2.46639, "7200": 2.50998, "7205": 2.49029, "7210": 2.44246, "7215": 2.47885, "7220": 2.4456, "7225": 2.51269, "7230": 2.50805, "7235": 2.48249, "7240": 2.47867, "7245": 2.50035, "7250": 2.50922, "7255": 2.49324, "7260": 2.46058, "7265": 2.45308, "7270": 2.47086, "7275": 2.49781, "7280": 2.49343, "7285": 2.42363, "7290": 2.47944, "7295": 2.48626, "7300": 2.41751, "7305": 2.44554, "7310": 2.44899, "7315": 2.48986, "7320": 2.48389, "7325": 2.45917, "7330": 2.4893, "7335": 2.47688, "7340": 2.46486, "7345": 2.49515, "7350": 
2.5106, "7355": 2.49669, "7360": 2.48037, "7365": 2.46906, "7370": 2.47138, "7375": 2.4508, "7380": 2.49622, "7385": 2.48448, "7390": 2.47337, "7395": 2.47339, "7400": 2.48169, "7405": 2.43994, "7410": 2.48078, "7415": 2.47113, "7420": 2.49398, "7425": 2.45774, "7430": 2.52358, "7435": 2.49185, "7440": 2.52151, "7445": 2.5101, "7450": 2.4751, "7455": 2.45401, "7460": 2.46474, "7465": 2.47685, "7470": 2.44899, "7475": 2.45681, "7480": 2.51145, "7485": 2.45042, "7490": 2.47478, "7495": 2.48246, "7500": 2.49584, "7505": 2.44104, "7510": 2.43501, "7515": 2.41997, "7520": 2.49389, "7525": 2.49884, "7530": 2.47668, "7535": 2.4601, "7540": 2.47288, "7545": 2.47471, "7550": 2.49181, "7555": 2.45487, "7560": 2.42922, "7565": 2.51106, "7570": 2.4857, "7575": 2.439, "7580": 2.45825, "7585": 2.48256, "7590": 2.48193, "7595": 2.46508, "7600": 2.46362, "7605": 2.44863, "7610": 2.44948, "7615": 2.42526, "7620": 2.54441, "7625": 2.47879, "7630": 2.42526, "7635": 2.42739, "7640": 2.45364, "7645": 2.47151, "7650": 2.46303, "7655": 2.48304, "7660": 2.4532, "7665": 2.4342, "7670": 2.4426, "7675": 2.45588, "7680": 2.48517, "7685": 2.43208, "7690": 2.48, "7695": 2.45485, "7700": 2.48159, "7705": 2.49878, "7710": 2.49483, "7715": 2.44384, "7720": 2.4696, "7725": 2.47981, "7730": 2.45864, "7735": 2.47057, "7740": 2.43882, "7745": 2.45157, "7750": 2.43921, "7755": 2.46722, "7760": 2.45122, "7765": 2.45511, "7770": 2.47144, "7775": 2.45332, "7780": 2.41653, "7785": 2.44516, "7790": 2.48285, "7795": 2.44125, "7800": 2.46355, "7805": 2.48202, "7810": 2.50258, "7815": 2.48733, "7820": 2.44788, "7825": 2.51471, "7830": 2.45477, "7835": 2.4697, "7840": 2.47907, "7845": 2.46064, "7850": 2.41717, "7855": 2.47244, "7860": 2.49887, "7865": 2.42434, "7870": 2.46693, "7875": 2.44544, "7880": 2.45287, "7885": 2.46023, "7890": 2.47026, "7895": 2.44872, "7900": 2.4404, "7905": 2.43773, "7910": 2.42565, "7915": 2.48107, "7920": 2.47699, "7925": 2.4218, "7930": 2.47199, "7935": 2.44975, "7940": 2.42126, 
"7945": 2.46977, "7950": 2.44424, "7955": 2.4204, "7960": 2.49038, "7965": 2.5188, "7970": 2.52207, "7975": 2.44798, "7980": 2.44076, "7985": 2.46872, "7990": 2.43169, "7995": 2.46954, "8000": 2.43641, "8005": 2.41891, "8010": 2.45749, "8015": 2.46841, "8020": 2.48116, "8025": 2.47363, "8030": 2.45173, "8035": 2.47071, "8040": 2.41983, "8045": 2.45333, "8050": 2.44721, "8055": 2.42302, "8060": 2.44253, "8065": 2.46158, "8070": 2.4567, "8075": 2.46077, "8080": 2.44618, "8085": 2.44085, "8090": 2.42787, "8095": 2.42397, "8100": 2.43904, "8105": 2.49479, "8110": 2.43878, "8115": 2.58899, "8120": 2.49362, "8125": 2.47876, "8130": 2.45879, "8135": 2.4574, "8140": 2.44166, "8145": 2.42774, "8150": 2.42089, "8155": 2.48312, "8160": 2.45131, "8165": 2.43947, "8170": 2.43326, "8175": 2.42092, "8180": 2.4946, "8185": 2.42477, "8190": 2.46908, "8195": 2.45732, "8200": 2.44651, "8205": 2.44406, "8210": 2.43096, "8215": 2.44122, "8220": 2.43556, "8225": 2.41067, "8230": 2.44055, "8235": 2.46438, "8240": 2.42694, "8245": 2.44767, "8250": 2.44524, "8255": 2.43772, "8260": 2.43153, "8265": 2.42903, "8270": 2.4363, "8275": 2.44197, "8280": 2.39831, "8285": 2.4405, "8290": 2.48021, "8295": 2.44762, "8300": 2.45931, "8305": 2.40847, "8310": 2.43461, "8315": 2.45616, "8320": 2.40422, "8325": 2.39725, "8330": 2.43986, "8335": 2.44684, "8340": 2.49212, "8345": 2.44942, "8350": 2.45049, "8355": 2.40704, "8360": 2.40131, "8365": 2.45443, "8370": 2.45427, "8375": 2.42518, "8380": 2.41939, "8385": 2.42541, "8390": 2.4387, "8395": 2.44193, "8400": 2.44114, "8405": 2.49132, "8410": 2.4383, "8415": 2.43519, "8420": 2.41861, "8425": 2.44324, "8430": 2.46253, "8435": 2.40559, "8440": 2.45227, "8445": 2.45999, "8450": 2.40867, "8455": 2.46028, "8460": 2.45495, "8465": 2.43629, "8470": 2.40854, "8475": 2.47887, "8480": 2.40222, "8485": 2.41392, "8490": 2.46612, "8495": 2.43613, "8500": 2.44492, "8505": 2.40329, "8510": 2.40218, "8515": 2.42871, "8520": 2.42574, "8525": 2.49152, "8530": 2.3746, 
"8535": 2.40109, "8540": 2.48679, "8545": 2.3811, "8550": 2.43875, "8555": 2.4514, "8560": 2.47019, "8565": 2.42055, "8570": 2.43185, "8575": 2.44959, "8580": 2.44124, "8585": 2.42059, "8590": 2.4038, "8595": 2.42895, "8600": 2.41116, "8605": 2.49131, "8610": 2.42052, "8615": 2.38808, "8620": 2.45039, "8625": 2.42523, "8630": 2.45471, "8635": 2.4509, "8640": 2.43534, "8645": 2.47406, "8650": 2.42305, "8655": 2.45293, "8660": 2.45576, "8665": 2.38622, "8670": 2.41139, "8675": 2.42943, "8680": 2.44841, "8685": 2.43079, "8690": 2.41017, "8695": 2.44311, "8700": 2.43428, "8705": 2.42016, "8710": 2.42854, "8715": 2.44862, "8720": 2.47696, "8725": 2.41012, "8730": 2.39278, "8735": 2.43505, "8740": 2.43198, "8745": 2.39801, "8750": 2.43609, "8755": 2.42381, "8760": 2.40031, "8765": 2.43541, "8770": 2.40569, "8775": 2.43812, "8780": 2.42153, "8785": 2.47144, "8790": 2.42041, "8795": 2.41876, "8800": 2.41592, "8805": 2.40548, "8810": 2.41139, "8815": 2.47509, "8820": 2.45362, "8825": 2.4241, "8830": 2.38744, "8835": 2.42258, "8840": 2.39347, "8845": 2.42679, "8850": 2.43485, "8855": 2.4044, "8860": 2.42715, "8865": 2.42631, "8870": 2.43391, "8875": 2.44152, "8880": 2.41099, "8885": 2.39514, "8890": 2.44614, "8895": 2.42902, "8900": 2.41354, "8905": 2.40085, "8910": 2.4019, "8915": 2.4163, "8920": 2.43454, "8925": 2.46713, "8930": 2.41511, "8935": 2.40784, "8940": 2.38869, "8945": 2.39353, "8950": 2.41789, "8955": 2.39534, "8960": 2.43426, "8965": 2.41798, "8970": 2.40536, "8975": 2.47767, "8980": 2.44109, "8985": 2.37482, "8990": 2.41061, "8995": 2.416, "9000": 2.45568, "9005": 2.41279, "9010": 2.37662, "9015": 2.41141, "9020": 2.40089, "9025": 2.3701, "9030": 2.40026, "9035": 2.4243, "9040": 2.42079, "9045": 2.41805, "9050": 2.39505, "9055": 2.41785, "9060": 2.41922, "9065": 2.40527, "9070": 2.44454, "9075": 2.39395, "9080": 2.43398, "9085": 2.4136, "9090": 2.41293, "9095": 2.39793, "9100": 2.40135, "9105": 2.35782, "9110": 2.46451, "9115": 2.41499, "9120": 2.40368, 
"9125": 2.45804, "9130": 2.39387, "9135": 2.44878, "9140": 2.43562, "9145": 2.42684, "9150": 2.42505, "9155": 2.3752, "9160": 2.41724, "9165": 2.42569, "9170": 2.37359, "9175": 2.41857, "9180": 2.37803, "9185": 2.43942, "9190": 2.41281, "9195": 2.40662, "9200": 2.39186, "9205": 2.44999, "9210": 2.36248, "9215": 2.46363, "9220": 2.44779, "9225": 2.3828, "9230": 2.44575, "9235": 2.39772, "9240": 2.40182, "9245": 2.43796, "9250": 2.43806, "9255": 2.4326, "9260": 2.38813, "9265": 2.43977, "9270": 2.43657, "9275": 2.39535, "9280": 2.39074, "9285": 2.42225, "9290": 2.40437, "9295": 2.38603, "9300": 2.42495, "9305": 2.40579, "9310": 2.41555, "9315": 2.41153, "9320": 2.44493, "9325": 2.37049, "9330": 2.40434, "9335": 2.36191, "9340": 2.40835, "9345": 2.41458, "9350": 2.44039, "9355": 2.47763, "9360": 2.43745, "9365": 2.38821, "9370": 2.43648, "9375": 2.43331, "9380": 2.35346, "9385": 2.39958, "9390": 2.38109, "9395": 2.38731, "9400": 2.44471, "9405": 2.41259, "9410": 2.39756, "9415": 2.43759, "9420": 2.4441, "9425": 2.43656, "9430": 2.45071, "9435": 2.41453, "9440": 2.47761, "9445": 2.37622, "9450": 2.39383, "9455": 2.40249, "9460": 2.38597, "9465": 2.3775, "9470": 2.38205, "9475": 2.36454, "9480": 2.43551, "9485": 2.38642, "9490": 2.4204, "9495": 2.38165, "9500": 2.36325, "9505": 2.4296, "9510": 2.39916, "9515": 2.43096, "9520": 2.41792, "9525": 2.38898, "9530": 2.45385, "9535": 2.40151, "9540": 2.41839, "9545": 2.37813, "9550": 2.42143, "9555": 2.39054, "9560": 2.42191, "9565": 2.40523, "9570": 2.37157, "9575": 2.41109, "9580": 2.39564, "9585": 2.42353, "9590": 2.42924, "9595": 2.44777, "9600": 2.39117, "9605": 2.38431, "9610": 2.42142, "9615": 2.41558, "9620": 2.41413, "9625": 2.44723, "9630": 2.39712, "9635": 2.40396, "9640": 2.44817, "9645": 2.4109, "9650": 2.39894, "9655": 2.37366, "9660": 2.42329, "9665": 2.39029, "9670": 2.38274, "9675": 2.35662, "9680": 2.39869, "9685": 2.40199, "9690": 2.46804, "9695": 2.38133, "9700": 2.37698, "9705": 2.38453, "9710": 2.36554, 
"9715": 2.38868, "9720": 2.43552, "9725": 2.4413, "9730": 2.42919, "9735": 2.38684, "9740": 2.38077, "9745": 2.42676, "9750": 2.3991, "9755": 2.40788, "9760": 2.41084, "9765": 2.37036, "9770": 2.43675, "9775": 2.40145, "9780": 2.36196, "9785": 2.40085, "9790": 2.40714, "9795": 2.3593, "9800": 2.39629, "9805": 2.40561, "9810": 2.41066, "9815": 2.37884, "9820": 2.37671, "9825": 2.40364, "9830": 2.42194, "9835": 2.3861, "9840": 2.41457, "9845": 2.36502, "9850": 2.39824, "9855": 2.39496, "9860": 2.3972, "9865": 2.38197, "9870": 2.39342, "9875": 2.38398, "9880": 2.45319, "9885": 2.39313, "9890": 2.35399, "9895": 2.32116, "9900": 2.3962, "9905": 2.42494, "9910": 2.35642, "9915": 2.36473, "9920": 2.41154, "9925": 2.39863, "9930": 2.38182, "9935": 2.35063, "9940": 2.38377, "9945": 2.37842, "9950": 2.40342, "9955": 2.44928, "9960": 2.43108, "9965": 2.35851, "9970": 2.41017, "9975": 2.38564, "9980": 2.33084, "9985": 2.40772, "9990": 2.39761, "9995": 2.39543, "10000": 2.36621, "10005": 2.37213, "10010": 2.38256, "10015": 2.44495, "10020": 2.36326, "10025": 2.38851, "10030": 2.38817, "10035": 2.40993, "10040": 2.40515, "10045": 2.3831, "10050": 2.34965, "10055": 2.36805, "10060": 2.42146, "10065": 2.37528, "10070": 2.42235, "10075": 2.37088, "10080": 2.36211, "10085": 2.36918, "10090": 2.34573, "10095": 2.40221, "10100": 2.31408, "10105": 2.38253, "10110": 2.40897, "10115": 2.38736, "10120": 2.35801, "10125": 2.37033, "10130": 2.36037, "10135": 2.38382, "10140": 2.4139, "10145": 2.40714, "10150": 2.37532, "10155": 2.39536, "10160": 2.36205, "10165": 2.38369, "10170": 2.4236, "10175": 2.32447, "10180": 2.39651, "10185": 2.3824, "10190": 2.44396, "10195": 2.40416, "10200": 2.38955, "10205": 2.38797, "10210": 2.36805, "10215": 2.34261, "10220": 2.41843, "10225": 2.43079, "10230": 2.35627, "10235": 2.38764, "10240": 2.37226, "10245": 2.39117, "10250": 2.38838, "10255": 2.41316, "10260": 2.33469, "10265": 2.34846, "10270": 2.34979, "10275": 2.3717, "10280": 2.4513, "10285": 
2.35906, "10290": 2.3861, "10295": 2.375, "10300": 2.36936, "10305": 2.41578, "10310": 2.38877, "10315": 2.36095, "10320": 2.36607, "10325": 2.36094, "10330": 2.41247, "10335": 2.36135, "10340": 2.41934, "10345": 2.36966, "10350": 2.35686, "10355": 2.39609, "10360": 2.37338, "10365": 2.36225, "10370": 2.34061, "10375": 2.3585, "10380": 2.41953, "10385": 2.40576, "10390": 2.38058, "10395": 2.35968, "10400": 2.37919, "10405": 2.34877, "10410": 2.3389, "10415": 2.41664, "10420": 2.37924, "10425": 2.32522, "10430": 2.35941, "10435": 2.37129, "10440": 2.3711, "10445": 2.35949, "10450": 2.36154, "10455": 2.38113, "10460": 2.38064, "10465": 2.30273, "10470": 2.3577, "10475": 2.37958, "10480": 2.36276, "10485": 2.36137, "10490": 2.41283, "10495": 2.36502, "10500": 2.36277, "10505": 2.37018, "10510": 2.38172, "10515": 2.37393, "10520": 2.40259, "10525": 2.39024, "10530": 2.39211, "10535": 2.35551, "10540": 2.40461, "10545": 2.35856, "10550": 2.37752, "10555": 2.35793, "10560": 2.34025, "10565": 2.37346, "10570": 2.37536, "10575": 2.3535, "10580": 2.37788, "10585": 2.36682, "10590": 2.37817, "10595": 2.37713, "10600": 2.33146, "10605": 2.3724, "10610": 2.36498, "10615": 2.36379, "10620": 2.34659, "10625": 2.41843, "10630": 2.36855, "10635": 2.32266, "10640": 2.36413, "10645": 2.42158, "10650": 2.36174, "10655": 2.30869, "10660": 2.34689, "10665": 2.39981, "10670": 2.31617, "10675": 2.41612, "10680": 2.35445, "10685": 2.28871, "10690": 2.38456, "10695": 2.33038, "10700": 2.38407, "10705": 2.38432, "10710": 2.34313, "10715": 2.3828, "10720": 2.32518, "10725": 2.35278, "10730": 2.34872, "10735": 2.35338, "10740": 2.31849, "10745": 2.33808, "10750": 2.33362, "10755": 2.4041, "10760": 2.36431, "10765": 2.33591, "10770": 2.36802, "10775": 2.38746, "10780": 2.36985, "10785": 2.39167, "10790": 2.34599, "10795": 2.38556, "10800": 2.32491, "10805": 2.39755, "10810": 2.37536, "10815": 2.35431, "10820": 2.34323, "10825": 2.37192, "10830": 2.33781, "10835": 2.3477, "10840": 2.32993, 
"10845": 2.38645, "10850": 2.33282, "10855": 2.36654, "10860": 2.33304, "10865": 2.32192, "10870": 2.32311, "10875": 2.30406, "10880": 2.39356, "10885": 2.40455, "10890": 2.36115, "10895": 2.37301, "10900": 2.33176, "10905": 2.31266, "10910": 2.40728, "10915": 2.37119, "10920": 2.37413, "10925": 2.36306, "10930": 2.31881, "10935": 2.36035, "10940": 2.35501, "10945": 2.34689, "10950": 2.36286, "10955": 2.3644, "10960": 2.30987, "10965": 2.3635, "10970": 2.35624, "10975": 2.40775, "10980": 2.37303, "10985": 2.3427, "10990": 2.39729, "10995": 2.36387, "11000": 2.33714, "11005": 2.36117, "11010": 2.34243, "11015": 2.32557, "11020": 2.3346, "11025": 2.36577, "11030": 2.34044, "11035": 2.31307, "11040": 2.31887, "11045": 2.31738, "11050": 2.31805, "11055": 2.28859, "11060": 2.33998, "11065": 2.31013, "11070": 2.39402, "11075": 2.32015, "11080": 2.35427, "11085": 2.33669, "11090": 2.34632, "11095": 2.37084, "11100": 2.32912, "11105": 2.31663, "11110": 2.36288, "11115": 2.37225, "11120": 2.38139, "11125": 2.31341, "11130": 2.34997, "11135": 2.3336, "11140": 2.37217, "11145": 2.35107, "11150": 2.39612, "11155": 2.34114, "11160": 2.3659, "11165": 2.36388, "11170": 2.34098, "11175": 2.33474, "11180": 2.37348, "11185": 2.31203, "11190": 2.27804, "11195": 2.32819, "11200": 2.34726, "11205": 2.36258, "11210": 2.33385, "11215": 2.31927, "11220": 2.34329, "11225": 2.37141, "11230": 2.36569, "11235": 2.32069, "11240": 2.34092, "11245": 2.35748, "11250": 2.3324, "11255": 2.33515, "11260": 2.35577, "11265": 2.38918, "11270": 2.28782, "11275": 2.31519, "11280": 2.36893, "11285": 2.29387, "11290": 2.34639, "11295": 2.3655, "11300": 2.38111, "11305": 2.33495, "11310": 2.32963, "11315": 2.29825, "11320": 2.30482, "11325": 2.31462, "11330": 2.35421, "11335": 2.33831, "11340": 2.30841, "11345": 2.31278, "11350": 2.29588, "11355": 2.3219, "11360": 2.35153, "11365": 2.29378, "11370": 2.35263, "11375": 2.32804, "11380": 2.34006, "11385": 2.34763, "11390": 2.33477, "11395": 2.28732, "11400": 
2.30981, "11405": 2.35647, "11410": 2.35502, "11415": 2.38458, "11420": 2.35172, "11425": 2.30761, "11430": 2.36718, "11435": 2.36201, "11440": 2.34796, "11445": 2.36318, "11450": 2.32182, "11455": 2.30476, "11460": 2.35092, "11465": 2.34386, "11470": 2.37434, "11475": 2.31342, "11480": 2.32527, "11485": 2.30987, "11490": 2.34568, "11495": 2.406, "11500": 2.33937, "11505": 2.35014, "11510": 2.36223, "11515": 2.32176, "11520": 2.30507, "11525": 2.36152, "11530": 2.31469, "11535": 2.32196, "11540": 2.34627, "11545": 2.34321, "11550": 2.36438, "11555": 2.32533, "11560": 2.34981, "11565": 2.34125, "11570": 2.34916, "11575": 2.29628, "11580": 2.32931, "11585": 2.35173, "11590": 2.36158, "11595": 2.33454, "11600": 2.35704, "11605": 2.3235, "11610": 2.36089, "11615": 2.35899, "11620": 2.29569, "11625": 2.2757, "11630": 2.32782, "11635": 2.34204, "11640": 2.30488, "11645": 2.30751, "11650": 2.32628, "11655": 2.35114, "11660": 2.33566, "11665": 2.32994, "11670": 2.30002, "11675": 2.29666, "11680": 2.32542, "11685": 2.33637, "11690": 2.34433, "11695": 2.31688, "11700": 2.32535, "11705": 2.3009, "11710": 2.34479, "11715": 2.31575, "11720": 2.29844, "11725": 2.33988, "11730": 2.30403, "11735": 2.32822, "11740": 2.27122, "11745": 2.31714, "11750": 2.32793, "11755": 2.35133, "11760": 2.31357, "11765": 2.3378, "11770": 2.27597, "11775": 2.32591, "11780": 2.25511, "11785": 2.2973, "11790": 2.31403, "11795": 2.32024, "11800": 2.3345, "11805": 2.30403, "11810": 2.30398, "11815": 2.33078, "11820": 2.32015, "11825": 2.36083, "11830": 2.31663, "11835": 2.33741, "11840": 2.34081, "11845": 2.31727, "11850": 2.30496, "11855": 2.31403, "11860": 2.34333, "11865": 2.35836, "11870": 2.37861, "11875": 2.28155, "11880": 2.29163, "11885": 2.33553, "11890": 2.29241, "11895": 2.29059, "11900": 2.33401, "11905": 2.31769, "11910": 2.27783, "11915": 2.31082, "11920": 2.33519, "11925": 2.30272, "11930": 2.30681, "11935": 2.31569, "11940": 2.3175, "11945": 2.34208, "11950": 2.29773, "11955": 2.31327, 
"11960": 2.33576, "11965": 2.29584, "11970": 2.28204, "11975": 2.33575, "11980": 2.30612, "11985": 2.2776, "11990": 2.30416, "11995": 2.33013, "12000": 2.32323, "12005": 2.32565, "12010": 2.2884, "12015": 2.30861, "12020": 2.32922, "12025": 2.33525, "12030": 2.31246, "12035": 2.33617, "12040": 2.3154, "12045": 2.3126, "12050": 2.30835, "12055": 2.33352, "12060": 2.29764, "12065": 2.32975, "12070": 2.30319, "12075": 2.2775, "12080": 2.35063, "12085": 2.33812, "12090": 2.33359, "12095": 2.28176, "12100": 2.31543, "12105": 2.30903, "12110": 2.33029, "12115": 2.3036, "12120": 2.30606, "12125": 2.29484, "12130": 2.30409, "12135": 2.32842, "12140": 2.29591, "12145": 2.25622, "12150": 2.26125, "12155": 2.34249, "12160": 2.35771, "12165": 2.31914, "12170": 2.3336, "12175": 2.3412, "12180": 2.33054, "12185": 2.34135, "12190": 2.33375, "12195": 2.29767, "12200": 2.30036, "12205": 2.32225, "12210": 2.35697, "12215": 2.30437, "12220": 2.2987, "12225": 2.24241, "12230": 2.33348, "12235": 2.33945, "12240": 2.32345, "12245": 2.28764, "12250": 2.27397, "12255": 2.33706, "12260": 2.31368, "12265": 2.34287, "12270": 2.31292, "12275": 2.31361, "12280": 2.31869, "12285": 2.28631, "12290": 2.31074, "12295": 2.26654, "12300": 2.32931, "12305": 2.26821, "12310": 2.28768, "12315": 2.3543, "12320": 2.2963, "12325": 2.32045, "12330": 2.30113, "12335": 2.3194, "12340": 2.34117, "12345": 2.36885, "12350": 2.34318, "12355": 2.30683, "12360": 2.31344, "12365": 2.32933, "12370": 2.29273, "12375": 2.29957, "12380": 2.29184, "12385": 2.29061, "12390": 2.25018, "12395": 2.30421, "12400": 2.29905, "12405": 2.31088, "12410": 2.30419, "12415": 2.28306, "12420": 2.31729, "12425": 2.30099, "12430": 2.31571, "12435": 2.30048, "12440": 2.33123, "12445": 2.3202, "12450": 2.30745, "12455": 2.24018, "12460": 2.33488, "12465": 2.36363, "12470": 2.27626, "12475": 2.27276, "12480": 2.29139, "12485": 2.30632, "12490": 2.33128, "12495": 2.26961, "12500": 2.32122, "12505": 2.3351, "12510": 2.35582, "12515": 
2.27062, "12520": 2.31971, "12525": 2.28653, "12530": 2.32054, "12535": 2.27138, "12540": 2.28491, "12545": 2.29049, "12550": 2.31572, "12555": 2.32333, "12560": 2.30023, "12565": 2.3353, "12570": 2.27829, "12575": 2.29941, "12580": 2.31153, "12585": 2.29201, "12590": 2.33455, "12595": 2.3227, "12600": 2.28167, "12605": 2.31996, "12610": 2.3631, "12615": 2.30567, "12620": 2.33322, "12625": 2.32935, "12630": 2.29885, "12635": 2.33561, "12640": 2.29568, "12645": 2.27902, "12650": 2.32556, "12655": 2.2647, "12660": 2.34199, "12665": 2.31843, "12670": 2.3097, "12675": 2.31886, "12680": 2.27525, "12685": 2.3664, "12690": 2.30452, "12695": 2.33199, "12700": 2.29244, "12705": 2.30628, "12710": 2.30837, "12715": 2.28749, "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan"}}, "num-zeros": {"start_step": 1, "end_step": 13000, "step_interval": 5, "values": {"1": 956236544.0, "5": 967337600.0, "10": 971388224.0, "15": 946439424.0, "20": 961330240.0, "25": 1083876480.0, "30": 1211133312.0, "35": 1297707520.0, "40": 1271785728.0, "45": 1175048064.0, "50": 1126729728.0, "55": 1083975424.0, "60": 1045060608.0, "65": 1026047360.0, "70": 
995721280.0, "75": 986257152.0, "80": 1010241664.0, "85": 1006739968.0, "90": 988780736.0, "95": 959700032.0, "100": 971861632.0, "105": 980754624.0, "110": 977222528.0, "115": 978430848.0, "120": 961162432.0, "125": 942469184.0, "130": 977095104.0, "135": 966160128.0, "140": 963476928.0, "145": 976512384.0, "150": 921597184.0, "155": 968134336.0, "160": 956383232.0, "165": 959869952.0, "170": 974372224.0, "175": 949013120.0, "180": 946688448.0, "185": 972006784.0, "190": 969055488.0, "195": 985121664.0, "200": 945774592.0, "205": 958353792.0, "210": 979445248.0, "215": 967478208.0, "220": 956423424.0, "225": 962400768.0, "230": 948177792.0, "235": 965221120.0, "240": 966072192.0, "245": 969161216.0, "250": 974435968.0, "255": 925063296.0, "260": 965635968.0, "265": 970660352.0, "270": 959131264.0, "275": 954001216.0, "280": 963427648.0, "285": 945777408.0, "290": 974124544.0, "295": 966704640.0, "300": 967140096.0, "305": 964514048.0, "310": 940354048.0, "315": 967404800.0, "320": 969006080.0, "325": 980552832.0, "330": 972090752.0, "335": 946865984.0, "340": 966598784.0, "345": 973025856.0, "350": 973918720.0, "355": 963261696.0, "360": 948351680.0, "365": 964821248.0, "370": 962952704.0, "375": 958446848.0, "380": 947153280.0, "385": 955988608.0, "390": 945399616.0, "395": 970423552.0, "400": 979770112.0, "405": 968344320.0, "410": 970058752.0, "415": 953158528.0, "420": 943569920.0, "425": 954774144.0, "430": 962663232.0, "435": 977082240.0, "440": 954811392.0, "445": 971894272.0, "450": 963512576.0, "455": 973134720.0, "460": 983714688.0, "465": 945280512.0, "470": 942055616.0, "475": 967007104.0, "480": 966107264.0, "485": 976414528.0, "490": 962538880.0, "495": 945454464.0, "500": 964454656.0, "505": 986005440.0, "510": 965682944.0, "515": 943411584.0, "520": 945017408.0, "525": 971262208.0, "530": 971890688.0, "535": 979140352.0, "540": 969531264.0, "545": 954116608.0, "550": 951267584.0, "555": 987219456.0, "560": 960428288.0, "565": 966616320.0, "570": 
975727488.0, "575": 927224960.0, "580": 970694528.0, "585": 961176064.0, "590": 972967040.0, "595": 963682816.0, "600": 937079168.0, "605": 951470208.0, "610": 963360768.0, "615": 970009728.0, "620": 976472192.0, "625": 949579776.0, "630": 954445504.0, "635": 986042816.0, "640": 980980992.0, "645": 955010560.0, "650": 958545664.0, "655": 951656640.0, "660": 961043712.0, "665": 967552000.0, "670": 962514304.0, "675": 968337536.0, "680": 965619200.0, "685": 962871040.0, "690": 961921088.0, "695": 954770368.0, "700": 970340608.0, "705": 945512640.0, "710": 943885440.0, "715": 973357568.0, "720": 968369856.0, "725": 978489984.0, "730": 952195008.0, "735": 948813952.0, "740": 955633408.0, "745": 975866880.0, "750": 981235072.0, "755": 962156608.0, "760": 951964800.0, "765": 967343616.0, "770": 976148096.0, "775": 970544000.0, "780": 977540928.0, "785": 931529024.0, "790": 960441536.0, "795": 964582016.0, "800": 967022848.0, "805": 962321024.0, "810": 940969344.0, "815": 949037568.0, "820": 953181440.0, "825": 954502400.0, "830": 976442240.0, "835": 956073344.0, "840": 948401920.0, "845": 965153024.0, "850": 966029248.0, "855": 960904384.0, "860": 976027200.0, "865": 938157824.0, "870": 966414016.0, "875": 972314880.0, "880": 963120896.0, "885": 967745600.0, "890": 949967872.0, "895": 960019072.0, "900": 974229696.0, "905": 963968256.0, "910": 958435072.0, "915": 956354560.0, "920": 943974592.0, "925": 960833728.0, "930": 978845952.0, "935": 971073664.0, "940": 960905792.0, "945": 945063040.0, "950": 957423360.0, "955": 979035520.0, "960": 983589248.0, "965": 966165824.0, "970": 951228672.0, "975": 961577344.0, "980": 968071040.0, "985": 968991872.0, "990": 984393024.0, "995": 953291264.0, "1000": 934780480.0, "1005": 960147328.0, "1010": 971538624.0, "1015": 985184896.0, "1020": 962780928.0, "1025": 935009408.0, "1030": 974679936.0, "1035": 964992384.0, "1040": 980464256.0, "1045": 960826496.0, "1050": 955197824.0, "1055": 957780352.0, "1060": 967748800.0, "1065": 
967116352.0, "1070": 966600064.0, "1075": 950061696.0, "1080": 954508544.0, "1085": 967251712.0, "1090": 977132800.0, "1095": 961237632.0, "1100": 979613568.0, "1105": 953365120.0, "1110": 965954176.0, "1115": 966986944.0, "1120": 970350592.0, "1125": 965707776.0, "1130": 954942400.0, "1135": 965843328.0, "1140": 965176384.0, "1145": 970988224.0, "1150": 955556864.0, "1155": 930578432.0, "1160": 957774208.0, "1165": 978124736.0, "1170": 974299520.0, "1175": 973059648.0, "1180": 973083648.0, "1185": 947344640.0, "1190": 964793216.0, "1195": 953138560.0, "1200": 972843136.0, "1205": 988478656.0, "1210": 931126784.0, "1215": 968647040.0, "1220": 969160960.0, "1225": 975950656.0, "1230": 967331712.0, "1235": 943446912.0, "1240": 955853952.0, "1245": 981503488.0, "1250": 966111808.0, "1255": 973676032.0, "1260": 946497280.0, "1265": 963997824.0, "1270": 960489024.0, "1275": 973615104.0, "1280": 961112576.0, "1285": 957580480.0, "1290": 952528768.0, "1295": 971610240.0, "1300": 968862464.0, "1305": 963739136.0, "1310": 963334656.0, "1315": 943553408.0, "1320": 966307200.0, "1325": 989784960.0, "1330": 969508992.0, "1335": 972302464.0, "1340": 972269440.0, "1345": 960658304.0, "1350": 968639296.0, "1355": 955853312.0, "1360": 971822144.0, "1365": 960387584.0, "1370": 948791872.0, "1375": 973533376.0, "1380": 953470208.0, "1385": 969146880.0, "1390": 975720640.0, "1395": 931673984.0, "1400": 945854848.0, "1405": 976753536.0, "1410": 974510336.0, "1415": 967573760.0, "1420": 966747328.0, "1425": 937378560.0, "1430": 973916608.0, "1435": 978335552.0, "1440": 964178304.0, "1445": 958058240.0, "1450": 946147712.0, "1455": 983922304.0, "1460": 968651136.0, "1465": 948745152.0, "1470": 984243328.0, "1475": 943906048.0, "1480": 963975488.0, "1485": 957349376.0, "1490": 961261888.0, "1495": 980539648.0, "1500": 958332032.0, "1505": 942866816.0, "1510": 984180096.0, "1515": 959094528.0, "1520": 959105408.0, "1525": 952786816.0, "1530": 957741312.0, "1535": 949428928.0, "1540": 
971088256.0, "1545": 963132352.0, "1550": 978666752.0, "1555": 952320512.0, "1560": 980089984.0, "1565": 967314048.0, "1570": 973844352.0, "1575": 975494912.0, "1580": 941862656.0, "1585": 970028352.0, "1590": 983822208.0, "1595": 948631616.0, "1600": 967442560.0, "1605": 952451328.0, "1610": 969616512.0, "1615": 983146496.0, "1620": 968019200.0, "1625": 970715776.0, "1630": 962887360.0, "1635": 942311936.0, "1640": 981612224.0, "1645": 973977856.0, "1650": 974188224.0, "1655": 967265024.0, "1660": 940687744.0, "1665": 961704448.0, "1670": 962902016.0, "1675": 971280896.0, "1680": 980879232.0, "1685": 944416192.0, "1690": 964688128.0, "1695": 965644992.0, "1700": 966342336.0, "1705": 985200000.0, "1710": 978354304.0, "1715": 943210880.0, "1720": 977089408.0, "1725": 965870208.0, "1730": 968968960.0, "1735": 965088000.0, "1740": 949713280.0, "1745": 970012352.0, "1750": 959681728.0, "1755": 960085440.0, "1760": 966381376.0, "1765": 951816192.0, "1770": 954665728.0, "1775": 973752064.0, "1780": 970534272.0, "1785": 968824960.0, "1790": 950235520.0, "1795": 945131072.0, "1800": 984666816.0, "1805": 987163520.0, "1810": 977766656.0, "1815": 948004480.0, "1820": 949209216.0, "1825": 978853632.0, "1830": 966362368.0, "1835": 964133632.0, "1840": 972320128.0, "1845": 935415808.0, "1850": 952497792.0, "1855": 980048640.0, "1860": 975866880.0, "1865": 958966528.0, "1870": 958949056.0, "1875": 932593408.0, "1880": 973574016.0, "1885": 978843264.0, "1890": 971358720.0, "1895": 959212288.0, "1900": 947394432.0, "1905": 981829952.0, "1910": 969126912.0, "1915": 970040704.0, "1920": 975597056.0, "1925": 960496512.0, "1930": 977922304.0, "1935": 963250432.0, "1940": 952460928.0, "1945": 981338176.0, "1950": 939172864.0, "1955": 960604416.0, "1960": 970031744.0, "1965": 981176000.0, "1970": 962045120.0, "1975": 952822016.0, "1980": 936847360.0, "1985": 975938432.0, "1990": 965965696.0, "1995": 962609920.0, "2000": 960553984.0, "2005": 954497728.0, "2010": 975579776.0, "2015": 
991802112.0, "2020": 975433408.0, "2025": 974303936.0, "2030": 952084736.0, "2035": 967847680.0, "2040": 987457536.0, "2045": 976480064.0, "2050": 984702464.0, "2055": 942839488.0, "2060": 942593920.0, "2065": 966208768.0, "2070": 969622528.0, "2075": 980553536.0, "2080": 977598080.0, "2085": 939635968.0, "2090": 969872256.0, "2095": 961274880.0, "2100": 976719168.0, "2105": 972537920.0, "2110": 959901568.0, "2115": 956875264.0, "2120": 977482304.0, "2125": 962566784.0, "2130": 979618496.0, "2135": 950537408.0, "2140": 946996544.0, "2145": 962273920.0, "2150": 973404416.0, "2155": 972690944.0, "2160": 970314560.0, "2165": 948644160.0, "2170": 961541696.0, "2175": 969377216.0, "2180": 969329920.0, "2185": 947446592.0, "2190": 940480960.0, "2195": 986085952.0, "2200": 961861248.0, "2205": 978924672.0, "2210": 964100864.0, "2215": 963502336.0, "2220": 951311104.0, "2225": 969315776.0, "2230": 976331328.0, "2235": 974025920.0, "2240": 975493888.0, "2245": 960230784.0, "2250": 967640192.0, "2255": 969129984.0, "2260": 975065024.0, "2265": 968258688.0, "2270": 951744768.0, "2275": 962766848.0, "2280": 969640064.0, "2285": 971692992.0, "2290": 962889344.0, "2295": 931409280.0, "2300": 959906048.0, "2305": 970426560.0, "2310": 967444864.0, "2315": 970905792.0, "2320": 975590848.0, "2325": 938587264.0, "2330": 988438528.0, "2335": 977489408.0, "2340": 964596352.0, "2345": 964166528.0, "2350": 947555712.0, "2355": 977029568.0, "2360": 966899072.0, "2365": 977297728.0, "2370": 965072640.0, "2375": 953966272.0, "2380": 962918912.0, "2385": 967194496.0, "2390": 963077248.0, "2395": 974465792.0, "2400": 958410816.0, "2405": 968119552.0, "2410": 951586112.0, "2415": 965904256.0, "2420": 966516160.0, "2425": 959045632.0, "2430": 956685952.0, "2435": 961389184.0, "2440": 959755904.0, "2445": 970891392.0, "2450": 961996736.0, "2455": 922721216.0, "2460": 951953536.0, "2465": 955730432.0, "2470": 972570496.0, "2475": 973812992.0, "2480": 943895296.0, "2485": 944184064.0, "2490": 
972411136.0, "2495": 974451712.0, "2500": 973910080.0, "2505": 958492032.0, "2510": 939510912.0, "2515": 979553728.0, "2520": 970473792.0, "2525": 964390784.0, "2530": 955799168.0, "2535": 936598144.0, "2540": 969027648.0, "2545": 970385024.0, "2550": 969462528.0, "2555": 969439040.0, "2560": 964978304.0, "2565": 959763712.0, "2570": 985176704.0, "2575": 957426112.0, "2580": 967424512.0, "2585": 966022400.0, "2590": 956354944.0, "2595": 981830400.0, "2600": 959530880.0, "2605": 962999168.0, "2610": 965972864.0, "2615": 951924992.0, "2620": 971241216.0, "2625": 976456064.0, "2630": 974409984.0, "2635": 948071296.0, "2640": 948137088.0, "2645": 963036736.0, "2650": 953982912.0, "2655": 977112448.0, "2660": 949622976.0, "2665": 953929024.0, "2670": 959064064.0, "2675": 979275904.0, "2680": 961395840.0, "2685": 970701952.0, "2690": 965222016.0, "2695": 943553536.0, "2700": 969425664.0, "2705": 978961792.0, "2710": 971810560.0, "2715": 990813952.0, "2720": 942649152.0, "2725": 967955328.0, "2730": 955466496.0, "2735": 970672704.0, "2740": 977921536.0, "2745": 932280000.0, "2750": 947856384.0, "2755": 956317184.0, "2760": 981697664.0, "2765": 966112192.0, "2770": 948914688.0, "2775": 935830272.0, "2780": 964777088.0, "2785": 969570176.0, "2790": 974273152.0, "2795": 966886144.0, "2800": 944388096.0, "2805": 964353920.0, "2810": 969610752.0, "2815": 975845248.0, "2820": 963081600.0, "2825": 937627392.0, "2830": 956738368.0, "2835": 986321024.0, "2840": 961756672.0, "2845": 967505920.0, "2850": 951714816.0, "2855": 962091520.0, "2860": 954242304.0, "2865": 955881216.0, "2870": 944662848.0, "2875": 974662784.0, "2880": 968199936.0, "2885": 981081984.0, "2890": 953454080.0, "2895": 957178304.0, "2900": 964989440.0, "2905": 931707648.0, "2910": 955730688.0, "2915": 979477120.0, "2920": 970492544.0, "2925": 964975680.0, "2930": 964046592.0, "2935": 940140416.0, "2940": 964912384.0, "2945": 989149952.0, "2950": 965209088.0, "2955": 965104256.0, "2960": 933161472.0, "2965": 
968794496.0, "2970": 973034688.0, "2975": 958092288.0, "2980": 964497280.0, "2985": 937267584.0, "2990": 951254720.0, "2995": 978315264.0, "3000": 969275392.0, "3005": 974686080.0, "3010": 950235008.0, "3015": 943841024.0, "3020": 958440960.0, "3025": 975186560.0, "3030": 965018624.0, "3035": 963454464.0, "3040": 952131776.0, "3045": 989793472.0, "3050": 965545728.0, "3055": 982520704.0, "3060": 971227712.0, "3065": 943915648.0, "3070": 978409024.0, "3075": 975204992.0, "3080": 960992896.0, "3085": 962352128.0, "3090": 945953664.0, "3095": 938116032.0, "3100": 972929088.0, "3105": 961989120.0, "3110": 970657152.0, "3115": 963390528.0, "3120": 947116032.0, "3125": 972720640.0, "3130": 952973312.0, "3135": 966041920.0, "3140": 968488896.0, "3145": 937852288.0, "3150": 975009280.0, "3155": 976815488.0, "3160": 969629184.0, "3165": 982194944.0, "3170": 937959936.0, "3175": 953825280.0, "3180": 983810176.0, "3185": 965169536.0, "3190": 968482880.0, "3195": 950933888.0, "3200": 945101440.0, "3205": 959863232.0, "3210": 957486336.0, "3215": 958020096.0, "3220": 968129792.0, "3225": 935614144.0, "3230": 962588672.0, "3235": 975775488.0, "3240": 962621440.0, "3245": 981274368.0, "3250": 943260544.0, "3255": 954599424.0, "3260": 980363648.0, "3265": 963619968.0, "3270": 965162624.0, "3275": 959731072.0, "3280": 967046720.0, "3285": 982478144.0, "3290": 947689408.0, "3295": 966419840.0, "3300": 959165632.0, "3305": 949131008.0, "3310": 979510144.0, "3315": 964283008.0, "3320": 969207296.0, "3325": 956193280.0, "3330": 941167104.0, "3335": 964973184.0, "3340": 956900736.0, "3345": 972500224.0, "3350": 964576192.0, "3355": 943346176.0, "3360": 970037632.0, "3365": 969453952.0, "3370": 954774976.0, "3375": 958676288.0, "3380": 971463168.0, "3385": 947973376.0, "3390": 965793024.0, "3395": 978390016.0, "3400": 978127360.0, "3405": 976724032.0, "3410": 924193664.0, "3415": 955424256.0, "3420": 971820416.0, "3425": 977167488.0, "3430": 973837696.0, "3435": 936071680.0, "3440": 
970502464.0, "3445": 957303232.0, "3450": 959840896.0, "3455": 963864256.0, "3460": 967881856.0, "3465": 931318976.0, "3470": 952348224.0, "3475": 973704384.0, "3480": 959738112.0, "3485": 979960640.0, "3490": 944670592.0, "3495": 953904576.0, "3500": 969330432.0, "3505": 964348416.0, "3510": 971222656.0, "3515": 955946112.0, "3520": 958733120.0, "3525": 971914240.0, "3530": 964124672.0, "3535": 983192192.0, "3540": 937489664.0, "3545": 944730496.0, "3550": 984462784.0, "3555": 978057984.0, "3560": 974372992.0, "3565": 968800128.0, "3570": 946694528.0, "3575": 976104640.0, "3580": 977494272.0, "3585": 954568832.0, "3590": 956425856.0, "3595": 951444800.0, "3600": 988996800.0, "3605": 962008448.0, "3610": 965055104.0, "3615": 974635648.0, "3620": 954888384.0, "3625": 939515392.0, "3630": 990148736.0, "3635": 971435712.0, "3640": 976025856.0, "3645": 961489664.0, "3650": 945804160.0, "3655": 965782784.0, "3660": 976208000.0, "3665": 964021248.0, "3670": 977431936.0, "3675": 943476096.0, "3680": 958185152.0, "3685": 964285312.0, "3690": 982093952.0, "3695": 963125248.0, "3700": 950570176.0, "3705": 947345792.0, "3710": 982355328.0, "3715": 972682240.0, "3720": 976138624.0, "3725": 964038272.0, "3730": 948851328.0, "3735": 967079424.0, "3740": 960973568.0, "3745": 969334272.0, "3750": 963949056.0, "3755": 953432832.0, "3760": 976641920.0, "3765": 979841280.0, "3770": 972360320.0, "3775": 972374720.0, "3780": 952585664.0, "3785": 960236800.0, "3790": 985598848.0, "3795": 969195392.0, "3800": 957877504.0, "3805": 972408192.0, "3810": 954517952.0, "3815": 974566528.0, "3820": 963019648.0, "3825": 962070528.0, "3830": 969394304.0, "3835": 934744960.0, "3840": 971255808.0, "3845": 986849792.0, "3850": 968873536.0, "3855": 965248640.0, "3860": 948057600.0, "3865": 975049216.0, "3870": 985098432.0, "3875": 983043072.0, "3880": 963615616.0, "3885": 953012224.0, "3890": 960296960.0, "3895": 960591616.0, "3900": 984926464.0, "3905": 976222592.0, "3910": 987360512.0, "3915": 
946017792.0, "3920": 974867328.0, "3925": 961248384.0, "3930": 976790208.0, "3935": 978925824.0, "3940": 950292672.0, "3945": 960260864.0, "3950": 974185152.0, "3955": 972967808.0, "3960": 974078848.0, "3965": 950861696.0, "3970": 980692992.0, "3975": 960750208.0, "3980": 977519808.0, "3985": 962949440.0, "3990": 972755328.0, "3995": 953714176.0, "4000": 975011968.0, "4005": 971658752.0, "4010": 978420992.0, "4015": 971491968.0, "4020": 950311040.0, "4025": 968433024.0, "4030": 997935360.0, "4035": 978548864.0, "4040": 959813248.0, "4045": 939664448.0, "4050": 944718720.0, "4055": 980958720.0, "4060": 977682816.0, "4065": 975706624.0, "4070": 942152832.0, "4075": 945765376.0, "4080": 988765632.0, "4085": 962079872.0, "4090": 983356736.0, "4095": 986958720.0, "4100": 957224448.0, "4105": 954073984.0, "4110": 966488192.0, "4115": 976042432.0, "4120": 983531776.0, "4125": 960054336.0, "4130": 967278976.0, "4135": 971431296.0, "4140": 963171200.0, "4145": 956175616.0, "4150": 960332672.0, "4155": 946216576.0, "4160": 968451328.0, "4165": 970327424.0, "4170": 971946880.0, "4175": 955862272.0, "4180": 940997632.0, "4185": 968318592.0, "4190": 968003712.0, "4195": 989223168.0, "4200": 962678912.0, "4205": 960595072.0, "4210": 971851136.0, "4215": 974145536.0, "4220": 981144704.0, "4225": 975196160.0, "4230": 952703104.0, "4235": 958493312.0, "4240": 966824448.0, "4245": 961756800.0, "4250": 965845632.0, "4255": 958268032.0, "4260": 949545344.0, "4265": 964137856.0, "4270": 978408896.0, "4275": 975397504.0, "4280": 962672064.0, "4285": 951451136.0, "4290": 980029952.0, "4295": 968842816.0, "4300": 958299456.0, "4305": 966802048.0, "4310": 939613376.0, "4315": 949416704.0, "4320": 984522240.0, "4325": 982547712.0, "4330": 974746240.0, "4335": 949459840.0, "4340": 959478400.0, "4345": 956644096.0, "4350": 979960320.0, "4355": 968869888.0, "4360": 966313728.0, "4365": 941208704.0, "4370": 969495552.0, "4375": 972982336.0, "4380": 966147840.0, "4385": 972060544.0, "4390": 
954141120.0, "4395": 951856512.0, "4400": 973629184.0, "4405": 972441728.0, "4410": 967908480.0, "4415": 958935232.0, "4420": 960777856.0, "4425": 976416000.0, "4430": 965905664.0, "4435": 975864704.0, "4440": 962282496.0, "4445": 954799872.0, "4450": 978294912.0, "4455": 960203776.0, "4460": 968579008.0, "4465": 968805760.0, "4470": 944127232.0, "4475": 951973056.0, "4480": 978824832.0, "4485": 968265728.0, "4490": 956975616.0, "4495": 938925248.0, "4500": 953215488.0, "4505": 977129344.0, "4510": 978700416.0, "4515": 962493568.0, "4520": 958825024.0, "4525": 958101760.0, "4530": 964428608.0, "4535": 976641024.0, "4540": 976846784.0, "4545": 970169408.0, "4550": 953223680.0, "4555": 959591040.0, "4560": 972497920.0, "4565": 973442560.0, "4570": 978909568.0, "4575": 957760000.0, "4580": 963142016.0, "4585": 957381888.0, "4590": 986530176.0, "4595": 960189824.0, "4600": 952267648.0, "4605": 959297664.0, "4610": 963698304.0, "4615": 957950912.0, "4620": 960255936.0, "4625": 973786624.0, "4630": 944507648.0, "4635": 977024896.0, "4640": 960310272.0, "4645": 981993856.0, "4650": 962400128.0, "4655": 939457792.0, "4660": 964001280.0, "4665": 962540544.0, "4670": 976671872.0, "4675": 963514432.0, "4680": 957488448.0, "4685": 949674432.0, "4690": 956962240.0, "4695": 969926912.0, "4700": 961214016.0, "4705": 970781312.0, "4710": 934493248.0, "4715": 970475008.0, "4720": 966361920.0, "4725": 980303616.0, "4730": 965826240.0, "4735": 937871360.0, "4740": 960124288.0, "4745": 975997376.0, "4750": 967957760.0, "4755": 984959744.0, "4760": 959123648.0, "4765": 955261056.0, "4770": 958621440.0, "4775": 991092608.0, "4780": 976856000.0, "4785": 967582336.0, "4790": 943756160.0, "4795": 955861760.0, "4800": 967745984.0, "4805": 976565888.0, "4810": 965160448.0, "4815": 957969408.0, "4820": 973993216.0, "4825": 961505920.0, "4830": 962638336.0, "4835": 972543936.0, "4840": 948913920.0, "4845": 965716608.0, "4850": 960305024.0, "4855": 964130624.0, "4860": 963051008.0, "4865": 
967532352.0, "4870": 957207424.0, "4875": 983574528.0, "4880": 957085120.0, "4885": 977052480.0, "4890": 959740928.0, "4895": 942152192.0, "4900": 973700352.0, "4905": 975213952.0, "4910": 969220608.0, "4915": 970053248.0, "4920": 941169024.0, "4925": 954809344.0, "4930": 977034624.0, "4935": 963750016.0, "4940": 972586496.0, "4945": 960056448.0, "4950": 940793856.0, "4955": 968036480.0, "4960": 976800640.0, "4965": 961022464.0, "4970": 958665472.0, "4975": 933775168.0, "4980": 960824704.0, "4985": 963012160.0, "4990": 963588736.0, "4995": 986307968.0, "5000": 940776000.0, "5005": 968887552.0, "5010": 970307776.0, "5015": 965233408.0, "5020": 966702336.0, "5025": 949471744.0, "5030": 953490560.0, "5035": 967352704.0, "5040": 955689856.0, "5045": 969129856.0, "5050": 953417984.0, "5055": 954811520.0, "5060": 963029248.0, "5065": 952195072.0, "5070": 973597056.0, "5075": 978581632.0, "5080": 942828736.0, "5085": 965863040.0, "5090": 972857088.0, "5095": 964397952.0, "5100": 958318016.0, "5105": 965353728.0, "5110": 950401920.0, "5115": 972347520.0, "5120": 960424960.0, "5125": 969755008.0, "5130": 938795456.0, "5135": 943659008.0, "5140": 969888896.0, "5145": 968688768.0, "5150": 970601216.0, "5155": 972635008.0, "5160": 926551872.0, "5165": 961591552.0, "5170": 966873472.0, "5175": 966086400.0, "5180": 963656192.0, "5185": 930802688.0, "5190": 949852992.0, "5195": 972422016.0, "5200": 973758656.0, "5205": 968249664.0, "5210": 960527936.0, "5215": 928815680.0, "5220": 979174208.0, "5225": 984780416.0, "5230": 975060672.0, "5235": 975055232.0, "5240": 944314112.0, "5245": 970834048.0, "5250": 972426624.0, "5255": 966895296.0, "5260": 976679744.0, "5265": 942270592.0, "5270": 969202176.0, "5275": 970073344.0, "5280": 962825728.0, "5285": 964072064.0, "5290": 932501824.0, "5295": 951762944.0, "5300": 975588288.0, "5305": 951853504.0, "5310": 968040960.0, "5315": 955817472.0, "5320": 950921984.0, "5325": 973040384.0, "5330": 967843264.0, "5335": 967536384.0, "5340": 
966503424.0, "5345": 962961344.0, "5350": 978937216.0, "5355": 972202560.0, "5360": 963892288.0, "5365": 965246464.0, "5370": 947824896.0, "5375": 948820864.0, "5380": 967210752.0, "5385": 980540416.0, "5390": 965312832.0, "5395": 955160000.0, "5400": 948286464.0, "5405": 974365952.0, "5410": 967844224.0, "5415": 976074688.0, "5420": 967431680.0, "5425": 937412096.0, "5430": 963919744.0, "5435": 971948544.0, "5440": 969128192.0, "5445": 957501184.0, "5450": 919431040.0, "5455": 952052800.0, "5460": 962315264.0, "5465": 978905088.0, "5470": 981031104.0, "5475": 941610304.0, "5480": 955755072.0, "5485": 964902912.0, "5490": 976036992.0, "5495": 962805120.0, "5500": 971218240.0, "5505": 957001728.0, "5510": 968610432.0, "5515": 945466624.0, "5520": 963181952.0, "5525": 975932480.0, "5530": 936635520.0, "5535": 970693824.0, "5540": 960295936.0, "5545": 972084800.0, "5550": 967898240.0, "5555": 955968960.0, "5560": 954520832.0, "5565": 968862848.0, "5570": 945186112.0, "5575": 960539584.0, "5580": 960563456.0, "5585": 959470720.0, "5590": 977668224.0, "5595": 975194496.0, "5600": 963009472.0, "5605": 964201216.0, "5610": 943157760.0, "5615": 966515904.0, "5620": 963224448.0, "5625": 982398656.0, "5630": 976073984.0, "5635": 957347520.0, "5640": 951455488.0, "5645": 967805568.0, "5650": 979181056.0, "5655": 983510912.0, "5660": 956493952.0, "5665": 953574656.0, "5670": 966097408.0, "5675": 967701184.0, "5680": 978598848.0, "5685": 962008576.0, "5690": 935908928.0, "5695": 963739648.0, "5700": 952464384.0, "5705": 974525376.0, "5710": 971341376.0, "5715": 946147648.0, "5720": 974991360.0, "5725": 967473664.0, "5730": 978561792.0, "5735": 964979712.0, "5740": 943573056.0, "5745": 971142016.0, "5750": 981951168.0, "5755": 956754944.0, "5760": 963695168.0, "5765": 957707648.0, "5770": 955747584.0, "5775": 970847104.0, "5780": 962811840.0, "5785": 970673664.0, "5790": 974652672.0, "5795": 949802368.0, "5800": 965999232.0, "5805": 968774272.0, "5810": 975986176.0, "5815": 
970103936.0, "5820": 936377408.0, "5825": 969266816.0, "5830": 977614464.0, "5835": 974980224.0, "5840": 963061120.0, "5845": 968774464.0, "5850": 942897536.0, "5855": 975999104.0, "5860": 979537600.0, "5865": 978369280.0, "5870": 968714112.0, "5875": 942328320.0, "5880": 964508224.0, "5885": 974806656.0, "5890": 972671104.0, "5895": 965681920.0, "5900": 941482880.0, "5905": 961766528.0, "5910": 958568832.0, "5915": 968174464.0, "5920": 977468032.0, "5925": 959468800.0, "5930": 946750080.0, "5935": 952334656.0, "5940": 977662144.0, "5945": 984818560.0, "5950": 980689536.0, "5955": 935094464.0, "5960": 961735296.0, "5965": 965938176.0, "5970": 970612096.0, "5975": 961933888.0, "5980": 958344832.0, "5985": 964562816.0, "5990": 973676288.0, "5995": 955919488.0, "6000": 955633216.0, "6005": 961366784.0, "6010": 952707072.0, "6015": 974539328.0, "6020": 978265920.0, "6025": 972226688.0, "6030": 955311744.0, "6035": 946954368.0, "6040": 962641920.0, "6045": 983743552.0, "6050": 956515200.0, "6055": 963445888.0, "6060": 945767488.0, "6065": 958443776.0, "6070": 978390272.0, "6075": 977988416.0, "6080": 957530304.0, "6085": 947641408.0, "6090": 953645632.0, "6095": 964770560.0, "6100": 979886720.0, "6105": 971011904.0, "6110": 961816128.0, "6115": 943814016.0, "6120": 968573824.0, "6125": 960703232.0, "6130": 984040320.0, "6135": 960994432.0, "6140": 958720704.0, "6145": 971225728.0, "6150": 968493312.0, "6155": 974930688.0, "6160": 977216128.0, "6165": 952742848.0, "6170": 951129728.0, "6175": 963358592.0, "6180": 969549568.0, "6185": 966320320.0, "6190": 963731264.0, "6195": 947266752.0, "6200": 969378240.0, "6205": 967161728.0, "6210": 959339264.0, "6215": 973147776.0, "6220": 936374912.0, "6225": 978715520.0, "6230": 976146816.0, "6235": 971740992.0, "6240": 966006336.0, "6245": 956249728.0, "6250": 956449920.0, "6255": 973363584.0, "6260": 978804800.0, "6265": 974941952.0, "6270": 958855808.0, "6275": 963713152.0, "6280": 973141120.0, "6285": 966123136.0, "6290": 
971040256.0, "6295": 987490560.0, "6300": 947640064.0, "6305": 964795456.0, "6310": 979017216.0, "6315": 978443392.0, "6320": 971724672.0, "6325": 923002624.0, "6330": 959366784.0, "6335": 974940864.0, "6340": 984824576.0, "6345": 966835456.0, "6350": 944574016.0, "6355": 957990272.0, "6360": 972711552.0, "6365": 972207744.0, "6370": 958966784.0, "6375": 967142144.0, "6380": 951399104.0, "6385": 973584896.0, "6390": 965514880.0, "6395": 975032064.0, "6400": 983945472.0, "6405": 944065408.0, "6410": 977178496.0, "6415": 971635776.0, "6420": 956726592.0, "6425": 960937728.0, "6430": 957525120.0, "6435": 960333440.0, "6440": 968713088.0, "6445": 973479168.0, "6450": 974637056.0, "6455": 962142208.0, "6460": 940994496.0, "6465": 974482944.0, "6470": 979911936.0, "6475": 960847808.0, "6480": 967532032.0, "6485": 948559616.0, "6490": 970748032.0, "6495": 988369024.0, "6500": 980468864.0, "6505": 972158336.0, "6510": 951648576.0, "6515": 957565440.0, "6520": 979061952.0, "6525": 978903424.0, "6530": 973271744.0, "6535": 967829056.0, "6540": 950159040.0, "6545": 966294144.0, "6550": 979335168.0, "6555": 967119872.0, "6560": 975391104.0, "6565": 949645696.0, "6570": 952068224.0, "6575": 962553728.0, "6580": 975679424.0, "6585": 979544832.0, "6590": 949212544.0, "6595": 961471616.0, "6600": 961353856.0, "6605": 961755520.0, "6610": 985212480.0, "6615": 959518336.0, "6620": 944576256.0, "6625": 971028736.0, "6630": 971564928.0, "6635": 964103936.0, "6640": 959857152.0, "6645": 951077504.0, "6650": 978674944.0, "6655": 965949440.0, "6660": 968814080.0, "6665": 969002112.0, "6670": 932999424.0, "6675": 970736128.0, "6680": 969016064.0, "6685": 958784384.0, "6690": 956215552.0, "6695": 955745920.0, "6700": 962135936.0, "6705": 979365824.0, "6710": 971098240.0, "6715": 966874944.0, "6720": 974162048.0, "6725": 941768192.0, "6730": 979399488.0, "6735": 994709376.0, "6740": 976356224.0, "6745": 974602752.0, "6750": 939272320.0, "6755": 977649344.0, "6760": 969757888.0, "6765": 
978454848.0, "6770": 975513728.0, "6775": 943523520.0, "6780": 947283584.0, "6785": 975371712.0, "6790": 960607104.0, "6795": 976217984.0, "6800": 973344640.0, "6805": 946806016.0, "6810": 958265856.0, "6815": 970929792.0, "6820": 978086528.0, "6825": 969192704.0, "6830": 950435072.0, "6835": 981464192.0, "6840": 983022336.0, "6845": 948763840.0, "6850": 965465152.0, "6855": 954199552.0, "6860": 979123968.0, "6865": 983975808.0, "6870": 964842560.0, "6875": 978847808.0, "6880": 950371200.0, "6885": 958582016.0, "6890": 960484032.0, "6895": 965665280.0, "6900": 985370880.0, "6905": 968478592.0, "6910": 950097088.0, "6915": 971060736.0, "6920": 967166720.0, "6925": 965180672.0, "6930": 964715648.0, "6935": 952122112.0, "6940": 962920704.0, "6945": 986470144.0, "6950": 973350272.0, "6955": 964715136.0, "6960": 940248960.0, "6965": 974503680.0, "6970": 978554240.0, "6975": 985114880.0, "6980": 982851072.0, "6985": 959949376.0, "6990": 945298944.0, "6995": 987557120.0, "7000": 963329344.0, "7005": 962922240.0, "7010": 985144320.0, "7015": 945447424.0, "7020": 982884608.0, "7025": 968840640.0, "7030": 953537472.0, "7035": 982810432.0, "7040": 950520320.0, "7045": 956041600.0, "7050": 960403712.0, "7055": 963929728.0, "7060": 976999040.0, "7065": 968391296.0, "7070": 953547264.0, "7075": 956559360.0, "7080": 969124864.0, "7085": 965868800.0, "7090": 969521920.0, "7095": 960078592.0, "7100": 973651200.0, "7105": 973332672.0, "7110": 970254848.0, "7115": 958633088.0, "7120": 948865536.0, "7125": 963163584.0, "7130": 971421376.0, "7135": 964540096.0, "7140": 961681152.0, "7145": 930416448.0, "7150": 946280064.0, "7155": 991092864.0, "7160": 968442496.0, "7165": 956888320.0, "7170": 968275328.0, "7175": 955828224.0, "7180": 958441536.0, "7185": 984880256.0, "7190": 978735936.0, "7195": 973712000.0, "7200": 935905536.0, "7205": 957525760.0, "7210": 967114624.0, "7215": 969707264.0, "7220": 982219584.0, "7225": 928936768.0, "7230": 949560960.0, "7235": 967251712.0, "7240": 
966968064.0, "7245": 967454976.0, "7250": 949502336.0, "7255": 957363968.0, "7260": 970114816.0, "7265": 974961664.0, "7270": 959874240.0, "7275": 959286784.0, "7280": 957121920.0, "7285": 977575808.0, "7290": 977249920.0, "7295": 962901120.0, "7300": 975499904.0, "7305": 964022528.0, "7310": 977515520.0, "7315": 966809600.0, "7320": 974552768.0, "7325": 966906752.0, "7330": 959690880.0, "7335": 964049280.0, "7340": 977556864.0, "7345": 967668224.0, "7350": 984804864.0, "7355": 959674816.0, "7360": 948842240.0, "7365": 972772864.0, "7370": 982593664.0, "7375": 963567424.0, "7380": 964284224.0, "7385": 948615488.0, "7390": 964192512.0, "7395": 958787008.0, "7400": 970242816.0, "7405": 988116736.0, "7410": 952423488.0, "7415": 950935744.0, "7420": 967472640.0, "7425": 982705664.0, "7430": 965871552.0, "7435": 973294080.0, "7440": 937228160.0, "7445": 969074752.0, "7450": 980608832.0, "7455": 971587712.0, "7460": 972749056.0, "7465": 939573760.0, "7470": 972011648.0, "7475": 958500480.0, "7480": 969529792.0, "7485": 961637568.0, "7490": 934760704.0, "7495": 957297216.0, "7500": 969548416.0, "7505": 970380928.0, "7510": 972589184.0, "7515": 979619840.0, "7520": 951796224.0, "7525": 970763840.0, "7530": 954655104.0, "7535": 971887616.0, "7540": 979952832.0, "7545": 959493248.0, "7550": 960599936.0, "7555": 960564352.0, "7560": 970209920.0, "7565": 955267200.0, "7570": 942667904.0, "7575": 966060032.0, "7580": 982740480.0, "7585": 979190784.0, "7590": 970155264.0, "7595": 950188416.0, "7600": 946546432.0, "7605": 982722432.0, "7610": 969487360.0, "7615": 988970624.0, "7620": 957136000.0, "7625": 941577856.0, "7630": 971809152.0, "7635": 984756608.0, "7640": 983937792.0, "7645": 968197120.0, "7650": 959357504.0, "7655": 962682368.0, "7660": 969198976.0, "7665": 978073088.0, "7670": 975279104.0, "7675": 975891840.0, "7680": 943167616.0, "7685": 960475136.0, "7690": 975843968.0, "7695": 982086400.0, "7700": 979923648.0, "7705": 940595776.0, "7710": 974811648.0, "7715": 
979946496.0, "7720": 968216448.0, "7725": 960576640.0, "7730": 943583104.0, "7735": 968598400.0, "7740": 980697600.0, "7745": 964667008.0, "7750": 963965568.0, "7755": 960341056.0, "7760": 970667072.0, "7765": 971220096.0, "7770": 962730624.0, "7775": 981588800.0, "7780": 965073280.0, "7785": 959776384.0, "7790": 968147968.0, "7795": 969118208.0, "7800": 971586880.0, "7805": 968712128.0, "7810": 946156608.0, "7815": 963601664.0, "7820": 974369664.0, "7825": 963930944.0, "7830": 957420864.0, "7835": 949820864.0, "7840": 957576448.0, "7845": 954299264.0, "7850": 980140416.0, "7855": 987100288.0, "7860": 947203712.0, "7865": 949597632.0, "7870": 965653760.0, "7875": 976341632.0, "7880": 968749184.0, "7885": 969863296.0, "7890": 951979520.0, "7895": 974744576.0, "7900": 964075264.0, "7905": 964628544.0, "7910": 966224768.0, "7915": 943623808.0, "7920": 951251584.0, "7925": 969683840.0, "7930": 965018496.0, "7935": 984522112.0, "7940": 965260992.0, "7945": 950920512.0, "7950": 961778944.0, "7955": 980819072.0, "7960": 964107328.0, "7965": 952800768.0, "7970": 952096960.0, "7975": 969954944.0, "7980": 965058752.0, "7985": 959497728.0, "7990": 968288768.0, "7995": 947074368.0, "8000": 962595712.0, "8005": 980875264.0, "8010": 965703040.0, "8015": 982795648.0, "8020": 960636544.0, "8025": 965519616.0, "8030": 958643200.0, "8035": 975716096.0, "8040": 960827648.0, "8045": 948395264.0, "8050": 959831808.0, "8055": 979617792.0, "8060": 969592128.0, "8065": 958394752.0, "8070": 964066944.0, "8075": 942266240.0, "8080": 966035328.0, "8085": 966815936.0, "8090": 983700160.0, "8095": 988871424.0, "8100": 966531968.0, "8105": 944438272.0, "8110": 969326016.0, "8115": 985228672.0, "8120": 974833408.0, "8125": 964005120.0, "8130": 966272000.0, "8135": 967624576.0, "8140": 963686848.0, "8145": 994976768.0, "8150": 973166016.0, "8155": 938390528.0, "8160": 964462464.0, "8165": 972803200.0, "8170": 968497280.0, "8175": 961587008.0, "8180": 936029440.0, "8185": 962625536.0, "8190": 
967799296.0, "8195": 977385088.0, "8200": 956367296.0, "8205": 960566528.0, "8210": 946495424.0, "8215": 982005248.0, "8220": 988443520.0, "8225": 966243584.0, "8230": 962552576.0, "8235": 934131712.0, "8240": 980267904.0, "8245": 976606848.0, "8250": 964327808.0, "8255": 977492864.0, "8260": 956833664.0, "8265": 982957440.0, "8270": 952836608.0, "8275": 974283968.0, "8280": 974906560.0, "8285": 953985664.0, "8290": 940194816.0, "8295": 981360128.0, "8300": 972952832.0, "8305": 978368320.0, "8310": 951095936.0, "8315": 937922048.0, "8320": 977484544.0, "8325": 967872768.0, "8330": 990116800.0, "8335": 975746048.0, "8340": 947366912.0, "8345": 970641408.0, "8350": 970082176.0, "8355": 975014080.0, "8360": 979651456.0, "8365": 932855680.0, "8370": 965537344.0, "8375": 979732736.0, "8380": 965482496.0, "8385": 972889472.0, "8390": 962502912.0, "8395": 951003840.0, "8400": 972739968.0, "8405": 951808384.0, "8410": 960912000.0, "8415": 965867904.0, "8420": 941925888.0, "8425": 968447872.0, "8430": 961416704.0, "8435": 966249344.0, "8440": 969510272.0, "8445": 952921344.0, "8450": 984742912.0, "8455": 990518400.0, "8460": 969086848.0, "8465": 967798656.0, "8470": 963598464.0, "8475": 942921920.0, "8480": 987605888.0, "8485": 979799936.0, "8490": 991849856.0, "8495": 971815552.0, "8500": 951760768.0, "8505": 982982848.0, "8510": 974371200.0, "8515": 969206912.0, "8520": 961827968.0, "8525": 944996096.0, "8530": 984721152.0, "8535": 978411520.0, "8540": 968342592.0, "8545": 969125440.0, "8550": 942408448.0, "8555": 971549056.0, "8560": 958775296.0, "8565": 975676160.0, "8570": 975305216.0, "8575": 971852992.0, "8580": 932583232.0, "8585": 966065856.0, "8590": 978933760.0, "8595": 979387904.0, "8600": 983792768.0, "8605": 958356416.0, "8610": 984069888.0, "8615": 978067776.0, "8620": 963535168.0, "8625": 979909120.0, "8630": 943580032.0, "8635": 961797632.0, "8640": 973745600.0, "8645": 970784128.0, "8650": 969289152.0, "8655": 970653440.0, "8660": 944484096.0, "8665": 
986977728.0, "8670": 960353920.0, "8675": 974610176.0, "8680": 962718976.0, "8685": 956147136.0, "8690": 978612864.0, "8695": 969139072.0, "8700": 973135360.0, "8705": 973914176.0, "8710": 947435776.0, "8715": 973736320.0, "8720": 958622976.0, "8725": 978719488.0, "8730": 985894400.0, "8735": 952583040.0, "8740": 940201728.0, "8745": 987763456.0, "8750": 972207744.0, "8755": 971134720.0, "8760": 965569152.0, "8765": 934519872.0, "8770": 986656640.0, "8775": 969789440.0, "8780": 967920512.0, "8785": 962639488.0, "8790": 947921664.0, "8795": 969775296.0, "8800": 971220608.0, "8805": 973559168.0, "8810": 983161280.0, "8815": 951065856.0, "8820": 939478016.0, "8825": 964494336.0, "8830": 981089472.0, "8835": 971889408.0, "8840": 979835520.0, "8845": 951616384.0, "8850": 987153920.0, "8855": 971335296.0, "8860": 962222080.0, "8865": 957359360.0, "8870": 946242816.0, "8875": 968628096.0, "8880": 984173184.0, "8885": 971110144.0, "8890": 970299648.0, "8895": 952971136.0, "8900": 962246528.0, "8905": 977392000.0, "8910": 981876416.0, "8915": 981149952.0, "8920": 968258432.0, "8925": 940189184.0, "8930": 970787456.0, "8935": 963634560.0, "8940": 978025664.0, "8945": 982356352.0, "8950": 946274176.0, "8955": 972928128.0, "8960": 974032128.0, "8965": 973961216.0, "8970": 966361216.0, "8975": 937321600.0, "8980": 953099648.0, "8985": 977878528.0, "8990": 967166592.0, "8995": 980283904.0, "9000": 952421184.0, "9005": 950292544.0, "9010": 974935552.0, "9015": 982668672.0, "9020": 959278656.0, "9025": 979055040.0, "9030": 953936640.0, "9035": 968749312.0, "9040": 978270080.0, "9045": 968843136.0, "9050": 983417600.0, "9055": 947885952.0, "9060": 956699776.0, "9065": 970246528.0, "9070": 968015744.0, "9075": 981225856.0, "9080": 952541632.0, "9085": 971319168.0, "9090": 963789184.0, "9095": 968313984.0, "9100": 974584320.0, "9105": 960032896.0, "9110": 947321664.0, "9115": 956833728.0, "9120": 985899904.0, "9125": 963026176.0, "9130": 958457216.0, "9135": 951989056.0, "9140": 
967565824.0, "9145": 977433728.0, "9150": 987305408.0, "9155": 976649408.0, "9160": 958050816.0, "9165": 950957248.0, "9170": 988702272.0, "9175": 971913280.0, "9180": 967854400.0, "9185": 955127680.0, "9190": 957263744.0, "9195": 966003584.0, "9200": 968856960.0, "9205": 967330048.0, "9210": 984179584.0, "9215": 931743808.0, "9220": 949808960.0, "9225": 971440256.0, "9230": 971281792.0, "9235": 971857152.0, "9240": 959917376.0, "9245": 963584128.0, "9250": 961416384.0, "9255": 983241472.0, "9260": 979566336.0, "9265": 953039104.0, "9270": 949474624.0, "9275": 978502016.0, "9280": 978025536.0, "9285": 962828800.0, "9290": 979390080.0, "9295": 958548480.0, "9300": 965876352.0, "9305": 969599232.0, "9310": 973283008.0, "9315": 976451392.0, "9320": 948304512.0, "9325": 979749696.0, "9330": 977926784.0, "9335": 975525504.0, "9340": 960336000.0, "9345": 943464832.0, "9350": 952835072.0, "9355": 962850048.0, "9360": 960675328.0, "9365": 983816320.0, "9370": 983035904.0, "9375": 942080896.0, "9380": 982540928.0, "9385": 985259136.0, "9390": 973406272.0, "9395": 978528128.0, "9400": 938038400.0, "9405": 968500672.0, "9410": 981791488.0, "9415": 991945472.0, "9420": 960625728.0, "9425": 956681216.0, "9430": 938695808.0, "9435": 974362368.0, "9440": 959727872.0, "9445": 973720576.0, "9450": 961877760.0, "9455": 946303872.0, "9460": 978086272.0, "9465": 988617984.0, "9470": 963615872.0, "9475": 983908608.0, "9480": 930854528.0, "9485": 987221248.0, "9490": 963974912.0, "9495": 972857088.0, "9500": 982392960.0, "9505": 970286080.0, "9510": 964873536.0, "9515": 957183296.0, "9520": 948641664.0, "9525": 965336064.0, "9530": 958567296.0, "9535": 950963840.0, "9540": 954501120.0, "9545": 979935296.0, "9550": 955384704.0, "9555": 953296192.0, "9560": 958726208.0, "9565": 969930112.0, "9570": 977751168.0, "9575": 958849792.0, "9580": 963257728.0, "9585": 946197184.0, "9590": 948135936.0, "9595": 967007808.0, "9600": 985117952.0, "9605": 985499648.0, "9610": 943959808.0, "9615": 
952912128.0, "9620": 980920192.0, "9625": 978524736.0, "9630": 969671168.0, "9635": 974868544.0, "9640": 940772416.0, "9645": 962475008.0, "9650": 970857536.0, "9655": 987496960.0, "9660": 963394176.0, "9665": 950327872.0, "9670": 965817856.0, "9675": 963579264.0, "9680": 965384064.0, "9685": 986598272.0, "9690": 940596864.0, "9695": 950521728.0, "9700": 975714688.0, "9705": 972896256.0, "9710": 967299968.0, "9715": 971403392.0, "9720": 940613632.0, "9725": 966514816.0, "9730": 974099584.0, "9735": 974345792.0, "9740": 971516928.0, "9745": 951220736.0, "9750": 979370880.0, "9755": 970170432.0, "9760": 968237888.0, "9765": 963835520.0, "9770": 952652160.0, "9775": 956682880.0, "9780": 970721984.0, "9785": 958959232.0, "9790": 961043072.0, "9795": 958779200.0, "9800": 949918656.0, "9805": 962651200.0, "9810": 979093888.0, "9815": 978146816.0, "9820": 982841088.0, "9825": 939730944.0, "9830": 969614208.0, "9835": 973272832.0, "9840": 971945664.0, "9845": 967603328.0, "9850": 947232896.0, "9855": 956896512.0, "9860": 987801728.0, "9865": 970385664.0, "9870": 990310144.0, "9875": 957380096.0, "9880": 931362176.0, "9885": 963678464.0, "9890": 972811648.0, "9895": 984054016.0, "9900": 956595136.0, "9905": 939303808.0, "9910": 979107072.0, "9915": 973996800.0, "9920": 943946432.0, "9925": 963187328.0, "9930": 948020224.0, "9935": 960573120.0, "9940": 965856512.0, "9945": 958998016.0, "9950": 964584192.0, "9955": 943733120.0, "9960": 966844160.0, "9965": 983732096.0, "9970": 966840192.0, "9975": 964040640.0, "9980": 980881024.0, "9985": 942746240.0, "9990": 976134400.0, "9995": 982950848.0, "10000": 972073152.0, "10005": 970193472.0, "10010": 944380480.0, "10015": 983265344.0, "10020": 977865472.0, "10025": 979868544.0, "10030": 971490816.0, "10035": 946263296.0, "10040": 950534016.0, "10045": 977546880.0, "10050": 986017280.0, "10055": 990492800.0, "10060": 958996032.0, "10065": 947517312.0, "10070": 966895616.0, "10075": 979683904.0, "10080": 971953920.0, "10085": 
974879744.0, "10090": 944216960.0, "10095": 962977344.0, "10100": 972381952.0, "10105": 976354432.0, "10110": 972128768.0, "10115": 948919680.0, "10120": 962852480.0, "10125": 974293120.0, "10130": 980737472.0, "10135": 972335104.0, "10140": 957843264.0, "10145": 934671872.0, "10150": 973965568.0, "10155": 970306112.0, "10160": 962491456.0, "10165": 975341248.0, "10170": 944624384.0, "10175": 979643712.0, "10180": 984008448.0, "10185": 978870144.0, "10190": 955877376.0, "10195": 937261120.0, "10200": 988253760.0, "10205": 973401856.0, "10210": 966901120.0, "10215": 976049664.0, "10220": 948799872.0, "10225": 950572096.0, "10230": 976120896.0, "10235": 954421632.0, "10240": 969850752.0, "10245": 962265472.0, "10250": 936756480.0, "10255": 979774976.0, "10260": 965000704.0, "10265": 967563712.0, "10270": 969297920.0, "10275": 935944256.0, "10280": 969526272.0, "10285": 996465152.0, "10290": 979762816.0, "10295": 981662912.0, "10300": 952271936.0, "10305": 972024256.0, "10310": 960359872.0, "10315": 971605760.0, "10320": 985354304.0, "10325": 983302336.0, "10330": 935148288.0, "10335": 976392064.0, "10340": 957603840.0, "10345": 973044352.0, "10350": 984707136.0, "10355": 942479296.0, "10360": 962279040.0, "10365": 973641856.0, "10370": 980432768.0, "10375": 970343296.0, "10380": 962080384.0, "10385": 955687296.0, "10390": 990783104.0, "10395": 965164608.0, "10400": 960470208.0, "10405": 950214848.0, "10410": 955491392.0, "10415": 975924736.0, "10420": 967248320.0, "10425": 969875328.0, "10430": 965126272.0, "10435": 962680768.0, "10440": 972024064.0, "10445": 972467456.0, "10450": 974949504.0, "10455": 965864704.0, "10460": 948726272.0, "10465": 971534464.0, "10470": 972756736.0, "10475": 979392128.0, "10480": 997292352.0, "10485": 949631936.0, "10490": 935104896.0, "10495": 969599424.0, "10500": 978688704.0, "10505": 959342784.0, "10510": 951008000.0, "10515": 954223744.0, "10520": 972150016.0, "10525": 969942528.0, "10530": 970425728.0, "10535": 986576256.0, 
"10540": 946829632.0, "10545": 970484032.0, "10550": 969371968.0, "10555": 959521856.0, "10560": 976274496.0, "10565": 960798208.0, "10570": 968688128.0, "10575": 973272576.0, "10580": 961017472.0, "10585": 973457024.0, "10590": 952053568.0, "10595": 956331776.0, "10600": 967935552.0, "10605": 986576256.0, "10610": 966417408.0, "10615": 976957568.0, "10620": 940933888.0, "10625": 965306432.0, "10630": 968022272.0, "10635": 973333888.0, "10640": 974664448.0, "10645": 948582400.0, "10650": 966388224.0, "10655": 985562624.0, "10660": 976682624.0, "10665": 967088256.0, "10670": 955226368.0, "10675": 934529920.0, "10680": 986153344.0, "10685": 991102656.0, "10690": 963886208.0, "10695": 971933632.0, "10700": 950091520.0, "10705": 978240128.0, "10710": 968317184.0, "10715": 967450432.0, "10720": 966357824.0, "10725": 944490816.0, "10730": 980318592.0, "10735": 961117952.0, "10740": 971283392.0, "10745": 984630528.0, "10750": 981762816.0, "10755": 945191296.0, "10760": 969882304.0, "10765": 972886400.0, "10770": 974268608.0, "10775": 959067392.0, "10780": 949520384.0, "10785": 953706304.0, "10790": 970157568.0, "10795": 960631552.0, "10800": 972050368.0, "10805": 951460864.0, "10810": 974235456.0, "10815": 959804160.0, "10820": 971302656.0, "10825": 967211072.0, "10830": 957016128.0, "10835": 963139136.0, "10840": 971035008.0, "10845": 964268160.0, "10850": 958162432.0, "10855": 967657344.0, "10860": 950849536.0, "10865": 964061696.0, "10870": 983627200.0, "10875": 982016640.0, "10880": 958659648.0, "10885": 954981888.0, "10890": 973122560.0, "10895": 973655744.0, "10900": 970546048.0, "10905": 965184256.0, "10910": 939048192.0, "10915": 960749824.0, "10920": 983653376.0, "10925": 970068160.0, "10930": 968771200.0, "10935": 963228480.0, "10940": 954249408.0, "10945": 964532608.0, "10950": 972466880.0, "10955": 966621248.0, "10960": 972285056.0, "10965": 966333184.0, "10970": 983572160.0, "10975": 965330496.0, "10980": 974669248.0, "10985": 986818496.0, "10990": 
950797760.0, "10995": 963598784.0, "11000": 985495104.0, "11005": 978671168.0, "11010": 971614464.0, "11015": 970071232.0, "11020": 948195648.0, "11025": 960105088.0, "11030": 978168768.0, "11035": 976017024.0, "11040": 986523264.0, "11045": 956708480.0, "11050": 973395968.0, "11055": 974051968.0, "11060": 962164544.0, "11065": 985712768.0, "11070": 949791424.0, "11075": 976565888.0, "11080": 972315712.0, "11085": 967328576.0, "11090": 976399296.0, "11095": 946696448.0, "11100": 966199040.0, "11105": 974421504.0, "11110": 981198912.0, "11115": 968108160.0, "11120": 957518656.0, "11125": 956979840.0, "11130": 975786432.0, "11135": 979636544.0, "11140": 964944832.0, "11145": 966499008.0, "11150": 935518400.0, "11155": 976579008.0, "11160": 984367232.0, "11165": 982289792.0, "11170": 978113472.0, "11175": 958084864.0, "11180": 962589888.0, "11185": 972260672.0, "11190": 979666368.0, "11195": 985502784.0, "11200": 983014336.0, "11205": 942426240.0, "11210": 984802368.0, "11215": 967690816.0, "11220": 983476928.0, "11225": 961985728.0, "11230": 953398272.0, "11235": 981841280.0, "11240": 977805568.0, "11245": 966530176.0, "11250": 969466304.0, "11255": 960572544.0, "11260": 980096576.0, "11265": 963926720.0, "11270": 981695936.0, "11275": 968525888.0, "11280": 955905088.0, "11285": 953700224.0, "11290": 956489152.0, "11295": 968197568.0, "11300": 962513216.0, "11305": 958759872.0, "11310": 946304256.0, "11315": 983036096.0, "11320": 964828480.0, "11325": 980906304.0, "11330": 975476608.0, "11335": 952186816.0, "11340": 970596800.0, "11345": 969926080.0, "11350": 981628736.0, "11355": 981905088.0, "11360": 940723328.0, "11365": 970750592.0, "11370": 978978432.0, "11375": 975338432.0, "11380": 968256960.0, "11385": 958096384.0, "11390": 937948288.0, "11395": 977494080.0, "11400": 973515520.0, "11405": 961359424.0, "11410": 966143616.0, "11415": 929202368.0, "11420": 964768960.0, "11425": 981196352.0, "11430": 978636864.0, "11435": 970153280.0, "11440": 945072704.0, 
"11445": 975241024.0, "11450": 984735296.0, "11455": 971426176.0, "11460": 965182016.0, "11465": 960090176.0, "11470": 955191296.0, "11475": 972691072.0, "11480": 956542272.0, "11485": 977076864.0, "11490": 986332352.0, "11495": 959121344.0, "11500": 969424704.0, "11505": 964024640.0, "11510": 976702848.0, "11515": 977904064.0, "11520": 953963584.0, "11525": 976039360.0, "11530": 976686784.0, "11535": 979809792.0, "11540": 974141760.0, "11545": 953644288.0, "11550": 953295552.0, "11555": 981560640.0, "11560": 984532352.0, "11565": 965181312.0, "11570": 966487424.0, "11575": 950896832.0, "11580": 976062592.0, "11585": 977550784.0, "11590": 969314368.0, "11595": 976719232.0, "11600": 946047104.0, "11605": 973359168.0, "11610": 982457984.0, "11615": 972010048.0, "11620": 969363904.0, "11625": 949111040.0, "11630": 937578176.0, "11635": 973667008.0, "11640": 981259456.0, "11645": 980106048.0, "11650": 971758144.0, "11655": 956204288.0, "11660": 980712192.0, "11665": 958265664.0, "11670": 982618880.0, "11675": 972427200.0, "11680": 956445568.0, "11685": 982955712.0, "11690": 968614528.0, "11695": 968085632.0, "11700": 973819008.0, "11705": 956432640.0, "11710": 964584640.0, "11715": 983208448.0, "11720": 983636224.0, "11725": 965204032.0, "11730": 955695040.0, "11735": 942941376.0, "11740": 973829824.0, "11745": 971260672.0, "11750": 961624256.0, "11755": 963534976.0, "11760": 950291904.0, "11765": 983877632.0, "11770": 984810368.0, "11775": 975671936.0, "11780": 985430336.0, "11785": 947272512.0, "11790": 972444352.0, "11795": 970670464.0, "11800": 973251520.0, "11805": 986780480.0, "11810": 967591808.0, "11815": 955862848.0, "11820": 973765952.0, "11825": 970671296.0, "11830": 974883776.0, "11835": 961826368.0, "11840": 944467904.0, "11845": 980681344.0, "11850": 974601536.0, "11855": 977943744.0, "11860": 971600192.0, "11865": 938631104.0, "11870": 940150208.0, "11875": 990036736.0, "11880": 971605184.0, "11885": 962938432.0, "11890": 970330560.0, "11895": 
965356416.0, "11900": 978791360.0, "11905": 961690240.0, "11910": 983653824.0, "11915": 989882688.0, "11920": 944918016.0, "11925": 994335296.0, "11930": 964853504.0, "11935": 963527104.0, "11940": 977089344.0, "11945": 944859968.0, "11950": 977822912.0, "11955": 979190400.0, "11960": 972201664.0, "11965": 976359488.0, "11970": 963048064.0, "11975": 963239936.0, "11980": 977609536.0, "11985": 953135424.0, "11990": 968692864.0, "11995": 965206144.0, "12000": 958650816.0, "12005": 974558720.0, "12010": 979488320.0, "12015": 972212928.0, "12020": 972972992.0, "12025": 934714048.0, "12030": 969111104.0, "12035": 984089664.0, "12040": 977507648.0, "12045": 981645056.0, "12050": 931286400.0, "12055": 938790208.0, "12060": 974349248.0, "12065": 965845696.0, "12070": 968239104.0, "12075": 949940224.0, "12080": 953231552.0, "12085": 972342592.0, "12090": 964042304.0, "12095": 963552832.0, "12100": 976701248.0, "12105": 950225216.0, "12110": 972205568.0, "12115": 968115136.0, "12120": 986359296.0, "12125": 980675264.0, "12130": 941589504.0, "12135": 955591040.0, "12140": 975756032.0, "12145": 979390528.0, "12150": 979435776.0, "12155": 961767936.0, "12160": 946323264.0, "12165": 968612864.0, "12170": 964129152.0, "12175": 967543936.0, "12180": 974942848.0, "12185": 952774592.0, "12190": 988261760.0, "12195": 970261312.0, "12200": 964981312.0, "12205": 968767232.0, "12210": 939144320.0, "12215": 996663488.0, "12220": 970037696.0, "12225": 979618880.0, "12230": 980198144.0, "12235": 950224576.0, "12240": 963441344.0, "12245": 965754240.0, "12250": 976793792.0, "12255": 968140288.0, "12260": 983346688.0, "12265": 931758592.0, "12270": 966457472.0, "12275": 979665408.0, "12280": 977629696.0, "12285": 970432320.0, "12290": 929095296.0, "12295": 976750144.0, "12300": 985683008.0, "12305": 969918016.0, "12310": 986105792.0, "12315": 936237952.0, "12320": 957828032.0, "12325": 966696000.0, "12330": 968270016.0, "12335": 963800896.0, "12340": 957433344.0, "12345": 944247872.0, 
"12350": 966701760.0, "12355": 975709440.0, "12360": 978832832.0, "12365": 964561280.0, "12370": 948990016.0, "12375": 963593536.0, "12380": 964789056.0, "12385": 973126912.0, "12390": 961501056.0, "12395": 961629824.0, "12400": 975307712.0, "12405": 976388032.0, "12410": 953154688.0, "12415": 962821184.0, "12420": 943854144.0, "12425": 948976640.0, "12430": 972157696.0, "12435": 969004352.0, "12440": 961893696.0, "12445": 951751744.0, "12450": 947021888.0, "12455": 981248448.0, "12460": 973989696.0, "12465": 954462784.0, "12470": 981144320.0, "12475": 958569728.0, "12480": 967084864.0, "12485": 978267776.0, "12490": 974168192.0, "12495": 969692160.0, "12500": 961573632.0, "12505": 943534528.0, "12510": 961013504.0, "12515": 969566080.0, "12520": 974194304.0, "12525": 972107840.0, "12530": 944608640.0, "12535": 976414272.0, "12540": 965917440.0, "12545": 972033856.0, "12550": 969671552.0, "12555": 941300736.0, "12560": 964469312.0, "12565": 947620608.0, "12570": 974443840.0, "12575": 963055232.0, "12580": 958104128.0, "12585": 964300352.0, "12590": 965924288.0, "12595": 978732672.0, "12600": 982093952.0, "12605": 949125824.0, "12610": 937745344.0, "12615": 962779264.0, "12620": 961201664.0, "12625": 966531136.0, "12630": 971046272.0, "12635": 962236416.0, "12640": 978541696.0, "12645": 969369920.0, "12650": 970161664.0, "12655": 964217216.0, "12660": 932390336.0, "12665": 956865664.0, "12670": 986180352.0, "12675": 965566464.0, "12680": 961126528.0, "12685": 951304256.0, "12690": 945491456.0, "12695": 978387648.0, "12700": 985277888.0, "12705": 958784640.0, "12710": 968294144.0, "12715": 956280512.0, "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", 
"12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan"}}, "mem-allocated-bytes": {"start_step": 1, "end_step": 13000, "step_interval": 5, "values": {"1": 12795811840.0, "5": 12795811840.0, "10": 12795811840.0, "15": 12795811840.0, "20": 12795811840.0, "25": 12795811840.0, "30": 12795811840.0, "35": 12795811840.0, "40": 12795811840.0, "45": 12795811840.0, "50": 12795811840.0, "55": 12795811840.0, "60": 12795811840.0, "65": 12795811840.0, "70": 12795811840.0, "75": 12795811840.0, "80": 12795811840.0, "85": 12795811840.0, "90": 12795811840.0, "95": 12795811840.0, "100": 12795811840.0, "105": 12795811840.0, "110": 12795811840.0, "115": 12795811840.0, "120": 12795811840.0, "125": 12795811840.0, "130": 12795811840.0, "135": 12795811840.0, "140": 12795811840.0, "145": 12795811840.0, "150": 12795811840.0, "155": 12795811840.0, "160": 12795811840.0, "165": 12795811840.0, "170": 12795811840.0, "175": 12795811840.0, "180": 12795811840.0, "185": 12795811840.0, "190": 12795811840.0, "195": 12795811840.0, "200": 12795811840.0, "205": 12795811840.0, "210": 12795811840.0, "215": 12795811840.0, "220": 12795811840.0, "225": 12795811840.0, "230": 12795811840.0, "235": 12795811840.0, "240": 12795811840.0, "245": 12795811840.0, "250": 12795811840.0, "255": 12795811840.0, "260": 12795811840.0, "265": 12795811840.0, "270": 12795811840.0, "275": 12795811840.0, "280": 12795811840.0, "285": 12795811840.0, "290": 12795811840.0, "295": 12795811840.0, "300": 12795811840.0, "305": 12795811840.0, "310": 
12795811840.0, "315": 12795811840.0, "320": 12795811840.0, "325": 12795811840.0, "330": 12795811840.0, "335": 12795811840.0, "340": 12795811840.0, "345": 12795811840.0, "350": 12795811840.0, "355": 12795811840.0, "360": 12795811840.0, "365": 12795811840.0, "370": 12795811840.0, "375": 12795811840.0, "380": 12795811840.0, "385": 12795811840.0, "390": 12795811840.0, "395": 12795811840.0, "400": 12795811840.0, "405": 12795811840.0, "410": 12795811840.0, "415": 12795811840.0, "420": 12795811840.0, "425": 12795811840.0, "430": 12795811840.0, "435": 12795811840.0, "440": 12795811840.0, "445": 12795811840.0, "450": 12795811840.0, "455": 12795811840.0, "460": 12795811840.0, "465": 12795811840.0, "470": 12795811840.0, "475": 12795811840.0, "480": 12795811840.0, "485": 12795811840.0, "490": 12795811840.0, "495": 12795811840.0, "500": 12795811840.0, "505": 12795811840.0, "510": 12795811840.0, "515": 12795811840.0, "520": 12795811840.0, "525": 12795811840.0, "530": 12795811840.0, "535": 12795811840.0, "540": 12795811840.0, "545": 12795811840.0, "550": 12795811840.0, "555": 12795811840.0, "560": 12795811840.0, "565": 12795811840.0, "570": 12795811840.0, "575": 12795811840.0, "580": 12795811840.0, "585": 12795811840.0, "590": 12795811840.0, "595": 12795811840.0, "600": 12795811840.0, "605": 12795811840.0, "610": 12795811840.0, "615": 12795811840.0, "620": 12795811840.0, "625": 12795811840.0, "630": 12795811840.0, "635": 12795811840.0, "640": 12795811840.0, "645": 12795811840.0, "650": 12795811840.0, "655": 12795811840.0, "660": 12795811840.0, "665": 12795811840.0, "670": 12795811840.0, "675": 12795811840.0, "680": 12795811840.0, "685": 12795811840.0, "690": 12795811840.0, "695": 12795811840.0, "700": 12795811840.0, "705": 12795811840.0, "710": 12795811840.0, "715": 12795811840.0, "720": 12795811840.0, "725": 12795811840.0, "730": 12795811840.0, "735": 12795811840.0, "740": 12795811840.0, "745": 12795811840.0, "750": 12795811840.0, "755": 12795811840.0, "760": 12795811840.0, 
"765": 12795811840.0, "770": 12795811840.0, "775": 12795811840.0, "780": 12795811840.0, "785": 12795811840.0, "790": 12795811840.0, "795": 12795811840.0, "800": 12795811840.0, "805": 12795811840.0, "810": 12795811840.0, "815": 12795811840.0, "820": 12795811840.0, "825": 12795811840.0, "830": 12795811840.0, "835": 12795811840.0, "840": 12795811840.0, "845": 12795811840.0, "850": 12795811840.0, "855": 12795811840.0, "860": 12795811840.0, "865": 12795811840.0, "870": 12795811840.0, "875": 12795811840.0, "880": 12795811840.0, "885": 12795811840.0, "890": 12795811840.0, "895": 12795811840.0, "900": 12795811840.0, "905": 12795811840.0, "910": 12795811840.0, "915": 12795811840.0, "920": 12795811840.0, "925": 12795811840.0, "930": 12795811840.0, "935": 12795811840.0, "940": 12795811840.0, "945": 12795811840.0, "950": 12795811840.0, "955": 12795811840.0, "960": 12795811840.0, "965": 12795811840.0, "970": 12795811840.0, "975": 12795811840.0, "980": 12795811840.0, "985": 12795811840.0, "990": 12795811840.0, "995": 12795811840.0, "1000": 12795811840.0, "1005": 12795811840.0, "1010": 12795811840.0, "1015": 12795811840.0, "1020": 12795811840.0, "1025": 12795811840.0, "1030": 12795811840.0, "1035": 12795811840.0, "1040": 12795811840.0, "1045": 12795811840.0, "1050": 12795811840.0, "1055": 12795811840.0, "1060": 12795811840.0, "1065": 12795811840.0, "1070": 12795811840.0, "1075": 12795811840.0, "1080": 12795811840.0, "1085": 12795811840.0, "1090": 12795811840.0, "1095": 12795811840.0, "1100": 12795811840.0, "1105": 12795811840.0, "1110": 12795811840.0, "1115": 12795811840.0, "1120": 12795811840.0, "1125": 12795811840.0, "1130": 12795811840.0, "1135": 12795811840.0, "1140": 12795811840.0, "1145": 12795811840.0, "1150": 12795811840.0, "1155": 12795811840.0, "1160": 12795811840.0, "1165": 12795811840.0, "1170": 12795811840.0, "1175": 12795811840.0, "1180": 12795811840.0, "1185": 12795811840.0, "1190": 12795811840.0, "1195": 12795811840.0, "1200": 12795811840.0, "1205": 12795811840.0, 
"1210": 12795811840.0, "1215": 12795811840.0, "1220": 12795811840.0, "1225": 12795811840.0, "1230": 12795811840.0, "1235": 12795811840.0, "1240": 12795811840.0, "1245": 12795811840.0, "1250": 12795811840.0, "1255": 12795811840.0, "1260": 12795811840.0, "1265": 12795811840.0, "1270": 12795811840.0, "1275": 12795811840.0, "1280": 12795811840.0, "1285": 12795811840.0, "1290": 12795811840.0, "1295": 12795811840.0, "1300": 12795811840.0, "1305": 12795811840.0, "1310": 12795811840.0, "1315": 12795811840.0, "1320": 12795811840.0, "1325": 12795811840.0, "1330": 12795811840.0, "1335": 12795811840.0, "1340": 12795811840.0, "1345": 12795811840.0, "1350": 12795811840.0, "1355": 12795811840.0, "1360": 12795811840.0, "1365": 12795811840.0, "1370": 12795811840.0, "1375": 12795811840.0, "1380": 12795811840.0, "1385": 12795811840.0, "1390": 12795811840.0, "1395": 12795811840.0, "1400": 12795811840.0, "1405": 12795811840.0, "1410": 12795811840.0, "1415": 12795811840.0, "1420": 12795811840.0, "1425": 12795811840.0, "1430": 12795811840.0, "1435": 12795811840.0, "1440": 12795811840.0, "1445": 12795811840.0, "1450": 12795811840.0, "1455": 12795811840.0, "1460": 12795811840.0, "1465": 12795811840.0, "1470": 12795811840.0, "1475": 12795811840.0, "1480": 12795811840.0, "1485": 12795811840.0, "1490": 12795811840.0, "1495": 12795811840.0, "1500": 12795811840.0, "1505": 12795811840.0, "1510": 12795811840.0, "1515": 12795811840.0, "1520": 12795811840.0, "1525": 12795811840.0, "1530": 12795811840.0, "1535": 12795811840.0, "1540": 12795811840.0, "1545": 12795811840.0, "1550": 12795811840.0, "1555": 12795811840.0, "1560": 12795811840.0, "1565": 12795811840.0, "1570": 12795811840.0, "1575": 12795811840.0, "1580": 12795811840.0, "1585": 12795811840.0, "1590": 12795811840.0, "1595": 12795811840.0, "1600": 12795811840.0, "1605": 12795811840.0, "1610": 12795811840.0, "1615": 12795811840.0, "1620": 12795811840.0, "1625": 12795811840.0, "1630": 12795811840.0, "1635": 12795811840.0, "1640": 
12795811840.0, "1645": 12795811840.0, "1650": 12795811840.0, "1655": 12795811840.0, "1660": 12795811840.0, "1665": 12795811840.0, "1670": 12795811840.0, "1675": 12795811840.0, "1680": 12795811840.0, "1685": 12795811840.0, "1690": 12795811840.0, "1695": 12795811840.0, "1700": 12795811840.0, "1705": 12795811840.0, "1710": 12795811840.0, "1715": 12795811840.0, "1720": 12795811840.0, "1725": 12795811840.0, "1730": 12795811840.0, "1735": 12795811840.0, "1740": 12795811840.0, "1745": 12795811840.0, "1750": 12795811840.0, "1755": 12795811840.0, "1760": 12795811840.0, "1765": 12795811840.0, "1770": 12795811840.0, "1775": 12795811840.0, "1780": 12795811840.0, "1785": 12795811840.0, "1790": 12795811840.0, "1795": 12795811840.0, "1800": 12795811840.0, "1805": 12795811840.0, "1810": 12795811840.0, "1815": 12795811840.0, "1820": 12795811840.0, "1825": 12795811840.0, "1830": 12795811840.0, "1835": 12795811840.0, "1840": 12795811840.0, "1845": 12795811840.0, "1850": 12795811840.0, "1855": 12795811840.0, "1860": 12795811840.0, "1865": 12795811840.0, "1870": 12795811840.0, "1875": 12795811840.0, "1880": 12795811840.0, "1885": 12795811840.0, "1890": 12795811840.0, "1895": 12795811840.0, "1900": 12795811840.0, "1905": 12795811840.0, "1910": 12795811840.0, "1915": 12795811840.0, "1920": 12795811840.0, "1925": 12795811840.0, "1930": 12795811840.0, "1935": 12795811840.0, "1940": 12795811840.0, "1945": 12795811840.0, "1950": 12795811840.0, "1955": 12795811840.0, "1960": 12795811840.0, "1965": 12795811840.0, "1970": 12795811840.0, "1975": 12795811840.0, "1980": 12795811840.0, "1985": 12795811840.0, "1990": 12795811840.0, "1995": 12795811840.0, "2000": 12795811840.0, "2005": 12795811840.0, "2010": 12795811840.0, "2015": 12795811840.0, "2020": 12795811840.0, "2025": 12795811840.0, "2030": 12795811840.0, "2035": 12795811840.0, "2040": 12795811840.0, "2045": 12795811840.0, "2050": 12795811840.0, "2055": 12795811840.0, "2060": 12795811840.0, "2065": 12795811840.0, "2070": 12795811840.0, 
"2075": 12795811840.0, "2080": 12795811840.0, "2085": 12795811840.0, "2090": 12795811840.0, "2095": 12795811840.0, "2100": 12795811840.0, "2105": 12795811840.0, "2110": 12795811840.0, "2115": 12795811840.0, "2120": 12795811840.0, "2125": 12795811840.0, "2130": 12795811840.0, "2135": 12795811840.0, "2140": 12795811840.0, "2145": 12795811840.0, "2150": 12795811840.0, "2155": 12795811840.0, "2160": 12795811840.0, "2165": 12795811840.0, "2170": 12795811840.0, "2175": 12795811840.0, "2180": 12795811840.0, "2185": 12795811840.0, "2190": 12795811840.0, "2195": 12795811840.0, "2200": 12795811840.0, "2205": 12795811840.0, "2210": 12795811840.0, "2215": 12795811840.0, "2220": 12795811840.0, "2225": 12795811840.0, "2230": 12795811840.0, "2235": 12795811840.0, "2240": 12795811840.0, "2245": 12795811840.0, "2250": 12795811840.0, "2255": 12795811840.0, "2260": 12795811840.0, "2265": 12795811840.0, "2270": 12795811840.0, "2275": 12795811840.0, "2280": 12795811840.0, "2285": 12795811840.0, "2290": 12795811840.0, "2295": 12795811840.0, "2300": 12795811840.0, "2305": 12795811840.0, "2310": 12795811840.0, "2315": 12795811840.0, "2320": 12795811840.0, "2325": 12795811840.0, "2330": 12795811840.0, "2335": 12795811840.0, "2340": 12795811840.0, "2345": 12795811840.0, "2350": 12795811840.0, "2355": 12795811840.0, "2360": 12795811840.0, "2365": 12795811840.0, "2370": 12795811840.0, "2375": 12795811840.0, "2380": 12795811840.0, "2385": 12795811840.0, "2390": 12795811840.0, "2395": 12795811840.0, "2400": 12795811840.0, "2405": 12795811840.0, "2410": 12795811840.0, "2415": 12795811840.0, "2420": 12795811840.0, "2425": 12795811840.0, "2430": 12795811840.0, "2435": 12795811840.0, "2440": 12795811840.0, "2445": 12795811840.0, "2450": 12795811840.0, "2455": 12795811840.0, "2460": 12795811840.0, "2465": 12795811840.0, "2470": 12795811840.0, "2475": 12795811840.0, "2480": 12795811840.0, "2485": 12795811840.0, "2490": 12795811840.0, "2495": 12795811840.0, "2500": 12795811840.0, "2505": 
12795811840.0, "2510": 12795811840.0, "2515": 12795811840.0, "2520": 12795811840.0, "2525": 12795811840.0, "2530": 12795811840.0, "2535": 12795811840.0, "2540": 12795811840.0, "2545": 12795811840.0, "2550": 12795811840.0, "2555": 12795811840.0, "2560": 12795811840.0, "2565": 12795811840.0, "2570": 12795811840.0, "2575": 12795811840.0, "2580": 12795811840.0, "2585": 12795811840.0, "2590": 12795811840.0, "2595": 12795811840.0, "2600": 12795811840.0, "2605": 12795811840.0, "2610": 12795811840.0, "2615": 12795811840.0, "2620": 12795811840.0, "2625": 12795811840.0, "2630": 12795811840.0, "2635": 12795811840.0, "2640": 12795811840.0, "2645": 12795811840.0, "2650": 12795811840.0, "2655": 12795811840.0, "2660": 12795811840.0, "2665": 12795811840.0, "2670": 12795811840.0, "2675": 12795811840.0, "2680": 12795811840.0, "2685": 12795811840.0, "2690": 12795811840.0, "2695": 12795811840.0, "2700": 12795811840.0, "2705": 12795811840.0, "2710": 12795811840.0, "2715": 12795811840.0, "2720": 12795811840.0, "2725": 12795811840.0, "2730": 12795811840.0, "2735": 12795811840.0, "2740": 12795811840.0, "2745": 12795811840.0, "2750": 12795811840.0, "2755": 12795811840.0, "2760": 12795811840.0, "2765": 12795811840.0, "2770": 12795811840.0, "2775": 12795811840.0, "2780": 12795811840.0, "2785": 12795811840.0, "2790": 12795811840.0, "2795": 12795811840.0, "2800": 12795811840.0, "2805": 12795811840.0, "2810": 12795811840.0, "2815": 12795811840.0, "2820": 12795811840.0, "2825": 12795811840.0, "2830": 12795811840.0, "2835": 12795811840.0, "2840": 12795811840.0, "2845": 12795811840.0, "2850": 12795811840.0, "2855": 12795811840.0, "2860": 12795811840.0, "2865": 12795811840.0, "2870": 12795811840.0, "2875": 12795811840.0, "2880": 12795811840.0, "2885": 12795811840.0, "2890": 12795811840.0, "2895": 12795811840.0, "2900": 12795811840.0, "2905": 12795811840.0, "2910": 12795811840.0, "2915": 12795811840.0, "2920": 12795811840.0, "2925": 12795811840.0, "2930": 12795811840.0, "2935": 12795811840.0, 
"2940": 12795811840.0, "2945": 12795811840.0, "2950": 12795811840.0, "2955": 12795811840.0, "2960": 12795811840.0, "2965": 12795811840.0, "2970": 12795811840.0, "2975": 12795811840.0, "2980": 12795811840.0, "2985": 12795811840.0, "2990": 12795811840.0, "2995": 12795811840.0, "3000": 12795811840.0, "3005": 12795811840.0, "3010": 12795811840.0, "3015": 12795811840.0, "3020": 12795811840.0, "3025": 12795811840.0, "3030": 12795811840.0, "3035": 12795811840.0, "3040": 12795811840.0, "3045": 12795811840.0, "3050": 12795811840.0, "3055": 12795811840.0, "3060": 12795811840.0, "3065": 12795811840.0, "3070": 12795811840.0, "3075": 12795811840.0, "3080": 12795811840.0, "3085": 12795811840.0, "3090": 12795811840.0, "3095": 12795811840.0, "3100": 12795811840.0, "3105": 12795811840.0, "3110": 12795811840.0, "3115": 12795811840.0, "3120": 12795811840.0, "3125": 12795811840.0, "3130": 12795811840.0, "3135": 12795811840.0, "3140": 12795811840.0, "3145": 12795811840.0, "3150": 12795811840.0, "3155": 12795811840.0, "3160": 12795811840.0, "3165": 12795811840.0, "3170": 12795811840.0, "3175": 12795811840.0, "3180": 12795811840.0, "3185": 12795811840.0, "3190": 12795811840.0, "3195": 12795811840.0, "3200": 12795811840.0, "3205": 12795811840.0, "3210": 12795811840.0, "3215": 12795811840.0, "3220": 12795811840.0, "3225": 12795811840.0, "3230": 12795811840.0, "3235": 12795811840.0, "3240": 12795811840.0, "3245": 12795811840.0, "3250": 12795811840.0, "3255": 12795811840.0, "3260": 12795811840.0, "3265": 12795811840.0, "3270": 12795811840.0, "3275": 12795811840.0, "3280": 12795811840.0, "3285": 12795811840.0, "3290": 12795811840.0, "3295": 12795811840.0, "3300": 12795811840.0, "3305": 12795811840.0, "3310": 12795811840.0, "3315": 12795811840.0, "3320": 12795811840.0, "3325": 12795811840.0, "3330": 12795811840.0, "3335": 12795811840.0, "3340": 12795811840.0, "3345": 12795811840.0, "3350": 12795811840.0, "3355": 12795811840.0, "3360": 12795811840.0, "3365": 12795811840.0, "3370": 
12795811840.0, "3375": 12795811840.0, "3380": 12795811840.0, "3385": 12795811840.0, "3390": 12795811840.0, "3395": 12795811840.0, "3400": 12795811840.0, "3405": 12795811840.0, "3410": 12795811840.0, "3415": 12795811840.0, "3420": 12795811840.0, "3425": 12795811840.0, "3430": 12795811840.0, "3435": 12795811840.0, "3440": 12795811840.0, "3445": 12795811840.0, "3450": 12795811840.0, "3455": 12795811840.0, "3460": 12795811840.0, "3465": 12795811840.0, "3470": 12795811840.0, "3475": 12795811840.0, "3480": 12795811840.0, "3485": 12795811840.0, "3490": 12795811840.0, "3495": 12795811840.0, "3500": 12795811840.0, "3505": 12795811840.0, "3510": 12795811840.0, "3515": 12795811840.0, "3520": 12795811840.0, "3525": 12795811840.0, "3530": 12795811840.0, "3535": 12795811840.0, "3540": 12795811840.0, "3545": 12795811840.0, "3550": 12795811840.0, "3555": 12795811840.0, "3560": 12795811840.0, "3565": 12795811840.0, "3570": 12795789312.0, "3575": 12795789312.0, "3580": 12795789312.0, "3585": 12795789312.0, "3590": 12795789312.0, "3595": 12795789312.0, "3600": 12795789312.0, "3605": 12795789312.0, "3610": 12795789312.0, "3615": 12795789312.0, "3620": 12795789312.0, "3625": 12795789312.0, "3630": 12795789312.0, "3635": 12795789312.0, "3640": 12795789312.0, "3645": 12795789312.0, "3650": 12795789312.0, "3655": 12795789312.0, "3660": 12795789312.0, "3665": 12795789312.0, "3670": 12795789312.0, "3675": 12795789312.0, "3680": 12795789312.0, "3685": 12795789312.0, "3690": 12795789312.0, "3695": 12795789312.0, "3700": 12795789312.0, "3705": 12795789312.0, "3710": 12795789312.0, "3715": 12795789312.0, "3720": 12795789312.0, "3725": 12795789312.0, "3730": 12795789312.0, "3735": 12795789312.0, "3740": 12795789312.0, "3745": 12795789312.0, "3750": 12795789312.0, "3755": 12795789312.0, "3760": 12795789312.0, "3765": 12795789312.0, "3770": 12795789312.0, "3775": 12795789312.0, "3780": 12795789312.0, "3785": 12795789312.0, "3790": 12795789312.0, "3795": 12795789312.0, "3800": 12795789312.0, 
"3805": 12795789312.0, "3810": 12795789312.0, "3815": 12795789312.0, "3820": 12795789312.0, "3825": 12795789312.0, "3830": 12795789312.0, "3835": 12795789312.0, "3840": 12795789312.0, "3845": 12795789312.0, "3850": 12795789312.0, "3855": 12795789312.0, "3860": 12795789312.0, "3865": 12795789312.0, "3870": 12795789312.0, "3875": 12795789312.0, "3880": 12795789312.0, "3885": 12795789312.0, "3890": 12795789312.0, "3895": 12795789312.0, "3900": 12795789312.0, "3905": 12795789312.0, "3910": 12795789312.0, "3915": 12795789312.0, "3920": 12795789312.0, "3925": 12795789312.0, "3930": 12795789312.0, "3935": 12795789312.0, "3940": 12795789312.0, "3945": 12795789312.0, "3950": 12795789312.0, "3955": 12795789312.0, "3960": 12795789312.0, "3965": 12795789312.0, "3970": 12795789312.0, "3975": 12795789312.0, "3980": 12795789312.0, "3985": 12795789312.0, "3990": 12795789312.0, "3995": 12795789312.0, "4000": 12795789312.0, "4005": 12795789312.0, "4010": 12795789312.0, "4015": 12795789312.0, "4020": 12795789312.0, "4025": 12795789312.0, "4030": 12795789312.0, "4035": 12795789312.0, "4040": 12795789312.0, "4045": 12795789312.0, "4050": 12795789312.0, "4055": 12795789312.0, "4060": 12795789312.0, "4065": 12795789312.0, "4070": 12795789312.0, "4075": 12795789312.0, "4080": 12795789312.0, "4085": 12795789312.0, "4090": 12795789312.0, "4095": 12795789312.0, "4100": 12795789312.0, "4105": 12795789312.0, "4110": 12795789312.0, "4115": 12795789312.0, "4120": 12795789312.0, "4125": 12795789312.0, "4130": 12795789312.0, "4135": 12795789312.0, "4140": 12795789312.0, "4145": 12795789312.0, "4150": 12795789312.0, "4155": 12795789312.0, "4160": 12795789312.0, "4165": 12795789312.0, "4170": 12795789312.0, "4175": 12795789312.0, "4180": 12795789312.0, "4185": 12795789312.0, "4190": 12795789312.0, "4195": 12795789312.0, "4200": 12795789312.0, "4205": 12795789312.0, "4210": 12795789312.0, "4215": 12795789312.0, "4220": 12795789312.0, "4225": 12795789312.0, "4230": 12795789312.0, "4235": 
12795789312.0, "4240": 12795789312.0, "4245": 12795789312.0, "4250": 12795789312.0, "4255": 12795789312.0, "4260": 12795789312.0, "4265": 12795789312.0, "4270": 12795789312.0, "4275": 12795789312.0, "4280": 12795789312.0, "4285": 12795789312.0, "4290": 12795789312.0, "4295": 12795789312.0, "4300": 12795789312.0, "4305": 12795789312.0, "4310": 12795789312.0, "4315": 12795789312.0, "4320": 12795789312.0, "4325": 12795789312.0, "4330": 12795789312.0, "4335": 12795789312.0, "4340": 12795789312.0, "4345": 12795789312.0, "4350": 12795789312.0, "4355": 12795789312.0, "4360": 12795789312.0, "4365": 12795789312.0, "4370": 12795789312.0, "4375": 12795789312.0, "4380": 12795789312.0, "4385": 12795789312.0, "4390": 12795789312.0, "4395": 12795789312.0, "4400": 12795789312.0, "4405": 12795789312.0, "4410": 12795789312.0, "4415": 12795789312.0, "4420": 12795789312.0, "4425": 12795789312.0, "4430": 12795789312.0, "4435": 12795789312.0, "4440": 12795789312.0, "4445": 12795789312.0, "4450": 12795789312.0, "4455": 12795789312.0, "4460": 12795789312.0, "4465": 12795789312.0, "4470": 12795789312.0, "4475": 12795789312.0, "4480": 12795789312.0, "4485": 12795789312.0, "4490": 12795789312.0, "4495": 12795789312.0, "4500": 12795789312.0, "4505": 12795789312.0, "4510": 12795789312.0, "4515": 12795789312.0, "4520": 12795789312.0, "4525": 12795789312.0, "4530": 12795789312.0, "4535": 12795789312.0, "4540": 12795789312.0, "4545": 12795789312.0, "4550": 12795789312.0, "4555": 12795789312.0, "4560": 12795789312.0, "4565": 12795789312.0, "4570": 12795789312.0, "4575": 12795789312.0, "4580": 12795789312.0, "4585": 12795789312.0, "4590": 12795789312.0, "4595": 12795789312.0, "4600": 12795789312.0, "4605": 12795789312.0, "4610": 12795789312.0, "4615": 12795789312.0, "4620": 12795789312.0, "4625": 12795789312.0, "4630": 12795789312.0, "4635": 12795789312.0, "4640": 12795789312.0, "4645": 12795789312.0, "4650": 12795789312.0, "4655": 12795789312.0, "4660": 12795789312.0, "4665": 12795789312.0, 
"4670": 12795789312.0, "4675": 12795789312.0, "4680": 12795789312.0, "4685": 12795789312.0, "4690": 12795789312.0, "4695": 12795789312.0, "4700": 12795789312.0, "4705": 12795789312.0, "4710": 12795789312.0, "4715": 12795789312.0, "4720": 12795789312.0, "4725": 12795789312.0, "4730": 12795789312.0, "4735": 12795789312.0, "4740": 12795789312.0, "4745": 12795789312.0, "4750": 12795789312.0, "4755": 12795789312.0, "4760": 12795789312.0, "4765": 12795789312.0, "4770": 12795789312.0, "4775": 12795789312.0, "4780": 12795789312.0, "4785": 12795789312.0, "4790": 12795789312.0, "4795": 12795789312.0, "4800": 12795789312.0, "4805": 12795789312.0, "4810": 12795789312.0, "4815": 12795789312.0, "4820": 12795789312.0, "4825": 12795789312.0, "4830": 12795789312.0, "4835": 12795789312.0, "4840": 12795789312.0, "4845": 12795789312.0, "4850": 12795789312.0, "4855": 12795789312.0, "4860": 12795789312.0, "4865": 12795789312.0, "4870": 12795789312.0, "4875": 12795789312.0, "4880": 12795789312.0, "4885": 12795789312.0, "4890": 12795789312.0, "4895": 12795789312.0, "4900": 12795789312.0, "4905": 12795789312.0, "4910": 12795789312.0, "4915": 12795789312.0, "4920": 12795789312.0, "4925": 12795789312.0, "4930": 12795789312.0, "4935": 12795789312.0, "4940": 12795789312.0, "4945": 12795789312.0, "4950": 12795789312.0, "4955": 12795789312.0, "4960": 12795789312.0, "4965": 12795789312.0, "4970": 12795789312.0, "4975": 12795789312.0, "4980": 12795789312.0, "4985": 12795789312.0, "4990": 12795789312.0, "4995": 12795789312.0, "5000": 12795789312.0, "5005": 12795789312.0, "5010": 12795789312.0, "5015": 12795789312.0, "5020": 12795789312.0, "5025": 12795789312.0, "5030": 12795789312.0, "5035": 12795789312.0, "5040": 12795789312.0, "5045": 12795789312.0, "5050": 12795789312.0, "5055": 12795789312.0, "5060": 12795789312.0, "5065": 12795789312.0, "5070": 12795789312.0, "5075": 12795789312.0, "5080": 12795789312.0, "5085": 12795789312.0, "5090": 12795789312.0, "5095": 12795789312.0, "5100": 
12795789312.0, "5105": 12795789312.0, "5110": 12795789312.0, "5115": 12795789312.0, "5120": 12795789312.0, "5125": 12795789312.0, "5130": 12795789312.0, "5135": 12795789312.0, "5140": 12795789312.0, "5145": 12795789312.0, "5150": 12795789312.0, "5155": 12795789312.0, "5160": 12795789312.0, "5165": 12795789312.0, "5170": 12795789312.0, "5175": 12795789312.0, "5180": 12795789312.0, "5185": 12795789312.0, "5190": 12795789312.0, "5195": 12795789312.0, "5200": 12795789312.0, "5205": 12795789312.0, "5210": 12795789312.0, "5215": 12795789312.0, "5220": 12795789312.0, "5225": 12795789312.0, "5230": 12795789312.0, "5235": 12795789312.0, "5240": 12795789312.0, "5245": 12795789312.0, "5250": 12795789312.0, "5255": 12795789312.0, "5260": 12795789312.0, "5265": 12795789312.0, "5270": 12795789312.0, "5275": 12795789312.0, "5280": 12795789312.0, "5285": 12795789312.0, "5290": 12795789312.0, "5295": 12795789312.0, "5300": 12795789312.0, "5305": 12795789312.0, "5310": 12795789312.0, "5315": 12795789312.0, "5320": 12795789312.0, "5325": 12795789312.0, "5330": 12795789312.0, "5335": 12795789312.0, "5340": 12795789312.0, "5345": 12795789312.0, "5350": 12795789312.0, "5355": 12795789312.0, "5360": 12795789312.0, "5365": 12795789312.0, "5370": 12795789312.0, "5375": 12795789312.0, "5380": 12795789312.0, "5385": 12795789312.0, "5390": 12795789312.0, "5395": 12795789312.0, "5400": 12795789312.0, "5405": 12795789312.0, "5410": 12795789312.0, "5415": 12795789312.0, "5420": 12795789312.0, "5425": 12795789312.0, "5430": 12795789312.0, "5435": 12795789312.0, "5440": 12795789312.0, "5445": 12795789312.0, "5450": 12795789312.0, "5455": 12795789312.0, "5460": 12795789312.0, "5465": 12795789312.0, "5470": 12795789312.0, "5475": 12795789312.0, "5480": 12795789312.0, "5485": 12795789312.0, "5490": 12795789312.0, "5495": 12795789312.0, "5500": 12795789312.0, "5505": 12795789312.0, "5510": 12795789312.0, "5515": 12795789312.0, "5520": 12795789312.0, "5525": 12795789312.0, "5530": 12795789312.0, 
"5535": 12795789312.0, "5540": 12795789312.0, "5545": 12795789312.0, "5550": 12795789312.0, "5555": 12795789312.0, "5560": 12795789312.0, "5565": 12795789312.0, "5570": 12795789312.0, "5575": 12795789312.0, "5580": 12795789312.0, "5585": 12795789312.0, "5590": 12795789312.0, "5595": 12795789312.0, "5600": 12795789312.0, "5605": 12795789312.0, "5610": 12795789312.0, "5615": 12795789312.0, "5620": 12795789312.0, "5625": 12795789312.0, "5630": 12795789312.0, "5635": 12795789312.0, "5640": 12795789312.0, "5645": 12795789312.0, "5650": 12795789312.0, "5655": 12795789312.0, "5660": 12795789312.0, "5665": 12795789312.0, "5670": 12795789312.0, "5675": 12795789312.0, "5680": 12795789312.0, "5685": 12795789312.0, "5690": 12795789312.0, "5695": 12795789312.0, "5700": 12795789312.0, "5705": 12795789312.0, "5710": 12795789312.0, "5715": 12795789312.0, "5720": 12795789312.0, "5725": 12795789312.0, "5730": 12795789312.0, "5735": 12795789312.0, "5740": 12795789312.0, "5745": 12795789312.0, "5750": 12795789312.0, "5755": 12795789312.0, "5760": 12795789312.0, "5765": 12795789312.0, "5770": 12795789312.0, "5775": 12795789312.0, "5780": 12795789312.0, "5785": 12795789312.0, "5790": 12795789312.0, "5795": 12795789312.0, "5800": 12795789312.0, "5805": 12795789312.0, "5810": 12795789312.0, "5815": 12795789312.0, "5820": 12795789312.0, "5825": 12795789312.0, "5830": 12795789312.0, "5835": 12795789312.0, "5840": 12795789312.0, "5845": 12795789312.0, "5850": 12795789312.0, "5855": 12795789312.0, "5860": 12795789312.0, "5865": 12795789312.0, "5870": 12795789312.0, "5875": 12795789312.0, "5880": 12795789312.0, "5885": 12795789312.0, "5890": 12795789312.0, "5895": 12795789312.0, "5900": 12795789312.0, "5905": 12795789312.0, "5910": 12795789312.0, "5915": 12795789312.0, "5920": 12795789312.0, "5925": 12795789312.0, "5930": 12795789312.0, "5935": 12795789312.0, "5940": 12795789312.0, "5945": 12795789312.0, "5950": 12795789312.0, "5955": 12795789312.0, "5960": 12795789312.0, "5965": 
12795789312.0, "5970": 12795789312.0, "5975": 12795789312.0, "5980": 12795789312.0, "5985": 12795789312.0, "5990": 12795789312.0, "5995": 12795789312.0, "6000": 12795789312.0, "6005": 12795789312.0, "6010": 12795789312.0, "6015": 12795789312.0, "6020": 12795789312.0, "6025": 12795789312.0, "6030": 12795789312.0, "6035": 12795789312.0, "6040": 12795789312.0, "6045": 12795789312.0, "6050": 12795789312.0, "6055": 12795789312.0, "6060": 12795789312.0, "6065": 12795789312.0, "6070": 12795789312.0, "6075": 12795789312.0, "6080": 12795789312.0, "6085": 12795789312.0, "6090": 12795789312.0, "6095": 12795789312.0, "6100": 12795789312.0, "6105": 12795789312.0, "6110": 12795789312.0, "6115": 12795789312.0, "6120": 12795789312.0, "6125": 12795789312.0, "6130": 12795789312.0, "6135": 12795789312.0, "6140": 12795789312.0, "6145": 12795789312.0, "6150": 12795789312.0, "6155": 12795789312.0, "6160": 12795789312.0, "6165": 12795789312.0, "6170": 12795789312.0, "6175": 12795789312.0, "6180": 12795789312.0, "6185": 12795789312.0, "6190": 12795789312.0, "6195": 12795789312.0, "6200": 12795789312.0, "6205": 12795789312.0, "6210": 12795789312.0, "6215": 12795789312.0, "6220": 12795789312.0, "6225": 12795789312.0, "6230": 12795789312.0, "6235": 12795789312.0, "6240": 12795789312.0, "6245": 12795789312.0, "6250": 12795789312.0, "6255": 12795789312.0, "6260": 12795789312.0, "6265": 12795789312.0, "6270": 12795789312.0, "6275": 12795789312.0, "6280": 12795789312.0, "6285": 12795789312.0, "6290": 12795789312.0, "6295": 12795789312.0, "6300": 12795789312.0, "6305": 12795789312.0, "6310": 12795789312.0, "6315": 12795789312.0, "6320": 12795789312.0, "6325": 12795789312.0, "6330": 12795789312.0, "6335": 12795789312.0, "6340": 12795789312.0, "6345": 12795789312.0, "6350": 12795789312.0, "6355": 12795789312.0, "6360": 12795789312.0, "6365": 12795789312.0, "6370": 12795789312.0, "6375": 12795789312.0, "6380": 12795789312.0, "6385": 12795789312.0, "6390": 12795789312.0, "6395": 12795789312.0, 
"6400": 12795789312.0, "6405": 12795789312.0, "6410": 12795789312.0, "6415": 12795789312.0, "6420": 12795789312.0, "6425": 12795789312.0, "6430": 12795789312.0, "6435": 12795789312.0, "6440": 12795789312.0, "6445": 12795789312.0, "6450": 12795789312.0, "6455": 12795789312.0, "6460": 12795789312.0, "6465": 12795789312.0, "6470": 12795789312.0, "6475": 12795789312.0, "6480": 12795789312.0, "6485": 12795789312.0, "6490": 12795789312.0, "6495": 12795789312.0, "6500": 12795789312.0, "6505": 12795789312.0, "6510": 12795789312.0, "6515": 12795789312.0, "6520": 12795789312.0, "6525": 12795789312.0, "6530": 12795789312.0, "6535": 12795789312.0, "6540": 12795789312.0, "6545": 12795789312.0, "6550": 12795789312.0, "6555": 12795789312.0, "6560": 12795789312.0, "6565": 12795789312.0, "6570": 12795789312.0, "6575": 12795789312.0, "6580": 12795789312.0, "6585": 12795789312.0, "6590": 12795789312.0, "6595": 12795789312.0, "6600": 12795789312.0, "6605": 12795789312.0, "6610": 12795789312.0, "6615": 12795789312.0, "6620": 12795789312.0, "6625": 12795789312.0, "6630": 12795789312.0, "6635": 12795789312.0, "6640": 12795789312.0, "6645": 12795789312.0, "6650": 12795789312.0, "6655": 12795789312.0, "6660": 12795789312.0, "6665": 12795789312.0, "6670": 12795789312.0, "6675": 12795789312.0, "6680": 12795789312.0, "6685": 12795789312.0, "6690": 12795789312.0, "6695": 12795789312.0, "6700": 12795789312.0, "6705": 12795789312.0, "6710": 12795789312.0, "6715": 12795789312.0, "6720": 12795789312.0, "6725": 12795789312.0, "6730": 12795789312.0, "6735": 12795789312.0, "6740": 12795789312.0, "6745": 12795789312.0, "6750": 12795789312.0, "6755": 12795789312.0, "6760": 12795789312.0, "6765": 12795789312.0, "6770": 12795789312.0, "6775": 12795789312.0, "6780": 12795789312.0, "6785": 12795789312.0, "6790": 12795789312.0, "6795": 12795789312.0, "6800": 12795789312.0, "6805": 12795789312.0, "6810": 12795789312.0, "6815": 12795789312.0, "6820": 12795789312.0, "6825": 12795789312.0, "6830": 
12795789312.0, "6835": 12795789312.0, "6840": 12795789312.0, "6845": 12795789312.0, "6850": 12795789312.0, "6855": 12795789312.0, "6860": 12795789312.0, "6865": 12795789312.0, "6870": 12795789312.0, "6875": 12795789312.0, "6880": 12795789312.0, "6885": 12795789312.0, "6890": 12795789312.0, "6895": 12795789312.0, "6900": 12795789312.0, "6905": 12795789312.0, "6910": 12795789312.0, "6915": 12795789312.0, "6920": 12795789312.0, "6925": 12795789312.0, "6930": 12795789312.0, "6935": 12795789312.0, "6940": 12795789312.0, "6945": 12795789312.0, "6950": 12795789312.0, "6955": 12795789312.0, "6960": 12795789312.0, "6965": 12795789312.0, "6970": 12795789312.0, "6975": 12795789312.0, "6980": 12795789312.0, "6985": 12795789312.0, "6990": 12795789312.0, "6995": 12795789312.0, "7000": 12795789312.0, "7005": 12795789312.0, "7010": 12795789312.0, "7015": 12795789312.0, "7020": 12795789312.0, "7025": 12795789312.0, "7030": 12795789312.0, "7035": 12795789312.0, "7040": 12795789312.0, "7045": 12795789312.0, "7050": 12795789312.0, "7055": 12795789312.0, "7060": 12795789312.0, "7065": 12795789312.0, "7070": 12795789312.0, "7075": 12795789312.0, "7080": 12795789312.0, "7085": 12795789312.0, "7090": 12795789312.0, "7095": 12795789312.0, "7100": 12795789312.0, "7105": 12795789312.0, "7110": 12795789312.0, "7115": 12795789312.0, "7120": 12795789312.0, "7125": 12795789312.0, "7130": 12795789312.0, "7135": 12795789312.0, "7140": 12795789312.0, "7145": 12795789312.0, "7150": 12795789312.0, "7155": 12795789312.0, "7160": 12795789312.0, "7165": 12795789312.0, "7170": 12795789312.0, "7175": 12795789312.0, "7180": 12795789312.0, "7185": 12795789312.0, "7190": 12795789312.0, "7195": 12795789312.0, "7200": 12795789312.0, "7205": 12795789312.0, "7210": 12795789312.0, "7215": 12795789312.0, "7220": 12795789312.0, "7225": 12795789312.0, "7230": 12795789312.0, "7235": 12795789312.0, "7240": 12795789312.0, "7245": 12795789312.0, "7250": 12795789312.0, "7255": 12795789312.0, "7260": 12795789312.0, 
"7265": 12795789312.0, "7270": 12795789312.0, "7275": 12795789312.0, "7280": 12795789312.0, "7285": 12795789312.0, "7290": 12795789312.0, "7295": 12795789312.0, "7300": 12795789312.0, "7305": 12795789312.0, "7310": 12795789312.0, "7315": 12795789312.0, "7320": 12795789312.0, "7325": 12795789312.0, "7330": 12795789312.0, "7335": 12795789312.0, "7340": 12795789312.0, "7345": 12795789312.0, "7350": 12795789312.0, "7355": 12795789312.0, "7360": 12795789312.0, "7365": 12795789312.0, "7370": 12795789312.0, "7375": 12795789312.0, "7380": 12795789312.0, "7385": 12795789312.0, "7390": 12795789312.0, "7395": 12795789312.0, "7400": 12795789312.0, "7405": 12795789312.0, "7410": 12795789312.0, "7415": 12795789312.0, "7420": 12795789312.0, "7425": 12795789312.0, "7430": 12795789312.0, "7435": 12795789312.0, "7440": 12795789312.0, "7445": 12795789312.0, "7450": 12795789312.0, "7455": 12795789312.0, "7460": 12795789312.0, "7465": 12795789312.0, "7470": 12795789312.0, "7475": 12795789312.0, "7480": 12795789312.0, "7485": 12795789312.0, "7490": 12795789312.0, "7495": 12795789312.0, "7500": 12795789312.0, "7505": 12795789312.0, "7510": 12795789312.0, "7515": 12795789312.0, "7520": 12795789312.0, "7525": 12795789312.0, "7530": 12795789312.0, "7535": 12795789312.0, "7540": 12795789312.0, "7545": 12795789312.0, "7550": 12795789312.0, "7555": 12795789312.0, "7560": 12795789312.0, "7565": 12795789312.0, "7570": 12795789312.0, "7575": 12795789312.0, "7580": 12795789312.0, "7585": 12795789312.0, "7590": 12795789312.0, "7595": 12795789312.0, "7600": 12795789312.0, "7605": 12795789312.0, "7610": 12795789312.0, "7615": 12795789312.0, "7620": 12795789312.0, "7625": 12795789312.0, "7630": 12795789312.0, "7635": 12795789312.0, "7640": 12795789312.0, "7645": 12795789312.0, "7650": 12795789312.0, "7655": 12795789312.0, "7660": 12795789312.0, "7665": 12795789312.0, "7670": 12795789312.0, "7675": 12795789312.0, "7680": 12795789312.0, "7685": 12795789312.0, "7690": 12795789312.0, "7695": 
12795789312.0, "7700": 12795789312.0, "7705": 12795789312.0, "7710": 12795789312.0, "7715": 12795789312.0, "7720": 12795789312.0, "7725": 12795789312.0, "7730": 12795789312.0, "7735": 12795789312.0, "7740": 12795789312.0, "7745": 12795789312.0, "7750": 12795789312.0, "7755": 12795789312.0, "7760": 12795789312.0, "7765": 12795789312.0, "7770": 12795789312.0, "7775": 12795789312.0, "7780": 12795789312.0, "7785": 12795789312.0, "7790": 12795789312.0, "7795": 12795789312.0, "7800": 12795789312.0, "7805": 12795789312.0, "7810": 12795789312.0, "7815": 12795789312.0, "7820": 12795789312.0, "7825": 12795789312.0, "7830": 12795789312.0, "7835": 12795789312.0, "7840": 12795789312.0, "7845": 12795789312.0, "7850": 12795789312.0, "7855": 12795789312.0, "7860": 12795789312.0, "7865": 12795789312.0, "7870": 12795789312.0, "7875": 12795789312.0, "7880": 12795789312.0, "7885": 12795789312.0, "7890": 12795789312.0, "7895": 12795789312.0, "7900": 12795789312.0, "7905": 12795789312.0, "7910": 12795789312.0, "7915": 12795789312.0, "7920": 12795789312.0, "7925": 12795789312.0, "7930": 12795789312.0, "7935": 12795789312.0, "7940": 12795789312.0, "7945": 12795789312.0, "7950": 12795789312.0, "7955": 12795789312.0, "7960": 12795789312.0, "7965": 12795789312.0, "7970": 12795789312.0, "7975": 12795789312.0, "7980": 12795789312.0, "7985": 12795789312.0, "7990": 12795789312.0, "7995": 12795789312.0, "8000": 12795789312.0, "8005": 12795789312.0, "8010": 12795789312.0, "8015": 12795789312.0, "8020": 12795789312.0, "8025": 12795789312.0, "8030": 12795789312.0, "8035": 12795789312.0, "8040": 12795789312.0, "8045": 12795789312.0, "8050": 12795789312.0, "8055": 12795789312.0, "8060": 12795789312.0, "8065": 12795789312.0, "8070": 12795789312.0, "8075": 12795789312.0, "8080": 12795789312.0, "8085": 12795789312.0, "8090": 12795789312.0, "8095": 12795789312.0, "8100": 12795789312.0, "8105": 12795789312.0, "8110": 12795789312.0, "8115": 12795789312.0, "8120": 12795789312.0, "8125": 12795789312.0, 
"8130": 12795789312.0, "8135": 12795789312.0, "8140": 12795789312.0, "8145": 12795789312.0, "8150": 12795789312.0, "8155": 12795789312.0, "8160": 12795789312.0, "8165": 12795789312.0, "8170": 12795789312.0, "8175": 12795789312.0, "8180": 12795789312.0, "8185": 12795789312.0, "8190": 12795789312.0, "8195": 12795789312.0, "8200": 12795789312.0, "8205": 12795789312.0, "8210": 12795789312.0, "8215": 12795789312.0, "8220": 12795789312.0, "8225": 12795789312.0, "8230": 12795789312.0, "8235": 12795789312.0, "8240": 12795789312.0, "8245": 12795789312.0, "8250": 12795789312.0, "8255": 12795789312.0, "8260": 12795789312.0, "8265": 12795789312.0, "8270": 12795789312.0, "8275": 12795789312.0, "8280": 12795789312.0, "8285": 12795789312.0, "8290": 12795789312.0, "8295": 12795789312.0, "8300": 12795789312.0, "8305": 12795789312.0, "8310": 12795789312.0, "8315": 12795789312.0, "8320": 12795789312.0, "8325": 12795789312.0, "8330": 12795789312.0, "8335": 12795789312.0, "8340": 12795789312.0, "8345": 12795789312.0, "8350": 12795789312.0, "8355": 12795789312.0, "8360": 12795789312.0, "8365": 12795789312.0, "8370": 12795789312.0, "8375": 12795789312.0, "8380": 12795789312.0, "8385": 12795789312.0, "8390": 12795789312.0, "8395": 12795789312.0, "8400": 12795789312.0, "8405": 12795789312.0, "8410": 12795789312.0, "8415": 12795789312.0, "8420": 12795789312.0, "8425": 12795789312.0, "8430": 12795789312.0, "8435": 12795789312.0, "8440": 12795789312.0, "8445": 12795789312.0, "8450": 12795789312.0, "8455": 12795789312.0, "8460": 12795789312.0, "8465": 12795789312.0, "8470": 12795789312.0, "8475": 12795789312.0, "8480": 12795789312.0, "8485": 12795789312.0, "8490": 12795789312.0, "8495": 12795789312.0, "8500": 12795789312.0, "8505": 12795789312.0, "8510": 12795789312.0, "8515": 12795789312.0, "8520": 12795789312.0, "8525": 12795789312.0, "8530": 12795789312.0, "8535": 12795789312.0, "8540": 12795789312.0, "8545": 12795789312.0, "8550": 12795789312.0, "8555": 12795789312.0, "8560": 
12795789312.0, "8565": 12795789312.0, "8570": 12795789312.0, "8575": 12795789312.0, "8580": 12795789312.0, "8585": 12795789312.0, "8590": 12795789312.0, "8595": 12795789312.0, "8600": 12795789312.0, "8605": 12795789312.0, "8610": 12795789312.0, "8615": 12795789312.0, "8620": 12795789312.0, "8625": 12795789312.0, "8630": 12795789312.0, "8635": 12795789312.0, "8640": 12795789312.0, "8645": 12795789312.0, "8650": 12795789312.0, "8655": 12795789312.0, "8660": 12795789312.0, "8665": 12795789312.0, "8670": 12795789312.0, "8675": 12795789312.0, "8680": 12795789312.0, "8685": 12795789312.0, "8690": 12795789312.0, "8695": 12795789312.0, "8700": 12795789312.0, "8705": 12795789312.0, "8710": 12795789312.0, "8715": 12795789312.0, "8720": 12795789312.0, "8725": 12795789312.0, "8730": 12795789312.0, "8735": 12795789312.0, "8740": 12795789312.0, "8745": 12795789312.0, "8750": 12795789312.0, "8755": 12795789312.0, "8760": 12795789312.0, "8765": 12795789312.0, "8770": 12795789312.0, "8775": 12795789312.0, "8780": 12795789312.0, "8785": 12795789312.0, "8790": 12795789312.0, "8795": 12795789312.0, "8800": 12795789312.0, "8805": 12795789312.0, "8810": 12795789312.0, "8815": 12795789312.0, "8820": 12795789312.0, "8825": 12795789312.0, "8830": 12795789312.0, "8835": 12795789312.0, "8840": 12795789312.0, "8845": 12795789312.0, "8850": 12795789312.0, "8855": 12795789312.0, "8860": 12795789312.0, "8865": 12795789312.0, "8870": 12795789312.0, "8875": 12795789312.0, "8880": 12795789312.0, "8885": 12795789312.0, "8890": 12795789312.0, "8895": 12795789312.0, "8900": 12795789312.0, "8905": 12795789312.0, "8910": 12795789312.0, "8915": 12795789312.0, "8920": 12795789312.0, "8925": 12795789312.0, "8930": 12795789312.0, "8935": 12795789312.0, "8940": 12795789312.0, "8945": 12795789312.0, "8950": 12795789312.0, "8955": 12795789312.0, "8960": 12795789312.0, "8965": 12795789312.0, "8970": 12795789312.0, "8975": 12795789312.0, "8980": 12795789312.0, "8985": 12795789312.0, "8990": 12795789312.0, 
"8995": 12795789312.0, "9000": 12795789312.0, "9005": 12795789312.0, "9010": 12795789312.0, "9015": 12795789312.0, "9020": 12795789312.0, "9025": 12795789312.0, "9030": 12795789312.0, "9035": 12795789312.0, "9040": 12795789312.0, "9045": 12795789312.0, "9050": 12795789312.0, "9055": 12795789312.0, "9060": 12795789312.0, "9065": 12795789312.0, "9070": 12795789312.0, "9075": 12795789312.0, "9080": 12795789312.0, "9085": 12795789312.0, "9090": 12795789312.0, "9095": 12795789312.0, "9100": 12795789312.0, "9105": 12795789312.0, "9110": 12795789312.0, "9115": 12795789312.0, "9120": 12795789312.0, "9125": 12795789312.0, "9130": 12795789312.0, "9135": 12795789312.0, "9140": 12795789312.0, "9145": 12795789312.0, "9150": 12795789312.0, "9155": 12795789312.0, "9160": 12795789312.0, "9165": 12795789312.0, "9170": 12795789312.0, "9175": 12795789312.0, "9180": 12795789312.0, "9185": 12795789312.0, "9190": 12795789312.0, "9195": 12795789312.0, "9200": 12795789312.0, "9205": 12795789312.0, "9210": 12795789312.0, "9215": 12795789312.0, "9220": 12795789312.0, "9225": 12795789312.0, "9230": 12795789312.0, "9235": 12795789312.0, "9240": 12795789312.0, "9245": 12795789312.0, "9250": 12795789312.0, "9255": 12795789312.0, "9260": 12795789312.0, "9265": 12795789312.0, "9270": 12795789312.0, "9275": 12795789312.0, "9280": 12795789312.0, "9285": 12795789312.0, "9290": 12795789312.0, "9295": 12795789312.0, "9300": 12795789312.0, "9305": 12795789312.0, "9310": 12795789312.0, "9315": 12795789312.0, "9320": 12795789312.0, "9325": 12795789312.0, "9330": 12795789312.0, "9335": 12795789312.0, "9340": 12795789312.0, "9345": 12795789312.0, "9350": 12795789312.0, "9355": 12795789312.0, "9360": 12795789312.0, "9365": 12795789312.0, "9370": 12795789312.0, "9375": 12795789312.0, "9380": 12795789312.0, "9385": 12795789312.0, "9390": 12795789312.0, "9395": 12795789312.0, "9400": 12795789312.0, "9405": 12795789312.0, "9410": 12795789312.0, "9415": 12795789312.0, "9420": 12795789312.0, "9425": 
12795789312.0, "9430": 12795789312.0, "9435": 12795789312.0, "9440": 12795789312.0, "9445": 12795789312.0, "9450": 12795789312.0, "9455": 12795789312.0, "9460": 12795789312.0, "9465": 12795789312.0, "9470": 12795789312.0, "9475": 12795789312.0, "9480": 12795789312.0, "9485": 12795789312.0, "9490": 12795789312.0, "9495": 12795789312.0, "9500": 12795789312.0, "9505": 12795789312.0, "9510": 12795789312.0, "9515": 12795789312.0, "9520": 12795789312.0, "9525": 12795789312.0, "9530": 12795789312.0, "9535": 12795789312.0, "9540": 12795789312.0, "9545": 12795789312.0, "9550": 12795789312.0, "9555": 12795789312.0, "9560": 12795789312.0, "9565": 12795789312.0, "9570": 12795789312.0, "9575": 12795789312.0, "9580": 12795789312.0, "9585": 12795789312.0, "9590": 12795789312.0, "9595": 12795789312.0, "9600": 12795789312.0, "9605": 12795789312.0, "9610": 12795789312.0, "9615": 12795789312.0, "9620": 12795789312.0, "9625": 12795789312.0, "9630": 12795789312.0, "9635": 12795789312.0, "9640": 12795789312.0, "9645": 12795789312.0, "9650": 12795789312.0, "9655": 12795789312.0, "9660": 12795789312.0, "9665": 12795789312.0, "9670": 12795789312.0, "9675": 12795789312.0, "9680": 12795789312.0, "9685": 12795789312.0, "9690": 12795789312.0, "9695": 12795789312.0, "9700": 12795789312.0, "9705": 12795789312.0, "9710": 12795789312.0, "9715": 12795789312.0, "9720": 12795789312.0, "9725": 12795789312.0, "9730": 12795789312.0, "9735": 12795789312.0, "9740": 12795789312.0, "9745": 12795789312.0, "9750": 12795789312.0, "9755": 12795789312.0, "9760": 12795789312.0, "9765": 12795789312.0, "9770": 12795789312.0, "9775": 12795789312.0, "9780": 12795789312.0, "9785": 12795789312.0, "9790": 12795789312.0, "9795": 12795789312.0, "9800": 12795789312.0, "9805": 12795789312.0, "9810": 12795789312.0, "9815": 12795789312.0, "9820": 12795789312.0, "9825": 12795789312.0, "9830": 12795789312.0, "9835": 12795789312.0, "9840": 12795789312.0, "9845": 12795789312.0, "9850": 12795789312.0, "9855": 12795789312.0, 
"9860": 12795789312.0, "9865": 12795789312.0, "9870": 12795789312.0, "9875": 12795789312.0, "9880": 12795789312.0, "9885": 12795789312.0, "9890": 12795789312.0, "9895": 12795789312.0, "9900": 12795789312.0, "9905": 12795789312.0, "9910": 12795789312.0, "9915": 12795789312.0, "9920": 12795789312.0, "9925": 12795789312.0, "9930": 12795789312.0, "9935": 12795789312.0, "9940": 12795789312.0, "9945": 12795789312.0, "9950": 12795789312.0, "9955": 12795789312.0, "9960": 12795789312.0, "9965": 12795789312.0, "9970": 12795789312.0, "9975": 12795789312.0, "9980": 12795789312.0, "9985": 12795789312.0, "9990": 12795789312.0, "9995": 12795789312.0, "10000": 12795789312.0, "10005": 12795789312.0, "10010": 12795789312.0, "10015": 12795789312.0, "10020": 12795789312.0, "10025": 12795789312.0, "10030": 12795789312.0, "10035": 12795789312.0, "10040": 12795789312.0, "10045": 12795789312.0, "10050": 12795789312.0, "10055": 12795789312.0, "10060": 12795789312.0, "10065": 12795789312.0, "10070": 12795789312.0, "10075": 12795789312.0, "10080": 12795789312.0, "10085": 12795789312.0, "10090": 12795789312.0, "10095": 12795789312.0, "10100": 12795789312.0, "10105": 12795789312.0, "10110": 12795789312.0, "10115": 12795789312.0, "10120": 12795789312.0, "10125": 12795789312.0, "10130": 12795789312.0, "10135": 12795789312.0, "10140": 12795789312.0, "10145": 12795789312.0, "10150": 12795789312.0, "10155": 12795789312.0, "10160": 12795789312.0, "10165": 12795789312.0, "10170": 12795789312.0, "10175": 12795789312.0, "10180": 12795789312.0, "10185": 12795789312.0, "10190": 12795789312.0, "10195": 12795789312.0, "10200": 12795789312.0, "10205": 12795789312.0, "10210": 12795789312.0, "10215": 12795789312.0, "10220": 12795789312.0, "10225": 12795789312.0, "10230": 12795789312.0, "10235": 12795789312.0, "10240": 12795789312.0, "10245": 12795789312.0, "10250": 12795789312.0, "10255": 12795789312.0, "10260": 12795789312.0, "10265": 12795789312.0, "10270": 12795789312.0, "10275": 12795789312.0, "10280": 
12795789312.0, "10285": 12795789312.0, "10290": 12795789312.0, "10295": 12795789312.0, "10300": 12795789312.0, "10305": 12795789312.0, "10310": 12795789312.0, "10315": 12795789312.0, "10320": 12795789312.0, "10325": 12795789312.0, "10330": 12795789312.0, "10335": 12795789312.0, "10340": 12795789312.0, "10345": 12795789312.0, "10350": 12795789312.0, "10355": 12795789312.0, "10360": 12795789312.0, "10365": 12795789312.0, "10370": 12795789312.0, "10375": 12795789312.0, "10380": 12795789312.0, "10385": 12795789312.0, "10390": 12795789312.0, "10395": 12795789312.0, "10400": 12795789312.0, "10405": 12795789312.0, "10410": 12795789312.0, "10415": 12795789312.0, "10420": 12795789312.0, "10425": 12795789312.0, "10430": 12795789312.0, "10435": 12795789312.0, "10440": 12795789312.0, "10445": 12795789312.0, "10450": 12795789312.0, "10455": 12795789312.0, "10460": 12795789312.0, "10465": 12795789312.0, "10470": 12795789312.0, "10475": 12795789312.0, "10480": 12795789312.0, "10485": 12795789312.0, "10490": 12795789312.0, "10495": 12795789312.0, "10500": 12795789312.0, "10505": 12795789312.0, "10510": 12795789312.0, "10515": 12795789312.0, "10520": 12795789312.0, "10525": 12795789312.0, "10530": 12795789312.0, "10535": 12795789312.0, "10540": 12795789312.0, "10545": 12795789312.0, "10550": 12795789312.0, "10555": 12795789312.0, "10560": 12795789312.0, "10565": 12795789312.0, "10570": 12795789312.0, "10575": 12795789312.0, "10580": 12795789312.0, "10585": 12795789312.0, "10590": 12795789312.0, "10595": 12795789312.0, "10600": 12795789312.0, "10605": 12795789312.0, "10610": 12795789312.0, "10615": 12795789312.0, "10620": 12795789312.0, "10625": 12795789312.0, "10630": 12795789312.0, "10635": 12795789312.0, "10640": 12795789312.0, "10645": 12795789312.0, "10650": 12795789312.0, "10655": 12795789312.0, "10660": 12795789312.0, "10665": 12795789312.0, "10670": 12795789312.0, "10675": 12795789312.0, "10680": 12795789312.0, "10685": 12795789312.0, "10690": 12795789312.0, "10695": 
12795789312.0, "10700": 12795789312.0, "10705": 12795789312.0, "10710": 12795789312.0, "10715": 12795789312.0, "10720": 12795789312.0, "10725": 12795789312.0, "10730": 12795789312.0, "10735": 12795789312.0, "10740": 12795789312.0, "10745": 12795789312.0, "10750": 12795789312.0, "10755": 12795789312.0, "10760": 12795789312.0, "10765": 12795789312.0, "10770": 12795789312.0, "10775": 12795789312.0, "10780": 12795789312.0, "10785": 12795789312.0, "10790": 12795789312.0, "10795": 12795789312.0, "10800": 12795789312.0, "10805": 12795789312.0, "10810": 12795789312.0, "10815": 12795789312.0, "10820": 12795789312.0, "10825": 12795789312.0, "10830": 12795789312.0, "10835": 12795789312.0, "10840": 12795789312.0, "10845": 12795789312.0, "10850": 12795789312.0, "10855": 12795789312.0, "10860": 12795789312.0, "10865": 12795789312.0, "10870": 12795789312.0, "10875": 12795789312.0, "10880": 12795789312.0, "10885": 12795789312.0, "10890": 12795789312.0, "10895": 12795789312.0, "10900": 12795789312.0, "10905": 12795789312.0, "10910": 12795789312.0, "10915": 12795789312.0, "10920": 12795789312.0, "10925": 12795789312.0, "10930": 12795789312.0, "10935": 12795789312.0, "10940": 12795789312.0, "10945": 12795789312.0, "10950": 12795789312.0, "10955": 12795789312.0, "10960": 12795789312.0, "10965": 12795789312.0, "10970": 12795789312.0, "10975": 12795789312.0, "10980": 12795789312.0, "10985": 12795789312.0, "10990": 12795789312.0, "10995": 12795789312.0, "11000": 12795789312.0, "11005": 12795789312.0, "11010": 12795789312.0, "11015": 12795789312.0, "11020": 12795789312.0, "11025": 12795789312.0, "11030": 12795789312.0, "11035": 12795789312.0, "11040": 12795789312.0, "11045": 12795789312.0, "11050": 12795789312.0, "11055": 12795789312.0, "11060": 12795789312.0, "11065": 12795789312.0, "11070": 12795789312.0, "11075": 12795789312.0, "11080": 12795789312.0, "11085": 12795789312.0, "11090": 12795789312.0, "11095": 12795789312.0, "11100": 12795789312.0, "11105": 12795789312.0, "11110": 
12795789312.0, "11115": 12795789312.0, "11120": 12795789312.0, "11125": 12795789312.0, "11130": 12795789312.0, "11135": 12795789312.0, "11140": 12795789312.0, "11145": 12795789312.0, "11150": 12795789312.0, "11155": 12795789312.0, "11160": 12795789312.0, "11165": 12795789312.0, "11170": 12795789312.0, "11175": 12795789312.0, "11180": 12795789312.0, "11185": 12795789312.0, "11190": 12795789312.0, "11195": 12795789312.0, "11200": 12795789312.0, "11205": 12795789312.0, "11210": 12795789312.0, "11215": 12795789312.0, "11220": 12795789312.0, "11225": 12795789312.0, "11230": 12795789312.0, "11235": 12795789312.0, "11240": 12795789312.0, "11245": 12795789312.0, "11250": 12795789312.0, "11255": 12795789312.0, "11260": 12795789312.0, "11265": 12795789312.0, "11270": 12795789312.0, "11275": 12795789312.0, "11280": 12795789312.0, "11285": 12795789312.0, "11290": 12795789312.0, "11295": 12795789312.0, "11300": 12795789312.0, "11305": 12795789312.0, "11310": 12795789312.0, "11315": 12795789312.0, "11320": 12795789312.0, "11325": 12795789312.0, "11330": 12795789312.0, "11335": 12795789312.0, "11340": 12795789312.0, "11345": 12795789312.0, "11350": 12795789312.0, "11355": 12795789312.0, "11360": 12795789312.0, "11365": 12795789312.0, "11370": 12795789312.0, "11375": 12795789312.0, "11380": 12795789312.0, "11385": 12795789312.0, "11390": 12795789312.0, "11395": 12795789312.0, "11400": 12795789312.0, "11405": 12795789312.0, "11410": 12795789312.0, "11415": 12795789312.0, "11420": 12795789312.0, "11425": 12795789312.0, "11430": 12795789312.0, "11435": 12795789312.0, "11440": 12795789312.0, "11445": 12795789312.0, "11450": 12795789312.0, "11455": 12795789312.0, "11460": 12795789312.0, "11465": 12795789312.0, "11470": 12795789312.0, "11475": 12795789312.0, "11480": 12795789312.0, "11485": 12795789312.0, "11490": 12795789312.0, "11495": 12795789312.0, "11500": 12795789312.0, "11505": 12795789312.0, "11510": 12795789312.0, "11515": 12795789312.0, "11520": 12795789312.0, "11525": 
12795789312.0, "11530": 12795789312.0, "11535": 12795789312.0, "11540": 12795789312.0, "11545": 12795789312.0, "11550": 12795789312.0, "11555": 12795789312.0, "11560": 12795789312.0, "11565": 12795789312.0, "11570": 12795789312.0, "11575": 12795789312.0, "11580": 12795789312.0, "11585": 12795789312.0, "11590": 12795789312.0, "11595": 12795789312.0, "11600": 12795789312.0, "11605": 12795789312.0, "11610": 12795789312.0, "11615": 12795789312.0, "11620": 12795789312.0, "11625": 12795789312.0, "11630": 12795789312.0, "11635": 12795789312.0, "11640": 12795789312.0, "11645": 12795789312.0, "11650": 12795789312.0, "11655": 12795789312.0, "11660": 12795789312.0, "11665": 12795789312.0, "11670": 12795789312.0, "11675": 12795789312.0, "11680": 12795789312.0, "11685": 12795789312.0, "11690": 12795789312.0, "11695": 12795789312.0, "11700": 12795789312.0, "11705": 12795789312.0, "11710": 12795789312.0, "11715": 12795789312.0, "11720": 12795789312.0, "11725": 12795789312.0, "11730": 12795789312.0, "11735": 12795789312.0, "11740": 12795789312.0, "11745": 12795789312.0, "11750": 12795789312.0, "11755": 12795789312.0, "11760": 12795789312.0, "11765": 12795789312.0, "11770": 12795789312.0, "11775": 12795789312.0, "11780": 12795789312.0, "11785": 12795789312.0, "11790": 12795789312.0, "11795": 12795789312.0, "11800": 12795789312.0, "11805": 12795789312.0, "11810": 12795789312.0, "11815": 12795789312.0, "11820": 12795789312.0, "11825": 12795789312.0, "11830": 12795789312.0, "11835": 12795789312.0, "11840": 12795789312.0, "11845": 12795789312.0, "11850": 12795789312.0, "11855": 12795789312.0, "11860": 12795789312.0, "11865": 12795789312.0, "11870": 12795789312.0, "11875": 12795789312.0, "11880": 12795789312.0, "11885": 12795789312.0, "11890": 12795789312.0, "11895": 12795789312.0, "11900": 12795789312.0, "11905": 12795789312.0, "11910": 12795789312.0, "11915": 12795789312.0, "11920": 12795789312.0, "11925": 12795789312.0, "11930": 12795789312.0, "11935": 12795789312.0, "11940": 
12795789312.0, "11945": 12795789312.0, "11950": 12795789312.0, "11955": 12795789312.0, "11960": 12795789312.0, "11965": 12795789312.0, "11970": 12795789312.0, "11975": 12795789312.0, "11980": 12795789312.0, "11985": 12795789312.0, "11990": 12795789312.0, "11995": 12795789312.0, "12000": 12795789312.0, "12005": 12795789312.0, "12010": 12795789312.0, "12015": 12795789312.0, "12020": 12795789312.0, "12025": 12795789312.0, "12030": 12795789312.0, "12035": 12795789312.0, "12040": 12795789312.0, "12045": 12795789312.0, "12050": 12795789312.0, "12055": 12795789312.0, "12060": 12795789312.0, "12065": 12795789312.0, "12070": 12795789312.0, "12075": 12795789312.0, "12080": 12795789312.0, "12085": 12795789312.0, "12090": 12795789312.0, "12095": 12795789312.0, "12100": 12795789312.0, "12105": 12795789312.0, "12110": 12795789312.0, "12115": 12795789312.0, "12120": 12795789312.0, "12125": 12795789312.0, "12130": 12795789312.0, "12135": 12795789312.0, "12140": 12795789312.0, "12145": 12795789312.0, "12150": 12795789312.0, "12155": 12795789312.0, "12160": 12795789312.0, "12165": 12795789312.0, "12170": 12795789312.0, "12175": 12795789312.0, "12180": 12795789312.0, "12185": 12795789312.0, "12190": 12795789312.0, "12195": 12795789312.0, "12200": 12795789312.0, "12205": 12795789312.0, "12210": 12795789312.0, "12215": 12795789312.0, "12220": 12795789312.0, "12225": 12795789312.0, "12230": 12795789312.0, "12235": 12795789312.0, "12240": 12795789312.0, "12245": 12795789312.0, "12250": 12795789312.0, "12255": 12795789312.0, "12260": 12795789312.0, "12265": 12795789312.0, "12270": 12795789312.0, "12275": 12795789312.0, "12280": 12795789312.0, "12285": 12795789312.0, "12290": 12795789312.0, "12295": 12795789312.0, "12300": 12795789312.0, "12305": 12795789312.0, "12310": 12795789312.0, "12315": 12795789312.0, "12320": 12795789312.0, "12325": 12795789312.0, "12330": 12795789312.0, "12335": 12795789312.0, "12340": 12795789312.0, "12345": 12795789312.0, "12350": 12795789312.0, "12355": 
12795789312.0, "12360": 12795789312.0, "12365": 12795789312.0, "12370": 12795789312.0, "12375": 12795789312.0, "12380": 12795789312.0, "12385": 12795789312.0, "12390": 12795789312.0, "12395": 12795789312.0, "12400": 12795789312.0, "12405": 12795789312.0, "12410": 12795789312.0, "12415": 12795789312.0, "12420": 12795789312.0, "12425": 12795789312.0, "12430": 12795789312.0, "12435": 12795789312.0, "12440": 12795789312.0, "12445": 12795789312.0, "12450": 12795789312.0, "12455": 12795789312.0, "12460": 12795789312.0, "12465": 12795789312.0, "12470": 12795789312.0, "12475": 12795789312.0, "12480": 12795789312.0, "12485": 12795789312.0, "12490": 12795789312.0, "12495": 12795789312.0, "12500": 12795789312.0, "12505": 12795789312.0, "12510": 12795789312.0, "12515": 12795789312.0, "12520": 12795789312.0, "12525": 12795789312.0, "12530": 12795789312.0, "12535": 12795789312.0, "12540": 12795789312.0, "12545": 12795789312.0, "12550": 12795789312.0, "12555": 12795789312.0, "12560": 12795789312.0, "12565": 12795789312.0, "12570": 12795789312.0, "12575": 12795789312.0, "12580": 12795789312.0, "12585": 12795789312.0, "12590": 12795789312.0, "12595": 12795789312.0, "12600": 12795789312.0, "12605": 12795789312.0, "12610": 12795789312.0, "12615": 12795789312.0, "12620": 12795789312.0, "12625": 12795789312.0, "12630": 12795789312.0, "12635": 12795789312.0, "12640": 12795789312.0, "12645": 12795789312.0, "12650": 12795789312.0, "12655": 12795789312.0, "12660": 12795789312.0, "12665": 12795789312.0, "12670": 12795789312.0, "12675": 12795789312.0, "12680": 12795789312.0, "12685": 12795789312.0, "12690": 12795789312.0, "12695": 12795789312.0, "12700": 12795789312.0, "12705": 12795789312.0, "12710": 12795789312.0, "12715": 12795789312.0, "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", 
"12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan"}}, "mem-max-allocated-bytes": {"start_step": 1, "end_step": 13000, "step_interval": 5, "values": {"1": 27991298048.0, "5": 28489385984.0, "10": 28489385984.0, "15": 28489385984.0, "20": 28489385984.0, "25": 28489385984.0, "30": 28489385984.0, "35": 28489385984.0, "40": 28489385984.0, "45": 28489385984.0, "50": 28489385984.0, "55": 28489385984.0, "60": 28489385984.0, "65": 28489385984.0, "70": 28489385984.0, "75": 28489385984.0, "80": 28489385984.0, "85": 28489385984.0, "90": 28489385984.0, "95": 28489385984.0, "100": 28489385984.0, "105": 28489385984.0, "110": 28489385984.0, "115": 28489385984.0, "120": 28489385984.0, "125": 28489385984.0, "130": 28489385984.0, "135": 28489385984.0, "140": 28489385984.0, "145": 28489385984.0, "150": 28489385984.0, "155": 28489385984.0, "160": 28489385984.0, "165": 28489385984.0, "170": 28489385984.0, "175": 28489385984.0, "180": 28489385984.0, "185": 28489385984.0, "190": 28489385984.0, "195": 28489385984.0, "200": 28489385984.0, "205": 28489385984.0, "210": 28489385984.0, "215": 28489385984.0, "220": 28489385984.0, "225": 28489385984.0, "230": 28489385984.0, "235": 28489385984.0, "240": 28489385984.0, "245": 28489385984.0, "250": 28489385984.0, "255": 28489385984.0, "260": 28489385984.0, "265": 28489385984.0, "270": 28489385984.0, "275": 28489385984.0, "280": 28489385984.0, "285": 
28489385984.0, "290": 28489385984.0, "295": 28489385984.0, "300": 28489385984.0, "305": 28489385984.0, "310": 28489385984.0, "315": 28489385984.0, "320": 28489385984.0, "325": 28489385984.0, "330": 28489385984.0, "335": 28489385984.0, "340": 28489385984.0, "345": 28489385984.0, "350": 28489385984.0, "355": 28489385984.0, "360": 28489385984.0, "365": 28489385984.0, "370": 28489385984.0, "375": 28489385984.0, "380": 28489385984.0, "385": 28489385984.0, "390": 28489385984.0, "395": 28489385984.0, "400": 28489385984.0, "405": 28489385984.0, "410": 28489385984.0, "415": 28489385984.0, "420": 28489385984.0, "425": 28489385984.0, "430": 28489385984.0, "435": 28489385984.0, "440": 28489385984.0, "445": 28489385984.0, "450": 28489385984.0, "455": 28489385984.0, "460": 28489385984.0, "465": 28489385984.0, "470": 28489385984.0, "475": 28489385984.0, "480": 28489385984.0, "485": 28489385984.0, "490": 28489385984.0, "495": 28489385984.0, "500": 28489385984.0, "505": 28489385984.0, "510": 28489385984.0, "515": 28489385984.0, "520": 28489385984.0, "525": 28489385984.0, "530": 28489385984.0, "535": 28489385984.0, "540": 28489385984.0, "545": 28489385984.0, "550": 28489385984.0, "555": 28489385984.0, "560": 28489385984.0, "565": 28489385984.0, "570": 28489385984.0, "575": 28489385984.0, "580": 28489385984.0, "585": 28489385984.0, "590": 28489385984.0, "595": 28489385984.0, "600": 28489385984.0, "605": 28489385984.0, "610": 28489385984.0, "615": 28489385984.0, "620": 28489385984.0, "625": 28489385984.0, "630": 28489385984.0, "635": 28489385984.0, "640": 28489385984.0, "645": 28489385984.0, "650": 28489385984.0, "655": 28489385984.0, "660": 28489385984.0, "665": 28489385984.0, "670": 28489385984.0, "675": 28489385984.0, "680": 28489385984.0, "685": 28489385984.0, "690": 28489385984.0, "695": 28489385984.0, "700": 28489385984.0, "705": 28489385984.0, "710": 28489385984.0, "715": 28489385984.0, "720": 28489385984.0, "725": 28489385984.0, "730": 28489385984.0, "735": 28489385984.0, 
"740": 28489385984.0, "745": 28489385984.0, "750": 28489385984.0, "755": 28489385984.0, "760": 28489385984.0, "765": 28489385984.0, "770": 28489385984.0, "775": 28489385984.0, "780": 28489385984.0, "785": 28489385984.0, "790": 28489385984.0, "795": 28489385984.0, "800": 28489385984.0, "805": 28489385984.0, "810": 28489385984.0, "815": 28489385984.0, "820": 28489385984.0, "825": 28489385984.0, "830": 28489385984.0, "835": 28489385984.0, "840": 28489385984.0, "845": 28489385984.0, "850": 28489385984.0, "855": 28489385984.0, "860": 28489385984.0, "865": 28489385984.0, "870": 28489385984.0, "875": 28489385984.0, "880": 28489385984.0, "885": 28489385984.0, "890": 28489385984.0, "895": 28489385984.0, "900": 28489385984.0, "905": 28489385984.0, "910": 28489385984.0, "915": 28489385984.0, "920": 28489385984.0, "925": 28489385984.0, "930": 28489385984.0, "935": 28489385984.0, "940": 28489385984.0, "945": 28489385984.0, "950": 28489385984.0, "955": 28489385984.0, "960": 28489385984.0, "965": 28489385984.0, "970": 28489385984.0, "975": 28489385984.0, "980": 28489385984.0, "985": 28489385984.0, "990": 28489385984.0, "995": 28489385984.0, "1000": 28489385984.0, "1005": 28489385984.0, "1010": 28489385984.0, "1015": 28489385984.0, "1020": 28489385984.0, "1025": 28489385984.0, "1030": 28489385984.0, "1035": 28489385984.0, "1040": 28489385984.0, "1045": 28489385984.0, "1050": 28489385984.0, "1055": 28489385984.0, "1060": 28489385984.0, "1065": 28489385984.0, "1070": 28489385984.0, "1075": 28489385984.0, "1080": 28489385984.0, "1085": 28489385984.0, "1090": 28489385984.0, "1095": 28489385984.0, "1100": 28489385984.0, "1105": 28489385984.0, "1110": 28489385984.0, "1115": 28489385984.0, "1120": 28489385984.0, "1125": 28489385984.0, "1130": 28489385984.0, "1135": 28489385984.0, "1140": 28489385984.0, "1145": 28489385984.0, "1150": 28489385984.0, "1155": 28489385984.0, "1160": 28489385984.0, "1165": 28489385984.0, "1170": 28489385984.0, "1175": 28489385984.0, "1180": 28489385984.0, 
"1185": 28489385984.0, "1190": 28489385984.0, "1195": 28489385984.0, "1200": 28489385984.0, "1205": 28489385984.0, "1210": 28489385984.0, "1215": 28489385984.0, "1220": 28489385984.0, "1225": 28489385984.0, "1230": 28489385984.0, "1235": 28489385984.0, "1240": 28489385984.0, "1245": 28489385984.0, "1250": 28489385984.0, "1255": 28489385984.0, "1260": 28489385984.0, "1265": 28489385984.0, "1270": 28489385984.0, "1275": 28489385984.0, "1280": 28489385984.0, "1285": 28489385984.0, "1290": 28489385984.0, "1295": 28489385984.0, "1300": 28489385984.0, "1305": 28489385984.0, "1310": 28489385984.0, "1315": 28489385984.0, "1320": 28489385984.0, "1325": 28489385984.0, "1330": 28489385984.0, "1335": 28489385984.0, "1340": 28489385984.0, "1345": 28489385984.0, "1350": 28489385984.0, "1355": 28489385984.0, "1360": 28489385984.0, "1365": 28489385984.0, "1370": 28489385984.0, "1375": 28489385984.0, "1380": 28489385984.0, "1385": 28489385984.0, "1390": 28489385984.0, "1395": 28489385984.0, "1400": 28489385984.0, "1405": 28489385984.0, "1410": 28489385984.0, "1415": 28489385984.0, "1420": 28489385984.0, "1425": 28489385984.0, "1430": 28489385984.0, "1435": 28489385984.0, "1440": 28489385984.0, "1445": 28489385984.0, "1450": 28489385984.0, "1455": 28489385984.0, "1460": 28489385984.0, "1465": 28489385984.0, "1470": 28489385984.0, "1475": 28489385984.0, "1480": 28489385984.0, "1485": 28489385984.0, "1490": 28489385984.0, "1495": 28489385984.0, "1500": 28489385984.0, "1505": 28489385984.0, "1510": 28489385984.0, "1515": 28489385984.0, "1520": 28489385984.0, "1525": 28489385984.0, "1530": 28489385984.0, "1535": 28489385984.0, "1540": 28489385984.0, "1545": 28489385984.0, "1550": 28489385984.0, "1555": 28489385984.0, "1560": 28489385984.0, "1565": 28489385984.0, "1570": 28489385984.0, "1575": 28489385984.0, "1580": 28489385984.0, "1585": 28489385984.0, "1590": 28489385984.0, "1595": 28489385984.0, "1600": 28489385984.0, "1605": 28489385984.0, "1610": 28489385984.0, "1615": 
28489385984.0, "1620": 28489385984.0, "1625": 28489385984.0, "1630": 28489385984.0, "1635": 28489385984.0, "1640": 28489385984.0, "1645": 28489385984.0, "1650": 28489385984.0, "1655": 28489385984.0, "1660": 28489385984.0, "1665": 28489385984.0, "1670": 28489385984.0, "1675": 28489385984.0, "1680": 28489385984.0, "1685": 28489385984.0, "1690": 28489385984.0, "1695": 28489385984.0, "1700": 28489385984.0, "1705": 28489385984.0, "1710": 28489385984.0, "1715": 28489385984.0, "1720": 28489385984.0, "1725": 28489385984.0, "1730": 28489385984.0, "1735": 28489385984.0, "1740": 28489385984.0, "1745": 28489385984.0, "1750": 28489385984.0, "1755": 28489385984.0, "1760": 28489385984.0, "1765": 28489385984.0, "1770": 28489385984.0, "1775": 28489385984.0, "1780": 28489385984.0, "1785": 28489385984.0, "1790": 28489385984.0, "1795": 28489385984.0, "1800": 28489385984.0, "1805": 28489385984.0, "1810": 28489385984.0, "1815": 28489385984.0, "1820": 28489385984.0, "1825": 28489385984.0, "1830": 28489385984.0, "1835": 28489385984.0, "1840": 28489385984.0, "1845": 28489385984.0, "1850": 28489385984.0, "1855": 28489385984.0, "1860": 28489385984.0, "1865": 28489385984.0, "1870": 28489385984.0, "1875": 28489385984.0, "1880": 28489385984.0, "1885": 28489385984.0, "1890": 28489385984.0, "1895": 28489385984.0, "1900": 28489385984.0, "1905": 28489385984.0, "1910": 28489385984.0, "1915": 28489385984.0, "1920": 28489385984.0, "1925": 28489385984.0, "1930": 28489385984.0, "1935": 28489385984.0, "1940": 28489385984.0, "1945": 28489385984.0, "1950": 28489385984.0, "1955": 28489385984.0, "1960": 28489385984.0, "1965": 28489385984.0, "1970": 28489385984.0, "1975": 28489385984.0, "1980": 28489385984.0, "1985": 28489385984.0, "1990": 28489385984.0, "1995": 28489385984.0, "2000": 28489385984.0, "2005": 28489385984.0, "2010": 28489385984.0, "2015": 28489385984.0, "2020": 28489385984.0, "2025": 28489385984.0, "2030": 28489385984.0, "2035": 28489385984.0, "2040": 28489385984.0, "2045": 28489385984.0, 
"2050": 28489385984.0, "2055": 28489385984.0, "2060": 28489385984.0, "2065": 28489385984.0, "2070": 28489385984.0, "2075": 28489385984.0, "2080": 28489385984.0, "2085": 28489385984.0, "2090": 28489385984.0, "2095": 28489385984.0, "2100": 28489385984.0, "2105": 28489385984.0, "2110": 28489385984.0, "2115": 28489385984.0, "2120": 28489385984.0, "2125": 28489385984.0, "2130": 28489385984.0, "2135": 28489385984.0, "2140": 28489385984.0, "2145": 28489385984.0, "2150": 28489385984.0, "2155": 28489385984.0, "2160": 28489385984.0, "2165": 28489385984.0, "2170": 28489385984.0, "2175": 28489385984.0, "2180": 28489385984.0, "2185": 28489385984.0, "2190": 28489385984.0, "2195": 28489385984.0, "2200": 28489385984.0, "2205": 28489385984.0, "2210": 28489385984.0, "2215": 28489385984.0, "2220": 28489385984.0, "2225": 28489385984.0, "2230": 28489385984.0, "2235": 28489385984.0, "2240": 28489385984.0, "2245": 28489385984.0, "2250": 28489385984.0, "2255": 28489385984.0, "2260": 28489385984.0, "2265": 28489385984.0, "2270": 28489385984.0, "2275": 28489385984.0, "2280": 28489385984.0, "2285": 28489385984.0, "2290": 28489385984.0, "2295": 28489385984.0, "2300": 28489385984.0, "2305": 28489385984.0, "2310": 28489385984.0, "2315": 28489385984.0, "2320": 28489385984.0, "2325": 28489385984.0, "2330": 28489385984.0, "2335": 28489385984.0, "2340": 28489385984.0, "2345": 28489385984.0, "2350": 28489385984.0, "2355": 28489385984.0, "2360": 28489385984.0, "2365": 28489385984.0, "2370": 28489385984.0, "2375": 28489385984.0, "2380": 28489385984.0, "2385": 28489385984.0, "2390": 28489385984.0, "2395": 28489385984.0, "2400": 28489385984.0, "2405": 28489385984.0, "2410": 28489385984.0, "2415": 28489385984.0, "2420": 28489385984.0, "2425": 28489385984.0, "2430": 28489385984.0, "2435": 28489385984.0, "2440": 28489385984.0, "2445": 28489385984.0, "2450": 28489385984.0, "2455": 28489385984.0, "2460": 28489385984.0, "2465": 28489385984.0, "2470": 28489385984.0, "2475": 28489385984.0, "2480": 
28489385984.0, "2485": 28489385984.0, "2490": 28489385984.0, "2495": 28489385984.0, "2500": 28489385984.0, "2505": 28489385984.0, "2510": 28489385984.0, "2515": 28489385984.0, "2520": 28489385984.0, "2525": 28489385984.0, "2530": 28489385984.0, "2535": 28489385984.0, "2540": 28489385984.0, "2545": 28489385984.0, "2550": 28489385984.0, "2555": 28489385984.0, "2560": 28489385984.0, "2565": 28489385984.0, "2570": 28489385984.0, "2575": 28489385984.0, "2580": 28489385984.0, "2585": 28489385984.0, "2590": 28489385984.0, "2595": 28489385984.0, "2600": 28489385984.0, "2605": 28489385984.0, "2610": 28489385984.0, "2615": 28489385984.0, "2620": 28489385984.0, "2625": 28489385984.0, "2630": 28489385984.0, "2635": 28489385984.0, "2640": 28489385984.0, "2645": 28489385984.0, "2650": 28489385984.0, "2655": 28489385984.0, "2660": 28489385984.0, "2665": 28489385984.0, "2670": 28489385984.0, "2675": 28489385984.0, "2680": 28489385984.0, "2685": 28489385984.0, "2690": 28489385984.0, "2695": 28489385984.0, "2700": 28489385984.0, "2705": 28489385984.0, "2710": 28489385984.0, "2715": 28489385984.0, "2720": 28489385984.0, "2725": 28489385984.0, "2730": 28489385984.0, "2735": 28489385984.0, "2740": 28489385984.0, "2745": 28489385984.0, "2750": 28489385984.0, "2755": 28489385984.0, "2760": 28489385984.0, "2765": 28489385984.0, "2770": 28489385984.0, "2775": 28489385984.0, "2780": 28489385984.0, "2785": 28489385984.0, "2790": 28489385984.0, "2795": 28489385984.0, "2800": 28489385984.0, "2805": 28489385984.0, "2810": 28489385984.0, "2815": 28489385984.0, "2820": 28489385984.0, "2825": 28489385984.0, "2830": 28489385984.0, "2835": 28489385984.0, "2840": 28489385984.0, "2845": 28489385984.0, "2850": 28489385984.0, "2855": 28489385984.0, "2860": 28489385984.0, "2865": 28489385984.0, "2870": 28489385984.0, "2875": 28489385984.0, "2880": 28489385984.0, "2885": 28489385984.0, "2890": 28489385984.0, "2895": 28489385984.0, "2900": 28489385984.0, "2905": 28489385984.0, "2910": 28489385984.0, 
"2915": 28489385984.0, "2920": 28489385984.0, "2925": 28489385984.0, "2930": 28489385984.0, "2935": 28489385984.0, "2940": 28489385984.0, "2945": 28489385984.0, "2950": 28489385984.0, "2955": 28489385984.0, "2960": 28489385984.0, "2965": 28489385984.0, "2970": 28489385984.0, "2975": 28489385984.0, "2980": 28489385984.0, "2985": 28489385984.0, "2990": 28489385984.0, "2995": 28489385984.0, "3000": 28489385984.0, "3005": 28489385984.0, "3010": 28489385984.0, "3015": 28489385984.0, "3020": 28489385984.0, "3025": 28489385984.0, "3030": 28489385984.0, "3035": 28489385984.0, "3040": 28489385984.0, "3045": 28489385984.0, "3050": 28489385984.0, "3055": 28489385984.0, "3060": 28489385984.0, "3065": 28489385984.0, "3070": 28489385984.0, "3075": 28489385984.0, "3080": 28489385984.0, "3085": 28489385984.0, "3090": 28489385984.0, "3095": 28489385984.0, "3100": 28489385984.0, "3105": 28489385984.0, "3110": 28489385984.0, "3115": 28489385984.0, "3120": 28489385984.0, "3125": 28489385984.0, "3130": 28489385984.0, "3135": 28489385984.0, "3140": 28489385984.0, "3145": 28489385984.0, "3150": 28489385984.0, "3155": 28489385984.0, "3160": 28489385984.0, "3165": 28489385984.0, "3170": 28489385984.0, "3175": 28489385984.0, "3180": 28489385984.0, "3185": 28489385984.0, "3190": 28489385984.0, "3195": 28489385984.0, "3200": 28489385984.0, "3205": 28489385984.0, "3210": 28489385984.0, "3215": 28489385984.0, "3220": 28489385984.0, "3225": 28489385984.0, "3230": 28489385984.0, "3235": 28489385984.0, "3240": 28489385984.0, "3245": 28489385984.0, "3250": 28489385984.0, "3255": 28489385984.0, "3260": 28489385984.0, "3265": 28489385984.0, "3270": 28489385984.0, "3275": 28489385984.0, "3280": 28489385984.0, "3285": 28489385984.0, "3290": 28489385984.0, "3295": 28489385984.0, "3300": 28489385984.0, "3305": 28489385984.0, "3310": 28489385984.0, "3315": 28489385984.0, "3320": 28489385984.0, "3325": 28489385984.0, "3330": 28489385984.0, "3335": 28489385984.0, "3340": 28489385984.0, "3345": 
28489385984.0, "3350": 28489385984.0, "3355": 28489385984.0, "3360": 28489385984.0, "3365": 28489385984.0, "3370": 28489385984.0, "3375": 28489385984.0, "3380": 28489385984.0, "3385": 28489385984.0, "3390": 28489385984.0, "3395": 28489385984.0, "3400": 28489385984.0, "3405": 28489385984.0, "3410": 28489385984.0, "3415": 28489385984.0, "3420": 28489385984.0, "3425": 28489385984.0, "3430": 28489385984.0, "3435": 28489385984.0, "3440": 28489385984.0, "3445": 28489385984.0, "3450": 28489385984.0, "3455": 28489385984.0, "3460": 28489385984.0, "3465": 28489385984.0, "3470": 28489385984.0, "3475": 28489385984.0, "3480": 28489385984.0, "3485": 28489385984.0, "3490": 28489385984.0, "3495": 28489385984.0, "3500": 28489385984.0, "3505": 28489385984.0, "3510": 28489385984.0, "3515": 28489385984.0, "3520": 28489385984.0, "3525": 28489385984.0, "3530": 28489385984.0, "3535": 28489385984.0, "3540": 28489385984.0, "3545": 28489385984.0, "3550": 28489385984.0, "3555": 28489385984.0, "3560": 28489385984.0, "3565": 28489385984.0, "3570": 28489568256.0, "3575": 28489568256.0, "3580": 28489568256.0, "3585": 28489568256.0, "3590": 28489568256.0, "3595": 28489568256.0, "3600": 28489568256.0, "3605": 28489568256.0, "3610": 28489568256.0, "3615": 28489568256.0, "3620": 28489568256.0, "3625": 28489568256.0, "3630": 28489568256.0, "3635": 28489568256.0, "3640": 28489568256.0, "3645": 28489568256.0, "3650": 28489568256.0, "3655": 28489568256.0, "3660": 28489568256.0, "3665": 28489568256.0, "3670": 28489568256.0, "3675": 28489568256.0, "3680": 28489568256.0, "3685": 28489568256.0, "3690": 28489568256.0, "3695": 28489568256.0, "3700": 28489568256.0, "3705": 28489568256.0, "3710": 28489568256.0, "3715": 28489568256.0, "3720": 28489568256.0, "3725": 28489568256.0, "3730": 28489568256.0, "3735": 28489568256.0, "3740": 28489568256.0, "3745": 28489568256.0, "3750": 28489568256.0, "3755": 28489568256.0, "3760": 28489568256.0, "3765": 28489568256.0, "3770": 28489568256.0, "3775": 28489568256.0, 
"3780": 28489568256.0, "3785": 28489568256.0, "3790": 28489568256.0, "3795": 28489568256.0, "3800": 28489568256.0, "3805": 28489568256.0, "3810": 28489568256.0, "3815": 28489568256.0, "3820": 28489568256.0, "3825": 28489568256.0, "3830": 28489568256.0, "3835": 28489568256.0, "3840": 28489568256.0, "3845": 28489568256.0, "3850": 28489568256.0, "3855": 28489568256.0, "3860": 28489568256.0, "3865": 28489568256.0, "3870": 28489568256.0, "3875": 28489568256.0, "3880": 28489568256.0, "3885": 28489568256.0, "3890": 28489568256.0, "3895": 28489568256.0, "3900": 28489568256.0, "3905": 28489568256.0, "3910": 28489568256.0, "3915": 28489568256.0, "3920": 28489568256.0, "3925": 28489568256.0, "3930": 28489568256.0, "3935": 28489568256.0, "3940": 28489568256.0, "3945": 28489568256.0, "3950": 28489568256.0, "3955": 28489568256.0, "3960": 28489568256.0, "3965": 28489568256.0, "3970": 28489568256.0, "3975": 28489568256.0, "3980": 28489568256.0, "3985": 28489568256.0, "3990": 28489568256.0, "3995": 28489568256.0, "4000": 28489568256.0, "4005": 28489568256.0, "4010": 28489568256.0, "4015": 28489568256.0, "4020": 28489568256.0, "4025": 28489568256.0, "4030": 28489568256.0, "4035": 28489568256.0, "4040": 28489568256.0, "4045": 28489568256.0, "4050": 28489568256.0, "4055": 28489568256.0, "4060": 28489568256.0, "4065": 28489568256.0, "4070": 28489568256.0, "4075": 28489568256.0, "4080": 28489568256.0, "4085": 28489568256.0, "4090": 28489568256.0, "4095": 28489568256.0, "4100": 28489568256.0, "4105": 28489568256.0, "4110": 28489568256.0, "4115": 28489568256.0, "4120": 28489568256.0, "4125": 28489568256.0, "4130": 28489568256.0, "4135": 28489568256.0, "4140": 28489568256.0, "4145": 28489568256.0, "4150": 28489568256.0, "4155": 28489568256.0, "4160": 28489568256.0, "4165": 28489568256.0, "4170": 28489568256.0, "4175": 28489568256.0, "4180": 28489568256.0, "4185": 28489568256.0, "4190": 28489568256.0, "4195": 28489568256.0, "4200": 28489568256.0, "4205": 28489568256.0, "4210": 
28489568256.0, "4215": 28489568256.0, "4220": 28489568256.0, "4225": 28489568256.0, "4230": 28489568256.0, "4235": 28489568256.0, "4240": 28489568256.0, "4245": 28489568256.0, "4250": 28489568256.0, "4255": 28489568256.0, "4260": 28489568256.0, "4265": 28489568256.0, "4270": 28489568256.0, "4275": 28489568256.0, "4280": 28489568256.0, "4285": 28489568256.0, "4290": 28489568256.0, "4295": 28489568256.0, "4300": 28489568256.0, "4305": 28489568256.0, "4310": 28489568256.0, "4315": 28489568256.0, "4320": 28489568256.0, "4325": 28489568256.0, "4330": 28489568256.0, "4335": 28489568256.0, "4340": 28489568256.0, "4345": 28489568256.0, "4350": 28489568256.0, "4355": 28489568256.0, "4360": 28489568256.0, "4365": 28489568256.0, "4370": 28489568256.0, "4375": 28489568256.0, "4380": 28489568256.0, "4385": 28489568256.0, "4390": 28489568256.0, "4395": 28489568256.0, "4400": 28489568256.0, "4405": 28489568256.0, "4410": 28489568256.0, "4415": 28489568256.0, "4420": 28489568256.0, "4425": 28489568256.0, "4430": 28489568256.0, "4435": 28489568256.0, "4440": 28489568256.0, "4445": 28489568256.0, "4450": 28489568256.0, "4455": 28489568256.0, "4460": 28489568256.0, "4465": 28489568256.0, "4470": 28489568256.0, "4475": 28489568256.0, "4480": 28489568256.0, "4485": 28489568256.0, "4490": 28489568256.0, "4495": 28489568256.0, "4500": 28489568256.0, "4505": 28489568256.0, "4510": 28489568256.0, "4515": 28489568256.0, "4520": 28489568256.0, "4525": 28489568256.0, "4530": 28489568256.0, "4535": 28489568256.0, "4540": 28489568256.0, "4545": 28489568256.0, "4550": 28489568256.0, "4555": 28489568256.0, "4560": 28489568256.0, "4565": 28489568256.0, "4570": 28489568256.0, "4575": 28489568256.0, "4580": 28489568256.0, "4585": 28489568256.0, "4590": 28489568256.0, "4595": 28489568256.0, "4600": 28489568256.0, "4605": 28489568256.0, "4610": 28489568256.0, "4615": 28489568256.0, "4620": 28489568256.0, "4625": 28489568256.0, "4630": 28489568256.0, "4635": 28489568256.0, "4640": 28489568256.0, 
"4645": 28489568256.0, "4650": 28489568256.0, "4655": 28489568256.0, "4660": 28489568256.0, "4665": 28489568256.0, "4670": 28489568256.0, "4675": 28489568256.0, "4680": 28489568256.0, "4685": 28489568256.0, "4690": 28489568256.0, "4695": 28489568256.0, "4700": 28489568256.0, "4705": 28489568256.0, "4710": 28489568256.0, "4715": 28489568256.0, "4720": 28489568256.0, "4725": 28489568256.0, "4730": 28489568256.0, "4735": 28489568256.0, "4740": 28489568256.0, "4745": 28489568256.0, "4750": 28489568256.0, "4755": 28489568256.0, "4760": 28489568256.0, "4765": 28489568256.0, "4770": 28489568256.0, "4775": 28489568256.0, "4780": 28489568256.0, "4785": 28489568256.0, "4790": 28489568256.0, "4795": 28489568256.0, "4800": 28489568256.0, "4805": 28489568256.0, "4810": 28489568256.0, "4815": 28489568256.0, "4820": 28489568256.0, "4825": 28489568256.0, "4830": 28489568256.0, "4835": 28489568256.0, "4840": 28489568256.0, "4845": 28489568256.0, "4850": 28489568256.0, "4855": 28489568256.0, "4860": 28489568256.0, "4865": 28489568256.0, "4870": 28489568256.0, "4875": 28489568256.0, "4880": 28489568256.0, "4885": 28489568256.0, "4890": 28489568256.0, "4895": 28489568256.0, "4900": 28489568256.0, "4905": 28489568256.0, "4910": 28489568256.0, "4915": 28489568256.0, "4920": 28489568256.0, "4925": 28489568256.0, "4930": 28489568256.0, "4935": 28489568256.0, "4940": 28489568256.0, "4945": 28489568256.0, "4950": 28489568256.0, "4955": 28489568256.0, "4960": 28489568256.0, "4965": 28489568256.0, "4970": 28489568256.0, "4975": 28489568256.0, "4980": 28489568256.0, "4985": 28489568256.0, "4990": 28489568256.0, "4995": 28489568256.0, "5000": 28489568256.0, "5005": 28489568256.0, "5010": 28489568256.0, "5015": 28489568256.0, "5020": 28489568256.0, "5025": 28489568256.0, "5030": 28489568256.0, "5035": 28489568256.0, "5040": 28489568256.0, "5045": 28489568256.0, "5050": 28489568256.0, "5055": 28489568256.0, "5060": 28489568256.0, "5065": 28489568256.0, "5070": 28489568256.0, "5075": 
28489568256.0, "5080": 28489568256.0, "5085": 28489568256.0, "5090": 28489568256.0, "5095": 28489568256.0, "5100": 28489568256.0, "5105": 28489568256.0, "5110": 28489568256.0, "5115": 28489568256.0, "5120": 28489568256.0, "5125": 28489568256.0, "5130": 28489568256.0, "5135": 28489568256.0, "5140": 28489568256.0, "5145": 28489568256.0, "5150": 28489568256.0, "5155": 28489568256.0, "5160": 28489568256.0, "5165": 28489568256.0, "5170": 28489568256.0, "5175": 28489568256.0, "5180": 28489568256.0, "5185": 28489568256.0, "5190": 28489568256.0, "5195": 28489568256.0, "5200": 28489568256.0, "5205": 28489568256.0, "5210": 28489568256.0, "5215": 28489568256.0, "5220": 28489568256.0, "5225": 28489568256.0, "5230": 28489568256.0, "5235": 28489568256.0, "5240": 28489568256.0, "5245": 28489568256.0, "5250": 28489568256.0, "5255": 28489568256.0, "5260": 28489568256.0, "5265": 28489568256.0, "5270": 28489568256.0, "5275": 28489568256.0, "5280": 28489568256.0, "5285": 28489568256.0, "5290": 28489568256.0, "5295": 28489568256.0, "5300": 28489568256.0, "5305": 28489568256.0, "5310": 28489568256.0, "5315": 28489568256.0, "5320": 28489568256.0, "5325": 28489568256.0, "5330": 28489568256.0, "5335": 28489568256.0, "5340": 28489568256.0, "5345": 28489568256.0, "5350": 28489568256.0, "5355": 28489568256.0, "5360": 28489568256.0, "5365": 28489568256.0, "5370": 28489568256.0, "5375": 28489568256.0, "5380": 28489568256.0, "5385": 28489568256.0, "5390": 28489568256.0, "5395": 28489568256.0, "5400": 28489568256.0, "5405": 28489568256.0, "5410": 28489568256.0, "5415": 28489568256.0, "5420": 28489568256.0, "5425": 28489568256.0, "5430": 28489568256.0, "5435": 28489568256.0, "5440": 28489568256.0, "5445": 28489568256.0, "5450": 28489568256.0, "5455": 28489568256.0, "5460": 28489568256.0, "5465": 28489568256.0, "5470": 28489568256.0, "5475": 28489568256.0, "5480": 28489568256.0, "5485": 28489568256.0, "5490": 28489568256.0, "5495": 28489568256.0, "5500": 28489568256.0, "5505": 28489568256.0, 
"5510": 28489568256.0, "5515": 28489568256.0, "5520": 28489568256.0, "5525": 28489568256.0, "5530": 28489568256.0, "5535": 28489568256.0, "5540": 28489568256.0, "5545": 28489568256.0, "5550": 28489568256.0, "5555": 28489568256.0, "5560": 28489568256.0, "5565": 28489568256.0, "5570": 28489568256.0, "5575": 28489568256.0, "5580": 28489568256.0, "5585": 28489568256.0, "5590": 28489568256.0, "5595": 28489568256.0, "5600": 28489568256.0, "5605": 28489568256.0, "5610": 28489568256.0, "5615": 28489568256.0, "5620": 28489568256.0, "5625": 28489568256.0, "5630": 28489568256.0, "5635": 28489568256.0, "5640": 28489568256.0, "5645": 28489568256.0, "5650": 28489568256.0, "5655": 28489568256.0, "5660": 28489568256.0, "5665": 28489568256.0, "5670": 28489568256.0, "5675": 28489568256.0, "5680": 28489568256.0, "5685": 28489568256.0, "5690": 28489568256.0, "5695": 28489568256.0, "5700": 28489568256.0, "5705": 28489568256.0, "5710": 28489568256.0, "5715": 28489568256.0, "5720": 28489568256.0, "5725": 28489568256.0, "5730": 28489568256.0, "5735": 28489568256.0, "5740": 28489568256.0, "5745": 28489568256.0, "5750": 28489568256.0, "5755": 28489568256.0, "5760": 28489568256.0, "5765": 28489568256.0, "5770": 28489568256.0, "5775": 28489568256.0, "5780": 28489568256.0, "5785": 28489568256.0, "5790": 28489568256.0, "5795": 28489568256.0, "5800": 28489568256.0, "5805": 28489568256.0, "5810": 28489568256.0, "5815": 28489568256.0, "5820": 28489568256.0, "5825": 28489568256.0, "5830": 28489568256.0, "5835": 28489568256.0, "5840": 28489568256.0, "5845": 28489568256.0, "5850": 28489568256.0, "5855": 28489568256.0, "5860": 28489568256.0, "5865": 28489568256.0, "5870": 28489568256.0, "5875": 28489568256.0, "5880": 28489568256.0, "5885": 28489568256.0, "5890": 28489568256.0, "5895": 28489568256.0, "5900": 28489568256.0, "5905": 28489568256.0, "5910": 28489568256.0, "5915": 28489568256.0, "5920": 28489568256.0, "5925": 28489568256.0, "5930": 28489568256.0, "5935": 28489568256.0, "5940": 
28489568256.0, "5945": 28489568256.0, "5950": 28489568256.0, "5955": 28489568256.0, "5960": 28489568256.0, "5965": 28489568256.0, "5970": 28489568256.0, "5975": 28489568256.0, "5980": 28489568256.0, "5985": 28489568256.0, "5990": 28489568256.0, "5995": 28489568256.0, "6000": 28489568256.0, "6005": 28489568256.0, "6010": 28489568256.0, "6015": 28489568256.0, "6020": 28489568256.0, "6025": 28489568256.0, "6030": 28489568256.0, "6035": 28489568256.0, "6040": 28489568256.0, "6045": 28489568256.0, "6050": 28489568256.0, "6055": 28489568256.0, "6060": 28489568256.0, "6065": 28489568256.0, "6070": 28489568256.0, "6075": 28489568256.0, "6080": 28489568256.0, "6085": 28489568256.0, "6090": 28489568256.0, "6095": 28489568256.0, "6100": 28489568256.0, "6105": 28489568256.0, "6110": 28489568256.0, "6115": 28489568256.0, "6120": 28489568256.0, "6125": 28489568256.0, "6130": 28489568256.0, "6135": 28489568256.0, "6140": 28489568256.0, "6145": 28489568256.0, "6150": 28489568256.0, "6155": 28489568256.0, "6160": 28489568256.0, "6165": 28489568256.0, "6170": 28489568256.0, "6175": 28489568256.0, "6180": 28489568256.0, "6185": 28489568256.0, "6190": 28489568256.0, "6195": 28489568256.0, "6200": 28489568256.0, "6205": 28489568256.0, "6210": 28489568256.0, "6215": 28489568256.0, "6220": 28489568256.0, "6225": 28489568256.0, "6230": 28489568256.0, "6235": 28489568256.0, "6240": 28489568256.0, "6245": 28489568256.0, "6250": 28489568256.0, "6255": 28489568256.0, "6260": 28489568256.0, "6265": 28489568256.0, "6270": 28489568256.0, "6275": 28489568256.0, "6280": 28489568256.0, "6285": 28489568256.0, "6290": 28489568256.0, "6295": 28489568256.0, "6300": 28489568256.0, "6305": 28489568256.0, "6310": 28489568256.0, "6315": 28489568256.0, "6320": 28489568256.0, "6325": 28489568256.0, "6330": 28489568256.0, "6335": 28489568256.0, "6340": 28489568256.0, "6345": 28489568256.0, "6350": 28489568256.0, "6355": 28489568256.0, "6360": 28489568256.0, "6365": 28489568256.0, "6370": 28489568256.0, 
"6375": 28489568256.0, "6380": 28489568256.0, "6385": 28489568256.0, "6390": 28489568256.0, "6395": 28489568256.0, "6400": 28489568256.0, "6405": 28489568256.0, "6410": 28489568256.0, "6415": 28489568256.0, "6420": 28489568256.0, "6425": 28489568256.0, "6430": 28489568256.0, "6435": 28489568256.0, "6440": 28489568256.0, "6445": 28489568256.0, "6450": 28489568256.0, "6455": 28489568256.0, "6460": 28489568256.0, "6465": 28489568256.0, "6470": 28489568256.0, "6475": 28489568256.0, "6480": 28489568256.0, "6485": 28489568256.0, "6490": 28489568256.0, "6495": 28489568256.0, "6500": 28489568256.0, "6505": 28489568256.0, "6510": 28489568256.0, "6515": 28489568256.0, "6520": 28489568256.0, "6525": 28489568256.0, "6530": 28489568256.0, "6535": 28489568256.0, "6540": 28489568256.0, "6545": 28489568256.0, "6550": 28489568256.0, "6555": 28489568256.0, "6560": 28489568256.0, "6565": 28489568256.0, "6570": 28489568256.0, "6575": 28489568256.0, "6580": 28489568256.0, "6585": 28489568256.0, "6590": 28489568256.0, "6595": 28489568256.0, "6600": 28489568256.0, "6605": 28489568256.0, "6610": 28489568256.0, "6615": 28489568256.0, "6620": 28489568256.0, "6625": 28489568256.0, "6630": 28489568256.0, "6635": 28489568256.0, "6640": 28489568256.0, "6645": 28489568256.0, "6650": 28489568256.0, "6655": 28489568256.0, "6660": 28489568256.0, "6665": 28489568256.0, "6670": 28489568256.0, "6675": 28489568256.0, "6680": 28489568256.0, "6685": 28489568256.0, "6690": 28489568256.0, "6695": 28489568256.0, "6700": 28489568256.0, "6705": 28489568256.0, "6710": 28489568256.0, "6715": 28489568256.0, "6720": 28489568256.0, "6725": 28489568256.0, "6730": 28489568256.0, "6735": 28489568256.0, "6740": 28489568256.0, "6745": 28489568256.0, "6750": 28489568256.0, "6755": 28489568256.0, "6760": 28489568256.0, "6765": 28489568256.0, "6770": 28489568256.0, "6775": 28489568256.0, "6780": 28489568256.0, "6785": 28489568256.0, "6790": 28489568256.0, "6795": 28489568256.0, "6800": 28489568256.0, "6805": 
28489568256.0, "6810": 28489568256.0, "6815": 28489568256.0, "6820": 28489568256.0, "6825": 28489568256.0, "6830": 28489568256.0, "6835": 28489568256.0, "6840": 28489568256.0, "6845": 28489568256.0, "6850": 28489568256.0, "6855": 28489568256.0, "6860": 28489568256.0, "6865": 28489568256.0, "6870": 28489568256.0, "6875": 28489568256.0, "6880": 28489568256.0, "6885": 28489568256.0, "6890": 28489568256.0, "6895": 28489568256.0, "6900": 28489568256.0, "6905": 28489568256.0, "6910": 28489568256.0, "6915": 28489568256.0, "6920": 28489568256.0, "6925": 28489568256.0, "6930": 28489568256.0, "6935": 28489568256.0, "6940": 28489568256.0, "6945": 28489568256.0, "6950": 28489568256.0, "6955": 28489568256.0, "6960": 28489568256.0, "6965": 28489568256.0, "6970": 28489568256.0, "6975": 28489568256.0, "6980": 28489568256.0, "6985": 28489568256.0, "6990": 28489568256.0, "6995": 28489568256.0, "7000": 28489568256.0, "7005": 28489568256.0, "7010": 28489568256.0, "7015": 28489568256.0, "7020": 28489568256.0, "7025": 28489568256.0, "7030": 28489568256.0, "7035": 28489568256.0, "7040": 28489568256.0, "7045": 28489568256.0, "7050": 28489568256.0, "7055": 28489568256.0, "7060": 28489568256.0, "7065": 28489568256.0, "7070": 28489568256.0, "7075": 28489568256.0, "7080": 28489568256.0, "7085": 28489568256.0, "7090": 28489568256.0, "7095": 28489568256.0, "7100": 28489568256.0, "7105": 28489568256.0, "7110": 28489568256.0, "7115": 28489568256.0, "7120": 28489568256.0, "7125": 28489568256.0, "7130": 28489568256.0, "7135": 28489568256.0, "7140": 28489568256.0, "7145": 28489568256.0, "7150": 28489568256.0, "7155": 28489568256.0, "7160": 28489568256.0, "7165": 28489568256.0, "7170": 28489568256.0, "7175": 28489568256.0, "7180": 28489568256.0, "7185": 28489568256.0, "7190": 28489568256.0, "7195": 28489568256.0, "7200": 28489568256.0, "7205": 28489568256.0, "7210": 28489568256.0, "7215": 28489568256.0, "7220": 28489568256.0, "7225": 28489568256.0, "7230": 28489568256.0, "7235": 28489568256.0, 
"7240": 28489568256.0, "7245": 28489568256.0, "7250": 28489568256.0, "7255": 28489568256.0, "7260": 28489568256.0, "7265": 28489568256.0, "7270": 28489568256.0, "7275": 28489568256.0, "7280": 28489568256.0, "7285": 28489568256.0, "7290": 28489568256.0, "7295": 28489568256.0, "7300": 28489568256.0, "7305": 28489568256.0, "7310": 28489568256.0, "7315": 28489568256.0, "7320": 28489568256.0, "7325": 28489568256.0, "7330": 28489568256.0, "7335": 28489568256.0, "7340": 28489568256.0, "7345": 28489568256.0, "7350": 28489568256.0, "7355": 28489568256.0, "7360": 28489568256.0, "7365": 28489568256.0, "7370": 28489568256.0, "7375": 28489568256.0, "7380": 28489568256.0, "7385": 28489568256.0, "7390": 28489568256.0, "7395": 28489568256.0, "7400": 28489568256.0, "7405": 28489568256.0, "7410": 28489568256.0, "7415": 28489568256.0, "7420": 28489568256.0, "7425": 28489568256.0, "7430": 28489568256.0, "7435": 28489568256.0, "7440": 28489568256.0, "7445": 28489568256.0, "7450": 28489568256.0, "7455": 28489568256.0, "7460": 28489568256.0, "7465": 28489568256.0, "7470": 28489568256.0, "7475": 28489568256.0, "7480": 28489568256.0, "7485": 28489568256.0, "7490": 28489568256.0, "7495": 28489568256.0, "7500": 28489568256.0, "7505": 28489568256.0, "7510": 28489568256.0, "7515": 28489568256.0, "7520": 28489568256.0, "7525": 28489568256.0, "7530": 28489568256.0, "7535": 28489568256.0, "7540": 28489568256.0, "7545": 28489568256.0, "7550": 28489568256.0, "7555": 28489568256.0, "7560": 28489568256.0, "7565": 28489568256.0, "7570": 28489568256.0, "7575": 28489568256.0, "7580": 28489568256.0, "7585": 28489568256.0, "7590": 28489568256.0, "7595": 28489568256.0, "7600": 28489568256.0, "7605": 28489568256.0, "7610": 28489568256.0, "7615": 28489568256.0, "7620": 28489568256.0, "7625": 28489568256.0, "7630": 28489568256.0, "7635": 28489568256.0, "7640": 28489568256.0, "7645": 28489568256.0, "7650": 28489568256.0, "7655": 28489568256.0, "7660": 28489568256.0, "7665": 28489568256.0, "7670": 
28489568256.0, "7675": 28489568256.0, "7680": 28489568256.0, "7685": 28489568256.0, "7690": 28489568256.0, "7695": 28489568256.0, "7700": 28489568256.0, "7705": 28489568256.0, "7710": 28489568256.0, "7715": 28489568256.0, "7720": 28489568256.0, "7725": 28489568256.0, "7730": 28489568256.0, "7735": 28489568256.0, "7740": 28489568256.0, "7745": 28489568256.0, "7750": 28489568256.0, "7755": 28489568256.0, "7760": 28489568256.0, "7765": 28489568256.0, "7770": 28489568256.0, "7775": 28489568256.0, "7780": 28489568256.0, "7785": 28489568256.0, "7790": 28489568256.0, "7795": 28489568256.0, "7800": 28489568256.0, "7805": 28489568256.0, "7810": 28489568256.0, "7815": 28489568256.0, "7820": 28489568256.0, "7825": 28489568256.0, "7830": 28489568256.0, "7835": 28489568256.0, "7840": 28489568256.0, "7845": 28489568256.0, "7850": 28489568256.0, "7855": 28489568256.0, "7860": 28489568256.0, "7865": 28489568256.0, "7870": 28489568256.0, "7875": 28489568256.0, "7880": 28489568256.0, "7885": 28489568256.0, "7890": 28489568256.0, "7895": 28489568256.0, "7900": 28489568256.0, "7905": 28489568256.0, "7910": 28489568256.0, "7915": 28489568256.0, "7920": 28489568256.0, "7925": 28489568256.0, "7930": 28489568256.0, "7935": 28489568256.0, "7940": 28489568256.0, "7945": 28489568256.0, "7950": 28489568256.0, "7955": 28489568256.0, "7960": 28489568256.0, "7965": 28489568256.0, "7970": 28489568256.0, "7975": 28489568256.0, "7980": 28489568256.0, "7985": 28489568256.0, "7990": 28489568256.0, "7995": 28489568256.0, "8000": 28489568256.0, "8005": 28489568256.0, "8010": 28489568256.0, "8015": 28489568256.0, "8020": 28489568256.0, "8025": 28489568256.0, "8030": 28489568256.0, "8035": 28489568256.0, "8040": 28489568256.0, "8045": 28489568256.0, "8050": 28489568256.0, "8055": 28489568256.0, "8060": 28489568256.0, "8065": 28489568256.0, "8070": 28489568256.0, "8075": 28489568256.0, "8080": 28489568256.0, "8085": 28489568256.0, "8090": 28489568256.0, "8095": 28489568256.0, "8100": 28489568256.0, 
"8105": 28489568256.0, "8110": 28489568256.0, "8115": 28489568256.0, "8120": 28489568256.0, "8125": 28489568256.0, "8130": 28489568256.0, "8135": 28489568256.0, "8140": 28489568256.0, "8145": 28489568256.0, "8150": 28489568256.0, "8155": 28489568256.0, "8160": 28489568256.0, "8165": 28489568256.0, "8170": 28489568256.0, "8175": 28489568256.0, "8180": 28489568256.0, "8185": 28489568256.0, "8190": 28489568256.0, "8195": 28489568256.0, "8200": 28489568256.0, "8205": 28489568256.0, "8210": 28489568256.0, "8215": 28489568256.0, "8220": 28489568256.0, "8225": 28489568256.0, "8230": 28489568256.0, "8235": 28489568256.0, "8240": 28489568256.0, "8245": 28489568256.0, "8250": 28489568256.0, "8255": 28489568256.0, "8260": 28489568256.0, "8265": 28489568256.0, "8270": 28489568256.0, "8275": 28489568256.0, "8280": 28489568256.0, "8285": 28489568256.0, "8290": 28489568256.0, "8295": 28489568256.0, "8300": 28489568256.0, "8305": 28489568256.0, "8310": 28489568256.0, "8315": 28489568256.0, "8320": 28489568256.0, "8325": 28489568256.0, "8330": 28489568256.0, "8335": 28489568256.0, "8340": 28489568256.0, "8345": 28489568256.0, "8350": 28489568256.0, "8355": 28489568256.0, "8360": 28489568256.0, "8365": 28489568256.0, "8370": 28489568256.0, "8375": 28489568256.0, "8380": 28489568256.0, "8385": 28489568256.0, "8390": 28489568256.0, "8395": 28489568256.0, "8400": 28489568256.0, "8405": 28489568256.0, "8410": 28489568256.0, "8415": 28489568256.0, "8420": 28489568256.0, "8425": 28489568256.0, "8430": 28489568256.0, "8435": 28489568256.0, "8440": 28489568256.0, "8445": 28489568256.0, "8450": 28489568256.0, "8455": 28489568256.0, "8460": 28489568256.0, "8465": 28489568256.0, "8470": 28489568256.0, "8475": 28489568256.0, "8480": 28489568256.0, "8485": 28489568256.0, "8490": 28489568256.0, "8495": 28489568256.0, "8500": 28489568256.0, "8505": 28489568256.0, "8510": 28489568256.0, "8515": 28489568256.0, "8520": 28489568256.0, "8525": 28489568256.0, "8530": 28489568256.0, "8535": 
28489568256.0, "8540": 28489568256.0, "8545": 28489568256.0, "8550": 28489568256.0, "8555": 28489568256.0, "8560": 28489568256.0, "8565": 28489568256.0, "8570": 28489568256.0, "8575": 28489568256.0, "8580": 28489568256.0, "8585": 28489568256.0, "8590": 28489568256.0, "8595": 28489568256.0, "8600": 28489568256.0, "8605": 28489568256.0, "8610": 28489568256.0, "8615": 28489568256.0, "8620": 28489568256.0, "8625": 28489568256.0, "8630": 28489568256.0, "8635": 28489568256.0, "8640": 28489568256.0, "8645": 28489568256.0, "8650": 28489568256.0, "8655": 28489568256.0, "8660": 28489568256.0, "8665": 28489568256.0, "8670": 28489568256.0, "8675": 28489568256.0, "8680": 28489568256.0, "8685": 28489568256.0, "8690": 28489568256.0, "8695": 28489568256.0, "8700": 28489568256.0, "8705": 28489568256.0, "8710": 28489568256.0, "8715": 28489568256.0, "8720": 28489568256.0, "8725": 28489568256.0, "8730": 28489568256.0, "8735": 28489568256.0, "8740": 28489568256.0, "8745": 28489568256.0, "8750": 28489568256.0, "8755": 28489568256.0, "8760": 28489568256.0, "8765": 28489568256.0, "8770": 28489568256.0, "8775": 28489568256.0, "8780": 28489568256.0, "8785": 28489568256.0, "8790": 28489568256.0, "8795": 28489568256.0, "8800": 28489568256.0, "8805": 28489568256.0, "8810": 28489568256.0, "8815": 28489568256.0, "8820": 28489568256.0, "8825": 28489568256.0, "8830": 28489568256.0, "8835": 28489568256.0, "8840": 28489568256.0, "8845": 28489568256.0, "8850": 28489568256.0, "8855": 28489568256.0, "8860": 28489568256.0, "8865": 28489568256.0, "8870": 28489568256.0, "8875": 28489568256.0, "8880": 28489568256.0, "8885": 28489568256.0, "8890": 28489568256.0, "8895": 28489568256.0, "8900": 28489568256.0, "8905": 28489568256.0, "8910": 28489568256.0, "8915": 28489568256.0, "8920": 28489568256.0, "8925": 28489568256.0, "8930": 28489568256.0, "8935": 28489568256.0, "8940": 28489568256.0, "8945": 28489568256.0, "8950": 28489568256.0, "8955": 28489568256.0, "8960": 28489568256.0, "8965": 28489568256.0, 
"8970": 28489568256.0, "8975": 28489568256.0, "8980": 28489568256.0, "8985": 28489568256.0, "8990": 28489568256.0, "8995": 28489568256.0, "9000": 28489568256.0, "9005": 28489568256.0, "9010": 28489568256.0, "9015": 28489568256.0, "9020": 28489568256.0, "9025": 28489568256.0, "9030": 28489568256.0, "9035": 28489568256.0, "9040": 28489568256.0, "9045": 28489568256.0, "9050": 28489568256.0, "9055": 28489568256.0, "9060": 28489568256.0, "9065": 28489568256.0, "9070": 28489568256.0, "9075": 28489568256.0, "9080": 28489568256.0, "9085": 28489568256.0, "9090": 28489568256.0, "9095": 28489568256.0, "9100": 28489568256.0, "9105": 28489568256.0, "9110": 28489568256.0, "9115": 28489568256.0, "9120": 28489568256.0, "9125": 28489568256.0, "9130": 28489568256.0, "9135": 28489568256.0, "9140": 28489568256.0, "9145": 28489568256.0, "9150": 28489568256.0, "9155": 28489568256.0, "9160": 28489568256.0, "9165": 28489568256.0, "9170": 28489568256.0, "9175": 28489568256.0, "9180": 28489568256.0, "9185": 28489568256.0, "9190": 28489568256.0, "9195": 28489568256.0, "9200": 28489568256.0, "9205": 28489568256.0, "9210": 28489568256.0, "9215": 28489568256.0, "9220": 28489568256.0, "9225": 28489568256.0, "9230": 28489568256.0, "9235": 28489568256.0, "9240": 28489568256.0, "9245": 28489568256.0, "9250": 28489568256.0, "9255": 28489568256.0, "9260": 28489568256.0, "9265": 28489568256.0, "9270": 28489568256.0, "9275": 28489568256.0, "9280": 28489568256.0, "9285": 28489568256.0, "9290": 28489568256.0, "9295": 28489568256.0, "9300": 28489568256.0, "9305": 28489568256.0, "9310": 28489568256.0, "9315": 28489568256.0, "9320": 28489568256.0, "9325": 28489568256.0, "9330": 28489568256.0, "9335": 28489568256.0, "9340": 28489568256.0, "9345": 28489568256.0, "9350": 28489568256.0, "9355": 28489568256.0, "9360": 28489568256.0, "9365": 28489568256.0, "9370": 28489568256.0, "9375": 28489568256.0, "9380": 28489568256.0, "9385": 28489568256.0, "9390": 28489568256.0, "9395": 28489568256.0, "9400": 
28489568256.0, "9405": 28489568256.0, "9410": 28489568256.0, "9415": 28489568256.0, "9420": 28489568256.0, "9425": 28489568256.0, "9430": 28489568256.0, "9435": 28489568256.0, "9440": 28489568256.0, "9445": 28489568256.0, "9450": 28489568256.0, "9455": 28489568256.0, "9460": 28489568256.0, "9465": 28489568256.0, "9470": 28489568256.0, "9475": 28489568256.0, "9480": 28489568256.0, "9485": 28489568256.0, "9490": 28489568256.0, "9495": 28489568256.0, "9500": 28489568256.0, "9505": 28489568256.0, "9510": 28489568256.0, "9515": 28489568256.0, "9520": 28489568256.0, "9525": 28489568256.0, "9530": 28489568256.0, "9535": 28489568256.0, "9540": 28489568256.0, "9545": 28489568256.0, "9550": 28489568256.0, "9555": 28489568256.0, "9560": 28489568256.0, "9565": 28489568256.0, "9570": 28489568256.0, "9575": 28489568256.0, "9580": 28489568256.0, "9585": 28489568256.0, "9590": 28489568256.0, "9595": 28489568256.0, "9600": 28489568256.0, "9605": 28489568256.0, "9610": 28489568256.0, "9615": 28489568256.0, "9620": 28489568256.0, "9625": 28489568256.0, "9630": 28489568256.0, "9635": 28489568256.0, "9640": 28489568256.0, "9645": 28489568256.0, "9650": 28489568256.0, "9655": 28489568256.0, "9660": 28489568256.0, "9665": 28489568256.0, "9670": 28489568256.0, "9675": 28489568256.0, "9680": 28489568256.0, "9685": 28489568256.0, "9690": 28489568256.0, "9695": 28489568256.0, "9700": 28489568256.0, "9705": 28489568256.0, "9710": 28489568256.0, "9715": 28489568256.0, "9720": 28489568256.0, "9725": 28489568256.0, "9730": 28489568256.0, "9735": 28489568256.0, "9740": 28489568256.0, "9745": 28489568256.0, "9750": 28489568256.0, "9755": 28489568256.0, "9760": 28489568256.0, "9765": 28489568256.0, "9770": 28489568256.0, "9775": 28489568256.0, "9780": 28489568256.0, "9785": 28489568256.0, "9790": 28489568256.0, "9795": 28489568256.0, "9800": 28489568256.0, "9805": 28489568256.0, "9810": 28489568256.0, "9815": 28489568256.0, "9820": 28489568256.0, "9825": 28489568256.0, "9830": 28489568256.0, 
"9835": 28489568256.0, "9840": 28489568256.0, "9845": 28489568256.0, "9850": 28489568256.0, "9855": 28489568256.0, "9860": 28489568256.0, "9865": 28489568256.0, "9870": 28489568256.0, "9875": 28489568256.0, "9880": 28489568256.0, "9885": 28489568256.0, "9890": 28489568256.0, "9895": 28489568256.0, "9900": 28489568256.0, "9905": 28489568256.0, "9910": 28489568256.0, "9915": 28489568256.0, "9920": 28489568256.0, "9925": 28489568256.0, "9930": 28489568256.0, "9935": 28489568256.0, "9940": 28489568256.0, "9945": 28489568256.0, "9950": 28489568256.0, "9955": 28489568256.0, "9960": 28489568256.0, "9965": 28489568256.0, "9970": 28489568256.0, "9975": 28489568256.0, "9980": 28489568256.0, "9985": 28489568256.0, "9990": 28489568256.0, "9995": 28489568256.0, "10000": 28489568256.0, "10005": 28489568256.0, "10010": 28489568256.0, "10015": 28489568256.0, "10020": 28489568256.0, "10025": 28489568256.0, "10030": 28489568256.0, "10035": 28489568256.0, "10040": 28489568256.0, "10045": 28489568256.0, "10050": 28489568256.0, "10055": 28489568256.0, "10060": 28489568256.0, "10065": 28489568256.0, "10070": 28489568256.0, "10075": 28489568256.0, "10080": 28489568256.0, "10085": 28489568256.0, "10090": 28489568256.0, "10095": 28489568256.0, "10100": 28489568256.0, "10105": 28489568256.0, "10110": 28489568256.0, "10115": 28489568256.0, "10120": 28489568256.0, "10125": 28489568256.0, "10130": 28489568256.0, "10135": 28489568256.0, "10140": 28489568256.0, "10145": 28489568256.0, "10150": 28489568256.0, "10155": 28489568256.0, "10160": 28489568256.0, "10165": 28489568256.0, "10170": 28489568256.0, "10175": 28489568256.0, "10180": 28489568256.0, "10185": 28489568256.0, "10190": 28489568256.0, "10195": 28489568256.0, "10200": 28489568256.0, "10205": 28489568256.0, "10210": 28489568256.0, "10215": 28489568256.0, "10220": 28489568256.0, "10225": 28489568256.0, "10230": 28489568256.0, "10235": 28489568256.0, "10240": 28489568256.0, "10245": 28489568256.0, "10250": 28489568256.0, "10255": 
28489568256.0, "10260": 28489568256.0, "10265": 28489568256.0, "10270": 28489568256.0, "10275": 28489568256.0, "10280": 28489568256.0, "10285": 28489568256.0, "10290": 28489568256.0, "10295": 28489568256.0, "10300": 28489568256.0, "10305": 28489568256.0, "10310": 28489568256.0, "10315": 28489568256.0, "10320": 28489568256.0, "10325": 28489568256.0, "10330": 28489568256.0, "10335": 28489568256.0, "10340": 28489568256.0, "10345": 28489568256.0, "10350": 28489568256.0, "10355": 28489568256.0, "10360": 28489568256.0, "10365": 28489568256.0, "10370": 28489568256.0, "10375": 28489568256.0, "10380": 28489568256.0, "10385": 28489568256.0, "10390": 28489568256.0, "10395": 28489568256.0, "10400": 28489568256.0, "10405": 28489568256.0, "10410": 28489568256.0, "10415": 28489568256.0, "10420": 28489568256.0, "10425": 28489568256.0, "10430": 28489568256.0, "10435": 28489568256.0, "10440": 28489568256.0, "10445": 28489568256.0, "10450": 28489568256.0, "10455": 28489568256.0, "10460": 28489568256.0, "10465": 28489568256.0, "10470": 28489568256.0, "10475": 28489568256.0, "10480": 28489568256.0, "10485": 28489568256.0, "10490": 28489568256.0, "10495": 28489568256.0, "10500": 28489568256.0, "10505": 28489568256.0, "10510": 28489568256.0, "10515": 28489568256.0, "10520": 28489568256.0, "10525": 28489568256.0, "10530": 28489568256.0, "10535": 28489568256.0, "10540": 28489568256.0, "10545": 28489568256.0, "10550": 28489568256.0, "10555": 28489568256.0, "10560": 28489568256.0, "10565": 28489568256.0, "10570": 28489568256.0, "10575": 28489568256.0, "10580": 28489568256.0, "10585": 28489568256.0, "10590": 28489568256.0, "10595": 28489568256.0, "10600": 28489568256.0, "10605": 28489568256.0, "10610": 28489568256.0, "10615": 28489568256.0, "10620": 28489568256.0, "10625": 28489568256.0, "10630": 28489568256.0, "10635": 28489568256.0, "10640": 28489568256.0, "10645": 28489568256.0, "10650": 28489568256.0, "10655": 28489568256.0, "10660": 28489568256.0, "10665": 28489568256.0, "10670": 
28489568256.0, "10675": 28489568256.0, "10680": 28489568256.0, "10685": 28489568256.0, "10690": 28489568256.0, "10695": 28489568256.0, "10700": 28489568256.0, "10705": 28489568256.0, "10710": 28489568256.0, "10715": 28489568256.0, "10720": 28489568256.0, "10725": 28489568256.0, "10730": 28489568256.0, "10735": 28489568256.0, "10740": 28489568256.0, "10745": 28489568256.0, "10750": 28489568256.0, "10755": 28489568256.0, "10760": 28489568256.0, "10765": 28489568256.0, "10770": 28489568256.0, "10775": 28489568256.0, "10780": 28489568256.0, "10785": 28489568256.0, "10790": 28489568256.0, "10795": 28489568256.0, "10800": 28489568256.0, "10805": 28489568256.0, "10810": 28489568256.0, "10815": 28489568256.0, "10820": 28489568256.0, "10825": 28489568256.0, "10830": 28489568256.0, "10835": 28489568256.0, "10840": 28489568256.0, "10845": 28489568256.0, "10850": 28489568256.0, "10855": 28489568256.0, "10860": 28489568256.0, "10865": 28489568256.0, "10870": 28489568256.0, "10875": 28489568256.0, "10880": 28489568256.0, "10885": 28489568256.0, "10890": 28489568256.0, "10895": 28489568256.0, "10900": 28489568256.0, "10905": 28489568256.0, "10910": 28489568256.0, "10915": 28489568256.0, "10920": 28489568256.0, "10925": 28489568256.0, "10930": 28489568256.0, "10935": 28489568256.0, "10940": 28489568256.0, "10945": 28489568256.0, "10950": 28489568256.0, "10955": 28489568256.0, "10960": 28489568256.0, "10965": 28489568256.0, "10970": 28489568256.0, "10975": 28489568256.0, "10980": 28489568256.0, "10985": 28489568256.0, "10990": 28489568256.0, "10995": 28489568256.0, "11000": 28489568256.0, "11005": 28489568256.0, "11010": 28489568256.0, "11015": 28489568256.0, "11020": 28489568256.0, "11025": 28489568256.0, "11030": 28489568256.0, "11035": 28489568256.0, "11040": 28489568256.0, "11045": 28489568256.0, "11050": 28489568256.0, "11055": 28489568256.0, "11060": 28489568256.0, "11065": 28489568256.0, "11070": 28489568256.0, "11075": 28489568256.0, "11080": 28489568256.0, "11085": 
28489568256.0, "11090": 28489568256.0, "11095": 28489568256.0, "11100": 28489568256.0, "11105": 28489568256.0, "11110": 28489568256.0, "11115": 28489568256.0, "11120": 28489568256.0, "11125": 28489568256.0, "11130": 28489568256.0, "11135": 28489568256.0, "11140": 28489568256.0, "11145": 28489568256.0, "11150": 28489568256.0, "11155": 28489568256.0, "11160": 28489568256.0, "11165": 28489568256.0, "11170": 28489568256.0, "11175": 28489568256.0, "11180": 28489568256.0, "11185": 28489568256.0, "11190": 28489568256.0, "11195": 28489568256.0, "11200": 28489568256.0, "11205": 28489568256.0, "11210": 28489568256.0, "11215": 28489568256.0, "11220": 28489568256.0, "11225": 28489568256.0, "11230": 28489568256.0, "11235": 28489568256.0, "11240": 28489568256.0, "11245": 28489568256.0, "11250": 28489568256.0, "11255": 28489568256.0, "11260": 28489568256.0, "11265": 28489568256.0, "11270": 28489568256.0, "11275": 28489568256.0, "11280": 28489568256.0, "11285": 28489568256.0, "11290": 28489568256.0, "11295": 28489568256.0, "11300": 28489568256.0, "11305": 28489568256.0, "11310": 28489568256.0, "11315": 28489568256.0, "11320": 28489568256.0, "11325": 28489568256.0, "11330": 28489568256.0, "11335": 28489568256.0, "11340": 28489568256.0, "11345": 28489568256.0, "11350": 28489568256.0, "11355": 28489568256.0, "11360": 28489568256.0, "11365": 28489568256.0, "11370": 28489568256.0, "11375": 28489568256.0, "11380": 28489568256.0, "11385": 28489568256.0, "11390": 28489568256.0, "11395": 28489568256.0, "11400": 28489568256.0, "11405": 28489568256.0, "11410": 28489568256.0, "11415": 28489568256.0, "11420": 28489568256.0, "11425": 28489568256.0, "11430": 28489568256.0, "11435": 28489568256.0, "11440": 28489568256.0, "11445": 28489568256.0, "11450": 28489568256.0, "11455": 28489568256.0, "11460": 28489568256.0, "11465": 28489568256.0, "11470": 28489568256.0, "11475": 28489568256.0, "11480": 28489568256.0, "11485": 28489568256.0, "11490": 28489568256.0, "11495": 28489568256.0, "11500": 
28489568256.0, "11505": 28489568256.0, "11510": 28489568256.0, "11515": 28489568256.0, "11520": 28489568256.0, "11525": 28489568256.0, "11530": 28489568256.0, "11535": 28489568256.0, "11540": 28489568256.0, "11545": 28489568256.0, "11550": 28489568256.0, "11555": 28489568256.0, "11560": 28489568256.0, "11565": 28489568256.0, "11570": 28489568256.0, "11575": 28489568256.0, "11580": 28489568256.0, "11585": 28489568256.0, "11590": 28489568256.0, "11595": 28489568256.0, "11600": 28489568256.0, "11605": 28489568256.0, "11610": 28489568256.0, "11615": 28489568256.0, "11620": 28489568256.0, "11625": 28489568256.0, "11630": 28489568256.0, "11635": 28489568256.0, "11640": 28489568256.0, "11645": 28489568256.0, "11650": 28489568256.0, "11655": 28489568256.0, "11660": 28489568256.0, "11665": 28489568256.0, "11670": 28489568256.0, "11675": 28489568256.0, "11680": 28489568256.0, "11685": 28489568256.0, "11690": 28489568256.0, "11695": 28489568256.0, "11700": 28489568256.0, "11705": 28489568256.0, "11710": 28489568256.0, "11715": 28489568256.0, "11720": 28489568256.0, "11725": 28489568256.0, "11730": 28489568256.0, "11735": 28489568256.0, "11740": 28489568256.0, "11745": 28489568256.0, "11750": 28489568256.0, "11755": 28489568256.0, "11760": 28489568256.0, "11765": 28489568256.0, "11770": 28489568256.0, "11775": 28489568256.0, "11780": 28489568256.0, "11785": 28489568256.0, "11790": 28489568256.0, "11795": 28489568256.0, "11800": 28489568256.0, "11805": 28489568256.0, "11810": 28489568256.0, "11815": 28489568256.0, "11820": 28489568256.0, "11825": 28489568256.0, "11830": 28489568256.0, "11835": 28489568256.0, "11840": 28489568256.0, "11845": 28489568256.0, "11850": 28489568256.0, "11855": 28489568256.0, "11860": 28489568256.0, "11865": 28489568256.0, "11870": 28489568256.0, "11875": 28489568256.0, "11880": 28489568256.0, "11885": 28489568256.0, "11890": 28489568256.0, "11895": 28489568256.0, "11900": 28489568256.0, "11905": 28489568256.0, "11910": 28489568256.0, "11915": 
28489568256.0, "11920": 28489568256.0, "11925": 28489568256.0, "11930": 28489568256.0, "11935": 28489568256.0, "11940": 28489568256.0, "11945": 28489568256.0, "11950": 28489568256.0, "11955": 28489568256.0, "11960": 28489568256.0, "11965": 28489568256.0, "11970": 28489568256.0, "11975": 28489568256.0, "11980": 28489568256.0, "11985": 28489568256.0, "11990": 28489568256.0, "11995": 28489568256.0, "12000": 28489568256.0, "12005": 28489568256.0, "12010": 28489568256.0, "12015": 28489568256.0, "12020": 28489568256.0, "12025": 28489568256.0, "12030": 28489568256.0, "12035": 28489568256.0, "12040": 28489568256.0, "12045": 28489568256.0, "12050": 28489568256.0, "12055": 28489568256.0, "12060": 28489568256.0, "12065": 28489568256.0, "12070": 28489568256.0, "12075": 28489568256.0, "12080": 28489568256.0, "12085": 28489568256.0, "12090": 28489568256.0, "12095": 28489568256.0, "12100": 28489568256.0, "12105": 28489568256.0, "12110": 28489568256.0, "12115": 28489568256.0, "12120": 28489568256.0, "12125": 28489568256.0, "12130": 28489568256.0, "12135": 28489568256.0, "12140": 28489568256.0, "12145": 28489568256.0, "12150": 28489568256.0, "12155": 28489568256.0, "12160": 28489568256.0, "12165": 28489568256.0, "12170": 28489568256.0, "12175": 28489568256.0, "12180": 28489568256.0, "12185": 28489568256.0, "12190": 28489568256.0, "12195": 28489568256.0, "12200": 28489568256.0, "12205": 28489568256.0, "12210": 28489568256.0, "12215": 28489568256.0, "12220": 28489568256.0, "12225": 28489568256.0, "12230": 28489568256.0, "12235": 28489568256.0, "12240": 28489568256.0, "12245": 28489568256.0, "12250": 28489568256.0, "12255": 28489568256.0, "12260": 28489568256.0, "12265": 28489568256.0, "12270": 28489568256.0, "12275": 28489568256.0, "12280": 28489568256.0, "12285": 28489568256.0, "12290": 28489568256.0, "12295": 28489568256.0, "12300": 28489568256.0, "12305": 28489568256.0, "12310": 28489568256.0, "12315": 28489568256.0, "12320": 28489568256.0, "12325": 28489568256.0, "12330": 
28489568256.0, "12335": 28489568256.0, "12340": 28489568256.0, "12345": 28489568256.0, "12350": 28489568256.0, "12355": 28489568256.0, "12360": 28489568256.0, "12365": 28489568256.0, "12370": 28489568256.0, "12375": 28489568256.0, "12380": 28489568256.0, "12385": 28489568256.0, "12390": 28489568256.0, "12395": 28489568256.0, "12400": 28489568256.0, "12405": 28489568256.0, "12410": 28489568256.0, "12415": 28489568256.0, "12420": 28489568256.0, "12425": 28489568256.0, "12430": 28489568256.0, "12435": 28489568256.0, "12440": 28489568256.0, "12445": 28489568256.0, "12450": 28489568256.0, "12455": 28489568256.0, "12460": 28489568256.0, "12465": 28489568256.0, "12470": 28489568256.0, "12475": 28489568256.0, "12480": 28489568256.0, "12485": 28489568256.0, "12490": 28489568256.0, "12495": 28489568256.0, "12500": 28489568256.0, "12505": 28489568256.0, "12510": 28489568256.0, "12515": 28489568256.0, "12520": 28489568256.0, "12525": 28489568256.0, "12530": 28489568256.0, "12535": 28489568256.0, "12540": 28489568256.0, "12545": 28489568256.0, "12550": 28489568256.0, "12555": 28489568256.0, "12560": 28489568256.0, "12565": 28489568256.0, "12570": 28489568256.0, "12575": 28489568256.0, "12580": 28489568256.0, "12585": 28489568256.0, "12590": 28489568256.0, "12595": 28489568256.0, "12600": 28489568256.0, "12605": 28489568256.0, "12610": 28489568256.0, "12615": 28489568256.0, "12620": 28489568256.0, "12625": 28489568256.0, "12630": 28489568256.0, "12635": 28489568256.0, "12640": 28489568256.0, "12645": 28489568256.0, "12650": 28489568256.0, "12655": 28489568256.0, "12660": 28489568256.0, "12665": 28489568256.0, "12670": 28489568256.0, "12675": 28489568256.0, "12680": 28489568256.0, "12685": 28489568256.0, "12690": 28489568256.0, "12695": 28489568256.0, "12700": 28489568256.0, "12705": 28489568256.0, "12710": 28489568256.0, "12715": 28489568256.0, "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": 
"nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": "nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan"}}, "iteration-time": {"start_step": 1, "end_step": 13000, "step_interval": 5, "values": {"1": "nan", "5": "nan", "10": "nan", "15": "nan", "20": "nan", "25": "nan", "30": "nan", "35": "nan", "40": "nan", "45": "nan", "50": "nan", "55": "nan", "60": "nan", "65": "nan", "70": "nan", "75": "nan", "80": "nan", "85": "nan", "90": "nan", "95": "nan", "100": 3.5554, "105": "nan", "110": "nan", "115": "nan", "120": "nan", "125": "nan", "130": "nan", "135": "nan", "140": "nan", "145": "nan", "150": "nan", "155": "nan", "160": "nan", "165": "nan", "170": "nan", "175": "nan", "180": "nan", "185": "nan", "190": "nan", "195": "nan", "200": 3.47185, "205": "nan", "210": "nan", "215": "nan", "220": "nan", "225": "nan", "230": "nan", "235": "nan", "240": "nan", "245": "nan", "250": "nan", "255": "nan", "260": "nan", "265": "nan", "270": "nan", "275": "nan", "280": "nan", "285": "nan", "290": "nan", "295": "nan", "300": 3.45692, "305": "nan", "310": "nan", "315": "nan", "320": "nan", "325": "nan", "330": "nan", "335": "nan", "340": "nan", "345": "nan", "350": "nan", "355": "nan", "360": "nan", "365": "nan", "370": "nan", "375": "nan", "380": "nan", "385": "nan", "390": "nan", "395": "nan", "400": 3.45471, "405": "nan", 
"410": "nan", "415": "nan", "420": "nan", "425": "nan", "430": "nan", "435": "nan", "440": "nan", "445": "nan", "450": "nan", "455": "nan", "460": "nan", "465": "nan", "470": "nan", "475": "nan", "480": "nan", "485": "nan", "490": "nan", "495": "nan", "500": 3.45467, "505": "nan", "510": "nan", "515": "nan", "520": "nan", "525": "nan", "530": "nan", "535": "nan", "540": "nan", "545": "nan", "550": "nan", "555": "nan", "560": "nan", "565": "nan", "570": "nan", "575": "nan", "580": "nan", "585": "nan", "590": "nan", "595": "nan", "600": 3.4543, "605": "nan", "610": "nan", "615": "nan", "620": "nan", "625": "nan", "630": "nan", "635": "nan", "640": "nan", "645": "nan", "650": "nan", "655": "nan", "660": "nan", "665": "nan", "670": "nan", "675": "nan", "680": "nan", "685": "nan", "690": "nan", "695": "nan", "700": 3.45264, "705": "nan", "710": "nan", "715": "nan", "720": "nan", "725": "nan", "730": "nan", "735": "nan", "740": "nan", "745": "nan", "750": "nan", "755": "nan", "760": "nan", "765": "nan", "770": "nan", "775": "nan", "780": "nan", "785": "nan", "790": "nan", "795": "nan", "800": 3.45125, "805": "nan", "810": "nan", "815": "nan", "820": "nan", "825": "nan", "830": "nan", "835": "nan", "840": "nan", "845": "nan", "850": "nan", "855": "nan", "860": "nan", "865": "nan", "870": "nan", "875": "nan", "880": "nan", "885": "nan", "890": "nan", "895": "nan", "900": 3.44668, "905": "nan", "910": "nan", "915": "nan", "920": "nan", "925": "nan", "930": "nan", "935": "nan", "940": "nan", "945": "nan", "950": "nan", "955": "nan", "960": "nan", "965": "nan", "970": "nan", "975": "nan", "980": "nan", "985": "nan", "990": "nan", "995": "nan", "1000": 3.44035, "1005": "nan", "1010": "nan", "1015": "nan", "1020": "nan", "1025": "nan", "1030": "nan", "1035": "nan", "1040": "nan", "1045": "nan", "1050": "nan", "1055": "nan", "1060": "nan", "1065": "nan", "1070": "nan", "1075": "nan", "1080": "nan", "1085": "nan", "1090": "nan", "1095": "nan", "1100": 3.43442, "1105": "nan", 
"1110": "nan", "1115": "nan", "1120": "nan", "1125": "nan", "1130": "nan", "1135": "nan", "1140": "nan", "1145": "nan", "1150": "nan", "1155": "nan", "1160": "nan", "1165": "nan", "1170": "nan", "1175": "nan", "1180": "nan", "1185": "nan", "1190": "nan", "1195": "nan", "1200": 3.4306, "1205": "nan", "1210": "nan", "1215": "nan", "1220": "nan", "1225": "nan", "1230": "nan", "1235": "nan", "1240": "nan", "1245": "nan", "1250": "nan", "1255": "nan", "1260": "nan", "1265": "nan", "1270": "nan", "1275": "nan", "1280": "nan", "1285": "nan", "1290": "nan", "1295": "nan", "1300": 3.42464, "1305": "nan", "1310": "nan", "1315": "nan", "1320": "nan", "1325": "nan", "1330": "nan", "1335": "nan", "1340": "nan", "1345": "nan", "1350": "nan", "1355": "nan", "1360": "nan", "1365": "nan", "1370": "nan", "1375": "nan", "1380": "nan", "1385": "nan", "1390": "nan", "1395": "nan", "1400": 3.42155, "1405": "nan", "1410": "nan", "1415": "nan", "1420": "nan", "1425": "nan", "1430": "nan", "1435": "nan", "1440": "nan", "1445": "nan", "1450": "nan", "1455": "nan", "1460": "nan", "1465": "nan", "1470": "nan", "1475": "nan", "1480": "nan", "1485": "nan", "1490": "nan", "1495": "nan", "1500": 3.4201, "1505": "nan", "1510": "nan", "1515": "nan", "1520": "nan", "1525": "nan", "1530": "nan", "1535": "nan", "1540": "nan", "1545": "nan", "1550": "nan", "1555": "nan", "1560": "nan", "1565": "nan", "1570": "nan", "1575": "nan", "1580": "nan", "1585": "nan", "1590": "nan", "1595": "nan", "1600": 3.41703, "1605": "nan", "1610": "nan", "1615": "nan", "1620": "nan", "1625": "nan", "1630": "nan", "1635": "nan", "1640": "nan", "1645": "nan", "1650": "nan", "1655": "nan", "1660": "nan", "1665": "nan", "1670": "nan", "1675": "nan", "1680": "nan", "1685": "nan", "1690": "nan", "1695": "nan", "1700": 3.41482, "1705": "nan", "1710": "nan", "1715": "nan", "1720": "nan", "1725": "nan", "1730": "nan", "1735": "nan", "1740": "nan", "1745": "nan", "1750": "nan", "1755": "nan", "1760": "nan", "1765": "nan", "1770": 
"nan", "1775": "nan", "1780": "nan", "1785": "nan", "1790": "nan", "1795": "nan", "1800": 3.41352, "1805": "nan", "1810": "nan", "1815": "nan", "1820": "nan", "1825": "nan", "1830": "nan", "1835": "nan", "1840": "nan", "1845": "nan", "1850": "nan", "1855": "nan", "1860": "nan", "1865": "nan", "1870": "nan", "1875": "nan", "1880": "nan", "1885": "nan", "1890": "nan", "1895": "nan", "1900": 3.4128, "1905": "nan", "1910": "nan", "1915": "nan", "1920": "nan", "1925": "nan", "1930": "nan", "1935": "nan", "1940": "nan", "1945": "nan", "1950": "nan", "1955": "nan", "1960": "nan", "1965": "nan", "1970": "nan", "1975": "nan", "1980": "nan", "1985": "nan", "1990": "nan", "1995": "nan", "2000": 3.40994, "2005": "nan", "2010": "nan", "2015": "nan", "2020": "nan", "2025": "nan", "2030": "nan", "2035": "nan", "2040": "nan", "2045": "nan", "2050": "nan", "2055": "nan", "2060": "nan", "2065": "nan", "2070": "nan", "2075": "nan", "2080": "nan", "2085": "nan", "2090": "nan", "2095": "nan", "2100": 3.40878, "2105": "nan", "2110": "nan", "2115": "nan", "2120": "nan", "2125": "nan", "2130": "nan", "2135": "nan", "2140": "nan", "2145": "nan", "2150": "nan", "2155": "nan", "2160": "nan", "2165": "nan", "2170": "nan", "2175": "nan", "2180": "nan", "2185": "nan", "2190": "nan", "2195": "nan", "2200": 3.40862, "2205": "nan", "2210": "nan", "2215": "nan", "2220": "nan", "2225": "nan", "2230": "nan", "2235": "nan", "2240": "nan", "2245": "nan", "2250": "nan", "2255": "nan", "2260": "nan", "2265": "nan", "2270": "nan", "2275": "nan", "2280": "nan", "2285": "nan", "2290": "nan", "2295": "nan", "2300": 3.41459, "2305": "nan", "2310": "nan", "2315": "nan", "2320": "nan", "2325": "nan", "2330": "nan", "2335": "nan", "2340": "nan", "2345": "nan", "2350": "nan", "2355": "nan", "2360": "nan", "2365": "nan", "2370": "nan", "2375": "nan", "2380": "nan", "2385": "nan", "2390": "nan", "2395": "nan", "2400": 3.40662, "2405": "nan", "2410": "nan", "2415": "nan", "2420": "nan", "2425": "nan", "2430": "nan", 
"2435": "nan", "2440": "nan", "2445": "nan", "2450": "nan", "2455": "nan", "2460": "nan", "2465": "nan", "2470": "nan", "2475": "nan", "2480": "nan", "2485": "nan", "2490": "nan", "2495": "nan", "2500": 3.40543, "2505": "nan", "2510": "nan", "2515": "nan", "2520": "nan", "2525": "nan", "2530": "nan", "2535": "nan", "2540": "nan", "2545": "nan", "2550": "nan", "2555": "nan", "2560": "nan", "2565": "nan", "2570": "nan", "2575": "nan", "2580": "nan", "2585": "nan", "2590": "nan", "2595": "nan", "2600": 3.40484, "2605": "nan", "2610": "nan", "2615": "nan", "2620": "nan", "2625": "nan", "2630": "nan", "2635": "nan", "2640": "nan", "2645": "nan", "2650": "nan", "2655": "nan", "2660": "nan", "2665": "nan", "2670": "nan", "2675": "nan", "2680": "nan", "2685": "nan", "2690": "nan", "2695": "nan", "2700": 3.40448, "2705": "nan", "2710": "nan", "2715": "nan", "2720": "nan", "2725": "nan", "2730": "nan", "2735": "nan", "2740": "nan", "2745": "nan", "2750": "nan", "2755": "nan", "2760": "nan", "2765": "nan", "2770": "nan", "2775": "nan", "2780": "nan", "2785": "nan", "2790": "nan", "2795": "nan", "2800": 3.403, "2805": "nan", "2810": "nan", "2815": "nan", "2820": "nan", "2825": "nan", "2830": "nan", "2835": "nan", "2840": "nan", "2845": "nan", "2850": "nan", "2855": "nan", "2860": "nan", "2865": "nan", "2870": "nan", "2875": "nan", "2880": "nan", "2885": "nan", "2890": "nan", "2895": "nan", "2900": 3.40346, "2905": "nan", "2910": "nan", "2915": "nan", "2920": "nan", "2925": "nan", "2930": "nan", "2935": "nan", "2940": "nan", "2945": "nan", "2950": "nan", "2955": "nan", "2960": "nan", "2965": "nan", "2970": "nan", "2975": "nan", "2980": "nan", "2985": "nan", "2990": "nan", "2995": "nan", "3000": 3.4023, "3005": "nan", "3010": "nan", "3015": "nan", "3020": "nan", "3025": "nan", "3030": "nan", "3035": "nan", "3040": "nan", "3045": "nan", "3050": "nan", "3055": "nan", "3060": "nan", "3065": "nan", "3070": "nan", "3075": "nan", "3080": "nan", "3085": "nan", "3090": "nan", "3095": 
"nan", "3100": 3.40069, "3105": "nan", "3110": "nan", "3115": "nan", "3120": "nan", "3125": "nan", "3130": "nan", "3135": "nan", "3140": "nan", "3145": "nan", "3150": "nan", "3155": "nan", "3160": "nan", "3165": "nan", "3170": "nan", "3175": "nan", "3180": "nan", "3185": "nan", "3190": "nan", "3195": "nan", "3200": 3.40162, "3205": "nan", "3210": "nan", "3215": "nan", "3220": "nan", "3225": "nan", "3230": "nan", "3235": "nan", "3240": "nan", "3245": "nan", "3250": "nan", "3255": "nan", "3260": "nan", "3265": "nan", "3270": "nan", "3275": "nan", "3280": "nan", "3285": "nan", "3290": "nan", "3295": "nan", "3300": 3.40071, "3305": "nan", "3310": "nan", "3315": "nan", "3320": "nan", "3325": "nan", "3330": "nan", "3335": "nan", "3340": "nan", "3345": "nan", "3350": "nan", "3355": "nan", "3360": "nan", "3365": "nan", "3370": "nan", "3375": "nan", "3380": "nan", "3385": "nan", "3390": "nan", "3395": "nan", "3400": 3.40058, "3405": "nan", "3410": "nan", "3415": "nan", "3420": "nan", "3425": "nan", "3430": "nan", "3435": "nan", "3440": "nan", "3445": "nan", "3450": "nan", "3455": "nan", "3460": "nan", "3465": "nan", "3470": "nan", "3475": "nan", "3480": "nan", "3485": "nan", "3490": "nan", "3495": "nan", "3500": 3.39993, "3505": "nan", "3510": "nan", "3515": "nan", "3520": "nan", "3525": "nan", "3530": "nan", "3535": "nan", "3540": "nan", "3545": "nan", "3550": "nan", "3555": "nan", "3560": "nan", "3565": "nan", "3570": "nan", "3575": "nan", "3580": "nan", "3585": "nan", "3590": "nan", "3595": "nan", "3600": 3.62689, "3605": "nan", "3610": "nan", "3615": "nan", "3620": "nan", "3625": "nan", "3630": "nan", "3635": "nan", "3640": "nan", "3645": "nan", "3650": "nan", "3655": "nan", "3660": "nan", "3665": "nan", "3670": "nan", "3675": "nan", "3680": "nan", "3685": "nan", "3690": "nan", "3695": "nan", "3700": 3.40474, "3705": "nan", "3710": "nan", "3715": "nan", "3720": "nan", "3725": "nan", "3730": "nan", "3735": "nan", "3740": "nan", "3745": "nan", "3750": "nan", "3755": 
"nan", "3760": "nan", "3765": "nan", "3770": "nan", "3775": "nan", "3780": "nan", "3785": "nan", "3790": "nan", "3795": "nan", "3800": 3.40515, "3805": "nan", "3810": "nan", "3815": "nan", "3820": "nan", "3825": "nan", "3830": "nan", "3835": "nan", "3840": "nan", "3845": "nan", "3850": "nan", "3855": "nan", "3860": "nan", "3865": "nan", "3870": "nan", "3875": "nan", "3880": "nan", "3885": "nan", "3890": "nan", "3895": "nan", "3900": 3.40436, "3905": "nan", "3910": "nan", "3915": "nan", "3920": "nan", "3925": "nan", "3930": "nan", "3935": "nan", "3940": "nan", "3945": "nan", "3950": "nan", "3955": "nan", "3960": "nan", "3965": "nan", "3970": "nan", "3975": "nan", "3980": "nan", "3985": "nan", "3990": "nan", "3995": "nan", "4000": 3.40304, "4005": "nan", "4010": "nan", "4015": "nan", "4020": "nan", "4025": "nan", "4030": "nan", "4035": "nan", "4040": "nan", "4045": "nan", "4050": "nan", "4055": "nan", "4060": "nan", "4065": "nan", "4070": "nan", "4075": "nan", "4080": "nan", "4085": "nan", "4090": "nan", "4095": "nan", "4100": 3.40076, "4105": "nan", "4110": "nan", "4115": "nan", "4120": "nan", "4125": "nan", "4130": "nan", "4135": "nan", "4140": "nan", "4145": "nan", "4150": "nan", "4155": "nan", "4160": "nan", "4165": "nan", "4170": "nan", "4175": "nan", "4180": "nan", "4185": "nan", "4190": "nan", "4195": "nan", "4200": 3.40222, "4205": "nan", "4210": "nan", "4215": "nan", "4220": "nan", "4225": "nan", "4230": "nan", "4235": "nan", "4240": "nan", "4245": "nan", "4250": "nan", "4255": "nan", "4260": "nan", "4265": "nan", "4270": "nan", "4275": "nan", "4280": "nan", "4285": "nan", "4290": "nan", "4295": "nan", "4300": 3.40241, "4305": "nan", "4310": "nan", "4315": "nan", "4320": "nan", "4325": "nan", "4330": "nan", "4335": "nan", "4340": "nan", "4345": "nan", "4350": "nan", "4355": "nan", "4360": "nan", "4365": "nan", "4370": "nan", "4375": "nan", "4380": "nan", "4385": "nan", "4390": "nan", "4395": "nan", "4400": 3.40228, "4405": "nan", "4410": "nan", "4415": 
"nan", "4420": "nan", "4425": "nan", "4430": "nan", "4435": "nan", "4440": "nan", "4445": "nan", "4450": "nan", "4455": "nan", "4460": "nan", "4465": "nan", "4470": "nan", "4475": "nan", "4480": "nan", "4485": "nan", "4490": "nan", "4495": "nan", "4500": 3.40282, "4505": "nan", "4510": "nan", "4515": "nan", "4520": "nan", "4525": "nan", "4530": "nan", "4535": "nan", "4540": "nan", "4545": "nan", "4550": "nan", "4555": "nan", "4560": "nan", "4565": "nan", "4570": "nan", "4575": "nan", "4580": "nan", "4585": "nan", "4590": "nan", "4595": "nan", "4600": 3.40214, "4605": "nan", "4610": "nan", "4615": "nan", "4620": "nan", "4625": "nan", "4630": "nan", "4635": "nan", "4640": "nan", "4645": "nan", "4650": "nan", "4655": "nan", "4660": "nan", "4665": "nan", "4670": "nan", "4675": "nan", "4680": "nan", "4685": "nan", "4690": "nan", "4695": "nan", "4700": 3.40155, "4705": "nan", "4710": "nan", "4715": "nan", "4720": "nan", "4725": "nan", "4730": "nan", "4735": "nan", "4740": "nan", "4745": "nan", "4750": "nan", "4755": "nan", "4760": "nan", "4765": "nan", "4770": "nan", "4775": "nan", "4780": "nan", "4785": "nan", "4790": "nan", "4795": "nan", "4800": 3.4016, "4805": "nan", "4810": "nan", "4815": "nan", "4820": "nan", "4825": "nan", "4830": "nan", "4835": "nan", "4840": "nan", "4845": "nan", "4850": "nan", "4855": "nan", "4860": "nan", "4865": "nan", "4870": "nan", "4875": "nan", "4880": "nan", "4885": "nan", "4890": "nan", "4895": "nan", "4900": 3.40208, "4905": "nan", "4910": "nan", "4915": "nan", "4920": "nan", "4925": "nan", "4930": "nan", "4935": "nan", "4940": "nan", "4945": "nan", "4950": "nan", "4955": "nan", "4960": "nan", "4965": "nan", "4970": "nan", "4975": "nan", "4980": "nan", "4985": "nan", "4990": "nan", "4995": "nan", "5000": 3.40265, "5005": "nan", "5010": "nan", "5015": "nan", "5020": "nan", "5025": "nan", "5030": "nan", "5035": "nan", "5040": "nan", "5045": "nan", "5050": "nan", "5055": "nan", "5060": "nan", "5065": "nan", "5070": "nan", "5075": "nan", 
"5080": "nan", "5085": "nan", "5090": "nan", "5095": "nan", "5100": 3.3986, "5105": "nan", "5110": "nan", "5115": "nan", "5120": "nan", "5125": "nan", "5130": "nan", "5135": "nan", "5140": "nan", "5145": "nan", "5150": "nan", "5155": "nan", "5160": "nan", "5165": "nan", "5170": "nan", "5175": "nan", "5180": "nan", "5185": "nan", "5190": "nan", "5195": "nan", "5200": 3.39887, "5205": "nan", "5210": "nan", "5215": "nan", "5220": "nan", "5225": "nan", "5230": "nan", "5235": "nan", "5240": "nan", "5245": "nan", "5250": "nan", "5255": "nan", "5260": "nan", "5265": "nan", "5270": "nan", "5275": "nan", "5280": "nan", "5285": "nan", "5290": "nan", "5295": "nan", "5300": 3.3991, "5305": "nan", "5310": "nan", "5315": "nan", "5320": "nan", "5325": "nan", "5330": "nan", "5335": "nan", "5340": "nan", "5345": "nan", "5350": "nan", "5355": "nan", "5360": "nan", "5365": "nan", "5370": "nan", "5375": "nan", "5380": "nan", "5385": "nan", "5390": "nan", "5395": "nan", "5400": 3.40087, "5405": "nan", "5410": "nan", "5415": "nan", "5420": "nan", "5425": "nan", "5430": "nan", "5435": "nan", "5440": "nan", "5445": "nan", "5450": "nan", "5455": "nan", "5460": "nan", "5465": "nan", "5470": "nan", "5475": "nan", "5480": "nan", "5485": "nan", "5490": "nan", "5495": "nan", "5500": 3.40055, "5505": "nan", "5510": "nan", "5515": "nan", "5520": "nan", "5525": "nan", "5530": "nan", "5535": "nan", "5540": "nan", "5545": "nan", "5550": "nan", "5555": "nan", "5560": "nan", "5565": "nan", "5570": "nan", "5575": "nan", "5580": "nan", "5585": "nan", "5590": "nan", "5595": "nan", "5600": 3.40101, "5605": "nan", "5610": "nan", "5615": "nan", "5620": "nan", "5625": "nan", "5630": "nan", "5635": "nan", "5640": "nan", "5645": "nan", "5650": "nan", "5655": "nan", "5660": "nan", "5665": "nan", "5670": "nan", "5675": "nan", "5680": "nan", "5685": "nan", "5690": "nan", "5695": "nan", "5700": 3.4007, "5705": "nan", "5710": "nan", "5715": "nan", "5720": "nan", "5725": "nan", "5730": "nan", "5735": "nan", "5740": 
"nan", "5745": "nan", "5750": "nan", "5755": "nan", "5760": "nan", "5765": "nan", "5770": "nan", "5775": "nan", "5780": "nan", "5785": "nan", "5790": "nan", "5795": "nan", "5800": 3.40177, "5805": "nan", "5810": "nan", "5815": "nan", "5820": "nan", "5825": "nan", "5830": "nan", "5835": "nan", "5840": "nan", "5845": "nan", "5850": "nan", "5855": "nan", "5860": "nan", "5865": "nan", "5870": "nan", "5875": "nan", "5880": "nan", "5885": "nan", "5890": "nan", "5895": "nan", "5900": 3.40093, "5905": "nan", "5910": "nan", "5915": "nan", "5920": "nan", "5925": "nan", "5930": "nan", "5935": "nan", "5940": "nan", "5945": "nan", "5950": "nan", "5955": "nan", "5960": "nan", "5965": "nan", "5970": "nan", "5975": "nan", "5980": "nan", "5985": "nan", "5990": "nan", "5995": "nan", "6000": 3.40207, "6005": "nan", "6010": "nan", "6015": "nan", "6020": "nan", "6025": "nan", "6030": "nan", "6035": "nan", "6040": "nan", "6045": "nan", "6050": "nan", "6055": "nan", "6060": "nan", "6065": "nan", "6070": "nan", "6075": "nan", "6080": "nan", "6085": "nan", "6090": "nan", "6095": "nan", "6100": 3.40047, "6105": "nan", "6110": "nan", "6115": "nan", "6120": "nan", "6125": "nan", "6130": "nan", "6135": "nan", "6140": "nan", "6145": "nan", "6150": "nan", "6155": "nan", "6160": "nan", "6165": "nan", "6170": "nan", "6175": "nan", "6180": "nan", "6185": "nan", "6190": "nan", "6195": "nan", "6200": 3.40254, "6205": "nan", "6210": "nan", "6215": "nan", "6220": "nan", "6225": "nan", "6230": "nan", "6235": "nan", "6240": "nan", "6245": "nan", "6250": "nan", "6255": "nan", "6260": "nan", "6265": "nan", "6270": "nan", "6275": "nan", "6280": "nan", "6285": "nan", "6290": "nan", "6295": "nan", "6300": 3.4024, "6305": "nan", "6310": "nan", "6315": "nan", "6320": "nan", "6325": "nan", "6330": "nan", "6335": "nan", "6340": "nan", "6345": "nan", "6350": "nan", "6355": "nan", "6360": "nan", "6365": "nan", "6370": "nan", "6375": "nan", "6380": "nan", "6385": "nan", "6390": "nan", "6395": "nan", "6400": 3.40281, 
"6405": "nan", "6410": "nan", "6415": "nan", "6420": "nan", "6425": "nan", "6430": "nan", "6435": "nan", "6440": "nan", "6445": "nan", "6450": "nan", "6455": "nan", "6460": "nan", "6465": "nan", "6470": "nan", "6475": "nan", "6480": "nan", "6485": "nan", "6490": "nan", "6495": "nan", "6500": 3.40268, "6505": "nan", "6510": "nan", "6515": "nan", "6520": "nan", "6525": "nan", "6530": "nan", "6535": "nan", "6540": "nan", "6545": "nan", "6550": "nan", "6555": "nan", "6560": "nan", "6565": "nan", "6570": "nan", "6575": "nan", "6580": "nan", "6585": "nan", "6590": "nan", "6595": "nan", "6600": 3.40166, "6605": "nan", "6610": "nan", "6615": "nan", "6620": "nan", "6625": "nan", "6630": "nan", "6635": "nan", "6640": "nan", "6645": "nan", "6650": "nan", "6655": "nan", "6660": "nan", "6665": "nan", "6670": "nan", "6675": "nan", "6680": "nan", "6685": "nan", "6690": "nan", "6695": "nan", "6700": 3.40129, "6705": "nan", "6710": "nan", "6715": "nan", "6720": "nan", "6725": "nan", "6730": "nan", "6735": "nan", "6740": "nan", "6745": "nan", "6750": "nan", "6755": "nan", "6760": "nan", "6765": "nan", "6770": "nan", "6775": "nan", "6780": "nan", "6785": "nan", "6790": "nan", "6795": "nan", "6800": 3.40047, "6805": "nan", "6810": "nan", "6815": "nan", "6820": "nan", "6825": "nan", "6830": "nan", "6835": "nan", "6840": "nan", "6845": "nan", "6850": "nan", "6855": "nan", "6860": "nan", "6865": "nan", "6870": "nan", "6875": "nan", "6880": "nan", "6885": "nan", "6890": "nan", "6895": "nan", "6900": 3.40079, "6905": "nan", "6910": "nan", "6915": "nan", "6920": "nan", "6925": "nan", "6930": "nan", "6935": "nan", "6940": "nan", "6945": "nan", "6950": "nan", "6955": "nan", "6960": "nan", "6965": "nan", "6970": "nan", "6975": "nan", "6980": "nan", "6985": "nan", "6990": "nan", "6995": "nan", "7000": 3.40034, "7005": "nan", "7010": "nan", "7015": "nan", "7020": "nan", "7025": "nan", "7030": "nan", "7035": "nan", "7040": "nan", "7045": "nan", "7050": "nan", "7055": "nan", "7060": "nan", "7065": 
"nan", "7070": "nan", "7075": "nan", "7080": "nan", "7085": "nan", "7090": "nan", "7095": "nan", "7100": 3.39893, "7105": "nan", "7110": "nan", "7115": "nan", "7120": "nan", "7125": "nan", "7130": "nan", "7135": "nan", "7140": "nan", "7145": "nan", "7150": "nan", "7155": "nan", "7160": "nan", "7165": "nan", "7170": "nan", "7175": "nan", "7180": "nan", "7185": "nan", "7190": "nan", "7195": "nan", "7200": 3.40035, "7205": "nan", "7210": "nan", "7215": "nan", "7220": "nan", "7225": "nan", "7230": "nan", "7235": "nan", "7240": "nan", "7245": "nan", "7250": "nan", "7255": "nan", "7260": "nan", "7265": "nan", "7270": "nan", "7275": "nan", "7280": "nan", "7285": "nan", "7290": "nan", "7295": "nan", "7300": 3.39965, "7305": "nan", "7310": "nan", "7315": "nan", "7320": "nan", "7325": "nan", "7330": "nan", "7335": "nan", "7340": "nan", "7345": "nan", "7350": "nan", "7355": "nan", "7360": "nan", "7365": "nan", "7370": "nan", "7375": "nan", "7380": "nan", "7385": "nan", "7390": "nan", "7395": "nan", "7400": 3.40073, "7405": "nan", "7410": "nan", "7415": "nan", "7420": "nan", "7425": "nan", "7430": "nan", "7435": "nan", "7440": "nan", "7445": "nan", "7450": "nan", "7455": "nan", "7460": "nan", "7465": "nan", "7470": "nan", "7475": "nan", "7480": "nan", "7485": "nan", "7490": "nan", "7495": "nan", "7500": 3.40077, "7505": "nan", "7510": "nan", "7515": "nan", "7520": "nan", "7525": "nan", "7530": "nan", "7535": "nan", "7540": "nan", "7545": "nan", "7550": "nan", "7555": "nan", "7560": "nan", "7565": "nan", "7570": "nan", "7575": "nan", "7580": "nan", "7585": "nan", "7590": "nan", "7595": "nan", "7600": 3.39989, "7605": "nan", "7610": "nan", "7615": "nan", "7620": "nan", "7625": "nan", "7630": "nan", "7635": "nan", "7640": "nan", "7645": "nan", "7650": "nan", "7655": "nan", "7660": "nan", "7665": "nan", "7670": "nan", "7675": "nan", "7680": "nan", "7685": "nan", "7690": "nan", "7695": "nan", "7700": 3.3988, "7705": "nan", "7710": "nan", "7715": "nan", "7720": "nan", "7725": "nan", 
"7730": "nan", "7735": "nan", "7740": "nan", "7745": "nan", "7750": "nan", "7755": "nan", "7760": "nan", "7765": "nan", "7770": "nan", "7775": "nan", "7780": "nan", "7785": "nan", "7790": "nan", "7795": "nan", "7800": 3.3995, "7805": "nan", "7810": "nan", "7815": "nan", "7820": "nan", "7825": "nan", "7830": "nan", "7835": "nan", "7840": "nan", "7845": "nan", "7850": "nan", "7855": "nan", "7860": "nan", "7865": "nan", "7870": "nan", "7875": "nan", "7880": "nan", "7885": "nan", "7890": "nan", "7895": "nan", "7900": 3.39928, "7905": "nan", "7910": "nan", "7915": "nan", "7920": "nan", "7925": "nan", "7930": "nan", "7935": "nan", "7940": "nan", "7945": "nan", "7950": "nan", "7955": "nan", "7960": "nan", "7965": "nan", "7970": "nan", "7975": "nan", "7980": "nan", "7985": "nan", "7990": "nan", "7995": "nan", "8000": 3.39834, "8005": "nan", "8010": "nan", "8015": "nan", "8020": "nan", "8025": "nan", "8030": "nan", "8035": "nan", "8040": "nan", "8045": "nan", "8050": "nan", "8055": "nan", "8060": "nan", "8065": "nan", "8070": "nan", "8075": "nan", "8080": "nan", "8085": "nan", "8090": "nan", "8095": "nan", "8100": 3.39716, "8105": "nan", "8110": "nan", "8115": "nan", "8120": "nan", "8125": "nan", "8130": "nan", "8135": "nan", "8140": "nan", "8145": "nan", "8150": "nan", "8155": "nan", "8160": "nan", "8165": "nan", "8170": "nan", "8175": "nan", "8180": "nan", "8185": "nan", "8190": "nan", "8195": "nan", "8200": 3.39809, "8205": "nan", "8210": "nan", "8215": "nan", "8220": "nan", "8225": "nan", "8230": "nan", "8235": "nan", "8240": "nan", "8245": "nan", "8250": "nan", "8255": "nan", "8260": "nan", "8265": "nan", "8270": "nan", "8275": "nan", "8280": "nan", "8285": "nan", "8290": "nan", "8295": "nan", "8300": 3.39856, "8305": "nan", "8310": "nan", "8315": "nan", "8320": "nan", "8325": "nan", "8330": "nan", "8335": "nan", "8340": "nan", "8345": "nan", "8350": "nan", "8355": "nan", "8360": "nan", "8365": "nan", "8370": "nan", "8375": "nan", "8380": "nan", "8385": "nan", "8390": 
"nan", "8395": "nan", "8400": 3.39822, "8405": "nan", "8410": "nan", "8415": "nan", "8420": "nan", "8425": "nan", "8430": "nan", "8435": "nan", "8440": "nan", "8445": "nan", "8450": "nan", "8455": "nan", "8460": "nan", "8465": "nan", "8470": "nan", "8475": "nan", "8480": "nan", "8485": "nan", "8490": "nan", "8495": "nan", "8500": 3.39927, "8505": "nan", "8510": "nan", "8515": "nan", "8520": "nan", "8525": "nan", "8530": "nan", "8535": "nan", "8540": "nan", "8545": "nan", "8550": "nan", "8555": "nan", "8560": "nan", "8565": "nan", "8570": "nan", "8575": "nan", "8580": "nan", "8585": "nan", "8590": "nan", "8595": "nan", "8600": 3.40049, "8605": "nan", "8610": "nan", "8615": "nan", "8620": "nan", "8625": "nan", "8630": "nan", "8635": "nan", "8640": "nan", "8645": "nan", "8650": "nan", "8655": "nan", "8660": "nan", "8665": "nan", "8670": "nan", "8675": "nan", "8680": "nan", "8685": "nan", "8690": "nan", "8695": "nan", "8700": 3.39943, "8705": "nan", "8710": "nan", "8715": "nan", "8720": "nan", "8725": "nan", "8730": "nan", "8735": "nan", "8740": "nan", "8745": "nan", "8750": "nan", "8755": "nan", "8760": "nan", "8765": "nan", "8770": "nan", "8775": "nan", "8780": "nan", "8785": "nan", "8790": "nan", "8795": "nan", "8800": 3.3999, "8805": "nan", "8810": "nan", "8815": "nan", "8820": "nan", "8825": "nan", "8830": "nan", "8835": "nan", "8840": "nan", "8845": "nan", "8850": "nan", "8855": "nan", "8860": "nan", "8865": "nan", "8870": "nan", "8875": "nan", "8880": "nan", "8885": "nan", "8890": "nan", "8895": "nan", "8900": 3.39978, "8905": "nan", "8910": "nan", "8915": "nan", "8920": "nan", "8925": "nan", "8930": "nan", "8935": "nan", "8940": "nan", "8945": "nan", "8950": "nan", "8955": "nan", "8960": "nan", "8965": "nan", "8970": "nan", "8975": "nan", "8980": "nan", "8985": "nan", "8990": "nan", "8995": "nan", "9000": 3.40051, "9005": "nan", "9010": "nan", "9015": "nan", "9020": "nan", "9025": "nan", "9030": "nan", "9035": "nan", "9040": "nan", "9045": "nan", "9050": "nan", 
"9055": "nan", "9060": "nan", "9065": "nan", "9070": "nan", "9075": "nan", "9080": "nan", "9085": "nan", "9090": "nan", "9095": "nan", "9100": 3.39875, "9105": "nan", "9110": "nan", "9115": "nan", "9120": "nan", "9125": "nan", "9130": "nan", "9135": "nan", "9140": "nan", "9145": "nan", "9150": "nan", "9155": "nan", "9160": "nan", "9165": "nan", "9170": "nan", "9175": "nan", "9180": "nan", "9185": "nan", "9190": "nan", "9195": "nan", "9200": 3.39972, "9205": "nan", "9210": "nan", "9215": "nan", "9220": "nan", "9225": "nan", "9230": "nan", "9235": "nan", "9240": "nan", "9245": "nan", "9250": "nan", "9255": "nan", "9260": "nan", "9265": "nan", "9270": "nan", "9275": "nan", "9280": "nan", "9285": "nan", "9290": "nan", "9295": "nan", "9300": 3.40071, "9305": "nan", "9310": "nan", "9315": "nan", "9320": "nan", "9325": "nan", "9330": "nan", "9335": "nan", "9340": "nan", "9345": "nan", "9350": "nan", "9355": "nan", "9360": "nan", "9365": "nan", "9370": "nan", "9375": "nan", "9380": "nan", "9385": "nan", "9390": "nan", "9395": "nan", "9400": 3.40005, "9405": "nan", "9410": "nan", "9415": "nan", "9420": "nan", "9425": "nan", "9430": "nan", "9435": "nan", "9440": "nan", "9445": "nan", "9450": "nan", "9455": "nan", "9460": "nan", "9465": "nan", "9470": "nan", "9475": "nan", "9480": "nan", "9485": "nan", "9490": "nan", "9495": "nan", "9500": 3.39896, "9505": "nan", "9510": "nan", "9515": "nan", "9520": "nan", "9525": "nan", "9530": "nan", "9535": "nan", "9540": "nan", "9545": "nan", "9550": "nan", "9555": "nan", "9560": "nan", "9565": "nan", "9570": "nan", "9575": "nan", "9580": "nan", "9585": "nan", "9590": "nan", "9595": "nan", "9600": 3.39953, "9605": "nan", "9610": "nan", "9615": "nan", "9620": "nan", "9625": "nan", "9630": "nan", "9635": "nan", "9640": "nan", "9645": "nan", "9650": "nan", "9655": "nan", "9660": "nan", "9665": "nan", "9670": "nan", "9675": "nan", "9680": "nan", "9685": "nan", "9690": "nan", "9695": "nan", "9700": 3.39866, "9705": "nan", "9710": "nan", 
"9715": "nan", "9720": "nan", "9725": "nan", "9730": "nan", "9735": "nan", "9740": "nan", "9745": "nan", "9750": "nan", "9755": "nan", "9760": "nan", "9765": "nan", "9770": "nan", "9775": "nan", "9780": "nan", "9785": "nan", "9790": "nan", "9795": "nan", "9800": 3.40009, "9805": "nan", "9810": "nan", "9815": "nan", "9820": "nan", "9825": "nan", "9830": "nan", "9835": "nan", "9840": "nan", "9845": "nan", "9850": "nan", "9855": "nan", "9860": "nan", "9865": "nan", "9870": "nan", "9875": "nan", "9880": "nan", "9885": "nan", "9890": "nan", "9895": "nan", "9900": 3.39979, "9905": "nan", "9910": "nan", "9915": "nan", "9920": "nan", "9925": "nan", "9930": "nan", "9935": "nan", "9940": "nan", "9945": "nan", "9950": "nan", "9955": "nan", "9960": "nan", "9965": "nan", "9970": "nan", "9975": "nan", "9980": "nan", "9985": "nan", "9990": "nan", "9995": "nan", "10000": 3.3996, "10005": "nan", "10010": "nan", "10015": "nan", "10020": "nan", "10025": "nan", "10030": "nan", "10035": "nan", "10040": "nan", "10045": "nan", "10050": "nan", "10055": "nan", "10060": "nan", "10065": "nan", "10070": "nan", "10075": "nan", "10080": "nan", "10085": "nan", "10090": "nan", "10095": "nan", "10100": 3.39815, "10105": "nan", "10110": "nan", "10115": "nan", "10120": "nan", "10125": "nan", "10130": "nan", "10135": "nan", "10140": "nan", "10145": "nan", "10150": "nan", "10155": "nan", "10160": "nan", "10165": "nan", "10170": "nan", "10175": "nan", "10180": "nan", "10185": "nan", "10190": "nan", "10195": "nan", "10200": 3.3996, "10205": "nan", "10210": "nan", "10215": "nan", "10220": "nan", "10225": "nan", "10230": "nan", "10235": "nan", "10240": "nan", "10245": "nan", "10250": "nan", "10255": "nan", "10260": "nan", "10265": "nan", "10270": "nan", "10275": "nan", "10280": "nan", "10285": "nan", "10290": "nan", "10295": "nan", "10300": 3.40067, "10305": "nan", "10310": "nan", "10315": "nan", "10320": "nan", "10325": "nan", "10330": "nan", "10335": "nan", "10340": "nan", "10345": "nan", "10350": 
"nan", "10355": "nan", "10360": "nan", "10365": "nan", "10370": "nan", "10375": "nan", "10380": "nan", "10385": "nan", "10390": "nan", "10395": "nan", "10400": 3.39985, "10405": "nan", "10410": "nan", "10415": "nan", "10420": "nan", "10425": "nan", "10430": "nan", "10435": "nan", "10440": "nan", "10445": "nan", "10450": "nan", "10455": "nan", "10460": "nan", "10465": "nan", "10470": "nan", "10475": "nan", "10480": "nan", "10485": "nan", "10490": "nan", "10495": "nan", "10500": 3.39923, "10505": "nan", "10510": "nan", "10515": "nan", "10520": "nan", "10525": "nan", "10530": "nan", "10535": "nan", "10540": "nan", "10545": "nan", "10550": "nan", "10555": "nan", "10560": "nan", "10565": "nan", "10570": "nan", "10575": "nan", "10580": "nan", "10585": "nan", "10590": "nan", "10595": "nan", "10600": 3.40087, "10605": "nan", "10610": "nan", "10615": "nan", "10620": "nan", "10625": "nan", "10630": "nan", "10635": "nan", "10640": "nan", "10645": "nan", "10650": "nan", "10655": "nan", "10660": "nan", "10665": "nan", "10670": "nan", "10675": "nan", "10680": "nan", "10685": "nan", "10690": "nan", "10695": "nan", "10700": 3.40098, "10705": "nan", "10710": "nan", "10715": "nan", "10720": "nan", "10725": "nan", "10730": "nan", "10735": "nan", "10740": "nan", "10745": "nan", "10750": "nan", "10755": "nan", "10760": "nan", "10765": "nan", "10770": "nan", "10775": "nan", "10780": "nan", "10785": "nan", "10790": "nan", "10795": "nan", "10800": 3.39973, "10805": "nan", "10810": "nan", "10815": "nan", "10820": "nan", "10825": "nan", "10830": "nan", "10835": "nan", "10840": "nan", "10845": "nan", "10850": "nan", "10855": "nan", "10860": "nan", "10865": "nan", "10870": "nan", "10875": "nan", "10880": "nan", "10885": "nan", "10890": "nan", "10895": "nan", "10900": 3.66029, "10905": "nan", "10910": "nan", "10915": "nan", "10920": "nan", "10925": "nan", "10930": "nan", "10935": "nan", "10940": "nan", "10945": "nan", "10950": "nan", "10955": "nan", "10960": "nan", "10965": "nan", "10970": 
"nan", "10975": "nan", "10980": "nan", "10985": "nan", "10990": "nan", "10995": "nan", "11000": 3.38756, "11005": "nan", "11010": "nan", "11015": "nan", "11020": "nan", "11025": "nan", "11030": "nan", "11035": "nan", "11040": "nan", "11045": "nan", "11050": "nan", "11055": "nan", "11060": "nan", "11065": "nan", "11070": "nan", "11075": "nan", "11080": "nan", "11085": "nan", "11090": "nan", "11095": "nan", "11100": 3.38656, "11105": "nan", "11110": "nan", "11115": "nan", "11120": "nan", "11125": "nan", "11130": "nan", "11135": "nan", "11140": "nan", "11145": "nan", "11150": "nan", "11155": "nan", "11160": "nan", "11165": "nan", "11170": "nan", "11175": "nan", "11180": "nan", "11185": "nan", "11190": "nan", "11195": "nan", "11200": 3.38765, "11205": "nan", "11210": "nan", "11215": "nan", "11220": "nan", "11225": "nan", "11230": "nan", "11235": "nan", "11240": "nan", "11245": "nan", "11250": "nan", "11255": "nan", "11260": "nan", "11265": "nan", "11270": "nan", "11275": "nan", "11280": "nan", "11285": "nan", "11290": "nan", "11295": "nan", "11300": 3.38835, "11305": "nan", "11310": "nan", "11315": "nan", "11320": "nan", "11325": "nan", "11330": "nan", "11335": "nan", "11340": "nan", "11345": "nan", "11350": "nan", "11355": "nan", "11360": "nan", "11365": "nan", "11370": "nan", "11375": "nan", "11380": "nan", "11385": "nan", "11390": "nan", "11395": "nan", "11400": 3.38844, "11405": "nan", "11410": "nan", "11415": "nan", "11420": "nan", "11425": "nan", "11430": "nan", "11435": "nan", "11440": "nan", "11445": "nan", "11450": "nan", "11455": "nan", "11460": "nan", "11465": "nan", "11470": "nan", "11475": "nan", "11480": "nan", "11485": "nan", "11490": "nan", "11495": "nan", "11500": 3.38793, "11505": "nan", "11510": "nan", "11515": "nan", "11520": "nan", "11525": "nan", "11530": "nan", "11535": "nan", "11540": "nan", "11545": "nan", "11550": "nan", "11555": "nan", "11560": "nan", "11565": "nan", "11570": "nan", "11575": "nan", "11580": "nan", "11585": "nan", "11590": 
"nan", "11595": "nan", "11600": 3.38725, "11605": "nan", "11610": "nan", "11615": "nan", "11620": "nan", "11625": "nan", "11630": "nan", "11635": "nan", "11640": "nan", "11645": "nan", "11650": "nan", "11655": "nan", "11660": "nan", "11665": "nan", "11670": "nan", "11675": "nan", "11680": "nan", "11685": "nan", "11690": "nan", "11695": "nan", "11700": 3.38738, "11705": "nan", "11710": "nan", "11715": "nan", "11720": "nan", "11725": "nan", "11730": "nan", "11735": "nan", "11740": "nan", "11745": "nan", "11750": "nan", "11755": "nan", "11760": "nan", "11765": "nan", "11770": "nan", "11775": "nan", "11780": "nan", "11785": "nan", "11790": "nan", "11795": "nan", "11800": 3.38748, "11805": "nan", "11810": "nan", "11815": "nan", "11820": "nan", "11825": "nan", "11830": "nan", "11835": "nan", "11840": "nan", "11845": "nan", "11850": "nan", "11855": "nan", "11860": "nan", "11865": "nan", "11870": "nan", "11875": "nan", "11880": "nan", "11885": "nan", "11890": "nan", "11895": "nan", "11900": 3.38839, "11905": "nan", "11910": "nan", "11915": "nan", "11920": "nan", "11925": "nan", "11930": "nan", "11935": "nan", "11940": "nan", "11945": "nan", "11950": "nan", "11955": "nan", "11960": "nan", "11965": "nan", "11970": "nan", "11975": "nan", "11980": "nan", "11985": "nan", "11990": "nan", "11995": "nan", "12000": 3.38814, "12005": "nan", "12010": "nan", "12015": "nan", "12020": "nan", "12025": "nan", "12030": "nan", "12035": "nan", "12040": "nan", "12045": "nan", "12050": "nan", "12055": "nan", "12060": "nan", "12065": "nan", "12070": "nan", "12075": "nan", "12080": "nan", "12085": "nan", "12090": "nan", "12095": "nan", "12100": 3.38677, "12105": "nan", "12110": "nan", "12115": "nan", "12120": "nan", "12125": "nan", "12130": "nan", "12135": "nan", "12140": "nan", "12145": "nan", "12150": "nan", "12155": "nan", "12160": "nan", "12165": "nan", "12170": "nan", "12175": "nan", "12180": "nan", "12185": "nan", "12190": "nan", "12195": "nan", "12200": 3.38679, "12205": "nan", "12210": 
"nan", "12215": "nan", "12220": "nan", "12225": "nan", "12230": "nan", "12235": "nan", "12240": "nan", "12245": "nan", "12250": "nan", "12255": "nan", "12260": "nan", "12265": "nan", "12270": "nan", "12275": "nan", "12280": "nan", "12285": "nan", "12290": "nan", "12295": "nan", "12300": 3.38609, "12305": "nan", "12310": "nan", "12315": "nan", "12320": "nan", "12325": "nan", "12330": "nan", "12335": "nan", "12340": "nan", "12345": "nan", "12350": "nan", "12355": "nan", "12360": "nan", "12365": "nan", "12370": "nan", "12375": "nan", "12380": "nan", "12385": "nan", "12390": "nan", "12395": "nan", "12400": 3.38665, "12405": "nan", "12410": "nan", "12415": "nan", "12420": "nan", "12425": "nan", "12430": "nan", "12435": "nan", "12440": "nan", "12445": "nan", "12450": "nan", "12455": "nan", "12460": "nan", "12465": "nan", "12470": "nan", "12475": "nan", "12480": "nan", "12485": "nan", "12490": "nan", "12495": "nan", "12500": 3.38727, "12505": "nan", "12510": "nan", "12515": "nan", "12520": "nan", "12525": "nan", "12530": "nan", "12535": "nan", "12540": "nan", "12545": "nan", "12550": "nan", "12555": "nan", "12560": "nan", "12565": "nan", "12570": "nan", "12575": "nan", "12580": "nan", "12585": "nan", "12590": "nan", "12595": "nan", "12600": 3.38752, "12605": "nan", "12610": "nan", "12615": "nan", "12620": "nan", "12625": "nan", "12630": "nan", "12635": "nan", "12640": "nan", "12645": "nan", "12650": "nan", "12655": "nan", "12660": "nan", "12665": "nan", "12670": "nan", "12675": "nan", "12680": "nan", "12685": "nan", "12690": "nan", "12695": "nan", "12700": 3.38807, "12705": "nan", "12710": "nan", "12715": "nan", "12720": "nan", "12725": "nan", "12730": "nan", "12735": "nan", "12740": "nan", "12745": "nan", "12750": "nan", "12755": "nan", "12760": "nan", "12765": "nan", "12770": "nan", "12775": "nan", "12780": "nan", "12785": "nan", "12790": "nan", "12795": "nan", "12800": "nan", "12805": "nan", "12810": "nan", "12815": "nan", "12820": "nan", "12825": "nan", "12830": 
"nan", "12835": "nan", "12840": "nan", "12845": "nan", "12850": "nan", "12855": "nan", "12860": "nan", "12865": "nan", "12870": "nan", "12875": "nan", "12880": "nan", "12885": "nan", "12890": "nan", "12895": "nan", "12900": "nan", "12905": "nan", "12910": "nan", "12915": "nan", "12920": "nan", "12925": "nan", "12930": "nan", "12935": "nan", "12940": "nan", "12945": "nan", "12950": "nan", "12955": "nan", "12960": "nan", "12965": "nan", "12970": "nan", "12975": "nan", "12980": "nan", "12985": "nan", "12990": "nan", "12995": "nan", "13000": "nan"}}} \ No newline at end of file +{ + "lm loss": { + "start_step": 1, + "end_step": 10200, + "step_interval": 5, + "values": { + "1": 12.95636, + "5": 12.91493, + "10": 12.06068, + "15": 11.39907, + "20": 10.42782, + "25": 9.9771, + "30": 9.62861, + "35": 9.36625, + "40": 9.17252, + "45": 9.00247, + "50": 8.84227, + "55": 8.635, + "60": 8.61848, + "65": 8.51386, + "70": 8.39546, + "75": 8.26338, + "80": 8.09298, + "85": 8.09858, + "90": 7.94846, + "95": 7.8971, + "100": 7.80875, + "105": 7.65613, + "110": 7.56901, + "115": 7.48001, + "120": 7.41832, + "125": 7.47105, + "130": 7.28349, + "135": 7.29674, + "140": 7.25373, + "145": 7.06382, + "150": 7.15942, + "155": 7.00232, + "160": 6.92577, + "165": 6.89706, + "170": 6.81939, + "175": 6.86062, + "180": 6.81545, + "185": 6.72257, + "190": 6.65587, + "195": 6.58935, + "200": 6.6135, + "205": 6.63845, + "210": 6.53563, + "215": 6.51843, + "220": 6.51177, + "225": 6.50414, + "230": 6.50432, + "235": 6.46917, + "240": 6.38517, + "245": 6.3812, + "250": 6.33511, + "255": 6.449, + "260": 6.33771, + "265": 6.27428, + "270": 6.23222, + "275": 6.27245, + "280": 6.20586, + "285": 6.23286, + "290": 6.16479, + "295": 6.145, + "300": 6.10719, + "305": 6.0417, + "310": 6.07263, + "315": 6.07514, + "320": 5.98003, + "325": 5.92946, + "330": 5.98563, + "335": 6.03283, + "340": 5.95895, + "345": 5.93647, + "350": 5.92017, + "355": 5.8495, + "360": 5.85881, + "365": 5.8328, + "370": 5.79334, + 
"375": 5.83779, + "380": 5.83614, + "385": 5.78613, + "390": 5.7777, + "395": 5.67732, + "400": 5.65984, + "405": 5.66333, + "410": 5.66665, + "415": 5.72633, + "420": 5.66944, + "425": 5.68244, + "430": 5.63871, + "435": 5.56635, + "440": 5.61469, + "445": 5.54454, + "450": 5.59407, + "455": 5.50781, + "460": 5.50087, + "465": 5.58081, + "470": 5.56589, + "475": 5.50951, + "480": 5.47548, + "485": 5.50235, + "490": 5.46533, + "495": 5.45996, + "500": 5.40736, + "505": 5.36926, + "510": 5.4224, + "515": 5.41691, + "520": 5.43066, + "525": 5.30525, + "530": 5.34552, + "535": 5.31057, + "540": 5.3297, + "545": 5.39306, + "550": 5.36947, + "555": 5.19608, + "560": 5.32548, + "565": 5.28038, + "570": 5.27177, + "575": 5.29276, + "580": 5.21254, + "585": 5.21476, + "590": 5.21614, + "595": 5.21547, + "600": 5.23604, + "605": 5.22126, + "610": 5.19884, + "615": 5.15789, + "620": 5.19311, + "625": 5.18747, + "630": 5.12499, + "635": 5.10527, + "640": 5.08589, + "645": 5.09422, + "650": 5.13357, + "655": 5.10849, + "660": 5.04614, + "665": 5.06634, + "670": 5.03349, + "675": 4.99757, + "680": 4.98096, + "685": 4.9669, + "690": 4.99708, + "695": 4.94269, + "700": 4.94045, + "705": 4.89612, + "710": 4.94295, + "715": 4.87556, + "720": 4.83941, + "725": 4.79045, + "730": 4.83137, + "735": 4.78843, + "740": 4.815, + "745": 4.70979, + "750": 4.70037, + "755": 4.7934, + "760": 4.75129, + "765": 4.70711, + "770": 4.65835, + "775": 4.64785, + "780": 4.68267, + "785": 4.72822, + "790": 4.61917, + "795": 4.6209, + "800": 4.58307, + "805": 4.60223, + "810": 4.62062, + "815": 4.58748, + "820": 4.59547, + "825": 4.57325, + "830": 4.54908, + "835": 4.52106, + "840": 4.46661, + "845": 4.46341, + "850": 4.41993, + "855": 4.46928, + "860": 4.41115, + "865": 4.49884, + "870": 4.44657, + "875": 4.35033, + "880": 4.38974, + "885": 4.36025, + "890": 4.40871, + "895": 4.39389, + "900": 4.35994, + "905": 4.31426, + "910": 4.32151, + "915": 4.31741, + "920": 4.3601, + "925": 4.35556, + "930": 
4.28626, + "935": 4.28016, + "940": 4.31823, + "945": 4.28675, + "950": 4.33146, + "955": 4.23865, + "960": 4.17329, + "965": 4.24662, + "970": 4.26306, + "975": 4.21677, + "980": 4.20264, + "985": 4.16318, + "990": 4.12123, + "995": 4.17995, + "1000": 4.2382, + "1005": 4.17489, + "1010": 4.15353, + "1015": 4.10781, + "1020": 4.14472, + "1025": 4.20371, + "1030": 4.09909, + "1035": 4.09186, + "1040": 4.09316, + "1045": 4.09014, + "1050": 4.11964, + "1055": 4.09834, + "1060": 4.09551, + "1065": 4.05398, + "1070": 4.04751, + "1075": 4.06679, + "1080": 4.05176, + "1085": 4.05513, + "1090": 4.0203, + "1095": 4.08006, + "1100": 4.05952, + "1105": 4.06174, + "1110": 4.04279, + "1115": 3.99444, + "1120": 4.00593, + "1125": 3.9937, + "1130": 4.0578, + "1135": 4.00553, + "1140": 3.98485, + "1145": 3.93503, + "1150": 4.02991, + "1155": 3.98774, + "1160": 3.94876, + "1165": 3.85343, + "1170": 3.91359, + "1175": 3.93274, + "1180": 3.93706, + "1185": 3.97637, + "1190": 3.91901, + "1195": 3.93409, + "1200": 3.87706, + "1205": 3.87883, + "1210": 3.98038, + "1215": 3.82721, + "1220": 3.86817, + "1225": 3.80347, + "1230": 3.89882, + "1235": 3.88541, + "1240": 3.85377, + "1245": 3.79592, + "1250": 3.81771, + "1255": 3.85764, + "1260": 3.88133, + "1265": 3.77832, + "1270": 3.86905, + "1275": 3.82128, + "1280": 3.81356, + "1285": 3.82855, + "1290": 3.84927, + "1295": 3.84003, + "1300": 3.8012, + "1305": 3.81011, + "1310": 3.8163, + "1315": 3.80504, + "1320": 3.81781, + "1325": 3.71094, + "1330": 3.78861, + "1335": 3.7482, + "1340": 3.74059, + "1345": 3.76409, + "1350": 3.72869, + "1355": 3.78241, + "1360": 3.72585, + "1365": 3.72505, + "1370": 3.74271, + "1375": 3.75098, + "1380": 3.7467, + "1385": 3.74016, + "1390": 3.66738, + "1395": 3.73768, + "1400": 3.73285, + "1405": 3.66947, + "1410": 3.65899, + "1415": 3.65272, + "1420": 3.70798, + "1425": 3.70326, + "1430": 3.67713, + "1435": 3.65521, + "1440": 3.62359, + "1445": 3.67462, + "1450": 3.66895, + "1455": 3.63408, + "1460": 
3.65242, + "1465": 3.67233, + "1470": 3.61501, + "1475": 3.68638, + "1480": 3.65857, + "1485": 3.67278, + "1490": 3.63029, + "1495": 3.60394, + "1500": 3.65552, + "1505": 3.68609, + "1510": 3.55109, + "1515": 3.59939, + "1520": 3.62723, + "1525": 3.59192, + "1530": 3.59983, + "1535": 3.58568, + "1540": 3.61642, + "1545": 3.59448, + "1550": 3.56613, + "1555": 3.57416, + "1560": 3.60143, + "1565": 3.62419, + "1570": 3.59238, + "1575": 3.55103, + "1580": 3.61463, + "1585": 3.57681, + "1590": 3.46422, + "1595": 3.50863, + "1600": 3.49103, + "1605": 3.53794, + "1610": 3.56037, + "1615": 3.49817, + "1620": 3.52298, + "1625": 3.46961, + "1630": 3.49722, + "1635": 3.5368, + "1640": 3.52869, + "1645": 3.53266, + "1650": 3.47711, + "1655": 3.4673, + "1660": 3.51444, + "1665": 3.45262, + "1670": 3.51621, + "1675": 3.49635, + "1680": 3.47136, + "1685": 3.4794, + "1690": 3.47578, + "1695": 3.49278, + "1700": 3.47195, + "1705": 3.39545, + "1710": 3.48055, + "1715": 3.48126, + "1720": 3.42363, + "1725": 3.41313, + "1730": 3.41695, + "1735": 3.48191, + "1740": 3.45584, + "1745": 3.42798, + "1750": 3.39635, + "1755": 3.41903, + "1760": 3.36551, + "1765": 3.41537, + "1770": 3.44954, + "1775": 3.37407, + "1780": 3.43219, + "1785": 3.41375, + "1790": 3.38142, + "1795": 3.39439, + "1800": 3.33122, + "1805": 3.38289, + "1810": 3.33314, + "1815": 3.42456, + "1820": 3.41124, + "1825": 3.38737, + "1830": 3.321, + "1835": 3.42972, + "1840": 3.3964, + "1845": 3.43042, + "1850": 3.37212, + "1855": 3.36489, + "1860": 3.33899, + "1865": 3.38416, + "1870": 3.2974, + "1875": 3.43275, + "1880": 3.33858, + "1885": 3.34035, + "1890": 3.33182, + "1895": 3.38805, + "1900": 3.37148, + "1905": 3.29374, + "1910": 3.32697, + "1915": 3.30981, + "1920": 3.35061, + "1925": 3.32954, + "1930": 3.30997, + "1935": 3.29828, + "1940": 3.34822, + "1945": 3.26885, + "1950": 3.40411, + "1955": 3.29406, + "1960": 3.29982, + "1965": 3.26189, + "1970": 3.29411, + "1975": 3.32491, + "1980": 3.3304, + "1985": 3.23214, + 
"1990": 3.29106, + "1995": 3.27313, + "2000": 3.26868, + "2005": 3.24857, + "2010": 3.24793, + "2015": 3.21842, + "2020": 3.26421, + "2025": 3.27751, + "2030": 3.27857, + "2035": 3.27744, + "2040": 3.24719, + "2045": 3.23203, + "2050": 3.26826, + "2055": 3.31971, + "2060": 3.2822, + "2065": 3.22581, + "2070": 3.27883, + "2075": 3.24034, + "2080": 3.22343, + "2085": 3.29053, + "2090": 3.13251, + "2095": 3.29421, + "2100": 3.22448, + "2105": 3.18523, + "2110": 3.18724, + "2115": 3.21807, + "2120": 3.17889, + "2125": 3.2113, + "2130": 3.20286, + "2135": 3.26519, + "2140": 3.18758, + "2145": 3.19507, + "2150": 3.20824, + "2155": 3.22091, + "2160": 3.19528, + "2165": 3.35955, + "2170": 3.2585, + "2175": 3.17861, + "2180": 3.21668, + "2185": 3.24726, + "2190": 3.23016, + "2195": 3.14745, + "2200": 3.18543, + "2205": 3.16084, + "2210": 3.11745, + "2215": 3.1858, + "2220": 3.18572, + "2225": 3.17846, + "2230": 3.14655, + "2235": 3.17171, + "2240": 3.22575, + "2245": 3.17054, + "2250": 3.19236, + "2255": 3.12598, + "2260": 3.12323, + "2265": 3.2398, + "2270": 3.17957, + "2275": 3.14483, + "2280": 3.19429, + "2285": 3.17293, + "2290": 3.16102, + "2295": 3.19661, + "2300": 3.12642, + "2305": 3.15285, + "2310": 3.11359, + "2315": 3.05962, + "2320": 3.10881, + "2325": 3.15813, + "2330": 3.11558, + "2335": 3.1051, + "2340": 3.16096, + "2345": 3.12605, + "2350": 3.14231, + "2355": 3.14361, + "2360": 3.15245, + "2365": 3.0827, + "2370": 3.14312, + "2375": 3.14177, + "2380": 3.12494, + "2385": 3.06938, + "2390": 3.08808, + "2395": 3.09036, + "2400": 3.08116, + "2405": 3.08824, + "2410": 3.07707, + "2415": 3.08042, + "2420": 3.06917, + "2425": 3.06886, + "2430": 3.07357, + "2435": 3.05462, + "2440": 3.08959, + "2445": 3.05963, + "2450": 3.12048, + "2455": 3.15148, + "2460": 3.07551, + "2465": 3.06878, + "2470": 3.02726, + "2475": 3.06894, + "2480": 3.08459, + "2485": 3.04843, + "2490": 3.05287, + "2495": 3.06025, + "2500": 3.06094, + "2505": 3.12591, + "2510": 3.11813, + "2515": 
3.06371, + "2520": 3.06863, + "2525": 3.01919, + "2530": 3.04918, + "2535": 3.0825, + "2540": 3.07124, + "2545": 3.04337, + "2550": 2.99371, + "2555": 3.0642, + "2560": 3.03432, + "2565": 3.10038, + "2570": 3.02333, + "2575": 3.04987, + "2580": 3.08227, + "2585": 3.01065, + "2590": 3.06011, + "2595": 3.00654, + "2600": 3.05996, + "2605": 3.04069, + "2610": 3.04466, + "2615": 3.05458, + "2620": 2.98698, + "2625": 3.00577, + "2630": 3.03121, + "2635": 3.0586, + "2640": 3.0119, + "2645": 3.04364, + "2650": 3.01519, + "2655": 2.9886, + "2660": 3.00648, + "2665": 3.03526, + "2670": 2.97842, + "2675": 2.95563, + "2680": 2.98899, + "2685": 3.00002, + "2690": 2.99684, + "2695": 2.98246, + "2700": 3.02031, + "2705": 2.97486, + "2710": 2.97993, + "2715": 2.95982, + "2720": 3.02327, + "2725": 2.98962, + "2730": 3.03754, + "2735": 3.04423, + "2740": 3.00496, + "2745": 3.03712, + "2750": 3.01402, + "2755": 2.9617, + "2760": 2.99646, + "2765": 3.00037, + "2770": 2.97044, + "2775": 2.98405, + "2780": 3.00503, + "2785": 2.94687, + "2790": 2.95285, + "2795": 2.9441, + "2800": 2.95258, + "2805": 2.93276, + "2810": 2.97571, + "2815": 2.95532, + "2820": 2.98101, + "2825": 3.00437, + "2830": 2.98552, + "2835": 2.91254, + "2840": 2.92325, + "2845": 2.95348, + "2850": 2.96382, + "2855": 2.96228, + "2860": 2.95491, + "2865": 2.91272, + "2870": 2.98144, + "2875": 2.91969, + "2880": 2.95539, + "2885": 2.9164, + "2890": 2.97968, + "2895": 2.9266, + "2900": 2.951, + "2905": 3.00319, + "2910": 2.93046, + "2915": 2.97319, + "2920": 2.95673, + "2925": 2.94019, + "2930": 2.94216, + "2935": 2.93167, + "2940": 2.92828, + "2945": 2.91138, + "2950": 2.97326, + "2955": 2.90742, + "2960": 2.96518, + "2965": 2.86466, + "2970": 2.95452, + "2975": 2.98998, + "2980": 2.93403, + "2985": 3.0293, + "2990": 2.94204, + "2995": 2.86476, + "3000": 2.92558, + "3005": 2.88291, + "3010": 2.93006, + "3015": 2.91195, + "3020": 2.91509, + "3025": 2.91667, + "3030": 2.91981, + "3035": 2.95562, + "3040": 2.91897, + 
"3045": 2.83026, + "3050": 2.89438, + "3055": 2.89872, + "3060": 2.92425, + "3065": 2.9202, + "3070": 2.87732, + "3075": 2.8659, + "3080": 2.91886, + "3085": 2.89913, + "3090": 2.91337, + "3095": 2.91795, + "3100": 2.86455, + "3105": 2.91991, + "3110": 2.89521, + "3115": 2.94229, + "3120": 2.9502, + "3125": 2.85442, + "3130": 2.92898, + "3135": 2.91391, + "3140": 2.87153, + "3145": 2.91673, + "3150": 2.84677, + "3155": 2.84841, + "3160": 2.84268, + "3165": 2.83843, + "3170": 2.88775, + "3175": 2.90031, + "3180": 2.85186, + "3185": 2.88764, + "3190": 2.90097, + "3195": 2.9244, + "3200": 2.91971, + "3205": 2.86321, + "3210": 2.87135, + "3215": 2.91131, + "3220": 2.86938, + "3225": 2.86623, + "3230": 2.81226, + "3235": 2.86736, + "3240": 2.8667, + "3245": 2.90142, + "3250": 2.85616, + "3255": 2.8457, + "3260": 2.85684, + "3265": 2.86296, + "3270": 2.83877, + "3275": 2.86179, + "3280": 2.7955, + "3285": 2.80704, + "3290": 2.8637, + "3295": 2.89374, + "3300": 2.86884, + "3305": 2.85627, + "3310": 2.85096, + "3315": 2.8031, + "3320": 2.81957, + "3325": 2.8231, + "3330": 2.82253, + "3335": 2.84275, + "3340": 2.82275, + "3345": 2.83801, + "3350": 2.88535, + "3355": 2.93845, + "3360": 2.8104, + "3365": 2.88218, + "3370": 2.84292, + "3375": 2.82821, + "3380": 2.83961, + "3385": 2.86845, + "3390": 2.85562, + "3395": 2.79888, + "3400": 2.7751, + "3405": 2.82035, + "3410": 2.83623, + "3415": 2.85155, + "3420": 2.82276, + "3425": 2.80723, + "3430": 2.82699, + "3435": 2.88405, + "3440": 2.81654, + "3445": 2.8666, + "3450": 2.81173, + "3455": 2.78937, + "3460": 2.81326, + "3465": 2.85496, + "3470": 2.83669, + "3475": 2.76818, + "3480": 2.83952, + "3485": 2.81857, + "3490": 2.88583, + "3495": 2.84954, + "3500": 2.8294, + "3505": 2.82724, + "3510": 2.81197, + "3515": 2.83002, + "3520": 2.77447, + "3525": 2.80315, + "3530": 2.84695, + "3535": 2.77826, + "3540": 2.83561, + "3545": 2.80697, + "3550": 2.79352, + "3555": 2.81372, + "3560": 2.81919, + "3565": 2.82503, + "3570": 2.80064, + 
"3575": 2.80282, + "3580": 2.81963, + "3585": 2.83113, + "3590": 2.82883, + "3595": 2.77456, + "3600": 2.74549, + "3605": 2.79356, + "3610": 2.85251, + "3615": 2.75065, + "3620": 2.80136, + "3625": 2.88028, + "3630": 2.77415, + "3635": 2.78563, + "3640": 2.78098, + "3645": 2.7678, + "3650": 2.80164, + "3655": 2.81848, + "3660": 2.76378, + "3665": 2.78241, + "3670": 2.76706, + "3675": 2.7699, + "3680": 2.80551, + "3685": 2.79657, + "3690": 2.7994, + "3695": 2.80976, + "3700": 2.78825, + "3705": 2.78652, + "3710": 2.75031, + "3715": 2.80152, + "3720": 2.79403, + "3725": 2.78917, + "3730": 2.83544, + "3735": 2.7951, + "3740": 2.75106, + "3745": 2.78944, + "3750": 2.80263, + "3755": 2.79078, + "3760": 2.75654, + "3765": 2.75784, + "3770": 2.76197, + "3775": 2.76756, + "3780": 2.75776, + "3785": 2.77592, + "3790": 2.73997, + "3795": 2.78753, + "3800": 2.79987, + "3805": 2.74849, + "3810": 2.80307, + "3815": 2.76119, + "3820": 2.78756, + "3825": 2.73454, + "3830": 2.7435, + "3835": 2.8103, + "3840": 2.72999, + "3845": 2.7159, + "3850": 2.77108, + "3855": 2.71808, + "3860": 2.80515, + "3865": 2.74932, + "3870": 2.77188, + "3875": 2.75713, + "3880": 2.78467, + "3885": 2.77955, + "3890": 2.74036, + "3895": 2.79847, + "3900": 2.7638, + "3905": 2.71774, + "3910": 2.74179, + "3915": 2.75467, + "3920": 2.79458, + "3925": 2.77793, + "3930": 2.7083, + "3935": 2.74319, + "3940": 2.74853, + "3945": 2.74033, + "3950": 2.71968, + "3955": 2.77484, + "3960": 2.76006, + "3965": 2.73678, + "3970": 2.75662, + "3975": 2.72787, + "3980": 2.7365, + "3985": 2.74465, + "3990": 2.69651, + "3995": 2.78349, + "4000": 2.73738, + "4005": 2.77091, + "4010": 2.71132, + "4015": 2.7268, + "4020": 2.7481, + "4025": 2.73123, + "4030": 2.6573, + "4035": 2.69717, + "4040": 2.75386, + "4045": 2.74786, + "4050": 2.78816, + "4055": 2.72171, + "4060": 2.71292, + "4065": 2.65324, + "4070": 2.80526, + "4075": 2.75436, + "4080": 2.71672, + "4085": 2.74687, + "4090": 2.67753, + "4095": 2.69318, + "4100": 2.71292, 
+ "4105": 2.73681, + "4110": 2.73132, + "4115": 2.70148, + "4120": 2.72697, + "4125": 2.70056, + "4130": 2.69693, + "4135": 2.68794, + "4140": 2.68045, + "4145": 2.78003, + "4150": 2.70964, + "4155": 2.73827, + "4160": 2.75924, + "4165": 2.71992, + "4170": 2.6732, + "4175": 2.71977, + "4180": 2.72516, + "4185": 2.7266, + "4190": 2.74574, + "4195": 2.69323, + "4200": 2.70389, + "4205": 2.739, + "4210": 2.67821, + "4215": 2.66615, + "4220": 2.66041, + "4225": 2.70194, + "4230": 2.72519, + "4235": 2.74505, + "4240": 2.70556, + "4245": 2.70809, + "4250": 2.71276, + "4255": 2.64758, + "4260": 2.72108, + "4265": 2.7407, + "4270": 2.71826, + "4275": 2.68449, + "4280": 2.69872, + "4285": 2.72975, + "4290": 2.68276, + "4295": 2.69079, + "4300": 2.70099, + "4305": 2.69702, + "4310": 2.72824, + "4315": 2.71117, + "4320": 2.6963, + "4325": 2.70147, + "4330": 2.70861, + "4335": 2.69073, + "4340": 2.69714, + "4345": 2.72371, + "4350": 2.67452, + "4355": 2.69376, + "4360": 2.70886, + "4365": 2.78626, + "4370": 2.73026, + "4375": 2.74021, + "4380": 2.69919, + "4385": 2.6977, + "4390": 2.70243, + "4395": 2.75288, + "4400": 2.66495, + "4405": 2.66457, + "4410": 2.68277, + "4415": 2.70545, + "4420": 2.70636, + "4425": 2.72116, + "4430": 2.68965, + "4435": 2.68021, + "4440": 2.69172, + "4445": 2.67902, + "4450": 2.65267, + "4455": 2.66361, + "4460": 2.68715, + "4465": 2.70505, + "4470": 2.67175, + "4475": 2.68445, + "4480": 2.65432, + "4485": 2.69855, + "4490": 2.6496, + "4495": 2.70815, + "4500": 2.70161, + "4505": 2.69411, + "4510": 2.64848, + "4515": 2.70089, + "4520": 2.66867, + "4525": 2.67028, + "4530": 2.67045, + "4535": 2.67125, + "4540": 2.70756, + "4545": 2.65655, + "4550": 2.69725, + "4555": 2.68117, + "4560": 2.65534, + "4565": 2.63892, + "4570": 2.63915, + "4575": 2.66537, + "4580": 2.68501, + "4585": 2.68386, + "4590": 2.61629, + "4595": 2.66205, + "4600": 2.67639, + "4605": 2.67565, + "4610": 2.65541, + "4615": 2.66315, + "4620": 2.65863, + "4625": 2.70449, + "4630": 
2.67529, + "4635": 2.64482, + "4640": 2.69151, + "4645": 2.64525, + "4650": 2.69485, + "4655": 2.7021, + "4660": 2.67166, + "4665": 2.68866, + "4670": 2.67161, + "4675": 2.68208, + "4680": 2.66232, + "4685": 2.65409, + "4690": 2.70073, + "4695": 2.65354, + "4700": 2.67167, + "4705": 2.64927, + "4710": 2.67483, + "4715": 2.64734, + "4720": 2.71881, + "4725": 2.62661, + "4730": 2.6509, + "4735": 2.6871, + "4740": 2.64143, + "4745": 2.65229, + "4750": 2.64026, + "4755": 2.65299, + "4760": 2.66273, + "4765": 2.64635, + "4770": 2.62577, + "4775": 2.65478, + "4780": 2.65753, + "4785": 2.69313, + "4790": 2.65185, + "4795": 2.67228, + "4800": 2.62783, + "4805": 2.64059, + "4810": 2.6644, + "4815": 2.64691, + "4820": 2.67273, + "4825": 2.65228, + "4830": 2.61459, + "4835": 2.64948, + "4840": 2.65752, + "4845": 2.63255, + "4850": 2.62325, + "4855": 2.59969, + "4860": 2.65178, + "4865": 2.62536, + "4870": 2.6393, + "4875": 2.61885, + "4880": 2.62557, + "4885": 2.62692, + "4890": 2.67877, + "4895": 2.65892, + "4900": 2.61592, + "4905": 2.61904, + "4910": 2.63808, + "4915": 2.61278, + "4920": 2.65283, + "4925": 2.64926, + "4930": 2.57001, + "4935": 2.64979, + "4940": 2.63168, + "4945": 2.6367, + "4950": 2.6251, + "4955": 2.61685, + "4960": 2.6205, + "4965": 2.6585, + "4970": 2.59857, + "4975": 2.65638, + "4980": 2.61828, + "4985": 2.63171, + "4990": 2.65627, + "4995": 2.58066, + "5000": 2.65821, + "5005": 2.6652, + "5010": 2.68657, + "5015": 2.63702, + "5020": 2.6427, + "5025": 2.68656, + "5030": 2.64369, + "5035": 2.6177, + "5040": 2.62106, + "5045": 2.60302, + "5050": 2.62586, + "5055": 2.64898, + "5060": 2.64403, + "5065": 2.6874, + "5070": 2.60434, + "5075": 2.61149, + "5080": 2.61127, + "5085": 2.61237, + "5090": 2.59602, + "5095": 2.65327, + "5100": 2.64698, + "5105": 2.60906, + "5110": 2.66203, + "5115": 2.61929, + "5120": 2.67064, + "5125": 2.62921, + "5130": 2.61274, + "5135": 2.61163, + "5140": 2.57705, + "5145": 2.62816, + "5150": 2.63428, + "5155": 2.61691, + 
"5160": 2.66153, + "5165": 2.58419, + "5170": 2.59059, + "5175": 2.61619, + "5180": 2.60661, + "5185": 2.62149, + "5190": 2.62346, + "5195": 2.66821, + "5200": 2.60109, + "5205": 2.60537, + "5210": 2.6045, + "5215": 2.64533, + "5220": 2.58657, + "5225": 2.55085, + "5230": 2.63463, + "5235": 2.61656, + "5240": 2.62091, + "5245": 2.62955, + "5250": 2.59194, + "5255": 2.61576, + "5260": 2.55526, + "5265": 2.5946, + "5270": 2.58902, + "5275": 2.61851, + "5280": 2.61104, + "5285": 2.6026, + "5290": 2.63419, + "5295": 2.62205, + "5300": 2.57655, + "5305": 2.59742, + "5310": 2.61293, + "5315": 2.59071, + "5320": 2.61645, + "5325": 2.64426, + "5330": 2.60211, + "5335": 2.58442, + "5340": 2.56099, + "5345": 2.65685, + "5350": 2.61867, + "5355": 2.57667, + "5360": 2.59317, + "5365": 2.62154, + "5370": 2.6143, + "5375": 2.62936, + "5380": 2.57983, + "5385": 2.56433, + "5390": 2.58453, + "5395": 2.61878, + "5400": 2.60528, + "5405": 2.54318, + "5410": 2.61244, + "5415": 2.59663, + "5420": 2.61317, + "5425": 2.62412, + "5430": 2.62829, + "5435": 2.57578, + "5440": 2.5861, + "5445": 2.62832, + "5450": 2.65323, + "5455": 2.61253, + "5460": 2.59283, + "5465": 2.60497, + "5470": 2.5971, + "5475": 2.62515, + "5480": 2.58707, + "5485": 2.58882, + "5490": 2.57638, + "5495": 2.56911, + "5500": 2.56798, + "5505": 2.6147, + "5510": 2.62508, + "5515": 2.58073, + "5520": 2.55375, + "5525": 2.58367, + "5530": 2.66448, + "5535": 2.62011, + "5540": 2.56912, + "5545": 2.59599, + "5550": 2.54873, + "5555": 2.57203, + "5560": 2.56243, + "5565": 2.60838, + "5570": 2.65345, + "5575": 2.62922, + "5580": 2.57343, + "5585": 2.5961, + "5590": 2.56073, + "5595": 2.5818, + "5600": 2.55146, + "5605": 2.59879, + "5610": 2.58312, + "5615": 2.58429, + "5620": 2.58081, + "5625": 2.54856, + "5630": 2.56991, + "5635": 2.63041, + "5640": 2.5916, + "5645": 2.56846, + "5650": 2.57465, + "5655": 2.54631, + "5660": 2.55573, + "5665": 2.58316, + "5670": 2.56552, + "5675": 2.6051, + "5680": 2.52587, + "5685": 
2.56617, + "5690": 2.60027, + "5695": 2.5569, + "5700": 2.59658, + "5705": 2.59486, + "5710": 2.57702, + "5715": 2.58277, + "5720": 2.53627, + "5725": 2.60322, + "5730": 2.57254, + "5735": 2.6087, + "5740": 2.59522, + "5745": 2.55931, + "5750": 2.53991, + "5755": 2.55759, + "5760": 2.62474, + "5765": 2.55861, + "5770": 2.54026, + "5775": 2.58406, + "5780": 2.57586, + "5785": 2.5382, + "5790": 2.56312, + "5795": 2.59953, + "5800": 2.54244, + "5805": 2.53338, + "5810": 2.55644, + "5815": 2.52433, + "5820": 2.59823, + "5825": 2.50593, + "5830": 2.49732, + "5835": 2.59651, + "5840": 2.53896, + "5845": 2.55347, + "5850": 2.61179, + "5855": 2.51002, + "5860": 2.55898, + "5865": 2.51794, + "5870": 2.57307, + "5875": 2.60738, + "5880": 2.58521, + "5885": 2.5662, + "5890": 2.58404, + "5895": 2.55371, + "5900": 2.61276, + "5905": 2.55604, + "5910": 2.59526, + "5915": 2.61001, + "5920": 2.58723, + "5925": 2.53738, + "5930": 2.57762, + "5935": 2.55201, + "5940": 2.57061, + "5945": 2.51742, + "5950": 2.55385, + "5955": 2.59921, + "5960": 2.56774, + "5965": 2.61891, + "5970": 2.55084, + "5975": 2.58223, + "5980": 2.55929, + "5985": 2.56146, + "5990": 2.55428, + "5995": 2.55604, + "6000": 2.55347, + "6005": 2.51918, + "6010": 2.55968, + "6015": 2.52192, + "6020": 2.53377, + "6025": 2.5569, + "6030": 2.60368, + "6035": 2.54135, + "6040": 2.54912, + "6045": 2.49048, + "6050": 2.59428, + "6055": 2.51886, + "6060": 2.54379, + "6065": 2.52482, + "6070": 2.52825, + "6075": 2.53458, + "6080": 2.53462, + "6085": 2.59606, + "6090": 2.56861, + "6095": 2.53271, + "6100": 2.54134, + "6105": 2.52059, + "6110": 2.55382, + "6115": 2.58369, + "6120": 2.55719, + "6125": 2.53864, + "6130": 2.47506, + "6135": 2.55486, + "6140": 2.55502, + "6145": 2.55645, + "6150": 2.52511, + "6155": 2.50788, + "6160": 2.53773, + "6165": 2.57035, + "6170": 2.54359, + "6175": 2.59872, + "6180": 2.50939, + "6185": 2.54966, + "6190": 2.49143, + "6195": 2.57856, + "6200": 2.55195, + "6205": 2.53696, + "6210": 2.51913, 
+ "6215": 2.51433, + "6220": 2.56607, + "6225": 2.51286, + "6230": 2.51108, + "6235": 2.56061, + "6240": 2.54928, + "6245": 2.52483, + "6250": 2.52912, + "6255": 2.57251, + "6260": 2.52083, + "6265": 2.57221, + "6270": 2.52297, + "6275": 2.56269, + "6280": 2.52402, + "6285": 2.52172, + "6290": 2.52273, + "6295": 2.50766, + "6300": 2.55521, + "6305": 2.52333, + "6310": 2.51033, + "6315": 2.53641, + "6320": 2.48793, + "6325": 2.59556, + "6330": 2.55493, + "6335": 2.51056, + "6340": 2.51365, + "6345": 2.554, + "6350": 2.55544, + "6355": 2.52304, + "6360": 2.52028, + "6365": 2.48473, + "6370": 2.53351, + "6375": 2.49303, + "6380": 2.55576, + "6385": 2.57478, + "6390": 2.50485, + "6395": 2.54909, + "6400": 2.50599, + "6405": 2.52514, + "6410": 2.51147, + "6415": 2.52125, + "6420": 2.53958, + "6425": 2.53282, + "6430": 2.57602, + "6435": 2.54302, + "6440": 2.53463, + "6445": 2.52617, + "6450": 2.53113, + "6455": 2.52147, + "6460": 2.51423, + "6465": 2.55926, + "6470": 2.51637, + "6475": 2.52603, + "6480": 2.4882, + "6485": 2.52882, + "6490": 2.50677, + "6495": 2.49902, + "6500": 2.52356, + "6505": 2.49389, + "6510": 2.5409, + "6515": 2.50842, + "6520": 2.50856, + "6525": 2.49197, + "6530": 2.54065, + "6535": 2.53032, + "6540": 2.52904, + "6545": 2.55998, + "6550": 2.50044, + "6555": 2.55575, + "6560": 2.50919, + "6565": 2.51983, + "6570": 2.58266, + "6575": 2.5221, + "6580": 2.4976, + "6585": 2.50481, + "6590": 2.50722, + "6595": 2.49751, + "6600": 2.49428, + "6605": 2.53842, + "6610": 2.47637, + "6615": 2.56638, + "6620": 2.53233, + "6625": 2.51062, + "6630": 2.51116, + "6635": 2.47299, + "6640": 2.53891, + "6645": 2.5957, + "6650": 2.50945, + "6655": 2.49555, + "6660": 2.57228, + "6665": 2.5186, + "6670": 2.56576, + "6675": 2.46509, + "6680": 2.54752, + "6685": 2.53432, + "6690": 2.51231, + "6695": 2.48519, + "6700": 2.52191, + "6705": 2.51661, + "6710": 2.49005, + "6715": 2.51625, + "6720": 2.50795, + "6725": 2.51834, + "6730": 2.51954, + "6735": 2.48309, + "6740": 
2.51335, + "6745": 2.49433, + "6750": 2.55697, + "6755": 2.47385, + "6760": 2.54193, + "6765": 2.48826, + "6770": 2.51722, + "6775": 2.50587, + "6780": 2.53774, + "6785": 2.47123, + "6790": 2.54526, + "6795": 2.49809, + "6800": 2.52509, + "6805": 2.51004, + "6810": 2.5022, + "6815": 2.52175, + "6820": 2.48494, + "6825": 2.5025, + "6830": 2.53822, + "6835": 2.5061, + "6840": 2.50895, + "6845": 2.5234, + "6850": 2.47328, + "6855": 2.51141, + "6860": 2.50139, + "6865": 2.48788, + "6870": 2.55255, + "6875": 2.47292, + "6880": 2.54863, + "6885": 2.47591, + "6890": 2.54517, + "6895": 2.50126, + "6900": 2.48503, + "6905": 2.49614, + "6910": 2.51701, + "6915": 2.51638, + "6920": 2.53141, + "6925": 2.53752, + "6930": 2.48681, + "6935": 2.51676, + "6940": 2.49758, + "6945": 2.45779, + "6950": 2.48149, + "6955": 2.52444, + "6960": 2.51846, + "6965": 2.49132, + "6970": 2.47056, + "6975": 2.52071, + "6980": 2.45195, + "6985": 2.51437, + "6990": 2.52894, + "6995": 2.46158, + "7000": 2.48597, + "7005": 2.4686, + "7010": 2.47377, + "7015": 2.52019, + "7020": 2.46598, + "7025": 2.45097, + "7030": 2.48257, + "7035": 2.47716, + "7040": 2.50435, + "7045": 2.51849, + "7050": 2.52366, + "7055": 2.43866, + "7060": 2.47405, + "7065": 2.4831, + "7070": 2.48979, + "7075": 2.49284, + "7080": 2.53297, + "7085": 2.48384, + "7090": 2.47443, + "7095": 2.50012, + "7100": 2.5144, + "7105": 2.48598, + "7110": 2.48519, + "7115": 2.50298, + "7120": 2.46732, + "7125": 2.45979, + "7130": 2.48348, + "7135": 2.51033, + "7140": 2.49838, + "7145": 2.49599, + "7150": 2.50775, + "7155": 2.50185, + "7160": 2.472, + "7165": 2.45522, + "7170": 2.50356, + "7175": 2.50054, + "7180": 2.50191, + "7185": 2.48, + "7190": 2.45987, + "7195": 2.46375, + "7200": 2.50646, + "7205": 2.48744, + "7210": 2.44239, + "7215": 2.47767, + "7220": 2.44151, + "7225": 2.51106, + "7230": 2.50582, + "7235": 2.48172, + "7240": 2.4782, + "7245": 2.49936, + "7250": 2.50678, + "7255": 2.4912, + "7260": 2.45802, + "7265": 2.45003, + "7270": 
2.46693, + "7275": 2.49683, + "7280": 2.49101, + "7285": 2.42033, + "7290": 2.47866, + "7295": 2.48483, + "7300": 2.41501, + "7305": 2.44141, + "7310": 2.44461, + "7315": 2.48723, + "7320": 2.48097, + "7325": 2.45667, + "7330": 2.48916, + "7335": 2.47148, + "7340": 2.4637, + "7345": 2.49267, + "7350": 2.51067, + "7355": 2.49399, + "7360": 2.47747, + "7365": 2.46623, + "7370": 2.46976, + "7375": 2.44817, + "7380": 2.49099, + "7385": 2.48213, + "7390": 2.46996, + "7395": 2.46937, + "7400": 2.47759, + "7405": 2.43735, + "7410": 2.479, + "7415": 2.46864, + "7420": 2.49062, + "7425": 2.45385, + "7430": 2.52079, + "7435": 2.48865, + "7440": 2.51835, + "7445": 2.50675, + "7450": 2.4717, + "7455": 2.45394, + "7460": 2.46163, + "7465": 2.47321, + "7470": 2.44826, + "7475": 2.45502, + "7480": 2.50736, + "7485": 2.44851, + "7490": 2.47418, + "7495": 2.47985, + "7500": 2.49378, + "7505": 2.43918, + "7510": 2.43573, + "7515": 2.42055, + "7520": 2.49283, + "7525": 2.49607, + "7530": 2.47395, + "7535": 2.45882, + "7540": 2.47236, + "7545": 2.47257, + "7550": 2.48854, + "7555": 2.45427, + "7560": 2.42558, + "7565": 2.50802, + "7570": 2.48336, + "7575": 2.43657, + "7580": 2.45585, + "7585": 2.4796, + "7590": 2.47883, + "7595": 2.46048, + "7600": 2.4605, + "7605": 2.44515, + "7610": 2.44772, + "7615": 2.42408, + "7620": 2.54293, + "7625": 2.47978, + "7630": 2.42405, + "7635": 2.42523, + "7640": 2.45197, + "7645": 2.47074, + "7650": 2.46103, + "7655": 2.48292, + "7660": 2.45069, + "7665": 2.43132, + "7670": 2.43977, + "7675": 2.45451, + "7680": 2.48428, + "7685": 2.43105, + "7690": 2.47923, + "7695": 2.45298, + "7700": 2.49224, + "7705": 2.5344, + "7710": 2.49781, + "7715": 2.44151, + "7720": 2.46783, + "7725": 2.47891, + "7730": 2.45705, + "7735": 2.47177, + "7740": 2.43645, + "7745": 2.44594, + "7750": 2.43539, + "7755": 2.46419, + "7760": 2.44791, + "7765": 2.4518, + "7770": 2.4666, + "7775": 2.44972, + "7780": 2.41283, + "7785": 2.44234, + "7790": 2.48019, + "7795": 2.43806, + 
"7800": 2.46172, + "7805": 2.47968, + "7810": 2.50095, + "7815": 2.4866, + "7820": 2.44473, + "7825": 2.51138, + "7830": 2.45119, + "7835": 2.46518, + "7840": 2.47743, + "7845": 2.45952, + "7850": 2.41496, + "7855": 2.47017, + "7860": 2.49724, + "7865": 2.42238, + "7870": 2.46624, + "7875": 2.4439, + "7880": 2.45106, + "7885": 2.46043, + "7890": 2.47034, + "7895": 2.44531, + "7900": 2.43635, + "7905": 2.43389, + "7910": 2.42388, + "7915": 2.47992, + "7920": 2.47327, + "7925": 2.41978, + "7930": 2.47016, + "7935": 2.45012, + "7940": 2.41955, + "7945": 2.46827, + "7950": 2.4431, + "7955": 2.41866, + "7960": 2.48694, + "7965": 2.51733, + "7970": 2.52064, + "7975": 2.44744, + "7980": 2.44021, + "7985": 2.46499, + "7990": 2.42982, + "7995": 2.4678, + "8000": 2.43369, + "8005": 2.41551, + "8010": 2.4566, + "8015": 2.46743, + "8020": 2.4794, + "8025": 2.47237, + "8030": 2.44969, + "8035": 2.46877, + "8040": 2.42022, + "8045": 2.45138, + "8050": 2.44722, + "8055": 2.42479, + "8060": 2.44355, + "8065": 2.463, + "8070": 2.45573, + "8075": 2.45766, + "8080": 2.44452, + "8085": 2.43918, + "8090": 2.42947, + "8095": 2.42336, + "8100": 2.43809, + "8105": 2.4929, + "8110": 2.43687, + "8115": 2.44253, + "8120": 2.46616, + "8125": 2.46474, + "8130": 2.45131, + "8135": 2.45162, + "8140": 2.43804, + "8145": 2.42476, + "8150": 2.4201, + "8155": 2.48593, + "8160": 2.45442, + "8165": 2.44161, + "8170": 2.43359, + "8175": 2.42032, + "8180": 2.49387, + "8185": 2.42374, + "8190": 2.46733, + "8195": 2.45639, + "8200": 2.44552, + "8205": 2.44393, + "8210": 2.42974, + "8215": 2.43789, + "8220": 2.43568, + "8225": 2.40813, + "8230": 2.44002, + "8235": 2.46402, + "8240": 2.42523, + "8245": 2.44686, + "8250": 2.44348, + "8255": 2.43916, + "8260": 2.43307, + "8265": 2.42573, + "8270": 2.43174, + "8275": 2.44039, + "8280": 2.3963, + "8285": 2.43685, + "8290": 2.47852, + "8295": 2.44671, + "8300": 2.45587, + "8305": 2.40638, + "8310": 2.43379, + "8315": 2.45525, + "8320": 2.39709, + "8325": 
2.39084, + "8330": 2.43277, + "8335": 2.44245, + "8340": 2.4878, + "8345": 2.44494, + "8350": 2.44735, + "8355": 2.40568, + "8360": 2.39807, + "8365": 2.45388, + "8370": 2.4495, + "8375": 2.42225, + "8380": 2.41565, + "8385": 2.42229, + "8390": 2.43699, + "8395": 2.43929, + "8400": 2.43589, + "8405": 2.4875, + "8410": 2.43766, + "8415": 2.43224, + "8420": 2.41348, + "8425": 2.43603, + "8430": 2.45874, + "8435": 2.40392, + "8440": 2.44872, + "8445": 2.45716, + "8450": 2.40546, + "8455": 2.45741, + "8460": 2.4534, + "8465": 2.43498, + "8470": 2.40732, + "8475": 2.47375, + "8480": 2.40045, + "8485": 2.41586, + "8490": 2.4639, + "8495": 2.43561, + "8500": 2.44316, + "8505": 2.40386, + "8510": 2.40086, + "8515": 2.42745, + "8520": 2.42214, + "8525": 2.4898, + "8530": 2.37128, + "8535": 2.39928, + "8540": 2.48121, + "8545": 2.37881, + "8550": 2.43739, + "8555": 2.44828, + "8560": 2.46927, + "8565": 2.41924, + "8570": 2.42864, + "8575": 2.44669, + "8580": 2.43829, + "8585": 2.41804, + "8590": 2.40217, + "8595": 2.42543, + "8600": 2.40923, + "8605": 2.48923, + "8610": 2.4189, + "8615": 2.38627, + "8620": 2.44771, + "8625": 2.42556, + "8630": 2.45885, + "8635": 2.45722, + "8640": 2.43592, + "8645": 2.47348, + "8650": 2.42058, + "8655": 2.45362, + "8660": 2.45518, + "8665": 2.38364, + "8670": 2.40841, + "8675": 2.42831, + "8680": 2.44533, + "8685": 2.42795, + "8690": 2.40821, + "8695": 2.44087, + "8700": 2.43118, + "8705": 2.41801, + "8710": 2.42606, + "8715": 2.44636, + "8720": 2.47348, + "8725": 2.40723, + "8730": 2.38832, + "8735": 2.43292, + "8740": 2.42791, + "8745": 2.39523, + "8750": 2.43296, + "8755": 2.4228, + "8760": 2.39965, + "8765": 2.43238, + "8770": 2.40279, + "8775": 2.43581, + "8780": 2.4169, + "8785": 2.47064, + "8790": 2.41812, + "8795": 2.41771, + "8800": 2.4148, + "8805": 2.40261, + "8810": 2.40855, + "8815": 2.47233, + "8820": 2.45221, + "8825": 2.42327, + "8830": 2.38596, + "8835": 2.42007, + "8840": 2.3912, + "8845": 2.42402, + "8850": 2.43315, + 
"8855": 2.40161, + "8860": 2.42753, + "8865": 2.42451, + "8870": 2.43294, + "8875": 2.43697, + "8880": 2.41075, + "8885": 2.39174, + "8890": 2.44505, + "8895": 2.42678, + "8900": 2.41124, + "8905": 2.40029, + "8910": 2.39777, + "8915": 2.41598, + "8920": 2.43089, + "8925": 2.46425, + "8930": 2.41457, + "8935": 2.40635, + "8940": 2.38763, + "8945": 2.39267, + "8950": 2.41516, + "8955": 2.39226, + "8960": 2.43161, + "8965": 2.41676, + "8970": 2.40264, + "8975": 2.47364, + "8980": 2.43865, + "8985": 2.37275, + "8990": 2.40714, + "8995": 2.41421, + "9000": 2.45425, + "9005": 2.41041, + "9010": 2.37331, + "9015": 2.40629, + "9020": 2.39684, + "9025": 2.36793, + "9030": 2.39775, + "9035": 2.4222, + "9040": 2.42089, + "9045": 2.4185, + "9050": 2.39505, + "9055": 2.41883, + "9060": 2.41777, + "9065": 2.40319, + "9070": 2.44397, + "9075": 2.39439, + "9080": 2.43305, + "9085": 2.41006, + "9090": 2.40938, + "9095": 2.3941, + "9100": 2.39969, + "9105": 2.35719, + "9110": 2.46849, + "9115": 2.4178, + "9120": 2.4039, + "9125": 2.45748, + "9130": 2.39278, + "9135": 2.44705, + "9140": 2.43327, + "9145": 2.42517, + "9150": 2.42294, + "9155": 2.37266, + "9160": 2.41355, + "9165": 2.42349, + "9170": 2.37092, + "9175": 2.41554, + "9180": 2.3779, + "9185": 2.438, + "9190": 2.41083, + "9195": 2.39616, + "9200": 2.40527, + "9205": 2.45505, + "9210": 2.36436, + "9215": 2.46318, + "9220": 2.44705, + "9225": 2.38094, + "9230": 2.44355, + "9235": 2.39394, + "9240": 2.39928, + "9245": 2.43306, + "9250": 2.43135, + "9255": 2.42746, + "9260": 2.38386, + "9265": 2.43528, + "9270": 2.43273, + "9275": 2.39047, + "9280": 2.38694, + "9285": 2.419, + "9290": 2.40065, + "9295": 2.38095, + "9300": 2.42048, + "9305": 2.40199, + "9310": 2.41314, + "9315": 2.40673, + "9320": 2.44261, + "9325": 2.36848, + "9330": 2.40099, + "9335": 2.35836, + "9340": 2.40562, + "9345": 2.41339, + "9350": 2.43711, + "9355": 2.54976, + "9360": 2.46791, + "9365": 2.40083, + "9370": 2.44296, + "9375": 2.43843, + "9380": 
2.35492, + "9385": 2.40106, + "9390": 2.37923, + "9395": 2.38537, + "9400": 2.44192, + "9405": 2.41134, + "9410": 2.39282, + "9415": 2.43379, + "9420": 2.44336, + "9425": 2.42952, + "9430": 2.44459, + "9435": 2.41314, + "9440": 2.47521, + "9445": 2.37319, + "9450": 2.39425, + "9455": 2.40156, + "9460": 2.3856, + "9465": 2.37862, + "9470": 2.38082, + "9475": 2.36462, + "9480": 2.43197, + "9485": 2.38766, + "9490": 2.42039, + "9495": 2.38324, + "9500": 2.36513, + "9505": 2.43009, + "9510": 2.39857, + "9515": 2.42968, + "9520": 2.41785, + "9525": 2.38847, + "9530": 2.45376, + "9535": 2.40056, + "9540": 2.41611, + "9545": 2.37735, + "9550": 2.42123, + "9555": 2.38863, + "9560": 2.42176, + "9565": 2.4065, + "9570": 2.37249, + "9575": 2.41052, + "9580": 2.3951, + "9585": 2.42188, + "9590": 2.42821, + "9595": 2.44726, + "9600": 2.38934, + "9605": 2.38358, + "9610": 2.42106, + "9615": 2.4142, + "9620": 2.41302, + "9625": 2.44609, + "9630": 2.3957, + "9635": 2.40141, + "9640": 2.44439, + "9645": 2.41011, + "9650": 2.39715, + "9655": 2.3728, + "9660": 2.42407, + "9665": 2.38942, + "9670": 2.38106, + "9675": 2.35623, + "9680": 2.39556, + "9685": 2.39468, + "9690": 2.46311, + "9695": 2.3803, + "9700": 2.37568, + "9705": 2.38264, + "9710": 2.36519, + "9715": 2.38727, + "9720": 2.43505, + "9725": 2.44188, + "9730": 2.42828, + "9735": 2.38742, + "9740": 2.38014, + "9745": 2.42614, + "9750": 2.39713, + "9755": 2.40574, + "9760": 2.41046, + "9765": 2.36693, + "9770": 2.46311, + "9775": 2.40027, + "9780": 2.3623, + "9785": 2.40073, + "9790": 2.40637, + "9795": 2.35988, + "9800": 2.39429, + "9805": 2.40453, + "9810": 2.40856, + "9815": 2.37781, + "9820": 2.37606, + "9825": 2.40326, + "9830": 2.42047, + "9835": 2.38296, + "9840": 2.41561, + "9845": 2.36392, + "9850": 2.39833, + "9855": 2.39588, + "9860": 2.39015, + "9865": 2.37872, + "9870": 2.38479, + "9875": 2.37975, + "9880": 2.45111, + "9885": 2.39299, + "9890": 2.3509, + "9895": 2.31999, + "9900": 2.39398, + "9905": 2.42373, + 
"9910": 2.35469, + "9915": 2.36188, + "9920": 2.41069, + "9925": 2.3962, + "9930": 2.37914, + "9935": 2.34871, + "9940": 2.38316, + "9945": 2.37785, + "9950": 2.40242, + "9955": 2.44647, + "9960": 2.431, + "9965": 2.35688, + "9970": 2.40859, + "9975": 2.3835, + "9980": 2.33008, + "9985": 2.40516, + "9990": 2.39642, + "9995": 2.39479, + "10000": 2.36666, + "10005": 2.37185, + "10010": 2.38179, + "10015": 2.44374, + "10020": 2.36251, + "10025": 2.38767, + "10030": 2.38659, + "10035": 2.40904, + "10040": 2.40154, + "10045": 2.38171, + "10050": 2.35085, + "10055": 2.36819, + "10060": 2.41802, + "10065": 2.3733, + "10070": 2.42068, + "10075": 2.37133, + "10080": 2.36112, + "10085": 2.36913, + "10090": 2.34609, + "10095": 2.4004, + "10100": 2.31173, + "10105": 2.37923, + "10110": 2.40867, + "10115": 2.38512, + "10120": 2.35537, + "10125": 2.36885, + "10130": 2.35772, + "10135": 2.38091, + "10140": 2.41174, + "10145": 2.40531, + "10150": 2.37466, + "10155": 2.39341, + "10160": 2.36024, + "10165": 2.38118, + "10170": 2.42175, + "10175": 2.32278, + "10180": 2.39348, + "10185": 2.38112, + "10190": 2.44208, + "10195": 2.40114, + "10200": 2.38989 + } + }, + "num-zeros": { + "start_step": 1, + "end_step": 10200, + "step_interval": 5, + "values": { + "1": 956237952.0, + "5": 967338112.0, + "10": 971388416.0, + "15": 946492416.0, + "20": 957330304.0, + "25": 1064217280.0, + "30": 1177614208.0, + "35": 1231743488.0, + "40": 1192773248.0, + "45": 1126056192.0, + "50": 1114252800.0, + "55": 1079868160.0, + "60": 1027359552.0, + "65": 1009657856.0, + "70": 993522560.0, + "75": 986463616.0, + "80": 1003645696.0, + "85": 998620032.0, + "90": 980123200.0, + "95": 955802304.0, + "100": 970032128.0, + "105": 979558464.0, + "110": 977155840.0, + "115": 976995648.0, + "120": 960840640.0, + "125": 943207360.0, + "130": 976265792.0, + "135": 965483008.0, + "140": 963541184.0, + "145": 976785856.0, + "150": 921376256.0, + "155": 968230080.0, + "160": 956410112.0, + "165": 959776064.0, + "170": 
974353984.0, + "175": 949006720.0, + "180": 946693120.0, + "185": 972018368.0, + "190": 969083072.0, + "195": 985127296.0, + "200": 945772672.0, + "205": 958353408.0, + "210": 979445952.0, + "215": 967486976.0, + "220": 956421696.0, + "225": 962398720.0, + "230": 948180224.0, + "235": 965227392.0, + "240": 966053440.0, + "245": 969162176.0, + "250": 974440448.0, + "255": 925063104.0, + "260": 965635712.0, + "265": 970669504.0, + "270": 959133056.0, + "275": 953996096.0, + "280": 963428992.0, + "285": 945783360.0, + "290": 974114048.0, + "295": 966696704.0, + "300": 967154368.0, + "305": 964506688.0, + "310": 940354688.0, + "315": 967400640.0, + "320": 969003968.0, + "325": 980556544.0, + "330": 972097024.0, + "335": 946861312.0, + "340": 966592256.0, + "345": 973024128.0, + "350": 973919360.0, + "355": 963257216.0, + "360": 948355072.0, + "365": 964821888.0, + "370": 962956160.0, + "375": 958448192.0, + "380": 947154304.0, + "385": 955990528.0, + "390": 945401728.0, + "395": 970420864.0, + "400": 979777280.0, + "405": 968349440.0, + "410": 970066624.0, + "415": 953156736.0, + "420": 943564288.0, + "425": 954775680.0, + "430": 962658560.0, + "435": 977076480.0, + "440": 954808960.0, + "445": 971887936.0, + "450": 963509120.0, + "455": 973133504.0, + "460": 983711680.0, + "465": 945280256.0, + "470": 942053120.0, + "475": 967006336.0, + "480": 966106368.0, + "485": 976411264.0, + "490": 962540736.0, + "495": 945465216.0, + "500": 964456640.0, + "505": 986011904.0, + "510": 965684480.0, + "515": 943408256.0, + "520": 945018624.0, + "525": 971265920.0, + "530": 971885824.0, + "535": 979139712.0, + "540": 969531776.0, + "545": 954128384.0, + "550": 951264512.0, + "555": 987223680.0, + "560": 960431488.0, + "565": 966615296.0, + "570": 975727872.0, + "575": 927221504.0, + "580": 970695168.0, + "585": 961176704.0, + "590": 972965824.0, + "595": 963685696.0, + "600": 937080896.0, + "605": 951472448.0, + "610": 963365312.0, + "615": 970013120.0, + "620": 976467456.0, + 
"625": 949582144.0, + "630": 954443392.0, + "635": 986044928.0, + "640": 980976256.0, + "645": 955010944.0, + "650": 958547072.0, + "655": 951653568.0, + "660": 961041088.0, + "665": 967554752.0, + "670": 962515968.0, + "675": 968332416.0, + "680": 965617152.0, + "685": 962867648.0, + "690": 961918848.0, + "695": 954766656.0, + "700": 970338304.0, + "705": 945516672.0, + "710": 943885568.0, + "715": 973354368.0, + "720": 968367552.0, + "725": 978491392.0, + "730": 952193920.0, + "735": 948812288.0, + "740": 955630848.0, + "745": 975866880.0, + "750": 981238016.0, + "755": 962152064.0, + "760": 951964928.0, + "765": 967348608.0, + "770": 976151168.0, + "775": 970545920.0, + "780": 977537536.0, + "785": 931530240.0, + "790": 960442880.0, + "795": 964583104.0, + "800": 967022336.0, + "805": 962319104.0, + "810": 940973184.0, + "815": 949035712.0, + "820": 953181248.0, + "825": 954500288.0, + "830": 976441984.0, + "835": 956073600.0, + "840": 948406720.0, + "845": 965158016.0, + "850": 966029056.0, + "855": 960904896.0, + "860": 976026752.0, + "865": 938160640.0, + "870": 966414464.0, + "875": 972316416.0, + "880": 963124608.0, + "885": 967743488.0, + "890": 949968896.0, + "895": 960018432.0, + "900": 974232704.0, + "905": 963969664.0, + "910": 958437888.0, + "915": 956355456.0, + "920": 943971968.0, + "925": 960835456.0, + "930": 978848640.0, + "935": 971072640.0, + "940": 960906880.0, + "945": 945063680.0, + "950": 957426432.0, + "955": 979036352.0, + "960": 983589184.0, + "965": 966167808.0, + "970": 951230848.0, + "975": 961577024.0, + "980": 968069504.0, + "985": 968991744.0, + "990": 984392448.0, + "995": 953289600.0, + "1000": 934775808.0, + "1005": 960145920.0, + "1010": 971539584.0, + "1015": 985186304.0, + "1020": 962782080.0, + "1025": 935012032.0, + "1030": 974681856.0, + "1035": 964992512.0, + "1040": 980466560.0, + "1045": 960827392.0, + "1050": 955202112.0, + "1055": 957778496.0, + "1060": 967746560.0, + "1065": 967116288.0, + "1070": 966603008.0, + 
"1075": 950059968.0, + "1080": 954511488.0, + "1085": 967252544.0, + "1090": 977133568.0, + "1095": 961236864.0, + "1100": 979609024.0, + "1105": 953366336.0, + "1110": 965954304.0, + "1115": 966985344.0, + "1120": 970350720.0, + "1125": 965709824.0, + "1130": 954939008.0, + "1135": 965841728.0, + "1140": 965176832.0, + "1145": 970989504.0, + "1150": 955556288.0, + "1155": 930579200.0, + "1160": 957775040.0, + "1165": 978125824.0, + "1170": 974303808.0, + "1175": 973059712.0, + "1180": 973081600.0, + "1185": 947342208.0, + "1190": 964794880.0, + "1195": 953139200.0, + "1200": 972845568.0, + "1205": 988479104.0, + "1210": 931126400.0, + "1215": 968647168.0, + "1220": 969162304.0, + "1225": 975951360.0, + "1230": 967335360.0, + "1235": 943445120.0, + "1240": 955857792.0, + "1245": 981503808.0, + "1250": 966111232.0, + "1255": 973673344.0, + "1260": 946494592.0, + "1265": 963999488.0, + "1270": 960487680.0, + "1275": 973616128.0, + "1280": 961113856.0, + "1285": 957583488.0, + "1290": 952529984.0, + "1295": 971612800.0, + "1300": 968863616.0, + "1305": 963741440.0, + "1310": 963333248.0, + "1315": 943553536.0, + "1320": 966307328.0, + "1325": 989787264.0, + "1330": 969507712.0, + "1335": 972301824.0, + "1340": 972270720.0, + "1345": 960657792.0, + "1350": 968638464.0, + "1355": 955853120.0, + "1360": 971822336.0, + "1365": 960387584.0, + "1370": 948790784.0, + "1375": 973530240.0, + "1380": 953468864.0, + "1385": 969147008.0, + "1390": 975717376.0, + "1395": 931675264.0, + "1400": 945854976.0, + "1405": 976751488.0, + "1410": 974512384.0, + "1415": 967568640.0, + "1420": 966748928.0, + "1425": 937378944.0, + "1430": 973915136.0, + "1435": 978334336.0, + "1440": 964178496.0, + "1445": 958059456.0, + "1450": 946148224.0, + "1455": 983922496.0, + "1460": 968650368.0, + "1465": 948745536.0, + "1470": 984242432.0, + "1475": 943906048.0, + "1480": 963975680.0, + "1485": 957348864.0, + "1490": 961262592.0, + "1495": 980537280.0, + "1500": 958331264.0, + "1505": 942866944.0, 
+ "1510": 984179712.0, + "1515": 959093120.0, + "1520": 959104640.0, + "1525": 952784704.0, + "1530": 957740608.0, + "1535": 949429632.0, + "1540": 971081792.0, + "1545": 963131776.0, + "1550": 978667264.0, + "1555": 952316864.0, + "1560": 980088832.0, + "1565": 967314944.0, + "1570": 973842944.0, + "1575": 975493248.0, + "1580": 941857664.0, + "1585": 970029056.0, + "1590": 983819456.0, + "1595": 948632192.0, + "1600": 967443328.0, + "1605": 952451072.0, + "1610": 969618944.0, + "1615": 983146624.0, + "1620": 968018048.0, + "1625": 970716096.0, + "1630": 962886784.0, + "1635": 942310912.0, + "1640": 981611776.0, + "1645": 973976512.0, + "1650": 974183872.0, + "1655": 967264640.0, + "1660": 940688512.0, + "1665": 961703040.0, + "1670": 962901120.0, + "1675": 971277184.0, + "1680": 980877056.0, + "1685": 944416128.0, + "1690": 964686656.0, + "1695": 965641088.0, + "1700": 966339584.0, + "1705": 985198208.0, + "1710": 978354176.0, + "1715": 943209216.0, + "1720": 977088000.0, + "1725": 965873792.0, + "1730": 968969408.0, + "1735": 965084288.0, + "1740": 949714944.0, + "1745": 970010688.0, + "1750": 959682944.0, + "1755": 960089600.0, + "1760": 966381056.0, + "1765": 951815488.0, + "1770": 954663296.0, + "1775": 973752448.0, + "1780": 970534144.0, + "1785": 968825472.0, + "1790": 950234368.0, + "1795": 945131648.0, + "1800": 984667392.0, + "1805": 987162112.0, + "1810": 977768064.0, + "1815": 948004352.0, + "1820": 949208000.0, + "1825": 978852096.0, + "1830": 966362816.0, + "1835": 964133376.0, + "1840": 972319616.0, + "1845": 935413504.0, + "1850": 952499200.0, + "1855": 980050816.0, + "1860": 975869248.0, + "1865": 958964544.0, + "1870": 958950400.0, + "1875": 932595072.0, + "1880": 973574272.0, + "1885": 978847104.0, + "1890": 971360000.0, + "1895": 959212032.0, + "1900": 947393792.0, + "1905": 981830912.0, + "1910": 969124096.0, + "1915": 970039936.0, + "1920": 975597952.0, + "1925": 960497280.0, + "1930": 977924032.0, + "1935": 963251200.0, + "1940": 
952460224.0, + "1945": 981337472.0, + "1950": 939171904.0, + "1955": 960605568.0, + "1960": 970030464.0, + "1965": 981177472.0, + "1970": 962043776.0, + "1975": 952823424.0, + "1980": 936848896.0, + "1985": 975940736.0, + "1990": 965966464.0, + "1995": 962611456.0, + "2000": 960555008.0, + "2005": 954499200.0, + "2010": 975581056.0, + "2015": 991801792.0, + "2020": 975435008.0, + "2025": 974302976.0, + "2030": 952083712.0, + "2035": 967848448.0, + "2040": 987458944.0, + "2045": 976479360.0, + "2050": 984702976.0, + "2055": 942838016.0, + "2060": 942594048.0, + "2065": 966208512.0, + "2070": 969622976.0, + "2075": 980553792.0, + "2080": 977598656.0, + "2085": 939638016.0, + "2090": 969875584.0, + "2095": 961274048.0, + "2100": 976719040.0, + "2105": 972537344.0, + "2110": 959904896.0, + "2115": 956878848.0, + "2120": 977481088.0, + "2125": 962566656.0, + "2130": 979619328.0, + "2135": 950538496.0, + "2140": 946996160.0, + "2145": 962277888.0, + "2150": 973404544.0, + "2155": 972690560.0, + "2160": 970314880.0, + "2165": 948640000.0, + "2170": 961543680.0, + "2175": 969376896.0, + "2180": 969329408.0, + "2185": 947448576.0, + "2190": 940479616.0, + "2195": 986087424.0, + "2200": 961861504.0, + "2205": 978924288.0, + "2210": 964101632.0, + "2215": 963502080.0, + "2220": 951311616.0, + "2225": 969316992.0, + "2230": 976328064.0, + "2235": 974026560.0, + "2240": 975493120.0, + "2245": 960232576.0, + "2250": 967639616.0, + "2255": 969130752.0, + "2260": 975064896.0, + "2265": 968259968.0, + "2270": 951744256.0, + "2275": 962768576.0, + "2280": 969637248.0, + "2285": 971692416.0, + "2290": 962890240.0, + "2295": 931408128.0, + "2300": 959906304.0, + "2305": 970426752.0, + "2310": 967444608.0, + "2315": 970905344.0, + "2320": 975590400.0, + "2325": 938586880.0, + "2330": 988439424.0, + "2335": 977490176.0, + "2340": 964596672.0, + "2345": 964165888.0, + "2350": 947553664.0, + "2355": 977028864.0, + "2360": 966901632.0, + "2365": 977296576.0, + "2370": 965071616.0, + 
"2375": 953964800.0, + "2380": 962916736.0, + "2385": 967195392.0, + "2390": 963075968.0, + "2395": 974463232.0, + "2400": 958411520.0, + "2405": 968120704.0, + "2410": 951586944.0, + "2415": 965904896.0, + "2420": 966517056.0, + "2425": 959046016.0, + "2430": 956686464.0, + "2435": 961390208.0, + "2440": 959755712.0, + "2445": 970888704.0, + "2450": 961996800.0, + "2455": 922722368.0, + "2460": 951953664.0, + "2465": 955730688.0, + "2470": 972571136.0, + "2475": 973811776.0, + "2480": 943895680.0, + "2485": 944185216.0, + "2490": 972411648.0, + "2495": 974452480.0, + "2500": 973908352.0, + "2505": 958490112.0, + "2510": 939510528.0, + "2515": 979550912.0, + "2520": 970472448.0, + "2525": 964388608.0, + "2530": 955796608.0, + "2535": 936597696.0, + "2540": 969027008.0, + "2545": 970385088.0, + "2550": 969461376.0, + "2555": 969440256.0, + "2560": 964978688.0, + "2565": 959765504.0, + "2570": 985177344.0, + "2575": 957424640.0, + "2580": 967423872.0, + "2585": 966023168.0, + "2590": 956354816.0, + "2595": 981828736.0, + "2600": 959531456.0, + "2605": 963000320.0, + "2610": 965972736.0, + "2615": 951925504.0, + "2620": 971241792.0, + "2625": 976456000.0, + "2630": 974411008.0, + "2635": 948070528.0, + "2640": 948137536.0, + "2645": 963037504.0, + "2650": 953983488.0, + "2655": 977112320.0, + "2660": 949622400.0, + "2665": 953930112.0, + "2670": 959063104.0, + "2675": 979275328.0, + "2680": 961396608.0, + "2685": 970700992.0, + "2690": 965221376.0, + "2695": 943552256.0, + "2700": 969424320.0, + "2705": 978961152.0, + "2710": 971810624.0, + "2715": 990813696.0, + "2720": 942648576.0, + "2725": 967955072.0, + "2730": 955466432.0, + "2735": 970671680.0, + "2740": 977920768.0, + "2745": 932277184.0, + "2750": 947856896.0, + "2755": 956317312.0, + "2760": 981696576.0, + "2765": 966112512.0, + "2770": 948914816.0, + "2775": 935831168.0, + "2780": 964779392.0, + "2785": 969569152.0, + "2790": 974273280.0, + "2795": 966886464.0, + "2800": 944390336.0, + "2805": 964353152.0, 
+ "2810": 969610112.0, + "2815": 975844992.0, + "2820": 963087232.0, + "2825": 937630464.0, + "2830": 956741184.0, + "2835": 986323136.0, + "2840": 961760640.0, + "2845": 967509376.0, + "2850": 951717760.0, + "2855": 962093184.0, + "2860": 954244160.0, + "2865": 955883520.0, + "2870": 944663744.0, + "2875": 974664576.0, + "2880": 968203584.0, + "2885": 981082880.0, + "2890": 953455424.0, + "2895": 957180416.0, + "2900": 964989952.0, + "2905": 931707840.0, + "2910": 955731584.0, + "2915": 979475584.0, + "2920": 970492544.0, + "2925": 964975744.0, + "2930": 964048320.0, + "2935": 940140672.0, + "2940": 964912256.0, + "2945": 989149120.0, + "2950": 965210112.0, + "2955": 965103936.0, + "2960": 933162304.0, + "2965": 968795008.0, + "2970": 973035456.0, + "2975": 958092224.0, + "2980": 964498432.0, + "2985": 937269120.0, + "2990": 951253824.0, + "2995": 978316800.0, + "3000": 969276480.0, + "3005": 974686272.0, + "3010": 950235392.0, + "3015": 943840768.0, + "3020": 958442752.0, + "3025": 975186560.0, + "3030": 965017536.0, + "3035": 963453888.0, + "3040": 952134912.0, + "3045": 989794240.0, + "3050": 965545472.0, + "3055": 982520576.0, + "3060": 971226112.0, + "3065": 943915776.0, + "3070": 978408576.0, + "3075": 975205120.0, + "3080": 960991872.0, + "3085": 962352384.0, + "3090": 945953152.0, + "3095": 938115200.0, + "3100": 972929600.0, + "3105": 961989824.0, + "3110": 970656768.0, + "3115": 963390464.0, + "3120": 947116928.0, + "3125": 972720768.0, + "3130": 952973952.0, + "3135": 966041472.0, + "3140": 968488256.0, + "3145": 937852416.0, + "3150": 975010240.0, + "3155": 976814592.0, + "3160": 969627968.0, + "3165": 982194304.0, + "3170": 937959744.0, + "3175": 953826560.0, + "3180": 983809536.0, + "3185": 965169152.0, + "3190": 968482880.0, + "3195": 950932736.0, + "3200": 945101824.0, + "3205": 959861504.0, + "3210": 957486464.0, + "3215": 958020480.0, + "3220": 968130112.0, + "3225": 935615104.0, + "3230": 962589184.0, + "3235": 975776128.0, + "3240": 
962621696.0, + "3245": 981274240.0, + "3250": 943259968.0, + "3255": 954599104.0, + "3260": 980363392.0, + "3265": 963620864.0, + "3270": 965163904.0, + "3275": 959731840.0, + "3280": 967046016.0, + "3285": 982476928.0, + "3290": 947689088.0, + "3295": 966422912.0, + "3300": 959166464.0, + "3305": 949131840.0, + "3310": 979510912.0, + "3315": 964284352.0, + "3320": 969208320.0, + "3325": 956195136.0, + "3330": 941167040.0, + "3335": 964974336.0, + "3340": 956901504.0, + "3345": 972501184.0, + "3350": 964574464.0, + "3355": 943343168.0, + "3360": 970036736.0, + "3365": 969453248.0, + "3370": 954763264.0, + "3375": 958677248.0, + "3380": 971464320.0, + "3385": 947973824.0, + "3390": 965781312.0, + "3395": 978391424.0, + "3400": 978128512.0, + "3405": 976725248.0, + "3410": 924193024.0, + "3415": 955425536.0, + "3420": 971820672.0, + "3425": 977156416.0, + "3430": 973826816.0, + "3435": 936072704.0, + "3440": 970503232.0, + "3445": 957302336.0, + "3450": 959840384.0, + "3455": 963854720.0, + "3460": 967882624.0, + "3465": 931319936.0, + "3470": 952335488.0, + "3475": 973704640.0, + "3480": 959738368.0, + "3485": 979948800.0, + "3490": 944671360.0, + "3495": 953905216.0, + "3500": 969318272.0, + "3505": 964348544.0, + "3510": 971222272.0, + "3515": 955946880.0, + "3520": 958734080.0, + "3525": 971914944.0, + "3530": 964126528.0, + "3535": 983191936.0, + "3540": 937491328.0, + "3545": 944731776.0, + "3550": 984462720.0, + "3555": 978058944.0, + "3560": 974374784.0, + "3565": 968801216.0, + "3570": 946694976.0, + "3575": 976117248.0, + "3580": 977494464.0, + "3585": 954581248.0, + "3590": 956426496.0, + "3595": 951457280.0, + "3600": 989009024.0, + "3605": 962008832.0, + "3610": 965065792.0, + "3615": 974648192.0, + "3620": 954900480.0, + "3625": 939528256.0, + "3630": 990161536.0, + "3635": 971435904.0, + "3640": 976024896.0, + "3645": 961489024.0, + "3650": 945804160.0, + "3655": 965794816.0, + "3660": 976196480.0, + "3665": 964021760.0, + "3670": 977445760.0, + 
"3675": 943476224.0, + "3680": 958185856.0, + "3685": 964286400.0, + "3690": 982094144.0, + "3695": 963125888.0, + "3700": 950571328.0, + "3705": 947345920.0, + "3710": 982356096.0, + "3715": 972681856.0, + "3720": 976138112.0, + "3725": 964038656.0, + "3730": 948849536.0, + "3735": 967079680.0, + "3740": 960960832.0, + "3745": 969322944.0, + "3750": 963936896.0, + "3755": 953420800.0, + "3760": 976641984.0, + "3765": 979829376.0, + "3770": 972359744.0, + "3775": 972373376.0, + "3780": 952573888.0, + "3785": 960225408.0, + "3790": 985586048.0, + "3795": 969183872.0, + "3800": 957865728.0, + "3805": 972396800.0, + "3810": 954505408.0, + "3815": 974555136.0, + "3820": 963006592.0, + "3825": 962059520.0, + "3830": 969383040.0, + "3835": 934733568.0, + "3840": 971243904.0, + "3845": 986837248.0, + "3850": 968849024.0, + "3855": 965235648.0, + "3860": 948045568.0, + "3865": 975036736.0, + "3870": 985086976.0, + "3875": 983029376.0, + "3880": 963603200.0, + "3885": 953000064.0, + "3890": 960284160.0, + "3895": 960578496.0, + "3900": 984913536.0, + "3905": 976211584.0, + "3910": 987323712.0, + "3915": 946029888.0, + "3920": 974879360.0, + "3925": 961235456.0, + "3930": 976765824.0, + "3935": 978901120.0, + "3940": 950293184.0, + "3945": 960261504.0, + "3950": 974173440.0, + "3955": 972979840.0, + "3960": 974042048.0, + "3965": 950886400.0, + "3970": 980680576.0, + "3975": 960738176.0, + "3980": 977519104.0, + "3985": 962937600.0, + "3990": 972755712.0, + "3995": 953677056.0, + "4000": 974963584.0, + "4005": 971645952.0, + "4010": 978396480.0, + "4015": 971467008.0, + "4020": 950334528.0, + "4025": 968445184.0, + "4030": 997960384.0, + "4035": 978560832.0, + "4040": 959813632.0, + "4045": 939663552.0, + "4050": 944706176.0, + "4055": 980959680.0, + "4060": 977695232.0, + "4065": 975718208.0, + "4070": 942152576.0, + "4075": 945752320.0, + "4080": 988741376.0, + "4085": 962067968.0, + "4090": 983356928.0, + "4095": 986945728.0, + "4100": 957188160.0, + "4105": 954086272.0, 
+ "4110": 966486528.0, + "4115": 976029568.0, + "4120": 983530752.0, + "4125": 960041856.0, + "4130": 967289984.0, + "4135": 971454464.0, + "4140": 963159168.0, + "4145": 956199808.0, + "4150": 960319296.0, + "4155": 946193280.0, + "4160": 968426880.0, + "4165": 970351488.0, + "4170": 972008576.0, + "4175": 955898944.0, + "4180": 940996608.0, + "4185": 968293504.0, + "4190": 968040000.0, + "4195": 989248320.0, + "4200": 962678656.0, + "4205": 960595072.0, + "4210": 971837568.0, + "4215": 974168832.0, + "4220": 981145280.0, + "4225": 975208704.0, + "4230": 952678272.0, + "4235": 958456192.0, + "4240": 966824128.0, + "4245": 961718784.0, + "4250": 965833216.0, + "4255": 958255872.0, + "4260": 949557888.0, + "4265": 964137536.0, + "4270": 978396672.0, + "4275": 975410176.0, + "4280": 962671680.0, + "4285": 951440064.0, + "4290": 980029632.0, + "4295": 968818560.0, + "4300": 958274304.0, + "4305": 966815104.0, + "4310": 939614208.0, + "4315": 949429312.0, + "4320": 984548608.0, + "4325": 982584960.0, + "4330": 974746112.0, + "4335": 949471616.0, + "4340": 959541248.0, + "4345": 956619648.0, + "4350": 979947648.0, + "4355": 968833536.0, + "4360": 966363520.0, + "4365": 941244800.0, + "4370": 969470976.0, + "4375": 972995776.0, + "4380": 966147968.0, + "4385": 972035840.0, + "4390": 954153408.0, + "4395": 951868544.0, + "4400": 973640320.0, + "4405": 972478848.0, + "4410": 967920896.0, + "4415": 958937088.0, + "4420": 960777856.0, + "4425": 976428544.0, + "4430": 965968384.0, + "4435": 975901440.0, + "4440": 962332288.0, + "4445": 954849664.0, + "4450": 978307456.0, + "4455": 960230464.0, + "4460": 968569216.0, + "4465": 968757120.0, + "4470": 944102272.0, + "4475": 951972800.0, + "4480": 978813056.0, + "4485": 968302848.0, + "4490": 957025536.0, + "4495": 938938176.0, + "4500": 953241088.0, + "4505": 977178752.0, + "4510": 978724864.0, + "4515": 962468608.0, + "4520": 958824704.0, + "4525": 958102016.0, + "4530": 964416384.0, + "4535": 976616576.0, + "4540": 
976871680.0, + "4545": 970144640.0, + "4550": 953199232.0, + "4555": 959578048.0, + "4560": 972472384.0, + "4565": 973467776.0, + "4570": 978947072.0, + "4575": 957798080.0, + "4580": 963180800.0, + "4585": 957380608.0, + "4590": 986567936.0, + "4595": 960214592.0, + "4600": 952255872.0, + "4605": 959274432.0, + "4610": 963772160.0, + "4615": 958012672.0, + "4620": 960256320.0, + "4625": 973835648.0, + "4630": 944496000.0, + "4635": 977013440.0, + "4640": 960259968.0, + "4645": 981992512.0, + "4650": 962375232.0, + "4655": 939472128.0, + "4660": 964000832.0, + "4665": 962565760.0, + "4670": 976683904.0, + "4675": 963478144.0, + "4680": 957451840.0, + "4685": 949662656.0, + "4690": 956937856.0, + "4695": 969950592.0, + "4700": 961190656.0, + "4705": 970768960.0, + "4710": 934469120.0, + "4715": 970426496.0, + "4720": 966336768.0, + "4725": 980291584.0, + "4730": 965826176.0, + "4735": 937835776.0, + "4740": 960112128.0, + "4745": 976009856.0, + "4750": 967935488.0, + "4755": 984935680.0, + "4760": 959148800.0, + "4765": 955200960.0, + "4770": 958584768.0, + "4775": 991080192.0, + "4780": 976757824.0, + "4785": 967570560.0, + "4790": 943719616.0, + "4795": 955799936.0, + "4800": 967697152.0, + "4805": 976492800.0, + "4810": 965148224.0, + "4815": 957982720.0, + "4820": 973943872.0, + "4825": 961432704.0, + "4830": 962614016.0, + "4835": 972544512.0, + "4840": 948901888.0, + "4845": 965715584.0, + "4850": 960293184.0, + "4855": 964168320.0, + "4860": 963038464.0, + "4865": 967507072.0, + "4870": 957207808.0, + "4875": 983562112.0, + "4880": 957035712.0, + "4885": 977040896.0, + "4890": 959740608.0, + "4895": 942201728.0, + "4900": 973663296.0, + "4905": 975201920.0, + "4910": 969245376.0, + "4915": 970078336.0, + "4920": 941145280.0, + "4925": 954784768.0, + "4930": 977046848.0, + "4935": 963774912.0, + "4940": 972611648.0, + "4945": 959971008.0, + "4950": 940758016.0, + "4955": 967988160.0, + "4960": 976764672.0, + "4965": 960972608.0, + "4970": 958543744.0, + 
"4975": 933702400.0, + "4980": 960813056.0, + "4985": 962963392.0, + "4990": 963553152.0, + "4995": 986284480.0, + "5000": 940703360.0, + "5005": 968887552.0, + "5010": 970308096.0, + "5015": 965208704.0, + "5020": 966690944.0, + "5025": 949385920.0, + "5030": 953442560.0, + "5035": 967304320.0, + "5040": 955678272.0, + "5045": 969118336.0, + "5050": 953356416.0, + "5055": 954799616.0, + "5060": 963030400.0, + "5065": 952183168.0, + "5070": 973597952.0, + "5075": 978556800.0, + "5080": 942865920.0, + "5085": 965826112.0, + "5090": 972856384.0, + "5095": 964435200.0, + "5100": 958367808.0, + "5105": 965341952.0, + "5110": 950402368.0, + "5115": 972383616.0, + "5120": 960425728.0, + "5125": 969731200.0, + "5130": 938820736.0, + "5135": 943697216.0, + "5140": 969888896.0, + "5145": 968603200.0, + "5150": 970638336.0, + "5155": 972598912.0, + "5160": 926527232.0, + "5165": 961629184.0, + "5170": 966850304.0, + "5175": 966061312.0, + "5180": 963668224.0, + "5185": 930852608.0, + "5190": 949780992.0, + "5195": 972447104.0, + "5200": 973784000.0, + "5205": 968213824.0, + "5210": 960503936.0, + "5215": 928804608.0, + "5220": 979174272.0, + "5225": 984779648.0, + "5230": 975024896.0, + "5235": 975043648.0, + "5240": 944313472.0, + "5245": 970787328.0, + "5250": 972415104.0, + "5255": 966871872.0, + "5260": 976729728.0, + "5265": 942223360.0, + "5270": 969202304.0, + "5275": 970110528.0, + "5280": 962826112.0, + "5285": 964096896.0, + "5290": 932539008.0, + "5295": 951751808.0, + "5300": 975588608.0, + "5305": 951842688.0, + "5310": 968040512.0, + "5315": 955720064.0, + "5320": 950885120.0, + "5325": 973053056.0, + "5330": 967782656.0, + "5335": 967585280.0, + "5340": 966443584.0, + "5345": 962912384.0, + "5350": 978899968.0, + "5355": 972204288.0, + "5360": 963818368.0, + "5365": 965210752.0, + "5370": 947777152.0, + "5375": 948783488.0, + "5380": 967150208.0, + "5385": 980441984.0, + "5390": 965277568.0, + "5395": 955074816.0, + "5400": 948262656.0, + "5405": 974243840.0, 
+ "5410": 967782976.0, + "5415": 976050816.0, + "5420": 967358464.0, + "5425": 937314752.0, + "5430": 963808192.0, + "5435": 971801344.0, + "5440": 969017856.0, + "5445": 957391360.0, + "5450": 919358848.0, + "5455": 952027648.0, + "5460": 962326656.0, + "5465": 978770752.0, + "5470": 980846592.0, + "5475": 941512960.0, + "5480": 955731392.0, + "5485": 964854272.0, + "5490": 975913088.0, + "5495": 962719104.0, + "5500": 971083136.0, + "5505": 956855296.0, + "5510": 968464000.0, + "5515": 945392640.0, + "5520": 963158272.0, + "5525": 975747904.0, + "5530": 936486784.0, + "5535": 970620928.0, + "5540": 960246272.0, + "5545": 972023808.0, + "5550": 967775104.0, + "5555": 955895808.0, + "5560": 954422592.0, + "5565": 968691520.0, + "5570": 945089216.0, + "5575": 960418688.0, + "5580": 960526976.0, + "5585": 959396736.0, + "5590": 977582848.0, + "5595": 975146240.0, + "5600": 962923904.0, + "5605": 964054400.0, + "5610": 943058944.0, + "5615": 966442240.0, + "5620": 963212480.0, + "5625": 982264192.0, + "5630": 975889600.0, + "5635": 957163136.0, + "5640": 951369664.0, + "5645": 967695616.0, + "5650": 978997888.0, + "5655": 983425152.0, + "5660": 956272384.0, + "5665": 953364864.0, + "5670": 966013248.0, + "5675": 967555456.0, + "5680": 978488192.0, + "5685": 961909888.0, + "5690": 935774208.0, + "5695": 963653376.0, + "5700": 952427392.0, + "5705": 974427328.0, + "5710": 971157440.0, + "5715": 946073792.0, + "5720": 974930304.0, + "5725": 967388288.0, + "5730": 978427520.0, + "5735": 964795648.0, + "5740": 943538560.0, + "5745": 971106368.0, + "5750": 981828864.0, + "5755": 956631808.0, + "5760": 963585280.0, + "5765": 957608640.0, + "5770": 955601216.0, + "5775": 970675776.0, + "5780": 962738752.0, + "5785": 970490624.0, + "5790": 974666624.0, + "5795": 949705088.0, + "5800": 965914048.0, + "5805": 968700800.0, + "5810": 975987200.0, + "5815": 969991936.0, + "5820": 936229632.0, + "5825": 969217152.0, + "5830": 977565696.0, + "5835": 974930944.0, + "5840": 
962975936.0, + "5845": 968653120.0, + "5850": 942886528.0, + "5855": 975877120.0, + "5860": 979501056.0, + "5865": 978198336.0, + "5870": 968715008.0, + "5875": 942132480.0, + "5880": 964251712.0, + "5885": 974611072.0, + "5890": 972438784.0, + "5895": 965571968.0, + "5900": 941212992.0, + "5905": 961669376.0, + "5910": 958372608.0, + "5915": 968039040.0, + "5920": 977222784.0, + "5925": 959432832.0, + "5930": 946590336.0, + "5935": 952272704.0, + "5940": 977613312.0, + "5945": 984918272.0, + "5950": 980554688.0, + "5955": 934946432.0, + "5960": 961601984.0, + "5965": 965853312.0, + "5970": 970501248.0, + "5975": 961882112.0, + "5980": 958110464.0, + "5985": 964414784.0, + "5990": 973356544.0, + "5995": 955833984.0, + "6000": 955485696.0, + "6005": 961293504.0, + "6010": 952645440.0, + "6015": 974415872.0, + "6020": 978156224.0, + "6025": 972178944.0, + "6030": 955398272.0, + "6035": 946868864.0, + "6040": 962679808.0, + "6045": 983656832.0, + "6050": 956588800.0, + "6055": 963322816.0, + "6060": 945595392.0, + "6065": 958394496.0, + "6070": 978304896.0, + "6075": 978074880.0, + "6080": 957444096.0, + "6085": 947629952.0, + "6090": 953646976.0, + "6095": 964623552.0, + "6100": 979801088.0, + "6105": 971049216.0, + "6110": 961718080.0, + "6115": 943655552.0, + "6120": 968487808.0, + "6125": 960593088.0, + "6130": 983892416.0, + "6135": 960969600.0, + "6140": 958597248.0, + "6145": 971126656.0, + "6150": 968345856.0, + "6155": 974893056.0, + "6160": 977191168.0, + "6165": 952755968.0, + "6170": 951080576.0, + "6175": 963409152.0, + "6180": 969340416.0, + "6185": 966259200.0, + "6190": 963511168.0, + "6195": 947045312.0, + "6200": 969109504.0, + "6205": 967150016.0, + "6210": 959082176.0, + "6215": 972729600.0, + "6220": 936227072.0, + "6225": 978544512.0, + "6230": 975876224.0, + "6235": 971570944.0, + "6240": 965809728.0, + "6245": 955906304.0, + "6250": 956425216.0, + "6255": 972970624.0, + "6260": 978730816.0, + "6265": 974782336.0, + "6270": 958695808.0, + 
"6275": 963775040.0, + "6280": 973067264.0, + "6285": 965963008.0, + "6290": 970720576.0, + "6295": 987342912.0, + "6300": 947369216.0, + "6305": 964586496.0, + "6310": 978893696.0, + "6315": 978454144.0, + "6320": 971650560.0, + "6325": 922867200.0, + "6330": 959047104.0, + "6335": 974670464.0, + "6340": 984554240.0, + "6345": 966774720.0, + "6350": 944513408.0, + "6355": 957805696.0, + "6360": 972588800.0, + "6365": 972220608.0, + "6370": 958906048.0, + "6375": 967154496.0, + "6380": 951240640.0, + "6385": 973634432.0, + "6390": 965183552.0, + "6395": 974971584.0, + "6400": 984093056.0, + "6405": 943782784.0, + "6410": 977277312.0, + "6415": 971329280.0, + "6420": 956394752.0, + "6425": 960816512.0, + "6430": 957378304.0, + "6435": 959965056.0, + "6440": 968701376.0, + "6445": 973296128.0, + "6450": 974207936.0, + "6455": 961958528.0, + "6460": 941006720.0, + "6465": 974272768.0, + "6470": 979691584.0, + "6475": 960665216.0, + "6480": 967482240.0, + "6485": 948620800.0, + "6490": 970648896.0, + "6495": 988281728.0, + "6500": 980382464.0, + "6505": 971875712.0, + "6510": 951611008.0, + "6515": 957443328.0, + "6520": 978938496.0, + "6525": 978842496.0, + "6530": 973296256.0, + "6535": 967939328.0, + "6540": 950123456.0, + "6545": 966146752.0, + "6550": 979249280.0, + "6555": 967058688.0, + "6560": 975182336.0, + "6565": 949290112.0, + "6570": 951981440.0, + "6575": 962234560.0, + "6580": 975681280.0, + "6585": 979275392.0, + "6590": 948979200.0, + "6595": 961632640.0, + "6600": 960998144.0, + "6605": 961411968.0, + "6610": 985152000.0, + "6615": 959273408.0, + "6620": 944343296.0, + "6625": 970979712.0, + "6630": 971369024.0, + "6635": 964018688.0, + "6640": 959501376.0, + "6645": 950697984.0, + "6650": 978663936.0, + "6655": 965815168.0, + "6660": 968359488.0, + "6665": 968707584.0, + "6670": 932667904.0, + "6675": 970711552.0, + "6680": 969041152.0, + "6685": 958538048.0, + "6690": 956117248.0, + "6695": 955316864.0, + "6700": 962073984.0, + "6705": 978985088.0, 
+ "6710": 970864576.0, + "6715": 966629824.0, + "6720": 973916544.0, + "6725": 941781952.0, + "6730": 979276864.0, + "6735": 994439680.0, + "6740": 976625664.0, + "6745": 974443200.0, + "6750": 938891264.0, + "6755": 977391616.0, + "6760": 969598208.0, + "6765": 978356480.0, + "6770": 975059072.0, + "6775": 943254272.0, + "6780": 947074624.0, + "6785": 975161856.0, + "6790": 960433984.0, + "6795": 975886336.0, + "6800": 972829312.0, + "6805": 946376320.0, + "6810": 958179712.0, + "6815": 970549696.0, + "6820": 977619456.0, + "6825": 969243264.0, + "6830": 950263168.0, + "6835": 981328512.0, + "6840": 982974272.0, + "6845": 948935808.0, + "6850": 965416512.0, + "6855": 953916544.0, + "6860": 979025792.0, + "6865": 983853312.0, + "6870": 964855232.0, + "6875": 978762496.0, + "6880": 950433920.0, + "6885": 958312448.0, + "6890": 960313472.0, + "6895": 965972928.0, + "6900": 985248128.0, + "6905": 968528640.0, + "6910": 949863808.0, + "6915": 971062208.0, + "6920": 967351744.0, + "6925": 964971776.0, + "6930": 964557376.0, + "6935": 952123328.0, + "6940": 963032320.0, + "6945": 986507520.0, + "6950": 973327232.0, + "6955": 964531968.0, + "6960": 940003712.0, + "6965": 974492160.0, + "6970": 978420224.0, + "6975": 984993472.0, + "6980": 982692096.0, + "6985": 959667584.0, + "6990": 945238272.0, + "6995": 987459264.0, + "7000": 963220480.0, + "7005": 962812480.0, + "7010": 984947712.0, + "7015": 945496384.0, + "7020": 983131008.0, + "7025": 968731392.0, + "7030": 953218944.0, + "7035": 982774144.0, + "7040": 950460160.0, + "7045": 955747072.0, + "7050": 959962240.0, + "7055": 963622912.0, + "7060": 976790272.0, + "7065": 968059264.0, + "7070": 953450048.0, + "7075": 956437504.0, + "7080": 969003520.0, + "7085": 965390272.0, + "7090": 969313408.0, + "7095": 959464128.0, + "7100": 973246080.0, + "7105": 972939648.0, + "7110": 970071104.0, + "7115": 958216192.0, + "7120": 949039872.0, + "7125": 963264320.0, + "7130": 971177472.0, + "7135": 964579328.0, + "7140": 
961276800.0, + "7145": 930369536.0, + "7150": 946022400.0, + "7155": 990835520.0, + "7160": 968320896.0, + "7165": 956643200.0, + "7170": 968043008.0, + "7175": 955791488.0, + "7180": 957939072.0, + "7185": 984720896.0, + "7190": 978552640.0, + "7195": 973603584.0, + "7200": 935673600.0, + "7205": 957390912.0, + "7210": 967078464.0, + "7215": 969339904.0, + "7220": 982085632.0, + "7225": 928925056.0, + "7230": 949390144.0, + "7235": 966846336.0, + "7240": 966649472.0, + "7245": 967233792.0, + "7250": 949331392.0, + "7255": 957081344.0, + "7260": 970027584.0, + "7265": 974604544.0, + "7270": 959689920.0, + "7275": 959067136.0, + "7280": 956852096.0, + "7285": 977440768.0, + "7290": 976980608.0, + "7295": 962803904.0, + "7300": 975069824.0, + "7305": 963961984.0, + "7310": 977308032.0, + "7315": 966674560.0, + "7320": 974209536.0, + "7325": 966674496.0, + "7330": 959420800.0, + "7335": 963904000.0, + "7340": 977262336.0, + "7345": 967447680.0, + "7350": 984572288.0, + "7355": 959405568.0, + "7360": 948622016.0, + "7365": 972354432.0, + "7370": 982372864.0, + "7375": 962879360.0, + "7380": 964038784.0, + "7385": 948431488.0, + "7390": 963677696.0, + "7395": 957965440.0, + "7400": 969640576.0, + "7405": 987491264.0, + "7410": 951945152.0, + "7415": 950271936.0, + "7420": 966599680.0, + "7425": 982952832.0, + "7430": 965750400.0, + "7435": 973161024.0, + "7440": 936884736.0, + "7445": 968767616.0, + "7450": 980462208.0, + "7455": 971280896.0, + "7460": 972147904.0, + "7465": 939168000.0, + "7470": 971950592.0, + "7475": 957960000.0, + "7480": 969186432.0, + "7485": 961158592.0, + "7490": 934207552.0, + "7495": 957347008.0, + "7500": 968909696.0, + "7505": 969939584.0, + "7510": 971938176.0, + "7515": 979643968.0, + "7520": 951674240.0, + "7525": 970150400.0, + "7530": 954250368.0, + "7535": 971002624.0, + "7540": 979351424.0, + "7545": 959002112.0, + "7550": 959936640.0, + "7555": 960035136.0, + "7560": 969951360.0, + "7565": 954850304.0, + "7570": 942336704.0, + 
"7575": 965396736.0, + "7580": 981979904.0, + "7585": 978810240.0, + "7590": 970279296.0, + "7595": 949956608.0, + "7600": 945846592.0, + "7605": 982612096.0, + "7610": 969044480.0, + "7615": 988564736.0, + "7620": 957000192.0, + "7625": 941454976.0, + "7630": 971133056.0, + "7635": 984264576.0, + "7640": 983299712.0, + "7645": 968097472.0, + "7650": 959086656.0, + "7655": 962007040.0, + "7660": 969076928.0, + "7665": 978171584.0, + "7670": 975401856.0, + "7675": 975338368.0, + "7680": 942762112.0, + "7685": 959972096.0, + "7690": 975364608.0, + "7695": 982221568.0, + "7700": 979086976.0, + "7705": 940546432.0, + "7710": 974737280.0, + "7715": 978988352.0, + "7720": 967479232.0, + "7725": 960220928.0, + "7730": 942869632.0, + "7735": 968105792.0, + "7740": 980045568.0, + "7745": 963941632.0, + "7750": 963412288.0, + "7755": 959505408.0, + "7760": 970593920.0, + "7765": 970864320.0, + "7770": 962472832.0, + "7775": 982167424.0, + "7780": 964654592.0, + "7785": 959739904.0, + "7790": 968466944.0, + "7795": 968762112.0, + "7800": 972053184.0, + "7805": 968564992.0, + "7810": 945972224.0, + "7815": 963664576.0, + "7820": 974677184.0, + "7825": 963635712.0, + "7830": 957630336.0, + "7835": 949710016.0, + "7840": 957638784.0, + "7845": 953894016.0, + "7850": 979439872.0, + "7855": 987051200.0, + "7860": 947830016.0, + "7865": 949621120.0, + "7870": 965052160.0, + "7875": 975974016.0, + "7880": 968774208.0, + "7885": 969494720.0, + "7890": 952188608.0, + "7895": 974695424.0, + "7900": 963989504.0, + "7905": 964420608.0, + "7910": 965869184.0, + "7915": 943378176.0, + "7920": 951312064.0, + "7925": 969302016.0, + "7930": 964944256.0, + "7935": 984349376.0, + "7940": 964437184.0, + "7945": 951030016.0, + "7950": 962343936.0, + "7955": 980155648.0, + "7960": 963591552.0, + "7965": 952763776.0, + "7970": 951961984.0, + "7975": 969783232.0, + "7980": 965072384.0, + "7985": 959288192.0, + "7990": 968166144.0, + "7995": 946780480.0, + "8000": 962926784.0, + "8005": 981072256.0, 
+ "8010": 965470912.0, + "8015": 982932416.0, + "8020": 960808768.0, + "8025": 965372544.0, + "8030": 958532992.0, + "8035": 975850880.0, + "8040": 960656128.0, + "8045": 948529280.0, + "8050": 959733888.0, + "8055": 979126144.0, + "8060": 969222912.0, + "8065": 958210240.0, + "8070": 963956032.0, + "8075": 941995392.0, + "8080": 966046912.0, + "8085": 966803776.0, + "8090": 983576064.0, + "8095": 988772800.0, + "8100": 966714752.0, + "8105": 944461696.0, + "8110": 968896000.0, + "8115": 985733696.0, + "8120": 974861568.0, + "8125": 964166528.0, + "8130": 966678400.0, + "8135": 967760256.0, + "8140": 963773824.0, + "8145": 995469568.0, + "8150": 973350080.0, + "8155": 938673152.0, + "8160": 964474496.0, + "8165": 973369472.0, + "8170": 968755200.0, + "8175": 961722624.0, + "8180": 936287232.0, + "8185": 963154048.0, + "8190": 968499840.0, + "8195": 977409792.0, + "8200": 956736768.0, + "8205": 960825152.0, + "8210": 946704320.0, + "8215": 982718592.0, + "8220": 988075776.0, + "8225": 966563072.0, + "8230": 962898240.0, + "8235": 933923072.0, + "8240": 980574848.0, + "8245": 976655872.0, + "8250": 964328128.0, + "8255": 977591424.0, + "8260": 957202304.0, + "8265": 982956800.0, + "8270": 953142528.0, + "8275": 974529792.0, + "8280": 974647808.0, + "8285": 953322048.0, + "8290": 939886464.0, + "8295": 981518720.0, + "8300": 973431872.0, + "8305": 978196352.0, + "8310": 951561920.0, + "8315": 938326912.0, + "8320": 977595072.0, + "8325": 968155456.0, + "8330": 990191552.0, + "8335": 976103488.0, + "8340": 947710464.0, + "8345": 971095872.0, + "8350": 970304128.0, + "8355": 975432832.0, + "8360": 979810560.0, + "8365": 932929344.0, + "8370": 965587200.0, + "8375": 980236416.0, + "8380": 965580416.0, + "8385": 973086464.0, + "8390": 962870912.0, + "8395": 951507392.0, + "8400": 972631168.0, + "8405": 951465728.0, + "8410": 960777792.0, + "8415": 965439360.0, + "8420": 942011776.0, + "8425": 968658048.0, + "8430": 960766080.0, + "8435": 966214144.0, + "8440": 
970051712.0, + "8445": 952774912.0, + "8450": 984547584.0, + "8455": 990665216.0, + "8460": 968987456.0, + "8465": 967294720.0, + "8470": 963132736.0, + "8475": 942846592.0, + "8480": 987200512.0, + "8485": 979788480.0, + "8490": 992367168.0, + "8495": 971569856.0, + "8500": 951234176.0, + "8505": 983252608.0, + "8510": 974223552.0, + "8515": 968555328.0, + "8520": 962099200.0, + "8525": 945303040.0, + "8530": 984622528.0, + "8535": 978460928.0, + "8540": 967298304.0, + "8545": 968855680.0, + "8550": 942065152.0, + "8555": 972102656.0, + "8560": 958762624.0, + "8565": 976119360.0, + "8570": 975011264.0, + "8575": 971620608.0, + "8580": 932116864.0, + "8585": 966041920.0, + "8590": 979227200.0, + "8595": 978896640.0, + "8600": 984223104.0, + "8605": 957447552.0, + "8610": 983899008.0, + "8615": 977896448.0, + "8620": 962810816.0, + "8625": 979565440.0, + "8630": 943517952.0, + "8635": 961760384.0, + "8640": 973499008.0, + "8645": 970415616.0, + "8650": 969116672.0, + "8655": 971144000.0, + "8660": 944569088.0, + "8665": 987124736.0, + "8670": 960697792.0, + "8675": 974081792.0, + "8680": 962718080.0, + "8685": 955789568.0, + "8690": 978378368.0, + "8695": 968400640.0, + "8700": 972974720.0, + "8705": 974037760.0, + "8710": 947079872.0, + "8715": 973847232.0, + "8720": 958228864.0, + "8725": 979063488.0, + "8730": 985832448.0, + "8735": 951944576.0, + "8740": 940816000.0, + "8745": 987874240.0, + "8750": 972060672.0, + "8755": 971515904.0, + "8760": 965372928.0, + "8765": 934839296.0, + "8770": 986313344.0, + "8775": 970021952.0, + "8780": 967712704.0, + "8785": 962283008.0, + "8790": 947736832.0, + "8795": 968976000.0, + "8800": 970581504.0, + "8805": 973301376.0, + "8810": 983639104.0, + "8815": 951717120.0, + "8820": 939956864.0, + "8825": 964321920.0, + "8830": 981076992.0, + "8835": 971459840.0, + "8840": 979418368.0, + "8845": 951370560.0, + "8850": 986478080.0, + "8855": 971028992.0, + "8860": 961827264.0, + "8865": 957370624.0, + "8870": 945910528.0, + 
"8875": 968308736.0, + "8880": 983829568.0, + "8885": 971367680.0, + "8890": 969993344.0, + "8895": 952713792.0, + "8900": 961952256.0, + "8905": 976987136.0, + "8910": 981570048.0, + "8915": 980806208.0, + "8920": 967926528.0, + "8925": 939821440.0, + "8930": 970123264.0, + "8935": 963770368.0, + "8940": 977559552.0, + "8945": 981912960.0, + "8950": 945599360.0, + "8955": 972731008.0, + "8960": 973885056.0, + "8965": 973850112.0, + "8970": 966361728.0, + "8975": 937235264.0, + "8980": 952251776.0, + "8985": 978014016.0, + "8990": 967350656.0, + "8995": 980370176.0, + "9000": 952395840.0, + "9005": 950832768.0, + "9010": 975267712.0, + "9015": 982975232.0, + "9020": 959192448.0, + "9025": 978796352.0, + "9030": 953629376.0, + "9035": 968787200.0, + "9040": 978294400.0, + "9045": 968522496.0, + "9050": 983036480.0, + "9055": 948155392.0, + "9060": 956478848.0, + "9065": 969792000.0, + "9070": 967683456.0, + "9075": 980881152.0, + "9080": 952258304.0, + "9085": 971295232.0, + "9090": 963557184.0, + "9095": 968166016.0, + "9100": 974511104.0, + "9105": 960145344.0, + "9110": 948329728.0, + "9115": 956402816.0, + "9120": 985494912.0, + "9125": 963296384.0, + "9130": 958346432.0, + "9135": 951620288.0, + "9140": 967284224.0, + "9145": 976844544.0, + "9150": 987023232.0, + "9155": 976968704.0, + "9160": 957694208.0, + "9165": 950415872.0, + "9170": 988382016.0, + "9175": 971692480.0, + "9180": 967374912.0, + "9185": 955127040.0, + "9190": 956710656.0, + "9195": 965721472.0, + "9200": 968597760.0, + "9205": 967291904.0, + "9210": 984681216.0, + "9215": 931374400.0, + "9220": 949709440.0, + "9225": 971070592.0, + "9230": 971195264.0, + "9235": 971746176.0, + "9240": 959830400.0, + "9245": 964026752.0, + "9250": 961097344.0, + "9255": 983241472.0, + "9260": 979074048.0, + "9265": 953027136.0, + "9270": 949216512.0, + "9275": 978441152.0, + "9280": 977570688.0, + "9285": 962618944.0, + "9290": 979501440.0, + "9295": 958696192.0, + "9300": 965544960.0, + "9305": 969071744.0, 
+ "9310": 973001216.0, + "9315": 976390912.0, + "9320": 948280320.0, + "9325": 979885568.0, + "9330": 978062592.0, + "9335": 975746688.0, + "9340": 960410432.0, + "9345": 943464448.0, + "9350": 952934016.0, + "9355": 963355520.0, + "9360": 960499136.0, + "9365": 983692608.0, + "9370": 982297984.0, + "9375": 942116224.0, + "9380": 982577024.0, + "9385": 985393536.0, + "9390": 973418240.0, + "9395": 977974528.0, + "9400": 938173312.0, + "9405": 968168192.0, + "9410": 981667648.0, + "9415": 991883392.0, + "9420": 960392064.0, + "9425": 956951872.0, + "9430": 939298432.0, + "9435": 974398784.0, + "9440": 959543808.0, + "9445": 973733120.0, + "9450": 961915008.0, + "9455": 946070656.0, + "9460": 978455360.0, + "9465": 988236032.0, + "9470": 963308672.0, + "9475": 983797504.0, + "9480": 931000960.0, + "9485": 987539456.0, + "9490": 963408832.0, + "9495": 972525184.0, + "9500": 982379136.0, + "9505": 970260160.0, + "9510": 964553472.0, + "9515": 957060224.0, + "9520": 948433984.0, + "9525": 965556608.0, + "9530": 958432128.0, + "9535": 950951168.0, + "9540": 954255872.0, + "9545": 979665920.0, + "9550": 956061696.0, + "9555": 953442368.0, + "9560": 958456448.0, + "9565": 970137984.0, + "9570": 977209856.0, + "9575": 958959744.0, + "9580": 963097600.0, + "9585": 946270400.0, + "9590": 948540928.0, + "9595": 966947008.0, + "9600": 984798208.0, + "9605": 985302016.0, + "9610": 943246592.0, + "9615": 952814016.0, + "9620": 980981696.0, + "9625": 978266112.0, + "9630": 969978496.0, + "9635": 974721536.0, + "9640": 940294272.0, + "9645": 961910080.0, + "9650": 970943744.0, + "9655": 987569792.0, + "9660": 963184320.0, + "9665": 950168192.0, + "9670": 965781632.0, + "9675": 963111936.0, + "9680": 964954816.0, + "9685": 986709120.0, + "9690": 940607744.0, + "9695": 950447616.0, + "9700": 975738816.0, + "9705": 972589056.0, + "9710": 967656704.0, + "9715": 971132416.0, + "9720": 940416640.0, + "9725": 965998080.0, + "9730": 973718784.0, + "9735": 974172928.0, + "9740": 
971431104.0, + "9745": 950998912.0, + "9750": 979592064.0, + "9755": 970403840.0, + "9760": 968064640.0, + "9765": 963786880.0, + "9770": 952209408.0, + "9775": 956731136.0, + "9780": 970107776.0, + "9785": 958344448.0, + "9790": 961042816.0, + "9795": 958213248.0, + "9800": 949058560.0, + "9805": 962196992.0, + "9810": 978331520.0, + "9815": 977753600.0, + "9820": 982361216.0, + "9825": 939473536.0, + "9830": 969626048.0, + "9835": 972510208.0, + "9840": 971687936.0, + "9845": 967061760.0, + "9850": 946544128.0, + "9855": 957252992.0, + "9860": 987359744.0, + "9865": 970127168.0, + "9870": 989524736.0, + "9875": 957073408.0, + "9880": 930847232.0, + "9885": 963629696.0, + "9890": 972392448.0, + "9895": 983574528.0, + "9900": 956631744.0, + "9905": 939009728.0, + "9910": 978860288.0, + "9915": 973504960.0, + "9920": 944413952.0, + "9925": 962928512.0, + "9930": 947381120.0, + "9935": 960634880.0, + "9940": 966066752.0, + "9945": 958494784.0, + "9950": 964043072.0, + "9955": 943413824.0, + "9960": 967004224.0, + "9965": 983596800.0, + "9970": 966349184.0, + "9975": 963217664.0, + "9980": 980808320.0, + "9985": 941910144.0, + "9990": 976343936.0, + "9995": 982828672.0, + "10000": 972023552.0, + "10005": 969480960.0, + "10010": 944330624.0, + "10015": 983276288.0, + "10020": 978455552.0, + "10025": 979844096.0, + "10030": 971601280.0, + "10035": 946594944.0, + "10040": 950559168.0, + "10045": 978050944.0, + "10050": 985832832.0, + "10055": 990444288.0, + "10060": 958898048.0, + "10065": 947517056.0, + "10070": 967166656.0, + "10075": 979106432.0, + "10080": 971621632.0, + "10085": 974302720.0, + "10090": 944020544.0, + "10095": 962781696.0, + "10100": 972061120.0, + "10105": 975506304.0, + "10110": 971932352.0, + "10115": 948612096.0, + "10120": 962324864.0, + "10125": 974035904.0, + "10130": 980282240.0, + "10135": 972211584.0, + "10140": 957941568.0, + "10145": 933985280.0, + "10150": 973634752.0, + "10155": 969765824.0, + "10160": 962060800.0, + "10165": 
974346112.0, + "10170": 944527424.0, + "10175": 978734144.0, + "10180": 983909952.0, + "10185": 978637312.0, + "10190": 955692928.0, + "10195": 936523904.0, + "10200": 988193152.0 + } + }, + "mem-allocated-bytes": { + "start_step": 1, + "end_step": 10200, + "step_interval": 5, + "values": { + "1": 13270018048.0, + "5": 13270018048.0, + "10": 13270018048.0, + "15": 13270018048.0, + "20": 13270018048.0, + "25": 13270018048.0, + "30": 13270018048.0, + "35": 13270018048.0, + "40": 13270018048.0, + "45": 13270018048.0, + "50": 13270018048.0, + "55": 13270018048.0, + "60": 13270018048.0, + "65": 13270018048.0, + "70": 13270018048.0, + "75": 13270018048.0, + "80": 13270018048.0, + "85": 13270018048.0, + "90": 13270018048.0, + "95": 13270018048.0, + "100": 13270018048.0, + "105": 13270018048.0, + "110": 13270018048.0, + "115": 13270018048.0, + "120": 13270018048.0, + "125": 13270018048.0, + "130": 13270018048.0, + "135": 13270018048.0, + "140": 13270018048.0, + "145": 13270018048.0, + "150": 13270018048.0, + "155": 13270018048.0, + "160": 13270018048.0, + "165": 13270018048.0, + "170": 13270018048.0, + "175": 13270018048.0, + "180": 13270018048.0, + "185": 13270018048.0, + "190": 13270018048.0, + "195": 13270018048.0, + "200": 13270018048.0, + "205": 13270018048.0, + "210": 13270018048.0, + "215": 13270018048.0, + "220": 13270018048.0, + "225": 13270018048.0, + "230": 13270018048.0, + "235": 13270018048.0, + "240": 13270018048.0, + "245": 13270018048.0, + "250": 13270018048.0, + "255": 13270018048.0, + "260": 13270018048.0, + "265": 13270018048.0, + "270": 13270018048.0, + "275": 13270018048.0, + "280": 13270018048.0, + "285": 13270018048.0, + "290": 13270018048.0, + "295": 13270018048.0, + "300": 13270018048.0, + "305": 13270018048.0, + "310": 13270018048.0, + "315": 13270018048.0, + "320": 13270018048.0, + "325": 13270018048.0, + "330": 13270018048.0, + "335": 13270018048.0, + "340": 13270018048.0, + "345": 13270018048.0, + "350": 13270018048.0, + "355": 13270018048.0, + 
"360": 13270018048.0, + "365": 13270018048.0, + "370": 13270018048.0, + "375": 13270018048.0, + "380": 13270018048.0, + "385": 13270018048.0, + "390": 13270018048.0, + "395": 13270018048.0, + "400": 13270018048.0, + "405": 13270018048.0, + "410": 13270018048.0, + "415": 13270018048.0, + "420": 13270018048.0, + "425": 13270018048.0, + "430": 13270018048.0, + "435": 13270018048.0, + "440": 13270018048.0, + "445": 13270018048.0, + "450": 13270018048.0, + "455": 13270018048.0, + "460": 13270018048.0, + "465": 13270018048.0, + "470": 13270018048.0, + "475": 13270018048.0, + "480": 13270018048.0, + "485": 13270018048.0, + "490": 13270018048.0, + "495": 13270018048.0, + "500": 13270018048.0, + "505": 13270018048.0, + "510": 13270018048.0, + "515": 13270018048.0, + "520": 13270018048.0, + "525": 13270018048.0, + "530": 13270018048.0, + "535": 13270018048.0, + "540": 13270018048.0, + "545": 13270018048.0, + "550": 13270018048.0, + "555": 13270018048.0, + "560": 13270018048.0, + "565": 13270018048.0, + "570": 13270018048.0, + "575": 13270018048.0, + "580": 13270018048.0, + "585": 13270018048.0, + "590": 13270018048.0, + "595": 13270018048.0, + "600": 13270018048.0, + "605": 13270018048.0, + "610": 13270018048.0, + "615": 13270018048.0, + "620": 13270018048.0, + "625": 13270018048.0, + "630": 13270018048.0, + "635": 13270018048.0, + "640": 13270018048.0, + "645": 13270018048.0, + "650": 13270018048.0, + "655": 13270018048.0, + "660": 13270018048.0, + "665": 13270018048.0, + "670": 13270018048.0, + "675": 13270018048.0, + "680": 13270018048.0, + "685": 13270018048.0, + "690": 13270018048.0, + "695": 13270018048.0, + "700": 13270018048.0, + "705": 13270018048.0, + "710": 13270018048.0, + "715": 13270018048.0, + "720": 13270018048.0, + "725": 13270018048.0, + "730": 13270018048.0, + "735": 13270018048.0, + "740": 13270018048.0, + "745": 13270018048.0, + "750": 13270018048.0, + "755": 13270018048.0, + "760": 13270018048.0, + "765": 13270018048.0, + "770": 13270018048.0, + "775": 
13270018048.0, + "780": 13270018048.0, + "785": 13270018048.0, + "790": 13270018048.0, + "795": 13270018048.0, + "800": 13270018048.0, + "805": 13270018048.0, + "810": 13270018048.0, + "815": 13270018048.0, + "820": 13270018048.0, + "825": 13270018048.0, + "830": 13270018048.0, + "835": 13270018048.0, + "840": 13270018048.0, + "845": 13270018048.0, + "850": 13270018048.0, + "855": 13270018048.0, + "860": 13270018048.0, + "865": 13270018048.0, + "870": 13270018048.0, + "875": 13270018048.0, + "880": 13270018048.0, + "885": 13270018048.0, + "890": 13270018048.0, + "895": 13270018048.0, + "900": 13270018048.0, + "905": 13270018048.0, + "910": 13270018048.0, + "915": 13270018048.0, + "920": 13270018048.0, + "925": 13270018048.0, + "930": 13270018048.0, + "935": 13270018048.0, + "940": 13270018048.0, + "945": 13270018048.0, + "950": 13270018048.0, + "955": 13270018048.0, + "960": 13270018048.0, + "965": 13270018048.0, + "970": 13270018048.0, + "975": 13270018048.0, + "980": 13270018048.0, + "985": 13270018048.0, + "990": 13270018048.0, + "995": 13270018048.0, + "1000": 13270018048.0, + "1005": 13270018048.0, + "1010": 13270018048.0, + "1015": 13270018048.0, + "1020": 13270018048.0, + "1025": 13270018048.0, + "1030": 13270018048.0, + "1035": 13270018048.0, + "1040": 13270018048.0, + "1045": 13270018048.0, + "1050": 13270018048.0, + "1055": 13270018048.0, + "1060": 13270018048.0, + "1065": 13270018048.0, + "1070": 13270018048.0, + "1075": 13270018048.0, + "1080": 13270018048.0, + "1085": 13270018048.0, + "1090": 13270018048.0, + "1095": 13270018048.0, + "1100": 13270018048.0, + "1105": 13270018048.0, + "1110": 13270018048.0, + "1115": 13270018048.0, + "1120": 13270018048.0, + "1125": 13270018048.0, + "1130": 13270018048.0, + "1135": 13270018048.0, + "1140": 13270018048.0, + "1145": 13270018048.0, + "1150": 13270018048.0, + "1155": 13270018048.0, + "1160": 13270018048.0, + "1165": 13270018048.0, + "1170": 13270018048.0, + "1175": 13270018048.0, + "1180": 13270018048.0, + 
"1185": 13270018048.0, + "1190": 13270018048.0, + "1195": 13270018048.0, + "1200": 13270018048.0, + "1205": 13270018048.0, + "1210": 13270018048.0, + "1215": 13270018048.0, + "1220": 13270018048.0, + "1225": 13270018048.0, + "1230": 13270018048.0, + "1235": 13270018048.0, + "1240": 13270018048.0, + "1245": 13270018048.0, + "1250": 13270018048.0, + "1255": 13270018048.0, + "1260": 13270018048.0, + "1265": 13270018048.0, + "1270": 13270018048.0, + "1275": 13270018048.0, + "1280": 13270018048.0, + "1285": 13270018048.0, + "1290": 13270018048.0, + "1295": 13270018048.0, + "1300": 13270018048.0, + "1305": 13270018048.0, + "1310": 13270018048.0, + "1315": 13270018048.0, + "1320": 13270018048.0, + "1325": 13270018048.0, + "1330": 13270018048.0, + "1335": 13270018048.0, + "1340": 13270018048.0, + "1345": 13270018048.0, + "1350": 13270018048.0, + "1355": 13270018048.0, + "1360": 13270018048.0, + "1365": 13270018048.0, + "1370": 13270018048.0, + "1375": 13270018048.0, + "1380": 13270018048.0, + "1385": 13270018048.0, + "1390": 13270018048.0, + "1395": 13270018048.0, + "1400": 13270018048.0, + "1405": 13270018048.0, + "1410": 13270018048.0, + "1415": 13270018048.0, + "1420": 13270018048.0, + "1425": 13270018048.0, + "1430": 13270018048.0, + "1435": 13270018048.0, + "1440": 13270018048.0, + "1445": 13270018048.0, + "1450": 13270018048.0, + "1455": 13270018048.0, + "1460": 13270018048.0, + "1465": 13270018048.0, + "1470": 13270018048.0, + "1475": 13270018048.0, + "1480": 13270018048.0, + "1485": 13270018048.0, + "1490": 13270018048.0, + "1495": 13270018048.0, + "1500": 13270018048.0, + "1505": 13270018048.0, + "1510": 13270018048.0, + "1515": 13270018048.0, + "1520": 13270018048.0, + "1525": 13270018048.0, + "1530": 13270018048.0, + "1535": 13270018048.0, + "1540": 13270018048.0, + "1545": 13270018048.0, + "1550": 13270018048.0, + "1555": 13270018048.0, + "1560": 13270018048.0, + "1565": 13270018048.0, + "1570": 13270018048.0, + "1575": 13270018048.0, + "1580": 13270018048.0, + 
"1585": 13270018048.0, + "1590": 13270018048.0, + "1595": 13270018048.0, + "1600": 13270018048.0, + "1605": 13270018048.0, + "1610": 13270018048.0, + "1615": 13270018048.0, + "1620": 13270018048.0, + "1625": 13270018048.0, + "1630": 13270018048.0, + "1635": 13270018048.0, + "1640": 13270018048.0, + "1645": 13270018048.0, + "1650": 13270018048.0, + "1655": 13270018048.0, + "1660": 13270018048.0, + "1665": 13270018048.0, + "1670": 13270018048.0, + "1675": 13270018048.0, + "1680": 13270018048.0, + "1685": 13270018048.0, + "1690": 13270018048.0, + "1695": 13270018048.0, + "1700": 13270018048.0, + "1705": 13270018048.0, + "1710": 13270018048.0, + "1715": 13270018048.0, + "1720": 13270018048.0, + "1725": 13270018048.0, + "1730": 13270018048.0, + "1735": 13270018048.0, + "1740": 13270018048.0, + "1745": 13270018048.0, + "1750": 13270018048.0, + "1755": 13270018048.0, + "1760": 13270018048.0, + "1765": 13270018048.0, + "1770": 13270018048.0, + "1775": 13270018048.0, + "1780": 13270018048.0, + "1785": 13270018048.0, + "1790": 13270018048.0, + "1795": 13270018048.0, + "1800": 13270018048.0, + "1805": 13270018048.0, + "1810": 13270018048.0, + "1815": 13270018048.0, + "1820": 13270018048.0, + "1825": 13270018048.0, + "1830": 13270018048.0, + "1835": 13270018048.0, + "1840": 13270018048.0, + "1845": 13270018048.0, + "1850": 13270018048.0, + "1855": 13270018048.0, + "1860": 13270018048.0, + "1865": 13270018048.0, + "1870": 13270018048.0, + "1875": 13270018048.0, + "1880": 13270018048.0, + "1885": 13270018048.0, + "1890": 13270018048.0, + "1895": 13270018048.0, + "1900": 13270018048.0, + "1905": 13270018048.0, + "1910": 13270018048.0, + "1915": 13270018048.0, + "1920": 13270018048.0, + "1925": 13270018048.0, + "1930": 13270018048.0, + "1935": 13270018048.0, + "1940": 13270018048.0, + "1945": 13270018048.0, + "1950": 13270018048.0, + "1955": 13270018048.0, + "1960": 13270018048.0, + "1965": 13270018048.0, + "1970": 13270018048.0, + "1975": 13270018048.0, + "1980": 13270018048.0, + 
"1985": 13270018048.0, + "1990": 13270018048.0, + "1995": 13270018048.0, + "2000": 13270018048.0, + "2005": 13270018048.0, + "2010": 13270018048.0, + "2015": 13270018048.0, + "2020": 13270018048.0, + "2025": 13270018048.0, + "2030": 13270018048.0, + "2035": 13270018048.0, + "2040": 13270018048.0, + "2045": 13270018048.0, + "2050": 13270018048.0, + "2055": 13270018048.0, + "2060": 13270018048.0, + "2065": 13270018048.0, + "2070": 13270018048.0, + "2075": 13270018048.0, + "2080": 13270018048.0, + "2085": 13270018048.0, + "2090": 13270018048.0, + "2095": 13270018048.0, + "2100": 13270018048.0, + "2105": 13270018048.0, + "2110": 13270018048.0, + "2115": 13270018048.0, + "2120": 13270018048.0, + "2125": 13270018048.0, + "2130": 13270018048.0, + "2135": 13270018048.0, + "2140": 13270018048.0, + "2145": 13270018048.0, + "2150": 13270018048.0, + "2155": 13270018048.0, + "2160": 13270018048.0, + "2165": 13270018048.0, + "2170": 13270018048.0, + "2175": 13270018048.0, + "2180": 13270018048.0, + "2185": 13270018048.0, + "2190": 13270018048.0, + "2195": 13270018048.0, + "2200": 13270018048.0, + "2205": 13270018048.0, + "2210": 13270018048.0, + "2215": 13270018048.0, + "2220": 13270018048.0, + "2225": 13270018048.0, + "2230": 13270018048.0, + "2235": 13270018048.0, + "2240": 13270018048.0, + "2245": 13270018048.0, + "2250": 13270018048.0, + "2255": 13270018048.0, + "2260": 13270018048.0, + "2265": 13270018048.0, + "2270": 13270018048.0, + "2275": 13270018048.0, + "2280": 13270018048.0, + "2285": 13270018048.0, + "2290": 13270018048.0, + "2295": 13270018048.0, + "2300": 13270018048.0, + "2305": 13270018048.0, + "2310": 13270018048.0, + "2315": 13270018048.0, + "2320": 13270018048.0, + "2325": 13270018048.0, + "2330": 13270018048.0, + "2335": 13270018048.0, + "2340": 13270018048.0, + "2345": 13270018048.0, + "2350": 13270018048.0, + "2355": 13270018048.0, + "2360": 13270018048.0, + "2365": 13270018048.0, + "2370": 13270018048.0, + "2375": 13270018048.0, + "2380": 13270018048.0, + 
"2385": 13270018048.0, + "2390": 13270018048.0, + "2395": 13270018048.0, + "2400": 13270018048.0, + "2405": 13270018048.0, + "2410": 13270018048.0, + "2415": 13270018048.0, + "2420": 13270018048.0, + "2425": 13270018048.0, + "2430": 13270018048.0, + "2435": 13270018048.0, + "2440": 13270018048.0, + "2445": 13270018048.0, + "2450": 13270018048.0, + "2455": 13270018048.0, + "2460": 13270018048.0, + "2465": 13270018048.0, + "2470": 13270018048.0, + "2475": 13270018048.0, + "2480": 13270018048.0, + "2485": 13270018048.0, + "2490": 13270018048.0, + "2495": 13270018048.0, + "2500": 13270018048.0, + "2505": 13270018048.0, + "2510": 13270018048.0, + "2515": 13270018048.0, + "2520": 13270018048.0, + "2525": 13270018048.0, + "2530": 13270018048.0, + "2535": 13270018048.0, + "2540": 13270018048.0, + "2545": 13270018048.0, + "2550": 13270018048.0, + "2555": 13270018048.0, + "2560": 13270018048.0, + "2565": 13270018048.0, + "2570": 13270018048.0, + "2575": 13270018048.0, + "2580": 13270018048.0, + "2585": 13270018048.0, + "2590": 13270018048.0, + "2595": 13270018048.0, + "2600": 13270018048.0, + "2605": 13270018048.0, + "2610": 13270018048.0, + "2615": 13270018048.0, + "2620": 13270018048.0, + "2625": 13270018048.0, + "2630": 13270018048.0, + "2635": 13270018048.0, + "2640": 13270018048.0, + "2645": 13270018048.0, + "2650": 13270018048.0, + "2655": 13270018048.0, + "2660": 13270018048.0, + "2665": 13270018048.0, + "2670": 13270018048.0, + "2675": 13270018048.0, + "2680": 13270018048.0, + "2685": 13270018048.0, + "2690": 13270018048.0, + "2695": 13270018048.0, + "2700": 13270018048.0, + "2705": 13270018048.0, + "2710": 13270018048.0, + "2715": 13270018048.0, + "2720": 13270018048.0, + "2725": 13270018048.0, + "2730": 13270018048.0, + "2735": 13270018048.0, + "2740": 13270018048.0, + "2745": 13270018048.0, + "2750": 13270018048.0, + "2755": 13270018048.0, + "2760": 13270018048.0, + "2765": 13270018048.0, + "2770": 13270018048.0, + "2775": 13270018048.0, + "2780": 13270018048.0, + 
"2785": 13270018048.0, + "2790": 13270018048.0, + "2795": 13270018048.0, + "2800": 13270018048.0, + "2805": 13270018048.0, + "2810": 13270018048.0, + "2815": 13270018048.0, + "2820": 13270018048.0, + "2825": 13270018048.0, + "2830": 13270018048.0, + "2835": 13270018048.0, + "2840": 13270018048.0, + "2845": 13270018048.0, + "2850": 13270018048.0, + "2855": 13270018048.0, + "2860": 13270018048.0, + "2865": 13270018048.0, + "2870": 13270018048.0, + "2875": 13270018048.0, + "2880": 13270018048.0, + "2885": 13270018048.0, + "2890": 13270018048.0, + "2895": 13270018048.0, + "2900": 13270018048.0, + "2905": 13270018048.0, + "2910": 13270018048.0, + "2915": 13270018048.0, + "2920": 13270018048.0, + "2925": 13270018048.0, + "2930": 13270018048.0, + "2935": 13270018048.0, + "2940": 13270018048.0, + "2945": 13270018048.0, + "2950": 13270018048.0, + "2955": 13270018048.0, + "2960": 13270018048.0, + "2965": 13270018048.0, + "2970": 13270018048.0, + "2975": 13270018048.0, + "2980": 13270018048.0, + "2985": 13270018048.0, + "2990": 13270018048.0, + "2995": 13270018048.0, + "3000": 13270018048.0, + "3005": 13270018048.0, + "3010": 13270018048.0, + "3015": 13270018048.0, + "3020": 13270018048.0, + "3025": 13270018048.0, + "3030": 13270018048.0, + "3035": 13270018048.0, + "3040": 13270018048.0, + "3045": 13270018048.0, + "3050": 13270018048.0, + "3055": 13270018048.0, + "3060": 13270018048.0, + "3065": 13270018048.0, + "3070": 13270018048.0, + "3075": 13270018048.0, + "3080": 13270018048.0, + "3085": 13270018048.0, + "3090": 13270018048.0, + "3095": 13270018048.0, + "3100": 13270018048.0, + "3105": 13270018048.0, + "3110": 13270018048.0, + "3115": 13270018048.0, + "3120": 13270018048.0, + "3125": 13270018048.0, + "3130": 13270018048.0, + "3135": 13270018048.0, + "3140": 13270018048.0, + "3145": 13270018048.0, + "3150": 13270018048.0, + "3155": 13270018048.0, + "3160": 13270018048.0, + "3165": 13270018048.0, + "3170": 13270018048.0, + "3175": 13270018048.0, + "3180": 13270018048.0, + 
"3185": 13270018048.0, + "3190": 13270018048.0, + "3195": 13270018048.0, + "3200": 13270018048.0, + "3205": 13270018048.0, + "3210": 13270018048.0, + "3215": 13270018048.0, + "3220": 13270018048.0, + "3225": 13270018048.0, + "3230": 13270018048.0, + "3235": 13270018048.0, + "3240": 13270018048.0, + "3245": 13270018048.0, + "3250": 13270018048.0, + "3255": 13270018048.0, + "3260": 13270018048.0, + "3265": 13270018048.0, + "3270": 13270018048.0, + "3275": 13270018048.0, + "3280": 13270018048.0, + "3285": 13270018048.0, + "3290": 13270018048.0, + "3295": 13270018048.0, + "3300": 13270018048.0, + "3305": 13270018048.0, + "3310": 13270018048.0, + "3315": 13270018048.0, + "3320": 13270018048.0, + "3325": 13270018048.0, + "3330": 13270018048.0, + "3335": 13270018048.0, + "3340": 13270018048.0, + "3345": 13270018048.0, + "3350": 13270018048.0, + "3355": 13270018048.0, + "3360": 13270018048.0, + "3365": 13270018048.0, + "3370": 13270018048.0, + "3375": 13270018048.0, + "3380": 13270018048.0, + "3385": 13270018048.0, + "3390": 13270018048.0, + "3395": 13270018048.0, + "3400": 13270018048.0, + "3405": 13270018048.0, + "3410": 13270018048.0, + "3415": 13270018048.0, + "3420": 13270018048.0, + "3425": 13270018048.0, + "3430": 13270018048.0, + "3435": 13270018048.0, + "3440": 13270018048.0, + "3445": 13270018048.0, + "3450": 13270018048.0, + "3455": 13270018048.0, + "3460": 13270018048.0, + "3465": 13270018048.0, + "3470": 13270018048.0, + "3475": 13270018048.0, + "3480": 13270018048.0, + "3485": 13270018048.0, + "3490": 13270018048.0, + "3495": 13270018048.0, + "3500": 13270018048.0, + "3505": 13270018048.0, + "3510": 13270018048.0, + "3515": 13270018048.0, + "3520": 13270018048.0, + "3525": 13270018048.0, + "3530": 13270018048.0, + "3535": 13270018048.0, + "3540": 13270018048.0, + "3545": 13270018048.0, + "3550": 13270018048.0, + "3555": 13270018048.0, + "3560": 13270018048.0, + "3565": 13270018048.0, + "3570": 13270018048.0, + "3575": 13270018048.0, + "3580": 13270018048.0, + 
"3585": 13270018048.0, + "3590": 13270018048.0, + "3595": 13270018048.0, + "3600": 13270018048.0, + "3605": 13270018048.0, + "3610": 13270018048.0, + "3615": 13270018048.0, + "3620": 13270018048.0, + "3625": 13270018048.0, + "3630": 13270018048.0, + "3635": 13270018048.0, + "3640": 13270018048.0, + "3645": 13270018048.0, + "3650": 13270018048.0, + "3655": 13270018048.0, + "3660": 13270018048.0, + "3665": 13270018048.0, + "3670": 13270018048.0, + "3675": 13270018048.0, + "3680": 13270018048.0, + "3685": 13270018048.0, + "3690": 13270018048.0, + "3695": 13270018048.0, + "3700": 13270018048.0, + "3705": 13270018048.0, + "3710": 13270018048.0, + "3715": 13270018048.0, + "3720": 13270018048.0, + "3725": 13270018048.0, + "3730": 13270018048.0, + "3735": 13270018048.0, + "3740": 13270018048.0, + "3745": 13270018048.0, + "3750": 13270018048.0, + "3755": 13270018048.0, + "3760": 13270018048.0, + "3765": 13270018048.0, + "3770": 13270018048.0, + "3775": 13270018048.0, + "3780": 13270018048.0, + "3785": 13270018048.0, + "3790": 13270018048.0, + "3795": 13270018048.0, + "3800": 13270018048.0, + "3805": 13270018048.0, + "3810": 13270018048.0, + "3815": 13270018048.0, + "3820": 13270018048.0, + "3825": 13270018048.0, + "3830": 13270018048.0, + "3835": 13270018048.0, + "3840": 13270018048.0, + "3845": 13270018048.0, + "3850": 13270018048.0, + "3855": 13270018048.0, + "3860": 13270018048.0, + "3865": 13270018048.0, + "3870": 13270018048.0, + "3875": 13270018048.0, + "3880": 13270018048.0, + "3885": 13270018048.0, + "3890": 13270018048.0, + "3895": 13270018048.0, + "3900": 13270018048.0, + "3905": 13270018048.0, + "3910": 13270018048.0, + "3915": 13270018048.0, + "3920": 13270018048.0, + "3925": 13270018048.0, + "3930": 13270018048.0, + "3935": 13270018048.0, + "3940": 13270018048.0, + "3945": 13270018048.0, + "3950": 13270018048.0, + "3955": 13270018048.0, + "3960": 13270018048.0, + "3965": 13270018048.0, + "3970": 13270018048.0, + "3975": 13270018048.0, + "3980": 13270018048.0, + 
"3985": 13270018048.0, + "3990": 13270018048.0, + "3995": 13270018048.0, + "4000": 13270018048.0, + "4005": 13270018048.0, + "4010": 13270018048.0, + "4015": 13270018048.0, + "4020": 13270018048.0, + "4025": 13270018048.0, + "4030": 13270018048.0, + "4035": 13270018048.0, + "4040": 13270018048.0, + "4045": 13270018048.0, + "4050": 13270018048.0, + "4055": 13270018048.0, + "4060": 13270018048.0, + "4065": 13270018048.0, + "4070": 13270018048.0, + "4075": 13270018048.0, + "4080": 13270018048.0, + "4085": 13270018048.0, + "4090": 13270018048.0, + "4095": 13270018048.0, + "4100": 13270018048.0, + "4105": 13270018048.0, + "4110": 13270018048.0, + "4115": 13270018048.0, + "4120": 13270018048.0, + "4125": 13270018048.0, + "4130": 13270018048.0, + "4135": 13270018048.0, + "4140": 13270018048.0, + "4145": 13270018048.0, + "4150": 13270018048.0, + "4155": 13270018048.0, + "4160": 13270018048.0, + "4165": 13270018048.0, + "4170": 13270018048.0, + "4175": 13270018048.0, + "4180": 13270018048.0, + "4185": 13270018048.0, + "4190": 13270018048.0, + "4195": 13270018048.0, + "4200": 13270018048.0, + "4205": 13270018048.0, + "4210": 13270018048.0, + "4215": 13270018048.0, + "4220": 13270018048.0, + "4225": 13270018048.0, + "4230": 13270018048.0, + "4235": 13270018048.0, + "4240": 13270018048.0, + "4245": 13270018048.0, + "4250": 13270018048.0, + "4255": 13270018048.0, + "4260": 13270018048.0, + "4265": 13270018048.0, + "4270": 13270018048.0, + "4275": 13270018048.0, + "4280": 13270018048.0, + "4285": 13270018048.0, + "4290": 13270018048.0, + "4295": 13270018048.0, + "4300": 13270018048.0, + "4305": 13270018048.0, + "4310": 13270018048.0, + "4315": 13270018048.0, + "4320": 13270018048.0, + "4325": 13270018048.0, + "4330": 13270018048.0, + "4335": 13270018048.0, + "4340": 13270018048.0, + "4345": 13270018048.0, + "4350": 13270018048.0, + "4355": 13270018048.0, + "4360": 13270018048.0, + "4365": 13270018048.0, + "4370": 13270018048.0, + "4375": 13270018048.0, + "4380": 13270018048.0, + 
"4385": 13270018048.0, + "4390": 13270018048.0, + "4395": 13270018048.0, + "4400": 13270018048.0, + "4405": 13270018048.0, + "4410": 13270018048.0, + "4415": 13270018048.0, + "4420": 13270018048.0, + "4425": 13270018048.0, + "4430": 13270018048.0, + "4435": 13270018048.0, + "4440": 13270018048.0, + "4445": 13270018048.0, + "4450": 13270018048.0, + "4455": 13270018048.0, + "4460": 13270018048.0, + "4465": 13270018048.0, + "4470": 13270018048.0, + "4475": 13270018048.0, + "4480": 13270018048.0, + "4485": 13270018048.0, + "4490": 13270018048.0, + "4495": 13270018048.0, + "4500": 13270018048.0, + "4505": 13270018048.0, + "4510": 13270018048.0, + "4515": 13270018048.0, + "4520": 13270018048.0, + "4525": 13270018048.0, + "4530": 13270018048.0, + "4535": 13270018048.0, + "4540": 13270018048.0, + "4545": 13270018048.0, + "4550": 13270018048.0, + "4555": 13270018048.0, + "4560": 13270018048.0, + "4565": 13270018048.0, + "4570": 13270018048.0, + "4575": 13270018048.0, + "4580": 13270018048.0, + "4585": 13270018048.0, + "4590": 13270018048.0, + "4595": 13270018048.0, + "4600": 13270018048.0, + "4605": 13270018048.0, + "4610": 13270018048.0, + "4615": 13270018048.0, + "4620": 13270018048.0, + "4625": 13270018048.0, + "4630": 13270018048.0, + "4635": 13270018048.0, + "4640": 13270018048.0, + "4645": 13270018048.0, + "4650": 13270018048.0, + "4655": 13270018048.0, + "4660": 13270018048.0, + "4665": 13270018048.0, + "4670": 13270018048.0, + "4675": 13270018048.0, + "4680": 13270018048.0, + "4685": 13270018048.0, + "4690": 13270018048.0, + "4695": 13270018048.0, + "4700": 13270018048.0, + "4705": 13270018048.0, + "4710": 13270018048.0, + "4715": 13270018048.0, + "4720": 13270018048.0, + "4725": 13270018048.0, + "4730": 13270018048.0, + "4735": 13270018048.0, + "4740": 13270018048.0, + "4745": 13270018048.0, + "4750": 13270018048.0, + "4755": 13270018048.0, + "4760": 13270018048.0, + "4765": 13270018048.0, + "4770": 13270018048.0, + "4775": 13270018048.0, + "4780": 13270018048.0, + 
"4785": 13270018048.0, + "4790": 13270018048.0, + "4795": 13270018048.0, + "4800": 13270018048.0, + "4805": 13270018048.0, + "4810": 13270018048.0, + "4815": 13270018048.0, + "4820": 13270018048.0, + "4825": 13270018048.0, + "4830": 13270018048.0, + "4835": 13270018048.0, + "4840": 13270018048.0, + "4845": 13270018048.0, + "4850": 13270018048.0, + "4855": 13270018048.0, + "4860": 13270018048.0, + "4865": 13270018048.0, + "4870": 13270018048.0, + "4875": 13270018048.0, + "4880": 13270018048.0, + "4885": 13270018048.0, + "4890": 13270018048.0, + "4895": 13270018048.0, + "4900": 13270018048.0, + "4905": 13270018048.0, + "4910": 13270018048.0, + "4915": 13270018048.0, + "4920": 13270018048.0, + "4925": 13270018048.0, + "4930": 13270018048.0, + "4935": 13270018048.0, + "4940": 13270018048.0, + "4945": 13270018048.0, + "4950": 13270018048.0, + "4955": 13270018048.0, + "4960": 13270018048.0, + "4965": 13270018048.0, + "4970": 13270018048.0, + "4975": 13270018048.0, + "4980": 13270018048.0, + "4985": 13270018048.0, + "4990": 13270018048.0, + "4995": 13270018048.0, + "5000": 13270018048.0, + "5005": 13270018048.0, + "5010": 13270018048.0, + "5015": 13270018048.0, + "5020": 13270018048.0, + "5025": 13270018048.0, + "5030": 13270018048.0, + "5035": 13270018048.0, + "5040": 13270018048.0, + "5045": 13270018048.0, + "5050": 13270018048.0, + "5055": 13270018048.0, + "5060": 13270018048.0, + "5065": 13270018048.0, + "5070": 13270018048.0, + "5075": 13270018048.0, + "5080": 13270018048.0, + "5085": 13270018048.0, + "5090": 13270018048.0, + "5095": 13270018048.0, + "5100": 13270018048.0, + "5105": 13270018048.0, + "5110": 13270018048.0, + "5115": 13270018048.0, + "5120": 13270018048.0, + "5125": 13270018048.0, + "5130": 13270018048.0, + "5135": 13270018048.0, + "5140": 13270018048.0, + "5145": 13270018048.0, + "5150": 13270018048.0, + "5155": 13270018048.0, + "5160": 13270018048.0, + "5165": 13270018048.0, + "5170": 13270018048.0, + "5175": 13270018048.0, + "5180": 13270018048.0, + 
"5185": 13270018048.0, + "5190": 13270018048.0, + "5195": 13270018048.0, + "5200": 13270018048.0, + "5205": 13270018048.0, + "5210": 13270018048.0, + "5215": 13270018048.0, + "5220": 13270018048.0, + "5225": 13270018048.0, + "5230": 13270018048.0, + "5235": 13270018048.0, + "5240": 13270018048.0, + "5245": 13270018048.0, + "5250": 13270018048.0, + "5255": 13270018048.0, + "5260": 13270018048.0, + "5265": 13270018048.0, + "5270": 13270018048.0, + "5275": 13270018048.0, + "5280": 13270018048.0, + "5285": 13270018048.0, + "5290": 13270018048.0, + "5295": 13270018048.0, + "5300": 13270018048.0, + "5305": 13270018048.0, + "5310": 13270018048.0, + "5315": 13270018048.0, + "5320": 13270018048.0, + "5325": 13270018048.0, + "5330": 13270018048.0, + "5335": 13270018048.0, + "5340": 13270018048.0, + "5345": 13270018048.0, + "5350": 13270018048.0, + "5355": 13270018048.0, + "5360": 13270018048.0, + "5365": 13270018048.0, + "5370": 13270018048.0, + "5375": 13270018048.0, + "5380": 13270018048.0, + "5385": 13270018048.0, + "5390": 13270018048.0, + "5395": 13270018048.0, + "5400": 13270018048.0, + "5405": 13270018048.0, + "5410": 13270018048.0, + "5415": 13270018048.0, + "5420": 13270018048.0, + "5425": 13270018048.0, + "5430": 13270018048.0, + "5435": 13270018048.0, + "5440": 13270018048.0, + "5445": 13270018048.0, + "5450": 13270018048.0, + "5455": 13270018048.0, + "5460": 13270018048.0, + "5465": 13270018048.0, + "5470": 13270018048.0, + "5475": 13270018048.0, + "5480": 13270018048.0, + "5485": 13270018048.0, + "5490": 13270018048.0, + "5495": 13270018048.0, + "5500": 13270018048.0, + "5505": 13270018048.0, + "5510": 13270018048.0, + "5515": 13270018048.0, + "5520": 13270018048.0, + "5525": 13270018048.0, + "5530": 13270018048.0, + "5535": 13270018048.0, + "5540": 13270018048.0, + "5545": 13270018048.0, + "5550": 13270018048.0, + "5555": 13270018048.0, + "5560": 13270018048.0, + "5565": 13270018048.0, + "5570": 13270018048.0, + "5575": 13270018048.0, + "5580": 13270018048.0, + 
"5585": 13270018048.0, + "5590": 13270018048.0, + "5595": 13270018048.0, + "5600": 13270018048.0, + "5605": 13270018048.0, + "5610": 13270018048.0, + "5615": 13270018048.0, + "5620": 13270018048.0, + "5625": 13270018048.0, + "5630": 13270018048.0, + "5635": 13270018048.0, + "5640": 13270018048.0, + "5645": 13270018048.0, + "5650": 13270018048.0, + "5655": 13270018048.0, + "5660": 13270018048.0, + "5665": 13270018048.0, + "5670": 13270018048.0, + "5675": 13270018048.0, + "5680": 13270018048.0, + "5685": 13270018048.0, + "5690": 13270018048.0, + "5695": 13270018048.0, + "5700": 13270018048.0, + "5705": 13270018048.0, + "5710": 13270018048.0, + "5715": 13270018048.0, + "5720": 13270018048.0, + "5725": 13270018048.0, + "5730": 13270018048.0, + "5735": 13270018048.0, + "5740": 13270018048.0, + "5745": 13270018048.0, + "5750": 13270018048.0, + "5755": 13270018048.0, + "5760": 13270018048.0, + "5765": 13270018048.0, + "5770": 13270018048.0, + "5775": 13270018048.0, + "5780": 13270018048.0, + "5785": 13270018048.0, + "5790": 13270018048.0, + "5795": 13270018048.0, + "5800": 13270018048.0, + "5805": 13270018048.0, + "5810": 13270018048.0, + "5815": 13270018048.0, + "5820": 13270018048.0, + "5825": 13270018048.0, + "5830": 13270018048.0, + "5835": 13270018048.0, + "5840": 13270018048.0, + "5845": 13270018048.0, + "5850": 13270018048.0, + "5855": 13270018048.0, + "5860": 13270018048.0, + "5865": 13270018048.0, + "5870": 13270018048.0, + "5875": 13270018048.0, + "5880": 13270018048.0, + "5885": 13270018048.0, + "5890": 13270018048.0, + "5895": 13270018048.0, + "5900": 13270018048.0, + "5905": 13270018048.0, + "5910": 13270018048.0, + "5915": 13270018048.0, + "5920": 13270018048.0, + "5925": 13270018048.0, + "5930": 13270018048.0, + "5935": 13270018048.0, + "5940": 13270018048.0, + "5945": 13270018048.0, + "5950": 13270018048.0, + "5955": 13270018048.0, + "5960": 13270018048.0, + "5965": 13270018048.0, + "5970": 13270018048.0, + "5975": 13270018048.0, + "5980": 13270018048.0, + 
"5985": 13270018048.0, + "5990": 13270018048.0, + "5995": 13270018048.0, + "6000": 13270018048.0, + "6005": 13270018048.0, + "6010": 13270018048.0, + "6015": 13270018048.0, + "6020": 13270018048.0, + "6025": 13270018048.0, + "6030": 13270018048.0, + "6035": 13270018048.0, + "6040": 13270018048.0, + "6045": 13270018048.0, + "6050": 13270018048.0, + "6055": 13270018048.0, + "6060": 13270018048.0, + "6065": 13270018048.0, + "6070": 13270018048.0, + "6075": 13270018048.0, + "6080": 13270018048.0, + "6085": 13270018048.0, + "6090": 13270018048.0, + "6095": 13270018048.0, + "6100": 13270018048.0, + "6105": 13270018048.0, + "6110": 13270018048.0, + "6115": 13270018048.0, + "6120": 13270018048.0, + "6125": 13270018048.0, + "6130": 13270018048.0, + "6135": 13270018048.0, + "6140": 13270018048.0, + "6145": 13270018048.0, + "6150": 13270018048.0, + "6155": 13270018048.0, + "6160": 13270018048.0, + "6165": 13270018048.0, + "6170": 13270018048.0, + "6175": 13270018048.0, + "6180": 13270018048.0, + "6185": 13270018048.0, + "6190": 13270018048.0, + "6195": 13270018048.0, + "6200": 13270018048.0, + "6205": 13270018048.0, + "6210": 13270018048.0, + "6215": 13270018048.0, + "6220": 13270018048.0, + "6225": 13270018048.0, + "6230": 13270018048.0, + "6235": 13270018048.0, + "6240": 13270018048.0, + "6245": 13270018048.0, + "6250": 13270018048.0, + "6255": 13270018048.0, + "6260": 13270018048.0, + "6265": 13270018048.0, + "6270": 13270018048.0, + "6275": 13270018048.0, + "6280": 13270018048.0, + "6285": 13270018048.0, + "6290": 13270018048.0, + "6295": 13270018048.0, + "6300": 13270018048.0, + "6305": 13270018048.0, + "6310": 13270018048.0, + "6315": 13270018048.0, + "6320": 13270018048.0, + "6325": 13270018048.0, + "6330": 13270018048.0, + "6335": 13270018048.0, + "6340": 13270018048.0, + "6345": 13270018048.0, + "6350": 13270018048.0, + "6355": 13270018048.0, + "6360": 13270018048.0, + "6365": 13270018048.0, + "6370": 13270018048.0, + "6375": 13270018048.0, + "6380": 13270018048.0, + 
"6385": 13270018048.0, + "6390": 13270018048.0, + "6395": 13270018048.0, + "6400": 13270018048.0, + "6405": 13270018048.0, + "6410": 13270018048.0, + "6415": 13270018048.0, + "6420": 13270018048.0, + "6425": 13270018048.0, + "6430": 13270018048.0, + "6435": 13270018048.0, + "6440": 13270018048.0, + "6445": 13270018048.0, + "6450": 13270018048.0, + "6455": 13270018048.0, + "6460": 13270018048.0, + "6465": 13270018048.0, + "6470": 13270018048.0, + "6475": 13270018048.0, + "6480": 13270018048.0, + "6485": 13270018048.0, + "6490": 13270018048.0, + "6495": 13270018048.0, + "6500": 13270018048.0, + "6505": 13270018048.0, + "6510": 13270018048.0, + "6515": 13270018048.0, + "6520": 13270018048.0, + "6525": 13270018048.0, + "6530": 13270018048.0, + "6535": 13270018048.0, + "6540": 13270018048.0, + "6545": 13270018048.0, + "6550": 13270018048.0, + "6555": 13270018048.0, + "6560": 13270018048.0, + "6565": 13270018048.0, + "6570": 13270018048.0, + "6575": 13270018048.0, + "6580": 13270018048.0, + "6585": 13270018048.0, + "6590": 13270018048.0, + "6595": 13270018048.0, + "6600": 13270018048.0, + "6605": 13270018048.0, + "6610": 13270018048.0, + "6615": 13270018048.0, + "6620": 13270018048.0, + "6625": 13270018048.0, + "6630": 13270018048.0, + "6635": 13270018048.0, + "6640": 13270018048.0, + "6645": 13270018048.0, + "6650": 13270018048.0, + "6655": 13270018048.0, + "6660": 13270018048.0, + "6665": 13270018048.0, + "6670": 13270018048.0, + "6675": 13270018048.0, + "6680": 13270018048.0, + "6685": 13270018048.0, + "6690": 13270018048.0, + "6695": 13270018048.0, + "6700": 13270018048.0, + "6705": 13270018048.0, + "6710": 13270018048.0, + "6715": 13270018048.0, + "6720": 13270018048.0, + "6725": 13270018048.0, + "6730": 13270018048.0, + "6735": 13270018048.0, + "6740": 13270018048.0, + "6745": 13270018048.0, + "6750": 13270018048.0, + "6755": 13270018048.0, + "6760": 13270018048.0, + "6765": 13270018048.0, + "6770": 13270018048.0, + "6775": 13270018048.0, + "6780": 13270018048.0, + 
"6785": 13270018048.0, + "6790": 13270018048.0, + "6795": 13270018048.0, + "6800": 13270018048.0, + "6805": 13270018048.0, + "6810": 13270018048.0, + "6815": 13270018048.0, + "6820": 13270018048.0, + "6825": 13270018048.0, + "6830": 13270018048.0, + "6835": 13270018048.0, + "6840": 13270018048.0, + "6845": 13270018048.0, + "6850": 13270018048.0, + "6855": 13270018048.0, + "6860": 13270018048.0, + "6865": 13270018048.0, + "6870": 13270018048.0, + "6875": 13270018048.0, + "6880": 13270018048.0, + "6885": 13270018048.0, + "6890": 13270018048.0, + "6895": 13270018048.0, + "6900": 13270018048.0, + "6905": 13270018048.0, + "6910": 13270018048.0, + "6915": 13270018048.0, + "6920": 13270018048.0, + "6925": 13270018048.0, + "6930": 13270018048.0, + "6935": 13270018048.0, + "6940": 13270018048.0, + "6945": 13270018048.0, + "6950": 13270018048.0, + "6955": 13270018048.0, + "6960": 13270018048.0, + "6965": 13270018048.0, + "6970": 13270018048.0, + "6975": 13270018048.0, + "6980": 13270018048.0, + "6985": 13270018048.0, + "6990": 13270018048.0, + "6995": 13270018048.0, + "7000": 13270018048.0, + "7005": 13270018048.0, + "7010": 13270018048.0, + "7015": 13270018048.0, + "7020": 13270018048.0, + "7025": 13270018048.0, + "7030": 13270018048.0, + "7035": 13270018048.0, + "7040": 13270018048.0, + "7045": 13270018048.0, + "7050": 13270018048.0, + "7055": 13270018048.0, + "7060": 13270018048.0, + "7065": 13270018048.0, + "7070": 13270018048.0, + "7075": 13270018048.0, + "7080": 13270018048.0, + "7085": 13270018048.0, + "7090": 13270018048.0, + "7095": 13270018048.0, + "7100": 13270018048.0, + "7105": 13270018048.0, + "7110": 13270018048.0, + "7115": 13270018048.0, + "7120": 13270018048.0, + "7125": 13270018048.0, + "7130": 13270018048.0, + "7135": 13270018048.0, + "7140": 13270018048.0, + "7145": 13270018048.0, + "7150": 13270018048.0, + "7155": 13270018048.0, + "7160": 13270018048.0, + "7165": 13270018048.0, + "7170": 13270018048.0, + "7175": 13270018048.0, + "7180": 13270018048.0, + 
"7185": 13270018048.0, + "7190": 13270018048.0, + "7195": 13270018048.0, + "7200": 13270018048.0, + "7205": 13270018048.0, + "7210": 13270018048.0, + "7215": 13270018048.0, + "7220": 13270018048.0, + "7225": 13270018048.0, + "7230": 13270018048.0, + "7235": 13270018048.0, + "7240": 13270018048.0, + "7245": 13270018048.0, + "7250": 13270018048.0, + "7255": 13270018048.0, + "7260": 13270018048.0, + "7265": 13270018048.0, + "7270": 13270018048.0, + "7275": 13270018048.0, + "7280": 13270018048.0, + "7285": 13270018048.0, + "7290": 13270018048.0, + "7295": 13270018048.0, + "7300": 13270018048.0, + "7305": 13270018048.0, + "7310": 13270018048.0, + "7315": 13270018048.0, + "7320": 13270018048.0, + "7325": 13270018048.0, + "7330": 13270018048.0, + "7335": 13270018048.0, + "7340": 13270018048.0, + "7345": 13270018048.0, + "7350": 13270018048.0, + "7355": 13270018048.0, + "7360": 13270018048.0, + "7365": 13270018048.0, + "7370": 13270018048.0, + "7375": 13270018048.0, + "7380": 13270018048.0, + "7385": 13270018048.0, + "7390": 13270018048.0, + "7395": 13270018048.0, + "7400": 13270018048.0, + "7405": 13270018048.0, + "7410": 13270018048.0, + "7415": 13270018048.0, + "7420": 13270018048.0, + "7425": 13270018048.0, + "7430": 13270018048.0, + "7435": 13270018048.0, + "7440": 13270018048.0, + "7445": 13270018048.0, + "7450": 13270018048.0, + "7455": 13270018048.0, + "7460": 13270018048.0, + "7465": 13270018048.0, + "7470": 13270018048.0, + "7475": 13270018048.0, + "7480": 13270018048.0, + "7485": 13270018048.0, + "7490": 13270018048.0, + "7495": 13270018048.0, + "7500": 13270018048.0, + "7505": 13270018048.0, + "7510": 13270018048.0, + "7515": 13270018048.0, + "7520": 13270018048.0, + "7525": 13270018048.0, + "7530": 13270018048.0, + "7535": 13270018048.0, + "7540": 13270018048.0, + "7545": 13270018048.0, + "7550": 13270018048.0, + "7555": 13270018048.0, + "7560": 13270018048.0, + "7565": 13270018048.0, + "7570": 13270018048.0, + "7575": 13270018048.0, + "7580": 13270018048.0, + 
"7585": 13270018048.0, + "7590": 13270018048.0, + "7595": 13270018048.0, + "7600": 13270018048.0, + "7605": 13270018048.0, + "7610": 13270018048.0, + "7615": 13270018048.0, + "7620": 13270018048.0, + "7625": 13270018048.0, + "7630": 13270018048.0, + "7635": 13270018048.0, + "7640": 13270018048.0, + "7645": 13270018048.0, + "7650": 13270018048.0, + "7655": 13270018048.0, + "7660": 13270018048.0, + "7665": 13270018048.0, + "7670": 13270018048.0, + "7675": 13270018048.0, + "7680": 13270018048.0, + "7685": 13270018048.0, + "7690": 13270018048.0, + "7695": 13270018048.0, + "7700": 13270018048.0, + "7705": 13270018048.0, + "7710": 13270018048.0, + "7715": 13270018048.0, + "7720": 13270018048.0, + "7725": 13270018048.0, + "7730": 13270018048.0, + "7735": 13270018048.0, + "7740": 13270018048.0, + "7745": 13270018048.0, + "7750": 13270018048.0, + "7755": 13270018048.0, + "7760": 13270018048.0, + "7765": 13270018048.0, + "7770": 13270018048.0, + "7775": 13270018048.0, + "7780": 13270018048.0, + "7785": 13270018048.0, + "7790": 13270018048.0, + "7795": 13270018048.0, + "7800": 13270018048.0, + "7805": 13270018048.0, + "7810": 13270018048.0, + "7815": 13270018048.0, + "7820": 13270018048.0, + "7825": 13270018048.0, + "7830": 13270018048.0, + "7835": 13270018048.0, + "7840": 13270018048.0, + "7845": 13270018048.0, + "7850": 13270018048.0, + "7855": 13270018048.0, + "7860": 13270018048.0, + "7865": 13270018048.0, + "7870": 13270018048.0, + "7875": 13270018048.0, + "7880": 13270018048.0, + "7885": 13270018048.0, + "7890": 13270018048.0, + "7895": 13270018048.0, + "7900": 13270018048.0, + "7905": 13270018048.0, + "7910": 13270018048.0, + "7915": 13270018048.0, + "7920": 13270018048.0, + "7925": 13270018048.0, + "7930": 13270018048.0, + "7935": 13270018048.0, + "7940": 13270018048.0, + "7945": 13270018048.0, + "7950": 13270018048.0, + "7955": 13270018048.0, + "7960": 13270018048.0, + "7965": 13270018048.0, + "7970": 13270018048.0, + "7975": 13270018048.0, + "7980": 13270018048.0, + 
"7985": 13270018048.0, + "7990": 13270018048.0, + "7995": 13270018048.0, + "8000": 13270018048.0, + "8005": 13270018048.0, + "8010": 13270018048.0, + "8015": 13270018048.0, + "8020": 13270018048.0, + "8025": 13270018048.0, + "8030": 13270018048.0, + "8035": 13270018048.0, + "8040": 13270018048.0, + "8045": 13270018048.0, + "8050": 13270018048.0, + "8055": 13270018048.0, + "8060": 13270018048.0, + "8065": 13270018048.0, + "8070": 13270018048.0, + "8075": 13270018048.0, + "8080": 13270018048.0, + "8085": 13270018048.0, + "8090": 13270018048.0, + "8095": 13270018048.0, + "8100": 13270018048.0, + "8105": 13270018048.0, + "8110": 13270018048.0, + "8115": 13270018048.0, + "8120": 13270018048.0, + "8125": 13270018048.0, + "8130": 13270018048.0, + "8135": 13270018048.0, + "8140": 13270018048.0, + "8145": 13270018048.0, + "8150": 13270018048.0, + "8155": 13270018048.0, + "8160": 13270018048.0, + "8165": 13270018048.0, + "8170": 13270018048.0, + "8175": 13270018048.0, + "8180": 13270018048.0, + "8185": 13270018048.0, + "8190": 13270018048.0, + "8195": 13270018048.0, + "8200": 13270018048.0, + "8205": 13270018048.0, + "8210": 13270018048.0, + "8215": 13270018048.0, + "8220": 13270018048.0, + "8225": 13270018048.0, + "8230": 13270018048.0, + "8235": 13270018048.0, + "8240": 13270018048.0, + "8245": 13270018048.0, + "8250": 13270018048.0, + "8255": 13270018048.0, + "8260": 13270018048.0, + "8265": 13270018048.0, + "8270": 13270018048.0, + "8275": 13270018048.0, + "8280": 13270018048.0, + "8285": 13270018048.0, + "8290": 13270018048.0, + "8295": 13270018048.0, + "8300": 13270018048.0, + "8305": 13270018048.0, + "8310": 13270018048.0, + "8315": 13270018048.0, + "8320": 13270018048.0, + "8325": 13270018048.0, + "8330": 13270018048.0, + "8335": 13270018048.0, + "8340": 13270018048.0, + "8345": 13270018048.0, + "8350": 13270018048.0, + "8355": 13270018048.0, + "8360": 13270018048.0, + "8365": 13270018048.0, + "8370": 13270018048.0, + "8375": 13270018048.0, + "8380": 13270018048.0, + 
"8385": 13270018048.0, + "8390": 13270018048.0, + "8395": 13270018048.0, + "8400": 13270018048.0, + "8405": 13270018048.0, + "8410": 13270018048.0, + "8415": 13270018048.0, + "8420": 13270018048.0, + "8425": 13270018048.0, + "8430": 13270018048.0, + "8435": 13270018048.0, + "8440": 13270018048.0, + "8445": 13270018048.0, + "8450": 13270018048.0, + "8455": 13270018048.0, + "8460": 13270018048.0, + "8465": 13270018048.0, + "8470": 13270018048.0, + "8475": 13270018048.0, + "8480": 13270018048.0, + "8485": 13270018048.0, + "8490": 13270018048.0, + "8495": 13270018048.0, + "8500": 13270018048.0, + "8505": 13270018048.0, + "8510": 13270018048.0, + "8515": 13270018048.0, + "8520": 13270018048.0, + "8525": 13270018048.0, + "8530": 13270018048.0, + "8535": 13270018048.0, + "8540": 13270018048.0, + "8545": 13270018048.0, + "8550": 13270018048.0, + "8555": 13270018048.0, + "8560": 13270018048.0, + "8565": 13270018048.0, + "8570": 13270018048.0, + "8575": 13270018048.0, + "8580": 13270018048.0, + "8585": 13270018048.0, + "8590": 13270018048.0, + "8595": 13270018048.0, + "8600": 13270018048.0, + "8605": 13270018048.0, + "8610": 13270018048.0, + "8615": 13270018048.0, + "8620": 13270018048.0, + "8625": 13270018048.0, + "8630": 13270018048.0, + "8635": 13270018048.0, + "8640": 13270018048.0, + "8645": 13270018048.0, + "8650": 13270018048.0, + "8655": 13270018048.0, + "8660": 13270018048.0, + "8665": 13270018048.0, + "8670": 13270018048.0, + "8675": 13270018048.0, + "8680": 13270018048.0, + "8685": 13270018048.0, + "8690": 13270018048.0, + "8695": 13270018048.0, + "8700": 13270018048.0, + "8705": 13270018048.0, + "8710": 13270018048.0, + "8715": 13270018048.0, + "8720": 13270018048.0, + "8725": 13270018048.0, + "8730": 13270018048.0, + "8735": 13270018048.0, + "8740": 13270018048.0, + "8745": 13270018048.0, + "8750": 13270018048.0, + "8755": 13270018048.0, + "8760": 13270018048.0, + "8765": 13270018048.0, + "8770": 13270018048.0, + "8775": 13270018048.0, + "8780": 13270018048.0, + 
"8785": 13270018048.0, + "8790": 13270018048.0, + "8795": 13270018048.0, + "8800": 13270018048.0, + "8805": 13270018048.0, + "8810": 13268436992.0, + "8815": 13268436992.0, + "8820": 13268436992.0, + "8825": 13268436992.0, + "8830": 13268436992.0, + "8835": 13268436992.0, + "8840": 13268436992.0, + "8845": 13268436992.0, + "8850": 13268436992.0, + "8855": 13268436992.0, + "8860": 13268436992.0, + "8865": 13268436992.0, + "8870": 13268436992.0, + "8875": 13268436992.0, + "8880": 13268436992.0, + "8885": 13268436992.0, + "8890": 13268436992.0, + "8895": 13268436992.0, + "8900": 13268436992.0, + "8905": 13268436992.0, + "8910": 13268436992.0, + "8915": 13268436992.0, + "8920": 13268436992.0, + "8925": 13268436992.0, + "8930": 13268436992.0, + "8935": 13268436992.0, + "8940": 13268436992.0, + "8945": 13268436992.0, + "8950": 13268436992.0, + "8955": 13268436992.0, + "8960": 13268436992.0, + "8965": 13268436992.0, + "8970": 13268436992.0, + "8975": 13268436992.0, + "8980": 13268436992.0, + "8985": 13268436992.0, + "8990": 13268436992.0, + "8995": 13268436992.0, + "9000": 13268436992.0, + "9005": 13268436992.0, + "9010": 13268436992.0, + "9015": 13268436992.0, + "9020": 13268436992.0, + "9025": 13268436992.0, + "9030": 13268436992.0, + "9035": 13268436992.0, + "9040": 13268436992.0, + "9045": 13268436992.0, + "9050": 13268436992.0, + "9055": 13268436992.0, + "9060": 13268436992.0, + "9065": 13268436992.0, + "9070": 13268436992.0, + "9075": 13268436992.0, + "9080": 13268436992.0, + "9085": 13268436992.0, + "9090": 13268436992.0, + "9095": 13268436992.0, + "9100": 13268436992.0, + "9105": 13268436992.0, + "9110": 13268436992.0, + "9115": 13268436992.0, + "9120": 13268436992.0, + "9125": 13268436992.0, + "9130": 13268436992.0, + "9135": 13268436992.0, + "9140": 13268436992.0, + "9145": 13268436992.0, + "9150": 13268436992.0, + "9155": 13268436992.0, + "9160": 13268436992.0, + "9165": 13268436992.0, + "9170": 13268436992.0, + "9175": 13268436992.0, + "9180": 13268436992.0, + 
"9185": 13268436992.0, + "9190": 13268436992.0, + "9195": 13268436992.0, + "9200": 13268436992.0, + "9205": 13268436992.0, + "9210": 13268436992.0, + "9215": 13268436992.0, + "9220": 13268436992.0, + "9225": 13268436992.0, + "9230": 13268436992.0, + "9235": 13268436992.0, + "9240": 13268436992.0, + "9245": 13268436992.0, + "9250": 13268436992.0, + "9255": 13268436992.0, + "9260": 13268436992.0, + "9265": 13268436992.0, + "9270": 13268436992.0, + "9275": 13268436992.0, + "9280": 13268436992.0, + "9285": 13268436992.0, + "9290": 13268436992.0, + "9295": 13268436992.0, + "9300": 13268436992.0, + "9305": 13268436992.0, + "9310": 13268436992.0, + "9315": 13268436992.0, + "9320": 13268436992.0, + "9325": 13268436992.0, + "9330": 13268436992.0, + "9335": 13268436992.0, + "9340": 13268436992.0, + "9345": 13268436992.0, + "9350": 13268436992.0, + "9355": 13268436992.0, + "9360": 13268436992.0, + "9365": 13268436992.0, + "9370": 13268436992.0, + "9375": 13268436992.0, + "9380": 13268436992.0, + "9385": 13268436992.0, + "9390": 13268436992.0, + "9395": 13268436992.0, + "9400": 13268436992.0, + "9405": 13268436992.0, + "9410": 13268436992.0, + "9415": 13268436992.0, + "9420": 13268436992.0, + "9425": 13268436992.0, + "9430": 13268436992.0, + "9435": 13268436992.0, + "9440": 13268436992.0, + "9445": 13268436992.0, + "9450": 13268436992.0, + "9455": 13268436992.0, + "9460": 13268436992.0, + "9465": 13268436992.0, + "9470": 13268436992.0, + "9475": 13268436992.0, + "9480": 13268436992.0, + "9485": 13268436992.0, + "9490": 13268436992.0, + "9495": 13268436992.0, + "9500": 13268436992.0, + "9505": 13268436992.0, + "9510": 13268436992.0, + "9515": 13268436992.0, + "9520": 13268436992.0, + "9525": 13268436992.0, + "9530": 13268436992.0, + "9535": 13268436992.0, + "9540": 13268436992.0, + "9545": 13268436992.0, + "9550": 13268436992.0, + "9555": 13268436992.0, + "9560": 13268436992.0, + "9565": 13268436992.0, + "9570": 13268436992.0, + "9575": 13268436992.0, + "9580": 13268436992.0, + 
"9585": 13268436992.0, + "9590": 13268436992.0, + "9595": 13268436992.0, + "9600": 13268436992.0, + "9605": 13268436992.0, + "9610": 13268436992.0, + "9615": 13268436992.0, + "9620": 13268436992.0, + "9625": 13268436992.0, + "9630": 13268436992.0, + "9635": 13268436992.0, + "9640": 13268436992.0, + "9645": 13268436992.0, + "9650": 13268436992.0, + "9655": 13268436992.0, + "9660": 13268436992.0, + "9665": 13268436992.0, + "9670": 13268436992.0, + "9675": 13268436992.0, + "9680": 13268436992.0, + "9685": 13268436992.0, + "9690": 13268436992.0, + "9695": 13268436992.0, + "9700": 13268436992.0, + "9705": 13268436992.0, + "9710": 13268436992.0, + "9715": 13268436992.0, + "9720": 13268436992.0, + "9725": 13268436992.0, + "9730": 13268436992.0, + "9735": 13268436992.0, + "9740": 13268436992.0, + "9745": 13268436992.0, + "9750": 13268436992.0, + "9755": 13268436992.0, + "9760": 13268436992.0, + "9765": 13268436992.0, + "9770": 13268436992.0, + "9775": 13268436992.0, + "9780": 13268436992.0, + "9785": 13268436992.0, + "9790": 13268436992.0, + "9795": 13268436992.0, + "9800": 13268436992.0, + "9805": 13268436992.0, + "9810": 13268436992.0, + "9815": 13268436992.0, + "9820": 13268436992.0, + "9825": 13268436992.0, + "9830": 13268436992.0, + "9835": 13268436992.0, + "9840": 13268436992.0, + "9845": 13268436992.0, + "9850": 13268436992.0, + "9855": 13268436992.0, + "9860": 13268436992.0, + "9865": 13268436992.0, + "9870": 13268436992.0, + "9875": 13268436992.0, + "9880": 13268436992.0, + "9885": 13268436992.0, + "9890": 13268436992.0, + "9895": 13268436992.0, + "9900": 13268436992.0, + "9905": 13268436992.0, + "9910": 13268436992.0, + "9915": 13268436992.0, + "9920": 13268436992.0, + "9925": 13268436992.0, + "9930": 13268436992.0, + "9935": 13268436992.0, + "9940": 13268436992.0, + "9945": 13268436992.0, + "9950": 13268436992.0, + "9955": 13268436992.0, + "9960": 13268436992.0, + "9965": 13268436992.0, + "9970": 13268436992.0, + "9975": 13268436992.0, + "9980": 13268436992.0, + 
"9985": 13268436992.0, + "9990": 13268436992.0, + "9995": 13268436992.0, + "10000": 13268436992.0, + "10005": 13268436992.0, + "10010": 13268436992.0, + "10015": 13268436992.0, + "10020": 13268436992.0, + "10025": 13268436992.0, + "10030": 13268436992.0, + "10035": 13268436992.0, + "10040": 13268436992.0, + "10045": 13268436992.0, + "10050": 13268436992.0, + "10055": 13268436992.0, + "10060": 13268436992.0, + "10065": 13268436992.0, + "10070": 13268436992.0, + "10075": 13268436992.0, + "10080": 13268436992.0, + "10085": 13268436992.0, + "10090": 13268436992.0, + "10095": 13268436992.0, + "10100": 13268436992.0, + "10105": 13268436992.0, + "10110": 13268436992.0, + "10115": 13268436992.0, + "10120": 13268436992.0, + "10125": 13268436992.0, + "10130": 13268436992.0, + "10135": 13268436992.0, + "10140": 13268436992.0, + "10145": 13268436992.0, + "10150": 13268436992.0, + "10155": 13268436992.0, + "10160": 13268436992.0, + "10165": 13268436992.0, + "10170": 13268436992.0, + "10175": 13268436992.0, + "10180": 13268436992.0, + "10185": 13268436992.0, + "10190": 13268436992.0, + "10195": 13268436992.0, + "10200": 13268436992.0 + } + }, + "mem-max-allocated-bytes": { + "start_step": 1, + "end_step": 10200, + "step_interval": 5, + "values": { + "1": 27658637312.0, + "5": 28158672896.0, + "10": 28158672896.0, + "15": 28158672896.0, + "20": 28158672896.0, + "25": 28158672896.0, + "30": 28158672896.0, + "35": 28158672896.0, + "40": 28158672896.0, + "45": 28158672896.0, + "50": 28158672896.0, + "55": 28158672896.0, + "60": 28158672896.0, + "65": 28158672896.0, + "70": 28158672896.0, + "75": 28158672896.0, + "80": 28158672896.0, + "85": 28158672896.0, + "90": 28158672896.0, + "95": 28158672896.0, + "100": 28158672896.0, + "105": 28158672896.0, + "110": 28158672896.0, + "115": 28158672896.0, + "120": 28158672896.0, + "125": 28158672896.0, + "130": 28158672896.0, + "135": 28158672896.0, + "140": 28158672896.0, + "145": 28158672896.0, + "150": 28158672896.0, + "155": 28158672896.0, 
+ "160": 28158672896.0, + "165": 28158672896.0, + "170": 28158672896.0, + "175": 28158672896.0, + "180": 28158672896.0, + "185": 28158672896.0, + "190": 28158672896.0, + "195": 28158672896.0, + "200": 28158672896.0, + "205": 28158672896.0, + "210": 28158672896.0, + "215": 28158672896.0, + "220": 28158672896.0, + "225": 28158672896.0, + "230": 28158672896.0, + "235": 28158672896.0, + "240": 28158672896.0, + "245": 28158672896.0, + "250": 28158672896.0, + "255": 28158672896.0, + "260": 28158672896.0, + "265": 28158672896.0, + "270": 28158672896.0, + "275": 28158672896.0, + "280": 28158672896.0, + "285": 28158672896.0, + "290": 28158672896.0, + "295": 28158672896.0, + "300": 28158672896.0, + "305": 28158672896.0, + "310": 28158672896.0, + "315": 28158672896.0, + "320": 28158672896.0, + "325": 28158672896.0, + "330": 28158672896.0, + "335": 28158672896.0, + "340": 28158672896.0, + "345": 28158672896.0, + "350": 28158672896.0, + "355": 28158672896.0, + "360": 28158672896.0, + "365": 28158672896.0, + "370": 28158672896.0, + "375": 28158672896.0, + "380": 28158672896.0, + "385": 28158672896.0, + "390": 28158672896.0, + "395": 28158672896.0, + "400": 28158672896.0, + "405": 28158672896.0, + "410": 28158672896.0, + "415": 28158672896.0, + "420": 28158672896.0, + "425": 28158672896.0, + "430": 28158672896.0, + "435": 28158672896.0, + "440": 28158672896.0, + "445": 28158672896.0, + "450": 28158672896.0, + "455": 28158672896.0, + "460": 28158672896.0, + "465": 28158672896.0, + "470": 28158672896.0, + "475": 28158672896.0, + "480": 28158672896.0, + "485": 28158672896.0, + "490": 28158672896.0, + "495": 28158672896.0, + "500": 28158672896.0, + "505": 28158672896.0, + "510": 28158672896.0, + "515": 28158672896.0, + "520": 28158672896.0, + "525": 28158672896.0, + "530": 28158672896.0, + "535": 28158672896.0, + "540": 28158672896.0, + "545": 28158672896.0, + "550": 28158672896.0, + "555": 28158672896.0, + "560": 28158672896.0, + "565": 28158672896.0, + "570": 28158672896.0, + 
"575": 28158672896.0, + "580": 28158672896.0, + "585": 28158672896.0, + "590": 28158672896.0, + "595": 28158672896.0, + "600": 28158672896.0, + "605": 28158672896.0, + "610": 28158672896.0, + "615": 28158672896.0, + "620": 28158672896.0, + "625": 28158672896.0, + "630": 28158672896.0, + "635": 28158672896.0, + "640": 28158672896.0, + "645": 28158672896.0, + "650": 28158672896.0, + "655": 28158672896.0, + "660": 28158672896.0, + "665": 28158672896.0, + "670": 28158672896.0, + "675": 28158672896.0, + "680": 28158672896.0, + "685": 28158672896.0, + "690": 28158672896.0, + "695": 28158672896.0, + "700": 28158672896.0, + "705": 28158672896.0, + "710": 28158672896.0, + "715": 28158672896.0, + "720": 28158672896.0, + "725": 28158672896.0, + "730": 28158672896.0, + "735": 28158672896.0, + "740": 28158672896.0, + "745": 28158672896.0, + "750": 28158672896.0, + "755": 28158672896.0, + "760": 28158672896.0, + "765": 28158672896.0, + "770": 28158672896.0, + "775": 28158672896.0, + "780": 28158672896.0, + "785": 28158672896.0, + "790": 28158672896.0, + "795": 28158672896.0, + "800": 28158672896.0, + "805": 28158672896.0, + "810": 28158672896.0, + "815": 28158672896.0, + "820": 28158672896.0, + "825": 28158672896.0, + "830": 28158672896.0, + "835": 28158672896.0, + "840": 28158672896.0, + "845": 28158672896.0, + "850": 28158672896.0, + "855": 28158672896.0, + "860": 28158672896.0, + "865": 28158672896.0, + "870": 28158672896.0, + "875": 28158672896.0, + "880": 28158672896.0, + "885": 28158672896.0, + "890": 28158672896.0, + "895": 28158672896.0, + "900": 28158672896.0, + "905": 28158672896.0, + "910": 28158672896.0, + "915": 28158672896.0, + "920": 28158672896.0, + "925": 28158672896.0, + "930": 28158672896.0, + "935": 28158672896.0, + "940": 28158672896.0, + "945": 28158672896.0, + "950": 28158672896.0, + "955": 28158672896.0, + "960": 28158672896.0, + "965": 28158672896.0, + "970": 28158672896.0, + "975": 28158672896.0, + "980": 28158672896.0, + "985": 28158672896.0, + "990": 
28158672896.0, + "995": 28158672896.0, + "1000": 28158672896.0, + "1005": 28158672896.0, + "1010": 28158672896.0, + "1015": 28158672896.0, + "1020": 28158672896.0, + "1025": 28158672896.0, + "1030": 28158672896.0, + "1035": 28158672896.0, + "1040": 28158672896.0, + "1045": 28158672896.0, + "1050": 28158672896.0, + "1055": 28158672896.0, + "1060": 28158672896.0, + "1065": 28158672896.0, + "1070": 28158672896.0, + "1075": 28158672896.0, + "1080": 28158672896.0, + "1085": 28158672896.0, + "1090": 28158672896.0, + "1095": 28158672896.0, + "1100": 28158672896.0, + "1105": 28158672896.0, + "1110": 28158672896.0, + "1115": 28158672896.0, + "1120": 28158672896.0, + "1125": 28158672896.0, + "1130": 28158672896.0, + "1135": 28158672896.0, + "1140": 28158672896.0, + "1145": 28158672896.0, + "1150": 28158672896.0, + "1155": 28158672896.0, + "1160": 28158672896.0, + "1165": 28158672896.0, + "1170": 28158672896.0, + "1175": 28158672896.0, + "1180": 28158672896.0, + "1185": 28158672896.0, + "1190": 28158672896.0, + "1195": 28158672896.0, + "1200": 28158672896.0, + "1205": 28158672896.0, + "1210": 28158672896.0, + "1215": 28158672896.0, + "1220": 28158672896.0, + "1225": 28158672896.0, + "1230": 28158672896.0, + "1235": 28158672896.0, + "1240": 28158672896.0, + "1245": 28158672896.0, + "1250": 28158672896.0, + "1255": 28158672896.0, + "1260": 28158672896.0, + "1265": 28158672896.0, + "1270": 28158672896.0, + "1275": 28158672896.0, + "1280": 28158672896.0, + "1285": 28158672896.0, + "1290": 28158672896.0, + "1295": 28158672896.0, + "1300": 28158672896.0, + "1305": 28158672896.0, + "1310": 28158672896.0, + "1315": 28158672896.0, + "1320": 28158672896.0, + "1325": 28158672896.0, + "1330": 28158672896.0, + "1335": 28158672896.0, + "1340": 28158672896.0, + "1345": 28158672896.0, + "1350": 28158672896.0, + "1355": 28158672896.0, + "1360": 28158672896.0, + "1365": 28158672896.0, + "1370": 28158672896.0, + "1375": 28158672896.0, + "1380": 28158672896.0, + "1385": 28158672896.0, + "1390": 
28158672896.0, + "1395": 28158672896.0, + "1400": 28158672896.0, + "1405": 28158672896.0, + "1410": 28158672896.0, + "1415": 28158672896.0, + "1420": 28158672896.0, + "1425": 28158672896.0, + "1430": 28158672896.0, + "1435": 28158672896.0, + "1440": 28158672896.0, + "1445": 28158672896.0, + "1450": 28158672896.0, + "1455": 28158672896.0, + "1460": 28158672896.0, + "1465": 28158672896.0, + "1470": 28158672896.0, + "1475": 28158672896.0, + "1480": 28158672896.0, + "1485": 28158672896.0, + "1490": 28158672896.0, + "1495": 28158672896.0, + "1500": 28158672896.0, + "1505": 28158672896.0, + "1510": 28158672896.0, + "1515": 28158672896.0, + "1520": 28158672896.0, + "1525": 28158672896.0, + "1530": 28158672896.0, + "1535": 28158672896.0, + "1540": 28158672896.0, + "1545": 28158672896.0, + "1550": 28158672896.0, + "1555": 28158672896.0, + "1560": 28158672896.0, + "1565": 28158672896.0, + "1570": 28158672896.0, + "1575": 28158672896.0, + "1580": 28158672896.0, + "1585": 28158672896.0, + "1590": 28158672896.0, + "1595": 28158672896.0, + "1600": 28158672896.0, + "1605": 28158672896.0, + "1610": 28158672896.0, + "1615": 28158672896.0, + "1620": 28158672896.0, + "1625": 28158672896.0, + "1630": 28158672896.0, + "1635": 28158672896.0, + "1640": 28158672896.0, + "1645": 28158672896.0, + "1650": 28158672896.0, + "1655": 28158672896.0, + "1660": 28158672896.0, + "1665": 28158672896.0, + "1670": 28158672896.0, + "1675": 28158672896.0, + "1680": 28158672896.0, + "1685": 28158672896.0, + "1690": 28158672896.0, + "1695": 28158672896.0, + "1700": 28158672896.0, + "1705": 28158672896.0, + "1710": 28158672896.0, + "1715": 28158672896.0, + "1720": 28158672896.0, + "1725": 28158672896.0, + "1730": 28158672896.0, + "1735": 28158672896.0, + "1740": 28158672896.0, + "1745": 28158672896.0, + "1750": 28158672896.0, + "1755": 28158672896.0, + "1760": 28158672896.0, + "1765": 28158672896.0, + "1770": 28158672896.0, + "1775": 28158672896.0, + "1780": 28158672896.0, + "1785": 28158672896.0, + "1790": 
28158672896.0, + "1795": 28158672896.0, + "1800": 28158672896.0, + "1805": 28158672896.0, + "1810": 28158672896.0, + "1815": 28158672896.0, + "1820": 28158672896.0, + "1825": 28158672896.0, + "1830": 28158672896.0, + "1835": 28158672896.0, + "1840": 28158672896.0, + "1845": 28158672896.0, + "1850": 28158672896.0, + "1855": 28158672896.0, + "1860": 28158672896.0, + "1865": 28158672896.0, + "1870": 28158672896.0, + "1875": 28158672896.0, + "1880": 28158672896.0, + "1885": 28158672896.0, + "1890": 28158672896.0, + "1895": 28158672896.0, + "1900": 28158672896.0, + "1905": 28158672896.0, + "1910": 28158672896.0, + "1915": 28158672896.0, + "1920": 28158672896.0, + "1925": 28158672896.0, + "1930": 28158672896.0, + "1935": 28158672896.0, + "1940": 28158672896.0, + "1945": 28158672896.0, + "1950": 28158672896.0, + "1955": 28158672896.0, + "1960": 28158672896.0, + "1965": 28158672896.0, + "1970": 28158672896.0, + "1975": 28158672896.0, + "1980": 28158672896.0, + "1985": 28158672896.0, + "1990": 28158672896.0, + "1995": 28158672896.0, + "2000": 28158672896.0, + "2005": 28158672896.0, + "2010": 28158672896.0, + "2015": 28158672896.0, + "2020": 28158672896.0, + "2025": 28158672896.0, + "2030": 28158672896.0, + "2035": 28158672896.0, + "2040": 28158672896.0, + "2045": 28158672896.0, + "2050": 28158672896.0, + "2055": 28158672896.0, + "2060": 28158672896.0, + "2065": 28158672896.0, + "2070": 28158672896.0, + "2075": 28158672896.0, + "2080": 28158672896.0, + "2085": 28158672896.0, + "2090": 28158672896.0, + "2095": 28158672896.0, + "2100": 28158672896.0, + "2105": 28158672896.0, + "2110": 28158672896.0, + "2115": 28158672896.0, + "2120": 28158672896.0, + "2125": 28158672896.0, + "2130": 28158672896.0, + "2135": 28158672896.0, + "2140": 28158672896.0, + "2145": 28158672896.0, + "2150": 28158672896.0, + "2155": 28158672896.0, + "2160": 28158672896.0, + "2165": 28158672896.0, + "2170": 28158672896.0, + "2175": 28158672896.0, + "2180": 28158672896.0, + "2185": 28158672896.0, + "2190": 
28158672896.0, + "2195": 28158672896.0, + "2200": 28158672896.0, + "2205": 28158672896.0, + "2210": 28158672896.0, + "2215": 28158672896.0, + "2220": 28158672896.0, + "2225": 28158672896.0, + "2230": 28158672896.0, + "2235": 28158672896.0, + "2240": 28158672896.0, + "2245": 28158672896.0, + "2250": 28158672896.0, + "2255": 28158672896.0, + "2260": 28158672896.0, + "2265": 28158672896.0, + "2270": 28158672896.0, + "2275": 28158672896.0, + "2280": 28158672896.0, + "2285": 28158672896.0, + "2290": 28158672896.0, + "2295": 28158672896.0, + "2300": 28158672896.0, + "2305": 28158672896.0, + "2310": 28158672896.0, + "2315": 28158672896.0, + "2320": 28158672896.0, + "2325": 28158672896.0, + "2330": 28158672896.0, + "2335": 28158672896.0, + "2340": 28158672896.0, + "2345": 28158672896.0, + "2350": 28158672896.0, + "2355": 28158672896.0, + "2360": 28158672896.0, + "2365": 28158672896.0, + "2370": 28158672896.0, + "2375": 28158672896.0, + "2380": 28158672896.0, + "2385": 28158672896.0, + "2390": 28158672896.0, + "2395": 28158672896.0, + "2400": 28158672896.0, + "2405": 28158672896.0, + "2410": 28158672896.0, + "2415": 28158672896.0, + "2420": 28158672896.0, + "2425": 28158672896.0, + "2430": 28158672896.0, + "2435": 28158672896.0, + "2440": 28158672896.0, + "2445": 28158672896.0, + "2450": 28158672896.0, + "2455": 28158672896.0, + "2460": 28158672896.0, + "2465": 28158672896.0, + "2470": 28158672896.0, + "2475": 28158672896.0, + "2480": 28158672896.0, + "2485": 28158672896.0, + "2490": 28158672896.0, + "2495": 28158672896.0, + "2500": 28158672896.0, + "2505": 28158672896.0, + "2510": 28158672896.0, + "2515": 28158672896.0, + "2520": 28158672896.0, + "2525": 28158672896.0, + "2530": 28158672896.0, + "2535": 28158672896.0, + "2540": 28158672896.0, + "2545": 28158672896.0, + "2550": 28158672896.0, + "2555": 28158672896.0, + "2560": 28158672896.0, + "2565": 28158672896.0, + "2570": 28158672896.0, + "2575": 28158672896.0, + "2580": 28158672896.0, + "2585": 28158672896.0, + "2590": 
28158672896.0, + "2595": 28158672896.0, + "2600": 28158672896.0, + "2605": 28158672896.0, + "2610": 28158672896.0, + "2615": 28158672896.0, + "2620": 28158672896.0, + "2625": 28158672896.0, + "2630": 28158672896.0, + "2635": 28158672896.0, + "2640": 28158672896.0, + "2645": 28158672896.0, + "2650": 28158672896.0, + "2655": 28158672896.0, + "2660": 28158672896.0, + "2665": 28158672896.0, + "2670": 28158672896.0, + "2675": 28158672896.0, + "2680": 28158672896.0, + "2685": 28158672896.0, + "2690": 28158672896.0, + "2695": 28158672896.0, + "2700": 28158672896.0, + "2705": 28158672896.0, + "2710": 28158672896.0, + "2715": 28158672896.0, + "2720": 28158672896.0, + "2725": 28158672896.0, + "2730": 28158672896.0, + "2735": 28158672896.0, + "2740": 28158672896.0, + "2745": 28158672896.0, + "2750": 28158672896.0, + "2755": 28158672896.0, + "2760": 28158672896.0, + "2765": 28158672896.0, + "2770": 28158672896.0, + "2775": 28158672896.0, + "2780": 28158672896.0, + "2785": 28158672896.0, + "2790": 28158672896.0, + "2795": 28158672896.0, + "2800": 28158672896.0, + "2805": 28158672896.0, + "2810": 28158672896.0, + "2815": 28158672896.0, + "2820": 28158672896.0, + "2825": 28158672896.0, + "2830": 28158672896.0, + "2835": 28158672896.0, + "2840": 28158672896.0, + "2845": 28158672896.0, + "2850": 28158672896.0, + "2855": 28158672896.0, + "2860": 28158672896.0, + "2865": 28158672896.0, + "2870": 28158672896.0, + "2875": 28158672896.0, + "2880": 28158672896.0, + "2885": 28158672896.0, + "2890": 28158672896.0, + "2895": 28158672896.0, + "2900": 28158672896.0, + "2905": 28158672896.0, + "2910": 28158672896.0, + "2915": 28158672896.0, + "2920": 28158672896.0, + "2925": 28158672896.0, + "2930": 28158672896.0, + "2935": 28158672896.0, + "2940": 28158672896.0, + "2945": 28158672896.0, + "2950": 28158672896.0, + "2955": 28158672896.0, + "2960": 28158672896.0, + "2965": 28158672896.0, + "2970": 28158672896.0, + "2975": 28158672896.0, + "2980": 28158672896.0, + "2985": 28158672896.0, + "2990": 
28158672896.0, + "2995": 28158672896.0, + "3000": 28158672896.0, + "3005": 28158672896.0, + "3010": 28158672896.0, + "3015": 28158672896.0, + "3020": 28158672896.0, + "3025": 28158672896.0, + "3030": 28158672896.0, + "3035": 28158672896.0, + "3040": 28158672896.0, + "3045": 28158672896.0, + "3050": 28158672896.0, + "3055": 28158672896.0, + "3060": 28158672896.0, + "3065": 28158672896.0, + "3070": 28158672896.0, + "3075": 28158672896.0, + "3080": 28158672896.0, + "3085": 28158672896.0, + "3090": 28158672896.0, + "3095": 28158672896.0, + "3100": 28158672896.0, + "3105": 28158672896.0, + "3110": 28158672896.0, + "3115": 28158672896.0, + "3120": 28158672896.0, + "3125": 28158672896.0, + "3130": 28158672896.0, + "3135": 28158672896.0, + "3140": 28158672896.0, + "3145": 28158672896.0, + "3150": 28158672896.0, + "3155": 28158672896.0, + "3160": 28158672896.0, + "3165": 28158672896.0, + "3170": 28158672896.0, + "3175": 28158672896.0, + "3180": 28158672896.0, + "3185": 28158672896.0, + "3190": 28158672896.0, + "3195": 28158672896.0, + "3200": 28158672896.0, + "3205": 28158672896.0, + "3210": 28158672896.0, + "3215": 28158672896.0, + "3220": 28158672896.0, + "3225": 28158672896.0, + "3230": 28158672896.0, + "3235": 28158672896.0, + "3240": 28158672896.0, + "3245": 28158672896.0, + "3250": 28158672896.0, + "3255": 28158672896.0, + "3260": 28158672896.0, + "3265": 28158672896.0, + "3270": 28158672896.0, + "3275": 28158672896.0, + "3280": 28158672896.0, + "3285": 28158672896.0, + "3290": 28158672896.0, + "3295": 28158672896.0, + "3300": 28158672896.0, + "3305": 28158672896.0, + "3310": 28158672896.0, + "3315": 28158672896.0, + "3320": 28158672896.0, + "3325": 28158672896.0, + "3330": 28158672896.0, + "3335": 28158672896.0, + "3340": 28158672896.0, + "3345": 28158672896.0, + "3350": 28158672896.0, + "3355": 28158672896.0, + "3360": 28158672896.0, + "3365": 28158672896.0, + "3370": 28158672896.0, + "3375": 28158672896.0, + "3380": 28158672896.0, + "3385": 28158672896.0, + "3390": 
28158672896.0, + "3395": 28158672896.0, + "3400": 28158672896.0, + "3405": 28158672896.0, + "3410": 28158672896.0, + "3415": 28158672896.0, + "3420": 28158672896.0, + "3425": 28158672896.0, + "3430": 28158672896.0, + "3435": 28158672896.0, + "3440": 28158672896.0, + "3445": 28158672896.0, + "3450": 28158672896.0, + "3455": 28158672896.0, + "3460": 28158672896.0, + "3465": 28158672896.0, + "3470": 28158672896.0, + "3475": 28158672896.0, + "3480": 28158672896.0, + "3485": 28158672896.0, + "3490": 28158672896.0, + "3495": 28158672896.0, + "3500": 28158672896.0, + "3505": 28158672896.0, + "3510": 28158672896.0, + "3515": 28158672896.0, + "3520": 28158672896.0, + "3525": 28158672896.0, + "3530": 28158672896.0, + "3535": 28158672896.0, + "3540": 28158672896.0, + "3545": 28158672896.0, + "3550": 28158672896.0, + "3555": 28158672896.0, + "3560": 28158672896.0, + "3565": 28158672896.0, + "3570": 28158672896.0, + "3575": 28158672896.0, + "3580": 28158672896.0, + "3585": 28158672896.0, + "3590": 28158672896.0, + "3595": 28158672896.0, + "3600": 28158672896.0, + "3605": 28158672896.0, + "3610": 28158672896.0, + "3615": 28158672896.0, + "3620": 28158672896.0, + "3625": 28158672896.0, + "3630": 28158672896.0, + "3635": 28158672896.0, + "3640": 28158672896.0, + "3645": 28158672896.0, + "3650": 28158672896.0, + "3655": 28158672896.0, + "3660": 28158672896.0, + "3665": 28158672896.0, + "3670": 28158672896.0, + "3675": 28158672896.0, + "3680": 28158672896.0, + "3685": 28158672896.0, + "3690": 28158672896.0, + "3695": 28158672896.0, + "3700": 28158672896.0, + "3705": 28158672896.0, + "3710": 28158672896.0, + "3715": 28158672896.0, + "3720": 28158672896.0, + "3725": 28158672896.0, + "3730": 28158672896.0, + "3735": 28158672896.0, + "3740": 28158672896.0, + "3745": 28158672896.0, + "3750": 28158672896.0, + "3755": 28158672896.0, + "3760": 28158672896.0, + "3765": 28158672896.0, + "3770": 28158672896.0, + "3775": 28158672896.0, + "3780": 28158672896.0, + "3785": 28158672896.0, + "3790": 
28158672896.0, + "3795": 28158672896.0, + "3800": 28158672896.0, + "3805": 28158672896.0, + "3810": 28158672896.0, + "3815": 28158672896.0, + "3820": 28158672896.0, + "3825": 28158672896.0, + "3830": 28158672896.0, + "3835": 28158672896.0, + "3840": 28158672896.0, + "3845": 28158672896.0, + "3850": 28158672896.0, + "3855": 28158672896.0, + "3860": 28158672896.0, + "3865": 28158672896.0, + "3870": 28158672896.0, + "3875": 28158672896.0, + "3880": 28158672896.0, + "3885": 28158672896.0, + "3890": 28158672896.0, + "3895": 28158672896.0, + "3900": 28158672896.0, + "3905": 28158672896.0, + "3910": 28158672896.0, + "3915": 28158672896.0, + "3920": 28158672896.0, + "3925": 28158672896.0, + "3930": 28158672896.0, + "3935": 28158672896.0, + "3940": 28158672896.0, + "3945": 28158672896.0, + "3950": 28158672896.0, + "3955": 28158672896.0, + "3960": 28158672896.0, + "3965": 28158672896.0, + "3970": 28158672896.0, + "3975": 28158672896.0, + "3980": 28158672896.0, + "3985": 28158672896.0, + "3990": 28158672896.0, + "3995": 28158672896.0, + "4000": 28158672896.0, + "4005": 28158672896.0, + "4010": 28158672896.0, + "4015": 28158672896.0, + "4020": 28158672896.0, + "4025": 28158672896.0, + "4030": 28158672896.0, + "4035": 28158672896.0, + "4040": 28158672896.0, + "4045": 28158672896.0, + "4050": 28158672896.0, + "4055": 28158672896.0, + "4060": 28158672896.0, + "4065": 28158672896.0, + "4070": 28158672896.0, + "4075": 28158672896.0, + "4080": 28158672896.0, + "4085": 28158672896.0, + "4090": 28158672896.0, + "4095": 28158672896.0, + "4100": 28158672896.0, + "4105": 28158672896.0, + "4110": 28158672896.0, + "4115": 28158672896.0, + "4120": 28158672896.0, + "4125": 28158672896.0, + "4130": 28158672896.0, + "4135": 28158672896.0, + "4140": 28158672896.0, + "4145": 28158672896.0, + "4150": 28158672896.0, + "4155": 28158672896.0, + "4160": 28158672896.0, + "4165": 28158672896.0, + "4170": 28158672896.0, + "4175": 28158672896.0, + "4180": 28158672896.0, + "4185": 28158672896.0, + "4190": 
28158672896.0, + "4195": 28158672896.0, + "4200": 28158672896.0, + "4205": 28158672896.0, + "4210": 28158672896.0, + "4215": 28158672896.0, + "4220": 28158672896.0, + "4225": 28158672896.0, + "4230": 28158672896.0, + "4235": 28158672896.0, + "4240": 28158672896.0, + "4245": 28158672896.0, + "4250": 28158672896.0, + "4255": 28158672896.0, + "4260": 28158672896.0, + "4265": 28158672896.0, + "4270": 28158672896.0, + "4275": 28158672896.0, + "4280": 28158672896.0, + "4285": 28158672896.0, + "4290": 28158672896.0, + "4295": 28158672896.0, + "4300": 28158672896.0, + "4305": 28158672896.0, + "4310": 28158672896.0, + "4315": 28158672896.0, + "4320": 28158672896.0, + "4325": 28158672896.0, + "4330": 28158672896.0, + "4335": 28158672896.0, + "4340": 28158672896.0, + "4345": 28158672896.0, + "4350": 28158672896.0, + "4355": 28158672896.0, + "4360": 28158672896.0, + "4365": 28158672896.0, + "4370": 28158672896.0, + "4375": 28158672896.0, + "4380": 28158672896.0, + "4385": 28158672896.0, + "4390": 28158672896.0, + "4395": 28158672896.0, + "4400": 28158672896.0, + "4405": 28158672896.0, + "4410": 28158672896.0, + "4415": 28158672896.0, + "4420": 28158672896.0, + "4425": 28158672896.0, + "4430": 28158672896.0, + "4435": 28158672896.0, + "4440": 28158672896.0, + "4445": 28158672896.0, + "4450": 28158672896.0, + "4455": 28158672896.0, + "4460": 28158672896.0, + "4465": 28158672896.0, + "4470": 28158672896.0, + "4475": 28158672896.0, + "4480": 28158672896.0, + "4485": 28158672896.0, + "4490": 28158672896.0, + "4495": 28158672896.0, + "4500": 28158672896.0, + "4505": 28158672896.0, + "4510": 28158672896.0, + "4515": 28158672896.0, + "4520": 28158672896.0, + "4525": 28158672896.0, + "4530": 28158672896.0, + "4535": 28158672896.0, + "4540": 28158672896.0, + "4545": 28158672896.0, + "4550": 28158672896.0, + "4555": 28158672896.0, + "4560": 28158672896.0, + "4565": 28158672896.0, + "4570": 28158672896.0, + "4575": 28158672896.0, + "4580": 28158672896.0, + "4585": 28158672896.0, + "4590": 
28158672896.0, + "4595": 28158672896.0, + "4600": 28158672896.0, + "4605": 28158672896.0, + "4610": 28158672896.0, + "4615": 28158672896.0, + "4620": 28158672896.0, + "4625": 28158672896.0, + "4630": 28158672896.0, + "4635": 28158672896.0, + "4640": 28158672896.0, + "4645": 28158672896.0, + "4650": 28158672896.0, + "4655": 28158672896.0, + "4660": 28158672896.0, + "4665": 28158672896.0, + "4670": 28158672896.0, + "4675": 28158672896.0, + "4680": 28158672896.0, + "4685": 28158672896.0, + "4690": 28158672896.0, + "4695": 28158672896.0, + "4700": 28158672896.0, + "4705": 28158672896.0, + "4710": 28158672896.0, + "4715": 28158672896.0, + "4720": 28158672896.0, + "4725": 28158672896.0, + "4730": 28158672896.0, + "4735": 28158672896.0, + "4740": 28158672896.0, + "4745": 28158672896.0, + "4750": 28158672896.0, + "4755": 28158672896.0, + "4760": 28158672896.0, + "4765": 28158672896.0, + "4770": 28158672896.0, + "4775": 28158672896.0, + "4780": 28158672896.0, + "4785": 28158672896.0, + "4790": 28158672896.0, + "4795": 28158672896.0, + "4800": 28158672896.0, + "4805": 28158672896.0, + "4810": 28158672896.0, + "4815": 28158672896.0, + "4820": 28158672896.0, + "4825": 28158672896.0, + "4830": 28158672896.0, + "4835": 28158672896.0, + "4840": 28158672896.0, + "4845": 28158672896.0, + "4850": 28158672896.0, + "4855": 28158672896.0, + "4860": 28158672896.0, + "4865": 28158672896.0, + "4870": 28158672896.0, + "4875": 28158672896.0, + "4880": 28158672896.0, + "4885": 28158672896.0, + "4890": 28158672896.0, + "4895": 28158672896.0, + "4900": 28158672896.0, + "4905": 28158672896.0, + "4910": 28158672896.0, + "4915": 28158672896.0, + "4920": 28158672896.0, + "4925": 28158672896.0, + "4930": 28158672896.0, + "4935": 28158672896.0, + "4940": 28158672896.0, + "4945": 28158672896.0, + "4950": 28158672896.0, + "4955": 28158672896.0, + "4960": 28158672896.0, + "4965": 28158672896.0, + "4970": 28158672896.0, + "4975": 28158672896.0, + "4980": 28158672896.0, + "4985": 28158672896.0, + "4990": 
28158672896.0, + "4995": 28158672896.0, + "5000": 28158672896.0, + "5005": 28158672896.0, + "5010": 28158672896.0, + "5015": 28158672896.0, + "5020": 28158672896.0, + "5025": 28158672896.0, + "5030": 28158672896.0, + "5035": 28158672896.0, + "5040": 28158672896.0, + "5045": 28158672896.0, + "5050": 28158672896.0, + "5055": 28158672896.0, + "5060": 28158672896.0, + "5065": 28158672896.0, + "5070": 28158672896.0, + "5075": 28158672896.0, + "5080": 28158672896.0, + "5085": 28158672896.0, + "5090": 28158672896.0, + "5095": 28158672896.0, + "5100": 28158672896.0, + "5105": 28158672896.0, + "5110": 28158672896.0, + "5115": 28158672896.0, + "5120": 28158672896.0, + "5125": 28158672896.0, + "5130": 28158672896.0, + "5135": 28158672896.0, + "5140": 28158672896.0, + "5145": 28158672896.0, + "5150": 28158672896.0, + "5155": 28158672896.0, + "5160": 28158672896.0, + "5165": 28158672896.0, + "5170": 28158672896.0, + "5175": 28158672896.0, + "5180": 28158672896.0, + "5185": 28158672896.0, + "5190": 28158672896.0, + "5195": 28158672896.0, + "5200": 28158672896.0, + "5205": 28158672896.0, + "5210": 28158672896.0, + "5215": 28158672896.0, + "5220": 28158672896.0, + "5225": 28158672896.0, + "5230": 28158672896.0, + "5235": 28158672896.0, + "5240": 28158672896.0, + "5245": 28158672896.0, + "5250": 28158672896.0, + "5255": 28158672896.0, + "5260": 28158672896.0, + "5265": 28158672896.0, + "5270": 28158672896.0, + "5275": 28158672896.0, + "5280": 28158672896.0, + "5285": 28158672896.0, + "5290": 28158672896.0, + "5295": 28158672896.0, + "5300": 28158672896.0, + "5305": 28158672896.0, + "5310": 28158672896.0, + "5315": 28158672896.0, + "5320": 28158672896.0, + "5325": 28158672896.0, + "5330": 28158672896.0, + "5335": 28158672896.0, + "5340": 28158672896.0, + "5345": 28158672896.0, + "5350": 28158672896.0, + "5355": 28158672896.0, + "5360": 28158672896.0, + "5365": 28158672896.0, + "5370": 28158672896.0, + "5375": 28158672896.0, + "5380": 28158672896.0, + "5385": 28158672896.0, + "5390": 
28158672896.0, + "5395": 28158672896.0, + "5400": 28158672896.0, + "5405": 28158672896.0, + "5410": 28158672896.0, + "5415": 28158672896.0, + "5420": 28158672896.0, + "5425": 28158672896.0, + "5430": 28158672896.0, + "5435": 28158672896.0, + "5440": 28158672896.0, + "5445": 28158672896.0, + "5450": 28158672896.0, + "5455": 28158672896.0, + "5460": 28158672896.0, + "5465": 28158672896.0, + "5470": 28158672896.0, + "5475": 28158672896.0, + "5480": 28158672896.0, + "5485": 28158672896.0, + "5490": 28158672896.0, + "5495": 28158672896.0, + "5500": 28158672896.0, + "5505": 28158672896.0, + "5510": 28158672896.0, + "5515": 28158672896.0, + "5520": 28158672896.0, + "5525": 28158672896.0, + "5530": 28158672896.0, + "5535": 28158672896.0, + "5540": 28158672896.0, + "5545": 28158672896.0, + "5550": 28158672896.0, + "5555": 28158672896.0, + "5560": 28158672896.0, + "5565": 28158672896.0, + "5570": 28158672896.0, + "5575": 28158672896.0, + "5580": 28158672896.0, + "5585": 28158672896.0, + "5590": 28158672896.0, + "5595": 28158672896.0, + "5600": 28158672896.0, + "5605": 28158672896.0, + "5610": 28158672896.0, + "5615": 28158672896.0, + "5620": 28158672896.0, + "5625": 28158672896.0, + "5630": 28158672896.0, + "5635": 28158672896.0, + "5640": 28158672896.0, + "5645": 28158672896.0, + "5650": 28158672896.0, + "5655": 28158672896.0, + "5660": 28158672896.0, + "5665": 28158672896.0, + "5670": 28158672896.0, + "5675": 28158672896.0, + "5680": 28158672896.0, + "5685": 28158672896.0, + "5690": 28158672896.0, + "5695": 28158672896.0, + "5700": 28158672896.0, + "5705": 28158672896.0, + "5710": 28158672896.0, + "5715": 28158672896.0, + "5720": 28158672896.0, + "5725": 28158672896.0, + "5730": 28158672896.0, + "5735": 28158672896.0, + "5740": 28158672896.0, + "5745": 28158672896.0, + "5750": 28158672896.0, + "5755": 28158672896.0, + "5760": 28158672896.0, + "5765": 28158672896.0, + "5770": 28158672896.0, + "5775": 28158672896.0, + "5780": 28158672896.0, + "5785": 28158672896.0, + "5790": 
28158672896.0, + "5795": 28158672896.0, + "5800": 28158672896.0, + "5805": 28158672896.0, + "5810": 28158672896.0, + "5815": 28158672896.0, + "5820": 28158672896.0, + "5825": 28158672896.0, + "5830": 28158672896.0, + "5835": 28158672896.0, + "5840": 28158672896.0, + "5845": 28158672896.0, + "5850": 28158672896.0, + "5855": 28158672896.0, + "5860": 28158672896.0, + "5865": 28158672896.0, + "5870": 28158672896.0, + "5875": 28158672896.0, + "5880": 28158672896.0, + "5885": 28158672896.0, + "5890": 28158672896.0, + "5895": 28158672896.0, + "5900": 28158672896.0, + "5905": 28158672896.0, + "5910": 28158672896.0, + "5915": 28158672896.0, + "5920": 28158672896.0, + "5925": 28158672896.0, + "5930": 28158672896.0, + "5935": 28158672896.0, + "5940": 28158672896.0, + "5945": 28158672896.0, + "5950": 28158672896.0, + "5955": 28158672896.0, + "5960": 28158672896.0, + "5965": 28158672896.0, + "5970": 28158672896.0, + "5975": 28158672896.0, + "5980": 28158672896.0, + "5985": 28158672896.0, + "5990": 28158672896.0, + "5995": 28158672896.0, + "6000": 28158672896.0, + "6005": 28158672896.0, + "6010": 28158672896.0, + "6015": 28158672896.0, + "6020": 28158672896.0, + "6025": 28158672896.0, + "6030": 28158672896.0, + "6035": 28158672896.0, + "6040": 28158672896.0, + "6045": 28158672896.0, + "6050": 28158672896.0, + "6055": 28158672896.0, + "6060": 28158672896.0, + "6065": 28158672896.0, + "6070": 28158672896.0, + "6075": 28158672896.0, + "6080": 28158672896.0, + "6085": 28158672896.0, + "6090": 28158672896.0, + "6095": 28158672896.0, + "6100": 28158672896.0, + "6105": 28158672896.0, + "6110": 28158672896.0, + "6115": 28158672896.0, + "6120": 28158672896.0, + "6125": 28158672896.0, + "6130": 28158672896.0, + "6135": 28158672896.0, + "6140": 28158672896.0, + "6145": 28158672896.0, + "6150": 28158672896.0, + "6155": 28158672896.0, + "6160": 28158672896.0, + "6165": 28158672896.0, + "6170": 28158672896.0, + "6175": 28158672896.0, + "6180": 28158672896.0, + "6185": 28158672896.0, + "6190": 
28158672896.0, + "6195": 28158672896.0, + "6200": 28158672896.0, + "6205": 28158672896.0, + "6210": 28158672896.0, + "6215": 28158672896.0, + "6220": 28158672896.0, + "6225": 28158672896.0, + "6230": 28158672896.0, + "6235": 28158672896.0, + "6240": 28158672896.0, + "6245": 28158672896.0, + "6250": 28158672896.0, + "6255": 28158672896.0, + "6260": 28158672896.0, + "6265": 28158672896.0, + "6270": 28158672896.0, + "6275": 28158672896.0, + "6280": 28158672896.0, + "6285": 28158672896.0, + "6290": 28158672896.0, + "6295": 28158672896.0, + "6300": 28158672896.0, + "6305": 28158672896.0, + "6310": 28158672896.0, + "6315": 28158672896.0, + "6320": 28158672896.0, + "6325": 28158672896.0, + "6330": 28158672896.0, + "6335": 28158672896.0, + "6340": 28158672896.0, + "6345": 28158672896.0, + "6350": 28158672896.0, + "6355": 28158672896.0, + "6360": 28158672896.0, + "6365": 28158672896.0, + "6370": 28158672896.0, + "6375": 28158672896.0, + "6380": 28158672896.0, + "6385": 28158672896.0, + "6390": 28158672896.0, + "6395": 28158672896.0, + "6400": 28158672896.0, + "6405": 28158672896.0, + "6410": 28158672896.0, + "6415": 28158672896.0, + "6420": 28158672896.0, + "6425": 28158672896.0, + "6430": 28158672896.0, + "6435": 28158672896.0, + "6440": 28158672896.0, + "6445": 28158672896.0, + "6450": 28158672896.0, + "6455": 28158672896.0, + "6460": 28158672896.0, + "6465": 28158672896.0, + "6470": 28158672896.0, + "6475": 28158672896.0, + "6480": 28158672896.0, + "6485": 28158672896.0, + "6490": 28158672896.0, + "6495": 28158672896.0, + "6500": 28158672896.0, + "6505": 28158672896.0, + "6510": 28158672896.0, + "6515": 28158672896.0, + "6520": 28158672896.0, + "6525": 28158672896.0, + "6530": 28158672896.0, + "6535": 28158672896.0, + "6540": 28158672896.0, + "6545": 28158672896.0, + "6550": 28158672896.0, + "6555": 28158672896.0, + "6560": 28158672896.0, + "6565": 28158672896.0, + "6570": 28158672896.0, + "6575": 28158672896.0, + "6580": 28158672896.0, + "6585": 28158672896.0, + "6590": 
28158672896.0, + "6595": 28158672896.0, + "6600": 28158672896.0, + "6605": 28158672896.0, + "6610": 28158672896.0, + "6615": 28158672896.0, + "6620": 28158672896.0, + "6625": 28158672896.0, + "6630": 28158672896.0, + "6635": 28158672896.0, + "6640": 28158672896.0, + "6645": 28158672896.0, + "6650": 28158672896.0, + "6655": 28158672896.0, + "6660": 28158672896.0, + "6665": 28158672896.0, + "6670": 28158672896.0, + "6675": 28158672896.0, + "6680": 28158672896.0, + "6685": 28158672896.0, + "6690": 28158672896.0, + "6695": 28158672896.0, + "6700": 28158672896.0, + "6705": 28158672896.0, + "6710": 28158672896.0, + "6715": 28158672896.0, + "6720": 28158672896.0, + "6725": 28158672896.0, + "6730": 28158672896.0, + "6735": 28158672896.0, + "6740": 28158672896.0, + "6745": 28158672896.0, + "6750": 28158672896.0, + "6755": 28158672896.0, + "6760": 28158672896.0, + "6765": 28158672896.0, + "6770": 28158672896.0, + "6775": 28158672896.0, + "6780": 28158672896.0, + "6785": 28158672896.0, + "6790": 28158672896.0, + "6795": 28158672896.0, + "6800": 28158672896.0, + "6805": 28158672896.0, + "6810": 28158672896.0, + "6815": 28158672896.0, + "6820": 28158672896.0, + "6825": 28158672896.0, + "6830": 28158672896.0, + "6835": 28158672896.0, + "6840": 28158672896.0, + "6845": 28158672896.0, + "6850": 28158672896.0, + "6855": 28158672896.0, + "6860": 28158672896.0, + "6865": 28158672896.0, + "6870": 28158672896.0, + "6875": 28158672896.0, + "6880": 28158672896.0, + "6885": 28158672896.0, + "6890": 28158672896.0, + "6895": 28158672896.0, + "6900": 28158672896.0, + "6905": 28158672896.0, + "6910": 28158672896.0, + "6915": 28158672896.0, + "6920": 28158672896.0, + "6925": 28158672896.0, + "6930": 28158672896.0, + "6935": 28158672896.0, + "6940": 28158672896.0, + "6945": 28158672896.0, + "6950": 28158672896.0, + "6955": 28158672896.0, + "6960": 28158672896.0, + "6965": 28158672896.0, + "6970": 28158672896.0, + "6975": 28158672896.0, + "6980": 28158672896.0, + "6985": 28158672896.0, + "6990": 
28158672896.0, + "6995": 28158672896.0, + "7000": 28158672896.0, + "7005": 28158672896.0, + "7010": 28158672896.0, + "7015": 28158672896.0, + "7020": 28158672896.0, + "7025": 28158672896.0, + "7030": 28158672896.0, + "7035": 28158672896.0, + "7040": 28158672896.0, + "7045": 28158672896.0, + "7050": 28158672896.0, + "7055": 28158672896.0, + "7060": 28158672896.0, + "7065": 28158672896.0, + "7070": 28158672896.0, + "7075": 28158672896.0, + "7080": 28158672896.0, + "7085": 28158672896.0, + "7090": 28158672896.0, + "7095": 28158672896.0, + "7100": 28158672896.0, + "7105": 28158672896.0, + "7110": 28158672896.0, + "7115": 28158672896.0, + "7120": 28158672896.0, + "7125": 28158672896.0, + "7130": 28158672896.0, + "7135": 28158672896.0, + "7140": 28158672896.0, + "7145": 28158672896.0, + "7150": 28158672896.0, + "7155": 28158672896.0, + "7160": 28158672896.0, + "7165": 28158672896.0, + "7170": 28158672896.0, + "7175": 28158672896.0, + "7180": 28158672896.0, + "7185": 28158672896.0, + "7190": 28158672896.0, + "7195": 28158672896.0, + "7200": 28158672896.0, + "7205": 28158672896.0, + "7210": 28158672896.0, + "7215": 28158672896.0, + "7220": 28158672896.0, + "7225": 28158672896.0, + "7230": 28158672896.0, + "7235": 28158672896.0, + "7240": 28158672896.0, + "7245": 28158672896.0, + "7250": 28158672896.0, + "7255": 28158672896.0, + "7260": 28158672896.0, + "7265": 28158672896.0, + "7270": 28158672896.0, + "7275": 28158672896.0, + "7280": 28158672896.0, + "7285": 28158672896.0, + "7290": 28158672896.0, + "7295": 28158672896.0, + "7300": 28158672896.0, + "7305": 28158672896.0, + "7310": 28158672896.0, + "7315": 28158672896.0, + "7320": 28158672896.0, + "7325": 28158672896.0, + "7330": 28158672896.0, + "7335": 28158672896.0, + "7340": 28158672896.0, + "7345": 28158672896.0, + "7350": 28158672896.0, + "7355": 28158672896.0, + "7360": 28158672896.0, + "7365": 28158672896.0, + "7370": 28158672896.0, + "7375": 28158672896.0, + "7380": 28158672896.0, + "7385": 28158672896.0, + "7390": 
28158672896.0, + "7395": 28158672896.0, + "7400": 28158672896.0, + "7405": 28158672896.0, + "7410": 28158672896.0, + "7415": 28158672896.0, + "7420": 28158672896.0, + "7425": 28158672896.0, + "7430": 28158672896.0, + "7435": 28158672896.0, + "7440": 28158672896.0, + "7445": 28158672896.0, + "7450": 28158672896.0, + "7455": 28158672896.0, + "7460": 28158672896.0, + "7465": 28158672896.0, + "7470": 28158672896.0, + "7475": 28158672896.0, + "7480": 28158672896.0, + "7485": 28158672896.0, + "7490": 28158672896.0, + "7495": 28158672896.0, + "7500": 28158672896.0, + "7505": 28158672896.0, + "7510": 28158672896.0, + "7515": 28158672896.0, + "7520": 28158672896.0, + "7525": 28158672896.0, + "7530": 28158672896.0, + "7535": 28158672896.0, + "7540": 28158672896.0, + "7545": 28158672896.0, + "7550": 28158672896.0, + "7555": 28158672896.0, + "7560": 28158672896.0, + "7565": 28158672896.0, + "7570": 28158672896.0, + "7575": 28158672896.0, + "7580": 28158672896.0, + "7585": 28158672896.0, + "7590": 28158672896.0, + "7595": 28158672896.0, + "7600": 28158672896.0, + "7605": 28158672896.0, + "7610": 28158672896.0, + "7615": 28158672896.0, + "7620": 28158672896.0, + "7625": 28158672896.0, + "7630": 28158672896.0, + "7635": 28158672896.0, + "7640": 28158672896.0, + "7645": 28158672896.0, + "7650": 28158672896.0, + "7655": 28158672896.0, + "7660": 28158672896.0, + "7665": 28158672896.0, + "7670": 28158672896.0, + "7675": 28158672896.0, + "7680": 28158672896.0, + "7685": 28158672896.0, + "7690": 28158672896.0, + "7695": 28158672896.0, + "7700": 28158672896.0, + "7705": 28158672896.0, + "7710": 28158672896.0, + "7715": 28158672896.0, + "7720": 28158672896.0, + "7725": 28158672896.0, + "7730": 28158672896.0, + "7735": 28158672896.0, + "7740": 28158672896.0, + "7745": 28158672896.0, + "7750": 28158672896.0, + "7755": 28158672896.0, + "7760": 28158672896.0, + "7765": 28158672896.0, + "7770": 28158672896.0, + "7775": 28158672896.0, + "7780": 28158672896.0, + "7785": 28158672896.0, + "7790": 
28158672896.0, + "7795": 28158672896.0, + "7800": 28158672896.0, + "7805": 28158672896.0, + "7810": 28158672896.0, + "7815": 28158672896.0, + "7820": 28158672896.0, + "7825": 28158672896.0, + "7830": 28158672896.0, + "7835": 28158672896.0, + "7840": 28158672896.0, + "7845": 28158672896.0, + "7850": 28158672896.0, + "7855": 28158672896.0, + "7860": 28158672896.0, + "7865": 28158672896.0, + "7870": 28158672896.0, + "7875": 28158672896.0, + "7880": 28158672896.0, + "7885": 28158672896.0, + "7890": 28158672896.0, + "7895": 28158672896.0, + "7900": 28158672896.0, + "7905": 28158672896.0, + "7910": 28158672896.0, + "7915": 28158672896.0, + "7920": 28158672896.0, + "7925": 28158672896.0, + "7930": 28158672896.0, + "7935": 28158672896.0, + "7940": 28158672896.0, + "7945": 28158672896.0, + "7950": 28158672896.0, + "7955": 28158672896.0, + "7960": 28158672896.0, + "7965": 28158672896.0, + "7970": 28158672896.0, + "7975": 28158672896.0, + "7980": 28158672896.0, + "7985": 28158672896.0, + "7990": 28158672896.0, + "7995": 28158672896.0, + "8000": 28158672896.0, + "8005": 28158672896.0, + "8010": 28158672896.0, + "8015": 28158672896.0, + "8020": 28158672896.0, + "8025": 28158672896.0, + "8030": 28158672896.0, + "8035": 28158672896.0, + "8040": 28158672896.0, + "8045": 28158672896.0, + "8050": 28158672896.0, + "8055": 28158672896.0, + "8060": 28158672896.0, + "8065": 28158672896.0, + "8070": 28158672896.0, + "8075": 28158672896.0, + "8080": 28158672896.0, + "8085": 28158672896.0, + "8090": 28158672896.0, + "8095": 28158672896.0, + "8100": 28158672896.0, + "8105": 28158672896.0, + "8110": 28158672896.0, + "8115": 28158672896.0, + "8120": 28158672896.0, + "8125": 28158672896.0, + "8130": 28158672896.0, + "8135": 28158672896.0, + "8140": 28158672896.0, + "8145": 28158672896.0, + "8150": 28158672896.0, + "8155": 28158672896.0, + "8160": 28158672896.0, + "8165": 28158672896.0, + "8170": 28158672896.0, + "8175": 28158672896.0, + "8180": 28158672896.0, + "8185": 28158672896.0, + "8190": 
28158672896.0, + "8195": 28158672896.0, + "8200": 28158672896.0, + "8205": 28158672896.0, + "8210": 28158672896.0, + "8215": 28158672896.0, + "8220": 28158672896.0, + "8225": 28158672896.0, + "8230": 28158672896.0, + "8235": 28158672896.0, + "8240": 28158672896.0, + "8245": 28158672896.0, + "8250": 28158672896.0, + "8255": 28158672896.0, + "8260": 28158672896.0, + "8265": 28158672896.0, + "8270": 28158672896.0, + "8275": 28158672896.0, + "8280": 28158672896.0, + "8285": 28158672896.0, + "8290": 28158672896.0, + "8295": 28158672896.0, + "8300": 28158672896.0, + "8305": 28158672896.0, + "8310": 28158672896.0, + "8315": 28158672896.0, + "8320": 28158672896.0, + "8325": 28158672896.0, + "8330": 28158672896.0, + "8335": 28158672896.0, + "8340": 28158672896.0, + "8345": 28158672896.0, + "8350": 28158672896.0, + "8355": 28158672896.0, + "8360": 28158672896.0, + "8365": 28158672896.0, + "8370": 28158672896.0, + "8375": 28158672896.0, + "8380": 28158672896.0, + "8385": 28158672896.0, + "8390": 28158672896.0, + "8395": 28158672896.0, + "8400": 28158672896.0, + "8405": 28158672896.0, + "8410": 28158672896.0, + "8415": 28158672896.0, + "8420": 28158672896.0, + "8425": 28158672896.0, + "8430": 28158672896.0, + "8435": 28158672896.0, + "8440": 28158672896.0, + "8445": 28158672896.0, + "8450": 28158672896.0, + "8455": 28158672896.0, + "8460": 28158672896.0, + "8465": 28158672896.0, + "8470": 28158672896.0, + "8475": 28158672896.0, + "8480": 28158672896.0, + "8485": 28158672896.0, + "8490": 28158672896.0, + "8495": 28158672896.0, + "8500": 28158672896.0, + "8505": 28158672896.0, + "8510": 28158672896.0, + "8515": 28158672896.0, + "8520": 28158672896.0, + "8525": 28158672896.0, + "8530": 28158672896.0, + "8535": 28158672896.0, + "8540": 28158672896.0, + "8545": 28158672896.0, + "8550": 28158672896.0, + "8555": 28158672896.0, + "8560": 28158672896.0, + "8565": 28158672896.0, + "8570": 28158672896.0, + "8575": 28158672896.0, + "8580": 28158672896.0, + "8585": 28158672896.0, + "8590": 
28158672896.0, + "8595": 28158672896.0, + "8600": 28158672896.0, + "8605": 28158672896.0, + "8610": 28158672896.0, + "8615": 28158672896.0, + "8620": 28158672896.0, + "8625": 28158672896.0, + "8630": 28158672896.0, + "8635": 28158672896.0, + "8640": 28158672896.0, + "8645": 28158672896.0, + "8650": 28158672896.0, + "8655": 28158672896.0, + "8660": 28158672896.0, + "8665": 28158672896.0, + "8670": 28158672896.0, + "8675": 28158672896.0, + "8680": 28158672896.0, + "8685": 28158672896.0, + "8690": 28158672896.0, + "8695": 28158672896.0, + "8700": 28158672896.0, + "8705": 28158672896.0, + "8710": 28158672896.0, + "8715": 28158672896.0, + "8720": 28158672896.0, + "8725": 28158672896.0, + "8730": 28158672896.0, + "8735": 28158672896.0, + "8740": 28158672896.0, + "8745": 28158672896.0, + "8750": 28158672896.0, + "8755": 28158672896.0, + "8760": 28158672896.0, + "8765": 28158672896.0, + "8770": 28158672896.0, + "8775": 28158672896.0, + "8780": 28158672896.0, + "8785": 28158672896.0, + "8790": 28158672896.0, + "8795": 28158672896.0, + "8800": 28158672896.0, + "8805": 28158672896.0, + "8810": 28156907520.0, + "8815": 28156907520.0, + "8820": 28156907520.0, + "8825": 28156907520.0, + "8830": 28156907520.0, + "8835": 28156907520.0, + "8840": 28156907520.0, + "8845": 28156907520.0, + "8850": 28156907520.0, + "8855": 28156907520.0, + "8860": 28156907520.0, + "8865": 28156907520.0, + "8870": 28156907520.0, + "8875": 28156907520.0, + "8880": 28156907520.0, + "8885": 28156907520.0, + "8890": 28156907520.0, + "8895": 28156907520.0, + "8900": 28156907520.0, + "8905": 28156907520.0, + "8910": 28156907520.0, + "8915": 28156907520.0, + "8920": 28156907520.0, + "8925": 28156907520.0, + "8930": 28156907520.0, + "8935": 28156907520.0, + "8940": 28156907520.0, + "8945": 28156907520.0, + "8950": 28156907520.0, + "8955": 28156907520.0, + "8960": 28156907520.0, + "8965": 28156907520.0, + "8970": 28156907520.0, + "8975": 28156907520.0, + "8980": 28156907520.0, + "8985": 28156907520.0, + "8990": 
28156907520.0, + "8995": 28156907520.0, + "9000": 28156907520.0, + "9005": 28156907520.0, + "9010": 28156907520.0, + "9015": 28156907520.0, + "9020": 28156907520.0, + "9025": 28156907520.0, + "9030": 28156907520.0, + "9035": 28156907520.0, + "9040": 28156907520.0, + "9045": 28156907520.0, + "9050": 28156907520.0, + "9055": 28156907520.0, + "9060": 28156907520.0, + "9065": 28156907520.0, + "9070": 28156907520.0, + "9075": 28156907520.0, + "9080": 28156907520.0, + "9085": 28156907520.0, + "9090": 28156907520.0, + "9095": 28156907520.0, + "9100": 28156907520.0, + "9105": 28156907520.0, + "9110": 28156907520.0, + "9115": 28156907520.0, + "9120": 28156907520.0, + "9125": 28156907520.0, + "9130": 28156907520.0, + "9135": 28156907520.0, + "9140": 28156907520.0, + "9145": 28156907520.0, + "9150": 28156907520.0, + "9155": 28156907520.0, + "9160": 28156907520.0, + "9165": 28156907520.0, + "9170": 28156907520.0, + "9175": 28156907520.0, + "9180": 28156907520.0, + "9185": 28156907520.0, + "9190": 28156907520.0, + "9195": 28156907520.0, + "9200": 28156907520.0, + "9205": 28156907520.0, + "9210": 28156907520.0, + "9215": 28156907520.0, + "9220": 28156907520.0, + "9225": 28156907520.0, + "9230": 28156907520.0, + "9235": 28156907520.0, + "9240": 28156907520.0, + "9245": 28156907520.0, + "9250": 28156907520.0, + "9255": 28156907520.0, + "9260": 28156907520.0, + "9265": 28156907520.0, + "9270": 28156907520.0, + "9275": 28156907520.0, + "9280": 28156907520.0, + "9285": 28156907520.0, + "9290": 28156907520.0, + "9295": 28156907520.0, + "9300": 28156907520.0, + "9305": 28156907520.0, + "9310": 28156907520.0, + "9315": 28156907520.0, + "9320": 28156907520.0, + "9325": 28156907520.0, + "9330": 28156907520.0, + "9335": 28156907520.0, + "9340": 28156907520.0, + "9345": 28156907520.0, + "9350": 28156907520.0, + "9355": 28156907520.0, + "9360": 28156907520.0, + "9365": 28156907520.0, + "9370": 28156907520.0, + "9375": 28156907520.0, + "9380": 28156907520.0, + "9385": 28156907520.0, + "9390": 
28156907520.0, + "9395": 28156907520.0, + "9400": 28156907520.0, + "9405": 28156907520.0, + "9410": 28156907520.0, + "9415": 28156907520.0, + "9420": 28156907520.0, + "9425": 28156907520.0, + "9430": 28156907520.0, + "9435": 28156907520.0, + "9440": 28156907520.0, + "9445": 28156907520.0, + "9450": 28156907520.0, + "9455": 28156907520.0, + "9460": 28156907520.0, + "9465": 28156907520.0, + "9470": 28156907520.0, + "9475": 28156907520.0, + "9480": 28156907520.0, + "9485": 28156907520.0, + "9490": 28156907520.0, + "9495": 28156907520.0, + "9500": 28156907520.0, + "9505": 28156907520.0, + "9510": 28156907520.0, + "9515": 28156907520.0, + "9520": 28156907520.0, + "9525": 28156907520.0, + "9530": 28156907520.0, + "9535": 28156907520.0, + "9540": 28156907520.0, + "9545": 28156907520.0, + "9550": 28156907520.0, + "9555": 28156907520.0, + "9560": 28156907520.0, + "9565": 28156907520.0, + "9570": 28156907520.0, + "9575": 28156907520.0, + "9580": 28156907520.0, + "9585": 28156907520.0, + "9590": 28156907520.0, + "9595": 28156907520.0, + "9600": 28156907520.0, + "9605": 28156907520.0, + "9610": 28156907520.0, + "9615": 28156907520.0, + "9620": 28156907520.0, + "9625": 28156907520.0, + "9630": 28156907520.0, + "9635": 28156907520.0, + "9640": 28156907520.0, + "9645": 28156907520.0, + "9650": 28156907520.0, + "9655": 28156907520.0, + "9660": 28156907520.0, + "9665": 28156907520.0, + "9670": 28156907520.0, + "9675": 28156907520.0, + "9680": 28156907520.0, + "9685": 28156907520.0, + "9690": 28156907520.0, + "9695": 28156907520.0, + "9700": 28156907520.0, + "9705": 28156907520.0, + "9710": 28156907520.0, + "9715": 28156907520.0, + "9720": 28156907520.0, + "9725": 28156907520.0, + "9730": 28156907520.0, + "9735": 28156907520.0, + "9740": 28156907520.0, + "9745": 28156907520.0, + "9750": 28156907520.0, + "9755": 28156907520.0, + "9760": 28156907520.0, + "9765": 28156907520.0, + "9770": 28156907520.0, + "9775": 28156907520.0, + "9780": 28156907520.0, + "9785": 28156907520.0, + "9790": 
28156907520.0, + "9795": 28156907520.0, + "9800": 28156907520.0, + "9805": 28156907520.0, + "9810": 28156907520.0, + "9815": 28156907520.0, + "9820": 28156907520.0, + "9825": 28156907520.0, + "9830": 28156907520.0, + "9835": 28156907520.0, + "9840": 28156907520.0, + "9845": 28156907520.0, + "9850": 28156907520.0, + "9855": 28156907520.0, + "9860": 28156907520.0, + "9865": 28156907520.0, + "9870": 28156907520.0, + "9875": 28156907520.0, + "9880": 28156907520.0, + "9885": 28156907520.0, + "9890": 28156907520.0, + "9895": 28156907520.0, + "9900": 28156907520.0, + "9905": 28156907520.0, + "9910": 28156907520.0, + "9915": 28156907520.0, + "9920": 28156907520.0, + "9925": 28156907520.0, + "9930": 28156907520.0, + "9935": 28156907520.0, + "9940": 28156907520.0, + "9945": 28156907520.0, + "9950": 28156907520.0, + "9955": 28156907520.0, + "9960": 28156907520.0, + "9965": 28156907520.0, + "9970": 28156907520.0, + "9975": 28156907520.0, + "9980": 28156907520.0, + "9985": 28156907520.0, + "9990": 28156907520.0, + "9995": 28156907520.0, + "10000": 28156907520.0, + "10005": 28156907520.0, + "10010": 28156907520.0, + "10015": 28156907520.0, + "10020": 28156907520.0, + "10025": 28156907520.0, + "10030": 28156907520.0, + "10035": 28156907520.0, + "10040": 28156907520.0, + "10045": 28156907520.0, + "10050": 28156907520.0, + "10055": 28156907520.0, + "10060": 28156907520.0, + "10065": 28156907520.0, + "10070": 28156907520.0, + "10075": 28156907520.0, + "10080": 28156907520.0, + "10085": 28156907520.0, + "10090": 28156907520.0, + "10095": 28156907520.0, + "10100": 28156907520.0, + "10105": 28156907520.0, + "10110": 28156907520.0, + "10115": 28156907520.0, + "10120": 28156907520.0, + "10125": 28156907520.0, + "10130": 28156907520.0, + "10135": 28156907520.0, + "10140": 28156907520.0, + "10145": 28156907520.0, + "10150": 28156907520.0, + "10155": 28156907520.0, + "10160": 28156907520.0, + "10165": 28156907520.0, + "10170": 28156907520.0, + "10175": 28156907520.0, + "10180": 
28156907520.0, + "10185": 28156907520.0, + "10190": 28156907520.0, + "10195": 28156907520.0, + "10200": 28156907520.0 + } + }, + "iteration-time": { + "start_step": 1, + "end_step": 10200, + "step_interval": 5, + "values": { + "1": "nan", + "5": "nan", + "10": "nan", + "15": "nan", + "20": "nan", + "25": "nan", + "30": "nan", + "35": "nan", + "40": "nan", + "45": "nan", + "50": "nan", + "55": "nan", + "60": "nan", + "65": "nan", + "70": "nan", + "75": "nan", + "80": "nan", + "85": "nan", + "90": "nan", + "95": "nan", + "100": 1.72893, + "105": "nan", + "110": "nan", + "115": "nan", + "120": "nan", + "125": "nan", + "130": "nan", + "135": "nan", + "140": "nan", + "145": "nan", + "150": "nan", + "155": "nan", + "160": "nan", + "165": "nan", + "170": "nan", + "175": "nan", + "180": "nan", + "185": "nan", + "190": "nan", + "195": "nan", + "200": 1.43857, + "205": "nan", + "210": "nan", + "215": "nan", + "220": "nan", + "225": "nan", + "230": "nan", + "235": "nan", + "240": "nan", + "245": "nan", + "250": "nan", + "255": "nan", + "260": "nan", + "265": "nan", + "270": "nan", + "275": "nan", + "280": "nan", + "285": "nan", + "290": "nan", + "295": "nan", + "300": 1.43584, + "305": "nan", + "310": "nan", + "315": "nan", + "320": "nan", + "325": "nan", + "330": "nan", + "335": "nan", + "340": "nan", + "345": "nan", + "350": "nan", + "355": "nan", + "360": "nan", + "365": "nan", + "370": "nan", + "375": "nan", + "380": "nan", + "385": "nan", + "390": "nan", + "395": "nan", + "400": 1.43883, + "405": "nan", + "410": "nan", + "415": "nan", + "420": "nan", + "425": "nan", + "430": "nan", + "435": "nan", + "440": "nan", + "445": "nan", + "450": "nan", + "455": "nan", + "460": "nan", + "465": "nan", + "470": "nan", + "475": "nan", + "480": "nan", + "485": "nan", + "490": "nan", + "495": "nan", + "500": 1.43781, + "505": "nan", + "510": "nan", + "515": "nan", + "520": "nan", + "525": "nan", + "530": "nan", + "535": "nan", + "540": "nan", + "545": "nan", + "550": "nan", + "555": 
"nan", + "560": "nan", + "565": "nan", + "570": "nan", + "575": "nan", + "580": "nan", + "585": "nan", + "590": "nan", + "595": "nan", + "600": 1.43827, + "605": "nan", + "610": "nan", + "615": "nan", + "620": "nan", + "625": "nan", + "630": "nan", + "635": "nan", + "640": "nan", + "645": "nan", + "650": "nan", + "655": "nan", + "660": "nan", + "665": "nan", + "670": "nan", + "675": "nan", + "680": "nan", + "685": "nan", + "690": "nan", + "695": "nan", + "700": 1.4373, + "705": "nan", + "710": "nan", + "715": "nan", + "720": "nan", + "725": "nan", + "730": "nan", + "735": "nan", + "740": "nan", + "745": "nan", + "750": "nan", + "755": "nan", + "760": "nan", + "765": "nan", + "770": "nan", + "775": "nan", + "780": "nan", + "785": "nan", + "790": "nan", + "795": "nan", + "800": 1.43657, + "805": "nan", + "810": "nan", + "815": "nan", + "820": "nan", + "825": "nan", + "830": "nan", + "835": "nan", + "840": "nan", + "845": "nan", + "850": "nan", + "855": "nan", + "860": "nan", + "865": "nan", + "870": "nan", + "875": "nan", + "880": "nan", + "885": "nan", + "890": "nan", + "895": "nan", + "900": 1.44068, + "905": "nan", + "910": "nan", + "915": "nan", + "920": "nan", + "925": "nan", + "930": "nan", + "935": "nan", + "940": "nan", + "945": "nan", + "950": "nan", + "955": "nan", + "960": "nan", + "965": "nan", + "970": "nan", + "975": "nan", + "980": "nan", + "985": "nan", + "990": "nan", + "995": "nan", + "1000": 1.43583, + "1005": "nan", + "1010": "nan", + "1015": "nan", + "1020": "nan", + "1025": "nan", + "1030": "nan", + "1035": "nan", + "1040": "nan", + "1045": "nan", + "1050": "nan", + "1055": "nan", + "1060": "nan", + "1065": "nan", + "1070": "nan", + "1075": "nan", + "1080": "nan", + "1085": "nan", + "1090": "nan", + "1095": "nan", + "1100": 1.43072, + "1105": "nan", + "1110": "nan", + "1115": "nan", + "1120": "nan", + "1125": "nan", + "1130": "nan", + "1135": "nan", + "1140": "nan", + "1145": "nan", + "1150": "nan", + "1155": "nan", + "1160": "nan", + "1165": 
"nan", + "1170": "nan", + "1175": "nan", + "1180": "nan", + "1185": "nan", + "1190": "nan", + "1195": "nan", + "1200": 1.4292, + "1205": "nan", + "1210": "nan", + "1215": "nan", + "1220": "nan", + "1225": "nan", + "1230": "nan", + "1235": "nan", + "1240": "nan", + "1245": "nan", + "1250": "nan", + "1255": "nan", + "1260": "nan", + "1265": "nan", + "1270": "nan", + "1275": "nan", + "1280": "nan", + "1285": "nan", + "1290": "nan", + "1295": "nan", + "1300": 1.4269, + "1305": "nan", + "1310": "nan", + "1315": "nan", + "1320": "nan", + "1325": "nan", + "1330": "nan", + "1335": "nan", + "1340": "nan", + "1345": "nan", + "1350": "nan", + "1355": "nan", + "1360": "nan", + "1365": "nan", + "1370": "nan", + "1375": "nan", + "1380": "nan", + "1385": "nan", + "1390": "nan", + "1395": "nan", + "1400": 1.42766, + "1405": "nan", + "1410": "nan", + "1415": "nan", + "1420": "nan", + "1425": "nan", + "1430": "nan", + "1435": "nan", + "1440": "nan", + "1445": "nan", + "1450": "nan", + "1455": "nan", + "1460": "nan", + "1465": "nan", + "1470": "nan", + "1475": "nan", + "1480": "nan", + "1485": "nan", + "1490": "nan", + "1495": "nan", + "1500": 1.42106, + "1505": "nan", + "1510": "nan", + "1515": "nan", + "1520": "nan", + "1525": "nan", + "1530": "nan", + "1535": "nan", + "1540": "nan", + "1545": "nan", + "1550": "nan", + "1555": "nan", + "1560": "nan", + "1565": "nan", + "1570": "nan", + "1575": "nan", + "1580": "nan", + "1585": "nan", + "1590": "nan", + "1595": "nan", + "1600": 1.42409, + "1605": "nan", + "1610": "nan", + "1615": "nan", + "1620": "nan", + "1625": "nan", + "1630": "nan", + "1635": "nan", + "1640": "nan", + "1645": "nan", + "1650": "nan", + "1655": "nan", + "1660": "nan", + "1665": "nan", + "1670": "nan", + "1675": "nan", + "1680": "nan", + "1685": "nan", + "1690": "nan", + "1695": "nan", + "1700": 1.42073, + "1705": "nan", + "1710": "nan", + "1715": "nan", + "1720": "nan", + "1725": "nan", + "1730": "nan", + "1735": "nan", + "1740": "nan", + "1745": "nan", + "1750": 
"nan", + "1755": "nan", + "1760": "nan", + "1765": "nan", + "1770": "nan", + "1775": "nan", + "1780": "nan", + "1785": "nan", + "1790": "nan", + "1795": "nan", + "1800": 1.42098, + "1805": "nan", + "1810": "nan", + "1815": "nan", + "1820": "nan", + "1825": "nan", + "1830": "nan", + "1835": "nan", + "1840": "nan", + "1845": "nan", + "1850": "nan", + "1855": "nan", + "1860": "nan", + "1865": "nan", + "1870": "nan", + "1875": "nan", + "1880": "nan", + "1885": "nan", + "1890": "nan", + "1895": "nan", + "1900": 1.42066, + "1905": "nan", + "1910": "nan", + "1915": "nan", + "1920": "nan", + "1925": "nan", + "1930": "nan", + "1935": "nan", + "1940": "nan", + "1945": "nan", + "1950": "nan", + "1955": "nan", + "1960": "nan", + "1965": "nan", + "1970": "nan", + "1975": "nan", + "1980": "nan", + "1985": "nan", + "1990": "nan", + "1995": "nan", + "2000": 1.42267, + "2005": "nan", + "2010": "nan", + "2015": "nan", + "2020": "nan", + "2025": "nan", + "2030": "nan", + "2035": "nan", + "2040": "nan", + "2045": "nan", + "2050": "nan", + "2055": "nan", + "2060": "nan", + "2065": "nan", + "2070": "nan", + "2075": "nan", + "2080": "nan", + "2085": "nan", + "2090": "nan", + "2095": "nan", + "2100": 1.42131, + "2105": "nan", + "2110": "nan", + "2115": "nan", + "2120": "nan", + "2125": "nan", + "2130": "nan", + "2135": "nan", + "2140": "nan", + "2145": "nan", + "2150": "nan", + "2155": "nan", + "2160": "nan", + "2165": "nan", + "2170": "nan", + "2175": "nan", + "2180": "nan", + "2185": "nan", + "2190": "nan", + "2195": "nan", + "2200": 1.41946, + "2205": "nan", + "2210": "nan", + "2215": "nan", + "2220": "nan", + "2225": "nan", + "2230": "nan", + "2235": "nan", + "2240": "nan", + "2245": "nan", + "2250": "nan", + "2255": "nan", + "2260": "nan", + "2265": "nan", + "2270": "nan", + "2275": "nan", + "2280": "nan", + "2285": "nan", + "2290": "nan", + "2295": "nan", + "2300": 1.42191, + "2305": "nan", + "2310": "nan", + "2315": "nan", + "2320": "nan", + "2325": "nan", + "2330": "nan", + 
"2335": "nan", + "2340": "nan", + "2345": "nan", + "2350": "nan", + "2355": "nan", + "2360": "nan", + "2365": "nan", + "2370": "nan", + "2375": "nan", + "2380": "nan", + "2385": "nan", + "2390": "nan", + "2395": "nan", + "2400": 1.42003, + "2405": "nan", + "2410": "nan", + "2415": "nan", + "2420": "nan", + "2425": "nan", + "2430": "nan", + "2435": "nan", + "2440": "nan", + "2445": "nan", + "2450": "nan", + "2455": "nan", + "2460": "nan", + "2465": "nan", + "2470": "nan", + "2475": "nan", + "2480": "nan", + "2485": "nan", + "2490": "nan", + "2495": "nan", + "2500": 1.41919, + "2505": "nan", + "2510": "nan", + "2515": "nan", + "2520": "nan", + "2525": "nan", + "2530": "nan", + "2535": "nan", + "2540": "nan", + "2545": "nan", + "2550": "nan", + "2555": "nan", + "2560": "nan", + "2565": "nan", + "2570": "nan", + "2575": "nan", + "2580": "nan", + "2585": "nan", + "2590": "nan", + "2595": "nan", + "2600": 1.41875, + "2605": "nan", + "2610": "nan", + "2615": "nan", + "2620": "nan", + "2625": "nan", + "2630": "nan", + "2635": "nan", + "2640": "nan", + "2645": "nan", + "2650": "nan", + "2655": "nan", + "2660": "nan", + "2665": "nan", + "2670": "nan", + "2675": "nan", + "2680": "nan", + "2685": "nan", + "2690": "nan", + "2695": "nan", + "2700": 1.41854, + "2705": "nan", + "2710": "nan", + "2715": "nan", + "2720": "nan", + "2725": "nan", + "2730": "nan", + "2735": "nan", + "2740": "nan", + "2745": "nan", + "2750": "nan", + "2755": "nan", + "2760": "nan", + "2765": "nan", + "2770": "nan", + "2775": "nan", + "2780": "nan", + "2785": "nan", + "2790": "nan", + "2795": "nan", + "2800": 1.41791, + "2805": "nan", + "2810": "nan", + "2815": "nan", + "2820": "nan", + "2825": "nan", + "2830": "nan", + "2835": "nan", + "2840": "nan", + "2845": "nan", + "2850": "nan", + "2855": "nan", + "2860": "nan", + "2865": "nan", + "2870": "nan", + "2875": "nan", + "2880": "nan", + "2885": "nan", + "2890": "nan", + "2895": "nan", + "2900": 1.41567, + "2905": "nan", + "2910": "nan", + "2915": "nan", 
+ "2920": "nan", + "2925": "nan", + "2930": "nan", + "2935": "nan", + "2940": "nan", + "2945": "nan", + "2950": "nan", + "2955": "nan", + "2960": "nan", + "2965": "nan", + "2970": "nan", + "2975": "nan", + "2980": "nan", + "2985": "nan", + "2990": "nan", + "2995": "nan", + "3000": 1.4195, + "3005": "nan", + "3010": "nan", + "3015": "nan", + "3020": "nan", + "3025": "nan", + "3030": "nan", + "3035": "nan", + "3040": "nan", + "3045": "nan", + "3050": "nan", + "3055": "nan", + "3060": "nan", + "3065": "nan", + "3070": "nan", + "3075": "nan", + "3080": "nan", + "3085": "nan", + "3090": "nan", + "3095": "nan", + "3100": 1.42148, + "3105": "nan", + "3110": "nan", + "3115": "nan", + "3120": "nan", + "3125": "nan", + "3130": "nan", + "3135": "nan", + "3140": "nan", + "3145": "nan", + "3150": "nan", + "3155": "nan", + "3160": "nan", + "3165": "nan", + "3170": "nan", + "3175": "nan", + "3180": "nan", + "3185": "nan", + "3190": "nan", + "3195": "nan", + "3200": 1.41644, + "3205": "nan", + "3210": "nan", + "3215": "nan", + "3220": "nan", + "3225": "nan", + "3230": "nan", + "3235": "nan", + "3240": "nan", + "3245": "nan", + "3250": "nan", + "3255": "nan", + "3260": "nan", + "3265": "nan", + "3270": "nan", + "3275": "nan", + "3280": "nan", + "3285": "nan", + "3290": "nan", + "3295": "nan", + "3300": 1.41612, + "3305": "nan", + "3310": "nan", + "3315": "nan", + "3320": "nan", + "3325": "nan", + "3330": "nan", + "3335": "nan", + "3340": "nan", + "3345": "nan", + "3350": "nan", + "3355": "nan", + "3360": "nan", + "3365": "nan", + "3370": "nan", + "3375": "nan", + "3380": "nan", + "3385": "nan", + "3390": "nan", + "3395": "nan", + "3400": 1.41832, + "3405": "nan", + "3410": "nan", + "3415": "nan", + "3420": "nan", + "3425": "nan", + "3430": "nan", + "3435": "nan", + "3440": "nan", + "3445": "nan", + "3450": "nan", + "3455": "nan", + "3460": "nan", + "3465": "nan", + "3470": "nan", + "3475": "nan", + "3480": "nan", + "3485": "nan", + "3490": "nan", + "3495": "nan", + "3500": 1.41619, 
+ "3505": "nan", + "3510": "nan", + "3515": "nan", + "3520": "nan", + "3525": "nan", + "3530": "nan", + "3535": "nan", + "3540": "nan", + "3545": "nan", + "3550": "nan", + "3555": "nan", + "3560": "nan", + "3565": "nan", + "3570": "nan", + "3575": "nan", + "3580": "nan", + "3585": "nan", + "3590": "nan", + "3595": "nan", + "3600": 1.41975, + "3605": "nan", + "3610": "nan", + "3615": "nan", + "3620": "nan", + "3625": "nan", + "3630": "nan", + "3635": "nan", + "3640": "nan", + "3645": "nan", + "3650": "nan", + "3655": "nan", + "3660": "nan", + "3665": "nan", + "3670": "nan", + "3675": "nan", + "3680": "nan", + "3685": "nan", + "3690": "nan", + "3695": "nan", + "3700": 1.41987, + "3705": "nan", + "3710": "nan", + "3715": "nan", + "3720": "nan", + "3725": "nan", + "3730": "nan", + "3735": "nan", + "3740": "nan", + "3745": "nan", + "3750": "nan", + "3755": "nan", + "3760": "nan", + "3765": "nan", + "3770": "nan", + "3775": "nan", + "3780": "nan", + "3785": "nan", + "3790": "nan", + "3795": "nan", + "3800": 1.41734, + "3805": "nan", + "3810": "nan", + "3815": "nan", + "3820": "nan", + "3825": "nan", + "3830": "nan", + "3835": "nan", + "3840": "nan", + "3845": "nan", + "3850": "nan", + "3855": "nan", + "3860": "nan", + "3865": "nan", + "3870": "nan", + "3875": "nan", + "3880": "nan", + "3885": "nan", + "3890": "nan", + "3895": "nan", + "3900": 1.41853, + "3905": "nan", + "3910": "nan", + "3915": "nan", + "3920": "nan", + "3925": "nan", + "3930": "nan", + "3935": "nan", + "3940": "nan", + "3945": "nan", + "3950": "nan", + "3955": "nan", + "3960": "nan", + "3965": "nan", + "3970": "nan", + "3975": "nan", + "3980": "nan", + "3985": "nan", + "3990": "nan", + "3995": "nan", + "4000": 1.41699, + "4005": "nan", + "4010": "nan", + "4015": "nan", + "4020": "nan", + "4025": "nan", + "4030": "nan", + "4035": "nan", + "4040": "nan", + "4045": "nan", + "4050": "nan", + "4055": "nan", + "4060": "nan", + "4065": "nan", + "4070": "nan", + "4075": "nan", + "4080": "nan", + "4085": "nan", 
+ "4090": "nan", + "4095": "nan", + "4100": 1.41688, + "4105": "nan", + "4110": "nan", + "4115": "nan", + "4120": "nan", + "4125": "nan", + "4130": "nan", + "4135": "nan", + "4140": "nan", + "4145": "nan", + "4150": "nan", + "4155": "nan", + "4160": "nan", + "4165": "nan", + "4170": "nan", + "4175": "nan", + "4180": "nan", + "4185": "nan", + "4190": "nan", + "4195": "nan", + "4200": 1.41758, + "4205": "nan", + "4210": "nan", + "4215": "nan", + "4220": "nan", + "4225": "nan", + "4230": "nan", + "4235": "nan", + "4240": "nan", + "4245": "nan", + "4250": "nan", + "4255": "nan", + "4260": "nan", + "4265": "nan", + "4270": "nan", + "4275": "nan", + "4280": "nan", + "4285": "nan", + "4290": "nan", + "4295": "nan", + "4300": 1.41698, + "4305": "nan", + "4310": "nan", + "4315": "nan", + "4320": "nan", + "4325": "nan", + "4330": "nan", + "4335": "nan", + "4340": "nan", + "4345": "nan", + "4350": "nan", + "4355": "nan", + "4360": "nan", + "4365": "nan", + "4370": "nan", + "4375": "nan", + "4380": "nan", + "4385": "nan", + "4390": "nan", + "4395": "nan", + "4400": 1.41728, + "4405": "nan", + "4410": "nan", + "4415": "nan", + "4420": "nan", + "4425": "nan", + "4430": "nan", + "4435": "nan", + "4440": "nan", + "4445": "nan", + "4450": "nan", + "4455": "nan", + "4460": "nan", + "4465": "nan", + "4470": "nan", + "4475": "nan", + "4480": "nan", + "4485": "nan", + "4490": "nan", + "4495": "nan", + "4500": 1.41647, + "4505": "nan", + "4510": "nan", + "4515": "nan", + "4520": "nan", + "4525": "nan", + "4530": "nan", + "4535": "nan", + "4540": "nan", + "4545": "nan", + "4550": "nan", + "4555": "nan", + "4560": "nan", + "4565": "nan", + "4570": "nan", + "4575": "nan", + "4580": "nan", + "4585": "nan", + "4590": "nan", + "4595": "nan", + "4600": 1.42068, + "4605": "nan", + "4610": "nan", + "4615": "nan", + "4620": "nan", + "4625": "nan", + "4630": "nan", + "4635": "nan", + "4640": "nan", + "4645": "nan", + "4650": "nan", + "4655": "nan", + "4660": "nan", + "4665": "nan", + "4670": 
"nan", + "4675": "nan", + "4680": "nan", + "4685": "nan", + "4690": "nan", + "4695": "nan", + "4700": 1.41828, + "4705": "nan", + "4710": "nan", + "4715": "nan", + "4720": "nan", + "4725": "nan", + "4730": "nan", + "4735": "nan", + "4740": "nan", + "4745": "nan", + "4750": "nan", + "4755": "nan", + "4760": "nan", + "4765": "nan", + "4770": "nan", + "4775": "nan", + "4780": "nan", + "4785": "nan", + "4790": "nan", + "4795": "nan", + "4800": 1.42135, + "4805": "nan", + "4810": "nan", + "4815": "nan", + "4820": "nan", + "4825": "nan", + "4830": "nan", + "4835": "nan", + "4840": "nan", + "4845": "nan", + "4850": "nan", + "4855": "nan", + "4860": "nan", + "4865": "nan", + "4870": "nan", + "4875": "nan", + "4880": "nan", + "4885": "nan", + "4890": "nan", + "4895": "nan", + "4900": 1.41613, + "4905": "nan", + "4910": "nan", + "4915": "nan", + "4920": "nan", + "4925": "nan", + "4930": "nan", + "4935": "nan", + "4940": "nan", + "4945": "nan", + "4950": "nan", + "4955": "nan", + "4960": "nan", + "4965": "nan", + "4970": "nan", + "4975": "nan", + "4980": "nan", + "4985": "nan", + "4990": "nan", + "4995": "nan", + "5000": 1.41976, + "5005": "nan", + "5010": "nan", + "5015": "nan", + "5020": "nan", + "5025": "nan", + "5030": "nan", + "5035": "nan", + "5040": "nan", + "5045": "nan", + "5050": "nan", + "5055": "nan", + "5060": "nan", + "5065": "nan", + "5070": "nan", + "5075": "nan", + "5080": "nan", + "5085": "nan", + "5090": "nan", + "5095": "nan", + "5100": 1.42195, + "5105": "nan", + "5110": "nan", + "5115": "nan", + "5120": "nan", + "5125": "nan", + "5130": "nan", + "5135": "nan", + "5140": "nan", + "5145": "nan", + "5150": "nan", + "5155": "nan", + "5160": "nan", + "5165": "nan", + "5170": "nan", + "5175": "nan", + "5180": "nan", + "5185": "nan", + "5190": "nan", + "5195": "nan", + "5200": 1.41591, + "5205": "nan", + "5210": "nan", + "5215": "nan", + "5220": "nan", + "5225": "nan", + "5230": "nan", + "5235": "nan", + "5240": "nan", + "5245": "nan", + "5250": "nan", + 
"5255": "nan", + "5260": "nan", + "5265": "nan", + "5270": "nan", + "5275": "nan", + "5280": "nan", + "5285": "nan", + "5290": "nan", + "5295": "nan", + "5300": 1.41729, + "5305": "nan", + "5310": "nan", + "5315": "nan", + "5320": "nan", + "5325": "nan", + "5330": "nan", + "5335": "nan", + "5340": "nan", + "5345": "nan", + "5350": "nan", + "5355": "nan", + "5360": "nan", + "5365": "nan", + "5370": "nan", + "5375": "nan", + "5380": "nan", + "5385": "nan", + "5390": "nan", + "5395": "nan", + "5400": 1.42072, + "5405": "nan", + "5410": "nan", + "5415": "nan", + "5420": "nan", + "5425": "nan", + "5430": "nan", + "5435": "nan", + "5440": "nan", + "5445": "nan", + "5450": "nan", + "5455": "nan", + "5460": "nan", + "5465": "nan", + "5470": "nan", + "5475": "nan", + "5480": "nan", + "5485": "nan", + "5490": "nan", + "5495": "nan", + "5500": 1.42092, + "5505": "nan", + "5510": "nan", + "5515": "nan", + "5520": "nan", + "5525": "nan", + "5530": "nan", + "5535": "nan", + "5540": "nan", + "5545": "nan", + "5550": "nan", + "5555": "nan", + "5560": "nan", + "5565": "nan", + "5570": "nan", + "5575": "nan", + "5580": "nan", + "5585": "nan", + "5590": "nan", + "5595": "nan", + "5600": 1.41992, + "5605": "nan", + "5610": "nan", + "5615": "nan", + "5620": "nan", + "5625": "nan", + "5630": "nan", + "5635": "nan", + "5640": "nan", + "5645": "nan", + "5650": "nan", + "5655": "nan", + "5660": "nan", + "5665": "nan", + "5670": "nan", + "5675": "nan", + "5680": "nan", + "5685": "nan", + "5690": "nan", + "5695": "nan", + "5700": 1.41833, + "5705": "nan", + "5710": "nan", + "5715": "nan", + "5720": "nan", + "5725": "nan", + "5730": "nan", + "5735": "nan", + "5740": "nan", + "5745": "nan", + "5750": "nan", + "5755": "nan", + "5760": "nan", + "5765": "nan", + "5770": "nan", + "5775": "nan", + "5780": "nan", + "5785": "nan", + "5790": "nan", + "5795": "nan", + "5800": 1.41738, + "5805": "nan", + "5810": "nan", + "5815": "nan", + "5820": "nan", + "5825": "nan", + "5830": "nan", + "5835": "nan", 
+ "5840": "nan", + "5845": "nan", + "5850": "nan", + "5855": "nan", + "5860": "nan", + "5865": "nan", + "5870": "nan", + "5875": "nan", + "5880": "nan", + "5885": "nan", + "5890": "nan", + "5895": "nan", + "5900": 1.42112, + "5905": "nan", + "5910": "nan", + "5915": "nan", + "5920": "nan", + "5925": "nan", + "5930": "nan", + "5935": "nan", + "5940": "nan", + "5945": "nan", + "5950": "nan", + "5955": "nan", + "5960": "nan", + "5965": "nan", + "5970": "nan", + "5975": "nan", + "5980": "nan", + "5985": "nan", + "5990": "nan", + "5995": "nan", + "6000": 1.42017, + "6005": "nan", + "6010": "nan", + "6015": "nan", + "6020": "nan", + "6025": "nan", + "6030": "nan", + "6035": "nan", + "6040": "nan", + "6045": "nan", + "6050": "nan", + "6055": "nan", + "6060": "nan", + "6065": "nan", + "6070": "nan", + "6075": "nan", + "6080": "nan", + "6085": "nan", + "6090": "nan", + "6095": "nan", + "6100": 1.41819, + "6105": "nan", + "6110": "nan", + "6115": "nan", + "6120": "nan", + "6125": "nan", + "6130": "nan", + "6135": "nan", + "6140": "nan", + "6145": "nan", + "6150": "nan", + "6155": "nan", + "6160": "nan", + "6165": "nan", + "6170": "nan", + "6175": "nan", + "6180": "nan", + "6185": "nan", + "6190": "nan", + "6195": "nan", + "6200": 1.41783, + "6205": "nan", + "6210": "nan", + "6215": "nan", + "6220": "nan", + "6225": "nan", + "6230": "nan", + "6235": "nan", + "6240": "nan", + "6245": "nan", + "6250": "nan", + "6255": "nan", + "6260": "nan", + "6265": "nan", + "6270": "nan", + "6275": "nan", + "6280": "nan", + "6285": "nan", + "6290": "nan", + "6295": "nan", + "6300": 1.41997, + "6305": "nan", + "6310": "nan", + "6315": "nan", + "6320": "nan", + "6325": "nan", + "6330": "nan", + "6335": "nan", + "6340": "nan", + "6345": "nan", + "6350": "nan", + "6355": "nan", + "6360": "nan", + "6365": "nan", + "6370": "nan", + "6375": "nan", + "6380": "nan", + "6385": "nan", + "6390": "nan", + "6395": "nan", + "6400": 1.41757, + "6405": "nan", + "6410": "nan", + "6415": "nan", + "6420": 
"nan", + "6425": "nan", + "6430": "nan", + "6435": "nan", + "6440": "nan", + "6445": "nan", + "6450": "nan", + "6455": "nan", + "6460": "nan", + "6465": "nan", + "6470": "nan", + "6475": "nan", + "6480": "nan", + "6485": "nan", + "6490": "nan", + "6495": "nan", + "6500": 1.42002, + "6505": "nan", + "6510": "nan", + "6515": "nan", + "6520": "nan", + "6525": "nan", + "6530": "nan", + "6535": "nan", + "6540": "nan", + "6545": "nan", + "6550": "nan", + "6555": "nan", + "6560": "nan", + "6565": "nan", + "6570": "nan", + "6575": "nan", + "6580": "nan", + "6585": "nan", + "6590": "nan", + "6595": "nan", + "6600": 1.41769, + "6605": "nan", + "6610": "nan", + "6615": "nan", + "6620": "nan", + "6625": "nan", + "6630": "nan", + "6635": "nan", + "6640": "nan", + "6645": "nan", + "6650": "nan", + "6655": "nan", + "6660": "nan", + "6665": "nan", + "6670": "nan", + "6675": "nan", + "6680": "nan", + "6685": "nan", + "6690": "nan", + "6695": "nan", + "6700": 1.50538, + "6705": "nan", + "6710": "nan", + "6715": "nan", + "6720": "nan", + "6725": "nan", + "6730": "nan", + "6735": "nan", + "6740": "nan", + "6745": "nan", + "6750": "nan", + "6755": "nan", + "6760": "nan", + "6765": "nan", + "6770": "nan", + "6775": "nan", + "6780": "nan", + "6785": "nan", + "6790": "nan", + "6795": "nan", + "6800": 1.41809, + "6805": "nan", + "6810": "nan", + "6815": "nan", + "6820": "nan", + "6825": "nan", + "6830": "nan", + "6835": "nan", + "6840": "nan", + "6845": "nan", + "6850": "nan", + "6855": "nan", + "6860": "nan", + "6865": "nan", + "6870": "nan", + "6875": "nan", + "6880": "nan", + "6885": "nan", + "6890": "nan", + "6895": "nan", + "6900": 1.41692, + "6905": "nan", + "6910": "nan", + "6915": "nan", + "6920": "nan", + "6925": "nan", + "6930": "nan", + "6935": "nan", + "6940": "nan", + "6945": "nan", + "6950": "nan", + "6955": "nan", + "6960": "nan", + "6965": "nan", + "6970": "nan", + "6975": "nan", + "6980": "nan", + "6985": "nan", + "6990": "nan", + "6995": "nan", + "7000": 1.41814, + 
"7005": "nan", + "7010": "nan", + "7015": "nan", + "7020": "nan", + "7025": "nan", + "7030": "nan", + "7035": "nan", + "7040": "nan", + "7045": "nan", + "7050": "nan", + "7055": "nan", + "7060": "nan", + "7065": "nan", + "7070": "nan", + "7075": "nan", + "7080": "nan", + "7085": "nan", + "7090": "nan", + "7095": "nan", + "7100": 1.41682, + "7105": "nan", + "7110": "nan", + "7115": "nan", + "7120": "nan", + "7125": "nan", + "7130": "nan", + "7135": "nan", + "7140": "nan", + "7145": "nan", + "7150": "nan", + "7155": "nan", + "7160": "nan", + "7165": "nan", + "7170": "nan", + "7175": "nan", + "7180": "nan", + "7185": "nan", + "7190": "nan", + "7195": "nan", + "7200": 1.41907, + "7205": "nan", + "7210": "nan", + "7215": "nan", + "7220": "nan", + "7225": "nan", + "7230": "nan", + "7235": "nan", + "7240": "nan", + "7245": "nan", + "7250": "nan", + "7255": "nan", + "7260": "nan", + "7265": "nan", + "7270": "nan", + "7275": "nan", + "7280": "nan", + "7285": "nan", + "7290": "nan", + "7295": "nan", + "7300": 1.44244, + "7305": "nan", + "7310": "nan", + "7315": "nan", + "7320": "nan", + "7325": "nan", + "7330": "nan", + "7335": "nan", + "7340": "nan", + "7345": "nan", + "7350": "nan", + "7355": "nan", + "7360": "nan", + "7365": "nan", + "7370": "nan", + "7375": "nan", + "7380": "nan", + "7385": "nan", + "7390": "nan", + "7395": "nan", + "7400": 1.41831, + "7405": "nan", + "7410": "nan", + "7415": "nan", + "7420": "nan", + "7425": "nan", + "7430": "nan", + "7435": "nan", + "7440": "nan", + "7445": "nan", + "7450": "nan", + "7455": "nan", + "7460": "nan", + "7465": "nan", + "7470": "nan", + "7475": "nan", + "7480": "nan", + "7485": "nan", + "7490": "nan", + "7495": "nan", + "7500": 1.41688, + "7505": "nan", + "7510": "nan", + "7515": "nan", + "7520": "nan", + "7525": "nan", + "7530": "nan", + "7535": "nan", + "7540": "nan", + "7545": "nan", + "7550": "nan", + "7555": "nan", + "7560": "nan", + "7565": "nan", + "7570": "nan", + "7575": "nan", + "7580": "nan", + "7585": "nan", + 
"7590": "nan", + "7595": "nan", + "7600": 1.41519, + "7605": "nan", + "7610": "nan", + "7615": "nan", + "7620": "nan", + "7625": "nan", + "7630": "nan", + "7635": "nan", + "7640": "nan", + "7645": "nan", + "7650": "nan", + "7655": "nan", + "7660": "nan", + "7665": "nan", + "7670": "nan", + "7675": "nan", + "7680": "nan", + "7685": "nan", + "7690": "nan", + "7695": "nan", + "7700": 1.41777, + "7705": "nan", + "7710": "nan", + "7715": "nan", + "7720": "nan", + "7725": "nan", + "7730": "nan", + "7735": "nan", + "7740": "nan", + "7745": "nan", + "7750": "nan", + "7755": "nan", + "7760": "nan", + "7765": "nan", + "7770": "nan", + "7775": "nan", + "7780": "nan", + "7785": "nan", + "7790": "nan", + "7795": "nan", + "7800": 1.41752, + "7805": "nan", + "7810": "nan", + "7815": "nan", + "7820": "nan", + "7825": "nan", + "7830": "nan", + "7835": "nan", + "7840": "nan", + "7845": "nan", + "7850": "nan", + "7855": "nan", + "7860": "nan", + "7865": "nan", + "7870": "nan", + "7875": "nan", + "7880": "nan", + "7885": "nan", + "7890": "nan", + "7895": "nan", + "7900": 1.41496, + "7905": "nan", + "7910": "nan", + "7915": "nan", + "7920": "nan", + "7925": "nan", + "7930": "nan", + "7935": "nan", + "7940": "nan", + "7945": "nan", + "7950": "nan", + "7955": "nan", + "7960": "nan", + "7965": "nan", + "7970": "nan", + "7975": "nan", + "7980": "nan", + "7985": "nan", + "7990": "nan", + "7995": "nan", + "8000": 1.41445, + "8005": "nan", + "8010": "nan", + "8015": "nan", + "8020": "nan", + "8025": "nan", + "8030": "nan", + "8035": "nan", + "8040": "nan", + "8045": "nan", + "8050": "nan", + "8055": "nan", + "8060": "nan", + "8065": "nan", + "8070": "nan", + "8075": "nan", + "8080": "nan", + "8085": "nan", + "8090": "nan", + "8095": "nan", + "8100": 1.41546, + "8105": "nan", + "8110": "nan", + "8115": "nan", + "8120": "nan", + "8125": "nan", + "8130": "nan", + "8135": "nan", + "8140": "nan", + "8145": "nan", + "8150": "nan", + "8155": "nan", + "8160": "nan", + "8165": "nan", + "8170": "nan", 
+ "8175": "nan", + "8180": "nan", + "8185": "nan", + "8190": "nan", + "8195": "nan", + "8200": 1.41496, + "8205": "nan", + "8210": "nan", + "8215": "nan", + "8220": "nan", + "8225": "nan", + "8230": "nan", + "8235": "nan", + "8240": "nan", + "8245": "nan", + "8250": "nan", + "8255": "nan", + "8260": "nan", + "8265": "nan", + "8270": "nan", + "8275": "nan", + "8280": "nan", + "8285": "nan", + "8290": "nan", + "8295": "nan", + "8300": 1.41627, + "8305": "nan", + "8310": "nan", + "8315": "nan", + "8320": "nan", + "8325": "nan", + "8330": "nan", + "8335": "nan", + "8340": "nan", + "8345": "nan", + "8350": "nan", + "8355": "nan", + "8360": "nan", + "8365": "nan", + "8370": "nan", + "8375": "nan", + "8380": "nan", + "8385": "nan", + "8390": "nan", + "8395": "nan", + "8400": 1.41435, + "8405": "nan", + "8410": "nan", + "8415": "nan", + "8420": "nan", + "8425": "nan", + "8430": "nan", + "8435": "nan", + "8440": "nan", + "8445": "nan", + "8450": "nan", + "8455": "nan", + "8460": "nan", + "8465": "nan", + "8470": "nan", + "8475": "nan", + "8480": "nan", + "8485": "nan", + "8490": "nan", + "8495": "nan", + "8500": 1.41723, + "8505": "nan", + "8510": "nan", + "8515": "nan", + "8520": "nan", + "8525": "nan", + "8530": "nan", + "8535": "nan", + "8540": "nan", + "8545": "nan", + "8550": "nan", + "8555": "nan", + "8560": "nan", + "8565": "nan", + "8570": "nan", + "8575": "nan", + "8580": "nan", + "8585": "nan", + "8590": "nan", + "8595": "nan", + "8600": 1.41437, + "8605": "nan", + "8610": "nan", + "8615": "nan", + "8620": "nan", + "8625": "nan", + "8630": "nan", + "8635": "nan", + "8640": "nan", + "8645": "nan", + "8650": "nan", + "8655": "nan", + "8660": "nan", + "8665": "nan", + "8670": "nan", + "8675": "nan", + "8680": "nan", + "8685": "nan", + "8690": "nan", + "8695": "nan", + "8700": 1.41717, + "8705": "nan", + "8710": "nan", + "8715": "nan", + "8720": "nan", + "8725": "nan", + "8730": "nan", + "8735": "nan", + "8740": "nan", + "8745": "nan", + "8750": "nan", + "8755": 
"nan", + "8760": "nan", + "8765": "nan", + "8770": "nan", + "8775": "nan", + "8780": "nan", + "8785": "nan", + "8790": "nan", + "8795": "nan", + "8800": 1.41626, + "8805": "nan", + "8810": "nan", + "8815": "nan", + "8820": "nan", + "8825": "nan", + "8830": "nan", + "8835": "nan", + "8840": "nan", + "8845": "nan", + "8850": "nan", + "8855": "nan", + "8860": "nan", + "8865": "nan", + "8870": "nan", + "8875": "nan", + "8880": "nan", + "8885": "nan", + "8890": "nan", + "8895": "nan", + "8900": 1.72345, + "8905": "nan", + "8910": "nan", + "8915": "nan", + "8920": "nan", + "8925": "nan", + "8930": "nan", + "8935": "nan", + "8940": "nan", + "8945": "nan", + "8950": "nan", + "8955": "nan", + "8960": "nan", + "8965": "nan", + "8970": "nan", + "8975": "nan", + "8980": "nan", + "8985": "nan", + "8990": "nan", + "8995": "nan", + "9000": 1.42136, + "9005": "nan", + "9010": "nan", + "9015": "nan", + "9020": "nan", + "9025": "nan", + "9030": "nan", + "9035": "nan", + "9040": "nan", + "9045": "nan", + "9050": "nan", + "9055": "nan", + "9060": "nan", + "9065": "nan", + "9070": "nan", + "9075": "nan", + "9080": "nan", + "9085": "nan", + "9090": "nan", + "9095": "nan", + "9100": 1.41472, + "9105": "nan", + "9110": "nan", + "9115": "nan", + "9120": "nan", + "9125": "nan", + "9130": "nan", + "9135": "nan", + "9140": "nan", + "9145": "nan", + "9150": "nan", + "9155": "nan", + "9160": "nan", + "9165": "nan", + "9170": "nan", + "9175": "nan", + "9180": "nan", + "9185": "nan", + "9190": "nan", + "9195": "nan", + "9200": 1.41768, + "9205": "nan", + "9210": "nan", + "9215": "nan", + "9220": "nan", + "9225": "nan", + "9230": "nan", + "9235": "nan", + "9240": "nan", + "9245": "nan", + "9250": "nan", + "9255": "nan", + "9260": "nan", + "9265": "nan", + "9270": "nan", + "9275": "nan", + "9280": "nan", + "9285": "nan", + "9290": "nan", + "9295": "nan", + "9300": 1.42052, + "9305": "nan", + "9310": "nan", + "9315": "nan", + "9320": "nan", + "9325": "nan", + "9330": "nan", + "9335": "nan", + 
"9340": "nan", + "9345": "nan", + "9350": "nan", + "9355": "nan", + "9360": "nan", + "9365": "nan", + "9370": "nan", + "9375": "nan", + "9380": "nan", + "9385": "nan", + "9390": "nan", + "9395": "nan", + "9400": 1.41618, + "9405": "nan", + "9410": "nan", + "9415": "nan", + "9420": "nan", + "9425": "nan", + "9430": "nan", + "9435": "nan", + "9440": "nan", + "9445": "nan", + "9450": "nan", + "9455": "nan", + "9460": "nan", + "9465": "nan", + "9470": "nan", + "9475": "nan", + "9480": "nan", + "9485": "nan", + "9490": "nan", + "9495": "nan", + "9500": 1.41637, + "9505": "nan", + "9510": "nan", + "9515": "nan", + "9520": "nan", + "9525": "nan", + "9530": "nan", + "9535": "nan", + "9540": "nan", + "9545": "nan", + "9550": "nan", + "9555": "nan", + "9560": "nan", + "9565": "nan", + "9570": "nan", + "9575": "nan", + "9580": "nan", + "9585": "nan", + "9590": "nan", + "9595": "nan", + "9600": 1.41511, + "9605": "nan", + "9610": "nan", + "9615": "nan", + "9620": "nan", + "9625": "nan", + "9630": "nan", + "9635": "nan", + "9640": "nan", + "9645": "nan", + "9650": "nan", + "9655": "nan", + "9660": "nan", + "9665": "nan", + "9670": "nan", + "9675": "nan", + "9680": "nan", + "9685": "nan", + "9690": "nan", + "9695": "nan", + "9700": 1.41737, + "9705": "nan", + "9710": "nan", + "9715": "nan", + "9720": "nan", + "9725": "nan", + "9730": "nan", + "9735": "nan", + "9740": "nan", + "9745": "nan", + "9750": "nan", + "9755": "nan", + "9760": "nan", + "9765": "nan", + "9770": "nan", + "9775": "nan", + "9780": "nan", + "9785": "nan", + "9790": "nan", + "9795": "nan", + "9800": 1.41703, + "9805": "nan", + "9810": "nan", + "9815": "nan", + "9820": "nan", + "9825": "nan", + "9830": "nan", + "9835": "nan", + "9840": "nan", + "9845": "nan", + "9850": "nan", + "9855": "nan", + "9860": "nan", + "9865": "nan", + "9870": "nan", + "9875": "nan", + "9880": "nan", + "9885": "nan", + "9890": "nan", + "9895": "nan", + "9900": 1.4201, + "9905": "nan", + "9910": "nan", + "9915": "nan", + "9920": "nan", + 
"9925": "nan", + "9930": "nan", + "9935": "nan", + "9940": "nan", + "9945": "nan", + "9950": "nan", + "9955": "nan", + "9960": "nan", + "9965": "nan", + "9970": "nan", + "9975": "nan", + "9980": "nan", + "9985": "nan", + "9990": "nan", + "9995": "nan", + "10000": 1.42216, + "10005": "nan", + "10010": "nan", + "10015": "nan", + "10020": "nan", + "10025": "nan", + "10030": "nan", + "10035": "nan", + "10040": "nan", + "10045": "nan", + "10050": "nan", + "10055": "nan", + "10060": "nan", + "10065": "nan", + "10070": "nan", + "10075": "nan", + "10080": "nan", + "10085": "nan", + "10090": "nan", + "10095": "nan", + "10100": 1.43065, + "10105": "nan", + "10110": "nan", + "10115": "nan", + "10120": "nan", + "10125": "nan", + "10130": "nan", + "10135": "nan", + "10140": "nan", + "10145": "nan", + "10150": "nan", + "10155": "nan", + "10160": "nan", + "10165": "nan", + "10170": "nan", + "10175": "nan", + "10180": "nan", + "10185": "nan", + "10190": "nan", + "10195": "nan", + "10200": 1.41657 + } + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release_sm/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release_sm/model_config.yaml index 8b437ba75e7..b7fb9d7d661 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release_sm/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_15b_8t_release_sm/model_config.yaml @@ -23,7 +23,7 @@ MODEL_ARGS: --micro-batch-size: 4 --rampup-batch-size: "[384 384 97656250]" --global-batch-size: 1152 - --train-samples: 4882812 + --train-samples: 19531250 --manual-gc: true # Transformer Engine args --transformer-impl: transformer_engine @@ -68,9 +68,10 @@ MODEL_ARGS: --eval-iters: 32 --eval-interval: 2000 # Add checkpointing args - --load: ${CHECKPOINT_LOAD_PATH} --save: ${CHECKPOINT_SAVE_PATH} - --save-interval: 1000 + --load: ${CHECKPOINT_LOAD_PATH} + --save-interval: 5000 + --save-retain-interval: 10000 # Add initialization args --init-method-std: 
0.0134 # Add logging args @@ -83,10 +84,11 @@ MODEL_ARGS: --log-interval: 100 --tensorboard-dir: ${TENSORBOARD_PATH} --wandb-project: megatron-core-release-runs + --wandb-entity: adlr --wandb-exp-name: ${WANDB_EXPERIMENT} # Add mixed precision args --bf16: true - --exit-interval: 13000 + --exit-interval: 10200 --wandb-save-dir: ${WANDB_SAVE_PATH} METRICS: - "iteration-time" diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap/golden_values_lts_dgxa100_dracooci.json b/tests/functional_tests/test_cases/gpt/gpt3_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap/golden_values_lts_dgxa100_dracooci.json index 1ba701443ce..7fa302274bf 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap/golden_values_lts_dgxa100_dracooci.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mcore_te_tp1_pp4_vp1_resume_torch_dist_tunable_overlap/golden_values_lts_dgxa100_dracooci.json @@ -534,4 +534,4 @@ "100": 0.16898 } } -} \ No newline at end of file +} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_mcore_te_tp1_pp4_vp1_tunable_overlap/golden_values_lts_dgxa100_dracooci.json b/tests/functional_tests/test_cases/gpt/gpt3_mcore_te_tp1_pp4_vp1_tunable_overlap/golden_values_lts_dgxa100_dracooci.json index d8ec5426bd1..363e94d8f52 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_mcore_te_tp1_pp4_vp1_tunable_overlap/golden_values_lts_dgxa100_dracooci.json +++ b/tests/functional_tests/test_cases/gpt/gpt3_mcore_te_tp1_pp4_vp1_tunable_overlap/golden_values_lts_dgxa100_dracooci.json @@ -284,4 +284,4 @@ "50": 0.16165 } } -} \ No newline at end of file +} diff --git a/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_b200_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_b200_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/model_config.yaml index 
f6892ae5c24..048256c3504 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_b200_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_b200_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/model_config.yaml @@ -41,7 +41,7 @@ MODEL_ARGS: --pipeline-model-parallel-size: 2 --sequence-parallel: true --tp-comm-overlap: true - --tp-comm-overlap-cfg: tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_b200_1N8G_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/tp_comm_overlap_cfg.yaml + --tp-comm-overlap-cfg: tests/functional_tests/test_cases/gpt/gpt3_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/tp_comm_overlap_cfg.yaml --deterministic-mode: true --no-gradient-accumulation-fusion: true --fp8-format: hybrid diff --git a/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_h100_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/golden_values_dev_dgx_h100.json b/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_h100_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/golden_values_dev_dgx_h100.json new file mode 100644 index 00000000000..f56b5fa6f77 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_h100_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/golden_values_dev_dgx_h100.json @@ -0,0 +1,10037 @@ +{ + "lm loss": { + "start_step": 1, + "end_step": 2000, + "step_interval": 1, + "values": { + "1": 10.85954, + "2": 10.88017, + "3": 10.87732, + "4": 10.8999, + "5": 10.88699, + "6": 10.87335, + "7": 10.88219, + "8": 10.87225, + "9": 10.87277, + "10": 10.87494, + "11": 10.85221, + "12": 10.84405, + "13": 10.84222, + "14": 10.86461, + "15": 10.78656, + "16": 10.81059, + "17": 10.77436, + "18": 10.81246, + "19": 10.72203, + "20": 10.69596, + "21": 10.64272, + "22": 10.64956, + "23": 10.65288, + "24": 10.54233, + "25": 10.55491, + "26": 10.63818, + "27": 10.44117, + "28": 
10.46928, + "29": 10.34986, + "30": 10.24645, + "31": 10.42625, + "32": 10.33791, + "33": 10.19559, + "34": 10.14074, + "35": 10.22182, + "36": 10.13202, + "37": 10.07533, + "38": 10.01538, + "39": 10.02986, + "40": 10.05768, + "41": 9.93219, + "42": 9.93962, + "43": 9.8498, + "44": 9.97902, + "45": 9.99946, + "46": 9.83276, + "47": 9.99696, + "48": 9.80958, + "49": 9.94884, + "50": 9.94537, + "51": 9.58197, + "52": 9.79331, + "53": 9.62548, + "54": 9.88686, + "55": 9.73482, + "56": 9.84492, + "57": 9.85708, + "58": 9.87627, + "59": 9.54205, + "60": 9.64489, + "61": 9.88334, + "62": 9.75928, + "63": 9.68107, + "64": 9.82461, + "65": 9.59476, + "66": 9.62868, + "67": 9.74002, + "68": 9.60205, + "69": 9.29216, + "70": 9.42139, + "71": 9.78753, + "72": 9.7124, + "73": 9.61815, + "74": 9.44773, + "75": 9.23898, + "76": 9.50824, + "77": 9.5795, + "78": 9.56058, + "79": 9.30801, + "80": 9.35768, + "81": 9.45813, + "82": 9.55358, + "83": 9.53407, + "84": 9.35442, + "85": 9.3992, + "86": 9.65282, + "87": 9.23449, + "88": 9.48753, + "89": 9.22214, + "90": 9.41067, + "91": 9.38753, + "92": 9.37682, + "93": 9.36024, + "94": 9.51507, + "95": 9.42125, + "96": 9.33616, + "97": 9.20399, + "98": 9.4954, + "99": 9.29284, + "100": 9.35905, + "101": 9.24757, + "102": 9.24676, + "103": 9.07735, + "104": 9.16669, + "105": 9.37858, + "106": 9.1496, + "107": 9.1756, + "108": 9.316, + "109": 9.29109, + "110": 9.36426, + "111": 9.17995, + "112": 9.23471, + "113": 9.35297, + "114": 9.35265, + "115": 9.32672, + "116": 9.00223, + "117": 9.06476, + "118": 9.06643, + "119": 9.22418, + "120": 9.08485, + "121": 9.19671, + "122": 9.14164, + "123": 9.25933, + "124": 9.45506, + "125": 9.21512, + "126": 9.06416, + "127": 9.01814, + "128": 9.22131, + "129": 8.98184, + "130": 9.13972, + "131": 9.15856, + "132": 9.03559, + "133": 8.85977, + "134": 9.18539, + "135": 8.88999, + "136": 9.16801, + "137": 9.15771, + "138": 9.23511, + "139": 9.09197, + "140": 8.87218, + "141": 9.29906, + "142": 9.19961, + 
"143": 9.1169, + "144": 9.24305, + "145": 9.10446, + "146": 8.98709, + "147": 8.98617, + "148": 9.13261, + "149": 9.06335, + "150": 9.01504, + "151": 8.92787, + "152": 8.8739, + "153": 9.06335, + "154": 9.17913, + "155": 9.13381, + "156": 9.04889, + "157": 9.15064, + "158": 9.04955, + "159": 9.03261, + "160": 8.88987, + "161": 9.04543, + "162": 8.89584, + "163": 8.84272, + "164": 8.97534, + "165": 8.93132, + "166": 8.65959, + "167": 8.83243, + "168": 8.81953, + "169": 8.6566, + "170": 9.04622, + "171": 8.72286, + "172": 8.82159, + "173": 8.91163, + "174": 8.84751, + "175": 8.70611, + "176": 8.75439, + "177": 8.7626, + "178": 8.7201, + "179": 8.64046, + "180": 8.74053, + "181": 8.69404, + "182": 8.72193, + "183": 9.08364, + "184": 8.6088, + "185": 8.88346, + "186": 8.74191, + "187": 8.56949, + "188": 8.67975, + "189": 8.86478, + "190": 8.53542, + "191": 8.66632, + "192": 8.61266, + "193": 8.57469, + "194": 8.75195, + "195": 8.59279, + "196": 8.77393, + "197": 8.74234, + "198": 8.62722, + "199": 8.77454, + "200": 8.73803, + "201": 8.66979, + "202": 8.54593, + "203": 8.54185, + "204": 8.71307, + "205": 8.2228, + "206": 8.8603, + "207": 8.68157, + "208": 8.70896, + "209": 8.75303, + "210": 8.57807, + "211": 8.84258, + "212": 8.49127, + "213": 8.57327, + "214": 8.51199, + "215": 8.5645, + "216": 8.50863, + "217": 8.53183, + "218": 8.52998, + "219": 8.64367, + "220": 8.54746, + "221": 8.39991, + "222": 8.50528, + "223": 8.43775, + "224": 8.53014, + "225": 8.57091, + "226": 8.4394, + "227": 8.67918, + "228": 8.38473, + "229": 8.45045, + "230": 8.49717, + "231": 8.49832, + "232": 8.49783, + "233": 8.49539, + "234": 8.63795, + "235": 8.55875, + "236": 8.39461, + "237": 8.48826, + "238": 8.30522, + "239": 8.562, + "240": 8.66952, + "241": 8.44144, + "242": 8.47219, + "243": 8.51768, + "244": 8.36825, + "245": 8.59274, + "246": 8.59497, + "247": 8.44008, + "248": 8.51279, + "249": 8.52035, + "250": 8.42183, + "251": 8.37751, + "252": 8.54393, + "253": 8.31454, + "254": 8.351, 
+ "255": 8.29005, + "256": 8.20261, + "257": 8.394, + "258": 8.45386, + "259": 8.23708, + "260": 8.2437, + "261": 8.23617, + "262": 8.34919, + "263": 8.30683, + "264": 8.18831, + "265": 8.33481, + "266": 8.23369, + "267": 7.89923, + "268": 8.38063, + "269": 8.40466, + "270": 8.26271, + "271": 8.279, + "272": 8.32109, + "273": 8.13747, + "274": 8.09677, + "275": 8.01372, + "276": 7.92611, + "277": 8.24041, + "278": 8.05017, + "279": 7.96688, + "280": 7.75652, + "281": 8.10713, + "282": 8.15049, + "283": 8.15621, + "284": 8.10354, + "285": 8.07234, + "286": 7.90454, + "287": 7.9963, + "288": 8.24862, + "289": 8.17575, + "290": 8.13093, + "291": 8.25763, + "292": 8.08131, + "293": 8.12059, + "294": 7.98178, + "295": 7.97108, + "296": 8.24114, + "297": 7.79647, + "298": 8.04847, + "299": 7.94257, + "300": 7.85748, + "301": 8.01649, + "302": 7.95112, + "303": 7.99606, + "304": 7.96394, + "305": 8.00301, + "306": 7.98312, + "307": 7.99372, + "308": 8.00491, + "309": 8.01362, + "310": 7.97824, + "311": 7.9323, + "312": 7.89419, + "313": 7.84054, + "314": 7.83, + "315": 7.8335, + "316": 7.75122, + "317": 7.934, + "318": 7.98841, + "319": 7.83343, + "320": 7.57896, + "321": 7.75427, + "322": 7.83781, + "323": 7.7769, + "324": 7.91623, + "325": 7.80539, + "326": 7.65641, + "327": 7.86989, + "328": 7.79369, + "329": 7.89137, + "330": 7.7586, + "331": 7.52885, + "332": 7.81946, + "333": 7.84359, + "334": 7.68375, + "335": 7.69975, + "336": 7.91931, + "337": 7.65356, + "338": 7.90277, + "339": 7.7307, + "340": 7.7606, + "341": 7.70898, + "342": 7.82827, + "343": 7.61824, + "344": 7.58818, + "345": 7.61602, + "346": 7.46415, + "347": 7.5612, + "348": 7.68737, + "349": 7.58361, + "350": 7.65762, + "351": 7.75424, + "352": 7.711, + "353": 7.50477, + "354": 7.74925, + "355": 7.77011, + "356": 7.78305, + "357": 7.81855, + "358": 7.60031, + "359": 7.55187, + "360": 7.63213, + "361": 7.55298, + "362": 7.76875, + "363": 7.59465, + "364": 7.57928, + "365": 7.62839, + "366": 7.31096, + 
"367": 7.55919, + "368": 7.44577, + "369": 7.3551, + "370": 7.46985, + "371": 7.46609, + "372": 7.65475, + "373": 7.52989, + "374": 7.44843, + "375": 7.53627, + "376": 7.35288, + "377": 7.24313, + "378": 7.54312, + "379": 7.4994, + "380": 7.38859, + "381": 7.47577, + "382": 7.29951, + "383": 7.28478, + "384": 7.4126, + "385": 7.39829, + "386": 7.23652, + "387": 7.42535, + "388": 7.28487, + "389": 7.44425, + "390": 7.24578, + "391": 7.6482, + "392": 7.34245, + "393": 7.42463, + "394": 7.48248, + "395": 7.44483, + "396": 7.29231, + "397": 7.23386, + "398": 7.42507, + "399": 7.16173, + "400": 7.30149, + "401": 7.3585, + "402": 7.39832, + "403": 7.28806, + "404": 7.30832, + "405": 7.27202, + "406": 7.22485, + "407": 7.36688, + "408": 7.18877, + "409": 7.17334, + "410": 7.31999, + "411": 7.2223, + "412": 7.20595, + "413": 7.24047, + "414": 6.9176, + "415": 7.3341, + "416": 7.43139, + "417": 7.0298, + "418": 7.28201, + "419": 7.04286, + "420": 7.41864, + "421": 7.18456, + "422": 7.24003, + "423": 7.09785, + "424": 7.24581, + "425": 7.32182, + "426": 7.29342, + "427": 7.1359, + "428": 7.09617, + "429": 6.87976, + "430": 7.20691, + "431": 7.00662, + "432": 7.23762, + "433": 6.97996, + "434": 6.96131, + "435": 7.02219, + "436": 7.01484, + "437": 6.9921, + "438": 7.00514, + "439": 6.94235, + "440": 7.06367, + "441": 7.04936, + "442": 7.10187, + "443": 7.0941, + "444": 6.71175, + "445": 6.99825, + "446": 7.14631, + "447": 7.12745, + "448": 6.98621, + "449": 7.0508, + "450": 7.01761, + "451": 6.83255, + "452": 6.9157, + "453": 7.02056, + "454": 6.97019, + "455": 7.03145, + "456": 6.99451, + "457": 6.97283, + "458": 6.9066, + "459": 6.69482, + "460": 7.06773, + "461": 7.09857, + "462": 6.87116, + "463": 7.05522, + "464": 6.64922, + "465": 7.02852, + "466": 7.00594, + "467": 6.99935, + "468": 6.95215, + "469": 6.8291, + "470": 7.04615, + "471": 6.88316, + "472": 6.96104, + "473": 6.82398, + "474": 6.97228, + "475": 7.16917, + "476": 6.76379, + "477": 6.89771, + "478": 6.91142, + 
"479": 6.70396, + "480": 7.03025, + "481": 6.99763, + "482": 6.73608, + "483": 6.78502, + "484": 6.75413, + "485": 6.93205, + "486": 7.06796, + "487": 6.63653, + "488": 6.88737, + "489": 6.77108, + "490": 6.82685, + "491": 6.71122, + "492": 6.69849, + "493": 6.77155, + "494": 6.67651, + "495": 6.63733, + "496": 6.59006, + "497": 6.84564, + "498": 6.65256, + "499": 6.85952, + "500": 6.65795, + "501": 6.73562, + "502": 6.84527, + "503": 6.71173, + "504": 6.62075, + "505": 6.62291, + "506": 6.75234, + "507": 6.86844, + "508": 6.86157, + "509": 6.6555, + "510": 6.82834, + "511": 6.74132, + "512": 6.74051, + "513": 6.66032, + "514": 6.71273, + "515": 6.45045, + "516": 6.74436, + "517": 6.71073, + "518": 6.53817, + "519": 6.63527, + "520": 6.85868, + "521": 6.66571, + "522": 6.70871, + "523": 6.74553, + "524": 6.73396, + "525": 6.6762, + "526": 6.4139, + "527": 6.79901, + "528": 6.66011, + "529": 6.63182, + "530": 6.62611, + "531": 6.64289, + "532": 6.63292, + "533": 6.76391, + "534": 6.61301, + "535": 6.74754, + "536": 6.62605, + "537": 6.63867, + "538": 6.53166, + "539": 6.5542, + "540": 6.5862, + "541": 6.45207, + "542": 6.66957, + "543": 6.68064, + "544": 6.67601, + "545": 6.81307, + "546": 6.63333, + "547": 6.41838, + "548": 6.72367, + "549": 6.69982, + "550": 6.52974, + "551": 6.7478, + "552": 6.63991, + "553": 6.48451, + "554": 6.63407, + "555": 6.4629, + "556": 6.61792, + "557": 6.63496, + "558": 6.3874, + "559": 6.37379, + "560": 6.58293, + "561": 6.73352, + "562": 6.6356, + "563": 6.7444, + "564": 6.35291, + "565": 6.51482, + "566": 6.70247, + "567": 6.56973, + "568": 6.51145, + "569": 6.45578, + "570": 6.36768, + "571": 6.63597, + "572": 6.31359, + "573": 6.58668, + "574": 6.47613, + "575": 6.64961, + "576": 6.5168, + "577": 6.53078, + "578": 6.4847, + "579": 6.46709, + "580": 6.56793, + "581": 6.60857, + "582": 6.48362, + "583": 6.51541, + "584": 6.52831, + "585": 6.42713, + "586": 6.4178, + "587": 6.46113, + "588": 6.56878, + "589": 6.62653, + "590": 
6.29114, + "591": 6.67541, + "592": 6.26902, + "593": 6.4773, + "594": 6.38719, + "595": 6.3632, + "596": 6.26099, + "597": 6.18986, + "598": 6.45726, + "599": 6.3998, + "600": 6.45709, + "601": 6.26132, + "602": 6.5338, + "603": 6.52288, + "604": 6.38993, + "605": 6.49993, + "606": 6.31475, + "607": 6.53507, + "608": 6.67525, + "609": 6.17714, + "610": 6.57295, + "611": 6.40188, + "612": 6.57929, + "613": 6.42667, + "614": 6.20672, + "615": 6.40081, + "616": 6.36019, + "617": 6.37969, + "618": 6.4512, + "619": 6.14244, + "620": 6.41233, + "621": 6.46338, + "622": 6.40096, + "623": 6.58352, + "624": 6.36078, + "625": 6.28553, + "626": 6.30525, + "627": 6.44574, + "628": 6.2557, + "629": 6.58813, + "630": 6.36641, + "631": 6.3498, + "632": 6.30972, + "633": 6.25733, + "634": 6.30887, + "635": 6.54592, + "636": 6.24834, + "637": 6.63634, + "638": 6.02046, + "639": 6.2798, + "640": 6.29548, + "641": 6.20953, + "642": 6.28471, + "643": 6.461, + "644": 6.25863, + "645": 6.25115, + "646": 6.40601, + "647": 6.33707, + "648": 6.35671, + "649": 6.3488, + "650": 6.48415, + "651": 6.33395, + "652": 6.25233, + "653": 6.3826, + "654": 6.45063, + "655": 6.52494, + "656": 6.32781, + "657": 6.43503, + "658": 6.24353, + "659": 6.1554, + "660": 6.39397, + "661": 6.17184, + "662": 6.27494, + "663": 6.37237, + "664": 6.33376, + "665": 6.40442, + "666": 6.16399, + "667": 6.1965, + "668": 6.2366, + "669": 6.21813, + "670": 6.24601, + "671": 6.24468, + "672": 6.49032, + "673": 6.34071, + "674": 6.2969, + "675": 6.38396, + "676": 6.39021, + "677": 6.30588, + "678": 6.27751, + "679": 6.23892, + "680": 6.2942, + "681": 6.20621, + "682": 6.08719, + "683": 6.27464, + "684": 6.32896, + "685": 6.30248, + "686": 6.15397, + "687": 6.2862, + "688": 6.20754, + "689": 6.6215, + "690": 6.17931, + "691": 6.18188, + "692": 6.2745, + "693": 6.14405, + "694": 6.23487, + "695": 6.32617, + "696": 6.11842, + "697": 6.15483, + "698": 6.23128, + "699": 6.46051, + "700": 6.0454, + "701": 6.06467, + "702": 
6.25219, + "703": 6.18603, + "704": 6.21704, + "705": 6.13155, + "706": 6.07593, + "707": 6.25376, + "708": 6.31553, + "709": 6.01087, + "710": 6.16305, + "711": 6.26062, + "712": 6.18307, + "713": 5.89806, + "714": 6.10759, + "715": 6.11617, + "716": 6.41405, + "717": 6.19202, + "718": 6.2345, + "719": 6.27471, + "720": 6.26372, + "721": 6.26277, + "722": 6.23442, + "723": 6.0814, + "724": 6.22797, + "725": 6.04057, + "726": 6.30046, + "727": 6.01682, + "728": 6.04617, + "729": 6.09111, + "730": 6.18359, + "731": 6.10398, + "732": 6.08898, + "733": 6.12312, + "734": 6.38423, + "735": 6.27849, + "736": 6.18184, + "737": 6.36645, + "738": 6.13411, + "739": 6.14591, + "740": 5.87975, + "741": 6.00667, + "742": 5.98459, + "743": 6.17495, + "744": 6.02962, + "745": 6.15497, + "746": 6.03272, + "747": 6.09789, + "748": 6.23436, + "749": 5.94191, + "750": 6.16819, + "751": 5.9596, + "752": 6.01941, + "753": 6.02989, + "754": 6.28798, + "755": 6.13521, + "756": 6.25357, + "757": 6.02098, + "758": 6.20422, + "759": 6.23062, + "760": 6.02316, + "761": 6.19655, + "762": 6.22713, + "763": 6.03754, + "764": 5.9636, + "765": 5.93413, + "766": 5.97155, + "767": 5.81277, + "768": 6.18725, + "769": 6.27646, + "770": 6.29561, + "771": 5.78767, + "772": 6.03281, + "773": 6.18558, + "774": 5.88583, + "775": 6.03167, + "776": 6.13086, + "777": 5.88612, + "778": 6.05891, + "779": 5.87414, + "780": 6.14047, + "781": 5.85641, + "782": 6.04961, + "783": 5.95687, + "784": 5.91852, + "785": 6.09816, + "786": 6.10929, + "787": 5.66006, + "788": 5.99915, + "789": 6.21789, + "790": 6.26737, + "791": 5.79122, + "792": 5.99828, + "793": 6.18387, + "794": 6.02746, + "795": 6.0051, + "796": 6.17065, + "797": 6.05376, + "798": 6.06076, + "799": 6.11682, + "800": 6.02167, + "801": 6.15011, + "802": 5.98473, + "803": 6.15363, + "804": 6.00859, + "805": 5.83055, + "806": 6.08757, + "807": 6.04997, + "808": 5.92717, + "809": 5.77802, + "810": 6.01973, + "811": 5.93299, + "812": 5.91169, + "813": 
5.96567, + "814": 6.0369, + "815": 5.8146, + "816": 6.12034, + "817": 5.94337, + "818": 6.0674, + "819": 6.01476, + "820": 5.7319, + "821": 5.95027, + "822": 6.20452, + "823": 5.83139, + "824": 5.98275, + "825": 6.18795, + "826": 6.20019, + "827": 6.05802, + "828": 6.06976, + "829": 5.89149, + "830": 5.94221, + "831": 5.89773, + "832": 5.97341, + "833": 6.06501, + "834": 5.99675, + "835": 6.00654, + "836": 5.79277, + "837": 6.11496, + "838": 5.86966, + "839": 5.83554, + "840": 6.18614, + "841": 5.78491, + "842": 5.89169, + "843": 5.95102, + "844": 6.00954, + "845": 6.09153, + "846": 5.68733, + "847": 5.75715, + "848": 5.96838, + "849": 6.09512, + "850": 5.84886, + "851": 6.01693, + "852": 5.75188, + "853": 5.99355, + "854": 6.01844, + "855": 5.81656, + "856": 5.99593, + "857": 6.00207, + "858": 6.05507, + "859": 5.95295, + "860": 6.09632, + "861": 6.07189, + "862": 6.00434, + "863": 5.83757, + "864": 5.84474, + "865": 5.93791, + "866": 5.89404, + "867": 5.87803, + "868": 6.06515, + "869": 6.08564, + "870": 5.97153, + "871": 6.04317, + "872": 5.89525, + "873": 5.84383, + "874": 6.02742, + "875": 5.9144, + "876": 5.96905, + "877": 5.92979, + "878": 6.09819, + "879": 5.76783, + "880": 6.01501, + "881": 5.99647, + "882": 5.9097, + "883": 5.67626, + "884": 5.96521, + "885": 5.74544, + "886": 5.99268, + "887": 5.90979, + "888": 5.83897, + "889": 6.01033, + "890": 6.02378, + "891": 5.95247, + "892": 5.70829, + "893": 6.0922, + "894": 5.73134, + "895": 5.84057, + "896": 5.84075, + "897": 5.8564, + "898": 5.9238, + "899": 5.93486, + "900": 5.89946, + "901": 5.95293, + "902": 5.83295, + "903": 6.05665, + "904": 5.93153, + "905": 5.90441, + "906": 5.6172, + "907": 5.91178, + "908": 5.73853, + "909": 5.99118, + "910": 5.86603, + "911": 5.70397, + "912": 5.70712, + "913": 5.76497, + "914": 5.83944, + "915": 5.80032, + "916": 5.8904, + "917": 5.86913, + "918": 5.82415, + "919": 5.81575, + "920": 5.89552, + "921": 5.84163, + "922": 5.62427, + "923": 6.03657, + "924": 5.60536, + 
"925": 5.62335, + "926": 5.86148, + "927": 5.96071, + "928": 5.84005, + "929": 5.82702, + "930": 5.95816, + "931": 5.765, + "932": 5.59211, + "933": 5.6351, + "934": 5.80541, + "935": 5.63715, + "936": 5.83772, + "937": 5.96629, + "938": 5.59109, + "939": 5.7899, + "940": 5.96726, + "941": 5.7264, + "942": 5.83547, + "943": 5.86622, + "944": 5.95478, + "945": 5.70263, + "946": 5.55832, + "947": 5.74831, + "948": 5.79312, + "949": 5.8268, + "950": 5.84353, + "951": 5.72242, + "952": 5.69295, + "953": 5.67852, + "954": 5.72473, + "955": 5.53107, + "956": 5.62074, + "957": 5.84076, + "958": 5.79676, + "959": 5.57317, + "960": 5.80125, + "961": 5.82952, + "962": 5.76695, + "963": 5.76461, + "964": 5.70677, + "965": 5.64012, + "966": 5.59617, + "967": 5.72434, + "968": 5.74036, + "969": 5.82392, + "970": 5.64422, + "971": 5.7065, + "972": 5.85308, + "973": 5.66884, + "974": 5.71841, + "975": 5.86273, + "976": 5.70493, + "977": 5.77104, + "978": 5.6858, + "979": 5.58655, + "980": 5.75924, + "981": 5.8969, + "982": 5.47038, + "983": 5.61817, + "984": 5.54504, + "985": 5.59032, + "986": 5.64132, + "987": 5.56966, + "988": 5.70939, + "989": 5.69379, + "990": 5.62195, + "991": 5.84899, + "992": 5.77877, + "993": 5.87022, + "994": 5.69735, + "995": 5.73242, + "996": 5.73704, + "997": 5.81329, + "998": 5.83634, + "999": 5.83399, + "1000": 5.68342, + "1001": 5.86668, + "1002": 5.76052, + "1003": 5.64259, + "1004": 5.79811, + "1005": 5.53617, + "1006": 5.326, + "1007": 5.76701, + "1008": 5.79136, + "1009": 5.65046, + "1010": 5.77942, + "1011": 5.89493, + "1012": 5.62303, + "1013": 5.61569, + "1014": 5.68111, + "1015": 5.55747, + "1016": 5.87327, + "1017": 5.83312, + "1018": 5.61865, + "1019": 5.73414, + "1020": 5.61755, + "1021": 5.848, + "1022": 5.50045, + "1023": 5.65182, + "1024": 5.74493, + "1025": 5.5692, + "1026": 5.41415, + "1027": 5.60696, + "1028": 5.6928, + "1029": 5.68764, + "1030": 5.68746, + "1031": 5.40696, + "1032": 5.78748, + "1033": 5.58136, + "1034": 5.61937, + 
"1035": 5.71368, + "1036": 5.62818, + "1037": 5.3679, + "1038": 5.66452, + "1039": 5.64347, + "1040": 5.57004, + "1041": 5.59722, + "1042": 5.81329, + "1043": 5.566, + "1044": 5.46906, + "1045": 5.9659, + "1046": 5.4866, + "1047": 5.38954, + "1048": 5.50027, + "1049": 5.67182, + "1050": 5.6991, + "1051": 5.57928, + "1052": 5.68227, + "1053": 5.62737, + "1054": 5.45766, + "1055": 5.60313, + "1056": 5.67386, + "1057": 5.75895, + "1058": 5.56782, + "1059": 5.74888, + "1060": 5.82022, + "1061": 5.47624, + "1062": 5.64897, + "1063": 5.50121, + "1064": 5.59136, + "1065": 5.55347, + "1066": 5.74367, + "1067": 5.67235, + "1068": 5.44068, + "1069": 5.60636, + "1070": 5.81264, + "1071": 5.51129, + "1072": 5.61871, + "1073": 5.62147, + "1074": 5.524, + "1075": 5.70529, + "1076": 5.5934, + "1077": 5.71153, + "1078": 5.56524, + "1079": 5.61728, + "1080": 5.64251, + "1081": 5.62319, + "1082": 5.49648, + "1083": 5.64086, + "1084": 5.55389, + "1085": 5.40631, + "1086": 5.62008, + "1087": 5.44148, + "1088": 5.51218, + "1089": 5.7676, + "1090": 5.53165, + "1091": 5.51388, + "1092": 5.41011, + "1093": 5.70025, + "1094": 5.57364, + "1095": 5.57735, + "1096": 5.61585, + "1097": 5.64586, + "1098": 5.64877, + "1099": 5.51631, + "1100": 5.63778, + "1101": 5.67335, + "1102": 5.54037, + "1103": 5.54969, + "1104": 5.53882, + "1105": 5.54754, + "1106": 5.68315, + "1107": 5.68556, + "1108": 5.78611, + "1109": 5.53666, + "1110": 5.66598, + "1111": 5.58973, + "1112": 5.58039, + "1113": 5.62611, + "1114": 5.61279, + "1115": 5.59718, + "1116": 5.65925, + "1117": 5.64676, + "1118": 5.65036, + "1119": 5.70919, + "1120": 5.62738, + "1121": 5.37352, + "1122": 5.22976, + "1123": 5.47237, + "1124": 5.64939, + "1125": 5.67974, + "1126": 5.679, + "1127": 5.56811, + "1128": 5.61992, + "1129": 5.29637, + "1130": 5.54359, + "1131": 5.63153, + "1132": 5.72427, + "1133": 5.51914, + "1134": 5.56063, + "1135": 5.52056, + "1136": 5.42646, + "1137": 5.45971, + "1138": 5.56927, + "1139": 5.41452, + "1140": 5.2656, 
+ "1141": 5.58265, + "1142": 5.64152, + "1143": 5.38298, + "1144": 5.38584, + "1145": 5.36231, + "1146": 5.63508, + "1147": 5.49183, + "1148": 5.50524, + "1149": 5.52352, + "1150": 5.39801, + "1151": 5.5563, + "1152": 5.41525, + "1153": 5.44791, + "1154": 5.49757, + "1155": 5.43833, + "1156": 5.3488, + "1157": 5.66444, + "1158": 5.39487, + "1159": 5.33455, + "1160": 5.79503, + "1161": 5.53955, + "1162": 5.45818, + "1163": 5.52563, + "1164": 5.3837, + "1165": 5.52861, + "1166": 5.48753, + "1167": 5.36312, + "1168": 5.49491, + "1169": 5.39842, + "1170": 5.59202, + "1171": 5.48502, + "1172": 5.64238, + "1173": 5.62295, + "1174": 5.50843, + "1175": 5.34639, + "1176": 5.38504, + "1177": 5.55461, + "1178": 5.46852, + "1179": 5.49505, + "1180": 5.46014, + "1181": 5.56031, + "1182": 5.59593, + "1183": 5.77155, + "1184": 5.54926, + "1185": 5.29008, + "1186": 5.60451, + "1187": 5.55363, + "1188": 5.51655, + "1189": 5.39133, + "1190": 5.40482, + "1191": 5.39266, + "1192": 5.50142, + "1193": 5.46347, + "1194": 5.45607, + "1195": 5.32751, + "1196": 5.52219, + "1197": 5.4809, + "1198": 5.52789, + "1199": 5.3874, + "1200": 5.33059, + "1201": 5.48969, + "1202": 5.43584, + "1203": 5.49537, + "1204": 5.40861, + "1205": 5.48971, + "1206": 5.3371, + "1207": 5.58625, + "1208": 5.4312, + "1209": 5.29323, + "1210": 5.50765, + "1211": 5.51506, + "1212": 5.59777, + "1213": 5.42123, + "1214": 5.51018, + "1215": 5.23832, + "1216": 5.40989, + "1217": 5.38537, + "1218": 5.45232, + "1219": 5.48221, + "1220": 5.38594, + "1221": 5.44848, + "1222": 5.31032, + "1223": 5.47835, + "1224": 5.42017, + "1225": 5.43499, + "1226": 5.3238, + "1227": 5.47632, + "1228": 5.72418, + "1229": 5.32629, + "1230": 5.40556, + "1231": 5.06972, + "1232": 5.78794, + "1233": 5.28923, + "1234": 5.24535, + "1235": 5.37092, + "1236": 5.48471, + "1237": 5.20864, + "1238": 5.41643, + "1239": 5.40751, + "1240": 5.46767, + "1241": 5.57266, + "1242": 5.4536, + "1243": 5.43063, + "1244": 5.51812, + "1245": 5.19115, + "1246": 
5.72042, + "1247": 5.43187, + "1248": 5.30004, + "1249": 5.40113, + "1250": 5.33798, + "1251": 5.42034, + "1252": 5.57217, + "1253": 5.48773, + "1254": 5.30628, + "1255": 5.51443, + "1256": 5.60755, + "1257": 5.4214, + "1258": 5.56457, + "1259": 5.48027, + "1260": 5.51461, + "1261": 5.63883, + "1262": 5.39531, + "1263": 5.32916, + "1264": 5.50671, + "1265": 5.30632, + "1266": 5.23819, + "1267": 5.37206, + "1268": 5.39267, + "1269": 5.15366, + "1270": 5.40418, + "1271": 5.27732, + "1272": 5.5252, + "1273": 5.30228, + "1274": 5.3516, + "1275": 5.38466, + "1276": 5.39786, + "1277": 5.46218, + "1278": 5.34689, + "1279": 5.44274, + "1280": 5.45919, + "1281": 5.40638, + "1282": 5.3824, + "1283": 5.42204, + "1284": 5.34841, + "1285": 5.50133, + "1286": 5.33557, + "1287": 5.58795, + "1288": 5.26493, + "1289": 5.429, + "1290": 5.50282, + "1291": 5.50335, + "1292": 5.44662, + "1293": 5.41955, + "1294": 5.49953, + "1295": 5.34675, + "1296": 5.19062, + "1297": 5.17238, + "1298": 5.11916, + "1299": 5.30339, + "1300": 5.21032, + "1301": 5.30157, + "1302": 5.27472, + "1303": 5.36107, + "1304": 5.43231, + "1305": 5.36999, + "1306": 5.25347, + "1307": 5.18829, + "1308": 5.27033, + "1309": 5.40736, + "1310": 5.26399, + "1311": 5.38109, + "1312": 5.35438, + "1313": 5.30056, + "1314": 5.2953, + "1315": 5.42245, + "1316": 5.26148, + "1317": 5.28065, + "1318": 5.2198, + "1319": 5.34619, + "1320": 5.42093, + "1321": 5.44976, + "1322": 5.46399, + "1323": 5.37327, + "1324": 5.25463, + "1325": 5.40657, + "1326": 5.54082, + "1327": 5.39378, + "1328": 5.21893, + "1329": 5.41851, + "1330": 5.40079, + "1331": 5.31685, + "1332": 5.31253, + "1333": 5.37243, + "1334": 5.44685, + "1335": 5.37136, + "1336": 5.43779, + "1337": 5.47852, + "1338": 5.30292, + "1339": 5.14181, + "1340": 5.41486, + "1341": 5.3443, + "1342": 5.36197, + "1343": 5.47816, + "1344": 5.37832, + "1345": 5.34294, + "1346": 5.08195, + "1347": 5.38558, + "1348": 5.4918, + "1349": 5.40832, + "1350": 5.02622, + "1351": 5.3151, + 
"1352": 5.1591, + "1353": 5.34674, + "1354": 5.35963, + "1355": 5.11092, + "1356": 5.2587, + "1357": 5.29209, + "1358": 5.15773, + "1359": 5.11035, + "1360": 5.17288, + "1361": 5.30521, + "1362": 5.06318, + "1363": 5.2947, + "1364": 5.40031, + "1365": 5.02241, + "1366": 5.11779, + "1367": 5.33051, + "1368": 5.18648, + "1369": 5.22984, + "1370": 5.19906, + "1371": 5.2839, + "1372": 5.26155, + "1373": 5.28402, + "1374": 5.28112, + "1375": 5.46052, + "1376": 5.2713, + "1377": 5.26467, + "1378": 5.31344, + "1379": 5.22741, + "1380": 5.26107, + "1381": 5.47871, + "1382": 5.08923, + "1383": 5.375, + "1384": 5.35914, + "1385": 5.38983, + "1386": 5.16417, + "1387": 5.16094, + "1388": 5.28017, + "1389": 5.30376, + "1390": 5.25514, + "1391": 5.26911, + "1392": 5.37008, + "1393": 5.38307, + "1394": 5.40394, + "1395": 5.32492, + "1396": 5.21356, + "1397": 5.28, + "1398": 5.37051, + "1399": 5.35873, + "1400": 5.26512, + "1401": 5.35924, + "1402": 5.42148, + "1403": 5.20238, + "1404": 5.28629, + "1405": 5.11984, + "1406": 4.99128, + "1407": 5.40442, + "1408": 5.19825, + "1409": 5.3964, + "1410": 5.37519, + "1411": 4.91758, + "1412": 5.35561, + "1413": 5.41314, + "1414": 5.21823, + "1415": 5.44159, + "1416": 5.32905, + "1417": 5.38859, + "1418": 5.29946, + "1419": 5.31787, + "1420": 5.43974, + "1421": 5.39414, + "1422": 5.41749, + "1423": 5.005, + "1424": 5.32995, + "1425": 5.58618, + "1426": 5.23059, + "1427": 5.31804, + "1428": 5.33277, + "1429": 5.07552, + "1430": 5.33075, + "1431": 5.32688, + "1432": 5.33826, + "1433": 5.19107, + "1434": 5.16341, + "1435": 5.19905, + "1436": 5.10851, + "1437": 5.229, + "1438": 5.31867, + "1439": 5.34731, + "1440": 5.34991, + "1441": 5.16484, + "1442": 5.22015, + "1443": 5.20933, + "1444": 5.13701, + "1445": 5.07414, + "1446": 5.26836, + "1447": 5.25895, + "1448": 5.2904, + "1449": 5.2498, + "1450": 5.34281, + "1451": 5.07084, + "1452": 5.27052, + "1453": 5.1668, + "1454": 5.01539, + "1455": 5.12292, + "1456": 5.2717, + "1457": 5.18713, + 
"1458": 5.00608, + "1459": 5.22304, + "1460": 5.23389, + "1461": 5.07142, + "1462": 4.96923, + "1463": 5.14383, + "1464": 5.21128, + "1465": 5.26911, + "1466": 5.34961, + "1467": 5.33438, + "1468": 5.22205, + "1469": 5.04373, + "1470": 5.11715, + "1471": 5.25199, + "1472": 5.12294, + "1473": 5.10395, + "1474": 5.21775, + "1475": 5.18567, + "1476": 5.15287, + "1477": 5.26203, + "1478": 5.30399, + "1479": 5.01175, + "1480": 5.1809, + "1481": 5.24516, + "1482": 5.34866, + "1483": 5.26395, + "1484": 4.92397, + "1485": 5.29179, + "1486": 5.04178, + "1487": 4.88296, + "1488": 5.18145, + "1489": 5.10246, + "1490": 5.04399, + "1491": 5.31709, + "1492": 5.22469, + "1493": 4.94051, + "1494": 5.10929, + "1495": 5.13424, + "1496": 5.05862, + "1497": 5.36633, + "1498": 5.30967, + "1499": 5.13834, + "1500": 5.09851, + "1501": 5.03466, + "1502": 5.15527, + "1503": 5.43143, + "1504": 5.31968, + "1505": 5.00114, + "1506": 5.14444, + "1507": 5.16068, + "1508": 5.16575, + "1509": 5.31451, + "1510": 5.0185, + "1511": 5.11697, + "1512": 4.98287, + "1513": 5.16993, + "1514": 5.33962, + "1515": 5.36563, + "1516": 5.27715, + "1517": 5.22687, + "1518": 5.02626, + "1519": 5.29861, + "1520": 5.1417, + "1521": 5.15866, + "1522": 5.32824, + "1523": 5.24625, + "1524": 5.06725, + "1525": 5.20424, + "1526": 5.27994, + "1527": 5.25677, + "1528": 5.23589, + "1529": 5.18688, + "1530": 5.24365, + "1531": 5.09964, + "1532": 5.15141, + "1533": 5.05087, + "1534": 5.21589, + "1535": 5.1635, + "1536": 5.09678, + "1537": 5.02713, + "1538": 4.91184, + "1539": 5.23801, + "1540": 5.11515, + "1541": 5.25246, + "1542": 5.23484, + "1543": 5.05152, + "1544": 5.07544, + "1545": 5.1161, + "1546": 5.33085, + "1547": 5.11115, + "1548": 5.23527, + "1549": 5.23735, + "1550": 4.97596, + "1551": 5.2566, + "1552": 5.02944, + "1553": 5.14849, + "1554": 5.11205, + "1555": 5.10901, + "1556": 5.19824, + "1557": 5.08883, + "1558": 5.23067, + "1559": 5.00402, + "1560": 5.11835, + "1561": 5.14529, + "1562": 5.17996, + "1563": 
5.24454, + "1564": 5.26389, + "1565": 5.08902, + "1566": 5.29474, + "1567": 5.04166, + "1568": 5.09256, + "1569": 5.20014, + "1570": 5.17348, + "1571": 4.95353, + "1572": 5.04005, + "1573": 5.02897, + "1574": 4.99751, + "1575": 5.2314, + "1576": 5.21263, + "1577": 5.12799, + "1578": 5.36241, + "1579": 4.94367, + "1580": 5.12197, + "1581": 5.09638, + "1582": 5.28497, + "1583": 5.04918, + "1584": 5.05482, + "1585": 5.11977, + "1586": 5.30243, + "1587": 5.13447, + "1588": 5.2184, + "1589": 4.83833, + "1590": 5.09497, + "1591": 5.17411, + "1592": 5.13721, + "1593": 5.23457, + "1594": 5.11805, + "1595": 5.10775, + "1596": 5.18964, + "1597": 5.11486, + "1598": 5.15917, + "1599": 5.19102, + "1600": 4.86871, + "1601": 5.11732, + "1602": 5.23185, + "1603": 5.19543, + "1604": 5.05128, + "1605": 5.02692, + "1606": 4.98659, + "1607": 5.07391, + "1608": 4.97985, + "1609": 5.07337, + "1610": 5.04745, + "1611": 4.99848, + "1612": 4.75205, + "1613": 5.03316, + "1614": 4.88034, + "1615": 5.07442, + "1616": 5.23082, + "1617": 5.06132, + "1618": 4.98704, + "1619": 5.18333, + "1620": 5.14491, + "1621": 5.31452, + "1622": 5.05677, + "1623": 5.14346, + "1624": 5.1355, + "1625": 5.12006, + "1626": 5.10245, + "1627": 5.10987, + "1628": 5.06581, + "1629": 4.92971, + "1630": 5.06799, + "1631": 5.06088, + "1632": 5.10428, + "1633": 4.97515, + "1634": 4.9235, + "1635": 5.05833, + "1636": 4.92289, + "1637": 5.24051, + "1638": 5.15574, + "1639": 4.977, + "1640": 5.00918, + "1641": 5.12718, + "1642": 5.08305, + "1643": 5.04894, + "1644": 5.1181, + "1645": 4.96677, + "1646": 5.11931, + "1647": 5.03295, + "1648": 5.19969, + "1649": 4.92396, + "1650": 5.05963, + "1651": 4.92965, + "1652": 5.21121, + "1653": 5.15959, + "1654": 5.12828, + "1655": 5.16263, + "1656": 5.34595, + "1657": 5.20677, + "1658": 5.04112, + "1659": 4.9258, + "1660": 4.80954, + "1661": 5.03086, + "1662": 5.14123, + "1663": 5.15449, + "1664": 4.981, + "1665": 5.11714, + "1666": 5.10575, + "1667": 4.84897, + "1668": 5.11513, + 
"1669": 5.06995, + "1670": 5.11266, + "1671": 5.17201, + "1672": 4.77569, + "1673": 5.03851, + "1674": 4.91569, + "1675": 5.05176, + "1676": 5.00402, + "1677": 4.79944, + "1678": 5.02487, + "1679": 4.89421, + "1680": 5.03847, + "1681": 5.06815, + "1682": 5.03274, + "1683": 4.90688, + "1684": 5.06515, + "1685": 5.13579, + "1686": 5.0732, + "1687": 4.97656, + "1688": 5.16537, + "1689": 5.14707, + "1690": 4.99688, + "1691": 5.00011, + "1692": 4.91822, + "1693": 5.01472, + "1694": 4.94657, + "1695": 4.91341, + "1696": 5.08209, + "1697": 5.04294, + "1698": 4.9511, + "1699": 5.00187, + "1700": 4.95393, + "1701": 5.16563, + "1702": 5.07666, + "1703": 5.17125, + "1704": 5.14332, + "1705": 4.96247, + "1706": 4.98333, + "1707": 4.79005, + "1708": 5.03831, + "1709": 5.23334, + "1710": 5.02934, + "1711": 5.19037, + "1712": 5.1958, + "1713": 5.03582, + "1714": 5.04603, + "1715": 4.91495, + "1716": 4.9332, + "1717": 4.86109, + "1718": 5.0273, + "1719": 5.12334, + "1720": 5.02189, + "1721": 4.92752, + "1722": 5.05412, + "1723": 4.93537, + "1724": 5.0407, + "1725": 5.1914, + "1726": 5.06447, + "1727": 4.90742, + "1728": 5.02116, + "1729": 5.04574, + "1730": 4.90343, + "1731": 4.99945, + "1732": 4.92083, + "1733": 5.1311, + "1734": 4.82837, + "1735": 5.20905, + "1736": 4.91585, + "1737": 4.85859, + "1738": 4.97909, + "1739": 5.16688, + "1740": 4.83514, + "1741": 4.77896, + "1742": 4.90909, + "1743": 5.08523, + "1744": 4.9784, + "1745": 4.82327, + "1746": 4.94833, + "1747": 4.87022, + "1748": 5.06379, + "1749": 4.8705, + "1750": 5.01347, + "1751": 5.12189, + "1752": 4.90364, + "1753": 5.09398, + "1754": 5.05918, + "1755": 4.89649, + "1756": 5.02243, + "1757": 5.14389, + "1758": 4.8716, + "1759": 4.94237, + "1760": 4.83366, + "1761": 5.02233, + "1762": 4.81292, + "1763": 4.77382, + "1764": 4.93787, + "1765": 5.14977, + "1766": 5.33847, + "1767": 5.22339, + "1768": 4.95072, + "1769": 5.00607, + "1770": 4.98077, + "1771": 4.96436, + "1772": 4.98395, + "1773": 4.97312, + "1774": 
4.86859, + "1775": 4.95207, + "1776": 4.99761, + "1777": 4.94332, + "1778": 4.99268, + "1779": 5.08376, + "1780": 4.83276, + "1781": 5.05321, + "1782": 4.9968, + "1783": 5.01268, + "1784": 4.93195, + "1785": 5.16736, + "1786": 4.81265, + "1787": 4.97081, + "1788": 4.82725, + "1789": 4.88846, + "1790": 4.79821, + "1791": 4.73741, + "1792": 4.87626, + "1793": 5.10356, + "1794": 4.98084, + "1795": 4.96551, + "1796": 4.99704, + "1797": 4.7903, + "1798": 4.76702, + "1799": 5.01884, + "1800": 4.91364, + "1801": 5.04679, + "1802": 4.82665, + "1803": 4.95171, + "1804": 4.88594, + "1805": 4.90346, + "1806": 4.87351, + "1807": 4.92406, + "1808": 4.92697, + "1809": 5.1451, + "1810": 5.09976, + "1811": 4.95906, + "1812": 4.80139, + "1813": 5.09748, + "1814": 4.77766, + "1815": 4.86134, + "1816": 5.05005, + "1817": 4.79012, + "1818": 4.80376, + "1819": 5.02382, + "1820": 4.68652, + "1821": 5.02661, + "1822": 4.66251, + "1823": 4.8659, + "1824": 4.78635, + "1825": 5.06537, + "1826": 4.81944, + "1827": 4.7895, + "1828": 4.94677, + "1829": 5.11262, + "1830": 4.91236, + "1831": 4.89818, + "1832": 4.83359, + "1833": 4.78363, + "1834": 4.9482, + "1835": 4.95795, + "1836": 4.90747, + "1837": 4.67243, + "1838": 4.80953, + "1839": 4.89546, + "1840": 4.90488, + "1841": 4.8292, + "1842": 4.94678, + "1843": 4.70293, + "1844": 4.61431, + "1845": 5.00086, + "1846": 4.74657, + "1847": 4.8645, + "1848": 4.89695, + "1849": 4.85358, + "1850": 4.8676, + "1851": 5.02236, + "1852": 4.97647, + "1853": 4.83325, + "1854": 4.86791, + "1855": 4.8219, + "1856": 4.75614, + "1857": 4.9619, + "1858": 4.96856, + "1859": 4.75323, + "1860": 4.86592, + "1861": 5.20685, + "1862": 4.61669, + "1863": 4.83385, + "1864": 4.7505, + "1865": 4.86441, + "1866": 4.79455, + "1867": 4.99688, + "1868": 4.71331, + "1869": 4.75634, + "1870": 4.93203, + "1871": 4.99184, + "1872": 4.68332, + "1873": 4.69823, + "1874": 4.85174, + "1875": 4.85999, + "1876": 4.7392, + "1877": 4.80362, + "1878": 4.81239, + "1879": 4.82084, + 
"1880": 4.89314, + "1881": 4.79389, + "1882": 4.79419, + "1883": 4.78157, + "1884": 4.97086, + "1885": 4.91799, + "1886": 4.82203, + "1887": 4.81334, + "1888": 4.97395, + "1889": 4.95922, + "1890": 4.70676, + "1891": 4.65282, + "1892": 4.84393, + "1893": 4.64594, + "1894": 4.90265, + "1895": 4.7886, + "1896": 4.66112, + "1897": 4.78966, + "1898": 4.9139, + "1899": 4.77532, + "1900": 4.91571, + "1901": 4.84525, + "1902": 4.78411, + "1903": 4.75997, + "1904": 4.65339, + "1905": 4.54188, + "1906": 4.81097, + "1907": 4.90225, + "1908": 5.03012, + "1909": 4.88434, + "1910": 4.78852, + "1911": 4.80477, + "1912": 4.64685, + "1913": 4.94065, + "1914": 4.87965, + "1915": 4.85906, + "1916": 4.92227, + "1917": 4.85425, + "1918": 4.87001, + "1919": 4.99304, + "1920": 4.76319, + "1921": 4.88494, + "1922": 4.81295, + "1923": 4.7592, + "1924": 4.82501, + "1925": 5.05793, + "1926": 4.92996, + "1927": 4.92587, + "1928": 4.92702, + "1929": 4.92705, + "1930": 4.91019, + "1931": 4.77616, + "1932": 4.85963, + "1933": 4.83545, + "1934": 4.84013, + "1935": 5.10729, + "1936": 4.88314, + "1937": 4.87654, + "1938": 4.79463, + "1939": 4.71148, + "1940": 4.82418, + "1941": 4.73372, + "1942": 4.87249, + "1943": 4.7353, + "1944": 4.74198, + "1945": 4.6818, + "1946": 4.91539, + "1947": 4.86756, + "1948": 4.59887, + "1949": 4.90387, + "1950": 4.78785, + "1951": 4.95942, + "1952": 4.73677, + "1953": 4.79496, + "1954": 4.73264, + "1955": 4.84308, + "1956": 4.88233, + "1957": 4.73496, + "1958": 4.70018, + "1959": 4.75966, + "1960": 4.76849, + "1961": 4.7146, + "1962": 4.83392, + "1963": 4.82321, + "1964": 4.84664, + "1965": 4.87523, + "1966": 4.78753, + "1967": 4.59211, + "1968": 4.82724, + "1969": 4.59184, + "1970": 4.56633, + "1971": 4.9072, + "1972": 4.90064, + "1973": 4.54642, + "1974": 4.82423, + "1975": 4.82778, + "1976": 4.71327, + "1977": 4.57967, + "1978": 5.0045, + "1979": 4.66094, + "1980": 4.74256, + "1981": 4.86301, + "1982": 4.72234, + "1983": 4.8786, + "1984": 4.64152, + "1985": 4.78, 
+ "1986": 4.70167, + "1987": 4.81036, + "1988": 4.8871, + "1989": 4.63185, + "1990": 4.79636, + "1991": 4.69424, + "1992": 4.79439, + "1993": 4.74063, + "1994": 4.84977, + "1995": 4.5596, + "1996": 4.65161, + "1997": 4.80342, + "1998": 4.67403, + "1999": 4.72284, + "2000": 4.61765 + } + }, + "num-zeros": { + "start_step": 1, + "end_step": 2000, + "step_interval": 1, + "values": { + "1": 80.0, + "2": 70.0, + "3": 78.0, + "4": 80.0, + "5": 75.0, + "6": 87.0, + "7": 63.0, + "8": 77.0, + "9": 62.0, + "10": 90.0, + "11": 74.0, + "12": 79.0, + "13": 77.0, + "14": 83.0, + "15": 78.0, + "16": 69.0, + "17": 64.0, + "18": 63.0, + "19": 87.0, + "20": 90.0, + "21": 75.0, + "22": 84.0, + "23": 81.0, + "24": 78.0, + "25": 87.0, + "26": 69.0, + "27": 86.0, + "28": 91.0, + "29": 94.0, + "30": 115.0, + "31": 99.0, + "32": 109.0, + "33": 92.0, + "34": 103.0, + "35": 118.0, + "36": 117.0, + "37": 105.0, + "38": 129.0, + "39": 89.0, + "40": 129.0, + "41": 114.0, + "42": 121.0, + "43": 135.0, + "44": 128.0, + "45": 126.0, + "46": 129.0, + "47": 133.0, + "48": 139.0, + "49": 135.0, + "50": 157.0, + "51": 122.0, + "52": 150.0, + "53": 108.0, + "54": 140.0, + "55": 133.0, + "56": 156.0, + "57": 150.0, + "58": 153.0, + "59": 135.0, + "60": 135.0, + "61": 165.0, + "62": 145.0, + "63": 199.0, + "64": 161.0, + "65": 162.0, + "66": 162.0, + "67": 195.0, + "68": 140.0, + "69": 158.0, + "70": 169.0, + "71": 188.0, + "72": 160.0, + "73": 151.0, + "74": 154.0, + "75": 172.0, + "76": 169.0, + "77": 165.0, + "78": 193.0, + "79": 144.0, + "80": 173.0, + "81": 150.0, + "82": 141.0, + "83": 186.0, + "84": 169.0, + "85": 183.0, + "86": 196.0, + "87": 197.0, + "88": 184.0, + "89": 169.0, + "90": 182.0, + "91": 200.0, + "92": 179.0, + "93": 165.0, + "94": 153.0, + "95": 176.0, + "96": 191.0, + "97": 183.0, + "98": 199.0, + "99": 163.0, + "100": 157.0, + "101": 144.0, + "102": 184.0, + "103": 206.0, + "104": 171.0, + "105": 215.0, + "106": 176.0, + "107": 172.0, + "108": 172.0, + "109": 172.0, + "110": 
216.0, + "111": 182.0, + "112": 172.0, + "113": 167.0, + "114": 192.0, + "115": 175.0, + "116": 181.0, + "117": 177.0, + "118": 142.0, + "119": 212.0, + "120": 164.0, + "121": 193.0, + "122": 160.0, + "123": 169.0, + "124": 191.0, + "125": 214.0, + "126": 160.0, + "127": 192.0, + "128": 160.0, + "129": 180.0, + "130": 214.0, + "131": 219.0, + "132": 173.0, + "133": 166.0, + "134": 171.0, + "135": 182.0, + "136": 172.0, + "137": 176.0, + "138": 174.0, + "139": 161.0, + "140": 178.0, + "141": 164.0, + "142": 159.0, + "143": 192.0, + "144": 157.0, + "145": 144.0, + "146": 149.0, + "147": 148.0, + "148": 169.0, + "149": 143.0, + "150": 111.0, + "151": 159.0, + "152": 115.0, + "153": 147.0, + "154": 162.0, + "155": 185.0, + "156": 144.0, + "157": 147.0, + "158": 130.0, + "159": 165.0, + "160": 190.0, + "161": 141.0, + "162": 155.0, + "163": 140.0, + "164": 174.0, + "165": 168.0, + "166": 179.0, + "167": 147.0, + "168": 138.0, + "169": 161.0, + "170": 159.0, + "171": 125.0, + "172": 193.0, + "173": 172.0, + "174": 190.0, + "175": 192.0, + "176": 146.0, + "177": 168.0, + "178": 172.0, + "179": 177.0, + "180": 148.0, + "181": 161.0, + "182": 213.0, + "183": 215.0, + "184": 201.0, + "185": 154.0, + "186": 207.0, + "187": 175.0, + "188": 183.0, + "189": 169.0, + "190": 167.0, + "191": 163.0, + "192": 193.0, + "193": 169.0, + "194": 161.0, + "195": 141.0, + "196": 174.0, + "197": 188.0, + "198": 168.0, + "199": 150.0, + "200": 187.0, + "201": 173.0, + "202": 183.0, + "203": 142.0, + "204": 177.0, + "205": 153.0, + "206": 198.0, + "207": 168.0, + "208": 140.0, + "209": 179.0, + "210": 175.0, + "211": 167.0, + "212": 194.0, + "213": 192.0, + "214": 174.0, + "215": 188.0, + "216": 164.0, + "217": 170.0, + "218": 171.0, + "219": 211.0, + "220": 195.0, + "221": 181.0, + "222": 154.0, + "223": 176.0, + "224": 173.0, + "225": 166.0, + "226": 174.0, + "227": 211.0, + "228": 146.0, + "229": 193.0, + "230": 149.0, + "231": 177.0, + "232": 169.0, + "233": 193.0, + "234": 183.0, + "235": 
215.0, + "236": 200.0, + "237": 218.0, + "238": 179.0, + "239": 139.0, + "240": 217.0, + "241": 174.0, + "242": 193.0, + "243": 192.0, + "244": 181.0, + "245": 206.0, + "246": 221.0, + "247": 219.0, + "248": 175.0, + "249": 189.0, + "250": 156.0, + "251": 205.0, + "252": 164.0, + "253": 172.0, + "254": 184.0, + "255": 218.0, + "256": 171.0, + "257": 208.0, + "258": 210.0, + "259": 174.0, + "260": 199.0, + "261": 178.0, + "262": 185.0, + "263": 181.0, + "264": 200.0, + "265": 171.0, + "266": 149.0, + "267": 141.0, + "268": 186.0, + "269": 198.0, + "270": 170.0, + "271": 168.0, + "272": 210.0, + "273": 151.0, + "274": 212.0, + "275": 182.0, + "276": 172.0, + "277": 159.0, + "278": 169.0, + "279": 185.0, + "280": 174.0, + "281": 160.0, + "282": 171.0, + "283": 174.0, + "284": 183.0, + "285": 169.0, + "286": 173.0, + "287": 203.0, + "288": 168.0, + "289": 202.0, + "290": 157.0, + "291": 241.0, + "292": 172.0, + "293": 209.0, + "294": 194.0, + "295": 207.0, + "296": 217.0, + "297": 160.0, + "298": 126.0, + "299": 170.0, + "300": 177.0, + "301": 189.0, + "302": 209.0, + "303": 170.0, + "304": 177.0, + "305": 148.0, + "306": 172.0, + "307": 213.0, + "308": 184.0, + "309": 193.0, + "310": 218.0, + "311": 159.0, + "312": 178.0, + "313": 177.0, + "314": 199.0, + "315": 165.0, + "316": 168.0, + "317": 185.0, + "318": 261.0, + "319": 181.0, + "320": 196.0, + "321": 200.0, + "322": 217.0, + "323": 198.0, + "324": 200.0, + "325": 184.0, + "326": 283.0, + "327": 211.0, + "328": 231.0, + "329": 189.0, + "330": 248.0, + "331": 205.0, + "332": 208.0, + "333": 199.0, + "334": 182.0, + "335": 202.0, + "336": 207.0, + "337": 216.0, + "338": 231.0, + "339": 213.0, + "340": 240.0, + "341": 207.0, + "342": 153.0, + "343": 264.0, + "344": 214.0, + "345": 202.0, + "346": 183.0, + "347": 194.0, + "348": 216.0, + "349": 206.0, + "350": 218.0, + "351": 218.0, + "352": 207.0, + "353": 225.0, + "354": 213.0, + "355": 201.0, + "356": 227.0, + "357": 217.0, + "358": 206.0, + "359": 186.0, + "360": 
217.0, + "361": 187.0, + "362": 256.0, + "363": 226.0, + "364": 203.0, + "365": 200.0, + "366": 241.0, + "367": 205.0, + "368": 192.0, + "369": 160.0, + "370": 221.0, + "371": 212.0, + "372": 193.0, + "373": 218.0, + "374": 164.0, + "375": 249.0, + "376": 195.0, + "377": 197.0, + "378": 222.0, + "379": 254.0, + "380": 210.0, + "381": 199.0, + "382": 217.0, + "383": 208.0, + "384": 238.0, + "385": 183.0, + "386": 221.0, + "387": 185.0, + "388": 205.0, + "389": 185.0, + "390": 217.0, + "391": 241.0, + "392": 212.0, + "393": 247.0, + "394": 242.0, + "395": 247.0, + "396": 197.0, + "397": 202.0, + "398": 191.0, + "399": 231.0, + "400": 211.0, + "401": 200.0, + "402": 210.0, + "403": 261.0, + "404": 211.0, + "405": 171.0, + "406": 209.0, + "407": 200.0, + "408": 226.0, + "409": 200.0, + "410": 220.0, + "411": 196.0, + "412": 194.0, + "413": 168.0, + "414": 223.0, + "415": 204.0, + "416": 225.0, + "417": 213.0, + "418": 196.0, + "419": 203.0, + "420": 203.0, + "421": 217.0, + "422": 200.0, + "423": 213.0, + "424": 237.0, + "425": 239.0, + "426": 178.0, + "427": 213.0, + "428": 196.0, + "429": 174.0, + "430": 243.0, + "431": 169.0, + "432": 203.0, + "433": 211.0, + "434": 194.0, + "435": 188.0, + "436": 208.0, + "437": 170.0, + "438": 194.0, + "439": 156.0, + "440": 199.0, + "441": 190.0, + "442": 232.0, + "443": 225.0, + "444": 172.0, + "445": 194.0, + "446": 221.0, + "447": 209.0, + "448": 233.0, + "449": 257.0, + "450": 207.0, + "451": 199.0, + "452": 177.0, + "453": 200.0, + "454": 227.0, + "455": 263.0, + "456": 196.0, + "457": 204.0, + "458": 169.0, + "459": 131.0, + "460": 216.0, + "461": 223.0, + "462": 210.0, + "463": 203.0, + "464": 208.0, + "465": 187.0, + "466": 190.0, + "467": 192.0, + "468": 194.0, + "469": 188.0, + "470": 193.0, + "471": 221.0, + "472": 166.0, + "473": 191.0, + "474": 193.0, + "475": 196.0, + "476": 192.0, + "477": 168.0, + "478": 180.0, + "479": 176.0, + "480": 145.0, + "481": 197.0, + "482": 167.0, + "483": 198.0, + "484": 172.0, + "485": 
175.0, + "486": 192.0, + "487": 143.0, + "488": 182.0, + "489": 172.0, + "490": 178.0, + "491": 175.0, + "492": 194.0, + "493": 211.0, + "494": 159.0, + "495": 165.0, + "496": 153.0, + "497": 145.0, + "498": 196.0, + "499": 195.0, + "500": 165.0, + "501": 183.0, + "502": 167.0, + "503": 175.0, + "504": 182.0, + "505": 212.0, + "506": 177.0, + "507": 159.0, + "508": 135.0, + "509": 195.0, + "510": 156.0, + "511": 186.0, + "512": 177.0, + "513": 186.0, + "514": 173.0, + "515": 190.0, + "516": 175.0, + "517": 143.0, + "518": 169.0, + "519": 186.0, + "520": 156.0, + "521": 146.0, + "522": 173.0, + "523": 175.0, + "524": 172.0, + "525": 202.0, + "526": 168.0, + "527": 178.0, + "528": 173.0, + "529": 183.0, + "530": 168.0, + "531": 161.0, + "532": 185.0, + "533": 172.0, + "534": 166.0, + "535": 140.0, + "536": 164.0, + "537": 150.0, + "538": 155.0, + "539": 125.0, + "540": 151.0, + "541": 130.0, + "542": 153.0, + "543": 149.0, + "544": 185.0, + "545": 132.0, + "546": 184.0, + "547": 150.0, + "548": 155.0, + "549": 162.0, + "550": 170.0, + "551": 144.0, + "552": 147.0, + "553": 213.0, + "554": 182.0, + "555": 150.0, + "556": 162.0, + "557": 154.0, + "558": 181.0, + "559": 144.0, + "560": 194.0, + "561": 174.0, + "562": 147.0, + "563": 125.0, + "564": 169.0, + "565": 143.0, + "566": 136.0, + "567": 144.0, + "568": 153.0, + "569": 167.0, + "570": 153.0, + "571": 131.0, + "572": 143.0, + "573": 128.0, + "574": 162.0, + "575": 133.0, + "576": 143.0, + "577": 171.0, + "578": 167.0, + "579": 140.0, + "580": 165.0, + "581": 164.0, + "582": 145.0, + "583": 151.0, + "584": 146.0, + "585": 148.0, + "586": 102.0, + "587": 147.0, + "588": 146.0, + "589": 123.0, + "590": 146.0, + "591": 149.0, + "592": 115.0, + "593": 166.0, + "594": 159.0, + "595": 127.0, + "596": 113.0, + "597": 135.0, + "598": 139.0, + "599": 157.0, + "600": 129.0, + "601": 144.0, + "602": 129.0, + "603": 125.0, + "604": 125.0, + "605": 139.0, + "606": 135.0, + "607": 144.0, + "608": 149.0, + "609": 139.0, + "610": 
135.0, + "611": 148.0, + "612": 148.0, + "613": 115.0, + "614": 150.0, + "615": 132.0, + "616": 156.0, + "617": 120.0, + "618": 145.0, + "619": 136.0, + "620": 170.0, + "621": 147.0, + "622": 150.0, + "623": 119.0, + "624": 128.0, + "625": 141.0, + "626": 122.0, + "627": 121.0, + "628": 157.0, + "629": 126.0, + "630": 134.0, + "631": 147.0, + "632": 146.0, + "633": 131.0, + "634": 145.0, + "635": 174.0, + "636": 151.0, + "637": 169.0, + "638": 128.0, + "639": 164.0, + "640": 145.0, + "641": 136.0, + "642": 132.0, + "643": 134.0, + "644": 124.0, + "645": 145.0, + "646": 106.0, + "647": 123.0, + "648": 121.0, + "649": 134.0, + "650": 153.0, + "651": 117.0, + "652": 163.0, + "653": 155.0, + "654": 140.0, + "655": 154.0, + "656": 124.0, + "657": 116.0, + "658": 130.0, + "659": 114.0, + "660": 145.0, + "661": 121.0, + "662": 143.0, + "663": 124.0, + "664": 139.0, + "665": 138.0, + "666": 111.0, + "667": 127.0, + "668": 144.0, + "669": 116.0, + "670": 139.0, + "671": 132.0, + "672": 136.0, + "673": 139.0, + "674": 119.0, + "675": 165.0, + "676": 123.0, + "677": 127.0, + "678": 135.0, + "679": 83.0, + "680": 139.0, + "681": 120.0, + "682": 111.0, + "683": 119.0, + "684": 121.0, + "685": 145.0, + "686": 127.0, + "687": 145.0, + "688": 117.0, + "689": 119.0, + "690": 119.0, + "691": 124.0, + "692": 118.0, + "693": 112.0, + "694": 156.0, + "695": 114.0, + "696": 141.0, + "697": 123.0, + "698": 130.0, + "699": 147.0, + "700": 119.0, + "701": 139.0, + "702": 111.0, + "703": 113.0, + "704": 118.0, + "705": 115.0, + "706": 102.0, + "707": 121.0, + "708": 115.0, + "709": 116.0, + "710": 95.0, + "711": 101.0, + "712": 98.0, + "713": 117.0, + "714": 127.0, + "715": 135.0, + "716": 124.0, + "717": 88.0, + "718": 143.0, + "719": 114.0, + "720": 120.0, + "721": 106.0, + "722": 117.0, + "723": 101.0, + "724": 97.0, + "725": 106.0, + "726": 103.0, + "727": 95.0, + "728": 123.0, + "729": 104.0, + "730": 124.0, + "731": 111.0, + "732": 78.0, + "733": 96.0, + "734": 129.0, + "735": 142.0, 
+ "736": 110.0, + "737": 132.0, + "738": 110.0, + "739": 136.0, + "740": 106.0, + "741": 102.0, + "742": 123.0, + "743": 133.0, + "744": 130.0, + "745": 109.0, + "746": 122.0, + "747": 125.0, + "748": 133.0, + "749": 114.0, + "750": 121.0, + "751": 113.0, + "752": 111.0, + "753": 96.0, + "754": 118.0, + "755": 87.0, + "756": 113.0, + "757": 91.0, + "758": 105.0, + "759": 99.0, + "760": 125.0, + "761": 106.0, + "762": 105.0, + "763": 101.0, + "764": 109.0, + "765": 118.0, + "766": 95.0, + "767": 133.0, + "768": 115.0, + "769": 122.0, + "770": 106.0, + "771": 123.0, + "772": 106.0, + "773": 136.0, + "774": 128.0, + "775": 116.0, + "776": 112.0, + "777": 95.0, + "778": 113.0, + "779": 119.0, + "780": 99.0, + "781": 107.0, + "782": 80.0, + "783": 108.0, + "784": 122.0, + "785": 111.0, + "786": 111.0, + "787": 115.0, + "788": 116.0, + "789": 108.0, + "790": 127.0, + "791": 83.0, + "792": 117.0, + "793": 102.0, + "794": 106.0, + "795": 123.0, + "796": 121.0, + "797": 124.0, + "798": 112.0, + "799": 136.0, + "800": 99.0, + "801": 117.0, + "802": 93.0, + "803": 166.0, + "804": 127.0, + "805": 124.0, + "806": 97.0, + "807": 134.0, + "808": 108.0, + "809": 121.0, + "810": 126.0, + "811": 107.0, + "812": 116.0, + "813": 126.0, + "814": 105.0, + "815": 98.0, + "816": 99.0, + "817": 97.0, + "818": 97.0, + "819": 109.0, + "820": 106.0, + "821": 88.0, + "822": 109.0, + "823": 108.0, + "824": 127.0, + "825": 108.0, + "826": 128.0, + "827": 134.0, + "828": 100.0, + "829": 125.0, + "830": 113.0, + "831": 114.0, + "832": 107.0, + "833": 113.0, + "834": 100.0, + "835": 98.0, + "836": 123.0, + "837": 95.0, + "838": 118.0, + "839": 96.0, + "840": 109.0, + "841": 98.0, + "842": 114.0, + "843": 113.0, + "844": 123.0, + "845": 108.0, + "846": 124.0, + "847": 112.0, + "848": 115.0, + "849": 118.0, + "850": 92.0, + "851": 145.0, + "852": 89.0, + "853": 106.0, + "854": 101.0, + "855": 113.0, + "856": 125.0, + "857": 105.0, + "858": 129.0, + "859": 107.0, + "860": 118.0, + "861": 85.0, + 
"862": 106.0, + "863": 95.0, + "864": 81.0, + "865": 104.0, + "866": 105.0, + "867": 104.0, + "868": 106.0, + "869": 109.0, + "870": 105.0, + "871": 122.0, + "872": 114.0, + "873": 100.0, + "874": 113.0, + "875": 108.0, + "876": 93.0, + "877": 130.0, + "878": 110.0, + "879": 122.0, + "880": 106.0, + "881": 103.0, + "882": 80.0, + "883": 107.0, + "884": 115.0, + "885": 113.0, + "886": 116.0, + "887": 131.0, + "888": 89.0, + "889": 120.0, + "890": 110.0, + "891": 103.0, + "892": 102.0, + "893": 106.0, + "894": 91.0, + "895": 118.0, + "896": 110.0, + "897": 103.0, + "898": 115.0, + "899": 119.0, + "900": 120.0, + "901": 99.0, + "902": 100.0, + "903": 102.0, + "904": 127.0, + "905": 105.0, + "906": 124.0, + "907": 104.0, + "908": 117.0, + "909": 124.0, + "910": 108.0, + "911": 102.0, + "912": 117.0, + "913": 122.0, + "914": 130.0, + "915": 98.0, + "916": 120.0, + "917": 113.0, + "918": 112.0, + "919": 85.0, + "920": 110.0, + "921": 108.0, + "922": 111.0, + "923": 116.0, + "924": 119.0, + "925": 105.0, + "926": 128.0, + "927": 120.0, + "928": 106.0, + "929": 94.0, + "930": 116.0, + "931": 102.0, + "932": 123.0, + "933": 114.0, + "934": 133.0, + "935": 86.0, + "936": 114.0, + "937": 96.0, + "938": 118.0, + "939": 111.0, + "940": 110.0, + "941": 102.0, + "942": 98.0, + "943": 119.0, + "944": 107.0, + "945": 106.0, + "946": 112.0, + "947": 93.0, + "948": 119.0, + "949": 116.0, + "950": 124.0, + "951": 112.0, + "952": 106.0, + "953": 97.0, + "954": 111.0, + "955": 112.0, + "956": 87.0, + "957": 117.0, + "958": 97.0, + "959": 91.0, + "960": 103.0, + "961": 102.0, + "962": 103.0, + "963": 127.0, + "964": 113.0, + "965": 120.0, + "966": 106.0, + "967": 104.0, + "968": 119.0, + "969": 89.0, + "970": 121.0, + "971": 115.0, + "972": 96.0, + "973": 90.0, + "974": 113.0, + "975": 109.0, + "976": 113.0, + "977": 85.0, + "978": 104.0, + "979": 109.0, + "980": 100.0, + "981": 94.0, + "982": 105.0, + "983": 84.0, + "984": 112.0, + "985": 108.0, + "986": 92.0, + "987": 88.0, + "988": 
123.0, + "989": 106.0, + "990": 103.0, + "991": 128.0, + "992": 104.0, + "993": 109.0, + "994": 98.0, + "995": 104.0, + "996": 93.0, + "997": 128.0, + "998": 121.0, + "999": 89.0, + "1000": 118.0, + "1001": 104.0, + "1002": 96.0, + "1003": 107.0, + "1004": 88.0, + "1005": 103.0, + "1006": 105.0, + "1007": 102.0, + "1008": 83.0, + "1009": 117.0, + "1010": 104.0, + "1011": 127.0, + "1012": 117.0, + "1013": 106.0, + "1014": 111.0, + "1015": 110.0, + "1016": 91.0, + "1017": 76.0, + "1018": 115.0, + "1019": 123.0, + "1020": 111.0, + "1021": 106.0, + "1022": 108.0, + "1023": 137.0, + "1024": 122.0, + "1025": 104.0, + "1026": 109.0, + "1027": 92.0, + "1028": 96.0, + "1029": 116.0, + "1030": 96.0, + "1031": 122.0, + "1032": 103.0, + "1033": 108.0, + "1034": 111.0, + "1035": 86.0, + "1036": 74.0, + "1037": 123.0, + "1038": 85.0, + "1039": 128.0, + "1040": 95.0, + "1041": 116.0, + "1042": 107.0, + "1043": 96.0, + "1044": 116.0, + "1045": 115.0, + "1046": 92.0, + "1047": 106.0, + "1048": 88.0, + "1049": 121.0, + "1050": 117.0, + "1051": 105.0, + "1052": 96.0, + "1053": 98.0, + "1054": 85.0, + "1055": 110.0, + "1056": 91.0, + "1057": 109.0, + "1058": 95.0, + "1059": 106.0, + "1060": 109.0, + "1061": 97.0, + "1062": 105.0, + "1063": 91.0, + "1064": 103.0, + "1065": 108.0, + "1066": 112.0, + "1067": 108.0, + "1068": 108.0, + "1069": 123.0, + "1070": 100.0, + "1071": 95.0, + "1072": 111.0, + "1073": 118.0, + "1074": 101.0, + "1075": 95.0, + "1076": 111.0, + "1077": 89.0, + "1078": 94.0, + "1079": 113.0, + "1080": 82.0, + "1081": 114.0, + "1082": 87.0, + "1083": 116.0, + "1084": 105.0, + "1085": 97.0, + "1086": 119.0, + "1087": 86.0, + "1088": 93.0, + "1089": 114.0, + "1090": 87.0, + "1091": 109.0, + "1092": 90.0, + "1093": 109.0, + "1094": 101.0, + "1095": 90.0, + "1096": 106.0, + "1097": 100.0, + "1098": 105.0, + "1099": 96.0, + "1100": 92.0, + "1101": 108.0, + "1102": 94.0, + "1103": 86.0, + "1104": 103.0, + "1105": 109.0, + "1106": 87.0, + "1107": 87.0, + "1108": 96.0, + 
"1109": 102.0, + "1110": 89.0, + "1111": 76.0, + "1112": 110.0, + "1113": 104.0, + "1114": 89.0, + "1115": 114.0, + "1116": 97.0, + "1117": 108.0, + "1118": 107.0, + "1119": 118.0, + "1120": 112.0, + "1121": 96.0, + "1122": 103.0, + "1123": 112.0, + "1124": 98.0, + "1125": 97.0, + "1126": 121.0, + "1127": 80.0, + "1128": 91.0, + "1129": 106.0, + "1130": 96.0, + "1131": 82.0, + "1132": 103.0, + "1133": 86.0, + "1134": 92.0, + "1135": 98.0, + "1136": 90.0, + "1137": 120.0, + "1138": 102.0, + "1139": 109.0, + "1140": 88.0, + "1141": 90.0, + "1142": 95.0, + "1143": 88.0, + "1144": 77.0, + "1145": 92.0, + "1146": 85.0, + "1147": 108.0, + "1148": 77.0, + "1149": 93.0, + "1150": 101.0, + "1151": 116.0, + "1152": 72.0, + "1153": 90.0, + "1154": 103.0, + "1155": 106.0, + "1156": 91.0, + "1157": 100.0, + "1158": 101.0, + "1159": 111.0, + "1160": 114.0, + "1161": 90.0, + "1162": 92.0, + "1163": 90.0, + "1164": 96.0, + "1165": 100.0, + "1166": 114.0, + "1167": 82.0, + "1168": 96.0, + "1169": 77.0, + "1170": 91.0, + "1171": 94.0, + "1172": 99.0, + "1173": 124.0, + "1174": 106.0, + "1175": 97.0, + "1176": 102.0, + "1177": 78.0, + "1178": 108.0, + "1179": 103.0, + "1180": 84.0, + "1181": 76.0, + "1182": 115.0, + "1183": 104.0, + "1184": 122.0, + "1185": 104.0, + "1186": 104.0, + "1187": 91.0, + "1188": 112.0, + "1189": 101.0, + "1190": 106.0, + "1191": 97.0, + "1192": 90.0, + "1193": 105.0, + "1194": 99.0, + "1195": 118.0, + "1196": 120.0, + "1197": 93.0, + "1198": 101.0, + "1199": 103.0, + "1200": 90.0, + "1201": 108.0, + "1202": 120.0, + "1203": 90.0, + "1204": 98.0, + "1205": 113.0, + "1206": 102.0, + "1207": 116.0, + "1208": 104.0, + "1209": 85.0, + "1210": 101.0, + "1211": 87.0, + "1212": 100.0, + "1213": 109.0, + "1214": 92.0, + "1215": 103.0, + "1216": 117.0, + "1217": 102.0, + "1218": 135.0, + "1219": 95.0, + "1220": 122.0, + "1221": 121.0, + "1222": 109.0, + "1223": 103.0, + "1224": 93.0, + "1225": 107.0, + "1226": 82.0, + "1227": 108.0, + "1228": 106.0, + "1229": 87.0, 
+ "1230": 97.0, + "1231": 109.0, + "1232": 95.0, + "1233": 99.0, + "1234": 107.0, + "1235": 105.0, + "1236": 101.0, + "1237": 110.0, + "1238": 102.0, + "1239": 118.0, + "1240": 114.0, + "1241": 119.0, + "1242": 90.0, + "1243": 104.0, + "1244": 102.0, + "1245": 105.0, + "1246": 104.0, + "1247": 121.0, + "1248": 104.0, + "1249": 129.0, + "1250": 111.0, + "1251": 91.0, + "1252": 120.0, + "1253": 121.0, + "1254": 110.0, + "1255": 113.0, + "1256": 97.0, + "1257": 114.0, + "1258": 110.0, + "1259": 106.0, + "1260": 93.0, + "1261": 104.0, + "1262": 109.0, + "1263": 104.0, + "1264": 101.0, + "1265": 85.0, + "1266": 106.0, + "1267": 104.0, + "1268": 90.0, + "1269": 102.0, + "1270": 106.0, + "1271": 107.0, + "1272": 79.0, + "1273": 85.0, + "1274": 99.0, + "1275": 127.0, + "1276": 89.0, + "1277": 144.0, + "1278": 109.0, + "1279": 110.0, + "1280": 123.0, + "1281": 98.0, + "1282": 94.0, + "1283": 110.0, + "1284": 88.0, + "1285": 112.0, + "1286": 106.0, + "1287": 86.0, + "1288": 100.0, + "1289": 118.0, + "1290": 109.0, + "1291": 82.0, + "1292": 106.0, + "1293": 97.0, + "1294": 96.0, + "1295": 91.0, + "1296": 110.0, + "1297": 120.0, + "1298": 105.0, + "1299": 114.0, + "1300": 113.0, + "1301": 106.0, + "1302": 112.0, + "1303": 102.0, + "1304": 94.0, + "1305": 109.0, + "1306": 83.0, + "1307": 97.0, + "1308": 120.0, + "1309": 126.0, + "1310": 103.0, + "1311": 126.0, + "1312": 100.0, + "1313": 101.0, + "1314": 107.0, + "1315": 117.0, + "1316": 101.0, + "1317": 107.0, + "1318": 103.0, + "1319": 98.0, + "1320": 103.0, + "1321": 112.0, + "1322": 86.0, + "1323": 117.0, + "1324": 94.0, + "1325": 94.0, + "1326": 139.0, + "1327": 82.0, + "1328": 124.0, + "1329": 103.0, + "1330": 91.0, + "1331": 94.0, + "1332": 106.0, + "1333": 86.0, + "1334": 86.0, + "1335": 96.0, + "1336": 113.0, + "1337": 114.0, + "1338": 126.0, + "1339": 104.0, + "1340": 101.0, + "1341": 83.0, + "1342": 106.0, + "1343": 122.0, + "1344": 99.0, + "1345": 93.0, + "1346": 110.0, + "1347": 105.0, + "1348": 104.0, + "1349": 
103.0, + "1350": 111.0, + "1351": 121.0, + "1352": 106.0, + "1353": 108.0, + "1354": 108.0, + "1355": 92.0, + "1356": 89.0, + "1357": 103.0, + "1358": 120.0, + "1359": 110.0, + "1360": 125.0, + "1361": 116.0, + "1362": 133.0, + "1363": 103.0, + "1364": 109.0, + "1365": 101.0, + "1366": 100.0, + "1367": 93.0, + "1368": 108.0, + "1369": 127.0, + "1370": 99.0, + "1371": 121.0, + "1372": 116.0, + "1373": 110.0, + "1374": 94.0, + "1375": 107.0, + "1376": 104.0, + "1377": 115.0, + "1378": 100.0, + "1379": 106.0, + "1380": 88.0, + "1381": 103.0, + "1382": 101.0, + "1383": 118.0, + "1384": 120.0, + "1385": 117.0, + "1386": 123.0, + "1387": 93.0, + "1388": 86.0, + "1389": 119.0, + "1390": 116.0, + "1391": 103.0, + "1392": 84.0, + "1393": 100.0, + "1394": 112.0, + "1395": 77.0, + "1396": 101.0, + "1397": 124.0, + "1398": 104.0, + "1399": 120.0, + "1400": 103.0, + "1401": 100.0, + "1402": 105.0, + "1403": 82.0, + "1404": 104.0, + "1405": 93.0, + "1406": 102.0, + "1407": 118.0, + "1408": 100.0, + "1409": 114.0, + "1410": 85.0, + "1411": 101.0, + "1412": 99.0, + "1413": 117.0, + "1414": 116.0, + "1415": 115.0, + "1416": 90.0, + "1417": 99.0, + "1418": 97.0, + "1419": 96.0, + "1420": 119.0, + "1421": 108.0, + "1422": 113.0, + "1423": 91.0, + "1424": 123.0, + "1425": 101.0, + "1426": 110.0, + "1427": 107.0, + "1428": 116.0, + "1429": 128.0, + "1430": 87.0, + "1431": 96.0, + "1432": 113.0, + "1433": 92.0, + "1434": 101.0, + "1435": 101.0, + "1436": 111.0, + "1437": 122.0, + "1438": 105.0, + "1439": 99.0, + "1440": 101.0, + "1441": 104.0, + "1442": 89.0, + "1443": 109.0, + "1444": 86.0, + "1445": 100.0, + "1446": 87.0, + "1447": 105.0, + "1448": 102.0, + "1449": 88.0, + "1450": 100.0, + "1451": 94.0, + "1452": 95.0, + "1453": 116.0, + "1454": 98.0, + "1455": 92.0, + "1456": 91.0, + "1457": 132.0, + "1458": 121.0, + "1459": 109.0, + "1460": 111.0, + "1461": 111.0, + "1462": 89.0, + "1463": 99.0, + "1464": 108.0, + "1465": 97.0, + "1466": 87.0, + "1467": 99.0, + "1468": 127.0, + 
"1469": 88.0, + "1470": 103.0, + "1471": 101.0, + "1472": 106.0, + "1473": 120.0, + "1474": 96.0, + "1475": 123.0, + "1476": 85.0, + "1477": 122.0, + "1478": 107.0, + "1479": 113.0, + "1480": 109.0, + "1481": 107.0, + "1482": 118.0, + "1483": 86.0, + "1484": 98.0, + "1485": 91.0, + "1486": 96.0, + "1487": 119.0, + "1488": 106.0, + "1489": 93.0, + "1490": 113.0, + "1491": 107.0, + "1492": 100.0, + "1493": 123.0, + "1494": 105.0, + "1495": 121.0, + "1496": 105.0, + "1497": 99.0, + "1498": 112.0, + "1499": 106.0, + "1500": 104.0, + "1501": 129.0, + "1502": 109.0, + "1503": 91.0, + "1504": 111.0, + "1505": 97.0, + "1506": 116.0, + "1507": 122.0, + "1508": 103.0, + "1509": 141.0, + "1510": 86.0, + "1511": 120.0, + "1512": 120.0, + "1513": 128.0, + "1514": 100.0, + "1515": 108.0, + "1516": 99.0, + "1517": 109.0, + "1518": 106.0, + "1519": 88.0, + "1520": 89.0, + "1521": 101.0, + "1522": 112.0, + "1523": 88.0, + "1524": 113.0, + "1525": 94.0, + "1526": 110.0, + "1527": 112.0, + "1528": 84.0, + "1529": 91.0, + "1530": 114.0, + "1531": 113.0, + "1532": 119.0, + "1533": 95.0, + "1534": 112.0, + "1535": 112.0, + "1536": 109.0, + "1537": 97.0, + "1538": 111.0, + "1539": 115.0, + "1540": 114.0, + "1541": 88.0, + "1542": 126.0, + "1543": 97.0, + "1544": 84.0, + "1545": 105.0, + "1546": 82.0, + "1547": 93.0, + "1548": 90.0, + "1549": 99.0, + "1550": 93.0, + "1551": 98.0, + "1552": 86.0, + "1553": 120.0, + "1554": 109.0, + "1555": 111.0, + "1556": 98.0, + "1557": 90.0, + "1558": 120.0, + "1559": 84.0, + "1560": 107.0, + "1561": 103.0, + "1562": 121.0, + "1563": 116.0, + "1564": 113.0, + "1565": 114.0, + "1566": 113.0, + "1567": 102.0, + "1568": 91.0, + "1569": 122.0, + "1570": 95.0, + "1571": 115.0, + "1572": 102.0, + "1573": 100.0, + "1574": 121.0, + "1575": 108.0, + "1576": 88.0, + "1577": 116.0, + "1578": 101.0, + "1579": 98.0, + "1580": 114.0, + "1581": 102.0, + "1582": 108.0, + "1583": 115.0, + "1584": 70.0, + "1585": 112.0, + "1586": 120.0, + "1587": 101.0, + "1588": 118.0, 
+ "1589": 99.0, + "1590": 103.0, + "1591": 108.0, + "1592": 106.0, + "1593": 121.0, + "1594": 110.0, + "1595": 103.0, + "1596": 117.0, + "1597": 115.0, + "1598": 105.0, + "1599": 76.0, + "1600": 90.0, + "1601": 108.0, + "1602": 105.0, + "1603": 122.0, + "1604": 113.0, + "1605": 122.0, + "1606": 117.0, + "1607": 92.0, + "1608": 118.0, + "1609": 115.0, + "1610": 103.0, + "1611": 117.0, + "1612": 106.0, + "1613": 106.0, + "1614": 104.0, + "1615": 114.0, + "1616": 88.0, + "1617": 97.0, + "1618": 111.0, + "1619": 107.0, + "1620": 112.0, + "1621": 91.0, + "1622": 130.0, + "1623": 109.0, + "1624": 102.0, + "1625": 121.0, + "1626": 100.0, + "1627": 119.0, + "1628": 99.0, + "1629": 119.0, + "1630": 117.0, + "1631": 105.0, + "1632": 116.0, + "1633": 112.0, + "1634": 120.0, + "1635": 99.0, + "1636": 105.0, + "1637": 94.0, + "1638": 107.0, + "1639": 97.0, + "1640": 106.0, + "1641": 120.0, + "1642": 101.0, + "1643": 135.0, + "1644": 117.0, + "1645": 110.0, + "1646": 106.0, + "1647": 127.0, + "1648": 82.0, + "1649": 114.0, + "1650": 121.0, + "1651": 107.0, + "1652": 100.0, + "1653": 108.0, + "1654": 114.0, + "1655": 92.0, + "1656": 80.0, + "1657": 110.0, + "1658": 114.0, + "1659": 105.0, + "1660": 104.0, + "1661": 102.0, + "1662": 124.0, + "1663": 96.0, + "1664": 127.0, + "1665": 89.0, + "1666": 115.0, + "1667": 114.0, + "1668": 122.0, + "1669": 94.0, + "1670": 114.0, + "1671": 102.0, + "1672": 99.0, + "1673": 109.0, + "1674": 117.0, + "1675": 105.0, + "1676": 116.0, + "1677": 101.0, + "1678": 110.0, + "1679": 112.0, + "1680": 96.0, + "1681": 93.0, + "1682": 97.0, + "1683": 106.0, + "1684": 103.0, + "1685": 101.0, + "1686": 109.0, + "1687": 104.0, + "1688": 127.0, + "1689": 88.0, + "1690": 98.0, + "1691": 90.0, + "1692": 107.0, + "1693": 111.0, + "1694": 125.0, + "1695": 129.0, + "1696": 112.0, + "1697": 126.0, + "1698": 104.0, + "1699": 124.0, + "1700": 112.0, + "1701": 120.0, + "1702": 89.0, + "1703": 103.0, + "1704": 103.0, + "1705": 111.0, + "1706": 124.0, + "1707": 93.0, + 
"1708": 96.0, + "1709": 116.0, + "1710": 133.0, + "1711": 107.0, + "1712": 100.0, + "1713": 91.0, + "1714": 122.0, + "1715": 108.0, + "1716": 110.0, + "1717": 121.0, + "1718": 101.0, + "1719": 110.0, + "1720": 121.0, + "1721": 109.0, + "1722": 96.0, + "1723": 125.0, + "1724": 118.0, + "1725": 122.0, + "1726": 113.0, + "1727": 99.0, + "1728": 98.0, + "1729": 115.0, + "1730": 106.0, + "1731": 96.0, + "1732": 95.0, + "1733": 115.0, + "1734": 106.0, + "1735": 102.0, + "1736": 104.0, + "1737": 122.0, + "1738": 94.0, + "1739": 92.0, + "1740": 105.0, + "1741": 113.0, + "1742": 129.0, + "1743": 113.0, + "1744": 110.0, + "1745": 113.0, + "1746": 127.0, + "1747": 108.0, + "1748": 120.0, + "1749": 115.0, + "1750": 104.0, + "1751": 114.0, + "1752": 122.0, + "1753": 113.0, + "1754": 123.0, + "1755": 114.0, + "1756": 115.0, + "1757": 126.0, + "1758": 105.0, + "1759": 109.0, + "1760": 136.0, + "1761": 111.0, + "1762": 104.0, + "1763": 104.0, + "1764": 105.0, + "1765": 133.0, + "1766": 118.0, + "1767": 108.0, + "1768": 114.0, + "1769": 105.0, + "1770": 98.0, + "1771": 112.0, + "1772": 92.0, + "1773": 77.0, + "1774": 130.0, + "1775": 104.0, + "1776": 85.0, + "1777": 106.0, + "1778": 84.0, + "1779": 111.0, + "1780": 109.0, + "1781": 124.0, + "1782": 109.0, + "1783": 128.0, + "1784": 117.0, + "1785": 118.0, + "1786": 111.0, + "1787": 112.0, + "1788": 104.0, + "1789": 135.0, + "1790": 105.0, + "1791": 115.0, + "1792": 130.0, + "1793": 119.0, + "1794": 128.0, + "1795": 110.0, + "1796": 130.0, + "1797": 97.0, + "1798": 139.0, + "1799": 104.0, + "1800": 103.0, + "1801": 94.0, + "1802": 134.0, + "1803": 117.0, + "1804": 139.0, + "1805": 124.0, + "1806": 127.0, + "1807": 128.0, + "1808": 99.0, + "1809": 92.0, + "1810": 116.0, + "1811": 104.0, + "1812": 103.0, + "1813": 122.0, + "1814": 129.0, + "1815": 94.0, + "1816": 104.0, + "1817": 98.0, + "1818": 128.0, + "1819": 112.0, + "1820": 99.0, + "1821": 126.0, + "1822": 83.0, + "1823": 117.0, + "1824": 96.0, + "1825": 95.0, + "1826": 127.0, + 
"1827": 124.0, + "1828": 120.0, + "1829": 110.0, + "1830": 123.0, + "1831": 110.0, + "1832": 92.0, + "1833": 100.0, + "1834": 113.0, + "1835": 120.0, + "1836": 113.0, + "1837": 114.0, + "1838": 99.0, + "1839": 123.0, + "1840": 109.0, + "1841": 95.0, + "1842": 101.0, + "1843": 122.0, + "1844": 113.0, + "1845": 127.0, + "1846": 100.0, + "1847": 117.0, + "1848": 133.0, + "1849": 87.0, + "1850": 103.0, + "1851": 89.0, + "1852": 99.0, + "1853": 93.0, + "1854": 99.0, + "1855": 107.0, + "1856": 111.0, + "1857": 121.0, + "1858": 92.0, + "1859": 105.0, + "1860": 115.0, + "1861": 92.0, + "1862": 91.0, + "1863": 112.0, + "1864": 109.0, + "1865": 125.0, + "1866": 124.0, + "1867": 110.0, + "1868": 113.0, + "1869": 119.0, + "1870": 137.0, + "1871": 126.0, + "1872": 95.0, + "1873": 119.0, + "1874": 105.0, + "1875": 128.0, + "1876": 104.0, + "1877": 120.0, + "1878": 95.0, + "1879": 99.0, + "1880": 123.0, + "1881": 99.0, + "1882": 97.0, + "1883": 101.0, + "1884": 115.0, + "1885": 106.0, + "1886": 123.0, + "1887": 121.0, + "1888": 121.0, + "1889": 114.0, + "1890": 100.0, + "1891": 110.0, + "1892": 107.0, + "1893": 113.0, + "1894": 134.0, + "1895": 114.0, + "1896": 111.0, + "1897": 122.0, + "1898": 108.0, + "1899": 94.0, + "1900": 123.0, + "1901": 125.0, + "1902": 115.0, + "1903": 112.0, + "1904": 113.0, + "1905": 109.0, + "1906": 115.0, + "1907": 95.0, + "1908": 113.0, + "1909": 79.0, + "1910": 97.0, + "1911": 135.0, + "1912": 122.0, + "1913": 105.0, + "1914": 112.0, + "1915": 129.0, + "1916": 117.0, + "1917": 115.0, + "1918": 113.0, + "1919": 117.0, + "1920": 122.0, + "1921": 105.0, + "1922": 86.0, + "1923": 113.0, + "1924": 111.0, + "1925": 110.0, + "1926": 112.0, + "1927": 103.0, + "1928": 108.0, + "1929": 113.0, + "1930": 121.0, + "1931": 111.0, + "1932": 106.0, + "1933": 114.0, + "1934": 117.0, + "1935": 93.0, + "1936": 109.0, + "1937": 121.0, + "1938": 108.0, + "1939": 132.0, + "1940": 127.0, + "1941": 126.0, + "1942": 101.0, + "1943": 120.0, + "1944": 87.0, + "1945": 114.0, + 
"1946": 105.0, + "1947": 109.0, + "1948": 109.0, + "1949": 106.0, + "1950": 111.0, + "1951": 120.0, + "1952": 104.0, + "1953": 113.0, + "1954": 116.0, + "1955": 131.0, + "1956": 91.0, + "1957": 118.0, + "1958": 139.0, + "1959": 114.0, + "1960": 96.0, + "1961": 109.0, + "1962": 113.0, + "1963": 125.0, + "1964": 112.0, + "1965": 108.0, + "1966": 130.0, + "1967": 120.0, + "1968": 110.0, + "1969": 96.0, + "1970": 110.0, + "1971": 121.0, + "1972": 104.0, + "1973": 103.0, + "1974": 110.0, + "1975": 101.0, + "1976": 144.0, + "1977": 122.0, + "1978": 118.0, + "1979": 121.0, + "1980": 115.0, + "1981": 114.0, + "1982": 136.0, + "1983": 123.0, + "1984": 112.0, + "1985": 116.0, + "1986": 104.0, + "1987": 133.0, + "1988": 107.0, + "1989": 100.0, + "1990": 112.0, + "1991": 119.0, + "1992": 103.0, + "1993": 133.0, + "1994": 123.0, + "1995": 118.0, + "1996": 109.0, + "1997": 119.0, + "1998": 107.0, + "1999": 119.0, + "2000": 134.0 + } + }, + "mem-allocated-bytes": { + "start_step": 1, + "end_step": 2000, + "step_interval": 1, + "values": { + "1": 442918400.0, + "2": 442918400.0, + "3": 442918400.0, + "4": 442918400.0, + "5": 442918400.0, + "6": 442918400.0, + "7": 442918400.0, + "8": 442918400.0, + "9": 442918400.0, + "10": 442918400.0, + "11": 442918400.0, + "12": 442918400.0, + "13": 442918400.0, + "14": 442918400.0, + "15": 442918400.0, + "16": 442918400.0, + "17": 442918400.0, + "18": 442918400.0, + "19": 442918400.0, + "20": 442918400.0, + "21": 442918400.0, + "22": 442918400.0, + "23": 442918400.0, + "24": 442918400.0, + "25": 442918400.0, + "26": 442918400.0, + "27": 442918400.0, + "28": 442918400.0, + "29": 442918400.0, + "30": 442918400.0, + "31": 442918400.0, + "32": 442918400.0, + "33": 442918400.0, + "34": 442918400.0, + "35": 442918400.0, + "36": 442918400.0, + "37": 442918400.0, + "38": 442918400.0, + "39": 442918400.0, + "40": 442918400.0, + "41": 442918400.0, + "42": 442918400.0, + "43": 442918400.0, + "44": 442918400.0, + "45": 442918400.0, + "46": 442918400.0, + 
"47": 442918400.0, + "48": 442918400.0, + "49": 442918400.0, + "50": 442918400.0, + "51": 442918400.0, + "52": 442918400.0, + "53": 442918400.0, + "54": 442918400.0, + "55": 442918400.0, + "56": 442918400.0, + "57": 442918400.0, + "58": 442918400.0, + "59": 442918400.0, + "60": 442918400.0, + "61": 442918400.0, + "62": 442918400.0, + "63": 442918400.0, + "64": 442918400.0, + "65": 442918400.0, + "66": 442918400.0, + "67": 442918400.0, + "68": 442918400.0, + "69": 442918400.0, + "70": 442918400.0, + "71": 442918400.0, + "72": 442918400.0, + "73": 442918400.0, + "74": 442918400.0, + "75": 442918400.0, + "76": 442918400.0, + "77": 442918400.0, + "78": 442918400.0, + "79": 442918400.0, + "80": 442918400.0, + "81": 442918400.0, + "82": 442918400.0, + "83": 442918400.0, + "84": 442918400.0, + "85": 442918400.0, + "86": 442918400.0, + "87": 442918400.0, + "88": 442918400.0, + "89": 442918400.0, + "90": 442918400.0, + "91": 442918400.0, + "92": 442918400.0, + "93": 442918400.0, + "94": 442918400.0, + "95": 442918400.0, + "96": 442918400.0, + "97": 442918400.0, + "98": 442918400.0, + "99": 442918400.0, + "100": 442918400.0, + "101": 442918400.0, + "102": 442918400.0, + "103": 442918400.0, + "104": 442918400.0, + "105": 442918400.0, + "106": 442918400.0, + "107": 442918400.0, + "108": 442918400.0, + "109": 442918400.0, + "110": 442918400.0, + "111": 442918400.0, + "112": 442918400.0, + "113": 442918400.0, + "114": 442918400.0, + "115": 442918400.0, + "116": 442918400.0, + "117": 442918400.0, + "118": 442918400.0, + "119": 442918400.0, + "120": 442918400.0, + "121": 442918400.0, + "122": 442918400.0, + "123": 442918400.0, + "124": 442918400.0, + "125": 442918400.0, + "126": 442918400.0, + "127": 442918400.0, + "128": 442918400.0, + "129": 442918400.0, + "130": 442918400.0, + "131": 442918400.0, + "132": 442918400.0, + "133": 442918400.0, + "134": 442918400.0, + "135": 442918400.0, + "136": 442918400.0, + "137": 442918400.0, + "138": 442918400.0, + "139": 442918400.0, + "140": 
442918400.0, + "141": 442918400.0, + "142": 442918400.0, + "143": 442918400.0, + "144": 442918400.0, + "145": 442918400.0, + "146": 442918400.0, + "147": 442918400.0, + "148": 442918400.0, + "149": 442918400.0, + "150": 442918400.0, + "151": 442918400.0, + "152": 442918400.0, + "153": 442918400.0, + "154": 442918400.0, + "155": 442918400.0, + "156": 442918400.0, + "157": 442918400.0, + "158": 442918400.0, + "159": 442918400.0, + "160": 442918400.0, + "161": 442918400.0, + "162": 442918400.0, + "163": 442918400.0, + "164": 442918400.0, + "165": 442918400.0, + "166": 442918400.0, + "167": 442918400.0, + "168": 442918400.0, + "169": 442918400.0, + "170": 442918400.0, + "171": 442918400.0, + "172": 442918400.0, + "173": 442918400.0, + "174": 442918400.0, + "175": 442918400.0, + "176": 442918400.0, + "177": 442918400.0, + "178": 442918400.0, + "179": 442918400.0, + "180": 442918400.0, + "181": 442918400.0, + "182": 442918400.0, + "183": 442918400.0, + "184": 442918400.0, + "185": 442918400.0, + "186": 442918400.0, + "187": 442918400.0, + "188": 442918400.0, + "189": 442918400.0, + "190": 442918400.0, + "191": 442918400.0, + "192": 442918400.0, + "193": 442918400.0, + "194": 442918400.0, + "195": 442918400.0, + "196": 442918400.0, + "197": 442918400.0, + "198": 442918400.0, + "199": 442918400.0, + "200": 442918400.0, + "201": 442918400.0, + "202": 442918400.0, + "203": 442918400.0, + "204": 442918400.0, + "205": 442918400.0, + "206": 442918400.0, + "207": 442918400.0, + "208": 442918400.0, + "209": 442918400.0, + "210": 442918400.0, + "211": 442918400.0, + "212": 442918400.0, + "213": 442918400.0, + "214": 442918400.0, + "215": 442918400.0, + "216": 442918400.0, + "217": 442918400.0, + "218": 442918400.0, + "219": 442918400.0, + "220": 442918400.0, + "221": 442918400.0, + "222": 442918400.0, + "223": 442918400.0, + "224": 442918400.0, + "225": 442918400.0, + "226": 442918400.0, + "227": 442918400.0, + "228": 442918400.0, + "229": 442918400.0, + "230": 442918400.0, + 
"231": 442918400.0, + "232": 442918400.0, + "233": 442918400.0, + "234": 442918400.0, + "235": 442918400.0, + "236": 442918400.0, + "237": 442918400.0, + "238": 442918400.0, + "239": 442918400.0, + "240": 442918400.0, + "241": 442918400.0, + "242": 442918400.0, + "243": 442918400.0, + "244": 442918400.0, + "245": 442918400.0, + "246": 442918400.0, + "247": 442918400.0, + "248": 442918400.0, + "249": 442918400.0, + "250": 442918400.0, + "251": 442918400.0, + "252": 442918400.0, + "253": 442918400.0, + "254": 442918400.0, + "255": 442918400.0, + "256": 442918400.0, + "257": 442918400.0, + "258": 442918400.0, + "259": 442918400.0, + "260": 442918400.0, + "261": 442918400.0, + "262": 442918400.0, + "263": 442918400.0, + "264": 442918400.0, + "265": 442918400.0, + "266": 442918400.0, + "267": 442918400.0, + "268": 442918400.0, + "269": 442918400.0, + "270": 442918400.0, + "271": 442918400.0, + "272": 442918400.0, + "273": 442918400.0, + "274": 442918400.0, + "275": 442918400.0, + "276": 442918400.0, + "277": 442918400.0, + "278": 442918400.0, + "279": 442918400.0, + "280": 442918400.0, + "281": 442918400.0, + "282": 442918400.0, + "283": 442918400.0, + "284": 442918400.0, + "285": 442918400.0, + "286": 442918400.0, + "287": 442918400.0, + "288": 442918400.0, + "289": 442918400.0, + "290": 442918400.0, + "291": 442918400.0, + "292": 442918400.0, + "293": 442918400.0, + "294": 442918400.0, + "295": 442918400.0, + "296": 442918400.0, + "297": 442918400.0, + "298": 442918400.0, + "299": 442918400.0, + "300": 442918400.0, + "301": 442918400.0, + "302": 442918400.0, + "303": 442918400.0, + "304": 442918400.0, + "305": 442918400.0, + "306": 442918400.0, + "307": 442918400.0, + "308": 442918400.0, + "309": 442918400.0, + "310": 442918400.0, + "311": 442918400.0, + "312": 442918400.0, + "313": 442918400.0, + "314": 442918400.0, + "315": 442918400.0, + "316": 442918400.0, + "317": 442918400.0, + "318": 442918400.0, + "319": 442918400.0, + "320": 442918400.0, + "321": 442918400.0, 
+ "322": 442918400.0, + "323": 442918400.0, + "324": 442918400.0, + "325": 442918400.0, + "326": 442918400.0, + "327": 442918400.0, + "328": 442918400.0, + "329": 442918400.0, + "330": 442918400.0, + "331": 442918400.0, + "332": 442918400.0, + "333": 442918400.0, + "334": 442918400.0, + "335": 442918400.0, + "336": 442918400.0, + "337": 442918400.0, + "338": 442918400.0, + "339": 442918400.0, + "340": 442918400.0, + "341": 442918400.0, + "342": 442918400.0, + "343": 442918400.0, + "344": 442918400.0, + "345": 442918400.0, + "346": 442918400.0, + "347": 442918400.0, + "348": 442918400.0, + "349": 442918400.0, + "350": 442918400.0, + "351": 442918400.0, + "352": 442918400.0, + "353": 442918400.0, + "354": 442918400.0, + "355": 442918400.0, + "356": 442918400.0, + "357": 442918400.0, + "358": 442918400.0, + "359": 442918400.0, + "360": 442918400.0, + "361": 442918400.0, + "362": 442918400.0, + "363": 442918400.0, + "364": 442918400.0, + "365": 442918400.0, + "366": 442918400.0, + "367": 442918400.0, + "368": 442918400.0, + "369": 442918400.0, + "370": 442918400.0, + "371": 442918400.0, + "372": 442918400.0, + "373": 442918400.0, + "374": 442918400.0, + "375": 442918400.0, + "376": 442918400.0, + "377": 442918400.0, + "378": 442918400.0, + "379": 442918400.0, + "380": 442918400.0, + "381": 442918400.0, + "382": 442918400.0, + "383": 442918400.0, + "384": 442918400.0, + "385": 442918400.0, + "386": 442918400.0, + "387": 442918400.0, + "388": 442918400.0, + "389": 442918400.0, + "390": 442918400.0, + "391": 442918400.0, + "392": 442918400.0, + "393": 442918400.0, + "394": 442918400.0, + "395": 442918400.0, + "396": 442918400.0, + "397": 442918400.0, + "398": 442918400.0, + "399": 442918400.0, + "400": 442918400.0, + "401": 442918400.0, + "402": 442918400.0, + "403": 442918400.0, + "404": 442918400.0, + "405": 442918400.0, + "406": 442918400.0, + "407": 442918400.0, + "408": 442918400.0, + "409": 442918400.0, + "410": 442918400.0, + "411": 442918400.0, + "412": 
442918400.0, + "413": 442918400.0, + "414": 442918400.0, + "415": 442918400.0, + "416": 442918400.0, + "417": 442918400.0, + "418": 442918400.0, + "419": 442918400.0, + "420": 442918400.0, + "421": 442918400.0, + "422": 442918400.0, + "423": 442918400.0, + "424": 442918400.0, + "425": 442918400.0, + "426": 442918400.0, + "427": 442918400.0, + "428": 442918400.0, + "429": 442918400.0, + "430": 442918400.0, + "431": 442918400.0, + "432": 442918400.0, + "433": 442918400.0, + "434": 442918400.0, + "435": 442918400.0, + "436": 442918400.0, + "437": 442918400.0, + "438": 442918400.0, + "439": 442918400.0, + "440": 442918400.0, + "441": 442918400.0, + "442": 442918400.0, + "443": 442918400.0, + "444": 442918400.0, + "445": 442918400.0, + "446": 442918400.0, + "447": 442918400.0, + "448": 442918400.0, + "449": 442918400.0, + "450": 442918400.0, + "451": 442918400.0, + "452": 442918400.0, + "453": 442918400.0, + "454": 442918400.0, + "455": 442918400.0, + "456": 442918400.0, + "457": 442918400.0, + "458": 442918400.0, + "459": 442918400.0, + "460": 442918400.0, + "461": 442918400.0, + "462": 442918400.0, + "463": 442918400.0, + "464": 442918400.0, + "465": 442918400.0, + "466": 442918400.0, + "467": 442918400.0, + "468": 442918400.0, + "469": 442918400.0, + "470": 442918400.0, + "471": 442918400.0, + "472": 442918400.0, + "473": 442918400.0, + "474": 442918400.0, + "475": 442918400.0, + "476": 442918400.0, + "477": 442918400.0, + "478": 442918400.0, + "479": 442918400.0, + "480": 442918400.0, + "481": 442918400.0, + "482": 442918400.0, + "483": 442918400.0, + "484": 442918400.0, + "485": 442918400.0, + "486": 442918400.0, + "487": 442918400.0, + "488": 442918400.0, + "489": 442918400.0, + "490": 442918400.0, + "491": 442918400.0, + "492": 442918400.0, + "493": 442918400.0, + "494": 442918400.0, + "495": 442918400.0, + "496": 442918400.0, + "497": 442918400.0, + "498": 442918400.0, + "499": 442918400.0, + "500": 442918400.0, + "501": 442918400.0, + "502": 442918400.0, + 
"503": 442918400.0, + "504": 442918400.0, + "505": 442918400.0, + "506": 442918400.0, + "507": 442918400.0, + "508": 442918400.0, + "509": 442918400.0, + "510": 442918400.0, + "511": 442918400.0, + "512": 442918400.0, + "513": 442918400.0, + "514": 442918400.0, + "515": 442918400.0, + "516": 442918400.0, + "517": 442918400.0, + "518": 442918400.0, + "519": 442918400.0, + "520": 442918400.0, + "521": 442918400.0, + "522": 442918400.0, + "523": 442918400.0, + "524": 442918400.0, + "525": 442918400.0, + "526": 442918400.0, + "527": 442918400.0, + "528": 442918400.0, + "529": 442918400.0, + "530": 442918400.0, + "531": 442918400.0, + "532": 442918400.0, + "533": 442918400.0, + "534": 442918400.0, + "535": 442918400.0, + "536": 442918400.0, + "537": 442918400.0, + "538": 442918400.0, + "539": 442918400.0, + "540": 442918400.0, + "541": 442918400.0, + "542": 442918400.0, + "543": 442918400.0, + "544": 442918400.0, + "545": 442918400.0, + "546": 442918400.0, + "547": 442918400.0, + "548": 442918400.0, + "549": 442918400.0, + "550": 442918400.0, + "551": 442918400.0, + "552": 442918400.0, + "553": 442918400.0, + "554": 442918400.0, + "555": 442918400.0, + "556": 442918400.0, + "557": 442918400.0, + "558": 442918400.0, + "559": 442918400.0, + "560": 442918400.0, + "561": 442918400.0, + "562": 442918400.0, + "563": 442918400.0, + "564": 442918400.0, + "565": 442918400.0, + "566": 442918400.0, + "567": 442918400.0, + "568": 442918400.0, + "569": 442918400.0, + "570": 442918400.0, + "571": 442918400.0, + "572": 442918400.0, + "573": 442918400.0, + "574": 442918400.0, + "575": 442918400.0, + "576": 442918400.0, + "577": 442918400.0, + "578": 442918400.0, + "579": 442918400.0, + "580": 442918400.0, + "581": 442918400.0, + "582": 442918400.0, + "583": 442918400.0, + "584": 442918400.0, + "585": 442918400.0, + "586": 442918400.0, + "587": 442918400.0, + "588": 442918400.0, + "589": 442918400.0, + "590": 442918400.0, + "591": 442918400.0, + "592": 442918400.0, + "593": 442918400.0, 
+ "594": 442918400.0, + "595": 442918400.0, + "596": 442918400.0, + "597": 442918400.0, + "598": 442918400.0, + "599": 442918400.0, + "600": 442918400.0, + "601": 442918400.0, + "602": 442918400.0, + "603": 442918400.0, + "604": 442918400.0, + "605": 442918400.0, + "606": 442918400.0, + "607": 442918400.0, + "608": 442918400.0, + "609": 442918400.0, + "610": 442918400.0, + "611": 442918400.0, + "612": 442918400.0, + "613": 442918400.0, + "614": 442918400.0, + "615": 442918400.0, + "616": 442918400.0, + "617": 442918400.0, + "618": 442918400.0, + "619": 442918400.0, + "620": 442918400.0, + "621": 442918400.0, + "622": 442918400.0, + "623": 442918400.0, + "624": 442918400.0, + "625": 442918400.0, + "626": 442918400.0, + "627": 442918400.0, + "628": 442918400.0, + "629": 442918400.0, + "630": 442918400.0, + "631": 442918400.0, + "632": 442918400.0, + "633": 442918400.0, + "634": 442918400.0, + "635": 442918400.0, + "636": 442918400.0, + "637": 442918400.0, + "638": 442918400.0, + "639": 442918400.0, + "640": 442918400.0, + "641": 442918400.0, + "642": 442918400.0, + "643": 442918400.0, + "644": 442918400.0, + "645": 442918400.0, + "646": 442918400.0, + "647": 442918400.0, + "648": 442918400.0, + "649": 442918400.0, + "650": 442918400.0, + "651": 442918400.0, + "652": 442918400.0, + "653": 442918400.0, + "654": 442918400.0, + "655": 442918400.0, + "656": 442918400.0, + "657": 442918400.0, + "658": 442918400.0, + "659": 442918400.0, + "660": 442918400.0, + "661": 442918400.0, + "662": 442918400.0, + "663": 442918400.0, + "664": 442918400.0, + "665": 442918400.0, + "666": 442918400.0, + "667": 442918400.0, + "668": 442918400.0, + "669": 442918400.0, + "670": 442918400.0, + "671": 442918400.0, + "672": 442918400.0, + "673": 442918400.0, + "674": 442918400.0, + "675": 442918400.0, + "676": 442918400.0, + "677": 442918400.0, + "678": 442918400.0, + "679": 442918400.0, + "680": 442918400.0, + "681": 442918400.0, + "682": 442918400.0, + "683": 442918400.0, + "684": 
442918400.0, + "685": 442918400.0, + "686": 442918400.0, + "687": 442918400.0, + "688": 442918400.0, + "689": 442918400.0, + "690": 442918400.0, + "691": 442918400.0, + "692": 442918400.0, + "693": 442918400.0, + "694": 442918400.0, + "695": 442918400.0, + "696": 442918400.0, + "697": 442918400.0, + "698": 442918400.0, + "699": 442918400.0, + "700": 442918400.0, + "701": 442918400.0, + "702": 442918400.0, + "703": 442918400.0, + "704": 442918400.0, + "705": 442918400.0, + "706": 442918400.0, + "707": 442918400.0, + "708": 442918400.0, + "709": 442918400.0, + "710": 442918400.0, + "711": 442918400.0, + "712": 442918400.0, + "713": 442918400.0, + "714": 442918400.0, + "715": 442918400.0, + "716": 442918400.0, + "717": 442918400.0, + "718": 442918400.0, + "719": 442918400.0, + "720": 442918400.0, + "721": 442918400.0, + "722": 442918400.0, + "723": 442918400.0, + "724": 442918400.0, + "725": 442918400.0, + "726": 442918400.0, + "727": 442918400.0, + "728": 442918400.0, + "729": 442918400.0, + "730": 442918400.0, + "731": 442918400.0, + "732": 442918400.0, + "733": 442918400.0, + "734": 442918400.0, + "735": 442918400.0, + "736": 442918400.0, + "737": 442918400.0, + "738": 442918400.0, + "739": 442918400.0, + "740": 442918400.0, + "741": 442918400.0, + "742": 442918400.0, + "743": 442918400.0, + "744": 442918400.0, + "745": 442918400.0, + "746": 442918400.0, + "747": 442918400.0, + "748": 442918400.0, + "749": 442918400.0, + "750": 442918400.0, + "751": 442918400.0, + "752": 442918400.0, + "753": 442918400.0, + "754": 442918400.0, + "755": 442918400.0, + "756": 442918400.0, + "757": 442918400.0, + "758": 442918400.0, + "759": 442918400.0, + "760": 442918400.0, + "761": 442918400.0, + "762": 442918400.0, + "763": 442918400.0, + "764": 442918400.0, + "765": 442918400.0, + "766": 442918400.0, + "767": 442918400.0, + "768": 442918400.0, + "769": 442918400.0, + "770": 442918400.0, + "771": 442918400.0, + "772": 442918400.0, + "773": 442918400.0, + "774": 442918400.0, + 
"775": 442918400.0, + "776": 442918400.0, + "777": 442918400.0, + "778": 442918400.0, + "779": 442918400.0, + "780": 442918400.0, + "781": 442918400.0, + "782": 442918400.0, + "783": 442918400.0, + "784": 442918400.0, + "785": 442918400.0, + "786": 442918400.0, + "787": 442918400.0, + "788": 442918400.0, + "789": 442918400.0, + "790": 442918400.0, + "791": 442918400.0, + "792": 442918400.0, + "793": 442918400.0, + "794": 442918400.0, + "795": 442918400.0, + "796": 442918400.0, + "797": 442918400.0, + "798": 442918400.0, + "799": 442918400.0, + "800": 442918400.0, + "801": 442918400.0, + "802": 442918400.0, + "803": 442918400.0, + "804": 442918400.0, + "805": 442918400.0, + "806": 442918400.0, + "807": 442918400.0, + "808": 442918400.0, + "809": 442918400.0, + "810": 442918400.0, + "811": 442918400.0, + "812": 442918400.0, + "813": 442918400.0, + "814": 442918400.0, + "815": 442918400.0, + "816": 442918400.0, + "817": 442918400.0, + "818": 442918400.0, + "819": 442918400.0, + "820": 442918400.0, + "821": 442918400.0, + "822": 442918400.0, + "823": 442918400.0, + "824": 442918400.0, + "825": 442918400.0, + "826": 442918400.0, + "827": 442918400.0, + "828": 442918400.0, + "829": 442918400.0, + "830": 442918400.0, + "831": 442918400.0, + "832": 442918400.0, + "833": 442918400.0, + "834": 442918400.0, + "835": 442918400.0, + "836": 442918400.0, + "837": 442918400.0, + "838": 442918400.0, + "839": 442918400.0, + "840": 442918400.0, + "841": 442918400.0, + "842": 442918400.0, + "843": 442918400.0, + "844": 442918400.0, + "845": 442918400.0, + "846": 442918400.0, + "847": 442918400.0, + "848": 442918400.0, + "849": 442918400.0, + "850": 442918400.0, + "851": 442918400.0, + "852": 442918400.0, + "853": 442918400.0, + "854": 442918400.0, + "855": 442918400.0, + "856": 442918400.0, + "857": 442918400.0, + "858": 442918400.0, + "859": 442918400.0, + "860": 442918400.0, + "861": 442918400.0, + "862": 442918400.0, + "863": 442918400.0, + "864": 442918400.0, + "865": 442918400.0, 
+ "866": 442918400.0, + "867": 442918400.0, + "868": 442918400.0, + "869": 442918400.0, + "870": 442918400.0, + "871": 442918400.0, + "872": 442918400.0, + "873": 442918400.0, + "874": 442918400.0, + "875": 442918400.0, + "876": 442918400.0, + "877": 442918400.0, + "878": 442918400.0, + "879": 442918400.0, + "880": 442918400.0, + "881": 442918400.0, + "882": 442918400.0, + "883": 442918400.0, + "884": 442918400.0, + "885": 442918400.0, + "886": 442918400.0, + "887": 442918400.0, + "888": 442918400.0, + "889": 442918400.0, + "890": 442918400.0, + "891": 442918400.0, + "892": 442918400.0, + "893": 442918400.0, + "894": 442918400.0, + "895": 442918400.0, + "896": 442918400.0, + "897": 442918400.0, + "898": 442918400.0, + "899": 442918400.0, + "900": 442918400.0, + "901": 442918400.0, + "902": 442918400.0, + "903": 442918400.0, + "904": 442918400.0, + "905": 442918400.0, + "906": 442918400.0, + "907": 442918400.0, + "908": 442918400.0, + "909": 442918400.0, + "910": 442918400.0, + "911": 442918400.0, + "912": 442918400.0, + "913": 442918400.0, + "914": 442918400.0, + "915": 442918400.0, + "916": 442918400.0, + "917": 442918400.0, + "918": 442918400.0, + "919": 442918400.0, + "920": 442918400.0, + "921": 442918400.0, + "922": 442918400.0, + "923": 442918400.0, + "924": 442918400.0, + "925": 442918400.0, + "926": 442918400.0, + "927": 442918400.0, + "928": 442918400.0, + "929": 442918400.0, + "930": 442918400.0, + "931": 442918400.0, + "932": 442918400.0, + "933": 442918400.0, + "934": 442918400.0, + "935": 442918400.0, + "936": 442918400.0, + "937": 442918400.0, + "938": 442918400.0, + "939": 442918400.0, + "940": 442918400.0, + "941": 442918400.0, + "942": 442918400.0, + "943": 442918400.0, + "944": 442918400.0, + "945": 442918400.0, + "946": 442918400.0, + "947": 442918400.0, + "948": 442918400.0, + "949": 442918400.0, + "950": 442918400.0, + "951": 442918400.0, + "952": 442918400.0, + "953": 442918400.0, + "954": 442918400.0, + "955": 442918400.0, + "956": 
442918400.0, + "957": 442918400.0, + "958": 442918400.0, + "959": 442918400.0, + "960": 442918400.0, + "961": 442918400.0, + "962": 442918400.0, + "963": 442918400.0, + "964": 442918400.0, + "965": 442918400.0, + "966": 442918400.0, + "967": 442918400.0, + "968": 442918400.0, + "969": 442918400.0, + "970": 442918400.0, + "971": 442918400.0, + "972": 442918400.0, + "973": 442918400.0, + "974": 442918400.0, + "975": 442918400.0, + "976": 442918400.0, + "977": 442918400.0, + "978": 442918400.0, + "979": 442918400.0, + "980": 442918400.0, + "981": 442918400.0, + "982": 442918400.0, + "983": 442918400.0, + "984": 442918400.0, + "985": 442918400.0, + "986": 442918400.0, + "987": 442918400.0, + "988": 442918400.0, + "989": 442918400.0, + "990": 442918400.0, + "991": 442918400.0, + "992": 442918400.0, + "993": 442918400.0, + "994": 442918400.0, + "995": 442918400.0, + "996": 442918400.0, + "997": 442918400.0, + "998": 442918400.0, + "999": 442918400.0, + "1000": 442918400.0, + "1001": 442918400.0, + "1002": 442918400.0, + "1003": 442918400.0, + "1004": 442918400.0, + "1005": 442918400.0, + "1006": 442918400.0, + "1007": 442918400.0, + "1008": 442918400.0, + "1009": 442918400.0, + "1010": 442918400.0, + "1011": 442918400.0, + "1012": 442918400.0, + "1013": 442918400.0, + "1014": 442918400.0, + "1015": 442918400.0, + "1016": 442918400.0, + "1017": 442918400.0, + "1018": 442918400.0, + "1019": 442918400.0, + "1020": 442918400.0, + "1021": 442918400.0, + "1022": 442918400.0, + "1023": 442918400.0, + "1024": 442918400.0, + "1025": 442918400.0, + "1026": 442918400.0, + "1027": 442918400.0, + "1028": 442918400.0, + "1029": 442918400.0, + "1030": 442918400.0, + "1031": 442918400.0, + "1032": 442918400.0, + "1033": 442918400.0, + "1034": 442918400.0, + "1035": 442918400.0, + "1036": 442918400.0, + "1037": 442918400.0, + "1038": 442918400.0, + "1039": 442918400.0, + "1040": 442918400.0, + "1041": 442918400.0, + "1042": 442918400.0, + "1043": 442918400.0, + "1044": 442918400.0, + 
"1045": 442918400.0, + "1046": 442918400.0, + "1047": 442918400.0, + "1048": 442918400.0, + "1049": 442918400.0, + "1050": 442918400.0, + "1051": 442918400.0, + "1052": 442918400.0, + "1053": 442918400.0, + "1054": 442918400.0, + "1055": 442918400.0, + "1056": 442918400.0, + "1057": 442918400.0, + "1058": 442918400.0, + "1059": 442918400.0, + "1060": 442918400.0, + "1061": 442918400.0, + "1062": 442918400.0, + "1063": 442918400.0, + "1064": 442918400.0, + "1065": 442918400.0, + "1066": 442918400.0, + "1067": 442918400.0, + "1068": 442918400.0, + "1069": 442918400.0, + "1070": 442918400.0, + "1071": 442918400.0, + "1072": 442918400.0, + "1073": 442918400.0, + "1074": 442918400.0, + "1075": 442918400.0, + "1076": 442918400.0, + "1077": 442918400.0, + "1078": 442918400.0, + "1079": 442918400.0, + "1080": 442918400.0, + "1081": 442918400.0, + "1082": 442918400.0, + "1083": 442918400.0, + "1084": 442918400.0, + "1085": 442918400.0, + "1086": 442918400.0, + "1087": 442918400.0, + "1088": 442918400.0, + "1089": 442918400.0, + "1090": 442918400.0, + "1091": 442918400.0, + "1092": 442918400.0, + "1093": 442918400.0, + "1094": 442918400.0, + "1095": 442918400.0, + "1096": 442918400.0, + "1097": 442918400.0, + "1098": 442918400.0, + "1099": 442918400.0, + "1100": 442918400.0, + "1101": 442918400.0, + "1102": 442918400.0, + "1103": 442918400.0, + "1104": 442918400.0, + "1105": 442918400.0, + "1106": 442918400.0, + "1107": 442918400.0, + "1108": 442918400.0, + "1109": 442918400.0, + "1110": 442918400.0, + "1111": 442918400.0, + "1112": 442918400.0, + "1113": 442918400.0, + "1114": 442918400.0, + "1115": 442918400.0, + "1116": 442918400.0, + "1117": 442918400.0, + "1118": 442918400.0, + "1119": 442918400.0, + "1120": 442918400.0, + "1121": 442918400.0, + "1122": 442918400.0, + "1123": 442918400.0, + "1124": 442918400.0, + "1125": 442918400.0, + "1126": 442918400.0, + "1127": 442918400.0, + "1128": 442918400.0, + "1129": 442918400.0, + "1130": 442918400.0, + "1131": 442918400.0, 
+ "1132": 442918400.0, + "1133": 442918400.0, + "1134": 442918400.0, + "1135": 442918400.0, + "1136": 442918400.0, + "1137": 442918400.0, + "1138": 442918400.0, + "1139": 442918400.0, + "1140": 442918400.0, + "1141": 442918400.0, + "1142": 442918400.0, + "1143": 442918400.0, + "1144": 442918400.0, + "1145": 442918400.0, + "1146": 442918400.0, + "1147": 442918400.0, + "1148": 442918400.0, + "1149": 442918400.0, + "1150": 442918400.0, + "1151": 442918400.0, + "1152": 442918400.0, + "1153": 442918400.0, + "1154": 442918400.0, + "1155": 442918400.0, + "1156": 442918400.0, + "1157": 442918400.0, + "1158": 442918400.0, + "1159": 442918400.0, + "1160": 442918400.0, + "1161": 442918400.0, + "1162": 442918400.0, + "1163": 442918400.0, + "1164": 442918400.0, + "1165": 442918400.0, + "1166": 442918400.0, + "1167": 442918400.0, + "1168": 442918400.0, + "1169": 442918400.0, + "1170": 442918400.0, + "1171": 442918400.0, + "1172": 442918400.0, + "1173": 442918400.0, + "1174": 442918400.0, + "1175": 442918400.0, + "1176": 442918400.0, + "1177": 442918400.0, + "1178": 442918400.0, + "1179": 442918400.0, + "1180": 442918400.0, + "1181": 442918400.0, + "1182": 442918400.0, + "1183": 442918400.0, + "1184": 442918400.0, + "1185": 442918400.0, + "1186": 442918400.0, + "1187": 442918400.0, + "1188": 442918400.0, + "1189": 442918400.0, + "1190": 442918400.0, + "1191": 442918400.0, + "1192": 442918400.0, + "1193": 442918400.0, + "1194": 442918400.0, + "1195": 442918400.0, + "1196": 442918400.0, + "1197": 442918400.0, + "1198": 442918400.0, + "1199": 442918400.0, + "1200": 442918400.0, + "1201": 442918400.0, + "1202": 442918400.0, + "1203": 442918400.0, + "1204": 442918400.0, + "1205": 442918400.0, + "1206": 442918400.0, + "1207": 442918400.0, + "1208": 442918400.0, + "1209": 442918400.0, + "1210": 442918400.0, + "1211": 442918400.0, + "1212": 442918400.0, + "1213": 442918400.0, + "1214": 442918400.0, + "1215": 442918400.0, + "1216": 442918400.0, + "1217": 442918400.0, + "1218": 
442918400.0, + "1219": 442918400.0, + "1220": 442918400.0, + "1221": 442918400.0, + "1222": 442918400.0, + "1223": 442918400.0, + "1224": 442918400.0, + "1225": 442918400.0, + "1226": 442918400.0, + "1227": 442918400.0, + "1228": 442918400.0, + "1229": 442918400.0, + "1230": 442918400.0, + "1231": 442918400.0, + "1232": 442918400.0, + "1233": 442918400.0, + "1234": 442918400.0, + "1235": 442918400.0, + "1236": 442918400.0, + "1237": 442918400.0, + "1238": 442918400.0, + "1239": 442918400.0, + "1240": 442918400.0, + "1241": 442918400.0, + "1242": 442918400.0, + "1243": 442918400.0, + "1244": 442918400.0, + "1245": 442918400.0, + "1246": 442918400.0, + "1247": 442918400.0, + "1248": 442918400.0, + "1249": 442918400.0, + "1250": 442918400.0, + "1251": 442918400.0, + "1252": 442918400.0, + "1253": 442918400.0, + "1254": 442918400.0, + "1255": 442918400.0, + "1256": 442918400.0, + "1257": 442918400.0, + "1258": 442918400.0, + "1259": 442918400.0, + "1260": 442918400.0, + "1261": 442918400.0, + "1262": 442918400.0, + "1263": 442918400.0, + "1264": 442918400.0, + "1265": 442918400.0, + "1266": 442918400.0, + "1267": 442918400.0, + "1268": 442918400.0, + "1269": 442918400.0, + "1270": 442918400.0, + "1271": 442918400.0, + "1272": 442918400.0, + "1273": 442918400.0, + "1274": 442918400.0, + "1275": 442918400.0, + "1276": 442918400.0, + "1277": 442918400.0, + "1278": 442918400.0, + "1279": 442918400.0, + "1280": 442918400.0, + "1281": 442918400.0, + "1282": 442918400.0, + "1283": 442918400.0, + "1284": 442918400.0, + "1285": 442918400.0, + "1286": 442918400.0, + "1287": 442918400.0, + "1288": 442918400.0, + "1289": 442918400.0, + "1290": 442918400.0, + "1291": 442918400.0, + "1292": 442918400.0, + "1293": 442918400.0, + "1294": 442918400.0, + "1295": 442918400.0, + "1296": 442918400.0, + "1297": 442918400.0, + "1298": 442918400.0, + "1299": 442918400.0, + "1300": 442918400.0, + "1301": 442918400.0, + "1302": 442918400.0, + "1303": 442918400.0, + "1304": 442918400.0, + 
"1305": 442918400.0, + "1306": 442918400.0, + "1307": 442918400.0, + "1308": 442918400.0, + "1309": 442918400.0, + "1310": 442918400.0, + "1311": 442918400.0, + "1312": 442918400.0, + "1313": 442918400.0, + "1314": 442918400.0, + "1315": 442918400.0, + "1316": 442918400.0, + "1317": 442918400.0, + "1318": 442918400.0, + "1319": 442918400.0, + "1320": 442918400.0, + "1321": 442918400.0, + "1322": 442918400.0, + "1323": 442918400.0, + "1324": 442918400.0, + "1325": 442918400.0, + "1326": 442918400.0, + "1327": 442918400.0, + "1328": 442918400.0, + "1329": 442918400.0, + "1330": 442918400.0, + "1331": 442918400.0, + "1332": 442918400.0, + "1333": 442918400.0, + "1334": 442918400.0, + "1335": 442918400.0, + "1336": 442918400.0, + "1337": 442918400.0, + "1338": 442918400.0, + "1339": 442918400.0, + "1340": 442918400.0, + "1341": 442918400.0, + "1342": 442918400.0, + "1343": 442918400.0, + "1344": 442918400.0, + "1345": 442918400.0, + "1346": 442918400.0, + "1347": 442918400.0, + "1348": 442918400.0, + "1349": 442918400.0, + "1350": 442918400.0, + "1351": 442918400.0, + "1352": 442918400.0, + "1353": 442918400.0, + "1354": 442918400.0, + "1355": 442918400.0, + "1356": 442918400.0, + "1357": 442918400.0, + "1358": 442918400.0, + "1359": 442918400.0, + "1360": 442918400.0, + "1361": 442918400.0, + "1362": 442918400.0, + "1363": 442918400.0, + "1364": 442918400.0, + "1365": 442918400.0, + "1366": 442918400.0, + "1367": 442918400.0, + "1368": 442918400.0, + "1369": 442918400.0, + "1370": 442918400.0, + "1371": 442918400.0, + "1372": 442918400.0, + "1373": 442918400.0, + "1374": 442918400.0, + "1375": 442918400.0, + "1376": 442918400.0, + "1377": 442918400.0, + "1378": 442918400.0, + "1379": 442918400.0, + "1380": 442918400.0, + "1381": 442918400.0, + "1382": 442918400.0, + "1383": 442918400.0, + "1384": 442918400.0, + "1385": 442918400.0, + "1386": 442918400.0, + "1387": 442918400.0, + "1388": 442918400.0, + "1389": 442918400.0, + "1390": 442918400.0, + "1391": 442918400.0, 
+ "1392": 442918400.0, + "1393": 442918400.0, + "1394": 442918400.0, + "1395": 442918400.0, + "1396": 442918400.0, + "1397": 442918400.0, + "1398": 442918400.0, + "1399": 442918400.0, + "1400": 442918400.0, + "1401": 442918400.0, + "1402": 442918400.0, + "1403": 442918400.0, + "1404": 442918400.0, + "1405": 442918400.0, + "1406": 442918400.0, + "1407": 442918400.0, + "1408": 442918400.0, + "1409": 442918400.0, + "1410": 442918400.0, + "1411": 442918400.0, + "1412": 442918400.0, + "1413": 442918400.0, + "1414": 442918400.0, + "1415": 442918400.0, + "1416": 442918400.0, + "1417": 442918400.0, + "1418": 442918400.0, + "1419": 442918400.0, + "1420": 442918400.0, + "1421": 442918400.0, + "1422": 442918400.0, + "1423": 442918400.0, + "1424": 442918400.0, + "1425": 442918400.0, + "1426": 442918400.0, + "1427": 442918400.0, + "1428": 442918400.0, + "1429": 442918400.0, + "1430": 442918400.0, + "1431": 442918400.0, + "1432": 442918400.0, + "1433": 442918400.0, + "1434": 442918400.0, + "1435": 442918400.0, + "1436": 442918400.0, + "1437": 442918400.0, + "1438": 442918400.0, + "1439": 442918400.0, + "1440": 442918400.0, + "1441": 442918400.0, + "1442": 442918400.0, + "1443": 442918400.0, + "1444": 442918400.0, + "1445": 442918400.0, + "1446": 442918400.0, + "1447": 442918400.0, + "1448": 442918400.0, + "1449": 442918400.0, + "1450": 442918400.0, + "1451": 442918400.0, + "1452": 442918400.0, + "1453": 442918400.0, + "1454": 442918400.0, + "1455": 442918400.0, + "1456": 442918400.0, + "1457": 442918400.0, + "1458": 442918400.0, + "1459": 442918400.0, + "1460": 442918400.0, + "1461": 442918400.0, + "1462": 442918400.0, + "1463": 442918400.0, + "1464": 442918400.0, + "1465": 442918400.0, + "1466": 442918400.0, + "1467": 442918400.0, + "1468": 442918400.0, + "1469": 442918400.0, + "1470": 442918400.0, + "1471": 442918400.0, + "1472": 442918400.0, + "1473": 442918400.0, + "1474": 442918400.0, + "1475": 442918400.0, + "1476": 442918400.0, + "1477": 442918400.0, + "1478": 
442918400.0, + "1479": 442918400.0, + "1480": 442918400.0, + "1481": 442918400.0, + "1482": 442918400.0, + "1483": 442918400.0, + "1484": 442918400.0, + "1485": 442918400.0, + "1486": 442918400.0, + "1487": 442918400.0, + "1488": 442918400.0, + "1489": 442918400.0, + "1490": 442918400.0, + "1491": 442918400.0, + "1492": 442918400.0, + "1493": 442918400.0, + "1494": 442918400.0, + "1495": 442918400.0, + "1496": 442918400.0, + "1497": 442918400.0, + "1498": 442918400.0, + "1499": 442918400.0, + "1500": 442918400.0, + "1501": 442918400.0, + "1502": 442918400.0, + "1503": 442918400.0, + "1504": 442918400.0, + "1505": 442918400.0, + "1506": 442918400.0, + "1507": 442918400.0, + "1508": 442918400.0, + "1509": 442918400.0, + "1510": 442918400.0, + "1511": 442918400.0, + "1512": 442918400.0, + "1513": 442918400.0, + "1514": 442918400.0, + "1515": 442918400.0, + "1516": 442918400.0, + "1517": 442918400.0, + "1518": 442918400.0, + "1519": 442918400.0, + "1520": 442918400.0, + "1521": 442918400.0, + "1522": 442918400.0, + "1523": 442918400.0, + "1524": 442918400.0, + "1525": 442918400.0, + "1526": 442918400.0, + "1527": 442918400.0, + "1528": 442918400.0, + "1529": 442918400.0, + "1530": 442918400.0, + "1531": 442918400.0, + "1532": 442918400.0, + "1533": 442918400.0, + "1534": 442918400.0, + "1535": 442918400.0, + "1536": 442918400.0, + "1537": 442918400.0, + "1538": 442918400.0, + "1539": 442918400.0, + "1540": 442918400.0, + "1541": 442918400.0, + "1542": 442918400.0, + "1543": 442918400.0, + "1544": 442918400.0, + "1545": 442918400.0, + "1546": 442918400.0, + "1547": 442918400.0, + "1548": 442918400.0, + "1549": 442918400.0, + "1550": 442918400.0, + "1551": 442918400.0, + "1552": 442918400.0, + "1553": 442918400.0, + "1554": 442918400.0, + "1555": 442918400.0, + "1556": 442918400.0, + "1557": 442918400.0, + "1558": 442918400.0, + "1559": 442918400.0, + "1560": 442918400.0, + "1561": 442918400.0, + "1562": 442918400.0, + "1563": 442918400.0, + "1564": 442918400.0, + 
"1565": 442918400.0, + "1566": 442918400.0, + "1567": 442918400.0, + "1568": 442918400.0, + "1569": 442918400.0, + "1570": 442918400.0, + "1571": 442918400.0, + "1572": 442918400.0, + "1573": 442918400.0, + "1574": 442918400.0, + "1575": 442918400.0, + "1576": 442918400.0, + "1577": 442918400.0, + "1578": 442918400.0, + "1579": 442918400.0, + "1580": 442918400.0, + "1581": 442918400.0, + "1582": 442918400.0, + "1583": 442918400.0, + "1584": 442918400.0, + "1585": 442918400.0, + "1586": 442918400.0, + "1587": 442918400.0, + "1588": 442918400.0, + "1589": 442918400.0, + "1590": 442918400.0, + "1591": 442918400.0, + "1592": 442918400.0, + "1593": 442918400.0, + "1594": 442918400.0, + "1595": 442918400.0, + "1596": 442918400.0, + "1597": 442918400.0, + "1598": 442918400.0, + "1599": 442918400.0, + "1600": 442918400.0, + "1601": 442918400.0, + "1602": 442918400.0, + "1603": 442918400.0, + "1604": 442918400.0, + "1605": 442918400.0, + "1606": 442918400.0, + "1607": 442918400.0, + "1608": 442918400.0, + "1609": 442918400.0, + "1610": 442918400.0, + "1611": 442918400.0, + "1612": 442918400.0, + "1613": 442918400.0, + "1614": 442918400.0, + "1615": 442918400.0, + "1616": 442918400.0, + "1617": 442918400.0, + "1618": 442918400.0, + "1619": 442918400.0, + "1620": 442918400.0, + "1621": 442918400.0, + "1622": 442918400.0, + "1623": 442918400.0, + "1624": 442918400.0, + "1625": 442918400.0, + "1626": 442918400.0, + "1627": 442918400.0, + "1628": 442918400.0, + "1629": 442918400.0, + "1630": 442918400.0, + "1631": 442918400.0, + "1632": 442918400.0, + "1633": 442918400.0, + "1634": 442918400.0, + "1635": 442918400.0, + "1636": 442918400.0, + "1637": 442918400.0, + "1638": 442918400.0, + "1639": 442918400.0, + "1640": 442918400.0, + "1641": 442918400.0, + "1642": 442918400.0, + "1643": 442918400.0, + "1644": 442918400.0, + "1645": 442918400.0, + "1646": 442918400.0, + "1647": 442918400.0, + "1648": 442918400.0, + "1649": 442918400.0, + "1650": 442918400.0, + "1651": 442918400.0, 
+ "1652": 442918400.0, + "1653": 442918400.0, + "1654": 442918400.0, + "1655": 442918400.0, + "1656": 442918400.0, + "1657": 442918400.0, + "1658": 442918400.0, + "1659": 442918400.0, + "1660": 442918400.0, + "1661": 442918400.0, + "1662": 442918400.0, + "1663": 442918400.0, + "1664": 442918400.0, + "1665": 442918400.0, + "1666": 442918400.0, + "1667": 442918400.0, + "1668": 442918400.0, + "1669": 442918400.0, + "1670": 442918400.0, + "1671": 442918400.0, + "1672": 442918400.0, + "1673": 442918400.0, + "1674": 442918400.0, + "1675": 442918400.0, + "1676": 442918400.0, + "1677": 442918400.0, + "1678": 442918400.0, + "1679": 442918400.0, + "1680": 442918400.0, + "1681": 442918400.0, + "1682": 442918400.0, + "1683": 442918400.0, + "1684": 442918400.0, + "1685": 442918400.0, + "1686": 442918400.0, + "1687": 442918400.0, + "1688": 442918400.0, + "1689": 442918400.0, + "1690": 442918400.0, + "1691": 442918400.0, + "1692": 442918400.0, + "1693": 442918400.0, + "1694": 442918400.0, + "1695": 442918400.0, + "1696": 442918400.0, + "1697": 442918400.0, + "1698": 442918400.0, + "1699": 442918400.0, + "1700": 442918400.0, + "1701": 442918400.0, + "1702": 442918400.0, + "1703": 442918400.0, + "1704": 442918400.0, + "1705": 442918400.0, + "1706": 442918400.0, + "1707": 442918400.0, + "1708": 442918400.0, + "1709": 442918400.0, + "1710": 442918400.0, + "1711": 442918400.0, + "1712": 442918400.0, + "1713": 442918400.0, + "1714": 442918400.0, + "1715": 442918400.0, + "1716": 442918400.0, + "1717": 442918400.0, + "1718": 442918400.0, + "1719": 442918400.0, + "1720": 442918400.0, + "1721": 442918400.0, + "1722": 442918400.0, + "1723": 442918400.0, + "1724": 442918400.0, + "1725": 442918400.0, + "1726": 442918400.0, + "1727": 442918400.0, + "1728": 442918400.0, + "1729": 442918400.0, + "1730": 442918400.0, + "1731": 442918400.0, + "1732": 442918400.0, + "1733": 442918400.0, + "1734": 442918400.0, + "1735": 442918400.0, + "1736": 442918400.0, + "1737": 442918400.0, + "1738": 
442918400.0, + "1739": 442918400.0, + "1740": 442918400.0, + "1741": 442918400.0, + "1742": 442918400.0, + "1743": 442918400.0, + "1744": 442918400.0, + "1745": 442918400.0, + "1746": 442918400.0, + "1747": 442918400.0, + "1748": 442918400.0, + "1749": 442918400.0, + "1750": 442918400.0, + "1751": 442918400.0, + "1752": 442918400.0, + "1753": 442918400.0, + "1754": 442918400.0, + "1755": 442918400.0, + "1756": 442918400.0, + "1757": 442918400.0, + "1758": 442918400.0, + "1759": 442918400.0, + "1760": 442918400.0, + "1761": 442918400.0, + "1762": 442918400.0, + "1763": 442918400.0, + "1764": 442918400.0, + "1765": 442918400.0, + "1766": 442918400.0, + "1767": 442918400.0, + "1768": 442918400.0, + "1769": 442918400.0, + "1770": 442918400.0, + "1771": 442918400.0, + "1772": 442918400.0, + "1773": 442918400.0, + "1774": 442918400.0, + "1775": 442918400.0, + "1776": 442918400.0, + "1777": 442918400.0, + "1778": 442918400.0, + "1779": 442918400.0, + "1780": 442918400.0, + "1781": 442918400.0, + "1782": 442918400.0, + "1783": 442918400.0, + "1784": 442918400.0, + "1785": 442918400.0, + "1786": 442918400.0, + "1787": 442918400.0, + "1788": 442918400.0, + "1789": 442918400.0, + "1790": 442918400.0, + "1791": 442918400.0, + "1792": 442918400.0, + "1793": 442918400.0, + "1794": 442918400.0, + "1795": 442918400.0, + "1796": 442918400.0, + "1797": 442918400.0, + "1798": 442918400.0, + "1799": 442918400.0, + "1800": 442918400.0, + "1801": 442918400.0, + "1802": 442918400.0, + "1803": 442918400.0, + "1804": 442918400.0, + "1805": 442918400.0, + "1806": 442918400.0, + "1807": 442918400.0, + "1808": 442918400.0, + "1809": 442918400.0, + "1810": 442918400.0, + "1811": 442918400.0, + "1812": 442918400.0, + "1813": 442918400.0, + "1814": 442918400.0, + "1815": 442918400.0, + "1816": 442918400.0, + "1817": 442918400.0, + "1818": 442918400.0, + "1819": 442918400.0, + "1820": 442918400.0, + "1821": 442918400.0, + "1822": 442918400.0, + "1823": 442918400.0, + "1824": 442918400.0, + 
"1825": 442918400.0, + "1826": 442918400.0, + "1827": 442918400.0, + "1828": 442918400.0, + "1829": 442918400.0, + "1830": 442918400.0, + "1831": 442918400.0, + "1832": 442918400.0, + "1833": 442918400.0, + "1834": 442918400.0, + "1835": 442918400.0, + "1836": 442918400.0, + "1837": 442918400.0, + "1838": 442918400.0, + "1839": 442918400.0, + "1840": 442918400.0, + "1841": 442918400.0, + "1842": 442918400.0, + "1843": 442918400.0, + "1844": 442918400.0, + "1845": 442918400.0, + "1846": 442918400.0, + "1847": 442918400.0, + "1848": 442918400.0, + "1849": 442918400.0, + "1850": 442918400.0, + "1851": 442918400.0, + "1852": 442918400.0, + "1853": 442918400.0, + "1854": 442918400.0, + "1855": 442918400.0, + "1856": 442918400.0, + "1857": 442918400.0, + "1858": 442918400.0, + "1859": 442918400.0, + "1860": 442918400.0, + "1861": 442918400.0, + "1862": 442918400.0, + "1863": 442918400.0, + "1864": 442918400.0, + "1865": 442918400.0, + "1866": 442918400.0, + "1867": 442918400.0, + "1868": 442918400.0, + "1869": 442918400.0, + "1870": 442918400.0, + "1871": 442918400.0, + "1872": 442918400.0, + "1873": 442918400.0, + "1874": 442918400.0, + "1875": 442918400.0, + "1876": 442918400.0, + "1877": 442918400.0, + "1878": 442918400.0, + "1879": 442918400.0, + "1880": 442918400.0, + "1881": 442918400.0, + "1882": 442918400.0, + "1883": 442918400.0, + "1884": 442918400.0, + "1885": 442918400.0, + "1886": 442918400.0, + "1887": 442918400.0, + "1888": 442918400.0, + "1889": 442918400.0, + "1890": 442918400.0, + "1891": 442918400.0, + "1892": 442918400.0, + "1893": 442918400.0, + "1894": 442918400.0, + "1895": 442918400.0, + "1896": 442918400.0, + "1897": 442918400.0, + "1898": 442918400.0, + "1899": 442918400.0, + "1900": 442918400.0, + "1901": 442918400.0, + "1902": 442918400.0, + "1903": 442918400.0, + "1904": 442918400.0, + "1905": 442918400.0, + "1906": 442918400.0, + "1907": 442918400.0, + "1908": 442918400.0, + "1909": 442918400.0, + "1910": 442918400.0, + "1911": 442918400.0, 
+ "1912": 442918400.0, + "1913": 442918400.0, + "1914": 442918400.0, + "1915": 442918400.0, + "1916": 442918400.0, + "1917": 442918400.0, + "1918": 442918400.0, + "1919": 442918400.0, + "1920": 442918400.0, + "1921": 442918400.0, + "1922": 442918400.0, + "1923": 442918400.0, + "1924": 442918400.0, + "1925": 442918400.0, + "1926": 442918400.0, + "1927": 442918400.0, + "1928": 442918400.0, + "1929": 442918400.0, + "1930": 442918400.0, + "1931": 442918400.0, + "1932": 442918400.0, + "1933": 442918400.0, + "1934": 442918400.0, + "1935": 442918400.0, + "1936": 442918400.0, + "1937": 442918400.0, + "1938": 442918400.0, + "1939": 442918400.0, + "1940": 442918400.0, + "1941": 442918400.0, + "1942": 442918400.0, + "1943": 442918400.0, + "1944": 442918400.0, + "1945": 442918400.0, + "1946": 442918400.0, + "1947": 442918400.0, + "1948": 442918400.0, + "1949": 442918400.0, + "1950": 442918400.0, + "1951": 442918400.0, + "1952": 442918400.0, + "1953": 442918400.0, + "1954": 442918400.0, + "1955": 442918400.0, + "1956": 442918400.0, + "1957": 442918400.0, + "1958": 442918400.0, + "1959": 442918400.0, + "1960": 442918400.0, + "1961": 442918400.0, + "1962": 442918400.0, + "1963": 442918400.0, + "1964": 442918400.0, + "1965": 442918400.0, + "1966": 442918400.0, + "1967": 442918400.0, + "1968": 442918400.0, + "1969": 442918400.0, + "1970": 442918400.0, + "1971": 442918400.0, + "1972": 442918400.0, + "1973": 442918400.0, + "1974": 442918400.0, + "1975": 442918400.0, + "1976": 442918400.0, + "1977": 442918400.0, + "1978": 442918400.0, + "1979": 442918400.0, + "1980": 442918400.0, + "1981": 442918400.0, + "1982": 442918400.0, + "1983": 442918400.0, + "1984": 442918400.0, + "1985": 442918400.0, + "1986": 442918400.0, + "1987": 442918400.0, + "1988": 442918400.0, + "1989": 442918400.0, + "1990": 442918400.0, + "1991": 442918400.0, + "1992": 442918400.0, + "1993": 442918400.0, + "1994": 442918400.0, + "1995": 442918400.0, + "1996": 442918400.0, + "1997": 442918400.0, + "1998": 
442918400.0, + "1999": 442918400.0, + "2000": 442918400.0 + } + }, + "mem-max-allocated-bytes": { + "start_step": 1, + "end_step": 2000, + "step_interval": 1, + "values": { + "1": 761183744.0, + "2": 849621504.0, + "3": 849621504.0, + "4": 849621504.0, + "5": 849621504.0, + "6": 849621504.0, + "7": 849621504.0, + "8": 849621504.0, + "9": 849621504.0, + "10": 849621504.0, + "11": 849621504.0, + "12": 849621504.0, + "13": 849621504.0, + "14": 849621504.0, + "15": 849621504.0, + "16": 849621504.0, + "17": 849621504.0, + "18": 849621504.0, + "19": 849621504.0, + "20": 849621504.0, + "21": 849621504.0, + "22": 849621504.0, + "23": 849621504.0, + "24": 849621504.0, + "25": 849621504.0, + "26": 849621504.0, + "27": 849621504.0, + "28": 849621504.0, + "29": 849621504.0, + "30": 849621504.0, + "31": 849621504.0, + "32": 849621504.0, + "33": 849621504.0, + "34": 849621504.0, + "35": 849621504.0, + "36": 849621504.0, + "37": 849621504.0, + "38": 849621504.0, + "39": 849621504.0, + "40": 849621504.0, + "41": 849621504.0, + "42": 849621504.0, + "43": 849621504.0, + "44": 849621504.0, + "45": 849621504.0, + "46": 849621504.0, + "47": 849621504.0, + "48": 849621504.0, + "49": 849621504.0, + "50": 849621504.0, + "51": 849621504.0, + "52": 849621504.0, + "53": 849621504.0, + "54": 849621504.0, + "55": 849621504.0, + "56": 849621504.0, + "57": 849621504.0, + "58": 849621504.0, + "59": 849621504.0, + "60": 849621504.0, + "61": 849621504.0, + "62": 849621504.0, + "63": 849621504.0, + "64": 849621504.0, + "65": 849621504.0, + "66": 849621504.0, + "67": 849621504.0, + "68": 849621504.0, + "69": 849621504.0, + "70": 849621504.0, + "71": 849621504.0, + "72": 849621504.0, + "73": 849621504.0, + "74": 849621504.0, + "75": 849621504.0, + "76": 849621504.0, + "77": 849621504.0, + "78": 849621504.0, + "79": 849621504.0, + "80": 849621504.0, + "81": 849621504.0, + "82": 849621504.0, + "83": 849621504.0, + "84": 849621504.0, + "85": 849621504.0, + "86": 849621504.0, + "87": 849621504.0, + "88": 
849621504.0, + "89": 849621504.0, + "90": 849621504.0, + "91": 849621504.0, + "92": 849621504.0, + "93": 849621504.0, + "94": 849621504.0, + "95": 849621504.0, + "96": 849621504.0, + "97": 849621504.0, + "98": 849621504.0, + "99": 849621504.0, + "100": 849621504.0, + "101": 849621504.0, + "102": 849621504.0, + "103": 849621504.0, + "104": 849621504.0, + "105": 849621504.0, + "106": 849621504.0, + "107": 849621504.0, + "108": 849621504.0, + "109": 849621504.0, + "110": 849621504.0, + "111": 849621504.0, + "112": 849621504.0, + "113": 849621504.0, + "114": 849621504.0, + "115": 849621504.0, + "116": 849621504.0, + "117": 849621504.0, + "118": 849621504.0, + "119": 849621504.0, + "120": 849621504.0, + "121": 849621504.0, + "122": 849621504.0, + "123": 849621504.0, + "124": 849621504.0, + "125": 849621504.0, + "126": 849621504.0, + "127": 849621504.0, + "128": 849621504.0, + "129": 849621504.0, + "130": 849621504.0, + "131": 849621504.0, + "132": 849621504.0, + "133": 849621504.0, + "134": 849621504.0, + "135": 849621504.0, + "136": 849621504.0, + "137": 849621504.0, + "138": 849621504.0, + "139": 849621504.0, + "140": 849621504.0, + "141": 849621504.0, + "142": 849621504.0, + "143": 849621504.0, + "144": 849621504.0, + "145": 849621504.0, + "146": 849621504.0, + "147": 849621504.0, + "148": 849621504.0, + "149": 849621504.0, + "150": 849621504.0, + "151": 849621504.0, + "152": 849621504.0, + "153": 849621504.0, + "154": 849621504.0, + "155": 849621504.0, + "156": 849621504.0, + "157": 849621504.0, + "158": 849621504.0, + "159": 849621504.0, + "160": 849621504.0, + "161": 849621504.0, + "162": 849621504.0, + "163": 849621504.0, + "164": 849621504.0, + "165": 849621504.0, + "166": 849621504.0, + "167": 849621504.0, + "168": 849621504.0, + "169": 849621504.0, + "170": 849621504.0, + "171": 849621504.0, + "172": 849621504.0, + "173": 849621504.0, + "174": 849621504.0, + "175": 849621504.0, + "176": 849621504.0, + "177": 849621504.0, + "178": 849621504.0, + "179": 
849621504.0, + "180": 849621504.0, + "181": 849621504.0, + "182": 849621504.0, + "183": 849621504.0, + "184": 849621504.0, + "185": 849621504.0, + "186": 849621504.0, + "187": 849621504.0, + "188": 849621504.0, + "189": 849621504.0, + "190": 849621504.0, + "191": 849621504.0, + "192": 849621504.0, + "193": 849621504.0, + "194": 849621504.0, + "195": 849621504.0, + "196": 849621504.0, + "197": 849621504.0, + "198": 849621504.0, + "199": 849621504.0, + "200": 849621504.0, + "201": 849621504.0, + "202": 849621504.0, + "203": 849621504.0, + "204": 849621504.0, + "205": 849621504.0, + "206": 849621504.0, + "207": 849621504.0, + "208": 849621504.0, + "209": 849621504.0, + "210": 849621504.0, + "211": 849621504.0, + "212": 849621504.0, + "213": 849621504.0, + "214": 849621504.0, + "215": 849621504.0, + "216": 849621504.0, + "217": 849621504.0, + "218": 849621504.0, + "219": 849621504.0, + "220": 849621504.0, + "221": 849621504.0, + "222": 849621504.0, + "223": 849621504.0, + "224": 849621504.0, + "225": 849621504.0, + "226": 849621504.0, + "227": 849621504.0, + "228": 849621504.0, + "229": 849621504.0, + "230": 849621504.0, + "231": 849621504.0, + "232": 849621504.0, + "233": 849621504.0, + "234": 849621504.0, + "235": 849621504.0, + "236": 849621504.0, + "237": 849621504.0, + "238": 849621504.0, + "239": 849621504.0, + "240": 849621504.0, + "241": 849621504.0, + "242": 849621504.0, + "243": 849621504.0, + "244": 849621504.0, + "245": 849621504.0, + "246": 849621504.0, + "247": 849621504.0, + "248": 849621504.0, + "249": 849621504.0, + "250": 849621504.0, + "251": 849621504.0, + "252": 849621504.0, + "253": 849621504.0, + "254": 849621504.0, + "255": 849621504.0, + "256": 849621504.0, + "257": 849621504.0, + "258": 849621504.0, + "259": 849621504.0, + "260": 849621504.0, + "261": 849621504.0, + "262": 849621504.0, + "263": 849621504.0, + "264": 849621504.0, + "265": 849621504.0, + "266": 849621504.0, + "267": 849621504.0, + "268": 849621504.0, + "269": 849621504.0, + 
"270": 849621504.0, + "271": 849621504.0, + "272": 849621504.0, + "273": 849621504.0, + "274": 849621504.0, + "275": 849621504.0, + "276": 849621504.0, + "277": 849621504.0, + "278": 849621504.0, + "279": 849621504.0, + "280": 849621504.0, + "281": 849621504.0, + "282": 849621504.0, + "283": 849621504.0, + "284": 849621504.0, + "285": 849621504.0, + "286": 849621504.0, + "287": 849621504.0, + "288": 849621504.0, + "289": 849621504.0, + "290": 849621504.0, + "291": 849621504.0, + "292": 849621504.0, + "293": 849621504.0, + "294": 849621504.0, + "295": 849621504.0, + "296": 849621504.0, + "297": 849621504.0, + "298": 849621504.0, + "299": 849621504.0, + "300": 849621504.0, + "301": 849621504.0, + "302": 849621504.0, + "303": 849621504.0, + "304": 849621504.0, + "305": 849621504.0, + "306": 849621504.0, + "307": 849621504.0, + "308": 849621504.0, + "309": 849621504.0, + "310": 849621504.0, + "311": 849621504.0, + "312": 849621504.0, + "313": 849621504.0, + "314": 849621504.0, + "315": 849621504.0, + "316": 849621504.0, + "317": 849621504.0, + "318": 849621504.0, + "319": 849621504.0, + "320": 849621504.0, + "321": 849621504.0, + "322": 849621504.0, + "323": 849621504.0, + "324": 849621504.0, + "325": 849621504.0, + "326": 849621504.0, + "327": 849621504.0, + "328": 849621504.0, + "329": 849621504.0, + "330": 849621504.0, + "331": 849621504.0, + "332": 849621504.0, + "333": 849621504.0, + "334": 849621504.0, + "335": 849621504.0, + "336": 849621504.0, + "337": 849621504.0, + "338": 849621504.0, + "339": 849621504.0, + "340": 849621504.0, + "341": 849621504.0, + "342": 849621504.0, + "343": 849621504.0, + "344": 849621504.0, + "345": 849621504.0, + "346": 849621504.0, + "347": 849621504.0, + "348": 849621504.0, + "349": 849621504.0, + "350": 849621504.0, + "351": 849621504.0, + "352": 849621504.0, + "353": 849621504.0, + "354": 849621504.0, + "355": 849621504.0, + "356": 849621504.0, + "357": 849621504.0, + "358": 849621504.0, + "359": 849621504.0, + "360": 849621504.0, 
+ "361": 849621504.0, + "362": 849621504.0, + "363": 849621504.0, + "364": 849621504.0, + "365": 849621504.0, + "366": 849621504.0, + "367": 849621504.0, + "368": 849621504.0, + "369": 849621504.0, + "370": 849621504.0, + "371": 849621504.0, + "372": 849621504.0, + "373": 849621504.0, + "374": 849621504.0, + "375": 849621504.0, + "376": 849621504.0, + "377": 849621504.0, + "378": 849621504.0, + "379": 849621504.0, + "380": 849621504.0, + "381": 849621504.0, + "382": 849621504.0, + "383": 849621504.0, + "384": 849621504.0, + "385": 849621504.0, + "386": 849621504.0, + "387": 849621504.0, + "388": 849621504.0, + "389": 849621504.0, + "390": 849621504.0, + "391": 849621504.0, + "392": 849621504.0, + "393": 849621504.0, + "394": 849621504.0, + "395": 849621504.0, + "396": 849621504.0, + "397": 849621504.0, + "398": 849621504.0, + "399": 849621504.0, + "400": 849621504.0, + "401": 849621504.0, + "402": 849621504.0, + "403": 849621504.0, + "404": 849621504.0, + "405": 849621504.0, + "406": 849621504.0, + "407": 849621504.0, + "408": 849621504.0, + "409": 849621504.0, + "410": 849621504.0, + "411": 849621504.0, + "412": 849621504.0, + "413": 849621504.0, + "414": 849621504.0, + "415": 849621504.0, + "416": 849621504.0, + "417": 849621504.0, + "418": 849621504.0, + "419": 849621504.0, + "420": 849621504.0, + "421": 849621504.0, + "422": 849621504.0, + "423": 849621504.0, + "424": 849621504.0, + "425": 849621504.0, + "426": 849621504.0, + "427": 849621504.0, + "428": 849621504.0, + "429": 849621504.0, + "430": 849621504.0, + "431": 849621504.0, + "432": 849621504.0, + "433": 849621504.0, + "434": 849621504.0, + "435": 849621504.0, + "436": 849621504.0, + "437": 849621504.0, + "438": 849621504.0, + "439": 849621504.0, + "440": 849621504.0, + "441": 849621504.0, + "442": 849621504.0, + "443": 849621504.0, + "444": 849621504.0, + "445": 849621504.0, + "446": 849621504.0, + "447": 849621504.0, + "448": 849621504.0, + "449": 849621504.0, + "450": 849621504.0, + "451": 
849621504.0, + "452": 849621504.0, + "453": 849621504.0, + "454": 849621504.0, + "455": 849621504.0, + "456": 849621504.0, + "457": 849621504.0, + "458": 849621504.0, + "459": 849621504.0, + "460": 849621504.0, + "461": 849621504.0, + "462": 849621504.0, + "463": 849621504.0, + "464": 849621504.0, + "465": 849621504.0, + "466": 849621504.0, + "467": 849621504.0, + "468": 849621504.0, + "469": 849621504.0, + "470": 849621504.0, + "471": 849621504.0, + "472": 849621504.0, + "473": 849621504.0, + "474": 849621504.0, + "475": 849621504.0, + "476": 849621504.0, + "477": 849621504.0, + "478": 849621504.0, + "479": 849621504.0, + "480": 849621504.0, + "481": 849621504.0, + "482": 849621504.0, + "483": 849621504.0, + "484": 849621504.0, + "485": 849621504.0, + "486": 849621504.0, + "487": 849621504.0, + "488": 849621504.0, + "489": 849621504.0, + "490": 849621504.0, + "491": 849621504.0, + "492": 849621504.0, + "493": 849621504.0, + "494": 849621504.0, + "495": 849621504.0, + "496": 849621504.0, + "497": 849621504.0, + "498": 849621504.0, + "499": 849621504.0, + "500": 849621504.0, + "501": 849621504.0, + "502": 849621504.0, + "503": 849621504.0, + "504": 849621504.0, + "505": 849621504.0, + "506": 849621504.0, + "507": 849621504.0, + "508": 849621504.0, + "509": 849621504.0, + "510": 849621504.0, + "511": 849621504.0, + "512": 849621504.0, + "513": 849621504.0, + "514": 849621504.0, + "515": 849621504.0, + "516": 849621504.0, + "517": 849621504.0, + "518": 849621504.0, + "519": 849621504.0, + "520": 849621504.0, + "521": 849621504.0, + "522": 849621504.0, + "523": 849621504.0, + "524": 849621504.0, + "525": 849621504.0, + "526": 849621504.0, + "527": 849621504.0, + "528": 849621504.0, + "529": 849621504.0, + "530": 849621504.0, + "531": 849621504.0, + "532": 849621504.0, + "533": 849621504.0, + "534": 849621504.0, + "535": 849621504.0, + "536": 849621504.0, + "537": 849621504.0, + "538": 849621504.0, + "539": 849621504.0, + "540": 849621504.0, + "541": 849621504.0, + 
"542": 849621504.0, + "543": 849621504.0, + "544": 849621504.0, + "545": 849621504.0, + "546": 849621504.0, + "547": 849621504.0, + "548": 849621504.0, + "549": 849621504.0, + "550": 849621504.0, + "551": 849621504.0, + "552": 849621504.0, + "553": 849621504.0, + "554": 849621504.0, + "555": 849621504.0, + "556": 849621504.0, + "557": 849621504.0, + "558": 849621504.0, + "559": 849621504.0, + "560": 849621504.0, + "561": 849621504.0, + "562": 849621504.0, + "563": 849621504.0, + "564": 849621504.0, + "565": 849621504.0, + "566": 849621504.0, + "567": 849621504.0, + "568": 849621504.0, + "569": 849621504.0, + "570": 849621504.0, + "571": 849621504.0, + "572": 849621504.0, + "573": 849621504.0, + "574": 849621504.0, + "575": 849621504.0, + "576": 849621504.0, + "577": 849621504.0, + "578": 849621504.0, + "579": 849621504.0, + "580": 849621504.0, + "581": 849621504.0, + "582": 849621504.0, + "583": 849621504.0, + "584": 849621504.0, + "585": 849621504.0, + "586": 849621504.0, + "587": 849621504.0, + "588": 849621504.0, + "589": 849621504.0, + "590": 849621504.0, + "591": 849621504.0, + "592": 849621504.0, + "593": 849621504.0, + "594": 849621504.0, + "595": 849621504.0, + "596": 849621504.0, + "597": 849621504.0, + "598": 849621504.0, + "599": 849621504.0, + "600": 849621504.0, + "601": 849621504.0, + "602": 849621504.0, + "603": 849621504.0, + "604": 849621504.0, + "605": 849621504.0, + "606": 849621504.0, + "607": 849621504.0, + "608": 849621504.0, + "609": 849621504.0, + "610": 849621504.0, + "611": 849621504.0, + "612": 849621504.0, + "613": 849621504.0, + "614": 849621504.0, + "615": 849621504.0, + "616": 849621504.0, + "617": 849621504.0, + "618": 849621504.0, + "619": 849621504.0, + "620": 849621504.0, + "621": 849621504.0, + "622": 849621504.0, + "623": 849621504.0, + "624": 849621504.0, + "625": 849621504.0, + "626": 849621504.0, + "627": 849621504.0, + "628": 849621504.0, + "629": 849621504.0, + "630": 849621504.0, + "631": 849621504.0, + "632": 849621504.0, 
+ "633": 849621504.0, + "634": 849621504.0, + "635": 849621504.0, + "636": 849621504.0, + "637": 849621504.0, + "638": 849621504.0, + "639": 849621504.0, + "640": 849621504.0, + "641": 849621504.0, + "642": 849621504.0, + "643": 849621504.0, + "644": 849621504.0, + "645": 849621504.0, + "646": 849621504.0, + "647": 849621504.0, + "648": 849621504.0, + "649": 849621504.0, + "650": 849621504.0, + "651": 849621504.0, + "652": 849621504.0, + "653": 849621504.0, + "654": 849621504.0, + "655": 849621504.0, + "656": 849621504.0, + "657": 849621504.0, + "658": 849621504.0, + "659": 849621504.0, + "660": 849621504.0, + "661": 849621504.0, + "662": 849621504.0, + "663": 849621504.0, + "664": 849621504.0, + "665": 849621504.0, + "666": 849621504.0, + "667": 849621504.0, + "668": 849621504.0, + "669": 849621504.0, + "670": 849621504.0, + "671": 849621504.0, + "672": 849621504.0, + "673": 849621504.0, + "674": 849621504.0, + "675": 849621504.0, + "676": 849621504.0, + "677": 849621504.0, + "678": 849621504.0, + "679": 849621504.0, + "680": 849621504.0, + "681": 849621504.0, + "682": 849621504.0, + "683": 849621504.0, + "684": 849621504.0, + "685": 849621504.0, + "686": 849621504.0, + "687": 849621504.0, + "688": 849621504.0, + "689": 849621504.0, + "690": 849621504.0, + "691": 849621504.0, + "692": 849621504.0, + "693": 849621504.0, + "694": 849621504.0, + "695": 849621504.0, + "696": 849621504.0, + "697": 849621504.0, + "698": 849621504.0, + "699": 849621504.0, + "700": 849621504.0, + "701": 849621504.0, + "702": 849621504.0, + "703": 849621504.0, + "704": 849621504.0, + "705": 849621504.0, + "706": 849621504.0, + "707": 849621504.0, + "708": 849621504.0, + "709": 849621504.0, + "710": 849621504.0, + "711": 849621504.0, + "712": 849621504.0, + "713": 849621504.0, + "714": 849621504.0, + "715": 849621504.0, + "716": 849621504.0, + "717": 849621504.0, + "718": 849621504.0, + "719": 849621504.0, + "720": 849621504.0, + "721": 849621504.0, + "722": 849621504.0, + "723": 
849621504.0, + "724": 849621504.0, + "725": 849621504.0, + "726": 849621504.0, + "727": 849621504.0, + "728": 849621504.0, + "729": 849621504.0, + "730": 849621504.0, + "731": 849621504.0, + "732": 849621504.0, + "733": 849621504.0, + "734": 849621504.0, + "735": 849621504.0, + "736": 849621504.0, + "737": 849621504.0, + "738": 849621504.0, + "739": 849621504.0, + "740": 849621504.0, + "741": 849621504.0, + "742": 849621504.0, + "743": 849621504.0, + "744": 849621504.0, + "745": 849621504.0, + "746": 849621504.0, + "747": 849621504.0, + "748": 849621504.0, + "749": 849621504.0, + "750": 849621504.0, + "751": 849621504.0, + "752": 849621504.0, + "753": 849621504.0, + "754": 849621504.0, + "755": 849621504.0, + "756": 849621504.0, + "757": 849621504.0, + "758": 849621504.0, + "759": 849621504.0, + "760": 849621504.0, + "761": 849621504.0, + "762": 849621504.0, + "763": 849621504.0, + "764": 849621504.0, + "765": 849621504.0, + "766": 849621504.0, + "767": 849621504.0, + "768": 849621504.0, + "769": 849621504.0, + "770": 849621504.0, + "771": 849621504.0, + "772": 849621504.0, + "773": 849621504.0, + "774": 849621504.0, + "775": 849621504.0, + "776": 849621504.0, + "777": 849621504.0, + "778": 849621504.0, + "779": 849621504.0, + "780": 849621504.0, + "781": 849621504.0, + "782": 849621504.0, + "783": 849621504.0, + "784": 849621504.0, + "785": 849621504.0, + "786": 849621504.0, + "787": 849621504.0, + "788": 849621504.0, + "789": 849621504.0, + "790": 849621504.0, + "791": 849621504.0, + "792": 849621504.0, + "793": 849621504.0, + "794": 849621504.0, + "795": 849621504.0, + "796": 849621504.0, + "797": 849621504.0, + "798": 849621504.0, + "799": 849621504.0, + "800": 849621504.0, + "801": 849621504.0, + "802": 849621504.0, + "803": 849621504.0, + "804": 849621504.0, + "805": 849621504.0, + "806": 849621504.0, + "807": 849621504.0, + "808": 849621504.0, + "809": 849621504.0, + "810": 849621504.0, + "811": 849621504.0, + "812": 849621504.0, + "813": 849621504.0, + 
"814": 849621504.0, + "815": 849621504.0, + "816": 849621504.0, + "817": 849621504.0, + "818": 849621504.0, + "819": 849621504.0, + "820": 849621504.0, + "821": 849621504.0, + "822": 849621504.0, + "823": 849621504.0, + "824": 849621504.0, + "825": 849621504.0, + "826": 849621504.0, + "827": 849621504.0, + "828": 849621504.0, + "829": 849621504.0, + "830": 849621504.0, + "831": 849621504.0, + "832": 849621504.0, + "833": 849621504.0, + "834": 849621504.0, + "835": 849621504.0, + "836": 849621504.0, + "837": 849621504.0, + "838": 849621504.0, + "839": 849621504.0, + "840": 849621504.0, + "841": 849621504.0, + "842": 849621504.0, + "843": 849621504.0, + "844": 849621504.0, + "845": 849621504.0, + "846": 849621504.0, + "847": 849621504.0, + "848": 849621504.0, + "849": 849621504.0, + "850": 849621504.0, + "851": 849621504.0, + "852": 849621504.0, + "853": 849621504.0, + "854": 849621504.0, + "855": 849621504.0, + "856": 849621504.0, + "857": 849621504.0, + "858": 849621504.0, + "859": 849621504.0, + "860": 849621504.0, + "861": 849621504.0, + "862": 849621504.0, + "863": 849621504.0, + "864": 849621504.0, + "865": 849621504.0, + "866": 849621504.0, + "867": 849621504.0, + "868": 849621504.0, + "869": 849621504.0, + "870": 849621504.0, + "871": 849621504.0, + "872": 849621504.0, + "873": 849621504.0, + "874": 849621504.0, + "875": 849621504.0, + "876": 849621504.0, + "877": 849621504.0, + "878": 849621504.0, + "879": 849621504.0, + "880": 849621504.0, + "881": 849621504.0, + "882": 849621504.0, + "883": 849621504.0, + "884": 849621504.0, + "885": 849621504.0, + "886": 849621504.0, + "887": 849621504.0, + "888": 849621504.0, + "889": 849621504.0, + "890": 849621504.0, + "891": 849621504.0, + "892": 849621504.0, + "893": 849621504.0, + "894": 849621504.0, + "895": 849621504.0, + "896": 849621504.0, + "897": 849621504.0, + "898": 849621504.0, + "899": 849621504.0, + "900": 849621504.0, + "901": 849621504.0, + "902": 849621504.0, + "903": 849621504.0, + "904": 849621504.0, 
+ "905": 849621504.0, + "906": 849621504.0, + "907": 849621504.0, + "908": 849621504.0, + "909": 849621504.0, + "910": 849621504.0, + "911": 849621504.0, + "912": 849621504.0, + "913": 849621504.0, + "914": 849621504.0, + "915": 849621504.0, + "916": 849621504.0, + "917": 849621504.0, + "918": 849621504.0, + "919": 849621504.0, + "920": 849621504.0, + "921": 849621504.0, + "922": 849621504.0, + "923": 849621504.0, + "924": 849621504.0, + "925": 849621504.0, + "926": 849621504.0, + "927": 849621504.0, + "928": 849621504.0, + "929": 849621504.0, + "930": 849621504.0, + "931": 849621504.0, + "932": 849621504.0, + "933": 849621504.0, + "934": 849621504.0, + "935": 849621504.0, + "936": 849621504.0, + "937": 849621504.0, + "938": 849621504.0, + "939": 849621504.0, + "940": 849621504.0, + "941": 849621504.0, + "942": 849621504.0, + "943": 849621504.0, + "944": 849621504.0, + "945": 849621504.0, + "946": 849621504.0, + "947": 849621504.0, + "948": 849621504.0, + "949": 849621504.0, + "950": 849621504.0, + "951": 849621504.0, + "952": 849621504.0, + "953": 849621504.0, + "954": 849621504.0, + "955": 849621504.0, + "956": 849621504.0, + "957": 849621504.0, + "958": 849621504.0, + "959": 849621504.0, + "960": 849621504.0, + "961": 849621504.0, + "962": 849621504.0, + "963": 849621504.0, + "964": 849621504.0, + "965": 849621504.0, + "966": 849621504.0, + "967": 849621504.0, + "968": 849621504.0, + "969": 849621504.0, + "970": 849621504.0, + "971": 849621504.0, + "972": 849621504.0, + "973": 849621504.0, + "974": 849621504.0, + "975": 849621504.0, + "976": 849621504.0, + "977": 849621504.0, + "978": 849621504.0, + "979": 849621504.0, + "980": 849621504.0, + "981": 849621504.0, + "982": 849621504.0, + "983": 849621504.0, + "984": 849621504.0, + "985": 849621504.0, + "986": 849621504.0, + "987": 849621504.0, + "988": 849621504.0, + "989": 849621504.0, + "990": 849621504.0, + "991": 849621504.0, + "992": 849621504.0, + "993": 849621504.0, + "994": 849621504.0, + "995": 
849621504.0, + "996": 849621504.0, + "997": 849621504.0, + "998": 849621504.0, + "999": 849621504.0, + "1000": 849621504.0, + "1001": 849621504.0, + "1002": 849621504.0, + "1003": 849621504.0, + "1004": 849621504.0, + "1005": 849621504.0, + "1006": 849621504.0, + "1007": 849621504.0, + "1008": 849621504.0, + "1009": 849621504.0, + "1010": 849621504.0, + "1011": 849621504.0, + "1012": 849621504.0, + "1013": 849621504.0, + "1014": 849621504.0, + "1015": 849621504.0, + "1016": 849621504.0, + "1017": 849621504.0, + "1018": 849621504.0, + "1019": 849621504.0, + "1020": 849621504.0, + "1021": 849621504.0, + "1022": 849621504.0, + "1023": 849621504.0, + "1024": 849621504.0, + "1025": 849621504.0, + "1026": 849621504.0, + "1027": 849621504.0, + "1028": 849621504.0, + "1029": 849621504.0, + "1030": 849621504.0, + "1031": 849621504.0, + "1032": 849621504.0, + "1033": 849621504.0, + "1034": 849621504.0, + "1035": 849621504.0, + "1036": 849621504.0, + "1037": 849621504.0, + "1038": 849621504.0, + "1039": 849621504.0, + "1040": 849621504.0, + "1041": 849621504.0, + "1042": 849621504.0, + "1043": 849621504.0, + "1044": 849621504.0, + "1045": 849621504.0, + "1046": 849621504.0, + "1047": 849621504.0, + "1048": 849621504.0, + "1049": 849621504.0, + "1050": 849621504.0, + "1051": 849621504.0, + "1052": 849621504.0, + "1053": 849621504.0, + "1054": 849621504.0, + "1055": 849621504.0, + "1056": 849621504.0, + "1057": 849621504.0, + "1058": 849621504.0, + "1059": 849621504.0, + "1060": 849621504.0, + "1061": 849621504.0, + "1062": 849621504.0, + "1063": 849621504.0, + "1064": 849621504.0, + "1065": 849621504.0, + "1066": 849621504.0, + "1067": 849621504.0, + "1068": 849621504.0, + "1069": 849621504.0, + "1070": 849621504.0, + "1071": 849621504.0, + "1072": 849621504.0, + "1073": 849621504.0, + "1074": 849621504.0, + "1075": 849621504.0, + "1076": 849621504.0, + "1077": 849621504.0, + "1078": 849621504.0, + "1079": 849621504.0, + "1080": 849621504.0, + "1081": 849621504.0, + "1082": 
849621504.0, + "1083": 849621504.0, + "1084": 849621504.0, + "1085": 849621504.0, + "1086": 849621504.0, + "1087": 849621504.0, + "1088": 849621504.0, + "1089": 849621504.0, + "1090": 849621504.0, + "1091": 849621504.0, + "1092": 849621504.0, + "1093": 849621504.0, + "1094": 849621504.0, + "1095": 849621504.0, + "1096": 849621504.0, + "1097": 849621504.0, + "1098": 849621504.0, + "1099": 849621504.0, + "1100": 849621504.0, + "1101": 849621504.0, + "1102": 849621504.0, + "1103": 849621504.0, + "1104": 849621504.0, + "1105": 849621504.0, + "1106": 849621504.0, + "1107": 849621504.0, + "1108": 849621504.0, + "1109": 849621504.0, + "1110": 849621504.0, + "1111": 849621504.0, + "1112": 849621504.0, + "1113": 849621504.0, + "1114": 849621504.0, + "1115": 849621504.0, + "1116": 849621504.0, + "1117": 849621504.0, + "1118": 849621504.0, + "1119": 849621504.0, + "1120": 849621504.0, + "1121": 849621504.0, + "1122": 849621504.0, + "1123": 849621504.0, + "1124": 849621504.0, + "1125": 849621504.0, + "1126": 849621504.0, + "1127": 849621504.0, + "1128": 849621504.0, + "1129": 849621504.0, + "1130": 849621504.0, + "1131": 849621504.0, + "1132": 849621504.0, + "1133": 849621504.0, + "1134": 849621504.0, + "1135": 849621504.0, + "1136": 849621504.0, + "1137": 849621504.0, + "1138": 849621504.0, + "1139": 849621504.0, + "1140": 849621504.0, + "1141": 849621504.0, + "1142": 849621504.0, + "1143": 849621504.0, + "1144": 849621504.0, + "1145": 849621504.0, + "1146": 849621504.0, + "1147": 849621504.0, + "1148": 849621504.0, + "1149": 849621504.0, + "1150": 849621504.0, + "1151": 849621504.0, + "1152": 849621504.0, + "1153": 849621504.0, + "1154": 849621504.0, + "1155": 849621504.0, + "1156": 849621504.0, + "1157": 849621504.0, + "1158": 849621504.0, + "1159": 849621504.0, + "1160": 849621504.0, + "1161": 849621504.0, + "1162": 849621504.0, + "1163": 849621504.0, + "1164": 849621504.0, + "1165": 849621504.0, + "1166": 849621504.0, + "1167": 849621504.0, + "1168": 849621504.0, + 
"1169": 849621504.0, + "1170": 849621504.0, + "1171": 849621504.0, + "1172": 849621504.0, + "1173": 849621504.0, + "1174": 849621504.0, + "1175": 849621504.0, + "1176": 849621504.0, + "1177": 849621504.0, + "1178": 849621504.0, + "1179": 849621504.0, + "1180": 849621504.0, + "1181": 849621504.0, + "1182": 849621504.0, + "1183": 849621504.0, + "1184": 849621504.0, + "1185": 849621504.0, + "1186": 849621504.0, + "1187": 849621504.0, + "1188": 849621504.0, + "1189": 849621504.0, + "1190": 849621504.0, + "1191": 849621504.0, + "1192": 849621504.0, + "1193": 849621504.0, + "1194": 849621504.0, + "1195": 849621504.0, + "1196": 849621504.0, + "1197": 849621504.0, + "1198": 849621504.0, + "1199": 849621504.0, + "1200": 849621504.0, + "1201": 849621504.0, + "1202": 849621504.0, + "1203": 849621504.0, + "1204": 849621504.0, + "1205": 849621504.0, + "1206": 849621504.0, + "1207": 849621504.0, + "1208": 849621504.0, + "1209": 849621504.0, + "1210": 849621504.0, + "1211": 849621504.0, + "1212": 849621504.0, + "1213": 849621504.0, + "1214": 849621504.0, + "1215": 849621504.0, + "1216": 849621504.0, + "1217": 849621504.0, + "1218": 849621504.0, + "1219": 849621504.0, + "1220": 849621504.0, + "1221": 849621504.0, + "1222": 849621504.0, + "1223": 849621504.0, + "1224": 849621504.0, + "1225": 849621504.0, + "1226": 849621504.0, + "1227": 849621504.0, + "1228": 849621504.0, + "1229": 849621504.0, + "1230": 849621504.0, + "1231": 849621504.0, + "1232": 849621504.0, + "1233": 849621504.0, + "1234": 849621504.0, + "1235": 849621504.0, + "1236": 849621504.0, + "1237": 849621504.0, + "1238": 849621504.0, + "1239": 849621504.0, + "1240": 849621504.0, + "1241": 849621504.0, + "1242": 849621504.0, + "1243": 849621504.0, + "1244": 849621504.0, + "1245": 849621504.0, + "1246": 849621504.0, + "1247": 849621504.0, + "1248": 849621504.0, + "1249": 849621504.0, + "1250": 849621504.0, + "1251": 849621504.0, + "1252": 849621504.0, + "1253": 849621504.0, + "1254": 849621504.0, + "1255": 849621504.0, 
+ "1256": 849621504.0, + "1257": 849621504.0, + "1258": 849621504.0, + "1259": 849621504.0, + "1260": 849621504.0, + "1261": 849621504.0, + "1262": 849621504.0, + "1263": 849621504.0, + "1264": 849621504.0, + "1265": 849621504.0, + "1266": 849621504.0, + "1267": 849621504.0, + "1268": 849621504.0, + "1269": 849621504.0, + "1270": 849621504.0, + "1271": 849621504.0, + "1272": 849621504.0, + "1273": 849621504.0, + "1274": 849621504.0, + "1275": 849621504.0, + "1276": 849621504.0, + "1277": 849621504.0, + "1278": 849621504.0, + "1279": 849621504.0, + "1280": 849621504.0, + "1281": 849621504.0, + "1282": 849621504.0, + "1283": 849621504.0, + "1284": 849621504.0, + "1285": 849621504.0, + "1286": 849621504.0, + "1287": 849621504.0, + "1288": 849621504.0, + "1289": 849621504.0, + "1290": 849621504.0, + "1291": 849621504.0, + "1292": 849621504.0, + "1293": 849621504.0, + "1294": 849621504.0, + "1295": 849621504.0, + "1296": 849621504.0, + "1297": 849621504.0, + "1298": 849621504.0, + "1299": 849621504.0, + "1300": 849621504.0, + "1301": 849621504.0, + "1302": 849621504.0, + "1303": 849621504.0, + "1304": 849621504.0, + "1305": 849621504.0, + "1306": 849621504.0, + "1307": 849621504.0, + "1308": 849621504.0, + "1309": 849621504.0, + "1310": 849621504.0, + "1311": 849621504.0, + "1312": 849621504.0, + "1313": 849621504.0, + "1314": 849621504.0, + "1315": 849621504.0, + "1316": 849621504.0, + "1317": 849621504.0, + "1318": 849621504.0, + "1319": 849621504.0, + "1320": 849621504.0, + "1321": 849621504.0, + "1322": 849621504.0, + "1323": 849621504.0, + "1324": 849621504.0, + "1325": 849621504.0, + "1326": 849621504.0, + "1327": 849621504.0, + "1328": 849621504.0, + "1329": 849621504.0, + "1330": 849621504.0, + "1331": 849621504.0, + "1332": 849621504.0, + "1333": 849621504.0, + "1334": 849621504.0, + "1335": 849621504.0, + "1336": 849621504.0, + "1337": 849621504.0, + "1338": 849621504.0, + "1339": 849621504.0, + "1340": 849621504.0, + "1341": 849621504.0, + "1342": 
849621504.0, + "1343": 849621504.0, + "1344": 849621504.0, + "1345": 849621504.0, + "1346": 849621504.0, + "1347": 849621504.0, + "1348": 849621504.0, + "1349": 849621504.0, + "1350": 849621504.0, + "1351": 849621504.0, + "1352": 849621504.0, + "1353": 849621504.0, + "1354": 849621504.0, + "1355": 849621504.0, + "1356": 849621504.0, + "1357": 849621504.0, + "1358": 849621504.0, + "1359": 849621504.0, + "1360": 849621504.0, + "1361": 849621504.0, + "1362": 849621504.0, + "1363": 849621504.0, + "1364": 849621504.0, + "1365": 849621504.0, + "1366": 849621504.0, + "1367": 849621504.0, + "1368": 849621504.0, + "1369": 849621504.0, + "1370": 849621504.0, + "1371": 849621504.0, + "1372": 849621504.0, + "1373": 849621504.0, + "1374": 849621504.0, + "1375": 849621504.0, + "1376": 849621504.0, + "1377": 849621504.0, + "1378": 849621504.0, + "1379": 849621504.0, + "1380": 849621504.0, + "1381": 849621504.0, + "1382": 849621504.0, + "1383": 849621504.0, + "1384": 849621504.0, + "1385": 849621504.0, + "1386": 849621504.0, + "1387": 849621504.0, + "1388": 849621504.0, + "1389": 849621504.0, + "1390": 849621504.0, + "1391": 849621504.0, + "1392": 849621504.0, + "1393": 849621504.0, + "1394": 849621504.0, + "1395": 849621504.0, + "1396": 849621504.0, + "1397": 849621504.0, + "1398": 849621504.0, + "1399": 849621504.0, + "1400": 849621504.0, + "1401": 849621504.0, + "1402": 849621504.0, + "1403": 849621504.0, + "1404": 849621504.0, + "1405": 849621504.0, + "1406": 849621504.0, + "1407": 849621504.0, + "1408": 849621504.0, + "1409": 849621504.0, + "1410": 849621504.0, + "1411": 849621504.0, + "1412": 849621504.0, + "1413": 849621504.0, + "1414": 849621504.0, + "1415": 849621504.0, + "1416": 849621504.0, + "1417": 849621504.0, + "1418": 849621504.0, + "1419": 849621504.0, + "1420": 849621504.0, + "1421": 849621504.0, + "1422": 849621504.0, + "1423": 849621504.0, + "1424": 849621504.0, + "1425": 849621504.0, + "1426": 849621504.0, + "1427": 849621504.0, + "1428": 849621504.0, + 
"1429": 849621504.0, + "1430": 849621504.0, + "1431": 849621504.0, + "1432": 849621504.0, + "1433": 849621504.0, + "1434": 849621504.0, + "1435": 849621504.0, + "1436": 849621504.0, + "1437": 849621504.0, + "1438": 849621504.0, + "1439": 849621504.0, + "1440": 849621504.0, + "1441": 849621504.0, + "1442": 849621504.0, + "1443": 849621504.0, + "1444": 849621504.0, + "1445": 849621504.0, + "1446": 849621504.0, + "1447": 849621504.0, + "1448": 849621504.0, + "1449": 849621504.0, + "1450": 849621504.0, + "1451": 849621504.0, + "1452": 849621504.0, + "1453": 849621504.0, + "1454": 849621504.0, + "1455": 849621504.0, + "1456": 849621504.0, + "1457": 849621504.0, + "1458": 849621504.0, + "1459": 849621504.0, + "1460": 849621504.0, + "1461": 849621504.0, + "1462": 849621504.0, + "1463": 849621504.0, + "1464": 849621504.0, + "1465": 849621504.0, + "1466": 849621504.0, + "1467": 849621504.0, + "1468": 849621504.0, + "1469": 849621504.0, + "1470": 849621504.0, + "1471": 849621504.0, + "1472": 849621504.0, + "1473": 849621504.0, + "1474": 849621504.0, + "1475": 849621504.0, + "1476": 849621504.0, + "1477": 849621504.0, + "1478": 849621504.0, + "1479": 849621504.0, + "1480": 849621504.0, + "1481": 849621504.0, + "1482": 849621504.0, + "1483": 849621504.0, + "1484": 849621504.0, + "1485": 849621504.0, + "1486": 849621504.0, + "1487": 849621504.0, + "1488": 849621504.0, + "1489": 849621504.0, + "1490": 849621504.0, + "1491": 849621504.0, + "1492": 849621504.0, + "1493": 849621504.0, + "1494": 849621504.0, + "1495": 849621504.0, + "1496": 849621504.0, + "1497": 849621504.0, + "1498": 849621504.0, + "1499": 849621504.0, + "1500": 849621504.0, + "1501": 849621504.0, + "1502": 849621504.0, + "1503": 849621504.0, + "1504": 849621504.0, + "1505": 849621504.0, + "1506": 849621504.0, + "1507": 849621504.0, + "1508": 849621504.0, + "1509": 849621504.0, + "1510": 849621504.0, + "1511": 849621504.0, + "1512": 849621504.0, + "1513": 849621504.0, + "1514": 849621504.0, + "1515": 849621504.0, 
+ "1516": 849621504.0, + "1517": 849621504.0, + "1518": 849621504.0, + "1519": 849621504.0, + "1520": 849621504.0, + "1521": 849621504.0, + "1522": 849621504.0, + "1523": 849621504.0, + "1524": 849621504.0, + "1525": 849621504.0, + "1526": 849621504.0, + "1527": 849621504.0, + "1528": 849621504.0, + "1529": 849621504.0, + "1530": 849621504.0, + "1531": 849621504.0, + "1532": 849621504.0, + "1533": 849621504.0, + "1534": 849621504.0, + "1535": 849621504.0, + "1536": 849621504.0, + "1537": 849621504.0, + "1538": 849621504.0, + "1539": 849621504.0, + "1540": 849621504.0, + "1541": 849621504.0, + "1542": 849621504.0, + "1543": 849621504.0, + "1544": 849621504.0, + "1545": 849621504.0, + "1546": 849621504.0, + "1547": 849621504.0, + "1548": 849621504.0, + "1549": 849621504.0, + "1550": 849621504.0, + "1551": 849621504.0, + "1552": 849621504.0, + "1553": 849621504.0, + "1554": 849621504.0, + "1555": 849621504.0, + "1556": 849621504.0, + "1557": 849621504.0, + "1558": 849621504.0, + "1559": 849621504.0, + "1560": 849621504.0, + "1561": 849621504.0, + "1562": 849621504.0, + "1563": 849621504.0, + "1564": 849621504.0, + "1565": 849621504.0, + "1566": 849621504.0, + "1567": 849621504.0, + "1568": 849621504.0, + "1569": 849621504.0, + "1570": 849621504.0, + "1571": 849621504.0, + "1572": 849621504.0, + "1573": 849621504.0, + "1574": 849621504.0, + "1575": 849621504.0, + "1576": 849621504.0, + "1577": 849621504.0, + "1578": 849621504.0, + "1579": 849621504.0, + "1580": 849621504.0, + "1581": 849621504.0, + "1582": 849621504.0, + "1583": 849621504.0, + "1584": 849621504.0, + "1585": 849621504.0, + "1586": 849621504.0, + "1587": 849621504.0, + "1588": 849621504.0, + "1589": 849621504.0, + "1590": 849621504.0, + "1591": 849621504.0, + "1592": 849621504.0, + "1593": 849621504.0, + "1594": 849621504.0, + "1595": 849621504.0, + "1596": 849621504.0, + "1597": 849621504.0, + "1598": 849621504.0, + "1599": 849621504.0, + "1600": 849621504.0, + "1601": 849621504.0, + "1602": 
849621504.0, + "1603": 849621504.0, + "1604": 849621504.0, + "1605": 849621504.0, + "1606": 849621504.0, + "1607": 849621504.0, + "1608": 849621504.0, + "1609": 849621504.0, + "1610": 849621504.0, + "1611": 849621504.0, + "1612": 849621504.0, + "1613": 849621504.0, + "1614": 849621504.0, + "1615": 849621504.0, + "1616": 849621504.0, + "1617": 849621504.0, + "1618": 849621504.0, + "1619": 849621504.0, + "1620": 849621504.0, + "1621": 849621504.0, + "1622": 849621504.0, + "1623": 849621504.0, + "1624": 849621504.0, + "1625": 849621504.0, + "1626": 849621504.0, + "1627": 849621504.0, + "1628": 849621504.0, + "1629": 849621504.0, + "1630": 849621504.0, + "1631": 849621504.0, + "1632": 849621504.0, + "1633": 849621504.0, + "1634": 849621504.0, + "1635": 849621504.0, + "1636": 849621504.0, + "1637": 849621504.0, + "1638": 849621504.0, + "1639": 849621504.0, + "1640": 849621504.0, + "1641": 849621504.0, + "1642": 849621504.0, + "1643": 849621504.0, + "1644": 849621504.0, + "1645": 849621504.0, + "1646": 849621504.0, + "1647": 849621504.0, + "1648": 849621504.0, + "1649": 849621504.0, + "1650": 849621504.0, + "1651": 849621504.0, + "1652": 849621504.0, + "1653": 849621504.0, + "1654": 849621504.0, + "1655": 849621504.0, + "1656": 849621504.0, + "1657": 849621504.0, + "1658": 849621504.0, + "1659": 849621504.0, + "1660": 849621504.0, + "1661": 849621504.0, + "1662": 849621504.0, + "1663": 849621504.0, + "1664": 849621504.0, + "1665": 849621504.0, + "1666": 849621504.0, + "1667": 849621504.0, + "1668": 849621504.0, + "1669": 849621504.0, + "1670": 849621504.0, + "1671": 849621504.0, + "1672": 849621504.0, + "1673": 849621504.0, + "1674": 849621504.0, + "1675": 849621504.0, + "1676": 849621504.0, + "1677": 849621504.0, + "1678": 849621504.0, + "1679": 849621504.0, + "1680": 849621504.0, + "1681": 849621504.0, + "1682": 849621504.0, + "1683": 849621504.0, + "1684": 849621504.0, + "1685": 849621504.0, + "1686": 849621504.0, + "1687": 849621504.0, + "1688": 849621504.0, + 
"1689": 849621504.0, + "1690": 849621504.0, + "1691": 849621504.0, + "1692": 849621504.0, + "1693": 849621504.0, + "1694": 849621504.0, + "1695": 849621504.0, + "1696": 849621504.0, + "1697": 849621504.0, + "1698": 849621504.0, + "1699": 849621504.0, + "1700": 849621504.0, + "1701": 849621504.0, + "1702": 849621504.0, + "1703": 849621504.0, + "1704": 849621504.0, + "1705": 849621504.0, + "1706": 849621504.0, + "1707": 849621504.0, + "1708": 849621504.0, + "1709": 849621504.0, + "1710": 849621504.0, + "1711": 849621504.0, + "1712": 849621504.0, + "1713": 849621504.0, + "1714": 849621504.0, + "1715": 849621504.0, + "1716": 849621504.0, + "1717": 849621504.0, + "1718": 849621504.0, + "1719": 849621504.0, + "1720": 849621504.0, + "1721": 849621504.0, + "1722": 849621504.0, + "1723": 849621504.0, + "1724": 849621504.0, + "1725": 849621504.0, + "1726": 849621504.0, + "1727": 849621504.0, + "1728": 849621504.0, + "1729": 849621504.0, + "1730": 849621504.0, + "1731": 849621504.0, + "1732": 849621504.0, + "1733": 849621504.0, + "1734": 849621504.0, + "1735": 849621504.0, + "1736": 849621504.0, + "1737": 849621504.0, + "1738": 849621504.0, + "1739": 849621504.0, + "1740": 849621504.0, + "1741": 849621504.0, + "1742": 849621504.0, + "1743": 849621504.0, + "1744": 849621504.0, + "1745": 849621504.0, + "1746": 849621504.0, + "1747": 849621504.0, + "1748": 849621504.0, + "1749": 849621504.0, + "1750": 849621504.0, + "1751": 849621504.0, + "1752": 849621504.0, + "1753": 849621504.0, + "1754": 849621504.0, + "1755": 849621504.0, + "1756": 849621504.0, + "1757": 849621504.0, + "1758": 849621504.0, + "1759": 849621504.0, + "1760": 849621504.0, + "1761": 849621504.0, + "1762": 849621504.0, + "1763": 849621504.0, + "1764": 849621504.0, + "1765": 849621504.0, + "1766": 849621504.0, + "1767": 849621504.0, + "1768": 849621504.0, + "1769": 849621504.0, + "1770": 849621504.0, + "1771": 849621504.0, + "1772": 849621504.0, + "1773": 849621504.0, + "1774": 849621504.0, + "1775": 849621504.0, 
+ "1776": 849621504.0, + "1777": 849621504.0, + "1778": 849621504.0, + "1779": 849621504.0, + "1780": 849621504.0, + "1781": 849621504.0, + "1782": 849621504.0, + "1783": 849621504.0, + "1784": 849621504.0, + "1785": 849621504.0, + "1786": 849621504.0, + "1787": 849621504.0, + "1788": 849621504.0, + "1789": 849621504.0, + "1790": 849621504.0, + "1791": 849621504.0, + "1792": 849621504.0, + "1793": 849621504.0, + "1794": 849621504.0, + "1795": 849621504.0, + "1796": 849621504.0, + "1797": 849621504.0, + "1798": 849621504.0, + "1799": 849621504.0, + "1800": 849621504.0, + "1801": 849621504.0, + "1802": 849621504.0, + "1803": 849621504.0, + "1804": 849621504.0, + "1805": 849621504.0, + "1806": 849621504.0, + "1807": 849621504.0, + "1808": 849621504.0, + "1809": 849621504.0, + "1810": 849621504.0, + "1811": 849621504.0, + "1812": 849621504.0, + "1813": 849621504.0, + "1814": 849621504.0, + "1815": 849621504.0, + "1816": 849621504.0, + "1817": 849621504.0, + "1818": 849621504.0, + "1819": 849621504.0, + "1820": 849621504.0, + "1821": 849621504.0, + "1822": 849621504.0, + "1823": 849621504.0, + "1824": 849621504.0, + "1825": 849621504.0, + "1826": 849621504.0, + "1827": 849621504.0, + "1828": 849621504.0, + "1829": 849621504.0, + "1830": 849621504.0, + "1831": 849621504.0, + "1832": 849621504.0, + "1833": 849621504.0, + "1834": 849621504.0, + "1835": 849621504.0, + "1836": 849621504.0, + "1837": 849621504.0, + "1838": 849621504.0, + "1839": 849621504.0, + "1840": 849621504.0, + "1841": 849621504.0, + "1842": 849621504.0, + "1843": 849621504.0, + "1844": 849621504.0, + "1845": 849621504.0, + "1846": 849621504.0, + "1847": 849621504.0, + "1848": 849621504.0, + "1849": 849621504.0, + "1850": 849621504.0, + "1851": 849621504.0, + "1852": 849621504.0, + "1853": 849621504.0, + "1854": 849621504.0, + "1855": 849621504.0, + "1856": 849621504.0, + "1857": 849621504.0, + "1858": 849621504.0, + "1859": 849621504.0, + "1860": 849621504.0, + "1861": 849621504.0, + "1862": 
849621504.0, + "1863": 849621504.0, + "1864": 849621504.0, + "1865": 849621504.0, + "1866": 849621504.0, + "1867": 849621504.0, + "1868": 849621504.0, + "1869": 849621504.0, + "1870": 849621504.0, + "1871": 849621504.0, + "1872": 849621504.0, + "1873": 849621504.0, + "1874": 849621504.0, + "1875": 849621504.0, + "1876": 849621504.0, + "1877": 849621504.0, + "1878": 849621504.0, + "1879": 849621504.0, + "1880": 849621504.0, + "1881": 849621504.0, + "1882": 849621504.0, + "1883": 849621504.0, + "1884": 849621504.0, + "1885": 849621504.0, + "1886": 849621504.0, + "1887": 849621504.0, + "1888": 849621504.0, + "1889": 849621504.0, + "1890": 849621504.0, + "1891": 849621504.0, + "1892": 849621504.0, + "1893": 849621504.0, + "1894": 849621504.0, + "1895": 849621504.0, + "1896": 849621504.0, + "1897": 849621504.0, + "1898": 849621504.0, + "1899": 849621504.0, + "1900": 849621504.0, + "1901": 849621504.0, + "1902": 849621504.0, + "1903": 849621504.0, + "1904": 849621504.0, + "1905": 849621504.0, + "1906": 849621504.0, + "1907": 849621504.0, + "1908": 849621504.0, + "1909": 849621504.0, + "1910": 849621504.0, + "1911": 849621504.0, + "1912": 849621504.0, + "1913": 849621504.0, + "1914": 849621504.0, + "1915": 849621504.0, + "1916": 849621504.0, + "1917": 849621504.0, + "1918": 849621504.0, + "1919": 849621504.0, + "1920": 849621504.0, + "1921": 849621504.0, + "1922": 849621504.0, + "1923": 849621504.0, + "1924": 849621504.0, + "1925": 849621504.0, + "1926": 849621504.0, + "1927": 849621504.0, + "1928": 849621504.0, + "1929": 849621504.0, + "1930": 849621504.0, + "1931": 849621504.0, + "1932": 849621504.0, + "1933": 849621504.0, + "1934": 849621504.0, + "1935": 849621504.0, + "1936": 849621504.0, + "1937": 849621504.0, + "1938": 849621504.0, + "1939": 849621504.0, + "1940": 849621504.0, + "1941": 849621504.0, + "1942": 849621504.0, + "1943": 849621504.0, + "1944": 849621504.0, + "1945": 849621504.0, + "1946": 849621504.0, + "1947": 849621504.0, + "1948": 849621504.0, + 
"1949": 849621504.0, + "1950": 849621504.0, + "1951": 849621504.0, + "1952": 849621504.0, + "1953": 849621504.0, + "1954": 849621504.0, + "1955": 849621504.0, + "1956": 849621504.0, + "1957": 849621504.0, + "1958": 849621504.0, + "1959": 849621504.0, + "1960": 849621504.0, + "1961": 849621504.0, + "1962": 849621504.0, + "1963": 849621504.0, + "1964": 849621504.0, + "1965": 849621504.0, + "1966": 849621504.0, + "1967": 849621504.0, + "1968": 849621504.0, + "1969": 849621504.0, + "1970": 849621504.0, + "1971": 849621504.0, + "1972": 849621504.0, + "1973": 849621504.0, + "1974": 849621504.0, + "1975": 849621504.0, + "1976": 849621504.0, + "1977": 849621504.0, + "1978": 849621504.0, + "1979": 849621504.0, + "1980": 849621504.0, + "1981": 849621504.0, + "1982": 849621504.0, + "1983": 849621504.0, + "1984": 849621504.0, + "1985": 849621504.0, + "1986": 849621504.0, + "1987": 849621504.0, + "1988": 849621504.0, + "1989": 849621504.0, + "1990": 849621504.0, + "1991": 849621504.0, + "1992": 849621504.0, + "1993": 849621504.0, + "1994": 849621504.0, + "1995": 849621504.0, + "1996": 849621504.0, + "1997": 849621504.0, + "1998": 849621504.0, + "1999": 849621504.0, + "2000": 849621504.0 + } + }, + "iteration-time": { + "start_step": 1, + "end_step": 2000, + "step_interval": 1, + "values": { + "1": 14.94115, + "2": 1.30868, + "3": 1.13391, + "4": 1.12792, + "5": 1.13103, + "6": 1.1383, + "7": 1.13573, + "8": 1.15789, + "9": 1.12704, + "10": 1.1241, + "11": 1.12786, + "12": 1.1288, + "13": 1.1399, + "14": 1.13165, + "15": 1.12333, + "16": 1.12398, + "17": 1.12493, + "18": 1.11586, + "19": 1.1123, + "20": 1.11192, + "21": 1.1266, + "22": 1.13629, + "23": 1.13171, + "24": 1.14969, + "25": 1.17022, + "26": 1.14634, + "27": 1.14242, + "28": 1.14353, + "29": 1.14554, + "30": 1.28826, + "31": 1.14265, + "32": 1.14023, + "33": 1.15286, + "34": 1.14975, + "35": 1.13988, + "36": 1.62757, + "37": 2.22703, + "38": 1.36074, + "39": 1.1325, + "40": 1.14106, + "41": 1.14114, + "42": 1.13305, + 
"43": 1.12375, + "44": 1.12631, + "45": 1.12358, + "46": 1.12334, + "47": 1.12398, + "48": 1.12749, + "49": 1.13897, + "50": 1.13563, + "51": 1.13628, + "52": 1.12935, + "53": 1.12779, + "54": 1.13147, + "55": 1.1279, + "56": 1.12777, + "57": 1.1269, + "58": 1.13989, + "59": 1.13378, + "60": 1.13552, + "61": 1.12879, + "62": 1.4796, + "63": 1.12843, + "64": 1.12488, + "65": 1.12888, + "66": 1.14028, + "67": 1.13532, + "68": 1.13278, + "69": 1.12779, + "70": 1.12468, + "71": 1.12483, + "72": 1.12423, + "73": 1.12335, + "74": 1.12699, + "75": 1.13379, + "76": 1.13001, + "77": 1.12994, + "78": 1.13166, + "79": 1.12415, + "80": 1.126, + "81": 1.16016, + "82": 1.13845, + "83": 1.13882, + "84": 1.14455, + "85": 1.46908, + "86": 1.1259, + "87": 1.12119, + "88": 1.12312, + "89": 1.12593, + "90": 1.51995, + "91": 1.16022, + "92": 1.1304, + "93": 1.13161, + "94": 1.13511, + "95": 1.13911, + "96": 1.80205, + "97": 1.13368, + "98": 1.13335, + "99": 1.13549, + "100": 1.13409, + "101": 1.13703, + "102": 1.14592, + "103": 1.13516, + "104": 1.13661, + "105": 1.13299, + "106": 1.13577, + "107": 1.13657, + "108": 1.13144, + "109": 1.14828, + "110": 1.15036, + "111": 1.1486, + "112": 1.14183, + "113": 1.14297, + "114": 1.1411, + "115": 1.14318, + "116": 1.14291, + "117": 1.14168, + "118": 1.15055, + "119": 1.1482, + "120": 1.15352, + "121": 1.13046, + "122": 1.145, + "123": 1.14278, + "124": 1.1428, + "125": 1.14189, + "126": 1.13609, + "127": 1.14025, + "128": 1.14097, + "129": 1.13489, + "130": 1.13417, + "131": 1.13581, + "132": 1.13708, + "133": 1.17896, + "134": 1.13176, + "135": 1.12984, + "136": 1.1435, + "137": 1.15088, + "138": 1.14391, + "139": 1.14409, + "140": 1.14238, + "141": 1.14313, + "142": 1.1493, + "143": 1.13518, + "144": 1.13229, + "145": 1.13749, + "146": 1.15049, + "147": 1.16077, + "148": 1.14254, + "149": 1.14071, + "150": 1.14075, + "151": 1.13943, + "152": 1.15276, + "153": 1.15369, + "154": 1.14618, + "155": 1.14225, + "156": 1.14285, + "157": 1.14106, + 
"158": 1.14415, + "159": 1.14445, + "160": 1.14934, + "161": 1.14229, + "162": 1.14167, + "163": 1.14058, + "164": 1.14064, + "165": 1.14012, + "166": 1.15198, + "167": 1.15221, + "168": 1.1471, + "169": 1.14122, + "170": 1.14769, + "171": 1.14073, + "172": 1.14205, + "173": 1.14583, + "174": 1.14217, + "175": 1.14015, + "176": 1.14319, + "177": 1.14097, + "178": 1.14115, + "179": 1.14122, + "180": 1.15137, + "181": 1.14856, + "182": 1.15203, + "183": 1.14535, + "184": 1.13997, + "185": 1.15174, + "186": 1.18192, + "187": 1.14929, + "188": 1.14842, + "189": 1.14724, + "190": 1.14922, + "191": 1.14932, + "192": 1.14856, + "193": 1.1562, + "194": 1.153, + "195": 1.16371, + "196": 1.14525, + "197": 1.1411, + "198": 1.14592, + "199": 1.14301, + "200": 1.15088, + "201": 1.14229, + "202": 1.14171, + "203": 1.14083, + "204": 1.13968, + "205": 1.13977, + "206": 1.14177, + "207": 1.15548, + "208": 1.15609, + "209": 1.14509, + "210": 1.1487, + "211": 1.14163, + "212": 1.13971, + "213": 1.15326, + "214": 1.14129, + "215": 1.14055, + "216": 1.13893, + "217": 1.14191, + "218": 1.1418, + "219": 1.14249, + "220": 1.14162, + "221": 1.14077, + "222": 1.15513, + "223": 1.15668, + "224": 1.14515, + "225": 1.14589, + "226": 1.14548, + "227": 1.14318, + "228": 1.14204, + "229": 1.14391, + "230": 1.14565, + "231": 1.1439, + "232": 1.14309, + "233": 1.14396, + "234": 1.14146, + "235": 1.14229, + "236": 1.14106, + "237": 1.14362, + "238": 1.15203, + "239": 1.1942, + "240": 1.18025, + "241": 1.15197, + "242": 1.15276, + "243": 1.15399, + "244": 1.15628, + "245": 1.14958, + "246": 1.14931, + "247": 1.14093, + "248": 1.13869, + "249": 1.1385, + "250": 1.13897, + "251": 1.13787, + "252": 1.13939, + "253": 1.17282, + "254": 1.13361, + "255": 1.13502, + "256": 1.13895, + "257": 1.16245, + "258": 1.1352, + "259": 1.15685, + "260": 1.14637, + "261": 1.2867, + "262": 1.13699, + "263": 1.13959, + "264": 1.15414, + "265": 1.14324, + "266": 1.14515, + "267": 1.14328, + "268": 1.14359, + "269": 1.144, 
+ "270": 1.15446, + "271": 1.15182, + "272": 1.15575, + "273": 1.15561, + "274": 1.15762, + "275": 1.15307, + "276": 1.1516, + "277": 1.1569, + "278": 1.15789, + "279": 1.168, + "280": 1.16711, + "281": 1.16858, + "282": 1.16899, + "283": 1.15631, + "284": 1.15543, + "285": 1.15685, + "286": 1.15663, + "287": 1.15204, + "288": 1.15333, + "289": 1.15257, + "290": 1.14865, + "291": 1.15067, + "292": 1.15626, + "293": 1.15161, + "294": 1.15116, + "295": 1.15102, + "296": 1.15104, + "297": 1.17304, + "298": 1.17562, + "299": 1.17694, + "300": 1.15026, + "301": 1.15562, + "302": 1.15582, + "303": 1.15039, + "304": 1.14517, + "305": 1.14745, + "306": 1.15392, + "307": 1.15054, + "308": 1.14391, + "309": 1.1426, + "310": 1.1434, + "311": 1.14297, + "312": 1.14164, + "313": 1.15234, + "314": 1.14891, + "315": 1.14745, + "316": 1.15325, + "317": 1.15145, + "318": 1.51061, + "319": 1.13797, + "320": 1.13871, + "321": 1.20976, + "322": 1.19788, + "323": 1.14258, + "324": 1.14169, + "325": 1.14227, + "326": 1.1426, + "327": 1.14596, + "328": 1.14584, + "329": 1.14606, + "330": 1.13676, + "331": 1.14712, + "332": 1.14502, + "333": 1.14602, + "334": 1.14598, + "335": 1.15781, + "336": 1.15666, + "337": 1.1498, + "338": 1.15651, + "339": 1.15267, + "340": 1.14703, + "341": 1.14889, + "342": 1.14863, + "343": 1.14731, + "344": 1.1479, + "345": 1.20819, + "346": 1.15653, + "347": 1.15548, + "348": 1.15594, + "349": 1.15558, + "350": 1.15652, + "351": 1.15348, + "352": 1.15517, + "353": 1.15665, + "354": 1.15895, + "355": 1.15829, + "356": 1.16229, + "357": 1.17016, + "358": 1.16317, + "359": 1.18492, + "360": 1.20126, + "361": 1.19034, + "362": 1.18723, + "363": 1.16724, + "364": 1.14627, + "365": 1.14394, + "366": 1.14503, + "367": 1.14264, + "368": 1.14464, + "369": 1.14478, + "370": 1.14447, + "371": 1.15012, + "372": 1.14509, + "373": 1.14362, + "374": 1.14617, + "375": 1.14658, + "376": 1.13748, + "377": 1.15141, + "378": 1.14564, + "379": 1.14278, + "380": 1.14166, + "381": 
1.14361, + "382": 1.14293, + "383": 1.14196, + "384": 1.14178, + "385": 1.14053, + "386": 1.14184, + "387": 1.14451, + "388": 1.14162, + "389": 1.1419, + "390": 1.14477, + "391": 1.15539, + "392": 1.16117, + "393": 1.16925, + "394": 1.16815, + "395": 1.1561, + "396": 1.15146, + "397": 1.15422, + "398": 1.14884, + "399": 1.14136, + "400": 1.14059, + "401": 1.14105, + "402": 1.14013, + "403": 1.15094, + "404": 1.13492, + "405": 1.1425, + "406": 1.14173, + "407": 1.14385, + "408": 1.14421, + "409": 1.14226, + "410": 1.1417, + "411": 1.1511, + "412": 1.15763, + "413": 1.15891, + "414": 1.15294, + "415": 1.15191, + "416": 1.15346, + "417": 1.15001, + "418": 1.15279, + "419": 1.14974, + "420": 1.14848, + "421": 1.14722, + "422": 1.15396, + "423": 1.1499, + "424": 1.15269, + "425": 1.15087, + "426": 1.14945, + "427": 1.15106, + "428": 1.15515, + "429": 1.14379, + "430": 1.16231, + "431": 1.18658, + "432": 1.17212, + "433": 1.16725, + "434": 1.17832, + "435": 1.16254, + "436": 1.16094, + "437": 1.15865, + "438": 1.16104, + "439": 1.1621, + "440": 1.13911, + "441": 1.13485, + "442": 1.13534, + "443": 1.13627, + "444": 1.13432, + "445": 1.13868, + "446": 1.13561, + "447": 1.13518, + "448": 1.1365, + "449": 1.13444, + "450": 1.13455, + "451": 1.14098, + "452": 1.15368, + "453": 1.1566, + "454": 1.15931, + "455": 1.18151, + "456": 1.16215, + "457": 1.16012, + "458": 1.15916, + "459": 1.15837, + "460": 1.16214, + "461": 1.1652, + "462": 1.16044, + "463": 1.16179, + "464": 1.163, + "465": 1.16332, + "466": 1.15968, + "467": 1.16196, + "468": 1.1592, + "469": 1.15988, + "470": 1.16081, + "471": 1.16128, + "472": 1.15868, + "473": 1.16004, + "474": 1.16125, + "475": 1.15956, + "476": 1.16733, + "477": 1.18857, + "478": 1.15838, + "479": 1.16068, + "480": 1.16004, + "481": 1.15956, + "482": 1.15757, + "483": 1.15802, + "484": 1.16061, + "485": 1.15848, + "486": 1.16058, + "487": 1.15819, + "488": 1.15991, + "489": 1.15831, + "490": 1.1589, + "491": 1.16144, + "492": 1.15934, + 
"493": 1.15973, + "494": 1.16104, + "495": 1.15933, + "496": 1.16173, + "497": 1.16203, + "498": 1.16059, + "499": 1.16461, + "500": 1.16533, + "501": 1.1723, + "502": 1.17075, + "503": 1.17256, + "504": 1.16176, + "505": 1.15972, + "506": 1.16185, + "507": 1.21311, + "508": 1.16326, + "509": 1.15384, + "510": 1.15071, + "511": 1.15307, + "512": 1.15748, + "513": 1.1518, + "514": 1.15181, + "515": 1.15338, + "516": 1.1524, + "517": 1.15481, + "518": 1.15358, + "519": 1.16302, + "520": 1.16218, + "521": 1.15461, + "522": 1.157, + "523": 1.15817, + "524": 1.15517, + "525": 1.15361, + "526": 1.15183, + "527": 1.15237, + "528": 1.15423, + "529": 1.15637, + "530": 1.15521, + "531": 1.15012, + "532": 1.15132, + "533": 1.1495, + "534": 1.14919, + "535": 1.1546, + "536": 1.15442, + "537": 1.1514, + "538": 1.15195, + "539": 1.15221, + "540": 1.15639, + "541": 1.1549, + "542": 1.15495, + "543": 1.15683, + "544": 1.16361, + "545": 1.16186, + "546": 1.15697, + "547": 1.15978, + "548": 1.16151, + "549": 1.15737, + "550": 1.15451, + "551": 1.16057, + "552": 1.20604, + "553": 1.15937, + "554": 1.21638, + "555": 1.16193, + "556": 1.16004, + "557": 1.15937, + "558": 1.15924, + "559": 1.15864, + "560": 1.16064, + "561": 1.15935, + "562": 1.43389, + "563": 1.16041, + "564": 1.16122, + "565": 1.49173, + "566": 1.15954, + "567": 1.17345, + "568": 1.16261, + "569": 1.15966, + "570": 1.1607, + "571": 1.15553, + "572": 1.1568, + "573": 1.15385, + "574": 1.15701, + "575": 1.15849, + "576": 1.15634, + "577": 1.15908, + "578": 1.15576, + "579": 1.15627, + "580": 1.14973, + "581": 1.16027, + "582": 1.16176, + "583": 1.15493, + "584": 1.15722, + "585": 1.15744, + "586": 1.15502, + "587": 1.1559, + "588": 1.15496, + "589": 1.16378, + "590": 1.16595, + "591": 1.16611, + "592": 1.16989, + "593": 1.16842, + "594": 1.17261, + "595": 1.15925, + "596": 1.16083, + "597": 1.16113, + "598": 1.16297, + "599": 1.16456, + "600": 1.15983, + "601": 1.16187, + "602": 1.15943, + "603": 1.15985, + "604": 
1.1592, + "605": 1.15871, + "606": 1.16032, + "607": 1.15919, + "608": 1.17988, + "609": 1.16067, + "610": 1.18157, + "611": 1.15299, + "612": 1.15282, + "613": 1.15274, + "614": 1.15344, + "615": 1.15192, + "616": 1.15757, + "617": 1.15404, + "618": 1.16198, + "619": 1.12381, + "620": 1.11492, + "621": 1.14943, + "622": 1.16512, + "623": 1.16958, + "624": 1.16409, + "625": 1.15844, + "626": 1.14917, + "627": 1.15285, + "628": 1.15477, + "629": 1.15363, + "630": 1.15213, + "631": 1.14647, + "632": 1.14867, + "633": 1.15423, + "634": 1.15566, + "635": 1.15345, + "636": 1.15319, + "637": 1.1511, + "638": 1.15409, + "639": 1.15188, + "640": 1.15258, + "641": 1.15414, + "642": 1.15983, + "643": 1.15819, + "644": 1.15887, + "645": 1.15631, + "646": 1.15765, + "647": 1.16277, + "648": 1.16768, + "649": 1.17095, + "650": 1.16972, + "651": 1.16894, + "652": 1.16584, + "653": 1.1612, + "654": 1.17303, + "655": 1.16406, + "656": 1.1617, + "657": 1.16573, + "658": 1.16082, + "659": 1.16677, + "660": 1.16969, + "661": 1.16374, + "662": 1.16155, + "663": 1.16674, + "664": 1.16865, + "665": 1.16719, + "666": 1.16772, + "667": 1.16872, + "668": 1.16616, + "669": 1.16505, + "670": 1.16449, + "671": 1.16777, + "672": 1.16457, + "673": 1.16059, + "674": 1.16013, + "675": 1.1589, + "676": 1.1645, + "677": 1.16737, + "678": 1.16262, + "679": 1.44417, + "680": 1.16641, + "681": 1.16441, + "682": 1.16834, + "683": 1.17163, + "684": 1.16041, + "685": 1.16815, + "686": 1.16615, + "687": 1.1689, + "688": 1.16377, + "689": 1.16277, + "690": 1.15926, + "691": 1.15823, + "692": 1.15747, + "693": 1.15897, + "694": 1.15722, + "695": 1.15679, + "696": 1.15619, + "697": 1.15686, + "698": 1.15548, + "699": 1.15619, + "700": 1.15662, + "701": 1.15701, + "702": 1.15611, + "703": 1.1578, + "704": 1.15921, + "705": 1.15626, + "706": 1.15696, + "707": 1.15676, + "708": 1.15718, + "709": 1.15643, + "710": 1.16154, + "711": 1.15995, + "712": 1.159, + "713": 1.16786, + "714": 1.15799, + "715": 1.15749, + 
"716": 1.52131, + "717": 1.15676, + "718": 1.16066, + "719": 1.15878, + "720": 1.16243, + "721": 1.15801, + "722": 1.16032, + "723": 1.15929, + "724": 1.16338, + "725": 1.15949, + "726": 1.16444, + "727": 1.31697, + "728": 1.15571, + "729": 1.15513, + "730": 1.15845, + "731": 1.16172, + "732": 1.15814, + "733": 1.1597, + "734": 1.15388, + "735": 1.15282, + "736": 1.15589, + "737": 1.15547, + "738": 1.1547, + "739": 1.15614, + "740": 1.15546, + "741": 1.15558, + "742": 1.15607, + "743": 1.15425, + "744": 1.15442, + "745": 1.16502, + "746": 1.15566, + "747": 1.15865, + "748": 1.15828, + "749": 1.16418, + "750": 1.15709, + "751": 1.15988, + "752": 1.15915, + "753": 1.15069, + "754": 1.15176, + "755": 1.15161, + "756": 1.1502, + "757": 1.14643, + "758": 1.7155, + "759": 1.15471, + "760": 1.15638, + "761": 1.15684, + "762": 1.16005, + "763": 1.1585, + "764": 1.16197, + "765": 1.22988, + "766": 1.16563, + "767": 1.16594, + "768": 1.16751, + "769": 1.16167, + "770": 1.16736, + "771": 1.16232, + "772": 1.16021, + "773": 1.16138, + "774": 1.16446, + "775": 1.15216, + "776": 1.15086, + "777": 1.15506, + "778": 1.15465, + "779": 1.15872, + "780": 1.15533, + "781": 1.15836, + "782": 1.15778, + "783": 1.21735, + "784": 1.15535, + "785": 1.14905, + "786": 1.14868, + "787": 1.14899, + "788": 1.1521, + "789": 1.1498, + "790": 1.15389, + "791": 1.15198, + "792": 1.14834, + "793": 1.14935, + "794": 1.14986, + "795": 1.15066, + "796": 1.15229, + "797": 1.15036, + "798": 1.15026, + "799": 1.15231, + "800": 1.15717, + "801": 1.15355, + "802": 1.15502, + "803": 1.15201, + "804": 1.15023, + "805": 1.15209, + "806": 1.15072, + "807": 1.48449, + "808": 1.15218, + "809": 1.1522, + "810": 1.15111, + "811": 1.15134, + "812": 1.15187, + "813": 1.15379, + "814": 1.15585, + "815": 1.16392, + "816": 1.15452, + "817": 1.15487, + "818": 1.15245, + "819": 1.14836, + "820": 1.14547, + "821": 1.74382, + "822": 1.14655, + "823": 1.13629, + "824": 1.15244, + "825": 1.14064, + "826": 1.14002, + "827": 
1.14234, + "828": 1.1401, + "829": 1.13945, + "830": 1.14243, + "831": 1.14339, + "832": 1.13963, + "833": 1.14165, + "834": 1.13931, + "835": 1.13828, + "836": 1.13924, + "837": 1.13918, + "838": 1.14038, + "839": 1.14023, + "840": 1.13827, + "841": 1.14334, + "842": 1.26736, + "843": 1.15235, + "844": 1.16327, + "845": 1.15615, + "846": 1.15656, + "847": 1.14563, + "848": 1.14836, + "849": 1.14901, + "850": 1.14852, + "851": 1.15019, + "852": 1.14893, + "853": 1.14907, + "854": 1.14895, + "855": 1.14997, + "856": 1.14951, + "857": 1.15014, + "858": 1.14881, + "859": 1.15072, + "860": 1.16126, + "861": 1.15807, + "862": 1.15716, + "863": 1.15555, + "864": 1.15038, + "865": 1.15177, + "866": 1.15177, + "867": 1.14884, + "868": 1.14782, + "869": 1.15086, + "870": 1.14982, + "871": 1.14833, + "872": 1.14875, + "873": 1.15147, + "874": 1.15225, + "875": 1.29099, + "876": 2.39847, + "877": 2.16612, + "878": 1.53276, + "879": 1.14604, + "880": 1.1515, + "881": 1.16208, + "882": 1.15925, + "883": 1.14916, + "884": 1.14927, + "885": 1.1758, + "886": 1.17545, + "887": 1.17369, + "888": 1.17655, + "889": 1.16376, + "890": 1.14874, + "891": 1.148, + "892": 1.14787, + "893": 1.15123, + "894": 1.15168, + "895": 1.15419, + "896": 1.15535, + "897": 1.15242, + "898": 1.15508, + "899": 1.15225, + "900": 1.15072, + "901": 1.1534, + "902": 1.15136, + "903": 1.15481, + "904": 1.15989, + "905": 1.16184, + "906": 1.14716, + "907": 1.15192, + "908": 1.15696, + "909": 1.15328, + "910": 1.14059, + "911": 1.1604, + "912": 1.14941, + "913": 1.14972, + "914": 1.14954, + "915": 1.15073, + "916": 1.14475, + "917": 1.15414, + "918": 1.1385, + "919": 1.14185, + "920": 1.14089, + "921": 1.13784, + "922": 1.13875, + "923": 1.13882, + "924": 1.14141, + "925": 1.13908, + "926": 1.13874, + "927": 1.13823, + "928": 1.13737, + "929": 1.13836, + "930": 1.13809, + "931": 1.14893, + "932": 1.13972, + "933": 1.1369, + "934": 1.1362, + "935": 1.13765, + "936": 1.14369, + "937": 1.1504, + "938": 1.14208, + 
"939": 1.14841, + "940": 1.14975, + "941": 1.14225, + "942": 1.14185, + "943": 1.13864, + "944": 1.13915, + "945": 1.14062, + "946": 1.15111, + "947": 1.14071, + "948": 1.13898, + "949": 1.1399, + "950": 1.15937, + "951": 1.16785, + "952": 1.16807, + "953": 1.1506, + "954": 1.15006, + "955": 1.15045, + "956": 1.17067, + "957": 1.14856, + "958": 1.14992, + "959": 1.15251, + "960": 1.15045, + "961": 1.15121, + "962": 1.14957, + "963": 1.15095, + "964": 1.15, + "965": 1.15089, + "966": 1.15156, + "967": 1.15423, + "968": 1.16332, + "969": 1.15359, + "970": 1.15613, + "971": 1.15232, + "972": 1.15652, + "973": 1.15399, + "974": 1.15065, + "975": 1.1485, + "976": 1.15243, + "977": 1.15368, + "978": 1.14828, + "979": 1.14969, + "980": 1.15374, + "981": 1.1505, + "982": 1.15031, + "983": 1.15033, + "984": 1.14921, + "985": 1.15504, + "986": 1.15572, + "987": 1.153, + "988": 1.15573, + "989": 1.14747, + "990": 1.14636, + "991": 1.14517, + "992": 1.1463, + "993": 1.14805, + "994": 1.14644, + "995": 1.14583, + "996": 1.14485, + "997": 1.14418, + "998": 1.14622, + "999": 1.14662, + "1000": 1.14312, + "1001": 1.15227, + "1002": 1.14681, + "1003": 1.14794, + "1004": 1.14889, + "1005": 1.15067, + "1006": 1.14757, + "1007": 1.14767, + "1008": 1.15061, + "1009": 1.15075, + "1010": 1.14894, + "1011": 1.14975, + "1012": 1.14667, + "1013": 1.14688, + "1014": 1.14788, + "1015": 1.167, + "1016": 1.44606, + "1017": 1.14923, + "1018": 1.15268, + "1019": 1.14981, + "1020": 1.15011, + "1021": 1.47391, + "1022": 1.15277, + "1023": 1.14774, + "1024": 1.146, + "1025": 1.15253, + "1026": 1.14633, + "1027": 1.14525, + "1028": 1.14728, + "1029": 1.14654, + "1030": 1.14663, + "1031": 1.14708, + "1032": 1.14715, + "1033": 1.1454, + "1034": 1.14763, + "1035": 1.14591, + "1036": 1.14493, + "1037": 1.14584, + "1038": 1.14665, + "1039": 1.14812, + "1040": 1.14495, + "1041": 1.15044, + "1042": 1.14701, + "1043": 1.14657, + "1044": 1.14631, + "1045": 1.14822, + "1046": 1.14789, + "1047": 1.14525, + 
"1048": 1.14815, + "1049": 1.14939, + "1050": 1.14592, + "1051": 1.14667, + "1052": 1.15232, + "1053": 1.14863, + "1054": 1.14908, + "1055": 1.14931, + "1056": 1.14644, + "1057": 1.149, + "1058": 1.14751, + "1059": 1.14668, + "1060": 1.14758, + "1061": 1.14789, + "1062": 1.43562, + "1063": 1.14875, + "1064": 1.14846, + "1065": 1.14888, + "1066": 1.15486, + "1067": 1.15212, + "1068": 1.14934, + "1069": 1.14526, + "1070": 1.14506, + "1071": 1.14599, + "1072": 1.14774, + "1073": 1.14651, + "1074": 1.14609, + "1075": 1.14817, + "1076": 1.14662, + "1077": 1.15159, + "1078": 1.14735, + "1079": 1.14525, + "1080": 1.1516, + "1081": 1.14601, + "1082": 1.13989, + "1083": 1.13569, + "1084": 1.1371, + "1085": 1.1366, + "1086": 1.13713, + "1087": 1.13756, + "1088": 1.13768, + "1089": 1.13917, + "1090": 1.13759, + "1091": 1.13884, + "1092": 1.13707, + "1093": 1.13679, + "1094": 1.13513, + "1095": 1.1351, + "1096": 1.13494, + "1097": 1.13589, + "1098": 1.14132, + "1099": 1.13697, + "1100": 1.14195, + "1101": 1.14189, + "1102": 1.13736, + "1103": 1.13781, + "1104": 1.14284, + "1105": 1.13518, + "1106": 1.13585, + "1107": 1.13621, + "1108": 1.13665, + "1109": 1.13792, + "1110": 1.13764, + "1111": 1.13778, + "1112": 1.13619, + "1113": 1.13651, + "1114": 1.13628, + "1115": 1.13802, + "1116": 1.13792, + "1117": 1.13642, + "1118": 1.13784, + "1119": 1.14898, + "1120": 1.15049, + "1121": 1.15028, + "1122": 1.14509, + "1123": 1.1445, + "1124": 1.14756, + "1125": 1.15117, + "1126": 1.14917, + "1127": 1.1475, + "1128": 1.1481, + "1129": 1.14683, + "1130": 1.14088, + "1131": 1.13493, + "1132": 1.13613, + "1133": 1.13537, + "1134": 1.13473, + "1135": 1.13657, + "1136": 1.13516, + "1137": 1.13606, + "1138": 1.13473, + "1139": 1.13442, + "1140": 1.13398, + "1141": 1.13591, + "1142": 1.13975, + "1143": 1.13478, + "1144": 1.13376, + "1145": 1.13428, + "1146": 1.1348, + "1147": 1.13462, + "1148": 1.1351, + "1149": 1.13494, + "1150": 1.13506, + "1151": 1.13487, + "1152": 1.14039, + "1153": 
1.13991, + "1154": 1.13825, + "1155": 1.1373, + "1156": 1.13451, + "1157": 1.13683, + "1158": 1.13335, + "1159": 1.13548, + "1160": 1.1339, + "1161": 1.13613, + "1162": 1.13429, + "1163": 1.13448, + "1164": 1.13542, + "1165": 1.13453, + "1166": 1.13398, + "1167": 1.13549, + "1168": 1.1342, + "1169": 1.13502, + "1170": 1.13535, + "1171": 1.13581, + "1172": 1.13532, + "1173": 1.13552, + "1174": 1.13371, + "1175": 1.13456, + "1176": 1.13401, + "1177": 1.1335, + "1178": 1.13628, + "1179": 1.13907, + "1180": 1.13757, + "1181": 1.1538, + "1182": 1.15712, + "1183": 1.16123, + "1184": 1.15318, + "1185": 1.14801, + "1186": 1.14711, + "1187": 1.1471, + "1188": 1.15109, + "1189": 1.14707, + "1190": 1.14787, + "1191": 1.1451, + "1192": 1.14677, + "1193": 1.14621, + "1194": 1.14554, + "1195": 1.14738, + "1196": 1.14756, + "1197": 1.14799, + "1198": 1.1487, + "1199": 1.14616, + "1200": 1.14688, + "1201": 1.14531, + "1202": 1.14639, + "1203": 1.14696, + "1204": 1.1469, + "1205": 1.1472, + "1206": 1.14687, + "1207": 1.1494, + "1208": 1.14873, + "1209": 1.15175, + "1210": 1.14868, + "1211": 1.14793, + "1212": 1.14766, + "1213": 1.14823, + "1214": 1.15557, + "1215": 1.15986, + "1216": 1.14175, + "1217": 1.1392, + "1218": 1.13591, + "1219": 1.13796, + "1220": 1.14086, + "1221": 1.14081, + "1222": 1.13816, + "1223": 1.13977, + "1224": 1.14436, + "1225": 1.13986, + "1226": 1.13821, + "1227": 1.13854, + "1228": 1.13738, + "1229": 1.1384, + "1230": 1.13897, + "1231": 1.13732, + "1232": 1.13852, + "1233": 1.14144, + "1234": 1.13711, + "1235": 1.14105, + "1236": 1.13578, + "1237": 1.13838, + "1238": 1.13809, + "1239": 1.13782, + "1240": 1.13859, + "1241": 1.1381, + "1242": 1.13717, + "1243": 1.14814, + "1244": 1.16451, + "1245": 1.17765, + "1246": 1.17167, + "1247": 1.15708, + "1248": 1.15406, + "1249": 1.17391, + "1250": 1.14803, + "1251": 1.14601, + "1252": 1.14796, + "1253": 1.14706, + "1254": 1.14679, + "1255": 1.14306, + "1256": 1.14387, + "1257": 1.14608, + "1258": 1.14617, + "1259": 
1.14999, + "1260": 1.1468, + "1261": 1.14332, + "1262": 1.15005, + "1263": 1.1449, + "1264": 1.14544, + "1265": 1.14292, + "1266": 1.14481, + "1267": 1.154, + "1268": 1.15455, + "1269": 1.15329, + "1270": 1.15008, + "1271": 1.15345, + "1272": 1.14616, + "1273": 1.15423, + "1274": 1.15349, + "1275": 1.14785, + "1276": 1.14536, + "1277": 1.14467, + "1278": 1.1456, + "1279": 1.14593, + "1280": 1.1462, + "1281": 1.14599, + "1282": 1.14837, + "1283": 1.14585, + "1284": 1.14656, + "1285": 1.14618, + "1286": 1.14615, + "1287": 1.14657, + "1288": 1.44686, + "1289": 1.14572, + "1290": 1.14398, + "1291": 1.1431, + "1292": 1.14524, + "1293": 1.14421, + "1294": 1.14593, + "1295": 1.16051, + "1296": 1.16214, + "1297": 1.15606, + "1298": 1.14439, + "1299": 1.14445, + "1300": 1.1445, + "1301": 1.1455, + "1302": 1.14117, + "1303": 1.14365, + "1304": 1.14474, + "1305": 1.14456, + "1306": 1.14522, + "1307": 1.144, + "1308": 1.14453, + "1309": 1.14471, + "1310": 1.1456, + "1311": 1.15495, + "1312": 1.15256, + "1313": 1.14805, + "1314": 1.14996, + "1315": 1.14425, + "1316": 1.14401, + "1317": 1.14262, + "1318": 1.14556, + "1319": 1.14661, + "1320": 1.14567, + "1321": 1.14648, + "1322": 1.14709, + "1323": 1.14522, + "1324": 1.14764, + "1325": 1.14331, + "1326": 1.14538, + "1327": 1.1453, + "1328": 1.14734, + "1329": 1.18619, + "1330": 1.48212, + "1331": 1.14651, + "1332": 1.15204, + "1333": 1.14629, + "1334": 1.14624, + "1335": 1.14927, + "1336": 1.14601, + "1337": 1.15642, + "1338": 1.14811, + "1339": 1.14508, + "1340": 1.15069, + "1341": 1.14629, + "1342": 1.14635, + "1343": 1.14657, + "1344": 1.14655, + "1345": 1.14564, + "1346": 1.14633, + "1347": 1.14523, + "1348": 1.14691, + "1349": 1.14575, + "1350": 1.14592, + "1351": 1.14631, + "1352": 1.14436, + "1353": 1.14573, + "1354": 1.14471, + "1355": 1.14554, + "1356": 1.14492, + "1357": 1.14301, + "1358": 1.141, + "1359": 1.14219, + "1360": 1.14228, + "1361": 1.14109, + "1362": 1.1413, + "1363": 1.14096, + "1364": 1.15355, + "1365": 
1.14229, + "1366": 1.14615, + "1367": 1.14174, + "1368": 1.13953, + "1369": 1.14014, + "1370": 1.14132, + "1371": 1.14139, + "1372": 1.13849, + "1373": 1.14304, + "1374": 1.14028, + "1375": 1.13912, + "1376": 1.14082, + "1377": 1.1416, + "1378": 1.13936, + "1379": 1.13866, + "1380": 1.13826, + "1381": 1.14443, + "1382": 1.14029, + "1383": 1.13913, + "1384": 1.14177, + "1385": 1.14492, + "1386": 1.1415, + "1387": 1.1398, + "1388": 1.14017, + "1389": 1.14077, + "1390": 1.14782, + "1391": 1.15011, + "1392": 1.15174, + "1393": 1.14605, + "1394": 1.14761, + "1395": 1.14735, + "1396": 1.14827, + "1397": 1.14566, + "1398": 1.14659, + "1399": 1.14187, + "1400": 1.14737, + "1401": 1.14674, + "1402": 1.14468, + "1403": 1.14534, + "1404": 1.14726, + "1405": 1.14773, + "1406": 1.14711, + "1407": 1.14543, + "1408": 1.14568, + "1409": 1.14559, + "1410": 1.14443, + "1411": 1.14591, + "1412": 1.14444, + "1413": 1.14904, + "1414": 1.14806, + "1415": 1.14757, + "1416": 1.14307, + "1417": 1.14119, + "1418": 1.14392, + "1419": 1.14104, + "1420": 1.14278, + "1421": 1.13949, + "1422": 1.14028, + "1423": 1.14112, + "1424": 1.14151, + "1425": 1.14321, + "1426": 1.14894, + "1427": 1.14281, + "1428": 1.14881, + "1429": 1.14225, + "1430": 1.13905, + "1431": 1.14148, + "1432": 1.14895, + "1433": 1.15186, + "1434": 1.14773, + "1435": 1.14968, + "1436": 1.14689, + "1437": 1.1487, + "1438": 1.14731, + "1439": 1.14746, + "1440": 1.14835, + "1441": 1.15151, + "1442": 1.15182, + "1443": 1.15073, + "1444": 1.14751, + "1445": 1.15081, + "1446": 1.15106, + "1447": 1.14876, + "1448": 1.15178, + "1449": 1.15117, + "1450": 1.1479, + "1451": 1.14851, + "1452": 1.14502, + "1453": 1.1454, + "1454": 1.14722, + "1455": 1.14628, + "1456": 1.14413, + "1457": 1.14761, + "1458": 1.14681, + "1459": 1.14632, + "1460": 1.14804, + "1461": 1.14676, + "1462": 1.14566, + "1463": 1.14599, + "1464": 1.14679, + "1465": 1.14572, + "1466": 1.14995, + "1467": 1.14848, + "1468": 1.14679, + "1469": 1.15027, + "1470": 1.14636, + 
"1471": 1.14406, + "1472": 1.14039, + "1473": 1.13768, + "1474": 1.13897, + "1475": 1.14331, + "1476": 1.1403, + "1477": 1.14139, + "1478": 1.14985, + "1479": 1.14611, + "1480": 1.47655, + "1481": 1.45511, + "1482": 1.14381, + "1483": 1.13941, + "1484": 1.13782, + "1485": 1.13771, + "1486": 1.13796, + "1487": 1.13795, + "1488": 1.13829, + "1489": 1.13758, + "1490": 1.13822, + "1491": 1.13667, + "1492": 1.13847, + "1493": 1.13787, + "1494": 1.14072, + "1495": 1.14614, + "1496": 1.14436, + "1497": 1.14422, + "1498": 1.1393, + "1499": 1.13987, + "1500": 1.13991, + "1501": 1.14215, + "1502": 1.13842, + "1503": 1.13883, + "1504": 1.1496, + "1505": 1.14028, + "1506": 1.13931, + "1507": 1.13949, + "1508": 1.14063, + "1509": 1.13913, + "1510": 1.1402, + "1511": 1.13931, + "1512": 1.13839, + "1513": 1.13771, + "1514": 1.13848, + "1515": 1.13796, + "1516": 1.13782, + "1517": 1.13889, + "1518": 1.13716, + "1519": 1.13908, + "1520": 1.13972, + "1521": 1.13966, + "1522": 1.13875, + "1523": 1.15781, + "1524": 1.15885, + "1525": 1.15802, + "1526": 1.14191, + "1527": 1.14054, + "1528": 1.1385, + "1529": 1.13922, + "1530": 1.12994, + "1531": 1.12552, + "1532": 1.27166, + "1533": 1.12707, + "1534": 1.12638, + "1535": 1.12608, + "1536": 1.12654, + "1537": 1.12511, + "1538": 1.16008, + "1539": 1.13169, + "1540": 1.13294, + "1541": 1.13386, + "1542": 1.13461, + "1543": 1.13337, + "1544": 1.1331, + "1545": 1.13294, + "1546": 1.13283, + "1547": 1.13316, + "1548": 1.13651, + "1549": 1.13626, + "1550": 1.13638, + "1551": 1.13187, + "1552": 1.20522, + "1553": 1.15894, + "1554": 1.14738, + "1555": 1.14563, + "1556": 1.14409, + "1557": 1.15018, + "1558": 1.14323, + "1559": 1.14591, + "1560": 1.14645, + "1561": 1.14673, + "1562": 1.14543, + "1563": 1.14518, + "1564": 1.14589, + "1565": 1.14486, + "1566": 1.14436, + "1567": 1.14357, + "1568": 1.1454, + "1569": 1.14493, + "1570": 1.14347, + "1571": 1.14477, + "1572": 1.14203, + "1573": 1.14441, + "1574": 1.14468, + "1575": 1.14607, + "1576": 
1.14532, + "1577": 1.14389, + "1578": 1.1433, + "1579": 1.14321, + "1580": 1.14391, + "1581": 1.1421, + "1582": 1.14368, + "1583": 1.1444, + "1584": 1.14356, + "1585": 1.14875, + "1586": 1.14497, + "1587": 1.14521, + "1588": 1.14708, + "1589": 1.14631, + "1590": 1.14662, + "1591": 1.14949, + "1592": 1.15354, + "1593": 1.14014, + "1594": 1.1408, + "1595": 1.14166, + "1596": 1.14151, + "1597": 1.14228, + "1598": 1.14126, + "1599": 1.14028, + "1600": 1.14528, + "1601": 1.14125, + "1602": 1.14085, + "1603": 1.13862, + "1604": 1.13487, + "1605": 1.13314, + "1606": 1.13467, + "1607": 1.13153, + "1608": 1.12971, + "1609": 1.13044, + "1610": 1.14013, + "1611": 1.13008, + "1612": 1.13161, + "1613": 1.13128, + "1614": 1.13059, + "1615": 1.13169, + "1616": 1.13043, + "1617": 1.13141, + "1618": 1.12976, + "1619": 1.13071, + "1620": 1.12907, + "1621": 1.13138, + "1622": 1.12994, + "1623": 1.12985, + "1624": 1.12999, + "1625": 1.13035, + "1626": 1.13761, + "1627": 1.13703, + "1628": 1.15487, + "1629": 1.13257, + "1630": 1.13549, + "1631": 1.13358, + "1632": 1.13488, + "1633": 1.13601, + "1634": 1.13282, + "1635": 1.13439, + "1636": 1.13078, + "1637": 1.13147, + "1638": 1.13065, + "1639": 1.13181, + "1640": 1.13227, + "1641": 1.13282, + "1642": 1.13305, + "1643": 1.19491, + "1644": 1.15821, + "1645": 1.15349, + "1646": 1.1437, + "1647": 1.1416, + "1648": 1.14282, + "1649": 1.1408, + "1650": 1.13388, + "1651": 1.13396, + "1652": 1.15414, + "1653": 1.13734, + "1654": 1.13143, + "1655": 1.13124, + "1656": 1.13417, + "1657": 1.13376, + "1658": 1.12932, + "1659": 1.13161, + "1660": 1.13178, + "1661": 1.1315, + "1662": 1.13209, + "1663": 1.13118, + "1664": 1.13332, + "1665": 1.12981, + "1666": 1.13001, + "1667": 1.12943, + "1668": 1.12938, + "1669": 1.12973, + "1670": 1.13031, + "1671": 1.14164, + "1672": 1.14108, + "1673": 1.14165, + "1674": 1.14189, + "1675": 1.14174, + "1676": 1.14802, + "1677": 1.14434, + "1678": 1.14543, + "1679": 1.14285, + "1680": 1.14529, + "1681": 1.14548, + 
"1682": 1.14333, + "1683": 1.14553, + "1684": 1.14327, + "1685": 1.1476, + "1686": 1.1406, + "1687": 1.13769, + "1688": 1.13364, + "1689": 1.13418, + "1690": 1.13026, + "1691": 1.13222, + "1692": 1.13195, + "1693": 1.13247, + "1694": 1.13264, + "1695": 1.13167, + "1696": 1.13234, + "1697": 1.13335, + "1698": 1.13463, + "1699": 1.1337, + "1700": 1.13362, + "1701": 1.13339, + "1702": 1.13335, + "1703": 1.13412, + "1704": 1.1332, + "1705": 1.13109, + "1706": 1.13306, + "1707": 1.42699, + "1708": 1.14258, + "1709": 1.13227, + "1710": 1.13333, + "1711": 1.13316, + "1712": 1.13147, + "1713": 1.1325, + "1714": 1.13279, + "1715": 1.13509, + "1716": 1.132, + "1717": 1.13183, + "1718": 1.13123, + "1719": 1.13209, + "1720": 1.13195, + "1721": 1.12891, + "1722": 1.12633, + "1723": 1.12872, + "1724": 1.1269, + "1725": 1.12641, + "1726": 1.12585, + "1727": 1.12446, + "1728": 1.12583, + "1729": 1.1336, + "1730": 1.1322, + "1731": 1.13153, + "1732": 1.132, + "1733": 1.13239, + "1734": 1.13216, + "1735": 1.13252, + "1736": 1.13132, + "1737": 1.13165, + "1738": 1.13359, + "1739": 1.126, + "1740": 1.124, + "1741": 1.12533, + "1742": 1.12379, + "1743": 1.12474, + "1744": 1.12432, + "1745": 1.13505, + "1746": 1.13795, + "1747": 1.13914, + "1748": 1.17805, + "1749": 1.13962, + "1750": 1.13602, + "1751": 1.13778, + "1752": 1.13639, + "1753": 1.14452, + "1754": 1.14424, + "1755": 1.14388, + "1756": 1.14572, + "1757": 1.17074, + "1758": 1.14596, + "1759": 1.14637, + "1760": 1.14576, + "1761": 1.1441, + "1762": 1.13385, + "1763": 1.13833, + "1764": 1.13995, + "1765": 1.14229, + "1766": 1.2706, + "1767": 1.15999, + "1768": 1.13873, + "1769": 1.1421, + "1770": 1.13078, + "1771": 1.13059, + "1772": 1.13076, + "1773": 1.13527, + "1774": 1.13153, + "1775": 1.1299, + "1776": 1.13144, + "1777": 1.13048, + "1778": 1.1312, + "1779": 1.13109, + "1780": 1.13227, + "1781": 1.1318, + "1782": 1.13195, + "1783": 1.13076, + "1784": 1.13371, + "1785": 1.13513, + "1786": 1.13544, + "1787": 1.13286, + "1788": 
1.13114, + "1789": 1.12859, + "1790": 1.13136, + "1791": 1.13775, + "1792": 1.1401, + "1793": 1.13769, + "1794": 1.13564, + "1795": 1.13638, + "1796": 1.13621, + "1797": 1.13614, + "1798": 1.13707, + "1799": 1.13631, + "1800": 1.13547, + "1801": 1.13673, + "1802": 1.13706, + "1803": 1.13765, + "1804": 1.13506, + "1805": 1.13603, + "1806": 1.13717, + "1807": 1.13637, + "1808": 1.13841, + "1809": 1.13734, + "1810": 1.1379, + "1811": 1.13795, + "1812": 1.13826, + "1813": 1.13875, + "1814": 1.13885, + "1815": 1.13773, + "1816": 1.13726, + "1817": 1.14087, + "1818": 1.1378, + "1819": 1.13714, + "1820": 1.13737, + "1821": 1.13928, + "1822": 1.1371, + "1823": 1.13901, + "1824": 1.14485, + "1825": 1.12803, + "1826": 1.12264, + "1827": 1.12651, + "1828": 1.13421, + "1829": 1.13198, + "1830": 1.13242, + "1831": 1.13488, + "1832": 1.13287, + "1833": 1.13394, + "1834": 1.13403, + "1835": 1.13598, + "1836": 1.13357, + "1837": 1.13518, + "1838": 1.13404, + "1839": 1.13577, + "1840": 1.13254, + "1841": 1.13422, + "1842": 1.13496, + "1843": 1.135, + "1844": 1.13791, + "1845": 1.13082, + "1846": 1.13135, + "1847": 1.13026, + "1848": 1.13098, + "1849": 1.13032, + "1850": 1.13038, + "1851": 1.13107, + "1852": 1.13535, + "1853": 1.1311, + "1854": 1.13935, + "1855": 1.13148, + "1856": 1.13042, + "1857": 1.13238, + "1858": 1.13034, + "1859": 1.13083, + "1860": 1.13262, + "1861": 1.13117, + "1862": 1.13181, + "1863": 1.13237, + "1864": 1.13125, + "1865": 1.13519, + "1866": 1.14006, + "1867": 1.13476, + "1868": 1.13101, + "1869": 1.13227, + "1870": 1.13399, + "1871": 1.13455, + "1872": 1.13237, + "1873": 1.13088, + "1874": 1.13163, + "1875": 1.13336, + "1876": 1.13121, + "1877": 1.13209, + "1878": 1.13199, + "1879": 1.13177, + "1880": 1.13322, + "1881": 1.13141, + "1882": 1.13236, + "1883": 1.12859, + "1884": 1.12504, + "1885": 1.12493, + "1886": 1.12502, + "1887": 1.12484, + "1888": 1.1248, + "1889": 1.12719, + "1890": 1.13286, + "1891": 1.1293, + "1892": 1.13422, + "1893": 1.12646, + 
"1894": 1.12508, + "1895": 1.12422, + "1896": 1.12724, + "1897": 1.12903, + "1898": 1.13203, + "1899": 1.12741, + "1900": 1.12527, + "1901": 1.12359, + "1902": 1.12382, + "1903": 1.12536, + "1904": 1.12683, + "1905": 1.12606, + "1906": 1.12607, + "1907": 1.12626, + "1908": 1.44717, + "1909": 1.12543, + "1910": 1.12376, + "1911": 1.12429, + "1912": 1.12442, + "1913": 1.12355, + "1914": 1.12476, + "1915": 1.12331, + "1916": 1.12342, + "1917": 1.12442, + "1918": 1.12472, + "1919": 1.12536, + "1920": 1.12387, + "1921": 1.12347, + "1922": 1.12561, + "1923": 1.12391, + "1924": 1.12342, + "1925": 1.12607, + "1926": 1.12383, + "1927": 1.12305, + "1928": 1.125, + "1929": 1.12399, + "1930": 1.1237, + "1931": 1.12459, + "1932": 1.12475, + "1933": 1.12278, + "1934": 1.12413, + "1935": 1.12588, + "1936": 1.12473, + "1937": 1.12412, + "1938": 1.12444, + "1939": 1.12303, + "1940": 1.12421, + "1941": 1.12404, + "1942": 1.12568, + "1943": 1.12645, + "1944": 1.12388, + "1945": 1.44561, + "1946": 1.12748, + "1947": 1.44404, + "1948": 1.12309, + "1949": 1.12591, + "1950": 1.124, + "1951": 1.12953, + "1952": 1.12429, + "1953": 1.48105, + "1954": 1.12576, + "1955": 1.1274, + "1956": 1.12693, + "1957": 1.1261, + "1958": 1.1276, + "1959": 1.18913, + "1960": 1.12817, + "1961": 1.12615, + "1962": 1.12581, + "1963": 1.12682, + "1964": 1.12747, + "1965": 1.14301, + "1966": 1.14417, + "1967": 1.14427, + "1968": 1.14017, + "1969": 1.13872, + "1970": 1.13824, + "1971": 1.14731, + "1972": 1.13727, + "1973": 1.13816, + "1974": 1.13684, + "1975": 1.13985, + "1976": 1.13777, + "1977": 1.13833, + "1978": 1.14247, + "1979": 1.14554, + "1980": 1.14074, + "1981": 1.1396, + "1982": 1.13784, + "1983": 1.19896, + "1984": 1.13952, + "1985": 1.13865, + "1986": 1.13959, + "1987": 1.13909, + "1988": 1.13875, + "1989": 1.13947, + "1990": 1.13762, + "1991": 1.13799, + "1992": 1.13904, + "1993": 1.13674, + "1994": 1.13869, + "1995": 1.13884, + "1996": 1.13807, + "1997": 1.13986, + "1998": 1.14151, + "1999": 
1.13582, + "2000": 1.16726 + } + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_h100_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_h100_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/model_config.yaml index 5668a7575e2..15ac9782df5 100644 --- a/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_h100_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_h100_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/model_config.yaml @@ -42,7 +42,7 @@ MODEL_ARGS: --pipeline-model-parallel-size: 2 --sequence-parallel: true --tp-comm-overlap: true - --tp-comm-overlap-cfg: tests/functional_tests/test_cases/gpt/gpt3_345m_weekly_dgx_h100_1N8G_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/tp_comm_overlap_cfg.yaml + --tp-comm-overlap-cfg: tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_h100_mcore_tp2_pp2_current_scaling_native_fp8_tp_pp_sp_tp_overlap/tp_comm_overlap_cfg.yaml --deterministic-mode: true --no-gradient-accumulation-fusion: true --fp8-format: hybrid diff --git a/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_h100_mcore_tp4_cp2_native_fp8_tp_sp_cp_tp_overlap/golden_values_dev_dgx_h100.json b/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_h100_mcore_tp4_cp2_native_fp8_tp_sp_cp_tp_overlap/golden_values_dev_dgx_h100.json new file mode 100644 index 00000000000..b6e543e2cf8 --- /dev/null +++ b/tests/functional_tests/test_cases/gpt/gpt3_weekly_dgx_h100_mcore_tp4_cp2_native_fp8_tp_sp_cp_tp_overlap/golden_values_dev_dgx_h100.json @@ -0,0 +1,10037 @@ +{ + "lm loss": { + "start_step": 1, + "end_step": 2000, + "step_interval": 1, + "values": { + "1": 10.85229, + "2": 10.85951, + "3": 10.85469, + "4": 10.86843, + "5": 10.85304, + "6": 10.85362, + "7": 10.8602, + "8": 10.85298, + "9": 10.84874, + 
"10": 10.84674, + "11": 10.83863, + "12": 10.83549, + "13": 10.82524, + "14": 10.84078, + "15": 10.78613, + "16": 10.79372, + "17": 10.76553, + "18": 10.78902, + "19": 10.73057, + "20": 10.69489, + "21": 10.64595, + "22": 10.64791, + "23": 10.65524, + "24": 10.55349, + "25": 10.56424, + "26": 10.63262, + "27": 10.47084, + "28": 10.471, + "29": 10.36495, + "30": 10.27406, + "31": 10.43126, + "32": 10.35361, + "33": 10.22439, + "34": 10.17135, + "35": 10.23744, + "36": 10.15766, + "37": 10.10704, + "38": 10.03631, + "39": 10.04895, + "40": 10.06978, + "41": 9.95276, + "42": 9.95577, + "43": 9.87217, + "44": 9.99154, + "45": 10.00766, + "46": 9.84803, + "47": 10.00018, + "48": 9.81816, + "49": 9.94941, + "50": 9.94449, + "51": 9.5964, + "52": 9.79483, + "53": 9.63207, + "54": 9.8854, + "55": 9.74063, + "56": 9.85006, + "57": 9.86123, + "58": 9.87737, + "59": 9.54716, + "60": 9.64756, + "61": 9.87994, + "62": 9.76465, + "63": 9.68066, + "64": 9.82801, + "65": 9.59733, + "66": 9.62928, + "67": 9.74212, + "68": 9.60593, + "69": 9.29694, + "70": 9.42495, + "71": 9.79013, + "72": 9.71358, + "73": 9.61909, + "74": 9.45334, + "75": 9.24289, + "76": 9.50821, + "77": 9.57857, + "78": 9.56035, + "79": 9.31048, + "80": 9.36161, + "81": 9.46136, + "82": 9.55628, + "83": 9.53353, + "84": 9.35526, + "85": 9.40111, + "86": 9.65137, + "87": 9.23621, + "88": 9.48942, + "89": 9.22457, + "90": 9.41443, + "91": 9.39014, + "92": 9.3793, + "93": 9.36366, + "94": 9.51552, + "95": 9.42012, + "96": 9.33698, + "97": 9.20729, + "98": 9.49265, + "99": 9.29333, + "100": 9.35883, + "101": 9.24766, + "102": 9.24259, + "103": 9.07796, + "104": 9.16832, + "105": 9.37671, + "106": 9.15179, + "107": 9.17832, + "108": 9.31483, + "109": 9.28984, + "110": 9.36705, + "111": 9.17605, + "112": 9.23281, + "113": 9.35413, + "114": 9.35742, + "115": 9.32337, + "116": 9.00364, + "117": 9.06445, + "118": 9.06523, + "119": 9.22504, + "120": 9.08324, + "121": 9.19428, + "122": 9.14006, + "123": 9.25894, + "124": 
9.45689, + "125": 9.21857, + "126": 9.0614, + "127": 9.01413, + "128": 9.22025, + "129": 8.98394, + "130": 9.14098, + "131": 9.15643, + "132": 9.03479, + "133": 8.86261, + "134": 9.18468, + "135": 8.88922, + "136": 9.1645, + "137": 9.15944, + "138": 9.23186, + "139": 9.08834, + "140": 8.87267, + "141": 9.29752, + "142": 9.19877, + "143": 9.12079, + "144": 9.24324, + "145": 9.10527, + "146": 8.98338, + "147": 8.9881, + "148": 9.1361, + "149": 9.06877, + "150": 9.01122, + "151": 8.93192, + "152": 8.87852, + "153": 9.06711, + "154": 9.1802, + "155": 9.13786, + "156": 9.05095, + "157": 9.15163, + "158": 9.05301, + "159": 9.03638, + "160": 8.89244, + "161": 9.04764, + "162": 8.89639, + "163": 8.84472, + "164": 8.97496, + "165": 8.93105, + "166": 8.65677, + "167": 8.83411, + "168": 8.8203, + "169": 8.65961, + "170": 9.04726, + "171": 8.72167, + "172": 8.82105, + "173": 8.91105, + "174": 8.85007, + "175": 8.70985, + "176": 8.7611, + "177": 8.76567, + "178": 8.72394, + "179": 8.64132, + "180": 8.74357, + "181": 8.6941, + "182": 8.72315, + "183": 9.08667, + "184": 8.60959, + "185": 8.88334, + "186": 8.74346, + "187": 8.57546, + "188": 8.6841, + "189": 8.86656, + "190": 8.53754, + "191": 8.66593, + "192": 8.61152, + "193": 8.5763, + "194": 8.75183, + "195": 8.5938, + "196": 8.7761, + "197": 8.744, + "198": 8.63042, + "199": 8.77202, + "200": 8.73627, + "201": 8.67068, + "202": 8.55099, + "203": 8.54134, + "204": 8.71213, + "205": 8.22486, + "206": 8.85986, + "207": 8.67928, + "208": 8.70826, + "209": 8.75243, + "210": 8.58226, + "211": 8.84167, + "212": 8.4913, + "213": 8.57316, + "214": 8.51316, + "215": 8.56549, + "216": 8.50617, + "217": 8.53369, + "218": 8.53635, + "219": 8.64298, + "220": 8.54526, + "221": 8.39761, + "222": 8.50474, + "223": 8.44078, + "224": 8.52901, + "225": 8.5708, + "226": 8.44247, + "227": 8.67823, + "228": 8.3859, + "229": 8.4537, + "230": 8.4985, + "231": 8.50257, + "232": 8.49898, + "233": 8.49438, + "234": 8.64018, + "235": 8.5617, + "236": 
8.39791, + "237": 8.49075, + "238": 8.30637, + "239": 8.56099, + "240": 8.67125, + "241": 8.447, + "242": 8.47179, + "243": 8.51685, + "244": 8.36975, + "245": 8.59641, + "246": 8.59557, + "247": 8.43962, + "248": 8.50986, + "249": 8.52277, + "250": 8.42301, + "251": 8.3783, + "252": 8.54698, + "253": 8.3164, + "254": 8.35246, + "255": 8.29609, + "256": 8.20858, + "257": 8.39462, + "258": 8.45148, + "259": 8.23213, + "260": 8.24039, + "261": 8.23733, + "262": 8.34866, + "263": 8.30632, + "264": 8.1907, + "265": 8.33202, + "266": 8.2336, + "267": 7.9013, + "268": 8.37861, + "269": 8.40384, + "270": 8.26475, + "271": 8.27885, + "272": 8.31844, + "273": 8.13253, + "274": 8.09818, + "275": 8.00901, + "276": 7.92522, + "277": 8.23699, + "278": 8.04701, + "279": 7.96356, + "280": 7.75515, + "281": 8.10016, + "282": 8.14722, + "283": 8.15666, + "284": 8.10022, + "285": 8.06894, + "286": 7.90037, + "287": 7.99127, + "288": 8.24359, + "289": 8.17176, + "290": 8.12684, + "291": 8.25357, + "292": 8.0756, + "293": 8.11914, + "294": 7.97501, + "295": 7.96533, + "296": 8.23576, + "297": 7.79081, + "298": 8.04236, + "299": 7.93831, + "300": 7.8498, + "301": 8.00964, + "302": 7.94515, + "303": 7.99053, + "304": 7.95899, + "305": 7.9946, + "306": 7.9738, + "307": 7.98707, + "308": 7.9953, + "309": 8.0059, + "310": 7.97168, + "311": 7.92562, + "312": 7.88182, + "313": 7.82955, + "314": 7.82035, + "315": 7.82475, + "316": 7.74495, + "317": 7.92567, + "318": 7.97631, + "319": 7.82443, + "320": 7.563, + "321": 7.74534, + "322": 7.82917, + "323": 7.76703, + "324": 7.90668, + "325": 7.79387, + "326": 7.64901, + "327": 7.86137, + "328": 7.7832, + "329": 7.87669, + "330": 7.74815, + "331": 7.52005, + "332": 7.81037, + "333": 7.8379, + "334": 7.67759, + "335": 7.69435, + "336": 7.90998, + "337": 7.64618, + "338": 7.89178, + "339": 7.7192, + "340": 7.75318, + "341": 7.70375, + "342": 7.81451, + "343": 7.61028, + "344": 7.58433, + "345": 7.60474, + "346": 7.45825, + "347": 7.55021, + "348": 
7.67669, + "349": 7.57925, + "350": 7.65118, + "351": 7.74172, + "352": 7.69877, + "353": 7.4955, + "354": 7.73645, + "355": 7.75823, + "356": 7.76871, + "357": 7.8083, + "358": 7.59223, + "359": 7.54129, + "360": 7.62161, + "361": 7.53913, + "362": 7.75707, + "363": 7.58184, + "364": 7.57393, + "365": 7.61381, + "366": 7.30007, + "367": 7.55433, + "368": 7.4381, + "369": 7.34072, + "370": 7.45786, + "371": 7.45479, + "372": 7.64528, + "373": 7.51803, + "374": 7.43579, + "375": 7.52279, + "376": 7.33856, + "377": 7.23275, + "378": 7.53208, + "379": 7.48549, + "380": 7.37893, + "381": 7.46259, + "382": 7.28593, + "383": 7.26774, + "384": 7.4035, + "385": 7.38617, + "386": 7.2246, + "387": 7.41197, + "388": 7.27354, + "389": 7.42884, + "390": 7.23295, + "391": 7.63854, + "392": 7.32743, + "393": 7.41119, + "394": 7.46811, + "395": 7.43164, + "396": 7.27624, + "397": 7.22237, + "398": 7.41314, + "399": 7.14965, + "400": 7.28882, + "401": 7.34645, + "402": 7.38389, + "403": 7.27445, + "404": 7.29549, + "405": 7.25441, + "406": 7.20955, + "407": 7.35305, + "408": 7.17476, + "409": 7.15738, + "410": 7.30843, + "411": 7.21046, + "412": 7.19143, + "413": 7.22421, + "414": 6.90584, + "415": 7.32329, + "416": 7.41955, + "417": 7.01436, + "418": 7.26656, + "419": 7.03251, + "420": 7.40294, + "421": 7.17304, + "422": 7.22884, + "423": 7.08611, + "424": 7.2354, + "425": 7.3087, + "426": 7.28003, + "427": 7.12262, + "428": 7.08425, + "429": 6.87125, + "430": 7.19779, + "431": 6.99763, + "432": 7.22298, + "433": 6.96906, + "434": 6.95232, + "435": 7.01097, + "436": 7.00141, + "437": 6.9848, + "438": 6.99447, + "439": 6.93128, + "440": 7.05472, + "441": 7.03406, + "442": 7.09324, + "443": 7.0854, + "444": 6.69941, + "445": 6.98741, + "446": 7.13474, + "447": 7.11726, + "448": 6.97509, + "449": 7.04203, + "450": 7.00855, + "451": 6.82317, + "452": 6.90281, + "453": 7.00796, + "454": 6.96028, + "455": 7.02393, + "456": 6.98781, + "457": 6.96156, + "458": 6.89735, + "459": 6.68323, + 
"460": 7.05439, + "461": 7.088, + "462": 6.86315, + "463": 7.04576, + "464": 6.64275, + "465": 7.02272, + "466": 6.99895, + "467": 6.99097, + "468": 6.94728, + "469": 6.82004, + "470": 7.0355, + "471": 6.87321, + "472": 6.95214, + "473": 6.81396, + "474": 6.96547, + "475": 7.1584, + "476": 6.75391, + "477": 6.88861, + "478": 6.89832, + "479": 6.69636, + "480": 7.01803, + "481": 6.98503, + "482": 6.72248, + "483": 6.77484, + "484": 6.74297, + "485": 6.92045, + "486": 7.05544, + "487": 6.62222, + "488": 6.87375, + "489": 6.76024, + "490": 6.81377, + "491": 6.69837, + "492": 6.68149, + "493": 6.75646, + "494": 6.66282, + "495": 6.62263, + "496": 6.57706, + "497": 6.8292, + "498": 6.63548, + "499": 6.84385, + "500": 6.64283, + "501": 6.71966, + "502": 6.82988, + "503": 6.69833, + "504": 6.60751, + "505": 6.6112, + "506": 6.73586, + "507": 6.85391, + "508": 6.84629, + "509": 6.6384, + "510": 6.81034, + "511": 6.72977, + "512": 6.72804, + "513": 6.64821, + "514": 6.70064, + "515": 6.43824, + "516": 6.73421, + "517": 6.69542, + "518": 6.52993, + "519": 6.62474, + "520": 6.84935, + "521": 6.65329, + "522": 6.6979, + "523": 6.73262, + "524": 6.72634, + "525": 6.6655, + "526": 6.40663, + "527": 6.79088, + "528": 6.65206, + "529": 6.62295, + "530": 6.61639, + "531": 6.63503, + "532": 6.62382, + "533": 6.75435, + "534": 6.60296, + "535": 6.74138, + "536": 6.61812, + "537": 6.63086, + "538": 6.52418, + "539": 6.54299, + "540": 6.57593, + "541": 6.44382, + "542": 6.66189, + "543": 6.67325, + "544": 6.66927, + "545": 6.80511, + "546": 6.6246, + "547": 6.40979, + "548": 6.71663, + "549": 6.68986, + "550": 6.51987, + "551": 6.74092, + "552": 6.63227, + "553": 6.47534, + "554": 6.62778, + "555": 6.45222, + "556": 6.60749, + "557": 6.62431, + "558": 6.37676, + "559": 6.36118, + "560": 6.5756, + "561": 6.72381, + "562": 6.62768, + "563": 6.73287, + "564": 6.34176, + "565": 6.50706, + "566": 6.6902, + "567": 6.55838, + "568": 6.50084, + "569": 6.44415, + "570": 6.35619, + "571": 
6.62259, + "572": 6.30471, + "573": 6.5721, + "574": 6.46259, + "575": 6.63541, + "576": 6.50701, + "577": 6.51656, + "578": 6.47574, + "579": 6.45618, + "580": 6.5583, + "581": 6.59714, + "582": 6.46959, + "583": 6.50413, + "584": 6.51087, + "585": 6.41424, + "586": 6.40258, + "587": 6.4501, + "588": 6.55622, + "589": 6.61456, + "590": 6.27891, + "591": 6.66415, + "592": 6.2545, + "593": 6.46521, + "594": 6.37467, + "595": 6.34819, + "596": 6.25003, + "597": 6.18054, + "598": 6.44279, + "599": 6.38602, + "600": 6.44414, + "601": 6.25051, + "602": 6.51804, + "603": 6.50819, + "604": 6.37382, + "605": 6.48026, + "606": 6.3013, + "607": 6.51999, + "608": 6.66049, + "609": 6.16075, + "610": 6.55805, + "611": 6.38737, + "612": 6.56702, + "613": 6.41056, + "614": 6.18827, + "615": 6.38286, + "616": 6.34421, + "617": 6.36273, + "618": 6.43626, + "619": 6.12502, + "620": 6.3943, + "621": 6.44427, + "622": 6.38402, + "623": 6.56769, + "624": 6.34417, + "625": 6.26521, + "626": 6.28634, + "627": 6.4276, + "628": 6.24043, + "629": 6.57298, + "630": 6.3523, + "631": 6.33431, + "632": 6.29554, + "633": 6.24213, + "634": 6.29476, + "635": 6.53142, + "636": 6.23005, + "637": 6.62121, + "638": 6.00686, + "639": 6.26506, + "640": 6.2796, + "641": 6.19435, + "642": 6.27007, + "643": 6.44413, + "644": 6.2445, + "645": 6.23092, + "646": 6.38932, + "647": 6.3209, + "648": 6.34188, + "649": 6.33297, + "650": 6.47025, + "651": 6.31782, + "652": 6.23993, + "653": 6.36817, + "654": 6.43495, + "655": 6.5135, + "656": 6.31371, + "657": 6.4163, + "658": 6.22993, + "659": 6.1432, + "660": 6.3808, + "661": 6.15725, + "662": 6.2613, + "663": 6.36151, + "664": 6.32043, + "665": 6.39194, + "666": 6.15182, + "667": 6.18562, + "668": 6.22741, + "669": 6.20408, + "670": 6.23602, + "671": 6.22904, + "672": 6.47492, + "673": 6.32812, + "674": 6.28343, + "675": 6.37362, + "676": 6.38018, + "677": 6.29511, + "678": 6.26804, + "679": 6.22803, + "680": 6.28357, + "681": 6.19077, + "682": 6.07906, + "683": 
6.26403, + "684": 6.31575, + "685": 6.2874, + "686": 6.14011, + "687": 6.27685, + "688": 6.19835, + "689": 6.61075, + "690": 6.16856, + "691": 6.17286, + "692": 6.2649, + "693": 6.13689, + "694": 6.22553, + "695": 6.31786, + "696": 6.1061, + "697": 6.14556, + "698": 6.21959, + "699": 6.45326, + "700": 6.03519, + "701": 6.05302, + "702": 6.23703, + "703": 6.17441, + "704": 6.20621, + "705": 6.11844, + "706": 6.06567, + "707": 6.24456, + "708": 6.30245, + "709": 5.99551, + "710": 6.15229, + "711": 6.2479, + "712": 6.17146, + "713": 5.88608, + "714": 6.09975, + "715": 6.10497, + "716": 6.40586, + "717": 6.18363, + "718": 6.23537, + "719": 6.26862, + "720": 6.25804, + "721": 6.25605, + "722": 6.22472, + "723": 6.07187, + "724": 6.22017, + "725": 6.0314, + "726": 6.29244, + "727": 6.00644, + "728": 6.03616, + "729": 6.0826, + "730": 6.17412, + "731": 6.09163, + "732": 6.07888, + "733": 6.11348, + "734": 6.37763, + "735": 6.26791, + "736": 6.17709, + "737": 6.36077, + "738": 6.13247, + "739": 6.14636, + "740": 5.87836, + "741": 6.00499, + "742": 5.98594, + "743": 6.17515, + "744": 6.02317, + "745": 6.14565, + "746": 6.03122, + "747": 6.09452, + "748": 6.22864, + "749": 5.93308, + "750": 6.16381, + "751": 5.95292, + "752": 6.01389, + "753": 6.02392, + "754": 6.28379, + "755": 6.12598, + "756": 6.2443, + "757": 6.01404, + "758": 6.19738, + "759": 6.22084, + "760": 6.02115, + "761": 6.1856, + "762": 6.21798, + "763": 6.02971, + "764": 5.95856, + "765": 5.92315, + "766": 5.96127, + "767": 5.81063, + "768": 6.18012, + "769": 6.27004, + "770": 6.28915, + "771": 5.78425, + "772": 6.0231, + "773": 6.17908, + "774": 5.87868, + "775": 6.02111, + "776": 6.12258, + "777": 5.875, + "778": 6.04901, + "779": 5.86583, + "780": 6.13275, + "781": 5.8451, + "782": 6.03644, + "783": 5.94982, + "784": 5.91239, + "785": 6.08718, + "786": 6.0949, + "787": 5.6498, + "788": 5.99117, + "789": 6.20208, + "790": 6.25533, + "791": 5.78584, + "792": 5.98398, + "793": 6.17232, + "794": 6.02303, + 
"795": 5.99758, + "796": 6.15575, + "797": 6.04799, + "798": 6.04773, + "799": 6.10394, + "800": 6.00523, + "801": 6.13976, + "802": 5.97143, + "803": 6.14303, + "804": 5.99897, + "805": 5.8162, + "806": 6.08016, + "807": 6.03933, + "808": 5.91779, + "809": 5.76774, + "810": 6.00748, + "811": 5.92407, + "812": 5.89853, + "813": 5.95603, + "814": 6.0199, + "815": 5.80113, + "816": 6.10732, + "817": 5.92704, + "818": 6.05349, + "819": 5.99954, + "820": 5.71925, + "821": 5.93871, + "822": 6.18742, + "823": 5.82051, + "824": 5.97479, + "825": 6.17898, + "826": 6.18992, + "827": 6.04811, + "828": 6.0618, + "829": 5.8808, + "830": 5.9338, + "831": 5.89066, + "832": 5.95946, + "833": 6.05775, + "834": 5.98694, + "835": 5.99225, + "836": 5.78808, + "837": 6.1001, + "838": 5.85774, + "839": 5.82603, + "840": 6.17451, + "841": 5.77389, + "842": 5.88244, + "843": 5.93827, + "844": 6.0037, + "845": 6.08214, + "846": 5.68388, + "847": 5.75348, + "848": 5.96075, + "849": 6.0909, + "850": 5.83839, + "851": 6.01221, + "852": 5.74277, + "853": 5.9819, + "854": 6.00994, + "855": 5.81104, + "856": 5.99027, + "857": 5.99462, + "858": 6.04349, + "859": 5.94378, + "860": 6.08776, + "861": 6.05806, + "862": 5.99259, + "863": 5.83184, + "864": 5.83727, + "865": 5.93014, + "866": 5.88373, + "867": 5.87071, + "868": 6.0603, + "869": 6.08011, + "870": 5.96321, + "871": 6.03762, + "872": 5.89053, + "873": 5.83933, + "874": 6.02181, + "875": 5.90658, + "876": 5.96303, + "877": 5.92074, + "878": 6.09702, + "879": 5.76213, + "880": 6.0073, + "881": 5.98795, + "882": 5.90217, + "883": 5.67039, + "884": 5.95748, + "885": 5.74054, + "886": 5.98445, + "887": 5.90648, + "888": 5.8314, + "889": 6.00733, + "890": 6.01123, + "891": 5.94286, + "892": 5.70277, + "893": 6.08459, + "894": 5.72165, + "895": 5.83588, + "896": 5.83978, + "897": 5.84943, + "898": 5.92347, + "899": 5.93201, + "900": 5.8958, + "901": 5.94689, + "902": 5.82987, + "903": 6.04738, + "904": 5.92586, + "905": 5.89894, + "906": 
5.61575, + "907": 5.90522, + "908": 5.73333, + "909": 5.98526, + "910": 5.85686, + "911": 5.69844, + "912": 5.69856, + "913": 5.76407, + "914": 5.82436, + "915": 5.79681, + "916": 5.88608, + "917": 5.867, + "918": 5.8166, + "919": 5.80848, + "920": 5.88971, + "921": 5.8407, + "922": 5.62064, + "923": 6.03383, + "924": 5.60482, + "925": 5.61823, + "926": 5.85786, + "927": 5.95554, + "928": 5.83872, + "929": 5.82237, + "930": 5.95411, + "931": 5.75622, + "932": 5.59098, + "933": 5.63134, + "934": 5.80496, + "935": 5.63538, + "936": 5.8317, + "937": 5.96485, + "938": 5.58943, + "939": 5.79158, + "940": 5.96089, + "941": 5.72676, + "942": 5.83595, + "943": 5.87091, + "944": 5.95881, + "945": 5.70173, + "946": 5.55832, + "947": 5.74676, + "948": 5.79172, + "949": 5.82702, + "950": 5.84636, + "951": 5.72232, + "952": 5.6926, + "953": 5.67846, + "954": 5.72814, + "955": 5.52701, + "956": 5.6247, + "957": 5.84082, + "958": 5.79725, + "959": 5.57236, + "960": 5.8033, + "961": 5.83318, + "962": 5.76931, + "963": 5.768, + "964": 5.70825, + "965": 5.63755, + "966": 5.60344, + "967": 5.72795, + "968": 5.74037, + "969": 5.82565, + "970": 5.64868, + "971": 5.70857, + "972": 5.85255, + "973": 5.67308, + "974": 5.7177, + "975": 5.86027, + "976": 5.71074, + "977": 5.77363, + "978": 5.68598, + "979": 5.5901, + "980": 5.76431, + "981": 5.89808, + "982": 5.47164, + "983": 5.61909, + "984": 5.54693, + "985": 5.58914, + "986": 5.6395, + "987": 5.57215, + "988": 5.71212, + "989": 5.69568, + "990": 5.62713, + "991": 5.85071, + "992": 5.77178, + "993": 5.87182, + "994": 5.69827, + "995": 5.7311, + "996": 5.73947, + "997": 5.81776, + "998": 5.83946, + "999": 5.83213, + "1000": 5.68618, + "1001": 5.86902, + "1002": 5.75759, + "1003": 5.64206, + "1004": 5.80056, + "1005": 5.53357, + "1006": 5.3287, + "1007": 5.7697, + "1008": 5.79391, + "1009": 5.65438, + "1010": 5.78459, + "1011": 5.89696, + "1012": 5.62269, + "1013": 5.61367, + "1014": 5.67992, + "1015": 5.56146, + "1016": 5.87263, + "1017": 
5.83169, + "1018": 5.62357, + "1019": 5.73336, + "1020": 5.61404, + "1021": 5.85353, + "1022": 5.49696, + "1023": 5.65062, + "1024": 5.74334, + "1025": 5.57222, + "1026": 5.40994, + "1027": 5.59905, + "1028": 5.68935, + "1029": 5.68346, + "1030": 5.68799, + "1031": 5.40526, + "1032": 5.78443, + "1033": 5.57561, + "1034": 5.6274, + "1035": 5.71529, + "1036": 5.62368, + "1037": 5.36621, + "1038": 5.66561, + "1039": 5.6477, + "1040": 5.57324, + "1041": 5.59731, + "1042": 5.81493, + "1043": 5.56271, + "1044": 5.46406, + "1045": 5.9683, + "1046": 5.48617, + "1047": 5.39181, + "1048": 5.49562, + "1049": 5.67791, + "1050": 5.69881, + "1051": 5.5776, + "1052": 5.68149, + "1053": 5.63114, + "1054": 5.45857, + "1055": 5.59887, + "1056": 5.67508, + "1057": 5.75628, + "1058": 5.56524, + "1059": 5.74843, + "1060": 5.82162, + "1061": 5.47233, + "1062": 5.65043, + "1063": 5.50248, + "1064": 5.59125, + "1065": 5.55564, + "1066": 5.74466, + "1067": 5.67043, + "1068": 5.44061, + "1069": 5.61122, + "1070": 5.81207, + "1071": 5.51069, + "1072": 5.62291, + "1073": 5.6192, + "1074": 5.52379, + "1075": 5.70748, + "1076": 5.5951, + "1077": 5.70681, + "1078": 5.56223, + "1079": 5.61677, + "1080": 5.64259, + "1081": 5.62201, + "1082": 5.50149, + "1083": 5.64213, + "1084": 5.55087, + "1085": 5.40393, + "1086": 5.62042, + "1087": 5.44171, + "1088": 5.51111, + "1089": 5.76887, + "1090": 5.52736, + "1091": 5.51307, + "1092": 5.40781, + "1093": 5.69672, + "1094": 5.56925, + "1095": 5.5731, + "1096": 5.61367, + "1097": 5.6454, + "1098": 5.65292, + "1099": 5.51436, + "1100": 5.63973, + "1101": 5.67989, + "1102": 5.53567, + "1103": 5.54943, + "1104": 5.53818, + "1105": 5.55271, + "1106": 5.68243, + "1107": 5.68309, + "1108": 5.78112, + "1109": 5.54014, + "1110": 5.6617, + "1111": 5.59215, + "1112": 5.58702, + "1113": 5.62687, + "1114": 5.61504, + "1115": 5.59863, + "1116": 5.66461, + "1117": 5.64732, + "1118": 5.65418, + "1119": 5.70846, + "1120": 5.63501, + "1121": 5.37809, + "1122": 5.23308, + 
"1123": 5.47298, + "1124": 5.65454, + "1125": 5.68419, + "1126": 5.68674, + "1127": 5.56954, + "1128": 5.62438, + "1129": 5.29406, + "1130": 5.54548, + "1131": 5.6238, + "1132": 5.72077, + "1133": 5.51615, + "1134": 5.55302, + "1135": 5.51992, + "1136": 5.42021, + "1137": 5.46757, + "1138": 5.5657, + "1139": 5.41524, + "1140": 5.26144, + "1141": 5.58424, + "1142": 5.64054, + "1143": 5.385, + "1144": 5.3823, + "1145": 5.36615, + "1146": 5.62886, + "1147": 5.49181, + "1148": 5.50478, + "1149": 5.51839, + "1150": 5.39997, + "1151": 5.5553, + "1152": 5.42174, + "1153": 5.4602, + "1154": 5.50372, + "1155": 5.44072, + "1156": 5.34868, + "1157": 5.66217, + "1158": 5.39889, + "1159": 5.33332, + "1160": 5.79511, + "1161": 5.53597, + "1162": 5.45589, + "1163": 5.52529, + "1164": 5.38319, + "1165": 5.52473, + "1166": 5.48721, + "1167": 5.36058, + "1168": 5.49334, + "1169": 5.40387, + "1170": 5.58667, + "1171": 5.48535, + "1172": 5.64049, + "1173": 5.62012, + "1174": 5.51308, + "1175": 5.34473, + "1176": 5.38256, + "1177": 5.55838, + "1178": 5.46714, + "1179": 5.49373, + "1180": 5.46571, + "1181": 5.55314, + "1182": 5.59825, + "1183": 5.76884, + "1184": 5.54748, + "1185": 5.28691, + "1186": 5.60427, + "1187": 5.55401, + "1188": 5.51546, + "1189": 5.38634, + "1190": 5.40233, + "1191": 5.38976, + "1192": 5.49689, + "1193": 5.46486, + "1194": 5.45443, + "1195": 5.32542, + "1196": 5.52268, + "1197": 5.47666, + "1198": 5.52589, + "1199": 5.38688, + "1200": 5.33164, + "1201": 5.49012, + "1202": 5.43748, + "1203": 5.49375, + "1204": 5.40666, + "1205": 5.48999, + "1206": 5.33478, + "1207": 5.58651, + "1208": 5.42414, + "1209": 5.2931, + "1210": 5.49969, + "1211": 5.5071, + "1212": 5.59732, + "1213": 5.41745, + "1214": 5.49785, + "1215": 5.23706, + "1216": 5.41194, + "1217": 5.38264, + "1218": 5.4506, + "1219": 5.48501, + "1220": 5.38351, + "1221": 5.4519, + "1222": 5.31254, + "1223": 5.47747, + "1224": 5.41418, + "1225": 5.42845, + "1226": 5.32249, + "1227": 5.47547, + "1228": 
5.73249, + "1229": 5.32716, + "1230": 5.41211, + "1231": 5.07649, + "1232": 5.78792, + "1233": 5.28531, + "1234": 5.24399, + "1235": 5.36824, + "1236": 5.47881, + "1237": 5.20655, + "1238": 5.41404, + "1239": 5.40719, + "1240": 5.46621, + "1241": 5.57221, + "1242": 5.45465, + "1243": 5.43424, + "1244": 5.51633, + "1245": 5.19115, + "1246": 5.71566, + "1247": 5.43, + "1248": 5.29843, + "1249": 5.40246, + "1250": 5.34088, + "1251": 5.41904, + "1252": 5.57108, + "1253": 5.489, + "1254": 5.31099, + "1255": 5.51387, + "1256": 5.60708, + "1257": 5.42325, + "1258": 5.55956, + "1259": 5.47585, + "1260": 5.50779, + "1261": 5.63801, + "1262": 5.39496, + "1263": 5.32432, + "1264": 5.50348, + "1265": 5.30656, + "1266": 5.23675, + "1267": 5.37031, + "1268": 5.38615, + "1269": 5.14823, + "1270": 5.39882, + "1271": 5.27753, + "1272": 5.52297, + "1273": 5.29632, + "1274": 5.34638, + "1275": 5.37784, + "1276": 5.3975, + "1277": 5.4606, + "1278": 5.35501, + "1279": 5.43897, + "1280": 5.45708, + "1281": 5.4056, + "1282": 5.38482, + "1283": 5.42347, + "1284": 5.34377, + "1285": 5.50505, + "1286": 5.33544, + "1287": 5.58814, + "1288": 5.2615, + "1289": 5.42995, + "1290": 5.49991, + "1291": 5.49987, + "1292": 5.44631, + "1293": 5.4171, + "1294": 5.49492, + "1295": 5.34499, + "1296": 5.18358, + "1297": 5.16726, + "1298": 5.11761, + "1299": 5.30129, + "1300": 5.21142, + "1301": 5.30283, + "1302": 5.27612, + "1303": 5.35547, + "1304": 5.43158, + "1305": 5.36825, + "1306": 5.25293, + "1307": 5.19217, + "1308": 5.27071, + "1309": 5.40774, + "1310": 5.26053, + "1311": 5.37774, + "1312": 5.35324, + "1313": 5.29428, + "1314": 5.29224, + "1315": 5.41906, + "1316": 5.25856, + "1317": 5.27981, + "1318": 5.21136, + "1319": 5.34401, + "1320": 5.4177, + "1321": 5.44957, + "1322": 5.46219, + "1323": 5.37269, + "1324": 5.24973, + "1325": 5.40538, + "1326": 5.53891, + "1327": 5.38638, + "1328": 5.21164, + "1329": 5.41667, + "1330": 5.39695, + "1331": 5.30979, + "1332": 5.3112, + "1333": 5.36823, + 
"1334": 5.44451, + "1335": 5.36788, + "1336": 5.43552, + "1337": 5.46933, + "1338": 5.30246, + "1339": 5.1362, + "1340": 5.41205, + "1341": 5.34033, + "1342": 5.35625, + "1343": 5.47387, + "1344": 5.37842, + "1345": 5.34238, + "1346": 5.07927, + "1347": 5.38404, + "1348": 5.49312, + "1349": 5.40746, + "1350": 5.02698, + "1351": 5.31566, + "1352": 5.15947, + "1353": 5.3409, + "1354": 5.35878, + "1355": 5.11364, + "1356": 5.25842, + "1357": 5.28929, + "1358": 5.15831, + "1359": 5.10775, + "1360": 5.17385, + "1361": 5.30604, + "1362": 5.06672, + "1363": 5.29722, + "1364": 5.3953, + "1365": 5.01953, + "1366": 5.1147, + "1367": 5.33054, + "1368": 5.18248, + "1369": 5.22391, + "1370": 5.1961, + "1371": 5.27906, + "1372": 5.25988, + "1373": 5.28404, + "1374": 5.2779, + "1375": 5.46001, + "1376": 5.26713, + "1377": 5.26807, + "1378": 5.31427, + "1379": 5.22765, + "1380": 5.25807, + "1381": 5.47919, + "1382": 5.08739, + "1383": 5.37543, + "1384": 5.36108, + "1385": 5.39028, + "1386": 5.16582, + "1387": 5.16244, + "1388": 5.27616, + "1389": 5.30262, + "1390": 5.25131, + "1391": 5.26406, + "1392": 5.36794, + "1393": 5.37824, + "1394": 5.40104, + "1395": 5.32383, + "1396": 5.21137, + "1397": 5.2828, + "1398": 5.36587, + "1399": 5.35557, + "1400": 5.26522, + "1401": 5.35981, + "1402": 5.42507, + "1403": 5.19768, + "1404": 5.27957, + "1405": 5.11754, + "1406": 4.98933, + "1407": 5.39818, + "1408": 5.1921, + "1409": 5.39429, + "1410": 5.37153, + "1411": 4.91585, + "1412": 5.35244, + "1413": 5.41055, + "1414": 5.21699, + "1415": 5.44044, + "1416": 5.32598, + "1417": 5.39078, + "1418": 5.29894, + "1419": 5.31316, + "1420": 5.43638, + "1421": 5.39683, + "1422": 5.41859, + "1423": 4.99867, + "1424": 5.33177, + "1425": 5.58491, + "1426": 5.23068, + "1427": 5.31742, + "1428": 5.33463, + "1429": 5.07871, + "1430": 5.32748, + "1431": 5.32237, + "1432": 5.34216, + "1433": 5.18496, + "1434": 5.16175, + "1435": 5.20122, + "1436": 5.10715, + "1437": 5.22566, + "1438": 5.31423, + "1439": 
5.34769, + "1440": 5.34295, + "1441": 5.16777, + "1442": 5.21935, + "1443": 5.20553, + "1444": 5.12984, + "1445": 5.07414, + "1446": 5.26456, + "1447": 5.25775, + "1448": 5.29302, + "1449": 5.24616, + "1450": 5.34316, + "1451": 5.07004, + "1452": 5.26796, + "1453": 5.1741, + "1454": 5.01458, + "1455": 5.12771, + "1456": 5.27213, + "1457": 5.1882, + "1458": 5.00695, + "1459": 5.2215, + "1460": 5.23955, + "1461": 5.08, + "1462": 4.97269, + "1463": 5.15114, + "1464": 5.22113, + "1465": 5.27344, + "1466": 5.36076, + "1467": 5.34631, + "1468": 5.2303, + "1469": 5.05117, + "1470": 5.12322, + "1471": 5.25302, + "1472": 5.12175, + "1473": 5.10167, + "1474": 5.21744, + "1475": 5.18613, + "1476": 5.15517, + "1477": 5.26215, + "1478": 5.30407, + "1479": 5.01063, + "1480": 5.182, + "1481": 5.25124, + "1482": 5.3494, + "1483": 5.27058, + "1484": 4.92644, + "1485": 5.29103, + "1486": 5.04435, + "1487": 4.88432, + "1488": 5.18325, + "1489": 5.10139, + "1490": 5.04545, + "1491": 5.3188, + "1492": 5.22283, + "1493": 4.94061, + "1494": 5.10891, + "1495": 5.13402, + "1496": 5.05779, + "1497": 5.36536, + "1498": 5.30609, + "1499": 5.143, + "1500": 5.09554, + "1501": 5.0349, + "1502": 5.15423, + "1503": 5.43131, + "1504": 5.32574, + "1505": 5.00836, + "1506": 5.14423, + "1507": 5.16501, + "1508": 5.16864, + "1509": 5.3204, + "1510": 5.02703, + "1511": 5.1198, + "1512": 4.98354, + "1513": 5.1699, + "1514": 5.33407, + "1515": 5.36306, + "1516": 5.27572, + "1517": 5.2256, + "1518": 5.02899, + "1519": 5.29833, + "1520": 5.13757, + "1521": 5.15715, + "1522": 5.33462, + "1523": 5.24144, + "1524": 5.06791, + "1525": 5.20708, + "1526": 5.27861, + "1527": 5.25864, + "1528": 5.2395, + "1529": 5.18253, + "1530": 5.23913, + "1531": 5.09996, + "1532": 5.15679, + "1533": 5.05231, + "1534": 5.21917, + "1535": 5.16769, + "1536": 5.102, + "1537": 5.0318, + "1538": 4.91991, + "1539": 5.2394, + "1540": 5.11391, + "1541": 5.25502, + "1542": 5.23775, + "1543": 5.05438, + "1544": 5.08156, + "1545": 5.11794, 
+ "1546": 5.32713, + "1547": 5.10763, + "1548": 5.23418, + "1549": 5.23089, + "1550": 4.97536, + "1551": 5.25942, + "1552": 5.0226, + "1553": 5.14887, + "1554": 5.11051, + "1555": 5.11223, + "1556": 5.19882, + "1557": 5.08844, + "1558": 5.22982, + "1559": 5.00137, + "1560": 5.11269, + "1561": 5.14639, + "1562": 5.18443, + "1563": 5.24639, + "1564": 5.26429, + "1565": 5.08809, + "1566": 5.29393, + "1567": 5.04372, + "1568": 5.08304, + "1569": 5.2002, + "1570": 5.17168, + "1571": 4.95228, + "1572": 5.04524, + "1573": 5.02748, + "1574": 4.99831, + "1575": 5.23124, + "1576": 5.20891, + "1577": 5.12722, + "1578": 5.36355, + "1579": 4.94343, + "1580": 5.12556, + "1581": 5.09739, + "1582": 5.28014, + "1583": 5.04619, + "1584": 5.0566, + "1585": 5.11727, + "1586": 5.30646, + "1587": 5.13281, + "1588": 5.22351, + "1589": 4.83814, + "1590": 5.09825, + "1591": 5.18082, + "1592": 5.14078, + "1593": 5.23646, + "1594": 5.11532, + "1595": 5.10761, + "1596": 5.19194, + "1597": 5.11362, + "1598": 5.16252, + "1599": 5.18865, + "1600": 4.86676, + "1601": 5.11898, + "1602": 5.22827, + "1603": 5.19524, + "1604": 5.05797, + "1605": 5.03277, + "1606": 4.98991, + "1607": 5.06915, + "1608": 4.97927, + "1609": 5.07061, + "1610": 5.04561, + "1611": 4.9918, + "1612": 4.75806, + "1613": 5.03141, + "1614": 4.87811, + "1615": 5.07817, + "1616": 5.22549, + "1617": 5.06182, + "1618": 4.98945, + "1619": 5.18486, + "1620": 5.14429, + "1621": 5.31666, + "1622": 5.06737, + "1623": 5.15063, + "1624": 5.1305, + "1625": 5.12197, + "1626": 5.10206, + "1627": 5.1085, + "1628": 5.06234, + "1629": 4.93316, + "1630": 5.06616, + "1631": 5.05719, + "1632": 5.10145, + "1633": 4.97087, + "1634": 4.92194, + "1635": 5.05013, + "1636": 4.9202, + "1637": 5.22863, + "1638": 5.15783, + "1639": 4.9808, + "1640": 5.00716, + "1641": 5.12367, + "1642": 5.0869, + "1643": 5.05029, + "1644": 5.12283, + "1645": 4.96415, + "1646": 5.12257, + "1647": 5.03267, + "1648": 5.1903, + "1649": 4.92263, + "1650": 5.0596, + "1651": 
4.93391, + "1652": 5.21143, + "1653": 5.1587, + "1654": 5.13384, + "1655": 5.16235, + "1656": 5.34793, + "1657": 5.21074, + "1658": 5.04155, + "1659": 4.92889, + "1660": 4.8117, + "1661": 5.02968, + "1662": 5.14515, + "1663": 5.15868, + "1664": 4.98471, + "1665": 5.11027, + "1666": 5.10315, + "1667": 4.84929, + "1668": 5.10956, + "1669": 5.07311, + "1670": 5.11152, + "1671": 5.16545, + "1672": 4.77709, + "1673": 5.03502, + "1674": 4.91572, + "1675": 5.04406, + "1676": 5.0023, + "1677": 4.80013, + "1678": 5.02745, + "1679": 4.88908, + "1680": 5.03791, + "1681": 5.06371, + "1682": 5.03586, + "1683": 4.90255, + "1684": 5.06133, + "1685": 5.13096, + "1686": 5.075, + "1687": 4.97679, + "1688": 5.17279, + "1689": 5.1507, + "1690": 4.99681, + "1691": 4.99961, + "1692": 4.91412, + "1693": 5.02305, + "1694": 4.94741, + "1695": 4.91895, + "1696": 5.0846, + "1697": 5.05067, + "1698": 4.95116, + "1699": 5.00638, + "1700": 4.94576, + "1701": 5.16681, + "1702": 5.07316, + "1703": 5.16582, + "1704": 5.14235, + "1705": 4.96408, + "1706": 4.98303, + "1707": 4.78833, + "1708": 5.03283, + "1709": 5.2281, + "1710": 5.02918, + "1711": 5.18873, + "1712": 5.19088, + "1713": 5.03631, + "1714": 5.04689, + "1715": 4.91662, + "1716": 4.93663, + "1717": 4.86445, + "1718": 5.02654, + "1719": 5.12575, + "1720": 5.02353, + "1721": 4.9343, + "1722": 5.06572, + "1723": 4.93302, + "1724": 5.03906, + "1725": 5.19169, + "1726": 5.06497, + "1727": 4.91076, + "1728": 5.01922, + "1729": 5.04885, + "1730": 4.91107, + "1731": 5.00108, + "1732": 4.91468, + "1733": 5.12873, + "1734": 4.83023, + "1735": 5.21293, + "1736": 4.91729, + "1737": 4.86164, + "1738": 4.97933, + "1739": 5.16149, + "1740": 4.84041, + "1741": 4.78298, + "1742": 4.91062, + "1743": 5.09353, + "1744": 4.98531, + "1745": 4.82544, + "1746": 4.94973, + "1747": 4.86843, + "1748": 5.06696, + "1749": 4.86793, + "1750": 5.01333, + "1751": 5.12023, + "1752": 4.90813, + "1753": 5.09204, + "1754": 5.05813, + "1755": 4.89777, + "1756": 5.02216, + 
"1757": 5.14157, + "1758": 4.87188, + "1759": 4.94434, + "1760": 4.83222, + "1761": 5.02427, + "1762": 4.81507, + "1763": 4.77391, + "1764": 4.93175, + "1765": 5.14727, + "1766": 5.33614, + "1767": 5.22331, + "1768": 4.94712, + "1769": 5.0043, + "1770": 4.98512, + "1771": 4.96473, + "1772": 4.98299, + "1773": 4.97266, + "1774": 4.87138, + "1775": 4.9493, + "1776": 4.9958, + "1777": 4.94665, + "1778": 4.99288, + "1779": 5.08212, + "1780": 4.83608, + "1781": 5.05478, + "1782": 4.99549, + "1783": 5.01236, + "1784": 4.93254, + "1785": 5.16842, + "1786": 4.80892, + "1787": 4.9699, + "1788": 4.82948, + "1789": 4.88554, + "1790": 4.80386, + "1791": 4.74542, + "1792": 4.87988, + "1793": 5.11081, + "1794": 4.98659, + "1795": 4.97147, + "1796": 5.00354, + "1797": 4.79101, + "1798": 4.77029, + "1799": 5.01913, + "1800": 4.91155, + "1801": 5.04891, + "1802": 4.82591, + "1803": 4.95313, + "1804": 4.88492, + "1805": 4.90634, + "1806": 4.88167, + "1807": 4.92894, + "1808": 4.92469, + "1809": 5.15028, + "1810": 5.09708, + "1811": 4.96325, + "1812": 4.8059, + "1813": 5.1023, + "1814": 4.7819, + "1815": 4.86518, + "1816": 5.05104, + "1817": 4.79238, + "1818": 4.80401, + "1819": 5.02672, + "1820": 4.68884, + "1821": 5.02319, + "1822": 4.66224, + "1823": 4.86936, + "1824": 4.7914, + "1825": 5.06607, + "1826": 4.81841, + "1827": 4.79544, + "1828": 4.9506, + "1829": 5.10848, + "1830": 4.9163, + "1831": 4.89965, + "1832": 4.83328, + "1833": 4.78854, + "1834": 4.94794, + "1835": 4.96175, + "1836": 4.91339, + "1837": 4.6762, + "1838": 4.80703, + "1839": 4.89949, + "1840": 4.91213, + "1841": 4.84083, + "1842": 4.9567, + "1843": 4.71182, + "1844": 4.6194, + "1845": 5.00584, + "1846": 4.75435, + "1847": 4.86491, + "1848": 4.9035, + "1849": 4.85124, + "1850": 4.87005, + "1851": 5.01617, + "1852": 4.97859, + "1853": 4.82821, + "1854": 4.86426, + "1855": 4.82455, + "1856": 4.75214, + "1857": 4.96641, + "1858": 4.96711, + "1859": 4.7484, + "1860": 4.86558, + "1861": 5.21257, + "1862": 4.61253, + 
"1863": 4.83567, + "1864": 4.74748, + "1865": 4.86472, + "1866": 4.78934, + "1867": 5.00307, + "1868": 4.72073, + "1869": 4.76301, + "1870": 4.93972, + "1871": 5.00163, + "1872": 4.68713, + "1873": 4.70038, + "1874": 4.85131, + "1875": 4.85367, + "1876": 4.74378, + "1877": 4.80696, + "1878": 4.8139, + "1879": 4.82462, + "1880": 4.89248, + "1881": 4.79379, + "1882": 4.79882, + "1883": 4.78556, + "1884": 4.97714, + "1885": 4.92363, + "1886": 4.82454, + "1887": 4.82091, + "1888": 4.97246, + "1889": 4.96553, + "1890": 4.71236, + "1891": 4.65764, + "1892": 4.85277, + "1893": 4.65022, + "1894": 4.90165, + "1895": 4.79, + "1896": 4.66068, + "1897": 4.79617, + "1898": 4.92161, + "1899": 4.77736, + "1900": 4.91325, + "1901": 4.84998, + "1902": 4.787, + "1903": 4.76372, + "1904": 4.65638, + "1905": 4.55077, + "1906": 4.81577, + "1907": 4.9106, + "1908": 5.03029, + "1909": 4.89294, + "1910": 4.7884, + "1911": 4.81269, + "1912": 4.653, + "1913": 4.95098, + "1914": 4.88806, + "1915": 4.86687, + "1916": 4.9302, + "1917": 4.85504, + "1918": 4.87427, + "1919": 4.99557, + "1920": 4.77001, + "1921": 4.88729, + "1922": 4.8196, + "1923": 4.75752, + "1924": 4.8297, + "1925": 5.05687, + "1926": 4.94229, + "1927": 4.93308, + "1928": 4.92739, + "1929": 4.93147, + "1930": 4.917, + "1931": 4.77692, + "1932": 4.86743, + "1933": 4.83532, + "1934": 4.84373, + "1935": 5.11279, + "1936": 4.88728, + "1937": 4.8824, + "1938": 4.80623, + "1939": 4.70831, + "1940": 4.83067, + "1941": 4.74224, + "1942": 4.87785, + "1943": 4.74082, + "1944": 4.7536, + "1945": 4.69017, + "1946": 4.91953, + "1947": 4.87613, + "1948": 4.60452, + "1949": 4.89888, + "1950": 4.79826, + "1951": 4.9677, + "1952": 4.73855, + "1953": 4.79852, + "1954": 4.7398, + "1955": 4.85209, + "1956": 4.88278, + "1957": 4.73599, + "1958": 4.70215, + "1959": 4.76471, + "1960": 4.76967, + "1961": 4.71471, + "1962": 4.83443, + "1963": 4.82459, + "1964": 4.85019, + "1965": 4.87867, + "1966": 4.79219, + "1967": 4.60013, + "1968": 4.83399, + 
"1969": 4.59632, + "1970": 4.58346, + "1971": 4.90585, + "1972": 4.89941, + "1973": 4.55559, + "1974": 4.8295, + "1975": 4.83261, + "1976": 4.71818, + "1977": 4.58171, + "1978": 5.00781, + "1979": 4.6663, + "1980": 4.74961, + "1981": 4.87741, + "1982": 4.72647, + "1983": 4.89363, + "1984": 4.64954, + "1985": 4.78941, + "1986": 4.70195, + "1987": 4.8185, + "1988": 4.89272, + "1989": 4.63799, + "1990": 4.79789, + "1991": 4.70399, + "1992": 4.80349, + "1993": 4.74121, + "1994": 4.85611, + "1995": 4.5595, + "1996": 4.65792, + "1997": 4.8133, + "1998": 4.68041, + "1999": 4.73244, + "2000": 4.6301 + } + }, + "num-zeros": { + "start_step": 1, + "end_step": 2000, + "step_interval": 1, + "values": { + "1": 26.0, + "2": 32.0, + "3": 38.0, + "4": 33.0, + "5": 32.0, + "6": 30.0, + "7": 33.0, + "8": 34.0, + "9": 40.0, + "10": 31.0, + "11": 26.0, + "12": 33.0, + "13": 28.0, + "14": 29.0, + "15": 28.0, + "16": 27.0, + "17": 32.0, + "18": 28.0, + "19": 31.0, + "20": 39.0, + "21": 22.0, + "22": 29.0, + "23": 39.0, + "24": 35.0, + "25": 31.0, + "26": 40.0, + "27": 39.0, + "28": 42.0, + "29": 53.0, + "30": 51.0, + "31": 48.0, + "32": 51.0, + "33": 38.0, + "34": 48.0, + "35": 47.0, + "36": 49.0, + "37": 42.0, + "38": 43.0, + "39": 52.0, + "40": 55.0, + "41": 39.0, + "42": 54.0, + "43": 57.0, + "44": 53.0, + "45": 46.0, + "46": 61.0, + "47": 52.0, + "48": 54.0, + "49": 64.0, + "50": 64.0, + "51": 42.0, + "52": 55.0, + "53": 48.0, + "54": 71.0, + "55": 56.0, + "56": 74.0, + "57": 70.0, + "58": 57.0, + "59": 53.0, + "60": 67.0, + "61": 63.0, + "62": 59.0, + "63": 66.0, + "64": 70.0, + "65": 59.0, + "66": 74.0, + "67": 81.0, + "68": 74.0, + "69": 60.0, + "70": 60.0, + "71": 66.0, + "72": 75.0, + "73": 67.0, + "74": 63.0, + "75": 60.0, + "76": 60.0, + "77": 78.0, + "78": 78.0, + "79": 58.0, + "80": 63.0, + "81": 63.0, + "82": 50.0, + "83": 63.0, + "84": 72.0, + "85": 69.0, + "86": 80.0, + "87": 70.0, + "88": 68.0, + "89": 69.0, + "90": 63.0, + "91": 58.0, + "92": 87.0, + "93": 65.0, + 
"94": 50.0, + "95": 67.0, + "96": 71.0, + "97": 70.0, + "98": 81.0, + "99": 66.0, + "100": 76.0, + "101": 67.0, + "102": 44.0, + "103": 60.0, + "104": 68.0, + "105": 84.0, + "106": 61.0, + "107": 76.0, + "108": 68.0, + "109": 76.0, + "110": 74.0, + "111": 75.0, + "112": 78.0, + "113": 58.0, + "114": 66.0, + "115": 71.0, + "116": 63.0, + "117": 74.0, + "118": 52.0, + "119": 74.0, + "120": 52.0, + "121": 76.0, + "122": 66.0, + "123": 81.0, + "124": 76.0, + "125": 87.0, + "126": 49.0, + "127": 56.0, + "128": 78.0, + "129": 53.0, + "130": 76.0, + "131": 86.0, + "132": 61.0, + "133": 72.0, + "134": 62.0, + "135": 59.0, + "136": 60.0, + "137": 57.0, + "138": 81.0, + "139": 74.0, + "140": 59.0, + "141": 50.0, + "142": 64.0, + "143": 54.0, + "144": 49.0, + "145": 57.0, + "146": 51.0, + "147": 49.0, + "148": 69.0, + "149": 49.0, + "150": 66.0, + "151": 57.0, + "152": 51.0, + "153": 61.0, + "154": 58.0, + "155": 68.0, + "156": 68.0, + "157": 51.0, + "158": 68.0, + "159": 60.0, + "160": 64.0, + "161": 66.0, + "162": 75.0, + "163": 40.0, + "164": 84.0, + "165": 50.0, + "166": 68.0, + "167": 54.0, + "168": 58.0, + "169": 65.0, + "170": 71.0, + "171": 54.0, + "172": 64.0, + "173": 81.0, + "174": 55.0, + "175": 63.0, + "176": 69.0, + "177": 80.0, + "178": 68.0, + "179": 69.0, + "180": 64.0, + "181": 41.0, + "182": 63.0, + "183": 66.0, + "184": 67.0, + "185": 77.0, + "186": 77.0, + "187": 61.0, + "188": 62.0, + "189": 50.0, + "190": 57.0, + "191": 60.0, + "192": 67.0, + "193": 70.0, + "194": 72.0, + "195": 60.0, + "196": 81.0, + "197": 56.0, + "198": 47.0, + "199": 50.0, + "200": 86.0, + "201": 52.0, + "202": 64.0, + "203": 58.0, + "204": 63.0, + "205": 40.0, + "206": 72.0, + "207": 50.0, + "208": 42.0, + "209": 69.0, + "210": 68.0, + "211": 56.0, + "212": 64.0, + "213": 60.0, + "214": 62.0, + "215": 66.0, + "216": 58.0, + "217": 59.0, + "218": 70.0, + "219": 80.0, + "220": 81.0, + "221": 51.0, + "222": 57.0, + "223": 67.0, + "224": 53.0, + "225": 61.0, + "226": 68.0, + "227": 
76.0, + "228": 59.0, + "229": 44.0, + "230": 50.0, + "231": 58.0, + "232": 65.0, + "233": 90.0, + "234": 60.0, + "235": 98.0, + "236": 49.0, + "237": 92.0, + "238": 71.0, + "239": 68.0, + "240": 79.0, + "241": 67.0, + "242": 75.0, + "243": 66.0, + "244": 59.0, + "245": 81.0, + "246": 80.0, + "247": 88.0, + "248": 81.0, + "249": 79.0, + "250": 80.0, + "251": 74.0, + "252": 72.0, + "253": 57.0, + "254": 67.0, + "255": 79.0, + "256": 86.0, + "257": 66.0, + "258": 94.0, + "259": 69.0, + "260": 70.0, + "261": 64.0, + "262": 77.0, + "263": 74.0, + "264": 70.0, + "265": 68.0, + "266": 67.0, + "267": 66.0, + "268": 59.0, + "269": 73.0, + "270": 85.0, + "271": 67.0, + "272": 81.0, + "273": 71.0, + "274": 69.0, + "275": 72.0, + "276": 72.0, + "277": 82.0, + "278": 61.0, + "279": 94.0, + "280": 56.0, + "281": 55.0, + "282": 73.0, + "283": 90.0, + "284": 85.0, + "285": 49.0, + "286": 50.0, + "287": 90.0, + "288": 71.0, + "289": 85.0, + "290": 75.0, + "291": 88.0, + "292": 88.0, + "293": 91.0, + "294": 84.0, + "295": 85.0, + "296": 102.0, + "297": 70.0, + "298": 65.0, + "299": 80.0, + "300": 80.0, + "301": 91.0, + "302": 94.0, + "303": 71.0, + "304": 74.0, + "305": 59.0, + "306": 72.0, + "307": 73.0, + "308": 91.0, + "309": 88.0, + "310": 82.0, + "311": 84.0, + "312": 73.0, + "313": 97.0, + "314": 74.0, + "315": 69.0, + "316": 96.0, + "317": 61.0, + "318": 99.0, + "319": 67.0, + "320": 77.0, + "321": 86.0, + "322": 70.0, + "323": 86.0, + "324": 96.0, + "325": 74.0, + "326": 97.0, + "327": 73.0, + "328": 99.0, + "329": 93.0, + "330": 96.0, + "331": 81.0, + "332": 79.0, + "333": 97.0, + "334": 81.0, + "335": 84.0, + "336": 81.0, + "337": 99.0, + "338": 89.0, + "339": 93.0, + "340": 101.0, + "341": 93.0, + "342": 57.0, + "343": 81.0, + "344": 105.0, + "345": 88.0, + "346": 85.0, + "347": 91.0, + "348": 82.0, + "349": 78.0, + "350": 101.0, + "351": 105.0, + "352": 76.0, + "353": 112.0, + "354": 72.0, + "355": 79.0, + "356": 104.0, + "357": 86.0, + "358": 77.0, + "359": 99.0, + 
"360": 102.0, + "361": 64.0, + "362": 123.0, + "363": 96.0, + "364": 95.0, + "365": 85.0, + "366": 82.0, + "367": 84.0, + "368": 83.0, + "369": 77.0, + "370": 118.0, + "371": 76.0, + "372": 77.0, + "373": 96.0, + "374": 68.0, + "375": 92.0, + "376": 84.0, + "377": 98.0, + "378": 99.0, + "379": 108.0, + "380": 96.0, + "381": 92.0, + "382": 75.0, + "383": 89.0, + "384": 100.0, + "385": 73.0, + "386": 85.0, + "387": 73.0, + "388": 93.0, + "389": 88.0, + "390": 90.0, + "391": 115.0, + "392": 88.0, + "393": 99.0, + "394": 104.0, + "395": 125.0, + "396": 80.0, + "397": 78.0, + "398": 67.0, + "399": 104.0, + "400": 96.0, + "401": 105.0, + "402": 88.0, + "403": 97.0, + "404": 101.0, + "405": 85.0, + "406": 114.0, + "407": 76.0, + "408": 98.0, + "409": 84.0, + "410": 102.0, + "411": 81.0, + "412": 56.0, + "413": 68.0, + "414": 90.0, + "415": 95.0, + "416": 93.0, + "417": 90.0, + "418": 60.0, + "419": 86.0, + "420": 76.0, + "421": 110.0, + "422": 89.0, + "423": 78.0, + "424": 82.0, + "425": 94.0, + "426": 80.0, + "427": 96.0, + "428": 86.0, + "429": 92.0, + "430": 84.0, + "431": 87.0, + "432": 80.0, + "433": 81.0, + "434": 93.0, + "435": 83.0, + "436": 82.0, + "437": 91.0, + "438": 62.0, + "439": 72.0, + "440": 79.0, + "441": 87.0, + "442": 106.0, + "443": 106.0, + "444": 58.0, + "445": 93.0, + "446": 89.0, + "447": 97.0, + "448": 79.0, + "449": 90.0, + "450": 83.0, + "451": 63.0, + "452": 70.0, + "453": 63.0, + "454": 80.0, + "455": 114.0, + "456": 98.0, + "457": 101.0, + "458": 70.0, + "459": 69.0, + "460": 65.0, + "461": 115.0, + "462": 63.0, + "463": 73.0, + "464": 69.0, + "465": 95.0, + "466": 76.0, + "467": 77.0, + "468": 90.0, + "469": 65.0, + "470": 91.0, + "471": 76.0, + "472": 60.0, + "473": 94.0, + "474": 69.0, + "475": 90.0, + "476": 66.0, + "477": 75.0, + "478": 78.0, + "479": 63.0, + "480": 73.0, + "481": 80.0, + "482": 77.0, + "483": 78.0, + "484": 84.0, + "485": 70.0, + "486": 84.0, + "487": 69.0, + "488": 88.0, + "489": 77.0, + "490": 59.0, + "491": 83.0, + 
"492": 57.0, + "493": 83.0, + "494": 69.0, + "495": 50.0, + "496": 56.0, + "497": 97.0, + "498": 77.0, + "499": 75.0, + "500": 60.0, + "501": 64.0, + "502": 64.0, + "503": 71.0, + "504": 77.0, + "505": 68.0, + "506": 65.0, + "507": 80.0, + "508": 42.0, + "509": 63.0, + "510": 77.0, + "511": 81.0, + "512": 57.0, + "513": 61.0, + "514": 60.0, + "515": 71.0, + "516": 61.0, + "517": 85.0, + "518": 43.0, + "519": 72.0, + "520": 82.0, + "521": 50.0, + "522": 58.0, + "523": 74.0, + "524": 70.0, + "525": 82.0, + "526": 60.0, + "527": 71.0, + "528": 63.0, + "529": 66.0, + "530": 67.0, + "531": 69.0, + "532": 72.0, + "533": 81.0, + "534": 62.0, + "535": 66.0, + "536": 61.0, + "537": 60.0, + "538": 55.0, + "539": 62.0, + "540": 63.0, + "541": 61.0, + "542": 61.0, + "543": 55.0, + "544": 64.0, + "545": 73.0, + "546": 77.0, + "547": 69.0, + "548": 75.0, + "549": 61.0, + "550": 61.0, + "551": 63.0, + "552": 71.0, + "553": 78.0, + "554": 67.0, + "555": 65.0, + "556": 74.0, + "557": 61.0, + "558": 62.0, + "559": 62.0, + "560": 71.0, + "561": 56.0, + "562": 65.0, + "563": 77.0, + "564": 67.0, + "565": 55.0, + "566": 58.0, + "567": 42.0, + "568": 70.0, + "569": 56.0, + "570": 60.0, + "571": 58.0, + "572": 41.0, + "573": 71.0, + "574": 69.0, + "575": 85.0, + "576": 44.0, + "577": 50.0, + "578": 69.0, + "579": 62.0, + "580": 67.0, + "581": 59.0, + "582": 58.0, + "583": 55.0, + "584": 47.0, + "585": 60.0, + "586": 41.0, + "587": 47.0, + "588": 53.0, + "589": 55.0, + "590": 46.0, + "591": 69.0, + "592": 50.0, + "593": 52.0, + "594": 56.0, + "595": 47.0, + "596": 44.0, + "597": 33.0, + "598": 61.0, + "599": 50.0, + "600": 88.0, + "601": 55.0, + "602": 64.0, + "603": 60.0, + "604": 57.0, + "605": 57.0, + "606": 45.0, + "607": 54.0, + "608": 45.0, + "609": 40.0, + "610": 45.0, + "611": 53.0, + "612": 52.0, + "613": 73.0, + "614": 53.0, + "615": 52.0, + "616": 64.0, + "617": 44.0, + "618": 59.0, + "619": 50.0, + "620": 72.0, + "621": 50.0, + "622": 58.0, + "623": 57.0, + "624": 56.0, + 
"625": 56.0, + "626": 71.0, + "627": 50.0, + "628": 49.0, + "629": 50.0, + "630": 50.0, + "631": 40.0, + "632": 45.0, + "633": 42.0, + "634": 38.0, + "635": 51.0, + "636": 36.0, + "637": 55.0, + "638": 45.0, + "639": 63.0, + "640": 52.0, + "641": 51.0, + "642": 52.0, + "643": 49.0, + "644": 51.0, + "645": 57.0, + "646": 57.0, + "647": 69.0, + "648": 60.0, + "649": 49.0, + "650": 49.0, + "651": 66.0, + "652": 49.0, + "653": 59.0, + "654": 42.0, + "655": 42.0, + "656": 46.0, + "657": 49.0, + "658": 50.0, + "659": 44.0, + "660": 53.0, + "661": 46.0, + "662": 60.0, + "663": 43.0, + "664": 61.0, + "665": 37.0, + "666": 30.0, + "667": 42.0, + "668": 41.0, + "669": 44.0, + "670": 44.0, + "671": 59.0, + "672": 53.0, + "673": 47.0, + "674": 42.0, + "675": 54.0, + "676": 43.0, + "677": 68.0, + "678": 41.0, + "679": 38.0, + "680": 46.0, + "681": 50.0, + "682": 33.0, + "683": 38.0, + "684": 52.0, + "685": 40.0, + "686": 43.0, + "687": 61.0, + "688": 57.0, + "689": 51.0, + "690": 35.0, + "691": 45.0, + "692": 55.0, + "693": 36.0, + "694": 50.0, + "695": 50.0, + "696": 51.0, + "697": 41.0, + "698": 37.0, + "699": 47.0, + "700": 42.0, + "701": 37.0, + "702": 33.0, + "703": 39.0, + "704": 43.0, + "705": 45.0, + "706": 32.0, + "707": 38.0, + "708": 38.0, + "709": 46.0, + "710": 35.0, + "711": 48.0, + "712": 35.0, + "713": 48.0, + "714": 37.0, + "715": 48.0, + "716": 36.0, + "717": 34.0, + "718": 26.0, + "719": 36.0, + "720": 34.0, + "721": 36.0, + "722": 35.0, + "723": 29.0, + "724": 47.0, + "725": 32.0, + "726": 39.0, + "727": 40.0, + "728": 39.0, + "729": 47.0, + "730": 36.0, + "731": 48.0, + "732": 43.0, + "733": 39.0, + "734": 51.0, + "735": 40.0, + "736": 49.0, + "737": 44.0, + "738": 27.0, + "739": 46.0, + "740": 38.0, + "741": 38.0, + "742": 45.0, + "743": 44.0, + "744": 52.0, + "745": 48.0, + "746": 50.0, + "747": 53.0, + "748": 52.0, + "749": 48.0, + "750": 46.0, + "751": 40.0, + "752": 50.0, + "753": 44.0, + "754": 43.0, + "755": 48.0, + "756": 38.0, + "757": 45.0, + 
"758": 40.0, + "759": 56.0, + "760": 46.0, + "761": 44.0, + "762": 48.0, + "763": 54.0, + "764": 49.0, + "765": 42.0, + "766": 57.0, + "767": 45.0, + "768": 51.0, + "769": 60.0, + "770": 51.0, + "771": 31.0, + "772": 41.0, + "773": 60.0, + "774": 37.0, + "775": 43.0, + "776": 37.0, + "777": 34.0, + "778": 42.0, + "779": 37.0, + "780": 34.0, + "781": 41.0, + "782": 25.0, + "783": 30.0, + "784": 39.0, + "785": 34.0, + "786": 38.0, + "787": 47.0, + "788": 41.0, + "789": 50.0, + "790": 44.0, + "791": 34.0, + "792": 38.0, + "793": 53.0, + "794": 45.0, + "795": 52.0, + "796": 39.0, + "797": 41.0, + "798": 39.0, + "799": 44.0, + "800": 46.0, + "801": 44.0, + "802": 40.0, + "803": 47.0, + "804": 34.0, + "805": 45.0, + "806": 43.0, + "807": 46.0, + "808": 36.0, + "809": 35.0, + "810": 35.0, + "811": 44.0, + "812": 47.0, + "813": 41.0, + "814": 36.0, + "815": 41.0, + "816": 52.0, + "817": 43.0, + "818": 35.0, + "819": 52.0, + "820": 40.0, + "821": 29.0, + "822": 34.0, + "823": 44.0, + "824": 47.0, + "825": 36.0, + "826": 40.0, + "827": 29.0, + "828": 35.0, + "829": 32.0, + "830": 30.0, + "831": 36.0, + "832": 34.0, + "833": 39.0, + "834": 50.0, + "835": 38.0, + "836": 37.0, + "837": 50.0, + "838": 45.0, + "839": 52.0, + "840": 37.0, + "841": 35.0, + "842": 30.0, + "843": 50.0, + "844": 23.0, + "845": 45.0, + "846": 25.0, + "847": 32.0, + "848": 25.0, + "849": 34.0, + "850": 39.0, + "851": 46.0, + "852": 41.0, + "853": 43.0, + "854": 45.0, + "855": 27.0, + "856": 47.0, + "857": 47.0, + "858": 46.0, + "859": 35.0, + "860": 45.0, + "861": 30.0, + "862": 39.0, + "863": 21.0, + "864": 26.0, + "865": 46.0, + "866": 44.0, + "867": 48.0, + "868": 27.0, + "869": 42.0, + "870": 45.0, + "871": 33.0, + "872": 49.0, + "873": 32.0, + "874": 56.0, + "875": 38.0, + "876": 41.0, + "877": 40.0, + "878": 37.0, + "879": 22.0, + "880": 39.0, + "881": 40.0, + "882": 49.0, + "883": 39.0, + "884": 35.0, + "885": 32.0, + "886": 45.0, + "887": 41.0, + "888": 34.0, + "889": 35.0, + "890": 37.0, + 
"891": 41.0, + "892": 42.0, + "893": 42.0, + "894": 34.0, + "895": 38.0, + "896": 37.0, + "897": 41.0, + "898": 33.0, + "899": 35.0, + "900": 39.0, + "901": 37.0, + "902": 39.0, + "903": 42.0, + "904": 38.0, + "905": 32.0, + "906": 34.0, + "907": 38.0, + "908": 39.0, + "909": 52.0, + "910": 34.0, + "911": 26.0, + "912": 46.0, + "913": 40.0, + "914": 48.0, + "915": 25.0, + "916": 49.0, + "917": 36.0, + "918": 31.0, + "919": 26.0, + "920": 40.0, + "921": 34.0, + "922": 38.0, + "923": 41.0, + "924": 24.0, + "925": 27.0, + "926": 43.0, + "927": 31.0, + "928": 40.0, + "929": 32.0, + "930": 42.0, + "931": 33.0, + "932": 34.0, + "933": 38.0, + "934": 41.0, + "935": 26.0, + "936": 44.0, + "937": 36.0, + "938": 37.0, + "939": 28.0, + "940": 33.0, + "941": 34.0, + "942": 31.0, + "943": 26.0, + "944": 37.0, + "945": 29.0, + "946": 31.0, + "947": 34.0, + "948": 41.0, + "949": 31.0, + "950": 35.0, + "951": 31.0, + "952": 38.0, + "953": 47.0, + "954": 43.0, + "955": 46.0, + "956": 35.0, + "957": 40.0, + "958": 37.0, + "959": 52.0, + "960": 35.0, + "961": 38.0, + "962": 41.0, + "963": 45.0, + "964": 43.0, + "965": 51.0, + "966": 38.0, + "967": 31.0, + "968": 32.0, + "969": 35.0, + "970": 48.0, + "971": 38.0, + "972": 43.0, + "973": 38.0, + "974": 40.0, + "975": 43.0, + "976": 29.0, + "977": 44.0, + "978": 31.0, + "979": 43.0, + "980": 39.0, + "981": 33.0, + "982": 30.0, + "983": 54.0, + "984": 43.0, + "985": 48.0, + "986": 40.0, + "987": 30.0, + "988": 38.0, + "989": 38.0, + "990": 42.0, + "991": 36.0, + "992": 48.0, + "993": 47.0, + "994": 50.0, + "995": 35.0, + "996": 29.0, + "997": 51.0, + "998": 42.0, + "999": 35.0, + "1000": 28.0, + "1001": 23.0, + "1002": 35.0, + "1003": 39.0, + "1004": 46.0, + "1005": 42.0, + "1006": 27.0, + "1007": 44.0, + "1008": 32.0, + "1009": 34.0, + "1010": 29.0, + "1011": 31.0, + "1012": 28.0, + "1013": 37.0, + "1014": 29.0, + "1015": 39.0, + "1016": 31.0, + "1017": 37.0, + "1018": 46.0, + "1019": 26.0, + "1020": 34.0, + "1021": 30.0, + "1022": 
46.0, + "1023": 38.0, + "1024": 49.0, + "1025": 41.0, + "1026": 55.0, + "1027": 37.0, + "1028": 29.0, + "1029": 38.0, + "1030": 35.0, + "1031": 41.0, + "1032": 42.0, + "1033": 27.0, + "1034": 29.0, + "1035": 32.0, + "1036": 25.0, + "1037": 34.0, + "1038": 32.0, + "1039": 31.0, + "1040": 30.0, + "1041": 24.0, + "1042": 20.0, + "1043": 26.0, + "1044": 44.0, + "1045": 37.0, + "1046": 34.0, + "1047": 27.0, + "1048": 36.0, + "1049": 42.0, + "1050": 37.0, + "1051": 40.0, + "1052": 40.0, + "1053": 32.0, + "1054": 37.0, + "1055": 31.0, + "1056": 36.0, + "1057": 37.0, + "1058": 37.0, + "1059": 35.0, + "1060": 32.0, + "1061": 37.0, + "1062": 45.0, + "1063": 38.0, + "1064": 42.0, + "1065": 35.0, + "1066": 36.0, + "1067": 29.0, + "1068": 30.0, + "1069": 30.0, + "1070": 39.0, + "1071": 33.0, + "1072": 36.0, + "1073": 41.0, + "1074": 47.0, + "1075": 36.0, + "1076": 39.0, + "1077": 45.0, + "1078": 32.0, + "1079": 46.0, + "1080": 43.0, + "1081": 40.0, + "1082": 42.0, + "1083": 42.0, + "1084": 42.0, + "1085": 38.0, + "1086": 42.0, + "1087": 36.0, + "1088": 31.0, + "1089": 42.0, + "1090": 28.0, + "1091": 36.0, + "1092": 35.0, + "1093": 36.0, + "1094": 41.0, + "1095": 37.0, + "1096": 48.0, + "1097": 33.0, + "1098": 24.0, + "1099": 43.0, + "1100": 41.0, + "1101": 38.0, + "1102": 39.0, + "1103": 29.0, + "1104": 33.0, + "1105": 38.0, + "1106": 37.0, + "1107": 30.0, + "1108": 41.0, + "1109": 41.0, + "1110": 42.0, + "1111": 43.0, + "1112": 25.0, + "1113": 40.0, + "1114": 32.0, + "1115": 34.0, + "1116": 45.0, + "1117": 40.0, + "1118": 39.0, + "1119": 31.0, + "1120": 28.0, + "1121": 28.0, + "1122": 28.0, + "1123": 43.0, + "1124": 34.0, + "1125": 26.0, + "1126": 33.0, + "1127": 31.0, + "1128": 33.0, + "1129": 43.0, + "1130": 43.0, + "1131": 40.0, + "1132": 42.0, + "1133": 34.0, + "1134": 32.0, + "1135": 29.0, + "1136": 36.0, + "1137": 42.0, + "1138": 34.0, + "1139": 31.0, + "1140": 38.0, + "1141": 37.0, + "1142": 38.0, + "1143": 44.0, + "1144": 40.0, + "1145": 39.0, + "1146": 42.0, + "1147": 
35.0, + "1148": 29.0, + "1149": 40.0, + "1150": 34.0, + "1151": 27.0, + "1152": 22.0, + "1153": 36.0, + "1154": 31.0, + "1155": 41.0, + "1156": 26.0, + "1157": 33.0, + "1158": 35.0, + "1159": 36.0, + "1160": 41.0, + "1161": 40.0, + "1162": 48.0, + "1163": 37.0, + "1164": 43.0, + "1165": 34.0, + "1166": 30.0, + "1167": 34.0, + "1168": 31.0, + "1169": 41.0, + "1170": 27.0, + "1171": 40.0, + "1172": 34.0, + "1173": 23.0, + "1174": 40.0, + "1175": 30.0, + "1176": 50.0, + "1177": 39.0, + "1178": 33.0, + "1179": 42.0, + "1180": 31.0, + "1181": 30.0, + "1182": 38.0, + "1183": 37.0, + "1184": 35.0, + "1185": 31.0, + "1186": 29.0, + "1187": 39.0, + "1188": 34.0, + "1189": 48.0, + "1190": 32.0, + "1191": 41.0, + "1192": 45.0, + "1193": 28.0, + "1194": 46.0, + "1195": 34.0, + "1196": 38.0, + "1197": 51.0, + "1198": 36.0, + "1199": 40.0, + "1200": 29.0, + "1201": 37.0, + "1202": 32.0, + "1203": 35.0, + "1204": 37.0, + "1205": 56.0, + "1206": 40.0, + "1207": 36.0, + "1208": 41.0, + "1209": 31.0, + "1210": 39.0, + "1211": 46.0, + "1212": 45.0, + "1213": 57.0, + "1214": 31.0, + "1215": 33.0, + "1216": 31.0, + "1217": 34.0, + "1218": 42.0, + "1219": 45.0, + "1220": 37.0, + "1221": 44.0, + "1222": 32.0, + "1223": 35.0, + "1224": 34.0, + "1225": 45.0, + "1226": 28.0, + "1227": 34.0, + "1228": 27.0, + "1229": 23.0, + "1230": 25.0, + "1231": 14.0, + "1232": 36.0, + "1233": 39.0, + "1234": 37.0, + "1235": 32.0, + "1236": 41.0, + "1237": 30.0, + "1238": 36.0, + "1239": 37.0, + "1240": 48.0, + "1241": 31.0, + "1242": 34.0, + "1243": 35.0, + "1244": 29.0, + "1245": 28.0, + "1246": 36.0, + "1247": 31.0, + "1248": 38.0, + "1249": 27.0, + "1250": 40.0, + "1251": 26.0, + "1252": 42.0, + "1253": 32.0, + "1254": 39.0, + "1255": 46.0, + "1256": 41.0, + "1257": 30.0, + "1258": 44.0, + "1259": 32.0, + "1260": 25.0, + "1261": 42.0, + "1262": 36.0, + "1263": 34.0, + "1264": 32.0, + "1265": 35.0, + "1266": 34.0, + "1267": 38.0, + "1268": 43.0, + "1269": 30.0, + "1270": 28.0, + "1271": 42.0, + "1272": 
32.0, + "1273": 40.0, + "1274": 44.0, + "1275": 38.0, + "1276": 31.0, + "1277": 54.0, + "1278": 46.0, + "1279": 44.0, + "1280": 34.0, + "1281": 26.0, + "1282": 37.0, + "1283": 32.0, + "1284": 43.0, + "1285": 43.0, + "1286": 36.0, + "1287": 46.0, + "1288": 33.0, + "1289": 43.0, + "1290": 37.0, + "1291": 42.0, + "1292": 38.0, + "1293": 43.0, + "1294": 30.0, + "1295": 34.0, + "1296": 31.0, + "1297": 26.0, + "1298": 38.0, + "1299": 40.0, + "1300": 32.0, + "1301": 43.0, + "1302": 35.0, + "1303": 35.0, + "1304": 41.0, + "1305": 30.0, + "1306": 28.0, + "1307": 34.0, + "1308": 32.0, + "1309": 36.0, + "1310": 29.0, + "1311": 43.0, + "1312": 32.0, + "1313": 37.0, + "1314": 35.0, + "1315": 33.0, + "1316": 37.0, + "1317": 33.0, + "1318": 41.0, + "1319": 28.0, + "1320": 42.0, + "1321": 30.0, + "1322": 21.0, + "1323": 28.0, + "1324": 40.0, + "1325": 36.0, + "1326": 43.0, + "1327": 32.0, + "1328": 35.0, + "1329": 33.0, + "1330": 27.0, + "1331": 30.0, + "1332": 36.0, + "1333": 45.0, + "1334": 32.0, + "1335": 41.0, + "1336": 38.0, + "1337": 37.0, + "1338": 38.0, + "1339": 27.0, + "1340": 33.0, + "1341": 47.0, + "1342": 24.0, + "1343": 27.0, + "1344": 34.0, + "1345": 34.0, + "1346": 21.0, + "1347": 33.0, + "1348": 33.0, + "1349": 42.0, + "1350": 30.0, + "1351": 39.0, + "1352": 26.0, + "1353": 36.0, + "1354": 40.0, + "1355": 31.0, + "1356": 46.0, + "1357": 46.0, + "1358": 29.0, + "1359": 29.0, + "1360": 30.0, + "1361": 35.0, + "1362": 40.0, + "1363": 33.0, + "1364": 36.0, + "1365": 34.0, + "1366": 47.0, + "1367": 31.0, + "1368": 37.0, + "1369": 28.0, + "1370": 41.0, + "1371": 30.0, + "1372": 42.0, + "1373": 44.0, + "1374": 34.0, + "1375": 22.0, + "1376": 47.0, + "1377": 29.0, + "1378": 39.0, + "1379": 49.0, + "1380": 44.0, + "1381": 30.0, + "1382": 45.0, + "1383": 44.0, + "1384": 31.0, + "1385": 35.0, + "1386": 31.0, + "1387": 31.0, + "1388": 22.0, + "1389": 32.0, + "1390": 38.0, + "1391": 42.0, + "1392": 34.0, + "1393": 43.0, + "1394": 33.0, + "1395": 39.0, + "1396": 37.0, + "1397": 
27.0, + "1398": 33.0, + "1399": 29.0, + "1400": 36.0, + "1401": 28.0, + "1402": 27.0, + "1403": 23.0, + "1404": 28.0, + "1405": 36.0, + "1406": 29.0, + "1407": 36.0, + "1408": 43.0, + "1409": 37.0, + "1410": 37.0, + "1411": 38.0, + "1412": 28.0, + "1413": 48.0, + "1414": 34.0, + "1415": 42.0, + "1416": 35.0, + "1417": 34.0, + "1418": 43.0, + "1419": 38.0, + "1420": 33.0, + "1421": 33.0, + "1422": 53.0, + "1423": 22.0, + "1424": 35.0, + "1425": 43.0, + "1426": 36.0, + "1427": 43.0, + "1428": 31.0, + "1429": 30.0, + "1430": 36.0, + "1431": 29.0, + "1432": 37.0, + "1433": 32.0, + "1434": 47.0, + "1435": 38.0, + "1436": 40.0, + "1437": 47.0, + "1438": 28.0, + "1439": 33.0, + "1440": 25.0, + "1441": 35.0, + "1442": 38.0, + "1443": 42.0, + "1444": 28.0, + "1445": 34.0, + "1446": 28.0, + "1447": 39.0, + "1448": 45.0, + "1449": 41.0, + "1450": 25.0, + "1451": 38.0, + "1452": 27.0, + "1453": 28.0, + "1454": 28.0, + "1455": 32.0, + "1456": 40.0, + "1457": 33.0, + "1458": 37.0, + "1459": 41.0, + "1460": 31.0, + "1461": 34.0, + "1462": 23.0, + "1463": 33.0, + "1464": 42.0, + "1465": 42.0, + "1466": 29.0, + "1467": 27.0, + "1468": 41.0, + "1469": 30.0, + "1470": 35.0, + "1471": 32.0, + "1472": 44.0, + "1473": 53.0, + "1474": 28.0, + "1475": 25.0, + "1476": 47.0, + "1477": 40.0, + "1478": 26.0, + "1479": 33.0, + "1480": 33.0, + "1481": 33.0, + "1482": 33.0, + "1483": 31.0, + "1484": 31.0, + "1485": 45.0, + "1486": 37.0, + "1487": 32.0, + "1488": 26.0, + "1489": 45.0, + "1490": 40.0, + "1491": 44.0, + "1492": 44.0, + "1493": 44.0, + "1494": 33.0, + "1495": 42.0, + "1496": 32.0, + "1497": 39.0, + "1498": 32.0, + "1499": 42.0, + "1500": 42.0, + "1501": 46.0, + "1502": 46.0, + "1503": 39.0, + "1504": 31.0, + "1505": 47.0, + "1506": 41.0, + "1507": 35.0, + "1508": 39.0, + "1509": 32.0, + "1510": 37.0, + "1511": 52.0, + "1512": 29.0, + "1513": 46.0, + "1514": 40.0, + "1515": 41.0, + "1516": 31.0, + "1517": 39.0, + "1518": 40.0, + "1519": 32.0, + "1520": 34.0, + "1521": 44.0, + "1522": 
53.0, + "1523": 40.0, + "1524": 39.0, + "1525": 30.0, + "1526": 34.0, + "1527": 19.0, + "1528": 40.0, + "1529": 30.0, + "1530": 38.0, + "1531": 28.0, + "1532": 30.0, + "1533": 43.0, + "1534": 34.0, + "1535": 35.0, + "1536": 34.0, + "1537": 33.0, + "1538": 36.0, + "1539": 32.0, + "1540": 38.0, + "1541": 35.0, + "1542": 50.0, + "1543": 50.0, + "1544": 38.0, + "1545": 38.0, + "1546": 35.0, + "1547": 31.0, + "1548": 39.0, + "1549": 36.0, + "1550": 30.0, + "1551": 42.0, + "1552": 49.0, + "1553": 46.0, + "1554": 41.0, + "1555": 25.0, + "1556": 33.0, + "1557": 46.0, + "1558": 43.0, + "1559": 36.0, + "1560": 30.0, + "1561": 48.0, + "1562": 30.0, + "1563": 38.0, + "1564": 40.0, + "1565": 30.0, + "1566": 34.0, + "1567": 36.0, + "1568": 43.0, + "1569": 35.0, + "1570": 43.0, + "1571": 32.0, + "1572": 34.0, + "1573": 35.0, + "1574": 31.0, + "1575": 39.0, + "1576": 30.0, + "1577": 41.0, + "1578": 46.0, + "1579": 35.0, + "1580": 39.0, + "1581": 43.0, + "1582": 30.0, + "1583": 43.0, + "1584": 36.0, + "1585": 37.0, + "1586": 44.0, + "1587": 37.0, + "1588": 43.0, + "1589": 41.0, + "1590": 46.0, + "1591": 32.0, + "1592": 37.0, + "1593": 32.0, + "1594": 36.0, + "1595": 27.0, + "1596": 40.0, + "1597": 36.0, + "1598": 36.0, + "1599": 32.0, + "1600": 41.0, + "1601": 34.0, + "1602": 38.0, + "1603": 48.0, + "1604": 29.0, + "1605": 42.0, + "1606": 33.0, + "1607": 41.0, + "1608": 40.0, + "1609": 42.0, + "1610": 37.0, + "1611": 35.0, + "1612": 37.0, + "1613": 39.0, + "1614": 51.0, + "1615": 38.0, + "1616": 33.0, + "1617": 45.0, + "1618": 43.0, + "1619": 32.0, + "1620": 43.0, + "1621": 47.0, + "1622": 36.0, + "1623": 50.0, + "1624": 40.0, + "1625": 33.0, + "1626": 39.0, + "1627": 34.0, + "1628": 40.0, + "1629": 30.0, + "1630": 34.0, + "1631": 45.0, + "1632": 39.0, + "1633": 40.0, + "1634": 30.0, + "1635": 53.0, + "1636": 31.0, + "1637": 35.0, + "1638": 39.0, + "1639": 42.0, + "1640": 37.0, + "1641": 43.0, + "1642": 30.0, + "1643": 43.0, + "1644": 36.0, + "1645": 37.0, + "1646": 61.0, + "1647": 
34.0, + "1648": 41.0, + "1649": 39.0, + "1650": 42.0, + "1651": 33.0, + "1652": 45.0, + "1653": 25.0, + "1654": 36.0, + "1655": 29.0, + "1656": 45.0, + "1657": 37.0, + "1658": 46.0, + "1659": 38.0, + "1660": 46.0, + "1661": 41.0, + "1662": 35.0, + "1663": 35.0, + "1664": 37.0, + "1665": 30.0, + "1666": 44.0, + "1667": 45.0, + "1668": 40.0, + "1669": 35.0, + "1670": 35.0, + "1671": 37.0, + "1672": 32.0, + "1673": 48.0, + "1674": 41.0, + "1675": 40.0, + "1676": 49.0, + "1677": 35.0, + "1678": 30.0, + "1679": 45.0, + "1680": 40.0, + "1681": 32.0, + "1682": 32.0, + "1683": 42.0, + "1684": 44.0, + "1685": 47.0, + "1686": 30.0, + "1687": 31.0, + "1688": 31.0, + "1689": 40.0, + "1690": 43.0, + "1691": 36.0, + "1692": 31.0, + "1693": 31.0, + "1694": 35.0, + "1695": 41.0, + "1696": 32.0, + "1697": 27.0, + "1698": 39.0, + "1699": 41.0, + "1700": 31.0, + "1701": 35.0, + "1702": 31.0, + "1703": 40.0, + "1704": 36.0, + "1705": 36.0, + "1706": 46.0, + "1707": 26.0, + "1708": 37.0, + "1709": 37.0, + "1710": 39.0, + "1711": 32.0, + "1712": 46.0, + "1713": 44.0, + "1714": 45.0, + "1715": 43.0, + "1716": 30.0, + "1717": 41.0, + "1718": 43.0, + "1719": 28.0, + "1720": 36.0, + "1721": 26.0, + "1722": 42.0, + "1723": 42.0, + "1724": 39.0, + "1725": 28.0, + "1726": 46.0, + "1727": 43.0, + "1728": 40.0, + "1729": 44.0, + "1730": 38.0, + "1731": 26.0, + "1732": 39.0, + "1733": 44.0, + "1734": 39.0, + "1735": 34.0, + "1736": 46.0, + "1737": 46.0, + "1738": 34.0, + "1739": 47.0, + "1740": 44.0, + "1741": 31.0, + "1742": 46.0, + "1743": 43.0, + "1744": 46.0, + "1745": 53.0, + "1746": 42.0, + "1747": 37.0, + "1748": 37.0, + "1749": 47.0, + "1750": 46.0, + "1751": 43.0, + "1752": 35.0, + "1753": 41.0, + "1754": 40.0, + "1755": 32.0, + "1756": 36.0, + "1757": 48.0, + "1758": 34.0, + "1759": 49.0, + "1760": 46.0, + "1761": 36.0, + "1762": 34.0, + "1763": 36.0, + "1764": 39.0, + "1765": 24.0, + "1766": 46.0, + "1767": 46.0, + "1768": 36.0, + "1769": 56.0, + "1770": 28.0, + "1771": 42.0, + "1772": 
52.0, + "1773": 45.0, + "1774": 37.0, + "1775": 33.0, + "1776": 43.0, + "1777": 54.0, + "1778": 39.0, + "1779": 33.0, + "1780": 39.0, + "1781": 45.0, + "1782": 35.0, + "1783": 43.0, + "1784": 53.0, + "1785": 36.0, + "1786": 38.0, + "1787": 43.0, + "1788": 45.0, + "1789": 33.0, + "1790": 42.0, + "1791": 44.0, + "1792": 34.0, + "1793": 30.0, + "1794": 40.0, + "1795": 55.0, + "1796": 33.0, + "1797": 30.0, + "1798": 41.0, + "1799": 37.0, + "1800": 41.0, + "1801": 40.0, + "1802": 30.0, + "1803": 36.0, + "1804": 41.0, + "1805": 34.0, + "1806": 39.0, + "1807": 36.0, + "1808": 43.0, + "1809": 45.0, + "1810": 41.0, + "1811": 28.0, + "1812": 33.0, + "1813": 30.0, + "1814": 36.0, + "1815": 35.0, + "1816": 35.0, + "1817": 35.0, + "1818": 42.0, + "1819": 25.0, + "1820": 38.0, + "1821": 48.0, + "1822": 38.0, + "1823": 38.0, + "1824": 49.0, + "1825": 46.0, + "1826": 32.0, + "1827": 47.0, + "1828": 30.0, + "1829": 50.0, + "1830": 43.0, + "1831": 36.0, + "1832": 47.0, + "1833": 42.0, + "1834": 41.0, + "1835": 39.0, + "1836": 39.0, + "1837": 34.0, + "1838": 50.0, + "1839": 35.0, + "1840": 41.0, + "1841": 30.0, + "1842": 34.0, + "1843": 44.0, + "1844": 38.0, + "1845": 41.0, + "1846": 32.0, + "1847": 32.0, + "1848": 36.0, + "1849": 45.0, + "1850": 40.0, + "1851": 36.0, + "1852": 41.0, + "1853": 29.0, + "1854": 35.0, + "1855": 45.0, + "1856": 39.0, + "1857": 33.0, + "1858": 40.0, + "1859": 40.0, + "1860": 48.0, + "1861": 37.0, + "1862": 46.0, + "1863": 47.0, + "1864": 48.0, + "1865": 38.0, + "1866": 51.0, + "1867": 34.0, + "1868": 40.0, + "1869": 42.0, + "1870": 38.0, + "1871": 36.0, + "1872": 42.0, + "1873": 42.0, + "1874": 38.0, + "1875": 51.0, + "1876": 39.0, + "1877": 41.0, + "1878": 26.0, + "1879": 33.0, + "1880": 41.0, + "1881": 50.0, + "1882": 37.0, + "1883": 45.0, + "1884": 39.0, + "1885": 37.0, + "1886": 32.0, + "1887": 36.0, + "1888": 28.0, + "1889": 38.0, + "1890": 37.0, + "1891": 51.0, + "1892": 44.0, + "1893": 50.0, + "1894": 44.0, + "1895": 35.0, + "1896": 34.0, + "1897": 
35.0, + "1898": 31.0, + "1899": 39.0, + "1900": 40.0, + "1901": 52.0, + "1902": 31.0, + "1903": 44.0, + "1904": 45.0, + "1905": 32.0, + "1906": 49.0, + "1907": 34.0, + "1908": 33.0, + "1909": 34.0, + "1910": 45.0, + "1911": 41.0, + "1912": 46.0, + "1913": 46.0, + "1914": 51.0, + "1915": 35.0, + "1916": 42.0, + "1917": 40.0, + "1918": 32.0, + "1919": 54.0, + "1920": 41.0, + "1921": 40.0, + "1922": 36.0, + "1923": 34.0, + "1924": 43.0, + "1925": 47.0, + "1926": 42.0, + "1927": 37.0, + "1928": 40.0, + "1929": 40.0, + "1930": 39.0, + "1931": 37.0, + "1932": 40.0, + "1933": 46.0, + "1934": 30.0, + "1935": 50.0, + "1936": 51.0, + "1937": 34.0, + "1938": 38.0, + "1939": 44.0, + "1940": 35.0, + "1941": 39.0, + "1942": 59.0, + "1943": 42.0, + "1944": 46.0, + "1945": 36.0, + "1946": 43.0, + "1947": 39.0, + "1948": 39.0, + "1949": 31.0, + "1950": 36.0, + "1951": 41.0, + "1952": 37.0, + "1953": 26.0, + "1954": 43.0, + "1955": 33.0, + "1956": 37.0, + "1957": 48.0, + "1958": 35.0, + "1959": 44.0, + "1960": 35.0, + "1961": 28.0, + "1962": 51.0, + "1963": 47.0, + "1964": 33.0, + "1965": 56.0, + "1966": 46.0, + "1967": 33.0, + "1968": 53.0, + "1969": 36.0, + "1970": 47.0, + "1971": 35.0, + "1972": 34.0, + "1973": 38.0, + "1974": 46.0, + "1975": 32.0, + "1976": 43.0, + "1977": 38.0, + "1978": 43.0, + "1979": 49.0, + "1980": 32.0, + "1981": 30.0, + "1982": 55.0, + "1983": 41.0, + "1984": 62.0, + "1985": 41.0, + "1986": 48.0, + "1987": 48.0, + "1988": 41.0, + "1989": 50.0, + "1990": 53.0, + "1991": 45.0, + "1992": 46.0, + "1993": 60.0, + "1994": 30.0, + "1995": 41.0, + "1996": 51.0, + "1997": 41.0, + "1998": 45.0, + "1999": 32.0, + "2000": 43.0 + } + }, + "mem-allocated-bytes": { + "start_step": 1, + "end_step": 2000, + "step_interval": 1, + "values": { + "1": 302618112.0, + "2": 302618112.0, + "3": 302618112.0, + "4": 302618112.0, + "5": 302618112.0, + "6": 302618112.0, + "7": 302618112.0, + "8": 302618112.0, + "9": 302618112.0, + "10": 302618112.0, + "11": 302618112.0, + "12": 
302618112.0, + "13": 302618112.0, + "14": 302618112.0, + "15": 302618112.0, + "16": 302618112.0, + "17": 302618112.0, + "18": 302618112.0, + "19": 302618112.0, + "20": 302618112.0, + "21": 302618112.0, + "22": 302618112.0, + "23": 302618112.0, + "24": 302618112.0, + "25": 302618112.0, + "26": 302618112.0, + "27": 302618112.0, + "28": 302618112.0, + "29": 302618112.0, + "30": 302618112.0, + "31": 302618112.0, + "32": 302618112.0, + "33": 302618112.0, + "34": 302618112.0, + "35": 302618112.0, + "36": 302618112.0, + "37": 302618112.0, + "38": 302618112.0, + "39": 302618112.0, + "40": 302618112.0, + "41": 302618112.0, + "42": 302618112.0, + "43": 302618112.0, + "44": 302618112.0, + "45": 302618112.0, + "46": 302618112.0, + "47": 302618112.0, + "48": 302618112.0, + "49": 302618112.0, + "50": 302618112.0, + "51": 302618112.0, + "52": 302618112.0, + "53": 302618112.0, + "54": 302618112.0, + "55": 302618112.0, + "56": 302618112.0, + "57": 302618112.0, + "58": 302618112.0, + "59": 302618112.0, + "60": 302618112.0, + "61": 302618112.0, + "62": 302618112.0, + "63": 302618112.0, + "64": 302618112.0, + "65": 302618112.0, + "66": 302618112.0, + "67": 302618112.0, + "68": 302618112.0, + "69": 302618112.0, + "70": 302618112.0, + "71": 302618112.0, + "72": 302618112.0, + "73": 302618112.0, + "74": 302618112.0, + "75": 302618112.0, + "76": 302618112.0, + "77": 302618112.0, + "78": 302618112.0, + "79": 302618112.0, + "80": 302618112.0, + "81": 302618112.0, + "82": 302618112.0, + "83": 302618112.0, + "84": 302618112.0, + "85": 302618112.0, + "86": 302618112.0, + "87": 302618112.0, + "88": 302618112.0, + "89": 302618112.0, + "90": 302618112.0, + "91": 302618112.0, + "92": 302618112.0, + "93": 302618112.0, + "94": 302618112.0, + "95": 302618112.0, + "96": 302618112.0, + "97": 302618112.0, + "98": 302618112.0, + "99": 302618112.0, + "100": 302618112.0, + "101": 302618112.0, + "102": 302618112.0, + "103": 302618112.0, + "104": 302618112.0, + "105": 302618112.0, + "106": 302618112.0, + 
"107": 302618112.0, + "108": 302618112.0, + "109": 302618112.0, + "110": 302618112.0, + "111": 302618112.0, + "112": 302618112.0, + "113": 302618112.0, + "114": 302618112.0, + "115": 302618112.0, + "116": 302618112.0, + "117": 302618112.0, + "118": 302618112.0, + "119": 302618112.0, + "120": 302618112.0, + "121": 302618112.0, + "122": 302618112.0, + "123": 302618112.0, + "124": 302618112.0, + "125": 302618112.0, + "126": 302618112.0, + "127": 302618112.0, + "128": 302618112.0, + "129": 302618112.0, + "130": 302618112.0, + "131": 302618112.0, + "132": 302618112.0, + "133": 302618112.0, + "134": 302618112.0, + "135": 302618112.0, + "136": 302618112.0, + "137": 302618112.0, + "138": 302618112.0, + "139": 302618112.0, + "140": 302618112.0, + "141": 302618112.0, + "142": 302618112.0, + "143": 302618112.0, + "144": 302618112.0, + "145": 302618112.0, + "146": 302618112.0, + "147": 302618112.0, + "148": 302618112.0, + "149": 302618112.0, + "150": 302618112.0, + "151": 302618112.0, + "152": 302618112.0, + "153": 302618112.0, + "154": 302618112.0, + "155": 302618112.0, + "156": 302618112.0, + "157": 302618112.0, + "158": 302618112.0, + "159": 302618112.0, + "160": 302618112.0, + "161": 302618112.0, + "162": 302618112.0, + "163": 302618112.0, + "164": 302618112.0, + "165": 302618112.0, + "166": 302618112.0, + "167": 302618112.0, + "168": 302618112.0, + "169": 302618112.0, + "170": 302618112.0, + "171": 302618112.0, + "172": 302618112.0, + "173": 302618112.0, + "174": 302618112.0, + "175": 302618112.0, + "176": 302618112.0, + "177": 302618112.0, + "178": 302618112.0, + "179": 302618112.0, + "180": 302618112.0, + "181": 302618112.0, + "182": 302618112.0, + "183": 302618112.0, + "184": 302618112.0, + "185": 302618112.0, + "186": 302618112.0, + "187": 302618112.0, + "188": 302618112.0, + "189": 302618112.0, + "190": 302618112.0, + "191": 302618112.0, + "192": 302618112.0, + "193": 302618112.0, + "194": 302618112.0, + "195": 302618112.0, + "196": 302618112.0, + "197": 302618112.0, 
+ "198": 302618112.0, + "199": 302618112.0, + "200": 302618112.0, + "201": 302618112.0, + "202": 302618112.0, + "203": 302618112.0, + "204": 302618112.0, + "205": 302618112.0, + "206": 302618112.0, + "207": 302618112.0, + "208": 302618112.0, + "209": 302618112.0, + "210": 302618112.0, + "211": 302618112.0, + "212": 302618112.0, + "213": 302618112.0, + "214": 302618112.0, + "215": 302618112.0, + "216": 302618112.0, + "217": 302618112.0, + "218": 302618112.0, + "219": 302618112.0, + "220": 302618112.0, + "221": 302618112.0, + "222": 302618112.0, + "223": 302618112.0, + "224": 302618112.0, + "225": 302618112.0, + "226": 302618112.0, + "227": 302618112.0, + "228": 302618112.0, + "229": 302618112.0, + "230": 302618112.0, + "231": 302618112.0, + "232": 302618112.0, + "233": 302618112.0, + "234": 302618112.0, + "235": 302618112.0, + "236": 302618112.0, + "237": 302618112.0, + "238": 302618112.0, + "239": 302618112.0, + "240": 302618112.0, + "241": 302618112.0, + "242": 302618112.0, + "243": 302618112.0, + "244": 302618112.0, + "245": 302618112.0, + "246": 302618112.0, + "247": 302618112.0, + "248": 302618112.0, + "249": 302618112.0, + "250": 302618112.0, + "251": 302618112.0, + "252": 302618112.0, + "253": 302618112.0, + "254": 302618112.0, + "255": 302618112.0, + "256": 302618112.0, + "257": 302618112.0, + "258": 302618112.0, + "259": 302618112.0, + "260": 302618112.0, + "261": 302618112.0, + "262": 302618112.0, + "263": 302618112.0, + "264": 302618112.0, + "265": 302618112.0, + "266": 302618112.0, + "267": 302618112.0, + "268": 302618112.0, + "269": 302618112.0, + "270": 302618112.0, + "271": 302618112.0, + "272": 302618112.0, + "273": 302618112.0, + "274": 302618112.0, + "275": 302618112.0, + "276": 302618112.0, + "277": 302618112.0, + "278": 302618112.0, + "279": 302618112.0, + "280": 302618112.0, + "281": 302618112.0, + "282": 302618112.0, + "283": 302618112.0, + "284": 302618112.0, + "285": 302618112.0, + "286": 302618112.0, + "287": 302618112.0, + "288": 
302618112.0, + "289": 302618112.0, + "290": 302618112.0, + "291": 302618112.0, + "292": 302618112.0, + "293": 302618112.0, + "294": 302618112.0, + "295": 302618112.0, + "296": 302618112.0, + "297": 302618112.0, + "298": 302618112.0, + "299": 302618112.0, + "300": 302618112.0, + "301": 302618112.0, + "302": 302618112.0, + "303": 302618112.0, + "304": 302618112.0, + "305": 302618112.0, + "306": 302618112.0, + "307": 302618112.0, + "308": 302618112.0, + "309": 302618112.0, + "310": 302618112.0, + "311": 302618112.0, + "312": 302618112.0, + "313": 302618112.0, + "314": 302618112.0, + "315": 302618112.0, + "316": 302618112.0, + "317": 302618112.0, + "318": 302618112.0, + "319": 302618112.0, + "320": 302618112.0, + "321": 302618112.0, + "322": 302618112.0, + "323": 302618112.0, + "324": 302618112.0, + "325": 302618112.0, + "326": 302618112.0, + "327": 302618112.0, + "328": 302618112.0, + "329": 302618112.0, + "330": 302618112.0, + "331": 302618112.0, + "332": 302618112.0, + "333": 302618112.0, + "334": 302618112.0, + "335": 302618112.0, + "336": 302618112.0, + "337": 302618112.0, + "338": 302618112.0, + "339": 302618112.0, + "340": 302618112.0, + "341": 302618112.0, + "342": 302618112.0, + "343": 302618112.0, + "344": 302618112.0, + "345": 302618112.0, + "346": 302618112.0, + "347": 302618112.0, + "348": 302618112.0, + "349": 302618112.0, + "350": 302618112.0, + "351": 302618112.0, + "352": 302618112.0, + "353": 302618112.0, + "354": 302618112.0, + "355": 302618112.0, + "356": 302618112.0, + "357": 302618112.0, + "358": 302618112.0, + "359": 302618112.0, + "360": 302618112.0, + "361": 302618112.0, + "362": 302618112.0, + "363": 302618112.0, + "364": 302618112.0, + "365": 302618112.0, + "366": 302618112.0, + "367": 302618112.0, + "368": 302618112.0, + "369": 302618112.0, + "370": 302618112.0, + "371": 302618112.0, + "372": 302618112.0, + "373": 302618112.0, + "374": 302618112.0, + "375": 302618112.0, + "376": 302618112.0, + "377": 302618112.0, + "378": 302618112.0, + 
"379": 302618112.0, + "380": 302618112.0, + "381": 302618112.0, + "382": 302618112.0, + "383": 302618112.0, + "384": 302618112.0, + "385": 302618112.0, + "386": 302618112.0, + "387": 302618112.0, + "388": 302618112.0, + "389": 302618112.0, + "390": 302618112.0, + "391": 302618112.0, + "392": 302618112.0, + "393": 302618112.0, + "394": 302618112.0, + "395": 302618112.0, + "396": 302618112.0, + "397": 302618112.0, + "398": 302618112.0, + "399": 302618112.0, + "400": 302618112.0, + "401": 302618112.0, + "402": 302618112.0, + "403": 302618112.0, + "404": 302618112.0, + "405": 302618112.0, + "406": 302618112.0, + "407": 302618112.0, + "408": 302618112.0, + "409": 302618112.0, + "410": 302618112.0, + "411": 302618112.0, + "412": 302618112.0, + "413": 302618112.0, + "414": 302618112.0, + "415": 302618112.0, + "416": 302618112.0, + "417": 302618112.0, + "418": 302618112.0, + "419": 302618112.0, + "420": 302618112.0, + "421": 302618112.0, + "422": 302618112.0, + "423": 302618112.0, + "424": 302618112.0, + "425": 302618112.0, + "426": 302618112.0, + "427": 302618112.0, + "428": 302618112.0, + "429": 302618112.0, + "430": 302618112.0, + "431": 302618112.0, + "432": 302618112.0, + "433": 302618112.0, + "434": 302618112.0, + "435": 302618112.0, + "436": 302618112.0, + "437": 302618112.0, + "438": 302618112.0, + "439": 302618112.0, + "440": 302618112.0, + "441": 302618112.0, + "442": 302618112.0, + "443": 302618112.0, + "444": 302618112.0, + "445": 302618112.0, + "446": 302618112.0, + "447": 302618112.0, + "448": 302618112.0, + "449": 302618112.0, + "450": 302618112.0, + "451": 302618112.0, + "452": 302618112.0, + "453": 302618112.0, + "454": 302618112.0, + "455": 302618112.0, + "456": 302618112.0, + "457": 302618112.0, + "458": 302618112.0, + "459": 302618112.0, + "460": 302618112.0, + "461": 302618112.0, + "462": 302618112.0, + "463": 302618112.0, + "464": 302618112.0, + "465": 302618112.0, + "466": 302618112.0, + "467": 302618112.0, + "468": 302618112.0, + "469": 302618112.0, 
+ "470": 302618112.0, + "471": 302618112.0, + "472": 302618112.0, + "473": 302618112.0, + "474": 302618112.0, + "475": 302618112.0, + "476": 302618112.0, + "477": 302618112.0, + "478": 302618112.0, + "479": 302618112.0, + "480": 302618112.0, + "481": 302618112.0, + "482": 302618112.0, + "483": 302618112.0, + "484": 302618112.0, + "485": 302618112.0, + "486": 302618112.0, + "487": 302618112.0, + "488": 302618112.0, + "489": 302618112.0, + "490": 302618112.0, + "491": 302618112.0, + "492": 302618112.0, + "493": 302618112.0, + "494": 302618112.0, + "495": 302618112.0, + "496": 302618112.0, + "497": 302618112.0, + "498": 302618112.0, + "499": 302618112.0, + "500": 302618112.0, + "501": 302618112.0, + "502": 302618112.0, + "503": 302618112.0, + "504": 302618112.0, + "505": 302618112.0, + "506": 302618112.0, + "507": 302618112.0, + "508": 302618112.0, + "509": 302618112.0, + "510": 302618112.0, + "511": 302618112.0, + "512": 302618112.0, + "513": 302618112.0, + "514": 302618112.0, + "515": 302618112.0, + "516": 302618112.0, + "517": 302618112.0, + "518": 302618112.0, + "519": 302618112.0, + "520": 302618112.0, + "521": 302618112.0, + "522": 302618112.0, + "523": 302618112.0, + "524": 302618112.0, + "525": 302618112.0, + "526": 302618112.0, + "527": 302618112.0, + "528": 302618112.0, + "529": 302618112.0, + "530": 302618112.0, + "531": 302618112.0, + "532": 302618112.0, + "533": 302618112.0, + "534": 302618112.0, + "535": 302618112.0, + "536": 302618112.0, + "537": 302618112.0, + "538": 302618112.0, + "539": 302618112.0, + "540": 302618112.0, + "541": 302618112.0, + "542": 302618112.0, + "543": 302618112.0, + "544": 302618112.0, + "545": 302618112.0, + "546": 302618112.0, + "547": 302618112.0, + "548": 302618112.0, + "549": 302618112.0, + "550": 302618112.0, + "551": 302618112.0, + "552": 302618112.0, + "553": 302618112.0, + "554": 302618112.0, + "555": 302618112.0, + "556": 302618112.0, + "557": 302618112.0, + "558": 302618112.0, + "559": 302618112.0, + "560": 
302618112.0, + "561": 302618112.0, + "562": 302618112.0, + "563": 302618112.0, + "564": 302618112.0, + "565": 302618112.0, + "566": 302618112.0, + "567": 302618112.0, + "568": 302618112.0, + "569": 302618112.0, + "570": 302618112.0, + "571": 302618112.0, + "572": 302618112.0, + "573": 302618112.0, + "574": 302618112.0, + "575": 302618112.0, + "576": 302618112.0, + "577": 302618112.0, + "578": 302618112.0, + "579": 302618112.0, + "580": 302618112.0, + "581": 302618112.0, + "582": 302618112.0, + "583": 302618112.0, + "584": 302618112.0, + "585": 302618112.0, + "586": 302618112.0, + "587": 302618112.0, + "588": 302618112.0, + "589": 302618112.0, + "590": 302618112.0, + "591": 302618112.0, + "592": 302618112.0, + "593": 302618112.0, + "594": 302618112.0, + "595": 302618112.0, + "596": 302618112.0, + "597": 302618112.0, + "598": 302618112.0, + "599": 302618112.0, + "600": 302618112.0, + "601": 302618112.0, + "602": 302618112.0, + "603": 302618112.0, + "604": 302618112.0, + "605": 302618112.0, + "606": 302618112.0, + "607": 302618112.0, + "608": 302618112.0, + "609": 302618112.0, + "610": 302618112.0, + "611": 302618112.0, + "612": 302618112.0, + "613": 302618112.0, + "614": 302618112.0, + "615": 302618112.0, + "616": 302618112.0, + "617": 302618112.0, + "618": 302618112.0, + "619": 302618112.0, + "620": 302618112.0, + "621": 302618112.0, + "622": 302618112.0, + "623": 302618112.0, + "624": 302618112.0, + "625": 302618112.0, + "626": 302618112.0, + "627": 302618112.0, + "628": 302618112.0, + "629": 302618112.0, + "630": 302618112.0, + "631": 302618112.0, + "632": 302618112.0, + "633": 302618112.0, + "634": 302618112.0, + "635": 302618112.0, + "636": 302618112.0, + "637": 302618112.0, + "638": 302618112.0, + "639": 302618112.0, + "640": 302618112.0, + "641": 302618112.0, + "642": 302618112.0, + "643": 302618112.0, + "644": 302618112.0, + "645": 302618112.0, + "646": 302618112.0, + "647": 302618112.0, + "648": 302618112.0, + "649": 302618112.0, + "650": 302618112.0, + 
"651": 302618112.0, + "652": 302618112.0, + "653": 302618112.0, + "654": 302618112.0, + "655": 302618112.0, + "656": 302618112.0, + "657": 302618112.0, + "658": 302618112.0, + "659": 302618112.0, + "660": 302618112.0, + "661": 302618112.0, + "662": 302618112.0, + "663": 302618112.0, + "664": 302618112.0, + "665": 302618112.0, + "666": 302618112.0, + "667": 302618112.0, + "668": 302618112.0, + "669": 302618112.0, + "670": 302618112.0, + "671": 302618112.0, + "672": 302618112.0, + "673": 302618112.0, + "674": 302618112.0, + "675": 302618112.0, + "676": 302618112.0, + "677": 302618112.0, + "678": 302618112.0, + "679": 302618112.0, + "680": 302618112.0, + "681": 302618112.0, + "682": 302618112.0, + "683": 302618112.0, + "684": 302618112.0, + "685": 302618112.0, + "686": 302618112.0, + "687": 302618112.0, + "688": 302618112.0, + "689": 302618112.0, + "690": 302618112.0, + "691": 302618112.0, + "692": 302618112.0, + "693": 302618112.0, + "694": 302618112.0, + "695": 302618112.0, + "696": 302618112.0, + "697": 302618112.0, + "698": 302618112.0, + "699": 302618112.0, + "700": 302618112.0, + "701": 302618112.0, + "702": 302618112.0, + "703": 302618112.0, + "704": 302618112.0, + "705": 302618112.0, + "706": 302618112.0, + "707": 302618112.0, + "708": 302618112.0, + "709": 302618112.0, + "710": 302618112.0, + "711": 302618112.0, + "712": 302618112.0, + "713": 302618112.0, + "714": 302618112.0, + "715": 302618112.0, + "716": 302618112.0, + "717": 302618112.0, + "718": 302618112.0, + "719": 302618112.0, + "720": 302618112.0, + "721": 302618112.0, + "722": 302618112.0, + "723": 302618112.0, + "724": 302618112.0, + "725": 302618112.0, + "726": 302618112.0, + "727": 302618112.0, + "728": 302618112.0, + "729": 302618112.0, + "730": 302618112.0, + "731": 302618112.0, + "732": 302618112.0, + "733": 302618112.0, + "734": 302618112.0, + "735": 302618112.0, + "736": 302618112.0, + "737": 302618112.0, + "738": 302618112.0, + "739": 302618112.0, + "740": 302618112.0, + "741": 302618112.0, 
+ "742": 302618112.0, + "743": 302618112.0, + "744": 302618112.0, + "745": 302618112.0, + "746": 302618112.0, + "747": 302618112.0, + "748": 302618112.0, + "749": 302618112.0, + "750": 302618112.0, + "751": 302618112.0, + "752": 302618112.0, + "753": 302618112.0, + "754": 302618112.0, + "755": 302618112.0, + "756": 302618112.0, + "757": 302618112.0, + "758": 302618112.0, + "759": 302618112.0, + "760": 302618112.0, + "761": 302618112.0, + "762": 302618112.0, + "763": 302618112.0, + "764": 302618112.0, + "765": 302618112.0, + "766": 302618112.0, + "767": 302618112.0, + "768": 302618112.0, + "769": 302618112.0, + "770": 302618112.0, + "771": 302618112.0, + "772": 302618112.0, + "773": 302618112.0, + "774": 302618112.0, + "775": 302618112.0, + "776": 302618112.0, + "777": 302618112.0, + "778": 302618112.0, + "779": 302618112.0, + "780": 302618112.0, + "781": 302618112.0, + "782": 302618112.0, + "783": 302618112.0, + "784": 302618112.0, + "785": 302618112.0, + "786": 302618112.0, + "787": 302618112.0, + "788": 302618112.0, + "789": 302618112.0, + "790": 302618112.0, + "791": 302618112.0, + "792": 302618112.0, + "793": 302618112.0, + "794": 302618112.0, + "795": 302618112.0, + "796": 302618112.0, + "797": 302618112.0, + "798": 302618112.0, + "799": 302618112.0, + "800": 302618112.0, + "801": 302618112.0, + "802": 302618112.0, + "803": 302618112.0, + "804": 302618112.0, + "805": 302618112.0, + "806": 302618112.0, + "807": 302618112.0, + "808": 302618112.0, + "809": 302618112.0, + "810": 302618112.0, + "811": 302618112.0, + "812": 302618112.0, + "813": 302618112.0, + "814": 302618112.0, + "815": 302618112.0, + "816": 302618112.0, + "817": 302618112.0, + "818": 302618112.0, + "819": 302618112.0, + "820": 302618112.0, + "821": 302618112.0, + "822": 302618112.0, + "823": 302618112.0, + "824": 302618112.0, + "825": 302618112.0, + "826": 302618112.0, + "827": 302618112.0, + "828": 302618112.0, + "829": 302618112.0, + "830": 302618112.0, + "831": 302618112.0, + "832": 
302618112.0, + "833": 302618112.0, + "834": 302618112.0, + "835": 302618112.0, + "836": 302618112.0, + "837": 302618112.0, + "838": 302618112.0, + "839": 302618112.0, + "840": 302618112.0, + "841": 302618112.0, + "842": 302618112.0, + "843": 302618112.0, + "844": 302618112.0, + "845": 302618112.0, + "846": 302618112.0, + "847": 302618112.0, + "848": 302618112.0, + "849": 302618112.0, + "850": 302618112.0, + "851": 302618112.0, + "852": 302618112.0, + "853": 302618112.0, + "854": 302618112.0, + "855": 302618112.0, + "856": 302618112.0, + "857": 302618112.0, + "858": 302618112.0, + "859": 302618112.0, + "860": 302618112.0, + "861": 302618112.0, + "862": 302618112.0, + "863": 302618112.0, + "864": 302618112.0, + "865": 302618112.0, + "866": 302618112.0, + "867": 302618112.0, + "868": 302618112.0, + "869": 302618112.0, + "870": 302618112.0, + "871": 302618112.0, + "872": 302618112.0, + "873": 302618112.0, + "874": 302618112.0, + "875": 302618112.0, + "876": 302618112.0, + "877": 302618112.0, + "878": 302618112.0, + "879": 302618112.0, + "880": 302618112.0, + "881": 302618112.0, + "882": 302618112.0, + "883": 302618112.0, + "884": 302618112.0, + "885": 302618112.0, + "886": 302618112.0, + "887": 302618112.0, + "888": 302618112.0, + "889": 302618112.0, + "890": 302618112.0, + "891": 302618112.0, + "892": 302618112.0, + "893": 302618112.0, + "894": 302618112.0, + "895": 302618112.0, + "896": 302618112.0, + "897": 302618112.0, + "898": 302618112.0, + "899": 302618112.0, + "900": 302618112.0, + "901": 302618112.0, + "902": 302618112.0, + "903": 302618112.0, + "904": 302618112.0, + "905": 302618112.0, + "906": 302618112.0, + "907": 302618112.0, + "908": 302618112.0, + "909": 302618112.0, + "910": 302618112.0, + "911": 302618112.0, + "912": 302618112.0, + "913": 302618112.0, + "914": 302618112.0, + "915": 302618112.0, + "916": 302618112.0, + "917": 302618112.0, + "918": 302618112.0, + "919": 302618112.0, + "920": 302618112.0, + "921": 302618112.0, + "922": 302618112.0, + 
"923": 302618112.0, + "924": 302618112.0, + "925": 302618112.0, + "926": 302618112.0, + "927": 302618112.0, + "928": 302618112.0, + "929": 302618112.0, + "930": 302618112.0, + "931": 302618112.0, + "932": 302618112.0, + "933": 302618112.0, + "934": 302618112.0, + "935": 302618112.0, + "936": 302618112.0, + "937": 302618112.0, + "938": 302618112.0, + "939": 302618112.0, + "940": 302618112.0, + "941": 302618112.0, + "942": 302618112.0, + "943": 302618112.0, + "944": 302618112.0, + "945": 302618112.0, + "946": 302618112.0, + "947": 302618112.0, + "948": 302618112.0, + "949": 302618112.0, + "950": 302618112.0, + "951": 302618112.0, + "952": 302618112.0, + "953": 302618112.0, + "954": 302618112.0, + "955": 302618112.0, + "956": 302618112.0, + "957": 302618112.0, + "958": 302618112.0, + "959": 302618112.0, + "960": 302618112.0, + "961": 302618112.0, + "962": 302618112.0, + "963": 302618112.0, + "964": 302618112.0, + "965": 302618112.0, + "966": 302618112.0, + "967": 302618112.0, + "968": 302618112.0, + "969": 302618112.0, + "970": 302618112.0, + "971": 302618112.0, + "972": 302618112.0, + "973": 302618112.0, + "974": 302618112.0, + "975": 302618112.0, + "976": 302618112.0, + "977": 302618112.0, + "978": 302618112.0, + "979": 302618112.0, + "980": 302618112.0, + "981": 302618112.0, + "982": 302618112.0, + "983": 302618112.0, + "984": 302618112.0, + "985": 302618112.0, + "986": 302618112.0, + "987": 302618112.0, + "988": 302618112.0, + "989": 302618112.0, + "990": 302618112.0, + "991": 302618112.0, + "992": 302618112.0, + "993": 302618112.0, + "994": 302618112.0, + "995": 302618112.0, + "996": 302618112.0, + "997": 302618112.0, + "998": 302618112.0, + "999": 302618112.0, + "1000": 302618112.0, + "1001": 302618112.0, + "1002": 302618112.0, + "1003": 302618112.0, + "1004": 302618112.0, + "1005": 302618112.0, + "1006": 302618112.0, + "1007": 302618112.0, + "1008": 302618112.0, + "1009": 302618112.0, + "1010": 302618112.0, + "1011": 302618112.0, + "1012": 302618112.0, + 
"1013": 302618112.0, + "1014": 302618112.0, + "1015": 302618112.0, + "1016": 302618112.0, + "1017": 302618112.0, + "1018": 302618112.0, + "1019": 302618112.0, + "1020": 302618112.0, + "1021": 302618112.0, + "1022": 302618112.0, + "1023": 302618112.0, + "1024": 302618112.0, + "1025": 302618112.0, + "1026": 302618112.0, + "1027": 302618112.0, + "1028": 302618112.0, + "1029": 302618112.0, + "1030": 302618112.0, + "1031": 302618112.0, + "1032": 302618112.0, + "1033": 302618112.0, + "1034": 302618112.0, + "1035": 302618112.0, + "1036": 302618112.0, + "1037": 302618112.0, + "1038": 302618112.0, + "1039": 302618112.0, + "1040": 302618112.0, + "1041": 302618112.0, + "1042": 302618112.0, + "1043": 302618112.0, + "1044": 302618112.0, + "1045": 302618112.0, + "1046": 302618112.0, + "1047": 302618112.0, + "1048": 302618112.0, + "1049": 302618112.0, + "1050": 302618112.0, + "1051": 302618112.0, + "1052": 302618112.0, + "1053": 302618112.0, + "1054": 302618112.0, + "1055": 302618112.0, + "1056": 302618112.0, + "1057": 302618112.0, + "1058": 302618112.0, + "1059": 302618112.0, + "1060": 302618112.0, + "1061": 302618112.0, + "1062": 302618112.0, + "1063": 302618112.0, + "1064": 302618112.0, + "1065": 302618112.0, + "1066": 302618112.0, + "1067": 302618112.0, + "1068": 302618112.0, + "1069": 302618112.0, + "1070": 302618112.0, + "1071": 302618112.0, + "1072": 302618112.0, + "1073": 302618112.0, + "1074": 302618112.0, + "1075": 302618112.0, + "1076": 302618112.0, + "1077": 302618112.0, + "1078": 302618112.0, + "1079": 302618112.0, + "1080": 302618112.0, + "1081": 302618112.0, + "1082": 302618112.0, + "1083": 302618112.0, + "1084": 302618112.0, + "1085": 302618112.0, + "1086": 302618112.0, + "1087": 302618112.0, + "1088": 302618112.0, + "1089": 302618112.0, + "1090": 302618112.0, + "1091": 302618112.0, + "1092": 302618112.0, + "1093": 302618112.0, + "1094": 302618112.0, + "1095": 302618112.0, + "1096": 302618112.0, + "1097": 302618112.0, + "1098": 302618112.0, + "1099": 302618112.0, 
+ "1100": 302618112.0, + "1101": 302618112.0, + "1102": 302618112.0, + "1103": 302618112.0, + "1104": 302618112.0, + "1105": 302618112.0, + "1106": 302618112.0, + "1107": 302618112.0, + "1108": 302618112.0, + "1109": 302618112.0, + "1110": 302618112.0, + "1111": 302618112.0, + "1112": 302618112.0, + "1113": 302618112.0, + "1114": 302618112.0, + "1115": 302618112.0, + "1116": 302618112.0, + "1117": 302618112.0, + "1118": 302618112.0, + "1119": 302618112.0, + "1120": 302618112.0, + "1121": 302618112.0, + "1122": 302618112.0, + "1123": 302618112.0, + "1124": 302618112.0, + "1125": 302618112.0, + "1126": 302618112.0, + "1127": 302618112.0, + "1128": 302618112.0, + "1129": 302618112.0, + "1130": 302618112.0, + "1131": 302618112.0, + "1132": 302618112.0, + "1133": 302618112.0, + "1134": 302618112.0, + "1135": 302618112.0, + "1136": 302618112.0, + "1137": 302618112.0, + "1138": 302618112.0, + "1139": 302618112.0, + "1140": 302618112.0, + "1141": 302618112.0, + "1142": 302618112.0, + "1143": 302618112.0, + "1144": 302618112.0, + "1145": 302618112.0, + "1146": 302618112.0, + "1147": 302618112.0, + "1148": 302618112.0, + "1149": 302618112.0, + "1150": 302618112.0, + "1151": 302618112.0, + "1152": 302618112.0, + "1153": 302618112.0, + "1154": 302618112.0, + "1155": 302618112.0, + "1156": 302618112.0, + "1157": 302618112.0, + "1158": 302618112.0, + "1159": 302618112.0, + "1160": 302618112.0, + "1161": 302618112.0, + "1162": 302618112.0, + "1163": 302618112.0, + "1164": 302618112.0, + "1165": 302618112.0, + "1166": 302618112.0, + "1167": 302618112.0, + "1168": 302618112.0, + "1169": 302618112.0, + "1170": 302618112.0, + "1171": 302618112.0, + "1172": 302618112.0, + "1173": 302618112.0, + "1174": 302618112.0, + "1175": 302618112.0, + "1176": 302618112.0, + "1177": 302618112.0, + "1178": 302618112.0, + "1179": 302618112.0, + "1180": 302618112.0, + "1181": 302618112.0, + "1182": 302618112.0, + "1183": 302618112.0, + "1184": 302618112.0, + "1185": 302618112.0, + "1186": 
302618112.0, + "1187": 302618112.0, + "1188": 302618112.0, + "1189": 302618112.0, + "1190": 302618112.0, + "1191": 302618112.0, + "1192": 302618112.0, + "1193": 302618112.0, + "1194": 302618112.0, + "1195": 302618112.0, + "1196": 302618112.0, + "1197": 302618112.0, + "1198": 302618112.0, + "1199": 302618112.0, + "1200": 302618112.0, + "1201": 302618112.0, + "1202": 302618112.0, + "1203": 302618112.0, + "1204": 302618112.0, + "1205": 302618112.0, + "1206": 302618112.0, + "1207": 302618112.0, + "1208": 302618112.0, + "1209": 302618112.0, + "1210": 302618112.0, + "1211": 302618112.0, + "1212": 302618112.0, + "1213": 302618112.0, + "1214": 302618112.0, + "1215": 302618112.0, + "1216": 302618112.0, + "1217": 302618112.0, + "1218": 302618112.0, + "1219": 302618112.0, + "1220": 302618112.0, + "1221": 302618112.0, + "1222": 302618112.0, + "1223": 302618112.0, + "1224": 302618112.0, + "1225": 302618112.0, + "1226": 302618112.0, + "1227": 302618112.0, + "1228": 302618112.0, + "1229": 302618112.0, + "1230": 302618112.0, + "1231": 302618112.0, + "1232": 302618112.0, + "1233": 302618112.0, + "1234": 302618112.0, + "1235": 302618112.0, + "1236": 302618112.0, + "1237": 302618112.0, + "1238": 302618112.0, + "1239": 302618112.0, + "1240": 302618112.0, + "1241": 302618112.0, + "1242": 302618112.0, + "1243": 302618112.0, + "1244": 302618112.0, + "1245": 302618112.0, + "1246": 302618112.0, + "1247": 302618112.0, + "1248": 302618112.0, + "1249": 302618112.0, + "1250": 302618112.0, + "1251": 302618112.0, + "1252": 302618112.0, + "1253": 302618112.0, + "1254": 302618112.0, + "1255": 302618112.0, + "1256": 302618112.0, + "1257": 302618112.0, + "1258": 302618112.0, + "1259": 302618112.0, + "1260": 302618112.0, + "1261": 302618112.0, + "1262": 302618112.0, + "1263": 302618112.0, + "1264": 302618112.0, + "1265": 302618112.0, + "1266": 302618112.0, + "1267": 302618112.0, + "1268": 302618112.0, + "1269": 302618112.0, + "1270": 302618112.0, + "1271": 302618112.0, + "1272": 302618112.0, + 
"1273": 302618112.0, + "1274": 302618112.0, + "1275": 302618112.0, + "1276": 302618112.0, + "1277": 302618112.0, + "1278": 302618112.0, + "1279": 302618112.0, + "1280": 302618112.0, + "1281": 302618112.0, + "1282": 302618112.0, + "1283": 302618112.0, + "1284": 302618112.0, + "1285": 302618112.0, + "1286": 302618112.0, + "1287": 302618112.0, + "1288": 302618112.0, + "1289": 302618112.0, + "1290": 302618112.0, + "1291": 302618112.0, + "1292": 302618112.0, + "1293": 302618112.0, + "1294": 302618112.0, + "1295": 302618112.0, + "1296": 302618112.0, + "1297": 302618112.0, + "1298": 302618112.0, + "1299": 302618112.0, + "1300": 302618112.0, + "1301": 302618112.0, + "1302": 302618112.0, + "1303": 302618112.0, + "1304": 302618112.0, + "1305": 302618112.0, + "1306": 302618112.0, + "1307": 302618112.0, + "1308": 302618112.0, + "1309": 302618112.0, + "1310": 302618112.0, + "1311": 302618112.0, + "1312": 302618112.0, + "1313": 302618112.0, + "1314": 302618112.0, + "1315": 302618112.0, + "1316": 302618112.0, + "1317": 302618112.0, + "1318": 302618112.0, + "1319": 302618112.0, + "1320": 302618112.0, + "1321": 302618112.0, + "1322": 302618112.0, + "1323": 302618112.0, + "1324": 302618112.0, + "1325": 302618112.0, + "1326": 302618112.0, + "1327": 302618112.0, + "1328": 302618112.0, + "1329": 302618112.0, + "1330": 302618112.0, + "1331": 302618112.0, + "1332": 302618112.0, + "1333": 302618112.0, + "1334": 302618112.0, + "1335": 302618112.0, + "1336": 302618112.0, + "1337": 302618112.0, + "1338": 302618112.0, + "1339": 302618112.0, + "1340": 302618112.0, + "1341": 302618112.0, + "1342": 302618112.0, + "1343": 302618112.0, + "1344": 302618112.0, + "1345": 302618112.0, + "1346": 302618112.0, + "1347": 302618112.0, + "1348": 302618112.0, + "1349": 302618112.0, + "1350": 302618112.0, + "1351": 302618112.0, + "1352": 302618112.0, + "1353": 302618112.0, + "1354": 302618112.0, + "1355": 302618112.0, + "1356": 302618112.0, + "1357": 302618112.0, + "1358": 302618112.0, + "1359": 302618112.0, 
+ "1360": 302618112.0, + "1361": 302618112.0, + "1362": 302618112.0, + "1363": 302618112.0, + "1364": 302618112.0, + "1365": 302618112.0, + "1366": 302618112.0, + "1367": 302618112.0, + "1368": 302618112.0, + "1369": 302618112.0, + "1370": 302618112.0, + "1371": 302618112.0, + "1372": 302618112.0, + "1373": 302618112.0, + "1374": 302618112.0, + "1375": 302618112.0, + "1376": 302618112.0, + "1377": 302618112.0, + "1378": 302618112.0, + "1379": 302618112.0, + "1380": 302618112.0, + "1381": 302618112.0, + "1382": 302618112.0, + "1383": 302618112.0, + "1384": 302618112.0, + "1385": 302618112.0, + "1386": 302618112.0, + "1387": 302618112.0, + "1388": 302618112.0, + "1389": 302618112.0, + "1390": 302618112.0, + "1391": 302618112.0, + "1392": 302618112.0, + "1393": 302618112.0, + "1394": 302618112.0, + "1395": 302618112.0, + "1396": 302618112.0, + "1397": 302618112.0, + "1398": 302618112.0, + "1399": 302618112.0, + "1400": 302618112.0, + "1401": 302618112.0, + "1402": 302618112.0, + "1403": 302618112.0, + "1404": 302618112.0, + "1405": 302618112.0, + "1406": 302618112.0, + "1407": 302618112.0, + "1408": 302618112.0, + "1409": 302618112.0, + "1410": 302618112.0, + "1411": 302618112.0, + "1412": 302618112.0, + "1413": 302618112.0, + "1414": 302618112.0, + "1415": 302618112.0, + "1416": 302618112.0, + "1417": 302618112.0, + "1418": 302618112.0, + "1419": 302618112.0, + "1420": 302618112.0, + "1421": 302618112.0, + "1422": 302618112.0, + "1423": 302618112.0, + "1424": 302618112.0, + "1425": 302618112.0, + "1426": 302618112.0, + "1427": 302618112.0, + "1428": 302618112.0, + "1429": 302618112.0, + "1430": 302618112.0, + "1431": 302618112.0, + "1432": 302618112.0, + "1433": 302618112.0, + "1434": 302618112.0, + "1435": 302618112.0, + "1436": 302618112.0, + "1437": 302618112.0, + "1438": 302618112.0, + "1439": 302618112.0, + "1440": 302618112.0, + "1441": 302618112.0, + "1442": 302618112.0, + "1443": 302618112.0, + "1444": 302618112.0, + "1445": 302618112.0, + "1446": 
302618112.0, + "1447": 302618112.0, + "1448": 302618112.0, + "1449": 302618112.0, + "1450": 302618112.0, + "1451": 302618112.0, + "1452": 302618112.0, + "1453": 302618112.0, + "1454": 302618112.0, + "1455": 302618112.0, + "1456": 302618112.0, + "1457": 302618112.0, + "1458": 302618112.0, + "1459": 302618112.0, + "1460": 302618112.0, + "1461": 302618112.0, + "1462": 302618112.0, + "1463": 302618112.0, + "1464": 302618112.0, + "1465": 302618112.0, + "1466": 302618112.0, + "1467": 302618112.0, + "1468": 302618112.0, + "1469": 302618112.0, + "1470": 302618112.0, + "1471": 302618112.0, + "1472": 302618112.0, + "1473": 302618112.0, + "1474": 302618112.0, + "1475": 302618112.0, + "1476": 302618112.0, + "1477": 302618112.0, + "1478": 302618112.0, + "1479": 302618112.0, + "1480": 302618112.0, + "1481": 302618112.0, + "1482": 302618112.0, + "1483": 302618112.0, + "1484": 302618112.0, + "1485": 302618112.0, + "1486": 302618112.0, + "1487": 302618112.0, + "1488": 302618112.0, + "1489": 302618112.0, + "1490": 302618112.0, + "1491": 302618112.0, + "1492": 302618112.0, + "1493": 302618112.0, + "1494": 302618112.0, + "1495": 302618112.0, + "1496": 302618112.0, + "1497": 302618112.0, + "1498": 302618112.0, + "1499": 302618112.0, + "1500": 302618112.0, + "1501": 302618112.0, + "1502": 302618112.0, + "1503": 302618112.0, + "1504": 302618112.0, + "1505": 302618112.0, + "1506": 302618112.0, + "1507": 302618112.0, + "1508": 302618112.0, + "1509": 302618112.0, + "1510": 302618112.0, + "1511": 302618112.0, + "1512": 302618112.0, + "1513": 302618112.0, + "1514": 302618112.0, + "1515": 302618112.0, + "1516": 302618112.0, + "1517": 302618112.0, + "1518": 302618112.0, + "1519": 302618112.0, + "1520": 302618112.0, + "1521": 302618112.0, + "1522": 302618112.0, + "1523": 302618112.0, + "1524": 302618112.0, + "1525": 302618112.0, + "1526": 302618112.0, + "1527": 302618112.0, + "1528": 302618112.0, + "1529": 302618112.0, + "1530": 302618112.0, + "1531": 302618112.0, + "1532": 302618112.0, + 
"1533": 302618112.0, + "1534": 302618112.0, + "1535": 302618112.0, + "1536": 302618112.0, + "1537": 302618112.0, + "1538": 302618112.0, + "1539": 302618112.0, + "1540": 302618112.0, + "1541": 302618112.0, + "1542": 302618112.0, + "1543": 302618112.0, + "1544": 302618112.0, + "1545": 302618112.0, + "1546": 302618112.0, + "1547": 302618112.0, + "1548": 302618112.0, + "1549": 302618112.0, + "1550": 302618112.0, + "1551": 302618112.0, + "1552": 302618112.0, + "1553": 302618112.0, + "1554": 302618112.0, + "1555": 302618112.0, + "1556": 302618112.0, + "1557": 302618112.0, + "1558": 302618112.0, + "1559": 302618112.0, + "1560": 302618112.0, + "1561": 302618112.0, + "1562": 302618112.0, + "1563": 302618112.0, + "1564": 302618112.0, + "1565": 302618112.0, + "1566": 302618112.0, + "1567": 302618112.0, + "1568": 302618112.0, + "1569": 302618112.0, + "1570": 302618112.0, + "1571": 302618112.0, + "1572": 302618112.0, + "1573": 302618112.0, + "1574": 302618112.0, + "1575": 302618112.0, + "1576": 302618112.0, + "1577": 302618112.0, + "1578": 302618112.0, + "1579": 302618112.0, + "1580": 302618112.0, + "1581": 302618112.0, + "1582": 302618112.0, + "1583": 302618112.0, + "1584": 302618112.0, + "1585": 302618112.0, + "1586": 302618112.0, + "1587": 302618112.0, + "1588": 302618112.0, + "1589": 302618112.0, + "1590": 302618112.0, + "1591": 302618112.0, + "1592": 302618112.0, + "1593": 302618112.0, + "1594": 302618112.0, + "1595": 302618112.0, + "1596": 302618112.0, + "1597": 302618112.0, + "1598": 302618112.0, + "1599": 302618112.0, + "1600": 302618112.0, + "1601": 302618112.0, + "1602": 302618112.0, + "1603": 302618112.0, + "1604": 302618112.0, + "1605": 302618112.0, + "1606": 302618112.0, + "1607": 302618112.0, + "1608": 302618112.0, + "1609": 302618112.0, + "1610": 302618112.0, + "1611": 302618112.0, + "1612": 302618112.0, + "1613": 302618112.0, + "1614": 302618112.0, + "1615": 302618112.0, + "1616": 302618112.0, + "1617": 302618112.0, + "1618": 302618112.0, + "1619": 302618112.0, 
+ "1620": 302618112.0, + "1621": 302618112.0, + "1622": 302618112.0, + "1623": 302618112.0, + "1624": 302618112.0, + "1625": 302618112.0, + "1626": 302618112.0, + "1627": 302618112.0, + "1628": 302618112.0, + "1629": 302618112.0, + "1630": 302618112.0, + "1631": 302618112.0, + "1632": 302618112.0, + "1633": 302618112.0, + "1634": 302618112.0, + "1635": 302618112.0, + "1636": 302618112.0, + "1637": 302618112.0, + "1638": 302618112.0, + "1639": 302618112.0, + "1640": 302618112.0, + "1641": 302618112.0, + "1642": 302618112.0, + "1643": 302618112.0, + "1644": 302618112.0, + "1645": 302618112.0, + "1646": 302618112.0, + "1647": 302618112.0, + "1648": 302618112.0, + "1649": 302618112.0, + "1650": 302618112.0, + "1651": 302618112.0, + "1652": 302618112.0, + "1653": 302618112.0, + "1654": 302618112.0, + "1655": 302618112.0, + "1656": 302618112.0, + "1657": 302618112.0, + "1658": 302618112.0, + "1659": 302618112.0, + "1660": 302618112.0, + "1661": 302618112.0, + "1662": 302618112.0, + "1663": 302618112.0, + "1664": 302618112.0, + "1665": 302618112.0, + "1666": 302618112.0, + "1667": 302618112.0, + "1668": 302618112.0, + "1669": 302618112.0, + "1670": 302618112.0, + "1671": 302618112.0, + "1672": 302618112.0, + "1673": 302618112.0, + "1674": 302618112.0, + "1675": 302618112.0, + "1676": 302618112.0, + "1677": 302618112.0, + "1678": 302618112.0, + "1679": 302618112.0, + "1680": 302618112.0, + "1681": 302618112.0, + "1682": 302618112.0, + "1683": 302618112.0, + "1684": 302618112.0, + "1685": 302618112.0, + "1686": 302618112.0, + "1687": 302618112.0, + "1688": 302618112.0, + "1689": 302618112.0, + "1690": 302618112.0, + "1691": 302618112.0, + "1692": 302618112.0, + "1693": 302618112.0, + "1694": 302618112.0, + "1695": 302618112.0, + "1696": 302618112.0, + "1697": 302618112.0, + "1698": 302618112.0, + "1699": 302618112.0, + "1700": 302618112.0, + "1701": 302618112.0, + "1702": 302618112.0, + "1703": 302618112.0, + "1704": 302618112.0, + "1705": 302618112.0, + "1706": 
302618112.0, + "1707": 302618112.0, + "1708": 302618112.0, + "1709": 302618112.0, + "1710": 302618112.0, + "1711": 302618112.0, + "1712": 302618112.0, + "1713": 302618112.0, + "1714": 302618112.0, + "1715": 302618112.0, + "1716": 302618112.0, + "1717": 302618112.0, + "1718": 302618112.0, + "1719": 302618112.0, + "1720": 302618112.0, + "1721": 302618112.0, + "1722": 302618112.0, + "1723": 302618112.0, + "1724": 302618112.0, + "1725": 302618112.0, + "1726": 302618112.0, + "1727": 302618112.0, + "1728": 302618112.0, + "1729": 302618112.0, + "1730": 302618112.0, + "1731": 302618112.0, + "1732": 302618112.0, + "1733": 302618112.0, + "1734": 302618112.0, + "1735": 302618112.0, + "1736": 302618112.0, + "1737": 302618112.0, + "1738": 302618112.0, + "1739": 302618112.0, + "1740": 302618112.0, + "1741": 302618112.0, + "1742": 302618112.0, + "1743": 302618112.0, + "1744": 302618112.0, + "1745": 302618112.0, + "1746": 302618112.0, + "1747": 302618112.0, + "1748": 302618112.0, + "1749": 302618112.0, + "1750": 302618112.0, + "1751": 302618112.0, + "1752": 302618112.0, + "1753": 302618112.0, + "1754": 302618112.0, + "1755": 302618112.0, + "1756": 302618112.0, + "1757": 302618112.0, + "1758": 302618112.0, + "1759": 302618112.0, + "1760": 302618112.0, + "1761": 302618112.0, + "1762": 302618112.0, + "1763": 302618112.0, + "1764": 302618112.0, + "1765": 302618112.0, + "1766": 302618112.0, + "1767": 302618112.0, + "1768": 302618112.0, + "1769": 302618112.0, + "1770": 302618112.0, + "1771": 302618112.0, + "1772": 302618112.0, + "1773": 302618112.0, + "1774": 302618112.0, + "1775": 302618112.0, + "1776": 302618112.0, + "1777": 302618112.0, + "1778": 302618112.0, + "1779": 302618112.0, + "1780": 302618112.0, + "1781": 302618112.0, + "1782": 302618112.0, + "1783": 302618112.0, + "1784": 302618112.0, + "1785": 302618112.0, + "1786": 302618112.0, + "1787": 302618112.0, + "1788": 302618112.0, + "1789": 302618112.0, + "1790": 302618112.0, + "1791": 302618112.0, + "1792": 302618112.0, + 
"1793": 302618112.0, + "1794": 302618112.0, + "1795": 302618112.0, + "1796": 302618112.0, + "1797": 302618112.0, + "1798": 302618112.0, + "1799": 302618112.0, + "1800": 302618112.0, + "1801": 302618112.0, + "1802": 302618112.0, + "1803": 302618112.0, + "1804": 302618112.0, + "1805": 302618112.0, + "1806": 302618112.0, + "1807": 302618112.0, + "1808": 302618112.0, + "1809": 302618112.0, + "1810": 302618112.0, + "1811": 302618112.0, + "1812": 302618112.0, + "1813": 302618112.0, + "1814": 302618112.0, + "1815": 302618112.0, + "1816": 302618112.0, + "1817": 302618112.0, + "1818": 302618112.0, + "1819": 302618112.0, + "1820": 302618112.0, + "1821": 302618112.0, + "1822": 302618112.0, + "1823": 302618112.0, + "1824": 302618112.0, + "1825": 302618112.0, + "1826": 302618112.0, + "1827": 302618112.0, + "1828": 302618112.0, + "1829": 302618112.0, + "1830": 302618112.0, + "1831": 302618112.0, + "1832": 302618112.0, + "1833": 302618112.0, + "1834": 302618112.0, + "1835": 302618112.0, + "1836": 302618112.0, + "1837": 302618112.0, + "1838": 302618112.0, + "1839": 302618112.0, + "1840": 302618112.0, + "1841": 302618112.0, + "1842": 302618112.0, + "1843": 302618112.0, + "1844": 302618112.0, + "1845": 302618112.0, + "1846": 302618112.0, + "1847": 302618112.0, + "1848": 302618112.0, + "1849": 302618112.0, + "1850": 302618112.0, + "1851": 302618112.0, + "1852": 302618112.0, + "1853": 302618112.0, + "1854": 302618112.0, + "1855": 302618112.0, + "1856": 302618112.0, + "1857": 302618112.0, + "1858": 302618112.0, + "1859": 302618112.0, + "1860": 302618112.0, + "1861": 302618112.0, + "1862": 302618112.0, + "1863": 302618112.0, + "1864": 302618112.0, + "1865": 302618112.0, + "1866": 302618112.0, + "1867": 302618112.0, + "1868": 302618112.0, + "1869": 302618112.0, + "1870": 302618112.0, + "1871": 302618112.0, + "1872": 302618112.0, + "1873": 302618112.0, + "1874": 302618112.0, + "1875": 302618112.0, + "1876": 302618112.0, + "1877": 302618112.0, + "1878": 302618112.0, + "1879": 302618112.0, 
+ "1880": 302618112.0, + "1881": 302618112.0, + "1882": 302618112.0, + "1883": 302618112.0, + "1884": 302618112.0, + "1885": 302618112.0, + "1886": 302618112.0, + "1887": 302618112.0, + "1888": 302618112.0, + "1889": 302618112.0, + "1890": 302618112.0, + "1891": 302618112.0, + "1892": 302618112.0, + "1893": 302618112.0, + "1894": 302618112.0, + "1895": 302618112.0, + "1896": 302618112.0, + "1897": 302618112.0, + "1898": 302618112.0, + "1899": 302618112.0, + "1900": 302618112.0, + "1901": 302618112.0, + "1902": 302618112.0, + "1903": 302618112.0, + "1904": 302618112.0, + "1905": 302618112.0, + "1906": 302618112.0, + "1907": 302618112.0, + "1908": 302618112.0, + "1909": 302618112.0, + "1910": 302618112.0, + "1911": 302618112.0, + "1912": 302618112.0, + "1913": 302618112.0, + "1914": 302618112.0, + "1915": 302618112.0, + "1916": 302618112.0, + "1917": 302618112.0, + "1918": 302618112.0, + "1919": 302618112.0, + "1920": 302618112.0, + "1921": 302618112.0, + "1922": 302618112.0, + "1923": 302618112.0, + "1924": 302618112.0, + "1925": 302618112.0, + "1926": 302618112.0, + "1927": 302618112.0, + "1928": 302618112.0, + "1929": 302618112.0, + "1930": 302618112.0, + "1931": 302618112.0, + "1932": 302618112.0, + "1933": 302618112.0, + "1934": 302618112.0, + "1935": 302618112.0, + "1936": 302618112.0, + "1937": 302618112.0, + "1938": 302618112.0, + "1939": 302618112.0, + "1940": 302618112.0, + "1941": 302618112.0, + "1942": 302618112.0, + "1943": 302618112.0, + "1944": 302618112.0, + "1945": 302618112.0, + "1946": 302618112.0, + "1947": 302618112.0, + "1948": 302618112.0, + "1949": 302618112.0, + "1950": 302618112.0, + "1951": 302618112.0, + "1952": 302618112.0, + "1953": 302618112.0, + "1954": 302618112.0, + "1955": 302618112.0, + "1956": 302618112.0, + "1957": 302618112.0, + "1958": 302618112.0, + "1959": 302618112.0, + "1960": 302618112.0, + "1961": 302618112.0, + "1962": 302618112.0, + "1963": 302618112.0, + "1964": 302618112.0, + "1965": 302618112.0, + "1966": 
302618112.0, + "1967": 302618112.0, + "1968": 302618112.0, + "1969": 302618112.0, + "1970": 302618112.0, + "1971": 302618112.0, + "1972": 302618112.0, + "1973": 302618112.0, + "1974": 302618112.0, + "1975": 302618112.0, + "1976": 302618112.0, + "1977": 302618112.0, + "1978": 302618112.0, + "1979": 302618112.0, + "1980": 302618112.0, + "1981": 302618112.0, + "1982": 302618112.0, + "1983": 302618112.0, + "1984": 302618112.0, + "1985": 302618112.0, + "1986": 302618112.0, + "1987": 302618112.0, + "1988": 302618112.0, + "1989": 302618112.0, + "1990": 302618112.0, + "1991": 302618112.0, + "1992": 302618112.0, + "1993": 302618112.0, + "1994": 302618112.0, + "1995": 302618112.0, + "1996": 302618112.0, + "1997": 302618112.0, + "1998": 302618112.0, + "1999": 302618112.0, + "2000": 302618112.0 + } + }, + "mem-max-allocated-bytes": { + "start_step": 1, + "end_step": 2000, + "step_interval": 1, + "values": { + "1": 362060288.0, + "2": 428612096.0, + "3": 428612096.0, + "4": 428612096.0, + "5": 428612096.0, + "6": 428612096.0, + "7": 428612096.0, + "8": 428612096.0, + "9": 428612096.0, + "10": 428612096.0, + "11": 428612096.0, + "12": 428612096.0, + "13": 428612096.0, + "14": 428612096.0, + "15": 428612096.0, + "16": 428612096.0, + "17": 428612096.0, + "18": 428612096.0, + "19": 428612096.0, + "20": 428612096.0, + "21": 428612096.0, + "22": 428612096.0, + "23": 428612096.0, + "24": 428612096.0, + "25": 428612096.0, + "26": 428612096.0, + "27": 428612096.0, + "28": 428612096.0, + "29": 428612096.0, + "30": 428612096.0, + "31": 428612096.0, + "32": 428612096.0, + "33": 428612096.0, + "34": 428612096.0, + "35": 428612096.0, + "36": 428612096.0, + "37": 428612096.0, + "38": 428612096.0, + "39": 428612096.0, + "40": 428612096.0, + "41": 428612096.0, + "42": 428612096.0, + "43": 428612096.0, + "44": 428612096.0, + "45": 428612096.0, + "46": 428612096.0, + "47": 428612096.0, + "48": 428612096.0, + "49": 428612096.0, + "50": 428612096.0, + "51": 428612096.0, + "52": 428612096.0, + "53": 
428612096.0, + "54": 428612096.0, + "55": 428612096.0, + "56": 428612096.0, + "57": 428612096.0, + "58": 428612096.0, + "59": 428612096.0, + "60": 428612096.0, + "61": 428612096.0, + "62": 428612096.0, + "63": 428612096.0, + "64": 428612096.0, + "65": 428612096.0, + "66": 428612096.0, + "67": 428612096.0, + "68": 428612096.0, + "69": 428612096.0, + "70": 428612096.0, + "71": 428612096.0, + "72": 428612096.0, + "73": 428612096.0, + "74": 428612096.0, + "75": 428612096.0, + "76": 428612096.0, + "77": 428612096.0, + "78": 428612096.0, + "79": 428612096.0, + "80": 428612096.0, + "81": 428612096.0, + "82": 428612096.0, + "83": 428612096.0, + "84": 428612096.0, + "85": 428612096.0, + "86": 428612096.0, + "87": 428612096.0, + "88": 428612096.0, + "89": 428612096.0, + "90": 428612096.0, + "91": 428612096.0, + "92": 428612096.0, + "93": 428612096.0, + "94": 428612096.0, + "95": 428612096.0, + "96": 428612096.0, + "97": 428612096.0, + "98": 428612096.0, + "99": 428612096.0, + "100": 428612096.0, + "101": 428612096.0, + "102": 428612096.0, + "103": 428612096.0, + "104": 428612096.0, + "105": 428612096.0, + "106": 428612096.0, + "107": 428612096.0, + "108": 428612096.0, + "109": 428612096.0, + "110": 428612096.0, + "111": 428612096.0, + "112": 428612096.0, + "113": 428612096.0, + "114": 428612096.0, + "115": 428612096.0, + "116": 428612096.0, + "117": 428612096.0, + "118": 428612096.0, + "119": 428612096.0, + "120": 428612096.0, + "121": 428612096.0, + "122": 428612096.0, + "123": 428612096.0, + "124": 428612096.0, + "125": 428612096.0, + "126": 428612096.0, + "127": 428612096.0, + "128": 428612096.0, + "129": 428612096.0, + "130": 428612096.0, + "131": 428612096.0, + "132": 428612096.0, + "133": 428612096.0, + "134": 428612096.0, + "135": 428612096.0, + "136": 428612096.0, + "137": 428612096.0, + "138": 428612096.0, + "139": 428612096.0, + "140": 428612096.0, + "141": 428612096.0, + "142": 428612096.0, + "143": 428612096.0, + "144": 428612096.0, + "145": 428612096.0, + "146": 
428612096.0, + "147": 428612096.0, + "148": 428612096.0, + "149": 428612096.0, + "150": 428612096.0, + "151": 428612096.0, + "152": 428612096.0, + "153": 428612096.0, + "154": 428612096.0, + "155": 428612096.0, + "156": 428612096.0, + "157": 428612096.0, + "158": 428612096.0, + "159": 428612096.0, + "160": 428612096.0, + "161": 428612096.0, + "162": 428612096.0, + "163": 428612096.0, + "164": 428612096.0, + "165": 428612096.0, + "166": 428612096.0, + "167": 428612096.0, + "168": 428612096.0, + "169": 428612096.0, + "170": 428612096.0, + "171": 428612096.0, + "172": 428612096.0, + "173": 428612096.0, + "174": 428612096.0, + "175": 428612096.0, + "176": 428612096.0, + "177": 428612096.0, + "178": 428612096.0, + "179": 428612096.0, + "180": 428612096.0, + "181": 428612096.0, + "182": 428612096.0, + "183": 428612096.0, + "184": 428612096.0, + "185": 428612096.0, + "186": 428612096.0, + "187": 428612096.0, + "188": 428612096.0, + "189": 428612096.0, + "190": 428612096.0, + "191": 428612096.0, + "192": 428612096.0, + "193": 428612096.0, + "194": 428612096.0, + "195": 428612096.0, + "196": 428612096.0, + "197": 428612096.0, + "198": 428612096.0, + "199": 428612096.0, + "200": 428612096.0, + "201": 428612096.0, + "202": 428612096.0, + "203": 428612096.0, + "204": 428612096.0, + "205": 428612096.0, + "206": 428612096.0, + "207": 428612096.0, + "208": 428612096.0, + "209": 428612096.0, + "210": 428612096.0, + "211": 428612096.0, + "212": 428612096.0, + "213": 428612096.0, + "214": 428612096.0, + "215": 428612096.0, + "216": 428612096.0, + "217": 428612096.0, + "218": 428612096.0, + "219": 428612096.0, + "220": 428612096.0, + "221": 428612096.0, + "222": 428612096.0, + "223": 428612096.0, + "224": 428612096.0, + "225": 428612096.0, + "226": 428612096.0, + "227": 428612096.0, + "228": 428612096.0, + "229": 428612096.0, + "230": 428612096.0, + "231": 428612096.0, + "232": 428612096.0, + "233": 428612096.0, + "234": 428612096.0, + "235": 428612096.0, + "236": 428612096.0, + 
"237": 428612096.0, + "238": 428612096.0, + "239": 428612096.0, + "240": 428612096.0, + "241": 428612096.0, + "242": 428612096.0, + "243": 428612096.0, + "244": 428612096.0, + "245": 428612096.0, + "246": 428612096.0, + "247": 428612096.0, + "248": 428612096.0, + "249": 428612096.0, + "250": 428612096.0, + "251": 428612096.0, + "252": 428612096.0, + "253": 428612096.0, + "254": 428612096.0, + "255": 428612096.0, + "256": 428612096.0, + "257": 428612096.0, + "258": 428612096.0, + "259": 428612096.0, + "260": 428612096.0, + "261": 428612096.0, + "262": 428612096.0, + "263": 428612096.0, + "264": 428612096.0, + "265": 428612096.0, + "266": 428612096.0, + "267": 428612096.0, + "268": 428612096.0, + "269": 428612096.0, + "270": 428612096.0, + "271": 428612096.0, + "272": 428612096.0, + "273": 428612096.0, + "274": 428612096.0, + "275": 428612096.0, + "276": 428612096.0, + "277": 428612096.0, + "278": 428612096.0, + "279": 428612096.0, + "280": 428612096.0, + "281": 428612096.0, + "282": 428612096.0, + "283": 428612096.0, + "284": 428612096.0, + "285": 428612096.0, + "286": 428612096.0, + "287": 428612096.0, + "288": 428612096.0, + "289": 428612096.0, + "290": 428612096.0, + "291": 428612096.0, + "292": 428612096.0, + "293": 428612096.0, + "294": 428612096.0, + "295": 428612096.0, + "296": 428612096.0, + "297": 428612096.0, + "298": 428612096.0, + "299": 428612096.0, + "300": 428612096.0, + "301": 428612096.0, + "302": 428612096.0, + "303": 428612096.0, + "304": 428612096.0, + "305": 428612096.0, + "306": 428612096.0, + "307": 428612096.0, + "308": 428612096.0, + "309": 428612096.0, + "310": 428612096.0, + "311": 428612096.0, + "312": 428612096.0, + "313": 428612096.0, + "314": 428612096.0, + "315": 428612096.0, + "316": 428612096.0, + "317": 428612096.0, + "318": 428612096.0, + "319": 428612096.0, + "320": 428612096.0, + "321": 428612096.0, + "322": 428612096.0, + "323": 428612096.0, + "324": 428612096.0, + "325": 428612096.0, + "326": 428612096.0, + "327": 428612096.0, 
+ "328": 428612096.0, + "329": 428612096.0, + "330": 428612096.0, + "331": 428612096.0, + "332": 428612096.0, + "333": 428612096.0, + "334": 428612096.0, + "335": 428612096.0, + "336": 428612096.0, + "337": 428612096.0, + "338": 428612096.0, + "339": 428612096.0, + "340": 428612096.0, + "341": 428612096.0, + "342": 428612096.0, + "343": 428612096.0, + "344": 428612096.0, + "345": 428612096.0, + "346": 428612096.0, + "347": 428612096.0, + "348": 428612096.0, + "349": 428612096.0, + "350": 428612096.0, + "351": 428612096.0, + "352": 428612096.0, + "353": 428612096.0, + "354": 428612096.0, + "355": 428612096.0, + "356": 428612096.0, + "357": 428612096.0, + "358": 428612096.0, + "359": 428612096.0, + "360": 428612096.0, + "361": 428612096.0, + "362": 428612096.0, + "363": 428612096.0, + "364": 428612096.0, + "365": 428612096.0, + "366": 428612096.0, + "367": 428612096.0, + "368": 428612096.0, + "369": 428612096.0, + "370": 428612096.0, + "371": 428612096.0, + "372": 428612096.0, + "373": 428612096.0, + "374": 428612096.0, + "375": 428612096.0, + "376": 428612096.0, + "377": 428612096.0, + "378": 428612096.0, + "379": 428612096.0, + "380": 428612096.0, + "381": 428612096.0, + "382": 428612096.0, + "383": 428612096.0, + "384": 428612096.0, + "385": 428612096.0, + "386": 428612096.0, + "387": 428612096.0, + "388": 428612096.0, + "389": 428612096.0, + "390": 428612096.0, + "391": 428612096.0, + "392": 428612096.0, + "393": 428612096.0, + "394": 428612096.0, + "395": 428612096.0, + "396": 428612096.0, + "397": 428612096.0, + "398": 428612096.0, + "399": 428612096.0, + "400": 428612096.0, + "401": 428612096.0, + "402": 428612096.0, + "403": 428612096.0, + "404": 428612096.0, + "405": 428612096.0, + "406": 428612096.0, + "407": 428612096.0, + "408": 428612096.0, + "409": 428612096.0, + "410": 428612096.0, + "411": 428612096.0, + "412": 428612096.0, + "413": 428612096.0, + "414": 428612096.0, + "415": 428612096.0, + "416": 428612096.0, + "417": 428612096.0, + "418": 
428612096.0, + "419": 428612096.0, + "420": 428612096.0, + "421": 428612096.0, + "422": 428612096.0, + "423": 428612096.0, + "424": 428612096.0, + "425": 428612096.0, + "426": 428612096.0, + "427": 428612096.0, + "428": 428612096.0, + "429": 428612096.0, + "430": 428612096.0, + "431": 428612096.0, + "432": 428612096.0, + "433": 428612096.0, + "434": 428612096.0, + "435": 428612096.0, + "436": 428612096.0, + "437": 428612096.0, + "438": 428612096.0, + "439": 428612096.0, + "440": 428612096.0, + "441": 428612096.0, + "442": 428612096.0, + "443": 428612096.0, + "444": 428612096.0, + "445": 428612096.0, + "446": 428612096.0, + "447": 428612096.0, + "448": 428612096.0, + "449": 428612096.0, + "450": 428612096.0, + "451": 428612096.0, + "452": 428612096.0, + "453": 428612096.0, + "454": 428612096.0, + "455": 428612096.0, + "456": 428612096.0, + "457": 428612096.0, + "458": 428612096.0, + "459": 428612096.0, + "460": 428612096.0, + "461": 428612096.0, + "462": 428612096.0, + "463": 428612096.0, + "464": 428612096.0, + "465": 428612096.0, + "466": 428612096.0, + "467": 428612096.0, + "468": 428612096.0, + "469": 428612096.0, + "470": 428612096.0, + "471": 428612096.0, + "472": 428612096.0, + "473": 428612096.0, + "474": 428612096.0, + "475": 428612096.0, + "476": 428612096.0, + "477": 428612096.0, + "478": 428612096.0, + "479": 428612096.0, + "480": 428612096.0, + "481": 428612096.0, + "482": 428612096.0, + "483": 428612096.0, + "484": 428612096.0, + "485": 428612096.0, + "486": 428612096.0, + "487": 428612096.0, + "488": 428612096.0, + "489": 428612096.0, + "490": 428612096.0, + "491": 428612096.0, + "492": 428612096.0, + "493": 428612096.0, + "494": 428612096.0, + "495": 428612096.0, + "496": 428612096.0, + "497": 428612096.0, + "498": 428612096.0, + "499": 428612096.0, + "500": 428612096.0, + "501": 428612096.0, + "502": 428612096.0, + "503": 428612096.0, + "504": 428612096.0, + "505": 428612096.0, + "506": 428612096.0, + "507": 428612096.0, + "508": 428612096.0, + 
"509": 428612096.0, + "510": 428612096.0, + "511": 428612096.0, + "512": 428612096.0, + "513": 428612096.0, + "514": 428612096.0, + "515": 428612096.0, + "516": 428612096.0, + "517": 428612096.0, + "518": 428612096.0, + "519": 428612096.0, + "520": 428612096.0, + "521": 428612096.0, + "522": 428612096.0, + "523": 428612096.0, + "524": 428612096.0, + "525": 428612096.0, + "526": 428612096.0, + "527": 428612096.0, + "528": 428612096.0, + "529": 428612096.0, + "530": 428612096.0, + "531": 428612096.0, + "532": 428612096.0, + "533": 428612096.0, + "534": 428612096.0, + "535": 428612096.0, + "536": 428612096.0, + "537": 428612096.0, + "538": 428612096.0, + "539": 428612096.0, + "540": 428612096.0, + "541": 428612096.0, + "542": 428612096.0, + "543": 428612096.0, + "544": 428612096.0, + "545": 428612096.0, + "546": 428612096.0, + "547": 428612096.0, + "548": 428612096.0, + "549": 428612096.0, + "550": 428612096.0, + "551": 428612096.0, + "552": 428612096.0, + "553": 428612096.0, + "554": 428612096.0, + "555": 428612096.0, + "556": 428612096.0, + "557": 428612096.0, + "558": 428612096.0, + "559": 428612096.0, + "560": 428612096.0, + "561": 428612096.0, + "562": 428612096.0, + "563": 428612096.0, + "564": 428612096.0, + "565": 428612096.0, + "566": 428612096.0, + "567": 428612096.0, + "568": 428612096.0, + "569": 428612096.0, + "570": 428612096.0, + "571": 428612096.0, + "572": 428612096.0, + "573": 428612096.0, + "574": 428612096.0, + "575": 428612096.0, + "576": 428612096.0, + "577": 428612096.0, + "578": 428612096.0, + "579": 428612096.0, + "580": 428612096.0, + "581": 428612096.0, + "582": 428612096.0, + "583": 428612096.0, + "584": 428612096.0, + "585": 428612096.0, + "586": 428612096.0, + "587": 428612096.0, + "588": 428612096.0, + "589": 428612096.0, + "590": 428612096.0, + "591": 428612096.0, + "592": 428612096.0, + "593": 428612096.0, + "594": 428612096.0, + "595": 428612096.0, + "596": 428612096.0, + "597": 428612096.0, + "598": 428612096.0, + "599": 428612096.0, 
+ "600": 428612096.0, + "601": 428612096.0, + "602": 428612096.0, + "603": 428612096.0, + "604": 428612096.0, + "605": 428612096.0, + "606": 428612096.0, + "607": 428612096.0, + "608": 428612096.0, + "609": 428612096.0, + "610": 428612096.0, + "611": 428612096.0, + "612": 428612096.0, + "613": 428612096.0, + "614": 428612096.0, + "615": 428612096.0, + "616": 428612096.0, + "617": 428612096.0, + "618": 428612096.0, + "619": 428612096.0, + "620": 428612096.0, + "621": 428612096.0, + "622": 428612096.0, + "623": 428612096.0, + "624": 428612096.0, + "625": 428612096.0, + "626": 428612096.0, + "627": 428612096.0, + "628": 428612096.0, + "629": 428612096.0, + "630": 428612096.0, + "631": 428612096.0, + "632": 428612096.0, + "633": 428612096.0, + "634": 428612096.0, + "635": 428612096.0, + "636": 428612096.0, + "637": 428612096.0, + "638": 428612096.0, + "639": 428612096.0, + "640": 428612096.0, + "641": 428612096.0, + "642": 428612096.0, + "643": 428612096.0, + "644": 428612096.0, + "645": 428612096.0, + "646": 428612096.0, + "647": 428612096.0, + "648": 428612096.0, + "649": 428612096.0, + "650": 428612096.0, + "651": 428612096.0, + "652": 428612096.0, + "653": 428612096.0, + "654": 428612096.0, + "655": 428612096.0, + "656": 428612096.0, + "657": 428612096.0, + "658": 428612096.0, + "659": 428612096.0, + "660": 428612096.0, + "661": 428612096.0, + "662": 428612096.0, + "663": 428612096.0, + "664": 428612096.0, + "665": 428612096.0, + "666": 428612096.0, + "667": 428612096.0, + "668": 428612096.0, + "669": 428612096.0, + "670": 428612096.0, + "671": 428612096.0, + "672": 428612096.0, + "673": 428612096.0, + "674": 428612096.0, + "675": 428612096.0, + "676": 428612096.0, + "677": 428612096.0, + "678": 428612096.0, + "679": 428612096.0, + "680": 428612096.0, + "681": 428612096.0, + "682": 428612096.0, + "683": 428612096.0, + "684": 428612096.0, + "685": 428612096.0, + "686": 428612096.0, + "687": 428612096.0, + "688": 428612096.0, + "689": 428612096.0, + "690": 
428612096.0, + "691": 428612096.0, + "692": 428612096.0, + "693": 428612096.0, + "694": 428612096.0, + "695": 428612096.0, + "696": 428612096.0, + "697": 428612096.0, + "698": 428612096.0, + "699": 428612096.0, + "700": 428612096.0, + "701": 428612096.0, + "702": 428612096.0, + "703": 428612096.0, + "704": 428612096.0, + "705": 428612096.0, + "706": 428612096.0, + "707": 428612096.0, + "708": 428612096.0, + "709": 428612096.0, + "710": 428612096.0, + "711": 428612096.0, + "712": 428612096.0, + "713": 428612096.0, + "714": 428612096.0, + "715": 428612096.0, + "716": 428612096.0, + "717": 428612096.0, + "718": 428612096.0, + "719": 428612096.0, + "720": 428612096.0, + "721": 428612096.0, + "722": 428612096.0, + "723": 428612096.0, + "724": 428612096.0, + "725": 428612096.0, + "726": 428612096.0, + "727": 428612096.0, + "728": 428612096.0, + "729": 428612096.0, + "730": 428612096.0, + "731": 428612096.0, + "732": 428612096.0, + "733": 428612096.0, + "734": 428612096.0, + "735": 428612096.0, + "736": 428612096.0, + "737": 428612096.0, + "738": 428612096.0, + "739": 428612096.0, + "740": 428612096.0, + "741": 428612096.0, + "742": 428612096.0, + "743": 428612096.0, + "744": 428612096.0, + "745": 428612096.0, + "746": 428612096.0, + "747": 428612096.0, + "748": 428612096.0, + "749": 428612096.0, + "750": 428612096.0, + "751": 428612096.0, + "752": 428612096.0, + "753": 428612096.0, + "754": 428612096.0, + "755": 428612096.0, + "756": 428612096.0, + "757": 428612096.0, + "758": 428612096.0, + "759": 428612096.0, + "760": 428612096.0, + "761": 428612096.0, + "762": 428612096.0, + "763": 428612096.0, + "764": 428612096.0, + "765": 428612096.0, + "766": 428612096.0, + "767": 428612096.0, + "768": 428612096.0, + "769": 428612096.0, + "770": 428612096.0, + "771": 428612096.0, + "772": 428612096.0, + "773": 428612096.0, + "774": 428612096.0, + "775": 428612096.0, + "776": 428612096.0, + "777": 428612096.0, + "778": 428612096.0, + "779": 428612096.0, + "780": 428612096.0, + 
"781": 428612096.0, + "782": 428612096.0, + "783": 428612096.0, + "784": 428612096.0, + "785": 428612096.0, + "786": 428612096.0, + "787": 428612096.0, + "788": 428612096.0, + "789": 428612096.0, + "790": 428612096.0, + "791": 428612096.0, + "792": 428612096.0, + "793": 428612096.0, + "794": 428612096.0, + "795": 428612096.0, + "796": 428612096.0, + "797": 428612096.0, + "798": 428612096.0, + "799": 428612096.0, + "800": 428612096.0, + "801": 428612096.0, + "802": 428612096.0, + "803": 428612096.0, + "804": 428612096.0, + "805": 428612096.0, + "806": 428612096.0, + "807": 428612096.0, + "808": 428612096.0, + "809": 428612096.0, + "810": 428612096.0, + "811": 428612096.0, + "812": 428612096.0, + "813": 428612096.0, + "814": 428612096.0, + "815": 428612096.0, + "816": 428612096.0, + "817": 428612096.0, + "818": 428612096.0, + "819": 428612096.0, + "820": 428612096.0, + "821": 428612096.0, + "822": 428612096.0, + "823": 428612096.0, + "824": 428612096.0, + "825": 428612096.0, + "826": 428612096.0, + "827": 428612096.0, + "828": 428612096.0, + "829": 428612096.0, + "830": 428612096.0, + "831": 428612096.0, + "832": 428612096.0, + "833": 428612096.0, + "834": 428612096.0, + "835": 428612096.0, + "836": 428612096.0, + "837": 428612096.0, + "838": 428612096.0, + "839": 428612096.0, + "840": 428612096.0, + "841": 428612096.0, + "842": 428612096.0, + "843": 428612096.0, + "844": 428612096.0, + "845": 428612096.0, + "846": 428612096.0, + "847": 428612096.0, + "848": 428612096.0, + "849": 428612096.0, + "850": 428612096.0, + "851": 428612096.0, + "852": 428612096.0, + "853": 428612096.0, + "854": 428612096.0, + "855": 428612096.0, + "856": 428612096.0, + "857": 428612096.0, + "858": 428612096.0, + "859": 428612096.0, + "860": 428612096.0, + "861": 428612096.0, + "862": 428612096.0, + "863": 428612096.0, + "864": 428612096.0, + "865": 428612096.0, + "866": 428612096.0, + "867": 428612096.0, + "868": 428612096.0, + "869": 428612096.0, + "870": 428612096.0, + "871": 428612096.0, 
+ "872": 428612096.0, + "873": 428612096.0, + "874": 428612096.0, + "875": 428612096.0, + "876": 428612096.0, + "877": 428612096.0, + "878": 428612096.0, + "879": 428612096.0, + "880": 428612096.0, + "881": 428612096.0, + "882": 428612096.0, + "883": 428612096.0, + "884": 428612096.0, + "885": 428612096.0, + "886": 428612096.0, + "887": 428612096.0, + "888": 428612096.0, + "889": 428612096.0, + "890": 428612096.0, + "891": 428612096.0, + "892": 428612096.0, + "893": 428612096.0, + "894": 428612096.0, + "895": 428612096.0, + "896": 428612096.0, + "897": 428612096.0, + "898": 428612096.0, + "899": 428612096.0, + "900": 428612096.0, + "901": 428612096.0, + "902": 428612096.0, + "903": 428612096.0, + "904": 428612096.0, + "905": 428612096.0, + "906": 428612096.0, + "907": 428612096.0, + "908": 428612096.0, + "909": 428612096.0, + "910": 428612096.0, + "911": 428612096.0, + "912": 428612096.0, + "913": 428612096.0, + "914": 428612096.0, + "915": 428612096.0, + "916": 428612096.0, + "917": 428612096.0, + "918": 428612096.0, + "919": 428612096.0, + "920": 428612096.0, + "921": 428612096.0, + "922": 428612096.0, + "923": 428612096.0, + "924": 428612096.0, + "925": 428612096.0, + "926": 428612096.0, + "927": 428612096.0, + "928": 428612096.0, + "929": 428612096.0, + "930": 428612096.0, + "931": 428612096.0, + "932": 428612096.0, + "933": 428612096.0, + "934": 428612096.0, + "935": 428612096.0, + "936": 428612096.0, + "937": 428612096.0, + "938": 428612096.0, + "939": 428612096.0, + "940": 428612096.0, + "941": 428612096.0, + "942": 428612096.0, + "943": 428612096.0, + "944": 428612096.0, + "945": 428612096.0, + "946": 428612096.0, + "947": 428612096.0, + "948": 428612096.0, + "949": 428612096.0, + "950": 428612096.0, + "951": 428612096.0, + "952": 428612096.0, + "953": 428612096.0, + "954": 428612096.0, + "955": 428612096.0, + "956": 428612096.0, + "957": 428612096.0, + "958": 428612096.0, + "959": 428612096.0, + "960": 428612096.0, + "961": 428612096.0, + "962": 
428612096.0, + "963": 428612096.0, + "964": 428612096.0, + "965": 428612096.0, + "966": 428612096.0, + "967": 428612096.0, + "968": 428612096.0, + "969": 428612096.0, + "970": 428612096.0, + "971": 428612096.0, + "972": 428612096.0, + "973": 428612096.0, + "974": 428612096.0, + "975": 428612096.0, + "976": 428612096.0, + "977": 428612096.0, + "978": 428612096.0, + "979": 428612096.0, + "980": 428612096.0, + "981": 428612096.0, + "982": 428612096.0, + "983": 428612096.0, + "984": 428612096.0, + "985": 428612096.0, + "986": 428612096.0, + "987": 428612096.0, + "988": 428612096.0, + "989": 428612096.0, + "990": 428612096.0, + "991": 428612096.0, + "992": 428612096.0, + "993": 428612096.0, + "994": 428612096.0, + "995": 428612096.0, + "996": 428612096.0, + "997": 428612096.0, + "998": 428612096.0, + "999": 428612096.0, + "1000": 428612096.0, + "1001": 428612096.0, + "1002": 428612096.0, + "1003": 428612096.0, + "1004": 428612096.0, + "1005": 428612096.0, + "1006": 428612096.0, + "1007": 428612096.0, + "1008": 428612096.0, + "1009": 428612096.0, + "1010": 428612096.0, + "1011": 428612096.0, + "1012": 428612096.0, + "1013": 428612096.0, + "1014": 428612096.0, + "1015": 428612096.0, + "1016": 428612096.0, + "1017": 428612096.0, + "1018": 428612096.0, + "1019": 428612096.0, + "1020": 428612096.0, + "1021": 428612096.0, + "1022": 428612096.0, + "1023": 428612096.0, + "1024": 428612096.0, + "1025": 428612096.0, + "1026": 428612096.0, + "1027": 428612096.0, + "1028": 428612096.0, + "1029": 428612096.0, + "1030": 428612096.0, + "1031": 428612096.0, + "1032": 428612096.0, + "1033": 428612096.0, + "1034": 428612096.0, + "1035": 428612096.0, + "1036": 428612096.0, + "1037": 428612096.0, + "1038": 428612096.0, + "1039": 428612096.0, + "1040": 428612096.0, + "1041": 428612096.0, + "1042": 428612096.0, + "1043": 428612096.0, + "1044": 428612096.0, + "1045": 428612096.0, + "1046": 428612096.0, + "1047": 428612096.0, + "1048": 428612096.0, + "1049": 428612096.0, + "1050": 428612096.0, 
+ "1051": 428612096.0, + "1052": 428612096.0, + "1053": 428612096.0, + "1054": 428612096.0, + "1055": 428612096.0, + "1056": 428612096.0, + "1057": 428612096.0, + "1058": 428612096.0, + "1059": 428612096.0, + "1060": 428612096.0, + "1061": 428612096.0, + "1062": 428612096.0, + "1063": 428612096.0, + "1064": 428612096.0, + "1065": 428612096.0, + "1066": 428612096.0, + "1067": 428612096.0, + "1068": 428612096.0, + "1069": 428612096.0, + "1070": 428612096.0, + "1071": 428612096.0, + "1072": 428612096.0, + "1073": 428612096.0, + "1074": 428612096.0, + "1075": 428612096.0, + "1076": 428612096.0, + "1077": 428612096.0, + "1078": 428612096.0, + "1079": 428612096.0, + "1080": 428612096.0, + "1081": 428612096.0, + "1082": 428612096.0, + "1083": 428612096.0, + "1084": 428612096.0, + "1085": 428612096.0, + "1086": 428612096.0, + "1087": 428612096.0, + "1088": 428612096.0, + "1089": 428612096.0, + "1090": 428612096.0, + "1091": 428612096.0, + "1092": 428612096.0, + "1093": 428612096.0, + "1094": 428612096.0, + "1095": 428612096.0, + "1096": 428612096.0, + "1097": 428612096.0, + "1098": 428612096.0, + "1099": 428612096.0, + "1100": 428612096.0, + "1101": 428612096.0, + "1102": 428612096.0, + "1103": 428612096.0, + "1104": 428612096.0, + "1105": 428612096.0, + "1106": 428612096.0, + "1107": 428612096.0, + "1108": 428612096.0, + "1109": 428612096.0, + "1110": 428612096.0, + "1111": 428612096.0, + "1112": 428612096.0, + "1113": 428612096.0, + "1114": 428612096.0, + "1115": 428612096.0, + "1116": 428612096.0, + "1117": 428612096.0, + "1118": 428612096.0, + "1119": 428612096.0, + "1120": 428612096.0, + "1121": 428612096.0, + "1122": 428612096.0, + "1123": 428612096.0, + "1124": 428612096.0, + "1125": 428612096.0, + "1126": 428612096.0, + "1127": 428612096.0, + "1128": 428612096.0, + "1129": 428612096.0, + "1130": 428612096.0, + "1131": 428612096.0, + "1132": 428612096.0, + "1133": 428612096.0, + "1134": 428612096.0, + "1135": 428612096.0, + "1136": 428612096.0, + "1137": 
428612096.0, + "1138": 428612096.0, + "1139": 428612096.0, + "1140": 428612096.0, + "1141": 428612096.0, + "1142": 428612096.0, + "1143": 428612096.0, + "1144": 428612096.0, + "1145": 428612096.0, + "1146": 428612096.0, + "1147": 428612096.0, + "1148": 428612096.0, + "1149": 428612096.0, + "1150": 428612096.0, + "1151": 428612096.0, + "1152": 428612096.0, + "1153": 428612096.0, + "1154": 428612096.0, + "1155": 428612096.0, + "1156": 428612096.0, + "1157": 428612096.0, + "1158": 428612096.0, + "1159": 428612096.0, + "1160": 428612096.0, + "1161": 428612096.0, + "1162": 428612096.0, + "1163": 428612096.0, + "1164": 428612096.0, + "1165": 428612096.0, + "1166": 428612096.0, + "1167": 428612096.0, + "1168": 428612096.0, + "1169": 428612096.0, + "1170": 428612096.0, + "1171": 428612096.0, + "1172": 428612096.0, + "1173": 428612096.0, + "1174": 428612096.0, + "1175": 428612096.0, + "1176": 428612096.0, + "1177": 428612096.0, + "1178": 428612096.0, + "1179": 428612096.0, + "1180": 428612096.0, + "1181": 428612096.0, + "1182": 428612096.0, + "1183": 428612096.0, + "1184": 428612096.0, + "1185": 428612096.0, + "1186": 428612096.0, + "1187": 428612096.0, + "1188": 428612096.0, + "1189": 428612096.0, + "1190": 428612096.0, + "1191": 428612096.0, + "1192": 428612096.0, + "1193": 428612096.0, + "1194": 428612096.0, + "1195": 428612096.0, + "1196": 428612096.0, + "1197": 428612096.0, + "1198": 428612096.0, + "1199": 428612096.0, + "1200": 428612096.0, + "1201": 428612096.0, + "1202": 428612096.0, + "1203": 428612096.0, + "1204": 428612096.0, + "1205": 428612096.0, + "1206": 428612096.0, + "1207": 428612096.0, + "1208": 428612096.0, + "1209": 428612096.0, + "1210": 428612096.0, + "1211": 428612096.0, + "1212": 428612096.0, + "1213": 428612096.0, + "1214": 428612096.0, + "1215": 428612096.0, + "1216": 428612096.0, + "1217": 428612096.0, + "1218": 428612096.0, + "1219": 428612096.0, + "1220": 428612096.0, + "1221": 428612096.0, + "1222": 428612096.0, + "1223": 428612096.0, + 
"1224": 428612096.0, + "1225": 428612096.0, + "1226": 428612096.0, + "1227": 428612096.0, + "1228": 428612096.0, + "1229": 428612096.0, + "1230": 428612096.0, + "1231": 428612096.0, + "1232": 428612096.0, + "1233": 428612096.0, + "1234": 428612096.0, + "1235": 428612096.0, + "1236": 428612096.0, + "1237": 428612096.0, + "1238": 428612096.0, + "1239": 428612096.0, + "1240": 428612096.0, + "1241": 428612096.0, + "1242": 428612096.0, + "1243": 428612096.0, + "1244": 428612096.0, + "1245": 428612096.0, + "1246": 428612096.0, + "1247": 428612096.0, + "1248": 428612096.0, + "1249": 428612096.0, + "1250": 428612096.0, + "1251": 428612096.0, + "1252": 428612096.0, + "1253": 428612096.0, + "1254": 428612096.0, + "1255": 428612096.0, + "1256": 428612096.0, + "1257": 428612096.0, + "1258": 428612096.0, + "1259": 428612096.0, + "1260": 428612096.0, + "1261": 428612096.0, + "1262": 428612096.0, + "1263": 428612096.0, + "1264": 428612096.0, + "1265": 428612096.0, + "1266": 428612096.0, + "1267": 428612096.0, + "1268": 428612096.0, + "1269": 428612096.0, + "1270": 428612096.0, + "1271": 428612096.0, + "1272": 428612096.0, + "1273": 428612096.0, + "1274": 428612096.0, + "1275": 428612096.0, + "1276": 428612096.0, + "1277": 428612096.0, + "1278": 428612096.0, + "1279": 428612096.0, + "1280": 428612096.0, + "1281": 428612096.0, + "1282": 428612096.0, + "1283": 428612096.0, + "1284": 428612096.0, + "1285": 428612096.0, + "1286": 428612096.0, + "1287": 428612096.0, + "1288": 428612096.0, + "1289": 428612096.0, + "1290": 428612096.0, + "1291": 428612096.0, + "1292": 428612096.0, + "1293": 428612096.0, + "1294": 428612096.0, + "1295": 428612096.0, + "1296": 428612096.0, + "1297": 428612096.0, + "1298": 428612096.0, + "1299": 428612096.0, + "1300": 428612096.0, + "1301": 428612096.0, + "1302": 428612096.0, + "1303": 428612096.0, + "1304": 428612096.0, + "1305": 428612096.0, + "1306": 428612096.0, + "1307": 428612096.0, + "1308": 428612096.0, + "1309": 428612096.0, + "1310": 428612096.0, 
+ "1311": 428612096.0, + "1312": 428612096.0, + "1313": 428612096.0, + "1314": 428612096.0, + "1315": 428612096.0, + "1316": 428612096.0, + "1317": 428612096.0, + "1318": 428612096.0, + "1319": 428612096.0, + "1320": 428612096.0, + "1321": 428612096.0, + "1322": 428612096.0, + "1323": 428612096.0, + "1324": 428612096.0, + "1325": 428612096.0, + "1326": 428612096.0, + "1327": 428612096.0, + "1328": 428612096.0, + "1329": 428612096.0, + "1330": 428612096.0, + "1331": 428612096.0, + "1332": 428612096.0, + "1333": 428612096.0, + "1334": 428612096.0, + "1335": 428612096.0, + "1336": 428612096.0, + "1337": 428612096.0, + "1338": 428612096.0, + "1339": 428612096.0, + "1340": 428612096.0, + "1341": 428612096.0, + "1342": 428612096.0, + "1343": 428612096.0, + "1344": 428612096.0, + "1345": 428612096.0, + "1346": 428612096.0, + "1347": 428612096.0, + "1348": 428612096.0, + "1349": 428612096.0, + "1350": 428612096.0, + "1351": 428612096.0, + "1352": 428612096.0, + "1353": 428612096.0, + "1354": 428612096.0, + "1355": 428612096.0, + "1356": 428612096.0, + "1357": 428612096.0, + "1358": 428612096.0, + "1359": 428612096.0, + "1360": 428612096.0, + "1361": 428612096.0, + "1362": 428612096.0, + "1363": 428612096.0, + "1364": 428612096.0, + "1365": 428612096.0, + "1366": 428612096.0, + "1367": 428612096.0, + "1368": 428612096.0, + "1369": 428612096.0, + "1370": 428612096.0, + "1371": 428612096.0, + "1372": 428612096.0, + "1373": 428612096.0, + "1374": 428612096.0, + "1375": 428612096.0, + "1376": 428612096.0, + "1377": 428612096.0, + "1378": 428612096.0, + "1379": 428612096.0, + "1380": 428612096.0, + "1381": 428612096.0, + "1382": 428612096.0, + "1383": 428612096.0, + "1384": 428612096.0, + "1385": 428612096.0, + "1386": 428612096.0, + "1387": 428612096.0, + "1388": 428612096.0, + "1389": 428612096.0, + "1390": 428612096.0, + "1391": 428612096.0, + "1392": 428612096.0, + "1393": 428612096.0, + "1394": 428612096.0, + "1395": 428612096.0, + "1396": 428612096.0, + "1397": 
428612096.0, + "1398": 428612096.0, + "1399": 428612096.0, + "1400": 428612096.0, + "1401": 428612096.0, + "1402": 428612096.0, + "1403": 428612096.0, + "1404": 428612096.0, + "1405": 428612096.0, + "1406": 428612096.0, + "1407": 428612096.0, + "1408": 428612096.0, + "1409": 428612096.0, + "1410": 428612096.0, + "1411": 428612096.0, + "1412": 428612096.0, + "1413": 428612096.0, + "1414": 428612096.0, + "1415": 428612096.0, + "1416": 428612096.0, + "1417": 428612096.0, + "1418": 428612096.0, + "1419": 428612096.0, + "1420": 428612096.0, + "1421": 428612096.0, + "1422": 428612096.0, + "1423": 428612096.0, + "1424": 428612096.0, + "1425": 428612096.0, + "1426": 428612096.0, + "1427": 428612096.0, + "1428": 428612096.0, + "1429": 428612096.0, + "1430": 428612096.0, + "1431": 428612096.0, + "1432": 428612096.0, + "1433": 428612096.0, + "1434": 428612096.0, + "1435": 428612096.0, + "1436": 428612096.0, + "1437": 428612096.0, + "1438": 428612096.0, + "1439": 428612096.0, + "1440": 428612096.0, + "1441": 428612096.0, + "1442": 428612096.0, + "1443": 428612096.0, + "1444": 428612096.0, + "1445": 428612096.0, + "1446": 428612096.0, + "1447": 428612096.0, + "1448": 428612096.0, + "1449": 428612096.0, + "1450": 428612096.0, + "1451": 428612096.0, + "1452": 428612096.0, + "1453": 428612096.0, + "1454": 428612096.0, + "1455": 428612096.0, + "1456": 428612096.0, + "1457": 428612096.0, + "1458": 428612096.0, + "1459": 428612096.0, + "1460": 428612096.0, + "1461": 428612096.0, + "1462": 428612096.0, + "1463": 428612096.0, + "1464": 428612096.0, + "1465": 428612096.0, + "1466": 428612096.0, + "1467": 428612096.0, + "1468": 428612096.0, + "1469": 428612096.0, + "1470": 428612096.0, + "1471": 428612096.0, + "1472": 428612096.0, + "1473": 428612096.0, + "1474": 428612096.0, + "1475": 428612096.0, + "1476": 428612096.0, + "1477": 428612096.0, + "1478": 428612096.0, + "1479": 428612096.0, + "1480": 428612096.0, + "1481": 428612096.0, + "1482": 428612096.0, + "1483": 428612096.0, + 
"1484": 428612096.0, + "1485": 428612096.0, + "1486": 428612096.0, + "1487": 428612096.0, + "1488": 428612096.0, + "1489": 428612096.0, + "1490": 428612096.0, + "1491": 428612096.0, + "1492": 428612096.0, + "1493": 428612096.0, + "1494": 428612096.0, + "1495": 428612096.0, + "1496": 428612096.0, + "1497": 428612096.0, + "1498": 428612096.0, + "1499": 428612096.0, + "1500": 428612096.0, + "1501": 428612096.0, + "1502": 428612096.0, + "1503": 428612096.0, + "1504": 428612096.0, + "1505": 428612096.0, + "1506": 428612096.0, + "1507": 428612096.0, + "1508": 428612096.0, + "1509": 428612096.0, + "1510": 428612096.0, + "1511": 428612096.0, + "1512": 428612096.0, + "1513": 428612096.0, + "1514": 428612096.0, + "1515": 428612096.0, + "1516": 428612096.0, + "1517": 428612096.0, + "1518": 428612096.0, + "1519": 428612096.0, + "1520": 428612096.0, + "1521": 428612096.0, + "1522": 428612096.0, + "1523": 428612096.0, + "1524": 428612096.0, + "1525": 428612096.0, + "1526": 428612096.0, + "1527": 428612096.0, + "1528": 428612096.0, + "1529": 428612096.0, + "1530": 428612096.0, + "1531": 428612096.0, + "1532": 428612096.0, + "1533": 428612096.0, + "1534": 428612096.0, + "1535": 428612096.0, + "1536": 428612096.0, + "1537": 428612096.0, + "1538": 428612096.0, + "1539": 428612096.0, + "1540": 428612096.0, + "1541": 428612096.0, + "1542": 428612096.0, + "1543": 428612096.0, + "1544": 428612096.0, + "1545": 428612096.0, + "1546": 428612096.0, + "1547": 428612096.0, + "1548": 428612096.0, + "1549": 428612096.0, + "1550": 428612096.0, + "1551": 428612096.0, + "1552": 428612096.0, + "1553": 428612096.0, + "1554": 428612096.0, + "1555": 428612096.0, + "1556": 428612096.0, + "1557": 428612096.0, + "1558": 428612096.0, + "1559": 428612096.0, + "1560": 428612096.0, + "1561": 428612096.0, + "1562": 428612096.0, + "1563": 428612096.0, + "1564": 428612096.0, + "1565": 428612096.0, + "1566": 428612096.0, + "1567": 428612096.0, + "1568": 428612096.0, + "1569": 428612096.0, + "1570": 428612096.0, 
+ "1571": 428612096.0, + "1572": 428612096.0, + "1573": 428612096.0, + "1574": 428612096.0, + "1575": 428612096.0, + "1576": 428612096.0, + "1577": 428612096.0, + "1578": 428612096.0, + "1579": 428612096.0, + "1580": 428612096.0, + "1581": 428612096.0, + "1582": 428612096.0, + "1583": 428612096.0, + "1584": 428612096.0, + "1585": 428612096.0, + "1586": 428612096.0, + "1587": 428612096.0, + "1588": 428612096.0, + "1589": 428612096.0, + "1590": 428612096.0, + "1591": 428612096.0, + "1592": 428612096.0, + "1593": 428612096.0, + "1594": 428612096.0, + "1595": 428612096.0, + "1596": 428612096.0, + "1597": 428612096.0, + "1598": 428612096.0, + "1599": 428612096.0, + "1600": 428612096.0, + "1601": 428612096.0, + "1602": 428612096.0, + "1603": 428612096.0, + "1604": 428612096.0, + "1605": 428612096.0, + "1606": 428612096.0, + "1607": 428612096.0, + "1608": 428612096.0, + "1609": 428612096.0, + "1610": 428612096.0, + "1611": 428612096.0, + "1612": 428612096.0, + "1613": 428612096.0, + "1614": 428612096.0, + "1615": 428612096.0, + "1616": 428612096.0, + "1617": 428612096.0, + "1618": 428612096.0, + "1619": 428612096.0, + "1620": 428612096.0, + "1621": 428612096.0, + "1622": 428612096.0, + "1623": 428612096.0, + "1624": 428612096.0, + "1625": 428612096.0, + "1626": 428612096.0, + "1627": 428612096.0, + "1628": 428612096.0, + "1629": 428612096.0, + "1630": 428612096.0, + "1631": 428612096.0, + "1632": 428612096.0, + "1633": 428612096.0, + "1634": 428612096.0, + "1635": 428612096.0, + "1636": 428612096.0, + "1637": 428612096.0, + "1638": 428612096.0, + "1639": 428612096.0, + "1640": 428612096.0, + "1641": 428612096.0, + "1642": 428612096.0, + "1643": 428612096.0, + "1644": 428612096.0, + "1645": 428612096.0, + "1646": 428612096.0, + "1647": 428612096.0, + "1648": 428612096.0, + "1649": 428612096.0, + "1650": 428612096.0, + "1651": 428612096.0, + "1652": 428612096.0, + "1653": 428612096.0, + "1654": 428612096.0, + "1655": 428612096.0, + "1656": 428612096.0, + "1657": 
428612096.0, + "1658": 428612096.0, + "1659": 428612096.0, + "1660": 428612096.0, + "1661": 428612096.0, + "1662": 428612096.0, + "1663": 428612096.0, + "1664": 428612096.0, + "1665": 428612096.0, + "1666": 428612096.0, + "1667": 428612096.0, + "1668": 428612096.0, + "1669": 428612096.0, + "1670": 428612096.0, + "1671": 428612096.0, + "1672": 428612096.0, + "1673": 428612096.0, + "1674": 428612096.0, + "1675": 428612096.0, + "1676": 428612096.0, + "1677": 428612096.0, + "1678": 428612096.0, + "1679": 428612096.0, + "1680": 428612096.0, + "1681": 428612096.0, + "1682": 428612096.0, + "1683": 428612096.0, + "1684": 428612096.0, + "1685": 428612096.0, + "1686": 428612096.0, + "1687": 428612096.0, + "1688": 428612096.0, + "1689": 428612096.0, + "1690": 428612096.0, + "1691": 428612096.0, + "1692": 428612096.0, + "1693": 428612096.0, + "1694": 428612096.0, + "1695": 428612096.0, + "1696": 428612096.0, + "1697": 428612096.0, + "1698": 428612096.0, + "1699": 428612096.0, + "1700": 428612096.0, + "1701": 428612096.0, + "1702": 428612096.0, + "1703": 428612096.0, + "1704": 428612096.0, + "1705": 428612096.0, + "1706": 428612096.0, + "1707": 428612096.0, + "1708": 428612096.0, + "1709": 428612096.0, + "1710": 428612096.0, + "1711": 428612096.0, + "1712": 428612096.0, + "1713": 428612096.0, + "1714": 428612096.0, + "1715": 428612096.0, + "1716": 428612096.0, + "1717": 428612096.0, + "1718": 428612096.0, + "1719": 428612096.0, + "1720": 428612096.0, + "1721": 428612096.0, + "1722": 428612096.0, + "1723": 428612096.0, + "1724": 428612096.0, + "1725": 428612096.0, + "1726": 428612096.0, + "1727": 428612096.0, + "1728": 428612096.0, + "1729": 428612096.0, + "1730": 428612096.0, + "1731": 428612096.0, + "1732": 428612096.0, + "1733": 428612096.0, + "1734": 428612096.0, + "1735": 428612096.0, + "1736": 428612096.0, + "1737": 428612096.0, + "1738": 428612096.0, + "1739": 428612096.0, + "1740": 428612096.0, + "1741": 428612096.0, + "1742": 428612096.0, + "1743": 428612096.0, + 
"1744": 428612096.0, + "1745": 428612096.0, + "1746": 428612096.0, + "1747": 428612096.0, + "1748": 428612096.0, + "1749": 428612096.0, + "1750": 428612096.0, + "1751": 428612096.0, + "1752": 428612096.0, + "1753": 428612096.0, + "1754": 428612096.0, + "1755": 428612096.0, + "1756": 428612096.0, + "1757": 428612096.0, + "1758": 428612096.0, + "1759": 428612096.0, + "1760": 428612096.0, + "1761": 428612096.0, + "1762": 428612096.0, + "1763": 428612096.0, + "1764": 428612096.0, + "1765": 428612096.0, + "1766": 428612096.0, + "1767": 428612096.0, + "1768": 428612096.0, + "1769": 428612096.0, + "1770": 428612096.0, + "1771": 428612096.0, + "1772": 428612096.0, + "1773": 428612096.0, + "1774": 428612096.0, + "1775": 428612096.0, + "1776": 428612096.0, + "1777": 428612096.0, + "1778": 428612096.0, + "1779": 428612096.0, + "1780": 428612096.0, + "1781": 428612096.0, + "1782": 428612096.0, + "1783": 428612096.0, + "1784": 428612096.0, + "1785": 428612096.0, + "1786": 428612096.0, + "1787": 428612096.0, + "1788": 428612096.0, + "1789": 428612096.0, + "1790": 428612096.0, + "1791": 428612096.0, + "1792": 428612096.0, + "1793": 428612096.0, + "1794": 428612096.0, + "1795": 428612096.0, + "1796": 428612096.0, + "1797": 428612096.0, + "1798": 428612096.0, + "1799": 428612096.0, + "1800": 428612096.0, + "1801": 428612096.0, + "1802": 428612096.0, + "1803": 428612096.0, + "1804": 428612096.0, + "1805": 428612096.0, + "1806": 428612096.0, + "1807": 428612096.0, + "1808": 428612096.0, + "1809": 428612096.0, + "1810": 428612096.0, + "1811": 428612096.0, + "1812": 428612096.0, + "1813": 428612096.0, + "1814": 428612096.0, + "1815": 428612096.0, + "1816": 428612096.0, + "1817": 428612096.0, + "1818": 428612096.0, + "1819": 428612096.0, + "1820": 428612096.0, + "1821": 428612096.0, + "1822": 428612096.0, + "1823": 428612096.0, + "1824": 428612096.0, + "1825": 428612096.0, + "1826": 428612096.0, + "1827": 428612096.0, + "1828": 428612096.0, + "1829": 428612096.0, + "1830": 428612096.0, 
+ "1831": 428612096.0, + "1832": 428612096.0, + "1833": 428612096.0, + "1834": 428612096.0, + "1835": 428612096.0, + "1836": 428612096.0, + "1837": 428612096.0, + "1838": 428612096.0, + "1839": 428612096.0, + "1840": 428612096.0, + "1841": 428612096.0, + "1842": 428612096.0, + "1843": 428612096.0, + "1844": 428612096.0, + "1845": 428612096.0, + "1846": 428612096.0, + "1847": 428612096.0, + "1848": 428612096.0, + "1849": 428612096.0, + "1850": 428612096.0, + "1851": 428612096.0, + "1852": 428612096.0, + "1853": 428612096.0, + "1854": 428612096.0, + "1855": 428612096.0, + "1856": 428612096.0, + "1857": 428612096.0, + "1858": 428612096.0, + "1859": 428612096.0, + "1860": 428612096.0, + "1861": 428612096.0, + "1862": 428612096.0, + "1863": 428612096.0, + "1864": 428612096.0, + "1865": 428612096.0, + "1866": 428612096.0, + "1867": 428612096.0, + "1868": 428612096.0, + "1869": 428612096.0, + "1870": 428612096.0, + "1871": 428612096.0, + "1872": 428612096.0, + "1873": 428612096.0, + "1874": 428612096.0, + "1875": 428612096.0, + "1876": 428612096.0, + "1877": 428612096.0, + "1878": 428612096.0, + "1879": 428612096.0, + "1880": 428612096.0, + "1881": 428612096.0, + "1882": 428612096.0, + "1883": 428612096.0, + "1884": 428612096.0, + "1885": 428612096.0, + "1886": 428612096.0, + "1887": 428612096.0, + "1888": 428612096.0, + "1889": 428612096.0, + "1890": 428612096.0, + "1891": 428612096.0, + "1892": 428612096.0, + "1893": 428612096.0, + "1894": 428612096.0, + "1895": 428612096.0, + "1896": 428612096.0, + "1897": 428612096.0, + "1898": 428612096.0, + "1899": 428612096.0, + "1900": 428612096.0, + "1901": 428612096.0, + "1902": 428612096.0, + "1903": 428612096.0, + "1904": 428612096.0, + "1905": 428612096.0, + "1906": 428612096.0, + "1907": 428612096.0, + "1908": 428612096.0, + "1909": 428612096.0, + "1910": 428612096.0, + "1911": 428612096.0, + "1912": 428612096.0, + "1913": 428612096.0, + "1914": 428612096.0, + "1915": 428612096.0, + "1916": 428612096.0, + "1917": 
428612096.0, + "1918": 428612096.0, + "1919": 428612096.0, + "1920": 428612096.0, + "1921": 428612096.0, + "1922": 428612096.0, + "1923": 428612096.0, + "1924": 428612096.0, + "1925": 428612096.0, + "1926": 428612096.0, + "1927": 428612096.0, + "1928": 428612096.0, + "1929": 428612096.0, + "1930": 428612096.0, + "1931": 428612096.0, + "1932": 428612096.0, + "1933": 428612096.0, + "1934": 428612096.0, + "1935": 428612096.0, + "1936": 428612096.0, + "1937": 428612096.0, + "1938": 428612096.0, + "1939": 428612096.0, + "1940": 428612096.0, + "1941": 428612096.0, + "1942": 428612096.0, + "1943": 428612096.0, + "1944": 428612096.0, + "1945": 428612096.0, + "1946": 428612096.0, + "1947": 428612096.0, + "1948": 428612096.0, + "1949": 428612096.0, + "1950": 428612096.0, + "1951": 428612096.0, + "1952": 428612096.0, + "1953": 428612096.0, + "1954": 428612096.0, + "1955": 428612096.0, + "1956": 428612096.0, + "1957": 428612096.0, + "1958": 428612096.0, + "1959": 428612096.0, + "1960": 428612096.0, + "1961": 428612096.0, + "1962": 428612096.0, + "1963": 428612096.0, + "1964": 428612096.0, + "1965": 428612096.0, + "1966": 428612096.0, + "1967": 428612096.0, + "1968": 428612096.0, + "1969": 428612096.0, + "1970": 428612096.0, + "1971": 428612096.0, + "1972": 428612096.0, + "1973": 428612096.0, + "1974": 428612096.0, + "1975": 428612096.0, + "1976": 428612096.0, + "1977": 428612096.0, + "1978": 428612096.0, + "1979": 428612096.0, + "1980": 428612096.0, + "1981": 428612096.0, + "1982": 428612096.0, + "1983": 428612096.0, + "1984": 428612096.0, + "1985": 428612096.0, + "1986": 428612096.0, + "1987": 428612096.0, + "1988": 428612096.0, + "1989": 428612096.0, + "1990": 428612096.0, + "1991": 428612096.0, + "1992": 428612096.0, + "1993": 428612096.0, + "1994": 428612096.0, + "1995": 428612096.0, + "1996": 428612096.0, + "1997": 428612096.0, + "1998": 428612096.0, + "1999": 428612096.0, + "2000": 428612096.0 + } + }, + "iteration-time": { + "start_step": 1, + "end_step": 2000, + 
"step_interval": 1, + "values": { + "1": 22.43653, + "2": 5.05, + "3": 4.99632, + "4": 5.00941, + "5": 5.30047, + "6": 5.00529, + "7": 4.98693, + "8": 5.03236, + "9": 5.04733, + "10": 5.0355, + "11": 5.05504, + "12": 5.02789, + "13": 5.05026, + "14": 5.03817, + "15": 5.03065, + "16": 5.04414, + "17": 5.00251, + "18": 4.9928, + "19": 4.99792, + "20": 4.99648, + "21": 5.01668, + "22": 4.97973, + "23": 5.06379, + "24": 5.01631, + "25": 4.96187, + "26": 4.95004, + "27": 4.95649, + "28": 4.93702, + "29": 4.93675, + "30": 4.92101, + "31": 4.93325, + "32": 4.92626, + "33": 4.93256, + "34": 4.93518, + "35": 4.95011, + "36": 4.959, + "37": 5.41549, + "38": 5.7108, + "39": 4.96475, + "40": 4.95756, + "41": 5.03533, + "42": 4.94591, + "43": 5.30856, + "44": 4.93166, + "45": 5.29533, + "46": 6.02838, + "47": 4.99271, + "48": 4.93548, + "49": 4.93262, + "50": 4.93589, + "51": 4.93457, + "52": 4.9402, + "53": 4.93593, + "54": 4.93266, + "55": 4.93457, + "56": 4.926, + "57": 4.94015, + "58": 4.93606, + "59": 4.92819, + "60": 4.92679, + "61": 4.92853, + "62": 4.93744, + "63": 4.93014, + "64": 4.92895, + "65": 4.92774, + "66": 4.9263, + "67": 4.92483, + "68": 4.91654, + "69": 4.95386, + "70": 4.95969, + "71": 4.97371, + "72": 4.96736, + "73": 4.98575, + "74": 4.968, + "75": 5.68071, + "76": 4.98487, + "77": 4.98651, + "78": 4.97441, + "79": 4.97854, + "80": 4.97886, + "81": 4.98163, + "82": 4.97647, + "83": 5.33849, + "84": 4.98394, + "85": 4.98, + "86": 4.96888, + "87": 4.9685, + "88": 5.33167, + "89": 5.40565, + "90": 4.97724, + "91": 6.05451, + "92": 4.9699, + "93": 4.96947, + "94": 4.97853, + "95": 5.03234, + "96": 4.9703, + "97": 4.9766, + "98": 4.96386, + "99": 4.97968, + "100": 4.96583, + "101": 4.956, + "102": 4.94425, + "103": 4.96789, + "104": 4.96252, + "105": 4.97853, + "106": 4.98313, + "107": 4.98, + "108": 4.97528, + "109": 4.98226, + "110": 4.98532, + "111": 4.95791, + "112": 4.95409, + "113": 5.66529, + "114": 4.96347, + "115": 4.99625, + "116": 4.99199, + "117": 
4.98823, + "118": 4.98114, + "119": 4.97652, + "120": 4.98449, + "121": 4.98578, + "122": 4.98423, + "123": 4.9824, + "124": 4.98111, + "125": 4.98291, + "126": 4.98215, + "127": 4.98484, + "128": 5.35151, + "129": 4.9912, + "130": 4.99188, + "131": 4.98662, + "132": 5.34041, + "133": 4.98063, + "134": 5.33235, + "135": 5.69907, + "136": 5.33587, + "137": 4.98509, + "138": 5.61624, + "139": 5.23864, + "140": 4.92839, + "141": 4.95868, + "142": 4.93611, + "143": 4.9473, + "144": 4.9282, + "145": 4.93563, + "146": 4.92822, + "147": 4.94205, + "148": 4.94037, + "149": 4.93429, + "150": 5.62642, + "151": 4.93794, + "152": 4.9323, + "153": 4.93391, + "154": 4.93581, + "155": 4.93177, + "156": 4.93719, + "157": 4.93775, + "158": 4.93223, + "159": 4.9449, + "160": 4.93898, + "161": 4.94198, + "162": 4.9436, + "163": 4.9355, + "164": 4.93432, + "165": 4.9382, + "166": 4.94332, + "167": 4.93425, + "168": 4.93189, + "169": 4.92717, + "170": 4.94393, + "171": 4.94517, + "172": 4.92976, + "173": 5.303, + "174": 4.92818, + "175": 4.92924, + "176": 4.9385, + "177": 5.27801, + "178": 4.93182, + "179": 5.28092, + "180": 5.99722, + "181": 4.92656, + "182": 4.92594, + "183": 4.92947, + "184": 4.93087, + "185": 4.92967, + "186": 4.93088, + "187": 5.62908, + "188": 4.93498, + "189": 4.9476, + "190": 4.93843, + "191": 4.94101, + "192": 4.93265, + "193": 4.93046, + "194": 4.93133, + "195": 4.94044, + "196": 4.93997, + "197": 4.93336, + "198": 6.32096, + "199": 4.95042, + "200": 4.91888, + "201": 4.91803, + "202": 4.92212, + "203": 4.91738, + "204": 4.93431, + "205": 4.93078, + "206": 4.9288, + "207": 4.9431, + "208": 4.93288, + "209": 4.93152, + "210": 4.92297, + "211": 4.92152, + "212": 4.92078, + "213": 4.93382, + "214": 4.92203, + "215": 4.92628, + "216": 4.92759, + "217": 4.91972, + "218": 4.93018, + "219": 5.30587, + "220": 4.92639, + "221": 4.92815, + "222": 5.28345, + "223": 4.93513, + "224": 5.62954, + "225": 6.35198, + "226": 4.94108, + "227": 4.94033, + "228": 4.94077, + 
"229": 4.9445, + "230": 4.95277, + "231": 4.93684, + "232": 4.94258, + "233": 4.9386, + "234": 4.94149, + "235": 4.94872, + "236": 4.95361, + "237": 4.94924, + "238": 4.93722, + "239": 4.94342, + "240": 4.95029, + "241": 4.94512, + "242": 4.9423, + "243": 4.93861, + "244": 4.93578, + "245": 4.93502, + "246": 4.94519, + "247": 4.93658, + "248": 4.93761, + "249": 4.94583, + "250": 4.94414, + "251": 4.94331, + "252": 4.94044, + "253": 4.94317, + "254": 4.94161, + "255": 4.95295, + "256": 4.95044, + "257": 4.94816, + "258": 4.94006, + "259": 4.94409, + "260": 4.9408, + "261": 4.94791, + "262": 5.63079, + "263": 4.95361, + "264": 5.3219, + "265": 4.96046, + "266": 4.95564, + "267": 5.30372, + "268": 5.30618, + "269": 4.94954, + "270": 6.01622, + "271": 4.9509, + "272": 4.9579, + "273": 4.9529, + "274": 4.95339, + "275": 4.94721, + "276": 4.95053, + "277": 4.9434, + "278": 4.9389, + "279": 4.94021, + "280": 4.93862, + "281": 4.93834, + "282": 4.93985, + "283": 4.94183, + "284": 4.93716, + "285": 4.9443, + "286": 4.94305, + "287": 4.93467, + "288": 4.93816, + "289": 4.93749, + "290": 4.9349, + "291": 4.939, + "292": 4.93482, + "293": 4.94665, + "294": 4.93648, + "295": 4.93823, + "296": 4.93522, + "297": 4.93472, + "298": 4.93288, + "299": 5.61551, + "300": 4.95418, + "301": 4.95347, + "302": 4.95005, + "303": 4.95224, + "304": 5.01672, + "305": 4.94451, + "306": 4.9469, + "307": 4.94674, + "308": 4.95506, + "309": 5.3147, + "310": 4.97913, + "311": 5.29357, + "312": 4.94239, + "313": 5.28356, + "314": 5.66502, + "315": 5.29945, + "316": 4.94213, + "317": 4.93439, + "318": 4.94085, + "319": 4.93452, + "320": 4.94083, + "321": 4.93407, + "322": 4.93596, + "323": 4.9411, + "324": 4.94091, + "325": 4.93723, + "326": 4.93682, + "327": 4.93712, + "328": 4.99643, + "329": 4.94011, + "330": 4.93777, + "331": 4.93553, + "332": 4.938, + "333": 4.94101, + "334": 4.93199, + "335": 4.93179, + "336": 5.28612, + "337": 5.30266, + "338": 4.96477, + "339": 4.97585, + "340": 4.95959, + 
"341": 4.95912, + "342": 4.96594, + "343": 4.96105, + "344": 4.96501, + "345": 4.96175, + "346": 4.96452, + "347": 4.9603, + "348": 4.95434, + "349": 4.95658, + "350": 4.95773, + "351": 4.96723, + "352": 5.02353, + "353": 4.95487, + "354": 5.32227, + "355": 4.95601, + "356": 5.29598, + "357": 4.95819, + "358": 5.29935, + "359": 6.01593, + "360": 4.96832, + "361": 4.95302, + "362": 4.95944, + "363": 4.95167, + "364": 4.9483, + "365": 4.94951, + "366": 4.9525, + "367": 4.95364, + "368": 4.94948, + "369": 4.95258, + "370": 4.94974, + "371": 4.96357, + "372": 4.94701, + "373": 4.94584, + "374": 5.27688, + "375": 5.29329, + "376": 4.93553, + "377": 4.93296, + "378": 4.93431, + "379": 4.94158, + "380": 4.98441, + "381": 4.99657, + "382": 4.97634, + "383": 4.98015, + "384": 4.98178, + "385": 4.97595, + "386": 4.97431, + "387": 4.97965, + "388": 4.91884, + "389": 4.92436, + "390": 4.9179, + "391": 4.91999, + "392": 4.92113, + "393": 4.92231, + "394": 4.91815, + "395": 4.92381, + "396": 4.91848, + "397": 4.92412, + "398": 4.91541, + "399": 4.91455, + "400": 5.29982, + "401": 5.26416, + "402": 5.2612, + "403": 4.91795, + "404": 5.63316, + "405": 5.27153, + "406": 4.90744, + "407": 4.9142, + "408": 4.90831, + "409": 4.90838, + "410": 4.92063, + "411": 5.25377, + "412": 5.26322, + "413": 4.91895, + "414": 4.92378, + "415": 4.91866, + "416": 4.91955, + "417": 4.92152, + "418": 4.91929, + "419": 4.9201, + "420": 4.91526, + "421": 4.91974, + "422": 4.92503, + "423": 4.92579, + "424": 4.91791, + "425": 4.92253, + "426": 4.92114, + "427": 4.91774, + "428": 4.91171, + "429": 4.9125, + "430": 4.91411, + "431": 4.90802, + "432": 4.9164, + "433": 4.90723, + "434": 4.92382, + "435": 4.9069, + "436": 4.91154, + "437": 4.90512, + "438": 4.9175, + "439": 4.91782, + "440": 4.91028, + "441": 4.91048, + "442": 4.90894, + "443": 4.88817, + "444": 4.88126, + "445": 5.24853, + "446": 4.87836, + "447": 5.24263, + "448": 5.25398, + "449": 6.28763, + "450": 4.88338, + "451": 4.89491, + "452": 
4.88709, + "453": 4.89008, + "454": 4.90322, + "455": 4.90113, + "456": 4.90439, + "457": 4.90223, + "458": 4.90641, + "459": 4.90851, + "460": 4.9009, + "461": 4.89968, + "462": 4.89662, + "463": 4.9081, + "464": 4.88866, + "465": 4.90253, + "466": 4.90724, + "467": 4.89875, + "468": 4.90067, + "469": 4.90495, + "470": 4.89887, + "471": 4.89965, + "472": 4.90145, + "473": 4.88549, + "474": 4.87833, + "475": 4.88274, + "476": 4.87937, + "477": 4.88019, + "478": 4.87808, + "479": 4.88269, + "480": 4.87591, + "481": 4.88072, + "482": 4.87452, + "483": 4.8839, + "484": 4.87834, + "485": 5.21963, + "486": 4.8887, + "487": 5.22473, + "488": 4.88748, + "489": 4.89663, + "490": 5.6108, + "491": 5.24875, + "492": 4.88583, + "493": 5.24488, + "494": 5.59516, + "495": 4.89058, + "496": 4.91601, + "497": 4.88752, + "498": 4.88645, + "499": 4.89008, + "500": 4.89271, + "501": 4.8913, + "502": 4.89039, + "503": 4.8906, + "504": 4.88603, + "505": 4.92691, + "506": 4.91793, + "507": 4.92158, + "508": 4.91981, + "509": 4.92795, + "510": 4.91413, + "511": 4.91073, + "512": 4.90909, + "513": 4.91434, + "514": 4.91509, + "515": 4.91002, + "516": 4.9115, + "517": 4.91722, + "518": 4.91514, + "519": 4.91283, + "520": 4.91403, + "521": 4.91077, + "522": 4.91167, + "523": 5.26088, + "524": 5.27803, + "525": 4.92516, + "526": 4.93143, + "527": 4.9217, + "528": 4.92344, + "529": 4.91786, + "530": 4.9193, + "531": 4.881, + "532": 4.87697, + "533": 4.88329, + "534": 5.23628, + "535": 5.26149, + "536": 4.88132, + "537": 5.23366, + "538": 5.92272, + "539": 4.8822, + "540": 4.87645, + "541": 4.87941, + "542": 4.8726, + "543": 4.87977, + "544": 4.88572, + "545": 4.97915, + "546": 4.94014, + "547": 4.9447, + "548": 4.94585, + "549": 4.93712, + "550": 4.95428, + "551": 4.9405, + "552": 4.94013, + "553": 4.94514, + "554": 4.94542, + "555": 4.94729, + "556": 4.93818, + "557": 4.94632, + "558": 4.95928, + "559": 4.94439, + "560": 5.29538, + "561": 5.29912, + "562": 4.95591, + "563": 4.94545, + "564": 
4.9589, + "565": 4.9486, + "566": 4.94487, + "567": 4.94563, + "568": 4.96795, + "569": 4.96332, + "570": 4.95731, + "571": 4.95751, + "572": 4.94401, + "573": 4.94623, + "574": 4.9438, + "575": 4.9342, + "576": 4.93847, + "577": 4.94215, + "578": 4.94036, + "579": 4.95135, + "580": 5.28996, + "581": 5.66625, + "582": 4.93892, + "583": 5.64719, + "584": 5.28091, + "585": 4.95827, + "586": 4.95725, + "587": 4.96107, + "588": 4.95092, + "589": 4.95514, + "590": 4.94845, + "591": 4.94342, + "592": 4.9488, + "593": 4.93576, + "594": 4.93657, + "595": 4.93545, + "596": 4.93595, + "597": 5.29319, + "598": 5.28921, + "599": 4.95347, + "600": 4.94896, + "601": 4.94543, + "602": 4.95405, + "603": 4.94996, + "604": 4.94726, + "605": 4.94394, + "606": 4.9443, + "607": 4.99448, + "608": 4.93032, + "609": 4.96191, + "610": 4.95086, + "611": 4.94486, + "612": 4.94403, + "613": 4.94194, + "614": 4.94624, + "615": 4.94461, + "616": 4.96458, + "617": 4.94658, + "618": 4.94254, + "619": 4.93901, + "620": 4.94138, + "621": 4.94747, + "622": 4.95796, + "623": 4.94579, + "624": 5.30372, + "625": 4.94082, + "626": 5.66834, + "627": 4.93994, + "628": 5.97473, + "629": 4.94152, + "630": 4.94328, + "631": 4.9385, + "632": 4.9688, + "633": 4.93837, + "634": 5.25732, + "635": 4.9147, + "636": 5.25839, + "637": 4.92259, + "638": 4.91081, + "639": 4.92229, + "640": 4.92687, + "641": 4.91335, + "642": 4.91557, + "643": 4.91922, + "644": 4.91847, + "645": 4.92121, + "646": 4.92251, + "647": 4.91255, + "648": 4.91291, + "649": 4.91003, + "650": 4.90867, + "651": 4.91235, + "652": 4.90719, + "653": 4.90865, + "654": 4.90719, + "655": 4.91306, + "656": 4.90861, + "657": 4.90901, + "658": 4.91095, + "659": 4.90726, + "660": 4.90915, + "661": 4.91011, + "662": 4.90721, + "663": 4.90907, + "664": 4.91699, + "665": 4.91095, + "666": 4.90826, + "667": 4.90687, + "668": 4.90738, + "669": 5.25716, + "670": 5.25453, + "671": 5.28603, + "672": 5.25386, + "673": 6.29304, + "674": 4.91719, + "675": 4.9174, + 
"676": 4.92014, + "677": 4.92048, + "678": 4.90878, + "679": 4.90967, + "680": 4.90981, + "681": 4.91054, + "682": 4.90885, + "683": 4.90932, + "684": 4.915, + "685": 4.90701, + "686": 4.91124, + "687": 4.91733, + "688": 4.91577, + "689": 4.91189, + "690": 4.90854, + "691": 4.90631, + "692": 4.90689, + "693": 4.9142, + "694": 4.90933, + "695": 4.90064, + "696": 4.88962, + "697": 4.89317, + "698": 4.89665, + "699": 4.90473, + "700": 4.90675, + "701": 4.90072, + "702": 4.90347, + "703": 4.90535, + "704": 4.90243, + "705": 4.90653, + "706": 4.90494, + "707": 4.90715, + "708": 4.89971, + "709": 5.25068, + "710": 5.24447, + "711": 4.91173, + "712": 4.91607, + "713": 5.26011, + "714": 4.90966, + "715": 4.90512, + "716": 5.63181, + "717": 5.62011, + "718": 5.23301, + "719": 4.91317, + "720": 4.90779, + "721": 4.90675, + "722": 4.90612, + "723": 4.90554, + "724": 4.90952, + "725": 4.90669, + "726": 4.90589, + "727": 4.9062, + "728": 4.91028, + "729": 4.905, + "730": 4.90848, + "731": 4.90621, + "732": 4.91216, + "733": 4.90248, + "734": 4.90051, + "735": 4.90319, + "736": 4.90401, + "737": 4.90646, + "738": 4.90558, + "739": 4.90438, + "740": 4.90694, + "741": 4.9036, + "742": 4.90521, + "743": 4.90326, + "744": 4.90534, + "745": 4.90658, + "746": 5.24876, + "747": 4.91293, + "748": 5.24944, + "749": 4.90712, + "750": 4.90572, + "751": 4.90977, + "752": 4.90683, + "753": 4.90815, + "754": 4.90611, + "755": 4.91427, + "756": 4.9129, + "757": 4.91264, + "758": 5.25755, + "759": 4.91199, + "760": 5.2647, + "761": 4.91559, + "762": 5.64712, + "763": 5.59149, + "764": 4.91566, + "765": 4.91348, + "766": 4.92052, + "767": 4.9149, + "768": 4.91624, + "769": 4.90919, + "770": 4.9208, + "771": 4.9111, + "772": 4.91242, + "773": 4.91183, + "774": 4.91856, + "775": 4.91524, + "776": 4.91642, + "777": 4.91271, + "778": 4.91587, + "779": 4.91173, + "780": 4.9163, + "781": 4.9101, + "782": 4.90927, + "783": 4.91594, + "784": 5.27562, + "785": 5.29399, + "786": 4.92064, + "787": 4.92508, 
+ "788": 4.91936, + "789": 4.92025, + "790": 4.92839, + "791": 4.91829, + "792": 4.9234, + "793": 4.92615, + "794": 4.91968, + "795": 4.91417, + "796": 4.89214, + "797": 4.87642, + "798": 4.87726, + "799": 4.88691, + "800": 4.87753, + "801": 4.90361, + "802": 4.91538, + "803": 5.25822, + "804": 5.25769, + "805": 4.90985, + "806": 4.91228, + "807": 5.6423, + "808": 5.23836, + "809": 4.9314, + "810": 4.91226, + "811": 4.91382, + "812": 4.91588, + "813": 4.91005, + "814": 4.9202, + "815": 4.90766, + "816": 4.90744, + "817": 4.91497, + "818": 4.91, + "819": 4.90572, + "820": 4.91342, + "821": 5.26215, + "822": 5.25971, + "823": 4.92486, + "824": 4.92645, + "825": 4.91518, + "826": 4.91893, + "827": 4.90862, + "828": 4.9143, + "829": 4.91422, + "830": 4.91829, + "831": 4.90569, + "832": 4.91122, + "833": 4.90584, + "834": 4.90518, + "835": 4.90755, + "836": 4.90656, + "837": 4.90626, + "838": 4.90987, + "839": 4.91189, + "840": 4.90735, + "841": 4.90697, + "842": 4.91064, + "843": 4.90409, + "844": 4.90711, + "845": 4.90385, + "846": 4.90599, + "847": 5.24636, + "848": 4.89752, + "849": 5.24655, + "850": 4.90148, + "851": 4.89501, + "852": 5.98483, + "853": 4.89468, + "854": 4.89653, + "855": 4.8954, + "856": 4.89811, + "857": 4.90026, + "858": 5.24069, + "859": 4.91345, + "860": 5.2538, + "861": 4.91107, + "862": 4.90905, + "863": 4.90289, + "864": 4.90179, + "865": 4.90697, + "866": 4.89969, + "867": 4.89622, + "868": 4.89817, + "869": 4.89734, + "870": 4.89421, + "871": 4.902, + "872": 4.89737, + "873": 4.90082, + "874": 4.8986, + "875": 4.9034, + "876": 4.90213, + "877": 4.89969, + "878": 4.90652, + "879": 4.90216, + "880": 4.90541, + "881": 4.90491, + "882": 4.89798, + "883": 4.89325, + "884": 4.89662, + "885": 4.91, + "886": 4.89481, + "887": 4.90025, + "888": 4.89887, + "889": 4.89458, + "890": 4.89351, + "891": 4.89343, + "892": 5.24625, + "893": 4.90075, + "894": 5.24719, + "895": 4.89439, + "896": 5.95508, + "897": 5.92842, + "898": 4.90126, + "899": 4.91443, 
+ "900": 4.90222, + "901": 4.89928, + "902": 4.89952, + "903": 4.89905, + "904": 4.90536, + "905": 4.90627, + "906": 4.90188, + "907": 4.90671, + "908": 4.90531, + "909": 4.90614, + "910": 4.90319, + "911": 4.90668, + "912": 4.90614, + "913": 4.90641, + "914": 4.90219, + "915": 4.89858, + "916": 4.89788, + "917": 4.90114, + "918": 4.89062, + "919": 4.89675, + "920": 4.89412, + "921": 4.89851, + "922": 4.90258, + "923": 4.89837, + "924": 4.89168, + "925": 4.90558, + "926": 4.88926, + "927": 4.89631, + "928": 4.89481, + "929": 4.89896, + "930": 4.90349, + "931": 4.90254, + "932": 4.89424, + "933": 5.2393, + "934": 4.90447, + "935": 5.24957, + "936": 4.89799, + "937": 5.24757, + "938": 4.90497, + "939": 5.26023, + "940": 4.905, + "941": 4.90603, + "942": 5.89013, + "943": 5.2754, + "944": 4.89903, + "945": 4.90825, + "946": 4.90072, + "947": 4.91095, + "948": 4.89642, + "949": 4.90314, + "950": 4.9027, + "951": 4.90276, + "952": 4.90005, + "953": 4.90591, + "954": 4.89179, + "955": 4.89648, + "956": 4.89739, + "957": 4.90258, + "958": 4.90027, + "959": 4.90627, + "960": 4.89592, + "961": 4.89153, + "962": 4.89826, + "963": 4.89281, + "964": 4.88656, + "965": 4.9056, + "966": 4.88948, + "967": 4.89075, + "968": 4.89128, + "969": 4.88907, + "970": 5.23384, + "971": 4.91197, + "972": 5.24458, + "973": 4.90766, + "974": 4.90557, + "975": 4.9059, + "976": 4.90502, + "977": 4.90392, + "978": 4.90541, + "979": 4.89927, + "980": 4.9047, + "981": 4.90276, + "982": 5.2516, + "983": 5.25121, + "984": 4.90232, + "985": 4.90209, + "986": 5.26939, + "987": 5.52932, + "988": 5.28293, + "989": 4.91742, + "990": 4.90637, + "991": 4.90953, + "992": 4.90864, + "993": 4.9075, + "994": 4.90696, + "995": 4.90473, + "996": 4.90192, + "997": 4.90199, + "998": 4.89181, + "999": 4.89111, + "1000": 4.89025, + "1001": 4.9168, + "1002": 4.90983, + "1003": 4.91875, + "1004": 4.90892, + "1005": 4.92588, + "1006": 4.91678, + "1007": 5.262, + "1008": 4.92447, + "1009": 5.26729, + "1010": 4.92803, + 
"1011": 4.92461, + "1012": 4.92338, + "1013": 4.9218, + "1014": 4.92051, + "1015": 4.92442, + "1016": 4.91248, + "1017": 4.92113, + "1018": 4.92046, + "1019": 4.91949, + "1020": 4.92623, + "1021": 4.92267, + "1022": 4.92249, + "1023": 4.91899, + "1024": 4.92062, + "1025": 5.26804, + "1026": 4.92131, + "1027": 5.26954, + "1028": 4.91856, + "1029": 4.91681, + "1030": 5.90813, + "1031": 4.92456, + "1032": 4.92325, + "1033": 5.3083, + "1034": 4.91916, + "1035": 4.91422, + "1036": 4.91293, + "1037": 4.91223, + "1038": 4.9211, + "1039": 4.92393, + "1040": 4.92009, + "1041": 4.92106, + "1042": 4.9242, + "1043": 4.92005, + "1044": 5.26878, + "1045": 4.92668, + "1046": 4.93095, + "1047": 5.27312, + "1048": 4.92622, + "1049": 4.92229, + "1050": 4.92078, + "1051": 4.9252, + "1052": 4.92398, + "1053": 4.92467, + "1054": 4.92254, + "1055": 4.92721, + "1056": 4.92594, + "1057": 4.93074, + "1058": 4.9202, + "1059": 4.92339, + "1060": 4.92936, + "1061": 4.92316, + "1062": 4.91832, + "1063": 4.9324, + "1064": 4.96238, + "1065": 4.94321, + "1066": 4.96241, + "1067": 4.93128, + "1068": 4.92665, + "1069": 4.93217, + "1070": 5.29473, + "1071": 5.27044, + "1072": 4.91774, + "1073": 4.92979, + "1074": 5.30092, + "1075": 5.57166, + "1076": 4.9336, + "1077": 4.91975, + "1078": 5.29838, + "1079": 4.92345, + "1080": 4.92265, + "1081": 4.93832, + "1082": 5.28966, + "1083": 4.94183, + "1084": 5.28091, + "1085": 4.94506, + "1086": 4.94668, + "1087": 4.94028, + "1088": 4.93858, + "1089": 4.93937, + "1090": 4.9454, + "1091": 4.95599, + "1092": 4.95023, + "1093": 4.94499, + "1094": 4.96028, + "1095": 4.95213, + "1096": 4.96406, + "1097": 4.93905, + "1098": 4.92198, + "1099": 4.93824, + "1100": 4.92789, + "1101": 4.92981, + "1102": 4.93937, + "1103": 4.91985, + "1104": 4.91889, + "1105": 4.93785, + "1106": 4.94007, + "1107": 4.93618, + "1108": 4.94002, + "1109": 4.96964, + "1110": 4.93965, + "1111": 4.89692, + "1112": 4.89611, + "1113": 4.89245, + "1114": 5.24194, + "1115": 4.89604, + "1116": 
5.23738, + "1117": 4.89591, + "1118": 4.89712, + "1119": 6.2207, + "1120": 4.89707, + "1121": 5.24025, + "1122": 4.89987, + "1123": 5.27914, + "1124": 4.9043, + "1125": 4.89477, + "1126": 4.89625, + "1127": 4.90132, + "1128": 4.90216, + "1129": 4.90398, + "1130": 4.89594, + "1131": 4.90153, + "1132": 4.89796, + "1133": 4.89536, + "1134": 4.89807, + "1135": 4.89858, + "1136": 4.89867, + "1137": 4.89681, + "1138": 4.92931, + "1139": 4.92599, + "1140": 4.89538, + "1141": 4.89732, + "1142": 4.89242, + "1143": 4.89262, + "1144": 4.89274, + "1145": 4.93085, + "1146": 4.9294, + "1147": 4.92891, + "1148": 4.91881, + "1149": 4.89129, + "1150": 4.89171, + "1151": 4.8862, + "1152": 4.89315, + "1153": 4.89463, + "1154": 4.89481, + "1155": 4.89194, + "1156": 5.23303, + "1157": 4.89025, + "1158": 4.89312, + "1159": 5.24533, + "1160": 5.25573, + "1161": 5.23949, + "1162": 4.8914, + "1163": 4.89247, + "1164": 4.8896, + "1165": 5.88618, + "1166": 4.91824, + "1167": 4.89232, + "1168": 5.27914, + "1169": 4.88638, + "1170": 4.89624, + "1171": 4.90097, + "1172": 4.89335, + "1173": 4.90022, + "1174": 4.88823, + "1175": 4.91533, + "1176": 4.91702, + "1177": 4.91026, + "1178": 4.89204, + "1179": 4.89341, + "1180": 4.88754, + "1181": 4.89101, + "1182": 4.89528, + "1183": 4.89482, + "1184": 4.88208, + "1185": 4.87829, + "1186": 4.88501, + "1187": 4.88593, + "1188": 4.87526, + "1189": 4.88604, + "1190": 4.90872, + "1191": 4.88218, + "1192": 4.8826, + "1193": 4.88606, + "1194": 5.22378, + "1195": 4.88192, + "1196": 4.8877, + "1197": 5.23842, + "1198": 4.89888, + "1199": 4.89039, + "1200": 4.89543, + "1201": 4.8917, + "1202": 4.88928, + "1203": 4.88428, + "1204": 4.91394, + "1205": 5.27535, + "1206": 5.27273, + "1207": 4.92919, + "1208": 4.92498, + "1209": 5.60645, + "1210": 5.23108, + "1211": 4.91823, + "1212": 4.91107, + "1213": 4.90706, + "1214": 5.33395, + "1215": 4.91341, + "1216": 4.92296, + "1217": 4.92797, + "1218": 4.91436, + "1219": 4.93183, + "1220": 4.92763, + "1221": 4.91189, + 
"1222": 4.91524, + "1223": 4.92927, + "1224": 4.90762, + "1225": 4.91646, + "1226": 4.95199, + "1227": 4.93657, + "1228": 4.91049, + "1229": 4.90576, + "1230": 4.92418, + "1231": 5.24788, + "1232": 4.90922, + "1233": 4.90828, + "1234": 5.28741, + "1235": 4.93359, + "1236": 4.92651, + "1237": 4.92759, + "1238": 4.91812, + "1239": 4.96161, + "1240": 4.92462, + "1241": 4.9408, + "1242": 4.95151, + "1243": 4.92866, + "1244": 4.94942, + "1245": 4.93202, + "1246": 4.93118, + "1247": 4.92787, + "1248": 4.93195, + "1249": 5.31148, + "1250": 4.96525, + "1251": 5.27677, + "1252": 4.95992, + "1253": 4.89092, + "1254": 5.87598, + "1255": 4.89013, + "1256": 4.89328, + "1257": 4.88679, + "1258": 4.89107, + "1259": 5.26785, + "1260": 4.89071, + "1261": 4.89005, + "1262": 4.89216, + "1263": 4.89212, + "1264": 4.88574, + "1265": 4.88902, + "1266": 4.88642, + "1267": 4.89574, + "1268": 4.88631, + "1269": 5.22724, + "1270": 4.88943, + "1271": 5.23761, + "1272": 4.90353, + "1273": 4.89726, + "1274": 4.92161, + "1275": 4.92347, + "1276": 4.91698, + "1277": 4.92233, + "1278": 4.91979, + "1279": 4.9211, + "1280": 4.9179, + "1281": 4.92209, + "1282": 4.94485, + "1283": 4.92932, + "1284": 4.92976, + "1285": 4.91788, + "1286": 4.93408, + "1287": 4.92359, + "1288": 4.92166, + "1289": 4.9185, + "1290": 4.91424, + "1291": 4.91891, + "1292": 4.92028, + "1293": 4.9117, + "1294": 5.27044, + "1295": 5.29676, + "1296": 4.91703, + "1297": 4.92056, + "1298": 4.92207, + "1299": 5.91394, + "1300": 4.9147, + "1301": 4.9131, + "1302": 4.9176, + "1303": 4.93425, + "1304": 5.304, + "1305": 4.91978, + "1306": 5.27498, + "1307": 4.92043, + "1308": 4.91675, + "1309": 5.27831, + "1310": 4.93667, + "1311": 4.93075, + "1312": 4.92766, + "1313": 4.92554, + "1314": 4.93753, + "1315": 4.93323, + "1316": 4.92326, + "1317": 4.92226, + "1318": 4.9254, + "1319": 4.91683, + "1320": 4.91352, + "1321": 4.93361, + "1322": 4.9202, + "1323": 4.92888, + "1324": 4.94749, + "1325": 4.92427, + "1326": 4.91993, + "1327": 4.94147, 
+ "1328": 4.91569, + "1329": 4.9082, + "1330": 4.90808, + "1331": 4.92463, + "1332": 4.94304, + "1333": 4.91833, + "1334": 4.91915, + "1335": 4.9569, + "1336": 4.91253, + "1337": 4.91228, + "1338": 4.91599, + "1339": 5.26886, + "1340": 4.94108, + "1341": 5.28895, + "1342": 4.92166, + "1343": 4.93148, + "1344": 6.20454, + "1345": 4.93732, + "1346": 4.94109, + "1347": 5.28178, + "1348": 4.92597, + "1349": 5.31528, + "1350": 4.93124, + "1351": 4.9199, + "1352": 4.92145, + "1353": 4.91761, + "1354": 4.91599, + "1355": 4.91867, + "1356": 4.92286, + "1357": 4.91965, + "1358": 4.92454, + "1359": 4.92188, + "1360": 4.91921, + "1361": 4.92021, + "1362": 4.92372, + "1363": 4.91207, + "1364": 4.96107, + "1365": 4.91388, + "1366": 4.91683, + "1367": 4.91413, + "1368": 4.91691, + "1369": 4.91871, + "1370": 4.92278, + "1371": 4.92605, + "1372": 4.92653, + "1373": 4.9264, + "1374": 4.92864, + "1375": 4.92839, + "1376": 4.93185, + "1377": 4.92304, + "1378": 4.92916, + "1379": 4.92701, + "1380": 4.92797, + "1381": 5.27325, + "1382": 4.89544, + "1383": 4.89064, + "1384": 5.60494, + "1385": 5.00482, + "1386": 5.33879, + "1387": 4.92912, + "1388": 4.92575, + "1389": 5.83703, + "1390": 4.91691, + "1391": 4.91717, + "1392": 4.92005, + "1393": 4.92211, + "1394": 4.91895, + "1395": 5.29903, + "1396": 4.92143, + "1397": 4.91551, + "1398": 4.91427, + "1399": 4.91348, + "1400": 4.92556, + "1401": 4.92553, + "1402": 4.91884, + "1403": 4.91856, + "1404": 4.95579, + "1405": 4.88917, + "1406": 4.88886, + "1407": 4.90262, + "1408": 4.88379, + "1409": 4.88976, + "1410": 4.88681, + "1411": 4.8751, + "1412": 4.89308, + "1413": 4.89122, + "1414": 4.88458, + "1415": 4.89489, + "1416": 4.88438, + "1417": 4.88183, + "1418": 5.229, + "1419": 4.96736, + "1420": 4.95735, + "1421": 5.29839, + "1422": 4.92896, + "1423": 4.9679, + "1424": 4.96109, + "1425": 4.96048, + "1426": 4.95854, + "1427": 4.95558, + "1428": 4.90503, + "1429": 5.24486, + "1430": 5.24901, + "1431": 4.8987, + "1432": 4.89075, + "1433": 
5.22736, + "1434": 5.47175, + "1435": 4.89209, + "1436": 4.8986, + "1437": 4.8891, + "1438": 4.88697, + "1439": 4.88974, + "1440": 5.27298, + "1441": 4.89403, + "1442": 4.90495, + "1443": 4.89585, + "1444": 4.89766, + "1445": 4.89344, + "1446": 4.89618, + "1447": 4.88721, + "1448": 4.88735, + "1449": 4.89401, + "1450": 4.89435, + "1451": 4.89143, + "1452": 4.88553, + "1453": 4.89139, + "1454": 4.89347, + "1455": 5.23147, + "1456": 4.8987, + "1457": 4.90447, + "1458": 4.89553, + "1459": 5.23187, + "1460": 4.90546, + "1461": 4.89293, + "1462": 4.89652, + "1463": 4.88806, + "1464": 4.94852, + "1465": 4.89339, + "1466": 4.88888, + "1467": 4.89409, + "1468": 4.89028, + "1469": 4.89198, + "1470": 4.89499, + "1471": 4.89853, + "1472": 4.89989, + "1473": 5.245, + "1474": 4.89244, + "1475": 5.24744, + "1476": 4.88786, + "1477": 4.88954, + "1478": 5.81074, + "1479": 4.90603, + "1480": 4.8817, + "1481": 4.88853, + "1482": 4.88913, + "1483": 4.88525, + "1484": 4.88091, + "1485": 5.26103, + "1486": 4.88332, + "1487": 4.88482, + "1488": 4.88349, + "1489": 4.93535, + "1490": 4.93713, + "1491": 4.94008, + "1492": 4.93273, + "1493": 5.26558, + "1494": 4.92625, + "1495": 4.93119, + "1496": 4.93326, + "1497": 5.29661, + "1498": 4.94651, + "1499": 4.94563, + "1500": 4.94732, + "1501": 4.94956, + "1502": 4.93949, + "1503": 4.94314, + "1504": 4.949, + "1505": 4.93848, + "1506": 4.93655, + "1507": 4.93352, + "1508": 4.93376, + "1509": 4.93575, + "1510": 4.93237, + "1511": 4.93325, + "1512": 4.93443, + "1513": 4.93608, + "1514": 4.92875, + "1515": 4.93822, + "1516": 4.92271, + "1517": 4.93602, + "1518": 4.93135, + "1519": 5.28269, + "1520": 5.28601, + "1521": 4.93214, + "1522": 4.93238, + "1523": 4.9331, + "1524": 5.84985, + "1525": 4.93183, + "1526": 4.9312, + "1527": 4.94067, + "1528": 4.94179, + "1529": 4.93283, + "1530": 5.64255, + "1531": 4.93012, + "1532": 4.93237, + "1533": 4.93188, + "1534": 5.28642, + "1535": 4.93295, + "1536": 4.93351, + "1537": 4.93687, + "1538": 4.93395, + 
"1539": 4.93892, + "1540": 4.93329, + "1541": 4.93178, + "1542": 4.94011, + "1543": 4.93223, + "1544": 4.9238, + "1545": 4.93295, + "1546": 4.92789, + "1547": 4.92723, + "1548": 4.93344, + "1549": 4.93081, + "1550": 4.93484, + "1551": 4.93247, + "1552": 4.94286, + "1553": 4.93871, + "1554": 4.9346, + "1555": 4.93508, + "1556": 4.93254, + "1557": 4.93621, + "1558": 4.93402, + "1559": 4.92552, + "1560": 4.92871, + "1561": 4.9342, + "1562": 4.93981, + "1563": 4.94231, + "1564": 5.28559, + "1565": 5.2926, + "1566": 4.93393, + "1567": 5.27554, + "1568": 5.55669, + "1569": 5.22897, + "1570": 4.93426, + "1571": 5.28382, + "1572": 4.94938, + "1573": 4.95055, + "1574": 4.94811, + "1575": 4.9489, + "1576": 5.33208, + "1577": 4.94524, + "1578": 4.94592, + "1579": 4.94832, + "1580": 4.94832, + "1581": 4.94408, + "1582": 4.93963, + "1583": 5.06791, + "1584": 4.93161, + "1585": 4.93335, + "1586": 4.93849, + "1587": 4.93237, + "1588": 4.93556, + "1589": 4.93066, + "1590": 4.94768, + "1591": 4.93099, + "1592": 4.93258, + "1593": 4.93981, + "1594": 4.92949, + "1595": 4.93453, + "1596": 4.92827, + "1597": 4.92584, + "1598": 4.93755, + "1599": 4.92974, + "1600": 4.94804, + "1601": 4.93191, + "1602": 4.93369, + "1603": 4.93286, + "1604": 4.93069, + "1605": 5.27051, + "1606": 4.92329, + "1607": 4.92495, + "1608": 5.27779, + "1609": 5.28346, + "1610": 5.29602, + "1611": 4.94123, + "1612": 4.93638, + "1613": 5.856, + "1614": 4.94437, + "1615": 4.93653, + "1616": 4.93875, + "1617": 4.93536, + "1618": 4.93896, + "1619": 4.93356, + "1620": 4.93572, + "1621": 5.31736, + "1622": 4.94531, + "1623": 4.94225, + "1624": 4.94386, + "1625": 4.93406, + "1626": 4.93798, + "1627": 4.93633, + "1628": 4.93917, + "1629": 4.93696, + "1630": 4.93053, + "1631": 4.92648, + "1632": 4.92658, + "1633": 4.93841, + "1634": 4.93342, + "1635": 4.9359, + "1636": 4.93181, + "1637": 4.93503, + "1638": 4.93642, + "1639": 4.93683, + "1640": 4.93436, + "1641": 4.9443, + "1642": 5.27794, + "1643": 4.94268, + "1644": 
4.91864, + "1645": 4.92135, + "1646": 5.26653, + "1647": 4.93155, + "1648": 4.94793, + "1649": 4.92681, + "1650": 4.92909, + "1651": 4.92222, + "1652": 4.93308, + "1653": 5.27802, + "1654": 5.27831, + "1655": 4.92527, + "1656": 4.92184, + "1657": 4.92535, + "1658": 5.84478, + "1659": 4.93415, + "1660": 4.98533, + "1661": 4.95752, + "1662": 4.94766, + "1663": 4.94933, + "1664": 4.95355, + "1665": 4.94643, + "1666": 5.33217, + "1667": 4.93611, + "1668": 4.93532, + "1669": 4.9092, + "1670": 4.90894, + "1671": 4.9204, + "1672": 4.92236, + "1673": 4.9082, + "1674": 4.91286, + "1675": 4.90919, + "1676": 4.90864, + "1677": 4.91312, + "1678": 4.90871, + "1679": 4.92308, + "1680": 5.26267, + "1681": 4.92022, + "1682": 4.91096, + "1683": 4.91568, + "1684": 5.26065, + "1685": 4.90909, + "1686": 4.90718, + "1687": 4.91023, + "1688": 4.91504, + "1689": 4.9123, + "1690": 4.91353, + "1691": 4.90838, + "1692": 4.90311, + "1693": 4.90235, + "1694": 4.90376, + "1695": 4.90901, + "1696": 4.90724, + "1697": 4.91094, + "1698": 5.25776, + "1699": 4.91455, + "1700": 5.2613, + "1701": 4.90973, + "1702": 4.90149, + "1703": 5.82797, + "1704": 4.9102, + "1705": 4.91831, + "1706": 4.90187, + "1707": 4.89945, + "1708": 4.89865, + "1709": 4.89632, + "1710": 4.90065, + "1711": 5.28146, + "1712": 4.90271, + "1713": 4.90852, + "1714": 4.90365, + "1715": 4.90463, + "1716": 4.91059, + "1717": 5.24655, + "1718": 4.91868, + "1719": 4.90569, + "1720": 4.91426, + "1721": 4.91116, + "1722": 5.25454, + "1723": 4.91058, + "1724": 4.90906, + "1725": 4.92075, + "1726": 4.91839, + "1727": 4.91564, + "1728": 4.91131, + "1729": 4.91291, + "1730": 4.90884, + "1731": 4.91062, + "1732": 4.90638, + "1733": 4.9061, + "1734": 4.90658, + "1735": 4.91543, + "1736": 4.90614, + "1737": 4.91107, + "1738": 4.91084, + "1739": 4.90842, + "1740": 4.91418, + "1741": 4.90881, + "1742": 4.90792, + "1743": 5.26397, + "1744": 4.91738, + "1745": 5.25587, + "1746": 4.90599, + "1747": 4.90321, + "1748": 5.78796, + "1749": 4.90348, + 
"1750": 4.90858, + "1751": 4.89993, + "1752": 4.90938, + "1753": 4.90593, + "1754": 5.25406, + "1755": 4.9167, + "1756": 4.92732, + "1757": 5.32154, + "1758": 4.93234, + "1759": 5.25874, + "1760": 4.90683, + "1761": 4.90629, + "1762": 4.91525, + "1763": 4.91544, + "1764": 4.91062, + "1765": 4.90636, + "1766": 4.90873, + "1767": 4.91142, + "1768": 4.96573, + "1769": 4.90448, + "1770": 4.8891, + "1771": 4.8932, + "1772": 4.88066, + "1773": 4.87927, + "1774": 4.87496, + "1775": 4.90017, + "1776": 4.88861, + "1777": 4.88943, + "1778": 4.88632, + "1779": 4.89539, + "1780": 4.88673, + "1781": 4.89482, + "1782": 4.89261, + "1783": 4.88921, + "1784": 4.89935, + "1785": 4.88986, + "1786": 4.89061, + "1787": 4.88853, + "1788": 5.24035, + "1789": 5.24993, + "1790": 4.91207, + "1791": 4.91991, + "1792": 5.55415, + "1793": 5.49039, + "1794": 4.899, + "1795": 4.88922, + "1796": 5.25127, + "1797": 4.89889, + "1798": 4.90442, + "1799": 4.89627, + "1800": 4.89346, + "1801": 4.89082, + "1802": 5.2731, + "1803": 4.89886, + "1804": 4.87379, + "1805": 4.87577, + "1806": 4.88484, + "1807": 4.87576, + "1808": 4.86783, + "1809": 4.8917, + "1810": 4.87329, + "1811": 4.87182, + "1812": 4.8594, + "1813": 4.86213, + "1814": 4.86701, + "1815": 4.86025, + "1816": 4.86454, + "1817": 4.86162, + "1818": 4.85688, + "1819": 4.85907, + "1820": 4.85765, + "1821": 4.85878, + "1822": 4.86537, + "1823": 4.86101, + "1824": 4.86218, + "1825": 4.86082, + "1826": 4.85916, + "1827": 4.86304, + "1828": 4.86335, + "1829": 4.85846, + "1830": 5.21054, + "1831": 4.87227, + "1832": 5.20618, + "1833": 4.86815, + "1834": 5.55416, + "1835": 4.87798, + "1836": 4.89752, + "1837": 5.79486, + "1838": 4.90553, + "1839": 4.90533, + "1840": 4.89368, + "1841": 4.89475, + "1842": 4.89469, + "1843": 4.88557, + "1844": 4.89, + "1845": 4.88668, + "1846": 4.89537, + "1847": 5.26263, + "1848": 4.89245, + "1849": 4.89348, + "1850": 4.88835, + "1851": 4.90708, + "1852": 4.90228, + "1853": 4.86785, + "1854": 4.87736, + "1855": 
4.87369, + "1856": 4.87811, + "1857": 4.90299, + "1858": 4.88442, + "1859": 4.87297, + "1860": 4.89531, + "1861": 4.90241, + "1862": 4.89309, + "1863": 4.89512, + "1864": 4.90549, + "1865": 4.90854, + "1866": 4.9047, + "1867": 5.2401, + "1868": 4.89946, + "1869": 4.90883, + "1870": 4.90522, + "1871": 4.93888, + "1872": 5.21372, + "1873": 4.87709, + "1874": 4.86464, + "1875": 4.87233, + "1876": 4.88054, + "1877": 4.84923, + "1878": 5.17207, + "1879": 5.1976, + "1880": 4.8445, + "1881": 4.84388, + "1882": 4.84797, + "1883": 5.73664, + "1884": 4.84672, + "1885": 4.84557, + "1886": 4.85201, + "1887": 4.85018, + "1888": 4.84932, + "1889": 4.85617, + "1890": 4.84416, + "1891": 4.85089, + "1892": 4.84881, + "1893": 5.22668, + "1894": 4.8491, + "1895": 4.84681, + "1896": 4.84529, + "1897": 4.84998, + "1898": 4.8507, + "1899": 4.84271, + "1900": 4.84844, + "1901": 4.84365, + "1902": 4.83991, + "1903": 4.84228, + "1904": 5.17846, + "1905": 4.84978, + "1906": 4.84285, + "1907": 4.85138, + "1908": 4.84338, + "1909": 5.19721, + "1910": 4.85138, + "1911": 4.84739, + "1912": 4.84478, + "1913": 4.85226, + "1914": 4.85002, + "1915": 4.85039, + "1916": 4.85444, + "1917": 4.84588, + "1918": 4.8495, + "1919": 4.85217, + "1920": 4.84949, + "1921": 4.84631, + "1922": 4.84476, + "1923": 5.17493, + "1924": 5.19107, + "1925": 4.85154, + "1926": 4.84261, + "1927": 5.44494, + "1928": 5.14044, + "1929": 4.84927, + "1930": 4.84493, + "1931": 4.84048, + "1932": 4.84204, + "1933": 4.84664, + "1934": 4.84105, + "1935": 4.83981, + "1936": 4.841, + "1937": 4.84038, + "1938": 5.22894, + "1939": 4.84209, + "1940": 4.84356, + "1941": 5.20657, + "1942": 4.9004, + "1943": 4.90813, + "1944": 4.90655, + "1945": 4.88214, + "1946": 5.21239, + "1947": 4.86529, + "1948": 4.85849, + "1949": 4.85084, + "1950": 4.86533, + "1951": 4.86, + "1952": 4.85847, + "1953": 4.86113, + "1954": 4.85194, + "1955": 4.85611, + "1956": 4.87124, + "1957": 4.8777, + "1958": 4.84686, + "1959": 4.84732, + "1960": 4.86364, + "1961": 
4.8509, + "1962": 4.8663, + "1963": 4.87064, + "1964": 4.86099, + "1965": 4.86103, + "1966": 4.84569, + "1967": 5.17792, + "1968": 4.84796, + "1969": 5.20648, + "1970": 4.84901, + "1971": 4.84838, + "1972": 5.74018, + "1973": 4.85813, + "1974": 4.85367, + "1975": 4.86684, + "1976": 4.87041, + "1977": 4.90603, + "1978": 4.90475, + "1979": 5.25145, + "1980": 4.94444, + "1981": 4.92124, + "1982": 4.90832, + "1983": 4.94722, + "1984": 5.67636, + "1985": 4.939, + "1986": 4.93543, + "1987": 4.96136, + "1988": 4.92447, + "1989": 4.87603, + "1990": 4.86128, + "1991": 4.86822, + "1992": 4.86666, + "1993": 4.85995, + "1994": 4.86025, + "1995": 4.85738, + "1996": 4.86953, + "1997": 4.86535, + "1998": 4.86591, + "1999": 4.86231, + "2000": 4.86466 + } + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt_dynamic_inference_tp1_pp1_dp8_583m_throughputtest_zmq/golden_values_dev_dgx_h100.json b/tests/functional_tests/test_cases/gpt/gpt_dynamic_inference_tp1_pp1_dp8_583m_throughputtest_zmq/golden_values_dev_dgx_h100.json index 9be8a9dc0ca..dc1e1921fd8 100644 --- a/tests/functional_tests/test_cases/gpt/gpt_dynamic_inference_tp1_pp1_dp8_583m_throughputtest_zmq/golden_values_dev_dgx_h100.json +++ b/tests/functional_tests/test_cases/gpt/gpt_dynamic_inference_tp1_pp1_dp8_583m_throughputtest_zmq/golden_values_dev_dgx_h100.json @@ -1,1028 +1,1028 @@ { "throughput": [ - 94.6087716527102, - 115.85992244026639, - 138.9562527069375, - 133.18726531918395, - 81.97861561771212, - 134.30726469422635, - 86.456140428456, - 114.99456351298251, - 147.3101800153954, - 3.0364623744653003, - 124.7590786954667, - 134.2276982994434, - 3.0580463134110167, - 117.03969654341354, - 130.92134521286803, - 48.493091604204935, - 1.4498729599486508, - 128.01470907994928, - 1.8330770354872434, - 66.31842482241125, - 82.24189975425459, - 1.07058112939944, - 1.8815468970982412, - 0.9373246942729808, - 134.9963160815443, - 2.285771114682068, - 43.068220270070434, - 134.9677086822377, 
- 82.44946740133796, - 47.71839155542011, - 114.4199568886962, - 29.67621576315833, - 144.1589742491705, - 95.8164720809401, - 122.80562228460093, - 39.21436814433054, - 3.041180292262413, - 3.2867844729646842, - 72.43808226229888, - 0.8371525937296347, - 1.2212635079980698, - 145.6869075644325, - 42.317711349146016, - 109.1196064871946, - 73.6281770453198, - 140.4495689387567, - 1.219834296561022, - 138.66856497329005, - 23.33818821323391, - 67.82342558671365, - 130.09683254313987, - 147.60199288178146, - 0.9427431720755464, - 3.2856495013162523, - 79.12426666101076, - 86.41557345094756, - 120.17346279825053, - 137.16615251640926, - 108.93291864542198, - 110.10504114490513, - 46.19253755421628, - 0.950218846923012, - 136.50642826951463, - 142.73168666846448, - 1.2206786818073785, - 1.898581377105612, - 131.72636154091063, - 2.2842414327001976, - 89.76521170090028, - 114.66053545744656, - 58.64474290044525, - 0.8367865961030284, - 128.01767795820945, - 60.87292097103301, - 124.20016865241587, - 119.59336898055426, - 0.9425820346281929, - 93.70053305431952, - 1.0728113870213674, - 135.7596767309971, - 112.89357243644062, - 89.2743296587299, - 137.86411291342458, - 135.6974706051771, - 102.59633828443238, - 129.82058179399326, - 139.57672703148444, - 140.5642311163746, - 78.49182953675201, - 123.40912657074227, - 82.74099904578694, - 75.5490641626476, - 93.38596238341951, - 141.19058076067225, - 1.072254167577298, - 100.8669047802279, - 132.77382347347034, - 92.29086179175866, - 137.20301032384705, - 89.57723938765776, - 67.5465256589703, - 0.9498935124108836, - 1.0716887464650027, - 0.8365472180547067, - 137.902625307774, - 132.67132600219722, - 1.45201860416265, - 1.8366476879619427, - 88.65095604379363, - 132.1806036761347, - 126.0481874394642, - 127.43750324083169, - 93.27238135265156, - 109.83884164204308, - 102.30516355984702, - 141.10387096377744, - 0.9425154448032942, - 95.04281981148903, - 103.11525529548061, - 0.8361762901534399, - 135.3171561172067, - 
123.30032998064965, - 118.75691144485415, - 82.21375599642211, - 66.37216333263251, - 120.02349229491865, - 27.339414655466246, - 133.1312422227687, - 123.02377779863252, - 111.0798894329, - 58.88405247768833, - 131.31767475108893, - 40.19076958615912, - 123.58362152151858, - 130.6541142941889, - 61.39555613504246, - 43.92154495664044, - 1.037012527495492, - 127.16052127606021, - 137.06554800183082, - 85.67161160523041, - 1.0253417447981334, - 139.20903624514017, - 140.19068787455728, - 117.67416498245059, - 23.410837515725987, - 130.73052473972666, - 22.561824695346466, - 1.028901717647808, - 119.30712483977753, - 117.77548263464804, - 135.2959098119142, - 142.10193821260228, - 1.0366044325624144, - 1.0350271698893887, - 132.8943567509843, - 51.50353963446039, - 113.39559408843714, - 124.25424103796537, - 129.60407993083075, - 136.8566687186031, - 1.036163010240988, - 1.0345739017743927, - 118.72350056844492, - 32.453707095990595, - 43.851925176925825, - 139.39206855448938, - 141.0979597861742, - 132.81461728578432, - 80.95956255477945, - 133.42483643501154, - 57.27721135575491, - 81.47649794801364, - 79.39765285063396, - 56.40255861789973, - 0.8890603607397893, - 137.59325887086797, - 118.03982850100024, - 53.04390121587005, - 88.31177924841927, - 1.0287550608831881, - 54.67393025836421, - 54.73556135447348, - 129.6143036059356, - 123.57095756116274, - 146.05184555314386, - 55.506024155977386, - 84.40666358740559, - 62.68531518105107, - 147.42894642823578, - 1.0274253590993496, - 145.9063526676371, - 76.36231256557768, - 1.035808949157935, - 136.1858098182613, - 93.13144140533397, - 54.57886608953819, - 1.0251956490815057, - 1.0270063804838983, - 67.96952180390161, - 136.90103479290272, - 78.62986077133174, - 129.97235998681177, - 70.57784076609056, - 1.028567312218149, - 69.64434330087829, - 1.0266016363366386, - 25.142311727265525, - 139.54750333578679, - 118.80547132463877, - 1.0342055876192149, - 132.79991800938092, - 88.25494664060619, - 132.4600307114398, - 
1.026200775415348, - 111.33264788932784, - 1.031301270403004, - 104.45912302410692, - 1.0337771723701492, - 124.53550504281608, - 1.0283501183885058, - 126.53361938982871, - 139.83512785200963, - 102.28350299734186, - 122.68389734539087, - 139.27095111763788, - 1.0333552237490158, - 97.04945381465573, - 60.63422077140298, - 1.0248694052483192, - 96.77644543721476, - 118.38370846079931, - 1.0309087229819596, - 136.0487423665781, - 1.032932214377732, - 104.96525711514936, - 50.75370028394122, - 125.67617176346853, - 125.47392048276225, - 101.59371483024698, - 119.1183231384482, - 134.24568445137294, - 1.0323996653747745, - 119.28563313083153, - 50.183581144589674, - 107.50817556608582, - 127.4693561344537, - 116.0234844098742, - 149.0429439759437, - 127.77855747904051, - 1.0319900690130652, - 129.7400124946839, - 60.27584011696136, - 1.0245534026749026, - 113.8687773549026, - 129.9927880985222, - 41.55332067297356, - 12.991853549713621, - 144.9384518471586, - 127.77570879015505, - 79.09214991388126, - 1.0326234729165304, - 144.50618896622706, - 44.461452482592826, - 145.75357879817352, - 150.5618330832813, - 123.17802281879979, - 147.0133924731902, - 57.07203337285457, - 140.17944630269687, - 44.5066568841284, - 150.2834791394652, - 146.37106237628518, - 135.59553639884948, - 21.91845075979551, - 1.0391172002596458, - 92.42182316100705, - 14.98578222593142, - 19.944740287073653, - 32.75622847272977, - 58.94666795839769, - 1.0428676908165904, - 97.94938911630567, - 140.5399781540016, - 36.397689902912774, - 1.0322919875583962, - 33.76444948259586, - 147.54902815924785, - 51.316830076622495, - 153.55703202636914, - 46.423895018386204, - 140.271682540213, - 1.0340651759548871, - 85.22971449383292, - 141.80480996358014, - 1.0234621691055457, - 1.0355322329825165, - 136.96321865236195, - 138.2293990177049, - 136.89440582973347, - 96.94919171687799, - 54.992986423891566, - 142.91167590864902, - 138.73615931624403, - 86.32837448704223, - 1.0424247604140402, - 
127.58052889290863, - 138.2472241943501, - 1.0338260095695477, - 1.0317372756221133, - 150.59249576769173, - 1.0229533138894364, - 149.1711141084735, - 1.0419379125129562, - 1.040305113121658, - 150.13261057757276, - 62.47975017460808, - 70.20443057037575, - 76.88821624674898, - 1.0225242667788867, - 136.83301633777177, - 1.0414381555227956, - 131.6044067829552, - 1.038902005769604, - 1.0335832618537684, - 83.38230404797935, - 3.047737981863063, - 140.9843162162637, - 1.0352264324041114, - 1.0409374510445146, - 103.17228299164871, - 1.0383219913492376, - 67.5151836065632, - 126.94018489907108, - 95.29974174831813, - 1.022161551972834, - 1.0348032799350415, - 93.24855217625235, - 140.00831851627856, - 142.46553219867087, - 80.52507876480331, - 149.47939431741142, - 125.60095189608528, - 92.57991472689042, - 153.09192667088175, - 98.78787611117323, - 136.9802701171813, - 1.0378200246498124, - 79.05370338483348, - 145.63143231877774, - 107.86253722014555, - 113.1390555766259, - 150.4596904971142, - 6.010262757833046, - 138.11675690694213, - 1.0371929842524894, - 55.1702723554103, - 148.4142582794926, - 108.62464742566522, - 142.2515578682958, - 149.5588988951372, - 1.0310870179234204, - 32.798276334675066, - 145.8363475163408, - 82.52497836005318, - 144.77105210255448, - 140.95035733017403, - 145.4844811663436, - 145.0646083055648, - 139.1641494303434, - 1.0401220454548914, - 146.10598185112948, - 1.0335329080843159, - 1.0316085392161136, - 133.98012837767038, - 129.62059667226987, - 151.2681266565858, - 1.030719335336581, - 135.9600336007384, - 1.0366589924031362, - 107.70864165999221, - 118.06361914834272, - 148.4615541738592, - 135.1206190516379, - 1.0788915925864082, - 1.0662361391973343, - 1.0784094142292293, - 145.5492563111853, - 100.1745158858024, - 89.97448812790176, - 140.13008352060388, - 8.378443606045758, - 19.841723966559687, - 31.11972559764219, - 127.75589035167928, - 144.649118240912, - 83.40454687650907, - 13.609558087727212, - 144.14916775068022, - 
143.0831699051951, - 144.53789580070173, - 129.35689525213576, - 126.54760361436873, - 136.72725454688293, - 83.66753329456253, - 35.238850690537326, - 138.73588075606074, - 148.39285997484404, - 141.43706957675556, - 35.20788617289704, - 140.22918428708584, - 141.42288954532623, - 80.8071906111917, - 53.480908541665116, - 96.60869116876205, - 138.83030943256392, - 146.89537016655746, - 1.0659353965573166, - 138.66041009897964, - 138.0783824554628, - 54.95061283513892, - 1.0688789370964418, - 145.4981195236156, - 107.91672388693667, - 147.39387423946786, - 143.49840246862203, - 1.0781871694837721, - 125.37215873599833, - 46.390553110182545, - 1.0683430650310588, - 60.55314896188811, - 128.32962060837178, - 142.6648214311374, - 1.065532502621677, - 145.06202945295232, - 149.5985088362253, - 43.61426254132819, - 139.2120402464869, - 138.80120892663803, - 142.59390751862693, - 147.27000174003754, - 139.5980537408405, - 142.37081759892675, - 76.47257166426981, - 0.8663971721944621, - 1.067847671923619, - 1.0752972325757186, - 139.11225337731244, - 154.1012640338781, - 91.85315813315137, - 7.34066705730821, - 1.0763437477764217, - 56.03391448680589, - 1.067309924884827, - 1.0747789028833068, - 1.057667310022394, - 146.4284745539176, - 142.32867288307636, - 132.81801172672715, - 142.5746724111237, - 43.178263922620026, - 140.19958418325498, - 1.0742201855279276, - 139.95237701874325, - 124.69044225989671, - 89.93275546978569, - 1.0778110524743836, - 108.03753008375865, - 0.8649825661375887, - 101.22782607000799, - 138.6615942910557, - 1.0572642952018412, - 143.509260845593, - 1.0651693329533294, - 97.454990956795, - 1.075960473594851, - 104.89429761368234, - 153.46849816095335, - 143.28204379991922, - 112.57923589922926, - 145.35468060283986, - 119.53338040876814, - 132.53105489182144, - 146.60735281445733, - 0.8648000721123511, - 132.61504628627392, - 140.81953388748138, - 1.05684091289561, - 147.29646966899597, - 1.0646855258714663, - 1.0772400203863821, - 
137.87592499226204, - 101.79954304062817, - 134.45893707567646, - 1.0737967838723397, - 147.3289039421509, - 142.95955673278567, - 123.11846557585149, - 139.7223884224781, - 5.274894457437767, - 0.8646226703470901, - 135.27010135142623, - 134.53222451904563, - 140.4520894166607, - 148.6784682726068, - 148.83999547746723, - 144.76059628877204, - 146.09818079047014, - 0.8644123666240657, - 133.05795012757028, - 141.21253159110282, - 147.08086640702987, - 153.13511211461227, - 147.72437078211334, - 53.87242850230838, - 61.34701685378028, - 74.50771860339175, - 16.40780504974564, - 16.448796993269678, - 144.08505364828036, - 143.78069847853888, - 145.08382905436133, - 139.4144567792124, - 1.113422304912727, - 23.732299099149245, - 146.716938504402, - 1.1150428401994323, - 1.1070863332993708, - 147.462815334713, - 15.300506166735937, - 142.89311901203018, - 35.881455163220174, - 0.8959120615185874, - 134.50389621984408, - 79.91603718165896, - 145.31776951960734, - 153.19384567886857, - 142.494036234602, - 130.58249312188119, - 1.1128817603274543, - 56.157995916719756, - 35.81413980204931, - 116.5213087641768, - 63.30354399512571, - 55.0117106848875, - 47.52954249314361, - 153.04709230401787, - 1.112276523473745, - 80.1523559974256, - 136.20373724941714, - 1.114673225365626, - 1.1067132158651183, - 149.29883052073288, - 145.10950784560325, - 130.53765167080937, - 1.111788125890117, - 0.8957719496064405, - 1.1050775451489783, - 17.522300994030367, - 154.45472111064055, - 152.07616582090188, - 1.1020107149905272, - 138.6808068419634, - 76.87873177159636, - 51.43702839643221, - 138.95045176064437, - 138.64177504011988, - 140.72197385602811, - 132.80947742972836, - 149.78872816785005, - 139.94034036065392, - 154.2632802491591, - 55.57148538150843, - 1.1044580058296936, - 147.1712801496827, - 77.84198065949245, - 142.38330204183904, - 151.76812011990265, - 145.19131540821485, - 147.26566215388425, - 87.12413393605841, - 1.1038403429439656, - 141.4935550752979, - 
145.7397470598185, - 3.3080164659931235, - 123.0327553358976, - 146.24080278853327, - 148.10448175245884, - 29.234562433775857, - 151.30177873039895, - 135.4653748135468, - 144.3293913931314, - 148.16163203136404, - 1.1015876034201657, - 1.1114790318458536, - 136.68047783885697, - 77.72584511329579, - 125.73692105352463, - 106.98755729483561, - 96.25926845246491, - 1.109721323323522, - 141.71073652156545, - 130.22006710827588, - 145.24478945746003, - 80.67459353439743, - 1.1033551544760267, - 150.03177939272493, - 154.12875534463626, - 150.04771421074818, - 1.1010813815407388, - 1.1110434127990452, - 145.385699877379, - 86.86487551811825, - 130.16687493633253, - 143.8726181331947, - 111.91340621077623, - 146.0394914387852, - 1.1006353022455784, - 134.47903589563677, - 148.6907436994389, - 102.87151097507036, - 137.41724911494663, - 1.1146766644704549, - 143.85952373403495, - 146.92280951248307, - 1.100156488603178, - 144.04783334738536, - 148.53630346113712, - 58.74848466983248, - 147.0485685726298, - 141.32891699761203, - 142.8441702922343, - 131.04366253726744, - 128.6305301075303, - 1.1106412111686195, - 147.90025888582002, - 0.8959265584913588, - 149.5194069726666, - 137.43649451567626, - 1.1068068376551545, - 68.05269425995475, - 138.94056631255367, - 138.43818227469507, - 69.60391199895408, - 114.83395091462887, - 151.34107787433956, - 141.57237630997332, - 146.07433910500515, - 9.941778754980154, - 131.297822968639, - 10.386636719874664, - 10.545636067043365, - 114.58677137445733, - 75.28902943071078, - 90.63452059810655, - 143.58694736923238, - 9.901118804514459, - 144.5206530902411, - 144.78737732574044, - 79.81136215142409, - 84.9314508821071, - 120.18939827456474, - 10.225253542151219, - 9.702822548173124, - 103.1188517219872, - 138.5008491242522, - 92.02238700298246, - 151.99592340131602, - 9.807595290716304, - 150.0447954775559, - 134.2614008494909, - 149.38544573345007, - 149.62298116309924, - 124.32358754465251, - 132.817456221544, - 
10.50607995390264, - 9.78317681034783, - 151.07916494121415, - 146.93545537009487, - 118.45851163082196, - 145.03008316360754, - 154.4449202186591, - 146.86002069809945, - 150.6932855951215, - 110.74803327496042, - 127.40788523389726, - 150.81323854197058, - 150.0047673310006, - 149.6063654551971, - 133.87244996538675, - 10.329695475492791, - 9.414695716712222, - 106.77032789813472, - 118.34636653947105, - 123.44441062862572, - 144.9015592115516, - 153.74652990582067, - 10.065713405335144, - 129.38998560194165, - 117.69087049838025, - 99.15650839997046, - 127.90462338199198, - 147.3574863739125, - 9.696544883885949, - 9.8853852911422, - 128.35872796896587, - 145.2939860705264, - 128.72081963712404, - 94.09935653689803, - 142.8780531031409, - 130.5213122981276, - 126.89288883528536, - 153.36107852781166, - 149.17239657923582, - 9.177632630803961, - 9.387171298727486, - 109.68196882316985, - 148.55536204011432, - 152.61730207818772, - 9.648922236946333, - 132.805446535875, - 138.74295200738652, - 141.66118217831166, - 124.0399127789103, - 113.05005278683446, - 149.71230902297984, - 25.727698431920004, - 129.56419655827216, - 130.40687823665095, - 128.46470366050013, - 150.46298369674685, - 9.22073843893938, - 110.36443029340542, - 148.23878821929193, - 10.219508495480236, - 9.615051521185155, - 9.8723813087942, - 149.91378148843256, - 9.149056684599877, - 130.37704092008303, - 114.86611671621016, - 134.53633480709703, - 131.11593468604048, - 149.74665952988033, - 136.60701891253495, - 146.50864617645632, - 9.094221140419737, - 149.69902295915708, - 126.93245475406366, - 141.2463933703881, - 10.18172163650932, - 136.76582155059438, - 155.5823388453975, - 144.68082947663285, - 142.0128061769988, - 116.20800508912414, - 101.13756407758095, - 10.050927550768915, - 10.14139856150474, - 9.573219645146107, - 146.33874064646594, - 137.22302119976462, - 132.14965518046, - 148.08190796641483, - 117.6843964457568, - 153.04352772565807, - 146.79238076404926, - 9.522740968586977, 
- 145.93484469600287, - 13.925952420322696, - 12.697420287309185, - 146.39122941822845, - 113.94298610788566, - 13.844109957456581, - 154.57922917096633, - 13.525210269101805, - 103.83976095796662, - 97.75660804271413, - 135.83818209343426, - 158.60060111529293, - 111.57793188874757, - 13.768524263105455, - 154.2203592546867, - 108.85242762118563, - 111.15752259030245, - 149.5942138872604, - 119.77102605185765, - 120.68065341205389, - 105.29698904913548, - 151.41465167808087, - 138.90606724001483, - 13.437371194424983, - 119.97194649055415, - 144.6223725248399, - 146.9934910169238, - 149.45319992777343, - 121.48260402443249, - 13.662736071688842, - 14.448955892498802, - 144.5545360346381, - 154.00382983055897, - 151.8635735223181, - 137.2321484611102, - 119.71487519948164, - 88.24978714231261, - 147.74815341218743, - 142.1113258863455, - 132.08775922189477, - 124.63351274554526, - 145.72256212355262, - 100.50708502243579, - 139.16363846809003, - 114.82662827063822, - 154.78307253831395, - 149.22879563842886, - 152.6744734255461, - 145.81022434241217, - 152.68018782123758, - 116.75549006136289, - 12.968595875688791, - 6.824624970615158, - 125.05116103474757, - 147.66072487793718, - 147.5735120742967, - 139.1302141298083, - 146.48542990069834, - 12.674865288395944, - 147.88858853602966, - 6.8124480142416175, - 137.54766974463703, - 130.89979405333307, - 13.364169845161861, - 14.116086127002273, - 130.3002929300388, - 116.98398239487472, - 152.70827610346095, - 98.51470626500011, - 135.1252373635164, - 14.405992358855888, - 154.13709739001223, - 146.28661687368685, - 137.87827066214206, - 12.621081453489012, - 154.04574874294514, - 6.802625211185703, - 152.18661864386252, - 149.30257880598677, - 13.244501725269068, - 138.34068638798834, - 150.95140747506372, - 141.8441899037163, - 152.99022366652198, - 103.95004802425926, - 140.28144756248412, - 154.51222806007945, - 85.40777548962518, - 154.7067128296305, - 120.47843952303268, - 12.568053995018431, - 
12.916583075889136, - 105.92477484543576, - 137.92878859711615, - 135.13853669037294, - 137.88549737290148, - 157.83019925734393, - 145.48927689323145, - 12.509532718065461, - 150.6233829715981, - 119.23669844460764, - 138.49099023171033, - 154.0870149904812, - 140.1862744667834, - 148.860174031694, - 147.54629689336036, - 12.448861769003683, - 152.4711466483636, - 102.47079224461186, - 152.40864885890767, - 156.21773232766026, - 13.139291580904986, - 150.30653960489693, - 145.43571147072188, - 132.8965387342577, - 144.85972103961666, - 125.5438694385711, - 158.07457773478276, - 14.359506122440205, - 137.7658155977229, - 153.68125116011197, - 156.57780724945528, - 12.394708947912125, - 12.874702780202174, - 110.61518572692995, - 149.4338565730422, - 149.67552030435513, - 146.20909415912828, - 9.308833539527914, - 26.176147260970783, - 8.701217384742513, - 66.92241449340185, - 105.12940849136734, - 145.25326276553395, - 139.68219350261262, - 131.60335890332783, - 150.53420884400245, - 17.552483447968918, - 99.60476667168517, - 9.003208512207522, - 8.539560747895454, - 9.946172723540226, - 150.55644446784382, - 9.608936841972842, - 104.80864366760326, - 25.95068644438624, - 99.42592550150236, - 108.35979254469888, - 113.9171427720856, - 9.905905876631499, - 131.1684982861573, - 154.7989292174601, - 151.34753888952145, - 150.11816141981262, - 143.00557828542912, - 126.2310299151925, - 113.53830001728545, - 148.13405630794878, - 150.7564429392251, - 155.252325076404, - 18.20048176554747, - 25.725436761645142, - 8.678711562613207, - 143.3683328827327, - 127.0294451168928, - 137.50119476282134, - 10.068367539846923, - 155.64822784014916, - 153.2789382926615, - 25.46950813818654, - 142.9138107220956, - 155.10510899417167, - 107.40557834412083, - 9.871948602847068, - 144.4712732194919, - 140.17802930301565, - 9.286026243902361, - 129.1488895575147, - 124.35586045151207, - 140.1410811550992, - 96.63692877337894, - 153.62093095799207, - 156.05800033315097, - 
9.587609950939838, - 140.09721428165886, - 134.898750425008, - 8.652809034763463, - 8.989448046931262, - 107.64260577858933, - 9.825071080298192, - 150.6237132142087, - 143.76058852986372, - 154.01627264735168, - 140.85322298632985, - 143.63714834446708, - 149.7259575806535, - 8.53942846683121, - 157.02635815805976, - 150.83913162907433, - 154.0283691261865, - 9.246842209481716, - 154.5851361854829, - 133.4662155767381, - 137.55396410787307, - 105.77910782321499, - 148.97953057255376, - 111.3041581371634, - 9.543858351726714, - 142.71996301994741, - 144.2417836324451, - 148.5293262803374, - 8.95331376662564, - 105.2724164655814, - 149.16646109060707, - 151.1947852118465, - 9.503293907683512, - 133.40055362812345, - 8.776394391795916, - 148.3675722527084, - 154.66946641450528, - 122.71674068416665, - 149.62192317697068, - 153.40159484208397, - 9.46860898864519, - 146.10526710538994, - 143.96020057925128, - 8.62472208077336, - 8.906885562515198, - 105.7754218686014, - 150.17957794387223, - 144.0451331512576, - 149.95461039551162, - 151.46311089131117, - 142.22104279807664, - 147.3679944003333, - 140.5394711174869, - 123.62157744638432, - 152.32796921399395, - 156.6603241829257, - 9.43621164630811, - 158.2241383954169, - 149.33346139426692, - 144.12074054746773, - 143.1977521817863, - 8.536662624511228, - 9.785635570067782, - 147.61880087321424, - 9.402323265876474, - 159.1161790596516, - 146.56796834276156, - 147.64890403285438, - 157.70847517328534, - 114.64282143770687, - 148.5000942425868, - 10.052761003641129, - 147.38801074409378 + 98.47864949895008, + 63.93792629897559, + 166.49088904974073, + 148.10611103663214, + 136.93608898138933, + 153.87586308063382, + 90.56559317052603, + 128.5291550251628, + 162.07670305023993, + 4.196475118529487, + 147.98743190294235, + 149.72190006929446, + 1.1777631788022311, + 133.74963259040626, + 150.11088322452974, + 51.863180020864455, + 4.139051494405947, + 79.2557164919149, + 1.6071996867452278, + 70.01915930069646, + 
137.26891673137558, + 1.0402098481802287, + 1.8594022431966566, + 2.039486534010741, + 146.2938256177694, + 4.149796716964247, + 46.34667799086249, + 151.47361823216394, + 137.54739677623354, + 51.120748066850325, + 136.84512611150544, + 32.11962977236786, + 157.56752902839474, + 47.12119148820226, + 145.7314367353006, + 42.20270560372231, + 1.0426098595499007, + 3.5892682955617827, + 76.57100636536596, + 1.612496526198, + 2.6881979572654413, + 111.88402006134972, + 45.58338247702666, + 111.4111889571842, + 132.16301113659247, + 161.64295403385984, + 2.664705818704618, + 157.1638935590632, + 25.286871922093454, + 37.4310109209181, + 153.65911351957632, + 170.7256762539797, + 1.042128189044151, + 3.5869040413041917, + 83.30261586197105, + 90.55970202339806, + 132.9415846015795, + 95.80834182322752, + 112.4369142570399, + 130.7156977512895, + 90.98968148626129, + 0.9371270459059615, + 159.09279181195387, + 162.9970081970886, + 2.6700708026356366, + 1.8557378891084773, + 156.12103246797463, + 1.3653778104766194, + 143.46571269908148, + 130.6346250925551, + 62.46023289115923, + 1.6116060776090406, + 139.8111163213305, + 34.86018737886305, + 146.06865198079345, + 133.96801334258495, + 1.0417626130871034, + 97.53781169320182, + 2.0478975910586503, + 151.90776052541932, + 126.40035137658552, + 44.78808603802679, + 163.9803901721219, + 152.78287546210825, + 154.77428093351637, + 145.74430748169019, + 163.03421864587594, + 146.28703545539014, + 82.55934081518444, + 73.53123347847824, + 87.20650201489909, + 79.6237289961617, + 146.76012425672718, + 162.46398331888344, + 2.046000130560097, + 104.11707807083185, + 142.7981951169222, + 45.781111784259096, + 164.13498801895528, + 93.34392878508068, + 127.09756182184553, + 0.9369885821746623, + 2.0440080852076448, + 1.6107470231739485, + 149.4484511068655, + 87.5539915318001, + 1.3670348174101508, + 1.1796264961520015, + 142.53546263417087, + 150.2065859393766, + 145.65883203776818, + 142.2125733485302, + 96.99016545580078, + 
57.32416740237564, + 106.63530054957698, + 159.19142654590536, + 1.0415326032228118, + 98.71719677010607, + 106.73175053259962, + 1.6100826372227688, + 146.64805335844048, + 72.59518577946031, + 142.34132184480842, + 85.94240702745647, + 126.17687901514078, + 135.7696701691411, + 29.62308081982307, + 148.2421144346034, + 130.36261145275355, + 53.13931721337651, + 60.51160243931191, + 141.54695622051943, + 73.11803837069677, + 137.21251141324606, + 148.63844490308944, + 62.8404582738594, + 45.401831957608, + 0.9643006239654945, + 147.2298500624911, + 151.91506054646217, + 140.48716103219812, + 0.9577624967779577, + 160.06459889404132, + 155.2359539910114, + 126.59645077786885, + 15.69438649059929, + 152.80784197867072, + 23.527136960081226, + 0.9561607658842026, + 135.304826702121, + 142.47511264536794, + 149.8501903787043, + 151.43523022097875, + 0.9640793717349251, + 0.9631519875374979, + 145.2950579689095, + 104.16937732598902, + 131.1708059930721, + 144.18743838648734, + 143.6919419808989, + 145.5428193502994, + 0.9638106812588461, + 0.9627615573404509, + 116.54193238808332, + 54.308902955274014, + 45.33558667751163, + 159.57290743060722, + 156.60366994005867, + 142.03263718363198, + 40.71403223415776, + 155.40510615972553, + 58.6681100653237, + 137.0437576533739, + 80.42300690375168, + 58.033083103031665, + 0.9693871919683402, + 145.73573001557583, + 60.44621412824422, + 54.994288450325136, + 88.73692291143061, + 0.9559459748869998, + 56.08954858644736, + 56.31747770886735, + 142.34693049846092, + 132.51002333480037, + 108.96587128971876, + 57.39669142091791, + 85.1254544103699, + 122.1342568773111, + 170.14800453897098, + 0.9667745869936778, + 164.77118206030752, + 77.67607540068808, + 0.9637172808805204, + 159.27278631745818, + 93.32941075871183, + 114.31154051585622, + 0.9577271441482065, + 0.9663851340406727, + 69.18116638176265, + 145.49566595839337, + 39.99458755398874, + 151.72058228459386, + 71.71902007184255, + 0.955684788125637, + 70.8845735459765, + 
0.9659986810119839, + 26.22947505868186, + 149.5122587573231, + 62.37088691999424, + 0.9626226162613168, + 144.16390862207493, + 143.18707878361667, + 148.34680655358588, + 0.9655981786202157, + 128.6357514760558, + 0.972457638109508, + 47.97113131021637, + 0.962257594040168, + 135.91488529586792, + 0.9555101570399641, + 139.87244415060783, + 161.80374363862717, + 102.03749537949356, + 119.90228156989667, + 95.01508726085196, + 0.9618747782794568, + 97.04528669323962, + 124.83482655795, + 0.9575074351185681, + 97.4749088017089, + 143.04337002379702, + 0.9720616869548507, + 88.4343283770829, + 0.9616266920922193, + 104.03159874923712, + 102.89124420706305, + 140.2496100327507, + 143.1710058572335, + 101.42975069052237, + 128.03336431254732, + 85.69336920713639, + 0.9613543134449882, + 104.07697069101184, + 100.02889226751559, + 106.63283752921622, + 144.57311516379912, + 126.07240879815421, + 161.55730431091774, + 73.12112420438781, + 0.9589217273481213, + 142.0323058738417, + 122.36148204858885, + 0.9572538602096321, + 112.98246752660035, + 142.34355181617389, + 41.04230698700827, + 8.473685991981666, + 170.80637904469666, + 142.97081601431356, + 140.00938953689527, + 1.0308124281925075, + 163.68673254202156, + 43.76708184183388, + 152.25998257998737, + 111.67117755812934, + 145.80673033340165, + 160.967274593742, + 121.82423347589321, + 151.58970194946951, + 43.836717431814456, + 168.33474851388928, + 152.8971313956712, + 72.9024488252911, + 21.820779024213074, + 1.0392675847166184, + 147.87020150991353, + 14.897143028689484, + 19.847221148151032, + 32.431828340180246, + 57.7813822991841, + 1.0334876773950952, + 94.25591710682407, + 151.42229388821934, + 62.73982551986958, + 1.0305004930196628, + 33.431851137208405, + 162.37672318207316, + 50.321107844780045, + 120.0631996858246, + 45.868384609266045, + 150.25509288811767, + 1.03641668355906, + 82.19687660990678, + 158.74432925111145, + 1.041876067399849, + 1.0459490020450795, + 74.46636703262733, + 
159.72092018884473, + 145.89909226306747, + 151.4623812014693, + 53.96440008638893, + 159.793887362778, + 148.37554042172758, + 83.3128358383083, + 1.033330707971675, + 134.17516572064534, + 146.71192985844118, + 1.0352015128775223, + 1.030228349427348, + 173.4020929881413, + 1.0414756431813357, + 157.44806749626466, + 1.0330400451866075, + 1.0430419707188734, + 167.82243267657728, + 143.8312255273241, + 68.13449792020043, + 74.35987547428464, + 1.0410410061956523, + 144.46694632543532, + 1.0327651323294085, + 150.93003222189313, + 1.0391803120976406, + 1.0348231697568464, + 80.60319434281541, + 3.1207628480728475, + 151.16210456830606, + 1.044348655121621, + 1.0324784232146003, + 99.42447225407219, + 1.038776111100077, + 132.7893754958314, + 146.8726662885585, + 91.5964670484325, + 1.0406970130016908, + 1.0437330582244273, + 42.28479249749239, + 162.83839126288393, + 151.86715746595317, + 140.5094808302986, + 170.2080960063118, + 131.07684807335298, + 88.96862061056908, + 163.9922734476757, + 44.213460221990154, + 157.8010866400773, + 1.0382665374856965, + 139.57673454433854, + 163.7758432408245, + 102.99718171708128, + 107.60774917922078, + 159.16551335735969, + 4.119717517454783, + 160.5803771988876, + 1.0378430568380714, + 115.44357851711793, + 167.4238211695712, + 103.79633528746076, + 154.03506418556444, + 159.03692094687025, + 1.032427282609682, + 32.52187142118156, + 158.57750457420016, + 141.67055142208721, + 160.71458938698333, + 157.24106314480454, + 157.40833384009724, + 150.60022387354616, + 80.91896448664748, + 1.0430666391532655, + 160.36671183081978, + 1.0347878859497883, + 1.030293958907628, + 147.50533105226975, + 152.4875796332852, + 160.31618334728296, + 1.0321960030040243, + 156.27786873980907, + 1.0375321120324796, + 160.4885833961135, + 111.93639192506156, + 172.24078944530834, + 145.3287404427809, + 1.0880735082543522, + 0.7878037099331565, + 1.0864480413552253, + 158.40272521901554, + 155.28074693629694, + 87.44836891077435, + 
155.54752700738993, + 8.411714256180034, + 19.862348977650086, + 18.35501539895094, + 163.43115890247273, + 157.8836387689617, + 143.68115882020365, + 13.66284888141665, + 160.7292101444063, + 155.01427847930626, + 150.31432418581997, + 60.81928120084204, + 145.3926688034953, + 145.30123372502598, + 144.98393507215505, + 35.18970147025731, + 153.82777107784506, + 164.23228082777166, + 145.88278452124027, + 20.46954502286418, + 162.0360370063431, + 150.43884956663888, + 142.41966677764808, + 53.07266306010992, + 93.50532435009316, + 150.1523142285131, + 152.33361454488718, + 0.787209685332213, + 159.9704569183677, + 147.66926829001207, + 116.31853611522087, + 1.0774618364125428, + 164.22843982362895, + 103.98183305676696, + 152.52952151222078, + 90.29170862480086, + 1.0862563048060565, + 118.53710658997939, + 90.19968385647951, + 1.0770089089852286, + 59.61890934626195, + 134.6160499563656, + 147.6477708991394, + 0.7870687303401608, + 171.47874197919785, + 165.99226887272076, + 83.5080960308232, + 151.55871514895225, + 154.9605789451006, + 154.1866343413245, + 152.69380076313175, + 78.46281024467942, + 165.86076250975873, + 74.6681179766703, + 1.0816751050475706, + 1.0766059511099162, + 1.091025249207128, + 151.61539901543878, + 165.44997737983917, + 41.75139614518547, + 7.388178711598297, + 1.0848156120039962, + 121.93333712957133, + 1.0761843006794773, + 1.0905643992997778, + 1.075801598924969, + 151.57738041471748, + 87.38815331117043, + 154.57766374016802, + 153.3353461131615, + 81.63500323812801, + 153.88446167160095, + 1.0900521500553328, + 151.65017721794743, + 118.01864188919838, + 40.91238161739305, + 1.0860502574663193, + 103.72384951664927, + 1.081356861209966, + 97.70962808524236, + 153.30715221364136, + 1.0754011583086598, + 149.80888083526256, + 0.7870161596702333, + 95.11588780527678, + 1.0824954483404, + 159.0909827809553, + 176.4607736857684, + 160.28483143240214, + 108.14616986068252, + 150.64495962435973, + 49.52814184554448, + 152.62988882612356, 
+ 161.40766773375927, + 1.0809227984149974, + 150.0601857860385, + 156.59538854909297, + 1.072689949598873, + 152.81205676706514, + 0.7868728895290079, + 1.0857058881477388, + 143.4694111503961, + 159.8022996153893, + 144.9300712596306, + 1.089757442067835, + 160.11340438331118, + 132.79626776787333, + 50.38448421210805, + 162.42137561579725, + 5.284417747700096, + 1.0805116052247719, + 145.73004732672527, + 152.59775665509528, + 151.63963715309214, + 155.59850627759238, + 104.41906641764095, + 169.89843638971865, + 158.37348320912855, + 1.0800687750785642, + 149.5543247935483, + 156.60712632191078, + 159.6236209903005, + 163.09782416725415, + 98.6328505039743, + 53.85030009718123, + 61.00364034342645, + 142.05505100830447, + 16.614192215593924, + 16.582992843952567, + 154.47389623241062, + 150.9101058615698, + 90.42581449278116, + 159.53144787295545, + 1.1253578624639393, + 38.131573465314304, + 163.695564516746, + 1.1316048014866884, + 1.1159054012388119, + 152.5411314388352, + 111.46983099035936, + 168.09092507016115, + 36.13058934697122, + 1.1197910040154087, + 142.05200673526159, + 78.09074458708291, + 157.63502242964265, + 162.03218881710688, + 80.0426703374817, + 164.26384362727924, + 1.1222030060702506, + 123.66591496581279, + 35.97653651285592, + 112.29012034978103, + 62.69199102131731, + 54.806250360805244, + 25.5070616004963, + 187.35211092519995, + 1.1217003700976045, + 145.32823111763997, + 145.9166945337544, + 1.1301150192515073, + 1.1155615329029929, + 154.1440872758632, + 88.5586247200791, + 161.60021419086345, + 1.121175594981433, + 1.1194211460505468, + 1.1184405197027008, + 17.60883897305572, + 174.5134372600641, + 160.45245655990746, + 0.8166461657826791, + 160.30564706046655, + 75.44218827386376, + 108.54547521267394, + 150.49806131791814, + 153.04150189313873, + 150.40965861420275, + 125.63958433236749, + 103.12983995128599, + 164.17811633308784, + 175.52459662743908, + 121.09400696724566, + 1.1180201884652679, + 166.27365155489332, + 
76.42072368500718, + 146.90227613796094, + 110.70803654586257, + 171.79379505267624, + 158.67043375351244, + 147.76280504628218, + 1.1175125336867027, + 156.89279233182117, + 158.0652757498143, + 3.343340016597665, + 49.779892185016756, + 173.36352621939335, + 162.4424006508065, + 49.49838297370054, + 173.86161362836785, + 128.03796900006384, + 155.68412076198788, + 137.87250806830016, + 0.8165665367853991, + 1.1298869482124425, + 159.26492424008396, + 144.56503533715272, + 120.55988523349636, + 103.3722869693168, + 93.5099865200851, + 1.1209786631771586, + 77.46613714395933, + 153.76092950699294, + 154.6841596167678, + 146.17966014780984, + 1.1171782471429414, + 172.55763339822, + 174.99117233418923, + 157.46750414970307, + 0.816424346577868, + 1.1274076620999394, + 157.20421311127953, + 148.14748951821153, + 149.14697533706817, + 158.95389608842163, + 107.97531407241593, + 151.43640801793904, + 0.8162494126902972, + 157.001545737823, + 163.80848036600747, + 158.89222886851297, + 147.3506488140666, + 1.1133445391411512, + 153.68284200756125, + 151.8834177926471, + 0.8160708323289537, + 136.43010052273473, + 162.61423354524993, + 133.02570532111102, + 137.86961562609895, + 133.04901735700332, + 154.9473181767413, + 123.93507737689346, + 50.83204611520686, + 1.1270195451857552, + 161.44093109510388, + 1.1195708009057284, + 169.64321510449827, + 129.6089117511605, + 1.115946234318508, + 60.34621183821726, + 101.29881161208688, + 160.90062346193574, + 68.21783931047266, + 154.41899008326143, + 174.07515811573973, + 159.677356250512, + 159.67728671666873, + 9.799978913114145, + 94.27732771999344, + 9.60214441506233, + 9.392617132404062, + 155.0463449410919, + 71.59183194783785, + 87.06866691125934, + 157.13349078706932, + 9.759436169606595, + 110.44611293008246, + 171.1626230380253, + 89.85437363374635, + 107.09248087440588, + 126.74466225447065, + 10.009602057141537, + 9.177527712733529, + 99.62101604875475, + 102.08957950312852, + 99.71118980213345, + 
175.89684251359242, + 10.182586030301673, + 171.66004511817064, + 148.24171173832124, + 164.5397331583309, + 158.71440804719356, + 86.55832242496149, + 148.610396831239, + 9.368509685917438, + 10.136730874821687, + 173.75231796226313, + 168.18072479771067, + 125.24195815296933, + 151.26149869648452, + 130.6197551882794, + 174.23395009631983, + 170.65779238484487, + 148.1296912550562, + 131.11524857886738, + 177.99920893337523, + 167.5808938510404, + 158.60603057794222, + 93.6097533900039, + 9.587874811966838, + 9.33150536695352, + 141.2149869829261, + 117.88939818622781, + 133.45305575288236, + 156.7555665933833, + 166.2992810974147, + 8.762060933047495, + 147.60747975090285, + 125.39702986854361, + 126.29551477783566, + 133.3684883476696, + 169.84463465109542, + 9.160889914093532, + 9.75005007182584, + 91.25897804548956, + 171.15603143396729, + 137.11852945151446, + 119.70724002664221, + 157.24098320319794, + 144.12095644229885, + 131.6771710258767, + 164.00686483698965, + 120.71707004833677, + 9.560442320047777, + 9.299425721987362, + 147.15785637439873, + 170.64643820040646, + 181.465984660646, + 9.098182272291353, + 131.1874185050373, + 100.18931014367688, + 166.410568062446, + 135.47929425317378, + 151.28962080931584, + 169.34032285811423, + 24.163402926519016, + 130.3951109594527, + 133.85939391500654, + 91.24306358260182, + 183.98754016151273, + 9.265911045247684, + 147.14244062731618, + 165.66255588662568, + 10.016411965833509, + 9.03577437369573, + 9.70728564931857, + 122.8213056543772, + 9.533743128327513, + 143.45968503667223, + 155.32709571771161, + 141.06113578797667, + 145.47889938004263, + 167.35960747366406, + 138.12559014567552, + 116.75045269404782, + 9.4953352412109, + 170.07468770066882, + 172.07629747140533, + 155.39552706715028, + 9.96413703689447, + 144.65169143749998, + 169.142417216155, + 112.76319305930042, + 166.30777737368877, + 123.90774653996388, + 132.11710295459207, + 8.76790539542995, + 9.923343461828647, + 8.972068632607057, + 
152.30472233633313, + 99.16466897297458, + 147.39899220637375, + 167.5046285318718, + 158.30798003347417, + 176.8098098029006, + 169.2000502496997, + 8.908205534006084, + 147.04973272590675, + 6.01978171115786, + 12.908947280828421, + 161.20885865837164, + 154.1041738397025, + 12.160112764259807, + 183.9484777068351, + 13.885015446203202, + 103.27604069377547, + 68.56270954501308, + 153.39985703870556, + 188.5641680250544, + 151.39232245655768, + 12.050089294787492, + 183.13047361941102, + 114.09672566233004, + 109.88264169611061, + 118.56400136868983, + 130.5787804713655, + 131.9836940557652, + 139.4770525169641, + 172.40959805680149, + 153.8901427211502, + 13.813000129286806, + 115.17874112168954, + 106.36053561017184, + 174.23315480590185, + 169.50614560985875, + 163.7261937236369, + 11.957456410326769, + 13.191395790527517, + 152.6369175652841, + 164.00689931377138, + 124.1532871601288, + 158.7919901602378, + 126.3012920481913, + 110.01300143579287, + 166.51966455859474, + 161.16730547199728, + 137.84358628055278, + 123.59630141121379, + 109.08989919709578, + 113.66676604314083, + 150.33107775824936, + 155.76683850736808, + 180.51837524079605, + 172.49809361722134, + 171.2412543685433, + 146.52428847969958, + 125.58622347928333, + 132.3599749727434, + 14.245461215559237, + 11.847782329285673, + 127.588030395774, + 169.03076884237493, + 160.74766094154035, + 141.23866796872034, + 111.11477769019474, + 12.898248376303878, + 164.38673745815677, + 11.791663338710885, + 148.00296428763687, + 140.16323874251623, + 13.719781371654578, + 14.365561456573998, + 89.32211257795143, + 135.76622159161508, + 175.86032158817434, + 128.11591032818185, + 141.79940543502275, + 13.157166878859636, + 176.72190145631947, + 146.35619986228915, + 98.02869268663022, + 12.811778712246966, + 178.01632978541917, + 11.747222913476566, + 173.95822172954252, + 172.47660061508643, + 13.568556768695913, + 135.1198744591959, + 122.01181780569887, + 165.54722192942938, + 176.91918611654273, + 
135.48421254380435, + 152.73279297531656, + 183.54215600068494, + 94.73349204436757, + 165.3454353780521, + 84.12230571074015, + 12.73103339619439, + 14.20676756417383, + 140.07559949201985, + 145.7554344839868, + 148.14304437101455, + 144.7060493293736, + 173.19895239158285, + 107.2396185797313, + 12.648044488473259, + 173.48665402770794, + 161.86284234640354, + 144.49958539317737, + 183.33130603616738, + 149.75316477343017, + 153.29421953478465, + 112.95288962968242, + 12.55136585792316, + 173.4614521532605, + 136.4085114015674, + 173.79337782013562, + 186.83123762499903, + 13.449155280150386, + 155.12272657027916, + 108.84862656043424, + 150.6559527232612, + 161.90374448992205, + 169.6874597897037, + 185.13622778245175, + 13.139280888748093, + 148.81997444276612, + 162.826727139871, + 134.34831771089154, + 12.478143605322522, + 14.14151231689335, + 149.25750191310448, + 167.44106770036936, + 170.90279518575983, + 157.84394143590183, + 8.296884066877869, + 7.386407378393029, + 8.177010477741181, + 60.00030364994894, + 137.35670186784466, + 151.41307554547254, + 150.53265674110258, + 141.36182090288565, + 154.42392832445645, + 14.407560995301617, + 113.05280253165802, + 8.771319013508563, + 7.756832533799784, + 7.915167569814742, + 172.68555416184375, + 9.246590778625794, + 112.20973585271739, + 7.374925625154626, + 111.78749154901601, + 119.95753341645725, + 154.77722687049408, + 7.888580292543184, + 149.23559365306315, + 175.52342653145377, + 158.75097413261327, + 114.24446296440473, + 167.67413927012774, + 139.92437779140218, + 153.0991583611961, + 159.7319334713746, + 175.77990646480632, + 175.13373633806003, + 16.006580912678864, + 7.364167548538875, + 8.116426613758023, + 153.43476931019558, + 172.67401521610824, + 137.25039229504623, + 23.94869767384389, + 175.71290886984852, + 160.1562681126053, + 7.350730708586878, + 168.18537884347361, + 177.44645900467552, + 144.69151322813394, + 7.870439881886282, + 158.459275555328, + 137.04634114797315, + 
8.28967641118504, + 89.24830243345173, + 138.72903724038372, + 144.82472911115988, + 132.17749274525417, + 174.5610183503014, + 187.22444190737485, + 9.045633413519324, + 144.6870829429866, + 94.13484353638168, + 8.073564944014072, + 8.763589893125177, + 144.5449141719037, + 7.849459359846659, + 172.45229931306682, + 148.0354241542905, + 164.34364023912008, + 96.62180529545114, + 168.59818307908336, + 164.06742901634536, + 7.6595131274639785, + 182.08298206042065, + 174.01829936632802, + 168.2984620634042, + 8.26924767633141, + 127.74401148092224, + 152.9540589411171, + 149.70999922953388, + 138.05370099020382, + 163.98282164395957, + 109.96934554907047, + 8.996791001407733, + 144.24233837720223, + 98.41465480385448, + 176.36506372732458, + 8.593325518249502, + 139.963285147378, + 165.79681947035346, + 175.45780184642518, + 8.96329041536684, + 125.14956588858662, + 7.352306186940013, + 177.70817869555052, + 177.69694242040705, + 165.7171562780864, + 159.43363801309405, + 181.71342376901586, + 8.928314868453635, + 147.12698777390037, + 98.19159288935101, + 8.060910754944894, + 8.549652936388815, + 141.01923634310606, + 163.73524549575265, + 160.94825111954881, + 163.89847325356007, + 158.8883048029801, + 101.49136858702906, + 175.66290061319754, + 149.61494347618603, + 167.11717553963226, + 172.02372197225566, + 187.6631035218658, + 8.900520682145078, + 171.61286914605415, + 115.54311347996163, + 169.3438620700623, + 158.53427223473756, + 7.62769842231285, + 7.851152107489017, + 166.35296262059944, + 8.869163971328895, + 174.27048762162556, + 105.1060664928901, + 176.44597164262635, + 183.93578989094215, + 155.56348841330345, + 165.030948332619, + 24.021020986288374, + 158.54498277925777 ] -} +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/gpt/gpt_dynamic_inference_tp1_pp1_dp8_583m_throughputtest_zmq/model_config.yaml b/tests/functional_tests/test_cases/gpt/gpt_dynamic_inference_tp1_pp1_dp8_583m_throughputtest_zmq/model_config.yaml 
index aa4fde5e512..4b3fe856099 100644 --- a/tests/functional_tests/test_cases/gpt/gpt_dynamic_inference_tp1_pp1_dp8_583m_throughputtest_zmq/model_config.yaml +++ b/tests/functional_tests/test_cases/gpt/gpt_dynamic_inference_tp1_pp1_dp8_583m_throughputtest_zmq/model_config.yaml @@ -44,7 +44,8 @@ MODEL_ARGS: --inference-dynamic-batching-buffer-size-gb: 20 --inference-dynamic-batching-cuda-graph-max-tokens: 2048 --cuda-graph-impl: local - --cuda-graph-scope: full + --cuda-graph-scope: full_iteration + --no-check-for-nan-in-loss-and-grad: true --disable-chunked-prefill: true --dist-ckpt-strictness: log_unexpected --inference-ckpt-non-strict: true # To handle the extra_state errors diff --git a/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_gb_200_release/model_config.yaml b/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_gb_200_release/model_config.yaml new file mode 100644 index 00000000000..ced409e5b1e --- /dev/null +++ b/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_gb_200_release/model_config.yaml @@ -0,0 +1,168 @@ +# The proxy model is used for local code quality check. +# The proxy model should contain all the necessary components and settings but fewer parameters. 
+ENV_VARS: + TORCH_NCCL_AVOID_RECORD_STREAMS: 0 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 + PYTORCH_CUDA_ALLOC_CONF: expandable_segments:True + NCCL_NVLS_ENABLE: 0 + NVTE_FUSED_ATTN: 1 + NVTE_NORM_FWD_USE_CUDNN: 1 + NVTE_NORM_BWD_USE_CUDNN: 1 + PYTHONWARNINGS: ignore + NCCL_DEBUG: VERSION + NON_DETERMINSTIC_RESULTS: 1 + NVSHMEM_IB_ENABLE_IBGDA: 0 + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NUM_OF_HYBRID_EP_RANKS_PER_NVLINK_DOMAIN: 16 + USE_MNNVL: 1 +TEST_TYPE: "release" +MODEL_ARGS: + # Distributed args + --distributed-timeout-minutes: 60 + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 4 + --pipeline-model-parallel-layout: Et*2\\|\\(tt\\|\\)*5t\\|tmL # Et*2|(tt|)*5t|tmL + --expert-model-parallel-size: 16 + --context-parallel-size: 1 + --expert-tensor-parallel-size: 1 + --use-distributed-optimizer: true + --overlap-grad-reduce: true + --overlap-param-gather: true + + # Training args + --use-mcore-models: true + --sequence-parallel: true + --use-flash-attn: true + --disable-bias-linear: true + --micro-batch-size: 1 + --global-batch-size: 512 + --train-samples: 24414062 + --exit-duration-in-mins: 220 + --no-check-for-nan-in-loss-and-grad: true + --cross-entropy-loss-fusion: true + --cross-entropy-fusion-impl: te + --manual-gc: true + --manual-gc-interval: 10 + + # Transformer Engine args + --transformer-impl: transformer_engine + + # Data args + --seq-length: 4096 + --data-cache-path: ${DATA_CACHE_PATH} + --tokenizer-type: GPTSentencePieceTokenizer + --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model + --data-path: $DATA_BLEND + --split: 99,1,0 + --no-mmap-bin-files: true + --no-create-attention-mask-in-dataloader: true + --num-workers: 6 + + # Add network size args + --num-layers: 14 # original 61 layers + --hidden-size: 7168 + --ffn-hidden-size: 18432 + --num-attention-heads: 128 + --kv-channels: 128 + --max-position-embeddings: 4096 + --position-embedding-type: rope + --rotary-base: 10000 + --make-vocab-size-divisible-by: 3232 + --normalization: 
RMSNorm + --norm-epsilon: 1e-6 + --swiglu: true + --untie-embeddings-and-output-weights: true + --multi-latent-attention: true + --mtp-num-layers: 1 + --mtp-loss-scaling-factor: 0.1 + + # Add regularization args + --attention-dropout: 0.0 + --hidden-dropout: 0.0 + --clip-grad: 1.0 + --weight-decay: 0.1 + --qk-layernorm: true + + # Add learning rate args + --lr-decay-samples: 24413696 + --lr-warmup-samples: 1536000 + --lr-warmup-init: 1e-7 + --lr: 1e-5 + --min-lr: 1e-6 + --lr-decay-style: cosine + --adam-beta1: 0.9 + --adam-beta2: 0.95 + + # Add MoE args + --num-experts: 64 # local 4 + 1 shared, EP16 + --moe-layer-freq: ([0]*3+[1]*11) + --moe-ffn-hidden-size: 2048 + --moe-shared-expert-intermediate-size: 2048 + --moe-router-load-balancing-type: seq_aux_loss + --moe-router-topk: 8 + --moe-token-dispatcher-type: flex + --moe-flex-dispatcher-backend: hybridep + --moe-router-pre-softmax: true + --moe-grouped-gemm: true + --moe-aux-loss-coeff: 1e-4 + --moe-router-group-topk: 4 + --moe-router-num-groups: 8 + --moe-router-topk-scaling-factor: 2.5 + --moe-router-score-function: sigmoid + --moe-router-enable-expert-bias: true + --moe-router-bias-update-rate: 1e-3 + --moe-router-dtype: fp32 + --moe-permute-fusion: true + + # Add MLA args + --q-lora-rank: 1536 + --kv-lora-rank: 512 + --qk-head-dim: 128 + --qk-pos-emb-head-dim: 64 + --v-head-dim: 128 + --rotary-scaling-factor: 40 + --mscale: 1.0 + --mscale-all-dim: 1.0 + + # Add validation args + --eval-iters: 32 + --eval-interval: 200 + + # Add checkpointing args + --auto-detect-ckpt-format: + true + # Add checkpointing args + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --save-interval: 500 + --save-retain-interval: 10000 + --dist-ckpt-strictness: log_all + + # Add initialization args + --init-method-std: 0.02 + + # Add logging args + --log-timers-to-tensorboard: true + --log-memory-to-tensorboard: true + --log-num-zeros-in-grad: true + --log-params-norm: true + --log-validation-ppl-to-tensorboard: true 
+ --log-throughput: true + --log-interval: 1 + --logging-level: 40 + --tensorboard-dir: ${TENSORBOARD_PATH} + --wandb-project: megatron-core-release-runs + --wandb-entity: adlr + --wandb-exp-name: ${WANDB_EXPERIMENT} + --wandb-save-dir: ${WANDB_SAVE_PATH} + + # Add mixed precision args + --bf16: true + + # enable experimental + --enable-experimental: true +METRICS: + - "iteration-time" + - "lm loss" + - "mem-allocated-bytes" + - "mem-max-allocated-bytes" diff --git a/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_gb_200_release_sm/model_config.yaml b/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_gb_200_release_sm/model_config.yaml new file mode 100644 index 00000000000..a6c65afb712 --- /dev/null +++ b/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_gb_200_release_sm/model_config.yaml @@ -0,0 +1,171 @@ +# The proxy model is used for local code quality check. +# The proxy model should contain all the necessary components and settings but fewer parameters. 
+ENV_VARS: + TORCH_NCCL_AVOID_RECORD_STREAMS: 0 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 1 + PYTORCH_CUDA_ALLOC_CONF: expandable_segments:True + NCCL_NVLS_ENABLE: 0 + NVTE_FUSED_ATTN: 1 + NVTE_NORM_FWD_USE_CUDNN: 1 + NVTE_NORM_BWD_USE_CUDNN: 1 + PYTHONWARNINGS: ignore + NCCL_DEBUG: VERSION + NON_DETERMINSTIC_RESULTS: 1 + NVSHMEM_IB_ENABLE_IBGDA: 0 + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NUM_OF_HYBRID_EP_RANKS_PER_NVLINK_DOMAIN: 16 + USE_MNNVL: 1 +TEST_TYPE: "release" +MODEL_ARGS: + # Distributed args + --distributed-timeout-minutes: 60 + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 4 + --pipeline-model-parallel-layout: Et*2\\|\\(tt\\|\\)*5t\\|tmL # Et*2|(tt|)*5t|tmL + --expert-model-parallel-size: 16 + --context-parallel-size: 1 + --expert-tensor-parallel-size: 1 + --use-distributed-optimizer: true + --overlap-grad-reduce: true + --overlap-param-gather: true + + # Training args + --use-mcore-models: true + --sequence-parallel: true + --use-flash-attn: true + --disable-bias-linear: true + --micro-batch-size: 1 + --global-batch-size: 512 + --train-samples: 24414062 + --exit-duration-in-mins: 220 + --no-check-for-nan-in-loss-and-grad: true + --cross-entropy-loss-fusion: true + --cross-entropy-fusion-impl: te + --manual-gc: true + --manual-gc-interval: 10 + + # Transformer Engine args + --transformer-impl: transformer_engine + + # Data args + --seq-length: 4096 + --data-cache-path: ${DATA_CACHE_PATH} + --tokenizer-type: GPTSentencePieceTokenizer + --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model + --data-path: $DATA_BLEND + --split: 99,1,0 + --no-mmap-bin-files: true + --no-create-attention-mask-in-dataloader: true + --num-workers: 6 + + # Add network size args + --num-layers: 14 # original 61 layers + --hidden-size: 7168 + --ffn-hidden-size: 18432 + --num-attention-heads: 128 + --kv-channels: 128 + --max-position-embeddings: 4096 + --position-embedding-type: rope + --rotary-base: 10000 + --make-vocab-size-divisible-by: 3232 + --normalization: 
RMSNorm + --norm-epsilon: 1e-6 + --swiglu: true + --untie-embeddings-and-output-weights: true + --multi-latent-attention: true + --mtp-num-layers: 1 + --mtp-loss-scaling-factor: 0.1 + + # Add regularization args + --attention-dropout: 0.0 + --hidden-dropout: 0.0 + --clip-grad: 1.0 + --weight-decay: 0.1 + --qk-layernorm: true + + # Add learning rate args + --lr-decay-samples: 24413696 + --lr-warmup-samples: 1536000 + --lr-warmup-init: 1e-7 + --lr: 1e-5 + --min-lr: 1e-6 + --lr-decay-style: cosine + --adam-beta1: 0.9 + --adam-beta2: 0.95 + + # Add MoE args + --num-experts: 64 # local 4 + 1 shared, EP16 + --moe-layer-freq: ([0]*3+[1]*11) + --moe-ffn-hidden-size: 2048 + --moe-shared-expert-intermediate-size: 2048 + --moe-router-load-balancing-type: seq_aux_loss + --moe-router-topk: 8 + --moe-token-dispatcher-type: flex + --moe-flex-dispatcher-backend: hybridep + --moe-router-pre-softmax: true + --moe-grouped-gemm: true + --moe-aux-loss-coeff: 1e-4 + --moe-router-group-topk: 4 + --moe-router-num-groups: 8 + --moe-router-topk-scaling-factor: 2.5 + --moe-router-score-function: sigmoid + --moe-router-enable-expert-bias: true + --moe-router-bias-update-rate: 1e-3 + --moe-router-dtype: fp32 + --moe-permute-fusion: true + + # Add MLA args + --q-lora-rank: 1536 + --kv-lora-rank: 512 + --qk-head-dim: 128 + --qk-pos-emb-head-dim: 64 + --v-head-dim: 128 + --rotary-scaling-factor: 40 + --mscale: 1.0 + --mscale-all-dim: 1.0 + + # Add validation args + --eval-iters: 32 + --eval-interval: 200 + + # Add checkpointing args + --auto-detect-ckpt-format: + true + # Checkpoint save/load paths and intervals + --save: ${CHECKPOINT_SAVE_PATH} + --load: ${CHECKPOINT_LOAD_PATH} + --save-interval: 500 + --save-retain-interval: 10000 + --dist-ckpt-strictness: log_all + + # Add initialization args + --init-method-std: 0.02 + + # Add logging args + --log-timers-to-tensorboard: true + --log-memory-to-tensorboard: true + --log-num-zeros-in-grad: true + --log-params-norm: true + --log-validation-ppl-to-tensorboard: true
+ --log-throughput: true + --log-interval: 1 + --logging-level: 40 + --tensorboard-dir: ${TENSORBOARD_PATH} + --wandb-project: megatron-core-release-runs + --wandb-entity: adlr + --wandb-exp-name: ${WANDB_EXPERIMENT} + --wandb-save-dir: ${WANDB_SAVE_PATH} + + # Add mixed precision args + --bf16: true + + # enable experimental + --enable-experimental: true + + --exit-interval: 9536 + +METRICS: + - "iteration-time" + - "lm loss" + - "mem-allocated-bytes" + - "mem-max-allocated-bytes" diff --git a/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_release/model_config.yaml b/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_release/model_config.yaml index c16fedc7860..7bc14780fb3 100644 --- a/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_release/model_config.yaml +++ b/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_release/model_config.yaml @@ -12,11 +12,12 @@ ENV_VARS: NCCL_DEBUG: VERSION NON_DETERMINSTIC_RESULTS: 1 NVSHMEM_IB_ENABLE_IBGDA: 0 + CUDA_DEVICE_MAX_CONNECTIONS: 1 TEST_TYPE: "release" MODEL_ARGS: # Distributed args --distributed-timeout-minutes: 60 - --tensor-model-parallel-size: 1 + --tensor-model-parallel-size: 2 --pipeline-model-parallel-size: 4 --pipeline-model-parallel-layout: Et*2\\|\\(tt\\|\\)*5t\\|tmL # Et*2|(tt|)*5t|tmL --expert-model-parallel-size: 16 @@ -47,8 +48,8 @@ MODEL_ARGS: # Data args --seq-length: 4096 --data-cache-path: ${DATA_CACHE_PATH} - --tokenizer-type: HuggingFaceTokenizer - --tokenizer-model: ${TOKENIZER_PATH} + --tokenizer-type: GPTSentencePieceTokenizer + --tokenizer-model: ${DATA_PATH}/utils/nemotron_2_256k.model --data-path: $DATA_BLEND --split: 99,1,0 --no-mmap-bin-files: true @@ -81,12 +82,11 @@ MODEL_ARGS: --qk-layernorm: true # Add learning rate args - --lr-decay-samples: 584765624 + --lr-decay-samples: 24413696 --lr-warmup-samples: 1536000 - # Learning rate scaled down from 7.3e-6 
(DeepSeek-V3 technical report, GBS=15360) to 3.9e-6 (GBS=8192) - --lr-warmup-init: 3.9e-7 - --lr: 3.9e-6 - --min-lr: 3.9e-7 + --lr-warmup-init: 1e-7 + --lr: 1e-5 + --min-lr: 1e-6 --lr-decay-style: cosine --adam-beta1: 0.9 --adam-beta2: 0.95 @@ -127,8 +127,6 @@ MODEL_ARGS: --eval-interval: 200 # Add checkpointing args - --no-load-optim: true - --no-load-rng: true --auto-detect-ckpt-format: true # Add checkpointing args @@ -152,6 +150,7 @@ MODEL_ARGS: --logging-level: 40 --tensorboard-dir: ${TENSORBOARD_PATH} --wandb-project: megatron-core-release-runs + --wandb-entity: adlr --wandb-exp-name: ${WANDB_EXPERIMENT} --wandb-save-dir: ${WANDB_SAVE_PATH} diff --git a/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etc1cp1_release_sm/golden_values_dev_dgx_h100.json b/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_release_sm/golden_values_dev_dgx_h100.json similarity index 100% rename from tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etc1cp1_release_sm/golden_values_dev_dgx_h100.json rename to tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_release_sm/golden_values_dev_dgx_h100.json diff --git a/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etc1cp1_release_sm/model_config.yml b/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_release_sm/model_config.yaml similarity index 97% rename from tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etc1cp1_release_sm/model_config.yml rename to tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_release_sm/model_config.yaml index 9c7d2496e2a..cc8f2b814c2 100644 --- a/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etc1cp1_release_sm/model_config.yml +++ b/tests/functional_tests/test_cases/mixtral/deepseekv3_proxy_flex_tp1pp4emp16etp1cp1_release_sm/model_config.yaml @@ 
-13,7 +13,7 @@ ENV_VARS: NON_DETERMINSTIC_RESULTS: 1 NVSHMEM_IB_ENABLE_IBGDA: 0 CUDA_DEVICE_MAX_CONNECTIONS: 1 -TEST_TYPE: 'release' +TEST_TYPE: "release" MODEL_ARGS: # Distributed args --distributed-timeout-minutes: 60 @@ -150,6 +150,7 @@ MODEL_ARGS: --logging-level: 40 --tensorboard-dir: ${TENSORBOARD_PATH} --wandb-project: megatron-core-release-runs + --wandb-entity: adlr --wandb-exp-name: ${WANDB_EXPERIMENT} --wandb-save-dir: ${WANDB_SAVE_PATH} @@ -160,7 +161,7 @@ MODEL_ARGS: --enable-experimental: true --exit-interval: 9536 METRICS: - - 'iteration-time' - - 'lm loss' - - 'mem-allocated-bytes' - - 'mem-max-allocated-bytes' + - "iteration-time" + - "lm loss" + - "mem-allocated-bytes" + - "mem-max-allocated-bytes" diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/model_config.yaml b/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/model_config.yaml index e2b8b212900..efe39998065 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/model_config.yaml +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x22b_tp2pp8ep8vpp1_release/model_config.yaml @@ -92,6 +92,7 @@ MODEL_ARGS: --log-interval: 1 --tensorboard-dir: ${TENSORBOARD_PATH} --wandb-project: megatron-core-release-runs + --wandb-entity: adlr --wandb-exp-name: ${WANDB_EXPERIMENT} --wandb-save-dir: ${WANDB_SAVE_PATH} # Add mixed precision args diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/model_config.yaml b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/model_config.yaml index a02fbe99537..f4476c712f2 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/model_config.yaml +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release/model_config.yaml @@ -92,6 +92,7 @@ MODEL_ARGS: --log-interval: 1 --tensorboard-dir: ${TENSORBOARD_PATH} --wandb-project: 
megatron-core-release-runs + --wandb-entity: adlr --wandb-exp-name: ${WANDB_EXPERIMENT} --wandb-save-dir: ${WANDB_SAVE_PATH} # Add mixed precision args diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release_sm/model_config.yaml b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release_sm/model_config.yaml index b43a1227ea0..cfeb7709839 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release_sm/model_config.yaml +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_alltoall_tp2pp4ep4_release_sm/model_config.yaml @@ -92,6 +92,7 @@ MODEL_ARGS: --log-interval: 1 --tensorboard-dir: ${TENSORBOARD_PATH} --wandb-project: megatron-core-release-runs + --wandb-entity: adlr --wandb-exp-name: ${WANDB_EXPERIMENT} --wandb-save-dir: ${WANDB_SAVE_PATH} # Add mixed precision args diff --git a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/model_config.yaml b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/model_config.yaml index 1fdad2a5c70..29dcefadf0e 100644 --- a/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/model_config.yaml +++ b/tests/functional_tests/test_cases/mixtral/mixtral_8x7b_tp1pp4ep8vpp8_release/model_config.yaml @@ -94,6 +94,7 @@ MODEL_ARGS: --log-interval: 1 --tensorboard-dir: ${TENSORBOARD_PATH} --wandb-project: megatron-core-release-runs + --wandb-entity: adlr --wandb-exp-name: ${WANDB_EXPERIMENT} --wandb-save-dir: ${WANDB_SAVE_PATH} # Add mixed precision args diff --git a/tests/functional_tests/test_cases/moe/gpt3_mcore_te_tp2_pp2_ep4_etp1_mtp_resume_torch_dist_fp8/golden_values_dev_dgx_h100.json b/tests/functional_tests/test_cases/moe/gpt3_mcore_te_tp2_pp2_ep4_etp1_mtp_resume_torch_dist_fp8/golden_values_dev_dgx_h100.json index ef8ee741272..a76d8667ec6 100644 --- 
a/tests/functional_tests/test_cases/moe/gpt3_mcore_te_tp2_pp2_ep4_etp1_mtp_resume_torch_dist_fp8/golden_values_dev_dgx_h100.json +++ b/tests/functional_tests/test_cases/moe/gpt3_mcore_te_tp2_pp2_ep4_etp1_mtp_resume_torch_dist_fp8/golden_values_dev_dgx_h100.json @@ -2,109 +2,343 @@ "lm loss": { "start_step": 1, "end_step": 50, - "step_interval": 5, + "step_interval": 1, "values": { - "1": 11.0475, - "5": 9.43078, - "10": 8.89238, - "15": 7.93732, - "20": 7.77942, - "25": 7.61408, - "30": 7.57234, - "35": 7.15189, - "40": 7.48085, - "45": 7.12056, - "50": 6.96054 + "1": 11.0474, + "2": 11.03765, + "3": 9.6074, + "4": 9.2648, + "5": 9.42291, + "6": 9.09511, + "7": 9.12753, + "8": 8.75686, + "9": 8.61627, + "10": 8.89295, + "11": 8.37933, + "12": 8.39932, + "13": 8.32626, + "14": 7.81437, + "15": 7.93661, + "16": 7.99492, + "17": 7.95458, + "18": 7.67733, + "19": 8.07234, + "20": 7.78815, + "21": 7.48342, + "22": 7.48177, + "23": 7.34879, + "24": 7.34465, + "25": 7.61117, + "26": 7.01605, + "27": 7.54878, + "28": 7.26655, + "29": 7.43507, + "30": 7.56529, + "31": 7.32669, + "32": 7.50645, + "33": 7.5577, + "34": 7.60977, + "35": 7.14607, + "36": 7.00597, + "37": 7.34071, + "38": 7.11796, + "39": 7.46649, + "40": 7.47443, + "41": 7.41032, + "42": 7.17365, + "43": 7.16495, + "44": 7.34265, + "45": 7.10918, + "46": 6.83934, + "47": 7.22335, + "48": 7.05732, + "49": 7.53394, + "50": 6.95951 } }, "num-zeros": { "start_step": 1, "end_step": 50, - "step_interval": 5, + "step_interval": 1, "values": { - "1": 38802620.0, - "5": 243556240.0, - "10": 716187584.0, - "15": 614358336.0, - "20": 677963584.0, - "25": 736321856.0, - "30": 505223648.0, - "35": 548946176.0, - "40": 412329664.0, - "45": 376634624.0, - "50": 205546672.0 + "1": 38802536.0, + "2": 38543540.0, + "3": 38739408.0, + "4": 273756736.0, + "5": 205853584.0, + "6": 284244640.0, + "7": 652227968.0, + "8": 790994816.0, + "9": 762295424.0, + "10": 665870592.0, + "11": 618336384.0, + "12": 639816192.0, + "13": 
699169600.0, + "14": 620502464.0, + "15": 623699456.0, + "16": 847396864.0, + "17": 601834432.0, + "18": 642855744.0, + "19": 668078912.0, + "20": 574651008.0, + "21": 608590080.0, + "22": 599821504.0, + "23": 558380672.0, + "24": 688014720.0, + "25": 500623296.0, + "26": 532887808.0, + "27": 506526976.0, + "28": 450900800.0, + "29": 528748480.0, + "30": 445603872.0, + "31": 457250368.0, + "32": 400653888.0, + "33": 347460640.0, + "34": 268919904.0, + "35": 495515584.0, + "36": 332139008.0, + "37": 446760768.0, + "38": 391328576.0, + "39": 378290400.0, + "40": 261331328.0, + "41": 368680832.0, + "42": 337485280.0, + "43": 337755968.0, + "44": 324657920.0, + "45": 216104608.0, + "46": 218159872.0, + "47": 302569184.0, + "48": 296505312.0, + "49": 280170176.0, + "50": 268486912.0 } }, "mem-allocated-bytes": { "start_step": 1, "end_step": 50, - "step_interval": 5, + "step_interval": 1, "values": { - "1": 7321331200.0, - "5": 7321333248.0, - "10": 7321333248.0, - "15": 7321333248.0, - "20": 7321333248.0, - "25": 7321333248.0, - "30": 7321333248.0, - "35": 7321333248.0, - "40": 7321333248.0, - "45": 7321333248.0, - "50": 7321333248.0 + "1": 7316093440.0, + "2": 7316095488.0, + "3": 7316095488.0, + "4": 7316095488.0, + "5": 7316095488.0, + "6": 7316095488.0, + "7": 7316095488.0, + "8": 7316095488.0, + "9": 7316095488.0, + "10": 7316095488.0, + "11": 7316095488.0, + "12": 7316095488.0, + "13": 7316095488.0, + "14": 7316095488.0, + "15": 7316095488.0, + "16": 7316095488.0, + "17": 7316095488.0, + "18": 7316095488.0, + "19": 7316095488.0, + "20": 7316095488.0, + "21": 7316095488.0, + "22": 7316095488.0, + "23": 7316095488.0, + "24": 7316095488.0, + "25": 7316095488.0, + "26": 7316095488.0, + "27": 7316095488.0, + "28": 7316095488.0, + "29": 7316095488.0, + "30": 7316095488.0, + "31": 7316095488.0, + "32": 7316095488.0, + "33": 7316095488.0, + "34": 7316095488.0, + "35": 7316095488.0, + "36": 7316095488.0, + "37": 7316095488.0, + "38": 7316095488.0, + "39": 7316095488.0, + 
"40": 7316095488.0, + "41": 7316095488.0, + "42": 7316095488.0, + "43": 7316095488.0, + "44": 7316095488.0, + "45": 7316095488.0, + "46": 7316095488.0, + "47": 7316095488.0, + "48": 7316095488.0, + "49": 7316095488.0, + "50": 7316095488.0 } }, "mem-max-allocated-bytes": { "start_step": 1, "end_step": 50, - "step_interval": 5, + "step_interval": 1, "values": { - "1": 53176152064.0, - "5": 55926337536.0, - "10": 55926337536.0, - "15": 55926337536.0, - "20": 55926337536.0, - "25": 56534257664.0, - "30": 57393635328.0, - "35": 57393635328.0, - "40": 57578217472.0, - "45": 57578217472.0, - "50": 57578217472.0 + "1": 53549867008.0, + "2": 56295710720.0, + "3": 56295710720.0, + "4": 56295710720.0, + "5": 56295710720.0, + "6": 56295710720.0, + "7": 56295710720.0, + "8": 56295710720.0, + "9": 56295710720.0, + "10": 56295710720.0, + "11": 56295710720.0, + "12": 56295710720.0, + "13": 56295710720.0, + "14": 56295710720.0, + "15": 56295710720.0, + "16": 56295710720.0, + "17": 56295710720.0, + "18": 56295710720.0, + "19": 56295710720.0, + "20": 56295710720.0, + "21": 56295710720.0, + "22": 56295710720.0, + "23": 56295710720.0, + "24": 56738553856.0, + "25": 56738553856.0, + "26": 56777162752.0, + "27": 56777162752.0, + "28": 56777162752.0, + "29": 56777162752.0, + "30": 56777162752.0, + "31": 56777162752.0, + "32": 56777162752.0, + "33": 56777162752.0, + "34": 56824344576.0, + "35": 57080135680.0, + "36": 57331695616.0, + "37": 57331695616.0, + "38": 57577013248.0, + "39": 57577013248.0, + "40": 57577013248.0, + "41": 57577013248.0, + "42": 57577013248.0, + "43": 57587191808.0, + "44": 57596944384.0, + "45": 57705652224.0, + "46": 57790390272.0, + "47": 57790390272.0, + "48": 57790390272.0, + "49": 57790390272.0, + "50": 57790390272.0 } }, "mtp_1 loss": { "start_step": 1, "end_step": 50, - "step_interval": 5, + "step_interval": 1, "values": { - "1": 11.0776, - "5": 9.87653, - "10": 9.02332, - "15": 7.91471, - "20": 7.75886, - "25": 7.56825, - "30": 7.53841, - "35": 7.12192, - 
"40": 7.44579, - "45": 7.09307, - "50": 6.94739 + "1": 11.07756, + "2": 11.07651, + "3": 10.53063, + "4": 10.08611, + "5": 9.87524, + "6": 9.55366, + "7": 9.62345, + "8": 8.91012, + "9": 8.72228, + "10": 9.02504, + "11": 8.39501, + "12": 8.42504, + "13": 8.32334, + "14": 7.76976, + "15": 7.91789, + "16": 7.97018, + "17": 7.92051, + "18": 7.65266, + "19": 8.0377, + "20": 7.76074, + "21": 7.44752, + "22": 7.43657, + "23": 7.30984, + "24": 7.31186, + "25": 7.56562, + "26": 6.97201, + "27": 7.50933, + "28": 7.2266, + "29": 7.40633, + "30": 7.53569, + "31": 7.28904, + "32": 7.47424, + "33": 7.53526, + "34": 7.59404, + "35": 7.11968, + "36": 6.9867, + "37": 7.32338, + "38": 7.09605, + "39": 7.45524, + "40": 7.44706, + "41": 7.39271, + "42": 7.14573, + "43": 7.13128, + "44": 7.31399, + "45": 7.08836, + "46": 6.80158, + "47": 7.2062, + "48": 7.0468, + "49": 7.47982, + "50": 6.94494 } }, "iteration-time": { "start_step": 1, "end_step": 50, - "step_interval": 5, + "step_interval": 1, "values": { - "1": 51.33936, - "5": 1.24167, - "10": 1.14623, - "15": 1.16973, - "20": 1.23165, - "25": 1.13719, - "30": 1.15864, - "35": 1.13509, - "40": 1.14729, - "45": 1.14136, - "50": 1.13625 + "1": 102.52307, + "2": 1.75305, + "3": 1.36681, + "4": 1.62808, + "5": 1.13714, + "6": 1.45805, + "7": 1.6121, + "8": 1.20031, + "9": 1.09784, + "10": 1.10383, + "11": 1.10878, + "12": 1.18093, + "13": 1.43808, + "14": 1.17223, + "15": 1.11575, + "16": 1.1159, + "17": 1.11727, + "18": 1.10751, + "19": 1.11189, + "20": 1.1082, + "21": 1.10459, + "22": 1.11252, + "23": 1.10744, + "24": 1.12218, + "25": 1.09823, + "26": 1.11657, + "27": 1.08949, + "28": 1.10254, + "29": 1.10189, + "30": 1.08963, + "31": 1.10454, + "32": 1.09654, + "33": 1.08747, + "34": 1.09674, + "35": 1.09106, + "36": 1.08904, + "37": 1.1178, + "38": 1.09379, + "39": 1.10306, + "40": 1.09998, + "41": 1.08808, + "42": 1.0941, + "43": 1.0919, + "44": 1.0813, + "45": 1.08715, + "46": 1.07061, + "47": 1.07098, + "48": 1.07438, + "49": 
1.07469, + "50": 1.0719 } } } \ No newline at end of file diff --git a/tests/functional_tests/test_cases/moe/gpt3_moe_mcore_te_tp4_ep2_etp2_pp2_scoped_cudagraph/golden_values_dev_dgxh100_coreweave.json b/tests/functional_tests/test_cases/moe/gpt3_moe_mcore_te_tp4_ep2_etp2_pp2_scoped_cudagraph/golden_values_dev_dgxh100_coreweave.json new file mode 100644 index 00000000000..309b2533461 --- /dev/null +++ b/tests/functional_tests/test_cases/moe/gpt3_moe_mcore_te_tp4_ep2_etp2_pp2_scoped_cudagraph/golden_values_dev_dgxh100_coreweave.json @@ -0,0 +1,644 @@ +{ + "lm loss": { + "start_step": 1, + "end_step": 100, + "step_interval": 1, + "values": { + "1": 10.93663, + "2": 10.9327, + "3": 10.94263, + "4": 10.94969, + "5": 10.95052, + "6": 10.94157, + "7": 10.94484, + "8": 10.93674, + "9": 10.94996, + "10": 10.93686, + "11": 10.94102, + "12": 10.93763, + "13": 10.9235, + "14": 10.93428, + "15": 10.88791, + "16": 10.87434, + "17": 10.86896, + "18": 10.86065, + "19": 10.86311, + "20": 10.78063, + "21": 10.73125, + "22": 10.60283, + "23": 10.73278, + "24": 10.61888, + "25": 10.55212, + "26": 10.62704, + "27": 10.6391, + "28": 10.5908, + "29": 10.59809, + "30": 10.37777, + "31": 10.1201, + "32": 10.46078, + "33": 10.45538, + "34": 10.20107, + "35": 10.25779, + "36": 10.20889, + "37": 10.33688, + "38": 10.16827, + "39": 10.40875, + "40": 10.05239, + "41": 10.09432, + "42": 10.17894, + "43": 9.74205, + "44": 9.8904, + "45": 9.74009, + "46": 9.72707, + "47": 10.09139, + "48": 9.75298, + "49": 9.40106, + "50": 9.83667, + "51": 9.77071, + "52": 9.65705, + "53": 10.03051, + "54": 9.87899, + "55": 9.79604, + "56": 9.52924, + "57": 9.36583, + "58": 9.75331, + "59": 9.48065, + "60": 9.40785, + "61": 9.60145, + "62": 9.90753, + "63": 9.2583, + "64": 9.68397, + "65": 8.80003, + "66": 9.60779, + "67": 9.25408, + "68": 9.71438, + "69": 9.71682, + "70": 9.6617, + "71": 9.52466, + "72": 9.47116, + "73": 9.38822, + "74": 8.80223, + "75": 9.33966, + "76": 8.93574, + "77": 9.99333, + "78": 
9.64731, + "79": 9.28114, + "80": 9.29588, + "81": 9.39589, + "82": 9.60893, + "83": 9.21629, + "84": 9.33891, + "85": 9.52979, + "86": 8.95817, + "87": 9.51641, + "88": 9.68228, + "89": 9.50664, + "90": 9.75348, + "91": 9.23465, + "92": 9.25972, + "93": 8.94517, + "94": 8.69188, + "95": 9.44591, + "96": 9.4101, + "97": 9.20087, + "98": 9.58175, + "99": 8.75818, + "100": 9.29466 + } + }, + "num-zeros": { + "start_step": 1, + "end_step": 100, + "step_interval": 1, + "values": { + "1": 22750260.0, + "2": 22953110.0, + "3": 22604450.0, + "4": 23266322.0, + "5": 22735560.0, + "6": 23061920.0, + "7": 22793342.0, + "8": 22960820.0, + "9": 22865664.0, + "10": 22950364.0, + "11": 22499674.0, + "12": 22456088.0, + "13": 22948060.0, + "14": 22384512.0, + "15": 22846272.0, + "16": 22856858.0, + "17": 22836412.0, + "18": 22590058.0, + "19": 22627048.0, + "20": 22712308.0, + "21": 22762624.0, + "22": 22816888.0, + "23": 22545124.0, + "24": 22794440.0, + "25": 22841936.0, + "26": 22549680.0, + "27": 22464820.0, + "28": 22453684.0, + "29": 22534640.0, + "30": 22636152.0, + "31": 22989488.0, + "32": 22594070.0, + "33": 22566010.0, + "34": 22855504.0, + "35": 22813688.0, + "36": 22595396.0, + "37": 22499360.0, + "38": 22926126.0, + "39": 22825392.0, + "40": 22675666.0, + "41": 22671586.0, + "42": 22682140.0, + "43": 23013940.0, + "44": 22764458.0, + "45": 22678992.0, + "46": 22915276.0, + "47": 22642868.0, + "48": 22954190.0, + "49": 23786668.0, + "50": 22934008.0, + "51": 23866222.0, + "52": 23807290.0, + "53": 24007532.0, + "54": 22871610.0, + "55": 23571284.0, + "56": 23954310.0, + "57": 24211632.0, + "58": 23914404.0, + "59": 23771838.0, + "60": 23813560.0, + "61": 23797288.0, + "62": 23739984.0, + "63": 23916692.0, + "64": 23895952.0, + "65": 24150562.0, + "66": 23796504.0, + "67": 25032232.0, + "68": 23673188.0, + "69": 23648580.0, + "70": 23903504.0, + "71": 24864636.0, + "72": 24767108.0, + "73": 24850612.0, + "74": 24132990.0, + "75": 24146528.0, + "76": 25025540.0, + 
"77": 24358472.0, + "78": 24910064.0, + "79": 23810516.0, + "80": 24821440.0, + "81": 25020512.0, + "82": 23851244.0, + "83": 24961024.0, + "84": 25144020.0, + "85": 24823608.0, + "86": 23153096.0, + "87": 24850204.0, + "88": 24749150.0, + "89": 22505554.0, + "90": 24059620.0, + "91": 23839038.0, + "92": 23874568.0, + "93": 24769548.0, + "94": 23992452.0, + "95": 25189838.0, + "96": 23909262.0, + "97": 24713068.0, + "98": 23832506.0, + "99": 23983474.0, + "100": 24101108.0 + } + }, + "mem-allocated-bytes": { + "start_step": 1, + "end_step": 100, + "step_interval": 1, + "values": { + "1": 763142656.0, + "2": 778734592.0, + "3": 772525056.0, + "4": 803593216.0, + "5": 803593216.0, + "6": 803593216.0, + "7": 801299456.0, + "8": 803593216.0, + "9": 801840128.0, + "10": 803593216.0, + "11": 802987008.0, + "12": 803593216.0, + "13": 802987008.0, + "14": 801299456.0, + "15": 803593216.0, + "16": 801840128.0, + "17": 803593216.0, + "18": 802987008.0, + "19": 801299456.0, + "20": 803593216.0, + "21": 801299456.0, + "22": 803593216.0, + "23": 801299456.0, + "24": 803593216.0, + "25": 801299456.0, + "26": 803593216.0, + "27": 801299456.0, + "28": 803593216.0, + "29": 801299456.0, + "30": 803593216.0, + "31": 801299456.0, + "32": 803593216.0, + "33": 801840128.0, + "34": 803593216.0, + "35": 801840128.0, + "36": 803593216.0, + "37": 802987008.0, + "38": 801299456.0, + "39": 803593216.0, + "40": 801299456.0, + "41": 803593216.0, + "42": 801840128.0, + "43": 803593216.0, + "44": 801840128.0, + "45": 803593216.0, + "46": 801840128.0, + "47": 803593216.0, + "48": 801840128.0, + "49": 803593216.0, + "50": 801840128.0, + "51": 801299456.0, + "52": 803593216.0, + "53": 801299456.0, + "54": 803593216.0, + "55": 801840128.0, + "56": 803593216.0, + "57": 801840128.0, + "58": 803593216.0, + "59": 801840128.0, + "60": 803593216.0, + "61": 801299456.0, + "62": 803593216.0, + "63": 801299456.0, + "64": 802987008.0, + "65": 803593216.0, + "66": 801299456.0, + "67": 803593216.0, + "68": 
801299456.0, + "69": 803593216.0, + "70": 801840128.0, + "71": 803593216.0, + "72": 801299456.0, + "73": 803593216.0, + "74": 803593216.0, + "75": 802987008.0, + "76": 803593216.0, + "77": 801840128.0, + "78": 803593216.0, + "79": 801299456.0, + "80": 802987008.0, + "81": 803593216.0, + "82": 801840128.0, + "83": 803593216.0, + "84": 801299456.0, + "85": 802987008.0, + "86": 803593216.0, + "87": 801840128.0, + "88": 803593216.0, + "89": 801299456.0, + "90": 802987008.0, + "91": 803593216.0, + "92": 801299456.0, + "93": 803593216.0, + "94": 801299456.0, + "95": 803593216.0, + "96": 801299456.0, + "97": 803593216.0, + "98": 801299456.0, + "99": 802987008.0, + "100": 803593216.0 + } + }, + "mem-max-allocated-bytes": { + "start_step": 1, + "end_step": 100, + "step_interval": 1, + "values": { + "1": 993582592.0, + "2": 1210942464.0, + "3": 1210942464.0, + "4": 1210942464.0, + "5": 1210942464.0, + "6": 1210942464.0, + "7": 1210942464.0, + "8": 1210942464.0, + "9": 1210942464.0, + "10": 1210942464.0, + "11": 1210942464.0, + "12": 1210942464.0, + "13": 1210942464.0, + "14": 1210942464.0, + "15": 1210942464.0, + "16": 1210942464.0, + "17": 1210942464.0, + "18": 1210942464.0, + "19": 1210942464.0, + "20": 1210942464.0, + "21": 1210942464.0, + "22": 1210942464.0, + "23": 1210942464.0, + "24": 1210942464.0, + "25": 1210942464.0, + "26": 1210942464.0, + "27": 1210942464.0, + "28": 1210942464.0, + "29": 1210942464.0, + "30": 1210942464.0, + "31": 1210942464.0, + "32": 1210942464.0, + "33": 1210942464.0, + "34": 1210942464.0, + "35": 1210942464.0, + "36": 1210942464.0, + "37": 1210942464.0, + "38": 1210942464.0, + "39": 1210942464.0, + "40": 1210942464.0, + "41": 1210942464.0, + "42": 1210942464.0, + "43": 1210942464.0, + "44": 1210942464.0, + "45": 1210942464.0, + "46": 1210942464.0, + "47": 1210942464.0, + "48": 1210942464.0, + "49": 1210942464.0, + "50": 1210942464.0, + "51": 1210942464.0, + "52": 1210942464.0, + "53": 1210942464.0, + "54": 1210942464.0, + "55": 1210942464.0, 
+ "56": 1210942464.0, + "57": 1210942464.0, + "58": 1210942464.0, + "59": 1210942464.0, + "60": 1210942464.0, + "61": 1210942464.0, + "62": 1210942464.0, + "63": 1210942464.0, + "64": 1210942464.0, + "65": 1210942464.0, + "66": 1210942464.0, + "67": 1210942464.0, + "68": 1210942464.0, + "69": 1210942464.0, + "70": 1210942464.0, + "71": 1210942464.0, + "72": 1210942464.0, + "73": 1210942464.0, + "74": 1210942464.0, + "75": 1210942464.0, + "76": 1210942464.0, + "77": 1210942464.0, + "78": 1210942464.0, + "79": 1210942464.0, + "80": 1210942464.0, + "81": 1210942464.0, + "82": 1210942464.0, + "83": 1210942464.0, + "84": 1210942464.0, + "85": 1210942464.0, + "86": 1210942464.0, + "87": 1210942464.0, + "88": 1210942464.0, + "89": 1210942464.0, + "90": 1210942464.0, + "91": 1210942464.0, + "92": 1210942464.0, + "93": 1210942464.0, + "94": 1210942464.0, + "95": 1210942464.0, + "96": 1210942464.0, + "97": 1210942464.0, + "98": 1210942464.0, + "99": 1210942464.0, + "100": 1210942464.0 + } + }, + "mtp_1 loss": { + "start_step": 1, + "end_step": 100, + "step_interval": 1, + "values": { + "1": 10.88689, + "2": 10.90485, + "3": 10.90869, + "4": 10.86903, + "5": 10.91601, + "6": 10.906, + "7": 10.90268, + "8": 10.88984, + "9": 10.90425, + "10": 10.89144, + "11": 10.93384, + "12": 10.91647, + "13": 10.91108, + "14": 10.91974, + "15": 10.88488, + "16": 10.9077, + "17": 10.87571, + "18": 10.91379, + "19": 10.9092, + "20": 10.87837, + "21": 10.87896, + "22": 10.85583, + "23": 10.88007, + "24": 10.87245, + "25": 10.85859, + "26": 10.8696, + "27": 10.87702, + "28": 10.88641, + "29": 10.88866, + "30": 10.85422, + "31": 10.79713, + "32": 10.86631, + "33": 10.8781, + "34": 10.83982, + "35": 10.84165, + "36": 10.85012, + "37": 10.85556, + "38": 10.83674, + "39": 10.86355, + "40": 10.82887, + "41": 10.8341, + "42": 10.84469, + "43": 10.78828, + "44": 10.82123, + "45": 10.78831, + "46": 10.7823, + "47": 10.82898, + "48": 10.78985, + "49": 10.71269, + "50": 10.77382, + "51": 10.76639, + "52": 
10.7397, + "53": 10.80285, + "54": 10.77365, + "55": 10.76066, + "56": 10.71068, + "57": 10.66686, + "58": 10.74378, + "59": 10.69209, + "60": 10.66474, + "61": 10.7073, + "62": 10.77206, + "63": 10.61812, + "64": 10.7178, + "65": 10.49439, + "66": 10.67106, + "67": 10.57534, + "68": 10.6873, + "69": 10.6816, + "70": 10.66836, + "71": 10.64586, + "72": 10.60925, + "73": 10.56508, + "74": 10.37144, + "75": 10.51183, + "76": 10.39914, + "77": 10.75182, + "78": 10.6268, + "79": 10.46827, + "80": 10.47524, + "81": 10.51083, + "82": 10.58769, + "83": 10.4381, + "84": 10.45057, + "85": 10.55084, + "86": 10.28076, + "87": 10.51088, + "88": 10.60323, + "89": 10.50794, + "90": 10.60274, + "91": 10.38238, + "92": 10.38703, + "93": 10.23076, + "94": 10.08438, + "95": 10.42616, + "96": 10.44905, + "97": 10.32215, + "98": 10.4966, + "99": 10.04765, + "100": 10.33491 + } + }, + "iteration-time": { + "start_step": 1, + "end_step": 100, + "step_interval": 1, + "values": { + "1": 51.30209, + "2": 1.41746, + "3": 1.28029, + "4": 10.57024, + "5": 0.66643, + "6": 0.67893, + "7": 0.65727, + "8": 0.66196, + "9": 0.66227, + "10": 0.65877, + "11": 0.65828, + "12": 0.65862, + "13": 0.65727, + "14": 0.65896, + "15": 0.65851, + "16": 0.66826, + "17": 0.65878, + "18": 0.65573, + "19": 0.65631, + "20": 0.65579, + "21": 0.65091, + "22": 0.65603, + "23": 0.65158, + "24": 0.65266, + "25": 0.65816, + "26": 0.65194, + "27": 0.6541, + "28": 0.65515, + "29": 0.65439, + "30": 0.65241, + "31": 0.65597, + "32": 0.65551, + "33": 0.65318, + "34": 0.6553, + "35": 0.65725, + "36": 0.65926, + "37": 0.65606, + "38": 0.65571, + "39": 0.65846, + "40": 0.65642, + "41": 0.65509, + "42": 0.66105, + "43": 0.65448, + "44": 0.65534, + "45": 0.65304, + "46": 0.65227, + "47": 0.64871, + "48": 0.65257, + "49": 0.65485, + "50": 0.65054, + "51": 0.67883, + "52": 0.6571, + "53": 0.65671, + "54": 0.65877, + "55": 0.65584, + "56": 0.65072, + "57": 0.64951, + "58": 0.65703, + "59": 0.65106, + "60": 0.64536, + "61": 0.64416, + 
"62": 0.64816, + "63": 0.64084, + "64": 0.6396, + "65": 0.64182, + "66": 0.64004, + "67": 0.64101, + "68": 0.63928, + "69": 0.65723, + "70": 0.6828, + "71": 0.64052, + "72": 0.64287, + "73": 0.64136, + "74": 0.64252, + "75": 0.64617, + "76": 0.64857, + "77": 0.64304, + "78": 0.64068, + "79": 0.64048, + "80": 0.64091, + "81": 0.64179, + "82": 0.64793, + "83": 0.641, + "84": 0.64077, + "85": 0.64011, + "86": 0.64018, + "87": 0.64132, + "88": 0.63901, + "89": 0.6407, + "90": 0.64277, + "91": 0.64132, + "92": 0.64123, + "93": 0.65051, + "94": 0.65036, + "95": 0.64542, + "96": 0.64561, + "97": 0.6504, + "98": 0.64563, + "99": 0.64524, + "100": 0.65049 + } + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/moe/gpt3_moe_mcore_te_tp4_ep2_etp2_pp2_scoped_cudagraph/golden_values_dev_dgxh100_eos.json b/tests/functional_tests/test_cases/moe/gpt3_moe_mcore_te_tp4_ep2_etp2_pp2_scoped_cudagraph/golden_values_dev_dgxh100_eos.json new file mode 100644 index 00000000000..e8c2bae571f --- /dev/null +++ b/tests/functional_tests/test_cases/moe/gpt3_moe_mcore_te_tp4_ep2_etp2_pp2_scoped_cudagraph/golden_values_dev_dgxh100_eos.json @@ -0,0 +1,644 @@ +{ + "lm loss": { + "start_step": 1, + "end_step": 100, + "step_interval": 1, + "values": { + "1": 10.93663, + "2": 10.9327, + "3": 10.94263, + "4": 10.94969, + "5": 10.95052, + "6": 10.94157, + "7": 10.94484, + "8": 10.93674, + "9": 10.94996, + "10": 10.93686, + "11": 10.94102, + "12": 10.93763, + "13": 10.9235, + "14": 10.93428, + "15": 10.88791, + "16": 10.87434, + "17": 10.86896, + "18": 10.86065, + "19": 10.86311, + "20": 10.78063, + "21": 10.73125, + "22": 10.60283, + "23": 10.73278, + "24": 10.61888, + "25": 10.55212, + "26": 10.62704, + "27": 10.6391, + "28": 10.5908, + "29": 10.59809, + "30": 10.37777, + "31": 10.1201, + "32": 10.46078, + "33": 10.45538, + "34": 10.20107, + "35": 10.25779, + "36": 10.20889, + "37": 10.33688, + "38": 10.16827, + "39": 10.40875, + "40": 10.05239, + "41": 10.09432, + "42": 
10.17894, + "43": 9.74205, + "44": 9.8904, + "45": 9.74009, + "46": 9.72707, + "47": 10.09139, + "48": 9.75298, + "49": 9.40106, + "50": 9.83667, + "51": 9.77071, + "52": 9.65705, + "53": 10.03051, + "54": 9.87899, + "55": 9.79604, + "56": 9.52924, + "57": 9.36583, + "58": 9.75331, + "59": 9.48065, + "60": 9.40785, + "61": 9.60145, + "62": 9.90753, + "63": 9.2583, + "64": 9.68397, + "65": 8.80003, + "66": 9.60779, + "67": 9.25408, + "68": 9.71438, + "69": 9.71682, + "70": 9.6617, + "71": 9.52466, + "72": 9.47116, + "73": 9.38822, + "74": 8.80223, + "75": 9.33966, + "76": 8.93574, + "77": 9.99333, + "78": 9.64731, + "79": 9.28114, + "80": 9.29588, + "81": 9.39589, + "82": 9.60893, + "83": 9.21629, + "84": 9.33891, + "85": 9.52979, + "86": 8.95817, + "87": 9.51641, + "88": 9.68228, + "89": 9.50664, + "90": 9.75348, + "91": 9.23465, + "92": 9.25972, + "93": 8.94517, + "94": 8.69188, + "95": 9.44591, + "96": 9.4101, + "97": 9.20087, + "98": 9.58175, + "99": 8.75818, + "100": 9.29466 + } + }, + "num-zeros": { + "start_step": 1, + "end_step": 100, + "step_interval": 1, + "values": { + "1": 22750260.0, + "2": 22953110.0, + "3": 22604450.0, + "4": 23266322.0, + "5": 22735560.0, + "6": 23061920.0, + "7": 22793342.0, + "8": 22960820.0, + "9": 22865664.0, + "10": 22950364.0, + "11": 22499674.0, + "12": 22456088.0, + "13": 22948060.0, + "14": 22384512.0, + "15": 22846272.0, + "16": 22856858.0, + "17": 22836412.0, + "18": 22590058.0, + "19": 22627048.0, + "20": 22712308.0, + "21": 22762624.0, + "22": 22816888.0, + "23": 22545124.0, + "24": 22794440.0, + "25": 22841936.0, + "26": 22549680.0, + "27": 22464820.0, + "28": 22453684.0, + "29": 22534640.0, + "30": 22636152.0, + "31": 22989488.0, + "32": 22594070.0, + "33": 22566010.0, + "34": 22855504.0, + "35": 22813688.0, + "36": 22595396.0, + "37": 22499360.0, + "38": 22926126.0, + "39": 22825392.0, + "40": 22675666.0, + "41": 22671586.0, + "42": 22682140.0, + "43": 23013940.0, + "44": 22764458.0, + "45": 22678992.0, + "46": 
22915276.0, + "47": 22642868.0, + "48": 22954190.0, + "49": 23786668.0, + "50": 22934008.0, + "51": 23866222.0, + "52": 23807290.0, + "53": 24007532.0, + "54": 22871610.0, + "55": 23571284.0, + "56": 23954310.0, + "57": 24211632.0, + "58": 23914404.0, + "59": 23771838.0, + "60": 23813560.0, + "61": 23797288.0, + "62": 23739984.0, + "63": 23916692.0, + "64": 23895952.0, + "65": 24150562.0, + "66": 23796504.0, + "67": 25032232.0, + "68": 23673188.0, + "69": 23648580.0, + "70": 23903504.0, + "71": 24864636.0, + "72": 24767108.0, + "73": 24850612.0, + "74": 24132990.0, + "75": 24146528.0, + "76": 25025540.0, + "77": 24358472.0, + "78": 24910064.0, + "79": 23810516.0, + "80": 24821440.0, + "81": 25020512.0, + "82": 23851244.0, + "83": 24961024.0, + "84": 25144020.0, + "85": 24823608.0, + "86": 23153096.0, + "87": 24850204.0, + "88": 24749150.0, + "89": 22505554.0, + "90": 24059620.0, + "91": 23839038.0, + "92": 23874568.0, + "93": 24769548.0, + "94": 23992452.0, + "95": 25189838.0, + "96": 23909262.0, + "97": 24713068.0, + "98": 23832506.0, + "99": 23983474.0, + "100": 24101108.0 + } + }, + "mem-allocated-bytes": { + "start_step": 1, + "end_step": 100, + "step_interval": 1, + "values": { + "1": 769688064.0, + "2": 775359488.0, + "3": 769690624.0, + "4": 801299456.0, + "5": 803593216.0, + "6": 801299456.0, + "7": 803593216.0, + "8": 803593216.0, + "9": 801299456.0, + "10": 803593216.0, + "11": 801299456.0, + "12": 803593216.0, + "13": 801299456.0, + "14": 803593216.0, + "15": 803593216.0, + "16": 801299456.0, + "17": 803593216.0, + "18": 801299456.0, + "19": 803593216.0, + "20": 801299456.0, + "21": 803593216.0, + "22": 803593216.0, + "23": 801840128.0, + "24": 803593216.0, + "25": 802987008.0, + "26": 801299456.0, + "27": 802987008.0, + "28": 801299456.0, + "29": 801299456.0, + "30": 803593216.0, + "31": 801299456.0, + "32": 803593216.0, + "33": 801299456.0, + "34": 803593216.0, + "35": 801299456.0, + "36": 801299456.0, + "37": 803593216.0, + "38": 801299456.0, + "39": 
803593216.0, + "40": 801299456.0, + "41": 803593216.0, + "42": 801299456.0, + "43": 801299456.0, + "44": 803593216.0, + "45": 802987008.0, + "46": 801299456.0, + "47": 803593216.0, + "48": 801299456.0, + "49": 803593216.0, + "50": 801299456.0, + "51": 801299456.0, + "52": 803593216.0, + "53": 802446336.0, + "54": 801299456.0, + "55": 803593216.0, + "56": 802987008.0, + "57": 801299456.0, + "58": 801840128.0, + "59": 801299456.0, + "60": 803593216.0, + "61": 801840128.0, + "62": 801299456.0, + "63": 803593216.0, + "64": 802446336.0, + "65": 803593216.0, + "66": 801840128.0, + "67": 801299456.0, + "68": 803593216.0, + "69": 801840128.0, + "70": 801299456.0, + "71": 803593216.0, + "72": 803593216.0, + "73": 802987008.0, + "74": 801299456.0, + "75": 803593216.0, + "76": 803593216.0, + "77": 801299456.0, + "78": 801299456.0, + "79": 803593216.0, + "80": 801840128.0, + "81": 801299456.0, + "82": 803593216.0, + "83": 801299456.0, + "84": 801299456.0, + "85": 803593216.0, + "86": 801299456.0, + "87": 801299456.0, + "88": 803593216.0, + "89": 801840128.0, + "90": 803593216.0, + "91": 802987008.0, + "92": 801299456.0, + "93": 803593216.0, + "94": 801299456.0, + "95": 801299456.0, + "96": 803593216.0, + "97": 801840128.0, + "98": 803593216.0, + "99": 802987008.0, + "100": 801299456.0 + } + }, + "mem-max-allocated-bytes": { + "start_step": 1, + "end_step": 100, + "step_interval": 1, + "values": { + "1": 988765184.0, + "2": 1206831616.0, + "3": 1210116096.0, + "4": 1210116096.0, + "5": 1210116096.0, + "6": 1210116096.0, + "7": 1210116096.0, + "8": 1210116096.0, + "9": 1210116096.0, + "10": 1210116096.0, + "11": 1210116096.0, + "12": 1210116096.0, + "13": 1210116096.0, + "14": 1210116096.0, + "15": 1210116096.0, + "16": 1210116096.0, + "17": 1210116096.0, + "18": 1210116096.0, + "19": 1210116096.0, + "20": 1210116096.0, + "21": 1210116096.0, + "22": 1210116096.0, + "23": 1210116096.0, + "24": 1210116096.0, + "25": 1210116096.0, + "26": 1210116096.0, + "27": 1210116096.0, + "28": 
1210116096.0, + "29": 1210116096.0, + "30": 1210116096.0, + "31": 1210116096.0, + "32": 1210116096.0, + "33": 1210116096.0, + "34": 1210116096.0, + "35": 1210116096.0, + "36": 1210116096.0, + "37": 1210116096.0, + "38": 1210116096.0, + "39": 1210116096.0, + "40": 1210116096.0, + "41": 1210116096.0, + "42": 1210116096.0, + "43": 1210116096.0, + "44": 1210116096.0, + "45": 1210116096.0, + "46": 1210116096.0, + "47": 1210116096.0, + "48": 1210116096.0, + "49": 1210116096.0, + "50": 1210116096.0, + "51": 1210116096.0, + "52": 1210116096.0, + "53": 1210116096.0, + "54": 1210116096.0, + "55": 1210116096.0, + "56": 1210116096.0, + "57": 1210116096.0, + "58": 1210116096.0, + "59": 1210116096.0, + "60": 1210116096.0, + "61": 1210116096.0, + "62": 1210116096.0, + "63": 1210116096.0, + "64": 1210116096.0, + "65": 1210116096.0, + "66": 1210116096.0, + "67": 1210116096.0, + "68": 1210116096.0, + "69": 1210116096.0, + "70": 1210116096.0, + "71": 1210116096.0, + "72": 1210116096.0, + "73": 1210116096.0, + "74": 1210116096.0, + "75": 1210116096.0, + "76": 1210116096.0, + "77": 1210116096.0, + "78": 1210116096.0, + "79": 1210116096.0, + "80": 1210116096.0, + "81": 1210116096.0, + "82": 1210116096.0, + "83": 1210116096.0, + "84": 1210116096.0, + "85": 1210116096.0, + "86": 1210116096.0, + "87": 1210116096.0, + "88": 1210116096.0, + "89": 1210116096.0, + "90": 1210116096.0, + "91": 1210116096.0, + "92": 1210116096.0, + "93": 1210116096.0, + "94": 1210116096.0, + "95": 1210116096.0, + "96": 1210116096.0, + "97": 1210116096.0, + "98": 1210116096.0, + "99": 1210116096.0, + "100": 1210116096.0 + } + }, + "mtp_1 loss": { + "start_step": 1, + "end_step": 100, + "step_interval": 1, + "values": { + "1": 10.88689, + "2": 10.90485, + "3": 10.90869, + "4": 10.86903, + "5": 10.91601, + "6": 10.906, + "7": 10.90268, + "8": 10.88984, + "9": 10.90425, + "10": 10.89144, + "11": 10.93384, + "12": 10.91647, + "13": 10.91108, + "14": 10.91974, + "15": 10.88488, + "16": 10.9077, + "17": 10.87571, + 
"18": 10.91379, + "19": 10.9092, + "20": 10.87837, + "21": 10.87896, + "22": 10.85583, + "23": 10.88007, + "24": 10.87245, + "25": 10.85859, + "26": 10.8696, + "27": 10.87702, + "28": 10.88641, + "29": 10.88866, + "30": 10.85422, + "31": 10.79713, + "32": 10.86631, + "33": 10.8781, + "34": 10.83982, + "35": 10.84165, + "36": 10.85012, + "37": 10.85556, + "38": 10.83674, + "39": 10.86355, + "40": 10.82887, + "41": 10.8341, + "42": 10.84469, + "43": 10.78828, + "44": 10.82123, + "45": 10.78831, + "46": 10.7823, + "47": 10.82898, + "48": 10.78985, + "49": 10.71269, + "50": 10.77382, + "51": 10.76639, + "52": 10.7397, + "53": 10.80285, + "54": 10.77365, + "55": 10.76066, + "56": 10.71068, + "57": 10.66686, + "58": 10.74378, + "59": 10.69209, + "60": 10.66474, + "61": 10.7073, + "62": 10.77206, + "63": 10.61812, + "64": 10.7178, + "65": 10.49439, + "66": 10.67106, + "67": 10.57534, + "68": 10.6873, + "69": 10.6816, + "70": 10.66836, + "71": 10.64586, + "72": 10.60925, + "73": 10.56508, + "74": 10.37144, + "75": 10.51183, + "76": 10.39914, + "77": 10.75182, + "78": 10.6268, + "79": 10.46827, + "80": 10.47524, + "81": 10.51083, + "82": 10.58769, + "83": 10.4381, + "84": 10.45057, + "85": 10.55084, + "86": 10.28076, + "87": 10.51088, + "88": 10.60323, + "89": 10.50794, + "90": 10.60274, + "91": 10.38238, + "92": 10.38703, + "93": 10.23076, + "94": 10.08438, + "95": 10.42616, + "96": 10.44905, + "97": 10.32215, + "98": 10.4966, + "99": 10.04765, + "100": 10.33491 + } + }, + "iteration-time": { + "start_step": 1, + "end_step": 100, + "step_interval": 1, + "values": { + "1": 58.67467, + "2": 1.49483, + "3": 1.38721, + "4": 11.78499, + "5": 0.75759, + "6": 0.75678, + "7": 0.76144, + "8": 0.80382, + "9": 0.74706, + "10": 0.74893, + "11": 0.75091, + "12": 0.75087, + "13": 0.74803, + "14": 0.75316, + "15": 0.80396, + "16": 0.75267, + "17": 0.75378, + "18": 0.75457, + "19": 0.75484, + "20": 0.75428, + "21": 0.75639, + "22": 0.81363, + "23": 0.75607, + "24": 0.75553, + "25": 
0.75564, + "26": 0.75334, + "27": 0.75722, + "28": 0.76027, + "29": 0.8113, + "30": 0.75278, + "31": 0.75471, + "32": 0.75104, + "33": 0.75271, + "34": 0.74877, + "35": 0.74765, + "36": 0.80549, + "37": 0.75089, + "38": 0.75395, + "39": 0.75254, + "40": 0.76025, + "41": 0.75356, + "42": 0.75573, + "43": 0.79632, + "44": 0.77927, + "45": 0.75515, + "46": 0.75759, + "47": 0.75978, + "48": 0.75749, + "49": 0.75504, + "50": 0.75616, + "51": 0.77974, + "52": 0.76581, + "53": 0.76997, + "54": 0.76705, + "55": 0.76737, + "56": 0.77352, + "57": 0.77833, + "58": 0.81195, + "59": 0.77251, + "60": 0.7711, + "61": 0.77181, + "62": 0.77006, + "63": 0.76957, + "64": 0.77251, + "65": 0.82259, + "66": 0.77112, + "67": 0.7683, + "68": 0.77335, + "69": 0.77022, + "70": 0.77335, + "71": 0.77822, + "72": 0.77769, + "73": 0.79476, + "74": 0.7728, + "75": 0.7711, + "76": 0.76863, + "77": 0.77228, + "78": 0.77031, + "79": 0.76995, + "80": 0.77286, + "81": 0.76616, + "82": 0.76752, + "83": 0.76583, + "84": 0.77264, + "85": 0.76732, + "86": 0.76873, + "87": 0.77239, + "88": 0.77971, + "89": 0.76112, + "90": 0.76225, + "91": 0.75814, + "92": 0.76144, + "93": 0.75796, + "94": 0.76412, + "95": 0.777, + "96": 0.77207, + "97": 0.7628, + "98": 0.76325, + "99": 0.76204, + "100": 0.7668 + } + } +} \ No newline at end of file diff --git a/tests/functional_tests/test_cases/moe2.0/golden_values/dsv3_tp1pp1ep8/golden_values_dev_dgx_h100.json b/tests/functional_tests/test_cases/moe2.0/golden_values/dsv3_tp1pp1ep8/golden_values_dev_dgx_h100.json new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional_tests/test_cases/moe2.0/golden_values/dsv3_tp2pp2ep4/golden_values_dev_dgx_h100.json b/tests/functional_tests/test_cases/moe2.0/golden_values/dsv3_tp2pp2ep4/golden_values_dev_dgx_h100.json new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional_tests/test_cases/moe2.0/model_configs/dsv3_proxy.yaml 
b/tests/functional_tests/test_cases/moe2.0/model_configs/dsv3_proxy.yaml new file mode 100644 index 00000000000..70924aed0cc --- /dev/null +++ b/tests/functional_tests/test_cases/moe2.0/model_configs/dsv3_proxy.yaml @@ -0,0 +1,85 @@ +MODEL_ARGS: + # Data args + --seq-length: 4096 + --data-cache-path: ${DATA_CACHE_PATH} + --data-path: ${DATA_PATH}/text/the_pile/shard00/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/text/the_pile/shard00/bpe/vocab.json + --merge-file: ${DATA_PATH}/text/the_pile/shard00/bpe/merges.txt + --split: 949,50,1 + # Add transformer base args + --num-layers: 16 + --hidden-size: 1024 + --normalization: RMSNorm + --norm-epsilon: 1e-6 + --disable-bias-linear: true + --max-position-embeddings: 4096 + --make-vocab-size-divisible-by: 3232 + --untie-embeddings-and-output-weights: true + # Add attention related args + --multi-latent-attention: true + --num-attention-heads: 32 + --kv-channels: 128 + --qk-layernorm: true + --position-embedding-type: rope + --rotary-base: 10000 + --q-lora-rank: 1536 + --kv-lora-rank: 512 + --qk-head-dim: 128 + --qk-pos-emb-head-dim: 64 + --v-head-dim: 128 + --rotary-scaling-factor: 40 + --mscale: 1.0 + --mscale-all-dim: 1.0 + # Add MLP related args + --swiglu: true + --ffn-hidden-size: 4096 + # Add MoE args + --num-experts: 32 + --moe-layer-freq: ([0]*1+[1]*15) + --moe-ffn-hidden-size: 1024 + --moe-shared-expert-intermediate-size: 1024 + --moe-router-load-balancing-type: seq_aux_loss + --moe-router-topk: 4 + --moe-router-pre-softmax: true + --moe-grouped-gemm: true + --moe-aux-loss-coeff: 1e-4 + --moe-router-group-topk: 2 + --moe-router-num-groups: 4 + --moe-router-topk-scaling-factor: 2.0 + --moe-router-score-function: sigmoid + --moe-router-enable-expert-bias: true + --moe-router-bias-update-rate: 1e-3 + --moe-router-dtype: fp32 + # Comment out the following MTP args to disable MTP + --mtp-num-layers: 1 + --mtp-loss-scaling-factor: 0.1 + # Add regularization args + --attention-dropout: 0.0 + --hidden-dropout: 
0.0 + --clip-grad: 1.0 + --weight-decay: 0.1 + # Add learning rate args + --lr-warmup-fraction: .01 + --lr: 0.00015 + --min-lr: 1.0e-5 + --lr-decay-style: cosine + --adam-beta1: 0.9 + --adam-beta2: 0.95 + # Add validation args + --eval-iters: 32 + --eval-interval: 200 + # Add initialization args + --init-method-std: 0.02 + # Training args + --global-batch-size: 32 + --train-iters: 50 + --exit-duration-in-mins: 230 + --no-check-for-nan-in-loss-and-grad: true + +METRICS: + - "lm loss" + - "num-zeros" + - "mem-allocated-bytes" + - "mem-max-allocated-bytes" + - "mtp_1 loss" + - "seq_load_balancing_loss" diff --git a/tests/functional_tests/test_cases/moe2.0/model_configs/qwen3_proxy.yaml b/tests/functional_tests/test_cases/moe2.0/model_configs/qwen3_proxy.yaml new file mode 100644 index 00000000000..46e298ec971 --- /dev/null +++ b/tests/functional_tests/test_cases/moe2.0/model_configs/qwen3_proxy.yaml @@ -0,0 +1,74 @@ +MODEL_ARGS: + # Data args + --seq-length: 4096 + --data-cache-path: ${DATA_CACHE_PATH} + --data-path: ${DATA_PATH}/text/the_pile/shard00/my-gpt3_00_text_document + --vocab-file: ${DATA_PATH}/text/the_pile/shard00/bpe/vocab.json + --merge-file: ${DATA_PATH}/text/the_pile/shard00/bpe/merges.txt + --split: 949,50,1 + # Add transformer base args + --num-layers: 16 + --hidden-size: 1024 + --normalization: RMSNorm + --norm-epsilon: 1e-6 + --disable-bias-linear: true + --max-position-embeddings: 4096 + --make-vocab-size-divisible-by: 3232 + --untie-embeddings-and-output-weights: true + # Add attention related args + --group-query-attention: true + --num-query-groups: 4 + --kv-channels: 128 + --qk-layernorm: true + --position-embedding-type: rope + --rotary-percent: 1.0 + --rotary-base: 1000000 + # Add MLP related args + --swiglu: true + --ffn-hidden-size: 4096 + # Add MoE args + --num-experts: 32 + --moe-layer-freq: ([0]*1+[1]*15) + --moe-ffn-hidden-size: 1024 + --moe-shared-expert-intermediate-size: 1024 + --moe-router-load-balancing-type: aux_loss + 
--moe-router-topk: 4 + --moe-router-pre-softmax: true + --moe-grouped-gemm: true + --moe-aux-loss-coeff: 1e-4 + --moe-router-group-topk: 2 + --moe-router-num-groups: 4 + --moe-router-topk-scaling-factor: 2.0 + --moe-router-score-function: sigmoid + --moe-router-enable-expert-bias: true + --moe-router-bias-update-rate: 1e-3 + --moe-router-dtype: fp32 + # Add regularization args + --attention-dropout: 0.0 + --hidden-dropout: 0.0 + --clip-grad: 1.0 + --weight-decay: 0.1 + # Add learning rate args + --lr-warmup-fraction: .01 + --lr: 0.00015 + --min-lr: 1.0e-5 + --lr-decay-style: cosine + --adam-beta1: 0.9 + --adam-beta2: 0.95 + # Add validation args + --eval-iters: 32 + --eval-interval: 200 + # Add initialization args + --init-method-std: 0.02 + # Training args + --global-batch-size: 32 + --train-iters: 50 + --exit-duration-in-mins: 230 + --no-check-for-nan-in-loss-and-grad: true + +METRICS: + - "lm loss" + - "num-zeros" + - "mem-allocated-bytes" + - "mem-max-allocated-bytes" + - "load_balancing_loss" diff --git a/tests/functional_tests/test_cases/moe2.0/runtime_configs/tp1pp1ep8.yaml b/tests/functional_tests/test_cases/moe2.0/runtime_configs/tp1pp1ep8.yaml new file mode 100644 index 00000000000..305e2847305 --- /dev/null +++ b/tests/functional_tests/test_cases/moe2.0/runtime_configs/tp1pp1ep8.yaml @@ -0,0 +1,41 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + PYTORCH_CUDA_ALLOC_CONF: expandable_segments:True + NCCL_NVLS_ENABLE: 0 + PYTHONWARNINGS: ignore + NCCL_DEBUG: VERSION + +MODEL_ARGS: + # Transformer Engine args + --transformer-impl: transformer_engine + # Distributed args + --distributed-timeout-minutes: 60 + --tensor-model-parallel-size: 1 + --pipeline-model-parallel-size: 1 + --expert-model-parallel-size: 8 + --context-parallel-size: 1 + --expert-tensor-parallel-size: 1 + --use-distributed-optimizer: true + --overlap-grad-reduce: true + --overlap-param-gather: true + # Use unfused attention since MLA with fused attention 
and deterministic mode leads to NaN + --attention-backend: unfused # TODO: switch back to fused attention after fix + --use-mcore-models: true + --sequence-parallel: true + --micro-batch-size: 4 + # MoE training related args + --moe-token-dispatcher-type: alltoall + --moe-permute-fusion: true + --save-interval: 25 + # Add mixed precision args + --bf16: true + --exit-interval: 50 + # kernel fusion related args + --no-rope-fusion: true + --cross-entropy-loss-fusion: true + --cross-entropy-fusion-impl: native + # MISC + --manual-gc: true + --manual-gc-interval: 100 +TEST_TYPE: resume-ckpt diff --git a/tests/functional_tests/test_cases/moe2.0/runtime_configs/tp2pp2ep4.yaml b/tests/functional_tests/test_cases/moe2.0/runtime_configs/tp2pp2ep4.yaml new file mode 100644 index 00000000000..b93862aff8c --- /dev/null +++ b/tests/functional_tests/test_cases/moe2.0/runtime_configs/tp2pp2ep4.yaml @@ -0,0 +1,55 @@ +ENV_VARS: + CUDA_DEVICE_MAX_CONNECTIONS: 1 + NVTE_ALLOW_NONDETERMINISTIC_ALGO: 0 + PYTORCH_CUDA_ALLOC_CONF: expandable_segments:True + NCCL_NVLS_ENABLE: 0 + PYTHONWARNINGS: ignore + NCCL_DEBUG: VERSION + +MODEL_ARGS: + # Transformer Engine args + --transformer-impl: transformer_engine + # Distributed args + --distributed-timeout-minutes: 60 + --tensor-model-parallel-size: 2 + --pipeline-model-parallel-size: 2 + --num-virtual-stages-per-pipeline-rank: 4 + --expert-model-parallel-size: 4 + --context-parallel-size: 1 + --expert-tensor-parallel-size: 1 + --use-distributed-optimizer: true + --overlap-grad-reduce: true + --overlap-param-gather: true + # Use unfused attention since MLA with fused attention and deterministic mode leads to NaN + --attention-backend: unfused # TODO: switch back to fused attention after fix + --use-mcore-models: true + --sequence-parallel: true + --micro-batch-size: 4 + # MoE training related args + --moe-token-dispatcher-type: alltoall + --moe-permute-fusion: true + # Add checkpointing args + --save: ${CHECKPOINT_SAVE_PATH} + --load: 
${CHECKPOINT_LOAD_PATH} + --save-interval: 25 + # Add logging args + --log-timers-to-tensorboard: true + --log-memory-to-tensorboard: true + --log-num-zeros-in-grad: true + --log-params-norm: true + --log-validation-ppl-to-tensorboard: true + --log-throughput: true + --log-interval: 1 + --logging-level: 40 + --tensorboard-dir: ${TENSORBOARD_PATH} + # Add mixed precision args + --bf16: true + --exit-interval: 50 + # kernel fusion related args + --no-rope-fusion: true + --cross-entropy-loss-fusion: true + --cross-entropy-fusion-impl: native + # MISC + --manual-gc: true + --manual-gc-interval: 100 +TEST_TYPE: resume-ckpt \ No newline at end of file diff --git a/tests/functional_tests/test_cases/t5/t5_release/model_config.yaml b/tests/functional_tests/test_cases/t5/t5_release/model_config.yaml index 852fbf9819d..6cfe215b80f 100644 --- a/tests/functional_tests/test_cases/t5/t5_release/model_config.yaml +++ b/tests/functional_tests/test_cases/t5/t5_release/model_config.yaml @@ -37,7 +37,7 @@ MODEL_ARGS: --pipeline-model-parallel-size: 1 # Data args --data-path: ${DATA_BLEND} - --vocab-file: ${DATA_PATH}/text/the_pile/t5_shard00/bert-large-cased-vocab.txt + --vocab-file: ${DATA_PATH}/bert-large-cased-vocab.txt --tokenizer-type: BertWordPieceCase --split: 99982,9,9 --data-cache-path: ${DATA_CACHE_PATH} @@ -58,6 +58,7 @@ MODEL_ARGS: --log-validation-ppl-to-tensorboard: true --timing-log-level: 0 --wandb-project: megatron-core-release-runs + --wandb-entity: adlr --wandb-exp-name: ${WANDB_EXPERIMENT} --wandb-save-dir: ${WANDB_SAVE_PATH} METRICS: diff --git a/tests/test_utils/python_scripts/download_golden_values.py b/tests/test_utils/python_scripts/download_golden_values.py index 423cd0ec254..158df867a64 100644 --- a/tests/test_utils/python_scripts/download_golden_values.py +++ b/tests/test_utils/python_scripts/download_golden_values.py @@ -113,7 +113,9 @@ def main(pipeline_id: int, only_failing: bool): shutil.move(golden_values_source, golden_values_target) else: - 
logger.info("Golden values for %s does not exist. Skip.", str(golden_values_source)) + logger.info( + "Golden values for %s does not exist. Skip.", str(golden_values_source) + ) shutil.rmtree("tmp") diff --git a/tests/test_utils/python_scripts/merge_config.py b/tests/test_utils/python_scripts/merge_config.py new file mode 100644 index 00000000000..176706038b7 --- /dev/null +++ b/tests/test_utils/python_scripts/merge_config.py @@ -0,0 +1,92 @@ +""" +Merges base_config, runtime_config and model_config into one final config that the CI can launch. + +Starting Dec 19th 2025 MCore CI supports a new format of defining tests. We are decoupling the test +config into a modular system of base_config, model_config and runtime_config. This allows us to +re-use and parametrize a given model easily with multiple runtime configs, like parallelism settings. + +With this DRY principle, we simplify test maintenance and reduce the amount of code duplication. + +This refactoring is fully compliant with the original CI system as we merge the three configs into one +final config that the CI can launch. + +Precedence: Base config > Model config > Runtime config. 
+ +Usage: + +python merge_config.py \ + --model_config model_config.yaml \ + --base_config base_config.yaml \ + --runtime_config runtime_config.yaml \ + --output_config output_config.yaml +""" + +import logging + +import click +import yaml + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +@click.command() +@click.option("--model_config", type=str, help="Model config to merge") +@click.option("--base_config", type=str, help="Base config to merge") +@click.option("--runtime_config", type=str, help="Run time config to merge") +@click.option("--output_config", type=str, help="Output config to merge") +def main(model_config, base_config, runtime_config, output_config): + + with open(model_config, "r") as f: + model_config = yaml.safe_load(f) + with open(base_config, "r") as f: + base_config = yaml.safe_load(f) + with open(runtime_config, "r") as f: + runtime_config = yaml.safe_load(f) + + config = {} + + # Collect all top-level keys (ENV_VARS, MODEL_ARGS, etc.) + all_keys = set(base_config.keys()) | set(model_config.keys()) | set(runtime_config.keys()) + + for key in all_keys: + base_val = base_config.get(key) + model_val = model_config.get(key) + runtime_val = runtime_config.get(key) + + # Get first non-None value to check type + first_val = base_val or model_val or runtime_val + + if isinstance(first_val, dict): + # Merge dicts + config[key] = {} + for val in [base_val, model_val, runtime_val]: + if val: + config[key].update(val) + elif isinstance(first_val, list): + # Concatenate lists (deduplicate while preserving order) + config[key] = [] + seen = set() + for val in [base_val, model_val, runtime_val]: + if val: + for item in val: + if item not in seen: + config[key].append(item) + seen.add(item) + else: + # Scalar value (string, int, bool, etc.) 
- use last defined + if runtime_val is not None: + config[key] = runtime_val + elif model_val is not None: + config[key] = model_val + else: + config[key] = base_val + + with open(output_config, "w") as f: + yaml.dump(config, f) + + logger.info(f"Config merged and saved to {output_config}") + + +if __name__ == "__main__": + main() diff --git a/tests/test_utils/python_scripts/recipe_parser.py b/tests/test_utils/python_scripts/recipe_parser.py index 480b2dca8ae..d21551c6c46 100644 --- a/tests/test_utils/python_scripts/recipe_parser.py +++ b/tests/test_utils/python_scripts/recipe_parser.py @@ -39,14 +39,34 @@ def resolve_cluster_config(cluster: str) -> str: def flatten_products(workload_manifest: dotdict) -> dotdict: """Flattens a nested dict of products""" - workload_manifest.products = [ - dict(**dict(zip(inp.keys(), values)), **{"test_case": product["test_case"][0]}) - for product in (workload_manifest.products or []) - if "products" in product - for inp in product["products"] - for values in itertools.product(*inp.values()) - ] - + expanded_products = [] + + for product in workload_manifest.products or []: + # Skip products that don't have nested product specifications + if "products" not in product: + continue + + test_case = product["test_case"][0] + + # Iterate over each input specification in the product + for inp in product["products"]: + # Generate all combinations of the input values (Cartesian product) + model_config = inp.pop("model_config", None) + runtime_config = inp.pop("runtime_config", None) + keys = inp.keys() + value_combinations = itertools.product(*inp.values()) + + # Create a flattened product dict for each combination + for values in value_combinations: + product_dict = dict(zip(keys, values)) + product_dict["test_case"] = test_case + if model_config: + product_dict["model_config"] = model_config + if runtime_config: + product_dict["runtime_config"] = runtime_config + expanded_products.append(product_dict) + + workload_manifest.products = 
expanded_products return workload_manifest @@ -89,11 +109,16 @@ def load_and_flatten(config_path: str) -> List[dotdict]: def filter_by_test_case(workload_manifests: List[dotdict], test_case: str) -> Optional[dotdict]: """Returns a workload with matching name. Raises an error if there no or more than a single workload.""" + print(len(workload_manifests)) workload_manifests = list( workload_manifest for workload_manifest in workload_manifests if workload_manifest["spec"]["test_case"] == test_case ) + print(len(workload_manifests)) + + for w in workload_manifests: + print(w["spec"]["test_case"]) if len(workload_manifests) > 1: logger.info("Duplicate test_case found!") diff --git a/tests/test_utils/recipes/gpt.yaml b/tests/test_utils/recipes/gpt.yaml index 90eddc55c27..a97a4d7bb38 100644 --- a/tests/test_utils/recipes/gpt.yaml +++ b/tests/test_utils/recipes/gpt.yaml @@ -110,14 +110,14 @@ products: - test_case: [gpt3_mcore_te_tp1_pp1_dist_optimizer_no_mmap_bin_files] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp1_pp1_dist_optimizer_fim_dataset] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_mcore_te_tp1_pp1_resume_torch_dist_dist_optimizer] products: @@ -129,7 +129,7 @@ products: - test_case: [gpt3_mcore_te_tp1_pp1_resume_torch_dist_uniform_full_recompute] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] @@ -142,28 +142,28 @@ products: - test_case: [gpt3_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] # - environment: [lts] # scope: [nightly] # outdated TE: #501 - test_case: [gpt3_mcore_te_tp1_pp2_resume_torch_dist_rope_embeddings_interleaved_no_fusion] products: - environment: [dev] - scope: [mr] + scope: [mr, 
mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp1_pp4_resume_torch_dist_disable_bias_linear] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] # - environment: [lts] # scope: [nightly] # non-determinism: #436 - test_case: [gpt3_mcore_te_tp1_pp4_resume_torch_dist_persistent_disable_bias_linear] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] @@ -177,28 +177,28 @@ products: - test_case: [gpt3_mcore_te_tp1_pp4_resume_torch_dist_untie_embeddings_and_outputs] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp1_pp4_vp1] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp1_pp4_vp1_resume_torch_decoupled_lr] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp1_pp4_vp1_resume_torch_dist_calculate_per_token_loss] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] @@ -212,7 +212,7 @@ products: - test_case: [gpt3_mcore_te_tp1_pp4_vp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] @@ -247,83 +247,83 @@ products: - test_case: [gpt3_mcore_te_tp1_pp4_vp1_uneven_pipeline] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp1_pp4_vp2_account_for_embedding_loss_in_pipeline_split] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] 
- environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp2_pp1_resume_torch_dist_cp2_nondeterministic] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp2_pp1_resume_torch_dist_multi_dist_optimizer_instances] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp2_pp2_cp2] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_mcore_te_tp2_pp2_cp2_etp4_dp_last] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_mcore_te_tp2_pp2_cp2_calculate_per_token_loss] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_mcore_te_tp2_pp2_cp2_etp4_calculate_per_token_loss_dp_last] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_mcore_te_tp2_pp2_cp2_nondeterministic] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp2_pp2_cp2_etp4_nondeterministic_dp_last] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp2_pp2_cp2_calculate_per_token_loss_nondeterministic] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp2_pp2_cp2_etp4_calculate_per_token_loss_nondeterministic_dp_last] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp2_pp2_cross_entropy_loss_fusion] products: - environment: [dev] - scope: 
[mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] @@ -331,14 +331,14 @@ products: - test_case: [gpt3_mcore_te_tp2_pp2_resume_torch_dist_cp2_nondeterministic] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp2_pp2_resume_torch_dist_cross_entropy_loss_fusion] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] @@ -350,26 +350,26 @@ products: - test_case: [gpt3_mcore_te_tp2_pp2_mla] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_mcore_te_tp2_pp2_resume_torch_dist_ddp_average_in_collective] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp2_pp2_resume_torch_dist_defer_embedding_wgrad_compute] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp2_pp2_resume_torch_dist] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] @@ -383,28 +383,28 @@ products: - test_case: [gpt3_mcore_te_tp2_pp2_resume_torch_dist_reshard_1x4xNone] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp4_pp1_resume_torch_dist_dist_optimizer_overlap_grad_reduce] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_mcore_te_tp4_pp1_resume_torch_dist_qk_layernorm_test_mode] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: 
[gpt3_mcore_tp2_pp2_uninstall_te] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] @@ -425,14 +425,14 @@ products: - test_case: [gpt3_mcore_te_tp2_zp_z3_resume_fsdp_dtensor] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] # - environment: [lts] # scope: [nightly] - test_case: [gpt3_mcore_te_tp2_pp1_modelopt_distill_resume] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] # - environment: [lts] # scope: [nightly] # Outdated: #502 @@ -469,7 +469,7 @@ products: - test_case: [gpt3_mcore_te_tp4_pp1_dist_optimizer_overlap_grad_reduce_param_gather] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] @@ -483,7 +483,7 @@ products: - test_case: [gpt3_mcore_te_tp4_pp2_resume_torch_dist_reshard_8x1xNone] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] diff --git a/tests/test_utils/recipes/mamba-static-inference.yaml b/tests/test_utils/recipes/mamba-static-inference.yaml index b36c4a8f765..ae9692b4edc 100644 --- a/tests/test_utils/recipes/mamba-static-inference.yaml +++ b/tests/test_utils/recipes/mamba-static-inference.yaml @@ -60,8 +60,8 @@ products: - environment: [dev] scope: [mr-broken, mr-github-broken] platforms: [dgx_h100] - - test_case: [hybrid_static_inference_tp1_pp1_2B_cudagraphs] - products: - - environment: [dev] - scope: [mr] - platforms: [dg x_h100] + # - test_case: [hybrid_static_inference_tp1_pp1_2B_cudagraphs] + # products: + # - environment: [dev] + # scope: [mr] + # platforms: [dgx_h100] # Broken after dev2main sync 01/27 diff --git a/tests/test_utils/recipes/mamba.yaml b/tests/test_utils/recipes/mamba.yaml index 47b731f7e00..456a6cbccf7 100644 --- a/tests/test_utils/recipes/mamba.yaml +++ b/tests/test_utils/recipes/mamba.yaml @@ -3,7 
+3,7 @@ format_version: 1 maintainers: [mcore] loggers: [stdout] spec: - name: '{test_case}_{environment}_{platforms}' + name: "{test_case}_{environment}_{platforms}" model: hybrid build: mcore-pyt-{environment} nodes: 1 @@ -58,7 +58,7 @@ products: - test_case: [hybrid_mr_mcore_te_tp1_pp1_cp1_dgx_a100_1N8G] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] # - environment: [lts] # disabled until triton is bumped # scope: [nightly] @@ -74,7 +74,7 @@ products: - test_case: [hybrid_mr_mcore_te_tp2_pp1_cp1_dgx_a100_1N8G] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] # - environment: [lts] # disabled until triton is bumped # scope: [nightly] diff --git a/tests/test_utils/recipes/moe.yaml b/tests/test_utils/recipes/moe.yaml index 06039d77440..10c1140ddf5 100644 --- a/tests/test_utils/recipes/moe.yaml +++ b/tests/test_utils/recipes/moe.yaml @@ -119,7 +119,7 @@ products: - test_case: [gpt3_mcore_te_tp1_pp2_resume_torch_dist_reshard_2x1x4_te_8experts2parallel_dist_optimizer] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] # - test_case: [gpt3_mcore_te_tp2_pp2_ep4_etp1_mtp_resume_torch_dist_fp8] # products: @@ -139,17 +139,17 @@ products: - test_case: [gpt3_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_dist_optimizer] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_mcore_te_tp2_pp1_resume_torch_dist_te_8experts2parallel_multi_dist_optimizer_instances] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_mcore_te_tp2_pp1_te_8experts2parallel_overlap_grad_reduce_param_gather_groupedGEMM] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_mcore_te_tp2_pp1_te_8experts_etp1_ep4] products: @@ -161,12 +161,12 @@ products: - test_case: 
[gpt3_mcore_te_tp2_pp1_te_a2a_ovlp_8experts_etp1_ep4] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_mcore_te_tp2_zp_z3_resume_torch_dist_te_8experts2parallel_top2router] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_moe_mcore_te_tp2_pp2_ep4_etp1_fine_grained_offloading] products: @@ -181,19 +181,19 @@ products: - test_case: [gpt3_mcore_te_tp2_pp1_te_8experts2parallel_ddp_average_in_collective] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - environment: [lts] scope: [nightly] - test_case: [gpt3_moe_mcore_te_ep8_resume_torch_dist_dist_optimizer] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_moe_mcore_te_tp4_ep2_etp2_pp2_scoped_cudagraph] products: - environment: [dev] - scope: [mr] + scope: [mr, mr-github] platforms: [dgx_h100] - test_case: [gpt3_moe_mcore_te_ep8_resume_torch_dist_dist_muon] products: diff --git a/tests/test_utils/recipes/bert.yaml b/tests/test_utils/recipes/moe2.0.yaml similarity index 59% rename from tests/test_utils/recipes/bert.yaml rename to tests/test_utils/recipes/moe2.0.yaml index 89499f93c5e..39fccd08c40 100644 --- a/tests/test_utils/recipes/bert.yaml +++ b/tests/test_utils/recipes/moe2.0.yaml @@ -3,14 +3,13 @@ format_version: 1 maintainers: [mcore] loggers: [stdout] spec: - name: '{test_case}_{environment}_{platforms}' - model: bert - nodes: 1 + name: "{test_case}_{environment}_{platforms}" + model: moe2.0 build: mcore-pyt-{environment} + nodes: 1 gpus: 8 + n_repeat: 5 platforms: dgx_a100 - time_limit: - n_repeat: script_setup: | unset https_proxy echo "machine gitlab-master.nvidia.com login okoenig password $RO_API_TOKEN" | tee -a /root/.netrc @@ -37,15 +36,24 @@ spec: script: |- ls cd /opt/megatron-lm + NAME=$(echo {test_case}_{environment} | sed 's/dgx_h100/dgx_a100/g') + + mkdir -p $(dirname 
./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml) + python ./tests/test_utils/python_scripts/merge_config.py \ + --base_config ./tests/functional_tests/test_cases/ci_base_config.yml \ + --model_config ./tests/functional_tests/test_cases/{model}/model_configs/{model_config}.yaml \ + --runtime_config ./tests/functional_tests/test_cases/{model}/runtime_configs/{runtime_config}.yaml \ + --output_config ./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml + ARGUMENTS=( "DATA_PATH=/mnt/artifacts" - "DATA_CACHE_PATH=/workspace/data/cache" + "DATA_CACHE_PATH=/workspace/data/cache" "OUTPUT_PATH={assets_dir}" "TENSORBOARD_PATH={assets_dir}/tensorboard" "CHECKPOINT_SAVE_PATH={artifacts_dir}/checkpoints" "CHECKPOINT_LOAD_PATH=/mnt/artifacts" - "TRAINING_SCRIPT_PATH=pretrain_bert.py" + "TRAINING_SCRIPT_PATH=pretrain_gpt.py" "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}_{platforms}.json" "N_REPEAT={n_repeat}" @@ -56,38 +64,49 @@ spec: bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} products: - - test_case: [bert_mcore_tp2_pp2] + ########################### + # Merge train tests # + ########################### + - test_case: [dsv3_tp1pp1ep8] products: - - environment: [dev] - scope: [nightly] + - model_config: dsv3_proxy + runtime_config: tp1pp1ep8 + environment: [dev] + scope: [nightly-broken] platforms: [dgx_h100] - - test_case: [bert_mcore_tp2_pp2_local_spec] + - test_case: [dsv3_tp2pp2ep4] products: - - environment: [dev] - scope: [nightly] + - model_config: dsv3_proxy + runtime_config: tp2pp2ep4 + environment: [dev] + scope: [nightly-broken] platforms: [dgx_h100] - - test_case: [bert_mcore_tp2_pp2_resume_torch_dist] + - test_case: [qwen3_tp1pp1ep1] products: - - environment: [dev] - scope: [nightly] + - model_config: qwen3_proxy + runtime_config: 
tp1pp1ep1 + environment: [dev] + scope: [nightly-broken] platforms: [dgx_h100] - - test_case: [bert_mcore_tp2_pp2_resume_torch_dist_local_spec] + - test_case: [qwen3_tp2pp2ep4] products: - - environment: [dev] - scope: [nightly] + - model_config: qwen3_proxy + runtime_config: tp2pp2ep4 + environment: [dev] + scope: [nightly-broken] platforms: [dgx_h100] - test_case: [bert_mcore_tp1_pp2] products: - environment: [dev] - scope: [nightly] + scope: [nightly-broken] platforms: [dgx_h100] - test_case: [bert_mcore_tp1_pp4_vp2] products: - environment: [dev] - scope: [nightly] + scope: [nightly-broken] platforms: [dgx_h100] - test_case: [bert_mcore_tp4_pp1] products: - environment: [dev] - scope: [nightly] + scope: [nightly-broken] platforms: [dgx_h100] diff --git a/tests/test_utils/recipes/t5.yaml b/tests/test_utils/recipes/t5.yaml deleted file mode 100644 index 1761cd3f1e6..00000000000 --- a/tests/test_utils/recipes/t5.yaml +++ /dev/null @@ -1,113 +0,0 @@ -type: basic -format_version: 1 -maintainers: [mcore] -loggers: [stdout] -spec: - name: '{test_case}_{environment}_{platforms}' - model: t5 - build: mcore-pyt-{environment} - nodes: 1 - gpus: 8 - platforms: dgx_a100 - script_setup: | - unset https_proxy - echo "machine gitlab-master.nvidia.com login okoenig password $RO_API_TOKEN" | tee -a /root/.netrc - - # Checkout latest - cd /opt - rm -rf /opt/megatron-lm; mkdir megatron-lm; cd megatron-lm - git init - git remote add origin $MCORE_REPO - git fetch origin '+refs/merge-requests/*:refs/remotes/merge-requests/*' - git fetch origin $MCORE_MR_COMMIT - git checkout $MCORE_MR_COMMIT - git rev-parse HEAD - - # Checkout backwards-ref - cd /opt - rm -rf /opt/megatron-lm-legacy; mkdir megatron-lm-legacy; cd megatron-lm-legacy - git init - git remote add origin $MCORE_REPO - git fetch origin $MCORE_BACKWARDS_COMMIT - git checkout $MCORE_BACKWARDS_COMMIT - git rev-parse HEAD - rm -rf megatron; cp -a /opt/megatron-lm/megatron ./ - script: |- - ls - cd /opt/megatron-lm - - 
NAME=$(echo {test_case}_{environment} | sed 's/dgx_h100/dgx_a100/g') - - ARGUMENTS=( - "DATA_PATH=/mnt/artifacts" - "DATA_CACHE_PATH=/workspace/data/cache" - "OUTPUT_PATH={assets_dir}" - "TENSORBOARD_PATH={assets_dir}/tensorboard" - "CHECKPOINT_SAVE_PATH={artifacts_dir}/checkpoints" - "CHECKPOINT_LOAD_PATH=/mnt/artifacts" - "TRAINING_SCRIPT_PATH=pretrain_t5.py" - "TRAINING_PARAMS_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/model_config.yaml" - "GOLDEN_VALUES_PATH=./tests/functional_tests/test_cases/{model}/{test_case}/golden_values_{environment}_{platforms}.json" - "N_REPEAT={n_repeat}" - "ENABLE_LIGHTWEIGHT_MODE=${{ENABLE_LIGHTWEIGHT_MODE}}" - "RECORD_CHECKPOINTS=${{RECORD_CHECKPOINTS}}" - ) - - bash ./tests/functional_tests/shell_test_utils/run_ci_test.sh ${{ARGUMENTS[@]}} - -products: - - test_case: [t5_11b_mcore_tp4_pp1] - products: - - environment: [dev] - scope: [nightly] - platforms: [dgx_h100] - - test_case: [t5_mcore_te_tp4_pp1] - products: - - environment: [dev] - scope: [nightly] - platforms: [dgx_h100] - - test_case: [t5_mcore_te_tp4_pp1_resume_torch_dist] - products: - - environment: [dev] - scope: [nightly] - platforms: [dgx_h100] - - test_case: [t5_mcore_tp4_pp1] - products: - - environment: [dev] - scope: [nightly] - platforms: [dgx_h100] - - test_case: [t5_mcore_tp4_pp1_resume_torch_dist] - products: - - environment: [dev] - scope: [nightly] - platforms: [dgx_h100] - - test_case: [t5_mcore_te_tp1_pp1_vp1_resume_torch] - products: - - environment: [dev] - scope: [nightly] - platforms: [dgx_a100, dgx_h100] - - test_case: [t5_mcore_te_tp2_pp1_vp1] - products: - - environment: [dev] - scope: [nightly] - platforms: [dgx_a100, dgx_h100] - - test_case: [t5_mcore_te_tp2_pp1_vp1_sequence_parallel] - products: - - environment: [dev] - scope: [nightly] - platforms: [dgx_a100, dgx_h100] - - test_case: [t5_mcore_tp1_pp1_vp1] - products: - - environment: [dev] - scope: [nightly] - platforms: [dgx_a100, dgx_h100] - - test_case: 
[t5_mcore_tp1_pp1_vp1_resume_torch] - products: - - environment: [dev] - scope: [nightly] - platforms: [dgx_a100, dgx_h100] - - test_case: [t5_mcore_tp2_pp1_vp1] - products: - - environment: [dev] - scope: [nightly] - platforms: [dgx_a100, dgx_h100] diff --git a/tests/unit_tests/distributed/test_param_and_grad_buffer.py b/tests/unit_tests/distributed/test_param_and_grad_buffer.py index 609b2cc5a71..b60dfb1791b 100644 --- a/tests/unit_tests/distributed/test_param_and_grad_buffer.py +++ b/tests/unit_tests/distributed/test_param_and_grad_buffer.py @@ -163,6 +163,59 @@ def _pad_param_if_needed(numel_unpadded): Utils.destroy_model_parallel() +def test_param_to_index_alignment_with_padding(): + """Ensure bucket-local param offsets honor padding when DistOpt pads params.""" + Utils.initialize_model_parallel() + + # With input_dim=4, output_dim=4: + # - weight: 4*4 = 16 elements + # - bias: 4 elements + # Since 16 % 64 != 0, the bias must be padded away from the weight, + # making padding observable. + input_dim = 4 + output_dim = 4 + model, param_and_grad_buffer, _ = get_model_and_buffers( + input_dim=input_dim, + output_dim=output_dim, + num_layers=1, + bias=True, + shared_embedding=False, + bucket_size=None, # single bucket + use_distributed_optimizer=True, # enforces 64-element alignment + overlap_grad_reduce=True, + average_in_collective=False, + ) + + bucket = param_and_grad_buffer.buckets[0] + naive_offset = 0 + padding_observed = False + + for param in bucket.params_list: + global_start, global_end, _ = param_and_grad_buffer.param_index_map[param] + expected_local_start = global_start - bucket.offset + expected_local_end = global_end - bucket.offset + local_start, local_end = bucket.param_to_index[param] + + # param_to_index should match the padded offsets used in the global buffer. + assert (local_start, local_end) == (expected_local_start, expected_local_end) + + # At least one param should have been padded relative to naive packing. 
+ if local_start != naive_offset: + padding_observed = True + naive_offset = local_end + + # Verify the slice retrieved via param_to_index matches param.data view. + param_slice = bucket.param_data.view(-1)[local_start:local_end] + torch.testing.assert_close(param_slice, param.data.view(-1)) + + assert padding_observed, ( + "Expected padding to be applied between params. " + "Ensure model dimensions are chosen such that param sizes are not multiples of 64." + ) + + Utils.destroy_model_parallel() + + @pytest.mark.parametrize("use_distributed_optimizer", [False, True]) @pytest.mark.parametrize("overlap_grad_reduce", [False, True]) @pytest.mark.parametrize("average_in_collective", [False, True]) diff --git a/tests/unit_tests/fusions/test_fused_linear_cross_entropy.py b/tests/unit_tests/fusions/test_fused_linear_cross_entropy.py new file mode 100644 index 00000000000..3ac8e7f6200 --- /dev/null +++ b/tests/unit_tests/fusions/test_fused_linear_cross_entropy.py @@ -0,0 +1,1509 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. + +import contextlib +import os +import typing +from contextlib import ExitStack +from dataclasses import dataclass + +import numpy as np +import pytest +import torch +import torch.distributed as dist +from torch.utils.data import DataLoader, Dataset +from torch.utils.data.distributed import DistributedSampler + +import megatron.core.parallel_state as ps +from megatron.core.fusions.fused_linear_cross_entropy import linear_cross_entropy +from megatron.core.models.gpt.gpt_layer_specs import ( + get_gpt_decoder_block_spec, + get_gpt_mtp_block_spec, +) +from megatron.core.models.gpt.gpt_model import GPTModel +from megatron.training.utils import get_device_arch_version +from tests.unit_tests.a2a_overlap.utils import ( + deterministic_mode, + get_test_config, + get_valid_fp8_flags, + get_valid_token_dispatcher_types, +) +from tests.unit_tests.test_utilities import Utils + + +# 1. 
Define a standardized context to hold your distributed info +@dataclass +class DistContext: + rank: int + world_size: int + group: dist.ProcessGroup + is_chief: bool + + +# 2. Create a module-scoped fixture +# This runs ONE time per file, no matter how many test classes you have. +@pytest.fixture(scope="module") +def distributed_context(): + # --- PRE-CHECK --- + if "WORLD_SIZE" not in os.environ or int(os.environ["WORLD_SIZE"]) < 2: + pytest.skip("Requires torchrun with multiple GPUs (WORLD_SIZE >= 2)") + + # --- SETUP --- + is_external_init = dist.is_initialized() + + if not is_external_init: + # Initialize only if not already done (e.g., by another test runner) + dist.init_process_group( + backend="nccl", + init_method="env://", + world_size=int(os.environ["WORLD_SIZE"]), + rank=int(os.environ["RANK"]), + ) + + # Set device immediately to avoid cross-device pollution + local_rank = int(os.environ.get("LOCAL_RANK", os.environ["RANK"])) + device = torch.device(f"cuda:{local_rank}") + torch.cuda.set_device(device) + + # Gather context data + rank = dist.get_rank() + world_size = dist.get_world_size() + group = dist.group.WORLD + + print(f"[INFO]: Initialized Rank: {rank} / {world_size}") + + context = DistContext(rank=rank, world_size=world_size, group=group, is_chief=(rank == 0)) + + # Yield control to the tests + yield context + + # --- TEARDOWN --- + # Only destroy if we were the ones who initialized it + if not is_external_init: + dist.destroy_process_group() + + +class MockDataset(Dataset): + """ + Mock dataset for torchtitan GPT training tests + Generates synthetic tokenized sequences on-the-fly + """ + + def __init__( + self, + num_samples=10000, + micro_batch_size=4, + sequence_length=2048, + vocab_size=128256, + seed=42, + ): + """ + Initialize mock dataset + + Args: + num_samples: Total number of samples + sequence_length: Length of each sequence + vocab_size: Size of vocabulary + seed: Random seed for reproducibility + """ + self.num_samples = 
num_samples + self.micro_batch_size = micro_batch_size + self.sequence_length = sequence_length + self.vocab_size = vocab_size + self.seed = seed + + # Set numpy seed for deterministic generation + np.random.seed(seed) + + def __len__(self): + return self.num_samples + + def __getitem__(self, idx): + """ + Generate a single training sample + + Returns: + dict with 'tokens' and 'labels' + """ + # Use idx as seed for reproducible but varied samples + rng = np.random.RandomState(self.seed + idx) + + # Generate random token sequence + tokens = rng.randint(0, self.vocab_size, size=self.sequence_length, dtype=np.int64) + + # Labels are tokens shifted by 1 (next token prediction) + labels = rng.randint(0, self.vocab_size, size=self.sequence_length, dtype=np.int64) + + return { + 'input_ids': torch.from_numpy(tokens.copy()), + 'labels': torch.from_numpy(labels.copy()), + "attention_mask": torch.ones( + (1, self.sequence_length, self.sequence_length), dtype=bool + ), + } + + +def build_model(config): + max_seq_len = 300 + + # build layer spec + transformer_layer_spec = get_gpt_decoder_block_spec(config=config, use_transformer_engine=True) + mtp_block_spec = get_gpt_mtp_block_spec(config, transformer_layer_spec.layer_specs[-1], True) + + # build model + gpt_model = GPTModel( + config=config, + transformer_layer_spec=transformer_layer_spec, + mtp_block_spec=mtp_block_spec, + vocab_size=100, + pre_process=True, + post_process=True, + max_sequence_length=max_seq_len, + ) + return gpt_model + + +# Define a reusable context manager +@contextlib.contextmanager +def init_model_parallel(tp=1, pp=1, ep=1): + try: + Utils.initialize_model_parallel( + tensor_model_parallel_size=tp, + pipeline_model_parallel_size=pp, + expert_model_parallel_size=ep, + ) + yield + finally: + Utils.destroy_model_parallel() + + +def init_gpt_dataloader( + dp_group, micro_batch_size=1, vocab_size=50257, sequence_length=128, batch_size=8 +): + dataset = MockDataset( + num_samples=1000, + 
micro_batch_size=micro_batch_size, + sequence_length=sequence_length, + vocab_size=vocab_size, + seed=42, + ) + sampler = DistributedSampler(dataset, num_replicas=dp_group.size(), rank=dp_group.rank()) + dataloader = DataLoader(dataset, batch_size=batch_size, sampler=sampler) + return dataloader + + +# skip it for good +@pytest.mark.skipif( + ("WORLD_SIZE" not in os.environ or int(os.environ["WORLD_SIZE"]) < 2) or True, + reason="Requires torchrun with multiple GPUs", +) +class TestFusedLinearCrossEntropyOnGptModel: + @pytest.mark.parametrize("fp8_flag", get_valid_fp8_flags()) + @pytest.mark.parametrize("mtp_layers", [0, 1]) + @pytest.mark.parametrize("dispatcher_type", get_valid_token_dispatcher_types()) + @pytest.mark.parametrize("layer_num", [2]) + def test_gpt_model(self, mtp_layers, dispatcher_type, fp8_flag, layer_num): + with ExitStack() as stack: + gpu_count = torch.cuda.device_count() + tp = min(2, gpu_count) + ep = gpu_count // tp + stack.enter_context(init_model_parallel(tp=tp, ep=ep)) + stack.enter_context(deterministic_mode()) + + # create TransformerConfig + extra_kwargs = { + "moe_token_dispatcher_type": dispatcher_type, + "sequence_parallel": tp > 1, + "tensor_model_parallel_size": tp, + } + if dispatcher_type == "flex": + extra_kwargs["moe_enable_deepep"] = True + extra_kwargs["moe_router_dtype"] = "fp32" + if fp8_flag is not None: + extra_kwargs["fp8"] = fp8_flag[0] + extra_kwargs["fp8_recipe"] = fp8_flag[1] + if mtp_layers > 0: + extra_kwargs["mtp_num_layers"] = mtp_layers + extra_kwargs["mtp_loss_scaling_factor"] = 1.1 + + # build config + config = get_test_config(num_layers=layer_num, extra_kwargs=extra_kwargs) + config.expert_model_parallel_size = ep + + # build model + gpt_model = build_model(config) + gpt_model.cuda() + + dataloader = init_gpt_dataloader( + ps.get_data_parallel_group(), + vocab_size=gpt_model.vocab_size, + micro_batch_size=1, + sequence_length=gpt_model.max_sequence_length, + batch_size=4, + ) + # for batch in dataloder: + 
for batch in dataloader: + batch["position_ids"] = torch.arange( + gpt_model.max_sequence_length, dtype=torch.int64 + ) + batch = {k: v.cuda() for k, v in batch.items()} + gpt_model.zero_grad() + output = gpt_model(**batch) + loss = output.sum() + loss.backward() + + +@pytest.mark.skipif( + "WORLD_SIZE" in os.environ and os.environ["WORLD_SIZE"] != "1", reason="Requires single GPU" +) +@pytest.mark.skipif(get_device_arch_version() != 10, reason="Requires GPU architecture = 10") +class TestFusedLinearCrossEntropyDataParallel: + def cleanup(self): + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + import gc + + gc.collect() + torch.cuda.synchronize() + + @staticmethod + def torch_linear_cross_entropy( + hidden: torch.Tensor, + weight: torch.Tensor, + labels: torch.Tensor, + reduction: str, + ignore_index: int, + ): + # NOTE: need to convert to fp32 to fp32 accumulation, + # thus assure accuracy + logits = hidden.to(torch.float32) @ weight.T.to(torch.float32) + logprobs = torch.nn.functional.cross_entropy( + logits.view(-1, logits.shape[-1]), + labels.view(-1), + reduction=reduction, + ignore_index=ignore_index, + ) + return logprobs.to(torch.float32) + + @staticmethod + def get_problems(): + return [ + (80, 125, 64), + (80, 152064, 64), + (1024, 152064, 4096), + (4096, 152063, 8192), + ((1, 4096), 152064, 8192), + ((2, 4096), 152064, 8192), + ] + + @staticmethod + def get_ignore_index(): + return [-100, 4] + + def test_kernel_launch(self): + """ + Check if the compiled kernel can be + launched with different problem sizes + """ + self.cleanup() + + num_tokens = [15, 26, 128, 513, 2048, 8192] + vocab_size = 152064 + dim = 4096 + dtype = torch.bfloat16 + reduction = "mean" + ignore_index = -100 + + weight = torch.randn(vocab_size, dim, dtype=dtype, device="cuda").requires_grad_() + for num_token in num_tokens: + hidden = torch.randn(num_token, dim, dtype=dtype, device="cuda").requires_grad_() + labels = torch.randint(0, vocab_size, (num_token,), 
dtype=torch.long, device="cuda") + + logprobs = linear_cross_entropy( + hidden, weight, labels, reduction=reduction, ignore_index=ignore_index + ) + assert not torch.isnan(logprobs).any() + + gLogprobs = torch.randn_like(logprobs) + (d_hidden, d_weight) = torch.autograd.grad( + (logprobs,), (hidden, weight), (gLogprobs,), retain_graph=False + ) + assert not torch.isnan(d_hidden).any() + assert not torch.isnan(d_weight).any() + + @pytest.mark.parametrize("dtype", [torch.bfloat16, torch.float16]) + @pytest.mark.parametrize("problem", get_problems()) + @pytest.mark.parametrize("reduction", ["none", "mean", "sum"]) + @pytest.mark.parametrize("ignore_index", get_ignore_index()) + def test_correctness(self, dtype, problem, reduction, ignore_index): + num_tokens, vocabsize, dim = problem + hidden_shape = (num_tokens, dim) if isinstance(num_tokens, int) else (*num_tokens, dim) + labels_shape = (num_tokens,) if isinstance(num_tokens, int) else num_tokens + + hidden = ( + torch.empty(hidden_shape, dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, labels_shape, dtype=torch.long, device="cuda") + if ignore_index >= 0 and ignore_index < vocabsize: + pad_labels = torch.nn.functional.pad(labels, (0, 1), value=ignore_index) + labels = pad_labels[..., 1:].contiguous() + + # forward + torch_logprobs = self.torch_linear_cross_entropy( + hidden, weight, labels, reduction=reduction, ignore_index=ignore_index + ) + + custom_logprobs = linear_cross_entropy( + hidden, weight, labels, reduction=reduction, ignore_index=ignore_index + ) + + torch.testing.assert_close(torch_logprobs, custom_logprobs) + + # backward + g_logprobs = torch.empty_like(torch_logprobs).uniform_(-0.1, 0.1) + + (d_torch_hidden, d_torch_weight) = torch.autograd.grad( + (torch_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False 
+ ) + + (d_custom_hidden, d_custom_weight) = torch.autograd.grad( + (custom_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + + torch.testing.assert_close(d_torch_hidden, d_custom_hidden, atol=1e-3, rtol=1e-3) + torch.testing.assert_close(d_torch_weight, d_custom_weight, atol=1e-3, rtol=1e-3) + + @pytest.mark.parametrize("problem", [((1, 4096), 129280, 7168)]) + @pytest.mark.parametrize("dtype", [torch.bfloat16]) + @pytest.mark.parametrize("reduction", ["mean"]) + @pytest.mark.parametrize("ignore_index", [-100]) + def test_performance(self, problem, dtype, reduction, ignore_index): + num_tokens, vocabsize, dim = problem + hidden_shape = (num_tokens, dim) if isinstance(num_tokens, int) else (*num_tokens, dim) + labels_shape = (num_tokens,) if isinstance(num_tokens, int) else num_tokens + + start_event = torch.cuda.Event(enable_timing=True) + end_event = torch.cuda.Event(enable_timing=True) + + torch_fwd_latency = list() + torch_bwd_latency = list() + custom_fwd_latency = list() + custom_bwd_latency = list() + + iterations = 5 + for i in range(iterations): + hidden = ( + torch.empty(hidden_shape, dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, labels_shape, dtype=torch.long, device="cuda") + if ignore_index >= 0 and ignore_index < vocabsize: + pad_labels = torch.nn.functional.pad(labels, (0, 1), value=ignore_index) + labels = pad_labels[..., 1:].contiguous() + + # -------- forward -------- # + start_event.record() + torch_logprobs = self.torch_linear_cross_entropy( + hidden, weight, labels, reduction=reduction, ignore_index=ignore_index + ) + end_event.record() + torch.cuda.synchronize() + torch_fwd_latency.append(start_event.elapsed_time(end_event)) + + start_event.record() + custom_logprobs = linear_cross_entropy( + hidden, weight, labels, reduction=reduction, 
ignore_index=ignore_index + ) + end_event.record() + torch.cuda.synchronize() + custom_fwd_latency.append(start_event.elapsed_time(end_event)) + + # -------- backward -------- # + g_logprobs = torch.empty_like(torch_logprobs).uniform_(-0.1, 0.1) + + start_event.record() + (d_torch_hidden, d_torch_weight) = torch.autograd.grad( + (torch_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + end_event.record() + torch.cuda.synchronize() + torch_bwd_latency.append(start_event.elapsed_time(end_event)) + + start_event.record() + (d_custom_hidden, d_custom_weight) = torch.autograd.grad( + (custom_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + end_event.record() + torch.cuda.synchronize() + custom_bwd_latency.append(start_event.elapsed_time(end_event)) + + # --- remove first latency due to warmup --- # + torch_fwd_latency = torch_fwd_latency[1:] + torch_bwd_latency = torch_bwd_latency[1:] + custom_fwd_latency = custom_fwd_latency[1:] + custom_bwd_latency = custom_bwd_latency[1:] + + print() + print(f"[INFO]: On problem {problem}, dtype {dtype}, reduction {reduction}:") + print( + f"[INFO]: Torch forward latency: {sum(torch_fwd_latency) / len(torch_fwd_latency):.2f} ms" + ) + print( + f"[INFO]: Custom forward latency: {sum(custom_fwd_latency) / len(custom_fwd_latency):.2f} ms" + ) + print( + f"[INFO]: Torch backward latency: {sum(torch_bwd_latency) / len(torch_bwd_latency):.2f} ms" + ) + print( + f"[INFO]: Custom backward latency: {sum(custom_bwd_latency) / len(custom_bwd_latency):.2f} ms" + ) + + @pytest.mark.parametrize("problem", [((1, 4096), 129280, 7168)]) + @pytest.mark.parametrize("dtype", [torch.bfloat16]) + @pytest.mark.parametrize("reduction", ["mean"]) + @pytest.mark.parametrize("ignore_index", [-100]) + def test_storage(self, problem, dtype, reduction, ignore_index): + num_tokens, vocabsize, dim = problem + hidden_shape = (num_tokens, dim) if isinstance(num_tokens, int) else (*num_tokens, dim) + labels_shape = (num_tokens,) 
if isinstance(num_tokens, int) else num_tokens + print() + print(f"[INFO]: On problem {problem}, dtype {dtype}, reduction {reduction}:") + + def torch_storage(): + hidden = ( + torch.empty(hidden_shape, dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, labels_shape, dtype=torch.long, device="cuda") + if ignore_index >= 0 and ignore_index < vocabsize: + pad_labels = torch.nn.functional.pad(labels, (0, 1), value=ignore_index) + labels = pad_labels[..., 1:].contiguous() + + torch.cuda.reset_peak_memory_stats() + torch_logprobs = self.torch_linear_cross_entropy( + hidden, weight, labels, reduction=reduction, ignore_index=ignore_index + ) + torch.cuda.synchronize() + torch_max_memory = torch.cuda.max_memory_allocated() / 1024 / 1024 + print(f"[INFO]: Torch Forward pass peak memory: {torch_max_memory:.2f} MB") + + torch.cuda.reset_peak_memory_stats() + g_logprobs = torch.empty_like(torch_logprobs).uniform_(-0.1, 0.1) + (d_torch_hidden, d_torch_weight) = torch.autograd.grad( + (torch_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + torch.cuda.synchronize() + torch_backward_max_memory = torch.cuda.max_memory_allocated() / 1024 / 1024 + print(f"[INFO]: Torch Backward pass peak memory: {torch_backward_max_memory:.2f} MB") + + def custom_storage(): + hidden = ( + torch.empty(hidden_shape, dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, labels_shape, dtype=torch.long, device="cuda") + if ignore_index >= 0 and ignore_index < vocabsize: + pad_labels = torch.nn.functional.pad(labels, (0, 1), value=ignore_index) + labels = pad_labels[..., 1:].contiguous() + + torch.cuda.reset_peak_memory_stats() + 
custom_logprobs = linear_cross_entropy( + hidden, weight, labels, reduction=reduction, ignore_index=ignore_index + ) + torch.cuda.synchronize() + custom_max_memory = torch.cuda.max_memory_allocated() / 1024 / 1024 + print(f"[INFO]: Custom Forward pass peak memory: {custom_max_memory:.2f} MB") + + torch.cuda.reset_peak_memory_stats() + g_logprobs = torch.empty_like(custom_logprobs).uniform_(-0.1, 0.1) + (d_custom_hidden, d_custom_weight) = torch.autograd.grad( + (custom_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + torch.cuda.synchronize() + custom_backward_max_memory = torch.cuda.max_memory_allocated() / 1024 / 1024 + print(f"[INFO]: Custom Backward pass peak memory: {custom_backward_max_memory:.2f} MB") + + self.cleanup() + torch_storage() + self.cleanup() + custom_storage() + + +@pytest.mark.skipif( + ("WORLD_SIZE" not in os.environ or int(os.environ["WORLD_SIZE"]) < 2), # or True, + reason="Requires torchrun with multiple GPUs", +) +@pytest.mark.skipif(get_device_arch_version() != 10, reason="Requires GPU architecture = 10") +@pytest.mark.usefixtures("distributed_context") +class TestFusedLinearCrossEntropyTensorParallel: + @pytest.fixture(autouse=True) + def setup_attrs(self, distributed_context): + """ + Setup attributes for the test class. 
+ """ + self.tp_group = distributed_context.group + self.tp_rank = distributed_context.rank + self.tp_world_size = distributed_context.world_size + self.is_chief = distributed_context.is_chief + + def cleanup(self): + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + import gc + + gc.collect() + torch.cuda.synchronize() + + @staticmethod + def torch_linear_cross_entropy_single_gpu( + hidden: torch.Tensor, + weight: torch.Tensor, + labels: torch.Tensor, + reduction: typing.Optional[str] = "mean", + ): + logits = hidden.to(torch.float32) @ weight.T.to(torch.float32) + logprobs = torch.nn.functional.cross_entropy( + logits.view(-1, logits.shape[-1]), labels.view(-1), reduction=reduction + ) + return logprobs.to(torch.float32) + + class TorchLinearCrossEntropy(torch.autograd.Function): + @staticmethod + def forward( + ctx, + hidden: torch.Tensor, + weight: torch.Tensor, + labels: torch.Tensor, + tp_group: torch.distributed.ProcessGroup, + reduction: typing.Optional[str] = "mean", + ): + tp_rank = 0 if tp_group is None else torch.distributed.get_rank(tp_group) + tp_world_size = 1 if tp_group is None else torch.distributed.get_world_size(tp_group) + + logits = hidden.to(torch.float32) @ weight.T.to(torch.float32) + + whole_logits = torch.empty( + (logits.shape[0], logits.shape[-1] * tp_world_size), + dtype=logits.dtype, + device=logits.device, + ) + whole_logits_ref = [ + whole_logits[..., i * logits.shape[-1] : (i + 1) * logits.shape[-1]] + for i in range(tp_world_size) + ] + dist.all_gather(whole_logits_ref, logits, group=tp_group) + + logprobs = torch.nn.functional.cross_entropy( + whole_logits.view(-1, whole_logits.shape[-1]), labels.view(-1), reduction=reduction + ) + + # If we don't preserve whole_logits, + # we need to re-compute it in the backward pass + ctx.save_for_backward(hidden, weight, labels) + ctx.tp_group = tp_group + ctx.reduction = reduction + ctx.tp_rank = tp_rank + ctx.tp_world_size = tp_world_size + + return 
logprobs.to(torch.float32) + + @staticmethod + def backward(ctx, g_logprobs: torch.Tensor): + hidden, weight, labels = ctx.saved_tensors + tp_group = ctx.tp_group + reduction = ctx.reduction + tp_rank = ctx.tp_rank + tp_world_size = ctx.tp_world_size + + num_tokens, dim = hidden.shape + + if reduction == "mean": + _g_logprobs = torch.broadcast_to(g_logprobs / num_tokens, (num_tokens,)) + elif reduction == "sum": + _g_logprobs = torch.broadcast_to(g_logprobs, (num_tokens,)) + else: + _g_logprobs = g_logprobs + + # re-compute whole_logits + logits = hidden.to(torch.float32) @ weight.T.to(torch.float32) + whole_logits = torch.empty( + (logits.shape[0], logits.shape[-1] * tp_world_size), + dtype=logits.dtype, + device=logits.device, + ) + whole_logits_ref = [ + whole_logits[..., i * logits.shape[-1] : (i + 1) * logits.shape[-1]] + for i in range(tp_world_size) + ] + dist.all_gather(whole_logits_ref, logits, group=tp_group) + + one_hot = torch.zeros_like(whole_logits) + one_hot.scatter_(1, labels.view(-1).unsqueeze(-1), 1) + + pd = torch.nn.functional.softmax(whole_logits, dim=-1) + d_logits = (pd - one_hot) * _g_logprobs.unsqueeze(-1) + d_logits = d_logits.to(hidden.dtype) + + local_size = weight.size(0) + local_d_logits = d_logits[:, tp_rank * local_size : (tp_rank + 1) * local_size] + + local_d_hidden = local_d_logits @ weight + local_d_weight = local_d_logits.T @ hidden + + dist.all_reduce(local_d_hidden, op=dist.ReduceOp.SUM, group=tp_group) + + return local_d_hidden, local_d_weight, None, None, None + + @pytest.mark.parametrize("dtype", [torch.bfloat16, torch.float16]) + @pytest.mark.parametrize("reduction", ["mean", "sum", "none"]) + @pytest.mark.parametrize("problem", [(4096, 129280, 8192)]) + def test_torch_tp_vs_single_gpu(self, dtype, reduction, problem): + num_tokens, vocabsize, dim = problem + vocabsize = vocabsize // self.tp_world_size + + hidden = ( + torch.empty((num_tokens, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) 
+ weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, (num_tokens,), dtype=torch.long, device="cuda") + + # ------------ forward pass ------------ # + dist.broadcast(hidden, src=0, group=self.tp_group) + dist.broadcast(labels, src=0, group=self.tp_group) + + # single GPU + whole_weight = torch.empty( + (vocabsize * self.tp_world_size, dim), dtype=dtype, device="cuda" + ) + whole_weight_view = [ + whole_weight[i * vocabsize : (i + 1) * vocabsize, :] for i in range(self.tp_world_size) + ] + dist.all_gather(whole_weight_view, weight, group=self.tp_group) + whole_weight = whole_weight.clone().requires_grad_() + logprobs_single_gpu = self.torch_linear_cross_entropy_single_gpu( + hidden, whole_weight, labels, reduction=reduction + ) + + # TP + logprobs_tp = self.TorchLinearCrossEntropy.apply( + hidden, weight, labels, self.tp_group, reduction + ) + torch.testing.assert_close(logprobs_single_gpu, logprobs_tp) + + # ------------ backward pass ------------ # + g_logprobs = torch.empty_like(logprobs_single_gpu).uniform_(-0.1, 0.1) + dist.broadcast(g_logprobs, src=0, group=self.tp_group) + + # single GPU + (d_hidden_single_gpu, d_weight_single_gpu) = torch.autograd.grad( + (logprobs_single_gpu,), (hidden, whole_weight), (g_logprobs,), retain_graph=False + ) + + # TP + (d_hidden_tp, d_weight_tp) = torch.autograd.grad( + (logprobs_tp,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + torch.testing.assert_close(d_hidden_single_gpu, d_hidden_tp, atol=1e-3, rtol=1e-3) + local_d_weight_single_gpu = d_weight_single_gpu[ + self.tp_rank * weight.shape[0] : (self.tp_rank + 1) * weight.shape[0], : + ] + torch.testing.assert_close(local_d_weight_single_gpu, d_weight_tp, atol=1e-3, rtol=1e-3) + + @staticmethod + def get_problems(): + return [ + (80, 125, 64), + (80, 152064, 64), + (1024, 152064, 4096), + (4096, 152063, 8192), + ((1, 4096), 152064, 8192), + ((2, 4096), 152064, 
8192), + ] + + @pytest.mark.parametrize("dtype", [torch.bfloat16, torch.float16]) + @pytest.mark.parametrize("reduction", ["mean", "sum", "none"]) + @pytest.mark.parametrize("problem", get_problems()) + def test_correctness(self, dtype, reduction, problem): + num_tokens, vocabsize, dim = problem + hidden_shape = (num_tokens, dim) if isinstance(num_tokens, int) else (*num_tokens, dim) + labels_shape = (num_tokens,) if isinstance(num_tokens, int) else num_tokens + + hidden = ( + torch.empty(hidden_shape, dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, labels_shape, dtype=torch.long, device="cuda") + + # ------ forward pass ------ # + dist.broadcast(hidden, src=0, group=self.tp_group) + dist.broadcast(labels, src=0, group=self.tp_group) + + torch_logprobs = self.TorchLinearCrossEntropy.apply( + hidden.view(-1, dim), weight, labels, self.tp_group, reduction + ) + + custom_logprobs = linear_cross_entropy( + hidden, weight, labels, tp_group=self.tp_group, reduction=reduction + ) + + torch.testing.assert_close(torch_logprobs, custom_logprobs) + + # ------- backward pass ------- # + g_logprobs = torch.empty_like(torch_logprobs).uniform_(-0.1, 0.1) + dist.broadcast(g_logprobs, src=0, group=self.tp_group) + + (d_hidden_torch, d_weight_torch) = torch.autograd.grad( + (torch_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + (d_hidden_custom, d_weight_custom) = torch.autograd.grad( + (custom_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + torch.testing.assert_close(d_hidden_torch, d_hidden_custom, atol=1e-3, rtol=1e-3) + torch.testing.assert_close(d_weight_torch, d_weight_custom, atol=1e-4, rtol=1e-4) + + @pytest.mark.parametrize("problem", [((1, 4096), 129280, 7168)]) + @pytest.mark.parametrize("dtype", [torch.bfloat16]) + 
@pytest.mark.parametrize("reduction", ["mean"]) + def test_performance(self, problem, dtype, reduction): + num_tokens, vocabsize, dim = problem + hidden_shape = (num_tokens, dim) if isinstance(num_tokens, int) else (*num_tokens, dim) + labels_shape = (num_tokens,) if isinstance(num_tokens, int) else num_tokens + + start_event = torch.cuda.Event(enable_timing=True) + end_event = torch.cuda.Event(enable_timing=True) + + torch_fwd_latency = list() + torch_bwd_latency = list() + custom_fwd_latency = list() + custom_bwd_latency = list() + + iterations = 5 + for i in range(iterations): + hidden = ( + torch.empty(hidden_shape, dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, labels_shape, dtype=torch.long, device="cuda") + + # ------ forward pass ------ # + dist.broadcast(hidden, src=0, group=self.tp_group) + dist.broadcast(labels, src=0, group=self.tp_group) + + start_event.record() + torch_logprobs = self.TorchLinearCrossEntropy.apply( + hidden.view(-1, dim), weight, labels, self.tp_group, reduction + ) + end_event.record() + torch.cuda.synchronize() + torch_fwd_latency.append(start_event.elapsed_time(end_event)) + + start_event.record() + custom_logprobs = linear_cross_entropy( + hidden, weight, labels, tp_group=self.tp_group, reduction=reduction + ) + end_event.record() + torch.cuda.synchronize() + custom_fwd_latency.append(start_event.elapsed_time(end_event)) + + # ------- backward pass ------- # + g_logprobs = torch.empty_like(torch_logprobs).uniform_(-0.1, 0.1) + dist.broadcast(g_logprobs, src=0, group=self.tp_group) + + start_event.record() + (d_hidden_torch, d_weight_torch) = torch.autograd.grad( + (torch_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + end_event.record() + torch.cuda.synchronize() + torch_bwd_latency.append(start_event.elapsed_time(end_event)) 
+ + start_event.record() + (d_hidden_custom, d_weight_custom) = torch.autograd.grad( + (custom_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + end_event.record() + torch.cuda.synchronize() + custom_bwd_latency.append(start_event.elapsed_time(end_event)) + + # --- remove first latency due to warmup --- # + torch_fwd_latency = torch_fwd_latency[1:] + torch_bwd_latency = torch_bwd_latency[1:] + custom_fwd_latency = custom_fwd_latency[1:] + custom_bwd_latency = custom_bwd_latency[1:] + + if self.is_chief: + print() + print( + f"[INFO]: On problem {problem}, dtype {dtype}, reduction {reduction}, TP size {self.tp_world_size}:" + ) + print( + f"[INFO]: Torch forward latency: {sum(torch_fwd_latency) / len(torch_fwd_latency):.2f} ms" + ) + print( + f"[INFO]: Custom forward latency: {sum(custom_fwd_latency) / len(custom_fwd_latency):.2f} ms" + ) + print( + f"[INFO]: Torch backward latency: {sum(torch_bwd_latency) / len(torch_bwd_latency):.2f} ms" + ) + print( + f"[INFO]: Custom backward latency: {sum(custom_bwd_latency) / len(custom_bwd_latency):.2f} ms" + ) + + @pytest.mark.parametrize("problem", [((1, 4096), 129280, 7168)]) + @pytest.mark.parametrize("dtype", [torch.bfloat16]) + @pytest.mark.parametrize("reduction", ["mean"]) + def test_storage(self, problem, dtype, reduction): + num_tokens, vocabsize, dim = problem + hidden_shape = (num_tokens, dim) if isinstance(num_tokens, int) else (*num_tokens, dim) + labels_shape = (num_tokens,) if isinstance(num_tokens, int) else num_tokens + + if self.is_chief: + print() + print( + f"[INFO]: On problem {problem}, dtype {dtype}, reduction {reduction}, TP size {self.tp_world_size}:" + ) + + def torch_storage(): + hidden = ( + torch.empty(hidden_shape, dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, labels_shape, dtype=torch.long, 
device="cuda") + + dist.broadcast(hidden, src=0, group=self.tp_group) + dist.broadcast(labels, src=0, group=self.tp_group) + + torch.cuda.reset_peak_memory_stats() + torch_logprobs = self.TorchLinearCrossEntropy.apply( + hidden.view(-1, dim), weight, labels, self.tp_group, reduction + ) + torch.cuda.synchronize() + torch_max_memory = torch.cuda.max_memory_allocated() / 1024 / 1024 + if self.is_chief: + print( + f"[INFO]: On GPU {self.tp_rank}, Torch Forward pass peak memory: {torch_max_memory:.2f} MB" + ) + + g_logprobs = torch.empty_like(torch_logprobs).uniform_(-0.1, 0.1) + dist.broadcast(g_logprobs, src=0, group=self.tp_group) + + torch.cuda.reset_peak_memory_stats() + (d_hidden_torch, d_weight_torch) = torch.autograd.grad( + (torch_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + torch.cuda.synchronize() + torch_max_memory = torch.cuda.max_memory_allocated() / 1024 / 1024 + if self.is_chief: + print( + f"[INFO]: On GPU {self.tp_rank}, Torch Backward pass peak memory: {torch_max_memory:.2f} MB" + ) + + def custom_storage(): + hidden = ( + torch.empty(hidden_shape, dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, labels_shape, dtype=torch.long, device="cuda") + + dist.broadcast(hidden, src=0, group=self.tp_group) + dist.broadcast(labels, src=0, group=self.tp_group) + + torch.cuda.reset_peak_memory_stats() + custom_logprobs = linear_cross_entropy( + hidden, weight, labels, tp_group=self.tp_group, reduction=reduction + ) + torch.cuda.synchronize() + custom_max_memory = torch.cuda.max_memory_allocated() / 1024 / 1024 + if self.is_chief: + print( + f"[INFO]: On GPU {self.tp_rank}, Custom Forward pass peak memory: {custom_max_memory:.2f} MB" + ) + + g_logprobs = torch.empty_like(custom_logprobs).uniform_(-0.1, 0.1) + dist.broadcast(g_logprobs, src=0, 
group=self.tp_group) + + torch.cuda.reset_peak_memory_stats() + (d_hidden_custom, d_weight_custom) = torch.autograd.grad( + (custom_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + torch.cuda.synchronize() + custom_max_memory = torch.cuda.max_memory_allocated() / 1024 / 1024 + if self.is_chief: + print( + f"[INFO]: On GPU {self.tp_rank}, Custom Backward pass peak memory: {custom_max_memory:.2f} MB" + ) + + self.cleanup() + torch_storage() + self.cleanup() + custom_storage() + + +@pytest.mark.skipif( + "WORLD_SIZE" not in os.environ or int(os.environ["WORLD_SIZE"]) < 2, + reason="Requires torchrun with multiple GPUs", +) +@pytest.mark.skipif(get_device_arch_version() != 10, reason="Requires GPU architecture = 10") +@pytest.mark.usefixtures("distributed_context") +class TestFusedLinearCrossEntropySequenceParallel: + @pytest.fixture(autouse=True) + def setup_attrs(self, distributed_context): + """ + Setup attributes for the test class. + """ + self.tp_group = distributed_context.group + self.tp_rank = distributed_context.rank + self.tp_world_size = distributed_context.world_size + self.is_chief = distributed_context.is_chief + + @staticmethod + def timed_barrier(timeout_s=10): + import time + + work = torch.distributed.barrier(async_op=True) + t0 = time.time() + while not work.is_completed(): + if time.time() - t0 > timeout_s: + exit(1) + time.sleep(0.05) + work.wait() + + def cleanup(self): + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + import gc + + gc.collect() + torch.cuda.synchronize() + + @staticmethod + def torch_linear_cross_entropy_single_gpu( + hidden: torch.Tensor, + weight: torch.Tensor, + labels: torch.Tensor, + reduction: typing.Optional[str] = "mean", + ): + logits = hidden.to(torch.float32) @ weight.T.to(torch.float32) + logprobs = torch.nn.functional.cross_entropy( + logits.view(-1, logits.shape[-1]), labels.view(-1), reduction=reduction + ) + return logprobs.to(torch.float32) + + class 
TorchLinearCrossEntropy(torch.autograd.Function): + @staticmethod + def forward( + ctx, + hidden: torch.Tensor, + weight: torch.Tensor, + labels: torch.Tensor, + tp_group: torch.distributed.ProcessGroup, + reduction: typing.Optional[str] = "mean", + ): + tp_rank = 0 if tp_group is None else torch.distributed.get_rank(tp_group) + tp_world_size = 1 if tp_group is None else torch.distributed.get_world_size(tp_group) + + whole_hidden = torch.empty( + (hidden.shape[0] * tp_world_size, hidden.shape[-1]), + dtype=hidden.dtype, + device=hidden.device, + ) + dist.all_gather_into_tensor(whole_hidden, hidden, group=tp_group) + + logits = whole_hidden.to(torch.float32) @ weight.T.to(torch.float32) + + whole_logits = torch.empty( + (logits.shape[0], logits.shape[-1] * tp_world_size), + dtype=logits.dtype, + device=logits.device, + ) + whole_logits_ref = [ + whole_logits[..., i * logits.shape[-1] : (i + 1) * logits.shape[-1]] + for i in range(tp_world_size) + ] + dist.all_gather(whole_logits_ref, logits, group=tp_group) + + logprobs = torch.nn.functional.cross_entropy( + whole_logits.view(-1, whole_logits.shape[-1]), labels.view(-1), reduction=reduction + ) + + # If we don't preserve whole_logits, + # we need to re-compute it in the backward pass + ctx.save_for_backward(whole_hidden, weight, labels) + ctx.tp_group = tp_group + ctx.reduction = reduction + ctx.tp_rank = tp_rank + ctx.tp_world_size = tp_world_size + + return logprobs.to(torch.float32) + + @staticmethod + def backward(ctx, g_logprobs: torch.Tensor): + whole_hidden, weight, labels = ctx.saved_tensors + tp_group = ctx.tp_group + reduction = ctx.reduction + tp_rank = ctx.tp_rank + tp_world_size = ctx.tp_world_size + + num_tokens, dim = whole_hidden.shape + + if reduction == "mean": + _g_logprobs = torch.broadcast_to(g_logprobs / num_tokens, (num_tokens,)) + elif reduction == "sum": + _g_logprobs = torch.broadcast_to(g_logprobs, (num_tokens,)) + else: + _g_logprobs = g_logprobs + + # re-compute whole_logits + logits = 
whole_hidden.to(torch.float32) @ weight.T.to(torch.float32) + whole_logits = torch.empty( + (logits.shape[0], logits.shape[-1] * tp_world_size), + dtype=logits.dtype, + device=logits.device, + ) + whole_logits_ref = [ + whole_logits[..., i * logits.shape[-1] : (i + 1) * logits.shape[-1]] + for i in range(tp_world_size) + ] + dist.all_gather(whole_logits_ref, logits, group=tp_group) + + one_hot = torch.zeros_like(whole_logits) + one_hot.scatter_(1, labels.view(-1).unsqueeze(-1), 1) + + pd = torch.nn.functional.softmax(whole_logits, dim=-1) + d_logits = (pd - one_hot) * _g_logprobs.unsqueeze(-1) + d_logits = d_logits.to(whole_hidden.dtype) + + local_size = weight.size(0) + local_d_logits = d_logits[:, tp_rank * local_size : (tp_rank + 1) * local_size] + + d_hidden = local_d_logits @ weight + local_d_weight = local_d_logits.T @ whole_hidden + + # dist.all_reduce( + # local_d_hidden, + # op=dist.ReduceOp.SUM, + # group=tp_group + # ) + + # split the local_d_hidden along the sequence length dimension + local_num_tokens = num_tokens // tp_world_size + # local_d_hidden = local_d_hidden[tp_rank * local_num_tokens : (tp_rank + 1) * local_num_tokens, :] + + local_d_hidden = torch.empty( + (local_num_tokens, dim), dtype=weight.dtype, device=weight.device + ) + dist.reduce_scatter_tensor( + local_d_hidden, d_hidden, op=dist.ReduceOp.SUM, group=tp_group + ) + return local_d_hidden, local_d_weight, None, None, None + + @pytest.mark.parametrize("dtype", [torch.bfloat16, torch.float16]) + @pytest.mark.parametrize("reduction", ["mean", "sum", "none"]) + @pytest.mark.parametrize("problem", [(256, 129280, 8192)]) + def test_torch_sp_vs_single_gpu(self, dtype, reduction, problem): + num_tokens, vocabsize, dim = problem + vocabsize = vocabsize // self.tp_world_size + + hidden = ( + torch.empty((num_tokens, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + 
.requires_grad_() + ) + labels = torch.randint( + 0, vocabsize, (num_tokens * self.tp_world_size,), dtype=torch.long, device="cuda" + ) + + # ------------ forward pass ------------ # + dist.broadcast(labels, src=0, group=self.tp_group) + + # single GPU + whole_hidden = torch.empty( + (num_tokens * self.tp_world_size, dim), dtype=dtype, device="cuda" + ) + dist.all_gather_into_tensor(whole_hidden, hidden, group=self.tp_group) + whole_hidden = whole_hidden.clone().requires_grad_() + + whole_weight = torch.empty( + (vocabsize * self.tp_world_size, dim), dtype=dtype, device="cuda" + ) + whole_weight_view = [ + whole_weight[i * vocabsize : (i + 1) * vocabsize, :] for i in range(self.tp_world_size) + ] + dist.all_gather(whole_weight_view, weight, group=self.tp_group) + whole_weight = whole_weight.clone().requires_grad_() + logprobs_single_gpu = self.torch_linear_cross_entropy_single_gpu( + whole_hidden, whole_weight, labels, reduction=reduction + ) + + # TP + logprobs_tp = self.TorchLinearCrossEntropy.apply( + hidden, weight, labels, self.tp_group, reduction + ) + torch.testing.assert_close(logprobs_single_gpu, logprobs_tp) + + # ------------ backward pass ------------ # + g_logprobs = torch.empty_like(logprobs_single_gpu).uniform_(-0.1, 0.1) + dist.broadcast(g_logprobs, src=0, group=self.tp_group) + + # single GPU + (d_hidden_single_gpu, d_weight_single_gpu) = torch.autograd.grad( + (logprobs_single_gpu,), (whole_hidden, whole_weight), (g_logprobs,), retain_graph=False + ) + + # TP + (d_hidden_tp, d_weight_tp) = torch.autograd.grad( + (logprobs_tp,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + + local_d_hidden_single_gpu = d_hidden_single_gpu[ + self.tp_rank * hidden.shape[0] : (self.tp_rank + 1) * hidden.shape[0], : + ] + torch.testing.assert_close(local_d_hidden_single_gpu, d_hidden_tp, atol=1e-3, rtol=1e-3) + local_d_weight_single_gpu = d_weight_single_gpu[ + self.tp_rank * weight.shape[0] : (self.tp_rank + 1) * weight.shape[0], : + ] + 
torch.testing.assert_close(local_d_weight_single_gpu, d_weight_tp, atol=1e-3, rtol=1e-3) + + self.cleanup() + + @staticmethod + def get_problems(): + return [ + (80, 125, 64), + (80, 152064, 64), + (1024, 152064, 4096), + (4096, 15206, 1024), + ((1, 4096), 15206, 1024), + ((4, 1024), 15206, 1024), + ] + + @pytest.mark.parametrize("dtype", [torch.bfloat16, torch.float16]) + @pytest.mark.parametrize("reduction", ["mean", "sum", "none"]) + @pytest.mark.parametrize("problem", get_problems()) + def test_correctness(self, dtype, reduction, problem): + num_tokens, vocabsize, dim = problem + hidden_shape = (num_tokens, dim) if isinstance(num_tokens, int) else (*num_tokens, dim) + labels_shape = ( + (num_tokens * self.tp_world_size,) + if isinstance(num_tokens, int) + else (num_tokens[0] * self.tp_world_size, *num_tokens[1:]) + ) + + hidden = ( + torch.empty(hidden_shape, dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, labels_shape, dtype=torch.long, device="cuda") + + # ------ forward pass ------ # + dist.broadcast(labels, src=0, group=self.tp_group) + + torch_logprobs = self.TorchLinearCrossEntropy.apply( + hidden.view(-1, dim), weight, labels, self.tp_group, reduction + ) + + custom_logprobs = linear_cross_entropy( + hidden, + weight, + labels, + tp_group=self.tp_group, + reduction=reduction, + sequence_parallel=True, + ) + + torch.testing.assert_close(torch_logprobs, custom_logprobs) + + # ------- backward pass ------- # + g_logprobs = torch.empty_like(torch_logprobs).uniform_(-0.1, 0.1) + dist.broadcast(g_logprobs, src=0, group=self.tp_group) + + (d_hidden_torch, d_weight_torch) = torch.autograd.grad( + (torch_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + (d_hidden_custom, d_weight_custom) = torch.autograd.grad( + (custom_logprobs,), (hidden, weight), 
(g_logprobs,), retain_graph=False + ) + + # in case one GPU failed, and leading to hang + torch.testing.assert_close(d_hidden_torch, d_hidden_custom, atol=1e-3, rtol=1e-3) + torch.testing.assert_close(d_weight_torch, d_weight_custom, atol=1e-3, rtol=1e-3) + self.timed_barrier() + + self.cleanup() + + @pytest.mark.parametrize("problem", [((1, 1024), 129280, 7168)]) + @pytest.mark.parametrize("dtype", [torch.bfloat16]) + @pytest.mark.parametrize("reduction", ["mean"]) + def test_performance(self, problem, dtype, reduction): + num_tokens, vocabsize, dim = problem + hidden_shape = (num_tokens, dim) if isinstance(num_tokens, int) else (*num_tokens, dim) + labels_shape = ( + (num_tokens * self.tp_world_size,) + if isinstance(num_tokens, int) + else (num_tokens[0] * self.tp_world_size, *num_tokens[1:]) + ) + + start_event = torch.cuda.Event(enable_timing=True) + end_event = torch.cuda.Event(enable_timing=True) + + torch_fwd_latency = list() + torch_bwd_latency = list() + custom_fwd_latency = list() + custom_bwd_latency = list() + + iterations = 5 + for i in range(iterations): + hidden = ( + torch.empty(hidden_shape, dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, labels_shape, dtype=torch.long, device="cuda") + + # ------ forward pass ------ # + dist.broadcast(labels, src=0, group=self.tp_group) + + start_event.record() + torch_logprobs = self.TorchLinearCrossEntropy.apply( + hidden.view(-1, dim), weight, labels, self.tp_group, reduction + ) + end_event.record() + torch.cuda.synchronize() + torch_fwd_latency.append(start_event.elapsed_time(end_event)) + + start_event.record() + custom_logprobs = linear_cross_entropy( + hidden, + weight, + labels, + tp_group=self.tp_group, + reduction=reduction, + sequence_parallel=True, + ) + end_event.record() + torch.cuda.synchronize() + 
custom_fwd_latency.append(start_event.elapsed_time(end_event)) + + # ------- backward pass ------- # + g_logprobs = torch.empty_like(torch_logprobs).uniform_(-0.1, 0.1) + dist.broadcast(g_logprobs, src=0, group=self.tp_group) + + start_event.record() + (d_hidden_torch, d_weight_torch) = torch.autograd.grad( + (torch_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + end_event.record() + torch.cuda.synchronize() + torch_bwd_latency.append(start_event.elapsed_time(end_event)) + + start_event.record() + (d_hidden_custom, d_weight_custom) = torch.autograd.grad( + (custom_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + end_event.record() + torch.cuda.synchronize() + custom_bwd_latency.append(start_event.elapsed_time(end_event)) + + # --- remove first latency due to warmup --- # + torch_fwd_latency = torch_fwd_latency[1:] + torch_bwd_latency = torch_bwd_latency[1:] + custom_fwd_latency = custom_fwd_latency[1:] + custom_bwd_latency = custom_bwd_latency[1:] + + if self.is_chief: + print() + print( + f"[INFO]: On problem {problem}, dtype {dtype}, reduction {reduction}, TP size {self.tp_world_size}, Sequence Parallel: True:" + ) + print( + f"[INFO]: Torch forward latency: {sum(torch_fwd_latency) / len(torch_fwd_latency):.2f} ms" + ) + print( + f"[INFO]: Custom forward latency: {sum(custom_fwd_latency) / len(custom_fwd_latency):.2f} ms" + ) + print( + f"[INFO]: Torch backward latency: {sum(torch_bwd_latency) / len(torch_bwd_latency):.2f} ms" + ) + print( + f"[INFO]: Custom backward latency: {sum(custom_bwd_latency) / len(custom_bwd_latency):.2f} ms" + ) + + @pytest.mark.parametrize("problem", [((1, 1024), 129280, 7168)]) + @pytest.mark.parametrize("dtype", [torch.bfloat16]) + @pytest.mark.parametrize("reduction", ["mean"]) + def test_storage(self, problem, dtype, reduction): + num_tokens, vocabsize, dim = problem + hidden_shape = (num_tokens, dim) if isinstance(num_tokens, int) else (*num_tokens, dim) + labels_shape = ( + (num_tokens * 
self.tp_world_size,) + if isinstance(num_tokens, int) + else (num_tokens[0] * self.tp_world_size, *num_tokens[1:]) + ) + + if self.is_chief: + print() + print( + f"[INFO]: On problem {problem}, dtype {dtype}, reduction {reduction}, TP size {self.tp_world_size}, Sequence Parallel: True:" + ) + + def torch_storage(): + hidden = ( + torch.empty(hidden_shape, dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, labels_shape, dtype=torch.long, device="cuda") + + dist.broadcast(hidden, src=0, group=self.tp_group) + dist.broadcast(labels, src=0, group=self.tp_group) + + torch.cuda.reset_peak_memory_stats() + torch_logprobs = self.TorchLinearCrossEntropy.apply( + hidden.view(-1, dim), weight, labels, self.tp_group, reduction + ) + torch.cuda.synchronize() + torch_max_memory = torch.cuda.max_memory_allocated() / 1024 / 1024 + if self.is_chief: + print( + f"[INFO]: On GPU {self.tp_rank}, Torch Forward pass peak memory: {torch_max_memory:.2f} MB" + ) + + g_logprobs = torch.empty_like(torch_logprobs).uniform_(-0.1, 0.1) + dist.broadcast(g_logprobs, src=0, group=self.tp_group) + + torch.cuda.reset_peak_memory_stats() + (d_hidden_torch, d_weight_torch) = torch.autograd.grad( + (torch_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + torch.cuda.synchronize() + torch_max_memory = torch.cuda.max_memory_allocated() / 1024 / 1024 + if self.is_chief: + print( + f"[INFO]: On GPU {self.tp_rank}, Torch Backward pass peak memory: {torch_max_memory:.2f} MB" + ) + + def custom_storage(): + hidden = ( + torch.empty(hidden_shape, dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + weight = ( + torch.empty((vocabsize, dim), dtype=dtype, device="cuda") + .uniform_(-0.1, 0.1) + .requires_grad_() + ) + labels = torch.randint(0, vocabsize, labels_shape, dtype=torch.long, 
device="cuda") + + dist.broadcast(hidden, src=0, group=self.tp_group) + dist.broadcast(labels, src=0, group=self.tp_group) + + torch.cuda.reset_peak_memory_stats() + custom_logprobs = linear_cross_entropy( + hidden, + weight, + labels, + tp_group=self.tp_group, + reduction=reduction, + sequence_parallel=True, + ) + torch.cuda.synchronize() + custom_max_memory = torch.cuda.max_memory_allocated() / 1024 / 1024 + if self.is_chief: + print( + f"[INFO]: On GPU {self.tp_rank}, Custom Forward pass peak memory: {custom_max_memory:.2f} MB" + ) + + g_logprobs = torch.empty_like(custom_logprobs).uniform_(-0.1, 0.1) + dist.broadcast(g_logprobs, src=0, group=self.tp_group) + + torch.cuda.reset_peak_memory_stats() + (d_hidden_custom, d_weight_custom) = torch.autograd.grad( + (custom_logprobs,), (hidden, weight), (g_logprobs,), retain_graph=False + ) + torch.cuda.synchronize() + custom_max_memory = torch.cuda.max_memory_allocated() / 1024 / 1024 + if self.is_chief: + print( + f"[INFO]: On GPU {self.tp_rank}, Custom Backward pass peak memory: {custom_max_memory:.2f} MB" + ) + + self.cleanup() + torch_storage() + self.cleanup() + custom_storage() diff --git a/tests/unit_tests/models/test_mamba_moe_model.py b/tests/unit_tests/models/test_mamba_moe_model.py index 3c7ae93a17c..2481649bc3f 100644 --- a/tests/unit_tests/models/test_mamba_moe_model.py +++ b/tests/unit_tests/models/test_mamba_moe_model.py @@ -273,6 +273,9 @@ "offload_modules": [], "hybrid_context_parallel": False, "max_seqlen_per_dp_cp_rank": None, + "fallback_to_eager_attn": False, + "linear_attention_type": None, + "moe_router_force_biased": None, } # Fields to ignore entirely (ephemeral, environment-specific, very large). 
SKIP_FIELDS = set() diff --git a/tests/unit_tests/ssm/test_gated_delta_net.py b/tests/unit_tests/ssm/test_gated_delta_net.py index 1ccc70a2327..e3f7b0c4f20 100644 --- a/tests/unit_tests/ssm/test_gated_delta_net.py +++ b/tests/unit_tests/ssm/test_gated_delta_net.py @@ -1,5 +1,6 @@ -# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +from functools import partial from unittest import mock import pytest @@ -30,6 +31,8 @@ init_checkpointing_mock_args, ) from tests.unit_tests.test_utilities import Utils +from tests.unit_tests.transformer.test_attention import _test_parallel_attention_correctness +from tests.unit_tests.transformer.test_multi_latent_attention import make_test_packed_seq_params try: import fla @@ -41,12 +44,7 @@ @pytest.mark.parametrize( ("tp_size", "sp", "cp_size"), - [ - (1, False, 1), - (2, False, 1), - (2, True, 1), - # GDN does not support CP for now. Leave it for future work. - ], + [(1, False, 1), (2, False, 1), (2, True, 1), (1, False, 2), (2, False, 2), (2, True, 2)], ) @pytest.mark.skipif(not HAVE_FLA, reason="FLA is not installed.") @pytest.mark.internal @@ -141,58 +139,63 @@ def test_gpu_forward(self): output.dtype == hidden_states.dtype ), f"Output dtype {output.dtype=} mismatch with {hidden_states.dtype=}" + def test_gpu_forward_thd_correctness(self): + if self.sp_size > 1: + pytest.skip("Sequence parallel is not supported for this test case.") + atol, rtol = 3e-4, 3e-4 + + # Input shape + sequence_length = 32 + micro_batch_size = 4 + cu_seqlens = [0, 32, 64, 96, 128] + # sbhd input shape: [sequence length, batch size, hidden size] + sub_sequence_length = sequence_length // self.cp_size + hidden_states_sbhd = torch.rand( + (sub_sequence_length, micro_batch_size, self.gdn.config.hidden_size) + ) + attention_mask_sbhd = None + hidden_states_sbhd = hidden_states_sbhd.cuda().bfloat16() + # thd input shape: [sequence length * batch size, 1, hidden size] + 
hidden_states_thd = hidden_states_sbhd.transpose(0, 1).contiguous() + hidden_states_thd = hidden_states_thd.view(-1, 1, self.gdn.config.hidden_size) + attention_mask_thd = None + packed_seq_params = make_test_packed_seq_params(cu_seqlens=cu_seqlens) + + # THD format + output_thd, _ = self.gdn( + hidden_states_thd, attention_mask_thd, packed_seq_params=packed_seq_params + ) + # SBHD format + output_sbhd, _ = self.gdn(hidden_states_sbhd, attention_mask_sbhd) + output_sbhd_T = output_sbhd.transpose(0, 1).contiguous().view(*output_thd.shape) + + rank = torch.distributed.get_rank() + assert output_thd.shape[0] == sub_sequence_length * micro_batch_size + assert output_thd.shape[1] == 1 + assert output_thd.shape[2] == self.gdn.config.hidden_size + torch.testing.assert_close( + output_sbhd_T, + output_thd, + atol=atol, + rtol=rtol, + msg=lambda msg: f"Output mismatch ({rank=}): {msg}", + ) + + +@pytest.mark.parametrize("sequence_packing", [False, True]) @pytest.mark.parametrize( ("tp", "sp", "cp"), [ (4, False, 1), # TP w/o SP (4, True, 1), # TP w/ SP - # CP does not support GDN for now. Add it once it is supported. 
+ (1, False, 2), # CP + (2, False, 2), # TP w/o SP + CP + (2, True, 2), # TP w/ SP + CP ], ) @pytest.mark.skipif(not HAVE_FLA, reason="FLA is not installed.") -def test_parallel_gated_delta_net_correctness(tmp_path_dist_ckpt, tp, sp, cp): - # Constants - seed = 123 - sequence_length = 256 - micro_batch_size = 4 - hidden_size = 128 - - # Model initialization function - def initialize_gpt_model( - config, pre_process=True, post_process=True, vp_stage=None, pg_collection=None - ): - layer_spec = get_transformer_block_with_experimental_attention_variant_spec( - config=config, vp_stage=None, pp_rank=None - ) - gpt_model = GPTModel( - config=config, - transformer_layer_spec=layer_spec, - vocab_size=128, - max_sequence_length=sequence_length, - pre_process=pre_process, - post_process=post_process, - vp_stage=vp_stage, - pg_collection=pg_collection, - ) - return gpt_model - - # Initialize baseline parallel state - Utils.initialize_model_parallel( - tensor_model_parallel_size=1, pipeline_model_parallel_size=1, context_parallel_size=1 - ) - - # Initialize input hidden states - torch.manual_seed(seed) - model_parallel_cuda_manual_seed(seed) - input_hidden_states = ( - torch.rand((sequence_length, micro_batch_size, hidden_size)) - .cuda() - .bfloat16() - .requires_grad_(True) - ) - - # Initialize transformer config +def test_parallel_gated_delta_net_correctness(tmp_path_dist_ckpt, sequence_packing, tp, sp, cp): transformer_config = TransformerConfig( hidden_size=128, linear_conv_kernel_dim=2, @@ -212,118 +215,26 @@ def initialize_gpt_model( transformer_impl="transformer_engine", ) - with TempNamedDir(tmp_path_dist_ckpt / 'test_parallel_gdn', sync=True) as ckpt_dir: - # Set argument - mock_args = parse_args(ignore_unknown_args=True) - set_args(mock_args) - - # Initialize baseline model - init_basic_mock_args(mock_args, 1, 1, bf16=True) - mock_args.context_parallel_size = 1 - mock_args.sequence_parallel = 1 - gpt_model = unwrap_model(get_model(initialize_gpt_model, 
config=transformer_config)) - - # Initialize args and save checkpoint - init_checkpointing_mock_args(mock_args, ckpt_dir, False) - mock_args.no_save_optim = True - mock_args.no_save_rng = True - mock_args.no_load_optim = True - mock_args.no_load_rng = True - save_checkpoint(10, gpt_model, None, None, 0) - - # Calculate baseline output - attention = gpt_model[0].decoder.layers[0].self_attention - output_hidden_states_baseline, bias_hidden_states_baseline = attention( - input_hidden_states, attention_mask=None - ) - output_hidden_states_baseline.sum().backward() - - # Save baseline output - input_grad_baseline = input_hidden_states.grad.detach() - output_hidden_states_baseline = output_hidden_states_baseline.detach() - - # Initialize parallel model - Utils.destroy_model_parallel() - Utils.initialize_model_parallel( - tensor_model_parallel_size=tp, pipeline_model_parallel_size=1, context_parallel_size=cp - ) - torch.manual_seed(seed) - model_parallel_cuda_manual_seed(seed) - transformer_config.context_parallel_size = cp - transformer_config.tensor_model_parallel_size = tp - transformer_config.sequence_parallel = sp - init_basic_mock_args(mock_args, tp, 1, bf16=True) - mock_args.context_parallel_size = cp - mock_args.sequence_parallel = sp - pg_collection = ProcessGroupCollection.use_mpu_process_groups() - pg_collection.embd = parallel_state.get_embedding_group() - gpt_model = unwrap_model( - get_model(initialize_gpt_model, config=transformer_config, pg_collection=pg_collection) - ) - with mock.patch('megatron.training.checkpointing.check_checkpoint_args'): - with mock.patch('megatron.training.checkpointing.update_num_microbatches'): - load_checkpoint(gpt_model, None, None) - - # Function to get tensor on this tp and cp rank - cp_group = parallel_state.get_context_parallel_group() - tp_rank = parallel_state.get_tensor_model_parallel_rank() - - def get_tensor_on_this_rank(tensor): - if cp > 1: - tensor = get_tensor_on_this_cp_rank(tensor, 0, cp_group) - if tp > 1 and 
sp: - sp_seg = sequence_length // tp // cp - tensor = tensor[tp_rank * sp_seg : (tp_rank + 1) * sp_seg] - return tensor - - # Calculate parallel model output - input_hidden_states = get_tensor_on_this_rank(input_hidden_states) - input_hidden_states = input_hidden_states.detach().requires_grad_(True) - parallel_attention = gpt_model[0].decoder.layers[0].self_attention - output_hidden_states_parallel, bias_hidden_states_parallel = parallel_attention( - input_hidden_states, attention_mask=None - ) - output_hidden_states_parallel.sum().backward() - input_grad_parallel = input_hidden_states.grad.detach() - - # Check if the output is the same - if cp: - atol, rtol = 5e-3, 5e-3 - else: - atol, rtol = 5e-4, 5e-4 - output_hidden_states_baseline = get_tensor_on_this_rank(output_hidden_states_baseline) - input_grad_baseline = get_tensor_on_this_rank(input_grad_baseline) - - assert torch.all( - ~torch.isnan(output_hidden_states_baseline) - ), "output_hidden_states_baseline contains nan" - assert torch.all( - ~torch.isinf(output_hidden_states_baseline) - ), "output_hidden_states_baseline contains inf" - assert torch.all(~torch.isnan(input_grad_baseline)), "input_grad_baseline contains nan" - assert torch.all(~torch.isinf(input_grad_baseline)), "input_grad_baseline contains inf" - assert torch.all( - ~torch.isnan(output_hidden_states_parallel) - ), "output_hidden_states_parallel contains nan" - assert torch.all( - ~torch.isinf(output_hidden_states_parallel) - ), "output_hidden_states_parallel contains inf" - assert torch.all(~torch.isnan(input_grad_parallel)), "input_grad_parallel contains nan" - assert torch.all(~torch.isinf(input_grad_parallel)), "input_grad_parallel contains inf" - - torch.testing.assert_close( - output_hidden_states_baseline, - output_hidden_states_parallel, - atol=atol, - rtol=rtol, - msg=lambda msg: f"Mismatch in output_hidden_states: {msg}", - ) - torch.testing.assert_close( - input_grad_baseline, - input_grad_parallel, - atol=atol, - rtol=rtol, - 
msg=lambda msg: f"Mismatch in input_grad: {msg}", - ) + transformer_layer_spec = get_transformer_block_with_experimental_attention_variant_spec( + config=transformer_config, vp_stage=None, pp_rank=0 + ) - Utils.destroy_model_parallel() + if cp: + atol, rtol = 5e-3, 5e-3 + else: + atol, rtol = 5e-4, 5e-4 + + _test_parallel_attention_correctness( + transformer_config=transformer_config, + transformer_layer_spec=transformer_layer_spec, + tmp_path_dist_ckpt=tmp_path_dist_ckpt, + atol=atol, + rtol=rtol, + tp=tp, + sp=sp, + cp=cp, + seed=123, + sequence_length=256, + micro_batch_size=4, + sequence_packing=sequence_packing, + ) diff --git a/tests/unit_tests/tensor_parallel/test_tp_attrs_without_init.py b/tests/unit_tests/tensor_parallel/test_tp_attrs_without_init.py new file mode 100644 index 00000000000..f7a518e8e88 --- /dev/null +++ b/tests/unit_tests/tensor_parallel/test_tp_attrs_without_init.py @@ -0,0 +1,87 @@ +import pytest +import torch + +from megatron.core.tensor_parallel.layers import ( + ColumnParallelLinear, + RowParallelLinear, + VocabParallelEmbedding, +) +from megatron.core.transformer.transformer_config import TransformerConfig +from tests.unit_tests.test_utilities import Utils + + +class TestTPAttributesWithoutInitialization: + + def teardown_method(self, method): + Utils.destroy_model_parallel() + + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.parametrize("use_cpu_init", [True, False]) + def test_vocab_parallel_embedding_tp_attrs_no_init(self, use_cpu_init): + Utils.initialize_model_parallel(tensor_model_parallel_size=2) + cfg = TransformerConfig( + num_layers=1, + hidden_size=8, + num_attention_heads=4, + use_cpu_initialization=use_cpu_init, + perform_initialization=False, + ) + + emb = VocabParallelEmbedding( + num_embeddings=16, embedding_dim=8, init_method=cfg.init_method, config=cfg + ) + w = emb.weight + assert hasattr(w, "tensor_model_parallel") and w.tensor_model_parallel is True + assert 
hasattr(w, "partition_dim") and w.partition_dim == 0 + assert hasattr(w, "partition_stride") and w.partition_stride == 1 + + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.parametrize("use_cpu_init", [True, False]) + def test_column_parallel_linear_tp_attrs_no_init(self, use_cpu_init): + Utils.initialize_model_parallel(tensor_model_parallel_size=2) + cfg = TransformerConfig( + num_layers=1, + hidden_size=8, + num_attention_heads=4, + use_cpu_initialization=use_cpu_init, + perform_initialization=False, + ) + + layer = ColumnParallelLinear( + input_size=8, + output_size=8, + init_method=cfg.init_method, + bias=True, + config=cfg, + skip_bias_add=False, + ) + w = layer.weight + assert hasattr(w, "tensor_model_parallel") and w.tensor_model_parallel is True + assert hasattr(w, "partition_dim") and w.partition_dim == 0 + assert hasattr(w, "partition_stride") and w.partition_stride == 1 + + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + @pytest.mark.parametrize("use_cpu_init", [True, False]) + def test_row_parallel_linear_tp_attrs_no_init(self, use_cpu_init): + Utils.initialize_model_parallel(tensor_model_parallel_size=2) + cfg = TransformerConfig( + num_layers=1, + hidden_size=8, + num_attention_heads=4, + use_cpu_initialization=use_cpu_init, + perform_initialization=False, + ) + + layer = RowParallelLinear( + input_size=8, + output_size=8, + init_method=cfg.init_method, + bias=True, + input_is_parallel=True, + config=cfg, + skip_bias_add=False, + ) + w = layer.weight + assert hasattr(w, "tensor_model_parallel") and w.tensor_model_parallel is True + assert hasattr(w, "partition_dim") and w.partition_dim == 1 + assert hasattr(w, "partition_stride") and w.partition_stride == 1 diff --git a/tests/unit_tests/test_inference.py b/tests/unit_tests/test_inference.py index 518aa7f4126..5b2bc07e6e1 100644 --- a/tests/unit_tests/test_inference.py +++ b/tests/unit_tests/test_inference.py @@ -1,3 +1,5 
@@ +# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + import argparse import unittest.mock diff --git a/tests/unit_tests/test_optimizer_state_offloading.py b/tests/unit_tests/test_optimizer_state_offloading.py new file mode 100644 index 00000000000..baaab355182 --- /dev/null +++ b/tests/unit_tests/test_optimizer_state_offloading.py @@ -0,0 +1,337 @@ +# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved. + +"""Unit tests for OptimizerStateOffloader.""" + +import pytest +import torch +import torch.nn as nn + +from megatron.core.distributed import DistributedDataParallel, DistributedDataParallelConfig +from megatron.core.optimizer import OptimizerConfig, get_megatron_optimizer +from megatron.core.transformer import TransformerConfig +from tests.unit_tests.test_utilities import Utils + +try: + from transformer_engine.pytorch.optimizers import FusedAdam # noqa: F401 + + TE_FUSED_ADAM_AVAILABLE = True +except ImportError: + TE_FUSED_ADAM_AVAILABLE = False + + +class SimpleModel(nn.Module): + """Simple model for testing.""" + + def __init__(self, hidden_size=256): + super().__init__() + self.fc1 = nn.Linear(hidden_size, hidden_size) + self.fc2 = nn.Linear(hidden_size, hidden_size) + + def forward(self, x): + return self.fc2(torch.relu(self.fc1(x))) + + +def create_model_and_optimizer(hidden_size=256, offload_optimizer_states=True, **optimizer_kwargs): + """Helper to create model and optimizer for tests.""" + model = SimpleModel(hidden_size=hidden_size).bfloat16().cuda() + ddp_config = DistributedDataParallelConfig(use_distributed_optimizer=True) + model = DistributedDataParallel( + TransformerConfig(num_attention_heads=1, num_layers=1), ddp_config, model + ) + + default_config = dict( + optimizer='adam', + bf16=True, + lr=0.001, + use_distributed_optimizer=True, + offload_optimizer_states=offload_optimizer_states, + ) + default_config.update(optimizer_kwargs) + + optimizer_config = OptimizerConfig(**default_config) + optim = 
get_megatron_optimizer(optimizer_config, [model]) + return model, optim + + +def run_forward_backward_step(model, optim, hidden_size=256): + """Run a single forward-backward-step cycle.""" + input_tensor = torch.randn(8, hidden_size, dtype=torch.bfloat16, device='cuda') + output = model(input_tensor) + output.sum().backward() + optim.step() + optim.zero_grad() + + +# ============================================================================= +# Test 1: Basic OptimizerStateOffloader Initialization +# ============================================================================= +@pytest.mark.skipif(not TE_FUSED_ADAM_AVAILABLE, reason="Requires TE FusedAdam") +def test_offloader_initialization(): + """Test that OptimizerStateOffloader initializes correctly.""" + Utils.initialize_model_parallel() + model, optim = create_model_and_optimizer() + dist_optim = optim.chained_optimizers[0] + + # Offloader is created in __init__ when offload_optimizer_states=True + assert dist_optim._state_offloader is not None + offloader = dist_optim._state_offloader + + # Verify offloader properties + assert offloader.adam_optimizer is not None + assert offloader._d2h_stream is not None + assert offloader._h2d_stream is not None + assert offloader._offloaded is False + + # Before first step, optimizer states are not initialized yet + assert offloader._optimizer_states_initialized is False + + # Run one step to initialize optimizer states + run_forward_backward_step(model, optim) + + # After first step, optimizer states should be marked as initialized + assert offloader._optimizer_states_initialized is True + Utils.destroy_model_parallel() + + +# ============================================================================= +# Test 2: Early Master Weight Offloading Before First Step +# ============================================================================= +@pytest.mark.skipif(not TE_FUSED_ADAM_AVAILABLE, reason="Requires TE FusedAdam") +def test_early_master_weight_offloading(): + 
"""Test that master weights can be offloaded before the first optimizer step.""" + Utils.initialize_model_parallel() + model, optim = create_model_and_optimizer() + dist_optim = optim.chained_optimizers[0] + + # Offloader is created in __init__ + assert dist_optim._state_offloader is not None + offloader = dist_optim._state_offloader + + # Before first step, optimizer states are not initialized + assert offloader._optimizer_states_initialized is False + + # Capture original master weights before offload + original_master_weights = [] + for group in dist_optim.shard_fp32_from_float16_groups: + group_weights = [tensor.clone() for tensor in group] + original_master_weights.append(group_weights) + + # Offload before first step - should only offload master weights + offloader.offload() + offloader.release_gpu_memory() + torch.cuda.synchronize() + + # Verify master weights were offloaded (storage resized to 0) + for group in dist_optim.shard_fp32_from_float16_groups: + for tensor in group: + assert tensor.untyped_storage().size() == 0, "Master weight should be offloaded" + + # Reload master weights + offloader.reload() + offloader.sync_before_step() + + # Verify master weights match after reload + for group_idx, group in enumerate(dist_optim.shard_fp32_from_float16_groups): + for param_idx, tensor in enumerate(group): + original = original_master_weights[group_idx][param_idx] + torch.testing.assert_close( + tensor, + original, + msg=f"Master weight [{group_idx}][{param_idx}] mismatch after offload/reload", + ) + + # Now run a step and verify optimizer states can be offloaded after + run_forward_backward_step(model, optim) + assert offloader._optimizer_states_initialized is True + + Utils.destroy_model_parallel() + + +# ============================================================================= +# Test 3: Offload and Reload Correctness +# ============================================================================= +@pytest.mark.skipif(not TE_FUSED_ADAM_AVAILABLE, 
reason="Requires TE FusedAdam") +@pytest.mark.parametrize("offload_optimizer_states", [True, False]) +@pytest.mark.parametrize("offload_master_weights", [True, False]) +def test_offload_reload_correctness(offload_optimizer_states, offload_master_weights): + """Test that offload/reload preserves optimizer state values.""" + if not offload_optimizer_states and not offload_master_weights: + pytest.skip("At least one offload type required") + + Utils.initialize_model_parallel() + model, optim = create_model_and_optimizer() + dist_optim = optim.chained_optimizers[0] + + # Run steps to build up optimizer state + for _ in range(3): + run_forward_backward_step(model, optim) + + offloader = dist_optim._state_offloader + + # Capture original states before offload + original_states = {} + for param, state in offloader.adam_optimizer.state.items(): + original_states[param] = { + k: v.clone() for k, v in state.items() if isinstance(v, torch.Tensor) + } + + # Offload + offloader.offload( + offload_optimizer_states=offload_optimizer_states, + offload_master_weights=offload_master_weights, + ) + + # Release GPU memory + offloader.release_gpu_memory() + torch.cuda.synchronize() + + # Reload + offloader.reload() + offloader.sync_before_step() + + # Verify states match after reload + for param, state in offloader.adam_optimizer.state.items(): + if param in original_states: + for key, original_tensor in original_states[param].items(): + if key in state and isinstance(state[key], torch.Tensor): + reloaded_tensor = state[key] + assert reloaded_tensor.device.type == 'cuda', f"State {key} should be on GPU" + torch.testing.assert_close( + reloaded_tensor, + original_tensor, + msg=f"State {key} mismatch after offload/reload", + ) + Utils.destroy_model_parallel() + + +# ============================================================================= +# Test 4: GPU Memory Release Verification +# ============================================================================= 
+@pytest.mark.skipif(not TE_FUSED_ADAM_AVAILABLE, reason="Requires TE FusedAdam") +def test_gpu_memory_release(): + """Test that GPU memory is actually freed after release_gpu_memory().""" + Utils.initialize_model_parallel() + # Use larger model for measurable memory impact + model, optim = create_model_and_optimizer(hidden_size=1024) + dist_optim = optim.chained_optimizers[0] + + # Initialize optimizer states + run_forward_backward_step(model, optim, hidden_size=1024) + + offloader = dist_optim._state_offloader + + # Measure memory before offload + torch.cuda.synchronize() + torch.cuda.empty_cache() + memory_before = torch.cuda.memory_allocated() + + # Offload and release + offloader.offload() + offloader.release_gpu_memory() + + # Wait for async operations + torch.cuda.synchronize() + torch.cuda.empty_cache() + memory_after = torch.cuda.memory_allocated() + + # Memory should decrease + memory_freed = memory_before - memory_after + assert memory_freed > 0, f"Expected memory to be freed, but got {memory_freed} bytes difference" + Utils.destroy_model_parallel() + + +# ============================================================================= +# Test 5: Multiple Offload/Reload Cycles +# ============================================================================= +@pytest.mark.skipif(not TE_FUSED_ADAM_AVAILABLE, reason="Requires TE FusedAdam") +def test_multiple_offload_reload_cycles(): + """Test that multiple offload/reload cycles work correctly.""" + Utils.initialize_model_parallel() + model, optim = create_model_and_optimizer() + dist_optim = optim.chained_optimizers[0] + + # Initialize + run_forward_backward_step(model, optim) + + offloader = dist_optim._state_offloader + + # Run multiple cycles + for cycle in range(5): + # Offload + offloader.offload() + offloader.release_gpu_memory() + + # Reload + offloader.reload() + offloader.sync_before_step() + + # Run optimizer step + run_forward_backward_step(model, optim) + + # Verify model can still produce valid 
outputs + input_tensor = torch.randn(8, 256, dtype=torch.bfloat16, device='cuda') + output = model(input_tensor) + assert not output.isnan().any(), "Model output contains NaN after multiple cycles" + Utils.destroy_model_parallel() + + +# ============================================================================= +# Test 6: Training Correctness with Offloading +# ============================================================================= +@pytest.mark.skipif(not TE_FUSED_ADAM_AVAILABLE, reason="Requires TE FusedAdam") +def test_training_correctness_with_offloading(): + """Test that training with offloading produces same results as without.""" + Utils.initialize_model_parallel() + torch.manual_seed(42) + + # Model 1: with offloading + model1, optim1 = create_model_and_optimizer(offload_optimizer_states=True, lr=0.01) + + # Model 2: without offloading (reference) + torch.manual_seed(42) + model2, optim2 = create_model_and_optimizer(offload_optimizer_states=False, lr=0.01) + + # Train both models + n_steps = 10 + torch.manual_seed(123) + dist_optim1 = optim1.chained_optimizers[0] + + # Offloader is created in __init__ when offload_optimizer_states=True + assert dist_optim1._state_offloader is not None + offloader = dist_optim1._state_offloader + + for step in range(n_steps): + input_tensor = torch.randn(8, 256, dtype=torch.bfloat16, device='cuda') + + # Model 1 with offloading + # Offload states (master weights can be offloaded from the start, + # optimizer states will be skipped until after first step) + offloader.offload() + offloader.release_gpu_memory() + + output1 = model1(input_tensor) + loss1 = output1.sum() + loss1.backward() + + offloader.reload() + offloader.sync_before_step() + optim1.step() + optim1.zero_grad() + + # Model 2 without offloading + output2 = model2(input_tensor) + loss2 = output2.sum() + loss2.backward() + optim2.step() + optim2.zero_grad() + + # Compare final model weights + for (n1, p1), (n2, p2) in zip(model1.named_parameters(), 
model2.named_parameters()): + torch.testing.assert_close( + p1.data, + p2.data, + atol=1e-5, + rtol=1e-4, + msg=f"Parameter {n1} mismatch between offloaded and non-offloaded training", + ) + Utils.destroy_model_parallel() diff --git a/tests/unit_tests/transformer/experimental_attention_variant/test_absorbed_mla.py b/tests/unit_tests/transformer/experimental_attention_variant/test_absorbed_mla.py new file mode 100644 index 00000000000..4ed9ff8af46 --- /dev/null +++ b/tests/unit_tests/transformer/experimental_attention_variant/test_absorbed_mla.py @@ -0,0 +1,421 @@ +# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + +import random +from types import SimpleNamespace +from typing import List, Optional, Tuple + +import pytest +import torch +import torch.distributed as dist + +from megatron.core import parallel_state +from megatron.core.extensions.transformer_engine_spec_provider import TESpecProvider +from megatron.core.packed_seq_params import PackedSeqParams +from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed +from megatron.core.transformer.enums import AttnMaskType +from megatron.core.transformer.experimental_attention_variant.absorbed_mla import ( + AbsorbedMLASelfAttention, + AbsorbedMLASelfAttentionSubmodules, +) +from megatron.core.transformer.identity_op import IdentityOp +from megatron.core.transformer.multi_latent_attention import ( + MLASelfAttention, + MLASelfAttentionSubmodules, +) +from megatron.core.utils import init_method_normal, scaled_init_method_normal +from tests.unit_tests.test_utilities import Utils + + +class MockCoreAttention(torch.nn.Module): + """Mock core attention for testing MLA computation flow.""" + + def __init__(self, *args, **kwargs): + super().__init__() + self.softmax_scale = kwargs.get("softmax_scale") + self.k_channels = kwargs.get("k_channels") + self.v_channels = kwargs.get("v_channels") + self.pg_collection = kwargs.get("pg_collection") + + def forward( + self, q, k, v, 
*args, packed_seq_params: Optional[PackedSeqParams] = None, **kwargs + ): + """Mock forward pass.""" + if packed_seq_params is None: + return self._forward_standard(q, k, v) + else: + return self._forward_thd(q, k, v, packed_seq_params) + + def _forward_standard(self, q, k, v): + """Standard forward for [s, b, n, d] format.""" + sq, b, n = q.shape[:3] + dtype = q.dtype + if v is None: + # Absorbed MLA + assert q.shape[-1] == self.k_channels + assert k.shape == (sq, b, 1, self.k_channels) + v = k[..., : self.v_channels] + k = k.expand(-1, -1, n, -1) + v = v.expand(-1, -1, n, -1) + else: + # Standard MLA + assert k.shape == q.shape + assert v.shape[:-1] == q.shape[:-1] + + q = q.permute(1, 2, 0, 3).contiguous() + k = k.permute(1, 2, 3, 0).contiguous() + v = v.permute(1, 2, 0, 3).contiguous() + + q = q.view(b * n, q.size(-2), q.size(-1)).float() + k = k.view(b * n, k.size(-2), k.size(-1)).float() + v = v.view(b * n, v.size(-2), v.size(-1)).float() + + score = torch.bmm(q, k) * self.softmax_scale + score = torch.nn.functional.softmax(score, dim=-1, dtype=torch.float32) + out = torch.bmm(score, v) + out = out.to(dtype) + out = out.permute(1, 0, 2) + out = out.reshape(sq, b, -1) + + return out + + def _forward_thd(self, q, k, v, packed_seq_params): + """Forward for THD packed sequence format.""" + cu_seqlens = packed_seq_params.cu_seqlens_q + num_seqs = len(cu_seqlens) - 1 + + sq, n = q.shape[:2] + dtype = q.dtype + if v is None: + # Absorbed MLA + assert q.shape[-1] == self.k_channels + assert k.shape == (sq, 1, self.k_channels) + v = k[..., : self.v_channels] + k = k.expand(-1, n, -1) + v = v.expand(-1, n, -1) + else: + # Standard MLA + assert k.shape == q.shape + assert v.shape[:-1] == q.shape[:-1] + + out_list = [] + for i in range(num_seqs): + start = cu_seqlens[i] // self.pg_collection.cp.size() + end = cu_seqlens[i + 1] // self.pg_collection.cp.size() + q_seq = q[start:end] + k_seq = k[start:end] + v_seq = v[start:end] + + q_seq = q_seq.permute(1, 0, 
2).contiguous().float() + k_seq = k_seq.permute(1, 2, 0).contiguous().float() + v_seq = v_seq.permute(1, 0, 2).contiguous().float() + + score = torch.bmm(q_seq, k_seq) * self.softmax_scale + score = torch.nn.functional.softmax(score, dim=-1, dtype=torch.float32) + out = torch.bmm(score, v_seq) + out = out.to(dtype) + out = out.permute(1, 0, 2).contiguous() + out = out.reshape(out.shape[0], -1) + out_list.append(out) + + return torch.cat(out_list, dim=0) + + +def get_mock_mla_config( + tensor_model_parallel_size: int, + context_parallel_size: int, + sequence_parallel: bool, + recompute_mla_up_proj: bool, +) -> SimpleNamespace: + """Create test config with all attributes used in MLA.""" + return SimpleNamespace( + multi_latent_attention=True, + hidden_size=7168, + num_attention_heads=128, + q_lora_rank=1536, + kv_lora_rank=512, + qk_head_dim=128, + qk_pos_emb_head_dim=64, + v_head_dim=128, + add_bias_linear=False, + bf16=True, + params_dtype=torch.bfloat16, + layernorm_epsilon=1e-5, + normalization="RMSNorm", + layernorm_zero_centered_gamma=False, + expert_model_parallel_size=1, + tensor_model_parallel_size=tensor_model_parallel_size, + sequence_parallel=tensor_model_parallel_size > 1 and sequence_parallel, + context_parallel_size=context_parallel_size, + apply_rope_fusion=False, + rope_type="yarn", + rotary_scaling_factor=40, + mscale=1.0, + mscale_all_dim=1.0, + rotary_base=10000, + original_max_position_embeddings=4096, + beta_fast=32, + beta_slow=1, + rotary_interleaved=False, + recompute_granularity="selective" if recompute_mla_up_proj else None, + recompute_modules=["mla_up_proj"] if recompute_mla_up_proj else [], + fine_grained_activation_offloading=False, + gradient_accumulation_fusion=False, + fp8=False, + fp4=False, + init_method=init_method_normal(0.02), + output_layer_init_method=scaled_init_method_normal(0.02, 61, multiplier=2.0), + kv_channels=56, + num_query_groups=128, + batch_invariant_mode=False, + cache_mla_latents=False, + 
use_cpu_initialization=False, + perform_initialization=True, + symmetric_ar_type=None, + disable_parameter_transpose_cache=False, + init_model_with_meta_device=False, + delay_wgrad_compute=False, + tp_comm_overlap=False, + experimental_attention_variant=None, + softmax_scale=None, + ) + + +def get_absorbed_mla_submodules( + down_proj_use_column_parallel: bool, qk_layernorm: bool, rms_norm: bool +) -> AbsorbedMLASelfAttentionSubmodules: + """Get submodules for AbsorbedMLASelfAttention testing.""" + backend = TESpecProvider() + linear_q_down_proj = ( + backend.column_parallel_linear() if down_proj_use_column_parallel else backend.linear() + ) + linear_kv_down_proj = ( + backend.column_parallel_linear() if down_proj_use_column_parallel else backend.linear() + ) + qk_norm = backend.layer_norm(rms_norm=rms_norm, for_qk=True) if qk_layernorm else IdentityOp + return AbsorbedMLASelfAttentionSubmodules( + linear_q_proj=backend.column_parallel_linear(), + linear_q_down_proj=linear_q_down_proj, + linear_q_up_proj=backend.column_parallel_linear(), + linear_kv_down_proj=linear_kv_down_proj, + linear_k_up_proj=backend.column_parallel_linear(), + linear_v_up_proj=backend.column_parallel_linear(), + core_attention=MockCoreAttention, + linear_proj=backend.row_parallel_linear(), + q_layernorm=qk_norm, + kv_layernorm=qk_norm, + ) + + +def get_mla_submodules( + down_proj_use_column_parallel: bool, qk_layernorm: bool, rms_norm: bool +) -> MLASelfAttentionSubmodules: + """Get submodules for AbsorbedMLASelfAttention testing.""" + backend = TESpecProvider() + linear_q_down_proj = ( + backend.column_parallel_linear() if down_proj_use_column_parallel else backend.linear() + ) + linear_kv_down_proj = ( + backend.column_parallel_linear() if down_proj_use_column_parallel else backend.linear() + ) + qk_norm = backend.layer_norm(rms_norm=rms_norm, for_qk=True) if qk_layernorm else IdentityOp + return MLASelfAttentionSubmodules( + linear_q_proj=backend.column_parallel_linear(), + 
linear_q_down_proj=linear_q_down_proj, + linear_q_up_proj=backend.column_parallel_linear(), + linear_kv_down_proj=linear_kv_down_proj, + linear_kv_up_proj=backend.column_parallel_linear(), + core_attention=MockCoreAttention, + linear_proj=backend.row_parallel_linear(), + q_layernorm=qk_norm, + kv_layernorm=qk_norm, + ) + + +# TODO: Consider using get_gpt_layer_with_transformer_engine_spec from +# megatron.core.models.gpt.gpt_layer_specs to simplify submodule setup and cover real specs. +# TODO: Add test case to cover TP > 1 but SP = False. + + +@pytest.mark.parametrize("tp_cp_sp", [[1, 1, False], [2, 1, True], [1, 2, False], [2, 2, True]]) +@pytest.mark.parametrize("qkv_format", ['sbhd', 'thd']) +@pytest.mark.parametrize("down_proj_use_column_parallel", [False, True]) +@pytest.mark.parametrize("recompute_mla_up_proj", [False, True]) +def test_functionality( + tp_cp_sp: List, + qkv_format: str, + down_proj_use_column_parallel: bool, + recompute_mla_up_proj: bool, +): + """Test that AbsorbedMLASelfAttention is equivalent to standard MLA.""" + tp_size, cp_size, sp = tp_cp_sp + Utils.initialize_model_parallel( + tensor_model_parallel_size=tp_size, context_parallel_size=cp_size + ) + model_parallel_cuda_manual_seed(123) + + # Create model + config = get_mock_mla_config( + tensor_model_parallel_size=tp_size, + context_parallel_size=cp_size, + sequence_parallel=sp, + recompute_mla_up_proj=recompute_mla_up_proj, + ) + absorbed_submodules = get_absorbed_mla_submodules( + down_proj_use_column_parallel=down_proj_use_column_parallel, + qk_layernorm=True, + rms_norm=True, + ) + standard_submodules = get_mla_submodules( + down_proj_use_column_parallel=down_proj_use_column_parallel, + qk_layernorm=True, + rms_norm=True, + ) + absorbed_mla = AbsorbedMLASelfAttention( + config=config, + submodules=absorbed_submodules, + layer_number=0, + attn_mask_type=AttnMaskType.causal, + cp_comm_type="all_gather" if cp_size > 1 else None, + pg_collection=None, + ).cuda() + standard_mla = 
MLASelfAttention( + config=config, + submodules=standard_submodules, + layer_number=0, + attn_mask_type=AttnMaskType.causal, + cp_comm_type="all_gather" if cp_size > 1 else None, + pg_collection=None, + ).cuda() + + state_dict = standard_mla.state_dict() + absorbed_mla.load_state_dict(state_dict) + + # Prepare random data + if qkv_format == 'thd': + # Create random seqlens + num_seqs, min_len, max_len = 3, 128, 1024 + divisor = tp_size * cp_size * 2 + random.seed(42) + seqlens = [random.randint(min_len, max_len) // divisor * divisor for _ in range(num_seqs)] + # Create cumulative sequence lengths + cu_seqlens = [0] + for length in seqlens: + cu_seqlens.append(cu_seqlens[-1] + length) + total_tokens = cu_seqlens[-1] + cu_seqlens = torch.IntTensor(cu_seqlens).cuda() + max_seqlen = max(seqlens) + # Create packed sequence parameters + packed_seq_params = PackedSeqParams( + cu_seqlens_q=cu_seqlens, + cu_seqlens_q_padded=cu_seqlens, + cu_seqlens_kv=cu_seqlens, + cu_seqlens_kv_padded=cu_seqlens, + max_seqlen_q=max_seqlen, + max_seqlen_kv=max_seqlen, + qkv_format='thd', + ) + hidden_states = torch.randn( + (total_tokens // cp_size // (tp_size if sp else 1), 1, config.hidden_size), + dtype=torch.bfloat16, + device='cuda', + ) + grads = torch.randn_like(hidden_states) + else: + # When SP is enabled, sequence is sharded across TP ranks + # When SP is disabled, each TP rank has the full sequence + seqlen = 1024 // cp_size // (tp_size if sp else 1) + hidden_states = torch.randn((seqlen, 3, 7168), dtype=torch.bfloat16, device='cuda') + grads = torch.randn_like(hidden_states) + packed_seq_params = None + + # Forward & Backward + for name, param in absorbed_mla.named_parameters(): + if param.grad is not None: + param.grad.zero_() + absorbed_outputs, _ = absorbed_mla( + hidden_states, attention_mask=None, packed_seq_params=packed_seq_params + ) + absorbed_outputs.backward(grads) + + for name, param in standard_mla.named_parameters(): + if param.grad is not None: + 
param.grad.zero_() + standard_outputs, _ = standard_mla( + hidden_states, attention_mask=None, packed_seq_params=packed_seq_params + ) + standard_outputs.backward(grads) + + def _calculate_tensor_similarity(x, y): + x, y = x.data.double(), y.data.double() + denominator = (x * x + y * y).sum() + if denominator == 0: + return 1 + sim = 2 * (x * y).sum() / denominator + return sim + + # Compute cosine similarity + absorbed_flat = absorbed_outputs.flatten().float() + standard_flat = standard_outputs.flatten().float() + cosine_sim = torch.nn.functional.cosine_similarity( + absorbed_flat.unsqueeze(0), standard_flat.unsqueeze(0) + ).item() + assert cosine_sim > 0.9999, f"output cosine similarity = {cosine_sim} < 0.9999" + assert _calculate_tensor_similarity(absorbed_outputs, standard_outputs) > 0.9999 + torch.testing.assert_close(absorbed_outputs, standard_outputs, atol=5e-3, rtol=5e-3) + + for name, param in absorbed_mla.named_parameters(): + assert param.grad is not None + for name, param in standard_mla.named_parameters(): + assert param.grad is not None + + # Compare gradients with cosine similarity + absorbed_grads = dict(absorbed_mla.named_parameters()) + standard_grads = dict(standard_mla.named_parameters()) + + # Map parameter names between absorbed and standard MLA + # Most parameters have the same name, except for K/V up proj + for name, param in standard_grads.items(): + if 'linear_kv_up_proj' in name: + # Special handling: combine k and v up proj grads from absorbed_mla + k_name = name.replace('linear_kv_up_proj', 'linear_k_up_proj') + v_name = name.replace('linear_kv_up_proj', 'linear_v_up_proj') + + k_grad = absorbed_grads[k_name].grad + v_grad = absorbed_grads[v_name].grad + + # Combine k and v grads (interleaved by head) + # k_grad: [n * qk_head_dim, kv_lora_rank] + # v_grad: [n * v_head_dim, kv_lora_rank] + # combined: [n * (qk_head_dim + v_head_dim), kv_lora_rank] + n_heads = absorbed_mla.num_attention_heads_per_partition + qk_head_dim = 
absorbed_mla.config.qk_head_dim + v_head_dim = absorbed_mla.config.v_head_dim + kv_lora_rank = absorbed_mla.config.kv_lora_rank + + k_grad_3d = k_grad.view(n_heads, qk_head_dim, kv_lora_rank) + v_grad_3d = v_grad.view(n_heads, v_head_dim, kv_lora_rank) + combined_grad_3d = torch.cat([k_grad_3d, v_grad_3d], dim=1) + combined_grad = combined_grad_3d.view(-1, kv_lora_rank) + + absorbed_grad_flat = combined_grad.flatten().float() + standard_grad_flat = param.grad.flatten().float() + + cos_sim = torch.nn.functional.cosine_similarity( + absorbed_grad_flat.unsqueeze(0), standard_grad_flat.unsqueeze(0) + ).item() + assert cos_sim > 0.9999, f"name: {name}, cosine similarity = {cos_sim} < 0.9999" + assert _calculate_tensor_similarity(combined_grad, param.grad) > 0.9999 + else: + absorbed_grad = absorbed_grads[name].grad + standard_grad = param.grad + + absorbed_grad_flat = absorbed_grad.flatten().float() + standard_grad_flat = standard_grad.flatten().float() + + cos_sim = torch.nn.functional.cosine_similarity( + absorbed_grad_flat.unsqueeze(0), standard_grad_flat.unsqueeze(0) + ).item() + assert cos_sim > 0.9999, f"name: {name}, cosine similarity = {cos_sim} < 0.9999" + assert _calculate_tensor_similarity(absorbed_grad, standard_grad) > 0.9999 + + Utils.destroy_model_parallel() diff --git a/tests/unit_tests/transformer/test_attention_variant_dsa.py b/tests/unit_tests/transformer/experimental_attention_variant/test_attention_variant_dsa.py similarity index 80% rename from tests/unit_tests/transformer/test_attention_variant_dsa.py rename to tests/unit_tests/transformer/experimental_attention_variant/test_attention_variant_dsa.py index bd106aa6f0e..96253a4ca10 100644 --- a/tests/unit_tests/transformer/test_attention_variant_dsa.py +++ b/tests/unit_tests/transformer/experimental_attention_variant/test_attention_variant_dsa.py @@ -17,7 +17,10 @@ DSAIndexerSubmodules, DSAttention, DSAttentionSubmodules, + FusedDSAIndexerLoss, + _compute_index_scores, compute_dsa_indexer_loss, + 
fused_qk_topk_naive, rotate_activation, ) from megatron.core.transformer.transformer_config import MLATransformerConfig @@ -265,6 +268,320 @@ def test_backward_pass(self): ), f"Gradient should be scaled by loss scale, expected {expected_grad_per_element}, got {dummy_input.grad[0].item()}" +@pytest.mark.parametrize("seqlen_and_topk", [[16, 8], [32, 16], [64, 32]]) +@pytest.mark.parametrize("sparse_loss", [False, True]) +class TestFusedDSAIndexerLossGradient: + """Test that FusedDSAIndexerLoss manual backward matches autograd backward.""" + + @pytest.fixture(scope='function', autouse=True) + def setup_method(self): + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, pipeline_model_parallel_size=1 + ) + self.pg_collection = ProcessGroupCollection.use_mpu_process_groups(required_pgs=['tp']) + yield + Utils.destroy_model_parallel() + + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + def test_fused_indexer_loss_gradient_matches_autograd(self, seqlen_and_topk, sparse_loss): + """ + Test that the manually written backward in FusedDSAIndexerLoss produces + the same gradients as PyTorch autograd on the unfused implementation. 
+ """ + seqlen = seqlen_and_topk[0] + index_topk = seqlen_and_topk[1] + batch_size = 2 + num_heads = 4 + head_dim = 64 + index_n_heads = 8 + index_head_dim = 64 + softmax_scale = head_dim**-0.5 + loss_coeff = 1.0 + + torch.manual_seed(42) + + # Create inputs for indexer + # q: [seqlen, batch, index_n_heads, index_head_dim] + q_ref = ( + torch.randn(seqlen, batch_size, index_n_heads, index_head_dim, dtype=torch.float32) + .cuda() + .requires_grad_(True) + ) + # weights: [seqlen, batch, index_n_heads] + weights_ref = ( + torch.randn(seqlen, batch_size, index_n_heads, dtype=torch.float32) + .cuda() + .requires_grad_(True) + ) + # k: [seqlen, batch, index_head_dim] + k_ref = ( + torch.randn(seqlen, batch_size, index_head_dim, dtype=torch.float32) + .cuda() + .requires_grad_(True) + ) + # query: [seqlen, batch, num_heads, head_dim] - detached, not trained + query = torch.randn(seqlen, batch_size, num_heads, head_dim, dtype=torch.bfloat16).cuda() + # key: [seqlen, batch, num_heads, head_dim] - detached, not trained + key = torch.randn(seqlen, batch_size, num_heads, head_dim, dtype=torch.bfloat16).cuda() + + # Create causal mask + mask = torch.triu( + torch.full((seqlen, seqlen), float('-inf'), dtype=torch.float32).cuda(), diagonal=1 + ) + + # ============================================= + # Method 1: Autograd (reference) + # ============================================= + # Compute index scores and apply mask (matches fused_qk_topk_naive behavior) + index_scores_ref = _compute_index_scores(q_ref, weights_ref, k_ref) + # Apply mask + index_scores_masked = index_scores_ref + mask.unsqueeze(0) + # Get topk indices from masked scores + topk_k = min(index_topk, seqlen) + topk_indices = index_scores_masked.topk(topk_k, dim=-1)[1] + + # Compute loss using autograd + loss_ref = compute_dsa_indexer_loss( + index_scores=index_scores_masked, + topk_indices=topk_indices, + query=query, + key=key, + softmax_scale=softmax_scale, + loss_coeff=loss_coeff, + sparse_loss=sparse_loss, + 
pg_collection=self.pg_collection, + ) + + # Backward with autograd + loss_ref.backward() + + # Save reference gradients + grad_q_ref = q_ref.grad.clone() + grad_weights_ref = weights_ref.grad.clone() + grad_k_ref = k_ref.grad.clone() + + # ============================================= + # Method 2: FusedDSAIndexerLoss (manual backward) + # ============================================= + # Clone tensors from ref (detach and require grad again) + q_fused = q_ref.detach().clone().requires_grad_(True) + weights_fused = weights_ref.detach().clone().requires_grad_(True) + k_fused = k_ref.detach().clone().requires_grad_(True) + + # Use FusedDSAIndexerLoss + topk_indices_fused, loss_fused = FusedDSAIndexerLoss.apply( + q_fused, + weights_fused, + k_fused, + query.detach(), + key.detach(), + softmax_scale, + index_topk, + loss_coeff, + mask, + sparse_loss, + self.pg_collection, + ) + + # Backward with manual implementation + loss_fused.backward() + + # Get fused gradients + grad_q_fused = q_fused.grad + grad_weights_fused = weights_fused.grad + grad_k_fused = k_fused.grad + + # ============================================= + # Compare gradients + # ============================================= + # Check loss values match + assert torch.allclose( + loss_fused, loss_ref, rtol=1e-5, atol=1e-5 + ), f"Loss mismatch: fused={loss_fused.item()}, ref={loss_ref.item()}" + + # Check topk indices match + assert torch.equal( + topk_indices_fused, topk_indices + ), "Top-k indices mismatch between fused and reference" + + # Check gradients match + assert torch.allclose( + grad_q_fused, grad_q_ref, rtol=1e-5, atol=1e-5 + ), f"grad_q mismatch: max diff = {(grad_q_fused - grad_q_ref).abs().max().item()}" + + assert torch.allclose( + grad_weights_fused, grad_weights_ref, rtol=1e-5, atol=1e-5 + ), f"grad_weights mismatch: max diff = {(grad_weights_fused - grad_weights_ref).abs().max().item()}" + + assert torch.allclose( + grad_k_fused, grad_k_ref, rtol=1e-5, atol=1e-5 + ), f"grad_k mismatch: 
max diff = {(grad_k_fused - grad_k_ref).abs().max().item()}" + + +@pytest.mark.parametrize("tensor_model_parallel_size", [2, 4]) +@pytest.mark.parametrize("sparse_loss", [False, True]) +class TestFusedDSAIndexerLossGradientTP: + """Test FusedDSAIndexerLoss gradient consistency across different TP sizes.""" + + @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available") + def test_fused_indexer_loss_gradient_tp_consistency( + self, tensor_model_parallel_size, sparse_loss + ): + """ + Test that FusedDSAIndexerLoss produces consistent gradients across TP ranks + and matches TP=1 baseline. + """ + seqlen = 64 + index_topk = 32 + batch_size = 2 + num_heads = 8 + head_dim = 64 + index_n_heads = 8 + index_head_dim = 64 + softmax_scale = head_dim**-0.5 + loss_coeff = 1.0 + + # ============================================= + # First run with TP=1 to get baseline + # ============================================= + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, pipeline_model_parallel_size=1 + ) + torch.manual_seed(42) + model_parallel_cuda_manual_seed(42) + + pg_collection_tp1 = ProcessGroupCollection.use_mpu_process_groups(required_pgs=['tp']) + + # Create inputs + q_input = torch.randn( + seqlen, batch_size, index_n_heads, index_head_dim, dtype=torch.float32 + ).cuda() + weights_input = torch.randn(seqlen, batch_size, index_n_heads, dtype=torch.float32).cuda() + k_input = torch.randn(seqlen, batch_size, index_head_dim, dtype=torch.float32).cuda() + query_input = torch.randn( + seqlen, batch_size, num_heads, head_dim, dtype=torch.bfloat16 + ).cuda() + key_input = torch.randn( + seqlen, batch_size, num_heads, head_dim, dtype=torch.bfloat16 + ).cuda() + mask = torch.triu( + torch.full((seqlen, seqlen), float('-inf'), dtype=torch.float32).cuda(), diagonal=1 + ) + + # Clone for TP=1 + q_tp1 = q_input.clone().requires_grad_(True) + weights_tp1 = weights_input.clone().requires_grad_(True) + k_tp1 = k_input.clone().requires_grad_(True) + + # 
Forward and backward with TP=1 + topk_indices_tp1, loss_tp1 = FusedDSAIndexerLoss.apply( + q_tp1, + weights_tp1, + k_tp1, + query_input.detach(), + key_input.detach(), + softmax_scale, + index_topk, + loss_coeff, + mask, + sparse_loss, + pg_collection_tp1, + ) + loss_tp1.backward() + + # Save TP=1 results + grad_q_tp1 = q_tp1.grad.clone() + grad_weights_tp1 = weights_tp1.grad.clone() + grad_k_tp1 = k_tp1.grad.clone() + loss_tp1_value = loss_tp1.detach().clone() + + Utils.destroy_model_parallel() + + # ============================================= + # Run with target TP size + # ============================================= + Utils.initialize_model_parallel( + tensor_model_parallel_size=tensor_model_parallel_size, pipeline_model_parallel_size=1 + ) + torch.manual_seed(42) + model_parallel_cuda_manual_seed(42) + + pg_collection_tpn = ProcessGroupCollection.use_mpu_process_groups(required_pgs=['tp']) + tp_rank = parallel_state.get_tensor_model_parallel_rank() + + # Clone inputs for TP=N (same values as TP=1) + q_tpn = q_input.clone().requires_grad_(True) + weights_tpn = weights_input.clone().requires_grad_(True) + k_tpn = k_input.clone().requires_grad_(True) + + # query and key need to be split along heads for TP + head_per_rank = num_heads // tensor_model_parallel_size + start_head = tp_rank * head_per_rank + end_head = (tp_rank + 1) * head_per_rank + query_tpn = query_input[:, :, start_head:end_head, :].clone() + key_tpn = key_input[:, :, start_head:end_head, :].clone() + + # Forward and backward with TP=N + topk_indices_tpn, loss_tpn = FusedDSAIndexerLoss.apply( + q_tpn, + weights_tpn, + k_tpn, + query_tpn.detach(), + key_tpn.detach(), + softmax_scale, + index_topk, + loss_coeff, + mask, + sparse_loss, + pg_collection_tpn, + ) + loss_tpn.backward() + + # ============================================= + # Compare results + # ============================================= + # Loss should be the same + assert torch.allclose( + loss_tpn, loss_tp1_value, rtol=1e-5, 
atol=1e-5 + ), f"Loss mismatch: TP={tensor_model_parallel_size} got {loss_tpn.item()}, TP=1 got {loss_tp1_value.item()}" + + # Top-k indices should be the same + assert torch.equal( + topk_indices_tpn, topk_indices_tp1 + ), "Top-k indices mismatch between TP=1 and TP=N" + + # Gradients should match exactly (indexer params are duplicated across TP) + assert torch.allclose( + q_tpn.grad, grad_q_tp1, rtol=1e-5, atol=1e-5 + ), f"grad_q mismatch: max diff = {(q_tpn.grad - grad_q_tp1).abs().max().item()}" + + assert torch.allclose( + weights_tpn.grad, grad_weights_tp1, rtol=1e-5, atol=1e-5 + ), f"grad_weights mismatch: max diff = {(weights_tpn.grad - grad_weights_tp1).abs().max().item()}" + + assert torch.allclose( + k_tpn.grad, grad_k_tp1, rtol=1e-5, atol=1e-5 + ), f"grad_k mismatch: max diff = {(k_tpn.grad - grad_k_tp1).abs().max().item()}" + + # Check gradients are identical across all TP ranks + tp_size = parallel_state.get_tensor_model_parallel_world_size() + if tp_size > 1: + for grad_tensor, name in [ + (q_tpn.grad, "grad_q"), + (weights_tpn.grad, "grad_weights"), + (k_tpn.grad, "grad_k"), + ]: + grad_list = [torch.zeros_like(grad_tensor) for _ in range(tp_size)] + torch.distributed.all_gather(grad_list, grad_tensor, group=pg_collection_tpn.tp) + + for i in range(1, tp_size): + assert torch.allclose( + grad_list[0], grad_list[i], rtol=0, atol=0 + ), f"{name} differs between TP rank 0 and rank {i}" + + Utils.destroy_model_parallel() + + @pytest.mark.parametrize("seqlen", [16, 64]) class TestDSAIndexer: """Test DSA Indexer module basic functionality with TP=1.""" diff --git a/tests/unit_tests/transformer/moe/test_token_dispatcher.py b/tests/unit_tests/transformer/moe/test_token_dispatcher.py index 24617952b94..a7941837205 100644 --- a/tests/unit_tests/transformer/moe/test_token_dispatcher.py +++ b/tests/unit_tests/transformer/moe/test_token_dispatcher.py @@ -1,4 +1,4 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. 
+# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. import copy import dataclasses diff --git a/tests/unit_tests/transformer/test_attention.py b/tests/unit_tests/transformer/test_attention.py index d7771d0920d..d760b314c0a 100644 --- a/tests/unit_tests/transformer/test_attention.py +++ b/tests/unit_tests/transformer/test_attention.py @@ -1,21 +1,46 @@ -# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved. import copy +from unittest import mock +import einops import pytest import torch from packaging import version +from torch.nn import functional as F import megatron.core.parallel_state as parallel_state from megatron.core.hyper_comm_grid import HyperCommGrid -from megatron.core.models.gpt.gpt_layer_specs import get_gpt_layer_with_transformer_engine_spec +from megatron.core.models.common.embeddings.rope_utils import ( + get_pos_emb_on_this_cp_rank as get_tensor_on_this_cp_rank, +) +from megatron.core.models.gpt.gpt_layer_specs import ( + get_gpt_layer_local_spec, + get_gpt_layer_with_transformer_engine_spec, +) +from megatron.core.models.gpt.gpt_model import GPTModel from megatron.core.process_groups_config import ProcessGroupCollection from megatron.core.tensor_parallel.random import model_parallel_cuda_manual_seed from megatron.core.transformer import TransformerConfig from megatron.core.transformer.attention import SelfAttention +from megatron.core.transformer.dot_product_attention_context_parallel import ( + AttentionFuncionWithContextParallel, + to_zz_mask_attn_bias, +) from megatron.core.transformer.enums import AttnMaskType from megatron.core.utils import is_te_min_version +from megatron.training.arguments import parse_args +from megatron.training.checkpointing import load_checkpoint, save_checkpoint +from megatron.training.global_vars import set_args +from megatron.training.training import get_model +from megatron.training.utils import unwrap_model 
+from tests.unit_tests.dist_checkpointing import ( + TempNamedDir, + init_basic_mock_args, + init_checkpointing_mock_args, +) from tests.unit_tests.test_utilities import Utils +from tests.unit_tests.transformer.test_multi_latent_attention import make_test_packed_seq_params try: from transformer_engine.pytorch.attention.rope import apply_fused_qkv_rotary_pos_emb @@ -26,10 +51,19 @@ @pytest.mark.parametrize("output_gate", [False, True]) +@pytest.mark.parametrize( + ("transformer_impl", "fallback_to_eager_attn"), + [("transformer_engine", False), ("transformer_engine", True), ("native", False)], +) class TestParallelAttention: @pytest.fixture(scope='function', autouse=True) - def setup_method(self, output_gate): + def setup_method(self, output_gate, transformer_impl, fallback_to_eager_attn): + if output_gate: + if transformer_impl == "native": + pytest.skip("Native implementation does not support output gate.") + if fallback_to_eager_attn: + pytest.skip("No need to test output gate for fallback_to_eager_attn = True.") Utils.initialize_model_parallel(1, 1) model_parallel_cuda_manual_seed(123) self.transformer_config = TransformerConfig( @@ -40,11 +74,18 @@ def setup_method(self, output_gate): bf16=True, params_dtype=torch.bfloat16, attention_output_gate=output_gate, + transformer_impl=transformer_impl, + fallback_to_eager_attn=fallback_to_eager_attn, ) + if transformer_impl == "transformer_engine": + layer_spec = get_gpt_layer_with_transformer_engine_spec( + fallback_to_eager_attn=fallback_to_eager_attn + ) + else: + layer_spec = get_gpt_layer_local_spec() + attn_layer_spec = layer_spec.submodules.self_attention.submodules self.parallel_attention = SelfAttention( - self.transformer_config, - get_gpt_layer_with_transformer_engine_spec().submodules.self_attention.submodules, - layer_number=1, + self.transformer_config, attn_layer_spec, layer_number=1 ) def teardown_method(self): @@ -55,10 +96,19 @@ def test_constructor(self): assert self.parallel_attention.layer_number 
== 1 num_weights = sum([p.numel() for p in self.parallel_attention.parameters()]) + + hidden_size = self.transformer_config.hidden_size + standard_num_weights = ( + hidden_size * hidden_size * 4 + hidden_size * 4 # QKVO weight # QKVO bias + ) if self.transformer_config.attention_output_gate: - assert num_weights == 82816 - else: - assert num_weights == 66304 + standard_num_weights += hidden_size * hidden_size + hidden_size # Gate weight and bias + if self.transformer_config.transformer_impl == "transformer_engine": + standard_num_weights += hidden_size * 2 # fused pre layernorm weight and bias + + assert ( + num_weights == standard_num_weights + ), f"{num_weights=} does not match {standard_num_weights=}." def test_cpu_forward(self): # we can't currently do this because the global memory buffer is on GPU @@ -93,6 +143,8 @@ def test_gpu_forward(self): @pytest.mark.parametrize("rotary_interleaved", [True, False]) @pytest.mark.parametrize("fused_qkv_rope", [True, False]) def test_fused_rope_gpu_forward(self, rotary_interleaved, fused_qkv_rope): + if self.transformer_config.fallback_to_eager_attn: + pytest.skip("No need to test fused RoPE for fallback_to_eager_attn = True.") self.parallel_attention.config.apply_rope_fusion = True if rotary_interleaved and not is_te_min_version("2.3.0"): pytest.skip("Only TE >= 2.3.0 supports interleaved fused RoPE.") @@ -356,6 +408,199 @@ class TestSelfAttention: @pytest.fixture(scope='function', autouse=True) def setup_method(self, output_gate): + self.output_gate = output_gate + Utils.initialize_model_parallel(1, 1) + model_parallel_cuda_manual_seed(123) + + def teardown_method(self): + Utils.destroy_model_parallel() + + def test_clip_qk_disabled_raises_error(self): + """Test that clip_qk raises ValueError when qk_clip is not enabled.""" + transformer_config = TransformerConfig( + num_layers=2, + hidden_size=128, + num_attention_heads=4, + use_cpu_initialization=True, + qk_clip=False, + ) + attention = SelfAttention( + 
transformer_config, + get_gpt_layer_with_transformer_engine_spec().submodules.self_attention.submodules, + layer_number=1, + ) + + with pytest.raises(ValueError, match="qk_clip option needs to be enabled"): + attention.clip_qk() + + def test_clip_qk_none_logits_raises_error(self): + """Test that clip_qk raises ValueError when current_max_attn_logits is None.""" + transformer_config = TransformerConfig( + num_layers=2, + hidden_size=128, + num_attention_heads=4, + use_cpu_initialization=True, + qk_clip=True, + qk_clip_threshold=100.0, + qk_clip_alpha=0.5, + ) + attention = SelfAttention( + transformer_config, + get_gpt_layer_with_transformer_engine_spec().submodules.self_attention.submodules, + layer_number=1, + ) + + with pytest.raises(ValueError, match="current_max_attn_logits is None"): + attention.clip_qk() + + def test_clip_qk_below_threshold_no_update(self): + """Test that weights are not updated when max logits are below threshold.""" + transformer_config = TransformerConfig( + num_layers=2, + hidden_size=128, + num_attention_heads=4, + use_cpu_initialization=True, + qk_clip=True, + qk_clip_threshold=100.0, + qk_clip_alpha=0.5, + ) + attention = SelfAttention( + transformer_config, + get_gpt_layer_with_transformer_engine_spec().submodules.self_attention.submodules, + layer_number=1, + ) + attention.cuda() + + # Save original weights + original_weight = attention.linear_qkv.weight.data.clone() + + # Set current_max_attn_logits below threshold + attention.core_attention.current_max_attn_logits = torch.tensor( + [50.0, 60.0, 70.0, 80.0], device='cuda' + ) + + # Call clip_qk + attention.clip_qk() + + # Weights should not be updated + assert torch.equal(attention.linear_qkv.weight.data, original_weight) + # current_max_attn_logits should be reset + assert attention.core_attention.current_max_attn_logits is None + + def test_clip_qk_above_threshold_updates_weights(self): + """Test that weights are updated when max logits exceed threshold.""" + transformer_config = 
TransformerConfig( + num_layers=2, + hidden_size=128, + num_attention_heads=4, + use_cpu_initialization=True, + qk_clip=True, + qk_clip_threshold=100.0, + qk_clip_alpha=0.5, + ) + attention = SelfAttention( + transformer_config, + get_gpt_layer_with_transformer_engine_spec().submodules.self_attention.submodules, + layer_number=1, + ) + attention.cuda() + + # Save original weights + original_weight = attention.linear_qkv.weight.data.clone() + + # Set current_max_attn_logits above threshold + attention.core_attention.current_max_attn_logits = torch.tensor( + [150.0, 160.0, 170.0, 180.0], device='cuda' + ) + + # Call clip_qk + attention.clip_qk() + + # Weights should be updated + assert not torch.equal(attention.linear_qkv.weight.data, original_weight) + # current_max_attn_logits should be reset + assert attention.core_attention.current_max_attn_logits is None + + def test_clip_qk_gqa_configuration(self): + """Test clip_qk with GQA (Grouped Query Attention) configuration.""" + transformer_config = TransformerConfig( + num_layers=2, + hidden_size=128, + num_attention_heads=8, + num_query_groups=4, # GQA with 2 heads per group + use_cpu_initialization=True, + qk_clip=True, + qk_clip_threshold=100.0, + qk_clip_alpha=0.5, + ) + attention = SelfAttention( + transformer_config, + get_gpt_layer_with_transformer_engine_spec().submodules.self_attention.submodules, + layer_number=1, + ) + attention.cuda() + + # Save original weights + original_weight = attention.linear_qkv.weight.data.clone() + + # Set current_max_attn_logits for all heads (8 heads) + attention.core_attention.current_max_attn_logits = torch.tensor( + [150.0, 160.0, 170.0, 180.0, 190.0, 200.0, 210.0, 220.0], device='cuda' + ) + + # Call clip_qk + attention.clip_qk() + + # Weights should be updated + assert not torch.equal(attention.linear_qkv.weight.data, original_weight) + # current_max_attn_logits should be reset + assert attention.core_attention.current_max_attn_logits is None + + def 
test_clip_qk_mixed_logits(self): + """Test clip_qk with mixed logits (some above, some below threshold).""" + transformer_config = TransformerConfig( + num_layers=2, + hidden_size=128, + num_attention_heads=4, + use_cpu_initialization=True, + qk_clip=True, + qk_clip_threshold=100.0, + qk_clip_alpha=0.5, + ) + attention = SelfAttention( + transformer_config, + get_gpt_layer_with_transformer_engine_spec().submodules.self_attention.submodules, + layer_number=1, + ) + attention.cuda() + + # Save original weights + original_weight = attention.linear_qkv.weight.data.clone() + + # Set mixed current_max_attn_logits (some above, some below threshold) + attention.core_attention.current_max_attn_logits = torch.tensor( + [80.0, 150.0, 90.0, 200.0], device='cuda' + ) + + # Call clip_qk + attention.clip_qk() + + # Weights should be updated since at least one head exceeds threshold + assert not torch.equal(attention.linear_qkv.weight.data, original_weight) + # current_max_attn_logits should be reset + assert attention.core_attention.current_max_attn_logits is None + + +@pytest.mark.parametrize("output_gate", [False, True]) +@pytest.mark.parametrize("transformer_impl", ["transformer_engine", "native"]) +class TestSelfAttention: + + @pytest.fixture(scope='function', autouse=True) + def setup_method(self, output_gate, transformer_impl): + if transformer_impl == "native": + if output_gate: + pytest.skip("Native implementation does not support output gate.") + self.transformer_impl = transformer_impl self.output_gate = output_gate Utils.destroy_model_parallel() @@ -371,10 +616,15 @@ def run_self_attention(self, pg_collection): attention_output_gate=self.output_gate, tensor_model_parallel_size=tensor_model_parallel_size, use_cpu_initialization=False, + transformer_impl=self.transformer_impl, ) + if self.transformer_impl == "transformer_engine": + get_gpt_layer_spec_fn = get_gpt_layer_with_transformer_engine_spec + else: + get_gpt_layer_spec_fn = get_gpt_layer_local_spec 
self.self_attention = SelfAttention( self.transformer_config, - get_gpt_layer_with_transformer_engine_spec().submodules.self_attention.submodules, + get_gpt_layer_spec_fn().submodules.self_attention.submodules, layer_number=1, attn_mask_type=AttnMaskType.causal, pg_collection=pg_collection, @@ -447,3 +697,370 @@ def test_self_attention_independent_pg_smoke(self): pg_collection = ProcessGroupCollection(tp=tp_group, cp=cp_group) self.run_self_attention(pg_collection) + + +def _test_parallel_attention_correctness( + transformer_config, + transformer_layer_spec, + tmp_path_dist_ckpt, + atol, + rtol, + tp=1, + sp=False, + cp=1, + seed=123, + sequence_length=256, + micro_batch_size=4, + sequence_packing=False, +): + # Model initialization function + def initialize_gpt_model( + config, pre_process=True, post_process=True, vp_stage=None, pg_collection=None + ): + gpt_model = GPTModel( + config=config, + transformer_layer_spec=transformer_layer_spec, + vocab_size=128, + max_sequence_length=sequence_length, + pre_process=pre_process, + post_process=post_process, + vp_stage=vp_stage, + pg_collection=pg_collection, + ) + return gpt_model + + # Initialize baseline parallel state + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, pipeline_model_parallel_size=1, context_parallel_size=1 + ) + + # Initialize input hidden states + torch.manual_seed(seed) + model_parallel_cuda_manual_seed(seed) + input_hidden_states = ( + torch.rand((sequence_length, micro_batch_size, transformer_config.hidden_size)) + .cuda() + .bfloat16() + .requires_grad_(True) + ) + + with TempNamedDir(tmp_path_dist_ckpt / 'test_parallel_attn', sync=True) as ckpt_dir: + # Set argument + mock_args = parse_args(ignore_unknown_args=True) + set_args(mock_args) + + # Initialize baseline model + init_basic_mock_args(mock_args, 1, 1, bf16=True) + mock_args.context_parallel_size = 1 + mock_args.sequence_parallel = 1 + gpt_model = unwrap_model(get_model(initialize_gpt_model, config=transformer_config)) + + 
# Initialize args and save checkpoint + init_checkpointing_mock_args(mock_args, ckpt_dir, False) + mock_args.no_save_optim = True + mock_args.no_save_rng = True + mock_args.no_load_optim = True + mock_args.no_load_rng = True + save_checkpoint(10, gpt_model, None, None, 0) + + # Calculate baseline output + attention = gpt_model[0].decoder.layers[0].self_attention + output_hidden_states_baseline, bias_hidden_states_baseline = attention( + input_hidden_states, attention_mask=None + ) + output_hidden_states_baseline.sum().backward() + + # Save baseline output + input_grad_baseline = input_hidden_states.grad.detach() + output_hidden_states_baseline = output_hidden_states_baseline.detach() + bias_hidden_states_baseline = bias_hidden_states_baseline + if bias_hidden_states_baseline is not None: + bias_hidden_states_baseline = bias_hidden_states_baseline.detach() + has_bias = True + else: + has_bias = False + + # Initialize parallel model + Utils.destroy_model_parallel() + Utils.initialize_model_parallel( + tensor_model_parallel_size=tp, pipeline_model_parallel_size=1, context_parallel_size=cp + ) + torch.manual_seed(seed) + model_parallel_cuda_manual_seed(seed) + transformer_config.context_parallel_size = cp + transformer_config.tensor_model_parallel_size = tp + transformer_config.sequence_parallel = sp + init_basic_mock_args(mock_args, tp, 1, bf16=True) + mock_args.context_parallel_size = cp + mock_args.sequence_parallel = sp + gpt_model = unwrap_model(get_model(initialize_gpt_model, config=transformer_config)) + with mock.patch('megatron.training.checkpointing.check_checkpoint_args'): + with mock.patch('megatron.training.checkpointing.update_num_microbatches'): + load_checkpoint(gpt_model, None, None) + + # Function to get tensor on this tp and cp rank + cp_group = parallel_state.get_context_parallel_group() + tp_rank = parallel_state.get_tensor_model_parallel_rank() + + def get_tensor_on_this_rank(tensor): + if cp > 1: + tensor = get_tensor_on_this_cp_rank(tensor, 0, 
cp_group) + if sequence_packing: + tensor = tensor.transpose(0, 1).contiguous().view(-1, 1, *tensor.shape[2:]) + if tp > 1 and sp: + sp_seg = tensor.shape[0] // tp + tensor = tensor[tp_rank * sp_seg : (tp_rank + 1) * sp_seg] + return tensor + + # Calculate parallel model output + if sequence_packing: + cu_seqlens = [i * sequence_length for i in range(micro_batch_size + 1)] + packed_seq_params = make_test_packed_seq_params(cu_seqlens=cu_seqlens) + else: + packed_seq_params = None + input_hidden_states = get_tensor_on_this_rank(input_hidden_states) + input_hidden_states = input_hidden_states.detach().requires_grad_(True) + parallel_attention = gpt_model[0].decoder.layers[0].self_attention + output_hidden_states_parallel, bias_hidden_states_parallel = parallel_attention( + input_hidden_states, attention_mask=None, packed_seq_params=packed_seq_params + ) + output_hidden_states_parallel.sum().backward() + input_grad_parallel = input_hidden_states.grad.detach() + + # Check if the output is close + output_hidden_states_baseline = get_tensor_on_this_rank(output_hidden_states_baseline) + input_grad_baseline = get_tensor_on_this_rank(input_grad_baseline) + + assert torch.all( + ~torch.isnan(output_hidden_states_baseline) + ), "output_hidden_states_baseline contains nan" + assert torch.all( + ~torch.isinf(output_hidden_states_baseline) + ), "output_hidden_states_baseline contains inf" + assert torch.all(~torch.isnan(input_grad_baseline)), "input_grad_baseline contains nan" + assert torch.all(~torch.isinf(input_grad_baseline)), "input_grad_baseline contains inf" + assert torch.all( + ~torch.isnan(output_hidden_states_parallel) + ), "output_hidden_states_parallel contains nan" + assert torch.all( + ~torch.isinf(output_hidden_states_parallel) + ), "output_hidden_states_parallel contains inf" + assert torch.all(~torch.isnan(input_grad_parallel)), "input_grad_parallel contains nan" + assert torch.all(~torch.isinf(input_grad_parallel)), "input_grad_parallel contains inf" + if 
has_bias: + assert torch.all( + ~torch.isnan(bias_hidden_states_baseline) + ), "bias_hidden_states_baseline contains nan" + assert torch.all( + ~torch.isinf(bias_hidden_states_baseline) + ), "bias_hidden_states_baseline contains inf" + assert torch.all( + ~torch.isnan(bias_hidden_states_parallel) + ), "bias_hidden_states_parallel contains nan" + assert torch.all( + ~torch.isinf(bias_hidden_states_parallel) + ), "bias_hidden_states_parallel contains inf" + + torch.testing.assert_close( + output_hidden_states_baseline, + output_hidden_states_parallel, + atol=atol, + rtol=rtol, + msg=lambda msg: f"Mismatch in output_hidden_states: {msg}", + ) + torch.testing.assert_close( + input_grad_baseline, + input_grad_parallel, + atol=atol, + rtol=rtol, + msg=lambda msg: f"Mismatch in input_grad: {msg}", + ) + if has_bias: + torch.testing.assert_close( + bias_hidden_states_baseline, + bias_hidden_states_parallel, + atol=atol, + rtol=rtol, + msg=lambda msg: f"Mismatch in bias_hidden_states: {msg}", + ) + + Utils.destroy_model_parallel() + + +# TODO(yuzhongw): Add test case for fallback_to_eager_attn +@pytest.mark.parametrize("sequence_packing", [False, True]) +@pytest.mark.parametrize("apply_rope_fusion", [False, True]) +@pytest.mark.parametrize( + ("tp", "sp", "cp"), + [ + (4, False, 1), # TP w/o SP + (4, True, 1), # TP w/ SP + (1, False, 4), # CP + (2, False, 2), # CP + TP w/o SP + (2, True, 2), # CP + TP w/ SP + ], +) +@pytest.mark.parametrize("qk_layernorm", [False, True]) +@pytest.mark.parametrize("output_gate", [False, True]) +def test_parallel_attention_correctness( + tmp_path_dist_ckpt, sequence_packing, apply_rope_fusion, tp, sp, cp, qk_layernorm, output_gate +): + transformer_config = TransformerConfig( + num_layers=1, + hidden_size=128, + num_attention_heads=4, + normalization="RMSNorm", + bf16=True, + qk_layernorm=qk_layernorm, + apply_rope_fusion=apply_rope_fusion, + attention_output_gate=output_gate, + hidden_dropout=0.0, + attention_dropout=0.0, + ) + + 
transformer_layer_spec = get_gpt_layer_with_transformer_engine_spec(qk_layernorm=qk_layernorm) + atol, rtol = 1e-2, 1e-2 + + _test_parallel_attention_correctness( + transformer_config, + transformer_layer_spec, + tmp_path_dist_ckpt, + atol=atol, + rtol=rtol, + tp=tp, + sp=sp, + cp=cp, + seed=123, + sequence_length=256, + sequence_packing=sequence_packing, + ) + + +def _torch_native_attention(query, key, value, attention_mask, sinks, scaling: float): + """Torch native attention implementation + This was not in the original implementation and slightly affect results; + it prevents overflow in BF16/FP16 when training with batch size > 1 we clamp max values. + """ + # Rearrange query, key, value to (b, h, s, d) + query = einops.rearrange(query, 's b h d -> b h s d') + key = einops.rearrange(key, 's b h d -> b h s d') + value = einops.rearrange(value, 's b h d -> b h s d') + + # Compute attention weights + attn_weights = torch.matmul(query, key.transpose(2, 3)) * scaling + if attention_mask is not None: + nheads = query.shape[1] + nheads_k = key.shape[1] + heads_k_stride = 1 + mask_bias = to_zz_mask_attn_bias( + attention_mask, 1, nheads, nheads_k, heads_k_stride, query.device, query.dtype + ) + attn_weights = attn_weights + mask_bias + + # Add sinks to attention weights + if sinks is None: + combined_logits = attn_weights + else: + sinks = sinks.reshape(1, -1, 1, 1).expand(query.shape[0], -1, query.shape[-2], -1) + combined_logits = torch.cat([attn_weights, sinks], dim=-1) + + # Compute attention scores + probs = F.softmax(combined_logits, dim=-1, dtype=combined_logits.dtype) + if sinks is None: + scores = probs + else: + scores = probs[..., :-1] + + # Compute attention output + attn_output = torch.matmul(scores, value) + attn_output = einops.rearrange(attn_output, 'b h s d -> s b h d') + attn_output = attn_output.contiguous() + return attn_output + + +def test_eager_attention_function_correctness(): + """Test the correctness of the context parallel eager attention 
function""" + + # Configuration + batch_size = 4 + num_heads = 2 + head_dim = 256 + seq_len_q = 512 + seq_len_k = 2048 + scale = 1 / (head_dim**2) + + # Initialize inputs + q = torch.rand( + (seq_len_q, batch_size, num_heads, head_dim), + device='cuda', + dtype=torch.bfloat16, + requires_grad=True, + ) + k = torch.rand( + (seq_len_k, batch_size, num_heads, head_dim), + device='cuda', + dtype=torch.bfloat16, + requires_grad=True, + ) + v = torch.rand( + (seq_len_k, batch_size, num_heads, head_dim), + device='cuda', + dtype=torch.bfloat16, + requires_grad=True, + ) + + def randbool(shape, **kwargs): + return torch.randn(shape, **kwargs) > 0 + + attn_bias = randbool((batch_size, 1, seq_len_q, seq_len_k), device='cuda') + sinks = None + + # Torch native attention forward and backward pass + out_torch = _torch_native_attention( + query=q, key=k, value=v, attention_mask=attn_bias, sinks=sinks, scaling=scale + ) + loss_torch = out_torch.sum() + loss_torch.backward() + torch_q_grad = q.grad.clone() + torch_k_grad = k.grad.clone() + torch_v_grad = v.grad.clone() + q.grad.zero_() + k.grad.zero_() + v.grad.zero_() + if sinks is not None: + torch_sinks_grad = sinks.grad.clone() + sinks.grad.zero_() + else: + torch_sinks_grad = None + + # Custom attention forward and backward pass + out_custom = AttentionFuncionWithContextParallel.apply( + q, k, v, attn_bias, 0.0, scale, None # dropout + ) + loss_custom = out_custom.sum() + loss_custom.backward() + custom_q_grad = q.grad.clone() + custom_k_grad = k.grad.clone() + custom_v_grad = v.grad.clone() + q.grad.zero_() + k.grad.zero_() + v.grad.zero_() + if sinks is not None: + custom_sinks_grad = sinks.grad.clone() + sinks.grad.zero_() + else: + custom_sinks_grad = None + + # Check attention output and gradients + assert torch.equal(out_custom, out_torch), "Mismatch in attention output" + tol = {"atol": 1e-4, "rtol": 1e-4} + for tensor_name, tensor_torch, tensor_custom in [ + ("q_grad", torch_q_grad, custom_q_grad), + ("k_grad", 
torch_k_grad, custom_k_grad), + ("v_grad", torch_v_grad, custom_v_grad), + ("sinks_grad", torch_sinks_grad, custom_sinks_grad), + ]: + if (tensor_torch is not None) and (tensor_custom is not None): + torch.testing.assert_close( + out_custom, out_torch, **tol, msg=lambda msg: f"Mismatch in {tensor_name}: {msg}" + ) diff --git a/tests/unit_tests/transformer/test_cuda_graphs.py b/tests/unit_tests/transformer/test_cuda_graphs.py index 4696a3ed439..d997e8eac39 100644 --- a/tests/unit_tests/transformer/test_cuda_graphs.py +++ b/tests/unit_tests/transformer/test_cuda_graphs.py @@ -2,15 +2,32 @@ import gc import os +import random import sys +import time +import types import pytest import torch from transformer_engine.pytorch.fp8 import check_fp8_support +from megatron.core import parallel_state from megatron.core.enums import ModelType +from megatron.core.inference.contexts import DynamicInferenceContext +from megatron.core.inference.engines import DynamicInferenceEngine +from megatron.core.inference.model_inference_wrappers.gpt.gpt_inference_wrapper import ( + GPTInferenceWrapper, +) +from megatron.core.inference.model_inference_wrappers.inference_wrapper_config import ( + InferenceWrapperConfig, +) +from megatron.core.inference.sampling_params import SamplingParams +from megatron.core.inference.text_generation_controllers.text_generation_controller import ( + TextGenerationController, +) from megatron.core.models.gpt.gpt_layer_specs import ( get_gpt_decoder_block_spec, + get_gpt_layer_local_spec, get_gpt_layer_with_transformer_engine_spec, get_gpt_mtp_block_spec, ) @@ -527,6 +544,204 @@ def test_gpu_cudagraph(self): del parallel_mamba_block.layers[_].cudagraph_manager.cudagraph_runners[0].fwd_graph +class TestCaptureFreezeGC: + + def capture_cuda_graphs(self, cuda_graph_capture_freeze_gc: bool) -> None: + """Capture multiple cuda graphs by initializing the `DynamicInferenceEngine`. 
+ + The `DynamicInferenceEngine` is used here because it is currently (as of + August 2025) one of the heaviest users of multiple cuda graphs, and so + its setup tests a realistic use-case of multi-batch size cuda graphs. + + Args: + cuda_graph_capture_freeze_gc (bool): Flag that determines whether to + freeze garbage collection. + """ + + # Set freeze-gc environment variable. + os.environ["CUDA_GRAPH_CAPTURE_FREEZE_GC"] = str(int(cuda_graph_capture_freeze_gc)) + + # Configuration. + random_seed = 123 + vocab_size = 100 + num_tokens_to_prompt = 128 + num_tokens_to_generate = 32 + max_sequence_length = num_tokens_to_prompt + num_tokens_to_generate + num_cuda_graphs = 4 + + # Rounder values. + rounder = 4 + DynamicInferenceContext.ROUNDER = rounder # For backwards compatibility + DynamicInferenceContext.TOKEN_ROUNDER = rounder + DynamicInferenceContext.REQUEST_ROUNDER = rounder + + # Random state. + random.seed(random_seed) + torch.manual_seed(random_seed) + model_parallel_cuda_manual_seed( + seed=random_seed, + inference_rng_tracker=True, + use_cudagraphable_rng=False, + force_reset_rng=True, + ) + + # Transformer config. + transformer_config = TransformerConfig( + params_dtype=torch.bfloat16, + num_layers=4, + hidden_size=32, + num_attention_heads=4, + use_cpu_initialization=True, + cuda_graph_impl="local", + inference_rng_tracker=True, + tensor_model_parallel_size=1, # needed? + ) + + # Sampling params. + sampling_params = SamplingParams(num_tokens_to_generate=num_tokens_to_generate) + + # GPT model. + model = GPTModel( + config=transformer_config, + transformer_layer_spec=get_gpt_layer_local_spec(), + vocab_size=vocab_size, + max_sequence_length=max_sequence_length, + parallel_output=True, + ).cuda() + + for param in model.parameters(): + param.data = param.data.to(transformer_config.params_dtype) + + model.eval() + + # Inference config. 
+ inference_config = InferenceWrapperConfig( + hidden_size=transformer_config.hidden_size, + inference_batch_times_seqlen_threshold=400, + fp32_residual_connection=False, + params_dtype=transformer_config.params_dtype, + padded_vocab_size=vocab_size, + ) + + # Inference context. + context = DynamicInferenceContext( + params_dtype=transformer_config.params_dtype, + num_layers=transformer_config.num_layers, + kv_channels=transformer_config.kv_channels, + num_attention_heads=transformer_config.num_query_groups, + max_sequence_length=max_sequence_length, + num_cuda_graphs=num_cuda_graphs, + buffer_size_gb=20, + buffer_guaranteed_fraction=0.05, + block_size_tokens=256, + buffer_overflow_factor=1.1, + max_requests_override=512, + max_tokens_override=8196, + tensor_model_parallel_size=transformer_config.tensor_model_parallel_size, + ) + + # Inference model wrapper. + inference_wrapped_model = GPTInferenceWrapper(model, inference_config, context) + + # Note: the following is taken from AbstractModelInferenceWrapper.prep_model_for_inference(). + inference_wrapped_model.model_is_pipeline_parallel = not ( + parallel_state.is_pipeline_first_stage() and parallel_state.is_pipeline_last_stage() + ) + + # Text generation controller. + text_generation_controller = TextGenerationController( + inference_wrapped_model=inference_wrapped_model, + tokenizer=types.SimpleNamespace(vocab_size=vocab_size), + ) + + # Inference engine. 
+ engine = DynamicInferenceEngine( + text_generation_controller, + context, + termination_id=vocab_size - 1, + random_seed=random_seed, + ) + + return engine.capture_stats + + @pytest.mark.flaky_in_dev # Issue #2855 + @pytest.mark.flaky + @pytest.mark.experimental + @pytest.mark.skipif( + not is_fa_min_version("2.7.3"), reason="need latest flash attn for dynamic batching" + ) + def test_capture_freeze_gc(self): + """Test cuda graph capture while freezing the GC.""" + + Utils.initialize_model_parallel( + tensor_model_parallel_size=1, pipeline_model_parallel_size=1 + ) + + # Run tests with GC freeze off/on. + result_map = {} + for freeze_gc in (False, True): + + # Reset global cuda graph state. + _CudagraphGlobalRecord.cudagraph_created = False + _CudagraphGlobalRecord.cudagraph_record = [] + CudaGraphManager.global_mempool = None + + # Capture multiple cuda graphs by initializing DynamicInferenceEngine. + mem_stats_start = torch.cuda.memory_stats() + time_start = time.time() + internal_stats = self.capture_cuda_graphs(freeze_gc) + time_end = time.time() + mem_stats_end = torch.cuda.memory_stats() + + # Track local (external) stats, in addition to internal stats. + external_stats = { + "time": time_end - time_start, + "allocated_bytes": ( + mem_stats_end["allocated_bytes.all.current"] + - mem_stats_start["allocated_bytes.all.current"] + ), + "reserved_bytes": ( + mem_stats_end["reserved_bytes.all.current"] + - mem_stats_start["reserved_bytes.all.current"] + ), + } + + # Record results. + result_map[freeze_gc] = {"internal": internal_stats, "external": external_stats} + + # Extract results. + freeze_off_results = result_map[False] + freeze_on_results = result_map[True] + print( + "test capture | freeze off: internal %.3f, external %.3f." + % (freeze_off_results["internal"]["time"], freeze_off_results["external"]["time"]) + ) + print( + "test capture | freeze on: internal %.3f, external %.3f." 
+ % (freeze_on_results["internal"]["time"], freeze_on_results["external"]["time"]) + ) + + # Validate time and memory usage. + assert freeze_on_results["internal"]["time"] < 0.3 * freeze_off_results["internal"]["time"] + assert freeze_on_results["external"]["time"] < 0.3 * freeze_off_results["external"]["time"] + assert ( + freeze_on_results["internal"]["allocated_bytes"] + <= freeze_off_results["internal"]["allocated_bytes"] + ) + assert ( + freeze_on_results["external"]["allocated_bytes"] + <= freeze_off_results["external"]["allocated_bytes"] + ) + assert ( + freeze_on_results["internal"]["reserved_bytes"] + <= freeze_off_results["internal"]["reserved_bytes"] + ) + assert ( + freeze_on_results["external"]["reserved_bytes"] + <= freeze_off_results["external"]["reserved_bytes"] + ) + + # Global storage for comparing unique buffer counts across different num_microbatches, # keyed by (pp_size, vpp_size) _unique_buffer_counts = {} @@ -1039,10 +1254,6 @@ def test_moe_partial_cudagraph(self, ep_size, moe_dropless_dispatcher, moe_dispa extra_kwargs["moe_token_dispatcher_type"] = "flex" extra_kwargs["moe_flex_dispatcher_backend"] = "deepep" elif moe_dispatcher_type == "hybridep": - pytest.skip( - "Currently, the Hybrid EP is broken. " - "Temporarily skip the test and wait for the fix." 
- ) if not is_hybrid_ep_available(): pytest.skip("Hybrid EP is not available") extra_kwargs["moe_token_dispatcher_type"] = "flex" @@ -1052,8 +1263,6 @@ def test_moe_partial_cudagraph(self, ep_size, moe_dropless_dispatcher, moe_dispa if not moe_dropless_dispatcher: if moe_dispatcher_type == "deepep": pytest.skip("Deep EP doesn't support drop&pad MoE") - if moe_dispatcher_type == "hybridep" and ep_size == 1: - pytest.skip("Hybrid EP doesn't support drop&pad MoE with ep_size == 1") extra_kwargs["moe_expert_capacity_factor"] = 1.0 extra_kwargs["moe_pad_expert_input_to_capacity"] = True @@ -1102,6 +1311,8 @@ def test_moe_partial_cudagraph(self, ep_size, moe_dropless_dispatcher, moe_dispa llava_test.test_llava_cudagraph_is_last_layer_logic() llava_test.teardown_method(method=None) + test = TestCaptureFreezeGC() + test.test_capture_freeze_gc() test = TestPartialCudaGraph() test.setup_method(method=None) test.test_moe_partial_cudagraph(4, True, "alltoall") diff --git a/tests/unit_tests/transformer/test_multi_token_prediction.py b/tests/unit_tests/transformer/test_multi_token_prediction.py index 05fb2c4fe63..ddfa9bfba16 100644 --- a/tests/unit_tests/transformer/test_multi_token_prediction.py +++ b/tests/unit_tests/transformer/test_multi_token_prediction.py @@ -104,7 +104,7 @@ def test_constructor_local(self, tp): assert num_weights == 15216 * config.mtp_num_layers @pytest.mark.skipif(not HAVE_TE, reason="transformer_engine not available") - @pytest.mark.parametrize(('tp', 'cp'), [(1, 1), (1, 2), (2, 1), (2, 2)]) + @pytest.mark.parametrize(('tp', 'cp'), [(1, 1), (2, 1), (2, 2)]) def test_constructor_ues_te(self, tp, cp): """Test basic construction of MTP module.""" torch.manual_seed(_SEED) @@ -312,7 +312,7 @@ def get_packed_batch(self, seq_lengths, micro_batch_size): not HAVE_TE or not is_te_min_version("2.1.0"), reason="grouped_gemm requires TransformerEngine >= 2.1.0", ) - @pytest.mark.parametrize(("tp", "cp"), [(1, 1), (1, 2), (2, 1), (2, 2)]) + 
@pytest.mark.parametrize(("tp", "cp"), [(2, 1), (2, 2)]) def test_sharded_state_dict(self, tp, cp): """Test MTP with different tensor parallel sizes.""" args = self.create_test_args(tp, cp, self.seq_length, self.micro_batch_size) @@ -331,9 +331,8 @@ def test_sharded_state_dict(self, tp, cp): not HAVE_TE or not is_te_min_version("2.1.0"), reason="grouped_gemm requires TransformerEngine >= 2.1.0", ) - @pytest.mark.parametrize("full_recompute", [False, True]) @pytest.mark.parametrize( - ("tp", "cp"), [(1, 1), (1, 2), (1, 4), (2, 1), (2, 2), (2, 4), (4, 1), (4, 2)] + ("tp", "cp", "full_recompute"), [(1, 1, False), (1, 4, False), (2, 4, False), (4, 1, True)] ) def test_forward_backward(self, tmp_path_dist_ckpt, tp, cp, full_recompute): """Test MTP forward and backward with gptmodel."""