Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
51 commits
Select commit Hold shift + click to select a range
90ecd0c
Initial WIP
NoahStapp Aug 13, 2025
d1d6e84
First draft of flat model benchmarks
NoahStapp Aug 13, 2025
4db9461
Finish first draft of benchmarks
NoahStapp Aug 14, 2025
02798aa
Linting
NoahStapp Aug 14, 2025
c4ba5ce
Linting again
NoahStapp Aug 15, 2025
f7e7345
Omit perf tests from runtests
NoahStapp Aug 15, 2025
316258f
Add Evergreen automation for perf tests
NoahStapp Aug 15, 2025
58f27da
Fix perf test path
NoahStapp Aug 15, 2025
84b5fff
Fix report and result paths
NoahStapp Aug 15, 2025
e9acd6b
Use original benchmark params
NoahStapp Aug 15, 2025
7c07dfc
Added copyright + documentation
NoahStapp Aug 15, 2025
a6c5df9
Address Tim review
NoahStapp Aug 19, 2025
dc32d06
Use Model.objects.create() where possible
NoahStapp Aug 19, 2025
f97ef89
More review changes
NoahStapp Aug 19, 2025
c03d78b
include_expansions_in_env array
NoahStapp Aug 19, 2025
b7167eb
Add test
NoahStapp Aug 29, 2025
f12d42b
Update test
NoahStapp Aug 29, 2025
58ab077
testing
NoahStapp Sep 4, 2025
87e29ac
WIP
NoahStapp Sep 15, 2025
213d88e
Merge branch 'main' into DRIVERS-2917
NoahStapp Sep 15, 2025
29ac48f
Fix teardown
NoahStapp Oct 7, 2025
6a6cde2
Perf tests run daily
NoahStapp Oct 7, 2025
c5d3d5b
Linting
NoahStapp Oct 7, 2025
d9ae14e
Fix MAX_ITERATION_TIME
NoahStapp Oct 7, 2025
263abd0
Update for new spec changes
NoahStapp Nov 20, 2025
90643e5
Skip codespell on performance json files
NoahStapp Dec 10, 2025
6bb008c
Merge branch 'main' into DRIVERS-2917
NoahStapp Dec 10, 2025
ebc9c8d
Update .evergreen/config.yml
NoahStapp Dec 16, 2025
10d8355
Update .evergreen/config.yml
NoahStapp Dec 16, 2025
c31e77c
address review
NoahStapp Dec 16, 2025
0260d80
Fixes
NoahStapp Dec 17, 2025
bee7aa9
Linting
NoahStapp Dec 17, 2025
326f15a
Fix
NoahStapp Dec 17, 2025
4c2cf49
Debugging
NoahStapp Dec 17, 2025
4545870
10k NUM_DOCS
NoahStapp Dec 18, 2025
2bab4d7
Address review + cleanup
NoahStapp Jan 5, 2026
5122aba
Fix embedded tests that use ids
NoahStapp Jan 6, 2026
8419068
10k docs
NoahStapp Jan 6, 2026
51cdc75
Linting
NoahStapp Jan 6, 2026
61fd0f5
minimize settings
timgraham Jan 7, 2026
7380907
Address review
NoahStapp Jan 7, 2026
32096d4
Cleanup
NoahStapp Jan 7, 2026
bb6e7c7
Fix docs
NoahStapp Jan 7, 2026
6350195
Add docstrings + unify
NoahStapp Jan 8, 2026
95c18fb
Fix TestSmallFlatDocFilterByIn
NoahStapp Jan 8, 2026
b4f672c
Remove unneeded TestSmallFlatDocFilterByIn
NoahStapp Jan 8, 2026
92a680e
INTPYTHON-847 Add performance tests
NoahStapp Aug 13, 2025
1690058
address review
NoahStapp Jan 28, 2026
29bd857
Merge branch 'DRIVERS-2917' of https://github.com/NoahStapp/django-mo…
NoahStapp Jan 28, 2026
0284d89
Fix EG config
NoahStapp Jan 28, 2026
fce0f71
Update TestLargeNestedDocCreation
NoahStapp Jan 28, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
99 changes: 15 additions & 84 deletions .evergreen/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,89 +11,8 @@ pre_timeout_secs: 1800 # 30 minutes
post_error_fails_task: true
post_timeout_secs: 1800 # 30 minutes

functions:
"setup":
- command: git.get_project
params:
directory: src
- command: subprocess.exec
params:
binary: bash
working_dir: "src"
add_expansions_to_env: true
args:
- ./.evergreen/setup.sh
- command: expansions.update
params:
file: src/expansion.yml

"bootstrap mongo-orchestration":
- command: subprocess.exec
params:
binary: bash
add_expansions_to_env: true
args:
- ${DRIVERS_TOOLS}/.evergreen/run-orchestration.sh
- command: expansions.update
params:
file: mo-expansion.yml

"run unit tests":
- command: subprocess.exec
type: test
params:
binary: bash
working_dir: "src"
include_expansions_in_env: ["DRIVERS_TOOLS", "MONGODB_URI"]
args:
- ./.evergreen/run-tests.sh

"teardown":
- command: subprocess.exec
params:
binary: bash
args:
- ${DRIVERS_TOOLS}/.evergreen/teardown.sh

# Encryption-specific functions
"start csfle servers":
- command: ec2.assume_role
params:
role_arn: ${aws_test_secrets_role}
- command: subprocess.exec
params:
binary: bash
include_expansions_in_env: [
"AWS_SECRET_ACCESS_KEY",
"AWS_ACCESS_KEY_ID",
"AWS_SESSION_TOKEN",
]
args:
- ${DRIVERS_TOOLS}/.evergreen/csfle/setup.sh

"teardown csfle":
- command: subprocess.exec
params:
binary: bash
args:
- ${DRIVERS_TOOLS}/.evergreen/csfle/teardown.sh

"run encryption tests":
- command: subprocess.exec
type: test
params:
binary: bash
working_dir: "src"
include_expansions_in_env: [
"AWS_KMS_ARN",
"DRIVERS_TOOLS",
"MONGODB_URI",
"DJANGO_SETTINGS_MODULE",
"CRYPT_SHARED_LIB_PATH",
]
args:
- ./.evergreen/run-tests.sh
- encryption
include:
- filename: .evergreen/functions.yml

pre:
- func: setup
Expand All @@ -106,12 +25,16 @@ tasks:
- name: run-tests
commands:
- func: "run unit tests"

- name: run-encryption-tests
commands:
- func: "start csfle servers"
- func: "run encryption tests"
- func: "teardown csfle"
- name: perf-tests
commands:
- func: "run performance tests"
- func: "attach performance test results"
- func: "send dashboard data"

buildvariants:
- name: tests-7-noauth-nossl
Expand Down Expand Up @@ -177,3 +100,11 @@ buildvariants:
DJANGO_SETTINGS_MODULE: "encrypted_aws_settings"
tasks:
- name: run-encryption-tests

- name: performance-benchmarks
display_name: Performance Benchmarks
run_on:
- rhel90-dbx-perf-large
batchtime: 1440
tasks:
- name: perf-tests
142 changes: 142 additions & 0 deletions .evergreen/functions.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,142 @@
# Shared Evergreen functions, included from .evergreen/config.yml.
functions:
  # Check out the project, run environment setup, and load any expansions
  # the setup script produced.
  "setup":
    - command: git.get_project
      params:
        directory: src
    - command: subprocess.exec
      params:
        binary: bash
        working_dir: "src"
        add_expansions_to_env: true
        args:
          - ./.evergreen/setup.sh
    - command: expansions.update
      params:
        file: src/expansion.yml

  # Start a MongoDB deployment via drivers-tools mongo-orchestration.
  "bootstrap mongo-orchestration":
    - command: subprocess.exec
      params:
        binary: bash
        add_expansions_to_env: true
        args:
          - ${DRIVERS_TOOLS}/.evergreen/run-orchestration.sh
    - command: expansions.update
      params:
        file: mo-expansion.yml

  "run unit tests":
    - command: subprocess.exec
      type: test
      params:
        binary: bash
        working_dir: "src"
        include_expansions_in_env: ["DRIVERS_TOOLS", "MONGODB_URI"]
        args:
          - ./.evergreen/run-tests.sh

  "teardown":
    - command: subprocess.exec
      params:
        binary: bash
        args:
          - ${DRIVERS_TOOLS}/.evergreen/teardown.sh

  # Encryption-specific functions
  "start csfle servers":
    - command: ec2.assume_role
      params:
        role_arn: ${aws_test_secrets_role}
    - command: subprocess.exec
      params:
        binary: bash
        include_expansions_in_env: [
          "AWS_SECRET_ACCESS_KEY",
          "AWS_ACCESS_KEY_ID",
          "AWS_SESSION_TOKEN",
        ]
        args:
          - ${DRIVERS_TOOLS}/.evergreen/csfle/setup.sh

  "teardown csfle":
    - command: subprocess.exec
      params:
        binary: bash
        args:
          - ${DRIVERS_TOOLS}/.evergreen/csfle/teardown.sh

  "run encryption tests":
    - command: subprocess.exec
      type: test
      params:
        binary: bash
        working_dir: "src"
        include_expansions_in_env: [
          "AWS_KMS_ARN",
          "DRIVERS_TOOLS",
          "MONGODB_URI",
          "DJANGO_SETTINGS_MODULE",
          "CRYPT_SHARED_LIB_PATH",
        ]
        args:
          - ./.evergreen/run-tests.sh
          - encryption

  # Performance test functions
  "run performance tests":
    - command: subprocess.exec
      type: test
      params:
        binary: bash
        working_dir: "src"
        include_expansions_in_env: [ "DRIVERS_TOOLS", "MONGODB_URI" ]
        args:
          - ./.evergreen/run-perf-tests.sh

  # Attach the generated report.json so results show in the Evergreen UI.
  "attach performance test results":
    - command: attach.results
      params:
        file_location: src/report.json

  # Submit raw benchmark results to the performance dashboard endpoint.
  "send dashboard data":
    # First derive is_mainline/parsed_order_id expansions from the build metadata.
    - command: subprocess.exec
      params:
        binary: bash
        args:
          - .evergreen/perf-submission-setup.sh
        working_dir: src
        include_expansions_in_env: [
          "requester",
          "revision_order_id",
          "project_id",
          "version_id",
          "build_variant",
          "parsed_order_id",
          "task_name",
          "task_id",
          "execution",
          "is_mainline"
        ]
      type: test
    - command: expansions.update
      params:
        file: src/expansion.yml
    # Then upload results.json to the performance monitoring API.
    - command: subprocess.exec
      params:
        binary: bash
        args:
          - .evergreen/perf-submission.sh
        working_dir: src
        include_expansions_in_env: [
          "requester",
          "revision_order_id",
          "project_id",
          "version_id",
          "build_variant",
          "parsed_order_id",
          "task_name",
          "task_id",
          "execution",
          "is_mainline"
        ]
      type: test
15 changes: 15 additions & 0 deletions .evergreen/perf-submission-setup.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
#!/bin/bash
# Record whether this run comes from a mainline Evergreen build and derive a
# clean order id, appending both as expansions for the submission step.

set -eu

# A "commit" requester means a mainline (non-patch) Evergreen run.
# shellcheck disable=SC2154
case "${requester}" in
    commit) echo "is_mainline: true" >> expansion.yml ;;
    *)      echo "is_mainline: false" >> expansion.yml ;;
esac

# We parse the username out of the order_id as patches append that in and SPS does not need that information
# shellcheck disable=SC2154
echo "parsed_order_id: $(echo "${revision_order_id}" | awk -F'_' '{print $NF}')" >> expansion.yml
24 changes: 24 additions & 0 deletions .evergreen/perf-submission.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
#!/bin/bash
# Upload the raw benchmark results (results.json) to the performance
# monitoring service, failing the task if the upload is rejected.

set -eu

# Submit the performance data to the SPS endpoint.
# -w appends the HTTP status on its own line so we can separate it below.
# shellcheck disable=SC2154
response=$(curl -s -w "\nHTTP_STATUS:%{http_code}" -X 'POST' \
  "https://performance-monitoring-api.corp.mongodb.com/raw_perf_results/cedar_report?project=${project_id}&version=${version_id}&variant=${build_variant}&order=${parsed_order_id}&task_name=${task_name}&task_id=${task_id}&execution=${execution}&mainline=${is_mainline}" \
  -H 'accept: application/json' \
  -H 'Content-Type: application/json' \
  -d @results.json)

# Split the combined output back into status code and response body.
http_status=$(echo "$response" | grep "HTTP_STATUS" | awk -F':' '{print $2}')
response_body=$(echo "$response" | sed '/HTTP_STATUS/d')

# Throw an error if the data was not successfully submitted
if [ "$http_status" -ne 200 ]; then
  echo "Error: Received HTTP status $http_status"
  echo "Response Body: $response_body"
  exit 1
fi

echo "Response Body: $response_body"
echo "HTTP Status: $http_status"
15 changes: 15 additions & 0 deletions .evergreen/run-perf-tests.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
#!/usr/bin/bash
# Entry point for the Evergreen performance task: install the package in a
# fresh venv, run the benchmark driver, and surface its output files.

set -eux

# File the benchmark suite writes its raw results to (read by run_perf_test.py).
export OUTPUT_FILE="results.json"

# Install django-mongodb-backend
/opt/python/3.12/bin/python3 -m venv venv
. venv/bin/activate
python -m pip install -U pip
pip install -e .

# Run the benchmarks, then move the result/report files to the repo root
# where the Evergreen functions expect to find them.
python .evergreen/run_perf_test.py
mv performance_tests/$OUTPUT_FILE $OUTPUT_FILE
mv performance_tests/report.json report.json
69 changes: 69 additions & 0 deletions .evergreen/run_perf_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
import json
import logging
import os
import shlex
import subprocess
import sys
from datetime import datetime
from pathlib import Path

LOGGER = logging.getLogger("test")
logging.basicConfig(level=logging.INFO, format="%(levelname)-8s %(message)s")
# Raw benchmark results file produced by the test run; the path is exported
# by run-perf-tests.sh before this script is invoked.
OUTPUT_FILE = os.environ.get("OUTPUT_FILE", "results.json")


def format_output(start_time: datetime):
    """Write a report.json summarizing the benchmark run for Evergreen.

    Reads the raw benchmark output from OUTPUT_FILE, logs it, and writes a
    single-entry report containing the run's start/end timestamps and its
    duration in seconds.
    """
    end_time = datetime.now()
    elapsed_secs = (end_time - start_time).total_seconds()

    # Log the raw benchmark results for debugging; only the timing summary
    # below goes into report.json.
    raw_results = json.loads(Path(OUTPUT_FILE).read_text())
    LOGGER.info("results.json:\n%s", json.dumps(raw_results, indent=2))

    summary = {
        "status": "PASS",
        "exit_code": 0,
        "test_file": "BenchmarkTests",
        "start": int(start_time.timestamp()),
        "end": int(end_time.timestamp()),
        "elapsed": elapsed_secs,
    }
    report = {"results": [summary]}

    LOGGER.info("report.json\n%s", json.dumps(report, indent=2))

    with open("report.json", "w", newline="\n") as fid:  # noqa: PTH123
        json.dump(report, fid)


def run_command(cmd: str | list[str], **kwargs) -> None:
    """Run a shell command, exiting the process on failure.

    Args:
        cmd: The command as a string or list of arguments. Lists are joined
            and re-split with shlex so both forms behave identically.
        **kwargs: Passed through to subprocess.run(); ``check`` defaults to
            True so a nonzero exit raises CalledProcessError.
    """
    if isinstance(cmd, list):
        cmd = " ".join(cmd)
    LOGGER.info("Running command '%s'...", cmd)
    kwargs.setdefault("check", True)
    try:
        subprocess.run(shlex.split(cmd), **kwargs)  # noqa: PLW1510, S603
    except subprocess.CalledProcessError as e:
        # e.output is only populated when output was captured; skip logging
        # a useless "None" when the command wrote straight to the console.
        if e.output:
            LOGGER.error(e.output)
        LOGGER.error(str(e))
        sys.exit(e.returncode)
    LOGGER.info("Running command '%s'... done.", cmd)


ROOT = Path(__file__).absolute().parent.parent
data_dir = ROOT / "specifications/source/benchmarking/odm-data"
# Fetch the benchmark datasets (shipped in the specifications repo) if they
# are not already present, then unpack them.
if not data_dir.exists():
    run_command("git clone --depth 1 https://github.com/mongodb/specifications.git")
run_command("tar xf flat_models.tgz", cwd=data_dir)
run_command("tar xf nested_models.tgz", cwd=data_dir)

os.chdir("performance_tests")
# Time only the test run itself, not the data download/extraction above.
# (The original also assigned start_time before the clone; that value was a
# dead store, immediately overwritten here.)
start_time = datetime.now()
run_command(
    "python manage.py test",
    env=os.environ
    | {"DJANGO_MONGODB_PERFORMANCE_TEST_DATA_PATH": str(data_dir), "OUTPUT_FILE": "results.json"},
)
format_output(start_time)
Loading