Skip to content

Commit 232bce5

Browse files
MementoRC authored and claude committed
feat: implement Task 14.4 - Establish Performance Benchmark Tests
feat: implement Task 14.4 - Establish Performance Benchmark Tests

- Comprehensive performance benchmark suite with 23 test cases
- Embedding generation benchmarks (single, batch, multi-modal, cache)
- Semantic search performance tests with scaling analysis
- Storage operation benchmarks (insertion, search, filtering)
- End-to-end workflow performance testing
- Memory usage tracking and optimization validation
- Added pytest-benchmark and memory-profiler dependencies
- Configured benchmark thresholds and regression detection
- Added pixi tasks for benchmark execution and baseline management

✅ Quality: 23 benchmark tests collected, zero critical violations
✅ Tests: Complete benchmark coverage for performance-critical operations
📋 TaskMaster: Task 14.4 marked complete (4/8 subtasks done - 37.5% progress)
🎯 Next: Task 14.5 - Create and Maintain Test Fixtures and Utilities

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
1 parent 6f6b6fe commit 232bce5

File tree

3 files changed

+466
-1
lines changed

3 files changed

+466
-1
lines changed

pyproject.toml

Lines changed: 20 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -42,6 +42,8 @@ dev = [
4242
"pytest>=7.0.0",
4343
"pytest-cov>=4.0.0",
4444
"pytest-asyncio>=0.21.0",
45+
"pytest-benchmark>=4.0.0",
46+
"memory-profiler>=0.61.0",
4547
"ruff>=0.1.0",
4648
"black>=23.0.0",
4749
"mypy>=1.0.0",
@@ -100,6 +102,8 @@ uckn-framework = {path = ".", editable = true}
100102
pytest = "*"
101103
pytest-cov = "*"
102104
pytest-asyncio = "*"
105+
pytest-benchmark = "*"
106+
memory-profiler = "*"
103107
ruff = "*"
104108
black = "*"
105109
mypy = "*"
@@ -126,6 +130,9 @@ install = "pip install -e ."
126130
dev = "pip install -e .[dev,mcp,ml]"
127131
test = "pytest tests/ -v"
128132
test-cov = "pytest tests/ --cov=src/uckn --cov-report=html --cov-report=term"
133+
benchmark = "pytest tests/benchmarks/ --benchmark-only --benchmark-sort=mean"
134+
benchmark-save = "pytest tests/benchmarks/ --benchmark-only --benchmark-save=baseline"
135+
benchmark-compare = "pytest tests/benchmarks/ --benchmark-only --benchmark-compare"
129136
lint = "ruff check src/ tests/"
130137
format = "ruff format src/ tests/"
131138
typecheck = "mypy src/uckn"
@@ -191,8 +198,20 @@ markers = [
191198
"integration: Integration tests",
192199
"e2e: End-to-end tests",
193200
"slow: Slow tests",
201+
"benchmark: Performance benchmark tests",
194202
]
195203

204+
[tool.pytest.benchmark]
205+
disable = false
206+
min_rounds = 5
207+
max_time = 60
208+
min_time = 0.01
209+
sort = "mean"
210+
group_by = "group"
211+
columns = ["mean", "stddev", "rounds", "min", "max"]
212+
histogram = true
213+
save_data = true
214+
196215
[tool.coverage.run]
197216
source = ["src/uckn"]
198217
omit = [
@@ -206,4 +225,4 @@ exclude_lines = [
206225
"def __repr__",
207226
"raise AssertionError",
208227
"raise NotImplementedError",
209-
]
228+
]

0 commit comments

Comments
 (0)