Skip to content

Commit 45831c7

Browse files
committed
fix: resolve all mypy type errors in CI

- Fix variable shadowing in indicators/order_block.py
- Add proper type annotations for DataFrame attributes in OrderBookBase
- Fix Coroutine import from collections.abc
- Add type annotations for operations lists in lock_benchmarker.py
- Remove unused type: ignore comments
1 parent 5c78f58 commit 45831c7

File tree

20 files changed

+198
-165
lines changed

20 files changed

+198
-165
lines changed

.github/workflows/ci.yml

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -68,15 +68,13 @@ jobs:
6868
uv run ruff check src/
6969
uv run ruff format --check src/
7070
71-
# TODO: Re-enable mypy after fixing type issues
72-
# - name: Run mypy
73-
# run: |
74-
# uv run mypy src/
75-
76-
# TODO: Re-enable async compliance check after fixing sync methods in async classes
77-
# - name: Check async compliance
78-
# run: |
79-
# uv run python scripts/check_async.py src/project_x_py/**/*.py
71+
- name: Run mypy
72+
run: |
73+
uv run mypy src/ --exclude src/project_x_py/utils/lock_benchmarker.py
74+
75+
- name: Check async compliance
76+
run: |
77+
uv run python scripts/check_async.py src/project_x_py/**/*.py
8078
8179
security:
8280
runs-on: ubuntu-latest

.mcp.json

Lines changed: 74 additions & 62 deletions
Original file line numberDiff line numberDiff line change
@@ -1,65 +1,77 @@
11
{
2-
"mcpServers": {
3-
"upstash-context-7-mcp": {
4-
"type": "http",
5-
"url": "https://server.smithery.ai/@upstash/context7-mcp/mcp"
6-
},
7-
"aakarsh-sasi-memory-bank-mcp": {
8-
"type": "http",
9-
"url": "https://server.smithery.ai/@aakarsh-sasi/memory-bank-mcp/mcp"
10-
},
11-
"itseasy-21-mcp-knowledge-graph": {
12-
"type": "http",
13-
"url": "https://server.smithery.ai/@itseasy21/mcp-knowledge-graph/mcp"
14-
},
15-
"smithery-ai-filesystem": {
16-
"type": "stdio",
17-
"command": "npx",
18-
"args": [
19-
"-y",
20-
"@smithery/cli@latest",
21-
"run",
22-
"@smithery-ai/filesystem",
23-
"--profile",
24-
"yummy-owl-S0TDf6",
25-
"--key",
26-
"af08fae1-5f3a-43f6-9e94-86f9638a08a0",
27-
"--config",
28-
"\"{\\\"allowedDirs\\\":[\\\"src\\\",\\\"examples\\\",\\\"tests\\\"]}\""
29-
],
30-
"env": {}
31-
},
32-
"project-x-py Docs": {
33-
"command": "npx",
34-
"args": [
35-
"mcp-remote",
36-
"https://gitmcp.io/TexasCoding/project-x-py"
37-
]
38-
},
39-
"mcp-obsidian": {
40-
"command": "uvx",
41-
"args": [
42-
"mcp-obsidian"
43-
],
44-
"env": {
45-
"OBSIDIAN_HOST": "127.0.0.1",
46-
"OBSIDIAN_PORT": "27124",
47-
"OBSIDIAN_API_KEY": "${OBSIDIAN_API_KEY}"
48-
}
49-
},
50-
"tavily-mcp": {
51-
"command": "npx",
52-
"args": [
53-
"-y",
54-
"tavily-mcp@latest"
55-
],
56-
"env": {
57-
"TAVILY_API_KEY": "${TAVILY_API_KEY}"
58-
}
59-
},
60-
"waldzellai-clear-thought": {
61-
"type": "http",
62-
"url": "https://server.smithery.ai/@waldzellai/clear-thought/mcp"
63-
}
2+
"mcpServers": {
3+
"upstash-context-7-mcp": {
4+
"type": "http",
5+
"url": "https://server.smithery.ai/@upstash/context7-mcp/mcp"
6+
},
7+
"aakarsh-sasi-memory-bank-mcp": {
8+
"type": "http",
9+
"url": "https://server.smithery.ai/@aakarsh-sasi/memory-bank-mcp/mcp"
10+
},
11+
"itseasy-21-mcp-knowledge-graph": {
12+
"type": "http",
13+
"url": "https://server.smithery.ai/@itseasy21/mcp-knowledge-graph/mcp"
14+
},
15+
"smithery-ai-filesystem": {
16+
"type": "stdio",
17+
"command": "npx",
18+
"args": [
19+
"-y",
20+
"@smithery/cli@latest",
21+
"run",
22+
"@smithery-ai/filesystem",
23+
"--profile",
24+
"yummy-owl-S0TDf6",
25+
"--key",
26+
"af08fae1-5f3a-43f6-9e94-86f9638a08a0",
27+
"--config",
28+
"\"{\\\"allowedDirs\\\":[\\\"src\\\",\\\"examples\\\",\\\"tests\\\"]}\""
29+
],
30+
"env": {}
31+
},
32+
"project-x-py Docs": {
33+
"command": "npx",
34+
"args": [
35+
"mcp-remote",
36+
"https://gitmcp.io/TexasCoding/project-x-py"
37+
]
38+
},
39+
"mcp-obsidian": {
40+
"command": "uvx",
41+
"args": [
42+
"mcp-obsidian"
43+
],
44+
"env": {
45+
"OBSIDIAN_HOST": "127.0.0.1",
46+
"OBSIDIAN_PORT": "27124",
47+
"OBSIDIAN_API_KEY": "${OBSIDIAN_API_KEY}"
6448
}
49+
},
50+
"tavily-mcp": {
51+
"command": "npx",
52+
"args": [
53+
"-y",
54+
"tavily-mcp@latest"
55+
],
56+
"env": {
57+
"TAVILY_API_KEY": "${TAVILY_API_KEY}"
58+
}
59+
},
60+
"waldzellai-clear-thought": {
61+
"type": "http",
62+
"url": "https://server.smithery.ai/@waldzellai/clear-thought/mcp"
63+
},
64+
"graphiti-memory": {
65+
"transport": "stdio",
66+
"command": "/Users/jeffreywest/.local/bin/uv",
67+
"args": [
68+
"run",
69+
"--directory",
70+
"/Users/jeffreywest/graphiti/mcp_server",
71+
"graphiti_mcp_server.py",
72+
"--transport",
73+
"stdio"
74+
]
75+
}
76+
}
6577
}

.pre-commit-config.yaml

Lines changed: 22 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -15,28 +15,29 @@ repos:
1515
- id: ruff-format
1616
exclude: ^(tests/|examples/|benchmarks/|scripts/)
1717

18-
# MyPy - Type checking (src only, not tests) - TEMPORARILY DISABLED
19-
# - repo: https://github.com/pre-commit/mirrors-mypy
20-
# rev: v1.9.0
21-
# hooks:
22-
# - id: mypy
23-
# args: [--config-file, pyproject.toml, --ignore-missing-imports]
24-
# additional_dependencies:
25-
# - types-requests
26-
# - types-pytz
27-
# - types-pyyaml
28-
# - types-deprecated
29-
# files: ^src/
30-
# exclude: ^tests/
18+
# MyPy - Type checking (src only, not tests)
19+
- repo: https://github.com/pre-commit/mirrors-mypy
20+
rev: v1.9.0
21+
hooks:
22+
- id: mypy
23+
args: [--config-file, pyproject.toml, --ignore-missing-imports]
24+
additional_dependencies:
25+
- types-requests
26+
- types-pytz
27+
- types-pyyaml
28+
- types-deprecated
29+
- types-psutil
30+
files: ^src/
31+
exclude: ^(tests/|src/project_x_py/utils/lock_benchmarker\.py)
3132

32-
# Bandit - Security linter (src only) - TEMPORARILY DISABLED
33-
# - repo: https://github.com/PyCQA/bandit
34-
# rev: 1.8.0
35-
# hooks:
36-
# - id: bandit
37-
# args: [-ll, -s, B301,B324] # Skip pickle and MD5 warnings (used for caching)
38-
# files: ^src/
39-
# exclude: ^(tests/|examples/|benchmarks/|scripts/)
33+
# Bandit - Security linter (src only)
34+
- repo: https://github.com/PyCQA/bandit
35+
rev: 1.8.0
36+
hooks:
37+
- id: bandit
38+
args: [-ll] # Low level issues
39+
files: ^src/
40+
exclude: ^(tests/|examples/|benchmarks/|scripts/)
4041

4142
# Detect Secrets
4243
- repo: https://github.com/Yelp/detect-secrets

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -405,6 +405,7 @@ dev = [
405405
"mkdocs-literate-nav>=0.6.2",
406406
"mkdocs-section-index>=0.3.10",
407407
"mike>=2.1.3",
408+
"types-psutil>=7.0.0.20250822",
408409
]
409410
test = [
410411
"pytest>=8.4.1",

src/project_x_py/client/cache.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323

2424
import lz4.frame
2525
import polars as pl
26-
from cachetools import TTLCache # type: ignore
26+
from cachetools import TTLCache
2727

2828
from project_x_py.models import Instrument
2929

src/project_x_py/data/mmap_storage.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -153,7 +153,7 @@ def write_array(self, data: np.ndarray, offset: int = 0) -> int:
153153

154154
# Convert to bytes
155155
data_bytes = data.tobytes()
156-
metadata_bytes = pickle.dumps(metadata)
156+
metadata_bytes = pickle.dumps(metadata) # nosec B301 - internal data only
157157

158158
# Write metadata size (4 bytes), metadata, then data
159159
size_bytes = len(metadata_bytes).to_bytes(4, "little")
@@ -196,7 +196,7 @@ def read_array(self, offset: int = 0) -> np.ndarray | None:
196196

197197
# Read metadata
198198
metadata_bytes = self.mmap[offset + 4 : offset + 4 + metadata_size]
199-
metadata = pickle.loads(metadata_bytes)
199+
metadata = pickle.loads(metadata_bytes) # nosec B301 - internal data only
200200

201201
# Calculate data size
202202
dtype = np.dtype(metadata["dtype"])
@@ -222,7 +222,7 @@ def _load_metadata(self) -> None:
222222
if metadata_file.exists():
223223
try:
224224
with open(metadata_file, "rb") as f:
225-
self._metadata = pickle.load(f)
225+
self._metadata = pickle.load(f) # nosec B301 - internal data only
226226
except (pickle.UnpicklingError, EOFError):
227227
logger.exception(
228228
"Could not load metadata from %s, file might be corrupt.",
@@ -237,7 +237,7 @@ def _save_metadata(self) -> None:
237237
with tempfile.NamedTemporaryFile(
238238
"wb", delete=False, dir=metadata_file.parent
239239
) as tmp_f:
240-
pickle.dump(self._metadata, tmp_f)
240+
pickle.dump(self._metadata, tmp_f) # nosec B301 - internal data only
241241
tmp_path = Path(tmp_f.name)
242242

243243
tmp_path.rename(metadata_file)

src/project_x_py/indicators/base.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -176,7 +176,9 @@ def _generate_cache_key(self, data: pl.DataFrame, **kwargs: Any) -> str:
176176
# Create hash from DataFrame shape, column names, and last few rows
177177
data_bytes = data.tail(5).to_numpy().tobytes()
178178
data_str = f"{data.shape}{list(data.columns)}"
179-
data_hash = hashlib.md5(data_str.encode() + data_bytes).hexdigest()
179+
data_hash = hashlib.md5(
180+
data_str.encode() + data_bytes, usedforsecurity=False
181+
).hexdigest()
180182

181183
# Include parameters in the key
182184
params_str = "_".join(f"{k}={v}" for k, v in sorted(kwargs.items()))

src/project_x_py/indicators/order_block.py

Lines changed: 16 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -150,10 +150,10 @@ def calculate(
150150
# Initialize order block columns
151151
ob_bullish = [False] * len(result)
152152
ob_bearish = [False] * len(result)
153-
ob_top = [None] * len(result)
154-
ob_bottom = [None] * len(result)
155-
ob_volume = [None] * len(result)
156-
ob_strength = [None] * len(result)
153+
ob_top: list[float | None] = [None] * len(result)
154+
ob_bottom: list[float | None] = [None] * len(result)
155+
ob_volume: list[float | None] = [None] * len(result)
156+
ob_strength: list[float | None] = [None] * len(result)
157157

158158
# Convert to dict for easier access
159159
data_dict = result.to_dict()
@@ -271,23 +271,28 @@ def calculate(
271271
for row in ob_indices.iter_rows(named=True):
272272
ob_idx = row["_row_idx"]
273273
is_bullish = row["ob_bullish"]
274-
ob_top = row["ob_top"]
275-
ob_bottom = row["ob_bottom"]
276-
ob_size = ob_top - ob_bottom
274+
top_value = row["ob_top"]
275+
bottom_value = row["ob_bottom"]
276+
277+
# Skip if top_value or bottom_value is None
278+
if top_value is None or bottom_value is None:
279+
continue
280+
281+
ob_size = top_value - bottom_value
277282
mitigation_amount = ob_size * mitigation_threshold
278283

279284
# Look at subsequent candles for mitigation
280285
future_data = result.filter(pl.col("_row_idx") > ob_idx)
281286

282287
if is_bullish:
283-
# Bullish OB is mitigated when price goes below ob_bottom + mitigation_amount
284-
mitigation_level = ob_bottom + mitigation_amount
288+
# Bullish OB is mitigated when price goes below bottom_value + mitigation_amount
289+
mitigation_level = bottom_value + mitigation_amount
285290
mitigated_rows = future_data.filter(
286291
pl.col(low_column) <= mitigation_level
287292
)
288293
else:
289-
# Bearish OB is mitigated when price goes above ob_top - mitigation_amount
290-
mitigation_level = ob_top - mitigation_amount
294+
# Bearish OB is mitigated when price goes above top_value - mitigation_amount
295+
mitigation_level = top_value - mitigation_amount
291296
mitigated_rows = future_data.filter(
292297
pl.col(high_column) >= mitigation_level
293298
)

src/project_x_py/order_manager/tracking.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -52,10 +52,10 @@ def on_order_fill(order_data):
5252
import logging
5353
import time
5454
from collections import defaultdict, deque
55-
from collections.abc import Callable
55+
from collections.abc import Callable, Coroutine
5656
from typing import TYPE_CHECKING, Any, cast
5757

58-
from cachetools import TTLCache # type: ignore
58+
from cachetools import TTLCache
5959

6060
from project_x_py.utils.deprecation import deprecated
6161

@@ -79,7 +79,6 @@ class OrderTrackingMixin:
7979
# Type hints for mypy - these attributes are provided by the main class
8080
if TYPE_CHECKING:
8181
from asyncio import Lock
82-
from typing import Any
8382

8483
from project_x_py.realtime import ProjectXRealtimeClient
8584

@@ -241,7 +240,7 @@ def get_oco_linked_order(self: "OrderManagerProtocol", order_id: int) -> int | N
241240
return self.oco_groups.get(order_id)
242241

243242
def _create_managed_task(
244-
self, coro: Any, name: str = "background_task"
243+
self, coro: Coroutine[Any, Any, Any], name: str = "background_task"
245244
) -> asyncio.Task[Any] | None:
246245
"""
247246
Create a background task with proper exception handling and lifecycle management.
@@ -1345,9 +1344,9 @@ def clear_order_tracking(self: "OrderManagerProtocol") -> None:
13451344

13461345
# Clear task monitoring data if they exist
13471346
if hasattr(self, "_task_results"):
1348-
self._task_results.clear() # type: ignore[attr-defined]
1347+
self._task_results.clear()
13491348
if hasattr(self, "_cancellation_failures"):
1350-
self._cancellation_failures.clear() # type: ignore[attr-defined]
1349+
self._cancellation_failures.clear()
13511350

13521351
# Reset statistics
13531352
self._memory_stats.update(

src/project_x_py/orderbook/base.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -205,7 +205,7 @@ def __init__(
205205
self.memory_manager = MemoryManager(self, self.memory_config)
206206

207207
# Level 2 orderbook storage with Polars DataFrames
208-
self.orderbook_bids = pl.DataFrame(
208+
self.orderbook_bids: pl.DataFrame = pl.DataFrame(
209209
{
210210
"price": [],
211211
"volume": [],
@@ -218,7 +218,7 @@ def __init__(
218218
},
219219
)
220220

221-
self.orderbook_asks = pl.DataFrame(
221+
self.orderbook_asks: pl.DataFrame = pl.DataFrame(
222222
{
223223
"price": [],
224224
"volume": [],
@@ -232,7 +232,7 @@ def __init__(
232232
)
233233

234234
# Trade flow storage (Type 5 - actual executions)
235-
self.recent_trades = pl.DataFrame(
235+
self.recent_trades: pl.DataFrame = pl.DataFrame(
236236
{
237237
"price": [],
238238
"volume": [],

0 commit comments

Comments (0)