Skip to content

Commit 7399119

Browse files
TexasCoding and claude committed
fix: completely suppress SignalR WebSocket errors in demo
- Created NullHandler to completely suppress error messages
- Applied to SignalRCoreClient, websocket, and signalrcore loggers
- Set propagate=False to prevent error propagation
- Fixes remaining error messages in session-aware indicators section

The errors were appearing because SignalR's error handling was still logging errors even at CRITICAL level. The NullHandler ensures complete suppression of these harmless connection cleanup messages.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <[email protected]>
1 parent 46d2ffa commit 7399119

File tree

1 file changed

+85
-44
lines changed

1 file changed

+85
-44
lines changed

examples/16_eth_vs_rth_sessions_demo.py

Lines changed: 85 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,23 @@
2121
from project_x_py.indicators import EMA, RSI, SMA, VWAP
2222
from project_x_py.sessions import SessionConfig, SessionFilterMixin, SessionType
2323

24+
2425
# Suppress noisy WebSocket connection errors from SignalR.
# These errors occur when background threads try to read from closed
# connections; they are harmless but make the demo output noisy.
# The standard library already provides logging.NullHandler, which
# discards every record, so there is no need to hand-roll a handler class.
_null_handler = logging.NullHandler()
for _logger_name in ("SignalRCoreClient", "websocket", "signalrcore"):
    _noisy_logger = logging.getLogger(_logger_name)
    # Replace any handlers the libraries installed themselves so nothing
    # can emit, raise the threshold, and stop records bubbling up to the
    # root logger's handlers.
    _noisy_logger.handlers = [_null_handler]
    _noisy_logger.setLevel(logging.CRITICAL)
    _noisy_logger.propagate = False
2641

2742

2843
async def demonstrate_basic_session_usage():
@@ -60,14 +75,14 @@ async def demonstrate_historical_session_analysis():
6075
rth_suite = await TradingSuite.create(
6176
"MNQ",
6277
timeframes=["1min", "5min"],
63-
session_config=SessionConfig(session_type=SessionType.RTH)
78+
session_config=SessionConfig(session_type=SessionType.RTH),
6479
)
6580
print("✅ RTH TradingSuite created")
6681

6782
eth_suite = await TradingSuite.create(
6883
"MNQ",
6984
timeframes=["1min", "5min"],
70-
session_config=SessionConfig(session_type=SessionType.ETH)
85+
session_config=SessionConfig(session_type=SessionType.ETH),
7186
)
7287
print("✅ ETH TradingSuite created")
7388

@@ -86,12 +101,16 @@ async def demonstrate_historical_session_analysis():
86101
print("\nData Comparison (1min):")
87102
print(f"RTH bars: {len(rth_data_1min):,}")
88103
print(f"ETH bars: {len(eth_data_1min):,}")
89-
print(f"ETH has {len(eth_data_1min) - len(rth_data_1min):,} more bars ({((len(eth_data_1min) / len(rth_data_1min) - 1) * 100):.1f}% more)")
104+
print(
105+
f"ETH has {len(eth_data_1min) - len(rth_data_1min):,} more bars ({((len(eth_data_1min) / len(rth_data_1min) - 1) * 100):.1f}% more)"
106+
)
90107

91108
print("\nData Comparison (5min):")
92109
print(f"RTH bars: {len(rth_data_5min):,}")
93110
print(f"ETH bars: {len(eth_data_5min):,}")
94-
print(f"ETH has {len(eth_data_5min) - len(rth_data_5min):,} more bars ({((len(eth_data_5min) / len(rth_data_5min) - 1) * 100):.1f}% more)")
111+
print(
112+
f"ETH has {len(eth_data_5min) - len(rth_data_5min):,} more bars ({((len(eth_data_5min) / len(rth_data_5min) - 1) * 100):.1f}% more)"
113+
)
95114

96115
# Analyze time ranges
97116
if not rth_data_1min.is_empty():
@@ -124,7 +143,7 @@ async def demonstrate_session_indicators():
124143
suite = await TradingSuite.create(
125144
"MNQ",
126145
timeframes=["5min"],
127-
session_config=SessionConfig(session_type=SessionType.RTH)
146+
session_config=SessionConfig(session_type=SessionType.RTH),
128147
)
129148
print("✅ RTH TradingSuite created for indicators")
130149

@@ -135,8 +154,8 @@ async def demonstrate_session_indicators():
135154
if not rth_data.is_empty():
136155
# Apply multiple indicators to RTH-only data
137156
print("\nApplying session-aware indicators...")
138-
with_indicators = (rth_data
139-
.pipe(SMA, period=20)
157+
with_indicators = (
158+
rth_data.pipe(SMA, period=20)
140159
.pipe(EMA, period=12)
141160
.pipe(RSI, period=14)
142161
.pipe(VWAP)
@@ -173,7 +192,9 @@ async def demonstrate_session_indicators():
173192
eth_sma = eth_indicators["sma_20"].drop_nulls()
174193
if len(eth_sma) > 0:
175194
eth_sma_mean = float(eth_sma.mean())
176-
rth_sma_mean = float(sma_stats.mean()) if len(sma_stats) > 0 else 0
195+
rth_sma_mean = (
196+
float(sma_stats.mean()) if len(sma_stats) > 0 else 0
197+
)
177198
print("\nSMA(20) Comparison:")
178199
print(f" RTH Mean: ${rth_sma_mean:.2f}")
179200
print(f" ETH Mean: ${eth_sma_mean:.2f}")
@@ -197,7 +218,9 @@ async def demonstrate_session_statistics():
197218
suite = await TradingSuite.create(
198219
"MNQ",
199220
timeframes=["1min"],
200-
session_config=SessionConfig(session_type=SessionType.ETH) # ETH to get both sessions
221+
session_config=SessionConfig(
222+
session_type=SessionType.ETH
223+
), # ETH to get both sessions
201224
)
202225
print("✅ ETH TradingSuite created for statistics")
203226

@@ -210,8 +233,8 @@ async def demonstrate_session_statistics():
210233
print(f"RTH Volume: {stats.get('rth_volume', 'N/A'):,}")
211234
print(f"ETH Volume: {stats.get('eth_volume', 'N/A'):,}")
212235

213-
if stats.get('rth_volume') and stats.get('eth_volume'):
214-
ratio = stats['rth_volume'] / stats['eth_volume']
236+
if stats.get("rth_volume") and stats.get("eth_volume"):
237+
ratio = stats["rth_volume"] / stats["eth_volume"]
215238
print(f"Volume Ratio (RTH/ETH): {ratio:.2f}")
216239

217240
print(f"RTH VWAP: ${stats.get('rth_vwap', 0):.2f}")
@@ -242,7 +265,7 @@ async def demonstrate_realtime_session_filtering():
242265
suite = await TradingSuite.create(
243266
"MNQ",
244267
timeframes=["1min"],
245-
session_config=SessionConfig(session_type=SessionType.RTH)
268+
session_config=SessionConfig(session_type=SessionType.RTH),
246269
)
247270
print("✅ RTH TradingSuite created for real-time demo")
248271

@@ -258,10 +281,13 @@ async def count_rth_events(event):
258281
if event_counts["new_bar"] % 5 == 0 and event_counts["new_bar"] > 0:
259282
data = event.data
260283
timestamp = datetime.now().strftime("%H:%M:%S")
261-
print(f"[{timestamp}] RTH Bar #{event_counts['new_bar']}: ${data.get('close', 0):.2f}")
284+
print(
285+
f"[{timestamp}] RTH Bar #{event_counts['new_bar']}: ${data.get('close', 0):.2f}"
286+
)
262287

263288
# Register for RTH-only events
264289
from project_x_py import EventType
290+
265291
await suite.on(EventType.NEW_BAR, count_rth_events)
266292
print("✅ Event handlers registered for RTH-only data")
267293

@@ -319,7 +345,9 @@ async def demonstrate_session_filtering_direct():
319345
prices = []
320346
volumes = []
321347

322-
base_time = datetime.now(timezone.utc).replace(hour=13, minute=0, second=0, microsecond=0) # 8 AM ET
348+
base_time = datetime.now(timezone.utc).replace(
349+
hour=13, minute=0, second=0, microsecond=0
350+
) # 8 AM ET
323351

324352
# Add some sample data across different hours
325353
for hour_offset in range(12): # 12 hours of data
@@ -329,14 +357,16 @@ async def demonstrate_session_filtering_direct():
329357
prices.append(4800.0 + hour_offset * 2 + minute * 0.1)
330358
volumes.append(100 + hour_offset * 10)
331359

332-
sample_data = pl.DataFrame({
333-
"timestamp": timestamps,
334-
"open": prices,
335-
"high": [p + 1.0 for p in prices],
336-
"low": [p - 1.0 for p in prices],
337-
"close": prices,
338-
"volume": volumes
339-
})
360+
sample_data = pl.DataFrame(
361+
{
362+
"timestamp": timestamps,
363+
"open": prices,
364+
"high": [p + 1.0 for p in prices],
365+
"low": [p - 1.0 for p in prices],
366+
"close": prices,
367+
"volume": volumes,
368+
}
369+
)
340370

341371
print(f"Created sample data: {len(sample_data)} bars")
342372

@@ -354,7 +384,9 @@ async def demonstrate_session_filtering_direct():
354384
print(f"Original data: {len(sample_data)} bars")
355385
print(f"RTH filtered: {len(rth_filtered)} bars")
356386
print(f"ETH filtered: {len(eth_filtered)} bars")
357-
print(f"RTH is {(len(rth_filtered)/len(sample_data)*100):.1f}% of total data")
387+
print(
388+
f"RTH is {(len(rth_filtered) / len(sample_data) * 100):.1f}% of total data"
389+
)
358390

359391
# Show time ranges
360392
if not rth_filtered.is_empty():
@@ -383,7 +415,7 @@ async def demonstrate_multi_product_sessions():
383415
("ES", "Equity futures (S&P 500)"),
384416
("CL", "Energy futures (Crude Oil)"),
385417
("GC", "Metal futures (Gold)"),
386-
("ZN", "Treasury futures (10-Year Note)")
418+
("ZN", "Treasury futures (10-Year Note)"),
387419
]
388420

389421
print("Session Times by Product Category:")
@@ -394,7 +426,9 @@ async def demonstrate_multi_product_sessions():
394426
for product, description in products_and_symbols:
395427
try:
396428
session_times = config.get_session_times(product)
397-
print(f"{product:3} | {description:25} | {session_times.rth_start} - {session_times.rth_end}")
429+
print(
430+
f"{product:3} | {description:25} | {session_times.rth_start} - {session_times.rth_end}"
431+
)
398432
except Exception as e:
399433
print(f"{product:3} | {description:25} | Error: {e}")
400434

@@ -430,7 +464,9 @@ async def demonstrate_performance_features():
430464
print(f"Cache size: {len(session_filter._session_boundary_cache)}")
431465

432466
# Demonstrate boundary caching
433-
boundaries = session_filter._get_cached_session_boundaries("test_hash", "ES", "RTH")
467+
boundaries = session_filter._get_cached_session_boundaries(
468+
"test_hash", "ES", "RTH"
469+
)
434470
print(f"Cached boundaries: {boundaries}")
435471
print(f"Cache size after: {len(session_filter._session_boundary_cache)}")
436472

@@ -439,23 +475,27 @@ async def demonstrate_performance_features():
439475

440476
import polars as pl
441477

442-
small_data = pl.DataFrame({
443-
"timestamp": [datetime.now(timezone.utc)] * 100,
444-
"open": [4800.0] * 100,
445-
"high": [4801.0] * 100,
446-
"low": [4799.0] * 100,
447-
"close": [4800.0] * 100,
448-
"volume": [100] * 100
449-
})
450-
451-
large_data = pl.DataFrame({
452-
"timestamp": [datetime.now(timezone.utc)] * 150_000,
453-
"open": [4800.0] * 150_000,
454-
"high": [4801.0] * 150_000,
455-
"low": [4799.0] * 150_000,
456-
"close": [4800.0] * 150_000,
457-
"volume": [100] * 150_000
458-
})
478+
small_data = pl.DataFrame(
479+
{
480+
"timestamp": [datetime.now(timezone.utc)] * 100,
481+
"open": [4800.0] * 100,
482+
"high": [4801.0] * 100,
483+
"low": [4799.0] * 100,
484+
"close": [4800.0] * 100,
485+
"volume": [100] * 100,
486+
}
487+
)
488+
489+
large_data = pl.DataFrame(
490+
{
491+
"timestamp": [datetime.now(timezone.utc)] * 150_000,
492+
"open": [4800.0] * 150_000,
493+
"high": [4801.0] * 150_000,
494+
"low": [4799.0] * 150_000,
495+
"close": [4800.0] * 150_000,
496+
"volume": [100] * 150_000,
497+
}
498+
)
459499

460500
print("\nTesting optimization strategies:")
461501
print(f"Small dataset ({len(small_data):,} rows): Standard processing")
@@ -506,6 +546,7 @@ async def main():
506546
except Exception as e:
507547
print(f"\n❌ Demo error: {e}")
508548
import traceback
549+
509550
traceback.print_exc()
510551

511552

0 commit comments

Comments
 (0)