|
| 1 | +"""Analysis routes: connection, channels, device, thresholds, gaming, channel history, correlation.""" |
| 2 | + |
| 3 | +import logging |
| 4 | +from datetime import datetime |
| 5 | + |
| 6 | +from flask import Blueprint, request, jsonify |
| 7 | + |
| 8 | +from app.web import ( |
| 9 | + require_auth, |
| 10 | + get_storage, get_config_manager, get_state, |
| 11 | + _localize_timestamps, _get_lang, _get_tz_name, |
| 12 | +) |
| 13 | +from app.gaming_index import compute_gaming_index |
| 14 | +from app.i18n import get_translations |
| 15 | + |
| 16 | +log = logging.getLogger("docsis.web") |
| 17 | + |
| 18 | +analysis_bp = Blueprint("analysis_bp", __name__) |
| 19 | + |
| 20 | + |
| 21 | +def _gaming_genres(grade): |
| 22 | + """Return genre suitability verdicts for a given grade. |
| 23 | +
|
| 24 | + Verdicts: 'ok', 'warn', or 'bad'. |
| 25 | + """ |
| 26 | + g = (grade or "").lower() |
| 27 | + return { |
| 28 | + "fps": "ok" if g in ("a", "b") else "bad", |
| 29 | + "moba": "ok" if g in ("a", "b", "c") else "bad", |
| 30 | + "mmo": "ok" if g in ("a", "b", "c", "d") else "bad", |
| 31 | + "strategy": "ok" if g in ("a", "b", "c") else ("warn" if g == "d" else "bad"), |
| 32 | + } |
| 33 | + |
| 34 | + |
@analysis_bp.route("/api/connection")
@require_auth
def api_connection():
    """Return connection details: ISP name, connection type, and detected speeds.

    isp_name comes from user config; the remaining fields come from the
    modem driver's connection_info and may be null before the first poll.
    """
    cfg = get_config_manager()
    isp = cfg.get("isp_name", "") if cfg else ""
    info = get_state().get("connection_info") or {}
    payload = {
        # Empty string is normalized to null for the client.
        "isp_name": isp if isp else None,
        "connection_type": info.get("connection_type"),
        "max_downstream_kbps": info.get("max_downstream_kbps"),
        "max_upstream_kbps": info.get("max_upstream_kbps"),
    }
    return jsonify(payload)
| 52 | + |
| 53 | + |
@analysis_bp.route("/api/channels")
@require_auth
def api_channels():
    """Return current DS and US channels plus the overall health summary."""
    storage = get_storage()
    analysis = get_state().get("analysis")
    summary = analysis["summary"] if analysis else None
    if storage:
        payload = storage.get_current_channels()
    else:
        # No storage backend yet: return an empty but well-formed payload.
        payload = {"ds_channels": [], "us_channels": []}
    payload["summary"] = summary
    return jsonify(payload)
| 67 | + |
| 68 | + |
@analysis_bp.route("/api/device")
@require_auth
def api_device():
    """Return modem device information (empty object if not yet populated)."""
    device_info = get_state().get("device_info")
    return jsonify(device_info if device_info else {})
| 75 | + |
| 76 | + |
@analysis_bp.route("/api/thresholds")
@require_auth
def api_thresholds():
    """Return active analysis thresholds (read-only)."""
    # Local import keeps analyzer loading deferred until the first request,
    # matching the original's lazy import.
    from app.analyzer import get_thresholds

    return jsonify(get_thresholds())
| 83 | + |
| 84 | + |
@analysis_bp.route("/api/gaming-score")
@require_auth
def api_gaming_score():
    """Return the current Gaming Quality Index score and its components.

    Response fields:
      enabled       - whether Gaming Quality is enabled in settings
      score         - 0-100 numeric score (null if no data)
      grade         - letter grade A-F (null if no data)
      has_speedtest - whether speedtest data was included in the calculation
      components    - per-component scores and weights used for calculation
      genres        - suitability verdict (ok/warn/bad) per game genre
      raw           - raw measured values that fed into the calculation
    """
    cfg = get_config_manager()
    enabled = cfg.is_gaming_quality_enabled() if cfg else False
    state = get_state()
    analysis = state.get("analysis")
    latest_speedtest = state.get("speedtest_latest")

    result = compute_gaming_index(analysis, latest_speedtest)
    if result is None:
        # No data yet: emit a null score with an otherwise complete shape.
        empty_payload = {
            "enabled": enabled,
            "score": None,
            "grade": None,
            "has_speedtest": False,
            "components": {},
            "genres": _gaming_genres(None),
            "raw": {},
        }
        return jsonify(empty_payload)

    summary = (analysis or {}).get("summary", {})
    raw = {
        "docsis_health": summary.get("health"),
        "ds_snr_min": summary.get("ds_snr_min"),
    }
    # Speedtest-derived raw values only appear when they were part of the score.
    if latest_speedtest and result.get("has_speedtest"):
        for key in ("ping_ms", "jitter_ms", "packet_loss_pct"):
            raw[key] = latest_speedtest.get(key)

    return jsonify({
        "enabled": enabled,
        **result,
        "genres": _gaming_genres(result.get("grade")),
        "raw": raw,
    })
| 130 | + |
| 131 | + |
@analysis_bp.route("/api/channel-history")
@require_auth
def api_channel_history():
    """Return a single channel's time series.

    Query params: ?channel_id=X&direction=ds|us&days=7 (days clamped to 1..90).
    """
    storage = get_storage()
    if not storage:
        return jsonify([])
    channel_id = request.args.get("channel_id", type=int)
    if channel_id is None:
        return jsonify({"error": "channel_id is required"}), 400
    direction = request.args.get("direction", "ds")
    if direction not in ("ds", "us"):
        return jsonify({"error": "direction must be 'ds' or 'us'"}), 400
    days = min(max(request.args.get("days", 7, type=int), 1), 90)
    rows = storage.get_channel_history(channel_id, direction, days)
    # Timestamps are stored in UTC; convert to the configured display zone.
    _localize_timestamps(rows)
    return jsonify(rows)
| 151 | + |
| 152 | + |
@analysis_bp.route("/api/channel-compare")
@require_auth
def api_channel_compare():
    """Return time series for several channels at once.

    Query params: ?channels=1,2,3&direction=ds|us&days=7
    At most 6 channels; days clamped to 1..90.
    """
    storage = get_storage()
    if not storage:
        return jsonify({})
    raw_channels = request.args.get("channels", "")
    if not raw_channels:
        return jsonify({"error": "channels parameter is required"}), 400
    direction = request.args.get("direction", "ds")
    if direction not in ("ds", "us"):
        return jsonify({"error": "direction must be 'ds' or 'us'"}), 400
    days = min(max(request.args.get("days", 7, type=int), 1), 90)
    try:
        # int() tolerates surrounding whitespace; skip empty tokens ("1,,2").
        ids = [int(token) for token in raw_channels.split(",") if token.strip()]
    except ValueError:
        return jsonify({"error": "channels must be comma-separated integers"}), 400
    if len(ids) > 6:
        return jsonify({"error": "maximum 6 channels"}), 400
    if not ids:
        return jsonify({"error": "at least one channel required"}), 400
    series = storage.get_multi_channel_history(ids, direction, days)
    # JSON object keys must be strings; channel ids arrive as ints.
    return jsonify({str(cid): points for cid, points in series.items()})
| 180 | + |
| 181 | + |
| 182 | +# ── Cross-Source Correlation API ── |
| 183 | + |
@analysis_bp.route("/api/correlation")
@require_auth
def api_correlation():
    """Return a unified timeline with data from all sources for cross-source correlation.

    Query params:
        hours:   int (default 24, clamped to 1..168)
        sources: comma-separated subset of modem,speedtest,events,bnetz
                 (default: all sources)

    Each speedtest entry is enriched with health fields from the nearest
    modem sample when that sample lies within 120 minutes of it.
    """
    _storage = get_storage()
    if not _storage:
        return jsonify([])
    from app.tz import utc_now, utc_cutoff
    hours = request.args.get("hours", 24, type=int)
    hours = max(1, min(hours, 168))
    end_ts = utc_now()
    start_ts = utc_cutoff(hours=hours)

    sources_param = request.args.get("sources", "")
    if sources_param:
        valid = {"modem", "speedtest", "events", "bnetz"}
        sources = valid & set(s.strip() for s in sources_param.split(","))
        if not sources:
            # Nothing recognized: fall back to all sources rather than none.
            sources = valid
    else:
        sources = None  # no filter requested -> all sources (original behavior)

    timeline = _storage.get_correlation_timeline(start_ts, end_ts, sources)

    # Enrich speedtest entries with the closest modem health sample.
    # Parse every ISO timestamp exactly once up front: the previous version
    # re-ran datetime.fromisoformat inside the min() key for each modem
    # entry per speedtest entry, then parsed both again to get the delta.
    modem_samples = [
        (datetime.fromisoformat(e["timestamp"]).timestamp(), e)
        for e in timeline
        if e["source"] == "modem"
    ]
    if modem_samples:
        for entry in timeline:
            if entry["source"] != "speedtest":
                continue
            ts = datetime.fromisoformat(entry["timestamp"]).timestamp()
            closest_ts, closest = min(modem_samples, key=lambda m: abs(m[0] - ts))
            # Only attach modem context when the sample is reasonably close.
            if abs(closest_ts - ts) / 60 <= 120:
                entry["modem_health"] = closest.get("health")
                entry["modem_ds_snr_min"] = closest.get("ds_snr_min")
                entry["modem_ds_power_avg"] = closest.get("ds_power_avg")

    _localize_timestamps(timeline)
    return jsonify(timeline)
0 commit comments