diff --git a/ui/README.md b/ui/README.md index 5f9eebdb6..2dff0faaa 100644 --- a/ui/README.md +++ b/ui/README.md @@ -66,4 +66,47 @@ Then update `public/scenarios.json` accordingly: } ``` -Now add that +## Add a live Loki streaming scenario + +For live visualization of node logs, you can configure scenarios that connect to a Loki instance via WebSocket. This allows real-time monitoring of running Cardano nodes. + +First, ensure your Loki instance is running and accessible, for example by following the [leios-demo](https://github.com/input-output-hk/leios-demo/) instructions. +Then add a scenario with a `loki` field instead of `trace` to `public/scenarios.json`: + +```json +{ + "scenarios": [ + { + "name": "Leios Demo 202511", + "topology": "topologies/prototype.yaml", + "duration": 300, + "loki": "localhost:3100" + } + ] +} +``` + +## Configuration + +Scenarios support two modes: + +- **Stored traces**: Use the `trace` field pointing to a JSONL file (optionally gzipped) +- **Live streaming**: Use the `loki` field with host:port of your Loki instance + +Both modes require a `topology` field specifying the network topology YAML file and a `duration` defining the amount of loaded data. + +### Auto-starting scenarios + +Scenarios can be auto-loaded/-connected using a URL query parameter: + +``` +?scenario= +``` + +Where `` is the zero-based index of the scenario in the scenarios.json array. For example: + +- `?scenario=0` - Auto-loads the first scenario (e.g., "200 TxkB/s") +- `?scenario=1` - Auto-loads the second scenario (e.g., "1 TxkB/s") +- `?scenario=2` - Auto-connects to the third scenario (e.g., "Leios Demo 202511") + +This is useful for direct links, bookmarking, or embedding specific scenarios. diff --git a/ui/public/scenarios.json b/ui/public/scenarios.json index d2cde4ed3..7e4d13853 100644 --- a/ui/public/scenarios.json +++ b/ui/public/scenarios.json @@ -11,6 +11,12 @@ "topology": "topologies/small.yaml", "duration": 120, "trace": "traces/small-1txkbs-nocpu.jsonl.gz" + }, + { + "name": "Leios Demo 202511", + "topology": "topologies/prototype.yaml", + "duration": 300, + "loki": "localhost:3100" } ] } diff --git a/ui/public/topologies/prototype.yaml b/ui/public/topologies/prototype.yaml new file mode 100644 index 000000000..fdc2ca4c6 --- /dev/null +++ b/ui/public/topologies/prototype.yaml @@ -0,0 +1,23 @@ +nodes: + UpstreamNode: + stake: 1 + location: + - 0 + - 0 + producers: {} + Node0: + stake: 0 + location: + - 0 + - 100 + producers: + UpstreamNode: + latency-ms: 200.0 + DownstreamNode: + stake: 0 + location: + - 0 + - 200 + producers: + Node0: + latency-ms: 200.0 diff --git a/ui/src/components/Sim/hooks/useLokiWebSocket.ts b/ui/src/components/Sim/hooks/useLokiWebSocket.ts new file mode 100644 index 000000000..ff88d984e --- /dev/null +++ b/ui/src/components/Sim/hooks/useLokiWebSocket.ts @@ -0,0 +1,491 @@ +import { useSimContext } from "@/contexts/SimContext/context"; +import { + IServerMessage, + EServerMessageType, + IRankingBlockSent, + IRankingBlockReceived, + IEndorserBlockSent, + IEndorserBlockReceived, + ITransactionSent, + ITransactionReceived, +} from "@/components/Sim/types"; +import { useRef } from "react"; +import { EConnectionState } from "@/contexts/SimContext/types"; + +// FIXME: latency in topology is wrong + +// TODO: Replace with topology-based mapping +const HOST_PORT_TO_NODE: Record = { + "127.0.0.1:3001": "UpstreamNode", + "127.0.0.1:3002": "Node0", + "127.0.0.1:3003": "DownstreamNode", + // Add more mappings as needed +}; + +const parseRankingBlockSent = ( + 
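+  // streamLabels: labels of the Loki stream this line came from; its "process" label is used as the node name
+  // timestamp: Unix time in seconds (converted from Loki's nanosecond timestamps)
+  // logLine: the raw JSON log line to parse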
streamLabels: any, + timestamp: number, + logLine: string, +): IServerMessage | null => { + try { + const log = JSON.parse(logLine); + + // From cardano-node ns=BlockFetch.Server.SendBlock + // {"block":"56515bfd5751ca2c1ca0f21050cdb1cd020e396c623a16a2274528f643d4b5fd","kind":"BlockFetchServer","peer":{"connectionId":"127.0.0.1:3002 127.0.0.1:3003"}} + if (log.kind === "BlockFetchServer" && log.peer && log.block) { + const sender = streamLabels.process; + const connectionId = log.peer.connectionId; + let recipient = "Node0"; + + if (connectionId) { + const endpoints = connectionId.split(" "); + if (endpoints.length === 2) { + const recipientEndpoint = endpoints[1]; + recipient = HOST_PORT_TO_NODE[recipientEndpoint] || recipient; + } + } + + const message: IRankingBlockSent = { + type: EServerMessageType.RBSent, + slot: 0, + id: `rb-${log.block.substring(0, 8)}`, + sender, + recipient, + }; + + return { + time_s: timestamp, + message, + }; + } + + // From immdb-server (no ns) + // {"at":"2025-12-05T12:45:21.0021Z","connectionId":"127.0.0.1:3001 127.0.0.1:3002","direction":"Send","msg":"MsgBlock","mux_at":"2025-12-05T12:45:21.0020Z","prevCount":13} + if (log.msg === "MsgBlock" && log.direction === "Send") { + const sender = streamLabels.process; + const connectionId = log.connectionId; + let recipient = "Node0"; + + if (connectionId) { + const endpoints = connectionId.split(" "); + if (endpoints.length === 2) { + const recipientEndpoint = endpoints[1]; + recipient = HOST_PORT_TO_NODE[recipientEndpoint] || recipient; + } + } + + const message: IRankingBlockSent = { + type: EServerMessageType.RBSent, + slot: log.prevCount || 0, + id: `rb-upstream-${log.prevCount + 1}`, + sender, + recipient, + }; + + return { + time_s: timestamp, + message, + }; + } + } catch (error) { + console.warn("Failed to parse RankingBlockSent log line:", logLine, error); + } + + return null; +}; + +const parseRankingBlockReceived = ( + streamLabels: any, + timestamp: number, + logLine: string, +): IServerMessage | null => { + try { + const log = JSON.parse(logLine); + + // ns=BlockFetch.Client.CompletedBlockFetch + // {"block":"56515bfd5751ca2c1ca0f21050cdb1cd020e396c623a16a2274528f643d4b5fd","delay":4985924.003937032,"kind":"CompletedBlockFetch","peer":{"connectionId":"127.0.0.1:3003 127.0.0.1:3002"},"size":862} + if (log.kind === "CompletedBlockFetch" && log.peer && log.block) { + const recipient = streamLabels.process; + const connectionId = log.peer.connectionId; + let sender = "Node0"; + + if (connectionId) { + const endpoints = connectionId.split(" "); + if (endpoints.length === 2) { + const senderEndpoint = endpoints[1]; + sender = HOST_PORT_TO_NODE[senderEndpoint] || sender; + } + } + + const message: IRankingBlockReceived = { + type: EServerMessageType.RBReceived, + slot: 0, // FIXME: use proper slot + id: `rb-${log.block.substring(0, 8)}`, + sender, + recipient, + }; + + return { + time_s: timestamp, + message, + }; + } + } catch (error) { + console.warn( + "Failed to parse RankingBlockReceived log line:", + logLine, + error, + ); + } + + return null; +}; + +const parseEndorserBlockSent = ( + streamLabels: any, + timestamp: number, + logLine: string, +): IServerMessage | null => { + try { + const log = JSON.parse(logLine); + + // From immdb-server (no ns) + // {"at":"2025-12-05T12:45:20.9134Z","connectionId":"127.0.0.1:3001 127.0.0.1:3002","direction":"Send","msg":"MsgLeiosBlock","mux_at":"2025-12-05T12:45:20.9131Z","prevCount":0} + if (log.msg === "MsgLeiosBlock" && log.direction === "Send") { + const sender 
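/* the node that emitted this log, taken from the Loki "process" stream label */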
= streamLabels.process; + const connectionId = log.connectionId; + let recipient = "Node0"; + + if (connectionId) { + const endpoints = connectionId.split(" "); + if (endpoints.length === 2) { + const recipientEndpoint = endpoints[1]; + recipient = HOST_PORT_TO_NODE[recipientEndpoint] || recipient; + } + } + + const message: IEndorserBlockSent = { + type: EServerMessageType.EBSent, + slot: 0, // FIXME: use correct slot + id: `eb-${log.prevCount || 0}`, + sender, + recipient, + }; + + return { + time_s: timestamp, + message, + }; + } + + // From cardano-node ns=LeiosFetch.Remote.Send.Block + // {"kind":"Send","msg":{"eb":"\u003celided\u003e","ebBytesSize":27471,"kind":"MsgLeiosBlock"},"mux_at":"2025-12-05T12:45:20.93446848Z","peer":{"connectionId":"127.0.0.1:3002 127.0.0.1:3003"}} + if (log.kind === "Send" && log.msg && log.msg.kind === "MsgLeiosBlock") { + const sender = streamLabels.process; + const connectionId = log.peer?.connectionId; + let recipient = "Node0"; + + if (connectionId) { + const endpoints = connectionId.split(" "); + if (endpoints.length === 2) { + const recipientEndpoint = endpoints[1]; + recipient = HOST_PORT_TO_NODE[recipientEndpoint] || recipient; + } + } + + const message: IEndorserBlockSent = { + type: EServerMessageType.EBSent, + slot: 0, // FIXME: use correct slot + id: `eb-${log.msg.eb}`, // FIXME: msg.eb is always elided + sender, + recipient, + }; + + return { + time_s: timestamp, + message, + }; + } + } catch (error) { + console.warn("Failed to parse EndorserBlockSent log line:", logLine, error); + } + + return null; +}; + +const parseEndorserBlockReceived = ( + streamLabels: any, + timestamp: number, + logLine: string, +): IServerMessage | null => { + try { + const log = JSON.parse(logLine); + + // From cardano-node ns=LeiosFetch.Remote.Receive.Block + // {"mux_at":"2025-12-05T12:45:21.98320066Z","peer":{"connectionId":"127.0.0.1:3003 127.0.0.1:3002"},"kind":"Recv","msg":{"kind":"MsgLeiosBlock","eb":"\u003celided\u003e","ebBytesSize":27471}} + if (log.kind === "Recv" && log.msg && log.msg.kind === "MsgLeiosBlock") { + const recipient = streamLabels.process; + const connectionId = log.peer?.connectionId; + let sender = "Node0"; + + if (connectionId) { + const endpoints = connectionId.split(" "); + if (endpoints.length === 2) { + const senderEndpoint = endpoints[1]; + sender = HOST_PORT_TO_NODE[senderEndpoint] || sender; + } + } + + const message: IEndorserBlockReceived = { + type: EServerMessageType.EBReceived, + slot: 0, // FIXME: use correct slot + id: `eb-${log.msg.eb}`, // FIXME: msg.eb is always elided + sender, + recipient, + }; + + return { + time_s: timestamp, + message, + }; + } + } catch (error) { + console.warn( + "Failed to parse EndorserBlockReceived log line:", + logLine, + error, + ); + } + + return null; +}; + +const parseTransactionSent = ( + streamLabels: any, + timestamp: number, + logLine: string, +): IServerMessage | null => { + try { + const log = JSON.parse(logLine); + + // TODO: indicate this is many transactions or visualize as a very big transaction + + // From immdb-server (no ns) + // {"at":"2025-12-05T14:06:12.4254Z","connectionId":"127.0.0.1:3001 127.0.0.1:3002","direction":"Send","msg":"MsgLeiosBlockTxs","mux_at":"2025-12-05T14:06:12.4254Z","prevCount":265} + if (log.msg === "MsgLeiosBlockTxs" && log.direction === "Send") { + const sender = streamLabels.process; + const connectionId = log.connectionId; + let recipient = "Node0"; + + if (connectionId) { + const endpoints = connectionId.split(" "); + if (endpoints.length === 2) { + 
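+        // connectionId is "local-host:port remote-host:port"; map the second (remote) endpoint to the receiving node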
const recipientEndpoint = endpoints[1]; + recipient = HOST_PORT_TO_NODE[recipientEndpoint] || recipient; + } + } + + const message: ITransactionSent = { + type: EServerMessageType.TransactionSent, + id: `tx-batch-${log.prevCount}`, + sender, + recipient, + }; + + return { + time_s: timestamp, + message, + }; + } + + // From cardano-node ns=LeiosFetch.Remote.Send.BlockTxs + // {"kind":"Send","msg":{"kind":"MsgLeiosBlockTxs","numTxs":30,"txs":"\u003celided\u003e","txsBytesSize":491520},"mux_at":"2025-12-05T14:06:12.52467535Z","peer":{"connectionId":"127.0.0.1:3002 127.0.0.1:3003"}} + if (log.kind === "Send" && log.msg && log.msg.kind === "MsgLeiosBlockTxs") { + const sender = streamLabels.process; + const connectionId = log.peer?.connectionId; + let recipient = "Node0"; + + if (connectionId) { + const endpoints = connectionId.split(" "); + if (endpoints.length === 2) { + const recipientEndpoint = endpoints[1]; + recipient = HOST_PORT_TO_NODE[recipientEndpoint] || recipient; + } + } + + const message: ITransactionSent = { + type: EServerMessageType.TransactionSent, + id: `tx-batch-${log.msg.txs}`, // FIXME: msg.txs is always elided + sender, + recipient, + }; + + return { + time_s: timestamp, + message, + }; + } + } catch (error) { + console.warn("Failed to parse TransactionSent log line:", logLine, error); + } + + return null; +}; + +const parseTransactionReceived = ( + streamLabels: any, + timestamp: number, + logLine: string, +): IServerMessage | null => { + try { + const log = JSON.parse(logLine); + + // From cardano-node ns=LeiosFetch.Remote.Receive.BlockTxs + // {"mux_at":"2025-12-05T14:06:12.52499731Z","peer":{"connectionId":"127.0.0.1:3003 127.0.0.1:3002"},"kind":"Recv","msg":{"txsBytesSize":491520,"kind":"MsgLeiosBlockTxs","numTxs":30,"txs":"\u003celided\u003e"}} + if (log.kind === "Recv" && log.msg && log.msg.kind === "MsgLeiosBlockTxs") { + const recipient = streamLabels.process; + const connectionId = log.peer?.connectionId; + let sender = "Node0"; + + if (connectionId) { + const endpoints = connectionId.split(" "); + if (endpoints.length === 2) { + const senderEndpoint = endpoints[1]; + sender = HOST_PORT_TO_NODE[senderEndpoint] || sender; + } + } + + const message: ITransactionReceived = { + type: EServerMessageType.TransactionReceived, + id: `tx-${log.msg.txs}`, // FIXME: msg.txs is always elided + sender, + recipient, + }; + + return { + time_s: timestamp, + message, + }; + } + } catch (error) { + console.warn( + "Failed to parse TransactionReceived log line:", + logLine, + error, + ); + } + + return null; +}; + +function connectLokiWebSocket(lokiHost: string, dispatch: any): () => void { + // NOTE: Single websocket is essential because: + // 1. Timeline aggregation assumes events are chronologically ordered + // 2. Multiple websockets deliver events out of order across different queries + // 3. Loki naturally returns results in chronological order within a single stream + // 4. 
Sorting large event arrays in the reducer is too expensive for dense simulation data + const query = + '{service="cardano-node"} |~ "BlockFetchServer|MsgBlock|CompletedBlockFetch|MsgLeiosBlock|MsgLeiosBlockTxs"'; + const wsUrl = `ws://${lokiHost}/loki/api/v1/tail?query=${encodeURIComponent(query)}&limit=5000`; + console.log("Connecting to Loki:", wsUrl); + dispatch({ + type: "SET_LOKI_CONNECTION_STATE", + payload: EConnectionState.Connecting, + }); + + const ws = new WebSocket(wsUrl); + + ws.onopen = () => { + dispatch({ + type: "SET_LOKI_CONNECTION_STATE", + payload: EConnectionState.Connected, + }); + }; + + let count = 0; + ws.onmessage = (event) => { + try { + const data = JSON.parse(event.data); + console.debug("Received Loki streams:", data); + + if (data.streams && Array.isArray(data.streams)) { + const events: IServerMessage[] = []; + + data.streams.forEach((stream: any) => { + console.debug("Stream labels:", stream.stream); + if (stream.values && Array.isArray(stream.values)) { + stream.values.forEach(([timestamp, logLine]: [string, string]) => { + count++; + console.debug(`Stream value:`, count, { timestamp, logLine }); + const ts = parseFloat(timestamp) / 1000000000; + + // TODO: simplify and push further upstream (e.g. into alloy) + const event = + parseRankingBlockSent(stream.stream, ts, logLine) || + parseRankingBlockReceived(stream.stream, ts, logLine) || + parseEndorserBlockSent(stream.stream, ts, logLine) || + parseEndorserBlockReceived(stream.stream, ts, logLine) || + parseTransactionSent(stream.stream, ts, logLine) || + parseTransactionReceived(stream.stream, ts, logLine); + if (event) { + console.warn("Parsed", event.time_s, event.message); + events.push(event); + } + }); + } + }); + + if (events.length > 0) { + dispatch({ type: "ADD_TIMELINE_EVENT_BATCH", payload: events }); + } + } + } catch (error) { + console.error("Error processing Loki message:", error); + } + }; + + ws.onerror = (error) => { + console.error("WebSocket error:", error); + dispatch({ + type: "SET_LOKI_CONNECTION_STATE", + payload: EConnectionState.NotConnected, + }); + }; + + ws.onclose = () => { + dispatch({ + type: "SET_LOKI_CONNECTION_STATE", + payload: EConnectionState.NotConnected, + }); + }; + + return () => ws.close(); +} + +export const useLokiWebSocket = () => { + const { + state: { lokiHost, lokiConnectionState }, + dispatch, + } = useSimContext(); + + const cleanupRef = useRef<(() => void) | null>(null); + + const connect = () => { + if (!lokiHost || lokiConnectionState === EConnectionState.Connected) return; + + dispatch({ type: "RESET_TIMELINE" }); + + cleanupRef.current = connectLokiWebSocket(lokiHost, dispatch); + }; + + const disconnect = () => { + cleanupRef.current?.(); + cleanupRef.current = null; + dispatch({ + type: "SET_LOKI_CONNECTION_STATE", + payload: EConnectionState.NotConnected, + }); + }; + + return { connect, disconnect }; +}; diff --git a/ui/src/components/Sim/modules/Playback.tsx b/ui/src/components/Sim/modules/Playback.tsx index f3359a8e0..8966cd442 100644 --- a/ui/src/components/Sim/modules/Playback.tsx +++ b/ui/src/components/Sim/modules/Playback.tsx @@ -13,9 +13,9 @@ export const Playback: FC = () => { // Timeline playback refs const intervalRef = useRef(null); const lastUpdateRef = useRef(0); - const currentTimeRef = useRef(currentTime); - // Refs for seeking functionality + // Refs for stable seeking callbacks + const currentTimeRef = useRef(currentTime); const eventsRef = useRef(events); const maxTimeRef = useRef(maxTime); @@ -47,15 +47,28 @@ export const 
Playback: FC = () => { }, []); const handleStep = useCallback( + (stepAmount: number) => { + const maxEventTime = + events.length > 0 ? events[events.length - 1].time_s : maxTime; + const newTime = Math.max( + 0, + Math.min(currentTime + stepAmount, maxEventTime), + ); + dispatch({ type: "SET_TIMELINE_TIME", payload: newTime }); + }, + [dispatch, events, maxTime, currentTime], + ); + + // Stable version for seeking intervals (uses refs) + const handleStepForSeeking = useCallback( (stepAmount: number) => { const maxEventTime = eventsRef.current.length > 0 ? eventsRef.current[eventsRef.current.length - 1].time_s : maxTimeRef.current; - const currentTime = currentTimeRef.current; const newTime = Math.max( 0, - Math.min(currentTime + stepAmount, maxEventTime), + Math.min(currentTimeRef.current + stepAmount, maxEventTime), ); currentTimeRef.current = newTime; dispatch({ type: "SET_TIMELINE_TIME", payload: newTime }); @@ -68,17 +81,17 @@ export const Playback: FC = () => { // Clear any existing seeking first stopSeeking(); - // Initial step using current ref values + // Initial step using current context values handleStep(stepAmount); - // Start continuous seeking after delay + // Start continuous seeking after delay using stable callback stepTimeoutRef.current = window.setTimeout(() => { stepIntervalRef.current = window.setInterval(() => { - handleStep(stepAmount); + handleStepForSeeking(stepAmount); }, 33); // ~30 FPS smooth seeking }, 300); // initial delay }, - [handleStep, stopSeeking], + [handleStep, handleStepForSeeking, stopSeeking], ); // Timeline playback effect - handles automatic advancement when playing @@ -91,6 +104,9 @@ export const Playback: FC = () => { clearInterval(intervalRef.current); } + // Capture current time at interval start to avoid stale closure + let localCurrentTime = currentTime; + // Start playback interval intervalRef.current = window.setInterval(() => { const now = performance.now(); @@ -99,16 +115,10 @@ export const Playback: FC = () => { ((now - lastUpdateRef.current) / 1000) * speedMultiplier; lastUpdateRef.current = now; - const newTime = Math.min( - currentTimeRef.current + deltaTime, - maxEventTime, - ); - currentTimeRef.current = newTime; + const newTime = Math.min(localCurrentTime + deltaTime, maxEventTime); + localCurrentTime = newTime; - dispatch({ - type: "SET_TIMELINE_TIME", - payload: newTime, - }); + dispatch({ type: "SET_TIMELINE_TIME", payload: newTime }); // Auto-pause at the end if (newTime >= maxEventTime) { @@ -124,16 +134,9 @@ export const Playback: FC = () => { intervalRef.current = null; } } - }, [ - isPlaying, - events.length, - currentTime, - speedMultiplier, - dispatch, - stopSeeking, - ]); + }, [isPlaying, events.length, speedMultiplier, dispatch]); - // Keep refs in sync when values change externally + // Keep refs in sync with context values useEffect(() => { currentTimeRef.current = currentTime; lastUpdateRef.current = performance.now(); @@ -245,7 +248,6 @@ export const Playback: FC = () => { {isLoaded && ( )} diff --git a/ui/src/components/Sim/modules/TimelineSlider.tsx b/ui/src/components/Sim/modules/TimelineSlider.tsx index 3cf993a85..66857cb1b 100644 --- a/ui/src/components/Sim/modules/TimelineSlider.tsx +++ b/ui/src/components/Sim/modules/TimelineSlider.tsx @@ -3,28 +3,37 @@ import { type FC, useCallback } from "react"; export const TimelineSlider: FC = () => { const { - state: { events, currentTime }, + state: { events, currentTime, minTime, maxTime, isPlaying }, dispatch, } = useSimContext(); const handleTimeChange = 
useCallback( (event: React.ChangeEvent) => { const newTime = parseFloat(event.target.value); + + // Pause playback when slider is moved + if (isPlaying) { + dispatch({ type: "SET_TIMELINE_PLAYING", payload: false }); + } + dispatch({ type: "SET_TIMELINE_TIME", payload: newTime }); }, - [dispatch], + [dispatch, isPlaying], ); const hasEvents = events.length > 0; - const maxTime = hasEvents ? events[events.length - 1].time_s : 100; // Default duration when no events + const timeRange = maxTime - minTime; const formatTime = (timeInSeconds: number, highResolution = false) => { + // Show relative time from minTime + const relativeTime = timeInSeconds - minTime; return highResolution - ? `${timeInSeconds.toFixed(3)}s` - : `${timeInSeconds.toFixed(1)}s`; + ? `${relativeTime.toFixed(3)}s` + : `${relativeTime.toFixed(1)}s`; }; - const currentPercent = maxTime > 0 ? (currentTime / maxTime) * 100 : 0; + const currentPercent = + timeRange > 0 ? ((currentTime - minTime) / timeRange) * 100 : 0; return (
{ {/* Interactive slider */} event.time_s); + const minEventTime = Math.min(...timestamps); + const maxEventTime = Math.max(...timestamps); + + // Update timeline bounds and clamp current time + const newMinTime = + state.minTime == 0 + ? minEventTime + : Math.min(state.minTime, minEventTime); + const newMaxTime = Math.max(state.maxTime, maxEventTime); + + const clampedCurrentTime = Math.max( + newMinTime, + Math.min(state.currentTime, newMaxTime), + ); + return { ...state, - events: [...state.events, ...action.payload], + events: newEvents, + minTime: newMinTime, + maxTime: newMaxTime, + currentTime: clampedCurrentTime, }; + } case "SET_TIMELINE_TIME": { - const newTime = action.payload; + const newTime = Math.max( + state.minTime, + Math.min(action.payload, state.maxTime), + ); // Recompute complete aggregated data based on new timeline position const nodeIds = Array.from(state.topography.nodes.keys()); @@ -156,10 +197,18 @@ export const reducer = ( ...state, events: [], currentTime: 0, + minTime: 0, + maxTime: 0, isPlaying: false, speedMultiplier: 1, }; + case "SET_LOKI_CONNECTION_STATE": + return { + ...state, + lokiConnectionState: action.payload, + }; + default: return state; } diff --git a/ui/src/contexts/SimContext/types.ts b/ui/src/contexts/SimContext/types.ts index 28f2dd901..26e062830 100644 --- a/ui/src/contexts/SimContext/types.ts +++ b/ui/src/contexts/SimContext/types.ts @@ -63,24 +63,35 @@ export interface IGraphContextState { currentNode?: string; } +export enum EConnectionState { + NotConnected = "NotConnected", + Connecting = "Connecting", + Connected = "Connected", +} + export interface IScenario { name: string; topology: string; duration: number; - trace: string; + trace?: string; + loki?: string; } export interface ISimContextState { allScenarios: IScenario[]; activeScenario: string; + autoStart: boolean; graph: IGraphContextState; aggregatedData: ISimulationAggregatedDataState; tracePath: string; + lokiHost?: string; + lokiConnectionState: EConnectionState; topography: ITransformedNodeMap; topologyPath: string; topologyLoaded: boolean; events: IServerMessage[]; currentTime: number; + minTime: number; maxTime: number; isPlaying: boolean; speedMultiplier: number; @@ -88,7 +99,7 @@ export interface ISimContextState { export type TSimContextActions = | { type: "SET_SCENARIOS"; payload: IScenario[] } - | { type: "SET_SCENARIO"; payload: string } + | { type: "SET_SCENARIO"; payload: string; autoStart?: boolean } | { type: "SET_CURRENT_NODE"; payload: string | undefined } | { type: "SET_CANVAS_PROPS"; @@ -108,7 +119,8 @@ export type TSimContextActions = | { type: "SET_TIMELINE_TIME"; payload: number } | { type: "SET_TIMELINE_PLAYING"; payload: boolean } | { type: "SET_TIMELINE_SPEED"; payload: number } - | { type: "RESET_TIMELINE" }; + | { type: "RESET_TIMELINE" } + | { type: "SET_LOKI_CONNECTION_STATE"; payload: EConnectionState }; export interface ISimContext { state: ISimContextState;