diff --git a/.changeset/add-hyperdx-cli.md b/.changeset/add-hyperdx-cli.md new file mode 100644 index 0000000000..20f803727f --- /dev/null +++ b/.changeset/add-hyperdx-cli.md @@ -0,0 +1,5 @@ +--- +"@hyperdx/cli": minor +--- + +Add @hyperdx/cli package — terminal CLI for searching, tailing, and inspecting logs and traces from HyperDX with interactive TUI, trace waterfall, raw SQL queries, dashboard listing, and sourcemap uploads. diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index fc147716a0..5b27560f4d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -418,6 +418,104 @@ jobs: "${IMAGE}:${VERSION}-arm64" done + # --------------------------------------------------------------------------- + # CLI – compile standalone binaries and upload as GitHub Release assets + # npm publishing is handled by changesets in the check_changesets job above. + # This job only compiles platform-specific binaries and creates a GH Release. + # --------------------------------------------------------------------------- + release-cli: + name: Release CLI Binaries + needs: [check_changesets, check_version] + if: needs.check_version.outputs.should_release == 'true' + runs-on: ubuntu-24.04 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup node + uses: actions/setup-node@v4 + with: + node-version-file: '.nvmrc' + cache-dependency-path: 'yarn.lock' + cache: 'yarn' + - name: Setup Bun + uses: oven-sh/setup-bun@v2 + with: + bun-version: '1.3.11' + - name: Install dependencies + run: yarn install + - name: Get CLI version + id: cli_version + run: | + CLI_VERSION=$(node -p "require('./packages/cli/package.json').version") + echo "version=${CLI_VERSION}" >> $GITHUB_OUTPUT + echo "CLI version: ${CLI_VERSION}" + - name: Check if CLI release already exists + id: check_cli_release + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + if gh release view "cli-v${{ steps.cli_version.outputs.version }}" > /dev/null 2>&1; 
then + echo "Release cli-v${{ steps.cli_version.outputs.version }} already exists. Skipping." + echo "exists=true" >> $GITHUB_OUTPUT + else + echo "Release does not exist. Proceeding." + echo "exists=false" >> $GITHUB_OUTPUT + fi + - name: Compile CLI binaries + if: steps.check_cli_release.outputs.exists == 'false' + working-directory: packages/cli + run: | + yarn compile:linux + yarn compile:macos + yarn compile:macos-x64 + - name: Create GitHub Release + if: steps.check_cli_release.outputs.exists == 'false' + uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2.6.1 + with: + tag_name: cli-v${{ steps.cli_version.outputs.version }} + name: '@hyperdx/cli v${{ steps.cli_version.outputs.version }}' + body: | + ## @hyperdx/cli v${{ steps.cli_version.outputs.version }} + + ### Installation + + **npm (recommended):** + ```bash + npm install -g @hyperdx/cli + ``` + + **Or run directly with npx:** + ```bash + npx @hyperdx/cli tui -s + ``` + + **Manual download (standalone binary, no Node.js required):** + ```bash + # macOS Apple Silicon + curl -L https://github.com/hyperdxio/hyperdx/releases/download/cli-v${{ steps.cli_version.outputs.version }}/hdx-darwin-arm64 -o hdx + # macOS Intel + curl -L https://github.com/hyperdxio/hyperdx/releases/download/cli-v${{ steps.cli_version.outputs.version }}/hdx-darwin-x64 -o hdx + # Linux x64 + curl -L https://github.com/hyperdxio/hyperdx/releases/download/cli-v${{ steps.cli_version.outputs.version }}/hdx-linux-x64 -o hdx + + chmod +x hdx && sudo mv hdx /usr/local/bin/ + ``` + + ### Usage + + ```bash + hdx auth login -s + hdx tui + ``` + draft: false + prerelease: false + files: | + packages/cli/dist/hdx-linux-x64 + packages/cli/dist/hdx-darwin-arm64 + packages/cli/dist/hdx-darwin-x64 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # --------------------------------------------------------------------------- # Downstream notifications # 
--------------------------------------------------------------------------- @@ -553,6 +651,7 @@ jobs: publish-otel-collector, publish-local, publish-all-in-one, + release-cli, notify_helm_charts, notify_ch, notify_clickhouse_clickstack, diff --git a/packages/cli/.gitignore b/packages/cli/.gitignore new file mode 100644 index 0000000000..9ef00fcce5 --- /dev/null +++ b/packages/cli/.gitignore @@ -0,0 +1,5 @@ +# Build output +dist/ + +# Bun build artifacts +*.bun-build diff --git a/packages/cli/AGENTS.md b/packages/cli/AGENTS.md new file mode 100644 index 0000000000..5511e79c01 --- /dev/null +++ b/packages/cli/AGENTS.md @@ -0,0 +1,250 @@ +# @hyperdx/cli Development Guide + +## What is @hyperdx/cli? + +A terminal CLI for searching, tailing, and inspecting logs and traces from +HyperDX. It provides both an interactive TUI (built with Ink — React for +terminals) and a non-interactive streaming mode for piping. + +The CLI connects to the HyperDX API server and queries ClickHouse directly +through the API's `/clickhouse-proxy` endpoint, using the same query generation +logic (`@hyperdx/common-utils`) as the web frontend. + +## CLI Commands + +``` +hdx tui -s # Interactive TUI (main command) +hdx stream -s --source "Logs" # Non-interactive streaming to stdout +hdx sources -s # List available sources +hdx auth login -s # Sign in (interactive or -e/-p flags) +hdx auth status # Show auth status (reads saved session) +hdx auth logout # Clear saved session +``` + +The `-s, --server ` flag is required for commands that talk to the API. If +omitted, the CLI falls back to the server URL saved in the session file from a +previous `hdx auth login`. 
+ +## Architecture + +``` +src/ +├── cli.tsx # Entry point — Commander CLI with commands: +│ # tui, stream, sources, auth (login/logout/status) +│ # Also contains the standalone LoginPrompt component +├── App.tsx # App shell — state machine: +│ # loading → login → pick-source → EventViewer +├── api/ +│ ├── client.ts # ApiClient (REST + session cookies) +│ │ # ProxyClickhouseClient (routes through /clickhouse-proxy) +│ └── eventQuery.ts # Query builders: +│ # buildEventSearchQuery (table view, uses renderChartConfig) +│ # buildTraceSpansSql (waterfall trace spans) +│ # buildTraceLogsSql (waterfall correlated logs) +│ # buildFullRowSql (SELECT * for row detail) +├── components/ +│ ├── EventViewer.tsx # Main TUI view — table, search, detail panel with tabs +│ ├── TraceWaterfall.tsx # Trace waterfall chart with j/k navigation + event details +│ ├── RowOverview.tsx # Structured overview (top-level attrs, event attrs, resource attrs) +│ ├── ColumnValues.tsx # Shared key-value renderer (used by Column Values tab + Event Details) +│ ├── LoginForm.tsx # Email/password login form (used inside TUI App) +│ └── SourcePicker.tsx # Arrow-key source selector +└── utils/ + ├── config.ts # Session persistence (~/.config/hyperdx/cli/session.json) + ├── editor.ts # $EDITOR integration for time range and select clause editing + └── silenceLogs.ts # Suppresses console.debug/warn/error from common-utils +``` + +## Key Components + +### EventViewer (`components/EventViewer.tsx`) + +The main TUI component (~1100 lines). Handles: + +- **Table view**: Dynamic columns derived from query results, percentage-based + widths, `overflowX="hidden"` for truncation +- **Search**: Lucene query via `/` key, submits on Enter +- **Follow mode**: Slides time range forward every 2s, pauses when detail panel + is open, restores on close +- **Detail panel**: Three tabs (Overview / Column Values / Trace), cycled via + Tab key. Detail search via `/` filters content within the active tab. 
+- **Select editor**: `s` key opens `$EDITOR` with the current SELECT clause. + Custom selects are stored per source ID. +- **State management**: ~20 `useState` hooks. Key states include `events`, + `expandedRow`, `detailTab`, `isFollowing`, `customSelectMap`, + `traceSelectedIndex`. + +### TraceWaterfall (`components/TraceWaterfall.tsx`) + +Port of the web frontend's `DBTraceWaterfallChart`. Key details: + +- **Tree building**: Single-pass DAG builder over time-sorted rows. Direct port + of the web frontend's logic — do NOT modify without checking + `DBTraceWaterfallChart` first. +- **Correlated logs**: Fetches log events via `buildTraceLogsSql` and merges + them into the span tree (logs attach as children of the span with matching + SpanId, using `SpanId-log` suffix to avoid key collisions). +- **j/k navigation**: `selectedIndex` + `onSelectedIndexChange` props controlled + by EventViewer. `effectiveIndex` falls back to `highlightHint` when no j/k + navigation has occurred. +- **Event Details**: `SELECT *` fetch for the selected span/log, rendered via + the shared `ColumnValues` component. Uses stable scalar deps + (`selectedNodeSpanId`, `selectedNodeTimestamp`, `selectedNodeKind`) to avoid + infinite re-fetch loops. +- **Duration formatting**: Dynamic units — `1.2s`, `3.5ms`, `45.2μs`, `123ns`. +- **Highlight**: `inverse` on label and duration text for the selected row. Bar + color unchanged. + +### RowOverview (`components/RowOverview.tsx`) + +Port of the web frontend's `DBRowOverviewPanel`. Three sections: + +1. **Top Level Attributes**: Standard OTel fields (TraceId, SpanId, SpanName, + ServiceName, Duration, StatusCode, etc.) +2. **Span/Log Attributes**: Flattened from `source.eventAttributesExpression`, + shown with key count header +3. 
**Resource Attributes**: Flattened from + `source.resourceAttributesExpression`, rendered as chips with + `backgroundColor="#3a3a3a"` and cyan key / white value + +### ColumnValues (`components/ColumnValues.tsx`) + +Shared component for rendering key-value pairs from a row data object. Used by: + +- Column Values tab in the detail panel +- Event Details section in the Trace tab's waterfall + +Supports `searchQuery` filtering and `wrapLines` toggle. + +## Web Frontend Alignment + +This package mirrors several web frontend components. **Always check the +corresponding web component before making changes** to ensure behavior stays +consistent: + +| CLI Component | Web Component | Notes | +| ---------------- | ----------------------- | -------------------------------- | +| `TraceWaterfall` | `DBTraceWaterfallChart` | Tree builder is a direct port | +| `RowOverview` | `DBRowOverviewPanel` | Same sections and field list | +| Trace tab logic | `DBTracePanel` | Source resolution (trace/log) | +| Detail panel | `DBRowSidePanel` | Tab structure, highlight hint | +| Event query | `DBTraceWaterfallChart` | `getConfig()` → `buildTrace*Sql` | + +Key expression mappings from the web frontend's `getConfig()`: + +- `Timestamp` → `displayedTimestampValueExpression` (NOT + `timestampValueExpression`) +- `Duration` → `durationExpression` (raw, not seconds like web frontend) +- `Body` → `bodyExpression` (logs) or `spanNameExpression` (traces) +- `SpanId` → `spanIdExpression` +- `ParentSpanId` → `parentSpanIdExpression` (traces only) + +## Keybindings (TUI mode) + +| Key | Action | +| ------------- | ------------------------------------------ | +| `j` / `↓` | Move selection down | +| `k` / `↑` | Move selection up | +| `l` / `Enter` | Expand row detail | +| `h` / `Esc` | Close detail / blur search | +| `G` | Jump to newest | +| `g` | Jump to oldest | +| `/` | Search (global in table, filter in detail) | +| `Tab` | Cycle sources/searches or detail tabs | +| `Shift+Tab` | Cycle backwards | 
+| `s` | Edit SELECT clause in $EDITOR | +| `t` | Edit time range in $EDITOR | +| `f` | Toggle follow mode (live tail) | +| `w` | Toggle line wrap | +| `?` | Toggle help screen | +| `q` | Quit | + +In the **Trace tab**, `j`/`k` navigate spans/logs in the waterfall instead of +the main table. + +## Development + +```bash +# Run in dev mode (tsx, no compile step) +cd packages/cli +yarn dev tui -s http://localhost:8000 + +# Type check +npx tsc --noEmit + +# Bundle with tsup +yarn build + +# Compile standalone binary (current platform) +yarn compile + +# Cross-compile +yarn compile:macos # macOS ARM64 +yarn compile:macos-x64 # macOS x64 +yarn compile:linux # Linux x64 +``` + +## Key Patterns + +### Session Management + +Session is stored at `~/.config/hyperdx/cli/session.json` with mode `0o600`. +Contains `apiUrl` and `cookies[]`. The `ApiClient` constructor loads the saved +session and checks if the stored `apiUrl` matches the requested one. + +### ClickHouse Proxy Client + +`ProxyClickhouseClient` extends `BaseClickhouseClient` from common-utils. It: + +- Routes queries through `/clickhouse-proxy` (sets `pathname`) +- Injects session cookies for auth +- Passes `x-hyperdx-connection-id` header +- Disables basic auth (`set_basic_auth_header: false`) +- Forces `content-type: text/plain` to prevent Express body parser issues + +### Source Expressions + +Sources have many expression fields that map to ClickHouse column names. Key +ones used in the CLI: + +- `timestampValueExpression` — Primary timestamp (often `TimestampTime`, + DateTime) +- `displayedTimestampValueExpression` — High-precision timestamp (often + `Timestamp`, DateTime64 with nanoseconds). 
**Use this for waterfall queries.** +- `traceIdExpression`, `spanIdExpression`, `parentSpanIdExpression` +- `bodyExpression`, `spanNameExpression`, `serviceNameExpression` +- `durationExpression` + `durationPrecision` (3=ms, 6=μs, 9=ns) +- `eventAttributesExpression`, `resourceAttributesExpression` +- `logSourceId`, `traceSourceId` — Correlated source IDs + +### useInput Handler Ordering + +The `useInput` callback in EventViewer has a specific priority order. **Do not +reorder these checks**: + +1. `?` toggles help (except when search focused) +2. Any key closes help when showing +3. `focusDetailSearch` — consumes all keys except Esc/Enter +4. `focusSearch` — consumes all keys except Tab/Esc +5. Trace tab j/k — when detail panel open and Trace tab active +6. General j/k, G/g, Enter/Esc, Tab, etc. +7. Single-key shortcuts: `w`, `f`, `/`, `s`, `t`, `q` + +### Dynamic Table Columns + +When `customSelect` is set (via `s` key), columns are derived from the query +result keys. Otherwise, hardcoded percentage-based columns are used per source +kind. The `getDynamicColumns` function distributes 60% evenly among non-last +columns, with the last column getting the remainder. + +### Follow Mode + +- Enabled by default on startup +- Slides `timeRange` forward every 2s, triggering a replace fetch +- **Paused** when detail panel opens (`wasFollowingRef` saves previous state) +- **Restored** when detail panel closes + +### Custom Select Per Source + +`customSelectMap: Record` stores custom SELECT overrides keyed +by `source.id`. Each source remembers its own custom select independently. 
diff --git a/packages/cli/CONTRIBUTING.md b/packages/cli/CONTRIBUTING.md new file mode 100644 index 0000000000..c28e166537 --- /dev/null +++ b/packages/cli/CONTRIBUTING.md @@ -0,0 +1,306 @@ +# @hyperdx/cli — Development Guide + +## Prerequisites + +- **Node.js** >= 22.16.0 +- **Yarn** 4 (workspace managed from monorepo root) +- **Bun** (optional, for standalone binary compilation) +- A running HyperDX instance (API server or Next.js frontend with proxy) + +## Getting Started + +```bash +# From the monorepo root +yarn install + +# Navigate to the CLI package +cd packages/cli +``` + +### Authentication + +Before using the TUI, authenticate with your HyperDX instance: + +```bash +# Interactive login (opens email/password prompts) +yarn dev auth login -s http://localhost:8000 + +# Non-interactive login (for scripting/CI) +yarn dev auth login -s http://localhost:8000 -e user@example.com -p password + +# Verify auth status +yarn dev auth status + +# Session is saved to ~/.config/hyperdx/cli/session.json +``` + +Once authenticated, the `-s` flag is optional — the CLI reads the server URL +from the saved session. + +### Running in Dev Mode + +`yarn dev` uses `tsx` for direct TypeScript execution — no compile step needed. 
+ +```bash +# Interactive TUI +yarn dev tui + +# With explicit server URL +yarn dev tui -s http://localhost:8000 + +# Skip source picker +yarn dev tui --source "Logs" + +# Start with a search query + follow mode +yarn dev tui -q "level:error" -f + +# Non-interactive streaming +yarn dev stream --source "Logs" + +# List available sources +yarn dev sources + +``` + +## Building & Compiling + +```bash +# Type check (no output on success) +npx tsc --noEmit + +# Bundle with tsup (outputs to dist/cli.js) +yarn build + +# Compile standalone binary for current platform +yarn compile + +# Cross-compile +yarn compile:macos # macOS ARM64 +yarn compile:macos-x64 # macOS x64 +yarn compile:linux # Linux x64 +``` + +The compiled binary is a single file at `dist/hdx` (or `dist/hdx-`). + +## Project Structure + +``` +src/ +├── cli.tsx # Entry point — Commander CLI commands +│ # (tui, stream, sources, auth login/logout/status) +│ # Also contains LoginPrompt component +├── App.tsx # Ink app shell — state machine: +│ # loading → login → pick-source → EventViewer +├── api/ +│ ├── client.ts # ApiClient (REST + session cookies) +│ │ # ProxyClickhouseClient (ClickHouse via /clickhouse-proxy) +│ └── eventQuery.ts # Query builders: +│ # buildEventSearchQuery — table view (uses renderChartConfig) +│ # buildTraceSpansSql — waterfall trace spans +│ # buildTraceLogsSql — waterfall correlated logs +│ # buildFullRowQuery — SELECT * for row detail (uses renderChartConfig) +├── components/ +│ ├── EventViewer.tsx # Main TUI view (~1275 lines) +│ │ # Table, search, detail panel with 3 tabs +│ ├── TraceWaterfall.tsx # Trace waterfall chart with j/k navigation +│ ├── RowOverview.tsx # Structured overview (Top Level, Attributes, Resources) +│ ├── ColumnValues.tsx # Shared key-value renderer with scroll support +│ ├── LoginForm.tsx # Email/password login form (used inside TUI App) +│ └── SourcePicker.tsx # j/k source selector +├── shared/ # Logic ported from packages/app (@source annotated) +│ ├── 
useRowWhere.ts # processRowToWhereClause, buildColumnMap, getRowWhere +│ ├── source.ts # getDisplayedTimestampValueExpression, getEventBody, etc. +│ └── rowDataPanel.ts # ROW_DATA_ALIASES, buildRowDataSelectList +└── utils/ + ├── config.ts # Session persistence (~/.config/hyperdx/cli/session.json) + ├── editor.ts # $EDITOR integration for time range and SELECT editing + └── silenceLogs.ts # Suppresses console.debug/warn/error, verbose file logging +``` + +## Data Flow + +### Table View Query + +``` +User types search → buildEventSearchQuery() + → renderChartConfig() from @hyperdx/common-utils + → ProxyClickhouseClient.query() + → API /clickhouse-proxy → ClickHouse + → JSON response with { data, meta } + → Store chSql in lastTableChSqlRef, meta in lastTableMetaRef + → Render dynamic table columns from row keys +``` + +### Row Detail Fetch (on Enter/l) + +``` +User expands row → buildFullRowQuery() + → chSqlToAliasMap(lastTableChSql) for alias resolution + → buildColumnMap(lastTableMeta, aliasMap) for type-aware WHERE + → processRowToWhereClause() with proper type handling + → renderChartConfig() with SELECT *, __hdx_* aliases + → Results include LogAttributes, ResourceAttributes, etc. +``` + +### Trace Waterfall + +``` +User switches to Trace tab + → buildTraceSpansSql() — fetch spans by TraceId + → buildTraceLogsSql() — fetch correlated logs by TraceId (if logSource exists) + → buildTree() — single-pass DAG builder (port of DBTraceWaterfallChart) + → Render waterfall with timing bars + → j/k navigation highlights spans, fetches SELECT * for Event Details +``` + +### ClickHouse Proxy Client + +`ProxyClickhouseClient` extends `BaseClickhouseClient` from common-utils: + +- Derives proxy pathname from API URL (e.g. 
`/api/clickhouse-proxy` for Next.js + proxy, `/clickhouse-proxy` for direct API) +- Passes `origin` only to `createClient` (not the path, which ClickHouse client + would interpret as a database name) +- Injects session cookies + `x-hyperdx-connection-id` header +- Forces `content-type: text/plain` to prevent Express body parser issues + +### Server URL Resolution + +The `-s` flag is optional on most commands. Resolution order: + +1. Explicit `-s ` flag +2. Saved session's `apiUrl` from `~/.config/hyperdx/cli/session.json` +3. Error: "No server specified" + +## Key Patterns + +### useInput Handler Ordering + +The `useInput` callback in EventViewer has a strict priority order. **Do not +reorder these checks**: + +1. `?` toggles help (except when search focused) +2. Any key closes help when showing +3. `focusDetailSearch` — consumes all keys except Esc/Enter +4. `focusSearch` — consumes all keys except Tab/Esc +5. Trace tab j/k + Ctrl+D/U — when detail panel open and Trace tab active +6. Column Values / Overview Ctrl+D/U — scroll detail view +7. General j/k, G/g, Enter/Esc, Tab, etc. +8. Single-key shortcuts: `w`, `f`, `/`, `s`, `t`, `q` + +### Follow Mode + +- Enabled by default on startup +- Slides `timeRange` forward every 2s via `setInterval`, triggering a replace + fetch +- **Paused** when detail panel opens (`wasFollowingRef` saves previous state) +- **Restored** when detail panel closes + +### Custom Select Per Source + +`customSelectMap: Record` stores custom SELECT overrides keyed +by `source.id`. Each source remembers its own custom select independently. Press +`s` to open `$EDITOR` with the current SELECT clause. 
+ +### Scrollable Detail Panels + +All detail tabs have fixed-height viewports with Ctrl+D/U scrolling: + +- **Overview / Column Values** — Uses `fullDetailMaxRows` (full screen minus + overhead) +- **Trace Event Details** — Uses `detailMaxRows` (1/3 of terminal, since + waterfall takes the rest) + +## Web Frontend Alignment + +This package ports several web frontend components. **Always check the +corresponding web component before making changes**: + +| CLI Component | Web Component | Notes | +| ---------------- | ----------------------- | ----------------------------- | +| `TraceWaterfall` | `DBTraceWaterfallChart` | Tree builder is a direct port | +| `RowOverview` | `DBRowOverviewPanel` | Same sections and field list | +| Trace tab logic | `DBTracePanel` | Source resolution (trace/log) | +| Detail panel | `DBRowSidePanel` | Tab structure, highlight hint | +| Row WHERE clause | `useRowWhere.tsx` | processRowToWhereClause | +| Row data fetch | `DBRowDataPanel` | ROW_DATA_ALIASES, SELECT list | +| Source helpers | `source.ts` | Expression getters | + +### Shared Modules (`src/shared/`) + +Files in `src/shared/` are copied from `packages/app` with `@source` annotations +at the top of each file. These are candidates for future extraction to +`@hyperdx/common-utils`: + +```typescript +/** + * Row WHERE clause builder. + * + * @source packages/app/src/hooks/useRowWhere.tsx + */ +``` + +When updating these files, check the original source in `packages/app` first. + +## Common Tasks + +### Adding a New Keybinding + +1. Add the key handler in `EventViewer.tsx`'s `useInput` callback at the correct + priority level +2. Update the `HelpScreen` component's key list +3. Update `AGENTS.md` keybindings table +4. Update `README.md` keybindings table + +### Adding a New Detail Tab + +1. Add the tab key to the `detailTab` union type in `EventViewer.tsx` +2. Add the tab to the `tabs` array in the Tab key handler +3. Add the tab to the tab bar rendering +4. 
Add the tab content rendering block +5. Handle any tab-specific Ctrl+D/U scrolling + +### Adding a New CLI Command + +1. Add the command in `cli.tsx` using Commander +2. Use `resolveServer(opts.server)` for server URL resolution +3. Use `withVerbose()` if the command needs `--verbose` support +4. Update `README.md` and `AGENTS.md` with the new command + +### Porting a Web Frontend Component + +1. Create the file in `src/shared/` (if pure logic) or `src/components/` (if UI) +2. Add `@source packages/app/src/...` annotation at the top +3. Use `SourceResponse` type from `@/api/client` instead of `TSource` types +4. Replace React hooks with plain functions where possible (for non-React usage) +5. Add the mapping to the "Web Frontend Alignment" table in `AGENTS.md` + +## Troubleshooting + +### "Not logged in" error + +Run `yarn dev auth login -s ` to authenticate. The session may have expired +or the server URL may have changed. + +### HTML response instead of JSON + +The API URL may be pointing to the Next.js frontend instead of the Express API +server. Both work — the CLI auto-detects the `/api` prefix in the URL and +adjusts the ClickHouse proxy path accordingly. + +### "Database api does not exist" + +The `ProxyClickhouseClient` was sending the URL path as a database name. This +was fixed by passing `origin` only (without path) to `createClient`. + +### Row detail shows partial data + +The `SELECT *` row detail fetch requires: + +1. `lastTableChSqlRef` — the rendered SQL from the last table query (for + `chSqlToAliasMap`) +2. `lastTableMetaRef` — column metadata from the query response (for type-aware + WHERE clause) + +If these are null (e.g. first render before any query), the fetch may fail +silently and fall back to the partial table row data. 
diff --git a/packages/cli/README.md b/packages/cli/README.md new file mode 100644 index 0000000000..771e9e8814 --- /dev/null +++ b/packages/cli/README.md @@ -0,0 +1,40 @@ +# @hyperdx/cli + +Command line interface for HyperDX. + +## Uploading Source Maps + +Upload JavaScript source maps to HyperDX for stack trace de-obfuscation. + +In your build pipeline, run the CLI tool: + +```bash +npx @hyperdx/cli upload-sourcemaps \ + --serviceKey "$HYPERDX_API_ACCESS_KEY" \ + --apiUrl "$HYPERDX_API_URL" \ + --path .next \ + --releaseId "$RELEASE_ID" +``` + +You can also add this as an npm script: + +```json +{ + "scripts": { + "upload-sourcemaps": "npx @hyperdx/cli upload-sourcemaps --path=\".next\"" + } +} +``` + +### Options + +| Flag | Description | Default | +| ------------------------- | ------------------------------------------------------ | ------- | +| `-k, --serviceKey ` | HyperDX service account API key | | +| `-p, --path ` | Directory containing sourcemaps | `.` | +| `-u, --apiUrl ` | HyperDX API URL (required for self-hosted deployments) | | +| `-rid, --releaseId ` | Release ID to associate with the sourcemaps | | +| `-bp, --basePath ` | Base path for the uploaded sourcemaps | | + +Optionally, set the `HYPERDX_SERVICE_KEY` environment variable to avoid passing +the `--serviceKey` flag. 
diff --git a/packages/cli/eslint.config.mjs b/packages/cli/eslint.config.mjs new file mode 100644 index 0000000000..a840b54f1b --- /dev/null +++ b/packages/cli/eslint.config.mjs @@ -0,0 +1,63 @@ +import js from '@eslint/js'; +import tseslint from 'typescript-eslint'; +import prettierConfig from 'eslint-config-prettier'; +import prettierPlugin from 'eslint-plugin-prettier'; + +export default [ + js.configs.recommended, + ...tseslint.configs.recommended, + prettierConfig, + { + ignores: [ + 'node_modules/**', + 'dist/**', + '**/*.config.mjs', + '**/*.config.ts', + ], + }, + { + files: ['src/**/*.ts', 'src/**/*.tsx'], + plugins: { + '@typescript-eslint': tseslint.plugin, + prettier: prettierPlugin, + }, + rules: { + '@typescript-eslint/no-explicit-any': 'off', + '@typescript-eslint/no-unused-vars': [ + 'warn', + { + argsIgnorePattern: '^_', + varsIgnorePattern: '^_', + }, + ], + 'prettier/prettier': 'error', + }, + languageOptions: { + parser: tseslint.parser, + parserOptions: { + ecmaVersion: 'latest', + sourceType: 'module', + project: './tsconfig.json', + tsconfigRootDir: import.meta.dirname, + }, + globals: { + console: 'readonly', + process: 'readonly', + setTimeout: 'readonly', + clearTimeout: 'readonly', + setInterval: 'readonly', + clearInterval: 'readonly', + React: 'readonly', + fetch: 'readonly', + Response: 'readonly', + URL: 'readonly', + URLSearchParams: 'readonly', + Headers: 'readonly', + RequestInfo: 'readonly', + RequestInit: 'readonly', + ReadableStream: 'readonly', + Buffer: 'readonly', + }, + }, + }, +]; diff --git a/packages/cli/package.json b/packages/cli/package.json new file mode 100644 index 0000000000..d9768cc53c --- /dev/null +++ b/packages/cli/package.json @@ -0,0 +1,49 @@ +{ + "name": "@hyperdx/cli", + "version": "0.1.3", + "license": "MIT", + "type": "module", + "publishConfig": { + "access": "public" + }, + "bin": { + "hdx": "./dist/cli.js" + }, + "files": [ + "dist/*" + ], + "engines": { + "node": ">=22.16.0", + "bun": "1.3.11" + }, + 
"scripts": { + "dev": "tsx src/cli.tsx", + "build": "tsup", + "prepublishOnly": "yarn build", + "compile": "bun build src/cli.tsx --compile --outfile dist/hdx", + "compile:linux": "bun build src/cli.tsx --compile --target=bun-linux-x64 --outfile dist/hdx-linux-x64", + "compile:macos": "bun build src/cli.tsx --compile --target=bun-darwin-arm64 --outfile dist/hdx-darwin-arm64", + "compile:macos-x64": "bun build src/cli.tsx --compile --target=bun-darwin-x64 --outfile dist/hdx-darwin-x64" + }, + "devDependencies": { + "@clickhouse/client": "^1.12.1", + "@clickhouse/client-common": "^1.12.1", + "@hyperdx/common-utils": "^0.17.0", + "@types/crypto-js": "^4.2.2", + "@types/react": "^19.0.0", + "@types/sqlstring": "^2.3.2", + "chalk": "^5.3.0", + "commander": "^12.1.0", + "crypto-js": "^4.2.0", + "glob": "^13.0.6", + "ink": "6.8.0", + "ink-spinner": "^5.0.0", + "ink-text-input": "^6.0.0", + "react": "^19.0.0", + "react-devtools-core": "^7.0.1", + "sqlstring": "^2.3.3", + "tsup": "^8.4.0", + "tsx": "^4.19.0", + "typescript": "^5.9.3" + } +} diff --git a/packages/cli/src/App.tsx b/packages/cli/src/App.tsx new file mode 100644 index 0000000000..8ec745475a --- /dev/null +++ b/packages/cli/src/App.tsx @@ -0,0 +1,173 @@ +import React, { useState, useEffect, useCallback } from 'react'; +import { Box, Text } from 'ink'; +import Spinner from 'ink-spinner'; + +import { SourceKind } from '@hyperdx/common-utils/dist/types'; + +import { + ApiClient, + type SourceResponse, + type SavedSearchResponse, +} from '@/api/client'; +import LoginForm from '@/components/LoginForm'; +import SourcePicker from '@/components/SourcePicker'; +import EventViewer from '@/components/EventViewer'; + +type Screen = 'loading' | 'login' | 'pick-source' | 'events'; + +interface AppProps { + apiUrl: string; + /** Pre-set search query from CLI flags */ + query?: string; + /** Pre-set source name from CLI flags */ + sourceName?: string; + /** Start in follow/live tail mode */ + follow?: boolean; +} + +export 
default function App({ apiUrl, query, sourceName, follow }: AppProps) { + const [screen, setScreen] = useState('loading'); + const [client] = useState(() => new ApiClient({ apiUrl })); + const [eventSources, setLogSources] = useState([]); + const [savedSearches, setSavedSearches] = useState([]); + const [selectedSource, setSelectedSource] = useState( + null, + ); + const [activeQuery, setActiveQuery] = useState(query ?? ''); + const [error, setError] = useState(null); + + // Check existing session on mount + useEffect(() => { + (async () => { + const valid = await client.checkSession(); + if (valid) { + await loadData(); + } else { + setScreen('login'); + } + })(); + }, []); + + const loadData = async () => { + try { + const [sources, searches] = await Promise.all([ + client.getSources(), + client.getSavedSearches().catch(() => [] as SavedSearchResponse[]), + ]); + + const queryableSources = sources.filter( + s => s.kind === SourceKind.Log || s.kind === SourceKind.Trace, + ); + + if (queryableSources.length === 0) { + setError( + 'No log or trace sources found. Configure a source in HyperDX first.', + ); + return; + } + + setLogSources(queryableSources); + setSavedSearches(searches); + + // Auto-select if source name was provided via CLI + if (sourceName) { + const match = queryableSources.find( + s => s.name.toLowerCase() === sourceName.toLowerCase(), + ); + if (match) { + setSelectedSource(match); + setScreen('events'); + return; + } + } + + // Auto-select if only one source + if (queryableSources.length === 1) { + setSelectedSource(queryableSources[0]); + setScreen('events'); + return; + } + + setScreen('pick-source'); + } catch (err: unknown) { + setError(err instanceof Error ? 
err.message : String(err)); + } + }; + + const handleLogin = async (email: string, password: string) => { + const ok = await client.login(email, password); + if (ok) { + await loadData(); + } + return ok; + }; + + const handleSourceSelect = (source: SourceResponse) => { + setSelectedSource(source); + setScreen('events'); + }; + + const handleSavedSearchSelect = useCallback( + (search: SavedSearchResponse) => { + const source = eventSources.find( + s => s.id === search.source || s._id === search.source, + ); + if (source) { + setSelectedSource(source); + } + setActiveQuery(search.where); + setScreen('events'); + }, + [eventSources], + ); + + if (error) { + return ( + + Error: {error} + + ); + } + + switch (screen) { + case 'loading': + return ( + + + Connecting to {apiUrl}… + + + ); + + case 'login': + return ; + + case 'pick-source': + return ( + + + + HyperDX TUI + + Search and tail events from the terminal + + + + ); + + case 'events': + if (!selectedSource) return null; + return ( + + ); + } +} diff --git a/packages/cli/src/api/client.ts b/packages/cli/src/api/client.ts new file mode 100644 index 0000000000..20c973e6f4 --- /dev/null +++ b/packages/cli/src/api/client.ts @@ -0,0 +1,381 @@ +/** + * HTTP client for the HyperDX internal API. 
+ * + * Handles session cookie auth and exposes: + * - REST calls (login, sources, connections, me) + * - A ClickHouse node client that routes through /clickhouse-proxy + * with session cookies and connection-id header injection + */ + +import { createClient } from '@clickhouse/client'; +import type { + BaseResultSet, + ClickHouseSettings, + DataFormat, +} from '@clickhouse/client-common'; + +import { + BaseClickhouseClient, + type ClickhouseClientOptions, + type QueryInputs, +} from '@hyperdx/common-utils/dist/clickhouse'; +import { + getMetadata, + type Metadata, +} from '@hyperdx/common-utils/dist/core/metadata'; + +import { loadSession, saveSession, clearSession } from '@/utils/config'; + +// ------------------------------------------------------------------ +// API Client (session management + REST calls) +// ------------------------------------------------------------------ + +interface ApiClientOptions { + apiUrl: string; +} + +export class ApiClient { + private apiUrl: string; + private cookies: string[] = []; + + constructor(opts: ApiClientOptions) { + this.apiUrl = opts.apiUrl.replace(/\/+$/, ''); + + const saved = loadSession(); + if (saved && saved.apiUrl === this.apiUrl) { + this.cookies = saved.cookies; + } + } + + getApiUrl(): string { + return this.apiUrl; + } + + getCookieHeader(): string { + return this.cookies.join('; '); + } + + // ---- Auth -------------------------------------------------------- + + async login(email: string, password: string): Promise { + const res = await fetch(`${this.apiUrl}/login/password`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ email, password }), + redirect: 'manual', + }); + + if (res.status === 302 || res.status === 200) { + this.extractCookies(res); + saveSession({ apiUrl: this.apiUrl, cookies: this.cookies }); + return true; + } + + return false; + } + + async checkSession(): Promise { + try { + const res = await this.get('/me'); + return res.ok; + } catch { + 
return false; + } + } + + logout(): void { + this.cookies = []; + clearSession(); + } + + // ---- Generic HTTP ------------------------------------------------ + + async get(path: string): Promise { + return fetch(`${this.apiUrl}${path}`, { + headers: this.headers(), + redirect: 'manual', + }); + } + + async post(path: string, body?: unknown): Promise { + return fetch(`${this.apiUrl}${path}`, { + method: 'POST', + headers: { ...this.headers(), 'Content-Type': 'application/json' }, + body: body != null ? JSON.stringify(body) : undefined, + redirect: 'manual', + }); + } + + // ---- Typed API calls --------------------------------------------- + + async getMe(): Promise { + const res = await this.get('/me'); + if (!res.ok) throw new Error(`GET /me failed: ${res.status}`); + return res.json() as Promise; + } + + async getSources(): Promise { + const res = await this.get('/sources'); + if (!res.ok) throw new Error(`GET /sources failed: ${res.status}`); + return res.json() as Promise; + } + + async getConnections(): Promise { + const res = await this.get('/connections'); + if (!res.ok) throw new Error(`GET /connections failed: ${res.status}`); + return res.json() as Promise; + } + + async getSavedSearches(): Promise { + const res = await this.get('/saved-searches'); + if (!res.ok) throw new Error(`GET /saved-searches failed: ${res.status}`); + return res.json() as Promise; + } + + async getDashboards(): Promise { + const res = await this.get('/dashboards'); + if (!res.ok) throw new Error(`GET /dashboards failed: ${res.status}`); + return res.json() as Promise; + } + + // ---- ClickHouse client via proxy --------------------------------- + + createClickHouseClient( + opts: Partial = {}, + ): ProxyClickhouseClient { + return new ProxyClickhouseClient(this, opts); + } + + createMetadata(opts: Partial = {}): Metadata { + return getMetadata(this.createClickHouseClient(opts)); + } + + // ---- Internal ---------------------------------------------------- + + private headers(): 
Record { + const h: Record = {}; + if (this.cookies.length > 0) { + h['cookie'] = this.cookies.join('; '); + } + return h; + } + + private extractCookies(res: Response): void { + const setCookies = res.headers.getSetCookie?.() ?? []; + if (setCookies.length > 0) { + this.cookies = setCookies.map(c => c.split(';')[0]); + } + } +} + +// ------------------------------------------------------------------ +// ClickHouse client that proxies through /clickhouse-proxy +// using the native Node @clickhouse/client with cookie auth +// ------------------------------------------------------------------ + +export class ProxyClickhouseClient extends BaseClickhouseClient { + private apiClient: ApiClient; + + constructor( + apiClient: ApiClient, + opts: Partial = {}, + ) { + super({ + host: `${apiClient.getApiUrl()}/clickhouse-proxy`, + ...opts, + }); + this.apiClient = apiClient; + + // The @clickhouse/client treats the path portion of `url` as the + // database name, NOT the HTTP path. Use `pathname` to set the proxy + // path so requests go to http:///clickhouse-proxy/?query=... + // Derive the clickhouse-proxy pathname from the API URL. + // If apiUrl has a path (e.g. /api), the proxy path becomes /api/clickhouse-proxy + // so it works through the Next.js proxy at pages/api/[...all].ts. + // Pass origin-only URL to createClient to prevent the path from being + // interpreted as a ClickHouse database name. + const apiUrlObj = new URL(apiClient.getApiUrl()); + const basePath = apiUrlObj.pathname.replace(/\/+$/, ''); + const chProxyPath = `${basePath}/clickhouse-proxy`; + + this.client = createClient({ + url: apiUrlObj.origin, + pathname: chProxyPath, + // No ClickHouse credentials — the proxy handles auth to ClickHouse. + // We authenticate to the proxy via session cookie. 
+ username: '', + password: '', + // Disable the Authorization header — we auth via session cookie, + // and a stray "Authorization: Basic Og==" (empty creds) causes + // Express to reject the request before reading the session cookie. + set_basic_auth_header: false, + request_timeout: this.requestTimeout, + application: 'hyperdx-tui', + http_headers: { + cookie: apiClient.getCookieHeader(), + // Force text/plain so Express's body parsers keep req.body as a + // string. Without this, the proxy's proxyReq.write(req.body) fails + // because express.json() parses the body into an Object. + 'content-type': 'text/plain', + }, + keep_alive: { enabled: false }, + }); + } + + // Silence the "Sending Query: ..." debug output from BaseClickhouseClient + protected override logDebugQuery(): void {} + + protected async __query({ + query, + format = 'JSON' as Format, + query_params = {}, + abort_signal, + clickhouse_settings: externalClickhouseSettings, + connectionId, + queryId, + shouldSkipApplySettings, + }: QueryInputs): Promise> { + let clickhouseSettings: ClickHouseSettings | undefined; + if (!shouldSkipApplySettings) { + clickhouseSettings = await this.processClickhouseSettings({ + connectionId, + externalClickhouseSettings, + }); + } + + // Pass connection ID as HTTP header — the proxy uses this to + // look up the ClickHouse connection credentials from MongoDB + const httpHeaders: Record = {}; + if (connectionId && connectionId !== 'local') { + httpHeaders['x-hyperdx-connection-id'] = connectionId; + } + + return this.getClient().query({ + query, + query_params, + format, + abort_signal, + http_headers: httpHeaders, + clickhouse_settings: clickhouseSettings, + query_id: queryId, + }) as unknown as Promise>; + } +} + +// ------------------------------------------------------------------ +// Response types (matching the internal API shapes) +// ------------------------------------------------------------------ + +interface MeResponse { + accessKey: string; + createdAt: 
string; + email: string; + id: string; + name: string; + team: { + id: string; + name: string; + apiKey: string; + }; +} + +export interface SourceResponse { + id: string; + _id: string; + name: string; + kind: 'log' | 'trace' | 'session' | 'metric'; + connection: string; + from: { + databaseName: string; + tableName: string; + }; + timestampValueExpression?: string; + displayedTimestampValueExpression?: string; + defaultTableSelectExpression?: string; + implicitColumnExpression?: string; + orderByExpression?: string; + querySettings?: Array<{ setting: string; value: string }>; + + // Log source-specific + bodyExpression?: string; + severityTextExpression?: string; + serviceNameExpression?: string; + + // Trace-specific + traceIdExpression?: string; + spanIdExpression?: string; + parentSpanIdExpression?: string; + spanNameExpression?: string; + spanKindExpression?: string; + durationExpression?: string; + durationPrecision?: number; + statusCodeExpression?: string; + statusMessageExpression?: string; + eventAttributesExpression?: string; + resourceAttributesExpression?: string; + + // Correlated source IDs + logSourceId?: string; + traceSourceId?: string; + metricSourceId?: string; + sessionSourceId?: string; +} + +interface ConnectionResponse { + id: string; + _id: string; + name: string; + host: string; + username: string; +} + +export interface SavedSearchResponse { + id: string; + _id: string; + name: string; + select: string; + where: string; + whereLanguage: 'lucene' | 'sql'; + source: string; + tags: string[]; + orderBy?: string; +} + +interface DashboardTileConfig { + name?: string; + source?: string; + type?: string; + displayType?: string; + sql?: string; + [key: string]: unknown; +} + +interface DashboardTile { + id: string; + x: number; + y: number; + w: number; + h: number; + config: DashboardTileConfig; + containerId?: string; +} + +interface DashboardFilter { + key: string; + displayName?: string; + keyExpression?: string; + sourceId?: string; +} + 
+interface DashboardResponse { + id: string; + _id: string; + name: string; + tags: string[]; + tiles: DashboardTile[]; + filters?: DashboardFilter[]; + savedQuery?: string | null; + savedQueryLanguage?: string | null; + createdAt?: string; + updatedAt?: string; +} diff --git a/packages/cli/src/api/eventQuery.ts b/packages/cli/src/api/eventQuery.ts new file mode 100644 index 0000000000..7e25ed753d --- /dev/null +++ b/packages/cli/src/api/eventQuery.ts @@ -0,0 +1,266 @@ +/** + * Builds ClickHouse SQL for searching events (logs or traces) using + * renderChartConfig from common-utils. + */ + +import type { + ChSql, + ColumnMetaType, +} from '@hyperdx/common-utils/dist/clickhouse'; +import { chSqlToAliasMap } from '@hyperdx/common-utils/dist/clickhouse'; +import { renderChartConfig } from '@hyperdx/common-utils/dist/core/renderChartConfig'; +import type { Metadata } from '@hyperdx/common-utils/dist/core/metadata'; +import { DisplayType } from '@hyperdx/common-utils/dist/types'; +import type { BuilderChartConfigWithDateRange } from '@hyperdx/common-utils/dist/types'; +import SqlString from 'sqlstring'; + +import type { SourceResponse } from './client'; +import { + getFirstTimestampValueExpression, + getDisplayedTimestampValueExpression, +} from '@/shared/source'; +import { buildRowDataSelectList } from '@/shared/rowDataPanel'; +import { buildColumnMap, getRowWhere } from '@/shared/useRowWhere'; + +export interface SearchQueryOptions { + source: SourceResponse; + /** Override the SELECT clause (user-edited via $EDITOR) */ + selectOverride?: string; + /** Lucene search string */ + searchQuery?: string; + /** Date range */ + startTime: Date; + endTime: Date; + /** Max rows */ + limit?: number; + /** Offset for pagination */ + offset?: number; +} + +/** + * Build a default SELECT expression for a trace source when + * defaultTableSelectExpression is not set. 
+ */ +function buildTraceSelectExpression(source: SourceResponse): string { + const cols: string[] = []; + + const ts = source.timestampValueExpression ?? 'TimestampTime'; + cols.push(ts); + + if (source.spanNameExpression) cols.push(source.spanNameExpression); + if (source.serviceNameExpression) cols.push(source.serviceNameExpression); + if (source.durationExpression) cols.push(source.durationExpression); + if (source.statusCodeExpression) cols.push(source.statusCodeExpression); + if (source.traceIdExpression) cols.push(source.traceIdExpression); + if (source.spanIdExpression) cols.push(source.spanIdExpression); + + return cols.join(', '); +} + +/** + * Build a search query using renderChartConfig — works for both + * log and trace sources. + */ +export async function buildEventSearchQuery( + opts: SearchQueryOptions, + metadata: Metadata, +): Promise { + const { + source, + selectOverride, + searchQuery = '', + startTime, + endTime, + limit = 100, + offset, + } = opts; + + const tsExpr = source.timestampValueExpression ?? 'TimestampTime'; + const firstTsExpr = getFirstTimestampValueExpression(tsExpr) ?? tsExpr; + const orderBy = source.orderByExpression ?? `${firstTsExpr} DESC`; + + // Use the override if provided, otherwise the source's default + let selectExpr = selectOverride ?? source.defaultTableSelectExpression ?? ''; + if (!selectExpr && source.kind === 'trace') { + selectExpr = buildTraceSelectExpression(source); + } + + const config: BuilderChartConfigWithDateRange = { + displayType: DisplayType.Search, + select: selectExpr, + from: source.from, + where: searchQuery, + whereLanguage: searchQuery ? 
'lucene' : 'sql', + connection: source.connection, + timestampValueExpression: tsExpr, + implicitColumnExpression: source.implicitColumnExpression, + orderBy, + limit: { limit, offset }, + dateRange: [startTime, endTime], + }; + + return renderChartConfig(config, metadata, source.querySettings); +} + +// ---- Full row fetch (SELECT *) ------------------------------------- + +// ---- Trace waterfall query (all spans for a traceId) ---------------- + +export interface TraceSpansQueryOptions { + source: SourceResponse; + traceId: string; +} + +/** + * Build a raw SQL query to fetch all spans for a given traceId. + * Returns columns needed for the waterfall chart. + */ +export function buildTraceSpansSql(opts: TraceSpansQueryOptions): { + sql: string; + connectionId: string; +} { + const { source, traceId } = opts; + + const db = source.from.databaseName; + const table = source.from.tableName; + const traceIdExpr = source.traceIdExpression ?? 'TraceId'; + const spanIdExpr = source.spanIdExpression ?? 'SpanId'; + const parentSpanIdExpr = source.parentSpanIdExpression ?? 'ParentSpanId'; + const spanNameExpr = source.spanNameExpression ?? 'SpanName'; + const serviceNameExpr = source.serviceNameExpression ?? 'ServiceName'; + const durationExpr = source.durationExpression ?? 'Duration'; + const statusCodeExpr = source.statusCodeExpression ?? 
'StatusCode'; + + const tsExpr = getDisplayedTimestampValueExpression(source); + + const cols = [ + `${tsExpr} AS Timestamp`, + `${traceIdExpr} AS TraceId`, + `${spanIdExpr} AS SpanId`, + `${parentSpanIdExpr} AS ParentSpanId`, + `${spanNameExpr} AS SpanName`, + `${serviceNameExpr} AS ServiceName`, + `${durationExpr} AS Duration`, + `${statusCodeExpr} AS StatusCode`, + ]; + + const escapedTraceId = SqlString.escape(traceId); + const sql = `SELECT ${cols.join(', ')} FROM ${db}.${table} WHERE ${traceIdExpr} = ${escapedTraceId} ORDER BY ${tsExpr} ASC LIMIT 10000`; + + return { + sql, + connectionId: source.connection, + }; +} + +/** + * Build a raw SQL query to fetch correlated log events for a given traceId. + * Returns columns matching the SpanRow shape used by the waterfall chart. + * Logs are linked to spans via their SpanId. + */ +export function buildTraceLogsSql(opts: TraceSpansQueryOptions): { + sql: string; + connectionId: string; +} { + const { source, traceId } = opts; + + const db = source.from.databaseName; + const table = source.from.tableName; + const traceIdExpr = source.traceIdExpression ?? 'TraceId'; + const spanIdExpr = source.spanIdExpression ?? 'SpanId'; + const bodyExpr = source.bodyExpression ?? 'Body'; + const serviceNameExpr = source.serviceNameExpression ?? 'ServiceName'; + const sevExpr = source.severityTextExpression ?? 
'SeverityText'; + + const tsExpr = getDisplayedTimestampValueExpression(source); + + const cols = [ + `${tsExpr} AS Timestamp`, + `${traceIdExpr} AS TraceId`, + `${spanIdExpr} AS SpanId`, + `'' AS ParentSpanId`, + `${bodyExpr} AS SpanName`, + `${serviceNameExpr} AS ServiceName`, + `0 AS Duration`, + `${sevExpr} AS StatusCode`, + ]; + + const escapedTraceId = SqlString.escape(traceId); + const sql = `SELECT ${cols.join(', ')} FROM ${db}.${table} WHERE ${traceIdExpr} = ${escapedTraceId} ORDER BY ${tsExpr} ASC LIMIT 10000`; + + return { + sql, + connectionId: source.connection, + }; +} + +export interface FullRowQueryOptions { + source: SourceResponse; + /** The partial row data from the table (used to build the WHERE clause) */ + row: Record; +} + +/** + * Build a full row query using renderChartConfig, matching the web + * frontend's useRowData in DBRowDataPanel.tsx. + * + * @source packages/app/src/components/DBRowDataPanel.tsx (useRowData) + * @source packages/app/src/hooks/useRowWhere.tsx (processRowToWhereClause) + * + * Uses chSqlToAliasMap from the table query's rendered SQL + column + * metadata to build a proper WHERE clause with type-aware matching, + * then queries: + * SELECT *, <__hdx_* aliases> + * FROM source.from + * WHERE + * WITH + * LIMIT 1 + */ +export async function buildFullRowQuery( + opts: FullRowQueryOptions & { + /** The rendered ChSql from the table query (for alias resolution) */ + tableChSql: ChSql; + /** Column metadata from the table query response */ + tableMeta: ColumnMetaType[]; + metadata: Metadata; + }, +): Promise { + const { source, row, tableChSql, tableMeta, metadata } = opts; + + // Parse the rendered table SQL to get alias → expression mapping + const aliasMap = chSqlToAliasMap(tableChSql); + + // Build column map using both meta (types) and aliasMap (expressions) + const columnMap = buildColumnMap(tableMeta, aliasMap); + + // Build WHERE using the web frontend's processRowToWhereClause + const rowWhereResult = getRowWhere( + 
row as Record, + columnMap, + aliasMap, + ); + + const selectList = buildRowDataSelectList(source); + + // Use a very wide date range — the WHERE clause already uniquely + // identifies the row, so the time range is just a safety net + const now = new Date(); + const yearAgo = new Date(now.getTime() - 365 * 24 * 60 * 60 * 1000); + + const config: BuilderChartConfigWithDateRange = { + connection: source.connection, + from: source.from, + timestampValueExpression: + source.timestampValueExpression ?? 'TimestampTime', + dateRange: [yearAgo, now], + select: selectList, + where: rowWhereResult.where, + limit: { limit: 1 }, + displayType: DisplayType.Table, + ...(rowWhereResult.aliasWith.length > 0 + ? { with: rowWhereResult.aliasWith } + : {}), + }; + + return renderChartConfig(config, metadata, source.querySettings); +} diff --git a/packages/cli/src/cli.tsx b/packages/cli/src/cli.tsx new file mode 100644 index 0000000000..7e0980637c --- /dev/null +++ b/packages/cli/src/cli.tsx @@ -0,0 +1,638 @@ +#!/usr/bin/env node + +// MUST be the first import — silences console.debug/warn/error before +// any common-utils code runs. ESM hoists imports above inline code, +// so this can't be done with inline statements. 
+import { _origError } from '@/utils/silenceLogs'; + +import React, { useState, useCallback } from 'react'; +import { render, Box, Text, useApp } from 'ink'; +import TextInput from 'ink-text-input'; +import Spinner from 'ink-spinner'; +import { Command } from 'commander'; +import chalk from 'chalk'; + +import App from '@/App'; +import { ApiClient } from '@/api/client'; +import { clearSession, loadSession } from '@/utils/config'; +import { uploadSourcemaps } from '@/sourcemaps'; + +// ---- Standalone interactive login for `hdx auth login` ------------- + +function LoginPrompt({ + apiUrl, + client, +}: { + apiUrl: string; + client: ApiClient; +}) { + const { exit } = useApp(); + const [field, setField] = useState<'email' | 'password'>('email'); + const [email, setEmail] = useState(''); + const [password, setPassword] = useState(''); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + + const handleSubmitEmail = useCallback(() => { + if (!email.trim()) return; + setField('password'); + }, [email]); + + const handleSubmitPassword = useCallback(async () => { + if (!password) return; + setLoading(true); + setError(null); + const ok = await client.login(email, password); + setLoading(false); + if (ok) { + exit(); + // Small delay to let Ink unmount before writing to stdout + setTimeout(() => { + process.stdout.write( + chalk.green(`\nLogged in as ${email} (${apiUrl})\n`), + ); + }, 50); + } else { + setError('Login failed. Check your email and password.'); + setField('email'); + setEmail(''); + setPassword(''); + } + }, [email, password, client, apiUrl, exit]); + + return ( + + + HyperDX — Login + + Server: {apiUrl} + + + {error && {error}} + + {loading ? ( + + Logging in… + + ) : field === 'email' ? ( + + Email: + + + ) : ( + + Password: + + + )} + + ); +} + +/** + * Resolve the server URL: use the provided flag, or fall back to the + * saved session's apiUrl. Exits with an error if neither is available. 
+ */ +function resolveServer(flagValue: string | undefined): string { + if (flagValue) return flagValue; + const session = loadSession(); + if (session?.apiUrl) return session.apiUrl; + _origError( + chalk.red( + `No server specified. Use ${chalk.bold('-s ')} or run ${chalk.bold('hdx auth login -s ')} first.\n`, + ), + ); + process.exit(1); +} + +const program = new Command(); + +program + .name('hdx') + .description('HyperDX CLI — search and tail events from the terminal') + .version('0.1.0') + .enablePositionalOptions(); + +// ---- Interactive mode (default) ------------------------------------ + +program + .command('tui') + .description('Interactive TUI for event search and tail') + .option('-s, --server ', 'HyperDX API server URL') + .option('-q, --query ', 'Initial Lucene search query') + .option('--source ', 'Source name (skips picker)') + .option('-f, --follow', 'Start in follow/live tail mode') + .action(opts => { + const server = resolveServer(opts.server); + render( + , + ); + }); + +// ---- Auth (login / logout / status) -------------------------------- + +const auth = program + .command('auth') + .description('Manage authentication') + .enablePositionalOptions() + .passThroughOptions(); + +auth + .command('login') + .description('Sign in to your HyperDX account') + .requiredOption('-s, --server ', 'HyperDX API server URL') + .option('-e, --email ', 'Email address') + .option('-p, --password ', 'Password') + .action(async opts => { + const client = new ApiClient({ apiUrl: opts.server }); + + if (opts.email && opts.password) { + // Non-interactive login (for scripting/CI) + const ok = await client.login(opts.email, opts.password); + if (ok) { + process.stdout.write( + chalk.green(`Logged in as ${opts.email} (${opts.server})\n`), + ); + } else { + _origError(chalk.red('Login failed. 
Check your email and password.\n')); + process.exit(1); + } + } else { + // Interactive login via Ink + const { waitUntilExit } = render( + , + ); + await waitUntilExit(); + } + }); + +auth + .command('logout') + .description('Log out from your HyperDX account') + .action(() => { + clearSession(); + process.stdout.write('Session cleared.\n'); + }); + +auth + .command('status') + .description('Show authentication status') + .action(async () => { + const session = loadSession(); + if (!session) { + process.stdout.write( + chalk.yellow( + `Not logged in. Run ${chalk.bold('hdx auth login -s ')} to sign in.\n`, + ), + ); + process.exit(1); + } + + const client = new ApiClient({ apiUrl: session.apiUrl }); + const ok = await client.checkSession(); + + if (!ok) { + process.stdout.write( + chalk.yellow( + `Session expired. Run ${chalk.bold('hdx auth login -s ')} to sign in again.\n`, + ), + ); + process.exit(1); + } + + try { + const me = await client.getMe(); + process.stdout.write( + `${chalk.green('Logged in')} as ${chalk.bold(me.email)} (${session.apiUrl})\n`, + ); + } catch { + process.stdout.write(chalk.green('Logged in') + ` (${session.apiUrl})\n`); + } + }); + +// ---- Sources ------------------------------------------------------- + +program + .command('sources') + .description( + 'List data sources (log, trace, session, metric) with ClickHouse table schemas', + ) + .option('-s, --server ', 'HyperDX API server URL') + .option('--json', 'Output as JSON (for programmatic consumption)') + .addHelpText( + 'after', + ` +About: + A "source" in HyperDX is a named data source backed by a ClickHouse table. + Each source has a kind (log, trace, session, or metric) and a set of + expression mappings that tell HyperDX which columns hold timestamps, trace + IDs, span names, severity levels, etc. + + This command lists all sources and fetches the ClickHouse CREATE TABLE + schema for each (metric sources are skipped since their schema is not + useful for direct queries). 
+ + Use --json for structured output suitable for LLM / agent consumption. + +JSON output schema (--json): + Array of objects, each with: + id - Source ID (use with other hdx commands) + name - Human-readable source name + kind - "log" | "trace" | "session" | "metric" + database - ClickHouse database name + table - ClickHouse table name + connection - Connection ID for the ClickHouse proxy + schema - Full CREATE TABLE DDL (null for metric sources) + expressions - Column expression mappings: + timestamp - Primary timestamp column (e.g. "TimestampTime") + displayedTimestamp - High-precision display timestamp (DateTime64) + body - Log body column + severityText - Severity level column (e.g. "SeverityText") + serviceName - Service name column + traceId - Trace ID column + spanId - Span ID column + parentSpanId - Parent span ID column + spanName - Span name column + duration - Duration column (raw value) + durationPrecision - Duration unit: 3=ms, 6=μs, 9=ns + statusCode - Status code column + eventAttributes - Span/log attributes (Map/JSON column) + resourceAttributes - Resource attributes (Map/JSON column) + implicitColumn - Implicit column for Lucene search + defaultTableSelect - Default SELECT clause for table view + orderBy - Default ORDER BY clause + correlatedSources - IDs of linked sources: + log - Correlated log source ID + trace - Correlated trace source ID + metric - Correlated metric source ID + session - Correlated session source ID + +Examples: + $ hdx sources # Human-readable table with schemas + $ hdx sources --json # JSON for agents / scripts + $ hdx sources --json | jq '.[0]' # Inspect first source +`, + ) + .action(async opts => { + const server = resolveServer(opts.server); + const client = new ApiClient({ apiUrl: server }); + + if (!(await client.checkSession())) { + _origError( + chalk.red( + `Not logged in. 
Run ${chalk.bold('hdx auth login')} to sign in.\n`, + ), + ); + process.exit(1); + } + + const sources = await client.getSources(); + if (sources.length === 0) { + if (opts.json) { + process.stdout.write('[]\n'); + } else { + process.stdout.write('No sources found.\n'); + } + return; + } + + const chClient = client.createClickHouseClient(); + + // Fetch schemas for non-metric sources (in parallel) + const schemaEntries = await Promise.all( + sources.map(async (s): Promise<[string, string | null]> => { + if (s.kind === 'metric') return [s.id, null]; + try { + const resultSet = await chClient.query({ + query: `SHOW CREATE TABLE ${s.from.databaseName}.${s.from.tableName}`, + format: 'JSON', + connectionId: s.connection, + }); + const json = await resultSet.json<{ statement: string }>(); + const row = (json.data as { statement: string }[])?.[0]; + return [s.id, row?.statement?.trimEnd() ?? null]; + } catch { + return [s.id, null]; + } + }), + ); + const schemas = new Map(schemaEntries); + + if (opts.json) { + const output = sources.map(s => ({ + id: s.id, + name: s.name, + kind: s.kind, + database: s.from.databaseName, + table: s.from.tableName, + connection: s.connection, + schema: schemas.get(s.id) ?? null, + expressions: { + timestamp: s.timestampValueExpression ?? null, + displayedTimestamp: s.displayedTimestampValueExpression ?? null, + body: s.bodyExpression ?? null, + severityText: s.severityTextExpression ?? null, + serviceName: s.serviceNameExpression ?? null, + traceId: s.traceIdExpression ?? null, + spanId: s.spanIdExpression ?? null, + parentSpanId: s.parentSpanIdExpression ?? null, + spanName: s.spanNameExpression ?? null, + duration: s.durationExpression ?? null, + durationPrecision: s.durationPrecision ?? null, + statusCode: s.statusCodeExpression ?? null, + eventAttributes: s.eventAttributesExpression ?? null, + resourceAttributes: s.resourceAttributesExpression ?? null, + implicitColumn: s.implicitColumnExpression ?? 
null, + defaultTableSelect: s.defaultTableSelectExpression ?? null, + orderBy: s.orderByExpression ?? null, + }, + correlatedSources: { + log: s.logSourceId ?? null, + trace: s.traceSourceId ?? null, + metric: s.metricSourceId ?? null, + session: s.sessionSourceId ?? null, + }, + })); + process.stdout.write(JSON.stringify(output, null, 2) + '\n'); + return; + } + + // Human-readable output + for (const s of sources) { + const table = `${s.from.databaseName}.${s.from.tableName}`; + + process.stdout.write( + `${chalk.bold.cyan(s.name)} ${chalk.dim(s.kind)} ${chalk.dim(table)}\n`, + ); + + const schema = schemas.get(s.id); + if (schema) { + const lines = schema.split('\n'); + for (const line of lines) { + process.stdout.write(chalk.dim(` ${line}\n`)); + } + } else if (s.kind !== 'metric') { + process.stdout.write(chalk.dim(' (schema unavailable)\n')); + } + + process.stdout.write('\n'); + } + }); + +// ---- Dashboards ---------------------------------------------------- + +program + .command('dashboards') + .description('List dashboards with tile summaries') + .option('-s, --server ', 'HyperDX API server URL') + .option('--json', 'Output as JSON (for programmatic consumption)') + .addHelpText( + 'after', + ` +About: + Lists all dashboards for the authenticated team. Each dashboard + contains tiles (charts/visualizations) that query ClickHouse sources. + + Use --json for structured output suitable for LLM / agent consumption. + +JSON output schema (--json): + Array of objects, each with: + id - Dashboard ID + name - Dashboard name + tags - Array of tag strings + filters - Dashboard-level filter keys (key, displayName, sourceId) + savedQuery - Default dashboard query (if set) + createdAt - ISO timestamp + updatedAt - ISO timestamp + tiles - Array of tile summaries: + id - Tile ID + name - Chart name (may be null) + type - Chart type (time, table, number, pie, bar, etc.) 
+ source - Source ID referenced by this tile (null for raw SQL) + sql - Raw SQL query (null for builder-mode charts) + +Examples: + $ hdx dashboards # Human-readable list with tiles + $ hdx dashboards --json # JSON for agents / scripts + $ hdx dashboards --json | jq '.[0].tiles' # List tiles of first dashboard +`, + ) + .action(async opts => { + const server = resolveServer(opts.server); + const client = new ApiClient({ apiUrl: server }); + + if (!(await client.checkSession())) { + _origError( + chalk.red( + `Not logged in. Run ${chalk.bold('hdx auth login')} to sign in.\n`, + ), + ); + process.exit(1); + } + + const dashboards = await client.getDashboards(); + if (dashboards.length === 0) { + if (opts.json) { + process.stdout.write('[]\n'); + } else { + process.stdout.write('No dashboards found.\n'); + } + return; + } + + if (opts.json) { + const output = dashboards.map(d => ({ + id: d.id, + name: d.name, + tags: d.tags ?? [], + filters: d.filters ?? [], + savedQuery: d.savedQuery ?? null, + createdAt: d.createdAt ?? null, + updatedAt: d.updatedAt ?? null, + tiles: d.tiles.map(t => ({ + id: t.id, + name: t.config.name ?? null, + type: t.config.type ?? t.config.displayType ?? null, + source: t.config.source ?? null, + sql: t.config.sql ?? null, + })), + })); + process.stdout.write(JSON.stringify(output, null, 2) + '\n'); + return; + } + + // Fetch sources to resolve source names for display + let sourceNames: Record = {}; + try { + const sources = await client.getSources(); + sourceNames = Object.fromEntries( + sources.flatMap(s => [ + [s.id, s.name], + [s._id, s.name], + ]), + ); + } catch { + // Non-fatal — just won't show source names + } + + // Human-readable output + for (const d of dashboards) { + const tags = + d.tags.length > 0 ? ` ${chalk.dim(`[${d.tags.join(', ')}]`)}` : ''; + process.stdout.write( + `${chalk.bold.cyan(d.name)}${tags} ${chalk.dim(`${d.tiles.length} tile${d.tiles.length !== 1 ? 
's' : ''}`)}\n`, + ); + + for (let i = 0; i < d.tiles.length; i++) { + const t = d.tiles[i]; + const isLast = i === d.tiles.length - 1; + const prefix = isLast ? ' └─ ' : ' ├─ '; + const name = t.config.name || '(untitled)'; + const chartType = t.config.type ?? t.config.displayType ?? 'chart'; + let sourceLabel = ''; + if (t.config.sql) { + sourceLabel = 'raw SQL'; + } else if (t.config.source) { + sourceLabel = `source: ${sourceNames[t.config.source] ?? t.config.source}`; + } + const meta = [chartType, sourceLabel].filter(Boolean).join(', '); + process.stdout.write( + `${chalk.dim(prefix)}${name} ${chalk.dim(`(${meta})`)}\n`, + ); + } + + process.stdout.write('\n'); + } + }); + +// ---- Query --------------------------------------------------------- + +program + .command('query') + .description('Run a raw SQL query against a ClickHouse source') + .requiredOption('--source ', 'Source name or ID') + .requiredOption('--sql ', 'SQL query to execute') + .option('-s, --server ', 'HyperDX API server URL') + .option('--format ', 'ClickHouse output format', 'JSON') + .addHelpText( + 'after', + ` +About: + Execute a raw ClickHouse SQL query through the HyperDX proxy, using + the connection credentials associated with a source. This is useful + for ad-hoc exploration, debugging, and agent-driven queries. + + The --source flag accepts either the source name (case-insensitive) + or the source ID (from 'hdx sources --json'). + + The query is sent as-is to ClickHouse — you are responsible for + writing valid SQL. Use 'hdx sources' to discover table names and + column schemas. + + Output is written to stdout. Use --format to control the ClickHouse + response format (JSON, JSONEachRow, TabSeparated, CSV, etc.). 
+ +Examples: + $ hdx query --source "Logs" --sql "SELECT count() FROM default.otel_logs" + $ hdx query --source "Traces" --sql "SELECT * FROM default.otel_traces LIMIT 5" + $ hdx query --source "Logs" --sql "SELECT Body FROM default.otel_logs LIMIT 3" --format JSONEachRow +`, + ) + .action(async opts => { + const server = resolveServer(opts.server); + const client = new ApiClient({ apiUrl: server }); + + if (!(await client.checkSession())) { + _origError( + chalk.red( + `Not logged in. Run ${chalk.bold('hdx auth login')} to sign in.\n`, + ), + ); + process.exit(1); + } + + const sources = await client.getSources(); + const source = sources.find( + s => + s.name.toLowerCase() === opts.source.toLowerCase() || + s.id === opts.source || + s._id === opts.source, + ); + + if (!source) { + _origError(chalk.red(`Source "${opts.source}" not found.\n`)); + _origError('Available sources:'); + for (const s of sources) { + _origError(` - ${s.name} (${s.kind}) [${s.id}]`); + } + process.exit(1); + } + + const chClient = client.createClickHouseClient(); + + try { + const resultSet = await chClient.query({ + query: opts.sql, + format: opts.format, + connectionId: source.connection, + }); + const text = await resultSet.text(); + process.stdout.write(text); + // Ensure trailing newline for clean terminal output + if (text.length > 0 && !text.endsWith('\n')) { + process.stdout.write('\n'); + } + } catch (err) { + const msg = err instanceof Error ? 
err.message : String(err); + _origError(chalk.red(`Query failed: ${msg}\n`)); + process.exit(1); + } + }); + +// ---- Upload Sourcemaps --------------------------------------------- + +program + .command('upload-sourcemaps') + .description( + 'Upload JavaScript source maps to HyperDX for stack trace de-obfuscation', + ) + .option('-k, --serviceKey ', 'The HyperDX service account API key') + .option( + '-u, --apiUrl [string]', + 'An optional api url for self-hosted deployments', + ) + .option( + '-rid, --releaseId [string]', + 'An optional release id to associate the sourcemaps with', + ) + .option( + '-p, --path [string]', + 'Sets the directory of where the sourcemaps are', + '.', + ) + .option( + '-bp, --basePath [string]', + 'An optional base path for the uploaded sourcemaps', + ) + .option( + '--apiVersion [string]', + 'The API version to use (v1 for HyperDX V1 Cloud, v2 for latest)', + 'v1', + ) + .action(uploadSourcemaps); + +program.parse(); diff --git a/packages/cli/src/components/ColumnValues.tsx b/packages/cli/src/components/ColumnValues.tsx new file mode 100644 index 0000000000..547ce60068 --- /dev/null +++ b/packages/cli/src/components/ColumnValues.tsx @@ -0,0 +1,98 @@ +/** + * Renders a key-value list of column values from a row data object. + * Shared between the Column Values tab and Trace tab's Event Details. 
/**
 * Renders a key-value list of column values from a row data object.
 * Shared between the Column Values tab and Trace tab's Event Details.
 */

import React, { useMemo } from 'react';
import { Box, Text } from 'ink';

/** Collapse newlines and runs of whitespace into single spaces. */
function flatten(s: string): string {
  return s
    .replace(/\n/g, ' ')
    .replace(/\s{2,}/g, ' ')
    .trim();
}

interface ColumnValuesProps {
  // NOTE(review): the generic arguments on `data` were lost in this copy;
  // `Record<string, unknown>` matches how `data` is consumed below
  // (Object.entries + typeof checks) — confirm against the original.
  data: Record<string, unknown>;
  searchQuery?: string;
  wrapLines?: boolean;
  /** Max visible rows (enables scrolling viewport) */
  maxRows?: number;
  /** Scroll offset into the entries list */
  scrollOffset?: number;
}

const ColumnValues = React.memo(function ColumnValues({
  data,
  searchQuery,
  wrapLines,
  maxRows,
  scrollOffset = 0,
}: ColumnValuesProps) {
  // Filter entries by the search query (case-insensitive, matched against
  // both the key and the stringified value), then pretty-print values:
  // objects are JSON-stringified with 2-space indent, and strings that look
  // like serialized JSON are re-indented for readability.
  const entries = useMemo(() => {
    return Object.entries(data)
      .filter(([key, value]) => {
        if (!searchQuery) return true;
        const q = searchQuery.toLowerCase();
        const strVal =
          value != null && typeof value === 'object'
            ? JSON.stringify(value)
            : String(value ?? '');
        return (
          key.toLowerCase().includes(q) || strVal.toLowerCase().includes(q)
        );
      })
      .map(([key, value]) => {
        let strVal: string;
        if (value != null && typeof value === 'object') {
          strVal = JSON.stringify(value, null, 2);
        } else {
          strVal = String(value ?? '');
        }
        let displayVal: string;
        if (strVal.startsWith('{') || strVal.startsWith('[')) {
          try {
            displayVal = JSON.stringify(JSON.parse(strVal), null, 2);
          } catch {
            // Not actually JSON — show the raw string as-is.
            displayVal = strVal;
          }
        } else {
          displayVal = strVal;
        }
        return { key, displayVal };
      });
  }, [data, searchQuery]);

  // NOTE(review): totalEntries is unused in the reconstructed markup below;
  // it was presumably consumed by the original markup (e.g. a "shown/total"
  // scroll indicator) — confirm against the original before removing.
  const totalEntries = entries.length;
  const visibleEntries =
    maxRows != null
      ? entries.slice(scrollOffset, scrollOffset + maxRows)
      : entries;

  // NOTE(review): the JSX below was stripped from this copy of the file;
  // only the interpolated text nodes ({key} and
  // {wrapLines ? displayVal : flatten(displayVal)}) survived. The Box/Text
  // structure, layout props, and colors are reconstructed best guesses —
  // confirm against the original source.
  return (
    <Box flexDirection="column">
      {visibleEntries.map(({ key, displayVal }) => (
        <Box key={key}>
          <Box flexShrink={0}>
            <Text color="cyan" bold>
              {key}
            </Text>
          </Box>
          <Box flexGrow={1}>
            <Text>{wrapLines ? displayVal : flatten(displayVal)}</Text>
          </Box>
        </Box>
      ))}
    </Box>
  );
});

export default ColumnValues;
focusDetailSearch, + onDetailSearchQueryChange, + onDetailSearchSubmit, + wrapLines, + termHeight, + fullDetailMaxRows, + detailMaxRows, + columnValuesScrollOffset, + traceDetailScrollOffset, + expandedFormattedRow, + scrollOffset, + expandedRow, +}: DetailPanelProps) { + const hasTrace = + source.kind === 'trace' || (source.kind === 'log' && source.traceSourceId); + + const tabs: Array<{ key: DetailTab; label: string }> = [ + { key: 'overview', label: 'Overview' }, + { key: 'columns', label: 'Column Values' }, + ...(hasTrace ? [{ key: 'trace' as const, label: 'Trace' }] : []), + ]; + + return ( + + {/* Back hint */} + esc=back to table + {/* Summary header */} + + + {(() => { + if (!expandedFormattedRow) return ''; + return source.kind === 'trace' + ? `${expandedFormattedRow.cells[1] || ''} > ${expandedFormattedRow.cells[2] || ''}` + : flatten( + String( + expandedFormattedRow.raw[source.bodyExpression ?? 'Body'] ?? + '', + ), + ).slice(0, 200); + })()} + + + {/* Detail tab bar */} + + {tabs.map(tab => ( + + + {detailTab === tab.key ? '▸ ' : ' '} + {tab.label} + + + ))} + (tab to switch) + + {/* Detail search bar — only show when focused or has a query */} + {(focusDetailSearch || detailSearchQuery) && ( + + )} + {'─'.repeat(80)} + + {/* Tab content */} + {detailTab === 'overview' && ( + /* ---- Overview tab ---- */ + + {expandedRowLoading ? ( + + Loading… + + ) : expandedRowData ? ( + + ) : null} + + )} + + {detailTab === 'trace' && + /* ---- Trace waterfall tab ---- */ + (() => { + if (!expandedTraceId) { + return expandedRowLoading ? ( + + Loading trace ID… + + ) : ( + No trace ID found for this row. + ); + } + + const findSource = (id: string | undefined) => + id + ? (sources.find(s => s.id === id || s._id === id) ?? null) + : null; + + const traceSource = + source.kind === 'trace' ? source : findSource(source.traceSourceId); + const logSource = + source.kind === 'log' ? 
source : findSource(source.logSourceId); + + if (!traceSource) { + return No correlated trace source found.; + } + + // Reserve lines for: header, tab bar, search, separator, + // summary, col headers, separator, Event Details header + + // separator + content (~15 lines overhead) + const waterfallMaxRows = Math.max(10, termHeight - 15); + + return ( + + ); + })()} + + {detailTab === 'columns' && ( + /* ---- Column Values tab ---- */ + + {expandedRowLoading ? ( + + Loading all fields… + + ) : expandedRowData ? ( + + ) : null} + + )} + + ); +} diff --git a/packages/cli/src/components/EventViewer/EventViewer.tsx b/packages/cli/src/components/EventViewer/EventViewer.tsx new file mode 100644 index 0000000000..74d9f29d84 --- /dev/null +++ b/packages/cli/src/components/EventViewer/EventViewer.tsx @@ -0,0 +1,274 @@ +import React, { useState, useCallback, useRef, useMemo } from 'react'; +import { Box, useStdout } from 'ink'; + +import type { TimeRange } from '@/utils/editor'; + +import type { EventViewerProps, SwitchItem } from './types'; +import { getColumns, getDynamicColumns, formatDynamicRow } from './utils'; +import { Header, TabBar, SearchBar, Footer, HelpScreen } from './SubComponents'; +import { TableView } from './TableView'; +import { DetailPanel } from './DetailPanel'; +import { useEventData } from './useEventData'; +import { useKeybindings } from './useKeybindings'; + +export default function EventViewer({ + clickhouseClient, + metadata, + source, + sources, + savedSearches, + onSavedSearchSelect, + initialQuery = '', + follow = true, +}: EventViewerProps) { + const { stdout } = useStdout(); + const termHeight = stdout?.rows ?? 
24; + const maxRows = Math.max(1, termHeight - 8); + // Fixed height for Event Details in Trace tab (about 1/3 of terminal) + const detailMaxRows = Math.max(5, Math.floor(termHeight / 3)); + // Full-screen height for Overview/Column Values tabs + // (termHeight minus header, body preview, tab bar, separator, footer) + const fullDetailMaxRows = Math.max(5, termHeight - 9); + + // ---- UI state ---------------------------------------------------- + + const [searchQuery, setSearchQuery] = useState(initialQuery); + const [submittedQuery, setSubmittedQuery] = useState(initialQuery); + const [isFollowing, setIsFollowing] = useState(follow); + const wasFollowingRef = useRef(false); + const [scrollOffset, setScrollOffset] = useState(0); + const [focusSearch, setFocusSearch] = useState(false); + const [showHelp, setShowHelp] = useState(false); + const [wrapLines, setWrapLines] = useState(false); + const [customSelectMap, setCustomSelectMap] = useState< + Record + >({}); + const customSelect = customSelectMap[source.id] as string | undefined; + const [selectedRow, setSelectedRow] = useState(0); + const [expandedRow, setExpandedRow] = useState(null); + const [detailTab, setDetailTab] = useState<'overview' | 'columns' | 'trace'>( + 'overview', + ); + const [detailSearchQuery, setDetailSearchQuery] = useState(''); + const [focusDetailSearch, setFocusDetailSearch] = useState(false); + const [traceDetailScrollOffset, setTraceDetailScrollOffset] = useState(0); + const [columnValuesScrollOffset, setColumnValuesScrollOffset] = useState(0); + const [traceSelectedIndex, setTraceSelectedIndex] = useState( + null, + ); + const [timeRange, setTimeRange] = useState(() => { + const now = new Date(); + return { start: new Date(now.getTime() - 60 * 60 * 1000), end: now }; + }); + + // ---- Data fetching ----------------------------------------------- + + const { + events, + loading, + error, + hasMore, + loadingMore, + expandedRowData, + expandedRowLoading, + expandedTraceId, + 
expandedSpanId, + fetchNextPage, + } = useEventData({ + clickhouseClient, + metadata, + source, + customSelect, + submittedQuery, + timeRange, + isFollowing, + setTimeRange, + expandedRow, + }); + + // ---- Derived values ---------------------------------------------- + + const columns = useMemo( + () => (events.length > 0 ? getDynamicColumns(events) : getColumns(source)), + [source, events], + ); + + const switchItems = useMemo(() => { + const items: SwitchItem[] = []; + for (const ss of savedSearches) { + const src = sources.find(s => s.id === ss.source || s._id === ss.source); + items.push({ + type: 'saved', + label: `${ss.name}${src ? ` (${src.name})` : ''}`, + search: ss, + }); + } + for (const src of sources) { + items.push({ type: 'source', label: src.name, source: src }); + } + return items; + }, [savedSearches, sources]); + + const findActiveIndex = useCallback(() => { + const ssIdx = switchItems.findIndex( + item => + item.type === 'saved' && + item.search && + (item.search.source === source.id || + item.search.source === source._id) && + item.search.where === submittedQuery, + ); + if (ssIdx >= 0) return ssIdx; + const srcIdx = switchItems.findIndex( + item => + item.type === 'source' && + item.source && + (item.source.id === source.id || item.source._id === source._id), + ); + return srcIdx >= 0 ? 
srcIdx : 0; + }, [switchItems, source, submittedQuery]); + + const activeIdx = findActiveIndex(); + const visibleRowCount = Math.min(events.length - scrollOffset, maxRows); + + // ---- Keybindings ------------------------------------------------- + + useKeybindings({ + focusSearch, + focusDetailSearch, + showHelp, + expandedRow, + detailTab, + selectedRow, + scrollOffset, + isFollowing, + hasMore, + events, + maxRows, + visibleRowCount, + source, + timeRange, + customSelect, + detailMaxRows, + fullDetailMaxRows, + switchItems, + findActiveIndex, + onSavedSearchSelect, + setFocusSearch, + setFocusDetailSearch, + setShowHelp, + setSelectedRow, + setScrollOffset, + setExpandedRow, + setDetailTab, + setIsFollowing, + setWrapLines, + setDetailSearchQuery, + setTraceSelectedIndex, + setTraceDetailScrollOffset, + setColumnValuesScrollOffset, + setTimeRange, + setCustomSelectMap, + wasFollowingRef, + fetchNextPage, + }); + + // ---- Pre-format visible rows ------------------------------------- + + const visibleRows = useMemo(() => { + return events.slice(scrollOffset, scrollOffset + maxRows).map(row => ({ + ...formatDynamicRow(row, columns), + raw: row, + })); + }, [events, scrollOffset, maxRows, columns]); + + const errorLine = error ? error.slice(0, 200) : ''; + + // ---- Render ------------------------------------------------------ + + if (showHelp) { + return ( + + + + ); + } + + return ( + +
+ {expandedRow === null && ( + <> + + { + setSubmittedQuery(searchQuery); + setScrollOffset(0); + setFocusSearch(false); + }} + /> + + )} + + {expandedRow !== null ? ( + setFocusDetailSearch(false)} + wrapLines={wrapLines} + termHeight={termHeight} + fullDetailMaxRows={fullDetailMaxRows} + detailMaxRows={detailMaxRows} + columnValuesScrollOffset={columnValuesScrollOffset} + traceDetailScrollOffset={traceDetailScrollOffset} + expandedFormattedRow={visibleRows.find( + (_, i) => scrollOffset + i === expandedRow, + )} + scrollOffset={scrollOffset} + expandedRow={expandedRow} + /> + ) : ( + + )} + +