Skip to content

Commit 2a0727c

Browse files
authored
chore: add logs, har and config data in html output (#1946)
* chore: add logs, har and config data in html output
* chore: add logs, har and config data in html output
* chore: keep scraper id empty for tests
1 parent cba5211 commit 2a0727c

File tree

8 files changed

+309
-27
lines changed

8 files changed

+309
-27
lines changed

api/context.go

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ import (
66
"sync"
77
"time"
88

9+
"github.com/flanksource/commons/har"
910
"github.com/flanksource/commons/logger"
1011
v1 "github.com/flanksource/config-db/api/v1"
1112
dutyCtx "github.com/flanksource/duty/context"
@@ -20,6 +21,7 @@ type ScrapeContext struct {
2021

2122
isIncremental bool
2223
debugRun bool
24+
harCollector *har.Collector
2325

2426
namespace string
2527

@@ -111,6 +113,7 @@ func (ctx ScrapeContext) WithValue(key, val any) ScrapeContext {
111113
temp: ctx.temp,
112114
isIncremental: ctx.isIncremental,
113115
debugRun: ctx.debugRun,
116+
harCollector: ctx.harCollector,
114117
namespace: ctx.namespace,
115118
jobHistory: ctx.jobHistory,
116119
scrapeConfig: ctx.scrapeConfig,
@@ -215,3 +218,12 @@ func (ctx ScrapeContext) AsDebugRun(level string) ScrapeContext {
215218
}
216219
return ctx
217220
}
221+
222+
func (ctx ScrapeContext) WithHARCollector(collector *har.Collector) ScrapeContext {
223+
ctx.harCollector = collector
224+
return ctx
225+
}
226+
227+
func (ctx ScrapeContext) HARCollector() *har.Collector {
228+
return ctx.harCollector
229+
}

api/v1/interface.go

Lines changed: 203 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,12 +4,15 @@ import (
44
"encoding/json"
55
"errors"
66
"fmt"
7+
"regexp"
78
"strings"
89
"time"
910

1011
"github.com/flanksource/clicky"
1112
"github.com/flanksource/clicky/api"
1213
"github.com/flanksource/commons/collections/set"
14+
"github.com/flanksource/commons/har"
15+
"gopkg.in/yaml.v3"
1316
"github.com/flanksource/commons/logger"
1417
"github.com/flanksource/duty"
1518
"github.com/flanksource/duty/models"
@@ -1002,6 +1005,7 @@ func (s ScrapeResult) Columns() []api.ColumnDef {
10021005
clicky.Column("Name").Build(),
10031006
clicky.Column("Type").Build(),
10041007
clicky.Column("Health").Build(),
1008+
clicky.Column("Details").Build(),
10051009
clicky.Column("Error").Build(),
10061010
}
10071011

@@ -1012,6 +1016,7 @@ func (s ScrapeResult) Row() map[string]any {
10121016
row["Name"] = clicky.Text(s.Name)
10131017
row["Type"] = clicky.Text(s.Type)
10141018
row["Health"] = clicky.Text(string(s.Health))
1019+
row["Details"] = s.configDetails()
10151020
if s.Error != nil {
10161021
row["Error"] = clicky.Text(s.Error.Error())
10171022
} else {
@@ -1020,6 +1025,204 @@ func (s ScrapeResult) Row() map[string]any {
10201025
return row
10211026
}
10221027

1028+
func (s ScrapeResult) configDetails() api.Collapsed {
1029+
if s.Config == nil {
1030+
return clicky.Collapsed("empty", clicky.Text(""))
1031+
}
1032+
1033+
var data any
1034+
switch v := s.Config.(type) {
1035+
case string:
1036+
if json.Unmarshal([]byte(v), &data) != nil {
1037+
data = v
1038+
}
1039+
default:
1040+
data = v
1041+
}
1042+
1043+
b, err := yaml.Marshal(data)
1044+
if err != nil {
1045+
b = []byte(fmt.Sprintf("%v", s.Config))
1046+
}
1047+
yamlStr := string(b)
1048+
1049+
content := clicky.Text("")
1050+
if len(s.Labels) > 0 {
1051+
content = content.Append("Labels: ", "text-gray-500 font-medium").Append(clicky.Map(s.Labels, "badge")).NewLine()
1052+
}
1053+
if len(s.Tags) > 0 {
1054+
content = content.Append("Tags: ", "text-gray-500 font-medium").Append(clicky.Map(s.Tags, "badge")).NewLine()
1055+
}
1056+
content = content.Append(clicky.CodeBlock("yaml", yamlStr), "min-w-[600px] block")
1057+
1058+
label := fmt.Sprintf("Config (%d bytes)", len(yamlStr))
1059+
return clicky.Collapsed(label, content)
1060+
}
1061+
1062+
// CountsGrid renders scrape result counts as a 2-column grid.
// +kubebuilder:object:generate=false
type CountsGrid []countEntry

// countEntry pairs a human-readable label with its count.
type countEntry struct {
	label string
	count int
}

// HTML renders the grid as a Tailwind-styled two-column layout, one
// label/count cell per entry.
func (g CountsGrid) HTML() string {
	var sb strings.Builder
	sb.WriteString(`<div class="grid grid-cols-2 gap-x-8 gap-y-2">`)
	for _, entry := range g {
		fmt.Fprintf(&sb,
			`<div class="flex justify-between px-3 py-1 bg-gray-50 rounded">`+
				`<span class="text-sm font-medium text-gray-500">%s</span>`+
				`<span class="text-sm text-gray-900">%d</span>`+
				`</div>`, entry.label, entry.count)
	}
	sb.WriteString(`</div>`)
	return sb.String()
}

// String renders the grid as "label: count" pairs joined by ", ".
func (g CountsGrid) String() string {
	parts := make([]string, 0, len(g))
	for _, entry := range g {
		parts = append(parts, fmt.Sprintf("%s: %d", entry.label, entry.count))
	}
	return strings.Join(parts, ", ")
}

// ANSI renders the grid for terminal output; identical to String.
func (g CountsGrid) ANSI() string { return g.String() }

// Markdown renders the grid for markdown output; identical to String.
func (g CountsGrid) Markdown() string { return g.String() }
1095+
1096+
// BuildCounts returns scrape result counts as a 2-column grid.
1097+
func BuildCounts(all FullScrapeResults) CountsGrid {
1098+
return CountsGrid{
1099+
{"Configs", len(all.Configs)},
1100+
{"Analysis", len(all.Analysis)},
1101+
{"Changes", len(all.Changes)},
1102+
{"Relationships", len(all.Relationships)},
1103+
{"External Roles", len(all.ExternalRoles)},
1104+
{"External Users", len(all.ExternalUsers)},
1105+
{"External Groups", len(all.ExternalGroups)},
1106+
{"External User Groups", len(all.ExternalUserGroups)},
1107+
{"Config Access", len(all.ConfigAccess)},
1108+
{"Config Access Logs", len(all.ConfigAccessLogs)},
1109+
}
1110+
}
1111+
1112+
var ansiEscapeRegex = regexp.MustCompile(`\x1b\[[0-9;]*m`)
1113+
1114+
// LogLine is a single log entry that renders as a table row with colored level prefix.
1115+
// +kubebuilder:object:generate=false
1116+
type LogLine struct {
1117+
text api.Text
1118+
}
1119+
1120+
func (l LogLine) Columns() []api.ColumnDef {
1121+
return []api.ColumnDef{
1122+
clicky.Column("Line").Build(),
1123+
}
1124+
}
1125+
1126+
func (l LogLine) Row() map[string]any {
1127+
return map[string]any{"Line": l.text}
1128+
}
1129+
1130+
// BuildLogLines parses raw log text into LogLine rows for table rendering.
1131+
func BuildLogLines(rawLogs string) []LogLine {
1132+
cleaned := ansiEscapeRegex.ReplaceAllString(rawLogs, "")
1133+
lines := strings.Split(strings.TrimRight(cleaned, "\n"), "\n")
1134+
if len(lines) == 0 || (len(lines) == 1 && lines[0] == "") {
1135+
return nil
1136+
}
1137+
1138+
out := make([]LogLine, 0, len(lines))
1139+
for _, line := range lines {
1140+
out = append(out, LogLine{text: colorLogLine(line)})
1141+
}
1142+
return out
1143+
}
1144+
1145+
var logLevelRegex = regexp.MustCompile(`\s(INF|ERR|WRN|DBG\S*|TRC\S*|FTL)\s`)
1146+
1147+
var logLevelColors = map[string]string{
1148+
"INF": "text-green-600",
1149+
"ERR": "text-red-600",
1150+
"WRN": "text-yellow-600",
1151+
"DBG": "text-blue-600",
1152+
"TRC": "text-gray-500",
1153+
"FTL": "text-red-600",
1154+
}
1155+
1156+
// colorLogLine highlights the log level prefix with appropriate colors.
1157+
// Matches DBG, DBG-1, TRC-2, etc.
1158+
func colorLogLine(line string) api.Text {
1159+
loc := logLevelRegex.FindStringIndex(line)
1160+
if loc == nil {
1161+
return clicky.Text(line)
1162+
}
1163+
1164+
// loc covers the match including surrounding spaces
1165+
tag := strings.TrimSpace(line[loc[0]:loc[1]])
1166+
before := line[:loc[0]+1]
1167+
after := line[loc[1]-1:]
1168+
1169+
// Base level is the prefix before any dash (DBG-1 → DBG)
1170+
base := tag
1171+
if i := strings.IndexByte(tag, '-'); i >= 0 {
1172+
base = tag[:i]
1173+
}
1174+
color := logLevelColors[base]
1175+
1176+
return clicky.Text(before).Append(tag, color).Append(after)
1177+
}
1178+
1179+
// HAREntry renders a single HAR request/response as a table row.
1180+
// +kubebuilder:object:generate=false
1181+
type HAREntry struct {
1182+
Method string
1183+
URL string
1184+
Status int
1185+
Duration string
1186+
}
1187+
1188+
func (h HAREntry) Columns() []api.ColumnDef {
1189+
return []api.ColumnDef{
1190+
clicky.Column("Method").Build(),
1191+
clicky.Column("URL").Build(),
1192+
clicky.Column("Status").Build(),
1193+
clicky.Column("Duration").Build(),
1194+
}
1195+
}
1196+
1197+
func (h HAREntry) Row() map[string]any {
1198+
statusStyle := "text-green-600"
1199+
if h.Status >= 400 {
1200+
statusStyle = "text-red-600"
1201+
} else if h.Status >= 300 {
1202+
statusStyle = "text-yellow-600"
1203+
}
1204+
return map[string]any{
1205+
"Method": clicky.Text(h.Method),
1206+
"URL": clicky.Text(h.URL),
1207+
"Status": clicky.Text(fmt.Sprintf("%d", h.Status)).WithStyles(statusStyle),
1208+
"Duration": clicky.Text(h.Duration),
1209+
}
1210+
}
1211+
1212+
// BuildHAREntries converts HAR entries into table rows.
1213+
func BuildHAREntries(entries []har.Entry) []HAREntry {
1214+
out := make([]HAREntry, 0, len(entries))
1215+
for _, e := range entries {
1216+
out = append(out, HAREntry{
1217+
Method: e.Request.Method,
1218+
URL: e.Request.URL,
1219+
Status: e.Response.Status,
1220+
Duration: fmt.Sprintf("%.0fms", e.Time),
1221+
})
1222+
}
1223+
return out
1224+
}
1225+
10231226
// IsMetadataOnly reports whether the result carries no config payload.
func (s ScrapeResult) IsMetadataOnly() bool {
	return s.Config == nil
}

0 commit comments

Comments
 (0)