This repository was archived by the owner on Jul 22, 2025. It is now read-only.

Commit 6f49653: improve report
Parent: 62f3cf0

File tree: 2 files changed (+94, -30 lines)

assets/javascripts/discourse/components/ai-usage.gjs

Lines changed: 62 additions & 7 deletions
@@ -51,29 +51,84 @@ export default class AiUsage extends Component {
     this.onFilterChange();
   }
 
+  normalizeTimeSeriesData(data) {
+    if (!data?.length) {
+      return [];
+    }
+
+    const startDate = moment(this.startDate).startOf("day");
+    const endDate = moment(this.endDate).endOf("day");
+    const normalized = [];
+
+    // Create a map of existing data points
+    const dataMap = new Map(
+      data.map((d) => [moment(d.period).format("YYYY-MM-DD"), d])
+    );
+
+    // Fill in all days
+    for (
+      let m = moment(startDate);
+      m.isSameOrBefore(endDate);
+      m.add(1, "days")
+    ) {
+      const dateKey = m.format("YYYY-MM-DD");
+      const existingData = dataMap.get(dateKey);
+
+      normalized.push(
+        existingData || {
+          period: m.format(),
+          total_tokens: 0,
+          total_cached_tokens: 0,
+          total_request_tokens: 0,
+          total_response_tokens: 0,
+        }
+      );
+    }
+
+    return normalized;
+  }
+
+  // Then modify the chartConfig getter to use this normalized data:
   get chartConfig() {
     if (!this.data?.data) {
       return;
     }
 
+    const normalizedData = this.normalizeTimeSeriesData(this.data.data);
+
     return {
-      type: "line",
+      type: "bar",
       data: {
-        labels: this.data.data.map((row) => row.period),
+        labels: normalizedData.map((row) => {
+          const date = moment(row.period);
+          return date.format("DD-MMM");
+        }),
         datasets: [
           {
-            label: "Tokens",
-            data: this.data.data.map((row) => row.total_tokens),
-            fill: false,
-            borderColor: "rgb(75, 192, 192)",
-            tension: 0.1,
+            label: "Response Tokens",
+            data: normalizedData.map((row) => row.total_response_tokens),
+            backgroundColor: "rgba(75, 192, 192, 0.8)",
+          },
+          {
+            label: "Request Tokens",
+            data: normalizedData.map((row) => row.total_request_tokens),
+            backgroundColor: "rgba(153, 102, 255, 0.8)",
+          },
+          {
+            label: "Cached Tokens",
+            data: normalizedData.map((row) => row.total_cached_tokens),
+            backgroundColor: "rgba(255, 159, 64, 0.8)",
           },
         ],
       },
       options: {
         responsive: true,
         scales: {
+          x: {
+            stacked: true,
+          },
           y: {
+            stacked: true,
             beginAtZero: true,
           },
         },
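For illustration: the new normalizeTimeSeriesData method zero-fills any day in the selected range that has no usage rows, so the stacked bars render over a continuous date axis. Below is a minimal standalone sketch of that gap-filling idea, not the component code itself; the function name and sample data are made up for the example, and moment.js is assumed to be available just as it is in the component.

```javascript
import moment from "moment";

// Hypothetical standalone version of the gap-filling step, for illustration only.
function fillMissingDays(data, startDate, endDate) {
  // Index existing rows by calendar day.
  const dataMap = new Map(
    data.map((d) => [moment(d.period).format("YYYY-MM-DD"), d])
  );

  const end = moment(endDate).endOf("day");
  const filled = [];
  for (
    let m = moment(startDate).startOf("day");
    m.isSameOrBefore(end);
    m.add(1, "days")
  ) {
    const key = m.format("YYYY-MM-DD");
    // Reuse the real row when present, otherwise emit an all-zero placeholder.
    filled.push(
      dataMap.get(key) || {
        period: m.format(),
        total_tokens: 0,
        total_cached_tokens: 0,
        total_request_tokens: 0,
        total_response_tokens: 0,
      }
    );
  }
  return filled;
}

// Example: two days of data across a four-day window produces four rows,
// with zero-filled entries for 2024-01-02 and 2024-01-04.
const sparse = [
  { period: "2024-01-01", total_request_tokens: 80, total_response_tokens: 40, total_cached_tokens: 0, total_tokens: 120 },
  { period: "2024-01-03", total_request_tokens: 30, total_response_tokens: 30, total_cached_tokens: 0, total_tokens: 60 },
];
console.log(fillMissingDays(sparse, "2024-01-01", "2024-01-04").length); // 4
```

Without the zero rows, the chart would only show bars for days present in the query result, so quiet days would silently drop off the x-axis.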

lib/completions/report.rb

Lines changed: 32 additions & 23 deletions
@@ -28,35 +28,44 @@ def guess_period(period)
 
   def tokens_by_period(period = nil)
     period = guess_period(period)
-    base_query.group("DATE_TRUNC('#{period}', created_at)").select(
-      "DATE_TRUNC('#{period}', created_at) as period",
-      "SUM(request_tokens + response_tokens) as total_tokens",
-      "SUM(COALESCE(cached_tokens,0)) as total_cached_tokens",
-      "SUM(request_tokens) as total_request_tokens",
-      "SUM(response_tokens) as total_response_tokens",
-    )
+    base_query
+      .group("DATE_TRUNC('#{period}', created_at)")
+      .order("DATE_TRUNC('#{period}', created_at)")
+      .select(
+        "DATE_TRUNC('#{period}', created_at) as period",
+        "SUM(request_tokens + response_tokens) as total_tokens",
+        "SUM(COALESCE(cached_tokens,0)) as total_cached_tokens",
+        "SUM(request_tokens) as total_request_tokens",
+        "SUM(response_tokens) as total_response_tokens",
+      )
   end
 
   def feature_breakdown
-    base_query.group(:feature_name).select(
-      "feature_name",
-      "COUNT(*) as usage_count",
-      "SUM(request_tokens + response_tokens) as total_tokens",
-      "SUM(COALESCE(cached_tokens,0)) as total_cached_tokens",
-      "SUM(request_tokens) as total_request_tokens",
-      "SUM(response_tokens) as total_response_tokens",
-    )
+    base_query
+      .group(:feature_name)
+      .order("usage_count DESC")
+      .select(
+        "feature_name",
+        "COUNT(*) as usage_count",
+        "SUM(request_tokens + response_tokens) as total_tokens",
+        "SUM(COALESCE(cached_tokens,0)) as total_cached_tokens",
+        "SUM(request_tokens) as total_request_tokens",
+        "SUM(response_tokens) as total_response_tokens",
+      )
   end
 
   def model_breakdown
-    base_query.group(:language_model).select(
-      "language_model as llm",
-      "COUNT(*) as usage_count",
-      "SUM(request_tokens + response_tokens) as total_tokens",
-      "SUM(COALESCE(cached_tokens,0)) as total_cached_tokens",
-      "SUM(request_tokens) as total_request_tokens",
-      "SUM(response_tokens) as total_response_tokens",
-    )
+    base_query
+      .group(:language_model)
+      .order("usage_count DESC")
+      .select(
+        "language_model as llm",
+        "COUNT(*) as usage_count",
+        "SUM(request_tokens + response_tokens) as total_tokens",
+        "SUM(COALESCE(cached_tokens,0)) as total_cached_tokens",
+        "SUM(request_tokens) as total_request_tokens",
+        "SUM(response_tokens) as total_response_tokens",
+      )
   end
 
   def tokens_per_hour
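For context, the report queries now chain .order so the time series comes back in chronological order and the breakdowns come back sorted by usage; without an ORDER BY, grouped rows are returned in an unspecified order. A rough sketch of the resulting query shape, using a hypothetical AuditLog model in place of whatever relation base_query actually returns (the model name and the printed columns are assumptions for illustration):

```ruby
# Hypothetical model standing in for the relation behind base_query.
class AuditLog < ActiveRecord::Base
end

# Same shape as feature_breakdown: grouped, aggregated, and now explicitly ordered.
# PostgreSQL allows ORDER BY to reference the "usage_count" alias defined in the SELECT list.
rows =
  AuditLog
    .group(:feature_name)
    .order("usage_count DESC")
    .select(
      "feature_name",
      "COUNT(*) as usage_count",
      "SUM(request_tokens + response_tokens) as total_tokens",
    )

rows.each { |row| puts "#{row.feature_name}: #{row.usage_count} calls, #{row.total_tokens} tokens" }
```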
