Skip to content

Commit c18fc86

Browse files
committed
Add anchor around repository paths in our GHA logs viewer
1 parent 12fa5d2 commit c18fc86

File tree

1 file changed

+98
-29
lines changed

1 file changed

+98
-29
lines changed

src/gha_logs.rs

Lines changed: 98 additions & 29 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,4 @@
1-
use crate::github;
1+
use crate::github::{self, WorkflowRunJob};
22
use crate::handlers::Context;
33
use anyhow::Context as _;
44
use hyper::header::{CACHE_CONTROL, CONTENT_SECURITY_POLICY, CONTENT_TYPE};
@@ -14,11 +14,17 @@ const MAX_CACHE_CAPACITY_BYTES: u64 = 50 * 1024 * 1024; // 50 Mb
1414
#[derive(Default)]
1515
pub struct GitHubActionLogsCache {
1616
capacity: u64,
17-
entries: VecDeque<(String, Arc<String>)>,
17+
entries: VecDeque<(String, Arc<CachedLog>)>,
18+
}
19+
20+
pub struct CachedLog {
21+
job: WorkflowRunJob,
22+
tree_roots: String,
23+
logs: String,
1824
}
1925

2026
impl GitHubActionLogsCache {
21-
pub fn get(&mut self, key: &String) -> Option<Arc<String>> {
27+
pub fn get(&mut self, key: &String) -> Option<Arc<CachedLog>> {
2228
if let Some(pos) = self.entries.iter().position(|(k, _)| k == key) {
2329
// Move previously cached entry to the front
2430
let entry = self.entries.remove(pos).unwrap();
@@ -29,26 +35,26 @@ impl GitHubActionLogsCache {
2935
}
3036
}
3137

32-
pub fn put(&mut self, key: String, value: Arc<String>) -> Arc<String> {
33-
if value.len() as u64 > MAX_CACHE_CAPACITY_BYTES {
38+
pub fn put(&mut self, key: String, value: Arc<CachedLog>) -> Arc<CachedLog> {
39+
if value.logs.len() as u64 > MAX_CACHE_CAPACITY_BYTES {
3440
// Entry is too large, don't cache, return as is
3541
return value;
3642
}
3743

3844
// Remove duplicate or last entry when necessary
3945
let removed = if let Some(pos) = self.entries.iter().position(|(k, _)| k == &key) {
4046
self.entries.remove(pos)
41-
} else if self.capacity + value.len() as u64 >= MAX_CACHE_CAPACITY_BYTES {
47+
} else if self.capacity + value.logs.len() as u64 >= MAX_CACHE_CAPACITY_BYTES {
4248
self.entries.pop_back()
4349
} else {
4450
None
4551
};
4652
if let Some(removed) = removed {
47-
self.capacity -= removed.1.len() as u64;
53+
self.capacity -= removed.1.logs.len() as u64;
4854
}
4955

5056
// Add entry to the front of the list and return it
51-
self.capacity += value.len() as u64;
57+
self.capacity += value.logs.len() as u64;
5258
self.entries.push_front((key, value.clone()));
5359
value
5460
}
@@ -99,34 +105,75 @@ async fn process_logs(
99105

100106
let log_uuid = format!("{owner}/{repo}${log_id}");
101107

102-
let logs = 'logs: {
108+
let CachedLog {
109+
job,
110+
tree_roots,
111+
logs,
112+
} = &*'logs: {
103113
if let Some(logs) = ctx.gha_logs.write().await.get(&log_uuid) {
104114
tracing::info!("gha_logs: cache hit for {log_uuid}");
105115
break 'logs logs;
106116
}
107117

108118
tracing::info!("gha_logs: cache miss for {log_uuid}");
109-
let logs = ctx
110-
.github
111-
.raw_job_logs(
112-
&github::IssueRepository {
113-
organization: owner.to_string(),
114-
repository: repo.to_string(),
115-
},
116-
log_id,
117-
)
118-
.await
119-
.context("unable to get the raw logs")?;
120-
121-
let json_logs = serde_json::to_string(&*logs).context("unable to JSON-ify the raw logs")?;
122-
123-
ctx.gha_logs
124-
.write()
125-
.await
126-
.put(log_uuid.clone(), json_logs.into())
119+
120+
let repo = github::IssueRepository {
121+
organization: owner.to_string(),
122+
repository: repo.to_string(),
123+
};
124+
125+
let job_and_tree_roots = async {
126+
let job = ctx
127+
.github
128+
.workflow_run_job(&repo, log_id)
129+
.await
130+
.context("unable to fetch job details")?;
131+
let trees = ctx
132+
.github
133+
.repo_git_trees(&repo, &job.head_sha)
134+
.await
135+
.context("unable to fetch git tree for the repository")?;
136+
137+
let tree_roots: Vec<_> = trees
138+
.tree
139+
.iter()
140+
.filter_map(|t| (t.object_type == "tree").then_some(&t.path))
141+
.collect();
142+
let tree_roots =
143+
serde_json::to_string(&tree_roots).context("unable to serialize tree roots")?;
144+
145+
anyhow::Result::<_>::Ok((job, tree_roots))
146+
};
147+
148+
let logs = async {
149+
let logs = ctx
150+
.github
151+
.raw_job_logs(&repo, log_id)
152+
.await
153+
.context("unable to get the raw logs")?;
154+
155+
let json_logs =
156+
serde_json::to_string(&*logs).context("unable to JSON-ify the raw logs")?;
157+
158+
anyhow::Result::<_>::Ok(json_logs)
159+
};
160+
161+
let (job_and_tree_roots, logs) = futures::join!(job_and_tree_roots, logs);
162+
let ((job, tree_roots), logs) = (job_and_tree_roots?, logs?);
163+
164+
ctx.gha_logs.write().await.put(
165+
log_uuid.clone(),
166+
CachedLog {
167+
job,
168+
tree_roots,
169+
logs,
170+
}
171+
.into(),
172+
)
127173
};
128174

129175
let nonce = Uuid::new_v4().to_hyphenated().to_string();
176+
let sha = &*job.head_sha;
130177

131178
let html = format!(
132179
r###"<!DOCTYPE html>
@@ -157,11 +204,16 @@ async fn process_logs(
157204
.warning-marker {{
158205
color: #c69026;
159206
}}
207+
.path-marker {{
208+
color: #26c6a8;
209+
}}
160210
</style>
161211
<script type="module" nonce="{nonce}">
162212
import {{ AnsiUp }} from '{ANSI_UP_URL}'
163213
164214
var logs = {logs};
215+
var tree_roots = {tree_roots};
216+
165217
var ansi_up = new AnsiUp();
166218
167219
// 1. Transform the ANSI escape codes to HTML
@@ -189,11 +241,28 @@ async fn process_logs(
189241
`<span class="warning-marker">##[warning]</span>`
190242
);
191243
192-
// 5. Add the html to the DOM
244+
// 5. Add anchors to GitHub around some paths
245+
const pathRegex = /((?:[A-Za-z]:)?[a-zA-Z0-9_.$-]*(?:[\\/][a-zA-Z0-9_$.-]+)+)(?::([0-9]*):([0-9]*))?/g;
246+
html = html.replace(pathRegex, (match, path, line, col) => {{
247+
const removePrefix = (value, prefix) =>
248+
value.startsWith(prefix) ? value.slice(prefix.length) : value;
249+
250+
var path = removePrefix(removePrefix(path, "/checkout"), "/");
251+
var root = path.substring(0, path.indexOf("/"));
252+
253+
if (tree_roots.includes(root)) {{
254+
const pos = (line !== undefined) ? `#L${{line}}` : "";
255+
return `<a href="https://github.com/{owner}/{repo}/blob/{sha}/${{path}}${{pos}}" class="path-marker">${{match}}</a>`;
256+
}}
257+
258+
return match;
259+
}});
260+
261+
// 6. Add the html to the DOM
193262
var cdiv = document.getElementById("console");
194263
cdiv.innerHTML = html;
195264
196-
// 6. If no anchor is given, scroll to the last error
265+
// 7. If no anchor is given, scroll to the last error
197266
if (location.hash === "" && errorCounter >= 0) {{
198267
const hasSmallViewport = window.innerWidth <= 750;
199268
document.getElementById(`error-${{errorCounter}}`).scrollIntoView({{

0 commit comments

Comments
 (0)