Skip to content
Merged
Show file tree
Hide file tree
Changes from 33 commits
Commits
Show all changes
46 commits
Select commit Hold shift + click to select a range
8a5efb9
add sliding window for actions logs
mattdholloway Aug 12, 2025
e6ef962
refactor: fix sliding
mattdholloway Aug 12, 2025
271c7c2
remove trim content
mattdholloway Aug 12, 2025
e65c0f0
only use up to 1mb of memory for logs
mattdholloway Aug 12, 2025
ade3852
update to tail lines in second pass
mattdholloway Aug 13, 2025
5a76cbd
add better memory usage calculation
mattdholloway Aug 13, 2025
8e60fb2
increase window size to 5MB
mattdholloway Aug 13, 2025
e1c3143
update test
mattdholloway Aug 13, 2025
75dc8e7
Merge branch 'main' into actions-job-log-buffer
mattdholloway Aug 13, 2025
b128e44
Merge branch 'actions-job-log-buffer' of https://github.com/github/gi…
mattdholloway Aug 13, 2025
88d16d2
Merge branch 'main' into actions-job-log-buffer
mattdholloway Aug 14, 2025
4bf84b2
Merge branch 'main' into actions-job-log-buffer
mattdholloway Aug 15, 2025
6b8f2ba
update vers
mattdholloway Aug 15, 2025
8002fbd
undo vers change
mattdholloway Aug 15, 2025
52e531e
add incremental memory tracking
mattdholloway Aug 15, 2025
8f85398
use ring buffer
mattdholloway Aug 15, 2025
0d19480
remove unused ctx param
mattdholloway Aug 15, 2025
f104e67
remove manual GC clear
mattdholloway Aug 15, 2025
9d273b9
fix cca feedback
mattdholloway Aug 15, 2025
4e43327
extract ring buffer logic to new package
mattdholloway Aug 15, 2025
2ff2d4f
handle log content processing errors and use correct param for maxjob…
mattdholloway Aug 15, 2025
c6f5f7f
fix tailing
mattdholloway Aug 15, 2025
1c1061c
account for if tailLines exceeds window size
mattdholloway Aug 15, 2025
75b8c94
add profiling that's reusable
mattdholloway Aug 15, 2025
71bfac8
remove profiler testing
mattdholloway Aug 15, 2025
a43b03c
refactor profiler: introduce safeMemoryDelta for accurate memory delt…
mattdholloway Aug 15, 2025
d9c8825
linter fixes
mattdholloway Aug 15, 2025
ec070ee
Update pkg/buffer/buffer.go
mattdholloway Aug 15, 2025
0434b7e
use flag for maxJobLogLines
mattdholloway Aug 18, 2025
fb301c6
add param passing for context window size
mattdholloway Aug 18, 2025
106d802
refactor: rename contextWindowSize to contentWindowSize for consistency
mattdholloway Aug 18, 2025
516c0f7
fix: use tailLines if bigger but only if <= 5000
mattdholloway Aug 18, 2025
6c3c31a
fix: limit tailLines to a maximum of 500 for log content download
mattdholloway Aug 18, 2025
82d4ce2
Update cmd/github-mcp-server/main.go
mattdholloway Aug 18, 2025
e40e289
Update cmd/github-mcp-server/main.go
mattdholloway Aug 18, 2025
4310db8
move profiler to internal/
mattdholloway Aug 18, 2025
e0767fe
update actions test with new profiler location
mattdholloway Aug 18, 2025
9677c07
fix: adjust buffer size limits
mattdholloway Aug 18, 2025
ed6bc2d
make line buffer 1028kb
mattdholloway Aug 18, 2025
696e5fd
fix mod path
mattdholloway Aug 18, 2025
10e1995
change test to use same buffer size as normal use
mattdholloway Aug 18, 2025
2eb2e16
improve test for non-sliding window implementation to not count empty…
mattdholloway Aug 18, 2025
47aaa01
make test memory measurement more accurate
mattdholloway Aug 18, 2025
6b910ff
Merge branch 'main' into actions-job-log-buffer
mattdholloway Aug 18, 2025
4a673c9
Merge branch 'main' into actions-job-log-buffer
mattdholloway Aug 18, 2025
556a41c
remove impossible conditional
mattdholloway Aug 18, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions cmd/github-mcp-server/generate_docs.go
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ func generateReadmeDocs(readmePath string) error {
t, _ := translations.TranslationHelper()

// Create toolset group with mock clients
tsg := github.DefaultToolsetGroup(false, mockGetClient, mockGetGQLClient, mockGetRawClient, t)
tsg := github.DefaultToolsetGroup(false, mockGetClient, mockGetGQLClient, mockGetRawClient, t, 5000)

// Generate toolsets documentation
toolsetsDoc := generateToolsetsDoc(tsg)
Expand Down Expand Up @@ -302,7 +302,7 @@ func generateRemoteToolsetsDoc() string {
t, _ := translations.TranslationHelper()

// Create toolset group with mock clients
tsg := github.DefaultToolsetGroup(false, mockGetClient, mockGetGQLClient, mockGetRawClient, t)
tsg := github.DefaultToolsetGroup(false, mockGetClient, mockGetGQLClient, mockGetRawClient, t, 5000)

// Generate table header
buf.WriteString("| Name | Description | API URL | 1-Click Install (VS Code) | Read-only Link | 1-Click Read-only Install (VS Code) |\n")
Expand Down
3 changes: 3 additions & 0 deletions cmd/github-mcp-server/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ var (
ExportTranslations: viper.GetBool("export-translations"),
EnableCommandLogging: viper.GetBool("enable-command-logging"),
LogFilePath: viper.GetString("log-file"),
ContentWindowSize: viper.GetInt("content_window_size"),
}
return ghmcp.RunStdioServer(stdioServerConfig)
},
Expand All @@ -75,6 +76,7 @@ func init() {
rootCmd.PersistentFlags().Bool("enable-command-logging", false, "When enabled, the server will log all command requests and responses to the log file")
rootCmd.PersistentFlags().Bool("export-translations", false, "Save translations to a JSON file")
rootCmd.PersistentFlags().String("gh-host", "", "Specify the GitHub hostname (for GitHub Enterprise etc.)")
rootCmd.PersistentFlags().Int("content-window-size", 5000, "Specify the content window size")

// Bind flag to viper
_ = viper.BindPFlag("toolsets", rootCmd.PersistentFlags().Lookup("toolsets"))
Expand All @@ -84,6 +86,7 @@ func init() {
_ = viper.BindPFlag("enable-command-logging", rootCmd.PersistentFlags().Lookup("enable-command-logging"))
_ = viper.BindPFlag("export-translations", rootCmd.PersistentFlags().Lookup("export-translations"))
_ = viper.BindPFlag("host", rootCmd.PersistentFlags().Lookup("gh-host"))
_ = viper.BindPFlag("content_window_size", rootCmd.PersistentFlags().Lookup("content-window-size"))

// Add subcommands
rootCmd.AddCommand(stdioCmd)
Expand Down
23 changes: 15 additions & 8 deletions internal/ghmcp/server.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,9 @@ type MCPServerConfig struct {

// Translator provides translated text for the server tooling
Translator translations.TranslationHelperFunc

// Content window size
ContentWindowSize int
}

const stdioServerLogPrefix = "stdioserver"
Expand Down Expand Up @@ -132,7 +135,7 @@ func NewMCPServer(cfg MCPServerConfig) (*server.MCPServer, error) {
}

// Create default toolsets
tsg := github.DefaultToolsetGroup(cfg.ReadOnly, getClient, getGQLClient, getRawClient, cfg.Translator)
tsg := github.DefaultToolsetGroup(cfg.ReadOnly, getClient, getGQLClient, getRawClient, cfg.Translator, cfg.ContentWindowSize)
err = tsg.EnableToolsets(enabledToolsets)

if err != nil {
Expand Down Expand Up @@ -180,6 +183,9 @@ type StdioServerConfig struct {

// Path to the log file if not stderr
LogFilePath string

// Content window size
ContentWindowSize int
}

// RunStdioServer is not concurrent safe.
Expand All @@ -191,13 +197,14 @@ func RunStdioServer(cfg StdioServerConfig) error {
t, dumpTranslations := translations.TranslationHelper()

ghServer, err := NewMCPServer(MCPServerConfig{
Version: cfg.Version,
Host: cfg.Host,
Token: cfg.Token,
EnabledToolsets: cfg.EnabledToolsets,
DynamicToolsets: cfg.DynamicToolsets,
ReadOnly: cfg.ReadOnly,
Translator: t,
Version: cfg.Version,
Host: cfg.Host,
Token: cfg.Token,
EnabledToolsets: cfg.EnabledToolsets,
DynamicToolsets: cfg.DynamicToolsets,
ReadOnly: cfg.ReadOnly,
Translator: t,
ContentWindowSize: cfg.ContentWindowSize,
})
if err != nil {
return fmt.Errorf("failed to create MCP server: %w", err)
Expand Down
67 changes: 67 additions & 0 deletions pkg/buffer/buffer.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
package buffer

import (
"bufio"
"fmt"
"net/http"
"strings"
)

// ProcessResponseAsRingBufferToEnd reads the body of an HTTP response line by line,
// storing only the last maxJobLogLines lines using a ring buffer (sliding window).
// This efficiently retains the most recent lines, overwriting older ones as needed.
//
// Parameters:
// httpResp: The HTTP response whose body will be read.
// maxJobLogLines: The maximum number of log lines to retain.
//
// Returns:
// string: The concatenated log lines (up to maxJobLogLines), separated by newlines.
// int: The total number of lines read from the response.
// *http.Response: The original HTTP response.
// error: Any error encountered during reading.
//
// The function uses a ring buffer to efficiently store only the last maxJobLogLines lines.
// If the response contains more lines than maxJobLogLines, only the most recent lines are kept.
func ProcessResponseAsRingBufferToEnd(httpResp *http.Response, maxJobLogLines int) (string, int, *http.Response, error) {
lines := make([]string, maxJobLogLines)
validLines := make([]bool, maxJobLogLines)
totalLines := 0
writeIndex := 0

scanner := bufio.NewScanner(httpResp.Body)
scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024)

for scanner.Scan() {
line := scanner.Text()
totalLines++

lines[writeIndex] = line
validLines[writeIndex] = true
writeIndex = (writeIndex + 1) % maxJobLogLines
}

if err := scanner.Err(); err != nil {
return "", 0, httpResp, fmt.Errorf("failed to read log content: %w", err)
}

var result []string
linesInBuffer := totalLines
if linesInBuffer > maxJobLogLines {
linesInBuffer = maxJobLogLines
}

startIndex := 0
if totalLines > maxJobLogLines {
startIndex = writeIndex
}

for i := 0; i < linesInBuffer; i++ {
idx := (startIndex + i) % maxJobLogLines
if validLines[idx] {
result = append(result, lines[idx])
}
}

return strings.Join(result, "\n"), totalLines, httpResp, nil
}
72 changes: 34 additions & 38 deletions pkg/github/actions.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,13 @@ import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"strconv"
"strings"

buffer "github.com/github/github-mcp-server/pkg/buffer"
ghErrors "github.com/github/github-mcp-server/pkg/errors"
"github.com/github/github-mcp-server/pkg/profiler"
"github.com/github/github-mcp-server/pkg/translations"
"github.com/google/go-github/v74/github"
"github.com/mark3labs/mcp-go/mcp"
Expand Down Expand Up @@ -530,7 +531,7 @@ func ListWorkflowJobs(getClient GetClientFn, t translations.TranslationHelperFun
}

// GetJobLogs creates a tool to download logs for a specific workflow job or efficiently get all failed job logs for a workflow run
func GetJobLogs(getClient GetClientFn, t translations.TranslationHelperFunc) (tool mcp.Tool, handler server.ToolHandlerFunc) {
func GetJobLogs(getClient GetClientFn, t translations.TranslationHelperFunc, contentWindowSize int) (tool mcp.Tool, handler server.ToolHandlerFunc) {
return mcp.NewTool("get_job_logs",
mcp.WithDescription(t("TOOL_GET_JOB_LOGS_DESCRIPTION", "Download logs for a specific workflow job or efficiently get all failed job logs for a workflow run")),
mcp.WithToolAnnotation(mcp.ToolAnnotation{
Expand Down Expand Up @@ -613,18 +614,18 @@ func GetJobLogs(getClient GetClientFn, t translations.TranslationHelperFunc) (to

if failedOnly && runID > 0 {
// Handle failed-only mode: get logs for all failed jobs in the workflow run
return handleFailedJobLogs(ctx, client, owner, repo, int64(runID), returnContent, tailLines)
return handleFailedJobLogs(ctx, client, owner, repo, int64(runID), returnContent, tailLines, contentWindowSize)
} else if jobID > 0 {
// Handle single job mode
return handleSingleJobLogs(ctx, client, owner, repo, int64(jobID), returnContent, tailLines)
return handleSingleJobLogs(ctx, client, owner, repo, int64(jobID), returnContent, tailLines, contentWindowSize)
}

return mcp.NewToolResultError("Either job_id must be provided for single job logs, or run_id with failed_only=true for failed job logs"), nil
}
}

// handleFailedJobLogs gets logs for all failed jobs in a workflow run
func handleFailedJobLogs(ctx context.Context, client *github.Client, owner, repo string, runID int64, returnContent bool, tailLines int) (*mcp.CallToolResult, error) {
func handleFailedJobLogs(ctx context.Context, client *github.Client, owner, repo string, runID int64, returnContent bool, tailLines int, contentWindowSize int) (*mcp.CallToolResult, error) {
// First, get all jobs for the workflow run
jobs, resp, err := client.Actions.ListWorkflowJobs(ctx, owner, repo, runID, &github.ListWorkflowJobsOptions{
Filter: "latest",
Expand Down Expand Up @@ -656,7 +657,7 @@ func handleFailedJobLogs(ctx context.Context, client *github.Client, owner, repo
// Collect logs for all failed jobs
var logResults []map[string]any
for _, job := range failedJobs {
jobResult, resp, err := getJobLogData(ctx, client, owner, repo, job.GetID(), job.GetName(), returnContent, tailLines)
jobResult, resp, err := getJobLogData(ctx, client, owner, repo, job.GetID(), job.GetName(), returnContent, tailLines, contentWindowSize)
if err != nil {
// Continue with other jobs even if one fails
jobResult = map[string]any{
Expand Down Expand Up @@ -689,8 +690,8 @@ func handleFailedJobLogs(ctx context.Context, client *github.Client, owner, repo
}

// handleSingleJobLogs gets logs for a single job
func handleSingleJobLogs(ctx context.Context, client *github.Client, owner, repo string, jobID int64, returnContent bool, tailLines int) (*mcp.CallToolResult, error) {
jobResult, resp, err := getJobLogData(ctx, client, owner, repo, jobID, "", returnContent, tailLines)
func handleSingleJobLogs(ctx context.Context, client *github.Client, owner, repo string, jobID int64, returnContent bool, tailLines int, contentWindowSize int) (*mcp.CallToolResult, error) {
jobResult, resp, err := getJobLogData(ctx, client, owner, repo, jobID, "", returnContent, tailLines, contentWindowSize)
if err != nil {
return ghErrors.NewGitHubAPIErrorResponse(ctx, "failed to get job logs", resp, err), nil
}
Expand All @@ -704,7 +705,7 @@ func handleSingleJobLogs(ctx context.Context, client *github.Client, owner, repo
}

// getJobLogData retrieves log data for a single job, either as URL or content
func getJobLogData(ctx context.Context, client *github.Client, owner, repo string, jobID int64, jobName string, returnContent bool, tailLines int) (map[string]any, *github.Response, error) {
func getJobLogData(ctx context.Context, client *github.Client, owner, repo string, jobID int64, jobName string, returnContent bool, tailLines int, contentWindowSize int) (map[string]any, *github.Response, error) {
// Get the download URL for the job logs
url, resp, err := client.Actions.GetWorkflowJobLogs(ctx, owner, repo, jobID, 1)
if err != nil {
Expand All @@ -721,7 +722,7 @@ func getJobLogData(ctx context.Context, client *github.Client, owner, repo strin

if returnContent {
// Download and return the actual log content
content, originalLength, httpResp, err := downloadLogContent(url.String(), tailLines) //nolint:bodyclose // Response body is closed in downloadLogContent, but we need to return httpResp
content, originalLength, httpResp, err := downloadLogContent(ctx, url.String(), tailLines, contentWindowSize) //nolint:bodyclose // Response body is closed in downloadLogContent, but we need to return httpResp
if err != nil {
// To keep the return value consistent wrap the response as a GitHub Response
ghRes := &github.Response{
Expand All @@ -742,9 +743,11 @@ func getJobLogData(ctx context.Context, client *github.Client, owner, repo strin
return result, resp, nil
}

// downloadLogContent downloads the actual log content from a GitHub logs URL
func downloadLogContent(logURL string, tailLines int) (string, int, *http.Response, error) {
httpResp, err := http.Get(logURL) //nolint:gosec // URLs are provided by GitHub API and are safe
func downloadLogContent(ctx context.Context, logURL string, tailLines int, maxLines int) (string, int, *http.Response, error) {
prof := profiler.New(nil, profiler.IsProfilingEnabled())
finish := prof.Start(ctx, "log_buffer_processing")
Copy link
Preview

Copilot AI Aug 18, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Creating a new profiler instance for each call is inefficient. Consider reusing the global profiler or passing the profiler as a parameter to avoid repeated initialization.

Suggested change
finish := prof.Start(ctx, "log_buffer_processing")
finish := globalProfiler.Start(ctx, "log_buffer_processing")

Copilot uses AI. Check for mistakes.


httpResp, err := http.Get(logURL) //nolint:gosec
if err != nil {
return "", 0, httpResp, fmt.Errorf("failed to download logs: %w", err)
}
Expand All @@ -754,36 +757,29 @@ func downloadLogContent(logURL string, tailLines int) (string, int, *http.Respon
return "", 0, httpResp, fmt.Errorf("failed to download logs: HTTP %d", httpResp.StatusCode)
}

content, err := io.ReadAll(httpResp.Body)
if err != nil {
return "", 0, httpResp, fmt.Errorf("failed to read log content: %w", err)
if tailLines <= 0 {
tailLines = 1000
Copy link
Preview

Copilot AI Aug 18, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The magic number 1000 should be defined as a named constant to improve maintainability and make the default value explicit throughout the codebase.

Suggested change
tailLines = 1000
tailLines = DefaultTailLines

Copilot uses AI. Check for mistakes.

}

// Clean up and format the log content for better readability
logContent := strings.TrimSpace(string(content))
bufferSize := maxLines
if tailLines > maxLines && tailLines <= 500 {
bufferSize = tailLines
}

trimmedContent, lineCount := trimContent(logContent, tailLines)
return trimmedContent, lineCount, httpResp, nil
}
processedInput, totalLines, httpResp, err := buffer.ProcessResponseAsRingBufferToEnd(httpResp, bufferSize)
if err != nil {
return "", 0, httpResp, fmt.Errorf("failed to process log content: %w", err)
}

// trimContent trims the content to a maximum length and returns the trimmed content and an original length
func trimContent(content string, tailLines int) (string, int) {
// Truncate to tail_lines if specified
lineCount := 0
if tailLines > 0 {

// Count backwards to find the nth newline from the end and a total number of lines
for i := len(content) - 1; i >= 0 && lineCount < tailLines; i-- {
if content[i] == '\n' {
lineCount++
// If we have reached the tailLines, trim the content
if lineCount == tailLines {
content = content[i+1:]
}
}
}
lines := strings.Split(processedInput, "\n")
if len(lines) > tailLines {
lines = lines[len(lines)-tailLines:]
}
return content, lineCount
finalResult := strings.Join(lines, "\n")

_ = finish(len(lines), int64(len(finalResult)))

return finalResult, totalLines, httpResp, nil
}

// RerunWorkflowRun creates a tool to re-run an entire workflow run
Expand Down
Loading
Loading