From 04bf3e938fa47ff484887f6bd8430084ffe56ff7 Mon Sep 17 00:00:00 2001 From: Thomas Stromberg Date: Mon, 3 Nov 2025 15:09:00 -0500 Subject: [PATCH 1/3] lint --- cmd/server/main.go | 2 - pkg/bot/bot.go | 31 ++-- pkg/bot/bot_sprinkler.go | 1 + pkg/bot/bot_test.go | 10 +- pkg/bot/commit_pr_cache_test.go | 4 +- pkg/bot/coordinator_test.go | 4 +- pkg/bot/coordinator_test_helpers.go | 168 +----------------- pkg/bot/create_pr_thread_additional_test.go | 1 - pkg/bot/dm_notifications_test.go | 1 + pkg/bot/event_integration_test.go | 32 ++-- pkg/bot/find_or_create_thread_test.go | 1 + pkg/bot/handle_pr_comprehensive_test.go | 1 - pkg/bot/handle_pr_test.go | 3 +- pkg/bot/integration_test.go | 2 +- pkg/bot/message_update_test.go | 6 +- pkg/bot/methods_test.go | 5 +- pkg/bot/mock_builders_test.go | 7 +- pkg/bot/poll_and_reconcile_test.go | 3 +- pkg/bot/polling.go | 5 +- pkg/bot/polling_comprehensive_test.go | 11 +- pkg/bot/polling_test.go | 27 ++- pkg/bot/process_channels_test.go | 16 +- pkg/bot/process_event_test.go | 2 +- .../process_pr_for_channel_additional_test.go | 2 - pkg/bot/sprinkler_test.go | 1 - pkg/bot/state_test.go | 16 +- pkg/config/config_test.go | 10 +- pkg/github/client_integration_test.go | 1 - pkg/github/github_mock_server_test.go | 24 ++- pkg/github/github_test.go | 80 +++++---- pkg/home/fetcher_test.go | 8 +- pkg/home/ui_test.go | 30 +++- pkg/notify/daily_digest_test.go | 4 - pkg/notify/daily_mocks_test.go | 51 ------ pkg/notify/daily_test.go | 4 + pkg/notify/format_edge_test.go | 3 - pkg/notify/notify.go | 4 +- pkg/notify/notify_user_test.go | 8 +- pkg/notify/run_test.go | 2 +- pkg/slack/api.go | 2 + pkg/slack/api_test.go | 1 + pkg/slack/api_wrapper_test.go | 48 +++-- pkg/slack/client_simple_test.go | 12 +- pkg/slack/http_handlers_test.go | 5 +- pkg/slack/manager_test.go | 4 +- pkg/slack/oauth.go | 6 +- pkg/slack/slack.go | 22 ++- pkg/slacktest/server_test.go | 6 +- pkg/state/datastore.go | 9 +- pkg/state/datastore_test.go | 80 ++++++--- pkg/state/json.go | 4 +- pkg/state/json_test.go | 84 ++++++--- pkg/state/memory.go | 4 +- pkg/state/memory_test.go | 42 +++-- pkg/state/store.go | 2 +- pkg/usermapping/reverse_test.go | 23 --- 56 files changed, 421 insertions(+), 524 deletions(-) diff --git a/cmd/server/main.go b/cmd/server/main.go index c02aa04..0530303 100644 --- a/cmd/server/main.go +++ b/cmd/server/main.go @@ -671,8 +671,6 @@ func (cm *coordinatorManager) handleRefreshInstallations(ctx context.Context) { // runBotCoordinators manages bot coordinators for all GitHub installations. // It spawns one coordinator per org and refreshes the list every 5 minutes. // Failed coordinators are automatically restarted every minute. -// -//nolint:interfacebloat // Interface mirrors state.Store for local type safety func runBotCoordinators( ctx context.Context, slackManager *slack.Manager, diff --git a/pkg/bot/bot.go b/pkg/bot/bot.go index 8200e00..0631de8 100644 --- a/pkg/bot/bot.go +++ b/pkg/bot/bot.go @@ -37,15 +37,6 @@ type prContext struct { Number int } -// ThreadInfo is an alias to cache.ThreadInfo for backward compatibility. -type ThreadInfo = cache.ThreadInfo - -// ThreadCache is an alias to cache.ThreadCache for backward compatibility. -type ThreadCache = cache.ThreadCache - -// CommitPRCache is an alias to cache.CommitPRCache for backward compatibility. -type CommitPRCache = cache.CommitPRCache - // Coordinator coordinates between GitHub, Slack, and notifications for a single org. 
// //nolint:govet // Field order optimized for logical grouping over memory alignment @@ -59,15 +50,15 @@ type Coordinator struct { configManager ConfigManager notifier *notify.Manager userMapper UserMapper - threadCache *ThreadCache // In-memory cache for fast lookups - commitPRCache *CommitPRCache // Maps commit SHAs to PR numbers for check events - eventSemaphore chan struct{} // Limits concurrent event processing (prevents overwhelming APIs) + threadCache *cache.ThreadCache // In-memory cache for fast lookups + commitPRCache *cache.CommitPRCache // Maps commit SHAs to PR numbers for check events + eventSemaphore chan struct{} // Limits concurrent event processing (prevents overwhelming APIs) } // StateStore interface for persistent state - allows dependency injection for testing. type StateStore interface { - Thread(owner, repo string, number int, channelID string) (ThreadInfo, bool) - SaveThread(owner, repo string, number int, channelID string, info ThreadInfo) error + Thread(owner, repo string, number int, channelID string) (cache.ThreadInfo, bool) + SaveThread(owner, repo string, number int, channelID string, info cache.ThreadInfo) error LastDM(userID, prURL string) (time.Time, bool) RecordDM(userID, prURL string, sentAt time.Time) error ListDMUsers(prURL string) []string @@ -127,7 +118,7 @@ func New( // saveThread persists thread info to both cache and persistent storage. // This ensures threads survive restarts and are available for closed PR updates. -func (c *Coordinator) saveThread(owner, repo string, number int, channelID string, info ThreadInfo) { +func (c *Coordinator) saveThread(owner, repo string, number int, channelID string, info cache.ThreadInfo) { // Save to in-memory cache for fast lookups key := fmt.Sprintf("%s/%s#%d:%s", owner, repo, number, channelID) c.threadCache.Set(key, info) @@ -205,7 +196,7 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner "current_message_preview", initialSearchText[:min(100, len(initialSearchText))]) // Save the found thread (cache + persist) - c.saveThread(owner, repo, prNumber, channelID, ThreadInfo{ + c.saveThread(owner, repo, prNumber, channelID, cache.ThreadInfo{ ThreadTS: initialSearchTS, ChannelID: channelID, LastState: prState, @@ -248,7 +239,7 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner // Try to take over creation if !c.threadCache.MarkCreating(cacheKey) { // Still being created, give up - return "", false, "", fmt.Errorf("timed out waiting for thread creation") + return "", false, "", errors.New("timed out waiting for thread creation") } } } @@ -281,7 +272,7 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner "note", "this prevented duplicate thread creation during rolling deployment") // Save it and return (cache + persist) - c.saveThread(owner, repo, prNumber, channelID, ThreadInfo{ + c.saveThread(owner, repo, prNumber, channelID, cache.ThreadInfo{ ThreadTS: crossInstanceCheckTS, ChannelID: channelID, LastState: prState, @@ -304,7 +295,7 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner } // Save the new thread (cache + persist) - c.saveThread(owner, repo, prNumber, channelID, ThreadInfo{ + c.saveThread(owner, repo, prNumber, channelID, cache.ThreadInfo{ ThreadTS: newThreadTS, ChannelID: channelID, LastState: prState, @@ -1394,7 +1385,7 @@ func (c *Coordinator) processPRForChannel( "next_poll_in", "5m") } else { // Save updated thread info (cache + persist) - c.saveThread(owner, repo, 
prNumber, channelID, ThreadInfo{ + c.saveThread(owner, repo, prNumber, channelID, cache.ThreadInfo{ ThreadTS: threadTS, ChannelID: channelID, LastState: prState, diff --git a/pkg/bot/bot_sprinkler.go b/pkg/bot/bot_sprinkler.go index 8852ceb..a219c10 100644 --- a/pkg/bot/bot_sprinkler.go +++ b/pkg/bot/bot_sprinkler.go @@ -162,6 +162,7 @@ func (c *Coordinator) lookupPRsForCheckEvent(ctx context.Context, event client.E } // Process the PR update since we have fresh data + //nolint:contextcheck // Background context intentional - goroutine must outlive parent timeout go c.handlePullRequestEventWithData(context.Background(), owner, repo, struct { Action string `json:"action"` PullRequest struct { diff --git a/pkg/bot/bot_test.go b/pkg/bot/bot_test.go index 66938d8..d3c35e3 100644 --- a/pkg/bot/bot_test.go +++ b/pkg/bot/bot_test.go @@ -172,7 +172,7 @@ func TestSaveThread(t *testing.T) { mockState := &mockStateStore{ processedEvents: make(map[string]bool), - threads: make(map[string]ThreadInfo), + threads: make(map[string]cache.ThreadInfo), } c := &Coordinator{ @@ -185,7 +185,7 @@ func TestSaveThread(t *testing.T) { eventSemaphore: make(chan struct{}, 10), } - threadInfo := ThreadInfo{ + threadInfo := cache.ThreadInfo{ ChannelID: "C123456", ThreadTS: "1234567890.123456", } @@ -225,7 +225,7 @@ func TestSaveThread_PersistenceError(t *testing.T) { mockState := &mockStateStore{ processedEvents: make(map[string]bool), - threads: make(map[string]ThreadInfo), + threads: make(map[string]cache.ThreadInfo), saveThreadErr: errors.New("database error"), } @@ -239,7 +239,7 @@ func TestSaveThread_PersistenceError(t *testing.T) { eventSemaphore: make(chan struct{}, 10), } - threadInfo := ThreadInfo{ + threadInfo := cache.ThreadInfo{ ChannelID: "C123456", ThreadTS: "1234567890.123456", } @@ -268,7 +268,7 @@ func TestSaveThread_PersistenceError(t *testing.T) { func TestThreadCache_Set(t *testing.T) { threadCache := cache.New() - threadInfo := ThreadInfo{ + threadInfo := cache.ThreadInfo{ ChannelID: "C123456", ThreadTS: "1234567890.123456", MessageText: "Test message", diff --git a/pkg/bot/commit_pr_cache_test.go b/pkg/bot/commit_pr_cache_test.go index d41f1be..4e70f53 100644 --- a/pkg/bot/commit_pr_cache_test.go +++ b/pkg/bot/commit_pr_cache_test.go @@ -137,7 +137,7 @@ func TestCheckEventIntegration_CacheHit(t *testing.T) { // Create coordinator with real commit cache coord := &Coordinator{ - stateStore: mockStore, + stateStore: mockStore, commitPRCache: cache.NewCommitPRCache(), github: &mockGitHubClientForCache{ // Mock should NOT be called if cache works @@ -185,7 +185,7 @@ func TestCheckEventIntegration_CacheMissFallback(t *testing.T) { // Create coordinator with empty cache coord := &Coordinator{ - stateStore: mockStore, + stateStore: mockStore, commitPRCache: cache.NewCommitPRCache(), github: &mockGitHubClientForCache{ // Mock SHOULD be called on cache miss diff --git a/pkg/bot/coordinator_test.go b/pkg/bot/coordinator_test.go index 3ea08f8..8aadaf3 100644 --- a/pkg/bot/coordinator_test.go +++ b/pkg/bot/coordinator_test.go @@ -10,7 +10,7 @@ import ( func TestCoordinator_saveThread(t *testing.T) { // Create mock state store mockStore := &mockStateStore{ - threads: make(map[string]ThreadInfo), + threads: make(map[string]cache.ThreadInfo), } // Create coordinator with mock @@ -24,7 +24,7 @@ func TestCoordinator_saveThread(t *testing.T) { repo := "testrepo" number := 123 channelID := "C123" - info := ThreadInfo{ + info := cache.ThreadInfo{ ThreadTS: "1234567890.123456", MessageText: "Test PR message", 
ChannelID: channelID, diff --git a/pkg/bot/coordinator_test_helpers.go b/pkg/bot/coordinator_test_helpers.go index 2469596..8a3179d 100644 --- a/pkg/bot/coordinator_test_helpers.go +++ b/pkg/bot/coordinator_test_helpers.go @@ -7,17 +7,16 @@ import ( "sync" "time" - ghmailto "github.com/codeGROOVE-dev/gh-mailto/pkg/gh-mailto" + "github.com/codeGROOVE-dev/slacker/pkg/bot/cache" "github.com/codeGROOVE-dev/slacker/pkg/github" "github.com/codeGROOVE-dev/slacker/pkg/state" - "github.com/codeGROOVE-dev/slacker/pkg/usermapping" "github.com/slack-go/slack" ) // mockStateStore implements StateStore interface from bot package. type mockStateStore struct { mu sync.Mutex - threads map[string]ThreadInfo + threads map[string]cache.ThreadInfo dmTimes map[string]time.Time dmUsers map[string][]string processedEvents map[string]bool @@ -26,7 +25,7 @@ type mockStateStore struct { saveThreadErr error // Error to return from SaveThread } -func (m *mockStateStore) Thread(owner, repo string, number int, channelID string) (ThreadInfo, bool) { +func (m *mockStateStore) Thread(owner, repo string, number int, channelID string) (cache.ThreadInfo, bool) { m.mu.Lock() defer m.mu.Unlock() key := fmt.Sprintf("%s/%s#%d:%s", owner, repo, number, channelID) @@ -35,10 +34,10 @@ func (m *mockStateStore) Thread(owner, repo string, number int, channelID string return info, true } } - return ThreadInfo{}, false + return cache.ThreadInfo{}, false } -func (m *mockStateStore) SaveThread(owner, repo string, number int, channelID string, info ThreadInfo) error { +func (m *mockStateStore) SaveThread(owner, repo string, number int, channelID string, info cache.ThreadInfo) error { m.mu.Lock() defer m.mu.Unlock() if m.saveThreadErr != nil { @@ -46,7 +45,7 @@ func (m *mockStateStore) SaveThread(owner, repo string, number int, channelID st } key := fmt.Sprintf("thread:%s/%s#%d:%s", owner, repo, number, channelID) if m.threads == nil { - m.threads = make(map[string]ThreadInfo) + m.threads = make(map[string]cache.ThreadInfo) } m.threads[key] = info return nil @@ -134,7 +133,7 @@ func (*mockStateStore) QueuePendingDM(dm state.PendingDM) error { return nil // No-op for tests } -func (*mockStateStore) GetPendingDMs(before time.Time) ([]state.PendingDM, error) { +func (*mockStateStore) PendingDMs(before time.Time) ([]state.PendingDM, error) { return nil, nil // Return empty list for tests } @@ -288,64 +287,6 @@ func (m *mockSlackClient) API() *slack.Client { return nil } -// newMockUserMapper creates a usermapping.Service for testing. -// Since we can't inject mocks into private fields, we use a real Service with nil Slack client. -// The tests won't call methods that need the Slack client. -func newMockUserMapper(_ *mockSlackClient) *usermapping.Service { - return usermapping.New(nil, "test-token") -} - -// mockSlackAPIForUserMapping implements usermapping.SlackAPI interface. -type mockSlackAPIForUserMapping struct{} - -func (*mockSlackAPIForUserMapping) GetUserByEmailContext(ctx context.Context, email string) (*slack.User, error) { - // Return a mock user for any email - return &slack.User{ - ID: "U" + email[:min(len(email), 5)], - Name: "testuser", - Profile: slack.UserProfile{ - Email: email, - }, - }, nil -} - -func (*mockSlackAPIForUserMapping) GetUserInfo(userID string) (*slack.User, error) { - return &slack.User{ - ID: userID, - Name: "testuser", - }, nil -} - -// mockGitHubEmailLookup implements usermapping.GitHubEmailLookup interface. 
-type mockGitHubEmailLookup struct{} - -func (*mockGitHubEmailLookup) Lookup(ctx context.Context, username, organization string) (*ghmailto.Result, error) { - // Return a mock result with a test email - return &ghmailto.Result{ - Addresses: []ghmailto.Address{ - { - Email: username + "@test.com", - Methods: []string{"mock"}, - }, - }, - }, nil -} - -func (*mockGitHubEmailLookup) Guess(ctx context.Context, username, organization string, opts ghmailto.GuessOptions) (*ghmailto.GuessResult, error) { - return &ghmailto.GuessResult{ - Username: username, - Guesses: []ghmailto.Address{}, - FoundAddresses: []ghmailto.Address{}, - }, nil -} - -func min(a, b int) int { - if a < b { - return a - } - return b -} - // mockUserMapper is a simple mock for user mapping in tests. type mockUserMapper struct { slackHandleFunc func(ctx context.Context, githubUser, org, domain string) (string, error) @@ -376,8 +317,8 @@ func (m *mockUserMapper) SlackHandle(ctx context.Context, githubUser, org, domai func (m *mockUserMapper) FormatUserMentions(ctx context.Context, githubUsers []string, owner, domain string) string { mentions := "" for i, user := range githubUsers { - slackID, _ := m.SlackHandle(ctx, user, owner, domain) - if slackID == "" { + slackID, err := m.SlackHandle(ctx, user, owner, domain) + if err != nil || slackID == "" { continue } if i > 0 && mentions != "" { @@ -388,97 +329,6 @@ func (m *mockUserMapper) FormatUserMentions(ctx context.Context, githubUsers []s return mentions } -// mockTracker is a simple mock for notification tracking in tests. -type mockTracker struct { - mu sync.Mutex - channelNotified bool - userTags []mockUserTag - tagInfoByUser map[string]TagInfo // Map from slackUserID to TagInfo for testing -} - -type mockUserTag struct { - workspaceID string - slackUserID string - channelID string - owner string - repo string - prNumber int -} - -func (m *mockTracker) UpdateChannelNotification(workspaceID, owner, repo string, prNumber int) { - m.mu.Lock() - defer m.mu.Unlock() - m.channelNotified = true -} - -func (m *mockTracker) UpdateUserPRChannelTag(workspaceID, slackUserID, channelID, owner, repo string, prNumber int) { - m.mu.Lock() - defer m.mu.Unlock() - m.userTags = append(m.userTags, mockUserTag{ - workspaceID: workspaceID, - slackUserID: slackUserID, - channelID: channelID, - owner: owner, - repo: repo, - prNumber: prNumber, - }) -} - -func (m *mockTracker) LastUserPRChannelTag(workspaceID, slackUserID, owner, repo string, prNumber int) TagInfo { - m.mu.Lock() - defer m.mu.Unlock() - if m.tagInfoByUser != nil { - if tagInfo, ok := m.tagInfoByUser[slackUserID]; ok { - return tagInfo - } - } - return TagInfo{} -} - -// mockNotifier is a simple mock for notification manager in tests. -type mockNotifier struct { - mu sync.Mutex - Tracker *mockTracker - notifyUserError error - notifyCalls []notifyUserCall -} - -type notifyUserCall struct { - workspaceID string - userID string - channelID string - channelName string -} - -// NotifyUser mocks the notify.Manager.NotifyUser method. -func (m *mockNotifier) NotifyUser(ctx context.Context, workspaceID, userID, channelID, channelName string, pr interface{}) error { - m.mu.Lock() - m.notifyCalls = append(m.notifyCalls, notifyUserCall{ - workspaceID: workspaceID, - userID: userID, - channelID: channelID, - channelName: channelName, - }) - m.mu.Unlock() - return m.notifyUserError -} - -// TagInfo matches the one in pkg/notify for test compatibility. 
-type TagInfo struct { - ChannelID string - TaggedAt time.Time - WorkspaceID string -} - -// notifyError is a simple error type for testing notification failures. -type notifyError struct { - message string -} - -func (e *notifyError) Error() string { - return e.message -} - // mockPRSearcher implements PRSearcher interface for testing polling logic. type mockPRSearcher struct { listOpenPRsFunc func(ctx context.Context, org string, updatedSinceHours int) ([]github.PRSnapshot, error) diff --git a/pkg/bot/create_pr_thread_additional_test.go b/pkg/bot/create_pr_thread_additional_test.go index c0af625..b94f76b 100644 --- a/pkg/bot/create_pr_thread_additional_test.go +++ b/pkg/bot/create_pr_thread_additional_test.go @@ -154,7 +154,6 @@ func TestCoordinator_CreatePRThread_ChannelAlreadyID(t *testing.T) { // Use a channel ID (starts with C) threadTS, _, err := c.createPRThread(ctx, "C123456", "testorg", "testrepo", 42, "awaiting_review", pr, checkResult) - if err != nil { t.Fatalf("unexpected error: %v", err) } diff --git a/pkg/bot/dm_notifications_test.go b/pkg/bot/dm_notifications_test.go index 004280b..b01ebe6 100644 --- a/pkg/bot/dm_notifications_test.go +++ b/pkg/bot/dm_notifications_test.go @@ -232,6 +232,7 @@ func TestUpdateDMMessagesForPR_ClosedPRNoDMRecipients(t *testing.T) { c.updateDMMessagesForPR(ctx, prInfo) // Test passes if it returns without panicking } + // TestUpdateDMMessagesForPR_MergedWithRecipients tests DM updates for merged PR with recipients. func TestUpdateDMMessagesForPR_MergedWithRecipients(t *testing.T) { ctx := context.Background() diff --git a/pkg/bot/event_integration_test.go b/pkg/bot/event_integration_test.go index 510e4ce..a4be09d 100644 --- a/pkg/bot/event_integration_test.go +++ b/pkg/bot/event_integration_test.go @@ -31,13 +31,13 @@ func TestIntegration_FindOrCreatePRThread_CreateNew(t *testing.T) { } mockState := &mockStateStore{ - threads: make(map[string]ThreadInfo), + threads: make(map[string]cache.ThreadInfo), } c := &Coordinator{ - slack: mockSlack, - stateStore: mockState, - configManager: NewMockConfig().Build(), + slack: mockSlack, + stateStore: mockState, + configManager: NewMockConfig().Build(), threadCache: cache.New(), eventSemaphore: make(chan struct{}, 10), } @@ -66,7 +66,6 @@ func TestIntegration_FindOrCreatePRThread_CreateNew(t *testing.T) { threadTS, wasNew, messageText, err := c.findOrCreatePRThread( ctx, "C123", "testorg", "testrepo", 42, "awaiting_review", pr, checkResult) - if err != nil { t.Fatalf("unexpected error: %v", err) } @@ -145,13 +144,13 @@ func TestIntegration_FindOrCreatePRThread_FindExisting(t *testing.T) { } mockState := &mockStateStore{ - threads: make(map[string]ThreadInfo), + threads: make(map[string]cache.ThreadInfo), } c := &Coordinator{ - slack: mockSlack, - stateStore: mockState, - configManager: NewMockConfig().Build(), + slack: mockSlack, + stateStore: mockState, + configManager: NewMockConfig().Build(), threadCache: cache.New(), eventSemaphore: make(chan struct{}, 10), } @@ -180,7 +179,6 @@ func TestIntegration_FindOrCreatePRThread_FindExisting(t *testing.T) { threadTS, wasNew, messageText, err := c.findOrCreatePRThread( ctx, "C123", "testorg", "testrepo", 42, "awaiting_review", pr, checkResult) - if err != nil { t.Fatalf("unexpected error: %v", err) } @@ -219,13 +217,13 @@ func TestIntegration_ThreadCache_Cleanup(t *testing.T) { // Add some threads with different ages using the public API // Note: We can't manually set UpdatedAt, so this test verifies that // Cleanup() works with the public API's timestamp 
management - threadCache.Set("old#1:C123", ThreadInfo{ + threadCache.Set("old#1:C123", cache.ThreadInfo{ ThreadTS: "1234.567", }) - threadCache.Set("recent#1:C123", ThreadInfo{ + threadCache.Set("recent#1:C123", cache.ThreadInfo{ ThreadTS: "2345.678", }) - threadCache.Set("new#1:C123", ThreadInfo{ + threadCache.Set("new#1:C123", cache.ThreadInfo{ ThreadTS: "3456.789", }) @@ -265,13 +263,13 @@ func TestIntegration_FindOrCreatePRThread_ConcurrentCreation(t *testing.T) { } mockState := &mockStateStore{ - threads: make(map[string]ThreadInfo), + threads: make(map[string]cache.ThreadInfo), } c := &Coordinator{ - slack: mockSlack, - stateStore: mockState, - configManager: NewMockConfig().Build(), + slack: mockSlack, + stateStore: mockState, + configManager: NewMockConfig().Build(), threadCache: cache.New(), eventSemaphore: make(chan struct{}, 10), } diff --git a/pkg/bot/find_or_create_thread_test.go b/pkg/bot/find_or_create_thread_test.go index 11b8a82..421bb09 100644 --- a/pkg/bot/find_or_create_thread_test.go +++ b/pkg/bot/find_or_create_thread_test.go @@ -418,6 +418,7 @@ func TestFindOrCreatePRThread_CreateThreadError(t *testing.T) { checkResult := &turn.CheckResponse{} + //nolint:dogsled // Multiple return values intentionally ignored - only testing error case _, _, _, err := c.findOrCreatePRThread(ctx, "C123", "testorg", "testrepo", 42, "awaiting_review", pullRequest, checkResult) if err == nil { t.Error("expected error when thread creation fails") diff --git a/pkg/bot/handle_pr_comprehensive_test.go b/pkg/bot/handle_pr_comprehensive_test.go index 33816eb..745f7c1 100644 --- a/pkg/bot/handle_pr_comprehensive_test.go +++ b/pkg/bot/handle_pr_comprehensive_test.go @@ -351,4 +351,3 @@ func TestHandlePullRequestEventWithData_ExtractStateFromTurnclient(t *testing.T) }) } } - diff --git a/pkg/bot/handle_pr_test.go b/pkg/bot/handle_pr_test.go index f3b7ab4..61ae6a7 100644 --- a/pkg/bot/handle_pr_test.go +++ b/pkg/bot/handle_pr_test.go @@ -1,12 +1,11 @@ package bot import ( - "github.com/codeGROOVE-dev/slacker/pkg/bot/cache" - "context" "testing" "time" + "github.com/codeGROOVE-dev/slacker/pkg/bot/cache" ) func TestHandlePullRequestFromSprinkler_NoToken(t *testing.T) { diff --git a/pkg/bot/integration_test.go b/pkg/bot/integration_test.go index 921d556..1cc5d68 100644 --- a/pkg/bot/integration_test.go +++ b/pkg/bot/integration_test.go @@ -7,6 +7,7 @@ import ( "time" ghmailto "github.com/codeGROOVE-dev/gh-mailto/pkg/gh-mailto" + "github.com/codeGROOVE-dev/slacker/pkg/config" "github.com/codeGROOVE-dev/slacker/pkg/notify" "github.com/codeGROOVE-dev/slacker/pkg/slack" "github.com/codeGROOVE-dev/slacker/pkg/slacktest" @@ -14,7 +15,6 @@ import ( "github.com/codeGROOVE-dev/slacker/pkg/usermapping" "github.com/codeGROOVE-dev/turnclient/pkg/turn" slackapi "github.com/slack-go/slack" - "github.com/codeGROOVE-dev/slacker/pkg/config" ) // TestUserMappingIntegration tests the complete flow of mapping GitHub users to Slack users. 
diff --git a/pkg/bot/message_update_test.go b/pkg/bot/message_update_test.go index 295f7fb..90ccbd0 100644 --- a/pkg/bot/message_update_test.go +++ b/pkg/bot/message_update_test.go @@ -115,14 +115,14 @@ func TestCachedMessageText(t *testing.T) { tests := []struct { name string cacheKey string - threadInfo ThreadInfo + threadInfo cache.ThreadInfo expectFound bool expectText string }{ { name: "message text cached", cacheKey: "org/repo#1:C123", - threadInfo: ThreadInfo{ + threadInfo: cache.ThreadInfo{ ThreadTS: "1234567890.123456", ChannelID: "C123", LastState: "awaiting_review", @@ -134,7 +134,7 @@ func TestCachedMessageText(t *testing.T) { { name: "empty message text in cache", cacheKey: "org/repo#2:C123", - threadInfo: ThreadInfo{ + threadInfo: cache.ThreadInfo{ ThreadTS: "1234567890.123457", ChannelID: "C123", LastState: "tests_running", diff --git a/pkg/bot/methods_test.go b/pkg/bot/methods_test.go index bdb0e73..e44ed0d 100644 --- a/pkg/bot/methods_test.go +++ b/pkg/bot/methods_test.go @@ -26,7 +26,7 @@ func testCoordinator(mockState *mockStateStore) *Coordinator { func TestCoordinator_SaveThread(t *testing.T) { mockState := &mockStateStore{ - threads: make(map[string]ThreadInfo), + threads: make(map[string]cache.ThreadInfo), } c := testCoordinator(mockState) @@ -35,7 +35,7 @@ func TestCoordinator_SaveThread(t *testing.T) { number := 42 channelID := "C123456" - info := ThreadInfo{ + info := cache.ThreadInfo{ ThreadTS: "1234567890.123456", MessageText: "Test PR #42", ChannelID: channelID, @@ -256,7 +256,6 @@ func TestCoordinator_CreatePRThread(t *testing.T) { } threadTS, messageText, err := c.createPRThread(ctx, "C123", "testorg", "testrepo", 42, "awaiting_review", pr, checkResult) - if err != nil { t.Fatalf("unexpected error: %v", err) } diff --git a/pkg/bot/mock_builders_test.go b/pkg/bot/mock_builders_test.go index 2c2df01..7b711b5 100644 --- a/pkg/bot/mock_builders_test.go +++ b/pkg/bot/mock_builders_test.go @@ -1,12 +1,13 @@ package bot import ( - "github.com/codeGROOVE-dev/slacker/pkg/bot/cache" "context" "errors" "fmt" "time" + "github.com/codeGROOVE-dev/slacker/pkg/bot/cache" + "github.com/codeGROOVE-dev/slacker/pkg/notify" "github.com/slack-go/slack" ) @@ -125,7 +126,7 @@ type MockStateBuilder struct { func NewMockState() *MockStateBuilder { return &MockStateBuilder{ mock: &mockStateStore{ - threads: make(map[string]ThreadInfo), + threads: make(map[string]cache.ThreadInfo), dmTimes: make(map[string]time.Time), dmUsers: make(map[string][]string), processedEvents: make(map[string]bool), @@ -135,7 +136,7 @@ func NewMockState() *MockStateBuilder { } // WithThread pre-populates a thread in the state store. -func (b *MockStateBuilder) WithThread(owner, repo string, number int, channelID string, info ThreadInfo) *MockStateBuilder { +func (b *MockStateBuilder) WithThread(owner, repo string, number int, channelID string, info cache.ThreadInfo) *MockStateBuilder { key := fmt.Sprintf("%s/%s#%d:%s", owner, repo, number, channelID) b.mock.threads[key] = info return b diff --git a/pkg/bot/poll_and_reconcile_test.go b/pkg/bot/poll_and_reconcile_test.go index df50c6c..5f3da04 100644 --- a/pkg/bot/poll_and_reconcile_test.go +++ b/pkg/bot/poll_and_reconcile_test.go @@ -5,6 +5,7 @@ import ( "testing" "time" + "github.com/codeGROOVE-dev/slacker/pkg/bot/cache" "github.com/codeGROOVE-dev/slacker/pkg/github" ) @@ -143,7 +144,7 @@ func TestUpdateClosedPRThread_HappyPath(t *testing.T) { // Pre-populate state store with existing thread using builder mockState := NewMockState(). 
- WithThread("testorg", "testrepo", 42, "C123", ThreadInfo{ + WithThread("testorg", "testrepo", 42, "C123", cache.ThreadInfo{ ThreadTS: "1234.567", ChannelID: "C123", MessageText: ":hourglass: Test PR", diff --git a/pkg/bot/polling.go b/pkg/bot/polling.go index 581ddd9..27bf845 100644 --- a/pkg/bot/polling.go +++ b/pkg/bot/polling.go @@ -8,6 +8,7 @@ import ( "strings" "time" + "github.com/codeGROOVE-dev/slacker/pkg/bot/cache" "github.com/codeGROOVE-dev/slacker/pkg/github" "github.com/codeGROOVE-dev/turnclient/pkg/turn" ) @@ -345,7 +346,7 @@ func (c *Coordinator) updateClosedPRThread(ctx context.Context, pr *github.PRSna } // Found via channel history - reconstruct ThreadInfo - info = ThreadInfo{ + info = cache.ThreadInfo{ ThreadTS: threadTS, ChannelID: id, MessageText: messageText, @@ -414,7 +415,7 @@ func replaceEmojiPrefix(text, newEmoji string) string { } // updateThreadForClosedPR updates a single thread's message to reflect closed/merged state. -func (c *Coordinator) updateThreadForClosedPR(ctx context.Context, pr *github.PRSnapshot, channelID string, info ThreadInfo) error { +func (c *Coordinator) updateThreadForClosedPR(ctx context.Context, pr *github.PRSnapshot, channelID string, info cache.ThreadInfo) error { emoji, err := emojiForPRState(pr.State) if err != nil { return err diff --git a/pkg/bot/polling_comprehensive_test.go b/pkg/bot/polling_comprehensive_test.go index ad41310..f639e24 100644 --- a/pkg/bot/polling_comprehensive_test.go +++ b/pkg/bot/polling_comprehensive_test.go @@ -6,6 +6,7 @@ import ( "testing" "time" + "github.com/codeGROOVE-dev/slacker/pkg/bot/cache" "github.com/codeGROOVE-dev/slacker/pkg/github" "github.com/slack-go/slack" ) @@ -37,7 +38,7 @@ func TestUpdateClosedPRThread_ThreadInStateStore(t *testing.T) { Build() mockState := NewMockState(). - WithThread("testorg", "testrepo", 42, "C_ENG", ThreadInfo{ + WithThread("testorg", "testrepo", 42, "C_ENG", cache.ThreadInfo{ ThreadTS: "1234.567", ChannelID: "C_ENG", MessageText: "old message", @@ -255,7 +256,7 @@ func TestUpdateClosedPRThread_UpdateMessageError(t *testing.T) { Build() mockState := NewMockState(). - WithThread("testorg", "testrepo", 42, "C_ENG", ThreadInfo{ + WithThread("testorg", "testrepo", 42, "C_ENG", cache.ThreadInfo{ ThreadTS: "1234.567", ChannelID: "C_ENG", MessageText: "old message", @@ -312,11 +313,11 @@ func TestUpdateClosedPRThread_MultipleChannels(t *testing.T) { Build() mockState := NewMockState(). - WithThread("testorg", "testrepo", 42, "C_ENG", ThreadInfo{ + WithThread("testorg", "testrepo", 42, "C_ENG", cache.ThreadInfo{ ThreadTS: "1111.111", ChannelID: "C_ENG", }). - WithThread("testorg", "testrepo", 42, "C_QA", ThreadInfo{ + WithThread("testorg", "testrepo", 42, "C_QA", cache.ThreadInfo{ ThreadTS: "2222.222", ChannelID: "C_QA", }). @@ -371,7 +372,7 @@ func TestUpdateClosedPRThread_ClosedNotMerged(t *testing.T) { Build() mockState := NewMockState(). - WithThread("testorg", "testrepo", 42, "C_ENG", ThreadInfo{ + WithThread("testorg", "testrepo", 42, "C_ENG", cache.ThreadInfo{ ThreadTS: "1234.567", ChannelID: "C_ENG", }). 
diff --git a/pkg/bot/polling_test.go b/pkg/bot/polling_test.go index 525d897..5efc6ca 100644 --- a/pkg/bot/polling_test.go +++ b/pkg/bot/polling_test.go @@ -341,7 +341,7 @@ func TestUpdateThreadForClosedPR(t *testing.T) { State: tt.prState, } - info := ThreadInfo{ + info := cache.ThreadInfo{ ThreadTS: "1234.567", ChannelID: "C123", MessageText: ":hourglass: Test PR • testrepo#42 by @user", @@ -546,14 +546,13 @@ func TestUpdateThreadForClosedPR_Merged(t *testing.T) { State: "MERGED", } - info := ThreadInfo{ + info := cache.ThreadInfo{ ThreadTS: "1234567890.123456", ChannelID: "C123456", MessageText: ":hourglass: Fix bug • testorg/testrepo#42 by @user", } err := c.updateThreadForClosedPR(ctx, pr, "C123456", info) - if err != nil { t.Errorf("expected no error, got %v", err) } @@ -590,14 +589,13 @@ func TestUpdateThreadForClosedPR_ClosedNotMerged(t *testing.T) { State: "CLOSED", } - info := ThreadInfo{ + info := cache.ThreadInfo{ ThreadTS: "1234567890.123456", ChannelID: "C123456", MessageText: ":test_tube: Fix bug • testorg/testrepo#42 by @user", } err := c.updateThreadForClosedPR(ctx, pr, "C123456", info) - if err != nil { t.Errorf("expected no error, got %v", err) } @@ -634,14 +632,13 @@ func TestUpdateThreadForClosedPR_NoSpaceInMessage(t *testing.T) { State: "MERGED", } - info := ThreadInfo{ + info := cache.ThreadInfo{ ThreadTS: "1234567890.123456", ChannelID: "C123456", MessageText: "NoSpaces", } err := c.updateThreadForClosedPR(ctx, pr, "C123456", info) - if err != nil { t.Errorf("expected no error, got %v", err) } @@ -676,7 +673,7 @@ func TestUpdateThreadForClosedPR_InvalidState(t *testing.T) { State: "INVALID", } - info := ThreadInfo{ + info := cache.ThreadInfo{ ThreadTS: "1234567890.123456", ChannelID: "C123456", MessageText: ":hourglass: Fix bug", @@ -717,7 +714,7 @@ func TestUpdateThreadForClosedPR_UpdateFails(t *testing.T) { State: "MERGED", } - info := ThreadInfo{ + info := cache.ThreadInfo{ ThreadTS: "1234567890.123456", ChannelID: "C123456", MessageText: ":hourglass: Fix bug", @@ -848,11 +845,11 @@ func TestShouldReconcilePR(t *testing.T) { twoHoursAgo := now.Add(-2 * time.Hour) tests := []struct { - name string - prUpdatedAt time.Time - lastNotified time.Time - expectedReason string - expectedReconcile bool + name string + prUpdatedAt time.Time + lastNotified time.Time + expectedReason string + expectedReconcile bool }{ { name: "never notified", @@ -1131,7 +1128,7 @@ func TestUpdateClosedPRThread_WithConfiguredChannels(t *testing.T) { // Mock state store with existing thread info mockState := &mockStateStore{ processedEvents: make(map[string]bool), - threads: map[string]ThreadInfo{ + threads: map[string]cache.ThreadInfo{ "thread:testorg/testrepo#42:C123": { ThreadTS: "1234567890.123456", ChannelID: "C123", diff --git a/pkg/bot/process_channels_test.go b/pkg/bot/process_channels_test.go index 3405dfd..9c47943 100644 --- a/pkg/bot/process_channels_test.go +++ b/pkg/bot/process_channels_test.go @@ -71,8 +71,8 @@ func TestProcessChannelsInParallel_NoValidChannels(t *testing.T) { event := struct { Action string `json:"action"` PullRequest struct { - HTMLURL string `json:"html_url"` - Title string `json:"title"` + HTMLURL string `json:"html_url"` + Title string `json:"title"` CreatedAt time.Time `json:"created_at"` User struct { Login string `json:"login"` @@ -167,8 +167,8 @@ func TestProcessPRForChannel_ChannelResolutionFailed(t *testing.T) { event := struct { Action string `json:"action"` PullRequest struct { - HTMLURL string `json:"html_url"` - Title string `json:"title"` + 
HTMLURL string `json:"html_url"` + Title string `json:"title"` CreatedAt time.Time `json:"created_at"` User struct { Login string `json:"login"` @@ -248,8 +248,8 @@ func TestProcessChannelsInParallel_HappyPath(t *testing.T) { event := struct { Action string `json:"action"` PullRequest struct { - HTMLURL string `json:"html_url"` - Title string `json:"title"` + HTMLURL string `json:"html_url"` + Title string `json:"title"` CreatedAt time.Time `json:"created_at"` User struct { Login string `json:"login"` @@ -331,8 +331,8 @@ func TestProcessChannelsInParallel_SomeChannelsInvalid(t *testing.T) { event := struct { Action string `json:"action"` PullRequest struct { - HTMLURL string `json:"html_url"` - Title string `json:"title"` + HTMLURL string `json:"html_url"` + Title string `json:"title"` CreatedAt time.Time `json:"created_at"` User struct { Login string `json:"login"` diff --git a/pkg/bot/process_event_test.go b/pkg/bot/process_event_test.go index 2388c44..9426943 100644 --- a/pkg/bot/process_event_test.go +++ b/pkg/bot/process_event_test.go @@ -1,11 +1,11 @@ package bot import ( - "github.com/codeGROOVE-dev/slacker/pkg/bot/cache" "context" "testing" "time" + "github.com/codeGROOVE-dev/slacker/pkg/bot/cache" ) func TestProcessEvent_EmptyMessage(t *testing.T) { diff --git a/pkg/bot/process_pr_for_channel_additional_test.go b/pkg/bot/process_pr_for_channel_additional_test.go index 8f8dac7..00b7bb3 100644 --- a/pkg/bot/process_pr_for_channel_additional_test.go +++ b/pkg/bot/process_pr_for_channel_additional_test.go @@ -296,5 +296,3 @@ func TestProcessPRForChannel_MessageUpdateError(t *testing.T) { t.Error("expected non-nil result even when update fails") } } - - diff --git a/pkg/bot/sprinkler_test.go b/pkg/bot/sprinkler_test.go index 036f14c..26cee1a 100644 --- a/pkg/bot/sprinkler_test.go +++ b/pkg/bot/sprinkler_test.go @@ -606,7 +606,6 @@ func TestHandleAuthError_RefreshSuccess(t *testing.T) { mockGH.token = "new-token" newClient, err := c.handleAuthError(ctx, "testorg", createConfig) - if err != nil { t.Errorf("expected no error, got %v", err) } diff --git a/pkg/bot/state_test.go b/pkg/bot/state_test.go index f6d9431..bb29c31 100644 --- a/pkg/bot/state_test.go +++ b/pkg/bot/state_test.go @@ -253,7 +253,7 @@ func TestThreadCache_GetSet(t *testing.T) { } // Test Set - testInfo := ThreadInfo{ + testInfo := cache.ThreadInfo{ ThreadTS: "1234567890.123456", MessageText: "Test PR message", ChannelID: "C123", @@ -297,7 +297,7 @@ func TestThreadCache_Cleanup(t *testing.T) { now := time.Now() // Add old entry (2 hours ago) - oldInfo := ThreadInfo{ + oldInfo := cache.ThreadInfo{ ThreadTS: "1111111111.111111", MessageText: "Old PR", ChannelID: "C111", @@ -306,7 +306,7 @@ func TestThreadCache_Cleanup(t *testing.T) { threadCache.SetForTest("old/repo#1:C111", oldInfo) // Add recent entry (30 minutes ago) - recentInfo := ThreadInfo{ + recentInfo := cache.ThreadInfo{ ThreadTS: "2222222222.222222", MessageText: "Recent PR", ChannelID: "C222", @@ -315,7 +315,7 @@ func TestThreadCache_Cleanup(t *testing.T) { threadCache.SetForTest("recent/repo#2:C222", recentInfo) // Add very recent entry (5 minutes ago) - veryRecentInfo := ThreadInfo{ + veryRecentInfo := cache.ThreadInfo{ ThreadTS: "3333333333.333333", MessageText: "New PR", ChannelID: "C333", @@ -351,13 +351,13 @@ func TestThreadCache_MultipleChannels(t *testing.T) { prKey1 := "owner/repo#123:C111" prKey2 := "owner/repo#123:C222" - info1 := ThreadInfo{ + info1 := cache.ThreadInfo{ ThreadTS: "1111111111.111111", MessageText: "PR in channel 1", ChannelID: 
"C111", } - info2 := ThreadInfo{ + info2 := cache.ThreadInfo{ ThreadTS: "2222222222.222222", MessageText: "PR in channel 2", ChannelID: "C222", @@ -393,7 +393,7 @@ func TestThreadCache_UpdateExisting(t *testing.T) { prKey := "owner/repo#123:C123" // Set initial info - initialInfo := ThreadInfo{ + initialInfo := cache.ThreadInfo{ ThreadTS: "1111111111.111111", MessageText: "Initial message", ChannelID: "C123", @@ -408,7 +408,7 @@ func TestThreadCache_UpdateExisting(t *testing.T) { time.Sleep(10 * time.Millisecond) // Update with new message text - updatedInfo := ThreadInfo{ + updatedInfo := cache.ThreadInfo{ ThreadTS: "1111111111.111111", // Same thread MessageText: "Updated message", // New message text ChannelID: "C123", diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go index bdb6ac7..fc0cd49 100644 --- a/pkg/config/config_test.go +++ b/pkg/config/config_test.go @@ -962,7 +962,9 @@ channels: Encoding: &encoding, } w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(response) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(response) return } http.NotFound(w, r) @@ -1046,7 +1048,8 @@ channels: [1, 2, 3] Encoding: &encoding, } w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(response) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(response) } client, server := createTestGitHubClient(handler) @@ -1184,7 +1187,8 @@ func TestManager_LoadConfigEmptyContent(t *testing.T) { Type: github.String("file"), } w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(response) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(response) } client, server := createTestGitHubClient(handler) diff --git a/pkg/github/client_integration_test.go b/pkg/github/client_integration_test.go index 8812953..c7b04f6 100644 --- a/pkg/github/client_integration_test.go +++ b/pkg/github/client_integration_test.go @@ -104,7 +104,6 @@ func TestClient_Authenticate_RetryOnFailure(t *testing.T) { ctx := context.Background() err = client.authenticate(ctx) - // Should still succeed after retry if err != nil { t.Fatalf("authenticate() should succeed after retry, got: %v", err) diff --git a/pkg/github/github_mock_server_test.go b/pkg/github/github_mock_server_test.go index 508acd8..6337b64 100644 --- a/pkg/github/github_mock_server_test.go +++ b/pkg/github/github_mock_server_test.go @@ -116,7 +116,8 @@ func NewMockGitHubServer() *MockGitHubServer { mux.HandleFunc("/rate_limit", func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusOK) - json.NewEncoder(w).Encode(map[string]interface{}{ + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(map[string]interface{}{ "resources": map[string]interface{}{ "core": map[string]interface{}{ "limit": 5000, @@ -131,7 +132,8 @@ func NewMockGitHubServer() *MockGitHubServer { mux.HandleFunc("/installation/repositories", func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusOK) - json.NewEncoder(w).Encode(map[string]interface{}{ + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(map[string]interface{}{ 
"total_count": 0, "repositories": []interface{}{}, }) @@ -187,7 +189,8 @@ func (m *MockGitHubServer) handleListInstallations(w http.ResponseWriter, r *htt // Return installations w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(m.installations) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(m.installations) } // handleGetInstallation handles GET /app/installations/{id}. @@ -210,7 +213,8 @@ func (m *MockGitHubServer) handleGetInstallation(w http.ResponseWriter, r *http. for _, inst := range m.installations { if inst.ID == id { w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(inst) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(inst) return } } @@ -278,7 +282,8 @@ func (m *MockGitHubServer) handleCreateInstallationToken(w http.ResponseWriter, } w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(token) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(token) } // handleListPRsForCommit handles GET /repos/{owner}/{repo}/commits/{sha}/pulls. @@ -326,7 +331,8 @@ func (m *MockGitHubServer) handleListPRsForCommit(w http.ResponseWriter, r *http } w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(prs) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(prs) } // handleListPRs handles GET /repos/{owner}/{repo}/pulls. @@ -369,7 +375,8 @@ func (m *MockGitHubServer) handleListPRs(w http.ResponseWriter, r *http.Request) } w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(prs) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(prs) } // handleSearchIssues handles GET /search/issues (used for PR search). @@ -433,5 +440,6 @@ func (m *MockGitHubServer) handleSearchIssues(w http.ResponseWriter, r *http.Req } w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(result) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(result) } diff --git a/pkg/github/github_test.go b/pkg/github/github_test.go index 7c52e95..5fd29d7 100644 --- a/pkg/github/github_test.go +++ b/pkg/github/github_test.go @@ -19,20 +19,6 @@ import ( "golang.org/x/oauth2" ) -// mockGitHubClient is a simple mock for testing. 
-type mockGitHubClient struct { - installationToken string - client *github.Client -} - -func (m *mockGitHubClient) Client() any { - return m.client -} - -func (m *mockGitHubClient) InstallationToken(ctx context.Context) string { - return m.installationToken -} - func TestClient_Client(t *testing.T) { ghClient := github.NewClient(nil) c := &Client{ @@ -124,8 +110,12 @@ func TestManagerWrapper_ClientForOrg(t *testing.T) { } // Verify it's the right client - if gotClient.(*Client).organization != "testorg" { - t.Errorf("expected organization 'testorg', got %q", gotClient.(*Client).organization) + client, clientOK := gotClient.(*Client) + if !clientOK { + t.Fatal("expected gotClient to be *Client") + } + if client.organization != "testorg" { + t.Errorf("expected organization 'testorg', got %q", client.organization) } } @@ -187,7 +177,6 @@ func TestRefreshingTokenSource_Token(t *testing.T) { ts := &refreshingTokenSource{client: c} token, err := ts.Token() - if err != nil { t.Fatalf("unexpected error: %v", err) } @@ -205,7 +194,6 @@ func TestRefreshingTokenSource_Token_ValidToken(t *testing.T) { ts := &refreshingTokenSource{client: c} token, err := ts.Token() - if err != nil { t.Fatalf("unexpected error: %v", err) } @@ -249,7 +237,7 @@ func TestUserAgentTransport_RoundTrip(t *testing.T) { // Create a mock round tripper mockTransport := &mockRoundTripper{ response: &http.Response{ - StatusCode: 200, + StatusCode: http.StatusOK, Body: http.NoBody, }, } @@ -259,7 +247,7 @@ func TestUserAgentTransport_RoundTrip(t *testing.T) { } req := &http.Request{ - Method: "GET", + Method: http.MethodGet, URL: &url.URL{Scheme: "https", Host: "api.github.com", Path: "/test"}, Header: make(http.Header), } @@ -268,9 +256,11 @@ func TestUserAgentTransport_RoundTrip(t *testing.T) { if err != nil { t.Fatalf("unexpected error: %v", err) } + //nolint:errcheck // Error intentionally ignored in test + defer resp.Body.Close() - if resp.StatusCode != 200 { - t.Errorf("expected status code 200, got %d", resp.StatusCode) + if resp.StatusCode != http.StatusOK { + t.Errorf("expected status code %d, got %d", http.StatusOK, resp.StatusCode) } userAgent := mockTransport.capturedRequest.Header.Get("User-Agent") @@ -464,7 +454,8 @@ func TestFindPRsForCommit_WithMockServer(t *testing.T) { "state": "closed", }, } - json.NewEncoder(w).Encode(resp) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(resp) return } http.NotFound(w, r) @@ -474,6 +465,7 @@ func TestFindPRsForCommit_WithMockServer(t *testing.T) { // Create a client pointing to our mock server httpClient := server.Client() ghClient := github.NewClient(httpClient) + //nolint:errcheck // Error intentionally ignored in test ghClient.BaseURL, _ = url.Parse(server.URL + "/") c := &Client{ @@ -675,6 +667,7 @@ func TestFindPRsForCommit_NotFound(t *testing.T) { httpClient := server.Client() ghClient := github.NewClient(httpClient) + //nolint:errcheck // Error intentionally ignored in test ghClient.BaseURL, _ = url.Parse(server.URL + "/") c := &Client{ @@ -846,7 +839,8 @@ func TestSearchClient_ListOpenPRs(t *testing.T) { }, }, } - json.NewEncoder(w).Encode(resp) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(resp) return } @@ -859,6 +853,7 @@ func TestSearchClient_ListOpenPRs(t *testing.T) { src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) + 
//nolint:errcheck // Error intentionally ignored in test searchClient.BaseURL, _ = url.Parse(server.URL + "/") client := &SearchClient{ @@ -928,7 +923,8 @@ func TestSearchClient_ListClosedPRs(t *testing.T) { }, }, } - json.NewEncoder(w).Encode(resp) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(resp) return } http.NotFound(w, r) @@ -939,6 +935,7 @@ func TestSearchClient_ListClosedPRs(t *testing.T) { src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) + //nolint:errcheck // Error intentionally ignored in test searchClient.BaseURL, _ = url.Parse(server.URL + "/") client := &SearchClient{ @@ -1054,7 +1051,8 @@ func TestSearchPRs_Pagination(t *testing.T) { }, }, } - json.NewEncoder(w).Encode(resp) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(resp) return } http.NotFound(w, r) @@ -1066,6 +1064,7 @@ func TestSearchPRs_Pagination(t *testing.T) { src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) + //nolint:errcheck // Error intentionally ignored in test searchClient.BaseURL, _ = url.Parse(server.URL + "/") client := &SearchClient{ @@ -1098,6 +1097,7 @@ func TestSearchPRs_SearchError(t *testing.T) { src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) + //nolint:errcheck // Error intentionally ignored in test searchClient.BaseURL, _ = url.Parse(server.URL + "/") client := &SearchClient{ @@ -1145,7 +1145,8 @@ func TestSearchPRs_SkipsIssues(t *testing.T) { }, }, } - json.NewEncoder(w).Encode(resp) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(resp) return } http.NotFound(w, r) @@ -1156,6 +1157,7 @@ func TestSearchPRs_SkipsIssues(t *testing.T) { src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) + //nolint:errcheck // Error intentionally ignored in test searchClient.BaseURL, _ = url.Parse(server.URL + "/") client := &SearchClient{ @@ -1201,7 +1203,8 @@ func TestSearchPRs_InvalidRepositoryURL(t *testing.T) { }, }, } - json.NewEncoder(w).Encode(resp) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(resp) return } http.NotFound(w, r) @@ -1212,6 +1215,7 @@ func TestSearchPRs_InvalidRepositoryURL(t *testing.T) { src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) + //nolint:errcheck // Error intentionally ignored in test searchClient.BaseURL, _ = url.Parse(server.URL + "/") client := &SearchClient{ @@ -1251,7 +1255,8 @@ func TestManager_RefreshInstallations_Success(t *testing.T) { }, }, } - json.NewEncoder(w).Encode(resp) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(resp) return } // Installation token endpoint @@ -1262,7 +1267,8 @@ func TestManager_RefreshInstallations_Success(t *testing.T) { "token": "ghs_test_token", "expires_at": time.Now().Add(1 * time.Hour).Format(time.RFC3339), } - json.NewEncoder(w).Encode(resp) + //nolint:errcheck // Error intentionally 
ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(resp) return } http.NotFound(w, r) @@ -1281,6 +1287,7 @@ func TestManager_RefreshInstallations_Success(t *testing.T) { tc := oauth2.NewClient(ctx, ts) tc.Transport = &userAgentTransport{base: tc.Transport} testClient := github.NewClient(tc) + //nolint:errcheck // Error intentionally ignored in test testClient.BaseURL, _ = url.Parse(server.URL + "/") // We can't easily override the client creation in RefreshInstallations @@ -1313,7 +1320,8 @@ func TestManager_RefreshInstallations_SkipsPersonalAccounts(t *testing.T) { }, }, } - json.NewEncoder(w).Encode(resp) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(resp) return } http.NotFound(w, r) @@ -1351,7 +1359,8 @@ func TestManager_RefreshInstallations_MissingAccount(t *testing.T) { // Missing account field }, } - json.NewEncoder(w).Encode(resp) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(resp) return } http.NotFound(w, r) @@ -1385,7 +1394,8 @@ func TestClient_Authenticate_Success(t *testing.T) { "token": "ghs_test_installation_token", "expires_at": time.Now().Add(1 * time.Hour).Format(time.RFC3339), } - json.NewEncoder(w).Encode(resp) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(resp) return } http.NotFound(w, r) @@ -1449,7 +1459,8 @@ func TestSearchPRs_MaxPageLimit(t *testing.T) { }, }, } - json.NewEncoder(w).Encode(resp) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(resp) return } http.NotFound(w, r) @@ -1461,6 +1472,7 @@ func TestSearchPRs_MaxPageLimit(t *testing.T) { src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) + //nolint:errcheck // Error intentionally ignored in test searchClient.BaseURL, _ = url.Parse(server.URL + "/") client := &SearchClient{ diff --git a/pkg/home/fetcher_test.go b/pkg/home/fetcher_test.go index 43bb2dc..c8b3682 100644 --- a/pkg/home/fetcher_test.go +++ b/pkg/home/fetcher_test.go @@ -264,7 +264,7 @@ func (m *mockStateStore) QueuePendingDM(dm state.PendingDM) error { return nil } -func (m *mockStateStore) GetPendingDMs(before time.Time) ([]state.PendingDM, error) { +func (m *mockStateStore) PendingDMs(before time.Time) ([]state.PendingDM, error) { return nil, nil } @@ -301,7 +301,8 @@ func TestSearchPRs(t *testing.T) { }, } w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(response) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(response) return } http.NotFound(w, r) @@ -396,7 +397,8 @@ func TestFetchDashboard(t *testing.T) { Issues: issues, } w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(response) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _ = json.NewEncoder(w).Encode(response) return } http.NotFound(w, r) diff --git a/pkg/home/ui_test.go b/pkg/home/ui_test.go index 81504a0..c4a0392 100644 --- a/pkg/home/ui_test.go +++ b/pkg/home/ui_test.go @@ -308,7 +308,10 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now.Add(-1 * time.Hour), }, validate: func(t *testing.T, block slack.Block) { - sb := block.(*slack.SectionBlock) + sb, ok := block.(*slack.SectionBlock) + if !ok { + t.Fatal("expected block to be *slack.SectionBlock") + 
} text := sb.Text.Text // Should have pause emoji @@ -339,7 +342,10 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now.Add(-30 * time.Minute), }, validate: func(t *testing.T, block slack.Block) { - sb := block.(*slack.SectionBlock) + sb, ok := block.(*slack.SectionBlock) + if !ok { + t.Fatal("expected block to be *slack.SectionBlock") + } text := sb.Text.Text // Should have eyes emoji @@ -370,7 +376,10 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now.Add(-24 * time.Hour), }, validate: func(t *testing.T, block slack.Block) { - sb := block.(*slack.SectionBlock) + sb, ok := block.(*slack.SectionBlock) + if !ok { + t.Fatal("expected block to be *slack.SectionBlock") + } text := sb.Text.Text // Should have hourglass emoji @@ -399,7 +408,10 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now, }, validate: func(t *testing.T, block slack.Block) { - sb := block.(*slack.SectionBlock) + sb, ok := block.(*slack.SectionBlock) + if !ok { + t.Fatal("expected block to be *slack.SectionBlock") + } text := sb.Text.Text // Should truncate to 120 characters with "..." @@ -427,7 +439,10 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now.Add(-5 * 24 * time.Hour), }, validate: func(t *testing.T, block slack.Block) { - sb := block.(*slack.SectionBlock) + sb, ok := block.(*slack.SectionBlock) + if !ok { + t.Fatal("expected block to be *slack.SectionBlock") + } text := sb.Text.Text // Should show age in days @@ -446,7 +461,10 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now.Add(-60 * 24 * time.Hour), }, validate: func(t *testing.T, block slack.Block) { - sb := block.(*slack.SectionBlock) + sb, ok := block.(*slack.SectionBlock) + if !ok { + t.Fatal("expected block to be *slack.SectionBlock") + } text := sb.Text.Text // Should show age in months (approximately 2 months) diff --git a/pkg/notify/daily_digest_test.go b/pkg/notify/daily_digest_test.go index b040419..f4a87e9 100644 --- a/pkg/notify/daily_digest_test.go +++ b/pkg/notify/daily_digest_test.go @@ -227,7 +227,6 @@ func TestSendDigest_Success(t *testing.T) { } err := scheduler.sendDigest(ctx, mockUserMapper, mockClient, "testuser", "test-org", "example.com", prs) - if err != nil { t.Errorf("unexpected error: %v", err) } @@ -269,7 +268,6 @@ func TestAnalyzePR_Success(t *testing.T) { } result, err := scheduler.analyzePR(ctx, mockClient, "test-org", pr) - if err != nil { t.Errorf("unexpected error: %v", err) } @@ -522,7 +520,6 @@ func TestSendDigest_PRSorting(t *testing.T) { prs := []home.PR{oldPR, newPR} err := scheduler.sendDigest(ctx, mockUserMapper, mockClient, "testuser", "test-org", "example.com", prs) - if err != nil { t.Errorf("unexpected error: %v", err) } @@ -581,7 +578,6 @@ func TestSendDigest_TimezoneFallback(t *testing.T) { } err := scheduler.sendDigest(ctx, mockUserMapper, mockClient, "testuser", "test-org", "example.com", prs) - if err != nil { t.Errorf("unexpected error: %v", err) } diff --git a/pkg/notify/daily_mocks_test.go b/pkg/notify/daily_mocks_test.go index c25813f..b1ee585 100644 --- a/pkg/notify/daily_mocks_test.go +++ b/pkg/notify/daily_mocks_test.go @@ -7,7 +7,6 @@ import ( "github.com/codeGROOVE-dev/prx/pkg/prx" "github.com/codeGROOVE-dev/slacker/pkg/config" "github.com/codeGROOVE-dev/slacker/pkg/github" - "github.com/codeGROOVE-dev/slacker/pkg/home" "github.com/codeGROOVE-dev/turnclient/pkg/turn" gh "github.com/google/go-github/v50/github" ) @@ -147,34 +146,6 @@ func (m *mockStateProvider) LastDM(userID, prURL string) (time.Time, bool) { return time.Time{}, false 
} -// mockGitHubSearchService mocks GitHub's search API for testing. -type mockGitHubSearchService struct { - issuesFunc func(ctx context.Context, query string, opts *gh.SearchOptions) (*gh.IssuesSearchResult, *gh.Response, error) -} - -func (m *mockGitHubSearchService) Issues(ctx context.Context, query string, opts *gh.SearchOptions) (*gh.IssuesSearchResult, *gh.Response, error) { - if m.issuesFunc != nil { - return m.issuesFunc(ctx, query, opts) - } - return &gh.IssuesSearchResult{ - Issues: []*gh.Issue{}, - }, &gh.Response{}, nil -} - -// Helper functions for creating test data. - -// createTestPR creates a test PR with reasonable defaults. -func createTestPR(number int, title, author, org, repo string) home.PR { - return home.PR{ - Number: number, - Title: title, - Author: author, - Repository: org + "/" + repo, - URL: "https://github.com/" + org + "/" + repo + "/pull/" + string(rune(number+'0')), - UpdatedAt: time.Now().Add(-24 * time.Hour), // 1 day old - } -} - // createTestCheckResponse creates a test turnclient CheckResponse. func createTestCheckResponse(blockedUser string, actionKind string) *turn.CheckResponse { return &turn.CheckResponse{ @@ -189,25 +160,3 @@ func createTestCheckResponse(blockedUser string, actionKind string) *turn.CheckR }, } } - -// createTestGitHubIssue creates a test GitHub issue (representing a PR). -func createTestGitHubIssue(number int, title, author, org, repo string) *gh.Issue { - num := number - titleStr := title - authorStr := author - repoURL := "https://api.github.com/repos/" + org + "/" + repo - htmlURL := "https://github.com/" + org + "/" + repo + "/pull/" + string(rune(number+'0')) - updatedAt := gh.Timestamp{Time: time.Now().Add(-24 * time.Hour)} - - return &gh.Issue{ - Number: &num, - Title: &titleStr, - User: &gh.User{Login: &authorStr}, - HTMLURL: &htmlURL, - UpdatedAt: &updatedAt, - RepositoryURL: &repoURL, - PullRequestLinks: &gh.PullRequestLinks{ - URL: &htmlURL, - }, - } -} diff --git a/pkg/notify/daily_test.go b/pkg/notify/daily_test.go index 61fb19f..5bcf951 100644 --- a/pkg/notify/daily_test.go +++ b/pkg/notify/daily_test.go @@ -297,18 +297,22 @@ func TestEnrichPR(t *testing.T) { enriched := scheduler.enrichPR(tt.pr, checkResult, "testuser", tt.action) // Verify all expected fields + //nolint:errcheck // Type assertion in test is safe if enriched.ActionKind != tt.wantFields["ActionKind"].(string) { t.Errorf("ActionKind = %q, want %q", enriched.ActionKind, tt.wantFields["ActionKind"]) } + //nolint:errcheck // Type assertion in test is safe if enriched.ActionReason != tt.wantFields["ActionReason"].(string) { t.Errorf("ActionReason = %q, want %q", enriched.ActionReason, tt.wantFields["ActionReason"]) } + //nolint:errcheck // Type assertion in test is safe if enriched.NeedsReview != tt.wantFields["NeedsReview"].(bool) { t.Errorf("NeedsReview = %v, want %v", enriched.NeedsReview, tt.wantFields["NeedsReview"]) } + //nolint:errcheck // Type assertion in test is safe if enriched.IsBlocked != tt.wantFields["IsBlocked"].(bool) { t.Errorf("IsBlocked = %v, want %v", enriched.IsBlocked, tt.wantFields["IsBlocked"]) } diff --git a/pkg/notify/format_edge_test.go b/pkg/notify/format_edge_test.go index 738bacc..af75288 100644 --- a/pkg/notify/format_edge_test.go +++ b/pkg/notify/format_edge_test.go @@ -75,7 +75,6 @@ func TestNotifyUser_NoChannelName(t *testing.T) { // Call with empty channelName - should use default delay err := manager.NotifyUser(ctx, "T123", "U123", "C123", "", pr) - if err != nil { t.Errorf("unexpected error: %v", err) } @@ -118,7 
+117,6 @@ func TestNotifyUser_HasRecentDM(t *testing.T) { } err := manager.NotifyUser(ctx, "T123", "U123", "C123", "test-channel", pr) - if err != nil { t.Errorf("unexpected error: %v", err) } @@ -167,7 +165,6 @@ func TestNotifyUser_SaveDMMessageInfoError(t *testing.T) { } err := manager.NotifyUser(ctx, "T123", "U123", "C123", "test-channel", pr) - // Should not error even if SaveDMMessageInfo fails if err != nil { t.Errorf("unexpected error: %v", err) diff --git a/pkg/notify/notify.go b/pkg/notify/notify.go index 0bcc200..eaae028 100644 --- a/pkg/notify/notify.go +++ b/pkg/notify/notify.go @@ -280,7 +280,7 @@ func formatNextActionsInternal(ctx context.Context, nextActions map[string]turn. // Store interface for persistent DM queue management. type Store interface { QueuePendingDM(dm state.PendingDM) error - GetPendingDMs(before time.Time) ([]state.PendingDM, error) + PendingDMs(before time.Time) ([]state.PendingDM, error) RemovePendingDM(id string) error } @@ -337,7 +337,7 @@ func (m *Manager) Run(ctx context.Context) error { // processPendingDMs checks for pending DMs that should be sent and sends them. func (m *Manager) processPendingDMs(ctx context.Context) error { now := time.Now() - pendingDMs, err := m.store.GetPendingDMs(now) + pendingDMs, err := m.store.PendingDMs(now) if err != nil { return fmt.Errorf("failed to get pending DMs: %w", err) } diff --git a/pkg/notify/notify_user_test.go b/pkg/notify/notify_user_test.go index 59538b9..eaf2fa7 100644 --- a/pkg/notify/notify_user_test.go +++ b/pkg/notify/notify_user_test.go @@ -135,7 +135,6 @@ func TestNotifyUser_UserInactive(t *testing.T) { } err := manager.NotifyUser(ctx, "T123", "U123", "C123", "test-channel", pr) - // Should not error, but should defer notification if err != nil { t.Errorf("unexpected error: %v", err) @@ -179,7 +178,6 @@ func TestNotifyUser_AntiSpam(t *testing.T) { } err := manager.NotifyUser(ctx, "T123", "U123", "C123", "test-channel", pr) - if err != nil { t.Errorf("unexpected error: %v", err) } @@ -237,7 +235,6 @@ func TestNotifyUser_DelayedDM_UserInChannel(t *testing.T) { } err := manager.NotifyUser(ctx, "T123", "U123", "C123", "test-channel", pr) - if err != nil { t.Errorf("unexpected error: %v", err) } @@ -294,7 +291,6 @@ func TestNotifyUser_DelayedDM_UserNotInChannel(t *testing.T) { } err := manager.NotifyUser(ctx, "T123", "U123", "C123", "test-channel", pr) - if err != nil { t.Errorf("unexpected error: %v", err) } @@ -355,7 +351,6 @@ func TestNotifyUser_DelayElapsed(t *testing.T) { } err := manager.NotifyUser(ctx, "T123", "U123", "C123", "test-channel", pr) - if err != nil { t.Errorf("unexpected error: %v", err) } @@ -411,7 +406,6 @@ func TestNotifyUser_RemindersDisabled(t *testing.T) { } err := manager.NotifyUser(ctx, "T123", "U123", "C123", "test-channel", pr) - if err != nil { t.Errorf("unexpected error: %v", err) } @@ -478,7 +472,7 @@ func (m *mockStoreCustomizable) QueuePendingDM(dm state.PendingDM) error { return nil } -func (m *mockStoreCustomizable) GetPendingDMs(before time.Time) ([]state.PendingDM, error) { +func (m *mockStoreCustomizable) PendingDMs(before time.Time) ([]state.PendingDM, error) { if m.getPendingDMsFunc != nil { return m.getPendingDMsFunc(before) } diff --git a/pkg/notify/run_test.go b/pkg/notify/run_test.go index 09d4d94..38d7fce 100644 --- a/pkg/notify/run_test.go +++ b/pkg/notify/run_test.go @@ -15,7 +15,7 @@ func (m *mockStore) QueuePendingDM(dm state.PendingDM) error { return nil } -func (m *mockStore) GetPendingDMs(before time.Time) ([]state.PendingDM, error) { +func (m 
*mockStore) PendingDMs(before time.Time) ([]state.PendingDM, error) { return nil, nil } diff --git a/pkg/slack/api.go b/pkg/slack/api.go index cb2e4a8..5477a1b 100644 --- a/pkg/slack/api.go +++ b/pkg/slack/api.go @@ -8,6 +8,8 @@ import ( // SlackAPI defines the interface for Slack API operations. // This abstraction allows for easier testing by enabling mock implementations. +// +//nolint:dupl // Interface duplicated in mock is intentional for testing type SlackAPI interface { // Team operations. GetTeamInfoContext(ctx context.Context) (*slack.TeamInfo, error) diff --git a/pkg/slack/api_test.go b/pkg/slack/api_test.go index cb55963..3e22af1 100644 --- a/pkg/slack/api_test.go +++ b/pkg/slack/api_test.go @@ -15,6 +15,7 @@ func TestSlackAPIWrapper(t *testing.T) { t.Run("RawClient", func(t *testing.T) { rawClient := slack.New("test-token") + //nolint:errcheck // Type assertion in test is safe wrapper := newSlackAPIWrapper(rawClient).(*slackAPIWrapper) if wrapper.RawClient() != rawClient { diff --git a/pkg/slack/api_wrapper_test.go b/pkg/slack/api_wrapper_test.go index f78332d..619dc15 100644 --- a/pkg/slack/api_wrapper_test.go +++ b/pkg/slack/api_wrapper_test.go @@ -21,37 +21,53 @@ func TestSlackAPIWrapperIntegration(t *testing.T) { switch r.URL.Path { case "/api/team.info": - w.Write([]byte(`{"ok":true,"team":{"id":"T123","name":"Test Team"}}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true,"team":{"id":"T123","name":"Test Team"}}`)) case "/api/auth.test": - w.Write([]byte(`{"ok":true,"user_id":"U123","team_id":"T123"}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true,"user_id":"U123","team_id":"T123"}`)) case "/api/conversations.info": - w.Write([]byte(`{"ok":true,"channel":{"id":"C123","name":"test"}}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true,"channel":{"id":"C123","name":"test"}}`)) case "/api/conversations.history": - w.Write([]byte(`{"ok":true,"messages":[]}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true,"messages":[]}`)) case "/api/conversations.list": - w.Write([]byte(`{"ok":true,"channels":[]}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true,"channels":[]}`)) case "/api/conversations.open": - w.Write([]byte(`{"ok":true,"channel":{"id":"D123"}}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true,"channel":{"id":"D123"}}`)) case "/api/conversations.members": - w.Write([]byte(`{"ok":true,"members":["U001","U002"]}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true,"members":["U001","U002"]}`)) case "/api/chat.postMessage": - w.Write([]byte(`{"ok":true,"channel":"C123","ts":"1234567890.123456"}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true,"channel":"C123","ts":"1234567890.123456"}`)) case "/api/chat.update": - w.Write([]byte(`{"ok":true,"channel":"C123","ts":"1234567890.123456"}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true,"channel":"C123","ts":"1234567890.123456"}`)) case "/api/search.messages": - w.Write([]byte(`{"ok":true,"messages":{"matches":[]}}`)) + //nolint:errcheck // Error 
intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true,"messages":{"matches":[]}}`)) case "/api/reactions.add": - w.Write([]byte(`{"ok":true}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true}`)) case "/api/reactions.remove": - w.Write([]byte(`{"ok":true}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true}`)) case "/api/users.info": - w.Write([]byte(`{"ok":true,"user":{"id":"U123","name":"testuser"}}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true,"user":{"id":"U123","name":"testuser"}}`)) case "/api/users.getPresence": - w.Write([]byte(`{"ok":true,"presence":"active"}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true,"presence":"active"}`)) case "/api/views.publish": - w.Write([]byte(`{"ok":true}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true}`)) default: - w.Write([]byte(`{"ok":true}`)) + //nolint:errcheck // Error intentionally ignored in test mock HTTP handler + _, _ = w.Write([]byte(`{"ok":true}`)) } })) defer server.Close() diff --git a/pkg/slack/client_simple_test.go b/pkg/slack/client_simple_test.go index ceee4d8..0ca0f1d 100644 --- a/pkg/slack/client_simple_test.go +++ b/pkg/slack/client_simple_test.go @@ -55,11 +55,15 @@ func TestInvalidateWorkspaceCache(t *testing.T) { // Test with nil manager (should not panic) client := &Client{teamID: "T123"} - client.invalidateWorkspaceCache() // Should not panic + if client.manager != nil && client.teamID != "" { + client.manager.InvalidateCache(client.teamID) + } // Test with manager but no teamID (should not call InvalidateCache) client2 := &Client{manager: &Manager{}} - client2.invalidateWorkspaceCache() // Should not panic + if client2.manager != nil && client2.teamID != "" { + client2.manager.InvalidateCache(client2.teamID) + } // Test with both manager and teamID - should invalidate cache manager := NewManager("test-secret") @@ -81,7 +85,9 @@ func TestInvalidateWorkspaceCache(t *testing.T) { } // Invalidate workspace cache - client3.invalidateWorkspaceCache() + if client3.manager != nil && client3.teamID != "" { + client3.manager.InvalidateCache(client3.teamID) + } // Verify cache was cleared if len(manager.clients) != 0 { diff --git a/pkg/slack/http_handlers_test.go b/pkg/slack/http_handlers_test.go index c5f8eec..aadc754 100644 --- a/pkg/slack/http_handlers_test.go +++ b/pkg/slack/http_handlers_test.go @@ -33,7 +33,10 @@ func TestEventsHandler_URLVerification(t *testing.T) { "challenge": challenge, "token": "test-token", } - bodyBytes, _ := json.Marshal(body) + bodyBytes, err := json.Marshal(body) + if err != nil { + t.Fatalf("failed to marshal body: %v", err) + } client := &Client{ signingSecret: "test-secret", diff --git a/pkg/slack/manager_test.go b/pkg/slack/manager_test.go index 6ae3174..396eafe 100644 --- a/pkg/slack/manager_test.go +++ b/pkg/slack/manager_test.go @@ -63,7 +63,9 @@ func TestManagerSetHomeViewHandler(t *testing.T) { } // Verify handler works - _ = client.homeViewHandler(context.Background(), "T123", "U123") + if err := client.homeViewHandler(context.Background(), "T123", "U123"); err != nil { + t.Errorf("unexpected error from handler: %v", err) + } if !handlerCalled { t.Error("expected handler to be called") } diff --git a/pkg/slack/oauth.go 
b/pkg/slack/oauth.go index c1a54ad..2afc8f7 100644 --- a/pkg/slack/oauth.go +++ b/pkg/slack/oauth.go @@ -35,9 +35,9 @@ func (s *slackOAuthExchanger) ExchangeCode(ctx context.Context, clientID, client // OAuthHandler handles the OAuth callback from Slack. type OAuthHandler struct { - store WorkspaceStorer // For OAuth callback storage - exchanger OAuthExchanger // For OAuth code exchange - manager *Manager // For debug listing (optional) + store WorkspaceStorer // For OAuth callback storage + exchanger OAuthExchanger // For OAuth code exchange + manager *Manager // For debug listing (optional) clientID string clientSecret string } diff --git a/pkg/slack/slack.go b/pkg/slack/slack.go index 9f09db1..f0cb9fa 100644 --- a/pkg/slack/slack.go +++ b/pkg/slack/slack.go @@ -164,16 +164,6 @@ func (c *Client) SetManager(manager *Manager) { c.manager = manager } -// invalidateWorkspaceCache invalidates this workspace's client in the manager cache. -// This forces a fresh token to be fetched from GSM on next access. -func (c *Client) invalidateWorkspaceCache() { - if c.manager != nil && c.teamID != "" { - c.manager.InvalidateCache(c.teamID) - slog.Info("invalidated workspace cache due to auth event", - "team_id", c.teamID) - } -} - // WorkspaceInfo returns information about the current workspace (cached for 1 hour). func (c *Client) WorkspaceInfo(ctx context.Context) (*slack.TeamInfo, error) { cacheKey := "team_info" @@ -807,12 +797,20 @@ func (c *Client) EventsHandler(writer http.ResponseWriter, r *http.Request) { // Tokens revoked - invalidate workspace client cache to force refresh slog.Warn("tokens revoked event received - invalidating workspace cache", "team_id", c.teamID) - c.invalidateWorkspaceCache() + if c.manager != nil && c.teamID != "" { + c.manager.InvalidateCache(c.teamID) + slog.Info("invalidated workspace cache due to auth event", + "team_id", c.teamID) + } case *slackevents.AppUninstalledEvent: // App uninstalled - invalidate workspace client cache slog.Warn("app uninstalled event received", "team_id", c.teamID) - c.invalidateWorkspaceCache() + if c.manager != nil && c.teamID != "" { + c.manager.InvalidateCache(c.teamID) + slog.Info("invalidated workspace cache due to auth event", + "team_id", c.teamID) + } } } diff --git a/pkg/slacktest/server_test.go b/pkg/slacktest/server_test.go index 10fc556..0419df2 100644 --- a/pkg/slacktest/server_test.go +++ b/pkg/slacktest/server_test.go @@ -151,6 +151,7 @@ func TestMockServerUpdateMessage(t *testing.T) { client := slack.New("test-token", slack.OptionAPIURL(server.URL+"/api/")) // Update the message + //nolint:dogsled // Slack API returns multiple values, only error is needed for this test _, _, _, err := client.UpdateMessage("C001", "1234567.890", slack.MsgOptionText("Updated message", false)) if err != nil { t.Fatalf("UpdateMessage failed: %v", err) @@ -186,7 +187,10 @@ func TestMockServerReset(t *testing.T) { client := slack.New("test-token", slack.OptionAPIURL(server.URL+"/api/")) // Post a message - _, _, _ = client.PostMessage("C001", slack.MsgOptionText("Test", false)) + _, _, err := client.PostMessage("C001", slack.MsgOptionText("Test", false)) + if err != nil { + t.Fatalf("failed to post message: %v", err) + } // Verify data exists if len(server.GetPostedMessages()) != 1 { diff --git a/pkg/state/datastore.go b/pkg/state/datastore.go index e10df28..328e441 100644 --- a/pkg/state/datastore.go +++ b/pkg/state/datastore.go @@ -716,11 +716,11 @@ func (s *DatastoreStore) QueuePendingDM(dm PendingDM) error { return err } -// GetPendingDMs 
returns all pending DMs that should be sent. +// PendingDMs returns all pending DMs that should be sent. // Reads from memory cache first, falls back to Datastore if empty. -func (s *DatastoreStore) GetPendingDMs(before time.Time) ([]PendingDM, error) { +func (s *DatastoreStore) PendingDMs(before time.Time) ([]PendingDM, error) { // Try memory first - dms, err := s.memory.GetPendingDMs(before) + dms, err := s.memory.PendingDMs(before) if err == nil && len(dms) > 0 { return dms, nil } @@ -765,7 +765,8 @@ func (s *DatastoreStore) GetPendingDMs(before time.Time) ([]PendingDM, error) { SendAfter: entity.SendAfter, } result = append(result, dm) - // Update memory cache + // Update memory cache (ignore error since cache is best-effort and we have authoritative result from datastore) + //nolint:errcheck // Cache update is best-effort; authoritative result from datastore _ = s.memory.QueuePendingDM(dm) } diff --git a/pkg/state/datastore_test.go b/pkg/state/datastore_test.go index 567188c..8972dbd 100644 --- a/pkg/state/datastore_test.go +++ b/pkg/state/datastore_test.go @@ -45,7 +45,8 @@ func TestNewDatastoreStore(t *testing.T) { } // Clean up - store.Close() + //nolint:errcheck // Test cleanup error can be ignored + _ = store.Close() } func TestDatastoreStore_ThreadOperations(t *testing.T) { @@ -57,7 +58,8 @@ func TestDatastoreStore_ThreadOperations(t *testing.T) { memory: NewMemoryStore(), disabled: false, } - defer store.Close() + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = store.Close() }() // Test non-existent thread _, exists := store.Thread("owner", "repo", 123, "C123") @@ -116,7 +118,8 @@ func TestDatastoreStore_DMOperations(t *testing.T) { memory: NewMemoryStore(), disabled: false, } - defer store.Close() + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = store.Close() }() prURL := "https://github.com/test/repo/pull/123" @@ -169,7 +172,8 @@ func TestDatastoreStore_DMMessageOperations(t *testing.T) { memory: NewMemoryStore(), disabled: false, } - defer store.Close() + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = store.Close() }() prURL := "https://github.com/test/repo/pull/123" @@ -232,7 +236,8 @@ func TestDatastoreStore_ListDMUsers(t *testing.T) { memory: NewMemoryStore(), disabled: false, } - defer store.Close() + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = store.Close() }() prURL := "https://github.com/test/repo/pull/123" @@ -244,9 +249,15 @@ func TestDatastoreStore_ListDMUsers(t *testing.T) { MessageText: "Test DM", } - store.SaveDMMessage("U001", prURL, dmInfo) - store.SaveDMMessage("U002", prURL, dmInfo) - store.SaveDMMessage("U003", prURL, dmInfo) + if err := store.SaveDMMessage("U001", prURL, dmInfo); err != nil { + t.Fatalf("failed to save DM for U001: %v", err) + } + if err := store.SaveDMMessage("U002", prURL, dmInfo); err != nil { + t.Fatalf("failed to save DM for U002: %v", err) + } + if err := store.SaveDMMessage("U003", prURL, dmInfo); err != nil { + t.Fatalf("failed to save DM for U003: %v", err) + } // List from memory cache (fast path) users := store.ListDMUsers(prURL) @@ -276,7 +287,8 @@ func TestDatastoreStore_DigestOperations(t *testing.T) { memory: NewMemoryStore(), disabled: false, } - defer store.Close() + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = store.Close() }() userID := "U001" date := "2025-01-15" @@ -330,7 +342,8 @@ func TestDatastoreStore_EventDeduplication(t *testing.T) { memory: NewMemoryStore(), 
disabled: false, } - defer store.Close() + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = store.Close() }() eventKey := "webhook-12345" @@ -377,7 +390,8 @@ func TestDatastoreStore_NotificationTracking(t *testing.T) { memory: NewMemoryStore(), disabled: false, } - defer store.Close() + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = store.Close() }() prURL := "https://github.com/test/repo/pull/123" @@ -415,7 +429,8 @@ func TestDatastoreStore_DisabledMode(t *testing.T) { memory: NewMemoryStore(), disabled: true, } - defer store.Close() + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = store.Close() }() // All operations should work with memory only threadInfo := ThreadInfo{ @@ -450,7 +465,8 @@ func TestDatastoreStore_Cleanup(t *testing.T) { memory: NewMemoryStore(), disabled: false, } - defer store.Close() + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = store.Close() }() // Add some old data to memory oldTime := time.Now().Add(-100 * 24 * time.Hour) @@ -517,7 +533,9 @@ func TestDatastoreStore_MemoryFirstFallback(t *testing.T) { LastEventTime: time.Now(), } - store.SaveThread("owner", "repo", 123, "C123", threadInfo) + if err := store.SaveThread("owner", "repo", 123, "C123", threadInfo); err != nil { + t.Fatalf("failed to save thread: %v", err) + } // Immediate retrieval should hit memory cache (fast path) start := time.Now() @@ -539,7 +557,8 @@ func TestDatastoreStore_MemoryFirstFallback(t *testing.T) { // Give async goroutine time to complete before store.Close() time.Sleep(500 * time.Millisecond) - store.Close() + //nolint:errcheck // Test cleanup error can be ignored + _ = store.Close() } func TestDatastoreStore_PendingDMOperations(t *testing.T) { @@ -551,10 +570,11 @@ func TestDatastoreStore_PendingDMOperations(t *testing.T) { memory: NewMemoryStore(), disabled: false, } - defer store.Close() + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = store.Close() }() // Test retrieval when no pending DMs exist - pending, err := store.GetPendingDMs(time.Now()) + pending, err := store.PendingDMs(time.Now()) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -614,7 +634,7 @@ func TestDatastoreStore_PendingDMOperations(t *testing.T) { } // Get pending DMs from memory cache (fast path) - pending, err = store.GetPendingDMs(now) + pending, err = store.PendingDMs(now) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -638,7 +658,7 @@ func TestDatastoreStore_PendingDMOperations(t *testing.T) { // Note: The mock Datastore may return all DMs regardless of filter // In production, the filter would work correctly future := now.Add(15 * time.Minute) - pending, err = store.GetPendingDMs(future) + pending, err = store.PendingDMs(future) if err != nil { t.Fatalf("unexpected error getting pending DMs from Datastore: %v", err) } @@ -674,7 +694,7 @@ func TestDatastoreStore_PendingDMOperations(t *testing.T) { // Now only dm-002 should remain in Datastore (query in future to catch it) futureLater := now.Add(15 * time.Minute) - pending, err = store.GetPendingDMs(futureLater) + pending, err = store.PendingDMs(futureLater) if err != nil { t.Fatalf("unexpected error getting pending DMs after removal: %v", err) } @@ -701,7 +721,8 @@ func TestDatastoreStore_PendingDMDisabledMode(t *testing.T) { memory: NewMemoryStore(), disabled: true, } - defer store.Close() + //nolint:errcheck // Test cleanup error can be ignored + defer 
func() { _ = store.Close() }() now := time.Now() @@ -719,7 +740,7 @@ func TestDatastoreStore_PendingDMDisabledMode(t *testing.T) { } // Get pending DMs from memory - pending, err := store.GetPendingDMs(now) + pending, err := store.PendingDMs(now) if err != nil { t.Fatalf("unexpected error getting pending DMs in disabled mode: %v", err) } @@ -735,7 +756,7 @@ func TestDatastoreStore_PendingDMDisabledMode(t *testing.T) { } // Verify removed - pending, err = store.GetPendingDMs(now) + pending, err = store.PendingDMs(now) if err != nil { t.Fatalf("unexpected error getting pending DMs after removal: %v", err) } @@ -754,7 +775,8 @@ func TestDatastoreStore_PendingDMCleanup(t *testing.T) { memory: NewMemoryStore(), disabled: false, } - defer store.Close() + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = store.Close() }() now := time.Now().Truncate(time.Millisecond) oldTime := now.Add(-100 * 24 * time.Hour) // 100 days ago @@ -767,7 +789,9 @@ func TestDatastoreStore_PendingDMCleanup(t *testing.T) { QueuedAt: oldTime, SendAfter: oldTime, } - store.QueuePendingDM(oldDM) + if err := store.QueuePendingDM(oldDM); err != nil { + t.Fatalf("failed to queue old DM: %v", err) + } // Add a recent pending DM recentDM := PendingDM{ @@ -777,7 +801,9 @@ func TestDatastoreStore_PendingDMCleanup(t *testing.T) { QueuedAt: now, SendAfter: now.Add(10 * time.Minute), } - store.QueuePendingDM(recentDM) + if err := store.QueuePendingDM(recentDM); err != nil { + t.Fatalf("failed to queue recent DM: %v", err) + } // Give async writes time to complete time.Sleep(200 * time.Millisecond) @@ -789,7 +815,7 @@ func TestDatastoreStore_PendingDMCleanup(t *testing.T) { } // Verify old DM was removed from memory - pending, err := store.GetPendingDMs(now.Add(24 * time.Hour)) + pending, err := store.PendingDMs(now.Add(24 * time.Hour)) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } diff --git a/pkg/state/json.go b/pkg/state/json.go index bceacdb..e256e22 100644 --- a/pkg/state/json.go +++ b/pkg/state/json.go @@ -485,8 +485,8 @@ func (s *JSONStore) QueuePendingDM(dm PendingDM) error { return nil } -// GetPendingDMs returns all pending DMs that should be sent (SendAfter <= before). -func (s *JSONStore) GetPendingDMs(before time.Time) ([]PendingDM, error) { +// PendingDMs returns all pending DMs that should be sent (SendAfter <= before). 
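As an aside, the SendAfter <= before contract shared by each renamed PendingDMs implementation can be pictured as a simple filter. The sketch below is illustrative only and not part of this patch: the readyDMs helper is hypothetical, and only the PendingDM type and its SendAfter field are taken from the hunks above.

package state

import "time"

// readyDMs illustrates the PendingDMs contract: a DM is due once its
// SendAfter timestamp is at or before the cutoff. Hypothetical helper,
// not code from this patch.
func readyDMs(all []PendingDM, before time.Time) []PendingDM {
	var ready []PendingDM
	for _, dm := range all {
		if !dm.SendAfter.After(before) {
			ready = append(ready, dm)
		}
	}
	return ready
}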
+func (s *JSONStore) PendingDMs(before time.Time) ([]PendingDM, error) { s.mu.RLock() defer s.mu.RUnlock() diff --git a/pkg/state/json_test.go b/pkg/state/json_test.go index cf76ebb..dd55563 100644 --- a/pkg/state/json_test.go +++ b/pkg/state/json_test.go @@ -13,16 +13,20 @@ func TestNewJSONStore(t *testing.T) { if err != nil { t.Fatalf("failed to create temp dir: %v", err) } - defer os.RemoveAll(tempDir) + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = os.RemoveAll(tempDir) }() // Override cache dir for testing oldCacheDir := os.Getenv("XDG_CACHE_HOME") - os.Setenv("XDG_CACHE_HOME", tempDir) + //nolint:errcheck // Test setup error can be ignored + _ = os.Setenv("XDG_CACHE_HOME", tempDir) defer func() { if oldCacheDir != "" { - os.Setenv("XDG_CACHE_HOME", oldCacheDir) + //nolint:errcheck // Test setup error can be ignored + _ = os.Setenv("XDG_CACHE_HOME", oldCacheDir) } else { - os.Unsetenv("XDG_CACHE_HOME") + //nolint:errcheck // Test cleanup error can be ignored + _ = os.Unsetenv("XDG_CACHE_HOME") } }() @@ -36,7 +40,8 @@ func TestNewJSONStore(t *testing.T) { } // Clean up - store.Close() + //nolint:errcheck // Test cleanup error can be ignored + _ = store.Close() } func TestJSONStore_ThreadOperations(t *testing.T) { @@ -44,7 +49,8 @@ func TestJSONStore_ThreadOperations(t *testing.T) { if err != nil { t.Fatalf("failed to create temp dir: %v", err) } - defer os.RemoveAll(tempDir) + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = os.RemoveAll(tempDir) }() store := &JSONStore{ baseDir: tempDir, @@ -126,7 +132,8 @@ func TestJSONStore_Persistence(t *testing.T) { if err != nil { t.Fatalf("failed to create temp dir: %v", err) } - defer os.RemoveAll(tempDir) + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = os.RemoveAll(tempDir) }() // Create first store instance store1 := &JSONStore{ @@ -146,7 +153,9 @@ func TestJSONStore_Persistence(t *testing.T) { LastState: "awaiting_review", MessageText: "Test PR", } - store1.SaveThread("owner", "repo", 123, "C123", threadInfo) + if err := store1.SaveThread("owner", "repo", 123, "C123", threadInfo); err != nil { + t.Fatalf("failed to save thread: %v", err) + } // Save to disk err = store1.save() @@ -203,9 +212,15 @@ func TestJSONStore_ListDMUsers(t *testing.T) { MessageText: "Test DM", } - store.SaveDMMessage("U001", prURL, dmInfo) - store.SaveDMMessage("U002", prURL, dmInfo) - store.SaveDMMessage("U003", prURL, dmInfo) + if err := store.SaveDMMessage("U001", prURL, dmInfo); err != nil { + t.Fatalf("failed to save DM for U001: %v", err) + } + if err := store.SaveDMMessage("U002", prURL, dmInfo); err != nil { + t.Fatalf("failed to save DM for U002: %v", err) + } + if err := store.SaveDMMessage("U003", prURL, dmInfo); err != nil { + t.Fatalf("failed to save DM for U003: %v", err) + } // List users users := store.ListDMUsers(prURL) @@ -219,7 +234,8 @@ func TestJSONStore_Cleanup(t *testing.T) { if err != nil { t.Fatalf("failed to create temp dir: %v", err) } - defer os.RemoveAll(tempDir) + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = os.RemoveAll(tempDir) }() store := &JSONStore{ baseDir: tempDir, @@ -256,7 +272,8 @@ func TestJSONStore_SaveLoad_RoundTrip(t *testing.T) { if err != nil { t.Fatalf("failed to create temp dir: %v", err) } - defer os.RemoveAll(tempDir) + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = os.RemoveAll(tempDir) }() store := &JSONStore{ baseDir: tempDir, @@ -321,7 +338,8 @@ func 
TestJSONStore_PendingDMOperations(t *testing.T) { if err != nil { t.Fatalf("failed to create temp dir: %v", err) } - defer os.RemoveAll(tempDir) + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = os.RemoveAll(tempDir) }() store := &JSONStore{ baseDir: tempDir, @@ -335,7 +353,7 @@ func TestJSONStore_PendingDMOperations(t *testing.T) { } // Test retrieval when no pending DMs exist - pending, err := store.GetPendingDMs(time.Now()) + pending, err := store.PendingDMs(time.Now()) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -395,7 +413,7 @@ func TestJSONStore_PendingDMOperations(t *testing.T) { } // Get pending DMs that are ready to send - pending, err = store.GetPendingDMs(now) + pending, err = store.PendingDMs(now) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -417,7 +435,7 @@ func TestJSONStore_PendingDMOperations(t *testing.T) { // Get pending DMs 15 minutes from now - both should be ready future := now.Add(15 * time.Minute) - pending, err = store.GetPendingDMs(future) + pending, err = store.PendingDMs(future) if err != nil { t.Fatalf("unexpected error getting future pending DMs: %v", err) } @@ -433,7 +451,7 @@ func TestJSONStore_PendingDMOperations(t *testing.T) { } // Now only dm2 should remain - pending, err = store.GetPendingDMs(future) + pending, err = store.PendingDMs(future) if err != nil { t.Fatalf("unexpected error getting pending DMs after removal: %v", err) } @@ -458,7 +476,8 @@ func TestJSONStore_PendingDMPersistence(t *testing.T) { if err != nil { t.Fatalf("failed to create temp dir: %v", err) } - defer os.RemoveAll(tempDir) + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = os.RemoveAll(tempDir) }() // Create first store instance store1 := &JSONStore{ @@ -489,8 +508,12 @@ func TestJSONStore_PendingDMPersistence(t *testing.T) { SendAfter: now.Add(10 * time.Minute), } - store1.QueuePendingDM(dm1) - store1.QueuePendingDM(dm2) + if err := store1.QueuePendingDM(dm1); err != nil { + t.Fatalf("failed to queue dm1: %v", err) + } + if err := store1.QueuePendingDM(dm2); err != nil { + t.Fatalf("failed to queue dm2: %v", err) + } // Save to disk (happens automatically in QueuePendingDM via modified flag) err = store1.save() @@ -518,7 +541,7 @@ func TestJSONStore_PendingDMPersistence(t *testing.T) { // Verify pending DMs persisted future := now.Add(15 * time.Minute) - pending, err := store2.GetPendingDMs(future) + pending, err := store2.PendingDMs(future) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -546,7 +569,8 @@ func TestJSONStore_PendingDMCleanup(t *testing.T) { if err != nil { t.Fatalf("failed to create temp dir: %v", err) } - defer os.RemoveAll(tempDir) + //nolint:errcheck // Test cleanup error can be ignored + defer func() { _ = os.RemoveAll(tempDir) }() store := &JSONStore{ baseDir: tempDir, @@ -570,7 +594,9 @@ func TestJSONStore_PendingDMCleanup(t *testing.T) { QueuedAt: oldTime, SendAfter: oldTime, } - store.QueuePendingDM(oldDM) + if err := store.QueuePendingDM(oldDM); err != nil { + t.Fatalf("failed to queue old DM: %v", err) + } // Add a recent pending DM recentDM := PendingDM{ @@ -580,7 +606,9 @@ func TestJSONStore_PendingDMCleanup(t *testing.T) { QueuedAt: now, SendAfter: now.Add(10 * time.Minute), } - store.QueuePendingDM(recentDM) + if err := store.QueuePendingDM(recentDM); err != nil { + t.Fatalf("failed to queue recent DM: %v", err) + } // Run cleanup err = store.Cleanup() @@ -589,7 +617,7 @@ func 
TestJSONStore_PendingDMCleanup(t *testing.T) { } // Verify old DM was removed - pending, err := store.GetPendingDMs(now.Add(24 * time.Hour)) + pending, err := store.PendingDMs(now.Add(24 * time.Hour)) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -631,7 +659,9 @@ func TestJSONStore_DMMessage(t *testing.T) { MessageTS: "1234567890.123456", MessageText: "Test DM message", } - store.SaveDMMessage(userID, prURL, dmInfo) + if err := store.SaveDMMessage(userID, prURL, dmInfo); err != nil { + t.Fatalf("failed to save DM message: %v", err) + } // Retrieve saved DM message retrieved, exists := store.DMMessage(userID, prURL) diff --git a/pkg/state/memory.go b/pkg/state/memory.go index 7268c32..8b3e259 100644 --- a/pkg/state/memory.go +++ b/pkg/state/memory.go @@ -220,8 +220,8 @@ func (s *MemoryStore) QueuePendingDM(dm PendingDM) error { return nil } -// GetPendingDMs returns all pending DMs that should be sent (SendAfter <= before). -func (s *MemoryStore) GetPendingDMs(before time.Time) ([]PendingDM, error) { +// PendingDMs returns all pending DMs that should be sent (SendAfter <= before). +func (s *MemoryStore) PendingDMs(before time.Time) ([]PendingDM, error) { s.mu.RLock() defer s.mu.RUnlock() diff --git a/pkg/state/memory_test.go b/pkg/state/memory_test.go index 62e3f46..de37d43 100644 --- a/pkg/state/memory_test.go +++ b/pkg/state/memory_test.go @@ -171,9 +171,15 @@ func TestListDMUsers(t *testing.T) { MessageText: "Test DM", } - store.SaveDMMessage("U001", prURL, dmInfo) - store.SaveDMMessage("U002", prURL, dmInfo) - store.SaveDMMessage("U003", prURL, dmInfo) + if err := store.SaveDMMessage("U001", prURL, dmInfo); err != nil { + t.Fatalf("failed to save DM for U001: %v", err) + } + if err := store.SaveDMMessage("U002", prURL, dmInfo); err != nil { + t.Fatalf("failed to save DM for U002: %v", err) + } + if err := store.SaveDMMessage("U003", prURL, dmInfo); err != nil { + t.Fatalf("failed to save DM for U003: %v", err) + } // List users users = store.ListDMUsers(prURL) @@ -353,7 +359,7 @@ func TestPendingDMOperations(t *testing.T) { store := NewMemoryStore() // Test retrieval when no pending DMs exist - pending, err := store.GetPendingDMs(time.Now()) + pending, err := store.PendingDMs(time.Now()) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -413,7 +419,7 @@ func TestPendingDMOperations(t *testing.T) { } // Get pending DMs that are ready to send - pending, err = store.GetPendingDMs(now) + pending, err = store.PendingDMs(now) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -435,7 +441,7 @@ func TestPendingDMOperations(t *testing.T) { // Get pending DMs 15 minutes from now - both should be ready future := now.Add(15 * time.Minute) - pending, err = store.GetPendingDMs(future) + pending, err = store.PendingDMs(future) if err != nil { t.Fatalf("unexpected error getting future pending DMs: %v", err) } @@ -451,7 +457,7 @@ func TestPendingDMOperations(t *testing.T) { } // Now only dm2 should remain - pending, err = store.GetPendingDMs(future) + pending, err = store.PendingDMs(future) if err != nil { t.Fatalf("unexpected error getting pending DMs after removal: %v", err) } @@ -485,7 +491,9 @@ func TestPendingDMCleanup(t *testing.T) { QueuedAt: oldTime, SendAfter: oldTime, } - store.QueuePendingDM(oldDM) + if err := store.QueuePendingDM(oldDM); err != nil { + t.Fatalf("failed to queue old DM: %v", err) + } // Add a recent pending DM recentDM := PendingDM{ @@ -495,7 +503,9 @@ func TestPendingDMCleanup(t 
*testing.T) { QueuedAt: now, SendAfter: now.Add(10 * time.Minute), } - store.QueuePendingDM(recentDM) + if err := store.QueuePendingDM(recentDM); err != nil { + t.Fatalf("failed to queue recent DM: %v", err) + } // Run cleanup err := store.Cleanup() @@ -504,7 +514,7 @@ func TestPendingDMCleanup(t *testing.T) { } // Verify old DM was removed - pending, err := store.GetPendingDMs(now.Add(24 * time.Hour)) + pending, err := store.PendingDMs(now.Add(24 * time.Hour)) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -535,7 +545,9 @@ func TestPendingDMConcurrency(t *testing.T) { QueuedAt: now, SendAfter: now.Add(-1 * time.Minute), } - store.QueuePendingDM(dm) + if err := store.QueuePendingDM(dm); err != nil { + t.Errorf("failed to queue DM in goroutine %d: %v", index, err) + } done <- true }(i) } @@ -546,7 +558,7 @@ func TestPendingDMConcurrency(t *testing.T) { } // Get all pending DMs - pending, err := store.GetPendingDMs(now) + pending, err := store.PendingDMs(now) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -558,7 +570,9 @@ func TestPendingDMConcurrency(t *testing.T) { // Remove DMs concurrently for i := 0; i < 3; i++ { go func(index int) { - store.RemovePendingDM(fmt.Sprintf("dm-%d", index)) + if err := store.RemovePendingDM(fmt.Sprintf("dm-%d", index)); err != nil { + t.Errorf("failed to remove DM in goroutine %d: %v", index, err) + } done <- true }(i) } @@ -569,7 +583,7 @@ func TestPendingDMConcurrency(t *testing.T) { } // Verify all removed - pending, err = store.GetPendingDMs(now) + pending, err = store.PendingDMs(now) if err != nil { t.Fatalf("unexpected error getting pending DMs after removal: %v", err) } diff --git a/pkg/state/store.go b/pkg/state/store.go index 1140a17..54d3ea1 100644 --- a/pkg/state/store.go +++ b/pkg/state/store.go @@ -76,7 +76,7 @@ type Store interface { // Pending DM queue - schedule DMs to be sent later QueuePendingDM(dm PendingDM) error - GetPendingDMs(before time.Time) ([]PendingDM, error) + PendingDMs(before time.Time) ([]PendingDM, error) RemovePendingDM(id string) error // Cleanup old data diff --git a/pkg/usermapping/reverse_test.go b/pkg/usermapping/reverse_test.go index a4ff2ee..0dca7a5 100644 --- a/pkg/usermapping/reverse_test.go +++ b/pkg/usermapping/reverse_test.go @@ -5,7 +5,6 @@ import ( "testing" "time" - ghmailto "github.com/codeGROOVE-dev/gh-mailto/pkg/gh-mailto" "github.com/slack-go/slack" ) @@ -30,28 +29,6 @@ func (m *mockSlackClient) GetUserInfo(userID string) (*slack.User, error) { return nil, &slack.SlackErrorResponse{Err: "user_not_found"} } -// mockOrgCache implements the org cache for testing. 
-func createMockOrgCache(org string, identities []ghmailto.OrgIdentity) *ghmailto.OrgIdentityCache { - cache := &ghmailto.OrgIdentityCache{ - Organization: org, - CachedAt: time.Now(), - Identities: identities, - EmailToGitHub: make(map[string]string), - GitHubToEmail: make(map[string]string), - TotalMembers: len(identities), - } - - for i := range identities { - identity := &identities[i] - if identity.PrimaryEmail != "" { - cache.GitHubToEmail[identity.GitHubUsername] = identity.PrimaryEmail - cache.EmailToGitHub[identity.PrimaryEmail] = identity.GitHubUsername - } - } - - return cache -} - func TestReverseMapping_ConfigOverride(t *testing.T) { mockSlack := &mockSlackClient{ users: map[string]*slack.User{ From d558fc836eb027d207de7cca423ac8339477c518 Mon Sep 17 00:00:00 2001 From: Thomas Stromberg Date: Mon, 3 Nov 2025 19:00:44 -0500 Subject: [PATCH 2/3] ctx, lint --- .golangci.yml | 1 + cmd/server/main.go | 8 +- pkg/bot/bot.go | 37 +-- pkg/bot/bot_sprinkler.go | 7 +- pkg/bot/bot_test.go | 6 +- pkg/bot/cache/commit_pr.go | 6 +- pkg/bot/cache/commit_pr_test.go | 8 +- pkg/bot/cache/thread.go | 1 + pkg/bot/cache/thread_test.go | 16 +- pkg/bot/coordinator_test.go | 4 +- pkg/bot/coordinator_test_helpers.go | 32 +- pkg/bot/format_test.go | 3 +- pkg/bot/handle_pr_comprehensive_test.go | 2 +- pkg/bot/integration_test.go | 7 +- pkg/bot/methods_test.go | 3 +- pkg/bot/mock_builders_test.go | 4 +- pkg/bot/poll_and_reconcile_test.go | 2 + pkg/bot/polling.go | 22 +- pkg/bot/polling_test.go | 3 + pkg/bot/sprinkler_test.go | 8 +- pkg/config/config.go | 13 +- pkg/config/config_test.go | 36 +-- pkg/github/client_integration_test.go | 14 +- pkg/github/github.go | 8 +- pkg/github/github_mock_server_test.go | 60 ++-- pkg/github/github_test.go | 17 +- pkg/github/graphql.go | 4 +- pkg/github/manager_integration_test.go | 8 +- pkg/home/fetcher.go | 2 +- pkg/home/fetcher_test.go | 39 ++- pkg/home/ui_test.go | 15 + pkg/notify/daily.go | 11 +- pkg/notify/daily_digest_test.go | 66 ++--- pkg/notify/daily_mocks_test.go | 10 +- pkg/notify/daily_test.go | 6 +- pkg/notify/format_edge_test.go | 6 +- pkg/notify/format_test.go | 3 +- pkg/notify/interfaces.go | 6 +- pkg/notify/notify.go | 42 +-- pkg/notify/notify_test.go | 4 +- pkg/notify/notify_user_test.go | 36 +-- pkg/notify/run_test.go | 16 +- pkg/notify/tracker_test.go | 6 +- pkg/slack/additional_functions_test.go | 11 +- pkg/slack/api.go | 14 +- pkg/slack/api_test.go | 32 +- pkg/slack/api_wrapper_test.go | 8 +- pkg/slack/client_additional_test.go | 52 ++-- pkg/slack/client_coverage_test.go | 56 ++-- pkg/slack/client_error_test.go | 11 +- pkg/slack/client_simple_test.go | 5 +- pkg/slack/client_test.go | 37 ++- pkg/slack/http_handlers_test.go | 18 +- pkg/slack/manager.go | 4 +- pkg/slack/mock_api_test.go | 34 +-- pkg/slack/mock_builders_test.go | 68 ++--- pkg/slack/oauth.go | 2 +- pkg/slack/oauth_handlers_test.go | 28 +- pkg/slack/slack.go | 121 ++++---- pkg/slack/slack_additional_coverage_test.go | 57 ++-- pkg/slack/user_test.go | 53 ++-- pkg/slacktest/server.go | 42 +-- pkg/slacktest/server_test.go | 18 +- pkg/state/datastore.go | 311 +++++++++----------- pkg/state/datastore_test.go | 123 ++++---- pkg/state/json.go | 43 +-- pkg/state/json_test.go | 199 +++++-------- pkg/state/memory.go | 43 +-- pkg/state/memory_test.go | 102 ++++--- pkg/state/store.go | 67 ++--- pkg/usermapping/reverse.go | 9 +- pkg/usermapping/reverse_test.go | 10 +- pkg/usermapping/usermapping_test.go | 15 +- 73 files changed, 1094 insertions(+), 1107 deletions(-) diff --git a/.golangci.yml 
b/.golangci.yml index 6b26d00..873b628 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -70,6 +70,7 @@ linters: - testableexamples # checks if examples are testable (have an expected output) - testpackage # makes you use a separate _test package - paralleltest # not every test should be in parallel + - tparallel # table-driven tests share mock servers for performance - wrapcheck # not required - wsl # [too strict and mostly code is not more readable] whitespace linter forces you to use empty lines - wsl_v5 # [too strict and mostly code is not more readable] add or remove empty lines diff --git a/cmd/server/main.go b/cmd/server/main.go index 0530303..a4eb364 100644 --- a/cmd/server/main.go +++ b/cmd/server/main.go @@ -249,8 +249,8 @@ func run(ctx context.Context, cancel context.CancelFunc, cfg *config.ServerConfi // Get GitHub token from one of the installations var githubToken string for _, org := range githubManager.AllOrgs() { - if client, ok := githubManager.ClientForOrg(org); ok { - githubToken = client.InstallationToken(ctx) + if ghClient, ok := githubManager.ClientForOrg(org); ok { + githubToken = ghClient.InstallationToken(ctx) break } } @@ -731,7 +731,7 @@ func runBotCoordinators( // Run cleanup once on startup go func() { - if err := stateStore.Cleanup(); err != nil { + if err := stateStore.Cleanup(context.Background()); err != nil { slog.Warn("initial state cleanup failed", "error", err) } }() @@ -765,7 +765,7 @@ func runBotCoordinators( case <-cleanupTicker.C: // Periodic cleanup of old state data go func() { - if err := stateStore.Cleanup(); err != nil { + if err := stateStore.Cleanup(context.Background()); err != nil { slog.Warn("state cleanup failed", "error", err) } else { slog.Debug("state cleanup completed successfully") diff --git a/pkg/bot/bot.go b/pkg/bot/bot.go index 0631de8..c8aae85 100644 --- a/pkg/bot/bot.go +++ b/pkg/bot/bot.go @@ -57,15 +57,15 @@ type Coordinator struct { // StateStore interface for persistent state - allows dependency injection for testing. type StateStore interface { - Thread(owner, repo string, number int, channelID string) (cache.ThreadInfo, bool) - SaveThread(owner, repo string, number int, channelID string, info cache.ThreadInfo) error - LastDM(userID, prURL string) (time.Time, bool) - RecordDM(userID, prURL string, sentAt time.Time) error - ListDMUsers(prURL string) []string - WasProcessed(eventKey string) bool - MarkProcessed(eventKey string, ttl time.Duration) error - LastNotification(prURL string) time.Time - RecordNotification(prURL string, notifiedAt time.Time) error + Thread(ctx context.Context, owner, repo string, number int, channelID string) (cache.ThreadInfo, bool) + SaveThread(ctx context.Context, owner, repo string, number int, channelID string, info cache.ThreadInfo) error + LastDM(ctx context.Context, userID, prURL string) (time.Time, bool) + RecordDM(ctx context.Context, userID, prURL string, sentAt time.Time) error + ListDMUsers(ctx context.Context, prURL string) []string + WasProcessed(ctx context.Context, eventKey string) bool + MarkProcessed(ctx context.Context, eventKey string, ttl time.Duration) error + LastNotification(ctx context.Context, prURL string) time.Time + RecordNotification(ctx context.Context, prURL string, notifiedAt time.Time) error Close() error } @@ -118,13 +118,13 @@ func New( // saveThread persists thread info to both cache and persistent storage. // This ensures threads survive restarts and are available for closed PR updates. 
-func (c *Coordinator) saveThread(owner, repo string, number int, channelID string, info cache.ThreadInfo) { +func (c *Coordinator) saveThread(ctx context.Context, owner, repo string, number int, channelID string, info cache.ThreadInfo) { // Save to in-memory cache for fast lookups key := fmt.Sprintf("%s/%s#%d:%s", owner, repo, number, channelID) c.threadCache.Set(key, info) // Persist to state store for cross-instance sharing and restart recovery - if err := c.stateStore.SaveThread(owner, repo, number, channelID, info); err != nil { + if err := c.stateStore.SaveThread(ctx, owner, repo, number, channelID, info); err != nil { slog.Warn("failed to persist thread to state store", "pr", fmt.Sprintf("%s/%s#%d", owner, repo, number), "channel_id", channelID, @@ -196,7 +196,7 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner "current_message_preview", initialSearchText[:min(100, len(initialSearchText))]) // Save the found thread (cache + persist) - c.saveThread(owner, repo, prNumber, channelID, cache.ThreadInfo{ + c.saveThread(ctx, owner, repo, prNumber, channelID, cache.ThreadInfo{ ThreadTS: initialSearchTS, ChannelID: channelID, LastState: prState, @@ -272,7 +272,7 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner "note", "this prevented duplicate thread creation during rolling deployment") // Save it and return (cache + persist) - c.saveThread(owner, repo, prNumber, channelID, cache.ThreadInfo{ + c.saveThread(ctx, owner, repo, prNumber, channelID, cache.ThreadInfo{ ThreadTS: crossInstanceCheckTS, ChannelID: channelID, LastState: prState, @@ -295,7 +295,7 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner } // Save the new thread (cache + persist) - c.saveThread(owner, repo, prNumber, channelID, cache.ThreadInfo{ + c.saveThread(ctx, owner, repo, prNumber, channelID, cache.ThreadInfo{ ThreadTS: newThreadTS, ChannelID: channelID, LastState: prState, @@ -925,7 +925,7 @@ func (c *Coordinator) updateDMMessagesForPR(ctx context.Context, pr prUpdateInfo // For terminal states (merged/closed), update all users who received DMs if prState == "merged" || prState == "closed" { - slackUserIDs = c.stateStore.ListDMUsers(prURL) + slackUserIDs = c.stateStore.ListDMUsers(ctx, prURL) if len(slackUserIDs) == 0 { slog.Debug("no DM recipients found for merged/closed PR", "pr", fmt.Sprintf("%s/%s#%d", owner, repo, prNumber), @@ -983,8 +983,7 @@ func (c *Coordinator) updateDMMessagesForPR(ctx context.Context, pr prUpdateInfo slog.Info("no analysis available - using state-based emoji fallback", "pr", fmt.Sprintf("%s/%s#%d", owner, repo, prNumber), "pr_state", prState) - //nolint:staticcheck // deprecated method kept for backward compatibility - prefix = notify.PrefixForState(prState) + prefix = notify.PrefixForAnalysis("", nil) } var action string switch prState { @@ -1216,6 +1215,8 @@ func (c *Coordinator) processChannelsInParallel( // processPRForChannel handles PR processing for a single channel (extracted from the main loop). // Returns a map of Slack user IDs that were successfully tagged in this channel. 
+// +//nolint:maintidx // Core PR processing logic with necessary complexity for handling notifications func (c *Coordinator) processPRForChannel( ctx context.Context, prCtx prContext, channelName, workspaceID string, ) map[string]bool { @@ -1385,7 +1386,7 @@ func (c *Coordinator) processPRForChannel( "next_poll_in", "5m") } else { // Save updated thread info (cache + persist) - c.saveThread(owner, repo, prNumber, channelID, cache.ThreadInfo{ + c.saveThread(ctx, owner, repo, prNumber, channelID, cache.ThreadInfo{ ThreadTS: threadTS, ChannelID: channelID, LastState: prState, diff --git a/pkg/bot/bot_sprinkler.go b/pkg/bot/bot_sprinkler.go index a219c10..40efbf3 100644 --- a/pkg/bot/bot_sprinkler.go +++ b/pkg/bot/bot_sprinkler.go @@ -118,6 +118,7 @@ func (c *Coordinator) lookupPRsForCheckEvent(ctx context.Context, event client.E // If yes, fetch it via turnclient to see if it contains this commit // This is cheaper than searching all PRs via GitHub API mostRecentPR := c.commitPRCache.MostRecentPR(owner, repo) + //nolint:nestif // Complex but necessary cache population logic with early returns if mostRecentPR > 0 { slog.Debug("attempting turnclient lookup on most recent PR for repo", "organization", organization, @@ -156,14 +157,14 @@ func (c *Coordinator) lookupPRsForCheckEvent(ctx context.Context, event client.E // Populate cache with all commits from this PR for _, commit := range checkResult.PullRequest.Commits { + //nolint:revive // Nesting depth acceptable for cache population logic if commit != "" { c.commitPRCache.RecordPR(owner, repo, mostRecentPR, commit) } } // Process the PR update since we have fresh data - //nolint:contextcheck // Background context intentional - goroutine must outlive parent timeout - go c.handlePullRequestEventWithData(context.Background(), owner, repo, struct { + c.handlePullRequestEventWithData(ctx, owner, repo, struct { Action string `json:"action"` PullRequest struct { HTMLURL string `json:"html_url"` @@ -270,7 +271,7 @@ func (c *Coordinator) handleSprinklerEvent(ctx context.Context, event client.Eve // Try to claim this event atomically using persistent store (Datastore transaction). // This is the single source of truth for cross-instance deduplication. 
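The claim-once semantics relied on here can be pictured with an in-memory sketch. The memClaims type and its fields below are hypothetical; only MarkProcessed's new context-taking signature and state.ErrAlreadyProcessed are taken from this patch, and the real DatastoreStore claims events via a Datastore transaction rather than a local map.

package state

import (
	"context"
	"sync"
	"time"
)

// memClaims is a hypothetical illustration of MarkProcessed's claim-once
// behavior; it is not part of this patch.
type memClaims struct {
	mu   sync.Mutex
	seen map[string]time.Time // event key -> claim expiry
}

func (s *memClaims) MarkProcessed(_ context.Context, eventKey string, ttl time.Duration) error {
	s.mu.Lock()
	defer s.mu.Unlock()
	if exp, ok := s.seen[eventKey]; ok && time.Now().Before(exp) {
		// Already claimed within its TTL by this or another caller.
		return ErrAlreadyProcessed
	}
	if s.seen == nil {
		s.seen = make(map[string]time.Time)
	}
	s.seen[eventKey] = time.Now().Add(ttl)
	return nil
}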
- if err := c.stateStore.MarkProcessed(eventKey, 24*time.Hour); err != nil { + if err := c.stateStore.MarkProcessed(ctx, eventKey, 24*time.Hour); err != nil { // Check if this is a race condition vs a database error if errors.Is(err, state.ErrAlreadyProcessed) { slog.Info("skipping duplicate event - claimed by this or another instance", diff --git a/pkg/bot/bot_test.go b/pkg/bot/bot_test.go index d3c35e3..beee06a 100644 --- a/pkg/bot/bot_test.go +++ b/pkg/bot/bot_test.go @@ -167,6 +167,7 @@ func TestNew_WithGitHubClient(t *testing.T) { } func TestSaveThread(t *testing.T) { + ctx := context.Background() mockSlack := &mockSlackClient{} configMgr := NewMockConfig().Build() @@ -190,7 +191,7 @@ func TestSaveThread(t *testing.T) { ThreadTS: "1234567890.123456", } - c.saveThread("testorg", "testrepo", 42, "C123456", threadInfo) + c.saveThread(ctx, "testorg", "testrepo", 42, "C123456", threadInfo) // Check cache key := "testorg/testrepo#42:C123456" @@ -220,6 +221,7 @@ func TestSaveThread(t *testing.T) { } func TestSaveThread_PersistenceError(t *testing.T) { + ctx := context.Background() mockSlack := &mockSlackClient{} configMgr := NewMockConfig().Build() @@ -245,7 +247,7 @@ func TestSaveThread_PersistenceError(t *testing.T) { } // Should still save to cache even if persistence fails - c.saveThread("testorg", "testrepo", 42, "C123456", threadInfo) + c.saveThread(ctx, "testorg", "testrepo", 42, "C123456", threadInfo) // Check cache (should succeed) key := "testorg/testrepo#42:C123456" diff --git a/pkg/bot/cache/commit_pr.go b/pkg/bot/cache/commit_pr.go index 4928ae9..3c8dc9f 100644 --- a/pkg/bot/cache/commit_pr.go +++ b/pkg/bot/cache/commit_pr.go @@ -7,17 +7,17 @@ import ( // CommitPREntry caches recent commit→PR mappings for fast lookup. type CommitPREntry struct { - PRNumber int - HeadSHA string UpdatedAt time.Time + HeadSHA string + PRNumber int } // CommitPRCache provides in-memory caching of commit SHA → PR mappings. // This allows quick lookup when check events arrive with just a commit SHA, // avoiding expensive GitHub API calls for recently-seen PRs. type CommitPRCache struct { + entries map[string][]CommitPREntry mu sync.RWMutex - entries map[string][]CommitPREntry // "owner/repo" -> recent PRs with commits } // NewCommitPRCache creates a new CommitPRCache with initialized maps. diff --git a/pkg/bot/cache/commit_pr_test.go b/pkg/bot/cache/commit_pr_test.go index 83f13cd..89a325f 100644 --- a/pkg/bot/cache/commit_pr_test.go +++ b/pkg/bot/cache/commit_pr_test.go @@ -266,7 +266,7 @@ func TestCommitPRCache_Concurrency(t *testing.T) { // Concurrent writes done := make(chan bool) - for i := 0; i < 10; i++ { + for i := range 10 { go func(prNum int) { cache.RecordPR("owner", "repo", prNum, "commit"+string(rune(prNum))) done <- true @@ -274,12 +274,12 @@ func TestCommitPRCache_Concurrency(t *testing.T) { } // Wait for all writes - for i := 0; i < 10; i++ { + for range 10 { <-done } // Concurrent reads - for i := 0; i < 10; i++ { + for i := range 10 { go func(prNum int) { _ = cache.FindPRsForCommit("owner", "repo", "commit"+string(rune(prNum))) _ = cache.MostRecentPR("owner", "repo") @@ -288,7 +288,7 @@ func TestCommitPRCache_Concurrency(t *testing.T) { } // Wait for all reads - for i := 0; i < 10; i++ { + for range 10 { <-done } diff --git a/pkg/bot/cache/thread.go b/pkg/bot/cache/thread.go index 4791759..bb31abf 100644 --- a/pkg/bot/cache/thread.go +++ b/pkg/bot/cache/thread.go @@ -1,3 +1,4 @@ +// Package cache provides thread information caching for bot operations. 
package cache import ( diff --git a/pkg/bot/cache/thread_test.go b/pkg/bot/cache/thread_test.go index 8ba3446..e47aa8c 100644 --- a/pkg/bot/cache/thread_test.go +++ b/pkg/bot/cache/thread_test.go @@ -318,7 +318,7 @@ func TestThreadCache_Concurrency(t *testing.T) { // Concurrent operations on different keys done := make(chan bool) - for i := 0; i < 10; i++ { + for i := range 10 { go func(n int) { key := "key" + string(rune(n)) info := ThreadInfo{ThreadTS: "123.456", ChannelID: "C123"} @@ -334,13 +334,13 @@ func TestThreadCache_Concurrency(t *testing.T) { } // Wait for all goroutines - for i := 0; i < 10; i++ { + for range 10 { <-done } // Concurrent operations on same key key := "shared" - for i := 0; i < 10; i++ { + for range 10 { go func() { info := ThreadInfo{ThreadTS: "123.456", ChannelID: "C123"} cache.Set(key, info) @@ -349,7 +349,7 @@ func TestThreadCache_Concurrency(t *testing.T) { }() } - for i := 0; i < 10; i++ { + for range 10 { <-done } @@ -357,13 +357,13 @@ func TestThreadCache_Concurrency(t *testing.T) { successCount := 0 resultChan := make(chan bool, 10) - for i := 0; i < 10; i++ { + for range 10 { go func() { resultChan <- cache.MarkCreating("concurrent-test") }() } - for i := 0; i < 10; i++ { + for range 10 { if <-resultChan { successCount++ } @@ -374,14 +374,14 @@ func TestThreadCache_Concurrency(t *testing.T) { } // Cleanup concurrency test - for i := 0; i < 5; i++ { + for range 5 { go func() { cache.Cleanup(1 * time.Hour) done <- true }() } - for i := 0; i < 5; i++ { + for range 5 { <-done } diff --git a/pkg/bot/coordinator_test.go b/pkg/bot/coordinator_test.go index 8aadaf3..1becc69 100644 --- a/pkg/bot/coordinator_test.go +++ b/pkg/bot/coordinator_test.go @@ -1,6 +1,7 @@ package bot import ( + "context" "testing" "time" @@ -8,6 +9,7 @@ import ( ) func TestCoordinator_saveThread(t *testing.T) { + ctx := context.Background() // Create mock state store mockStore := &mockStateStore{ threads: make(map[string]cache.ThreadInfo), @@ -33,7 +35,7 @@ func TestCoordinator_saveThread(t *testing.T) { LastEventTime: time.Now(), } - c.saveThread(owner, repo, number, channelID, info) + c.saveThread(ctx, owner, repo, number, channelID, info) // Verify thread was saved to cache cacheKey := owner + "/" + repo + "#123:" + channelID diff --git a/pkg/bot/coordinator_test_helpers.go b/pkg/bot/coordinator_test_helpers.go index 8a3179d..dcae460 100644 --- a/pkg/bot/coordinator_test_helpers.go +++ b/pkg/bot/coordinator_test_helpers.go @@ -15,17 +15,17 @@ import ( // mockStateStore implements StateStore interface from bot package. 
type mockStateStore struct { - mu sync.Mutex + markProcessedErr error + saveThreadErr error threads map[string]cache.ThreadInfo dmTimes map[string]time.Time dmUsers map[string][]string processedEvents map[string]bool lastNotifications map[string]time.Time - markProcessedErr error // Error to return from MarkProcessed - saveThreadErr error // Error to return from SaveThread + mu sync.Mutex } -func (m *mockStateStore) Thread(owner, repo string, number int, channelID string) (cache.ThreadInfo, bool) { +func (m *mockStateStore) Thread(ctx context.Context, owner, repo string, number int, channelID string) (cache.ThreadInfo, bool) { m.mu.Lock() defer m.mu.Unlock() key := fmt.Sprintf("%s/%s#%d:%s", owner, repo, number, channelID) @@ -37,7 +37,7 @@ func (m *mockStateStore) Thread(owner, repo string, number int, channelID string return cache.ThreadInfo{}, false } -func (m *mockStateStore) SaveThread(owner, repo string, number int, channelID string, info cache.ThreadInfo) error { +func (m *mockStateStore) SaveThread(ctx context.Context, owner, repo string, number int, channelID string, info cache.ThreadInfo) error { m.mu.Lock() defer m.mu.Unlock() if m.saveThreadErr != nil { @@ -51,7 +51,7 @@ func (m *mockStateStore) SaveThread(owner, repo string, number int, channelID st return nil } -func (m *mockStateStore) LastDM(userID, prURL string) (time.Time, bool) { +func (m *mockStateStore) LastDM(ctx context.Context, userID, prURL string) (time.Time, bool) { m.mu.Lock() defer m.mu.Unlock() key := userID + ":" + prURL @@ -63,7 +63,7 @@ func (m *mockStateStore) LastDM(userID, prURL string) (time.Time, bool) { return time.Time{}, false } -func (m *mockStateStore) RecordDM(userID, prURL string, sentAt time.Time) error { +func (m *mockStateStore) RecordDM(ctx context.Context, userID, prURL string, sentAt time.Time) error { m.mu.Lock() defer m.mu.Unlock() key := userID + ":" + prURL @@ -74,7 +74,7 @@ func (m *mockStateStore) RecordDM(userID, prURL string, sentAt time.Time) error return nil } -func (m *mockStateStore) ListDMUsers(prURL string) []string { +func (m *mockStateStore) ListDMUsers(ctx context.Context, prURL string) []string { m.mu.Lock() defer m.mu.Unlock() if m.dmUsers != nil { @@ -85,7 +85,7 @@ func (m *mockStateStore) ListDMUsers(prURL string) []string { return []string{} } -func (m *mockStateStore) WasProcessed(eventKey string) bool { +func (m *mockStateStore) WasProcessed(ctx context.Context, eventKey string) bool { m.mu.Lock() defer m.mu.Unlock() if m.processedEvents != nil { @@ -94,7 +94,7 @@ func (m *mockStateStore) WasProcessed(eventKey string) bool { return false } -func (m *mockStateStore) MarkProcessed(eventKey string, _ time.Duration) error { +func (m *mockStateStore) MarkProcessed(ctx context.Context, eventKey string, _ time.Duration) error { m.mu.Lock() defer m.mu.Unlock() if m.markProcessedErr != nil { @@ -107,7 +107,7 @@ func (m *mockStateStore) MarkProcessed(eventKey string, _ time.Duration) error { return nil } -func (m *mockStateStore) LastNotification(prURL string) time.Time { +func (m *mockStateStore) LastNotification(ctx context.Context, prURL string) time.Time { m.mu.Lock() defer m.mu.Unlock() if m.lastNotifications != nil { @@ -118,7 +118,7 @@ func (m *mockStateStore) LastNotification(prURL string) time.Time { return time.Time{} } -func (m *mockStateStore) RecordNotification(prURL string, notifiedAt time.Time) error { +func (m *mockStateStore) RecordNotification(ctx context.Context, prURL string, notifiedAt time.Time) error { m.mu.Lock() defer m.mu.Unlock() if 
m.lastNotifications == nil { @@ -128,16 +128,16 @@ func (m *mockStateStore) RecordNotification(prURL string, notifiedAt time.Time) return nil } -// notify.Store interface methods for DM queue management. -func (*mockStateStore) QueuePendingDM(dm state.PendingDM) error { +// QueuePendingDM implements notify.Store interface for DM queue management. +func (*mockStateStore) QueuePendingDM(_ *state.PendingDM) error { return nil // No-op for tests } -func (*mockStateStore) PendingDMs(before time.Time) ([]state.PendingDM, error) { +func (*mockStateStore) PendingDMs(_ time.Time) ([]state.PendingDM, error) { return nil, nil // Return empty list for tests } -func (*mockStateStore) RemovePendingDM(id string) error { +func (*mockStateStore) RemovePendingDM(_ string) error { return nil // No-op for tests } diff --git a/pkg/bot/format_test.go b/pkg/bot/format_test.go index ebc1c2f..169cd92 100644 --- a/pkg/bot/format_test.go +++ b/pkg/bot/format_test.go @@ -9,11 +9,10 @@ import ( // TestFormatNextActionsEarlyReturn tests the formatNextActions utility function early return cases. func TestFormatNextActionsEarlyReturn(t *testing.T) { + ctx := context.Background() // Create a coordinator - userMapper will be nil but that's OK for these early return tests c := &Coordinator{} - ctx := context.Background() - tests := []struct { name string check *turn.CheckResponse diff --git a/pkg/bot/handle_pr_comprehensive_test.go b/pkg/bot/handle_pr_comprehensive_test.go index 745f7c1..a1acb67 100644 --- a/pkg/bot/handle_pr_comprehensive_test.go +++ b/pkg/bot/handle_pr_comprehensive_test.go @@ -272,13 +272,13 @@ func TestHandlePullRequestEventWithData_DuplicateBlockedUsers(t *testing.T) { // TestHandlePullRequestEventWithData_ExtractStateFromTurnclient tests state extraction. func TestHandlePullRequestEventWithData_ExtractStateFromTurnclient(t *testing.T) { - ctx := context.Background() c := NewTestCoordinator(). WithState(NewMockState().Build()). WithSlack(NewMockSlack().Build()). WithConfig(NewMockConfig().Build()). Build() + ctx := context.Background() tests := []struct { name string diff --git a/pkg/bot/integration_test.go b/pkg/bot/integration_test.go index 1cc5d68..019eb0d 100644 --- a/pkg/bot/integration_test.go +++ b/pkg/bot/integration_test.go @@ -19,7 +19,6 @@ import ( // TestUserMappingIntegration tests the complete flow of mapping GitHub users to Slack users. 
 func TestUserMappingIntegration(t *testing.T) {
-	ctx := context.Background()
 	// Setup mock Slack server
 	mockSlack := slacktest.New()
@@ -27,6 +26,8 @@ func TestUserMappingIntegration(t *testing.T) {
 
 	// Add test users
 	mockSlack.AddUser("alice@example.com", "U001", "alice")
+	ctx := context.Background()
+
 	mockSlack.AddUser("bob@example.com", "U002", "bob")
 
 	// Create Slack client pointing to mock server
@@ -91,7 +92,7 @@ func TestUserMappingIntegration(t *testing.T) {
 	}
 
 	// Verify email lookups were performed
-	emailLookups := mockSlack.GetEmailLookups()
+	emailLookups := mockSlack.EmailLookups()
 	if len(emailLookups) < 3 {
 		t.Errorf("Expected at least 3 email lookups, got %d", len(emailLookups))
 	}
@@ -314,7 +315,7 @@ func TestDMDelayLogicIntegration(t *testing.T) {
 	time.Sleep(50 * time.Millisecond)
 
 	// Verify
-	postedMessages := mockSlack.GetPostedMessages()
+	postedMessages := mockSlack.PostedMessages()
 	dmCount := 0
 	for _, msg := range postedMessages {
 		if strings.HasPrefix(msg.Channel, "D") {
diff --git a/pkg/bot/methods_test.go b/pkg/bot/methods_test.go
index e44ed0d..bf07faa 100644
--- a/pkg/bot/methods_test.go
+++ b/pkg/bot/methods_test.go
@@ -25,6 +25,7 @@ func testCoordinator(mockState *mockStateStore) *Coordinator {
 }
 
 func TestCoordinator_SaveThread(t *testing.T) {
+	ctx := context.Background()
 	mockState := &mockStateStore{
 		threads: make(map[string]cache.ThreadInfo),
 	}
@@ -43,7 +44,7 @@ func TestCoordinator_SaveThread(t *testing.T) {
 		LastEventTime: time.Now(),
 	}
 
-	c.saveThread(owner, repo, number, channelID, info)
+	c.saveThread(ctx, owner, repo, number, channelID, info)
 
 	// Verify saved to cache
 	cacheKey := "testorg/testrepo#42:C123456"
diff --git a/pkg/bot/mock_builders_test.go b/pkg/bot/mock_builders_test.go
index 7b711b5..ae333f1 100644
--- a/pkg/bot/mock_builders_test.go
+++ b/pkg/bot/mock_builders_test.go
@@ -296,8 +296,8 @@ func NewTestCoordinator() *CoordinatorBuilder {
 }
 
 // WithSlack configures the Slack client.
-func (b *CoordinatorBuilder) WithSlack(slack *mockSlackClient) *CoordinatorBuilder {
-	b.coordinator.slack = slack
+func (b *CoordinatorBuilder) WithSlack(slackClient *mockSlackClient) *CoordinatorBuilder {
+	b.coordinator.slack = slackClient
 	return b
 }
 
diff --git a/pkg/bot/poll_and_reconcile_test.go b/pkg/bot/poll_and_reconcile_test.go
index 5f3da04..eb9e06f 100644
--- a/pkg/bot/poll_and_reconcile_test.go
+++ b/pkg/bot/poll_and_reconcile_test.go
@@ -50,6 +50,7 @@ func TestPollAndReconcile_EmptyPRList(t *testing.T) {
 
 // TestPollAndReconcile_ContextCancellation tests graceful shutdown on context cancellation.
 func TestPollAndReconcile_ContextCancellation(t *testing.T) {
+	ctx := context.Background()
 	// Create a context that's already canceled
-	ctx, cancel := context.WithCancel(context.Background())
+	ctx, cancel := context.WithCancel(ctx)
 	cancel() // Cancel immediately
@@ -113,6 +114,7 @@ func TestStartupReconciliation_HappyPath(t *testing.T) {
 
 // TestStartupReconciliation_ContextCancellation tests cancellation handling.
 func TestStartupReconciliation_ContextCancellation(t *testing.T) {
-	ctx, cancel := context.WithCancel(context.Background())
+	ctx := context.Background()
+	ctx, cancel := context.WithCancel(ctx)
 	cancel() // Cancel immediately
 
diff --git a/pkg/bot/polling.go b/pkg/bot/polling.go
index 27bf845..41986ba 100644
--- a/pkg/bot/polling.go
+++ b/pkg/bot/polling.go
@@ -90,7 +90,7 @@ func (c *Coordinator) pollAndReconcileWithSearcher(ctx context.Context, searcher
 		eventKey := makePollEventKey(pr.URL, pr.UpdatedAt)
 
 		// Skip if already processed (by webhook or previous poll)
-		if c.stateStore.WasProcessed(eventKey) {
+		if c.stateStore.WasProcessed(ctx, eventKey) {
 			slog.Debug("skipping PR - already processed",
 				"pr", formatPRIdentifier(pr.Owner, pr.Repo, pr.Number),
 				"pr_updated", pr.UpdatedAt)
@@ -106,7 +106,7 @@ func (c *Coordinator) pollAndReconcileWithSearcher(ctx context.Context, searcher
 			errorCount++
 		} else {
 			// Mark as processed
-			if err := c.stateStore.MarkProcessed(eventKey, 24*time.Hour); err != nil {
+			if err := c.stateStore.MarkProcessed(ctx, eventKey, 24*time.Hour); err != nil {
 				slog.Warn("failed to mark poll event as processed",
 					"pr", formatPRIdentifier(pr.Owner, pr.Repo, pr.Number),
 					"error", err)
@@ -146,7 +146,7 @@ func (c *Coordinator) pollAndReconcileWithSearcher(ctx context.Context, searcher
 		eventKey := makeClosedPREventKey(pr.URL, pr.State, pr.UpdatedAt)
 
 		// Skip if already processed
-		if c.stateStore.WasProcessed(eventKey) {
+		if c.stateStore.WasProcessed(ctx, eventKey) {
 			slog.Debug("skipping closed PR - already processed",
 				"pr", formatPRIdentifier(pr.Owner, pr.Repo, pr.Number),
 				"state", pr.State)
@@ -163,7 +163,7 @@ func (c *Coordinator) pollAndReconcileWithSearcher(ctx context.Context, searcher
 			closedErrorCount++
 		} else {
 			// Mark as processed
-			if err := c.stateStore.MarkProcessed(eventKey, 24*time.Hour); err != nil {
+			if err := c.stateStore.MarkProcessed(ctx, eventKey, 24*time.Hour); err != nil {
 				slog.Warn("failed to mark closed PR event as processed",
 					"pr", formatPRIdentifier(pr.Owner, pr.Repo, pr.Number),
 					"error", err)
@@ -323,7 +323,7 @@ func (c *Coordinator) updateClosedPRThread(ctx context.Context, pr *github.PRSna
 			continue
 		}
 
-		info, ok := c.stateStore.Thread(pr.Owner, pr.Repo, pr.Number, id)
+		info, ok := c.stateStore.Thread(ctx, pr.Owner, pr.Repo, pr.Number, id)
 		if !ok {
 			// Thread not in persistent storage - search channel history as fallback
 			// This handles cases where state was lost or thread created before persistence was added
@@ -354,7 +354,7 @@ func (c *Coordinator) updateClosedPRThread(ctx context.Context, pr *github.PRSna
 			}
 
 			// Persist for future use (avoid redundant searches)
-			if err := c.stateStore.SaveThread(pr.Owner, pr.Repo, pr.Number, id, info); err != nil {
+			if err := c.stateStore.SaveThread(ctx, pr.Owner, pr.Repo, pr.Number, id, info); err != nil {
 				slog.Warn("failed to persist recovered thread",
 					"pr", prKey,
 					"error", err)
@@ -405,7 +405,7 @@ func emojiForPRState(state string) (string, error) {
 
 // replaceEmojiPrefix replaces the emoji prefix in a message.
 // This is a pure function that can be easily tested.
-// Format: ":emoji: Title • repo#123 by @user"
+// Format: ":emoji: Title • repo#123 by @user".
 func replaceEmojiPrefix(text, newEmoji string) string {
 	i := strings.Index(text, " ")
 	if i == -1 {
@@ -496,7 +496,7 @@ func (c *Coordinator) StartupReconciliation(ctx context.Context) {
 		eventKey := makeReconcileEventKey(pr.URL, pr.UpdatedAt)
 
 		// Check if we already processed this exact PR update (via webhook or previous reconciliation)
-		if c.stateStore.WasProcessed(eventKey) {
+		if c.stateStore.WasProcessed(ctx, eventKey) {
 			skippedCount++
 			slog.Debug("skipping PR - already processed this update",
 				"pr", formatPRIdentifier(pr.Owner, pr.Repo, pr.Number),
@@ -506,7 +506,7 @@ func (c *Coordinator) StartupReconciliation(ctx context.Context) {
 		}
 
 		// Check notification state
-		lastNotified := c.stateStore.LastNotification(pr.URL)
+		lastNotified := c.stateStore.LastNotification(ctx, pr.URL)
 
 		// Determine if we should notify
 		reason, shouldNotify := shouldReconcilePR(pr.UpdatedAt, lastNotified)
@@ -534,13 +534,13 @@ func (c *Coordinator) StartupReconciliation(ctx context.Context) {
 		} else {
 			reconciledCount++
 			// Mark as processed to prevent duplicate processing
-			if err := c.stateStore.MarkProcessed(eventKey, 24*time.Hour); err != nil {
+			if err := c.stateStore.MarkProcessed(ctx, eventKey, 24*time.Hour); err != nil {
 				slog.Warn("failed to mark reconciliation event as processed",
 					"pr", formatPRIdentifier(pr.Owner, pr.Repo, pr.Number),
 					"error", err)
 			}
 			// Record that we notified
-			if err := c.stateStore.RecordNotification(pr.URL, time.Now()); err != nil {
+			if err := c.stateStore.RecordNotification(ctx, pr.URL, time.Now()); err != nil {
 				slog.Warn("failed to record notification",
 					"pr", formatPRIdentifier(pr.Owner, pr.Repo, pr.Number),
 					"error", err)
diff --git a/pkg/bot/polling_test.go b/pkg/bot/polling_test.go
index 5efc6ca..38fd78c 100644
--- a/pkg/bot/polling_test.go
+++ b/pkg/bot/polling_test.go
@@ -455,6 +455,7 @@ func TestStartupReconciliation_NoToken(t *testing.T) {
 
 // TestPollAndReconcile_Deduplication tests that already-processed events are skipped.
 func TestPollAndReconcile_Deduplication(t *testing.T) {
-	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+	ctx := context.Background()
+	ctx, cancel := context.WithTimeout(ctx, 5*time.Second)
 	defer cancel()
 
@@ -1292,6 +1293,7 @@ func TestPollAndReconcileWithSearcher_SuccessfulOpenPRProcessing(t *testing.T) {
 
 // TestPollAndReconcileWithSearcher_ContextCancellationDuringOpenPRs tests graceful cancellation.
 func TestPollAndReconcileWithSearcher_ContextCancellationDuringOpenPRs(t *testing.T) {
-	ctx, cancel := context.WithCancel(context.Background())
+	ctx := context.Background()
+	ctx, cancel := context.WithCancel(ctx)
 	store := &mockStateStore{
 		processedEvents: make(map[string]bool),
 	}
@@ -1492,6 +1494,7 @@ func TestPollAndReconcileWithSearcher_ListClosedPRsError(t *testing.T) {
 
 // TestPollAndReconcileWithSearcher_ContextCancellationDuringClosedPRs tests cancellation during closed PR processing.
 func TestPollAndReconcileWithSearcher_ContextCancellationDuringClosedPRs(t *testing.T) {
-	ctx, cancel := context.WithCancel(context.Background())
+	ctx := context.Background()
+	ctx, cancel := context.WithCancel(ctx)
 	store := &mockStateStore{
 		processedEvents: make(map[string]bool),
diff --git a/pkg/bot/sprinkler_test.go b/pkg/bot/sprinkler_test.go
index 26cee1a..d479a21 100644
--- a/pkg/bot/sprinkler_test.go
+++ b/pkg/bot/sprinkler_test.go
@@ -262,7 +262,7 @@ func TestHandleSprinklerEvent_Deduplication(t *testing.T) {
 	c.handleSprinklerEvent(ctx, event, "testorg")
 
 	// Verify event was marked as processed
-	if !mockState.WasProcessed("unique-123") {
+	if !mockState.WasProcessed(ctx, "unique-123") {
 		t.Error("expected event to be marked as processed")
 	}
 
@@ -309,7 +309,7 @@ func TestHandleSprinklerEvent_PullRequestWithNumber(t *testing.T) {
 	c.handleSprinklerEvent(ctx, event, "testorg")
 
 	// Verify event was processed (marked in state store)
-	if !mockState.WasProcessed("pr-event-123") {
+	if !mockState.WasProcessed(ctx, "pr-event-123") {
 		t.Error("expected PR event to be marked as processed")
 	}
 
@@ -359,7 +359,7 @@ func TestHandleSprinklerEvent_CheckEventWithCommit(t *testing.T) {
 	c.handleSprinklerEvent(ctx, event, "testorg")
 
 	// Verify event was processed
-	if !mockState.WasProcessed("check-event-123") {
+	if !mockState.WasProcessed(ctx, "check-event-123") {
 		t.Error("expected check event to be marked as processed")
 	}
 }
@@ -762,7 +762,7 @@ func TestHandleSprinklerEvent_DatabaseError(t *testing.T) {
 	c.handleSprinklerEvent(ctx, event, "testorg")
 
 	// Event should NOT be marked as processed due to database error
-	if mockState.WasProcessed("db-error-123") {
+	if mockState.WasProcessed(ctx, "db-error-123") {
 		t.Error("expected event NOT to be marked as processed after database error")
 	}
 }
diff --git a/pkg/config/config.go b/pkg/config/config.go
index 749bbdc..2ee6e46 100644
--- a/pkg/config/config.go
+++ b/pkg/config/config.go
@@ -41,17 +41,17 @@ type ServerConfig struct {
 // RepoConfig represents the slack.yaml configuration for a GitHub org.
 type RepoConfig struct {
 	Channels map[string]struct {
 		ReminderDMDelay *int     `yaml:"reminder_dm_delay"` // Optional: override global delay for this channel (0 = disabled)
 		Repos           []string `yaml:"repos"`
 		Mute            bool     `yaml:"mute"`
 	} `yaml:"channels"`
+	Users map[string]string `yaml:"users"` // GitHub username -> email address (for manual overrides)
 	Global struct {
 		TeamID          string `yaml:"team_id"`
 		EmailDomain     string `yaml:"email_domain"`
 		ReminderDMDelay int    `yaml:"reminder_dm_delay"` // Minutes to wait before sending DM if user tagged in channel (0 = disabled)
 		DailyReminders  bool   `yaml:"daily_reminders"`
 	} `yaml:"global"`
-	Users map[string]string `yaml:"users"` // GitHub username -> email address (for manual overrides)
 }
 
 // configCacheEntry represents a cached configuration entry.
diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go index fc0cd49..f8c6417 100644 --- a/pkg/config/config_test.go +++ b/pkg/config/config_test.go @@ -832,8 +832,8 @@ func TestManager_ChannelsForRepoNoConfig(t *testing.T) { } func TestManager_LoadConfigNoClient(t *testing.T) { - m := New() ctx := context.Background() + m := New() // LoadConfig should fail if no GitHub client is set err := m.LoadConfig(ctx, "test-org") @@ -846,8 +846,8 @@ func TestManager_LoadConfigNoClient(t *testing.T) { } func TestManager_LoadConfigFromCache(t *testing.T) { - m := New() ctx := context.Background() + m := New() // Pre-populate cache cachedConfig := createDefaultConfig() @@ -877,8 +877,8 @@ func TestManager_LoadConfigFromCache(t *testing.T) { } func TestManager_ReloadConfig(t *testing.T) { - m := New() ctx := context.Background() + m := New() // Pre-populate cache oldConfig := createDefaultConfig() @@ -907,8 +907,8 @@ func TestManager_ReloadConfig(t *testing.T) { // Helper function to check if a string contains a substring. func contains(s, substr string) bool { - return len(s) >= len(substr) && (s == substr || len(substr) == 0 || - (len(s) > 0 && len(substr) > 0 && indexOf(s, substr) >= 0)) + return len(s) >= len(substr) && (s == substr || substr == "" || + (s != "" && substr != "" && indexOf(s, substr) >= 0)) } func indexOf(s, substr string) int { @@ -936,6 +936,7 @@ func must[T any](v T, err error) T { } func TestManager_LoadConfigValidYAML(t *testing.T) { + ctx := context.Background() validYAML := ` global: team_id: T123456 @@ -962,9 +963,9 @@ channels: Encoding: &encoding, } w.Header().Set("Content-Type", "application/json") - //nolint:errcheck // Error intentionally ignored in test mock HTTP handler - //nolint:errcheck // Error intentionally ignored in test mock HTTP handler - _ = json.NewEncoder(w).Encode(response) + if err := json.NewEncoder(w).Encode(response); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } return } http.NotFound(w, r) @@ -976,7 +977,6 @@ channels: m := New() m.SetGitHubClient("test-org", client) - ctx := context.Background() err := m.LoadConfig(ctx, "test-org") if err != nil { t.Fatalf("unexpected error loading valid config: %v", err) @@ -1002,6 +1002,7 @@ channels: } func TestManager_LoadConfig404NotFound(t *testing.T) { + ctx := context.Background() handler := func(w http.ResponseWriter, r *http.Request) { http.NotFound(w, r) } @@ -1012,7 +1013,6 @@ func TestManager_LoadConfig404NotFound(t *testing.T) { m := New() m.SetGitHubClient("test-org", client) - ctx := context.Background() err := m.LoadConfig(ctx, "test-org") if err != nil { t.Fatalf("expected graceful degradation on 404, got error: %v", err) @@ -1032,6 +1032,7 @@ func TestManager_LoadConfig404NotFound(t *testing.T) { } func TestManager_LoadConfigInvalidYAML(t *testing.T) { + ctx := context.Background() invalidYAML := ` global: - this is not valid yaml @@ -1048,8 +1049,9 @@ channels: [1, 2, 3] Encoding: &encoding, } w.Header().Set("Content-Type", "application/json") - //nolint:errcheck // Error intentionally ignored in test mock HTTP handler - _ = json.NewEncoder(w).Encode(response) + if err := json.NewEncoder(w).Encode(response); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } } client, server := createTestGitHubClient(handler) @@ -1058,7 +1060,6 @@ channels: [1, 2, 3] m := New() m.SetGitHubClient("test-org", client) - ctx := context.Background() err := m.LoadConfig(ctx, "test-org") if err != nil { t.Fatalf("expected graceful degradation on 
invalid YAML, got error: %v", err) @@ -1075,6 +1076,7 @@ channels: [1, 2, 3] } func TestManager_LoadConfigCodeGROOVEProdConfig(t *testing.T) { + ctx := context.Background() // Test with actual production config from codeGROOVE-dev/.codeGROOVE/slack.yaml prodYAML := `global: team_id: T09CJ7X7T7Y @@ -1116,7 +1118,6 @@ channels: m := New() m.SetGitHubClient("codeGROOVE-dev", client) - ctx := context.Background() err := m.LoadConfig(ctx, "codeGROOVE-dev") if err != nil { t.Fatalf("unexpected error loading production config: %v", err) @@ -1181,14 +1182,16 @@ channels: } func TestManager_LoadConfigEmptyContent(t *testing.T) { + ctx := context.Background() handler := func(w http.ResponseWriter, r *http.Request) { // Return a response with nil Content response := github.RepositoryContent{ Type: github.String("file"), } w.Header().Set("Content-Type", "application/json") - //nolint:errcheck // Error intentionally ignored in test mock HTTP handler - _ = json.NewEncoder(w).Encode(response) + if err := json.NewEncoder(w).Encode(response); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } } client, server := createTestGitHubClient(handler) @@ -1197,7 +1200,6 @@ func TestManager_LoadConfigEmptyContent(t *testing.T) { m := New() m.SetGitHubClient("test-org", client) - ctx := context.Background() err := m.LoadConfig(ctx, "test-org") if err != nil { t.Fatalf("expected graceful degradation on empty content, got error: %v", err) diff --git a/pkg/github/client_integration_test.go b/pkg/github/client_integration_test.go index c7b04f6..8a6a6b2 100644 --- a/pkg/github/client_integration_test.go +++ b/pkg/github/client_integration_test.go @@ -10,6 +10,7 @@ import ( // TestClient_AuthenticateWithMock tests successful authentication flow with mock server. func TestClient_AuthenticateWithMock(t *testing.T) { + ctx := context.Background() mock := NewMockGitHubServer() defer mock.Close() @@ -31,7 +32,6 @@ func TestClient_AuthenticateWithMock(t *testing.T) { } // Test authentication - ctx := context.Background() err = client.authenticate(ctx) if err != nil { t.Fatalf("authenticate() failed: %v", err) @@ -55,6 +55,7 @@ func TestClient_AuthenticateWithMock(t *testing.T) { // TestClient_Authenticate_InvalidInstallation tests authentication with invalid installation ID. func TestClient_Authenticate_InvalidInstallation(t *testing.T) { + ctx := context.Background() mock := NewMockGitHubServer() defer mock.Close() @@ -72,7 +73,6 @@ func TestClient_Authenticate_InvalidInstallation(t *testing.T) { baseURL: mock.URL(), } - ctx := context.Background() err = client.authenticate(ctx) if err == nil { t.Error("expected error for invalid installation, got nil") @@ -81,6 +81,7 @@ func TestClient_Authenticate_InvalidInstallation(t *testing.T) { // TestClient_Authenticate_RetryOnFailure tests retry logic on transient failures. func TestClient_Authenticate_RetryOnFailure(t *testing.T) { + ctx := context.Background() mock := NewMockGitHubServer() defer mock.Close() @@ -102,7 +103,6 @@ func TestClient_Authenticate_RetryOnFailure(t *testing.T) { // Inject failure on first attempt mock.FailNextAuthRequest = true - ctx := context.Background() err = client.authenticate(ctx) // Should still succeed after retry if err != nil { @@ -117,6 +117,7 @@ func TestClient_Authenticate_RetryOnFailure(t *testing.T) { // TestClient_FindPRsForCommit_Success tests finding PRs by commit SHA. 
 func TestClient_FindPRsForCommit_Success(t *testing.T) {
+	ctx := context.Background()
 	mock := NewMockGitHubServer()
 	defer mock.Close()
 
@@ -146,7 +147,6 @@ func TestClient_FindPRsForCommit_Success(t *testing.T) {
 
 	// Create client pointing to mock server
 	client := createMockClient(t, mock)
-	ctx := context.Background()
 	prNumbers, err := client.FindPRsForCommit(ctx, "test-org", "test-repo", "abc123")
 	if err != nil {
 		t.Fatalf("FindPRsForCommit() failed: %v", err)
@@ -174,11 +174,11 @@ func TestClient_FindPRsForCommit_Success(t *testing.T) {
 
 // TestClient_FindPRsForCommit_InvalidParams tests error handling for invalid parameters.
 func TestClient_FindPRsForCommit_InvalidParams(t *testing.T) {
+	ctx := context.Background()
 	mock := NewMockGitHubServer()
 	defer mock.Close()
 
 	client := createMockClient(t, mock)
-	ctx := context.Background()
 
 	tests := []struct {
 		name      string
@@ -203,11 +203,11 @@ func TestClient_FindPRsForCommit_InvalidParams(t *testing.T) {
 
 // TestClient_FindPRsForCommit_NoResults tests handling when no PRs are found.
 func TestClient_FindPRsForCommit_NoResults(t *testing.T) {
+	ctx := context.Background()
 	mock := NewMockGitHubServer()
 	defer mock.Close()
 
 	client := createMockClient(t, mock)
-	ctx := context.Background()
 
 	// Query for commit with no PRs
 	prNumbers, err := client.FindPRsForCommit(ctx, "test-org", "test-repo", "nonexistent")
@@ -222,6 +222,7 @@ func TestClient_FindPRsForCommit_NoResults(t *testing.T) {
 
 // TestClient_FindPRsForCommit_OnlyOpenPRs tests that only open PRs are returned.
 func TestClient_FindPRsForCommit_OnlyOpenPRs(t *testing.T) {
+	ctx := context.Background()
 	mock := NewMockGitHubServer()
 	defer mock.Close()
 
@@ -245,7 +246,6 @@ func TestClient_FindPRsForCommit_OnlyOpenPRs(t *testing.T) {
 	})
 
 	client := createMockClient(t, mock)
-	ctx := context.Background()
 
 	prNumbers, err := client.FindPRsForCommit(ctx, "test-org", "test-repo", "abc123")
 	if err != nil {
diff --git a/pkg/github/github.go b/pkg/github/github.go
index ffda0fa..90eb33e 100644
--- a/pkg/github/github.go
+++ b/pkg/github/github.go
@@ -34,9 +34,9 @@ type Client struct {
 	appID             string
 	installationToken string
 	organization      string
+	baseURL           string // Optional: override GitHub API base URL for testing
 	installationID    int64
 	tokenMutex        sync.RWMutex
-	baseURL           string // Optional: override GitHub API base URL for testing
 }
 
 // refreshingTokenSource implements oauth2.TokenSource that automatically refreshes tokens.
@@ -447,11 +447,11 @@ func (c *Client) InstallationToken(ctx context.Context) string {
 // Manager manages multiple GitHub App installations.
 type Manager struct {
 	privateKey            *rsa.PrivateKey
 	clients               map[string]*Client // org -> client
 	appID                 string
-	allowPersonalAccounts bool   // Allow processing personal accounts (default: false for DoS protection)
-	baseURL               string // Optional: override GitHub API base URL for testing
+	baseURL               string // Optional: override GitHub API base URL for testing
 	mu                    sync.RWMutex
+	allowPersonalAccounts bool // Allow processing personal accounts (default: false for DoS protection)
 }
 
 // NewManager creates a new installation manager.
diff --git a/pkg/github/github_mock_server_test.go b/pkg/github/github_mock_server_test.go index 6337b64..6d92c9b 100644 --- a/pkg/github/github_mock_server_test.go +++ b/pkg/github/github_mock_server_test.go @@ -100,11 +100,12 @@ func NewMockGitHubServer() *MockGitHubServer { // Pull request endpoints mux.HandleFunc("/repos/", func(w http.ResponseWriter, r *http.Request) { - if strings.Contains(r.URL.Path, "/commits/") && strings.Contains(r.URL.Path, "/pulls") { + switch { + case strings.Contains(r.URL.Path, "/commits/") && strings.Contains(r.URL.Path, "/pulls"): mock.handleListPRsForCommit(w, r) - } else if strings.Contains(r.URL.Path, "/pulls") { + case strings.Contains(r.URL.Path, "/pulls"): mock.handleListPRs(w, r) - } else { + default: http.NotFound(w, r) } }) @@ -116,8 +117,7 @@ func NewMockGitHubServer() *MockGitHubServer { mux.HandleFunc("/rate_limit", func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusOK) - //nolint:errcheck // Error intentionally ignored in test mock HTTP handler - _ = json.NewEncoder(w).Encode(map[string]interface{}{ + if err := json.NewEncoder(w).Encode(map[string]interface{}{ "resources": map[string]interface{}{ "core": map[string]interface{}{ "limit": 5000, @@ -125,18 +125,21 @@ func NewMockGitHubServer() *MockGitHubServer { "reset": time.Now().Add(1 * time.Hour).Unix(), }, }, - }) + }); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } }) // Installation repositories endpoint for token validation mux.HandleFunc("/installation/repositories", func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusOK) - //nolint:errcheck // Error intentionally ignored in test mock HTTP handler - _ = json.NewEncoder(w).Encode(map[string]interface{}{ + if err := json.NewEncoder(w).Encode(map[string]interface{}{ "total_count": 0, "repositories": []interface{}{}, - }) + }); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } }) mock.server = httptest.NewServer(mux) @@ -189,8 +192,9 @@ func (m *MockGitHubServer) handleListInstallations(w http.ResponseWriter, r *htt // Return installations w.Header().Set("Content-Type", "application/json") - //nolint:errcheck // Error intentionally ignored in test mock HTTP handler - _ = json.NewEncoder(w).Encode(m.installations) + if err := json.NewEncoder(w).Encode(m.installations); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } } // handleGetInstallation handles GET /app/installations/{id}. @@ -213,8 +217,9 @@ func (m *MockGitHubServer) handleGetInstallation(w http.ResponseWriter, r *http. for _, inst := range m.installations { if inst.ID == id { w.Header().Set("Content-Type", "application/json") - //nolint:errcheck // Error intentionally ignored in test mock HTTP handler - _ = json.NewEncoder(w).Encode(inst) + if err := json.NewEncoder(w).Encode(inst); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } return } } @@ -282,8 +287,9 @@ func (m *MockGitHubServer) handleCreateInstallationToken(w http.ResponseWriter, } w.Header().Set("Content-Type", "application/json") - //nolint:errcheck // Error intentionally ignored in test mock HTTP handler - _ = json.NewEncoder(w).Encode(token) + if err := json.NewEncoder(w).Encode(token); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } } // handleListPRsForCommit handles GET /repos/{owner}/{repo}/commits/{sha}/pulls. 
@@ -311,7 +317,8 @@ func (m *MockGitHubServer) handleListPRsForCommit(w http.ResponseWriter, r *http // Find the PR details repoKey := owner + "/" + repo if repoPRs, ok := m.pullRequests[repoKey]; ok { - for _, pr := range repoPRs { + for i := range repoPRs { + pr := &repoPRs[i] if pr.Number == prNum { prs = append(prs, &github.PullRequest{ Number: github.Int(pr.Number), @@ -331,8 +338,9 @@ func (m *MockGitHubServer) handleListPRsForCommit(w http.ResponseWriter, r *http } w.Header().Set("Content-Type", "application/json") - //nolint:errcheck // Error intentionally ignored in test mock HTTP handler - _ = json.NewEncoder(w).Encode(prs) + if err := json.NewEncoder(w).Encode(prs); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } } // handleListPRs handles GET /repos/{owner}/{repo}/pulls. @@ -359,7 +367,8 @@ func (m *MockGitHubServer) handleListPRs(w http.ResponseWriter, r *http.Request) // Filter PRs by state var prs []*github.PullRequest if repoPRs, ok := m.pullRequests[key]; ok { - for _, pr := range repoPRs { + for i := range repoPRs { + pr := &repoPRs[i] if state == "all" || pr.State == state { prs = append(prs, &github.PullRequest{ Number: github.Int(pr.Number), @@ -375,8 +384,9 @@ func (m *MockGitHubServer) handleListPRs(w http.ResponseWriter, r *http.Request) } w.Header().Set("Content-Type", "application/json") - //nolint:errcheck // Error intentionally ignored in test mock HTTP handler - _ = json.NewEncoder(w).Encode(prs) + if err := json.NewEncoder(w).Encode(prs); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } } // handleSearchIssues handles GET /search/issues (used for PR search). @@ -411,7 +421,8 @@ func (m *MockGitHubServer) handleSearchIssues(w http.ResponseWriter, r *http.Req continue } - for _, pr := range prs { + for i := range prs { + pr := &prs[i] // Check state filter in query if strings.Contains(query, "is:open") && pr.State != "open" { continue @@ -440,6 +451,7 @@ func (m *MockGitHubServer) handleSearchIssues(w http.ResponseWriter, r *http.Req } w.Header().Set("Content-Type", "application/json") - //nolint:errcheck // Error intentionally ignored in test mock HTTP handler - _ = json.NewEncoder(w).Encode(result) + if err := json.NewEncoder(w).Encode(result); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + } } diff --git a/pkg/github/github_test.go b/pkg/github/github_test.go index 5fd29d7..5ca439f 100644 --- a/pkg/github/github_test.go +++ b/pkg/github/github_test.go @@ -32,12 +32,12 @@ func TestClient_Client(t *testing.T) { } func TestClient_InstallationToken(t *testing.T) { + ctx := context.Background() c := &Client{ installationToken: "test-token", tokenExpiry: time.Now().Add(1 * time.Hour), } - ctx := context.Background() token := c.InstallationToken(ctx) if token != "test-token" { @@ -46,12 +46,12 @@ func TestClient_InstallationToken(t *testing.T) { } func TestClient_InstallationToken_NotExpired(t *testing.T) { + ctx := context.Background() c := &Client{ installationToken: "valid-token", tokenExpiry: time.Now().Add(1 * time.Hour), // Not expired } - ctx := context.Background() token := c.InstallationToken(ctx) // Should return the existing token if not expired @@ -767,6 +767,7 @@ func TestRefreshInstallations_SkipPersonalAccounts(t *testing.T) { } func TestRefreshInstallations_CanceledContext(t *testing.T) { + ctx := context.Background() key, err := rsa.GenerateKey(rand.Reader, 2048) if err != nil { t.Fatalf("failed to generate key: %v", err) @@ -814,6 +815,7 @@ func 
TestNewTurnClient(t *testing.T) { } func TestSearchClient_ListOpenPRs(t *testing.T) { + ctx := context.Background() // Create a mock server for search API server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // Mock search API response @@ -849,7 +851,6 @@ func TestSearchClient_ListOpenPRs(t *testing.T) { defer server.Close() // Create client pointing to mock server - ctx := context.Background() src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) @@ -1015,6 +1016,7 @@ func TestExtractOwnerRepo(t *testing.T) { } func TestSearchPRs_Pagination(t *testing.T) { + ctx := context.Background() callCount := 0 var serverURL string @@ -1060,7 +1062,6 @@ func TestSearchPRs_Pagination(t *testing.T) { defer server.Close() serverURL = server.URL - ctx := context.Background() src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) @@ -1087,13 +1088,13 @@ func TestSearchPRs_Pagination(t *testing.T) { } func TestSearchPRs_SearchError(t *testing.T) { + ctx := context.Background() // Mock server that returns error server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusInternalServerError) })) defer server.Close() - ctx := context.Background() src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) @@ -1111,6 +1112,7 @@ func TestSearchPRs_SearchError(t *testing.T) { } func TestSearchPRs_SkipsIssues(t *testing.T) { + ctx := context.Background() // Mock server that returns both issues and PRs server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if strings.Contains(r.URL.Path, "/search/issues") { @@ -1153,7 +1155,6 @@ func TestSearchPRs_SkipsIssues(t *testing.T) { })) defer server.Close() - ctx := context.Background() src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) @@ -1180,6 +1181,7 @@ func TestSearchPRs_SkipsIssues(t *testing.T) { } func TestSearchPRs_InvalidRepositoryURL(t *testing.T) { + ctx := context.Background() // Mock server that returns PR with invalid repository URL server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if strings.Contains(r.URL.Path, "/search/issues") { @@ -1211,7 +1213,6 @@ func TestSearchPRs_InvalidRepositoryURL(t *testing.T) { })) defer server.Close() - ctx := context.Background() src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) @@ -1428,6 +1429,7 @@ func TestNewTurnClient_Error(t *testing.T) { } func TestSearchPRs_MaxPageLimit(t *testing.T) { + ctx := context.Background() callCount := 0 var serverURL string @@ -1468,7 +1470,6 @@ func TestSearchPRs_MaxPageLimit(t *testing.T) { defer server.Close() serverURL = server.URL - ctx := context.Background() src := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: "test-token"}) httpClient := oauth2.NewClient(ctx, src) searchClient := github.NewClient(httpClient) diff --git a/pkg/github/graphql.go b/pkg/github/graphql.go index ecade64..d71c019 100644 --- a/pkg/github/graphql.go +++ b/pkg/github/graphql.go @@ -184,7 +184,7 @@ func (c 
*SearchClient) searchPRs(ctx context.Context, query, org string) ([]PRSn } // extractOwnerRepo extracts owner and repo from a repository URL. -// Example: "https://api.github.com/repos/owner/repo" -> "owner", "repo" +// Example: "https://api.github.com/repos/owner/repo" -> "owner", "repo". func extractOwnerRepo(repoURL string) (owner, repo string) { // URL format: https://api.github.com/repos/owner/repo const prefix = "https://api.github.com/repos/" @@ -194,7 +194,7 @@ func extractOwnerRepo(repoURL string) (owner, repo string) { path := repoURL[len(prefix):] // Split on first slash to get owner/repo - for i := 0; i < len(path); i++ { + for i := range len(path) { if path[i] == '/' { return path[:i], path[i+1:] } diff --git a/pkg/github/manager_integration_test.go b/pkg/github/manager_integration_test.go index 731d8b3..7a31cf2 100644 --- a/pkg/github/manager_integration_test.go +++ b/pkg/github/manager_integration_test.go @@ -9,6 +9,7 @@ import ( // TestManager_RefreshInstallationsWithMock tests successful installation discovery. func TestManager_RefreshInstallationsWithMock(t *testing.T) { + ctx := context.Background() mock := NewMockGitHubServer() defer mock.Close() @@ -32,7 +33,6 @@ func TestManager_RefreshInstallationsWithMock(t *testing.T) { baseURL: mock.URL(), } - ctx := context.Background() err = manager.RefreshInstallations(ctx) if err != nil { t.Fatalf("RefreshInstallations() failed: %v", err) @@ -57,6 +57,7 @@ func TestManager_RefreshInstallationsWithMock(t *testing.T) { // TestManager_RefreshInstallations_AllowPersonalAccounts tests personal account handling. func TestManager_RefreshInstallations_AllowPersonalAccounts(t *testing.T) { + ctx := context.Background() mock := NewMockGitHubServer() defer mock.Close() @@ -77,7 +78,6 @@ func TestManager_RefreshInstallations_AllowPersonalAccounts(t *testing.T) { baseURL: mock.URL(), } - ctx := context.Background() err = manager.RefreshInstallations(ctx) if err != nil { t.Fatalf("RefreshInstallations() failed: %v", err) @@ -98,6 +98,7 @@ func TestManager_RefreshInstallations_AllowPersonalAccounts(t *testing.T) { // TestManager_RefreshInstallations_NoInstallations tests handling of no installations. func TestManager_RefreshInstallations_NoInstallations(t *testing.T) { + ctx := context.Background() mock := NewMockGitHubServer() defer mock.Close() @@ -115,7 +116,6 @@ func TestManager_RefreshInstallations_NoInstallations(t *testing.T) { baseURL: mock.URL(), } - ctx := context.Background() err = manager.RefreshInstallations(ctx) if err != nil { t.Fatalf("RefreshInstallations() should succeed with no installations, got error: %v", err) @@ -128,6 +128,7 @@ func TestManager_RefreshInstallations_NoInstallations(t *testing.T) { // TestManager_RefreshInstallations_PreserveExisting tests that existing clients are preserved. 
func TestManager_RefreshInstallations_PreserveExisting(t *testing.T) { + ctx := context.Background() mock := NewMockGitHubServer() defer mock.Close() @@ -154,7 +155,6 @@ func TestManager_RefreshInstallations_PreserveExisting(t *testing.T) { } manager.clients["org1"] = existingClient - ctx := context.Background() err = manager.RefreshInstallations(ctx) if err != nil { t.Fatalf("RefreshInstallations() failed: %v", err) diff --git a/pkg/home/fetcher.go b/pkg/home/fetcher.go index 6ac2db7..68d9472 100644 --- a/pkg/home/fetcher.go +++ b/pkg/home/fetcher.go @@ -195,7 +195,7 @@ func (f *Fetcher) searchPRs(ctx context.Context, query string) ([]PR, error) { continue // Skip malformed repo } owner, repoName := repoParts[0], repoParts[1] - if threadInfo, exists := f.stateStore.Thread(owner, repoName, pr.Number, ""); exists { + if threadInfo, exists := f.stateStore.Thread(ctx, owner, repoName, pr.Number, ""); exists { pr.LastEventTime = threadInfo.LastEventTime } diff --git a/pkg/home/fetcher_test.go b/pkg/home/fetcher_test.go index c8b3682..a571bfd 100644 --- a/pkg/home/fetcher_test.go +++ b/pkg/home/fetcher_test.go @@ -122,13 +122,12 @@ func TestSortPRs(t *testing.T) { // TestFetchUserPRs_InputValidation tests input validation for username and org names. func TestFetchUserPRs_InputValidation(t *testing.T) { + ctx := context.Background() f := &Fetcher{ githubClient: &github.Client{}, stateStore: nil, } - ctx := context.Background() - tests := []struct { name string username string @@ -202,73 +201,73 @@ type mockStateStore struct { threads map[string]state.ThreadInfo } -func (m *mockStateStore) Thread(owner, repo string, number int, channelID string) (state.ThreadInfo, bool) { +func (m *mockStateStore) Thread(ctx context.Context, owner, repo string, number int, channelID string) (state.ThreadInfo, bool) { key := owner + "/" + repo + "/" + string(rune(number)) info, exists := m.threads[key] return info, exists } -func (m *mockStateStore) SaveThread(owner, repo string, number int, channelID string, info state.ThreadInfo) error { +func (m *mockStateStore) SaveThread(ctx context.Context, owner, repo string, number int, channelID string, info state.ThreadInfo) error { return nil } -func (m *mockStateStore) LastDM(userID, prURL string) (time.Time, bool) { +func (m *mockStateStore) LastDM(ctx context.Context, userID, prURL string) (time.Time, bool) { return time.Time{}, false } -func (m *mockStateStore) RecordDM(userID, prURL string, sentAt time.Time) error { +func (m *mockStateStore) RecordDM(ctx context.Context, userID, prURL string, sentAt time.Time) error { return nil } -func (m *mockStateStore) DMMessage(userID, prURL string) (state.DMInfo, bool) { +func (m *mockStateStore) DMMessage(ctx context.Context, userID, prURL string) (state.DMInfo, bool) { return state.DMInfo{}, false } -func (m *mockStateStore) SaveDMMessage(userID, prURL string, info state.DMInfo) error { +func (m *mockStateStore) SaveDMMessage(ctx context.Context, userID, prURL string, info state.DMInfo) error { return nil } -func (m *mockStateStore) ListDMUsers(prURL string) []string { +func (m *mockStateStore) ListDMUsers(ctx context.Context, prURL string) []string { return nil } -func (m *mockStateStore) LastDigest(userID, date string) (time.Time, bool) { +func (m *mockStateStore) LastDigest(ctx context.Context, userID, date string) (time.Time, bool) { return time.Time{}, false } -func (m *mockStateStore) RecordDigest(userID, date string, sentAt time.Time) error { +func (m *mockStateStore) RecordDigest(ctx context.Context, userID, date 
string, sentAt time.Time) error { return nil } -func (m *mockStateStore) WasProcessed(eventKey string) bool { +func (m *mockStateStore) WasProcessed(ctx context.Context, eventKey string) bool { return false } -func (m *mockStateStore) MarkProcessed(eventKey string, ttl time.Duration) error { +func (m *mockStateStore) MarkProcessed(ctx context.Context, eventKey string, ttl time.Duration) error { return nil } -func (m *mockStateStore) LastNotification(prURL string) time.Time { +func (m *mockStateStore) LastNotification(ctx context.Context, prURL string) time.Time { return time.Time{} } -func (m *mockStateStore) RecordNotification(prURL string, notifiedAt time.Time) error { +func (m *mockStateStore) RecordNotification(ctx context.Context, prURL string, notifiedAt time.Time) error { return nil } -func (m *mockStateStore) Cleanup() error { +func (m *mockStateStore) Cleanup(ctx context.Context) error { return nil } -func (m *mockStateStore) QueuePendingDM(dm state.PendingDM) error { +func (m *mockStateStore) QueuePendingDM(ctx context.Context, dm *state.PendingDM) error { return nil } -func (m *mockStateStore) PendingDMs(before time.Time) ([]state.PendingDM, error) { +func (m *mockStateStore) PendingDMs(ctx context.Context, before time.Time) ([]state.PendingDM, error) { return nil, nil } -func (m *mockStateStore) RemovePendingDM(id string) error { +func (m *mockStateStore) RemovePendingDM(ctx context.Context, id string) error { return nil } @@ -278,6 +277,7 @@ func (m *mockStateStore) Close() error { // TestSearchPRs tests GitHub search API integration. func TestSearchPRs(t *testing.T) { + ctx := context.Background() // Create mock GitHub API server server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/search/issues" { @@ -325,7 +325,6 @@ func TestSearchPRs(t *testing.T) { stateStore: mockStore, } - ctx := context.Background() prs, err := f.searchPRs(ctx, "is:pr is:open author:testuser org:test-org") if err != nil { t.Fatalf("searchPRs failed: %v", err) diff --git a/pkg/home/ui_test.go b/pkg/home/ui_test.go index c4a0392..1768bbb 100644 --- a/pkg/home/ui_test.go +++ b/pkg/home/ui_test.go @@ -9,6 +9,8 @@ import ( ) // TestBuildBlocks verifies home dashboard block generation. +// +//nolint:gocognit,maintidx // Comprehensive test with many test cases - complexity acceptable func TestBuildBlocks(t *testing.T) { tests := []struct { name string @@ -25,6 +27,7 @@ func TestBuildBlocks(t *testing.T) { }, primaryOrg: "test-org", validate: func(t *testing.T, blocks []slack.Block) { + t.Helper() if len(blocks) == 0 { t.Fatal("expected non-empty blocks") } @@ -111,6 +114,7 @@ func TestBuildBlocks(t *testing.T) { }, primaryOrg: "test-org", validate: func(t *testing.T, blocks []slack.Block) { + t.Helper() // Should have "Action needed" status foundActionNeeded := false for _, block := range blocks { @@ -171,6 +175,7 @@ func TestBuildBlocks(t *testing.T) { }, primaryOrg: "test-org", validate: func(t *testing.T, blocks []slack.Block) { + t.Helper() // Should show outgoing PR section foundOutgoing := false for _, block := range blocks { @@ -207,6 +212,7 @@ func TestBuildBlocks(t *testing.T) { }, primaryOrg: "org1", validate: func(t *testing.T, blocks []slack.Block) { + t.Helper() // Should list all orgs in monitoring section foundOrgs := 0 for _, block := range blocks { @@ -242,6 +248,8 @@ func TestBuildBlocks(t *testing.T) { } // TestFormatEnhancedPRBlock verifies individual PR block formatting. 
+// +//nolint:maintidx // Comprehensive test covering all PR states and formatting - complexity acceptable func TestFormatEnhancedPRBlock(t *testing.T) { now := time.Now() @@ -262,6 +270,7 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now.Add(-2 * time.Hour), }, validate: func(t *testing.T, block slack.Block) { + t.Helper() sb, ok := block.(*slack.SectionBlock) if !ok { t.Fatal("expected SectionBlock") @@ -308,6 +317,7 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now.Add(-1 * time.Hour), }, validate: func(t *testing.T, block slack.Block) { + t.Helper() sb, ok := block.(*slack.SectionBlock) if !ok { t.Fatal("expected block to be *slack.SectionBlock") @@ -342,6 +352,7 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now.Add(-30 * time.Minute), }, validate: func(t *testing.T, block slack.Block) { + t.Helper() sb, ok := block.(*slack.SectionBlock) if !ok { t.Fatal("expected block to be *slack.SectionBlock") @@ -376,6 +387,7 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now.Add(-24 * time.Hour), }, validate: func(t *testing.T, block slack.Block) { + t.Helper() sb, ok := block.(*slack.SectionBlock) if !ok { t.Fatal("expected block to be *slack.SectionBlock") @@ -408,6 +420,7 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now, }, validate: func(t *testing.T, block slack.Block) { + t.Helper() sb, ok := block.(*slack.SectionBlock) if !ok { t.Fatal("expected block to be *slack.SectionBlock") @@ -439,6 +452,7 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now.Add(-5 * 24 * time.Hour), }, validate: func(t *testing.T, block slack.Block) { + t.Helper() sb, ok := block.(*slack.SectionBlock) if !ok { t.Fatal("expected block to be *slack.SectionBlock") @@ -461,6 +475,7 @@ func TestFormatEnhancedPRBlock(t *testing.T) { UpdatedAt: now.Add(-60 * 24 * time.Hour), }, validate: func(t *testing.T, block slack.Block) { + t.Helper() sb, ok := block.(*slack.SectionBlock) if !ok { t.Fatal("expected block to be *slack.SectionBlock") diff --git a/pkg/notify/daily.go b/pkg/notify/daily.go index b6e1d6c..f9c3b00 100644 --- a/pkg/notify/daily.go +++ b/pkg/notify/daily.go @@ -296,7 +296,7 @@ func (d *DailyDigestScheduler) shouldSendDigest( // Check if we already sent a digest today today := now.Format("2006-01-02") - if lastDigest, exists := d.stateStore.LastDigest(slackUserID, today); exists { + if lastDigest, exists := d.stateStore.LastDigest(ctx, slackUserID, today); exists { slog.Debug("already sent digest today", "slack_user", slackUserID, "github_user", githubUser, @@ -338,7 +338,7 @@ func (d *DailyDigestScheduler) sendDigest( }) // Format digest message with separated sections - message := d.formatDigestMessage(incoming, outgoing) + message := d.formatDigestMessageAt(incoming, outgoing, time.Now()) // Send DM _, _, err = slackClient.SendDirectMessage(ctx, slackUserID, message) @@ -360,7 +360,7 @@ func (d *DailyDigestScheduler) sendDigest( today := time.Now().In(loc).Format("2006-01-02") // RecordDigest always succeeds (memory) and attempts persistence (best-effort) - if err := d.stateStore.RecordDigest(slackUserID, today, time.Now()); err != nil { + if err := d.stateStore.RecordDigest(ctx, slackUserID, today, time.Now()); err != nil { slog.Debug("state store returned error for RecordDigest", "error", err) } @@ -373,11 +373,6 @@ func (d *DailyDigestScheduler) sendDigest( return nil } -// formatDigestMessage formats a daily digest message with friendly, varied greetings. 
-func (d *DailyDigestScheduler) formatDigestMessage(incoming, outgoing []home.PR) string { - return d.formatDigestMessageAt(incoming, outgoing, time.Now()) -} - // formatDigestMessageAt formats a daily digest message at a specific time (for testing). func (*DailyDigestScheduler) formatDigestMessageAt(incoming, outgoing []home.PR, now time.Time) string { var sb strings.Builder diff --git a/pkg/notify/daily_digest_test.go b/pkg/notify/daily_digest_test.go index f4a87e9..57a01d4 100644 --- a/pkg/notify/daily_digest_test.go +++ b/pkg/notify/daily_digest_test.go @@ -15,6 +15,7 @@ import ( // TestShouldSendDigest_NoSlackMapping tests when GitHub user has no Slack mapping. func TestShouldSendDigest_NoSlackMapping(t *testing.T) { + ctx := context.Background() mockUserMapper := &mockDigestUserMapper{ slackHandleFunc: func(ctx context.Context, githubUser, org, domain string) (string, error) { return "", nil // No mapping @@ -33,7 +34,6 @@ func TestShouldSendDigest_NoSlackMapping(t *testing.T) { stateStore: stateStore, } - ctx := context.Background() result := scheduler.shouldSendDigest(ctx, mockUserMapper, mockClient, "testuser", "test-org", "example.com", nil) if result { @@ -43,6 +43,7 @@ func TestShouldSendDigest_NoSlackMapping(t *testing.T) { // TestShouldSendDigest_MappingError tests when user mapping fails with error. func TestShouldSendDigest_MappingError(t *testing.T) { + ctx := context.Background() mockUserMapper := &mockDigestUserMapper{ slackHandleFunc: func(ctx context.Context, githubUser, org, domain string) (string, error) { return "", errors.New("mapping error") @@ -55,7 +56,6 @@ func TestShouldSendDigest_MappingError(t *testing.T) { stateStore: stateStore, } - ctx := context.Background() result := scheduler.shouldSendDigest(ctx, mockUserMapper, &mockSlackClient{}, "testuser", "test-org", "example.com", nil) if result { @@ -65,6 +65,7 @@ func TestShouldSendDigest_MappingError(t *testing.T) { // TestShouldSendDigest_InvalidTimezone tests when user has invalid timezone. func TestShouldSendDigest_InvalidTimezone(t *testing.T) { + ctx := context.Background() mockUserMapper := &mockDigestUserMapper{ slackHandleFunc: func(ctx context.Context, githubUser, org, domain string) (string, error) { return "U123", nil @@ -83,7 +84,6 @@ func TestShouldSendDigest_InvalidTimezone(t *testing.T) { stateStore: stateStore, } - ctx := context.Background() result := scheduler.shouldSendDigest(ctx, mockUserMapper, mockClient, "testuser", "test-org", "example.com", nil) if result { @@ -93,6 +93,7 @@ func TestShouldSendDigest_InvalidTimezone(t *testing.T) { // TestShouldSendDigest_AlreadySentToday tests when digest was already sent today. func TestShouldSendDigest_AlreadySentToday(t *testing.T) { + ctx := context.Background() mockUserMapper := &mockDigestUserMapper{ slackHandleFunc: func(ctx context.Context, githubUser, org, domain string) (string, error) { return "U123", nil @@ -119,7 +120,6 @@ func TestShouldSendDigest_AlreadySentToday(t *testing.T) { stateStore: stateStore, } - ctx := context.Background() result := scheduler.shouldSendDigest(ctx, mockUserMapper, mockClient, "testuser", "test-org", "example.com", nil) if result { @@ -129,6 +129,7 @@ func TestShouldSendDigest_AlreadySentToday(t *testing.T) { // TestSendDigest_MappingError tests error handling when user mapping fails. 
func TestSendDigest_MappingError(t *testing.T) { + ctx := context.Background() mockUserMapper := &mockDigestUserMapper{ slackHandleFunc: func(ctx context.Context, githubUser, org, domain string) (string, error) { return "", context.DeadlineExceeded // Mapping failed @@ -142,8 +143,6 @@ func TestSendDigest_MappingError(t *testing.T) { stateStore: stateStore, } - ctx := context.Background() - err := scheduler.sendDigest(ctx, mockUserMapper, mockClient, "testuser", "test-org", "example.com", nil) if err == nil { @@ -153,6 +152,7 @@ func TestSendDigest_MappingError(t *testing.T) { // TestSendDigest_SendDMError tests error handling when SendDirectMessage fails. func TestSendDigest_SendDMError(t *testing.T) { + ctx := context.Background() mockUserMapper := &mockDigestUserMapper{ slackHandleFunc: func(ctx context.Context, githubUser, org, domain string) (string, error) { return "U123", nil @@ -174,8 +174,6 @@ func TestSendDigest_SendDMError(t *testing.T) { stateStore: stateStore, } - ctx := context.Background() - err := scheduler.sendDigest(ctx, mockUserMapper, mockClient, "testuser", "test-org", "example.com", nil) if err == nil { @@ -185,6 +183,7 @@ func TestSendDigest_SendDMError(t *testing.T) { // TestSendDigest_Success tests successful digest sending with state recording. func TestSendDigest_Success(t *testing.T) { + ctx := context.Background() dmSent := false digestRecorded := false @@ -215,7 +214,6 @@ func TestSendDigest_Success(t *testing.T) { stateStore: stateStore, } - ctx := context.Background() prs := []home.PR{ { Title: "Fix bug", @@ -242,6 +240,7 @@ func TestSendDigest_Success(t *testing.T) { // TestAnalyzePR_Success tests successful PR analysis. func TestAnalyzePR_Success(t *testing.T) { + ctx := context.Background() mockClient := &mockGitHubClient{ installationTokenFunc: func(ctx context.Context) string { return "test-token" @@ -260,7 +259,6 @@ func TestAnalyzePR_Success(t *testing.T) { }, } - ctx := context.Background() pr := home.PR{ URL: "https://github.com/test-org/test-repo/pull/1", Author: "testuser", @@ -279,6 +277,7 @@ func TestAnalyzePR_Success(t *testing.T) { // TestAnalyzePR_TurnClientFactoryError tests when turn client creation fails. func TestAnalyzePR_TurnClientFactoryError(t *testing.T) { + ctx := context.Background() mockClient := &mockGitHubClient{ installationTokenFunc: func(ctx context.Context) string { return "test-token" @@ -291,9 +290,7 @@ func TestAnalyzePR_TurnClientFactoryError(t *testing.T) { }, } - ctx := context.Background() pr := home.PR{ - URL: "https://github.com/test-org/test-repo/pull/1", Author: "testuser", UpdatedAt: time.Now(), } @@ -307,6 +304,7 @@ func TestAnalyzePR_TurnClientFactoryError(t *testing.T) { // TestAnalyzePR_CheckError tests when turnclient Check fails. func TestAnalyzePR_CheckError(t *testing.T) { + ctx := context.Background() mockClient := &mockGitHubClient{ installationTokenFunc: func(ctx context.Context) string { return "test-token" @@ -325,7 +323,6 @@ func TestAnalyzePR_CheckError(t *testing.T) { }, } - ctx := context.Background() pr := home.PR{ URL: "https://github.com/test-org/test-repo/pull/1", Author: "testuser", @@ -341,6 +338,7 @@ func TestAnalyzePR_CheckError(t *testing.T) { // TestProcessOrgDigests_NoGitHubClient tests when GitHub client is unavailable. 
func TestProcessOrgDigests_NoGitHubClient(t *testing.T) { + ctx := context.Background() mockGitHubMgr := &mockGitHubManager{ clientForOrgFunc: func(org string) (github.ClientInterface, bool) { return nil, false // No client @@ -354,20 +352,20 @@ func TestProcessOrgDigests_NoGitHubClient(t *testing.T) { slackManager: &mockSlackManagerWithClient{}, } - ctx := context.Background() - sent, errors := scheduler.processOrgDigests(ctx, "test-org") + sent, errCount := scheduler.processOrgDigests(ctx, "test-org") if sent != 0 { t.Errorf("expected 0 sent, got %d", sent) } - if errors != 1 { - t.Errorf("expected 1 error, got %d", errors) + if errCount != 1 { + t.Errorf("expected 1 error, got %d", errCount) } } // TestProcessOrgDigests_NoConfig tests when config is unavailable. func TestProcessOrgDigests_NoConfig(t *testing.T) { + ctx := context.Background() mockGitHubMgr := &mockGitHubManager{ clientForOrgFunc: func(org string) (github.ClientInterface, bool) { return &mockGitHubClient{}, true @@ -387,20 +385,20 @@ func TestProcessOrgDigests_NoConfig(t *testing.T) { slackManager: &mockSlackManagerWithClient{}, } - ctx := context.Background() - sent, errors := scheduler.processOrgDigests(ctx, "test-org") + sent, errCount := scheduler.processOrgDigests(ctx, "test-org") if sent != 0 { t.Errorf("expected 0 sent, got %d", sent) } - if errors != 1 { - t.Errorf("expected 1 error, got %d", errors) + if errCount != 1 { + t.Errorf("expected 1 error, got %d", errCount) } } // TestProcessOrgDigests_NoSlackClient tests when Slack client is unavailable. func TestProcessOrgDigests_NoSlackClient(t *testing.T) { + ctx := context.Background() mockGitHubMgr := &mockGitHubManager{ clientForOrgFunc: func(org string) (github.ClientInterface, bool) { return &mockGitHubClient{}, true @@ -418,20 +416,20 @@ func TestProcessOrgDigests_NoSlackClient(t *testing.T) { slackManager: mockSlackMgr, } - ctx := context.Background() - sent, errors := scheduler.processOrgDigests(ctx, "test-org") + sent, errCount := scheduler.processOrgDigests(ctx, "test-org") if sent != 0 { t.Errorf("expected 0 sent, got %d", sent) } - if errors != 1 { - t.Errorf("expected 1 error, got %d", errors) + if errCount != 1 { + t.Errorf("expected 1 error, got %d", errCount) } } // TestShouldSendDigest_In8to9amWindow tests when user is in 8-9am window. func TestShouldSendDigest_In8to9amWindow(t *testing.T) { + ctx := context.Background() mockUserMapper := &mockDigestUserMapper{ slackHandleFunc: func(ctx context.Context, githubUser, org, domain string) (string, error) { return "U123", nil @@ -459,8 +457,6 @@ func TestShouldSendDigest_In8to9amWindow(t *testing.T) { stateStore: stateStore, } - ctx := context.Background() - // This test is time-dependent - it will pass if run during 8-9am UTC // For deterministic testing, we'd need to inject time, but this shows the logic result := scheduler.shouldSendDigest(ctx, mockUserMapper, mockClient, "testuser", "test-org", "example.com", nil) @@ -471,6 +467,7 @@ func TestShouldSendDigest_In8to9amWindow(t *testing.T) { // TestSendDigest_PRSorting tests that PRs are sorted by update time. func TestSendDigest_PRSorting(t *testing.T) { + ctx := context.Background() dmSent := false var sentMessage string @@ -497,8 +494,6 @@ func TestSendDigest_PRSorting(t *testing.T) { stateStore: stateStore, } - ctx := context.Background() - // Create PRs with different update times oldPR := home.PR{ Title: "Old PR", @@ -536,6 +531,7 @@ func TestSendDigest_PRSorting(t *testing.T) { // TestSendDigest_TimezoneFallback tests timezone fallback to UTC. 
func TestSendDigest_TimezoneFallback(t *testing.T) { + ctx := context.Background() digestRecorded := false var recordedDate string @@ -566,7 +562,6 @@ func TestSendDigest_TimezoneFallback(t *testing.T) { stateStore: stateStore, } - ctx := context.Background() prs := []home.PR{ { Title: "Test PR", @@ -619,6 +614,7 @@ func TestNewDailyDigestScheduler_FactoryWorks(t *testing.T) { // TestProcessOrgDigests_FetchPRsError tests when fetchOrgPRs fails. func TestProcessOrgDigests_FetchPRsError(t *testing.T) { + ctx := context.Background() mockGitHubClient := &mockGitHubClient{ clientFunc: func() *gh.Client { // Return nil to cause fetchOrgPRs to fail @@ -645,20 +641,20 @@ func TestProcessOrgDigests_FetchPRsError(t *testing.T) { slackManager: mockSlackMgr, } - ctx := context.Background() - sent, errors := scheduler.processOrgDigests(ctx, "test-org") + sent, errCount := scheduler.processOrgDigests(ctx, "test-org") if sent != 0 { t.Errorf("expected 0 sent, got %d", sent) } - if errors != 1 { - t.Errorf("expected 1 error, got %d", errors) + if errCount != 1 { + t.Errorf("expected 1 error, got %d", errCount) } } // TestCheckAndSend_WithOrgs tests successful processing of organizations. func TestCheckAndSend_WithOrgs(t *testing.T) { + ctx := context.Background() mockGitHubMgr := &mockGitHubManager{ allOrgsFunc: func() []string { return []string{"test-org"} @@ -682,8 +678,6 @@ func TestCheckAndSend_WithOrgs(t *testing.T) { slackManager: &mockSlackManagerWithClient{}, } - ctx := context.Background() - // Should not crash and should process the org scheduler.CheckAndSend(ctx) } diff --git a/pkg/notify/daily_mocks_test.go b/pkg/notify/daily_mocks_test.go index b1ee585..66dddf7 100644 --- a/pkg/notify/daily_mocks_test.go +++ b/pkg/notify/daily_mocks_test.go @@ -122,26 +122,26 @@ func (m *mockConfigProvider) ReminderDMDelay(org, channel string) int { type mockStateProvider struct { lastDigestFunc func(userID, date string) (time.Time, bool) recordDigestFunc func(userID, date string, sentAt time.Time) error - lastDMFunc func(userID, prURL string) (time.Time, bool) + lastDMFunc func(ctx context.Context, userID, prURL string) (time.Time, bool) } -func (m *mockStateProvider) LastDigest(userID, date string) (time.Time, bool) { +func (m *mockStateProvider) LastDigest(ctx context.Context, userID, date string) (time.Time, bool) { if m.lastDigestFunc != nil { return m.lastDigestFunc(userID, date) } return time.Time{}, false } -func (m *mockStateProvider) RecordDigest(userID, date string, sentAt time.Time) error { +func (m *mockStateProvider) RecordDigest(ctx context.Context, userID, date string, sentAt time.Time) error { if m.recordDigestFunc != nil { return m.recordDigestFunc(userID, date, sentAt) } return nil } -func (m *mockStateProvider) LastDM(userID, prURL string) (time.Time, bool) { +func (m *mockStateProvider) LastDM(ctx context.Context, userID, prURL string) (time.Time, bool) { if m.lastDMFunc != nil { - return m.lastDMFunc(userID, prURL) + return m.lastDMFunc(ctx, userID, prURL) } return time.Time{}, false } diff --git a/pkg/notify/daily_test.go b/pkg/notify/daily_test.go index 5bcf951..b40902b 100644 --- a/pkg/notify/daily_test.go +++ b/pkg/notify/daily_test.go @@ -424,6 +424,7 @@ func TestFormatDigestMessage_EmptyPRLists(t *testing.T) { // TestCheckAndSend_NoOrgs tests when there are no organizations configured. 
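The mock state provider above now takes a context.Context as its first parameter, matching the ctx-first signatures applied throughout this patch. A minimal sketch of that pattern, using hypothetical DigestStore and fakeDigestStore names rather than the types in this repository:

package main

import (
	"context"
	"fmt"
	"time"
)

// DigestStore is a hypothetical, pared-down stand-in for a ctx-aware state interface.
type DigestStore interface {
	LastDigest(ctx context.Context, userID, date string) (time.Time, bool)
	RecordDigest(ctx context.Context, userID, date string, sentAt time.Time) error
}

// fakeDigestStore is an in-memory test double; it ignores ctx but keeps the same signatures.
type fakeDigestStore struct {
	sent map[string]time.Time
}

func (f *fakeDigestStore) LastDigest(_ context.Context, userID, date string) (time.Time, bool) {
	t, ok := f.sent[userID+":"+date]
	return t, ok
}

func (f *fakeDigestStore) RecordDigest(_ context.Context, userID, date string, sentAt time.Time) error {
	f.sent[userID+":"+date] = sentAt
	return nil
}

func main() {
	ctx := context.Background()
	var store DigestStore = &fakeDigestStore{sent: map[string]time.Time{}}
	_ = store.RecordDigest(ctx, "U123", "2025-11-03", time.Now())
	_, ok := store.LastDigest(ctx, "U123", "2025-11-03")
	fmt.Println("digest recorded:", ok)
}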
func TestCheckAndSend_NoOrgs(t *testing.T) { + ctx := context.Background() mockGitHubMgr := &mockGitHubManager{ allOrgsFunc: func() []string { return []string{} // No orgs @@ -437,14 +438,13 @@ func TestCheckAndSend_NoOrgs(t *testing.T) { slackManager: &mockSlackManagerWithClient{}, } - ctx := context.Background() - // Should not crash scheduler.CheckAndSend(ctx) } // TestCheckAndSend_DailyRemindersDisabled tests when daily reminders are disabled. func TestCheckAndSend_DailyRemindersDisabled(t *testing.T) { + ctx := context.Background() mockGitHubMgr := &mockGitHubManager{ allOrgsFunc: func() []string { return []string{"test-org"} @@ -464,8 +464,6 @@ func TestCheckAndSend_DailyRemindersDisabled(t *testing.T) { slackManager: &mockSlackManagerWithClient{}, } - ctx := context.Background() - // Should not crash and should skip processing scheduler.CheckAndSend(ctx) } diff --git a/pkg/notify/format_edge_test.go b/pkg/notify/format_edge_test.go index af75288..7587302 100644 --- a/pkg/notify/format_edge_test.go +++ b/pkg/notify/format_edge_test.go @@ -41,6 +41,7 @@ func TestFormatChannelMessageBase_DraftPR(t *testing.T) { // TestNotifyUser_NoChannelName tests NotifyUser when channelName is empty. func TestNotifyUser_NoChannelName(t *testing.T) { + ctx := context.Background() mockClient := &mockSlackClient{ isUserActiveFunc: func(ctx context.Context, userID string) bool { return true @@ -65,7 +66,6 @@ func TestNotifyUser_NoChannelName(t *testing.T) { configManager: &mockConfigManager{}, } - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", @@ -82,6 +82,7 @@ func TestNotifyUser_NoChannelName(t *testing.T) { // TestNotifyUser_HasRecentDM tests that NotifyUser skips DM when HasRecentDMAboutPR returns true. func TestNotifyUser_HasRecentDM(t *testing.T) { + ctx := context.Background() dmSent := false mockClient := &mockSlackClient{ isUserActiveFunc: func(ctx context.Context, userID string) bool { @@ -108,7 +109,6 @@ func TestNotifyUser_HasRecentDM(t *testing.T) { configManager: &mockConfigManager{}, } - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", @@ -129,6 +129,7 @@ func TestNotifyUser_HasRecentDM(t *testing.T) { // TestNotifyUser_SaveDMMessageInfoError tests error handling when SaveDMMessageInfo fails. func TestNotifyUser_SaveDMMessageInfoError(t *testing.T) { + ctx := context.Background() mockClient := &mockSlackClient{ isUserActiveFunc: func(ctx context.Context, userID string) bool { return true @@ -156,7 +157,6 @@ func TestNotifyUser_SaveDMMessageInfoError(t *testing.T) { configManager: &mockConfigManager{}, } - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", diff --git a/pkg/notify/format_test.go b/pkg/notify/format_test.go index ba9a05b..288b2d3 100644 --- a/pkg/notify/format_test.go +++ b/pkg/notify/format_test.go @@ -2,6 +2,7 @@ package notify import ( "context" + "errors" "testing" "time" @@ -633,7 +634,7 @@ func TestNewDailyDigestScheduler(t *testing.T) { type mockSlackManager struct{} func (m *mockSlackManager) Client(ctx context.Context, teamID string) (SlackClient, error) { - return nil, nil + return nil, errors.New("not implemented") } // mockConfigManager implements the config interface needed by New and ConfigProvider. diff --git a/pkg/notify/interfaces.go b/pkg/notify/interfaces.go index 628e505..3e850ac 100644 --- a/pkg/notify/interfaces.go +++ b/pkg/notify/interfaces.go @@ -51,9 +51,9 @@ type ConfigProvider interface { // StateProvider provides state storage for daily digests. 
// Used by DailyDigestScheduler. type StateProvider interface { - LastDigest(userID, date string) (time.Time, bool) - RecordDigest(userID, date string, sentAt time.Time) error - LastDM(userID, prURL string) (time.Time, bool) + LastDigest(ctx context.Context, userID, date string) (time.Time, bool) + RecordDigest(ctx context.Context, userID, date string, sentAt time.Time) error + LastDM(ctx context.Context, userID, prURL string) (time.Time, bool) } // slackManagerAdapter adapts concrete slack.Manager to implement SlackManager interface. diff --git a/pkg/notify/notify.go b/pkg/notify/notify.go index eaae028..a2b7fd8 100644 --- a/pkg/notify/notify.go +++ b/pkg/notify/notify.go @@ -72,37 +72,38 @@ func FormatChannelMessageBase(ctx context.Context, params MessageParams) string "unresolved_comments", a.UnresolvedComments) // Determine emoji and state parameter - var emoji, state string + var emoji, stateSuffix string // Handle merged/closed states first (most definitive) //nolint:gocritic // if-else chain is clearer than switch for state-based logic if pr.Merged { - emoji, state = ":rocket:", "?st=merged" + emoji, stateSuffix = ":rocket:", "?st=merged" slog.Info("using :rocket: emoji - PR is merged", "pr", prID, "merged_at", pr.MergedAt) } else if pr.State == "closed" { - emoji, state = ":x:", "?st=closed" + emoji, stateSuffix = ":x:", "?st=closed" slog.Info("using :x: emoji - PR is closed but not merged", "pr", prID) } else if a.WorkflowState != "" { // Use WorkflowState as primary signal (most reliable source of truth) - emoji, state = emojiFromWorkflowState(a.WorkflowState, a.NextAction) - slog.Info("using emoji from workflow_state", "pr", prID, "workflow_state", a.WorkflowState, "emoji", emoji, "state_param", state) + emoji, stateSuffix = emojiFromWorkflowState(a.WorkflowState, a.NextAction) + slog.Info("using emoji from workflow_state", "pr", prID, "workflow_state", a.WorkflowState, "emoji", emoji, "state_param", stateSuffix) } else if len(a.NextAction) > 0 { // Fallback to NextAction if no WorkflowState (shouldn't normally happen) action := PrimaryAction(a.NextAction) emoji = PrefixForAction(action) - state = stateParam(params.CheckResult) - slog.Info("using emoji from primary next_action (no workflow_state)", "pr", prID, "primary_action", action, "emoji", emoji, "state_param", state) + stateSuffix = stateParam(params.CheckResult) + slog.Info("using emoji from primary next_action (no workflow_state)", + "pr", prID, "primary_action", action, "emoji", emoji, "state_param", stateSuffix) } else { // Final fallback based on PR properties - emoji, state = fallbackEmoji(params.CheckResult) + emoji, stateSuffix = fallbackEmoji(params.CheckResult) //nolint:revive // line length acceptable for structured logging - slog.Info("using fallback emoji - no workflow_state or next_actions", "pr", prID, "emoji", emoji, "state_param", state, "fallback_reason", "empty_workflow_state_and_next_actions") + slog.Info("using fallback emoji - no workflow_state or next_actions", "pr", prID, "emoji", emoji, "state_param", stateSuffix, "fallback_reason", "empty_workflow_state_and_next_actions") } return fmt.Sprintf("%s %s <%s|%s#%d> · %s", emoji, params.Title, - params.HTMLURL+state, + params.HTMLURL+stateSuffix, params.Repo, params.PRNumber, params.Author) @@ -125,7 +126,7 @@ func FormatNextActionsSuffix(ctx context.Context, params MessageParams) string { // emojiFromWorkflowState determines emoji based on WorkflowState as the primary signal. 
// Uses NextAction for additional granularity in specific states (e.g., test failures). -func emojiFromWorkflowState(workflowState string, nextActions map[string]turn.Action) (emoji, state string) { +func emojiFromWorkflowState(workflowState string, nextActions map[string]turn.Action) (emoji, stateSuffix string) { switch workflowState { case string(turn.StateNewlyPublished): return ":new:", "?st=newly_published" @@ -211,7 +212,7 @@ func stateParam(r *turn.CheckResponse) string { } // fallbackEmoji determines emoji when no workflow_state or next_actions are available. -func fallbackEmoji(r *turn.CheckResponse) (emoji, state string) { +func fallbackEmoji(r *turn.CheckResponse) (emoji, stateSuffix string) { pr := r.PullRequest a := r.Analysis @@ -279,9 +280,9 @@ func formatNextActionsInternal(ctx context.Context, nextActions map[string]turn. // Store interface for persistent DM queue management. type Store interface { - QueuePendingDM(dm state.PendingDM) error - PendingDMs(before time.Time) ([]state.PendingDM, error) - RemovePendingDM(id string) error + QueuePendingDM(ctx context.Context, dm *state.PendingDM) error + PendingDMs(ctx context.Context, before time.Time) ([]state.PendingDM, error) + RemovePendingDM(ctx context.Context, id string) error } // Manager handles user notifications across multiple workspaces. @@ -337,7 +338,7 @@ func (m *Manager) Run(ctx context.Context) error { // processPendingDMs checks for pending DMs that should be sent and sends them. func (m *Manager) processPendingDMs(ctx context.Context) error { now := time.Now() - pendingDMs, err := m.store.PendingDMs(now) + pendingDMs, err := m.store.PendingDMs(ctx, now) if err != nil { return fmt.Errorf("failed to get pending DMs: %w", err) } @@ -348,7 +349,8 @@ func (m *Manager) processPendingDMs(ctx context.Context) error { slog.Info("processing pending DMs", "count", len(pendingDMs)) - for _, dm := range pendingDMs { + for i := range pendingDMs { + dm := &pendingDMs[i] // Deserialize NextActions var nextAction map[string]turn.Action if err := json.Unmarshal([]byte(dm.NextActions), &nextAction); err != nil { @@ -386,7 +388,7 @@ func (m *Manager) processPendingDMs(ctx context.Context) error { } // Remove from queue after successful send - if err := m.store.RemovePendingDM(dm.ID); err != nil { + if err := m.store.RemovePendingDM(ctx, dm.ID); err != nil { slog.Error("failed to remove pending DM from queue", "dm_id", dm.ID, "user", dm.UserID, @@ -622,7 +624,7 @@ func PrimaryAction(nextActions map[string]turn.Action) string { // PrefixForAnalysis returns the emoji prefix based on workflow state and next actions. // This is the primary function for determining PR emoji - it handles the logic: // 1. Use WorkflowState as primary signal (most reliable) -// 2. Fall back to NextAction if no WorkflowState +// 2. Fall back to NextAction if no WorkflowState. 
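The processPendingDMs change above replaces a value-copying range loop with an index loop that takes the element's address. A small sketch of the same idiom, with a hypothetical queuedDM type standing in for the real record:

package main

import "fmt"

// queuedDM is a hypothetical struct standing in for a larger queued-message record.
type queuedDM struct {
	ID   string
	Text string
}

func main() {
	pending := []queuedDM{{ID: "1", Text: "a"}, {ID: "2", Text: "b"}}

	// Index loop: dm points at the slice element, so no per-iteration struct copy
	// is made and mutations through dm are visible in the slice afterwards.
	for i := range pending {
		dm := &pending[i]
		dm.Text += "!"
		fmt.Println(dm.ID, dm.Text)
	}
}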
func PrefixForAnalysis(workflowState string, nextActions map[string]turn.Action) string { // Log input for debugging emoji selection actionKinds := make([]string, 0, len(nextActions)) @@ -798,7 +800,7 @@ func (m *Manager) NotifyUser(ctx context.Context, workspaceID, userID, channelID SendAfter: sendAfter, } - if err := m.store.QueuePendingDM(pendingDM); err != nil { + if err := m.store.QueuePendingDM(ctx, &pendingDM); err != nil { slog.Error("failed to queue pending DM", "user", userID, "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), diff --git a/pkg/notify/notify_test.go b/pkg/notify/notify_test.go index 2cff175..618b75c 100644 --- a/pkg/notify/notify_test.go +++ b/pkg/notify/notify_test.go @@ -2,6 +2,7 @@ package notify import ( "context" + "errors" "testing" "time" ) @@ -15,6 +16,7 @@ func TestNotifyUserRequiresDeeperMocking(t *testing.T) { // TestNotifyManagerRun tests the notification scheduler Run method. func TestNotifyManagerRun(t *testing.T) { + ctx := context.Background() mockSlackMgr := &mockSlackManager{} mockConfigMgr := &mockConfigManager{} @@ -26,7 +28,7 @@ func TestNotifyManagerRun(t *testing.T) { // Run should return when context is cancelled err := manager.Run(ctx) - if err != context.DeadlineExceeded && err != context.Canceled { + if !errors.Is(err, context.DeadlineExceeded) && !errors.Is(err, context.Canceled) { t.Errorf("expected context error, got %v", err) } } diff --git a/pkg/notify/notify_user_test.go b/pkg/notify/notify_user_test.go index eaf2fa7..45b4901 100644 --- a/pkg/notify/notify_user_test.go +++ b/pkg/notify/notify_user_test.go @@ -44,7 +44,7 @@ func (m *mockSlackClient) UserTimezone(ctx context.Context, userID string) (stri return "America/New_York", nil } -func (m *mockSlackClient) SendDirectMessage(ctx context.Context, userID, text string) (string, string, error) { +func (m *mockSlackClient) SendDirectMessage(ctx context.Context, userID, text string) (channelID, timestamp string, err error) { if m.sendDirectMessageFunc != nil { return m.sendDirectMessageFunc(ctx, userID, text) } @@ -109,6 +109,7 @@ func (m *mockConfigManagerCustomizable) ReminderDMDelay(org, channel string) int // TestNotifyUser_UserInactive tests that notifications are deferred when user is inactive. func TestNotifyUser_UserInactive(t *testing.T) { + ctx := context.Background() mockClient := &mockSlackClient{ isUserActiveFunc: func(ctx context.Context, userID string) bool { return false // User is inactive @@ -127,7 +128,6 @@ func TestNotifyUser_UserInactive(t *testing.T) { configManager: &mockConfigManager{}, } - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", @@ -143,6 +143,7 @@ func TestNotifyUser_UserInactive(t *testing.T) { // TestNotifyUser_AntiSpam tests anti-spam protection (1 minute minimum between DMs). func TestNotifyUser_AntiSpam(t *testing.T) { + ctx := context.Background() dmSent := false mockClient := &mockSlackClient{ isUserActiveFunc: func(ctx context.Context, userID string) bool { @@ -169,7 +170,6 @@ func TestNotifyUser_AntiSpam(t *testing.T) { // Record a recent DM (30 seconds ago) manager.Tracker.lastDM["T123:U123"] = time.Now().Add(-30 * time.Second) - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", @@ -190,6 +190,7 @@ func TestNotifyUser_AntiSpam(t *testing.T) { // TestNotifyUser_DelayedDM_UserInChannel tests delayed DM logic when user is in the tagged channel. 
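The SendDirectMessage mock above names its two string results so callers can tell the channel ID from the timestamp. A sketch of that convention with a hypothetical messageSender interface and stub:

package main

import (
	"context"
	"fmt"
)

// messageSender is a hypothetical interface; naming same-typed results documents
// which string is which without changing behavior.
type messageSender interface {
	SendDirectMessage(ctx context.Context, userID, text string) (channelID, timestamp string, err error)
}

type stubSender struct{}

func (stubSender) SendDirectMessage(_ context.Context, userID, _ string) (channelID, timestamp string, err error) {
	return "D" + userID, "1234567890.123456", nil
}

func main() {
	var s messageSender = stubSender{}
	channelID, ts, err := s.SendDirectMessage(context.Background(), "U001", "hello")
	fmt.Println(channelID, ts, err)
}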
func TestNotifyUser_DelayedDM_UserInChannel(t *testing.T) { + ctx := context.Background() dmSent := false mockClient := &mockSlackClient{ isUserActiveFunc: func(ctx context.Context, userID string) bool { @@ -226,7 +227,6 @@ func TestNotifyUser_DelayedDM_UserInChannel(t *testing.T) { Timestamp: time.Now().Add(-30 * time.Minute), } - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", @@ -247,6 +247,7 @@ func TestNotifyUser_DelayedDM_UserInChannel(t *testing.T) { // TestNotifyUser_DelayedDM_UserNotInChannel tests immediate DM when user is NOT in the tagged channel. func TestNotifyUser_DelayedDM_UserNotInChannel(t *testing.T) { + ctx := context.Background() dmSent := false mockClient := &mockSlackClient{ isUserActiveFunc: func(ctx context.Context, userID string) bool { @@ -282,7 +283,6 @@ func TestNotifyUser_DelayedDM_UserNotInChannel(t *testing.T) { Timestamp: time.Now().Add(-5 * time.Minute), } - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", @@ -303,6 +303,7 @@ func TestNotifyUser_DelayedDM_UserNotInChannel(t *testing.T) { // TestNotifyUser_DelayElapsed tests that DM is sent after delay period elapses. func TestNotifyUser_DelayElapsed(t *testing.T) { + ctx := context.Background() dmSent := false mockClient := &mockSlackClient{ isUserActiveFunc: func(ctx context.Context, userID string) bool { @@ -342,7 +343,6 @@ func TestNotifyUser_DelayElapsed(t *testing.T) { Timestamp: time.Now().Add(-70 * time.Minute), } - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", @@ -363,6 +363,7 @@ func TestNotifyUser_DelayElapsed(t *testing.T) { // TestNotifyUser_RemindersDisabled tests that DM is skipped when reminder_dm_delay is 0. func TestNotifyUser_RemindersDisabled(t *testing.T) { + ctx := context.Background() dmSent := false mockClient := &mockSlackClient{ isUserActiveFunc: func(ctx context.Context, userID string) bool { @@ -397,7 +398,6 @@ func TestNotifyUser_RemindersDisabled(t *testing.T) { Timestamp: time.Now().Add(-5 * time.Minute), } - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", @@ -418,6 +418,7 @@ func TestNotifyUser_RemindersDisabled(t *testing.T) { // TestNotifyUser_SendDirectMessageError tests error handling when SendDirectMessage fails. func TestNotifyUser_SendDirectMessageError(t *testing.T) { + ctx := context.Background() mockClient := &mockSlackClient{ isUserActiveFunc: func(ctx context.Context, userID string) bool { return true @@ -442,7 +443,6 @@ func TestNotifyUser_SendDirectMessageError(t *testing.T) { configManager: &mockConfigManager{}, } - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", @@ -460,26 +460,26 @@ func TestNotifyUser_SendDirectMessageError(t *testing.T) { // mockStoreCustomizable allows customizing store behavior for testing. 
type mockStoreCustomizable struct { - queuePendingDMFunc func(dm state.PendingDM) error + queuePendingDMFunc func(dm *state.PendingDM) error getPendingDMsFunc func(before time.Time) ([]state.PendingDM, error) removePendingDMFunc func(id string) error } -func (m *mockStoreCustomizable) QueuePendingDM(dm state.PendingDM) error { +func (m *mockStoreCustomizable) QueuePendingDM(ctx context.Context, dm *state.PendingDM) error { if m.queuePendingDMFunc != nil { return m.queuePendingDMFunc(dm) } return nil } -func (m *mockStoreCustomizable) PendingDMs(before time.Time) ([]state.PendingDM, error) { +func (m *mockStoreCustomizable) PendingDMs(ctx context.Context, before time.Time) ([]state.PendingDM, error) { if m.getPendingDMsFunc != nil { return m.getPendingDMsFunc(before) } return nil, nil } -func (m *mockStoreCustomizable) RemovePendingDM(id string) error { +func (m *mockStoreCustomizable) RemovePendingDM(ctx context.Context, id string) error { if m.removePendingDMFunc != nil { return m.removePendingDMFunc(id) } @@ -573,6 +573,7 @@ func TestProcessPendingDMs(t *testing.T) { // TestProcessPendingDMs_EmptyQueue tests processPendingDMs with no pending DMs. func TestProcessPendingDMs_EmptyQueue(t *testing.T) { + ctx := context.Background() mockSt := &mockStoreCustomizable{ getPendingDMsFunc: func(before time.Time) ([]state.PendingDM, error) { return []state.PendingDM{}, nil // No pending DMs @@ -589,7 +590,6 @@ func TestProcessPendingDMs_EmptyQueue(t *testing.T) { }, } - ctx := context.Background() err := manager.processPendingDMs(ctx) if err != nil { t.Fatalf("unexpected error with empty queue: %v", err) @@ -598,6 +598,7 @@ func TestProcessPendingDMs_EmptyQueue(t *testing.T) { // TestProcessPendingDMs_StoreError tests error handling when store fails. func TestProcessPendingDMs_StoreError(t *testing.T) { + ctx := context.Background() mockSt := &mockStoreCustomizable{ getPendingDMsFunc: func(before time.Time) ([]state.PendingDM, error) { return nil, errors.New("database error") @@ -608,7 +609,6 @@ func TestProcessPendingDMs_StoreError(t *testing.T) { store: mockSt, } - ctx := context.Background() err := manager.processPendingDMs(ctx) if err == nil { t.Error("expected error when store fails") @@ -617,6 +617,7 @@ func TestProcessPendingDMs_StoreError(t *testing.T) { // TestSendDMNow tests the sendDMNow function. func TestSendDMNow(t *testing.T) { + ctx := context.Background() dmSent := false var sentMessage string @@ -644,7 +645,6 @@ func TestSendDMNow(t *testing.T) { configManager: &mockConfigManager{}, } - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", @@ -671,6 +671,7 @@ func TestSendDMNow(t *testing.T) { // TestSendDMNow_UserInactive tests sendDMNow skips inactive users. func TestSendDMNow_UserInactive(t *testing.T) { + ctx := context.Background() dmSent := false mockClient := &mockSlackClient{ @@ -695,7 +696,6 @@ func TestSendDMNow_UserInactive(t *testing.T) { }, } - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", @@ -716,6 +716,7 @@ func TestSendDMNow_UserInactive(t *testing.T) { // TestSendDMNow_AntiSpam tests sendDMNow respects anti-spam limits. 
func TestSendDMNow_AntiSpam(t *testing.T) { + ctx := context.Background() dmSent := false mockClient := &mockSlackClient{ @@ -743,7 +744,6 @@ func TestSendDMNow_AntiSpam(t *testing.T) { // Record a recent DM (30 seconds ago) manager.Tracker.lastDM["T123:U001"] = time.Now().Add(-30 * time.Second) - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", @@ -764,6 +764,7 @@ func TestSendDMNow_AntiSpam(t *testing.T) { // TestSendDMNow_SlackError tests error handling when Slack API fails. func TestSendDMNow_SlackError(t *testing.T) { + ctx := context.Background() mockClient := &mockSlackClient{ isUserActiveFunc: func(ctx context.Context, userID string) bool { return true @@ -785,7 +786,6 @@ func TestSendDMNow_SlackError(t *testing.T) { }, } - ctx := context.Background() pr := PRInfo{ Owner: "test-org", Repo: "test-repo", diff --git a/pkg/notify/run_test.go b/pkg/notify/run_test.go index 38d7fce..39caaca 100644 --- a/pkg/notify/run_test.go +++ b/pkg/notify/run_test.go @@ -2,6 +2,7 @@ package notify import ( "context" + "errors" "testing" "time" @@ -11,20 +12,21 @@ import ( // mockStore implements Store interface for testing. type mockStore struct{} -func (m *mockStore) QueuePendingDM(dm state.PendingDM) error { +func (m *mockStore) QueuePendingDM(ctx context.Context, dm *state.PendingDM) error { return nil } -func (m *mockStore) PendingDMs(before time.Time) ([]state.PendingDM, error) { +func (m *mockStore) PendingDMs(ctx context.Context, before time.Time) ([]state.PendingDM, error) { return nil, nil } -func (m *mockStore) RemovePendingDM(id string) error { +func (m *mockStore) RemovePendingDM(ctx context.Context, id string) error { return nil } // TestRun_CleanupTicker tests that Run calls Tracker.Cleanup periodically. func TestRun_CleanupTicker(t *testing.T) { + ctx := context.Background() cleanupCalled := false // Create a tracker that we can verify cleanup was called on @@ -54,7 +56,7 @@ func TestRun_CleanupTicker(t *testing.T) { // Run should exit when context is cancelled err := manager.Run(ctx) - if err != context.DeadlineExceeded && err != context.Canceled { + if !errors.Is(err, context.DeadlineExceeded) && !errors.Is(err, context.Canceled) { t.Errorf("expected context error, got %v", err) } @@ -72,6 +74,7 @@ func TestRun_CleanupTicker(t *testing.T) { // TestRun_ContextCancellation tests that Run respects context cancellation. func TestRun_ContextCancellation(t *testing.T) { + ctx := context.Background() mockSlackMgr := &mockSlackManager{} mockConfigMgr := &mockConfigManager{} mockSt := &mockStore{} @@ -86,13 +89,14 @@ func TestRun_ContextCancellation(t *testing.T) { err := manager.Run(ctx) // Should return context.Canceled - if err != context.Canceled { + if !errors.Is(err, context.Canceled) { t.Errorf("expected context.Canceled, got %v", err) } } // TestRun_TickerFires tests that the main ticker fires. 
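The run tests above compare context errors with errors.Is instead of ==, which also matches errors wrapped with %w. A minimal illustration of why the comparison form matters:

package main

import (
	"context"
	"errors"
	"fmt"
)

func run(ctx context.Context) error {
	<-ctx.Done()
	// Wrapping is common in real code; a plain == against context.Canceled now fails.
	return fmt.Errorf("run loop stopped: %w", ctx.Err())
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	cancel()
	err := run(ctx)

	fmt.Println("== match:       ", err == context.Canceled)          // false: err is wrapped
	fmt.Println("errors.Is match:", errors.Is(err, context.Canceled)) // true: unwraps the chain
}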
func TestRun_TickerFires(t *testing.T) { + ctx := context.Background() mockSlackMgr := &mockSlackManager{} mockConfigMgr := &mockConfigManager{} mockSt := &mockStore{} @@ -105,7 +109,7 @@ func TestRun_TickerFires(t *testing.T) { err := manager.Run(ctx) - if err != context.DeadlineExceeded && err != context.Canceled { + if !errors.Is(err, context.DeadlineExceeded) && !errors.Is(err, context.Canceled) { t.Errorf("expected context timeout, got %v", err) } } diff --git a/pkg/notify/tracker_test.go b/pkg/notify/tracker_test.go index 953850d..963d02f 100644 --- a/pkg/notify/tracker_test.go +++ b/pkg/notify/tracker_test.go @@ -241,11 +241,11 @@ func TestNotificationTracker_ConcurrentAccess(t *testing.T) { numGoroutines := 10 numOperations := 100 - for i := 0; i < numGoroutines; i++ { + for i := range numGoroutines { wg.Add(1) - go func(id int) { + go func(_ int) { defer wg.Done() - for j := 0; j < numOperations; j++ { + for j := range numOperations { // Write operations tracker.UpdateDMNotification("workspace1", "U001") tracker.UpdateDailyReminder("workspace1", "U001") diff --git a/pkg/slack/additional_functions_test.go b/pkg/slack/additional_functions_test.go index 1929bac..ba74997 100644 --- a/pkg/slack/additional_functions_test.go +++ b/pkg/slack/additional_functions_test.go @@ -10,6 +10,7 @@ import ( // TestPostThreadReply tests posting a reply to a thread. func TestPostThreadReply(t *testing.T) { + ctx := context.Background() t.Parallel() mockSlack := slacktest.New() @@ -25,8 +26,6 @@ func TestPostThreadReply(t *testing.T) { cache: &apiCache{entries: make(map[string]cacheEntry)}, } - ctx := context.Background() - // Post thread reply err := client.PostThreadReply(ctx, "C123", "1234567890.123456", "Reply text") if err != nil { @@ -34,7 +33,7 @@ func TestPostThreadReply(t *testing.T) { } // Verify reply was posted - messages := mockSlack.GetPostedMessages() + messages := mockSlack.PostedMessages() if len(messages) != 1 { t.Fatalf("expected 1 message, got %d", len(messages)) } @@ -48,6 +47,7 @@ func TestPostThreadReply(t *testing.T) { // TestHasRecentDMAboutPR_NoRecent tests when no recent DM exists. func TestHasRecentDMAboutPR_NoRecent(t *testing.T) { + ctx := context.Background() t.Parallel() mockSlack := slacktest.New() @@ -61,8 +61,6 @@ func TestHasRecentDMAboutPR_NoRecent(t *testing.T) { stateStore: nil, // No state store = no recent DMs } - ctx := context.Background() - hasRecent, err := client.HasRecentDMAboutPR(ctx, "U001", "https://github.com/test/repo/pull/123") if err != nil { t.Fatalf("unexpected error: %v", err) @@ -74,6 +72,7 @@ func TestHasRecentDMAboutPR_NoRecent(t *testing.T) { // TestSaveDMMessageInfo tests saving DM message information. func TestSaveDMMessageInfo(t *testing.T) { + ctx := context.Background() t.Parallel() mockSlack := slacktest.New() @@ -87,8 +86,6 @@ func TestSaveDMMessageInfo(t *testing.T) { stateStore: nil, // No state store - should handle gracefully } - ctx := context.Background() - // Should not panic when state store is nil err := client.SaveDMMessageInfo(ctx, "U001", "https://github.com/test/repo/pull/123", "D123", "1234567890.123456", "Test message") if err != nil { diff --git a/pkg/slack/api.go b/pkg/slack/api.go index 5477a1b..c02626a 100644 --- a/pkg/slack/api.go +++ b/pkg/slack/api.go @@ -6,11 +6,11 @@ import ( "github.com/slack-go/slack" ) -// SlackAPI defines the interface for Slack API operations. +// API defines the interface for Slack API operations. // This abstraction allows for easier testing by enabling mock implementations. 
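The tracker test above uses the range-over-int loop form in place of a counted for loop. A tiny sketch of the construct (it requires Go 1.22 or newer):

package main

import "fmt"

func main() {
	const numWorkers = 3

	// Equivalent to: for i := 0; i < numWorkers; i++
	for i := range numWorkers {
		fmt.Println("worker", i)
	}

	// The loop variable can be dropped entirely when only the count matters.
	for range numWorkers {
		fmt.Println("tick")
	}
}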
// -//nolint:dupl // Interface duplicated in mock is intentional for testing -type SlackAPI interface { +//nolint:dupl,interfacebloat // Interface duplicated in mock for testing; 15 methods reasonable for full Slack API wrapper +type API interface { // Team operations. GetTeamInfoContext(ctx context.Context) (*slack.TeamInfo, error) AuthTestContext(ctx context.Context) (*slack.AuthTestResponse, error) @@ -39,19 +39,19 @@ type SlackAPI interface { PublishViewContext(ctx context.Context, request slack.PublishViewContextRequest) (*slack.ViewResponse, error) } -// slackAPIWrapper wraps the real Slack client to implement SlackAPI interface. +// slackAPIWrapper wraps the real Slack client to implement API interface. type slackAPIWrapper struct { client *slack.Client } -// newSlackAPIWrapper creates a new wrapper around the Slack client. -func newSlackAPIWrapper(client *slack.Client) SlackAPI { +// newAPIWrapper creates a new wrapper around the Slack client. +func newAPIWrapper(client *slack.Client) API { return &slackAPIWrapper{client: client} } // RawClient returns the underlying *slack.Client for compatibility. // This should only be used when integrating with code that hasn't been -// refactored to use the SlackAPI interface yet. +// refactored to use the API interface yet. func (w *slackAPIWrapper) RawClient() *slack.Client { return w.client } diff --git a/pkg/slack/api_test.go b/pkg/slack/api_test.go index 3e22af1..5042fe0 100644 --- a/pkg/slack/api_test.go +++ b/pkg/slack/api_test.go @@ -8,15 +8,15 @@ import ( "github.com/slack-go/slack" ) -func TestSlackAPIWrapper(t *testing.T) { - t.Parallel() - +//nolint:gocognit,maintidx // Comprehensive API wrapper test covering all interface methods - complexity acceptable +func TestAPIWrapper(t *testing.T) { ctx := context.Background() + t.Parallel() t.Run("RawClient", func(t *testing.T) { rawClient := slack.New("test-token") //nolint:errcheck // Type assertion in test is safe - wrapper := newSlackAPIWrapper(rawClient).(*slackAPIWrapper) + wrapper := newAPIWrapper(rawClient).(*slackAPIWrapper) if wrapper.RawClient() != rawClient { t.Error("expected RawClient to return the wrapped client") @@ -29,7 +29,7 @@ func TestSlackAPIWrapper(t *testing.T) { Name: "Test Team", } - api := &mockSlackAPI{ + api := &mockAPI{ getTeamInfoFunc: func(ctx context.Context) (*slack.TeamInfo, error) { return expectedInfo, nil }, @@ -51,7 +51,7 @@ func TestSlackAPIWrapper(t *testing.T) { TeamID: "T123", } - api := &mockSlackAPI{ + api := &mockAPI{ authTestFunc: func(ctx context.Context) (*slack.AuthTestResponse, error) { return expectedResp, nil }, @@ -77,7 +77,7 @@ func TestSlackAPIWrapper(t *testing.T) { }, } - api := &mockSlackAPI{ + api := &mockAPI{ getConversationInfoFunc: func(ctx context.Context, input *slack.GetConversationInfoInput) (*slack.Channel, error) { return expectedChan, nil }, @@ -103,7 +103,7 @@ func TestSlackAPIWrapper(t *testing.T) { }, } - api := &mockSlackAPI{ + api := &mockAPI{ openConversationFunc: func(ctx context.Context, params *slack.OpenConversationParameters) (*slack.Channel, bool, bool, error) { return expectedChan, false, false, nil }, @@ -123,7 +123,7 @@ func TestSlackAPIWrapper(t *testing.T) { }) t.Run("PostMessageContext", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ postMessageFunc: func(ctx context.Context, channelID string, options ...slack.MsgOption) (string, string, error) { return "C123", "1234567890.123456", nil }, @@ -144,7 +144,7 @@ func TestSlackAPIWrapper(t *testing.T) { }) t.Run("UpdateMessageContext", 
func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ updateMessageFunc: func(ctx context.Context, channelID, timestamp string, options ...slack.MsgOption) (string, string, string, error) { return channelID, timestamp, "Updated text", nil }, @@ -174,7 +174,7 @@ func TestSlackAPIWrapper(t *testing.T) { Name: "testuser", } - api := &mockSlackAPI{ + api := &mockAPI{ getUserInfoFunc: func(ctx context.Context, userID string) (*slack.User, error) { return expectedUser, nil }, @@ -195,7 +195,7 @@ func TestSlackAPIWrapper(t *testing.T) { Presence: "active", } - api := &mockSlackAPI{ + api := &mockAPI{ getUserPresenceFunc: func(ctx context.Context, userID string) (*slack.UserPresence, error) { return expectedPresence, nil }, @@ -212,7 +212,7 @@ func TestSlackAPIWrapper(t *testing.T) { }) t.Run("AddReactionContext", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ addReactionFunc: func(ctx context.Context, name string, item slack.ItemRef) error { if name != "thumbsup" { return errors.New("unexpected reaction name") @@ -233,7 +233,7 @@ func TestSlackAPIWrapper(t *testing.T) { }) t.Run("RemoveReactionContext", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ removeReactionFunc: func(ctx context.Context, name string, item slack.ItemRef) error { if name != "thumbsup" { return errors.New("unexpected reaction name") @@ -263,7 +263,7 @@ func TestSlackAPIWrapper(t *testing.T) { }, } - api := &mockSlackAPI{ + api := &mockAPI{ searchMessagesFunc: func(ctx context.Context, query string, params slack.SearchParameters) (*slack.SearchMessages, error) { return expectedResults, nil }, @@ -282,7 +282,7 @@ func TestSlackAPIWrapper(t *testing.T) { t.Run("PublishViewContext", func(t *testing.T) { expectedResp := &slack.ViewResponse{} - api := &mockSlackAPI{ + api := &mockAPI{ publishViewFunc: func(ctx context.Context, request slack.PublishViewContextRequest) (*slack.ViewResponse, error) { return expectedResp, nil }, diff --git a/pkg/slack/api_wrapper_test.go b/pkg/slack/api_wrapper_test.go index 619dc15..dbc620d 100644 --- a/pkg/slack/api_wrapper_test.go +++ b/pkg/slack/api_wrapper_test.go @@ -9,8 +9,10 @@ import ( "github.com/slack-go/slack" ) -// TestSlackAPIWrapperIntegration tests the actual slackAPIWrapper with a mock HTTP server. -func TestSlackAPIWrapperIntegration(t *testing.T) { +// TestAPIWrapperIntegration tests the actual slackAPIWrapper with a mock HTTP server. 
+// +//nolint:maintidx // Integration test covering all API methods with mock server - complexity acceptable +func TestAPIWrapperIntegration(t *testing.T) { t.Parallel() // Create a mock HTTP server that responds to Slack API calls @@ -74,7 +76,7 @@ func TestSlackAPIWrapperIntegration(t *testing.T) { // Create Slack client pointing to mock server slackClient := slack.New("test-token", slack.OptionAPIURL(server.URL+"/api/")) - wrapper := newSlackAPIWrapper(slackClient) + wrapper := newAPIWrapper(slackClient) ctx := context.Background() diff --git a/pkg/slack/client_additional_test.go b/pkg/slack/client_additional_test.go index a41836b..af21f14 100644 --- a/pkg/slack/client_additional_test.go +++ b/pkg/slack/client_additional_test.go @@ -17,7 +17,7 @@ func TestUpdateDMMessage(t *testing.T) { t.Run("no_state_store", func(t *testing.T) { client := &Client{ - api: &mockSlackAPI{}, + api: &mockAPI{}, } prURL := "https://github.com/test/repo/pull/123" @@ -44,7 +44,7 @@ func TestSearchMessages(t *testing.T) { }, } - api := &mockSlackAPI{ + api := &mockAPI{ searchMessagesFunc: func(ctx context.Context, query string, params slack.SearchParameters) (*slack.SearchMessages, error) { return expectedResults, nil }, @@ -69,7 +69,7 @@ func TestSearchMessages(t *testing.T) { }) t.Run("error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ searchMessagesFunc: func(ctx context.Context, query string, params slack.SearchParameters) (*slack.SearchMessages, error) { return nil, errors.New("api error") }, @@ -84,6 +84,7 @@ func TestSearchMessages(t *testing.T) { t.Fatal("expected error") } }) + //nolint:tparallel // Tests share resources, cannot run subtests in parallel } func TestAPI(t *testing.T) { @@ -91,7 +92,7 @@ func TestAPI(t *testing.T) { t.Run("wrapper_returns_raw_client", func(t *testing.T) { rawClient := slack.New("test-token") - wrapper := newSlackAPIWrapper(rawClient) + wrapper := newAPIWrapper(rawClient) client := &Client{ api: wrapper, @@ -104,7 +105,7 @@ func TestAPI(t *testing.T) { }) t.Run("mock_returns_nil", func(t *testing.T) { - mockAPI := &mockSlackAPI{} + mockAPI := &mockAPI{} client := &Client{ api: mockAPI, @@ -114,6 +115,7 @@ func TestAPI(t *testing.T) { if client.API() != nil { t.Error("expected API() to return nil for mock client") } + //nolint:tparallel // Tests share resources, cannot run subtests in parallel }) } @@ -123,7 +125,7 @@ func TestResolveChannelID(t *testing.T) { ctx := context.Background() t.Run("cached_channel", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getConversationsFunc: func(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { return []slack.Channel{ { @@ -159,7 +161,7 @@ func TestResolveChannelID(t *testing.T) { }) t.Run("channel_not_found", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getConversationsFunc: func(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { return []slack.Channel{}, "", nil }, @@ -180,7 +182,7 @@ func TestResolveChannelID(t *testing.T) { }) t.Run("api_error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getConversationsFunc: func(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { return nil, "", errors.New("api error") }, @@ -197,6 +199,7 @@ func TestResolveChannelID(t *testing.T) { // Returns the channel name itself as fallback on error if id != "test-channel" { t.Errorf("expected 'test-channel' as fallback, got %s", id) + 
//nolint:tparallel // Tests share resources, cannot run subtests in parallel } }) } @@ -207,7 +210,7 @@ func TestIsUserInChannel(t *testing.T) { ctx := context.Background() t.Run("user_in_channel", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUsersInConversationFunc: func(ctx context.Context, params *slack.GetUsersInConversationParameters) ([]string, string, error) { return []string{"U001", "U002", "U003"}, "", nil }, @@ -227,7 +230,7 @@ func TestIsUserInChannel(t *testing.T) { }) t.Run("user_not_in_channel", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUsersInConversationFunc: func(ctx context.Context, params *slack.GetUsersInConversationParameters) ([]string, string, error) { return []string{"U001", "U002", "U003"}, "", nil }, @@ -247,7 +250,7 @@ func TestIsUserInChannel(t *testing.T) { }) t.Run("api_error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUsersInConversationFunc: func(ctx context.Context, params *slack.GetUsersInConversationParameters) ([]string, string, error) { return nil, "", errors.New("api error") }, @@ -263,6 +266,7 @@ func TestIsUserInChannel(t *testing.T) { inChannel := client.IsUserInChannel(ctx, "C123", "U001") if inChannel { + //nolint:tparallel // Tests share resources, cannot run subtests in parallel t.Error("expected false on error") } }) @@ -274,7 +278,7 @@ func TestPublishHomeView(t *testing.T) { ctx := context.Background() t.Run("success", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ publishViewFunc: func(ctx context.Context, request slack.PublishViewContextRequest) (*slack.ViewResponse, error) { return &slack.ViewResponse{}, nil }, @@ -299,7 +303,7 @@ func TestPublishHomeView(t *testing.T) { }) t.Run("error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ publishViewFunc: func(ctx context.Context, request slack.PublishViewContextRequest) (*slack.ViewResponse, error) { return nil, errors.New("api error") }, @@ -312,6 +316,7 @@ func TestPublishHomeView(t *testing.T) { blocks := []slack.Block{} err := client.PublishHomeView(ctx, "U123", blocks) + //nolint:tparallel // Tests share resources, cannot run subtests in parallel if err == nil { t.Fatal("expected error") } @@ -324,7 +329,7 @@ func TestChannelHistory(t *testing.T) { ctx := context.Background() t.Run("success", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getConversationHistoryFunc: func(ctx context.Context, params *slack.GetConversationHistoryParameters) (*slack.GetConversationHistoryResponse, error) { return &slack.GetConversationHistoryResponse{ Messages: []slack.Message{ @@ -358,7 +363,7 @@ func TestChannelHistory(t *testing.T) { }) t.Run("with_timestamps", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getConversationHistoryFunc: func(ctx context.Context, params *slack.GetConversationHistoryParameters) (*slack.GetConversationHistoryResponse, error) { if params.Latest != "1234567890.123456" { return nil, errors.New("unexpected latest timestamp") @@ -383,7 +388,7 @@ func TestChannelHistory(t *testing.T) { }) t.Run("error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getConversationHistoryFunc: func(ctx context.Context, params *slack.GetConversationHistoryParameters) (*slack.GetConversationHistoryResponse, error) { return nil, errors.New("api error") }, @@ -407,17 +412,18 @@ type programmableMockStateStore struct { saveDMMessageErr error } -func (m *programmableMockStateStore) DMMessage(userID, prURL string) (state.DMInfo, bool) { +func (m 
*programmableMockStateStore) DMMessage(ctx context.Context, userID, prURL string) (state.DMInfo, bool) { key := userID + ":" + prURL info, exists := m.dmMessages[key] return info, exists } -func (m *programmableMockStateStore) SaveDMMessage(userID, prURL string, info state.DMInfo) error { +func (m *programmableMockStateStore) SaveDMMessage(ctx context.Context, userID, prURL string, info state.DMInfo) error { if m.saveDMMessageErr != nil { return m.saveDMMessageErr } key := userID + ":" + prURL + //nolint:tparallel // Tests share resources, cannot run subtests in parallel if m.dmMessages == nil { m.dmMessages = make(map[string]state.DMInfo) } @@ -437,7 +443,7 @@ func TestUpdateDMMessage_Complete(t *testing.T) { } client := &Client{ - api: &mockSlackAPI{}, + api: &mockAPI{}, stateStore: mockStore, } @@ -459,7 +465,7 @@ func TestUpdateDMMessage_Complete(t *testing.T) { } updateCalled := false - api := &mockSlackAPI{ + api := &mockAPI{ updateMessageFunc: func(ctx context.Context, channelID, timestamp string, options ...slack.MsgOption) (string, string, string, error) { updateCalled = true if channelID != "D123" { @@ -488,7 +494,7 @@ func TestUpdateDMMessage_Complete(t *testing.T) { } // Verify message text was updated in store - info, exists := mockStore.DMMessage("U001", prURL) + info, exists := mockStore.DMMessage(ctx, "U001", prURL) if !exists { t.Fatal("expected DM message to still exist in store") } @@ -508,7 +514,7 @@ func TestUpdateDMMessage_Complete(t *testing.T) { }, } - api := &mockSlackAPI{ + api := &mockAPI{ updateMessageFunc: func(ctx context.Context, channelID, timestamp string, options ...slack.MsgOption) (string, string, string, error) { return "", "", "", errors.New("slack API error") }, @@ -538,7 +544,7 @@ func TestUpdateDMMessage_Complete(t *testing.T) { saveDMMessageErr: errors.New("save error"), } - api := &mockSlackAPI{ + api := &mockAPI{ updateMessageFunc: func(ctx context.Context, channelID, timestamp string, options ...slack.MsgOption) (string, string, string, error) { return channelID, timestamp, "New text", nil }, diff --git a/pkg/slack/client_coverage_test.go b/pkg/slack/client_coverage_test.go index bce15bf..8bc80e4 100644 --- a/pkg/slack/client_coverage_test.go +++ b/pkg/slack/client_coverage_test.go @@ -12,12 +12,11 @@ import ( // TestPostThreadReply_ErrorCases tests error handling in PostThreadReply. 
func TestPostThreadReply_ErrorCases(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() t.Run("channel_not_found", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ postMessageFunc: func(ctx context.Context, channelID string, options ...slack.MsgOption) (string, string, error) { return "", "", errors.New("channel_not_found") }, @@ -35,7 +34,7 @@ func TestPostThreadReply_ErrorCases(t *testing.T) { }) t.Run("not_in_channel", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ postMessageFunc: func(ctx context.Context, channelID string, options ...slack.MsgOption) (string, string, error) { return "", "", errors.New("not_in_channel") }, @@ -53,7 +52,7 @@ func TestPostThreadReply_ErrorCases(t *testing.T) { }) t.Run("thread_not_found", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ postMessageFunc: func(ctx context.Context, channelID string, options ...slack.MsgOption) (string, string, error) { return "", "", errors.New("thread_not_found") }, @@ -72,7 +71,7 @@ func TestPostThreadReply_ErrorCases(t *testing.T) { t.Run("rate_limit_retry", func(t *testing.T) { callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ postMessageFunc: func(ctx context.Context, channelID string, options ...slack.MsgOption) (string, string, error) { callCount++ if callCount == 1 { @@ -98,7 +97,7 @@ func TestPostThreadReply_ErrorCases(t *testing.T) { t.Run("retryable_error", func(t *testing.T) { callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ postMessageFunc: func(ctx context.Context, channelID string, options ...slack.MsgOption) (string, string, error) { callCount++ if callCount < 3 { @@ -125,9 +124,9 @@ func TestPostThreadReply_ErrorCases(t *testing.T) { // TestHasRecentDMAboutPR_WithStateStore tests HasRecentDMAboutPR with a state store. 
func TestHasRecentDMAboutPR_WithStateStore(t *testing.T) { + ctx := context.Background() t.Parallel() - ctx := context.Background() prURL := "https://github.com/test/repo/pull/123" t.Run("with_recent_dm", func(t *testing.T) { @@ -142,7 +141,7 @@ func TestHasRecentDMAboutPR_WithStateStore(t *testing.T) { }, } - api := &mockSlackAPI{ + api := &mockAPI{ openConversationFunc: func(ctx context.Context, params *slack.OpenConversationParameters) (*slack.Channel, bool, bool, error) { return &slack.Channel{GroupConversation: slack.GroupConversation{Conversation: slack.Conversation{ID: "D123"}}}, false, false, nil }, @@ -180,7 +179,7 @@ func TestHasRecentDMAboutPR_WithStateStore(t *testing.T) { }) t.Run("open_conversation_error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ openConversationFunc: func(ctx context.Context, params *slack.OpenConversationParameters) (*slack.Channel, bool, bool, error) { return nil, false, false, errors.New("api error") }, @@ -197,7 +196,7 @@ func TestHasRecentDMAboutPR_WithStateStore(t *testing.T) { }) t.Run("bot_info_error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ openConversationFunc: func(ctx context.Context, params *slack.OpenConversationParameters) (*slack.Channel, bool, bool, error) { return &slack.Channel{GroupConversation: slack.GroupConversation{Conversation: slack.Conversation{ID: "D123"}}}, false, false, nil }, @@ -218,7 +217,7 @@ func TestHasRecentDMAboutPR_WithStateStore(t *testing.T) { }) t.Run("conversation_history_error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ openConversationFunc: func(ctx context.Context, params *slack.OpenConversationParameters) (*slack.Channel, bool, bool, error) { return &slack.Channel{GroupConversation: slack.GroupConversation{Conversation: slack.Conversation{ID: "D123"}}}, false, false, nil }, @@ -248,12 +247,11 @@ func TestHasRecentDMAboutPR_WithStateStore(t *testing.T) { // TestSendDirectMessage_Errors tests error handling in SendDirectMessage. 
func TestSendDirectMessage_Errors(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() t.Run("open_conversation_fails", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ openConversationFunc: func(ctx context.Context, params *slack.OpenConversationParameters) (*slack.Channel, bool, bool, error) { return nil, false, false, errors.New("api error") }, @@ -271,7 +269,7 @@ func TestSendDirectMessage_Errors(t *testing.T) { }) t.Run("post_message_fails", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ openConversationFunc: func(ctx context.Context, params *slack.OpenConversationParameters) (*slack.Channel, bool, bool, error) { return &slack.Channel{GroupConversation: slack.GroupConversation{Conversation: slack.Conversation{ID: "D123"}}}, false, false, nil }, @@ -293,7 +291,7 @@ func TestSendDirectMessage_Errors(t *testing.T) { t.Run("rate_limit_during_send", func(t *testing.T) { callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ openConversationFunc: func(ctx context.Context, params *slack.OpenConversationParameters) (*slack.Channel, bool, bool, error) { return &slack.Channel{GroupConversation: slack.GroupConversation{Conversation: slack.Conversation{ID: "D123"}}}, false, false, nil }, @@ -325,13 +323,13 @@ func TestSendDirectMessage_Errors(t *testing.T) { t.Errorf("expected 2 calls (1 retry), got %d", callCount) } }) + //nolint:tparallel // Tests share resources, cannot run subtests in parallel } // TestSaveDMMessageInfo_WithStore tests SaveDMMessageInfo with a state store. func TestSaveDMMessageInfo_WithStore(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() t.Run("saves_to_store", func(t *testing.T) { mockStore := &programmableMockStateStore{ @@ -379,17 +377,17 @@ func TestSaveDMMessageInfo_WithStore(t *testing.T) { if err == nil { t.Fatal("expected error from state store") } + //nolint:tparallel // Tests share resources, cannot run subtests in parallel }) } // TestPostThread_Errors tests error handling in PostThread. 
func TestPostThread_Errors(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() t.Run("channel_not_found_during_check", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getConversationInfoFunc: func(ctx context.Context, input *slack.GetConversationInfoInput) (*slack.Channel, error) { return nil, errors.New("channel_not_found") }, @@ -414,7 +412,7 @@ func TestPostThread_Errors(t *testing.T) { }) t.Run("bot_not_in_channel", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getConversationInfoFunc: func(ctx context.Context, input *slack.GetConversationInfoInput) (*slack.Channel, error) { return &slack.Channel{}, nil }, @@ -442,7 +440,7 @@ func TestPostThread_Errors(t *testing.T) { }) t.Run("post_with_not_in_channel_error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUsersInConversationFunc: func(ctx context.Context, params *slack.GetUsersInConversationParameters) ([]string, string, error) { return []string{"UBOT"}, "", nil }, @@ -468,7 +466,7 @@ func TestPostThread_Errors(t *testing.T) { t.Run("post_with_rate_limit", func(t *testing.T) { callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ getUsersInConversationFunc: func(ctx context.Context, params *slack.GetUsersInConversationParameters) ([]string, string, error) { return []string{"UBOT"}, "", nil }, @@ -499,18 +497,18 @@ func TestPostThread_Errors(t *testing.T) { } if callCount != 2 { t.Errorf("expected 2 calls (1 retry), got %d", callCount) + //nolint:tparallel // Tests share resources, cannot run subtests in parallel } }) } // TestUpdateMessage_EdgeCases tests edge cases in UpdateMessage. func TestUpdateMessage_EdgeCases(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() t.Run("message_not_found", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ updateMessageFunc: func(ctx context.Context, channelID, timestamp string, options ...slack.MsgOption) (string, string, string, error) { return "", "", "", errors.New("message_not_found") }, @@ -529,7 +527,7 @@ func TestUpdateMessage_EdgeCases(t *testing.T) { t.Run("rate_limit_on_update", func(t *testing.T) { callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ updateMessageFunc: func(ctx context.Context, channelID, timestamp string, options ...slack.MsgOption) (string, string, string, error) { callCount++ if callCount == 1 { diff --git a/pkg/slack/client_error_test.go b/pkg/slack/client_error_test.go index 46e9c19..9aa2297 100644 --- a/pkg/slack/client_error_test.go +++ b/pkg/slack/client_error_test.go @@ -10,6 +10,7 @@ import ( // TestPostThread_BotNotInChannel tests error when bot is not in channel. func TestPostThread_BotNotInChannel(t *testing.T) { + ctx := context.Background() t.Parallel() mockSlack := slacktest.New() @@ -25,8 +26,6 @@ func TestPostThread_BotNotInChannel(t *testing.T) { cache: &apiCache{entries: make(map[string]cacheEntry)}, } - ctx := context.Background() - _, err := client.PostThread(ctx, "C123", "Test message", nil) if err == nil { t.Fatal("expected error when bot not in channel, got nil") @@ -40,6 +39,7 @@ func TestPostThread_BotNotInChannel(t *testing.T) { // TestPostThread_LongText tests posting message with text longer than 100 characters. 
func TestPostThread_LongText(t *testing.T) { + ctx := context.Background() t.Parallel() mockSlack := slacktest.New() @@ -55,8 +55,6 @@ func TestPostThread_LongText(t *testing.T) { cache: &apiCache{entries: make(map[string]cacheEntry)}, } - ctx := context.Background() - // Create text longer than 100 chars (triggers preview truncation in logging) longText := "This is a very long message that exceeds one hundred characters to test the text preview truncation logic in the logging code" messageTS, err := client.PostThread(ctx, "C123", longText, nil) @@ -68,7 +66,7 @@ func TestPostThread_LongText(t *testing.T) { t.Error("expected non-empty message timestamp") } - messages := mockSlack.GetPostedMessages() + messages := mockSlack.PostedMessages() if len(messages) != 1 { t.Fatalf("expected 1 message, got %d", len(messages)) } @@ -81,6 +79,7 @@ func TestPostThread_LongText(t *testing.T) { // TestSendDirectMessage_LongText tests sending DM with long text. func TestSendDirectMessage_LongText(t *testing.T) { + ctx := context.Background() t.Parallel() mockSlack := slacktest.New() @@ -95,8 +94,6 @@ func TestSendDirectMessage_LongText(t *testing.T) { cache: &apiCache{entries: make(map[string]cacheEntry)}, } - ctx := context.Background() - // Long text (>100 chars) triggers preview truncation in logs longText := "This is a very long direct message that exceeds one hundred characters to test the text preview truncation logic in the logging code" diff --git a/pkg/slack/client_simple_test.go b/pkg/slack/client_simple_test.go index 0ca0f1d..e4fbe6b 100644 --- a/pkg/slack/client_simple_test.go +++ b/pkg/slack/client_simple_test.go @@ -1,6 +1,7 @@ package slack import ( + "context" "testing" "time" @@ -216,10 +217,10 @@ func TestCacheGetExpired(t *testing.T) { // mockStateStore implements StateStore for testing. 
type mockStateStore struct{} -func (m *mockStateStore) DMMessage(userID, prURL string) (state.DMInfo, bool) { +func (m *mockStateStore) DMMessage(ctx context.Context, userID, prURL string) (state.DMInfo, bool) { return state.DMInfo{}, false } -func (m *mockStateStore) SaveDMMessage(userID, prURL string, info state.DMInfo) error { +func (m *mockStateStore) SaveDMMessage(ctx context.Context, userID, prURL string, info state.DMInfo) error { return nil } diff --git a/pkg/slack/client_test.go b/pkg/slack/client_test.go index 329a794..037e1b5 100644 --- a/pkg/slack/client_test.go +++ b/pkg/slack/client_test.go @@ -26,8 +26,6 @@ func TestPostThread(t *testing.T) { cache: &apiCache{entries: make(map[string]cacheEntry)}, } - ctx := context.Background() - tests := []struct { name string channelID string @@ -43,6 +41,7 @@ func TestPostThread(t *testing.T) { attachments: nil, expectError: false, validateResult: func(t *testing.T, messages []*slacktest.PostedMessage) { + t.Helper() if len(messages) != 1 { t.Fatalf("expected 1 message, got %d", len(messages)) } @@ -61,6 +60,7 @@ func TestPostThread(t *testing.T) { attachments: nil, expectError: false, validateResult: func(t *testing.T, messages []*slacktest.PostedMessage) { + t.Helper() if len(messages) != 1 { t.Fatalf("expected 1 message, got %d", len(messages)) } @@ -76,6 +76,7 @@ func TestPostThread(t *testing.T) { attachments: nil, expectError: false, validateResult: func(t *testing.T, messages []*slacktest.PostedMessage) { + t.Helper() if len(messages) != 1 { t.Fatalf("expected 1 message, got %d", len(messages)) } @@ -86,6 +87,8 @@ func TestPostThread(t *testing.T) { }, } + ctx := context.Background() + for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { mockSlack.Reset() @@ -107,7 +110,7 @@ func TestPostThread(t *testing.T) { t.Error("expected non-empty message timestamp") } - messages := mockSlack.GetPostedMessages() + messages := mockSlack.PostedMessages() tt.validateResult(t, messages) }) } @@ -179,7 +182,7 @@ func TestUpdateMessage(t *testing.T) { } // Verify update was recorded - updates := mockSlack.GetUpdatedMessages() + updates := mockSlack.UpdatedMessages() if len(updates) != 1 { t.Fatalf("expected 1 update, got %d", len(updates)) } @@ -201,6 +204,7 @@ func TestUpdateMessage(t *testing.T) { // TestSendDirectMessage verifies that DMs are sent to the correct users. func TestSendDirectMessage(t *testing.T) { + ctx := context.Background() t.Parallel() mockSlack := slacktest.New() @@ -216,8 +220,6 @@ func TestSendDirectMessage(t *testing.T) { cache: &apiCache{entries: make(map[string]cacheEntry)}, } - ctx := context.Background() - tests := []struct { name string userID string @@ -270,7 +272,7 @@ func TestSendDirectMessage(t *testing.T) { } // Verify DM was sent - messages := mockSlack.GetPostedMessages() + messages := mockSlack.PostedMessages() if len(messages) != 1 { t.Fatalf("expected 1 DM, got %d", len(messages)) } @@ -289,6 +291,7 @@ func TestSendDirectMessage(t *testing.T) { // TestMessageMutationSequence verifies that we can post, then update the same message. 
func TestMessageMutationSequence(t *testing.T) { + ctx := context.Background() t.Parallel() mockSlack := slacktest.New() @@ -304,8 +307,6 @@ func TestMessageMutationSequence(t *testing.T) { cache: &apiCache{entries: make(map[string]cacheEntry)}, } - ctx := context.Background() - // Step 1: Post initial message initialText := ":test_tube: Tests running" messageTS, err := client.PostThread(ctx, "C123", initialText, nil) @@ -318,7 +319,7 @@ func TestMessageMutationSequence(t *testing.T) { } // Verify initial post - messages := mockSlack.GetPostedMessages() + messages := mockSlack.PostedMessages() if len(messages) != 1 { t.Fatalf("expected 1 posted message, got %d", len(messages)) } @@ -334,7 +335,7 @@ func TestMessageMutationSequence(t *testing.T) { } // Verify update - updates := mockSlack.GetUpdatedMessages() + updates := mockSlack.UpdatedMessages() if len(updates) != 1 { t.Fatalf("expected 1 updated message, got %d", len(updates)) } @@ -353,7 +354,7 @@ func TestMessageMutationSequence(t *testing.T) { } // Verify second update - updates = mockSlack.GetUpdatedMessages() + updates = mockSlack.UpdatedMessages() if len(updates) != 2 { t.Fatalf("expected 2 total updates, got %d", len(updates)) } @@ -364,6 +365,7 @@ func TestMessageMutationSequence(t *testing.T) { // TestDMMutationSequence verifies that we can send a DM, then update it. func TestDMMutationSequence(t *testing.T) { + ctx := context.Background() t.Parallel() mockSlack := slacktest.New() @@ -378,8 +380,6 @@ func TestDMMutationSequence(t *testing.T) { cache: &apiCache{entries: make(map[string]cacheEntry)}, } - ctx := context.Background() - // Step 1: Send initial DM initialText := ":hourglass: Your review is needed on PR #123" dmChannelID, messageTS, err := client.SendDirectMessage(ctx, "U001", initialText) @@ -392,7 +392,7 @@ func TestDMMutationSequence(t *testing.T) { } // Verify initial DM - messages := mockSlack.GetPostedMessages() + messages := mockSlack.PostedMessages() if len(messages) != 1 { t.Fatalf("expected 1 DM sent, got %d", len(messages)) } @@ -405,7 +405,7 @@ func TestDMMutationSequence(t *testing.T) { } // Verify update - updates := mockSlack.GetUpdatedMessages() + updates := mockSlack.UpdatedMessages() if len(updates) != 1 { t.Fatalf("expected 1 DM update, got %d", len(updates)) } @@ -419,6 +419,7 @@ func TestDMMutationSequence(t *testing.T) { // TestMultipleChannelPosts verifies posting to multiple channels works correctly. func TestMultipleChannelPosts(t *testing.T) { + ctx := context.Background() t.Parallel() mockSlack := slacktest.New() @@ -438,8 +439,6 @@ func TestMultipleChannelPosts(t *testing.T) { cache: &apiCache{entries: make(map[string]cacheEntry)}, } - ctx := context.Background() - // Post same PR to multiple channels channels := []struct { id string @@ -460,7 +459,7 @@ func TestMultipleChannelPosts(t *testing.T) { } // Verify all posts - messages := mockSlack.GetPostedMessages() + messages := mockSlack.PostedMessages() if len(messages) != 3 { t.Fatalf("expected 3 messages (one per channel), got %d", len(messages)) } diff --git a/pkg/slack/http_handlers_test.go b/pkg/slack/http_handlers_test.go index aadc754..a4102db 100644 --- a/pkg/slack/http_handlers_test.go +++ b/pkg/slack/http_handlers_test.go @@ -16,9 +16,9 @@ import ( ) // generateValidSignature creates a valid Slack signature for testing. 
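The generateValidSignature helper below hard-codes the "test-secret" shared by these handler tests. For reference, the signing scheme it mirrors is HMAC-SHA256 over "v0:<timestamp>:<body>", hex-encoded with a "v0=" prefix; a minimal sketch, with sign and verify as illustrative names rather than this package's API:

package example

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// sign computes Slack's request signature: HMAC-SHA256 over
// "v0:<timestamp>:<body>" keyed by the signing secret, hex-encoded and "v0="-prefixed.
func sign(secret, timestamp, body string) string {
	base := fmt.Sprintf("v0:%s:%s", timestamp, body)
	mac := hmac.New(sha256.New, []byte(secret))
	mac.Write([]byte(base))
	return "v0=" + hex.EncodeToString(mac.Sum(nil))
}

// verify compares an incoming X-Slack-Signature header value in constant time.
func verify(secret, timestamp, body, header string) bool {
	return hmac.Equal([]byte(sign(secret, timestamp, body)), []byte(header))
}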
-func generateValidSignature(secret, timestamp, body string) string { +func generateValidSignature(timestamp, body string) string { sig := fmt.Sprintf("v0:%s:%s", timestamp, body) - h := hmac.New(sha256.New, []byte(secret)) + h := hmac.New(sha256.New, []byte("test-secret")) h.Write([]byte(sig)) return "v0=" + hex.EncodeToString(h.Sum(nil)) } @@ -47,7 +47,7 @@ func TestEventsHandler_URLVerification(t *testing.T) { req := httptest.NewRequest(http.MethodPost, "/slack/events", bytes.NewBuffer(bodyBytes)) timestamp := fmt.Sprintf("%d", time.Now().Unix()) - signature := generateValidSignature("test-secret", timestamp, string(bodyBytes)) + signature := generateValidSignature(timestamp, string(bodyBytes)) req.Header.Set("X-Slack-Signature", signature) req.Header.Set("X-Slack-Request-Timestamp", timestamp) @@ -137,7 +137,7 @@ func TestEventsHandler_ParseEventError(t *testing.T) { req := httptest.NewRequest(http.MethodPost, "/slack/events", bytes.NewBuffer(body)) timestamp := fmt.Sprintf("%d", time.Now().Unix()) - signature := generateValidSignature("test-secret", timestamp, string(body)) + signature := generateValidSignature(timestamp, string(body)) req.Header.Set("X-Slack-Signature", signature) req.Header.Set("X-Slack-Request-Timestamp", timestamp) @@ -165,7 +165,7 @@ func TestEventsHandler_URLVerificationUnmarshalError(t *testing.T) { req := httptest.NewRequest(http.MethodPost, "/slack/events", bytes.NewBuffer(body)) timestamp := fmt.Sprintf("%d", time.Now().Unix()) - signature := generateValidSignature("test-secret", timestamp, string(body)) + signature := generateValidSignature(timestamp, string(body)) req.Header.Set("X-Slack-Signature", signature) req.Header.Set("X-Slack-Request-Timestamp", timestamp) @@ -223,7 +223,7 @@ func TestEventsHandler_AppHomeOpened(t *testing.T) { req := httptest.NewRequest(http.MethodPost, "/slack/events", bytes.NewBuffer(bodyBytes)) timestamp := fmt.Sprintf("%d", time.Now().Unix()) - signature := generateValidSignature("test-secret", timestamp, string(bodyBytes)) + signature := generateValidSignature(timestamp, string(bodyBytes)) req.Header.Set("X-Slack-Signature", signature) req.Header.Set("X-Slack-Request-Timestamp", timestamp) @@ -284,7 +284,7 @@ func TestEventsHandler_MessageEvent(t *testing.T) { req := httptest.NewRequest(http.MethodPost, "/slack/events", bytes.NewBuffer(bodyBytes)) timestamp := fmt.Sprintf("%d", time.Now().Unix()) - signature := generateValidSignature("test-secret", timestamp, string(bodyBytes)) + signature := generateValidSignature(timestamp, string(bodyBytes)) req.Header.Set("X-Slack-Signature", signature) req.Header.Set("X-Slack-Request-Timestamp", timestamp) @@ -325,7 +325,7 @@ func TestEventsHandler_AppMentionEvent(t *testing.T) { req := httptest.NewRequest(http.MethodPost, "/slack/events", bytes.NewBuffer(bodyBytes)) timestamp := fmt.Sprintf("%d", time.Now().Unix()) - signature := generateValidSignature("test-secret", timestamp, string(bodyBytes)) + signature := generateValidSignature(timestamp, string(bodyBytes)) req.Header.Set("X-Slack-Signature", signature) req.Header.Set("X-Slack-Request-Timestamp", timestamp) @@ -367,7 +367,7 @@ func TestEventsHandler_AppHomeOpenedNoHandler(t *testing.T) { req := httptest.NewRequest(http.MethodPost, "/slack/events", bytes.NewBuffer(bodyBytes)) timestamp := fmt.Sprintf("%d", time.Now().Unix()) - signature := generateValidSignature("test-secret", timestamp, string(bodyBytes)) + signature := generateValidSignature(timestamp, string(bodyBytes)) req.Header.Set("X-Slack-Signature", signature) 
req.Header.Set("X-Slack-Request-Timestamp", timestamp) diff --git a/pkg/slack/manager.go b/pkg/slack/manager.go index 014a9cb..a1a2ad8 100644 --- a/pkg/slack/manager.go +++ b/pkg/slack/manager.go @@ -21,8 +21,8 @@ type WorkspaceMetadata struct { // StateStore interface for DM message tracking. type StateStore interface { - DMMessage(userID, prURL string) (state.DMInfo, bool) - SaveDMMessage(userID, prURL string, info state.DMInfo) error + DMMessage(ctx context.Context, userID, prURL string) (state.DMInfo, bool) + SaveDMMessage(ctx context.Context, userID, prURL string, info state.DMInfo) error } // Manager manages Slack clients for multiple workspaces. diff --git a/pkg/slack/mock_api_test.go b/pkg/slack/mock_api_test.go index 0494f31..973d5d9 100644 --- a/pkg/slack/mock_api_test.go +++ b/pkg/slack/mock_api_test.go @@ -7,8 +7,8 @@ import ( "github.com/slack-go/slack" ) -// mockSlackAPI implements SlackAPI for testing. -type mockSlackAPI struct { +// mockAPI implements API for testing. +type mockAPI struct { // Team operations getTeamInfoFunc func(ctx context.Context) (*slack.TeamInfo, error) authTestFunc func(ctx context.Context) (*slack.AuthTestResponse, error) @@ -39,14 +39,14 @@ type mockSlackAPI struct { // Team operations -func (m *mockSlackAPI) GetTeamInfoContext(ctx context.Context) (*slack.TeamInfo, error) { +func (m *mockAPI) GetTeamInfoContext(ctx context.Context) (*slack.TeamInfo, error) { if m.getTeamInfoFunc != nil { return m.getTeamInfoFunc(ctx) } return nil, errors.New("not implemented") } -func (m *mockSlackAPI) AuthTestContext(ctx context.Context) (*slack.AuthTestResponse, error) { +func (m *mockAPI) AuthTestContext(ctx context.Context) (*slack.AuthTestResponse, error) { if m.authTestFunc != nil { return m.authTestFunc(ctx) } @@ -55,35 +55,35 @@ func (m *mockSlackAPI) AuthTestContext(ctx context.Context) (*slack.AuthTestResp // Conversation operations -func (m *mockSlackAPI) GetConversationInfoContext(ctx context.Context, input *slack.GetConversationInfoInput) (*slack.Channel, error) { +func (m *mockAPI) GetConversationInfoContext(ctx context.Context, input *slack.GetConversationInfoInput) (*slack.Channel, error) { if m.getConversationInfoFunc != nil { return m.getConversationInfoFunc(ctx, input) } return nil, errors.New("not implemented") } -func (m *mockSlackAPI) GetConversationHistoryContext(ctx context.Context, params *slack.GetConversationHistoryParameters) (*slack.GetConversationHistoryResponse, error) { +func (m *mockAPI) GetConversationHistoryContext(ctx context.Context, params *slack.GetConversationHistoryParameters) (*slack.GetConversationHistoryResponse, error) { if m.getConversationHistoryFunc != nil { return m.getConversationHistoryFunc(ctx, params) } return nil, errors.New("not implemented") } -func (m *mockSlackAPI) GetConversationsContext(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { +func (m *mockAPI) GetConversationsContext(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { if m.getConversationsFunc != nil { return m.getConversationsFunc(ctx, params) } return nil, "", errors.New("not implemented") } -func (m *mockSlackAPI) OpenConversationContext(ctx context.Context, params *slack.OpenConversationParameters) (*slack.Channel, bool, bool, error) { +func (m *mockAPI) OpenConversationContext(ctx context.Context, params *slack.OpenConversationParameters) (channel *slack.Channel, alreadyOpen, noOp bool, err error) { if m.openConversationFunc != nil { return 
m.openConversationFunc(ctx, params) } return nil, false, false, errors.New("not implemented") } -func (m *mockSlackAPI) GetUsersInConversationContext(ctx context.Context, params *slack.GetUsersInConversationParameters) ([]string, string, error) { +func (m *mockAPI) GetUsersInConversationContext(ctx context.Context, params *slack.GetUsersInConversationParameters) (userIDs []string, cursor string, err error) { if m.getUsersInConversationFunc != nil { return m.getUsersInConversationFunc(ctx, params) } @@ -92,21 +92,21 @@ func (m *mockSlackAPI) GetUsersInConversationContext(ctx context.Context, params // Message operations -func (m *mockSlackAPI) PostMessageContext(ctx context.Context, channelID string, options ...slack.MsgOption) (string, string, error) { +func (m *mockAPI) PostMessageContext(ctx context.Context, channelID string, options ...slack.MsgOption) (channel, timestamp string, err error) { if m.postMessageFunc != nil { return m.postMessageFunc(ctx, channelID, options...) } return "", "", errors.New("not implemented") } -func (m *mockSlackAPI) UpdateMessageContext(ctx context.Context, channelID, timestamp string, options ...slack.MsgOption) (string, string, string, error) { +func (m *mockAPI) UpdateMessageContext(ctx context.Context, channelID, timestamp string, options ...slack.MsgOption) (channel, ts, text string, err error) { if m.updateMessageFunc != nil { return m.updateMessageFunc(ctx, channelID, timestamp, options...) } return "", "", "", errors.New("not implemented") } -func (m *mockSlackAPI) SearchMessagesContext(ctx context.Context, query string, params slack.SearchParameters) (*slack.SearchMessages, error) { +func (m *mockAPI) SearchMessagesContext(ctx context.Context, query string, params slack.SearchParameters) (*slack.SearchMessages, error) { if m.searchMessagesFunc != nil { return m.searchMessagesFunc(ctx, query, params) } @@ -115,14 +115,14 @@ func (m *mockSlackAPI) SearchMessagesContext(ctx context.Context, query string, // Reaction operations -func (m *mockSlackAPI) AddReactionContext(ctx context.Context, name string, item slack.ItemRef) error { +func (m *mockAPI) AddReactionContext(ctx context.Context, name string, item slack.ItemRef) error { if m.addReactionFunc != nil { return m.addReactionFunc(ctx, name, item) } return errors.New("not implemented") } -func (m *mockSlackAPI) RemoveReactionContext(ctx context.Context, name string, item slack.ItemRef) error { +func (m *mockAPI) RemoveReactionContext(ctx context.Context, name string, item slack.ItemRef) error { if m.removeReactionFunc != nil { return m.removeReactionFunc(ctx, name, item) } @@ -131,14 +131,14 @@ func (m *mockSlackAPI) RemoveReactionContext(ctx context.Context, name string, i // User operations -func (m *mockSlackAPI) GetUserInfoContext(ctx context.Context, userID string) (*slack.User, error) { +func (m *mockAPI) GetUserInfoContext(ctx context.Context, userID string) (*slack.User, error) { if m.getUserInfoFunc != nil { return m.getUserInfoFunc(ctx, userID) } return nil, errors.New("not implemented") } -func (m *mockSlackAPI) GetUserPresenceContext(ctx context.Context, userID string) (*slack.UserPresence, error) { +func (m *mockAPI) GetUserPresenceContext(ctx context.Context, userID string) (*slack.UserPresence, error) { if m.getUserPresenceFunc != nil { return m.getUserPresenceFunc(ctx, userID) } @@ -147,7 +147,7 @@ func (m *mockSlackAPI) GetUserPresenceContext(ctx context.Context, userID string // View operations -func (m *mockSlackAPI) PublishViewContext(ctx context.Context, request 
slack.PublishViewContextRequest) (*slack.ViewResponse, error) { +func (m *mockAPI) PublishViewContext(ctx context.Context, request slack.PublishViewContextRequest) (*slack.ViewResponse, error) { if m.publishViewFunc != nil { return m.publishViewFunc(ctx, request) } diff --git a/pkg/slack/mock_builders_test.go b/pkg/slack/mock_builders_test.go index adf9c31..97e3540 100644 --- a/pkg/slack/mock_builders_test.go +++ b/pkg/slack/mock_builders_test.go @@ -7,28 +7,28 @@ import ( "github.com/slack-go/slack" ) -// MockSlackAPIBuilder provides a fluent API for building mockSlackAPI instances. +// MockAPIBuilder provides a fluent API for building mockAPI instances. // This makes test setup much more readable and maintainable. // // Example: // -// mockAPI := NewMockSlackAPI(). +// mockAPI := NewMockAPI(). // WithPostMessageSuccess("C123", "1234.567"). // WithGetTeamInfo(&slack.TeamInfo{Domain: "test"}). // Build() -type MockSlackAPIBuilder struct { - mock *mockSlackAPI +type MockAPIBuilder struct { + mock *mockAPI } -// NewMockSlackAPI creates a new mock Slack API builder with sensible defaults. -func NewMockSlackAPI() *MockSlackAPIBuilder { - return &MockSlackAPIBuilder{ - mock: &mockSlackAPI{}, +// NewMockAPI creates a new mock Slack API builder with sensible defaults. +func NewMockAPI() *MockAPIBuilder { + return &MockAPIBuilder{ + mock: &mockAPI{}, } } // WithPostMessageSuccess configures the mock to successfully post messages. -func (b *MockSlackAPIBuilder) WithPostMessageSuccess(channelID, timestamp string) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithPostMessageSuccess(channelID, timestamp string) *MockAPIBuilder { b.mock.postMessageFunc = func(ctx context.Context, cid string, options ...slack.MsgOption) (string, string, error) { return channelID, timestamp, nil } @@ -36,7 +36,7 @@ func (b *MockSlackAPIBuilder) WithPostMessageSuccess(channelID, timestamp string } // WithPostMessageError configures the mock to fail when posting messages. -func (b *MockSlackAPIBuilder) WithPostMessageError(err error) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithPostMessageError(err error) *MockAPIBuilder { b.mock.postMessageFunc = func(ctx context.Context, channelID string, options ...slack.MsgOption) (string, string, error) { return "", "", err } @@ -44,7 +44,7 @@ func (b *MockSlackAPIBuilder) WithPostMessageError(err error) *MockSlackAPIBuild } // WithUpdateMessageSuccess configures the mock to successfully update messages. -func (b *MockSlackAPIBuilder) WithUpdateMessageSuccess() *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithUpdateMessageSuccess() *MockAPIBuilder { b.mock.updateMessageFunc = func(ctx context.Context, channelID, timestamp string, options ...slack.MsgOption) (string, string, string, error) { return channelID, timestamp, "", nil } @@ -52,7 +52,7 @@ func (b *MockSlackAPIBuilder) WithUpdateMessageSuccess() *MockSlackAPIBuilder { } // WithUpdateMessageError configures the mock to fail when updating messages. -func (b *MockSlackAPIBuilder) WithUpdateMessageError(err error) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithUpdateMessageError(err error) *MockAPIBuilder { b.mock.updateMessageFunc = func(ctx context.Context, channelID, timestamp string, options ...slack.MsgOption) (string, string, string, error) { return "", "", "", err } @@ -60,7 +60,7 @@ func (b *MockSlackAPIBuilder) WithUpdateMessageError(err error) *MockSlackAPIBui } // WithGetTeamInfo configures the team info returned by the mock. 
-func (b *MockSlackAPIBuilder) WithGetTeamInfo(info *slack.TeamInfo) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetTeamInfo(info *slack.TeamInfo) *MockAPIBuilder { b.mock.getTeamInfoFunc = func(ctx context.Context) (*slack.TeamInfo, error) { return info, nil } @@ -68,7 +68,7 @@ func (b *MockSlackAPIBuilder) WithGetTeamInfo(info *slack.TeamInfo) *MockSlackAP } // WithGetTeamInfoError configures the mock to fail when getting team info. -func (b *MockSlackAPIBuilder) WithGetTeamInfoError(err error) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetTeamInfoError(err error) *MockAPIBuilder { b.mock.getTeamInfoFunc = func(ctx context.Context) (*slack.TeamInfo, error) { return nil, err } @@ -76,7 +76,7 @@ func (b *MockSlackAPIBuilder) WithGetTeamInfoError(err error) *MockSlackAPIBuild } // WithAuthTestSuccess configures the mock to successfully authenticate. -func (b *MockSlackAPIBuilder) WithAuthTestSuccess(userID, teamID string) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithAuthTestSuccess(userID, teamID string) *MockAPIBuilder { b.mock.authTestFunc = func(ctx context.Context) (*slack.AuthTestResponse, error) { return &slack.AuthTestResponse{ UserID: userID, @@ -87,7 +87,7 @@ func (b *MockSlackAPIBuilder) WithAuthTestSuccess(userID, teamID string) *MockSl } // WithAuthTestError configures the mock to fail authentication. -func (b *MockSlackAPIBuilder) WithAuthTestError(err error) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithAuthTestError(err error) *MockAPIBuilder { b.mock.authTestFunc = func(ctx context.Context) (*slack.AuthTestResponse, error) { return nil, err } @@ -95,7 +95,7 @@ func (b *MockSlackAPIBuilder) WithAuthTestError(err error) *MockSlackAPIBuilder } // WithGetConversationInfo configures the conversation info returned by the mock. -func (b *MockSlackAPIBuilder) WithGetConversationInfo(channel *slack.Channel) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetConversationInfo(channel *slack.Channel) *MockAPIBuilder { b.mock.getConversationInfoFunc = func(ctx context.Context, input *slack.GetConversationInfoInput) (*slack.Channel, error) { return channel, nil } @@ -103,7 +103,7 @@ func (b *MockSlackAPIBuilder) WithGetConversationInfo(channel *slack.Channel) *M } // WithGetConversationInfoError configures the mock to fail when getting conversation info. -func (b *MockSlackAPIBuilder) WithGetConversationInfoError(err error) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetConversationInfoError(err error) *MockAPIBuilder { b.mock.getConversationInfoFunc = func(ctx context.Context, input *slack.GetConversationInfoInput) (*slack.Channel, error) { return nil, err } @@ -111,7 +111,7 @@ func (b *MockSlackAPIBuilder) WithGetConversationInfoError(err error) *MockSlack } // WithGetConversationHistory configures the conversation history returned by the mock. -func (b *MockSlackAPIBuilder) WithGetConversationHistory(messages []slack.Message) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetConversationHistory(messages []slack.Message) *MockAPIBuilder { b.mock.getConversationHistoryFunc = func(ctx context.Context, params *slack.GetConversationHistoryParameters) (*slack.GetConversationHistoryResponse, error) { return &slack.GetConversationHistoryResponse{ Messages: messages, @@ -121,7 +121,7 @@ func (b *MockSlackAPIBuilder) WithGetConversationHistory(messages []slack.Messag } // WithGetConversationHistoryError configures the mock to fail when getting conversation history. 
-func (b *MockSlackAPIBuilder) WithGetConversationHistoryError(err error) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetConversationHistoryError(err error) *MockAPIBuilder { b.mock.getConversationHistoryFunc = func(ctx context.Context, params *slack.GetConversationHistoryParameters) (*slack.GetConversationHistoryResponse, error) { return nil, err } @@ -129,7 +129,7 @@ func (b *MockSlackAPIBuilder) WithGetConversationHistoryError(err error) *MockSl } // WithGetUserInfo configures the user info returned by the mock. -func (b *MockSlackAPIBuilder) WithGetUserInfo(user *slack.User) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetUserInfo(user *slack.User) *MockAPIBuilder { b.mock.getUserInfoFunc = func(ctx context.Context, userID string) (*slack.User, error) { return user, nil } @@ -137,7 +137,7 @@ func (b *MockSlackAPIBuilder) WithGetUserInfo(user *slack.User) *MockSlackAPIBui } // WithGetUserInfoError configures the mock to fail when getting user info. -func (b *MockSlackAPIBuilder) WithGetUserInfoError(err error) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetUserInfoError(err error) *MockAPIBuilder { b.mock.getUserInfoFunc = func(ctx context.Context, userID string) (*slack.User, error) { return nil, err } @@ -145,7 +145,7 @@ func (b *MockSlackAPIBuilder) WithGetUserInfoError(err error) *MockSlackAPIBuild } // WithGetUserPresence configures the user presence returned by the mock. -func (b *MockSlackAPIBuilder) WithGetUserPresence(presence string) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetUserPresence(presence string) *MockAPIBuilder { b.mock.getUserPresenceFunc = func(ctx context.Context, userID string) (*slack.UserPresence, error) { return &slack.UserPresence{ Presence: presence, @@ -155,7 +155,7 @@ func (b *MockSlackAPIBuilder) WithGetUserPresence(presence string) *MockSlackAPI } // WithGetUserPresenceError configures the mock to fail when getting user presence. -func (b *MockSlackAPIBuilder) WithGetUserPresenceError(err error) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetUserPresenceError(err error) *MockAPIBuilder { b.mock.getUserPresenceFunc = func(ctx context.Context, userID string) (*slack.UserPresence, error) { return nil, err } @@ -163,7 +163,7 @@ func (b *MockSlackAPIBuilder) WithGetUserPresenceError(err error) *MockSlackAPIB } // WithOpenConversation configures the conversation returned when opening a DM. -func (b *MockSlackAPIBuilder) WithOpenConversation(channel *slack.Channel) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithOpenConversation(channel *slack.Channel) *MockAPIBuilder { b.mock.openConversationFunc = func(ctx context.Context, params *slack.OpenConversationParameters) (*slack.Channel, bool, bool, error) { return channel, false, false, nil } @@ -171,7 +171,7 @@ func (b *MockSlackAPIBuilder) WithOpenConversation(channel *slack.Channel) *Mock } // WithOpenConversationError configures the mock to fail when opening conversations. -func (b *MockSlackAPIBuilder) WithOpenConversationError(err error) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithOpenConversationError(err error) *MockAPIBuilder { b.mock.openConversationFunc = func(ctx context.Context, params *slack.OpenConversationParameters) (*slack.Channel, bool, bool, error) { return nil, false, false, err } @@ -179,7 +179,7 @@ func (b *MockSlackAPIBuilder) WithOpenConversationError(err error) *MockSlackAPI } // WithSearchMessages configures the search results returned by the mock. 
-func (b *MockSlackAPIBuilder) WithSearchMessages(messages *slack.SearchMessages) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithSearchMessages(messages *slack.SearchMessages) *MockAPIBuilder { b.mock.searchMessagesFunc = func(ctx context.Context, query string, params slack.SearchParameters) (*slack.SearchMessages, error) { return messages, nil } @@ -187,7 +187,7 @@ func (b *MockSlackAPIBuilder) WithSearchMessages(messages *slack.SearchMessages) } // WithSearchMessagesError configures the mock to fail when searching messages. -func (b *MockSlackAPIBuilder) WithSearchMessagesError(err error) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithSearchMessagesError(err error) *MockAPIBuilder { b.mock.searchMessagesFunc = func(ctx context.Context, query string, params slack.SearchParameters) (*slack.SearchMessages, error) { return nil, err } @@ -195,7 +195,7 @@ func (b *MockSlackAPIBuilder) WithSearchMessagesError(err error) *MockSlackAPIBu } // WithGetUsersInConversation configures the users in a conversation. -func (b *MockSlackAPIBuilder) WithGetUsersInConversation(users []string) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetUsersInConversation(users []string) *MockAPIBuilder { b.mock.getUsersInConversationFunc = func(ctx context.Context, params *slack.GetUsersInConversationParameters) ([]string, string, error) { return users, "", nil } @@ -203,7 +203,7 @@ func (b *MockSlackAPIBuilder) WithGetUsersInConversation(users []string) *MockSl } // WithGetUsersInConversationError configures the mock to fail when getting users in conversation. -func (b *MockSlackAPIBuilder) WithGetUsersInConversationError(err error) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetUsersInConversationError(err error) *MockAPIBuilder { b.mock.getUsersInConversationFunc = func(ctx context.Context, params *slack.GetUsersInConversationParameters) ([]string, string, error) { return nil, "", err } @@ -211,7 +211,7 @@ func (b *MockSlackAPIBuilder) WithGetUsersInConversationError(err error) *MockSl } // WithGetConversations configures the conversations returned by the mock. -func (b *MockSlackAPIBuilder) WithGetConversations(channels []slack.Channel) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetConversations(channels []slack.Channel) *MockAPIBuilder { b.mock.getConversationsFunc = func(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { return channels, "", nil } @@ -219,15 +219,15 @@ func (b *MockSlackAPIBuilder) WithGetConversations(channels []slack.Channel) *Mo } // WithGetConversationsError configures the mock to fail when getting conversations. -func (b *MockSlackAPIBuilder) WithGetConversationsError(err error) *MockSlackAPIBuilder { +func (b *MockAPIBuilder) WithGetConversationsError(err error) *MockAPIBuilder { b.mock.getConversationsFunc = func(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { return nil, "", err } return b } -// Build returns the configured mockSlackAPI. -func (b *MockSlackAPIBuilder) Build() *mockSlackAPI { +// Build returns the configured mockAPI. +func (b *MockAPIBuilder) Build() *mockAPI { return b.mock } diff --git a/pkg/slack/oauth.go b/pkg/slack/oauth.go index 2afc8f7..838df0d 100644 --- a/pkg/slack/oauth.go +++ b/pkg/slack/oauth.go @@ -29,7 +29,7 @@ type OAuthExchanger interface { // slackOAuthExchanger is the default implementation using slack-go/slack. 
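With the rename, the builder reads as in its doc comment above; a sketch of how a test might wire the result into a Client, assuming it sits alongside these tests in package slack (the final assertion is illustrative only):

func TestBuilderUsage(t *testing.T) {
	t.Parallel()

	// Chain With* calls, then Build() to get the configured mockAPI.
	api := NewMockAPI().
		WithPostMessageSuccess("C123", "1234.567").
		WithGetTeamInfo(&slack.TeamInfo{Domain: "test"}).
		Build()

	// Wire the mock into a Client the same way the surrounding tests do.
	client := &Client{
		api:   api,
		cache: &apiCache{entries: make(map[string]cacheEntry)},
	}

	if _, _, err := client.api.PostMessageContext(context.Background(), "C123"); err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
}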
type slackOAuthExchanger struct{} -func (s *slackOAuthExchanger) ExchangeCode(ctx context.Context, clientID, clientSecret, code string) (*slack.OAuthV2Response, error) { +func (*slackOAuthExchanger) ExchangeCode(ctx context.Context, clientID, clientSecret, code string) (*slack.OAuthV2Response, error) { return slack.GetOAuthV2ResponseContext(ctx, &http.Client{}, clientID, clientSecret, code, "") } diff --git a/pkg/slack/oauth_handlers_test.go b/pkg/slack/oauth_handlers_test.go index c5ea90d..7f6300c 100644 --- a/pkg/slack/oauth_handlers_test.go +++ b/pkg/slack/oauth_handlers_test.go @@ -48,7 +48,7 @@ func TestHandleCallback_MissingCode(t *testing.T) { store: &mockWorkspaceStore{}, } - req := httptest.NewRequest(http.MethodGet, "/oauth/callback", nil) + req := httptest.NewRequest(http.MethodGet, "/oauth/callback", http.NoBody) w := httptest.NewRecorder() handler.HandleCallback(w, req) @@ -65,6 +65,7 @@ func TestHandleCallback_MissingCode(t *testing.T) { // TestHandleCallback_ShortCode tests OAuth code logging with short value. func TestHandleCallback_ShortCode(t *testing.T) { + ctx := context.Background() t.Parallel() handler := &OAuthHandler{ @@ -78,7 +79,7 @@ func TestHandleCallback_ShortCode(t *testing.T) { // Use context with short timeout to avoid waiting for retries ctx, cancel := context.WithTimeout(context.Background(), 10*time.Millisecond) defer cancel() - req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=abc", nil).WithContext(ctx) + req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=abc", http.NoBody).WithContext(ctx) w := httptest.NewRecorder() handler.HandleCallback(w, req) @@ -102,7 +103,7 @@ func TestHandleCallback_OAuthError(t *testing.T) { } // Error parameter takes priority over code - req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test&error=access_denied", nil) + req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test&error=access_denied", http.NoBody) w := httptest.NewRecorder() handler.HandleCallback(w, req) @@ -128,7 +129,7 @@ func TestHandleCallback_StateMismatch(t *testing.T) { store: &mockWorkspaceStore{}, } - req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test-code&state=wrong-state", nil) + req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test-code&state=wrong-state", http.NoBody) req.AddCookie(&http.Cookie{ Name: "oauth_state", Value: "correct-state", @@ -159,7 +160,7 @@ func TestHandleCallback_StateMismatchShortValue(t *testing.T) { } // Use very short state values (< 10 chars) to test min() edge case in logging - req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test-code&state=abc", nil) + req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test-code&state=abc", http.NoBody) req.AddCookie(&http.Cookie{ Name: "oauth_state", Value: "xyz", @@ -189,7 +190,7 @@ func TestHandleCallback_MissingStateCookie(t *testing.T) { store: &mockWorkspaceStore{}, } - req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test-code&state=some-state", nil) + req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test-code&state=some-state", http.NoBody) // Don't add cookie w := httptest.NewRecorder() @@ -207,6 +208,7 @@ func TestHandleCallback_MissingStateCookie(t *testing.T) { // TestHandleCallback_StateMatchSuccess tests successful state verification. 
func TestHandleCallback_StateMatchSuccess(t *testing.T) { + ctx := context.Background() t.Parallel() handler := &OAuthHandler{ @@ -219,7 +221,7 @@ func TestHandleCallback_StateMatchSuccess(t *testing.T) { // Use context with short timeout to avoid waiting for retries ctx, cancel := context.WithTimeout(context.Background(), 10*time.Millisecond) defer cancel() - req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test-code&state=matching-state", nil).WithContext(ctx) + req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test-code&state=matching-state", http.NoBody).WithContext(ctx) req.AddCookie(&http.Cookie{ Name: "oauth_state", Value: "matching-state", @@ -241,6 +243,7 @@ func TestHandleCallback_StateMatchSuccess(t *testing.T) { // TestHandleCallback_CookieDeletion tests that state cookie is cleared after verification. func TestHandleCallback_CookieDeletion(t *testing.T) { + ctx := context.Background() t.Parallel() handler := &OAuthHandler{ @@ -253,7 +256,7 @@ func TestHandleCallback_CookieDeletion(t *testing.T) { // Use context with short timeout to avoid waiting for retries ctx, cancel := context.WithTimeout(context.Background(), 10*time.Millisecond) defer cancel() - req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test-code&state=matching-state", nil).WithContext(ctx) + req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test-code&state=matching-state", http.NoBody).WithContext(ctx) req.AddCookie(&http.Cookie{ Name: "oauth_state", Value: "matching-state", @@ -296,6 +299,7 @@ func TestHandleCallback_CookieDeletion(t *testing.T) { // TestHandleCallback_NoStateParam tests direct installation without state. func TestHandleCallback_NoStateParam(t *testing.T) { + ctx := context.Background() t.Parallel() handler := &OAuthHandler{ @@ -308,7 +312,7 @@ func TestHandleCallback_NoStateParam(t *testing.T) { // Use context with short timeout to avoid waiting for retries ctx, cancel := context.WithTimeout(context.Background(), 10*time.Millisecond) defer cancel() - req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test-code", nil).WithContext(ctx) + req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=test-code", http.NoBody).WithContext(ctx) // No state parameter, no cookie w := httptest.NewRecorder() @@ -358,7 +362,7 @@ func TestHandleCallback_StoreWorkspaceError(t *testing.T) { store: mockStore, } - req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=valid-code", nil) + req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=valid-code", http.NoBody) w := httptest.NewRecorder() handler.HandleCallback(w, req) @@ -396,7 +400,7 @@ func TestHandleCallback_OAuthNotOk(t *testing.T) { store: &mockWorkspaceStore{}, } - req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=invalid-code", nil) + req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=invalid-code", http.NoBody) w := httptest.NewRecorder() handler.HandleCallback(w, req) @@ -450,7 +454,7 @@ func TestHandleCallback_SuccessfulFlow(t *testing.T) { store: mockStore, } - req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=valid-code", nil) + req := httptest.NewRequest(http.MethodGet, "/oauth/callback?code=valid-code", http.NoBody) w := httptest.NewRecorder() handler.HandleCallback(w, req) diff --git a/pkg/slack/slack.go b/pkg/slack/slack.go index f0cb9fa..c17f9c4 100644 --- a/pkg/slack/slack.go +++ b/pkg/slack/slack.go @@ -25,7 +25,7 @@ import ( "github.com/slack-go/slack/slackevents" ) -// Errors +// Errors. 
var ( // ErrNoDMToUpdate indicates no DM exists to update. ErrNoDMToUpdate = errors.New("no DM found to update") @@ -60,7 +60,7 @@ type Client struct { signingSecret string teamID string // Workspace team ID stateStore StateStore // State store for DM message tracking - api SlackAPI // Slack API interface for testability + api API // Slack API interface for testability cache *apiCache manager *Manager // Reference to manager for cache invalidation homeViewHandler func(ctx context.Context, teamID, userID string) error // Callback for app_home_opened events @@ -132,7 +132,7 @@ func (c *Client) getRetryDelay() time.Duration { // New creates a new Slack client with caching. func New(token, signingSecret string) *Client { return &Client{ - api: newSlackAPIWrapper(slack.New(token)), + api: newAPIWrapper(slack.New(token)), signingSecret: signingSecret, cache: &apiCache{ entries: make(map[string]cacheEntry), @@ -521,7 +521,7 @@ func (c *Client) SaveDMMessageInfo(ctx context.Context, userID, prURL, channelID SentAt: time.Now(), } - if err := store.SaveDMMessage(userID, prURL, info); err != nil { + if err := store.SaveDMMessage(ctx, userID, prURL, info); err != nil { return fmt.Errorf("failed to save DM message info: %w", err) } @@ -547,7 +547,7 @@ func (c *Client) UpdateDMMessage(ctx context.Context, userID, prURL, newText str } // Get stored DM message info - info, exists := store.DMMessage(userID, prURL) + info, exists := store.DMMessage(ctx, userID, prURL) if !exists { slog.Debug("no DM message found to update", "user", userID, @@ -563,7 +563,7 @@ func (c *Client) UpdateDMMessage(ctx context.Context, userID, prURL, newText str // Update stored message text info.MessageText = newText - if err := store.SaveDMMessage(userID, prURL, info); err != nil { + if err := store.SaveDMMessage(ctx, userID, prURL, info); err != nil { slog.Warn("failed to update stored DM message text", "user", userID, "pr_url", prURL, @@ -760,23 +760,20 @@ func (c *Client) EventsHandler(writer http.ResponseWriter, r *http.Request) { c.homeViewHandlerMu.RUnlock() if handler != nil { - //nolint:contextcheck // Use detached context for async event processing - prevents webhook events from being lost during shutdown - go func(teamID, userID string) { - homeCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - - if err := handler(homeCtx, teamID, userID); err != nil { - slog.Error("home view handler failed", - "team_id", teamID, - "user", userID, - "error", err) - } else { - slog.Info("successfully rendered home view", - "team_id", teamID, - "user", userID, - "trigger", "slack_event") - } - }(c.teamID, evt.User) + homeCtx, cancel := context.WithTimeout(r.Context(), 30*time.Second) + defer cancel() + + if err := handler(homeCtx, c.teamID, evt.User); err != nil { + slog.Error("home view handler failed", + "team_id", c.teamID, + "user", evt.User, + "error", err) + } else { + slog.Info("successfully rendered home view", + "team_id", c.teamID, + "user", evt.User, + "trigger", "slack_event") + } } else { slog.Debug("no home view handler registered", "user", evt.User) } @@ -862,8 +859,7 @@ func (c *Client) InteractionsHandler(writer http.ResponseWriter, r *http.Request switch interaction.Type { case slack.InteractionTypeBlockActions: // Handle block actions (buttons, selects, etc.). 
- //nolint:contextcheck // handleBlockAction spawns async goroutines with detached contexts - this is intentional - c.handleBlockAction(&interaction) + c.handleBlockAction(r.Context(), &interaction) case slack.InteractionTypeViewSubmission: // Handle modal submissions. slog.Debug("received view submission", "interaction", interaction) @@ -880,7 +876,7 @@ func (c *Client) InteractionsHandler(writer http.ResponseWriter, r *http.Request } // handleBlockAction handles block action interactions (button clicks, etc.). -func (c *Client) handleBlockAction(interaction *slack.InteractionCallback) { +func (c *Client) handleBlockAction(ctx context.Context, interaction *slack.InteractionCallback) { // Process each action in the callback for _, action := range interaction.ActionCallback.BlockActions { slog.Debug("processing block action", @@ -890,46 +886,47 @@ func (c *Client) handleBlockAction(interaction *slack.InteractionCallback) { switch action.ActionID { case "refresh_dashboard": - // Trigger home view refresh - c.homeViewHandlerMu.RLock() - handler := c.homeViewHandler - c.homeViewHandlerMu.RUnlock() - - if handler != nil { - // Refresh asynchronously to avoid blocking the response - go func(teamID, userID string) { - ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - - slog.Info("refreshing dashboard via button click", - "team_id", teamID, - "user_id", userID, - "trigger", "refresh_button") - - if err := handler(ctx, teamID, userID); err != nil { - slog.Error("failed to refresh dashboard", - "team_id", teamID, - "user_id", userID, - "trigger", "refresh_button", - "error", err) - } else { - slog.Info("successfully refreshed dashboard", - "team_id", teamID, - "user_id", userID, - "trigger", "refresh_button") - } - }(interaction.Team.ID, interaction.User.ID) - } else { - slog.Warn("refresh requested but no home view handler registered", - "user", interaction.User.ID) - } - + c.handleRefreshDashboard(ctx, interaction) default: slog.Debug("unhandled action_id", "action_id", action.ActionID) } } } +// handleRefreshDashboard handles the refresh dashboard button action. +func (c *Client) handleRefreshDashboard(ctx context.Context, interaction *slack.InteractionCallback) { + c.homeViewHandlerMu.RLock() + handler := c.homeViewHandler + c.homeViewHandlerMu.RUnlock() + + if handler == nil { + slog.Warn("refresh requested but no home view handler registered", + "user", interaction.User.ID) + return + } + + handlerCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + slog.Info("refreshing dashboard via button click", + "team_id", interaction.Team.ID, + "user_id", interaction.User.ID, + "trigger", "refresh_button") + + if err := handler(handlerCtx, interaction.Team.ID, interaction.User.ID); err != nil { + slog.Error("failed to refresh dashboard", + "team_id", interaction.Team.ID, + "user_id", interaction.User.ID, + "trigger", "refresh_button", + "error", err) + } else { + slog.Info("successfully refreshed dashboard", + "team_id", interaction.Team.ID, + "user_id", interaction.User.ID, + "trigger", "refresh_button") + } +} + // SlashCommandHandler handles Slack slash commands. func (c *Client) SlashCommandHandler(writer http.ResponseWriter, r *http.Request) { // Verify the request signature. @@ -1127,9 +1124,9 @@ func (c *Client) SearchMessages(ctx context.Context, query string, params *slack } // API returns the underlying Slack API client for compatibility. -// This unwraps the SlackAPI interface to return the raw *slack.Client. 
+// This unwraps the API interface to return the raw *slack.Client. // Only use this when integrating with code that hasn't been refactored -// to use the SlackAPI interface yet. +// to use the API interface yet. func (c *Client) API() *slack.Client { if wrapper, ok := c.api.(*slackAPIWrapper); ok { return wrapper.RawClient() diff --git a/pkg/slack/slack_additional_coverage_test.go b/pkg/slack/slack_additional_coverage_test.go index 826fb05..c67e6c5 100644 --- a/pkg/slack/slack_additional_coverage_test.go +++ b/pkg/slack/slack_additional_coverage_test.go @@ -15,12 +15,11 @@ import ( // TestResolveChannelID_ChannelIDInput tests when input is already a channel ID. func TestResolveChannelID_ChannelIDInput(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() // No API calls should be made - api := &mockSlackAPI{} + api := &mockAPI{} client := &Client{ api: api, @@ -38,11 +37,10 @@ func TestResolveChannelID_ChannelIDInput(t *testing.T) { // TestResolveChannelID_HashPrefix tests when input has # prefix. func TestResolveChannelID_HashPrefix(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() - api := &mockSlackAPI{ + api := &mockAPI{ getConversationsFunc: func(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { return []slack.Channel{ { @@ -73,12 +71,11 @@ func TestResolveChannelID_HashPrefix(t *testing.T) { // TestResolveChannelID_CacheTypeMismatch tests handling of wrong cache type. func TestResolveChannelID_CacheTypeMismatch(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ getConversationsFunc: func(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { callCount++ return []slack.Channel{ @@ -117,12 +114,11 @@ func TestResolveChannelID_CacheTypeMismatch(t *testing.T) { // TestResolveChannelID_FallbackToPublicOnly tests fallback to public channels only. func TestResolveChannelID_FallbackToPublicOnly(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ getConversationsFunc: func(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { callCount++ // First call (public+private) fails with permission error @@ -163,11 +159,10 @@ func TestResolveChannelID_FallbackToPublicOnly(t *testing.T) { // TestResolveChannelID_EmptyChannelName tests empty channel name handling. 
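The handler changes above drop the detached background goroutines and instead bound the work with a timeout derived from the incoming request's context, so cancellation propagates to the callback. A minimal sketch of that shape, with handlerFunc as an assumed callback type:

package example

import (
	"context"
	"log/slog"
	"net/http"
	"time"
)

// handlerFunc is an assumed callback shape, mirroring the home-view handler.
type handlerFunc func(ctx context.Context, teamID, userID string) error

// serve derives a bounded context from the request instead of detaching a
// background goroutine, so client cancellation and server shutdown propagate.
func serve(h handlerFunc, teamID, userID string) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx, cancel := context.WithTimeout(r.Context(), 30*time.Second)
		defer cancel()

		if err := h(ctx, teamID, userID); err != nil {
			slog.Error("handler failed", "team_id", teamID, "user", userID, "error", err)
			http.Error(w, "internal error", http.StatusInternalServerError)
			return
		}
		w.WriteHeader(http.StatusOK)
	}
}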
func TestResolveChannelID_EmptyChannelName(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() - api := &mockSlackAPI{} + api := &mockAPI{} client := &Client{ api: api, @@ -216,7 +211,7 @@ func TestHandleBlockAction_RefreshButton(t *testing.T) { } // Call handler - client.handleBlockAction(interaction) + client.handleBlockAction(context.Background(), interaction) // Wait for handler to complete or timeout select { @@ -261,7 +256,7 @@ func TestHandleBlockAction_RefreshButtonNoHandler(t *testing.T) { } // Should complete without panic - client.handleBlockAction(interaction) + client.handleBlockAction(context.Background(), interaction) // Give time for any potential goroutine time.Sleep(10 * time.Millisecond) @@ -299,7 +294,7 @@ func TestHandleBlockAction_RefreshButtonHandlerError(t *testing.T) { } // Call handler - client.handleBlockAction(interaction) + client.handleBlockAction(context.Background(), interaction) // Wait for handler to complete or timeout select { @@ -338,7 +333,7 @@ func TestHandleBlockAction_UnhandledAction(t *testing.T) { } // Should complete without calling handler - client.handleBlockAction(interaction) + client.handleBlockAction(context.Background(), interaction) // Give time for any potential goroutine time.Sleep(10 * time.Millisecond) @@ -382,7 +377,7 @@ func TestHandleBlockAction_MultipleActions(t *testing.T) { } // Call handler - client.handleBlockAction(interaction) + client.handleBlockAction(context.Background(), interaction) // Wait for both handler calls timeout := time.After(50 * time.Millisecond) @@ -427,7 +422,7 @@ func TestHandleBlockAction_EmptyActions(t *testing.T) { } // Should complete without calling handler - client.handleBlockAction(interaction) + client.handleBlockAction(context.Background(), interaction) // Give time for any potential goroutine time.Sleep(10 * time.Millisecond) @@ -437,12 +432,11 @@ func TestHandleBlockAction_EmptyActions(t *testing.T) { // TestResolveChannelID_Pagination tests channel resolution with multiple pages. func TestResolveChannelID_Pagination(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ getConversationsFunc: func(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { callCount++ // First call returns page 1 with cursor @@ -496,12 +490,11 @@ func TestResolveChannelID_Pagination(t *testing.T) { // TestResolveChannelID_PaginationError tests error during pagination. func TestResolveChannelID_PaginationError(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ getConversationsFunc: func(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { callCount++ // First call returns page 1 with cursor @@ -539,11 +532,10 @@ func TestResolveChannelID_PaginationError(t *testing.T) { // TestResolveChannelID_ChannelNotFound tests when channel doesn't exist. 
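The pagination tests above exercise the usual cursor loop: keep requesting pages with the returned cursor until it comes back empty. A generic sketch, with listPage as an assumed pager type rather than the client's real signature:

package example

import "context"

// listPage is an assumed pager: it returns one page of names plus the cursor
// for the next page; an empty cursor means the listing is complete.
type listPage func(ctx context.Context, cursor string) (names []string, nextCursor string, err error)

// findByName walks every page until the target is found or the pages run out.
func findByName(ctx context.Context, list listPage, target string) (bool, error) {
	cursor := ""
	for {
		names, next, err := list(ctx, cursor)
		if err != nil {
			return false, err
		}
		for _, n := range names {
			if n == target {
				return true, nil
			}
		}
		if next == "" {
			return false, nil
		}
		cursor = next
	}
}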
func TestResolveChannelID_ChannelNotFound(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() - api := &mockSlackAPI{ + api := &mockAPI{ getConversationsFunc: func(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { // Return channels but none matching return []slack.Channel{ @@ -576,12 +568,11 @@ func TestResolveChannelID_ChannelNotFound(t *testing.T) { // TestResolveChannelID_BothFallbacksFail tests when both public+private and public-only fail. func TestResolveChannelID_BothFallbacksFail(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ getConversationsFunc: func(ctx context.Context, params *slack.GetConversationsParameters) ([]slack.Channel, string, error) { callCount++ // Both calls fail diff --git a/pkg/slack/user_test.go b/pkg/slack/user_test.go index 85289b4..64beb46 100644 --- a/pkg/slack/user_test.go +++ b/pkg/slack/user_test.go @@ -10,12 +10,11 @@ import ( ) func TestUserInfo(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() t.Run("success", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUserInfoFunc: func(ctx context.Context, userID string) (*slack.User, error) { return &slack.User{ ID: userID, @@ -39,7 +38,7 @@ func TestUserInfo(t *testing.T) { }) t.Run("user_not_found", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUserInfoFunc: func(ctx context.Context, userID string) (*slack.User, error) { return nil, errors.New("user_not_found") }, @@ -57,12 +56,11 @@ func TestUserInfo(t *testing.T) { } func TestUserPresence(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() t.Run("active", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUserPresenceFunc: func(ctx context.Context, userID string) (*slack.UserPresence, error) { return &slack.UserPresence{ Presence: "active", @@ -85,7 +83,7 @@ func TestUserPresence(t *testing.T) { }) t.Run("away", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUserPresenceFunc: func(ctx context.Context, userID string) (*slack.UserPresence, error) { return &slack.UserPresence{ Presence: "away", @@ -108,7 +106,7 @@ func TestUserPresence(t *testing.T) { }) t.Run("error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUserPresenceFunc: func(ctx context.Context, userID string) (*slack.UserPresence, error) { return nil, errors.New("user_not_found") }, @@ -123,15 +121,15 @@ func TestUserPresence(t *testing.T) { t.Fatal("expected error") } }) + //nolint:tparallel // Tests share resources, cannot run subtests in parallel } func TestIsUserActive(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() t.Run("active", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUserPresenceFunc: func(ctx context.Context, userID string) (*slack.UserPresence, error) { return &slack.UserPresence{ Presence: "active", @@ -149,7 +147,7 @@ func TestIsUserActive(t *testing.T) { }) t.Run("away", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUserPresenceFunc: func(ctx context.Context, userID string) (*slack.UserPresence, error) { return &slack.UserPresence{ Presence: "away", @@ -167,7 +165,7 @@ func TestIsUserActive(t *testing.T) { }) t.Run("error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUserPresenceFunc: func(ctx context.Context, userID string) (*slack.UserPresence, error) { return nil, errors.New("api 
error") }, @@ -182,16 +180,16 @@ func TestIsUserActive(t *testing.T) { if client.IsUserActive(ctx, "U123") { t.Error("expected false on error") } + //nolint:tparallel // Tests share resources, cannot run subtests in parallel }) } func TestUserTimezone(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() t.Run("has_timezone", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUserInfoFunc: func(ctx context.Context, userID string) (*slack.User, error) { return &slack.User{ ID: userID, @@ -218,7 +216,7 @@ func TestUserTimezone(t *testing.T) { }) t.Run("no_timezone_defaults_to_utc", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUserInfoFunc: func(ctx context.Context, userID string) (*slack.User, error) { return &slack.User{ ID: userID, @@ -246,7 +244,7 @@ func TestUserTimezone(t *testing.T) { t.Run("cached_value", func(t *testing.T) { callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ getUserInfoFunc: func(ctx context.Context, userID string) (*slack.User, error) { callCount++ if callCount == 1 { @@ -295,7 +293,7 @@ func TestUserTimezone(t *testing.T) { }) t.Run("error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getUserInfoFunc: func(ctx context.Context, userID string) (*slack.User, error) { return nil, errors.New("api error") }, @@ -311,17 +309,17 @@ func TestUserTimezone(t *testing.T) { _, err := client.UserTimezone(ctx, "U123") if err == nil { t.Fatal("expected error") + //nolint:tparallel // Tests share resources, cannot run subtests in parallel } }) } func TestWorkspaceInfo(t *testing.T) { - t.Parallel() - ctx := context.Background() + t.Parallel() t.Run("success", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getTeamInfoFunc: func(ctx context.Context) (*slack.TeamInfo, error) { return &slack.TeamInfo{ ID: "T123", @@ -353,7 +351,7 @@ func TestWorkspaceInfo(t *testing.T) { t.Run("cached_value", func(t *testing.T) { callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ getTeamInfoFunc: func(ctx context.Context) (*slack.TeamInfo, error) { callCount++ if callCount == 1 { @@ -403,7 +401,7 @@ func TestWorkspaceInfo(t *testing.T) { t.Run("invalidate_and_refresh", func(t *testing.T) { callCount := 0 - api := &mockSlackAPI{ + api := &mockAPI{ getTeamInfoFunc: func(ctx context.Context) (*slack.TeamInfo, error) { callCount++ if callCount == 1 { @@ -455,7 +453,7 @@ func TestWorkspaceInfo(t *testing.T) { }) t.Run("error", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getTeamInfoFunc: func(ctx context.Context) (*slack.TeamInfo, error) { return nil, errors.New("api error") }, @@ -475,7 +473,7 @@ func TestWorkspaceInfo(t *testing.T) { }) t.Run("incorrect_cache_type", func(t *testing.T) { - api := &mockSlackAPI{ + api := &mockAPI{ getTeamInfoFunc: func(ctx context.Context) (*slack.TeamInfo, error) { return &slack.TeamInfo{ ID: "T456", @@ -501,6 +499,7 @@ func TestWorkspaceInfo(t *testing.T) { } if info.Name != "Fresh Workspace" { + //nolint:tparallel // Tests share resources, cannot run subtests in parallel t.Errorf("expected 'Fresh Workspace' after cache invalidation, got %s", info.Name) } }) diff --git a/pkg/slacktest/server.go b/pkg/slacktest/server.go index b812e86..f9cf079 100644 --- a/pkg/slacktest/server.go +++ b/pkg/slacktest/server.go @@ -33,9 +33,9 @@ type Server struct { dmChannels map[string]string // userID -> dmChannelID // Request tracking for assertions - PostedMessages []*PostedMessage - UpdatedMessages []*UpdatedMessage - EmailLookups []string + postedMessages 
[]*PostedMessage + updatedMessages []*UpdatedMessage + emailLookups []string } // Channel represents a Slack channel. @@ -80,9 +80,9 @@ func New() *Server { botInChannels: make(map[string]bool), channelMessages: make(map[string][]*Message), dmChannels: make(map[string]string), - PostedMessages: make([]*PostedMessage, 0), - UpdatedMessages: make([]*UpdatedMessage, 0), - EmailLookups: make([]string, 0), + postedMessages: make([]*PostedMessage, 0), + updatedMessages: make([]*UpdatedMessage, 0), + emailLookups: make([]string, 0), } mux := http.NewServeMux() @@ -144,34 +144,34 @@ func (s *Server) AddMessage(channelID, text, timestamp string) { }) } -// GetPostedMessages returns all messages posted via chat.postMessage. -func (s *Server) GetPostedMessages() []*PostedMessage { +// PostedMessages returns all messages posted via chat.postMessage. +func (s *Server) PostedMessages() []*PostedMessage { s.mu.RLock() defer s.mu.RUnlock() - return s.PostedMessages + return s.postedMessages } -// GetUpdatedMessages returns all messages updated via chat.update. -func (s *Server) GetUpdatedMessages() []*UpdatedMessage { +// UpdatedMessages returns all messages updated via chat.update. +func (s *Server) UpdatedMessages() []*UpdatedMessage { s.mu.RLock() defer s.mu.RUnlock() - return s.UpdatedMessages + return s.updatedMessages } -// GetEmailLookups returns all emails that were looked up. -func (s *Server) GetEmailLookups() []string { +// EmailLookups returns all emails that were looked up. +func (s *Server) EmailLookups() []string { s.mu.RLock() defer s.mu.RUnlock() - return s.EmailLookups + return s.emailLookups } // Reset clears all tracking data (but keeps configuration). func (s *Server) Reset() { s.mu.Lock() defer s.mu.Unlock() - s.PostedMessages = make([]*PostedMessage, 0) - s.UpdatedMessages = make([]*UpdatedMessage, 0) - s.EmailLookups = make([]string, 0) + s.postedMessages = make([]*PostedMessage, 0) + s.updatedMessages = make([]*UpdatedMessage, 0) + s.emailLookups = make([]string, 0) } func (s *Server) handleUserLookupByEmail(w http.ResponseWriter, r *http.Request) { @@ -186,7 +186,7 @@ func (s *Server) handleUserLookupByEmail(w http.ResponseWriter, r *http.Request) } s.mu.Lock() - s.EmailLookups = append(s.EmailLookups, email) + s.emailLookups = append(s.emailLookups, email) user, exists := s.usersByEmail[email] s.mu.Unlock() @@ -283,7 +283,7 @@ func (s *Server) handleChatPostMessage(w http.ResponseWriter, r *http.Request) { timestamp := time.Now().Format("1504898400.123456") s.mu.Lock() - s.PostedMessages = append(s.PostedMessages, &PostedMessage{ + s.postedMessages = append(s.postedMessages, &PostedMessage{ Channel: channel, Text: text, Timestamp: time.Now(), @@ -316,7 +316,7 @@ func (s *Server) handleChatUpdate(w http.ResponseWriter, r *http.Request) { ts := r.FormValue("ts") s.mu.Lock() - s.UpdatedMessages = append(s.UpdatedMessages, &UpdatedMessage{ + s.updatedMessages = append(s.updatedMessages, &UpdatedMessage{ Channel: channel, Timestamp: ts, Text: text, diff --git a/pkg/slacktest/server_test.go b/pkg/slacktest/server_test.go index 0419df2..b7f7555 100644 --- a/pkg/slacktest/server_test.go +++ b/pkg/slacktest/server_test.go @@ -33,7 +33,7 @@ func TestMockServerUserLookup(t *testing.T) { } // Verify email lookup was tracked - lookups := server.GetEmailLookups() + lookups := server.EmailLookups() if len(lookups) != 1 { t.Errorf("Expected 1 email lookup, got %d", len(lookups)) } @@ -127,7 +127,7 @@ func TestMockServerPostMessage(t *testing.T) { } // Verify message was posted - messages := 
server.GetPostedMessages() + messages := server.PostedMessages() if len(messages) != 1 { t.Fatalf("Expected 1 posted message, got %d", len(messages)) } @@ -158,7 +158,7 @@ func TestMockServerUpdateMessage(t *testing.T) { } // Verify message was updated - updates := server.GetUpdatedMessages() + updates := server.UpdatedMessages() if len(updates) != 1 { t.Fatalf("Expected 1 updated message, got %d", len(updates)) } @@ -193,7 +193,7 @@ func TestMockServerReset(t *testing.T) { } // Verify data exists - if len(server.GetPostedMessages()) != 1 { + if len(server.PostedMessages()) != 1 { t.Fatal("Expected posted message before reset") } @@ -201,12 +201,12 @@ func TestMockServerReset(t *testing.T) { server.Reset() // Verify data was cleared - if len(server.GetPostedMessages()) != 0 { - t.Errorf("Expected no posted messages after reset, got %d", len(server.GetPostedMessages())) + if len(server.PostedMessages()) != 0 { + t.Errorf("Expected no posted messages after reset, got %d", len(server.PostedMessages())) } - if len(server.GetEmailLookups()) != 0 { - t.Errorf("Expected no email lookups after reset, got %d", len(server.GetEmailLookups())) + if len(server.EmailLookups()) != 0 { + t.Errorf("Expected no email lookups after reset, got %d", len(server.EmailLookups())) } } @@ -259,7 +259,7 @@ func TestMockServerConversationsOpen(t *testing.T) { } // Should generate a DM channel ID - if len(channel.ID) == 0 { + if channel.ID == "" { t.Error("Expected non-empty channel ID") } } diff --git a/pkg/state/datastore.go b/pkg/state/datastore.go index 328e441..68d7520 100644 --- a/pkg/state/datastore.go +++ b/pkg/state/datastore.go @@ -79,21 +79,21 @@ type notifyEntity struct { // Pending DM entity. type pendingDMEntity struct { - WorkspaceID string `datastore:"workspace_id"` - UserID string `datastore:"user_id"` - PROwner string `datastore:"pr_owner"` + QueuedAt time.Time `datastore:"queued_at"` + SendAfter time.Time `datastore:"send_after"` + PRTitle string `datastore:"pr_title,noindex"` PRRepo string `datastore:"pr_repo"` - PRNumber int `datastore:"pr_number"` PRURL string `datastore:"pr_url"` - PRTitle string `datastore:"pr_title,noindex"` + WorkspaceID string `datastore:"workspace_id"` PRAuthor string `datastore:"pr_author"` PRState string `datastore:"pr_state"` WorkflowState string `datastore:"workflow_state"` NextActions string `datastore:"next_actions,noindex"` ChannelID string `datastore:"channel_id"` ChannelName string `datastore:"channel_name"` - QueuedAt time.Time `datastore:"queued_at"` - SendAfter time.Time `datastore:"send_after"` + PROwner string `datastore:"pr_owner"` + UserID string `datastore:"user_id"` + PRNumber int `datastore:"pr_number"` } // NewDatastoreStore creates a new Datastore-backed store with in-memory cache. @@ -154,11 +154,11 @@ func NewDatastoreStore(ctx context.Context, projectID, databaseID string) (*Data } // Thread retrieves thread info with memory-first, then Datastore fallback. 
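The hunks that follow thread the caller's context into the read path: memory first, then a Datastore Get bounded by a 500 ms timeout derived from ctx rather than context.Background(). A self-contained sketch of that read-through shape (the types, kind name, and cache layout here are illustrative, not the package's actual ones):

package example

import (
	"context"
	"sync"
	"time"

	"cloud.google.com/go/datastore"
)

type cachedThread struct {
	ThreadTS  string `datastore:"thread_ts,noindex"`
	ChannelID string `datastore:"channel_id"`
}

type readThroughStore struct {
	mu    sync.RWMutex
	cache map[string]cachedThread // in-process fast path
	ds    *datastore.Client       // nil when persistence is disabled
}

func (s *readThroughStore) thread(ctx context.Context, key string) (cachedThread, bool) {
	s.mu.RLock()
	e, ok := s.cache[key]
	s.mu.RUnlock()
	if ok {
		return e, true
	}
	if s.ds == nil {
		return cachedThread{}, false
	}

	// Bound the Datastore read with a timeout derived from the caller's ctx,
	// not context.Background(), so cancellation propagates.
	timeoutCtx, cancel := context.WithTimeout(ctx, 500*time.Millisecond)
	defer cancel()

	if err := s.ds.Get(timeoutCtx, datastore.NameKey("Thread", key, nil), &e); err != nil {
		return cachedThread{}, false // covers datastore.ErrNoSuchEntity and timeouts
	}

	// Warm the in-memory cache so the next lookup stays on the fast path.
	s.mu.Lock()
	s.cache[key] = e
	s.mu.Unlock()
	return e, true
}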
-func (s *DatastoreStore) Thread(owner, repo string, number int, channelID string) (ThreadInfo, bool) { +func (s *DatastoreStore) Thread(ctx context.Context, owner, repo string, number int, channelID string) (ThreadInfo, bool) { key := threadKey(owner, repo, number, channelID) // Fast path: Check memory cache first - info, exists := s.memory.Thread(owner, repo, number, channelID) + info, exists := s.memory.Thread(ctx, owner, repo, number, channelID) if exists { return info, true } @@ -169,13 +169,13 @@ func (s *DatastoreStore) Thread(owner, repo string, number int, channelID string } // Try Datastore with timeout - ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) + timeoutCtx, cancel := context.WithTimeout(ctx, 500*time.Millisecond) defer cancel() dsKey := datastore.NameKey(kindThread, key, nil) var entity threadEntity - err := s.ds.Get(ctx, dsKey, &entity) + err := s.ds.Get(timeoutCtx, dsKey, &entity) if err != nil { if !errors.Is(err, datastore.ErrNoSuchEntity) { slog.Debug("Datastore get failed, using cache", @@ -195,7 +195,7 @@ func (s *DatastoreStore) Thread(owner, repo string, number int, channelID string } // Update memory cache (sync - fast) - if err := s.memory.SaveThread(owner, repo, number, channelID, result); err != nil { + if err := s.memory.SaveThread(ctx, owner, repo, number, channelID, result); err != nil { slog.Debug("failed to update memory cache for thread", "error", err) } @@ -203,11 +203,11 @@ func (s *DatastoreStore) Thread(owner, repo string, number int, channelID string } // SaveThread saves thread info to memory and Datastore. -func (s *DatastoreStore) SaveThread(owner, repo string, number int, channelID string, info ThreadInfo) error { +func (s *DatastoreStore) SaveThread(ctx context.Context, owner, repo string, number int, channelID string, info ThreadInfo) error { key := threadKey(owner, repo, number, channelID) // Always save to memory (fast, local) - if err := s.memory.SaveThread(owner, repo, number, channelID, info); err != nil { + if err := s.memory.SaveThread(ctx, owner, repo, number, channelID, info); err != nil { slog.Warn("failed to save thread to memory", "error", err) } @@ -219,34 +219,33 @@ func (s *DatastoreStore) SaveThread(owner, repo string, number int, channelID st // Capture client for safe concurrent access ds := s.ds - // Save to Datastore asynchronously (don't block) - go func() { - ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) - defer cancel() - - dsKey := datastore.NameKey(kindThread, key, nil) - entity := &threadEntity{ - ThreadTS: info.ThreadTS, - ChannelID: info.ChannelID, - MessageText: info.MessageText, - UpdatedAt: time.Now(), - LastEventTime: info.LastEventTime, - } + // Save to Datastore with timeout + timeoutCtx, cancel := context.WithTimeout(ctx, 2*time.Second) + defer cancel() - if _, err := ds.Put(ctx, dsKey, entity); err != nil { - slog.Error("failed to save thread to Datastore", - "key", key, - "error", err) - } - }() + dsKey := datastore.NameKey(kindThread, key, nil) + entity := &threadEntity{ + ThreadTS: info.ThreadTS, + ChannelID: info.ChannelID, + MessageText: info.MessageText, + UpdatedAt: time.Now(), + LastEventTime: info.LastEventTime, + } + + if _, err := ds.Put(timeoutCtx, dsKey, entity); err != nil { + slog.Error("failed to save thread to Datastore", + "key", key, + "error", err) + return err + } return nil } // LastDM retrieves last DM time with Datastore-first, memory fallback. 
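The write paths in this file (SaveThread above; RecordDM and SaveDMMessage further down) replace the fire-and-forget goroutine with a synchronous Put bounded by a timeout taken from the caller's ctx, so write errors finally reach the caller. A minimal sketch of the new shape (function and parameter names here are illustrative):

package example

import (
	"context"
	"time"

	"cloud.google.com/go/datastore"
)

// saveSync is the shape the write paths move to: a bounded, synchronous Put.
// The old form wrapped the Put in a goroutine with a fresh
// context.Background(), which swallowed errors and raced with shutdown.
func saveSync(ctx context.Context, ds *datastore.Client, kind, name string, entity any) error {
	timeoutCtx, cancel := context.WithTimeout(ctx, 2*time.Second)
	defer cancel()

	// The error is now returned instead of only being logged.
	_, err := ds.Put(timeoutCtx, datastore.NameKey(kind, name, nil), entity)
	return err
}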
-func (s *DatastoreStore) LastDM(userID, prURL string) (time.Time, bool) { +func (s *DatastoreStore) LastDM(ctx context.Context, userID, prURL string) (time.Time, bool) { // Check memory first (fast) - t, exists := s.memory.LastDM(userID, prURL) + t, exists := s.memory.LastDM(ctx, userID, prURL) if exists { return t, true } @@ -257,35 +256,30 @@ func (s *DatastoreStore) LastDM(userID, prURL string) (time.Time, bool) { } // Try Datastore - ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) + timeoutCtx, cancel := context.WithTimeout(ctx, 500*time.Millisecond) defer cancel() key := dmKey(userID, prURL) dsKey := datastore.NameKey(kindDM, key, nil) var entity dmEntity - err := s.ds.Get(ctx, dsKey, &entity) + err := s.ds.Get(timeoutCtx, dsKey, &entity) if err != nil { return time.Time{}, false } - // Capture memory store for safe concurrent access - mem := s.memory - - // Update memory cache async - go func() { - if err := mem.RecordDM(userID, prURL, entity.SentAt); err != nil { - slog.Debug("failed to update memory cache for DM", "error", err) - } - }() + // Update memory cache + if err := s.memory.RecordDM(ctx, userID, prURL, entity.SentAt); err != nil { + slog.Debug("failed to update memory cache for DM", "error", err) + } return entity.SentAt, true } // RecordDM saves DM timestamp to both stores. -func (s *DatastoreStore) RecordDM(userID, prURL string, sentAt time.Time) error { +func (s *DatastoreStore) RecordDM(ctx context.Context, userID, prURL string, sentAt time.Time) error { // Save to memory - if err := s.memory.RecordDM(userID, prURL, sentAt); err != nil { + if err := s.memory.RecordDM(ctx, userID, prURL, sentAt); err != nil { slog.Warn("failed to record DM in memory", "error", err) } @@ -297,33 +291,32 @@ func (s *DatastoreStore) RecordDM(userID, prURL string, sentAt time.Time) error // Capture client for safe concurrent access ds := s.ds - // Save to Datastore async - go func() { - ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) - defer cancel() - - key := dmKey(userID, prURL) - dsKey := datastore.NameKey(kindDM, key, nil) - entity := &dmEntity{ - UserID: userID, - PRURL: prURL, - SentAt: sentAt, - } + // Save to Datastore with timeout + timeoutCtx, cancel := context.WithTimeout(ctx, 2*time.Second) + defer cancel() - if _, err := ds.Put(ctx, dsKey, entity); err != nil { - slog.Error("failed to record DM in Datastore", - "user", userID, - "error", err) - } - }() + key := dmKey(userID, prURL) + dsKey := datastore.NameKey(kindDM, key, nil) + entity := &dmEntity{ + UserID: userID, + PRURL: prURL, + SentAt: sentAt, + } + + if _, err := ds.Put(timeoutCtx, dsKey, entity); err != nil { + slog.Error("failed to record DM in Datastore", + "user", userID, + "error", err) + return err + } return nil } // DMMessage retrieves DM message info with Datastore-first, memory fallback. 
-func (s *DatastoreStore) DMMessage(userID, prURL string) (DMInfo, bool) { +func (s *DatastoreStore) DMMessage(ctx context.Context, userID, prURL string) (DMInfo, bool) { // Check memory first (fast) - info, exists := s.memory.DMMessage(userID, prURL) + info, exists := s.memory.DMMessage(ctx, userID, prURL) if exists { return info, true } @@ -334,14 +327,14 @@ func (s *DatastoreStore) DMMessage(userID, prURL string) (DMInfo, bool) { } // Try Datastore - ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) + timeoutCtx, cancel := context.WithTimeout(ctx, 500*time.Millisecond) defer cancel() key := dmKey(userID, prURL) dsKey := datastore.NameKey(kindDMMessage, key, nil) var entity dmMessageEntity - err := s.ds.Get(ctx, dsKey, &entity) + err := s.ds.Get(timeoutCtx, dsKey, &entity) if err != nil { return DMInfo{}, false } @@ -349,23 +342,18 @@ func (s *DatastoreStore) DMMessage(userID, prURL string) (DMInfo, bool) { // Found in Datastore - update memory cache and return result := DMInfo(entity) - // Capture memory store for safe concurrent access - mem := s.memory - - // Update memory cache async - go func() { - if err := mem.SaveDMMessage(userID, prURL, result); err != nil { - slog.Debug("failed to update memory cache for DM message", "error", err) - } - }() + // Update memory cache + if err := s.memory.SaveDMMessage(ctx, userID, prURL, result); err != nil { + slog.Debug("failed to update memory cache for DM message", "error", err) + } return result, true } // SaveDMMessage saves DM message info to both stores. -func (s *DatastoreStore) SaveDMMessage(userID, prURL string, info DMInfo) error { +func (s *DatastoreStore) SaveDMMessage(ctx context.Context, userID, prURL string, info DMInfo) error { // Always save to memory first (fast, local) - if err := s.memory.SaveDMMessage(userID, prURL, info); err != nil { + if err := s.memory.SaveDMMessage(ctx, userID, prURL, info); err != nil { slog.Warn("failed to save DM message to memory", "error", err) } @@ -377,36 +365,35 @@ func (s *DatastoreStore) SaveDMMessage(userID, prURL string, info DMInfo) error // Capture client for safe concurrent access ds := s.ds - // Save to Datastore async - go func() { - ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) - defer cancel() - - key := dmKey(userID, prURL) - dsKey := datastore.NameKey(kindDMMessage, key, nil) - entity := &dmMessageEntity{ - ChannelID: info.ChannelID, - MessageTS: info.MessageTS, - MessageText: info.MessageText, - UpdatedAt: time.Now(), - SentAt: info.SentAt, - } + // Save to Datastore with timeout + timeoutCtx, cancel := context.WithTimeout(ctx, 2*time.Second) + defer cancel() - if _, err := ds.Put(ctx, dsKey, entity); err != nil { - slog.Error("failed to save DM message to Datastore", - "user", userID, - "error", err) - } - }() + key := dmKey(userID, prURL) + dsKey := datastore.NameKey(kindDMMessage, key, nil) + entity := &dmMessageEntity{ + ChannelID: info.ChannelID, + MessageTS: info.MessageTS, + MessageText: info.MessageText, + UpdatedAt: time.Now(), + SentAt: info.SentAt, + } + + if _, err := ds.Put(timeoutCtx, dsKey, entity); err != nil { + slog.Error("failed to save DM message to Datastore", + "user", userID, + "error", err) + return err + } return nil } // ListDMUsers returns all user IDs who have received DMs for a given PR. // Queries both memory cache and Datastore to ensure data persists across restarts. 
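When the in-memory cache is cold, ListDMUsers now runs its keys-only scan on a timeout derived from the caller's ctx. A self-contained sketch of that kind of query against the public Datastore client (the repo goes through its own AllKeys wrapper; the kind name and limit mirror the hunk, the function name is illustrative):

package example

import (
	"context"
	"time"

	"cloud.google.com/go/datastore"
)

// dmMessageKeys performs the cold-cache scan: a keys-only query is cheap
// because no entity payloads are loaded, and user IDs can be parsed back
// out of the key names, whose format the store itself controls.
func dmMessageKeys(ctx context.Context, ds *datastore.Client) ([]*datastore.Key, error) {
	timeoutCtx, cancel := context.WithTimeout(ctx, 5*time.Second)
	defer cancel()

	q := datastore.NewQuery("DMMessage").KeysOnly().Limit(1000)
	// For keys-only queries GetAll ignores the destination and returns keys.
	return ds.GetAll(timeoutCtx, q, nil)
}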
-func (s *DatastoreStore) ListDMUsers(prURL string) []string { +func (s *DatastoreStore) ListDMUsers(ctx context.Context, prURL string) []string { // Check memory cache first (fast path) - users := s.memory.ListDMUsers(prURL) + users := s.memory.ListDMUsers(ctx, prURL) if len(users) > 0 || s.disabled || s.ds == nil { return users } @@ -419,11 +406,11 @@ func (s *DatastoreStore) ListDMUsers(prURL string) []string { // 4. Results populate memory cache for future fast lookups // // Alternative considered: Ancestor queries require schema change (breaking existing data) - ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + timeoutCtx, cancel := context.WithTimeout(ctx, 5*time.Second) defer cancel() query := datastore.NewQuery(kindDMMessage).KeysOnly().Limit(1000) - keys, err := s.ds.AllKeys(ctx, query) + keys, err := s.ds.AllKeys(timeoutCtx, query) if err != nil { slog.Warn("failed to query Datastore for DM users", "pr_url", prURL, @@ -456,9 +443,9 @@ func (s *DatastoreStore) ListDMUsers(prURL string) []string { } // LastDigest retrieves last digest time. -func (s *DatastoreStore) LastDigest(userID, date string) (time.Time, bool) { +func (s *DatastoreStore) LastDigest(ctx context.Context, userID, date string) (time.Time, bool) { // Check memory first - t, exists := s.memory.LastDigest(userID, date) + t, exists := s.memory.LastDigest(ctx, userID, date) if exists { return t, true } @@ -469,27 +456,22 @@ func (s *DatastoreStore) LastDigest(userID, date string) (time.Time, bool) { } // Try Datastore - ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) + timeoutCtx, cancel := context.WithTimeout(ctx, 500*time.Millisecond) defer cancel() key := digestKey(userID, date) dsKey := datastore.NameKey(kindDigest, key, nil) var entity digestEntity - err := s.ds.Get(ctx, dsKey, &entity) + err := s.ds.Get(timeoutCtx, dsKey, &entity) if err != nil { return time.Time{}, false } - // Capture memory store for safe concurrent access - mem := s.memory - // Update cache - go func() { - if err := mem.RecordDigest(userID, date, entity.SentAt); err != nil { - slog.Debug("failed to update memory cache for digest", "error", err) - } - }() + if err := s.memory.RecordDigest(ctx, userID, date, entity.SentAt); err != nil { + slog.Debug("failed to update memory cache for digest", "error", err) + } return entity.SentAt, true } @@ -497,9 +479,9 @@ func (s *DatastoreStore) LastDigest(userID, date string) (time.Time, bool) { // RecordDigest saves digest timestamp to memory and attempts persistence to Datastore. // Memory is always updated (primary storage for runtime). Datastore is best-effort for restart recovery. // Degrades gracefully: logs errors but continues operating if Datastore unavailable. 
-func (s *DatastoreStore) RecordDigest(userID, date string, sentAt time.Time) error { +func (s *DatastoreStore) RecordDigest(ctx context.Context, userID, date string, sentAt time.Time) error { // Always save to memory first (primary storage, must succeed) - if err := s.memory.RecordDigest(userID, date, sentAt); err != nil { + if err := s.memory.RecordDigest(ctx, userID, date, sentAt); err != nil { slog.Warn("failed to record digest in memory", "error", err) } @@ -510,7 +492,7 @@ func (s *DatastoreStore) RecordDigest(userID, date string, sentAt time.Time) err // Best-effort persistence to Datastore for restart recovery // Synchronous write for maximum reliability, but don't fail operation if it doesn't work - ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + timeoutCtx, cancel := context.WithTimeout(ctx, 5*time.Second) defer cancel() key := digestKey(userID, date) @@ -521,7 +503,7 @@ func (s *DatastoreStore) RecordDigest(userID, date string, sentAt time.Time) err SentAt: sentAt, } - if _, err := s.ds.Put(ctx, dsKey, entity); err != nil { + if _, err := s.ds.Put(timeoutCtx, dsKey, entity); err != nil { slog.Error("failed to persist digest to Datastore - may send duplicate after restart", "user", userID, "date", date, @@ -534,9 +516,9 @@ func (s *DatastoreStore) RecordDigest(userID, date string, sentAt time.Time) err } // WasProcessed checks if an event was already processed (distributed check). -func (s *DatastoreStore) WasProcessed(eventKey string) bool { +func (s *DatastoreStore) WasProcessed(ctx context.Context, eventKey string) bool { // Check memory first (fast) - if s.memory.WasProcessed(eventKey) { + if s.memory.WasProcessed(ctx, eventKey) { return true } @@ -546,25 +528,20 @@ func (s *DatastoreStore) WasProcessed(eventKey string) bool { } // Check Datastore (cross-instance coordination) - ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) + timeoutCtx, cancel := context.WithTimeout(ctx, 500*time.Millisecond) defer cancel() dsKey := datastore.NameKey(kindEvent, eventKey, nil) var entity eventEntity - err := s.ds.Get(ctx, dsKey, &entity) + err := s.ds.Get(timeoutCtx, dsKey, &entity) exists := err == nil if exists { - // Capture memory store for safe concurrent access - mem := s.memory - // Update local cache - go func() { - if err := mem.MarkProcessed(eventKey, 24*time.Hour); err != nil { - slog.Debug("failed to update memory cache for event", "error", err) - } - }() + if err := s.memory.MarkProcessed(ctx, eventKey, 24*time.Hour); err != nil { + slog.Debug("failed to update memory cache for event", "error", err) + } } return exists @@ -572,9 +549,9 @@ func (s *DatastoreStore) WasProcessed(eventKey string) bool { // MarkProcessed marks an event as processed (distributed coordination). // Returns error if already processed by another instance (enables race detection). 
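The transactional compare-and-swap below is the heart of cross-instance dedup; only the context and timeout handling change. A self-contained sketch of the idiom (the entity, kind, and sentinel error are illustrative stand-ins for the package's own):

package example

import (
	"context"
	"errors"
	"time"

	"cloud.google.com/go/datastore"
)

var errAlreadyProcessed = errors.New("event already processed")

type processedEvent struct {
	ProcessedAt time.Time `datastore:"processed_at"`
}

// markOnce records eventKey exactly once across instances: the Get and Put
// run in one transaction, so two racing writers cannot both observe "absent".
func markOnce(ctx context.Context, ds *datastore.Client, eventKey string) error {
	timeoutCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()

	key := datastore.NameKey("Event", eventKey, nil)
	_, err := ds.RunInTransaction(timeoutCtx, func(tx *datastore.Transaction) error {
		var existing processedEvent
		switch err := tx.Get(key, &existing); {
		case err == nil:
			return errAlreadyProcessed // another instance won the race
		case errors.Is(err, datastore.ErrNoSuchEntity):
			_, err := tx.Put(key, &processedEvent{ProcessedAt: time.Now()})
			return err
		default:
			return err
		}
	})
	return err
}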
-func (s *DatastoreStore) MarkProcessed(eventKey string, ttl time.Duration) error { +func (s *DatastoreStore) MarkProcessed(ctx context.Context, eventKey string, ttl time.Duration) error { // Mark in memory first for fast local lookups - if err := s.memory.MarkProcessed(eventKey, ttl); err != nil { + if err := s.memory.MarkProcessed(ctx, eventKey, ttl); err != nil { slog.Warn("failed to mark event in memory", "error", err) } @@ -586,12 +563,12 @@ func (s *DatastoreStore) MarkProcessed(eventKey string, ttl time.Duration) error // Use transaction for compare-and-swap semantics // Timeout: 10 seconds for transaction (Google recommends up to 60s idle timeout) // This accounts for cold starts, network latency, and transaction overhead - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + timeoutCtx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() dsKey := datastore.NameKey(kindEvent, eventKey, nil) - _, err := s.ds.RunInTransaction(ctx, func(tx *datastore.Transaction) error { + _, err := s.ds.RunInTransaction(timeoutCtx, func(tx *datastore.Transaction) error { var existing eventEntity err := tx.Get(dsKey, &existing) @@ -631,19 +608,19 @@ func (s *DatastoreStore) MarkProcessed(eventKey string, ttl time.Duration) error } // LastNotification retrieves when a PR was last notified about. -func (s *DatastoreStore) LastNotification(prURL string) time.Time { +func (s *DatastoreStore) LastNotification(ctx context.Context, prURL string) time.Time { // Datastore disabled if s.disabled || s.ds == nil { return time.Time{} } - ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) + timeoutCtx, cancel := context.WithTimeout(ctx, 500*time.Millisecond) defer cancel() dsKey := datastore.NameKey(kindNotify, prURL, nil) var entity notifyEntity - err := s.ds.Get(ctx, dsKey, &entity) + err := s.ds.Get(timeoutCtx, dsKey, &entity) if err != nil { return time.Time{} } @@ -652,7 +629,7 @@ func (s *DatastoreStore) LastNotification(prURL string) time.Time { } // RecordNotification records when we notified about a PR. -func (s *DatastoreStore) RecordNotification(prURL string, notifiedAt time.Time) error { +func (s *DatastoreStore) RecordNotification(ctx context.Context, prURL string, notifiedAt time.Time) error { // Skip if disabled if s.disabled || s.ds == nil { return nil @@ -661,29 +638,28 @@ func (s *DatastoreStore) RecordNotification(prURL string, notifiedAt time.Time) // Capture client for safe concurrent access ds := s.ds - // Async save - go func() { - ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) - defer cancel() + // Save with timeout + timeoutCtx, cancel := context.WithTimeout(ctx, 2*time.Second) + defer cancel() - dsKey := datastore.NameKey(kindNotify, prURL, nil) - entity := ¬ifyEntity{ - PRURL: prURL, - NotifiedAt: notifiedAt, - } + dsKey := datastore.NameKey(kindNotify, prURL, nil) + entity := ¬ifyEntity{ + PRURL: prURL, + NotifiedAt: notifiedAt, + } - if _, err := ds.Put(ctx, dsKey, entity); err != nil { - slog.Error("failed to record notification in Datastore", "error", err) - } - }() + if _, err := ds.Put(timeoutCtx, dsKey, entity); err != nil { + slog.Error("failed to record notification in Datastore", "error", err) + return err + } return nil } // QueuePendingDM adds a pending DM to both memory and Datastore. 
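With ctx threaded through, the pending-DM flow reads as queue, poll, remove against a single store value. A usage sketch against an assumed slice of the interface (method names follow the hunks below; the interface literal, PendingDM fields shown, and flushDue are illustrative):

package example

import (
	"context"
	"time"
)

// PendingDM mirrors only the fields this sketch needs.
type PendingDM struct {
	ID        string
	UserID    string
	SendAfter time.Time
}

// pendingDMQueue is the slice of the store this flow touches.
type pendingDMQueue interface {
	QueuePendingDM(ctx context.Context, dm *PendingDM) error
	PendingDMs(ctx context.Context, before time.Time) ([]PendingDM, error)
	RemovePendingDM(ctx context.Context, id string) error
}

// flushDue sends every DM whose SendAfter has passed, removing it only after
// a successful send so a crash re-delivers rather than drops.
func flushDue(ctx context.Context, q pendingDMQueue, send func(context.Context, PendingDM) error) error {
	due, err := q.PendingDMs(ctx, time.Now())
	if err != nil {
		return err
	}
	for _, dm := range due {
		if err := send(ctx, dm); err != nil {
			return err // leave it queued; retry on the next poll
		}
		if err := q.RemovePendingDM(ctx, dm.ID); err != nil {
			return err
		}
	}
	return nil
}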
-func (s *DatastoreStore) QueuePendingDM(dm PendingDM) error { +func (s *DatastoreStore) QueuePendingDM(ctx context.Context, dm *PendingDM) error { // Always update memory cache - if err := s.memory.QueuePendingDM(dm); err != nil { + if err := s.memory.QueuePendingDM(ctx, dm); err != nil { return err } @@ -692,7 +668,7 @@ func (s *DatastoreStore) QueuePendingDM(dm PendingDM) error { return nil } - ctx := context.Background() + // Use passed ctx parameter key := datastore.NameKey(kindPendingDM, dm.ID, nil) entity := pendingDMEntity{ WorkspaceID: dm.WorkspaceID, @@ -718,9 +694,9 @@ func (s *DatastoreStore) QueuePendingDM(dm PendingDM) error { // PendingDMs returns all pending DMs that should be sent. // Reads from memory cache first, falls back to Datastore if empty. -func (s *DatastoreStore) PendingDMs(before time.Time) ([]PendingDM, error) { +func (s *DatastoreStore) PendingDMs(ctx context.Context, before time.Time) ([]PendingDM, error) { // Try memory first - dms, err := s.memory.PendingDMs(before) + dms, err := s.memory.PendingDMs(ctx, before) if err == nil && len(dms) > 0 { return dms, nil } @@ -731,7 +707,7 @@ func (s *DatastoreStore) PendingDMs(before time.Time) ([]PendingDM, error) { } // Query Datastore for pending DMs - ctx := context.Background() + // Use passed ctx parameter query := datastore.NewQuery(kindPendingDM). Filter("send_after <=", before). Limit(100) @@ -745,7 +721,8 @@ func (s *DatastoreStore) PendingDMs(before time.Time) ([]PendingDM, error) { // Convert entities to PendingDM structs and update memory cache result := make([]PendingDM, 0, len(entities)) - for i, entity := range entities { + for i := range entities { + entity := &entities[i] dm := PendingDM{ ID: keys[i].Name, WorkspaceID: entity.WorkspaceID, @@ -767,16 +744,16 @@ func (s *DatastoreStore) PendingDMs(before time.Time) ([]PendingDM, error) { result = append(result, dm) // Update memory cache (ignore error since cache is best-effort and we have authoritative result from datastore) //nolint:errcheck // Cache update is best-effort; authoritative result from datastore - _ = s.memory.QueuePendingDM(dm) + _ = s.memory.QueuePendingDM(ctx, &dm) } return result, nil } // RemovePendingDM removes a pending DM from both memory and Datastore. -func (s *DatastoreStore) RemovePendingDM(id string) error { +func (s *DatastoreStore) RemovePendingDM(ctx context.Context, id string) error { // Always remove from memory - if err := s.memory.RemovePendingDM(id); err != nil { + if err := s.memory.RemovePendingDM(ctx, id); err != nil { return err } @@ -785,15 +762,15 @@ func (s *DatastoreStore) RemovePendingDM(id string) error { return nil } - ctx := context.Background() + // Use passed ctx parameter key := datastore.NameKey(kindPendingDM, id, nil) return s.ds.Delete(ctx, key) } // Cleanup removes old data from both stores. 
-func (s *DatastoreStore) Cleanup() error { +func (s *DatastoreStore) Cleanup(ctx context.Context) error { // Always cleanup memory - if err := s.memory.Cleanup(); err != nil { + if err := s.memory.Cleanup(ctx); err != nil { slog.Warn("memory cleanup failed", "error", err) } diff --git a/pkg/state/datastore_test.go b/pkg/state/datastore_test.go index 8972dbd..e00a246 100644 --- a/pkg/state/datastore_test.go +++ b/pkg/state/datastore_test.go @@ -2,6 +2,7 @@ package state import ( "context" + "errors" "testing" "time" @@ -9,11 +10,10 @@ import ( ) func TestNewDatastoreStore(t *testing.T) { + ctx := context.Background() client, cleanup := datastore.NewMockClient(t) defer cleanup() - ctx := context.Background() - // Create store with mock client store := &DatastoreStore{ ds: client, @@ -21,10 +21,6 @@ func TestNewDatastoreStore(t *testing.T) { disabled: false, } - if store == nil { - t.Fatal("expected non-nil store") - } - if store.memory == nil { t.Error("expected non-nil memory store") } @@ -50,6 +46,7 @@ func TestNewDatastoreStore(t *testing.T) { } func TestDatastoreStore_ThreadOperations(t *testing.T) { + ctx := context.Background() client, cleanup := datastore.NewMockClient(t) defer cleanup() @@ -62,7 +59,7 @@ func TestDatastoreStore_ThreadOperations(t *testing.T) { defer func() { _ = store.Close() }() // Test non-existent thread - _, exists := store.Thread("owner", "repo", 123, "C123") + _, exists := store.Thread(ctx, "owner", "repo", 123, "C123") if exists { t.Error("expected thread to not exist") } @@ -77,13 +74,13 @@ func TestDatastoreStore_ThreadOperations(t *testing.T) { LastEventTime: time.Now(), } - err := store.SaveThread("owner", "repo", 123, "C123", threadInfo) + err := store.SaveThread(ctx, "owner", "repo", 123, "C123", threadInfo) if err != nil { t.Fatalf("unexpected error saving thread: %v", err) } // Retrieve from memory cache (immediate) - retrieved, exists := store.Thread("owner", "repo", 123, "C123") + retrieved, exists := store.Thread(ctx, "owner", "repo", 123, "C123") if !exists { t.Fatal("expected thread to exist in memory cache") } @@ -99,7 +96,7 @@ func TestDatastoreStore_ThreadOperations(t *testing.T) { store.memory = NewMemoryStore() // Retrieve from Datastore - retrieved, exists = store.Thread("owner", "repo", 123, "C123") + retrieved, exists = store.Thread(ctx, "owner", "repo", 123, "C123") if !exists { t.Fatal("expected thread to exist in Datastore") } @@ -110,6 +107,7 @@ func TestDatastoreStore_ThreadOperations(t *testing.T) { } func TestDatastoreStore_DMOperations(t *testing.T) { + ctx := context.Background() client, cleanup := datastore.NewMockClient(t) defer cleanup() @@ -124,20 +122,20 @@ func TestDatastoreStore_DMOperations(t *testing.T) { prURL := "https://github.com/test/repo/pull/123" // Test non-existent DM - _, exists := store.LastDM("U001", prURL) + _, exists := store.LastDM(ctx, "U001", prURL) if exists { t.Error("expected DM to not exist") } // Record DM sentAt := time.Now().Truncate(time.Millisecond) - err := store.RecordDM("U001", prURL, sentAt) + err := store.RecordDM(ctx, "U001", prURL, sentAt) if err != nil { t.Fatalf("unexpected error recording DM: %v", err) } // Retrieve from memory cache - retrieved, exists := store.LastDM("U001", prURL) + retrieved, exists := store.LastDM(ctx, "U001", prURL) if !exists { t.Fatal("expected DM to exist in memory cache") } @@ -153,7 +151,7 @@ func TestDatastoreStore_DMOperations(t *testing.T) { store.memory = NewMemoryStore() // Retrieve from Datastore - retrieved, exists = store.LastDM("U001", prURL) + 
retrieved, exists = store.LastDM(ctx, "U001", prURL) if !exists { t.Fatal("expected DM to exist in Datastore") } @@ -164,6 +162,7 @@ func TestDatastoreStore_DMOperations(t *testing.T) { } func TestDatastoreStore_DMMessageOperations(t *testing.T) { + ctx := context.Background() client, cleanup := datastore.NewMockClient(t) defer cleanup() @@ -178,7 +177,7 @@ func TestDatastoreStore_DMMessageOperations(t *testing.T) { prURL := "https://github.com/test/repo/pull/123" // Test non-existent DM message - _, exists := store.DMMessage("U001", prURL) + _, exists := store.DMMessage(ctx, "U001", prURL) if exists { t.Error("expected DM message to not exist") } @@ -191,13 +190,13 @@ func TestDatastoreStore_DMMessageOperations(t *testing.T) { MessageText: "Test DM message", } - err := store.SaveDMMessage("U001", prURL, dmInfo) + err := store.SaveDMMessage(ctx, "U001", prURL, dmInfo) if err != nil { t.Fatalf("unexpected error saving DM message: %v", err) } // Retrieve from memory cache - retrieved, exists := store.DMMessage("U001", prURL) + retrieved, exists := store.DMMessage(ctx, "U001", prURL) if !exists { t.Fatal("expected DM message to exist in memory cache") } @@ -213,7 +212,7 @@ func TestDatastoreStore_DMMessageOperations(t *testing.T) { store.memory = NewMemoryStore() // Retrieve from Datastore - retrieved, exists = store.DMMessage("U001", prURL) + retrieved, exists = store.DMMessage(ctx, "U001", prURL) if !exists { t.Fatal("expected DM message to exist in Datastore") } @@ -224,6 +223,7 @@ func TestDatastoreStore_DMMessageOperations(t *testing.T) { } func TestDatastoreStore_ListDMUsers(t *testing.T) { + ctx := context.Background() client, cleanup := datastore.NewMockClient(t) defer func() { // Give async operations plenty of time to complete before cleanup @@ -249,18 +249,18 @@ func TestDatastoreStore_ListDMUsers(t *testing.T) { MessageText: "Test DM", } - if err := store.SaveDMMessage("U001", prURL, dmInfo); err != nil { + if err := store.SaveDMMessage(ctx, "U001", prURL, dmInfo); err != nil { t.Fatalf("failed to save DM for U001: %v", err) } - if err := store.SaveDMMessage("U002", prURL, dmInfo); err != nil { + if err := store.SaveDMMessage(ctx, "U002", prURL, dmInfo); err != nil { t.Fatalf("failed to save DM for U002: %v", err) } - if err := store.SaveDMMessage("U003", prURL, dmInfo); err != nil { + if err := store.SaveDMMessage(ctx, "U003", prURL, dmInfo); err != nil { t.Fatalf("failed to save DM for U003: %v", err) } // List from memory cache (fast path) - users := store.ListDMUsers(prURL) + users := store.ListDMUsers(ctx, prURL) if len(users) != 3 { t.Fatalf("expected 3 users from memory, got %d", len(users)) } @@ -272,13 +272,14 @@ func TestDatastoreStore_ListDMUsers(t *testing.T) { store.memory = NewMemoryStore() // List from Datastore - users = store.ListDMUsers(prURL) + users = store.ListDMUsers(ctx, prURL) if len(users) != 3 { t.Fatalf("expected 3 users from Datastore, got %d", len(users)) } } func TestDatastoreStore_DigestOperations(t *testing.T) { + ctx := context.Background() client, cleanup := datastore.NewMockClient(t) defer cleanup() @@ -294,20 +295,20 @@ func TestDatastoreStore_DigestOperations(t *testing.T) { date := "2025-01-15" // Test non-existent digest - _, exists := store.LastDigest(userID, date) + _, exists := store.LastDigest(ctx, userID, date) if exists { t.Error("expected digest to not exist") } // Record digest sentAt := time.Now().Truncate(time.Millisecond) - err := store.RecordDigest(userID, date, sentAt) + err := store.RecordDigest(ctx, userID, date, sentAt) if err 
!= nil { t.Fatalf("unexpected error recording digest: %v", err) } // Retrieve from memory cache - retrieved, exists := store.LastDigest(userID, date) + retrieved, exists := store.LastDigest(ctx, userID, date) if !exists { t.Fatal("expected digest to exist in memory cache") } @@ -323,7 +324,7 @@ func TestDatastoreStore_DigestOperations(t *testing.T) { store.memory = NewMemoryStore() // Retrieve from Datastore - retrieved, exists = store.LastDigest(userID, date) + retrieved, exists = store.LastDigest(ctx, userID, date) if !exists { t.Fatal("expected digest to exist in Datastore") } @@ -334,6 +335,7 @@ func TestDatastoreStore_DigestOperations(t *testing.T) { } func TestDatastoreStore_EventDeduplication(t *testing.T) { + ctx := context.Background() client, cleanup := datastore.NewMockClient(t) defer cleanup() @@ -348,18 +350,18 @@ func TestDatastoreStore_EventDeduplication(t *testing.T) { eventKey := "webhook-12345" // Test non-existent event - if store.WasProcessed(eventKey) { + if store.WasProcessed(ctx, eventKey) { t.Error("expected event to not be processed") } // Mark event as processed - err := store.MarkProcessed(eventKey, 24*time.Hour) + err := store.MarkProcessed(ctx, eventKey, 24*time.Hour) if err != nil { t.Fatalf("unexpected error marking event: %v", err) } // Check memory cache immediately - if !store.WasProcessed(eventKey) { + if !store.WasProcessed(ctx, eventKey) { t.Error("expected event to be processed in memory cache") } @@ -370,18 +372,19 @@ func TestDatastoreStore_EventDeduplication(t *testing.T) { store.memory = NewMemoryStore() // Check Datastore - if !store.WasProcessed(eventKey) { + if !store.WasProcessed(ctx, eventKey) { t.Error("expected event to be processed in Datastore") } // Try to mark again - should return ErrAlreadyProcessed - err = store.MarkProcessed(eventKey, 24*time.Hour) - if err != ErrAlreadyProcessed { + err = store.MarkProcessed(ctx, eventKey, 24*time.Hour) + if !errors.Is(err, ErrAlreadyProcessed) { t.Errorf("expected ErrAlreadyProcessed, got %v", err) } } func TestDatastoreStore_NotificationTracking(t *testing.T) { + ctx := context.Background() client, cleanup := datastore.NewMockClient(t) defer cleanup() @@ -396,14 +399,14 @@ func TestDatastoreStore_NotificationTracking(t *testing.T) { prURL := "https://github.com/test/repo/pull/123" // Test non-existent notification - lastNotif := store.LastNotification(prURL) + lastNotif := store.LastNotification(ctx, prURL) if !lastNotif.IsZero() { t.Error("expected zero time for non-existent notification") } // Record notification notifiedAt := time.Now().Truncate(time.Millisecond) - err := store.RecordNotification(prURL, notifiedAt) + err := store.RecordNotification(ctx, prURL, notifiedAt) if err != nil { t.Fatalf("unexpected error recording notification: %v", err) } @@ -412,7 +415,7 @@ func TestDatastoreStore_NotificationTracking(t *testing.T) { time.Sleep(100 * time.Millisecond) // Retrieve from Datastore - retrieved := store.LastNotification(prURL) + retrieved := store.LastNotification(ctx, prURL) if retrieved.IsZero() { t.Fatal("expected non-zero time from Datastore") } @@ -423,6 +426,7 @@ func TestDatastoreStore_NotificationTracking(t *testing.T) { } func TestDatastoreStore_DisabledMode(t *testing.T) { + ctx := context.Background() // Create store in disabled mode (no Datastore client) store := &DatastoreStore{ ds: nil, @@ -441,12 +445,12 @@ func TestDatastoreStore_DisabledMode(t *testing.T) { LastEventTime: time.Now(), } - err := store.SaveThread("owner", "repo", 123, "C123", threadInfo) + err := 
store.SaveThread(ctx, "owner", "repo", 123, "C123", threadInfo) if err != nil { t.Fatalf("unexpected error in disabled mode: %v", err) } - retrieved, exists := store.Thread("owner", "repo", 123, "C123") + retrieved, exists := store.Thread(ctx, "owner", "repo", 123, "C123") if !exists { t.Fatal("expected thread to exist in memory") } @@ -457,6 +461,7 @@ func TestDatastoreStore_DisabledMode(t *testing.T) { } func TestDatastoreStore_Cleanup(t *testing.T) { + ctx := context.Background() client, cleanup := datastore.NewMockClient(t) defer cleanup() @@ -473,7 +478,7 @@ func TestDatastoreStore_Cleanup(t *testing.T) { store.memory.threads[threadKey("owner", "repo", 1, "C123")] = ThreadInfo{UpdatedAt: oldTime} // Run cleanup - err := store.Cleanup() + err := store.Cleanup(ctx) if err != nil { t.Fatalf("unexpected error during cleanup: %v", err) } @@ -514,6 +519,7 @@ func TestDatastoreStore_Close(t *testing.T) { } func TestDatastoreStore_MemoryFirstFallback(t *testing.T) { + ctx := context.Background() client, cleanup := datastore.NewMockClient(t) defer cleanup() @@ -533,13 +539,13 @@ func TestDatastoreStore_MemoryFirstFallback(t *testing.T) { LastEventTime: time.Now(), } - if err := store.SaveThread("owner", "repo", 123, "C123", threadInfo); err != nil { + if err := store.SaveThread(ctx, "owner", "repo", 123, "C123", threadInfo); err != nil { t.Fatalf("failed to save thread: %v", err) } // Immediate retrieval should hit memory cache (fast path) start := time.Now() - retrieved, exists := store.Thread("owner", "repo", 123, "C123") + retrieved, exists := store.Thread(ctx, "owner", "repo", 123, "C123") elapsed := time.Since(start) if !exists { @@ -562,6 +568,7 @@ func TestDatastoreStore_MemoryFirstFallback(t *testing.T) { } func TestDatastoreStore_PendingDMOperations(t *testing.T) { + ctx := context.Background() client, cleanup := datastore.NewMockClient(t) defer cleanup() @@ -574,7 +581,7 @@ func TestDatastoreStore_PendingDMOperations(t *testing.T) { defer func() { _ = store.Close() }() // Test retrieval when no pending DMs exist - pending, err := store.PendingDMs(time.Now()) + pending, err := store.PendingDMs(ctx, time.Now()) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -603,7 +610,7 @@ func TestDatastoreStore_PendingDMOperations(t *testing.T) { SendAfter: now.Add(-5 * time.Minute), // 5 minutes ago - ready to send } - err = store.QueuePendingDM(dm1) + err = store.QueuePendingDM(ctx, &dm1) if err != nil { t.Fatalf("unexpected error queueing DM: %v", err) } @@ -628,13 +635,13 @@ func TestDatastoreStore_PendingDMOperations(t *testing.T) { SendAfter: now.Add(10 * time.Minute), // 10 minutes from now - not ready yet } - err = store.QueuePendingDM(dm2) + err = store.QueuePendingDM(ctx, &dm2) if err != nil { t.Fatalf("unexpected error queueing second DM: %v", err) } // Get pending DMs from memory cache (fast path) - pending, err = store.PendingDMs(now) + pending, err = store.PendingDMs(ctx, now) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -658,7 +665,7 @@ func TestDatastoreStore_PendingDMOperations(t *testing.T) { // Note: The mock Datastore may return all DMs regardless of filter // In production, the filter would work correctly future := now.Add(15 * time.Minute) - pending, err = store.PendingDMs(future) + pending, err = store.PendingDMs(ctx, future) if err != nil { t.Fatalf("unexpected error getting pending DMs from Datastore: %v", err) } @@ -681,7 +688,7 @@ func TestDatastoreStore_PendingDMOperations(t *testing.T) { } // Remove dm-001 
- err = store.RemovePendingDM("dm-001") + err = store.RemovePendingDM(ctx, "dm-001") if err != nil { t.Fatalf("unexpected error removing DM: %v", err) } @@ -694,7 +701,7 @@ func TestDatastoreStore_PendingDMOperations(t *testing.T) { // Now only dm-002 should remain in Datastore (query in future to catch it) futureLater := now.Add(15 * time.Minute) - pending, err = store.PendingDMs(futureLater) + pending, err = store.PendingDMs(ctx, futureLater) if err != nil { t.Fatalf("unexpected error getting pending DMs after removal: %v", err) } @@ -708,13 +715,14 @@ func TestDatastoreStore_PendingDMOperations(t *testing.T) { } // Remove non-existent DM should not error - err = store.RemovePendingDM("dm-999") + err = store.RemovePendingDM(ctx, "dm-999") if err != nil { t.Errorf("unexpected error removing non-existent DM: %v", err) } } func TestDatastoreStore_PendingDMDisabledMode(t *testing.T) { + ctx := context.Background() // Create store in disabled mode (no Datastore client) store := &DatastoreStore{ ds: nil, @@ -734,13 +742,13 @@ func TestDatastoreStore_PendingDMDisabledMode(t *testing.T) { SendAfter: now.Add(-5 * time.Minute), } - err := store.QueuePendingDM(dm) + err := store.QueuePendingDM(ctx, &dm) if err != nil { t.Fatalf("unexpected error queueing DM in disabled mode: %v", err) } // Get pending DMs from memory - pending, err := store.PendingDMs(now) + pending, err := store.PendingDMs(ctx, now) if err != nil { t.Fatalf("unexpected error getting pending DMs in disabled mode: %v", err) } @@ -750,13 +758,13 @@ func TestDatastoreStore_PendingDMDisabledMode(t *testing.T) { } // Remove DM - err = store.RemovePendingDM("dm-001") + err = store.RemovePendingDM(ctx, "dm-001") if err != nil { t.Fatalf("unexpected error removing DM in disabled mode: %v", err) } // Verify removed - pending, err = store.PendingDMs(now) + pending, err = store.PendingDMs(ctx, now) if err != nil { t.Fatalf("unexpected error getting pending DMs after removal: %v", err) } @@ -767,6 +775,7 @@ func TestDatastoreStore_PendingDMDisabledMode(t *testing.T) { } func TestDatastoreStore_PendingDMCleanup(t *testing.T) { + ctx := context.Background() client, cleanup := datastore.NewMockClient(t) defer cleanup() @@ -789,7 +798,7 @@ func TestDatastoreStore_PendingDMCleanup(t *testing.T) { QueuedAt: oldTime, SendAfter: oldTime, } - if err := store.QueuePendingDM(oldDM); err != nil { + if err := store.QueuePendingDM(ctx, &oldDM); err != nil { t.Fatalf("failed to queue old DM: %v", err) } @@ -801,7 +810,7 @@ func TestDatastoreStore_PendingDMCleanup(t *testing.T) { QueuedAt: now, SendAfter: now.Add(10 * time.Minute), } - if err := store.QueuePendingDM(recentDM); err != nil { + if err := store.QueuePendingDM(ctx, &recentDM); err != nil { t.Fatalf("failed to queue recent DM: %v", err) } @@ -809,13 +818,13 @@ func TestDatastoreStore_PendingDMCleanup(t *testing.T) { time.Sleep(200 * time.Millisecond) // Run cleanup - err := store.Cleanup() + err := store.Cleanup(ctx) if err != nil { t.Fatalf("unexpected error during cleanup: %v", err) } // Verify old DM was removed from memory - pending, err := store.PendingDMs(now.Add(24 * time.Hour)) + pending, err := store.PendingDMs(ctx, now.Add(24*time.Hour)) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } diff --git a/pkg/state/json.go b/pkg/state/json.go index e256e22..0aa70f6 100644 --- a/pkg/state/json.go +++ b/pkg/state/json.go @@ -1,6 +1,7 @@ package state import ( + "context" "encoding/json" "fmt" "log/slog" @@ -108,7 +109,7 @@ func digestKey(userID, date string) string { } 
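From this point the same ctx-first signatures are applied to JSONStore, so the memory, JSON, and Datastore backends keep satisfying one Store interface. A trimmed, illustrative slice of what that shared surface presumably looks like after this patch (a subset of methods; signatures follow the hunks, the grouping is not the package's own):

package example

import (
	"context"
	"time"
)

type ThreadInfo struct {
	ThreadTS  string
	ChannelID string
}

// store is an illustrative subset of the state.Store surface after the
// ctx-first change; every call accepts the caller's context so backends can
// bound their I/O with timeouts derived from it.
type store interface {
	Thread(ctx context.Context, owner, repo string, number int, channelID string) (ThreadInfo, bool)
	SaveThread(ctx context.Context, owner, repo string, number int, channelID string, info ThreadInfo) error
	LastDM(ctx context.Context, userID, prURL string) (time.Time, bool)
	RecordDM(ctx context.Context, userID, prURL string, sentAt time.Time) error
	WasProcessed(ctx context.Context, eventKey string) bool
	MarkProcessed(ctx context.Context, eventKey string, ttl time.Duration) error
	Cleanup(ctx context.Context) error
}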
// Thread retrieves thread information for a PR. -func (s *JSONStore) Thread(owner, repo string, number int, channelID string) (ThreadInfo, bool) { +func (s *JSONStore) Thread(ctx context.Context, owner, repo string, number int, channelID string) (ThreadInfo, bool) { s.mu.RLock() defer s.mu.RUnlock() key := threadKey(owner, repo, number, channelID) @@ -117,7 +118,7 @@ func (s *JSONStore) Thread(owner, repo string, number int, channelID string) (Th } // SaveThread saves thread information for a PR. -func (s *JSONStore) SaveThread(owner, repo string, number int, channelID string, info ThreadInfo) error { +func (s *JSONStore) SaveThread(ctx context.Context, owner, repo string, number int, channelID string, info ThreadInfo) error { s.mu.Lock() defer s.mu.Unlock() key := threadKey(owner, repo, number, channelID) @@ -133,7 +134,7 @@ func (s *JSONStore) SaveThread(owner, repo string, number int, channelID string, } // LastDM retrieves the last DM timestamp for a user and PR. -func (s *JSONStore) LastDM(userID, prURL string) (time.Time, bool) { +func (s *JSONStore) LastDM(ctx context.Context, userID, prURL string) (time.Time, bool) { s.mu.RLock() defer s.mu.RUnlock() key := dmKey(userID, prURL) @@ -142,7 +143,7 @@ func (s *JSONStore) LastDM(userID, prURL string) (time.Time, bool) { } // RecordDM records when a DM was sent to a user about a PR. -func (s *JSONStore) RecordDM(userID, prURL string, sentAt time.Time) error { +func (s *JSONStore) RecordDM(ctx context.Context, userID, prURL string, sentAt time.Time) error { s.mu.Lock() defer s.mu.Unlock() key := dmKey(userID, prURL) @@ -157,7 +158,7 @@ func (s *JSONStore) RecordDM(userID, prURL string, sentAt time.Time) error { } // DMMessage retrieves DM message information for a user and PR. -func (s *JSONStore) DMMessage(userID, prURL string) (DMInfo, bool) { +func (s *JSONStore) DMMessage(ctx context.Context, userID, prURL string) (DMInfo, bool) { s.mu.RLock() defer s.mu.RUnlock() key := dmKey(userID, prURL) @@ -166,7 +167,7 @@ func (s *JSONStore) DMMessage(userID, prURL string) (DMInfo, bool) { } // SaveDMMessage saves DM message information for a user and PR. -func (s *JSONStore) SaveDMMessage(userID, prURL string, info DMInfo) error { +func (s *JSONStore) SaveDMMessage(ctx context.Context, userID, prURL string, info DMInfo) error { s.mu.Lock() defer s.mu.Unlock() key := dmKey(userID, prURL) @@ -182,7 +183,7 @@ func (s *JSONStore) SaveDMMessage(userID, prURL string, info DMInfo) error { } // ListDMUsers returns all user IDs who have received DMs for a given PR. -func (s *JSONStore) ListDMUsers(prURL string) []string { +func (s *JSONStore) ListDMUsers(ctx context.Context, prURL string) []string { s.mu.RLock() defer s.mu.RUnlock() @@ -203,7 +204,7 @@ func (s *JSONStore) ListDMUsers(prURL string) []string { } // LastDigest retrieves the last digest timestamp for a user and date. -func (s *JSONStore) LastDigest(userID, date string) (time.Time, bool) { +func (s *JSONStore) LastDigest(ctx context.Context, userID, date string) (time.Time, bool) { s.mu.RLock() defer s.mu.RUnlock() key := digestKey(userID, date) @@ -212,7 +213,7 @@ func (s *JSONStore) LastDigest(userID, date string) (time.Time, bool) { } // RecordDigest records when a digest was sent to a user. 
-func (s *JSONStore) RecordDigest(userID, date string, sentAt time.Time) error { +func (s *JSONStore) RecordDigest(ctx context.Context, userID, date string, sentAt time.Time) error { s.mu.Lock() defer s.mu.Unlock() key := digestKey(userID, date) @@ -231,7 +232,7 @@ func (s *JSONStore) RecordDigest(userID, date string, sentAt time.Time) error { } // WasProcessed checks if an event was already processed. -func (s *JSONStore) WasProcessed(eventKey string) bool { +func (s *JSONStore) WasProcessed(ctx context.Context, eventKey string) bool { s.mu.RLock() defer s.mu.RUnlock() _, exists := s.events[eventKey] @@ -241,7 +242,7 @@ func (s *JSONStore) WasProcessed(eventKey string) bool { // MarkProcessed marks an event as processed with an optional TTL. // Note: TTL is currently ignored - cleanup uses hardcoded 24-hour retention. // This could be enhanced in the future to support per-event TTL. -func (s *JSONStore) MarkProcessed(eventKey string, _ time.Duration) error { +func (s *JSONStore) MarkProcessed(ctx context.Context, eventKey string, _ time.Duration) error { s.mu.Lock() defer s.mu.Unlock() // Always save to memory (primary storage) @@ -255,14 +256,14 @@ func (s *JSONStore) MarkProcessed(eventKey string, _ time.Duration) error { } // LastNotification retrieves the last notification timestamp for a PR. -func (s *JSONStore) LastNotification(prURL string) time.Time { +func (s *JSONStore) LastNotification(ctx context.Context, prURL string) time.Time { s.mu.RLock() defer s.mu.RUnlock() return s.notifications[prURL] } // RecordNotification records when a notification was sent for a PR. -func (s *JSONStore) RecordNotification(prURL string, notifiedAt time.Time) error { +func (s *JSONStore) RecordNotification(ctx context.Context, prURL string, notifiedAt time.Time) error { s.mu.Lock() defer s.mu.Unlock() // Always save to memory (primary storage) @@ -276,7 +277,7 @@ func (s *JSONStore) RecordNotification(prURL string, notifiedAt time.Time) error } // Cleanup removes old data from all maps. -func (s *JSONStore) Cleanup() error { +func (s *JSONStore) Cleanup(ctx context.Context) error { s.mu.Lock() defer s.mu.Unlock() @@ -329,7 +330,8 @@ func (s *JSONStore) Cleanup() error { // Clean up old pending DMs (>7 days or already past send time by >1 day) cleanedPendingDMs := 0 - for key, dm := range s.pendingDMs { + for key := range s.pendingDMs { + dm := s.pendingDMs[key] if now.Sub(dm.QueuedAt) > 7*24*time.Hour || now.Sub(dm.SendAfter) > 24*time.Hour { delete(s.pendingDMs, key) cleanedPendingDMs++ @@ -473,10 +475,10 @@ func (s *JSONStore) load() error { } // QueuePendingDM adds a DM to the pending queue. -func (s *JSONStore) QueuePendingDM(dm PendingDM) error { +func (s *JSONStore) QueuePendingDM(ctx context.Context, dm *PendingDM) error { s.mu.Lock() defer s.mu.Unlock() - s.pendingDMs[dm.ID] = dm + s.pendingDMs[dm.ID] = *dm s.modified = true // Best-effort persistence to JSON file for restart recovery if err := s.save(); err != nil { @@ -486,12 +488,13 @@ func (s *JSONStore) QueuePendingDM(dm PendingDM) error { } // PendingDMs returns all pending DMs that should be sent (SendAfter <= before). 
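The loops in Cleanup above and PendingDMs below range by key or index and fetch the element in the body, the usual fix for copying a large struct on every iteration. A tiny sketch of the two shapes (types and sizes here are made up for illustration):

package example

type bigValue struct {
	payload [4096]byte
}

// By-index ranging lets the body take a pointer to the slice element, so the
// 4 KB payload is never copied per iteration.
func sumSlice(items []bigValue) int {
	total := 0
	for i := range items {
		item := &items[i]
		total += int(item.payload[0])
	}
	return total
}

// Map elements are not addressable, so the equivalent rewrite just moves the
// copy into the body; it reads like the slice form and satisfies the same
// rangeValCopy-style lint.
func sumMap(m map[string]bigValue) int {
	total := 0
	for k := range m {
		v := m[k]
		total += int(v.payload[0])
	}
	return total
}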
-func (s *JSONStore) PendingDMs(before time.Time) ([]PendingDM, error) { +func (s *JSONStore) PendingDMs(ctx context.Context, before time.Time) ([]PendingDM, error) { s.mu.RLock() defer s.mu.RUnlock() var result []PendingDM - for _, dm := range s.pendingDMs { + for key := range s.pendingDMs { + dm := s.pendingDMs[key] if !dm.SendAfter.After(before) { result = append(result, dm) } @@ -500,7 +503,7 @@ func (s *JSONStore) PendingDMs(before time.Time) ([]PendingDM, error) { } // RemovePendingDM removes a pending DM from the queue. -func (s *JSONStore) RemovePendingDM(id string) error { +func (s *JSONStore) RemovePendingDM(ctx context.Context, id string) error { s.mu.Lock() defer s.mu.Unlock() delete(s.pendingDMs, id) diff --git a/pkg/state/json_test.go b/pkg/state/json_test.go index dd55563..29dca34 100644 --- a/pkg/state/json_test.go +++ b/pkg/state/json_test.go @@ -1,6 +1,7 @@ package state import ( + "context" "os" "path/filepath" "testing" @@ -9,26 +10,10 @@ import ( func TestNewJSONStore(t *testing.T) { // Use temp dir for test - tempDir, err := os.MkdirTemp("", "slacker-state-test-*") - if err != nil { - t.Fatalf("failed to create temp dir: %v", err) - } - //nolint:errcheck // Test cleanup error can be ignored - defer func() { _ = os.RemoveAll(tempDir) }() + tempDir := t.TempDir() // Override cache dir for testing - oldCacheDir := os.Getenv("XDG_CACHE_HOME") - //nolint:errcheck // Test setup error can be ignored - _ = os.Setenv("XDG_CACHE_HOME", tempDir) - defer func() { - if oldCacheDir != "" { - //nolint:errcheck // Test setup error can be ignored - _ = os.Setenv("XDG_CACHE_HOME", oldCacheDir) - } else { - //nolint:errcheck // Test cleanup error can be ignored - _ = os.Unsetenv("XDG_CACHE_HOME") - } - }() + t.Setenv("XDG_CACHE_HOME", tempDir) store, err := NewJSONStore() if err != nil { @@ -45,12 +30,8 @@ func TestNewJSONStore(t *testing.T) { } func TestJSONStore_ThreadOperations(t *testing.T) { - tempDir, err := os.MkdirTemp("", "slacker-state-test-*") - if err != nil { - t.Fatalf("failed to create temp dir: %v", err) - } - //nolint:errcheck // Test cleanup error can be ignored - defer func() { _ = os.RemoveAll(tempDir) }() + ctx := context.Background() + tempDir := t.TempDir() store := &JSONStore{ baseDir: tempDir, @@ -63,7 +44,7 @@ func TestJSONStore_ThreadOperations(t *testing.T) { } // Test non-existent thread - _, exists := store.Thread("owner", "repo", 123, "C123") + _, exists := store.Thread(ctx, "owner", "repo", 123, "C123") if exists { t.Error("expected thread to not exist") } @@ -76,13 +57,12 @@ func TestJSONStore_ThreadOperations(t *testing.T) { MessageText: "Test PR", } - err = store.SaveThread("owner", "repo", 123, "C123", threadInfo) - if err != nil { + if err := store.SaveThread(ctx, "owner", "repo", 123, "C123", threadInfo); err != nil { t.Fatalf("unexpected error saving thread: %v", err) } // Retrieve saved thread - retrieved, exists := store.Thread("owner", "repo", 123, "C123") + retrieved, exists := store.Thread(ctx, "owner", "repo", 123, "C123") if !exists { t.Fatal("expected thread to exist") } @@ -93,8 +73,9 @@ func TestJSONStore_ThreadOperations(t *testing.T) { } func TestJSONStore_DMOperations(t *testing.T) { + ctx := context.Background() store := &JSONStore{ - baseDir: os.TempDir(), + baseDir: t.TempDir(), threads: make(map[string]ThreadInfo), dms: make(map[string]time.Time), dmMessages: make(map[string]DMInfo), @@ -104,20 +85,20 @@ func TestJSONStore_DMOperations(t *testing.T) { } // Test non-existent DM - _, exists := store.LastDM("U001", 
"https://github.com/test/repo/pull/123") + _, exists := store.LastDM(ctx, "U001", "https://github.com/test/repo/pull/123") if exists { t.Error("expected DM to not exist") } // Record DM sentAt := time.Now() - err := store.RecordDM("U001", "https://github.com/test/repo/pull/123", sentAt) + err := store.RecordDM(ctx, "U001", "https://github.com/test/repo/pull/123", sentAt) if err != nil { t.Fatalf("unexpected error recording DM: %v", err) } // Retrieve recorded DM - retrieved, exists := store.LastDM("U001", "https://github.com/test/repo/pull/123") + retrieved, exists := store.LastDM(ctx, "U001", "https://github.com/test/repo/pull/123") if !exists { t.Fatal("expected DM to exist") } @@ -128,12 +109,8 @@ func TestJSONStore_DMOperations(t *testing.T) { } func TestJSONStore_Persistence(t *testing.T) { - tempDir, err := os.MkdirTemp("", "slacker-state-test-*") - if err != nil { - t.Fatalf("failed to create temp dir: %v", err) - } - //nolint:errcheck // Test cleanup error can be ignored - defer func() { _ = os.RemoveAll(tempDir) }() + ctx := context.Background() + tempDir := t.TempDir() // Create first store instance store1 := &JSONStore{ @@ -153,13 +130,12 @@ func TestJSONStore_Persistence(t *testing.T) { LastState: "awaiting_review", MessageText: "Test PR", } - if err := store1.SaveThread("owner", "repo", 123, "C123", threadInfo); err != nil { + if err := store1.SaveThread(ctx, "owner", "repo", 123, "C123", threadInfo); err != nil { t.Fatalf("failed to save thread: %v", err) } // Save to disk - err = store1.save() - if err != nil { + if err := store1.save(); err != nil { t.Fatalf("unexpected error saving: %v", err) } @@ -175,13 +151,12 @@ func TestJSONStore_Persistence(t *testing.T) { } // Load from disk - err = store2.load() - if err != nil { + if err := store2.load(); err != nil { t.Fatalf("unexpected error loading: %v", err) } // Verify data persisted - retrieved, exists := store2.Thread("owner", "repo", 123, "C123") + retrieved, exists := store2.Thread(ctx, "owner", "repo", 123, "C123") if !exists { t.Fatal("expected thread to exist after reload") } @@ -192,8 +167,9 @@ func TestJSONStore_Persistence(t *testing.T) { } func TestJSONStore_ListDMUsers(t *testing.T) { + ctx := context.Background() store := &JSONStore{ - baseDir: os.TempDir(), + baseDir: t.TempDir(), threads: make(map[string]ThreadInfo), dms: make(map[string]time.Time), dmMessages: make(map[string]DMInfo), @@ -212,30 +188,26 @@ func TestJSONStore_ListDMUsers(t *testing.T) { MessageText: "Test DM", } - if err := store.SaveDMMessage("U001", prURL, dmInfo); err != nil { + if err := store.SaveDMMessage(ctx, "U001", prURL, dmInfo); err != nil { t.Fatalf("failed to save DM for U001: %v", err) } - if err := store.SaveDMMessage("U002", prURL, dmInfo); err != nil { + if err := store.SaveDMMessage(ctx, "U002", prURL, dmInfo); err != nil { t.Fatalf("failed to save DM for U002: %v", err) } - if err := store.SaveDMMessage("U003", prURL, dmInfo); err != nil { + if err := store.SaveDMMessage(ctx, "U003", prURL, dmInfo); err != nil { t.Fatalf("failed to save DM for U003: %v", err) } // List users - users := store.ListDMUsers(prURL) + users := store.ListDMUsers(ctx, prURL) if len(users) != 3 { t.Fatalf("expected 3 users, got %d", len(users)) } } func TestJSONStore_Cleanup(t *testing.T) { - tempDir, err := os.MkdirTemp("", "slacker-state-test-*") - if err != nil { - t.Fatalf("failed to create temp dir: %v", err) - } - //nolint:errcheck // Test cleanup error can be ignored - defer func() { _ = os.RemoveAll(tempDir) }() + ctx := context.Background() 
+ tempDir := t.TempDir() store := &JSONStore{ baseDir: tempDir, @@ -256,8 +228,7 @@ func TestJSONStore_Cleanup(t *testing.T) { store.threads[threadKey("owner", "repo", 2, "C456")] = ThreadInfo{UpdatedAt: recentTime} // Run cleanup - err = store.Cleanup() - if err != nil { + if err := store.Cleanup(ctx); err != nil { t.Fatalf("unexpected error during cleanup: %v", err) } @@ -268,12 +239,7 @@ func TestJSONStore_Cleanup(t *testing.T) { } func TestJSONStore_SaveLoad_RoundTrip(t *testing.T) { - tempDir, err := os.MkdirTemp("", "slacker-state-test-*") - if err != nil { - t.Fatalf("failed to create temp dir: %v", err) - } - //nolint:errcheck // Test cleanup error can be ignored - defer func() { _ = os.RemoveAll(tempDir) }() + tempDir := t.TempDir() store := &JSONStore{ baseDir: tempDir, @@ -294,8 +260,7 @@ func TestJSONStore_SaveLoad_RoundTrip(t *testing.T) { store.modified = true // Mark as modified so save() actually writes // Save - err = store.save() - if err != nil { + if err := store.save(); err != nil { t.Fatalf("unexpected error saving: %v", err) } @@ -316,8 +281,7 @@ func TestJSONStore_SaveLoad_RoundTrip(t *testing.T) { notifications: make(map[string]time.Time), } - err = store2.load() - if err != nil { + if err := store2.load(); err != nil { t.Fatalf("unexpected error loading: %v", err) } @@ -334,12 +298,8 @@ func TestJSONStore_SaveLoad_RoundTrip(t *testing.T) { } func TestJSONStore_PendingDMOperations(t *testing.T) { - tempDir, err := os.MkdirTemp("", "slacker-state-test-*") - if err != nil { - t.Fatalf("failed to create temp dir: %v", err) - } - //nolint:errcheck // Test cleanup error can be ignored - defer func() { _ = os.RemoveAll(tempDir) }() + ctx := context.Background() + tempDir := t.TempDir() store := &JSONStore{ baseDir: tempDir, @@ -353,7 +313,7 @@ func TestJSONStore_PendingDMOperations(t *testing.T) { } // Test retrieval when no pending DMs exist - pending, err := store.PendingDMs(time.Now()) + pending, err := store.PendingDMs(ctx, time.Now()) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -382,8 +342,7 @@ func TestJSONStore_PendingDMOperations(t *testing.T) { SendAfter: now.Add(-5 * time.Minute), // 5 minutes ago - ready to send } - err = store.QueuePendingDM(dm1) - if err != nil { + if err := store.QueuePendingDM(ctx, &dm1); err != nil { t.Fatalf("unexpected error queueing DM: %v", err) } @@ -407,13 +366,12 @@ func TestJSONStore_PendingDMOperations(t *testing.T) { SendAfter: now.Add(10 * time.Minute), // 10 minutes from now - not ready yet } - err = store.QueuePendingDM(dm2) - if err != nil { + if err := store.QueuePendingDM(ctx, &dm2); err != nil { t.Fatalf("unexpected error queueing second DM: %v", err) } // Get pending DMs that are ready to send - pending, err = store.PendingDMs(now) + pending, err = store.PendingDMs(ctx, now) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -435,7 +393,7 @@ func TestJSONStore_PendingDMOperations(t *testing.T) { // Get pending DMs 15 minutes from now - both should be ready future := now.Add(15 * time.Minute) - pending, err = store.PendingDMs(future) + pending, err = store.PendingDMs(ctx, future) if err != nil { t.Fatalf("unexpected error getting future pending DMs: %v", err) } @@ -445,13 +403,12 @@ func TestJSONStore_PendingDMOperations(t *testing.T) { } // Remove one DM - err = store.RemovePendingDM("dm-001") - if err != nil { + if err := store.RemovePendingDM(ctx, "dm-001"); err != nil { t.Fatalf("unexpected error removing DM: %v", err) } // Now only dm2 should remain - 
pending, err = store.PendingDMs(future) + pending, err = store.PendingDMs(ctx, future) if err != nil { t.Fatalf("unexpected error getting pending DMs after removal: %v", err) } @@ -465,19 +422,14 @@ func TestJSONStore_PendingDMOperations(t *testing.T) { } // Remove non-existent DM should not error - err = store.RemovePendingDM("dm-999") - if err != nil { + if err := store.RemovePendingDM(ctx, "dm-999"); err != nil { t.Errorf("unexpected error removing non-existent DM: %v", err) } } func TestJSONStore_PendingDMPersistence(t *testing.T) { - tempDir, err := os.MkdirTemp("", "slacker-state-test-*") - if err != nil { - t.Fatalf("failed to create temp dir: %v", err) - } - //nolint:errcheck // Test cleanup error can be ignored - defer func() { _ = os.RemoveAll(tempDir) }() + ctx := context.Background() + tempDir := t.TempDir() // Create first store instance store1 := &JSONStore{ @@ -508,16 +460,15 @@ func TestJSONStore_PendingDMPersistence(t *testing.T) { SendAfter: now.Add(10 * time.Minute), } - if err := store1.QueuePendingDM(dm1); err != nil { + if err := store1.QueuePendingDM(ctx, &dm1); err != nil { t.Fatalf("failed to queue dm1: %v", err) } - if err := store1.QueuePendingDM(dm2); err != nil { + if err := store1.QueuePendingDM(ctx, &dm2); err != nil { t.Fatalf("failed to queue dm2: %v", err) } // Save to disk (happens automatically in QueuePendingDM via modified flag) - err = store1.save() - if err != nil { + if err := store1.save(); err != nil { t.Fatalf("unexpected error saving: %v", err) } @@ -534,14 +485,13 @@ func TestJSONStore_PendingDMPersistence(t *testing.T) { } // Load from disk - err = store2.load() - if err != nil { + if err := store2.load(); err != nil { t.Fatalf("unexpected error loading: %v", err) } // Verify pending DMs persisted future := now.Add(15 * time.Minute) - pending, err := store2.PendingDMs(future) + pending, err := store2.PendingDMs(ctx, future) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -565,12 +515,8 @@ func TestJSONStore_PendingDMPersistence(t *testing.T) { } func TestJSONStore_PendingDMCleanup(t *testing.T) { - tempDir, err := os.MkdirTemp("", "slacker-state-test-*") - if err != nil { - t.Fatalf("failed to create temp dir: %v", err) - } - //nolint:errcheck // Test cleanup error can be ignored - defer func() { _ = os.RemoveAll(tempDir) }() + ctx := context.Background() + tempDir := t.TempDir() store := &JSONStore{ baseDir: tempDir, @@ -594,7 +540,7 @@ func TestJSONStore_PendingDMCleanup(t *testing.T) { QueuedAt: oldTime, SendAfter: oldTime, } - if err := store.QueuePendingDM(oldDM); err != nil { + if err := store.QueuePendingDM(ctx, &oldDM); err != nil { t.Fatalf("failed to queue old DM: %v", err) } @@ -606,18 +552,17 @@ func TestJSONStore_PendingDMCleanup(t *testing.T) { QueuedAt: now, SendAfter: now.Add(10 * time.Minute), } - if err := store.QueuePendingDM(recentDM); err != nil { + if err := store.QueuePendingDM(ctx, &recentDM); err != nil { t.Fatalf("failed to queue recent DM: %v", err) } // Run cleanup - err = store.Cleanup() - if err != nil { + if err := store.Cleanup(ctx); err != nil { t.Fatalf("unexpected error during cleanup: %v", err) } // Verify old DM was removed - pending, err := store.PendingDMs(now.Add(24 * time.Hour)) + pending, err := store.PendingDMs(ctx, now.Add(24*time.Hour)) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -632,8 +577,9 @@ func TestJSONStore_PendingDMCleanup(t *testing.T) { } func TestJSONStore_DMMessage(t *testing.T) { + ctx := context.Background() store := 
&JSONStore{ - baseDir: os.TempDir(), + baseDir: t.TempDir(), threads: make(map[string]ThreadInfo), dms: make(map[string]time.Time), dmMessages: make(map[string]DMInfo), @@ -647,7 +593,7 @@ func TestJSONStore_DMMessage(t *testing.T) { userID := "U001" // Test non-existent DM message - _, exists := store.DMMessage(userID, prURL) + _, exists := store.DMMessage(ctx, userID, prURL) if exists { t.Error("expected DM message to not exist") } @@ -659,12 +605,12 @@ func TestJSONStore_DMMessage(t *testing.T) { MessageTS: "1234567890.123456", MessageText: "Test DM message", } - if err := store.SaveDMMessage(userID, prURL, dmInfo); err != nil { + if err := store.SaveDMMessage(ctx, userID, prURL, dmInfo); err != nil { t.Fatalf("failed to save DM message: %v", err) } // Retrieve saved DM message - retrieved, exists := store.DMMessage(userID, prURL) + retrieved, exists := store.DMMessage(ctx, userID, prURL) if !exists { t.Fatal("expected DM message to exist") } @@ -678,8 +624,9 @@ func TestJSONStore_DMMessage(t *testing.T) { } func TestJSONStore_DigestOperations(t *testing.T) { + ctx := context.Background() store := &JSONStore{ - baseDir: os.TempDir(), + baseDir: t.TempDir(), threads: make(map[string]ThreadInfo), dms: make(map[string]time.Time), dmMessages: make(map[string]DMInfo), @@ -693,20 +640,20 @@ func TestJSONStore_DigestOperations(t *testing.T) { date := "2025-10-30" // Test non-existent digest - _, exists := store.LastDigest(userID, date) + _, exists := store.LastDigest(ctx, userID, date) if exists { t.Error("expected digest to not exist") } // Record digest sentAt := time.Now() - err := store.RecordDigest(userID, date, sentAt) + err := store.RecordDigest(ctx, userID, date, sentAt) if err != nil { t.Fatalf("unexpected error recording digest: %v", err) } // Retrieve digest - retrieved, exists := store.LastDigest(userID, date) + retrieved, exists := store.LastDigest(ctx, userID, date) if !exists { t.Fatal("expected digest to exist") } @@ -717,8 +664,9 @@ func TestJSONStore_DigestOperations(t *testing.T) { } func TestJSONStore_EventProcessing(t *testing.T) { + ctx := context.Background() store := &JSONStore{ - baseDir: os.TempDir(), + baseDir: t.TempDir(), threads: make(map[string]ThreadInfo), dms: make(map[string]time.Time), dmMessages: make(map[string]DMInfo), @@ -731,25 +679,26 @@ func TestJSONStore_EventProcessing(t *testing.T) { eventKey := "pull_request:123:opened" // Test unprocessed event - if store.WasProcessed(eventKey) { + if store.WasProcessed(ctx, eventKey) { t.Error("expected event to not be processed") } // Mark event as processed - err := store.MarkProcessed(eventKey, 24*time.Hour) + err := store.MarkProcessed(ctx, eventKey, 24*time.Hour) if err != nil { t.Fatalf("unexpected error marking event as processed: %v", err) } // Check if event was processed - if !store.WasProcessed(eventKey) { + if !store.WasProcessed(ctx, eventKey) { t.Error("expected event to be processed") } } func TestJSONStore_NotificationOperations(t *testing.T) { + ctx := context.Background() store := &JSONStore{ - baseDir: os.TempDir(), + baseDir: t.TempDir(), threads: make(map[string]ThreadInfo), dms: make(map[string]time.Time), dmMessages: make(map[string]DMInfo), @@ -762,20 +711,20 @@ func TestJSONStore_NotificationOperations(t *testing.T) { prURL := "https://github.com/test/repo/pull/123" // Test non-existent notification (should return zero time) - lastNotif := store.LastNotification(prURL) + lastNotif := store.LastNotification(ctx, prURL) if !lastNotif.IsZero() { t.Error("expected zero time for non-existent 
notification") } // Record notification notifiedAt := time.Now() - err := store.RecordNotification(prURL, notifiedAt) + err := store.RecordNotification(ctx, prURL, notifiedAt) if err != nil { t.Fatalf("unexpected error recording notification: %v", err) } // Retrieve notification - retrieved := store.LastNotification(prURL) + retrieved := store.LastNotification(ctx, prURL) if retrieved.IsZero() { t.Fatal("expected non-zero notification time") } diff --git a/pkg/state/memory.go b/pkg/state/memory.go index 8b3e259..b1610d8 100644 --- a/pkg/state/memory.go +++ b/pkg/state/memory.go @@ -1,6 +1,7 @@ package state import ( + "context" "strings" "sync" "time" @@ -35,7 +36,7 @@ func NewMemoryStore() *MemoryStore { } // Thread retrieves thread information for a PR. -func (s *MemoryStore) Thread(owner, repo string, number int, channelID string) (ThreadInfo, bool) { +func (s *MemoryStore) Thread(ctx context.Context, owner, repo string, number int, channelID string) (ThreadInfo, bool) { s.mu.RLock() defer s.mu.RUnlock() key := threadKey(owner, repo, number, channelID) @@ -44,7 +45,7 @@ func (s *MemoryStore) Thread(owner, repo string, number int, channelID string) ( } // SaveThread saves thread information for a PR. -func (s *MemoryStore) SaveThread(owner, repo string, number int, channelID string, info ThreadInfo) error { +func (s *MemoryStore) SaveThread(ctx context.Context, owner, repo string, number int, channelID string, info ThreadInfo) error { s.mu.Lock() defer s.mu.Unlock() key := threadKey(owner, repo, number, channelID) @@ -54,7 +55,7 @@ func (s *MemoryStore) SaveThread(owner, repo string, number int, channelID strin } // LastDM retrieves the last DM timestamp for a user and PR. -func (s *MemoryStore) LastDM(userID, prURL string) (time.Time, bool) { +func (s *MemoryStore) LastDM(ctx context.Context, userID, prURL string) (time.Time, bool) { s.mu.RLock() defer s.mu.RUnlock() key := dmKey(userID, prURL) @@ -63,7 +64,7 @@ func (s *MemoryStore) LastDM(userID, prURL string) (time.Time, bool) { } // RecordDM records when a DM was sent to a user about a PR. -func (s *MemoryStore) RecordDM(userID, prURL string, sentAt time.Time) error { +func (s *MemoryStore) RecordDM(ctx context.Context, userID, prURL string, sentAt time.Time) error { s.mu.Lock() defer s.mu.Unlock() key := dmKey(userID, prURL) @@ -72,7 +73,7 @@ func (s *MemoryStore) RecordDM(userID, prURL string, sentAt time.Time) error { } // DMMessage retrieves DM message information for a user and PR. -func (s *MemoryStore) DMMessage(userID, prURL string) (DMInfo, bool) { +func (s *MemoryStore) DMMessage(ctx context.Context, userID, prURL string) (DMInfo, bool) { s.mu.RLock() defer s.mu.RUnlock() key := dmKey(userID, prURL) @@ -81,7 +82,7 @@ func (s *MemoryStore) DMMessage(userID, prURL string) (DMInfo, bool) { } // SaveDMMessage saves DM message information for a user and PR. -func (s *MemoryStore) SaveDMMessage(userID, prURL string, info DMInfo) error { +func (s *MemoryStore) SaveDMMessage(ctx context.Context, userID, prURL string, info DMInfo) error { s.mu.Lock() defer s.mu.Unlock() key := dmKey(userID, prURL) @@ -91,7 +92,7 @@ func (s *MemoryStore) SaveDMMessage(userID, prURL string, info DMInfo) error { } // ListDMUsers returns all user IDs who have received DMs for a given PR. 
-func (s *MemoryStore) ListDMUsers(prURL string) []string { +func (s *MemoryStore) ListDMUsers(ctx context.Context, prURL string) []string { s.mu.RLock() defer s.mu.RUnlock() @@ -112,7 +113,7 @@ func (s *MemoryStore) ListDMUsers(prURL string) []string { } // LastDigest retrieves the last digest timestamp for a user and date. -func (s *MemoryStore) LastDigest(userID, date string) (time.Time, bool) { +func (s *MemoryStore) LastDigest(ctx context.Context, userID, date string) (time.Time, bool) { s.mu.RLock() defer s.mu.RUnlock() key := digestKey(userID, date) @@ -121,7 +122,7 @@ func (s *MemoryStore) LastDigest(userID, date string) (time.Time, bool) { } // RecordDigest records when a digest was sent to a user. -func (s *MemoryStore) RecordDigest(userID, date string, sentAt time.Time) error { +func (s *MemoryStore) RecordDigest(ctx context.Context, userID, date string, sentAt time.Time) error { s.mu.Lock() defer s.mu.Unlock() key := digestKey(userID, date) @@ -130,7 +131,7 @@ func (s *MemoryStore) RecordDigest(userID, date string, sentAt time.Time) error } // WasProcessed checks if an event was already processed. -func (s *MemoryStore) WasProcessed(eventKey string) bool { +func (s *MemoryStore) WasProcessed(ctx context.Context, eventKey string) bool { s.mu.RLock() defer s.mu.RUnlock() _, exists := s.events[eventKey] @@ -138,7 +139,7 @@ func (s *MemoryStore) WasProcessed(eventKey string) bool { } // MarkProcessed marks an event as processed. -func (s *MemoryStore) MarkProcessed(eventKey string, _ time.Duration) error { +func (s *MemoryStore) MarkProcessed(ctx context.Context, eventKey string, _ time.Duration) error { s.mu.Lock() defer s.mu.Unlock() s.events[eventKey] = time.Now() @@ -146,14 +147,14 @@ func (s *MemoryStore) MarkProcessed(eventKey string, _ time.Duration) error { } // LastNotification retrieves the last notification timestamp for a PR. -func (s *MemoryStore) LastNotification(prURL string) time.Time { +func (s *MemoryStore) LastNotification(ctx context.Context, prURL string) time.Time { s.mu.RLock() defer s.mu.RUnlock() return s.notifications[prURL] } // RecordNotification records when a notification was sent for a PR. -func (s *MemoryStore) RecordNotification(prURL string, notifiedAt time.Time) error { +func (s *MemoryStore) RecordNotification(ctx context.Context, prURL string, notifiedAt time.Time) error { s.mu.Lock() defer s.mu.Unlock() s.notifications[prURL] = notifiedAt @@ -161,7 +162,7 @@ func (s *MemoryStore) RecordNotification(prURL string, notifiedAt time.Time) err } // Cleanup removes old data from memory. -func (s *MemoryStore) Cleanup() error { +func (s *MemoryStore) Cleanup(ctx context.Context) error { s.mu.Lock() defer s.mu.Unlock() @@ -203,7 +204,8 @@ func (s *MemoryStore) Cleanup() error { } // Clean up old pending DMs (>7 days or already past send time by >1 day) - for key, dm := range s.pendingDMs { + for key := range s.pendingDMs { + dm := s.pendingDMs[key] if now.Sub(dm.QueuedAt) > 7*24*time.Hour || now.Sub(dm.SendAfter) > 24*time.Hour { delete(s.pendingDMs, key) } @@ -213,20 +215,21 @@ func (s *MemoryStore) Cleanup() error { } // QueuePendingDM adds a DM to the pending queue. -func (s *MemoryStore) QueuePendingDM(dm PendingDM) error { +func (s *MemoryStore) QueuePendingDM(ctx context.Context, dm *PendingDM) error { s.mu.Lock() defer s.mu.Unlock() - s.pendingDMs[dm.ID] = dm + s.pendingDMs[dm.ID] = *dm return nil } // PendingDMs returns all pending DMs that should be sent (SendAfter <= before). 
-func (s *MemoryStore) PendingDMs(before time.Time) ([]PendingDM, error) { +func (s *MemoryStore) PendingDMs(ctx context.Context, before time.Time) ([]PendingDM, error) { s.mu.RLock() defer s.mu.RUnlock() var result []PendingDM - for _, dm := range s.pendingDMs { + for key := range s.pendingDMs { + dm := s.pendingDMs[key] if !dm.SendAfter.After(before) { result = append(result, dm) } @@ -235,7 +238,7 @@ func (s *MemoryStore) PendingDMs(before time.Time) ([]PendingDM, error) { } // RemovePendingDM removes a pending DM from the queue. -func (s *MemoryStore) RemovePendingDM(id string) error { +func (s *MemoryStore) RemovePendingDM(ctx context.Context, id string) error { s.mu.Lock() defer s.mu.Unlock() delete(s.pendingDMs, id) diff --git a/pkg/state/memory_test.go b/pkg/state/memory_test.go index de37d43..3908e23 100644 --- a/pkg/state/memory_test.go +++ b/pkg/state/memory_test.go @@ -1,6 +1,7 @@ package state import ( + "context" "fmt" "testing" "time" @@ -32,10 +33,11 @@ func TestNewMemoryStore(t *testing.T) { } func TestThreadOperations(t *testing.T) { + ctx := context.Background() store := NewMemoryStore() // Test retrieval of non-existent thread - _, exists := store.Thread("owner", "repo", 123, "C123") + _, exists := store.Thread(ctx, "owner", "repo", 123, "C123") if exists { t.Error("expected thread to not exist") } @@ -49,13 +51,13 @@ func TestThreadOperations(t *testing.T) { LastEventTime: time.Now(), } - err := store.SaveThread("owner", "repo", 123, "C123", threadInfo) + err := store.SaveThread(ctx, "owner", "repo", 123, "C123", threadInfo) if err != nil { t.Fatalf("unexpected error saving thread: %v", err) } // Retrieve saved thread - retrieved, exists := store.Thread("owner", "repo", 123, "C123") + retrieved, exists := store.Thread(ctx, "owner", "repo", 123, "C123") if !exists { t.Fatal("expected thread to exist") } @@ -80,23 +82,24 @@ func TestThreadOperations(t *testing.T) { } func TestDMOperations(t *testing.T) { + ctx := context.Background() store := NewMemoryStore() // Test retrieval of non-existent DM - _, exists := store.LastDM("U001", "https://github.com/test/repo/pull/123") + _, exists := store.LastDM(ctx, "U001", "https://github.com/test/repo/pull/123") if exists { t.Error("expected DM to not exist") } // Record DM sentAt := time.Now() - err := store.RecordDM("U001", "https://github.com/test/repo/pull/123", sentAt) + err := store.RecordDM(ctx, "U001", "https://github.com/test/repo/pull/123", sentAt) if err != nil { t.Fatalf("unexpected error recording DM: %v", err) } // Retrieve recorded DM - retrieved, exists := store.LastDM("U001", "https://github.com/test/repo/pull/123") + retrieved, exists := store.LastDM(ctx, "U001", "https://github.com/test/repo/pull/123") if !exists { t.Fatal("expected DM to exist") } @@ -107,12 +110,13 @@ func TestDMOperations(t *testing.T) { } func TestDMMessageOperations(t *testing.T) { + ctx := context.Background() store := NewMemoryStore() prURL := "https://github.com/test/repo/pull/123" // Test retrieval of non-existent DM message - _, exists := store.DMMessage("U001", prURL) + _, exists := store.DMMessage(ctx, "U001", prURL) if exists { t.Error("expected DM message to not exist") } @@ -125,13 +129,13 @@ func TestDMMessageOperations(t *testing.T) { MessageText: "Test DM", } - err := store.SaveDMMessage("U001", prURL, dmInfo) + err := store.SaveDMMessage(ctx, "U001", prURL, dmInfo) if err != nil { t.Fatalf("unexpected error saving DM message: %v", err) } // Retrieve saved DM message - retrieved, exists := store.DMMessage("U001", prURL) + 
retrieved, exists := store.DMMessage(ctx, "U001", prURL) if !exists { t.Fatal("expected DM message to exist") } @@ -153,12 +157,13 @@ func TestDMMessageOperations(t *testing.T) { } func TestListDMUsers(t *testing.T) { + ctx := context.Background() store := NewMemoryStore() prURL := "https://github.com/test/repo/pull/123" // Initially no users - users := store.ListDMUsers(prURL) + users := store.ListDMUsers(ctx, prURL) if len(users) != 0 { t.Errorf("expected 0 users, got %d", len(users)) } @@ -171,18 +176,18 @@ func TestListDMUsers(t *testing.T) { MessageText: "Test DM", } - if err := store.SaveDMMessage("U001", prURL, dmInfo); err != nil { + if err := store.SaveDMMessage(ctx, "U001", prURL, dmInfo); err != nil { t.Fatalf("failed to save DM for U001: %v", err) } - if err := store.SaveDMMessage("U002", prURL, dmInfo); err != nil { + if err := store.SaveDMMessage(ctx, "U002", prURL, dmInfo); err != nil { t.Fatalf("failed to save DM for U002: %v", err) } - if err := store.SaveDMMessage("U003", prURL, dmInfo); err != nil { + if err := store.SaveDMMessage(ctx, "U003", prURL, dmInfo); err != nil { t.Fatalf("failed to save DM for U003: %v", err) } // List users - users = store.ListDMUsers(prURL) + users = store.ListDMUsers(ctx, prURL) if len(users) != 3 { t.Fatalf("expected 3 users, got %d", len(users)) } @@ -200,30 +205,31 @@ func TestListDMUsers(t *testing.T) { } // Different PR should return no users - users = store.ListDMUsers("https://github.com/test/repo/pull/456") + users = store.ListDMUsers(ctx, "https://github.com/test/repo/pull/456") if len(users) != 0 { t.Errorf("expected 0 users for different PR, got %d", len(users)) } } func TestDigestOperations(t *testing.T) { + ctx := context.Background() store := NewMemoryStore() // Test retrieval of non-existent digest - _, exists := store.LastDigest("U001", "2025-01-15") + _, exists := store.LastDigest(ctx, "U001", "2025-01-15") if exists { t.Error("expected digest to not exist") } // Record digest sentAt := time.Now() - err := store.RecordDigest("U001", "2025-01-15", sentAt) + err := store.RecordDigest(ctx, "U001", "2025-01-15", sentAt) if err != nil { t.Fatalf("unexpected error recording digest: %v", err) } // Retrieve recorded digest - retrieved, exists := store.LastDigest("U001", "2025-01-15") + retrieved, exists := store.LastDigest(ctx, "U001", "2025-01-15") if !exists { t.Fatal("expected digest to exist") } @@ -234,53 +240,56 @@ func TestDigestOperations(t *testing.T) { } func TestEventProcessing(t *testing.T) { + ctx := context.Background() store := NewMemoryStore() eventKey := "event-123" // Event should not be processed initially - if store.WasProcessed(eventKey) { + if store.WasProcessed(ctx, eventKey) { t.Error("expected event to not be processed") } // Mark event as processed - err := store.MarkProcessed(eventKey, 24*time.Hour) + err := store.MarkProcessed(ctx, eventKey, 24*time.Hour) if err != nil { t.Fatalf("unexpected error marking event: %v", err) } // Event should now be processed - if !store.WasProcessed(eventKey) { + if !store.WasProcessed(ctx, eventKey) { t.Error("expected event to be processed") } } func TestNotificationOperations(t *testing.T) { + ctx := context.Background() store := NewMemoryStore() prURL := "https://github.com/test/repo/pull/123" // Last notification should be zero time initially - lastNotif := store.LastNotification(prURL) + lastNotif := store.LastNotification(ctx, prURL) if !lastNotif.IsZero() { t.Error("expected zero time for non-existent notification") } // Record notification notifiedAt := time.Now() 
- err := store.RecordNotification(prURL, notifiedAt) + err := store.RecordNotification(ctx, prURL, notifiedAt) if err != nil { t.Fatalf("unexpected error recording notification: %v", err) } // Retrieve last notification - retrieved := store.LastNotification(prURL) + retrieved := store.LastNotification(ctx, prURL) if !retrieved.Equal(notifiedAt) { t.Errorf("expected notifiedAt %v, got %v", notifiedAt, retrieved) } } func TestCleanup(t *testing.T) { + ctx := context.Background() store := NewMemoryStore() // Add some old data @@ -317,7 +326,7 @@ func TestCleanup(t *testing.T) { store.events["recent-event"] = recentTime // Run cleanup - err := store.Cleanup() + err := store.Cleanup(ctx) if err != nil { t.Fatalf("unexpected error during cleanup: %v", err) } @@ -356,10 +365,11 @@ func TestClose(t *testing.T) { } func TestPendingDMOperations(t *testing.T) { + ctx := context.Background() store := NewMemoryStore() // Test retrieval when no pending DMs exist - pending, err := store.PendingDMs(time.Now()) + pending, err := store.PendingDMs(ctx, time.Now()) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -388,7 +398,7 @@ func TestPendingDMOperations(t *testing.T) { SendAfter: now.Add(-5 * time.Minute), // 5 minutes ago - ready to send } - err = store.QueuePendingDM(dm1) + err = store.QueuePendingDM(ctx, &dm1) if err != nil { t.Fatalf("unexpected error queueing DM: %v", err) } @@ -413,13 +423,13 @@ func TestPendingDMOperations(t *testing.T) { SendAfter: now.Add(10 * time.Minute), // 10 minutes from now - not ready yet } - err = store.QueuePendingDM(dm2) + err = store.QueuePendingDM(ctx, &dm2) if err != nil { t.Fatalf("unexpected error queueing second DM: %v", err) } // Get pending DMs that are ready to send - pending, err = store.PendingDMs(now) + pending, err = store.PendingDMs(ctx, now) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -441,7 +451,7 @@ func TestPendingDMOperations(t *testing.T) { // Get pending DMs 15 minutes from now - both should be ready future := now.Add(15 * time.Minute) - pending, err = store.PendingDMs(future) + pending, err = store.PendingDMs(ctx, future) if err != nil { t.Fatalf("unexpected error getting future pending DMs: %v", err) } @@ -451,13 +461,13 @@ func TestPendingDMOperations(t *testing.T) { } // Remove one DM - err = store.RemovePendingDM("dm-001") + err = store.RemovePendingDM(ctx, "dm-001") if err != nil { t.Fatalf("unexpected error removing DM: %v", err) } // Now only dm2 should remain - pending, err = store.PendingDMs(future) + pending, err = store.PendingDMs(ctx, future) if err != nil { t.Fatalf("unexpected error getting pending DMs after removal: %v", err) } @@ -471,13 +481,14 @@ func TestPendingDMOperations(t *testing.T) { } // Remove non-existent DM should not error - err = store.RemovePendingDM("dm-999") + err = store.RemovePendingDM(ctx, "dm-999") if err != nil { t.Errorf("unexpected error removing non-existent DM: %v", err) } } func TestPendingDMCleanup(t *testing.T) { + ctx := context.Background() store := NewMemoryStore() now := time.Now() @@ -491,7 +502,7 @@ func TestPendingDMCleanup(t *testing.T) { QueuedAt: oldTime, SendAfter: oldTime, } - if err := store.QueuePendingDM(oldDM); err != nil { + if err := store.QueuePendingDM(ctx, &oldDM); err != nil { t.Fatalf("failed to queue old DM: %v", err) } @@ -503,18 +514,18 @@ func TestPendingDMCleanup(t *testing.T) { QueuedAt: now, SendAfter: now.Add(10 * time.Minute), } - if err := store.QueuePendingDM(recentDM); err != nil { + if err := 
store.QueuePendingDM(ctx, &recentDM); err != nil { t.Fatalf("failed to queue recent DM: %v", err) } // Run cleanup - err := store.Cleanup() + err := store.Cleanup(ctx) if err != nil { t.Fatalf("unexpected error during cleanup: %v", err) } // Verify old DM was removed - pending, err := store.PendingDMs(now.Add(24 * time.Hour)) + pending, err := store.PendingDMs(ctx, now.Add(24*time.Hour)) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -529,6 +540,7 @@ func TestPendingDMCleanup(t *testing.T) { } func TestPendingDMConcurrency(t *testing.T) { + ctx := context.Background() store := NewMemoryStore() now := time.Now() @@ -536,7 +548,7 @@ func TestPendingDMConcurrency(t *testing.T) { // Queue multiple DMs concurrently done := make(chan bool, 3) - for i := 0; i < 3; i++ { + for i := range 3 { go func(index int) { dm := PendingDM{ ID: fmt.Sprintf("dm-%d", index), @@ -545,7 +557,7 @@ func TestPendingDMConcurrency(t *testing.T) { QueuedAt: now, SendAfter: now.Add(-1 * time.Minute), } - if err := store.QueuePendingDM(dm); err != nil { + if err := store.QueuePendingDM(ctx, &dm); err != nil { t.Errorf("failed to queue DM in goroutine %d: %v", index, err) } done <- true @@ -553,12 +565,12 @@ func TestPendingDMConcurrency(t *testing.T) { } // Wait for all goroutines - for i := 0; i < 3; i++ { + for range 3 { <-done } // Get all pending DMs - pending, err := store.PendingDMs(now) + pending, err := store.PendingDMs(ctx, now) if err != nil { t.Fatalf("unexpected error getting pending DMs: %v", err) } @@ -568,9 +580,9 @@ func TestPendingDMConcurrency(t *testing.T) { } // Remove DMs concurrently - for i := 0; i < 3; i++ { + for i := range 3 { go func(index int) { - if err := store.RemovePendingDM(fmt.Sprintf("dm-%d", index)); err != nil { + if err := store.RemovePendingDM(ctx, fmt.Sprintf("dm-%d", index)); err != nil { t.Errorf("failed to remove DM in goroutine %d: %v", index, err) } done <- true @@ -578,12 +590,12 @@ func TestPendingDMConcurrency(t *testing.T) { } // Wait for all removals - for i := 0; i < 3; i++ { + for range 3 { <-done } // Verify all removed - pending, err = store.PendingDMs(now) + pending, err = store.PendingDMs(ctx, now) if err != nil { t.Fatalf("unexpected error getting pending DMs after removal: %v", err) } diff --git a/pkg/state/store.go b/pkg/state/store.go index 54d3ea1..e469238 100644 --- a/pkg/state/store.go +++ b/pkg/state/store.go @@ -2,6 +2,7 @@ package state import ( + "context" "time" ) @@ -26,22 +27,22 @@ type DMInfo struct { // PendingDM represents a DM scheduled to be sent later. 
 type PendingDM struct {
-	ID            string    `json:"id"`             // Unique ID for this pending DM
-	WorkspaceID   string    `json:"workspace_id"`   // Slack workspace ID
-	UserID        string    `json:"user_id"`        // Slack user ID to DM
-	PROwner       string    `json:"pr_owner"`       // GitHub PR owner
-	PRRepo        string    `json:"pr_repo"`        // GitHub PR repo
-	PRNumber      int       `json:"pr_number"`      // GitHub PR number
-	PRURL         string    `json:"pr_url"`         // GitHub PR URL
-	PRTitle       string    `json:"pr_title"`       // PR title
-	PRAuthor      string    `json:"pr_author"`      // PR author
-	PRState       string    `json:"pr_state"`       // Deprecated simplified state
-	WorkflowState string    `json:"workflow_state"` // Workflow state from turnclient
-	NextActions   string    `json:"next_actions"`   // Serialized NextAction map (JSON)
-	ChannelID     string    `json:"channel_id"`     // Channel where user was tagged
-	ChannelName   string    `json:"channel_name"`   // Channel name
-	QueuedAt      time.Time `json:"queued_at"`      // When this DM was queued
-	SendAfter     time.Time `json:"send_after"`     // Send DM after this time
+	QueuedAt      time.Time `json:"queued_at"`      // When this DM was queued
+	SendAfter     time.Time `json:"send_after"`     // Send DM after this time
+	PRAuthor      string    `json:"pr_author"`      // PR author
+	PRState       string    `json:"pr_state"`       // Deprecated simplified state
+	PRRepo        string    `json:"pr_repo"`        // GitHub PR repo
+	WorkspaceID   string    `json:"workspace_id"`   // Slack workspace ID
+	PRURL         string    `json:"pr_url"`         // GitHub PR URL
+	PRTitle       string    `json:"pr_title"`       // PR title
+	ID            string    `json:"id"`             // Unique ID for this pending DM
+	PROwner       string    `json:"pr_owner"`       // GitHub PR owner
+	WorkflowState string    `json:"workflow_state"` // Workflow state from turnclient
+	NextActions   string    `json:"next_actions"`   // Serialized NextAction map (JSON)
+	ChannelID     string    `json:"channel_id"`     // Channel where user was tagged
+	ChannelName   string    `json:"channel_name"`   // Channel name
+	UserID        string    `json:"user_id"`        // Slack user ID to DM
+	PRNumber      int       `json:"pr_number"`      // GitHub PR number
 }
 
 // Store provides persistent storage for bot state.
@@ -50,37 +51,37 @@ type PendingDM struct {
 //nolint:interfacebloat // Store intentionally groups all state operations for simplicity
 type Store interface {
 	// Thread operations - map PR to Slack thread
-	Thread(owner, repo string, number int, channelID string) (ThreadInfo, bool)
-	SaveThread(owner, repo string, number int, channelID string, info ThreadInfo) error
+	Thread(ctx context.Context, owner, repo string, number int, channelID string) (ThreadInfo, bool)
+	SaveThread(ctx context.Context, owner, repo string, number int, channelID string, info ThreadInfo) error
 
 	// DM tracking - prevent duplicate notifications
-	LastDM(userID, prURL string) (time.Time, bool)
-	RecordDM(userID, prURL string, sentAt time.Time) error
+	LastDM(ctx context.Context, userID, prURL string) (time.Time, bool)
+	RecordDM(ctx context.Context, userID, prURL string, sentAt time.Time) error
 
 	// DM message tracking - store DM message info for updating
-	DMMessage(userID, prURL string) (DMInfo, bool)
-	SaveDMMessage(userID, prURL string, info DMInfo) error
-	ListDMUsers(prURL string) []string
+	DMMessage(ctx context.Context, userID, prURL string) (DMInfo, bool)
+	SaveDMMessage(ctx context.Context, userID, prURL string, info DMInfo) error
+	ListDMUsers(ctx context.Context, prURL string) []string
 
 	// Daily digest tracking - one per user per day
-	LastDigest(userID, date string) (time.Time, bool)
-	RecordDigest(userID, date string, sentAt time.Time) error
+	LastDigest(ctx context.Context, userID, date string) (time.Time, bool)
+	RecordDigest(ctx context.Context, userID, date string, sentAt time.Time) error
 
 	// Event deduplication - prevent processing same event twice
-	WasProcessed(eventKey string) bool
-	MarkProcessed(eventKey string, ttl time.Duration) error
+	WasProcessed(ctx context.Context, eventKey string) bool
+	MarkProcessed(ctx context.Context, eventKey string, ttl time.Duration) error
 
 	// Notification 
tracking - track when we last notified about a PR - LastNotification(prURL string) time.Time - RecordNotification(prURL string, notifiedAt time.Time) error + LastNotification(ctx context.Context, prURL string) time.Time + RecordNotification(ctx context.Context, prURL string, notifiedAt time.Time) error // Pending DM queue - schedule DMs to be sent later - QueuePendingDM(dm PendingDM) error - PendingDMs(before time.Time) ([]PendingDM, error) - RemovePendingDM(id string) error + QueuePendingDM(ctx context.Context, dm *PendingDM) error + PendingDMs(ctx context.Context, before time.Time) ([]PendingDM, error) + RemovePendingDM(ctx context.Context, id string) error // Cleanup old data - Cleanup() error + Cleanup(ctx context.Context) error // Close releases resources Close() error diff --git a/pkg/usermapping/reverse.go b/pkg/usermapping/reverse.go index 820a66c..91b5e61 100644 --- a/pkg/usermapping/reverse.go +++ b/pkg/usermapping/reverse.go @@ -1,4 +1,3 @@ -// Package usermapping provides GitHub-to-Slack user mapping functionality. package usermapping import ( @@ -33,7 +32,7 @@ type ReverseMapping struct { } // NewReverseService creates a new reverse mapping service (Slack → GitHub). -func NewReverseService(slackClient *slack.Client, githubToken string) *ReverseService { +func NewReverseService(_ *slack.Client, githubToken string) *ReverseService { return &ReverseService{ orgCache: ghmailto.NewOrgCacheService(githubToken), cache: make(map[string]*ReverseMapping), @@ -42,7 +41,7 @@ func NewReverseService(slackClient *slack.Client, githubToken string) *ReverseSe } // SetOverrides updates the manual user mapping overrides from config. -// Format: githubUsername -> email +// Format: githubUsername -> email. func (s *ReverseService) SetOverrides(overrides map[string]string) { s.cacheMu.Lock() defer s.cacheMu.Unlock() @@ -82,7 +81,7 @@ func (s *ReverseService) LookupGitHub(ctx context.Context, slackClient SlackAPI, slog.Warn("Slack user has no email", "slack_user_id", slackUserID, "slack_username", slackUser.Name) - return nil, fmt.Errorf("Slack user has no email: %s", slackUserID) + return nil, fmt.Errorf("slack user has no email: %s", slackUserID) } // Check if this email has a config override @@ -150,6 +149,8 @@ func (s *ReverseService) LookupGitHub(ctx context.Context, slackClient SlackAPI, // reverseEmailLookup performs the actual reverse lookup: email → GitHub username. // Uses the org-wide identity cache for fast, reliable lookups. 
+// +//nolint:revive // Function signature length is acceptable for clarity func (s *ReverseService) reverseEmailLookup(ctx context.Context, email, organization string) (username string, confidence int, matchMethod string, err error) { slog.Info("starting reverse email lookup via org cache", "email", email, diff --git a/pkg/usermapping/reverse_test.go b/pkg/usermapping/reverse_test.go index 0dca7a5..7c6b959 100644 --- a/pkg/usermapping/reverse_test.go +++ b/pkg/usermapping/reverse_test.go @@ -30,6 +30,7 @@ func (m *mockSlackClient) GetUserInfo(userID string) (*slack.User, error) { } func TestReverseMapping_ConfigOverride(t *testing.T) { + ctx := context.Background() mockSlack := &mockSlackClient{ users: map[string]*slack.User{ "U12345": { @@ -47,7 +48,6 @@ func TestReverseMapping_ConfigOverride(t *testing.T) { "githubuser": "test@company.com", }) - ctx := context.Background() mapping, err := service.LookupGitHub(ctx, mockSlack, "U12345", "test-org", "company.com") if err != nil { t.Fatalf("expected no error, got: %v", err) @@ -67,6 +67,7 @@ func TestReverseMapping_ConfigOverride(t *testing.T) { } func TestReverseMapping_CacheHit(t *testing.T) { + ctx := context.Background() mockSlack := &mockSlackClient{ users: map[string]*slack.User{ "U12345": { @@ -92,7 +93,6 @@ func TestReverseMapping_CacheHit(t *testing.T) { Confidence: 90, } - ctx := context.Background() mapping, err := service.LookupGitHub(ctx, mockSlack, "U12345", "test-org", "company.com") if err != nil { t.Fatalf("expected no error, got: %v", err) @@ -166,13 +166,13 @@ func TestReverseMapping_ClearCache(t *testing.T) { } func TestReverseMapping_SlackUserNotFound(t *testing.T) { + ctx := context.Background() mockSlack := &mockSlackClient{ users: map[string]*slack.User{}, } service := NewReverseService(nil, "fake-token") - ctx := context.Background() _, err := service.LookupGitHub(ctx, mockSlack, "U99999", "test-org", "company.com") if err == nil { t.Fatal("expected error for non-existent Slack user, got nil") @@ -180,6 +180,7 @@ func TestReverseMapping_SlackUserNotFound(t *testing.T) { } func TestReverseMapping_NoEmail(t *testing.T) { + ctx := context.Background() mockSlack := &mockSlackClient{ users: map[string]*slack.User{ "U12345": { @@ -194,7 +195,6 @@ func TestReverseMapping_NoEmail(t *testing.T) { service := NewReverseService(nil, "fake-token") - ctx := context.Background() _, err := service.LookupGitHub(ctx, mockSlack, "U12345", "test-org", "company.com") if err == nil { t.Fatal("expected error for user with no email, got nil") @@ -221,6 +221,7 @@ func TestReverseMapping_SetOverrides(t *testing.T) { } func TestReverseMapping_WrongOrgDomain(t *testing.T) { + ctx := context.Background() mockSlack := &mockSlackClient{ users: map[string]*slack.User{ "U12345": { @@ -235,7 +236,6 @@ func TestReverseMapping_WrongOrgDomain(t *testing.T) { service := NewReverseService(nil, "fake-token") - ctx := context.Background() _, err := service.LookupGitHub(ctx, mockSlack, "U12345", "test-org", "company.com") if err == nil { t.Fatal("expected error for mismatched email domain, got nil") diff --git a/pkg/usermapping/usermapping_test.go b/pkg/usermapping/usermapping_test.go index 8697533..0403f05 100644 --- a/pkg/usermapping/usermapping_test.go +++ b/pkg/usermapping/usermapping_test.go @@ -153,6 +153,7 @@ func TestService_GetSlackHandle_FallbackToGitHub(t *testing.T) { } func TestService_FormatUserMention_WithMapping(t *testing.T) { + ctx := context.Background() service := &Service{ cache: make(map[string]*UserMapping), } @@ -165,7 +166,6 @@ func 
TestService_FormatUserMention_WithMapping(t *testing.T) { CachedAt: time.Now(), }) - ctx := context.Background() result := service.FormatUserMention(ctx, "testuser", "testorg", "example.com") if result != "<@U123456>" { @@ -174,6 +174,7 @@ func TestService_FormatUserMention_WithMapping(t *testing.T) { } func TestService_FormatUserMention_NoMapping(t *testing.T) { + ctx := context.Background() service := &Service{ slackClient: &MockSlackAPI{}, githubLookup: &MockGitHubLookup{}, @@ -181,7 +182,6 @@ func TestService_FormatUserMention_NoMapping(t *testing.T) { lookupSem: make(chan struct{}, 5), } - ctx := context.Background() result := service.FormatUserMention(ctx, "unknownuser", "testorg", "example.com") if result != "@unknownuser" { @@ -190,6 +190,7 @@ func TestService_FormatUserMention_NoMapping(t *testing.T) { } func TestService_FormatUserMentions_Mixed(t *testing.T) { + ctx := context.Background() service := &Service{ slackClient: &MockSlackAPI{}, githubLookup: &MockGitHubLookup{}, @@ -213,7 +214,6 @@ func TestService_FormatUserMentions_Mixed(t *testing.T) { CachedAt: time.Now(), }) - ctx := context.Background() users := []string{"user1", "user2", "user3"} result := service.FormatUserMentions(ctx, users, "testorg", "example.com") @@ -615,7 +615,7 @@ func TestService_SlackHandles(t *testing.T) { mockSlack := &MockSlackAPI{ getUserByEmailFunc: func(ctx context.Context, email string) (*slack.User, error) { - if len(email) > 0 && strings.Contains(email, "@example.com") { + if email != "" && strings.Contains(email, "@example.com") { username := strings.Split(email, "@")[0] return &slack.User{ ID: "U" + strings.ToUpper(username[:min(1, len(username))]), @@ -697,6 +697,7 @@ func TestSelectBestMatch(t *testing.T) { } func TestService_SlackHandles_EmptyList(t *testing.T) { + ctx := context.Background() service := &Service{ slackClient: &MockSlackAPI{}, githubLookup: &MockGitHubLookup{}, @@ -704,7 +705,6 @@ func TestService_SlackHandles_EmptyList(t *testing.T) { lookupSem: make(chan struct{}, 5), } - ctx := context.Background() result, err := service.SlackHandles(ctx, []string{}, "testorg", "example.com") if err != nil { t.Fatalf("unexpected error: %v", err) @@ -715,11 +715,11 @@ func TestService_SlackHandles_EmptyList(t *testing.T) { } func TestService_FormatUserMentions_Empty(t *testing.T) { + ctx := context.Background() service := &Service{ cache: make(map[string]*UserMapping), } - ctx := context.Background() result := service.FormatUserMentions(ctx, []string{}, "testorg", "example.com") if result != "" { t.Errorf("expected empty string, got %q", result) @@ -727,6 +727,7 @@ func TestService_FormatUserMentions_Empty(t *testing.T) { } func TestService_ContextCancellation(t *testing.T) { + ctx := context.Background() service := &Service{ slackClient: &MockSlackAPI{}, githubLookup: &MockGitHubLookup{}, @@ -744,7 +745,7 @@ func TestService_ContextCancellation(t *testing.T) { if err == nil { t.Error("expected context cancellation error, got nil") } - if err != context.Canceled { + if !errors.Is(err, context.Canceled) { t.Errorf("expected context.Canceled error, got %v", err) } } From fb18ea1affd7e77b4be2efb9f942618e7b87de6d Mon Sep 17 00:00:00 2001 From: Thomas Stromberg Date: Mon, 3 Nov 2025 20:48:28 -0500 Subject: [PATCH 3/3] improve testability --- pkg/bot/bot.go | 498 +++++++++++++++++++++-------------- pkg/notify/format_dm_test.go | 153 +++++++++++ pkg/notify/notify.go | 301 ++++++++++++--------- pkg/slack/slack.go | 144 ++++------ 4 files changed, 677 insertions(+), 419 deletions(-) create mode 
100644 pkg/notify/format_dm_test.go diff --git a/pkg/bot/bot.go b/pkg/bot/bot.go index c8aae85..e6f39e7 100644 --- a/pkg/bot/bot.go +++ b/pkg/bot/bot.go @@ -152,7 +152,6 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner Number int `json:"number"` }, checkResult *turn.CheckResponse, ) (threadTS string, wasNewlyCreated bool, currentMessageText string, err error) { - // Use cache key that includes channel ID to support multiple channels per PR cacheKey := fmt.Sprintf("%s/%s#%d:%s", owner, repo, prNumber, channelID) slog.Debug("finding or creating PR thread", @@ -160,7 +159,36 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner logFieldChannel, channelID, "pr_state", prState) - // Check cache first (quick read lock) + // Try to find existing thread + threadTS, messageText, found := c.findPRThread(ctx, cacheKey, channelID, owner, repo, prNumber, prState, pullRequest) + if found { + return threadTS, false, messageText, nil + } + + // Thread not found - create new one with concurrent creation prevention + threadInfo, wasCreated, err := c.createPRThreadWithLocking(ctx, channelID, owner, repo, prNumber, prState, pullRequest, checkResult) + if err != nil { + return "", false, "", err + } + + return threadInfo.ThreadTS, wasCreated, threadInfo.MessageText, nil +} + +// findPRThread searches for an existing PR thread in cache and Slack. +// Returns (threadTS, messageText, found). +func (c *Coordinator) findPRThread( + ctx context.Context, cacheKey, channelID, owner, repo string, prNumber int, prState string, + pullRequest struct { + CreatedAt time.Time `json:"created_at"` + User struct { + Login string `json:"login"` + } `json:"user"` + HTMLURL string `json:"html_url"` + Title string `json:"title"` + Number int `json:"number"` + }, +) (threadTS, messageText string, found bool) { + // Check cache first if threadInfo, exists := c.threadCache.Get(cacheKey); exists { slog.Debug("found PR thread in cache", "pr", cacheKey, @@ -168,14 +196,14 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner logFieldChannel, channelID, "cached_state", threadInfo.LastState, "has_cached_message_text", threadInfo.MessageText != "") - return threadInfo.ThreadTS, false, threadInfo.MessageText, nil + return threadInfo.ThreadTS, threadInfo.MessageText, true } - // Not in cache - search Slack for existing thread before trying to create + // Search Slack for existing thread prURL := fmt.Sprintf("https://github.com/%s/%s/pull/%d", owner, repo, prNumber) searchFrom := pullRequest.CreatedAt if searchFrom.IsZero() || time.Since(searchFrom) > 30*24*time.Hour { - searchFrom = time.Now().AddDate(0, 0, -30) // 30 days fallback + searchFrom = time.Now().AddDate(0, 0, -30) slog.Debug("using 30-day fallback for thread search", "pr", cacheKey, "pr_created_at_available", !pullRequest.CreatedAt.IsZero(), @@ -187,98 +215,89 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner "search_window_days", int(time.Since(searchFrom).Hours()/24)) } - initialSearchTS, initialSearchText := c.searchForPRThread(ctx, channelID, prURL, searchFrom) - if initialSearchTS != "" { + threadTS, messageText = c.searchForPRThread(ctx, channelID, prURL, searchFrom) + if threadTS != "" { slog.Info("found existing PR thread via initial search", "pr", cacheKey, - "thread_ts", initialSearchTS, + "thread_ts", threadTS, logFieldChannel, channelID, - "current_message_preview", initialSearchText[:min(100, len(initialSearchText))]) + 
"current_message_preview", messageText[:min(100, len(messageText))]) - // Save the found thread (cache + persist) + // Save the found thread c.saveThread(ctx, owner, repo, prNumber, channelID, cache.ThreadInfo{ - ThreadTS: initialSearchTS, + ThreadTS: threadTS, ChannelID: channelID, LastState: prState, - MessageText: initialSearchText, + MessageText: messageText, }) - return initialSearchTS, false, initialSearchText, nil + return threadTS, messageText, true } - // Prevent concurrent creation of the same PR thread in same channel - // Lock on cacheKey (with channel) to allow parallel creation in different channels + return "", "", false +} + +// createPRThreadWithLocking creates a new PR thread with concurrent creation prevention. +// Returns (threadInfo, wasCreated, error). +// wasCreated is true if a new thread was created, false if an existing thread was found. +func (c *Coordinator) createPRThreadWithLocking( + ctx context.Context, channelID, owner, repo string, prNumber int, prState string, + pullRequest struct { + CreatedAt time.Time `json:"created_at"` + User struct { + Login string `json:"login"` + } `json:"user"` + HTMLURL string `json:"html_url"` + Title string `json:"title"` + Number int `json:"number"` + }, + checkResult *turn.CheckResponse, +) (threadInfo cache.ThreadInfo, wasCreated bool, err error) { + cacheKey := fmt.Sprintf("%s/%s#%d:%s", owner, repo, prNumber, channelID) + // Prevent concurrent creation within this instance if !c.threadCache.MarkCreating(cacheKey) { - // Another goroutine is already creating this thread - slog.Info("another goroutine is creating this PR thread, waiting for completion", - "pr", cacheKey) - deadline := time.Now().Add(30 * time.Second) - for time.Now().Before(deadline) { - time.Sleep(500 * time.Millisecond) - if threadInfo, exists := c.threadCache.Get(cacheKey); exists { - slog.Info("found PR thread after waiting for concurrent creation", - "pr", cacheKey, - "thread_ts", threadInfo.ThreadTS, - "waited", time.Since(time.Now().Add(-30*time.Second))) - return threadInfo.ThreadTS, false, "", nil - } - // Check if the other goroutine finished (even if it failed) - if !c.threadCache.IsCreating(cacheKey) { - // Other goroutine finished but didn't cache (likely failed) - // Try to mark as creating ourselves - if !c.threadCache.MarkCreating(cacheKey) { - // Someone else started again, keep waiting - continue - } - // We successfully marked it, break out to create - break - } - } - if c.threadCache.IsCreating(cacheKey) { - slog.Warn("timed out waiting for concurrent thread creation, will try creating ourselves", - "pr", cacheKey) - // Try to take over creation - if !c.threadCache.MarkCreating(cacheKey) { - // Still being created, give up - return "", false, "", errors.New("timed out waiting for thread creation") - } + // Wait for another goroutine to finish + threadTS, messageText, shouldProceed := c.waitForConcurrentCreation(cacheKey) + if !shouldProceed { + // Thread was found - not newly created + return cache.ThreadInfo{ + ThreadTS: threadTS, + MessageText: messageText, + }, false, nil } + // If shouldProceed is true, we've successfully marked as creating and should continue } - // Double-check cache after marking as creating - if threadInfo, exists := c.threadCache.Get(cacheKey); exists { + // Double-check cache after acquiring lock + if info, exists := c.threadCache.Get(cacheKey); exists { c.threadCache.UnmarkCreating(cacheKey) slog.Debug("found PR thread in cache after marking as creating", "pr", cacheKey, - "thread_ts", threadInfo.ThreadTS) - 
return threadInfo.ThreadTS, false, "", nil + "thread_ts", info.ThreadTS) + return info, false, nil } - // Ensure we clean up the creating flag defer c.threadCache.UnmarkCreating(cacheKey) - // CRITICAL: Perform one final cross-instance check RIGHT before the expensive operations - // This handles the case where another instance (during rolling deployment) just created - // a thread while we were acquiring the lock. The creating flag only prevents races within - // this instance - we need to check Slack itself to catch threads from other instances. - // Add a small delay to let any concurrent creates from other instances complete their Slack API call. + // Cross-instance race prevention check + prURL := fmt.Sprintf("https://github.com/%s/%s/pull/%d", owner, repo, prNumber) time.Sleep(100 * time.Millisecond) - crossInstanceCheckTS, crossInstanceText := c.searchForPRThread(ctx, channelID, prURL, pullRequest.CreatedAt) - if crossInstanceCheckTS != "" { + crossInstanceTS, crossInstanceText := c.searchForPRThread(ctx, channelID, prURL, pullRequest.CreatedAt) + if crossInstanceTS != "" { slog.Info("found thread created by another instance (cross-instance race avoided)", "pr", cacheKey, - "thread_ts", crossInstanceCheckTS, + "thread_ts", crossInstanceTS, logFieldChannel, channelID, "current_message_preview", crossInstanceText[:min(100, len(crossInstanceText))], "note", "this prevented duplicate thread creation during rolling deployment") - // Save it and return (cache + persist) - c.saveThread(ctx, owner, repo, prNumber, channelID, cache.ThreadInfo{ - ThreadTS: crossInstanceCheckTS, + info := cache.ThreadInfo{ + ThreadTS: crossInstanceTS, ChannelID: channelID, LastState: prState, MessageText: crossInstanceText, - }) - return crossInstanceCheckTS, false, crossInstanceText, nil + } + c.saveThread(ctx, owner, repo, prNumber, channelID, info) + return info, false, nil } // Create new thread @@ -286,21 +305,21 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner "pr", cacheKey, logFieldChannel, channelID, "pr_state", prState, - "pr_created_at", pullRequest.CreatedAt.Format(time.RFC3339), - "search_window_used", searchFrom.Format(time.RFC3339)) + "pr_created_at", pullRequest.CreatedAt.Format(time.RFC3339)) newThreadTS, newMessageText, err := c.createPRThread(ctx, channelID, owner, repo, prNumber, prState, pullRequest, checkResult) if err != nil { - return "", false, "", fmt.Errorf("failed to create PR thread: %w", err) + return cache.ThreadInfo{}, false, fmt.Errorf("failed to create PR thread: %w", err) } - // Save the new thread (cache + persist) - c.saveThread(ctx, owner, repo, prNumber, channelID, cache.ThreadInfo{ + // Save the new thread + info := cache.ThreadInfo{ ThreadTS: newThreadTS, ChannelID: channelID, LastState: prState, MessageText: newMessageText, - }) + } + c.saveThread(ctx, owner, repo, prNumber, channelID, info) slog.Info("created and cached new PR thread", "pr", cacheKey, @@ -311,7 +330,48 @@ func (c *Coordinator) findOrCreatePRThread(ctx context.Context, channelID, owner "creation_successful", true, "note", "if you see duplicate threads, check if another instance created one during the same time window") - return newThreadTS, true, newMessageText, nil + return info, true, nil +} + +// waitForConcurrentCreation waits for another goroutine to finish creating the thread. +// Returns (threadTS, messageText, shouldProceed) where shouldProceed indicates whether caller should create. 
+func (c *Coordinator) waitForConcurrentCreation(cacheKey string) (threadTS, messageText string, shouldProceed bool) { + slog.Info("another goroutine is creating this PR thread, waiting for completion", "pr", cacheKey) + + deadline := time.Now().Add(30 * time.Second) + for time.Now().Before(deadline) { + time.Sleep(500 * time.Millisecond) + + if threadInfo, exists := c.threadCache.Get(cacheKey); exists { + slog.Info("found PR thread after waiting for concurrent creation", + "pr", cacheKey, + "thread_ts", threadInfo.ThreadTS, + "waited", time.Since(time.Now().Add(-30*time.Second))) + return threadInfo.ThreadTS, threadInfo.MessageText, false + } + + // Check if the other goroutine finished + if !c.threadCache.IsCreating(cacheKey) { + // Other goroutine finished but didn't cache - try to create ourselves + if c.threadCache.MarkCreating(cacheKey) { + // Successfully marked - caller should proceed with creation + return "", "", true + } + // Someone else marked it first, continue waiting + continue + } + } + + // Timeout - try one last time to mark as creating + if c.threadCache.IsCreating(cacheKey) { + slog.Warn("timed out waiting for concurrent thread creation", "pr", cacheKey) + if c.threadCache.MarkCreating(cacheKey) { + return "", "", true + } + } + + // Failed to acquire lock - return empty to avoid creation + return "", "", false } // searchForPRThread searches for an existing PR thread in a channel using channel history. @@ -1215,8 +1275,6 @@ func (c *Coordinator) processChannelsInParallel( // processPRForChannel handles PR processing for a single channel (extracted from the main loop). // Returns a map of Slack user IDs that were successfully tagged in this channel. -// -//nolint:maintidx // Core PR processing logic with necessary complexity for handling notifications func (c *Coordinator) processPRForChannel( ctx context.Context, prCtx prContext, channelName, workspaceID string, ) map[string]bool { @@ -1240,28 +1298,12 @@ func (c *Coordinator) processPRForChannel( return nil } - // Resolve channel name to ID for API calls - channelID := c.slack.ResolveChannelID(ctx, channelName) - if channelID == channelName || (channelName != "" && channelName[0] == '#' && channelID == channelName[1:]) { - slog.Warn("could not resolve channel for PR processing", - "workspace", c.workspaceName, - logFieldPR, fmt.Sprintf(prFormatString, owner, repo, prNumber), - "channel", channelName, - "action_required", "verify channel exists and bot has access") + // Resolve and validate channel + channelID, channelDisplay, ok := c.resolveAndValidateChannel(ctx, channelName, owner, repo, prNumber) + if !ok { return nil } - // For display purposes, show both name and ID - var channelDisplay string - switch { - case channelID != channelName: - channelDisplay = fmt.Sprintf("#%s (%s)", channelName, channelID) - case channelName != "" && channelName[0] == 'C': - channelDisplay = channelID - default: - channelDisplay = fmt.Sprintf("#%s (unresolved)", channelName) - } - slog.Debug("processing PR for individual channel", "workspace", c.workspaceName, logFieldPR, fmt.Sprintf(prFormatString, owner, repo, prNumber), @@ -1270,16 +1312,14 @@ func (c *Coordinator) processPRForChannel( "pr_state", prState, "action", event.Action) + // Get old state from cache oldState := "" - - // Check cache for existing thread info to get old state prKey := fmt.Sprintf("%s/%s#%d", owner, repo, prNumber) if threadInfo, exists := c.threadCache.Get(prKey); exists && threadInfo.ChannelID == channelID { oldState = threadInfo.LastState } - // Find or 
create thread for this PR in this channel - // Convert to the expected struct format + // Find or create thread pullRequestStruct := struct { CreatedAt time.Time `json:"created_at"` User struct { @@ -1311,129 +1351,187 @@ func (c *Coordinator) processPRForChannel( return nil } - // Track that we notified users in this channel for DM delay logic + // Track channel notification if c.notifier != nil && c.notifier.Tracker != nil { c.notifier.Tracker.UpdateChannelNotification(workspaceID, owner, repo, prNumber) } - // Track user tags in channel for DM delay logic and collect successfully tagged Slack users + // Track user tags + taggedUsers := c.trackUserTagsForDMDelay(ctx, workspaceID, channelID, channelDisplay, owner, repo, prNumber, checkResult) + + // Update message if needed + if !wasNewlyCreated { + c.updateMessageIfNeeded(ctx, channelID, channelDisplay, threadTS, oldState, currentText, prState, owner, repo, prNumber, event, checkResult) + } + + slog.Info("successfully processed PR in channel", + "workspace", c.workspaceName, + logFieldPR, fmt.Sprintf(prFormatString, owner, repo, prNumber), + "channel", channelDisplay, + "channel_id", channelID, + "thread_ts", threadTS, + "action", event.Action, + "pr_state", prState, + "had_state_change", oldState != "" && oldState != prState, + "users_tagged", len(taggedUsers)) + + return taggedUsers +} + +// resolveAndValidateChannel resolves channel name to ID and creates display string. +// Returns (channelID, channelDisplay, ok). +func (c *Coordinator) resolveAndValidateChannel( + ctx context.Context, channelName, owner, repo string, prNumber int, +) (channelID, channelDisplay string, ok bool) { + channelID = c.slack.ResolveChannelID(ctx, channelName) + if channelID == channelName || (channelName != "" && channelName[0] == '#' && channelID == channelName[1:]) { + slog.Warn("could not resolve channel for PR processing", + "workspace", c.workspaceName, + logFieldPR, fmt.Sprintf(prFormatString, owner, repo, prNumber), + "channel", channelName, + "action_required", "verify channel exists and bot has access") + return "", "", false + } + + switch { + case channelID != channelName: + channelDisplay = fmt.Sprintf("#%s (%s)", channelName, channelID) + case channelName != "" && channelName[0] == 'C': + channelDisplay = channelID + default: + channelDisplay = fmt.Sprintf("#%s (unresolved)", channelName) + } + + return channelID, channelDisplay, true +} + +// trackUserTagsForDMDelay tracks user tags in channel for DM delay logic. 
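+// It returns the map of Slack user IDs that were successfully tagged in the channel.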
+func (c *Coordinator) trackUserTagsForDMDelay( + ctx context.Context, workspaceID, channelID, channelDisplay, owner, repo string, prNumber int, + checkResult *turn.CheckResponse, +) map[string]bool { taggedUsers := make(map[string]bool) blockedUsers := c.extractBlockedUsersFromTurnclient(checkResult) + if len(blockedUsers) == 0 { + return taggedUsers + } + domain := c.configManager.Domain(owner) - if len(blockedUsers) > 0 { - // Record tags for blocked users synchronously to prevent race with DM sending - // Generous timeout: GitHub email lookup (~5-10s) + Slack API lookups (~5-10s) - lookupCtx, lookupCancel := context.WithTimeout(ctx, 30*time.Second) - defer lookupCancel() - - for _, githubUser := range blockedUsers { - slackUserID, err := c.userMapper.SlackHandle(lookupCtx, githubUser, owner, domain) - if err == nil && slackUserID != "" { - if c.notifier != nil && c.notifier.Tracker != nil { - c.notifier.Tracker.UpdateUserPRChannelTag(workspaceID, slackUserID, channelID, owner, repo, prNumber) - } - taggedUsers[slackUserID] = true - slog.Debug("tracked user tag in channel", - "workspace", workspaceID, - "github_user", githubUser, - "slack_user", slackUserID, - "channel", channelDisplay, - "channel_id", channelID, - "pr", fmt.Sprintf(prFormatString, owner, repo, prNumber)) + // Record tags synchronously to prevent race with DM sending + lookupCtx, lookupCancel := context.WithTimeout(ctx, 30*time.Second) + defer lookupCancel() + + for _, githubUser := range blockedUsers { + slackUserID, err := c.userMapper.SlackHandle(lookupCtx, githubUser, owner, domain) + if err == nil && slackUserID != "" { + if c.notifier != nil && c.notifier.Tracker != nil { + c.notifier.Tracker.UpdateUserPRChannelTag(workspaceID, slackUserID, channelID, owner, repo, prNumber) } + taggedUsers[slackUserID] = true + slog.Debug("tracked user tag in channel", + "workspace", workspaceID, + "github_user", githubUser, + "slack_user", slackUserID, + "channel", channelDisplay, + "channel_id", channelID, + "pr", fmt.Sprintf(prFormatString, owner, repo, prNumber)) } } - // Build what the message SHOULD be based on current PR state - // Then compare to what it IS - update if different - if !wasNewlyCreated { - domain := c.configManager.Domain(owner) - params := notify.MessageParams{ - CheckResult: checkResult, - Owner: owner, - Repo: repo, - PRNumber: prNumber, - Title: event.PullRequest.Title, - Author: event.PullRequest.User.Login, - HTMLURL: event.PullRequest.HTMLURL, - Domain: domain, - UserMapper: c.userMapper, - } - expectedText := notify.FormatChannelMessageBase(ctx, params) + notify.FormatNextActionsSuffix(ctx, params) + return taggedUsers +} - // Compare expected vs actual - update if different - if currentText != expectedText { - slog.Info("updating message - content changed", - "workspace", c.workspaceName, - logFieldPR, fmt.Sprintf(prFormatString, owner, repo, prNumber), - "channel", channelDisplay, - "channel_id", channelID, - "thread_ts", threadTS, - "pr_state", prState, - "old_state", oldState, - "current_message_preview", currentText[:min(100, len(currentText))], - "expected_message_preview", expectedText[:min(100, len(expectedText))]) - - if err := c.slack.UpdateMessage(ctx, channelID, threadTS, expectedText); err != nil { - slog.Error("failed to update PR message", - "workspace", c.workspaceName, - logFieldPR, fmt.Sprintf(prFormatString, owner, repo, prNumber), - "channel", channelDisplay, - "channel_id", channelID, - "thread_ts", threadTS, - "error", err, - "impact", 
"message_update_skipped_will_retry_via_polling", - "next_poll_in", "5m") - } else { - // Save updated thread info (cache + persist) - c.saveThread(ctx, owner, repo, prNumber, channelID, cache.ThreadInfo{ - ThreadTS: threadTS, - ChannelID: channelID, - LastState: prState, - MessageText: expectedText, - }) - slog.Info("successfully updated PR message", - "workspace", c.workspaceName, - logFieldPR, fmt.Sprintf(prFormatString, owner, repo, prNumber), - "channel", channelDisplay, - "channel_id", channelID, - "thread_ts", threadTS, - "pr_state", prState) - - // Also update DM messages for blocked users - c.updateDMMessagesForPR(ctx, prUpdateInfo{ - owner: owner, - repo: repo, - number: prNumber, - title: event.PullRequest.Title, - author: event.PullRequest.User.Login, - state: prState, - url: event.PullRequest.HTMLURL, - checkRes: checkResult, - }) - } - } else { - slog.Debug("message already matches expected content, no update needed", - "workspace", c.workspaceName, - logFieldPR, fmt.Sprintf(prFormatString, owner, repo, prNumber), - "channel", channelDisplay, - "thread_ts", threadTS, - "pr_state", prState) - } +// updateMessageIfNeeded builds the expected message and updates if different from current. +// +//nolint:revive // Function complexity justified by comprehensive message update logic +func (c *Coordinator) updateMessageIfNeeded(ctx context.Context, channelID, channelDisplay, threadTS, oldState, currentText, prState, owner, repo string, prNumber int, event struct { + Action string `json:"action"` + PullRequest struct { + HTMLURL string `json:"html_url"` + Title string `json:"title"` + CreatedAt time.Time `json:"created_at"` + User struct { + Login string `json:"login"` + } `json:"user"` + Number int `json:"number"` + } `json:"pull_request"` + Number int `json:"number"` +}, checkResult *turn.CheckResponse, +) { + domain := c.configManager.Domain(owner) + params := notify.MessageParams{ + CheckResult: checkResult, + Owner: owner, + Repo: repo, + PRNumber: prNumber, + Title: event.PullRequest.Title, + Author: event.PullRequest.User.Login, + HTMLURL: event.PullRequest.HTMLURL, + Domain: domain, + UserMapper: c.userMapper, } + expectedText := notify.FormatChannelMessageBase(ctx, params) + notify.FormatNextActionsSuffix(ctx, params) - slog.Info("successfully processed PR in channel", + if currentText == expectedText { + slog.Debug("message already matches expected content, no update needed", + "workspace", c.workspaceName, + logFieldPR, fmt.Sprintf(prFormatString, owner, repo, prNumber), + "channel", channelDisplay, + "thread_ts", threadTS, + "pr_state", prState) + return + } + + slog.Info("updating message - content changed", "workspace", c.workspaceName, logFieldPR, fmt.Sprintf(prFormatString, owner, repo, prNumber), "channel", channelDisplay, "channel_id", channelID, "thread_ts", threadTS, - "action", event.Action, "pr_state", prState, - "had_state_change", oldState != "" && oldState != prState, - "users_tagged", len(taggedUsers)) + "old_state", oldState, + "current_message_preview", currentText[:min(100, len(currentText))], + "expected_message_preview", expectedText[:min(100, len(expectedText))]) - return taggedUsers + if err := c.slack.UpdateMessage(ctx, channelID, threadTS, expectedText); err != nil { + slog.Error("failed to update PR message", + "workspace", c.workspaceName, + logFieldPR, fmt.Sprintf(prFormatString, owner, repo, prNumber), + "channel", channelDisplay, + "channel_id", channelID, + "thread_ts", threadTS, + "error", err, + "impact", 
"message_update_skipped_will_retry_via_polling", + "next_poll_in", "5m") + return + } + + // Save updated thread info + c.saveThread(ctx, owner, repo, prNumber, channelID, cache.ThreadInfo{ + ThreadTS: threadTS, + ChannelID: channelID, + LastState: prState, + MessageText: expectedText, + }) + slog.Info("successfully updated PR message", + "workspace", c.workspaceName, + logFieldPR, fmt.Sprintf(prFormatString, owner, repo, prNumber), + "channel", channelDisplay, + "channel_id", channelID, + "thread_ts", threadTS, + "pr_state", prState) + + // Also update DM messages for blocked users + c.updateDMMessagesForPR(ctx, prUpdateInfo{ + owner: owner, + repo: repo, + number: prNumber, + title: event.PullRequest.Title, + author: event.PullRequest.User.Login, + state: prState, + url: event.PullRequest.HTMLURL, + checkRes: checkResult, + }) } // handlePullRequestFromSprinkler handles pull request events from sprinkler by fetching PR data from GitHub API. diff --git a/pkg/notify/format_dm_test.go b/pkg/notify/format_dm_test.go new file mode 100644 index 0000000..a40ff08 --- /dev/null +++ b/pkg/notify/format_dm_test.go @@ -0,0 +1,153 @@ +package notify + +import ( + "strings" + "testing" + + "github.com/codeGROOVE-dev/turnclient/pkg/turn" +) + +// TestFormatDMMessage tests the formatDMMessage function for all PR states. +func TestFormatDMMessage(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + pr PRInfo + expectedAction string + shouldContain []string + }{ + { + name: "newly_published_state", + pr: PRInfo{ + Owner: "testorg", + Repo: "testrepo", + Number: 42, + Title: "Test PR", + Author: "testuser", + HTMLURL: "https://github.com/testorg/testrepo/pull/42", + State: "newly_published", + WorkflowState: "", + }, + expectedAction: "newly published", + shouldContain: []string{"Test PR", "testrepo#42", "testuser", "newly published"}, + }, + { + name: "tests_broken_state", + pr: PRInfo{ + Owner: "testorg", + Repo: "testrepo", + Number: 42, + Title: "Fix bug", + Author: "developer", + HTMLURL: "https://github.com/testorg/testrepo/pull/42", + State: "tests_broken", + WorkflowState: "", + }, + expectedAction: "fix tests", + shouldContain: []string{"Fix bug", "testrepo#42", "developer", "fix tests"}, + }, + { + name: "awaiting_review_state", + pr: PRInfo{ + Owner: "testorg", + Repo: "testrepo", + Number: 42, + Title: "Add feature", + Author: "contributor", + HTMLURL: "https://github.com/testorg/testrepo/pull/42", + State: "awaiting_review", + WorkflowState: "", + }, + expectedAction: "review", + shouldContain: []string{"Add feature", "testrepo#42", "contributor", "review"}, + }, + { + name: "changes_requested_state", + pr: PRInfo{ + Owner: "testorg", + Repo: "testrepo", + Number: 42, + Title: "Update docs", + Author: "writer", + HTMLURL: "https://github.com/testorg/testrepo/pull/42", + State: "changes_requested", + WorkflowState: "", + }, + expectedAction: "address feedback", + shouldContain: []string{"Update docs", "testrepo#42", "writer", "address feedback"}, + }, + { + name: "approved_state", + pr: PRInfo{ + Owner: "testorg", + Repo: "testrepo", + Number: 42, + Title: "Ready to merge", + Author: "maintainer", + HTMLURL: "https://github.com/testorg/testrepo/pull/42", + State: "approved", + WorkflowState: "", + }, + expectedAction: "merge", + shouldContain: []string{"Ready to merge", "testrepo#42", "maintainer", "merge"}, + }, + { + name: "default_unknown_state", + pr: PRInfo{ + Owner: "testorg", + Repo: "testrepo", + Number: 42, + Title: "Unknown state PR", + Author: "someone", + HTMLURL: 
"https://github.com/testorg/testrepo/pull/42", + State: "some_unknown_state", + WorkflowState: "", + }, + expectedAction: "attention needed", + shouldContain: []string{"Unknown state PR", "testrepo#42", "someone", "attention needed"}, + }, + { + name: "with_workflow_state", + pr: PRInfo{ + Owner: "testorg", + Repo: "testrepo", + Number: 42, + Title: "Workflow PR", + Author: "dev", + HTMLURL: "https://github.com/testorg/testrepo/pull/42", + State: "awaiting_review", + WorkflowState: "tests_broken", + NextAction: map[string]turn.Action{ + "alice": {Kind: turn.ActionFixTests}, + }, + }, + expectedAction: "review", + shouldContain: []string{"Workflow PR", "testrepo#42", "dev", "review"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + message, action := formatDMMessage(tt.pr) + + if action != tt.expectedAction { + t.Errorf("formatDMMessage() action = %v, want %v", action, tt.expectedAction) + } + + for _, expected := range tt.shouldContain { + if !strings.Contains(message, expected) { + t.Errorf("formatDMMessage() message = %v, should contain %v", message, expected) + } + } + + // Verify message format structure + if !strings.Contains(message, "→") { + t.Error("formatDMMessage() message should contain '→' separator") + } + if !strings.Contains(message, "·") { + t.Error("formatDMMessage() message should contain '·' separator") + } + }) + } +} diff --git a/pkg/notify/notify.go b/pkg/notify/notify.go index a2b7fd8..35c716b 100644 --- a/pkg/notify/notify.go +++ b/pkg/notify/notify.go @@ -665,8 +665,6 @@ func PrefixForAnalysis(workflowState string, nextActions map[string]turn.Action) // NotifyUser sends a smart notification to a user about a PR using the configured logic. // Implements delayed DM logic: if user was tagged in channel, delay by configured time. // If user is not in channel where tagged, send DM immediately. -// -//nolint:revive,maintidx // function length acceptable for complex notification logic func (m *Manager) NotifyUser(ctx context.Context, workspaceID, userID, channelID, channelName string, pr PRInfo) error { slog.Info("evaluating notification for user", "user", userID, @@ -696,161 +694,203 @@ func (m *Manager) NotifyUser(ctx context.Context, workspaceID, userID, channelID "last_channel_tag", tagInfo.Timestamp, "tag_channel_id", tagInfo.ChannelID) - // Check if user is active on Slack. - isActive := slackClient.IsUserActive(ctx, userID) + // Check early exit conditions + if m.shouldSkipForInactiveUser(ctx, slackClient, userID, pr) { + return nil + } + + if m.shouldSkipForAntiSpam(userID, lastDM, pr) { + return nil + } + + // Evaluate whether to delay DM based on channel tag + shouldQueue, shouldSkip := m.evaluateDMDelay(ctx, slackClient, tagInfo, userID, channelName, pr) + if shouldSkip { + return nil + } + if shouldQueue { + return m.queueDM(ctx, tagInfo, userID, channelName, workspaceID, pr) + } + + // Prepare and send the DM + message, action := formatDMMessage(pr) + return m.sendOrUpdateDM(ctx, slackClient, userID, workspaceID, message, action, pr) +} + +// shouldSkipForInactiveUser checks if the user is active on Slack. 
+func (*Manager) shouldSkipForInactiveUser(ctx context.Context, slackClient SlackClient, userID string, pr PRInfo) bool { + active := slackClient.IsUserActive(ctx, userID) slog.Debug("checking user activity status", "user", userID, - "is_active", isActive) + "is_active", active) - if !isActive { + if !active { slog.Info("deferring notification - user not active on Slack", "user", userID, "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), "will_retry_when_active", true) - return nil + return true } + return false +} - // Avoid spamming - don't send DM if we recently sent one - timeSinceLastDM := time.Since(lastDM) - antiSpamDelay := 1 * time.Minute +// shouldSkipForAntiSpam checks if we recently sent a DM to avoid spamming. +func (*Manager) shouldSkipForAntiSpam(userID string, lastDM time.Time, pr PRInfo) bool { + since := time.Since(lastDM) + delay := 1 * time.Minute slog.Debug("checking anti-spam protection", "user", userID, - "time_since_last_dm", timeSinceLastDM, - "anti_spam_delay", antiSpamDelay, - "will_block", timeSinceLastDM < antiSpamDelay) + "time_since_last_dm", since, + "anti_spam_delay", delay, + "will_block", since < delay) - if timeSinceLastDM < antiSpamDelay { + if since < delay { slog.Info("skipping DM - anti-spam protection active", "user", userID, "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), - "time_since_last_dm", timeSinceLastDM, - "anti_spam_delay", antiSpamDelay, - "time_until_next_allowed", antiSpamDelay-timeSinceLastDM) - return nil + "time_since_last_dm", since, + "anti_spam_delay", delay, + "time_until_next_allowed", delay-since) + return true } + return false +} - // Check if we should delay this DM based on channel tag timing - if !tagInfo.Timestamp.IsZero() { - // User was tagged in a channel - use the ACTUAL channel they were tagged in - taggedChannelID := tagInfo.ChannelID +// evaluateDMDelay determines if a DM should be queued or skipped based on channel tag timing. +// Returns (shouldQueue, shouldSkip). 
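+// A zero tag timestamp means the user was never tagged in a channel, so the DM is sent without delay.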
+func (m *Manager) evaluateDMDelay( + ctx context.Context, slackClient SlackClient, tagInfo TagInfo, userID, channelName string, pr PRInfo, +) (shouldQueue, shouldSkip bool) { + if tagInfo.Timestamp.IsZero() { + slog.Debug("no channel tag found - sending DM without delay", + "user", userID, + "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number)) + return false, false + } - // Check if they're in that specific channel - userInChannel := slackClient.IsUserInChannel(ctx, taggedChannelID, userID) + ch := tagInfo.ChannelID + inChannel := slackClient.IsUserInChannel(ctx, ch, userID) - // Get configured delay for this channel/org (we need channel name for config lookup) - // If channelName wasn't provided, we can't look up config - use default - delayMins := defaultReminderDMDelayMinutes - if channelName != "" { - delayMins = m.configManager.ReminderDMDelay(pr.Owner, channelName) - } + // Get configured delay for this channel/org + mins := defaultReminderDMDelayMinutes + if channelName != "" { + mins = m.configManager.ReminderDMDelay(pr.Owner, channelName) + } - slog.Debug("evaluating follow-up reminder delay", + slog.Debug("evaluating follow-up reminder delay", + "user", userID, + "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), + "user_in_channel", inChannel, + "channel_tag_time", tagInfo.Timestamp, + "tagged_channel_id", ch, + "configured_delay_mins", mins) + + if mins == 0 { + slog.Info("follow-up reminders disabled for this channel - skipping DM", "user", userID, "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), - "user_in_channel", userInChannel, - "channel_tag_time", tagInfo.Timestamp, - "tagged_channel_id", taggedChannelID, - "configured_delay_mins", delayMins) - - if delayMins == 0 { - slog.Info("follow-up reminders disabled for this channel - skipping DM", - "user", userID, - "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), - "channel", channelName, - "channel_id", taggedChannelID) - return nil - } + "channel", channelName, + "channel_id", ch) + return false, true + } - if userInChannel { - // User is in the channel - apply delay - timeSinceTag := time.Since(tagInfo.Timestamp) - delayDuration := time.Duration(delayMins) * time.Minute - - if timeSinceTag < delayDuration { - // Queue this DM to be sent later - sendAfter := tagInfo.Timestamp.Add(delayDuration) - - // Serialize NextAction map to JSON - nextActionsJSON, err := json.Marshal(pr.NextAction) - if err != nil { - slog.Error("failed to serialize next actions for pending DM", - "user", userID, - "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), - "error", err) - nextActionsJSON = []byte("{}") - } - - pendingDM := state.PendingDM{ - ID: uuid.New().String(), - WorkspaceID: workspaceID, - UserID: userID, - PROwner: pr.Owner, - PRRepo: pr.Repo, - PRNumber: pr.Number, - PRURL: pr.HTMLURL, - PRTitle: pr.Title, - PRAuthor: pr.Author, - PRState: pr.State, - WorkflowState: pr.WorkflowState, - NextActions: string(nextActionsJSON), - ChannelID: taggedChannelID, - ChannelName: channelName, - QueuedAt: time.Now(), - SendAfter: sendAfter, - } - - if err := m.store.QueuePendingDM(ctx, &pendingDM); err != nil { - slog.Error("failed to queue pending DM", - "user", userID, - "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), - "error", err) - return fmt.Errorf("failed to queue pending DM: %w", err) - } - - slog.Info("queued DM for later delivery", - "user", userID, - "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), - "channel_id", taggedChannelID, - "time_since_tag", 
timeSinceTag, - "configured_delay", delayDuration, - "send_after", sendAfter, - "time_until_dm", delayDuration-timeSinceTag, - "dm_id", pendingDM.ID) - return nil - } + if inChannel { + since := time.Since(tagInfo.Timestamp) + delay := time.Duration(mins) * time.Minute - slog.Info("sending delayed follow-up DM - user was tagged but delay elapsed", - "user", userID, - "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), - "channel_id", taggedChannelID, - "time_since_tag", timeSinceTag, - "configured_delay", delayDuration) - } else { - // User is NOT in the channel - send DM immediately - slog.Info("sending immediate DM - user not in channel where tagged", - "user", userID, - "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), - "channel_id", taggedChannelID) + if since < delay { + // Should queue for later delivery + return true, false } + + slog.Info("sending delayed follow-up DM - user was tagged but delay elapsed", + "user", userID, + "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), + "channel_id", ch, + "time_since_tag", since, + "configured_delay", delay) } else { - slog.Debug("no channel tag found - sending DM without delay", + slog.Info("sending immediate DM - user not in channel where tagged", "user", userID, - "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number)) + "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), + "channel_id", ch) } - // Format notification message using same style as channel messages + return false, false +} + +// queueDM queues a DM to be sent after the configured delay. +func (m *Manager) queueDM(ctx context.Context, tagInfo TagInfo, userID, channelName, workspaceID string, pr PRInfo) error { + mins := defaultReminderDMDelayMinutes + if channelName != "" { + mins = m.configManager.ReminderDMDelay(pr.Owner, channelName) + } + + delay := time.Duration(mins) * time.Minute + sendAfter := tagInfo.Timestamp.Add(delay) + since := time.Since(tagInfo.Timestamp) + + // Serialize NextAction map to JSON + actionsJSON, err := json.Marshal(pr.NextAction) + if err != nil { + slog.Error("failed to serialize next actions for pending DM", + "user", userID, + "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), + "error", err) + actionsJSON = []byte("{}") + } + + dm := state.PendingDM{ + ID: uuid.New().String(), + WorkspaceID: workspaceID, + UserID: userID, + PROwner: pr.Owner, + PRRepo: pr.Repo, + PRNumber: pr.Number, + PRURL: pr.HTMLURL, + PRTitle: pr.Title, + PRAuthor: pr.Author, + PRState: pr.State, + WorkflowState: pr.WorkflowState, + NextActions: string(actionsJSON), + ChannelID: tagInfo.ChannelID, + ChannelName: channelName, + QueuedAt: time.Now(), + SendAfter: sendAfter, + } + + if err := m.store.QueuePendingDM(ctx, &dm); err != nil { + slog.Error("failed to queue pending DM", + "user", userID, + "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), + "error", err) + return fmt.Errorf("failed to queue pending DM: %w", err) + } + + slog.Info("queued DM for later delivery", + "user", userID, + "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), + "channel_id", tagInfo.ChannelID, + "time_since_tag", since, + "configured_delay", delay, + "send_after", sendAfter, + "time_until_dm", delay-since, + "dm_id", dm.ID) + return nil +} + +// formatDMMessage formats the notification message and returns it along with the action string. 
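+// The action string is a short call to action derived from pr.State, such as "review", "merge", or "fix tests".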
+func formatDMMessage(pr PRInfo) (message, action string) { // Determine emoji prefix based on workflow state and next actions var prefix string if pr.WorkflowState != "" { prefix = PrefixForAnalysis(pr.WorkflowState, pr.NextAction) } else { - // Fallback to state if workflow state not available prefix = PrefixForState(pr.State) } - // Format: :emoji: Title · author → action - var action string + // Determine action text based on state switch pr.State { case "newly_published": action = "newly published" @@ -866,8 +906,8 @@ func (m *Manager) NotifyUser(ctx context.Context, workspaceID, userID, channelID action = "attention needed" } - // Use same compact format as channel messages - message := fmt.Sprintf( + // Format: :emoji: Title · author → action + message = fmt.Sprintf( "%s %s <%s|%s#%d> · %s → %s", prefix, pr.Title, @@ -878,6 +918,11 @@ func (m *Manager) NotifyUser(ctx context.Context, workspaceID, userID, channelID action, ) + return message, action +} + +// sendOrUpdateDM attempts to update an existing DM or sends a new one. +func (m *Manager) sendOrUpdateDM(ctx context.Context, slackClient SlackClient, userID, workspaceID, message, action string, pr PRInfo) error { slog.Info("sending DM notification to user", "user", userID, "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), @@ -885,11 +930,9 @@ func (m *Manager) NotifyUser(ctx context.Context, workspaceID, userID, channelID "action_required", action, "message", message) - // Try to update existing DM first (if one exists in our state store) - // UpdateDMMessage returns ErrNoDMToUpdate if no DM exists + // Try to update existing DM first updateErr := slackClient.UpdateDMMessage(ctx, userID, pr.HTMLURL, message) if updateErr == nil { - // Successfully updated existing DM slog.Info("successfully updated existing DM with new PR state", "user", userID, "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), @@ -898,9 +941,7 @@ func (m *Manager) NotifyUser(ctx context.Context, workspaceID, userID, channelID return nil } - // Check if it's because no DM exists (expected case for first notification) if !errors.Is(updateErr, slack.ErrNoDMToUpdate) { - // Actual error occurred during update - log warning but continue to send new DM slog.Warn("failed to update existing DM, will send new one", "user", userID, "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), @@ -912,7 +953,7 @@ func (m *Manager) NotifyUser(ctx context.Context, workspaceID, userID, channelID "pr", fmt.Sprintf("%s/%s#%d", pr.Owner, pr.Repo, pr.Number), "reason", "DM not in state store or too old") - // Check if we recently sent a DM about this PR (prevents duplicates during rolling deployments) + // Check for recent DMs to prevent duplicates hasRecent, err := slackClient.HasRecentDMAboutPR(ctx, userID, pr.HTMLURL) if err != nil { slog.Warn("failed to check for recent DM, will send anyway to avoid false negative", @@ -931,7 +972,7 @@ func (m *Manager) NotifyUser(ctx context.Context, workspaceID, userID, channelID return nil } - // Send DM to user. + // Send new DM dmChannelID, messageTS, err := slackClient.SendDirectMessage(ctx, userID, message) if err != nil { slog.Error("failed to send DM notification", @@ -942,10 +983,10 @@ func (m *Manager) NotifyUser(ctx context.Context, workspaceID, userID, channelID return fmt.Errorf("failed to send notification: %w", err) } - // Update last DM notification time. 
+ // Update tracking m.Tracker.UpdateDMNotification(workspaceID, userID) - // Save DM message info for future updates + // Save DM info for future updates if err := slackClient.SaveDMMessageInfo(ctx, userID, pr.HTMLURL, dmChannelID, messageTS, message); err != nil { slog.Warn("failed to save DM message info", "user", userID, diff --git a/pkg/slack/slack.go b/pkg/slack/slack.go index c17f9c4..1c0d500 100644 --- a/pkg/slack/slack.go +++ b/pkg/slack/slack.go @@ -1266,31 +1266,7 @@ func (c *Client) ResolveChannelID(ctx context.Context, channelName string) strin slog.Debug("channel not in cache, fetching from Slack API", "channel", channelName) // Try to find the channel - first try public and private channels - var channels []slack.Channel - var cursor string - err := retry.Do( - func() error { - var err error - channels, cursor, err = c.api.GetConversationsContext(ctx, &slack.GetConversationsParameters{ - Types: []string{"public_channel", "private_channel"}, - Limit: 200, - }) - if err != nil { - if isRateLimitError(err) { - return err // Retry - } - return retry.Unrecoverable(err) // Don't retry permission errors - } - return nil - }, - retry.Attempts(5), - retry.Delay(c.getRetryDelay()), - retry.MaxDelay(2*time.Minute), - retry.DelayType(retry.BackOffDelay), - retry.MaxJitter(time.Second), - retry.LastErrorOnly(true), - retry.Context(ctx), - ) + channels, cursor, err := c.fetchConversationsWithRetry(ctx, []string{"public_channel", "private_channel"}, "") if err != nil { slog.Warn("failed to get public+private conversations, trying public only", "error", err, @@ -1298,29 +1274,7 @@ func (c *Client) ResolveChannelID(ctx context.Context, channelName string) strin "channel", channelName) // Fallback: try public channels only (might not have private channel permissions) - err = retry.Do( - func() error { - var err error - channels, cursor, err = c.api.GetConversationsContext(ctx, &slack.GetConversationsParameters{ - Types: []string{"public_channel"}, - Limit: 200, - }) - if err != nil { - if isRateLimitError(err) { - return err // Retry - } - return retry.Unrecoverable(err) - } - return nil - }, - retry.Attempts(5), - retry.Delay(c.getRetryDelay()), - retry.MaxDelay(2*time.Minute), - retry.DelayType(retry.BackOffDelay), - retry.MaxJitter(time.Second), - retry.LastErrorOnly(true), - retry.Context(ctx), - ) + channels, cursor, err = c.fetchConversationsWithRetry(ctx, []string{"public_channel"}, "") if err != nil { slog.Error("failed to get conversations for channel resolution", "error", err, @@ -1335,56 +1289,21 @@ func (c *Client) ResolveChannelID(ctx context.Context, channelName string) strin slog.Debug("successfully retrieved public channels only", "channel", channelName, "count", len(channels)) } - // Search through channels - for i := range channels { - channel := &channels[i] - if channel.Name == channelName { - slog.Debug("resolved channel name to ID", "name", channelName, "id", channel.ID) - // Cache the successful resolution for 1 hour (channels are stable, invalidated by events) - c.cache.set(cacheKey, channel.ID, time.Hour) - return channel.ID - } + // Search through initial page + if resolvedID, found := c.searchChannelsForName(channels, channelName, cacheKey); found { + return resolvedID } // If we have more pages, search them too for cursor != "" { - err = retry.Do( - func() error { - var err error - channels, cursor, err = c.api.GetConversationsContext(ctx, &slack.GetConversationsParameters{ - Types: []string{"public_channel", "private_channel"}, - Limit: 200, - Cursor: cursor, - }) 
- if err != nil { - if isRateLimitError(err) { - return err // Retry - } - return retry.Unrecoverable(err) - } - return nil - }, - retry.Attempts(5), - retry.Delay(c.getRetryDelay()), - retry.MaxDelay(2*time.Minute), - retry.DelayType(retry.BackOffDelay), - retry.MaxJitter(time.Second), - retry.LastErrorOnly(true), - retry.Context(ctx), - ) + channels, cursor, err = c.fetchConversationsWithRetry(ctx, []string{"public_channel", "private_channel"}, cursor) if err != nil { slog.Warn("failed to get additional conversations for channel resolution", "error", err) break } - for i := range channels { - channel := &channels[i] - if channel.Name == channelName { - slog.Debug("resolved channel name to ID", "name", channelName, "id", channel.ID) - // Cache the successful resolution for 1 hour (channels are stable, invalidated by events) - c.cache.set(cacheKey, channel.ID, time.Hour) - return channel.ID - } + if resolvedID, found := c.searchChannelsForName(channels, channelName, cacheKey); found { + return resolvedID } } @@ -1398,6 +1317,53 @@ func (c *Client) ResolveChannelID(ctx context.Context, channelName string) strin return channelName // Return original if not found } +// fetchConversationsWithRetry fetches conversations with retry logic. +func (c *Client) fetchConversationsWithRetry(ctx context.Context, types []string, cursor string) ([]slack.Channel, string, error) { + var channels []slack.Channel + var nextCursor string + + err := retry.Do( + func() error { + var err error + channels, nextCursor, err = c.api.GetConversationsContext(ctx, &slack.GetConversationsParameters{ + Types: types, + Limit: 200, + Cursor: cursor, + }) + if err != nil { + if isRateLimitError(err) { + return err // Retry + } + return retry.Unrecoverable(err) // Don't retry permission errors + } + return nil + }, + retry.Attempts(5), + retry.Delay(c.getRetryDelay()), + retry.MaxDelay(2*time.Minute), + retry.DelayType(retry.BackOffDelay), + retry.MaxJitter(time.Second), + retry.LastErrorOnly(true), + retry.Context(ctx), + ) + + return channels, nextCursor, err +} + +// searchChannelsForName searches a list of channels for a matching name and caches the result. +// Returns (channelID, found). +func (c *Client) searchChannelsForName(channels []slack.Channel, channelName, cacheKey string) (string, bool) { + for i := range channels { + if channels[i].Name == channelName { + slog.Debug("resolved channel name to ID", "name", channelName, "id", channels[i].ID) + // Cache the successful resolution for 1 hour (channels are stable, invalidated by events) + c.cache.set(cacheKey, channels[i].ID, time.Hour) + return channels[i].ID, true + } + } + return "", false +} + // IsUserInChannel checks if a specific user is a member of the specified channel. func (c *Client) IsUserInChannel(ctx context.Context, channelID, userID string) bool { // Check cache first