80 changes: 80 additions & 0 deletions src/__tests__/unit/checks/moderation-secret-keys.test.ts
@@ -67,6 +67,86 @@ describe('moderation guardrail', () => {
expect(result.tripwireTriggered).toBe(false);
expect(result.info?.error).toBe('Moderation API call failed');
});

it('uses context client when available', async () => {
// Track whether context client was used
let contextClientUsed = false;
const contextCreateMock = vi.fn().mockImplementation(async () => {
contextClientUsed = true;
Comment on lines +74 to +75
Copilot AI, Oct 31, 2025:
[nitpick] The contextClientUsed flag is unnecessary. You can check if the mock was called using expect(contextCreateMock).toHaveBeenCalled() instead of tracking this manually.
(A sketch of this simplification follows the test below.)
return {
results: [
{
categories: {
[Category.HATE]: false,
[Category.VIOLENCE]: false,
},
},
],
};
});

// Create a context with a guardrailLlm client
// We need to import OpenAI to create a proper instance
const OpenAI = (await import('openai')).default;
const contextClient = new OpenAI({ apiKey: 'test-context-key' });
contextClient.moderations = {
create: contextCreateMock,
} as unknown as typeof contextClient.moderations;

const ctx = { guardrailLlm: contextClient };
const cfg = ModerationConfig.parse({ categories: [Category.HATE] });
const result = await moderationCheck(ctx, 'test text', cfg);

// Verify the context client was used
expect(contextClientUsed).toBe(true);
expect(contextCreateMock).toHaveBeenCalledWith({
model: 'omni-moderation-latest',
input: 'test text',
safety_identifier: 'openai-guardrails-js',
});
expect(result.tripwireTriggered).toBe(false);
});
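The review comment above can be satisfied without the manual flag; here is a minimal sketch of an assertion-only variant, assuming the same imports, mocks, and moderationCheck wiring already used in this test file:

it('uses context client when available (assertion-only variant)', async () => {
  // vi.fn() records its own calls, so no contextClientUsed flag is needed.
  const contextCreateMock = vi.fn().mockResolvedValue({
    results: [
      {
        categories: {
          [Category.HATE]: false,
          [Category.VIOLENCE]: false,
        },
      },
    ],
  });

  const OpenAI = (await import('openai')).default;
  const contextClient = new OpenAI({ apiKey: 'test-context-key' });
  contextClient.moderations = {
    create: contextCreateMock,
  } as unknown as typeof contextClient.moderations;

  const ctx = { guardrailLlm: contextClient };
  const cfg = ModerationConfig.parse({ categories: [Category.HATE] });
  const result = await moderationCheck(ctx, 'test text', cfg);

  // toHaveBeenCalledWith covers both "was the context client used" and "with what arguments".
  expect(contextCreateMock).toHaveBeenCalledWith({
    model: 'omni-moderation-latest',
    input: 'test text',
    safety_identifier: 'openai-guardrails-js',
  });
  expect(result.tripwireTriggered).toBe(false);
});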

it('falls back to default client for third-party providers', async () => {
// Track whether fallback client was used
let fallbackUsed = false;

// The default mock from vi.mock will be used for the fallback
createMock.mockImplementation(async () => {
fallbackUsed = true;
Comment on lines +115 to +116
Copilot AI, Oct 31, 2025:
[nitpick] The fallbackUsed flag is unnecessary. You can verify fallback behavior by checking that createMock was called, which already indicates the fallback client was used.
(A sketch of this simplification follows the test below.)
return {
results: [
{
categories: {
[Category.HATE]: false,
},
},
],
};
});

// Create a context client that simulates a third-party provider
// When moderation is called, it should raise a 404 error
const contextCreateMock = vi.fn().mockRejectedValue({
status: 404,
message: '404 page not found',
});

const OpenAI = (await import('openai')).default;
const thirdPartyClient = new OpenAI({ apiKey: 'third-party-key', baseURL: 'https://localhost:8080/v1' });
thirdPartyClient.moderations = {
create: contextCreateMock,
} as unknown as typeof thirdPartyClient.moderations;

const ctx = { guardrailLlm: thirdPartyClient };
const cfg = ModerationConfig.parse({ categories: [Category.HATE] });
const result = await moderationCheck(ctx, 'test text', cfg);

// Verify the fallback client was used (not the third-party one)
expect(contextCreateMock).toHaveBeenCalled();
expect(fallbackUsed).toBe(true);
expect(result.tripwireTriggered).toBe(false);
});
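The same simplification applies here; a minimal sketch of the assertions, assuming the third-party client, ctx, and cfg are set up exactly as in the test above and only the fallbackUsed bookkeeping is dropped:

// Let the module-level createMock resolve normally; its call record is the evidence
// that the fallback client handled the request.
createMock.mockResolvedValue({
  results: [{ categories: { [Category.HATE]: false } }],
});

const result = await moderationCheck(ctx, 'test text', cfg);

expect(contextCreateMock).toHaveBeenCalled(); // the third-party endpoint was tried first
expect(createMock).toHaveBeenCalled();        // the default client picked up the fallback
expect(result.tripwireTriggered).toBe(false);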
});

describe('secret key guardrail', () => {
1 change: 0 additions & 1 deletion src/checks/moderation.ts
@@ -156,7 +156,6 @@ export const moderationCheck: CheckFn<ModerationContext, string, ModerationConfig
try {
resp = await callModerationAPI(client, data);
} catch (error) {

// Moderation endpoint doesn't exist on this provider (e.g., third-party)
// Fall back to the OpenAI client
if (isNotFoundError(error)) {
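The hunk stops at the isNotFoundError check, so the fallback body itself is not shown; below is a minimal sketch of how that flow could look, assuming a hypothetical default OpenAI client is constructed for the retry (the repository's actual implementation may differ):

// Sketch only; callModerationAPI and isNotFoundError are the helpers referenced in the diff,
// while defaultOpenAI is a hypothetical fallback client using default credentials.
let resp;
try {
  resp = await callModerationAPI(client, data);
} catch (error) {
  // Moderation endpoint doesn't exist on this provider (e.g., third-party);
  // fall back to the OpenAI client.
  if (isNotFoundError(error)) {
    const defaultOpenAI = new OpenAI();
    resp = await callModerationAPI(defaultOpenAI, data);
  } else {
    throw error;
  }
}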