Commit 5722a35

Dev AgentRader authored and committed
remove embedding moderation
1 parent 64b3ac9 commit 5722a35

File tree

3 files changed: +13 −13 lines changed

aigateway/component/moderation.go

Lines changed: 6 additions & 6 deletions
@@ -26,7 +26,7 @@ const (
 	// cache ttl
 	cacheTTL = 24 * time.Hour
 	// moderation cache prefix
-	moderationCachePrefix = "moderation:"
+	moderationCachePrpmptPrefix = "moderation:prompt:"
 )
 
 type Moderation interface {
@@ -94,7 +94,7 @@ func splitContentIntoChunksByWindow(content string) []string {
 func (modImpl *moderationImpl) checkSingleChunk(ctx context.Context, content, key string) (*rpc.CheckResult, error) {
 	if modImpl.cacheClient != nil {
 		chunkHash := md5.Sum([]byte(content))
-		cacheKey := moderationCachePrefix + fmt.Sprintf("%x", chunkHash)
+		cacheKey := moderationCachePrpmptPrefix + fmt.Sprintf("%x", chunkHash)
 		cached, err := modImpl.cacheClient.Get(ctx, cacheKey)
 		if err == nil {
 			var result rpc.CheckResult
@@ -112,7 +112,7 @@ func (modImpl *moderationImpl) checkSingleChunk(ctx context.Context, content, ke
 
 	if modImpl.cacheClient != nil {
 		// Cache the result for the single chunk
-		cacheKey := moderationCachePrefix + content
+		cacheKey := moderationCachePrpmptPrefix + content
 		resultBytes, err := json.Marshal(result)
 		if err == nil {
 			err := modImpl.cacheClient.SetEx(ctx, cacheKey, string(resultBytes), cacheTTL)
@@ -143,7 +143,7 @@ func (modImpl *moderationImpl) checkBuffer(
 	// cache each chunk in the current buffer
 	for _, chunk := range currentBufferChunks {
 		chunkHash := md5.Sum([]byte(chunk))
-		cacheKey := moderationCachePrefix + fmt.Sprintf("%x", chunkHash)
+		cacheKey := moderationCachePrpmptPrefix + fmt.Sprintf("%x", chunkHash)
 		resultBytes, err := json.Marshal(result)
 		if err == nil {
 			err := modImpl.cacheClient.SetEx(ctx, cacheKey, string(resultBytes), cacheTTL)
@@ -236,7 +236,7 @@ func (modImpl *moderationImpl) checkLLMPrompt(ctx context.Context, content, key
 	// Check if chunk is in cache
 	if modImpl.cacheClient != nil {
 		chunkHash := md5.Sum([]byte(chunk))
-		cacheKey := moderationCachePrefix + fmt.Sprintf("%x", chunkHash)
+		cacheKey := moderationCachePrpmptPrefix + fmt.Sprintf("%x", chunkHash)
 		cached, err := modImpl.cacheClient.Get(ctx, cacheKey)
 		if err == nil {
 			var result rpc.CheckResult
@@ -265,7 +265,7 @@ func (modImpl *moderationImpl) checkLLMPrompt(ctx context.Context, content, key
 	for _, chunk := range unCheckedChunks {
 		if modImpl.cacheClient != nil {
 			chunkHash := md5.Sum([]byte(chunk))
-			cacheKey := moderationCachePrefix + fmt.Sprintf("%x", chunkHash)
+			cacheKey := moderationCachePrpmptPrefix + fmt.Sprintf("%x", chunkHash)
			cached, err := modImpl.cacheClient.Get(ctx, cacheKey)
 			if err == nil {
 				var result rpc.CheckResult
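
All six hunks in this file make the same substitution: the shared moderationCachePrefix ("moderation:") becomes the prompt-specific moderationCachePrpmptPrefix ("moderation:prompt:"), presumably so prompt-moderation cache entries get their own key namespace now that embedding moderation is removed. The sketch below condenses the read-through caching pattern the hunks share into one helper; the cacheClient interface shape and the lookupOrCheck helper are assumptions inferred from the calls visible in the diff, not code from this repository.

// Minimal sketch, assuming a cache client with the Get/SetEx signatures seen
// in the diff. CheckResult mirrors the rpc.CheckResult fields used by the
// tests; lookupOrCheck is a hypothetical helper, not repository code.
package moderation

import (
	"context"
	"crypto/md5"
	"encoding/json"
	"fmt"
	"time"
)

const (
	cacheTTL                    = 24 * time.Hour
	moderationCachePrpmptPrefix = "moderation:prompt:"
)

type CheckResult struct {
	IsSensitive bool
	Reason      string
}

type cacheClient interface {
	Get(ctx context.Context, key string) (string, error)
	SetEx(ctx context.Context, key, value string, ttl time.Duration) error
}

// lookupOrCheck hashes the chunk, builds the namespaced key, returns a cached
// verdict when one exists, and otherwise runs checkFn and caches its result
// for cacheTTL.
func lookupOrCheck(ctx context.Context, c cacheClient, chunk string,
	checkFn func(context.Context, string) (*CheckResult, error)) (*CheckResult, error) {
	chunkHash := md5.Sum([]byte(chunk))
	cacheKey := moderationCachePrpmptPrefix + fmt.Sprintf("%x", chunkHash)

	if c != nil {
		if cached, err := c.Get(ctx, cacheKey); err == nil {
			var result CheckResult
			if json.Unmarshal([]byte(cached), &result) == nil {
				return &result, nil
			}
		}
	}

	result, err := checkFn(ctx, chunk)
	if err != nil {
		return nil, err
	}
	if c != nil {
		if resultBytes, err := json.Marshal(result); err == nil {
			_ = c.SetEx(ctx, cacheKey, string(resultBytes), cacheTTL)
		}
	}
	return result, nil
}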

aigateway/component/moderation_test.go

Lines changed: 4 additions & 4 deletions
@@ -369,7 +369,7 @@ func TestModerationImpl_CheckChatNonStreamResponse(t *testing.T) {
 	})
 }
 
-// TestModerationImpl_CheckLLMPrompt_CacheCheck tests the cache-check logic at lines 231-255 of moderation.go
+// TestModerationImpl_CheckLLMPrompt_CacheCheck tests the cache checking logic in moderation.go
 func TestModerationImpl_CheckLLMPrompt_CacheCheck(t *testing.T) {
 	ctx := context.Background()
 	key := "test-key"
@@ -388,7 +388,7 @@ func TestModerationImpl_CheckLLMPrompt_CacheCheck(t *testing.T) {
 		testContent := sensitiveChunk + ". " + safeContent
 
 		chunkHash := md5.Sum([]byte(sensitiveChunk))
-		cacheKey := moderationCachePrefix + fmt.Sprintf("%x", chunkHash)
+		cacheKey := moderationCachePrpmptPrefix + fmt.Sprintf("%x", chunkHash)
 
 		sensitiveResult := &rpc.CheckResult{IsSensitive: true, Reason: "contains inappropriate content"}
 		resultJSON, _ := json.Marshal(sensitiveResult)
@@ -425,8 +425,8 @@
 		testContent := testChunk + ". " + strings.Repeat("y", slidingWindowSize*2)
 
 		chunkHash := md5.Sum([]byte(testChunk))
-		cacheKey1 := moderationCachePrefix + fmt.Sprintf("%x", chunkHash)
-		cacheKey2 := moderationCachePrefix + fmt.Sprintf("%x", md5.Sum([]byte(strings.Repeat("y", slidingWindowSize))))
+		cacheKey1 := moderationCachePrpmptPrefix + fmt.Sprintf("%x", chunkHash)
+		cacheKey2 := moderationCachePrpmptPrefix + fmt.Sprintf("%x", md5.Sum([]byte(strings.Repeat("y", slidingWindowSize))))
 
 		mockCacheClient.EXPECT().Get(mock.Anything, cacheKey1).Return("", errors.New("cache error"))
 
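The test changes are mechanical key updates, and the keys themselves are just the new prefix plus a 32-character hex MD5 digest of each sliding-window chunk, which is why the second case needs both cacheKey1 and cacheKey2. A self-contained sketch of that construction follows; the slidingWindowSize value and the testChunk content are illustrative stand-ins, not values taken from the repository.

package main

import (
	"crypto/md5"
	"fmt"
	"strings"
)

const moderationCachePrpmptPrefix = "moderation:prompt:"

// slidingWindowSize is an illustrative stand-in for the package constant used
// by the real test; its actual value is not shown in this diff.
const slidingWindowSize = 512

func main() {
	testChunk := strings.Repeat("x", 10) // illustrative chunk content

	// Same construction as cacheKey1/cacheKey2 in the test: prefix + hex MD5.
	cacheKey1 := moderationCachePrpmptPrefix + fmt.Sprintf("%x", md5.Sum([]byte(testChunk)))
	cacheKey2 := moderationCachePrpmptPrefix + fmt.Sprintf("%x", md5.Sum([]byte(strings.Repeat("y", slidingWindowSize))))

	fmt.Println(cacheKey1) // moderation:prompt:<32 hex chars>
	fmt.Println(cacheKey2) // a different 32-char digest for the second chunk
}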

aigateway/handler/requests.go

Lines changed: 3 additions & 3 deletions
@@ -70,7 +70,7 @@ type ChatMessageHistoryResponse struct {
 
 // EmbeddingRequest represents an embedding request structure
 type EmbeddingRequest struct {
-	Input          string `json:"input"`           // Input text content
-	Model          string `json:"model"`           // Model name used (e.g., "text-embedding-ada-002")
-	EncodingFormat string `json:"encoding_format"` // Encoding format (e.g., "float")
+	Input          string `json:"input"`                     // Input text content
+	Model          string `json:"model"`                     // Model name used (e.g., "text-embedding-ada-002")
+	EncodingFormat string `json:"encoding_format,omitempty"` // Encoding format (e.g., "float")
 }
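
The only behavioral change in this file is the omitempty option on EncodingFormat: when the caller leaves the field empty, the marshalled request now omits "encoding_format" instead of sending an empty string. A small self-contained sketch of that effect; the struct definition is copied from the diff, while the example values are illustrative.

package main

import (
	"encoding/json"
	"fmt"
)

// EmbeddingRequest as defined after this commit (copied from the diff).
type EmbeddingRequest struct {
	Input          string `json:"input"`                     // Input text content
	Model          string `json:"model"`                     // Model name used (e.g., "text-embedding-ada-002")
	EncodingFormat string `json:"encoding_format,omitempty"` // Encoding format (e.g., "float")
}

func main() {
	// Without EncodingFormat set, omitempty drops the field entirely.
	req := EmbeddingRequest{Input: "hello", Model: "text-embedding-ada-002"}
	b, _ := json.Marshal(req)
	fmt.Println(string(b)) // {"input":"hello","model":"text-embedding-ada-002"}

	// With it set, the field is serialized as before.
	req.EncodingFormat = "float"
	b, _ = json.Marshal(req)
	fmt.Println(string(b)) // {"input":"hello","model":"text-embedding-ada-002","encoding_format":"float"}
}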
