Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 20 additions & 20 deletions .husky/pre-commit
Original file line number Diff line number Diff line change
@@ -1,25 +1,25 @@
#!/bin/sh
set -u
# #!/bin/sh
# set -u

# Function to check formatting and then format if necessary
check_and_format() {
# Temporarily disable exit on error
set +e
cd server
bunx @biomejs/biome check ../ --formatter-enabled=true --linter-enabled=false --vcs-use-ignore-file=true --organize-imports-enabled=false
CHECK_STATUS=$?
# set -e
if [ $CHECK_STATUS -ne 0 ]; then
echo "Formatting issues detected. Running formatter..."
# # Function to check formatting and then format if necessary
# check_and_format() {
# # Temporarily disable exit on error
# set +e
# cd server
# bunx @biomejs/biome check ../ --formatter-enabled=true --linter-enabled=false --vcs-use-ignore-file=true --organize-imports-enabled=false
# CHECK_STATUS=$?
# # set -e
# if [ $CHECK_STATUS -ne 0 ]; then
# echo "Formatting issues detected. Running formatter..."

# Format all applicable files, not just staged ones
bun run format
# # Format all applicable files, not just staged ones
# bun run format

echo "Files have been formatted. Please add them to staging and commit again."
exit 1
fi
}
# echo "Files have been formatted. Please add them to staging and commit again."
# exit 1
# fi
# }

# Run the check and format function
check_and_format
# # Run the check and format function
# check_and_format
Comment on lines +1 to +24
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The pre-commit hook script has been completely commented out. This disables the formatting checks that run before a commit, which can lead to inconsistent code formatting in the repository. Was this intentional? If this is a temporary change for development, please remember to re-enable it before merging. If the hook is no longer needed, this file should be removed to avoid confusion.


3 changes: 2 additions & 1 deletion server/api/chat/agent-schemas.ts
Original file line number Diff line number Diff line change
Expand Up @@ -88,10 +88,11 @@ export interface Decision {
export interface ReviewState {
lastReviewTurn: number | null
reviewFrequency: number // Review every N turns
lastReviewSummary: string | null
outstandingAnomalies: string[]
clarificationQuestions: string[]
lastReviewResult: ReviewResult | null
lockedByFinalSynthesis: boolean
lockedAtTurn: number | null
pendingReview?: Promise<void>
cachedPlanSummary?: {
hash: string
Expand Down
201 changes: 148 additions & 53 deletions server/api/chat/agents.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import {
answerContextMap,
answerContextMapFromFragments,
cleanContext,
userContext,
} from "@/ai/context"
import { AgentCreationSource } from "@/db/schema"
Expand All @@ -10,7 +9,6 @@ import {
jsonParseLLMOutput,
baselineRAGOffJsonStream,
agentWithNoIntegrationsQuestion,
extractBestDocumentIndexes,
} from "@/ai/provider"

import {
Expand All @@ -32,47 +30,38 @@ import { type SelectChat, type SelectMessage } from "@/db/schema"
import { getUserAndWorkspaceByEmail } from "@/db/user"
import { getLogger, getLoggerWithChild } from "@/logger"
import {
AgentReasoningStepType,
ApiKeyScopes,
ChatSSEvents,
ContextSysthesisState,
KnowledgeBaseEntity,
type AgentReasoningStep,
type MessageReqType,
} from "@/shared/types"
import { MessageRole, Subsystem, type UserMetadataType } from "@/types"
import { getErrorMessage, splitGroupedCitationsWithSpaces } from "@/utils"
import { getErrorMessage } from "@/utils"
import {
type ConversationRole,
type Message,
} from "@aws-sdk/client-bedrock-runtime"
import type { Context } from "hono"
import { HTTPException } from "hono/http-exception"
import { streamSSE } from "hono/streaming" // Import SSEStreamingApi
import { z } from "zod"
import { getTracer, type Span, type Tracer } from "@/tracer"
import { streamSSE } from "hono/streaming"
import { getTracer, type Tracer } from "@/tracer"
import {
GetDocumentsByDocIds,
getDocumentOrNull,
searchVespaAgent,
SearchVespaThreads,
getAllDocumentsForAgent,
searchSlackInVespa,
} from "@/search/vespa"
import {
expandSheetIds,
validateVespaIdInAgentIntegrations,
} from "@/search/utils"
import {
Apps,
chatUserSchema,
chatContainerSchema,
VespaChatContainerSearchSchema,
VespaChatUserSchema,
type VespaSearchResult,
type VespaSearchResults,
AttachmentEntity,
} from "@xyne/vespa-ts/types"
import { APIError } from "openai"
import { insertChatTrace } from "@/db/chatTrace"
import type { AttachmentMetadata, SelectPublicAgent } from "@/shared/types"
import type { SelectPublicAgent } from "@/shared/types"
import { storeAttachmentMetadata } from "@/db/attachment"
import { getRecordBypath } from "@/db/knowledgeBase"
import { parseAttachmentMetadata } from "@/utils/parseAttachment"
import { isCuid } from "@paralleldrive/cuid2"
import {
Expand All @@ -94,7 +83,6 @@ import {
extractItemIdsFromPath,
handleError,
isMessageWithContext,
mimeTypeMap,
processMessage,
searchToCitation,
parseAppSelections,
Expand All @@ -105,9 +93,7 @@ import {
import config from "@/config"
import { getModelValueFromLabel } from "@/ai/modelConfig"
import {
buildContext,
buildUserQuery,
getThreadContext,
isContextSelected,
UnderstandMessageAndAnswer,
generateAnswerFromDualRag,
Expand All @@ -116,10 +102,7 @@ import {
import { getDateForAI } from "@/utils/index"
import { getAuth, safeGet } from "../agent"
const {
JwtPayloadKey,
defaultBestModel,
defaultBestModelAgenticMode,
defaultFastModel,
maxDefaultSummary,
isReasoning,
StartThinkingToken,
Expand All @@ -129,17 +112,6 @@ const {
const Logger = getLogger(Subsystem.Chat)
const loggerWithChild = getLoggerWithChild(Subsystem.Chat)

const isRecord = (value: unknown): value is Record<string, unknown> =>
typeof value === "object" && value !== null

type ChatContainerFields = z.infer<typeof VespaChatContainerSearchSchema>
type ChatUserFields = z.infer<typeof VespaChatUserSchema>

const isChatContainerFields = (value: unknown): value is ChatContainerFields =>
isRecord(value) && VespaChatContainerSearchSchema.safeParse(value).success

const isChatUserFields = (value: unknown): value is ChatUserFields =>
isRecord(value) && VespaChatUserSchema.safeParse(value).success

// Create mock agent from form data for testing
const createMockAgentFromFormData = (
Expand Down Expand Up @@ -208,12 +180,143 @@ export const checkAgentWithNoIntegrations = (
return false
}

/**
 * Converts a raw Vespa search hit into a MinimalAgentFragment.
 *
 * @param child        Vespa search result whose fields hold the document data.
 * @param idx          Positional index of the hit; used only for the fallback id.
 * @param userMetadata Metadata forwarded to answerContextMap for context building.
 * @param query        Original user query, passed through to answerContextMap.
 * @returns Fragment with the document's context text, a citation source, and
 *          a fixed confidence of 1.0.
 */
const vespaResultToMinimalAgentFragment = async (
  child: VespaSearchResult,
  idx: number,
  userMetadata: UserMetadataType,
  query: string,
): Promise<MinimalAgentFragment> => ({
  // Prefer the document's own docId; fall back to a positional placeholder.
  // Fix: corrected the misspelled fallback id ("Frangment_id_" -> "Fragment_id_").
  id: `${(child.fields as any)?.docId || `Fragment_id_${idx}`}`,
  content: await answerContextMap(
    child as VespaSearchResults,
    userMetadata,
    0,
    true,
    undefined,
    query,
  ),
  source: searchToCitation(child as VespaSearchResults),
  confidence: 1.0,
})

// Streams an LLM answer for the RAG-off path. Wraps baselineRAGOffJsonStream
// and splits the raw token stream into "reasoning" text (between
// StartThinkingToken and EndThinkingToken) and answer text, emitting
// citation / image-citation events as matches appear in the accumulated text.
//
// NOTE(review): `email!` is asserted non-null in the reasoning branches but
// defaulted with `email ?? ""` in the answer branch — confirm whether email
// can actually be undefined on this path.
async function* nonRagIterator(
  message: string, // user query forwarded to the LLM
  userCtx: string, // serialized user context
  dateForAI: string, // date string injected into the prompt
  context: string, // pre-built document context
  results: MinimalAgentFragment[], // fragments used to resolve citation indexes
  agentPrompt?: string, // optional agent system prompt (defaults to "")
  messages: Message[] = [], // prior conversation turns
  imageFileNames: string[] = [],
  email?: string,
  isReasoning = true, // whether the model is expected to emit thinking tokens
  modelId?: string, // optional model override; falls back to defaultBestModel
): AsyncIterableIterator<
  ConverseResponse & {
    citation?: { index: number; item: Citation }
    imageCitation?: ImageCitation
  }
> {
  const ragOffIterator = baselineRAGOffJsonStream(
    message,
    userCtx,
    dateForAI,
    context,
    {
      modelId: (modelId as Models) || defaultBestModel,
      stream: true,
      json: false,
      reasoning: isReasoning,
      imageFileNames,
    },
    agentPrompt ?? "",
    messages,
  )

  // const previousResultsLength = 0
  let buffer = "" // accumulated answer text, scanned for citations each chunk
  let thinking = "" // accumulated reasoning text
  let reasoning = isReasoning // true while still inside the thinking section
  let yieldedCitations = new Set<number>() // citation indexes already emitted
  let yieldedImageCitations = new Map<number, Set<number>>() // image citations already emitted, keyed per citation index

  for await (const chunk of ragOffIterator) {
    try {
      if (chunk.text) {
        if (reasoning) {
          // Already inside a thinking block and this chunk does not close it:
          // append the chunk and emit any new citations found in the
          // accumulated reasoning text.
          if (thinking && !chunk.text.includes(EndThinkingToken)) {
            thinking += chunk.text
            yield* checkAndYieldCitationsForAgent(
              thinking,
              yieldedCitations,
              results,
              undefined,
              email!,
            )
            yield { text: chunk.text, reasoning }
          } else {
            // Thinking has not started yet (or this chunk closes it): look
            // for the start token and strip it from the emitted text.
            const startThinkingIndex = chunk.text.indexOf(StartThinkingToken)
            if (
              startThinkingIndex !== -1 &&
              chunk.text.trim().length > StartThinkingToken.length
            ) {
              let token = chunk.text.slice(
                startThinkingIndex + StartThinkingToken.length,
              )
              // If the end token is also in this chunk, keep only the text
              // before it as reasoning; the remainder is handled below.
              if (chunk.text.includes(EndThinkingToken)) {
                token = chunk.text.split(EndThinkingToken)[0]
                thinking += token
              } else {
                thinking += token
              }
              yield* checkAndYieldCitationsForAgent(
                thinking,
                yieldedCitations,
                results,
                undefined,
                email!,
              )
              yield { text: token, reasoning }
            }
          }
        }

        // Transition out of reasoning mode once the end token appears; only
        // the text after the token is treated as answer text.
        if (reasoning && chunk.text.includes(EndThinkingToken)) {
          reasoning = false
          chunk.text = chunk.text.split(EndThinkingToken)[1].trim()
        }

        if (!reasoning) {
          buffer += chunk.text
          yield { text: chunk.text }
          // Scan the whole accumulated answer so citations that span chunk
          // boundaries are still detected exactly once.
          yield* checkAndYieldCitationsForAgent(
            buffer,
            yieldedCitations,
            results,
            yieldedImageCitations,
            email ?? "",
          )
        }
      }

      // Pass through cost and metadata events unchanged.
      if (chunk.cost) {
        yield { cost: chunk.cost }
      }
      if (chunk.metadata) {
        yield { metadata: chunk.metadata }
      }
    } catch (error) {
      // Best-effort streaming: log and skip a bad chunk rather than abort
      // the whole stream.
      Logger.error(`Error processing chunk: ${error}`)
      continue
    }
  }
}

export const AgentMessageApiRagOff = async (c: Context) => {
const tracer: Tracer = getTracer("chat")
const rootSpan = tracer.startSpan("AgentMessageApiRagOff")

let stream: any
let chat: SelectChat

let assistantMessageId: string | null = null
let streamKey: string | null = null
const { email, workspaceExternalId: workspaceId, via_apiKey } = getAuth(c)
Expand Down Expand Up @@ -325,9 +428,6 @@ export const AgentMessageApiRagOff = async (c: Context) => {
fileIds: [],
}
const fileIds = extractedInfo?.fileIds
const totalValidFileIdsFromLinkCount =
extractedInfo?.totalValidFileIdsFromLinkCount

let messages: SelectMessage[] = []
const costArr: number[] = []
const tokenArr: { inputTokens: number; outputTokens: number }[] = []
Expand Down Expand Up @@ -476,7 +576,6 @@ export const AgentMessageApiRagOff = async (c: Context) => {
let finalImageFileNames: string[] = []
let fragments: MinimalAgentFragment[] = []
if (docIds.length > 0) {
let previousResultsLength = 0
const chunksSpan = streamSpan.startSpan("get_documents_by_doc_ids")
const allChunks = await GetDocumentsByDocIds(docIds, chunksSpan)
// const allChunksCopy
Expand Down Expand Up @@ -580,9 +679,9 @@ export const AgentMessageApiRagOff = async (c: Context) => {
}

if (chunk.imageCitation) {
loggerWithChild({ email: email }).info(
`Found image citation, sending it`,
loggerWithChild({ email }).info(
{ citationKey: chunk.imageCitation.citationKey },
"Found image citation, sending it",
)
imageCitations.push(chunk.imageCitation)
stream.writeSSE({
Expand Down Expand Up @@ -1169,8 +1268,6 @@ export const AgentMessageApi = async (c: Context) => {
fileIds = [...fileIds, ...nonImageAttachmentFileIds]
}

const agentDocs = agentForDb?.docIds || []

//add docIds of agents here itself
const totalValidFileIdsFromLinkCount =
extractedInfo?.totalValidFileIdsFromLinkCount
Expand Down Expand Up @@ -1773,9 +1870,9 @@ export const AgentMessageApi = async (c: Context) => {
citationValues[index] = item
}
if (chunk.imageCitation) {
loggerWithChild({ email: email }).info(
`Found image citation, sending it`,
loggerWithChild({ email }).info(
{ citationKey: chunk.imageCitation.citationKey },
"Found image citation, sending it",
)
imageCitations.push(chunk.imageCitation)
stream.writeSSE({
Expand Down Expand Up @@ -2400,9 +2497,9 @@ export const AgentMessageApi = async (c: Context) => {
citationValues[index] = item
}
if (chunk.imageCitation) {
loggerWithChild({ email: email }).info(
`Found image citation, sending it`,
loggerWithChild({ email }).info(
{ citationKey: chunk.imageCitation.citationKey },
"Found image citation, sending it",
)
imageCitations.push(chunk.imageCitation)
stream.writeSSE({
Expand Down Expand Up @@ -2681,8 +2778,6 @@ export const AgentMessageApi = async (c: Context) => {
}
})

const limitedMessages = messagesWithNoErrResponse.slice(-8)

// Extract previous classification for pagination and follow-up queries
let previousClassification: QueryRouterLLMResponse | null = null
if (messages.length >= 2) {
Expand Down
Loading