
Commit a4f2830

Revert "chatmode changes"
This reverts commit 60e32c3.
1 parent 60e32c3 commit a4f2830


5 files changed: +37 -40 lines changed

frontend/src/components/ChatBot/Chatbot.tsx

Lines changed: 0 additions & 1 deletion
@@ -394,7 +394,6 @@ const Chatbot: React.FC<ChatbotProps> = (props) => {
           chunk_ids={chunkModal}
           response_time={responseTime}
           total_tokens={tokensUsed}
-          mode={chatMode}
         />
       </Modal>
     </div>
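
Taken together with the InfoModal.tsx and types.ts hunks below, this hunk removes the chat-mode plumbing end to end: Chatbot stops passing mode, the chatInfoMessage interface stops declaring it, and InfoModal stops destructuring it. As a reading aid, a minimal sketch of the prop shape after the revert, showing only the fields visible in this commit (the rest of chatInfoMessage is omitted):

    // Partial sketch of frontend/src/types.ts after the revert; other fields omitted.
    export interface chatInfoMessage extends Partial<Messages> {
      response_time: number;
      chunk_ids: chunk[];
      total_tokens: number;
      // mode: string;   // removed by this commit
    }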

frontend/src/components/ChatBot/Info/InfoModal.tsx

Lines changed: 2 additions & 2 deletions
@@ -17,7 +17,7 @@ import { calcWordColor } from '@neo4j-devtools/word-color';
 import ReactMarkdown from 'react-markdown';
 import { GlobeAltIconOutline } from '@neo4j-ndl/react/icons';
 import { youtubeLinkValidation } from '../../../utils/Utils';
-const InfoModal: React.FC<chatInfoMessage> = ({ sources, model, total_tokens, response_time, chunk_ids, mode }) => {
+const InfoModal: React.FC<chatInfoMessage> = ({ sources, model, total_tokens, response_time, chunk_ids }) => {
   const [activeTab, setActiveTab] = useState<number>(3);
   const [infoEntities, setInfoEntities] = useState<Entity[]>([]);
   const [loading, setLoading] = useState<boolean>(false);
@@ -106,7 +106,7 @@ const InfoModal: React.FC<chatInfoMessage> = ({ sources, model, total_tokens, re
       </Box>
       <Tabs size='large' fill='underline' onChange={onChangeTabs} value={activeTab}>
         <Tabs.Tab tabId={3}>Sources used</Tabs.Tab>
-        {mode === 'graph+vector' && (<Tabs.Tab tabId={4}>Top Entities used</Tabs.Tab>)}
+        <Tabs.Tab tabId={4}>Top Entities used</Tabs.Tab>
         <Tabs.Tab tabId={5}>Chunks</Tabs.Tab>
       </Tabs>
       <Flex className='p-4'>
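
Functionally this is the UI half of that removal: the 'Top Entities used' tab goes back to rendering unconditionally rather than only when the answer came from graph+vector mode. The gated pattern the revert drops relies on JSX short-circuiting, where a false condition renders nothing:

    // Pattern removed by the revert (taken from the deleted line above).
    {mode === 'graph+vector' && (<Tabs.Tab tabId={4}>Top Entities used</Tabs.Tab>)}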

frontend/src/components/FileTable.tsx

Lines changed: 18 additions & 13 deletions
@@ -15,6 +15,7 @@ import {
   useReactTable,
   getCoreRowModel,
   createColumnHelper,
+  ColumnFiltersState,
   getFilteredRowModel,
   getPaginationRowModel,
   CellContext,
@@ -26,7 +27,7 @@ import { useFileContext } from '../context/UsersFiles';
 import { getSourceNodes } from '../services/GetFiles';
 import { v4 as uuidv4 } from 'uuid';
 import { statusCheck, capitalize } from '../utils/Utils';
-import { SourceNode, CustomFile, FileTableProps, UserCredentials, statusupdate, alertStateType, ColumnFiltersState } from '../types';
+import { SourceNode, CustomFile, FileTableProps, UserCredentials, statusupdate, alertStateType } from '../types';
 import { useCredentials } from '../context/UserCredentials';
 import { MagnifyingGlassCircleIconSolid } from '@neo4j-ndl/react/icons';
 import CustomAlert from './UI/Alert';
@@ -361,14 +362,14 @@ const FileTable: React.FC<FileTableProps> = ({ isExpanded, connectionStatus, set
             item?.fileSource === 's3 bucket' && localStorage.getItem('accesskey') === item?.awsAccessKeyId
               ? item?.status
               : item?.fileSource === 'local file'
-                ? item?.status
-                : item?.status === 'Completed' || item.status === 'Failed'
-                ? item?.status
-                : item?.fileSource == 'Wikipedia' ||
-                  item?.fileSource == 'youtube' ||
-                  item?.fileSource == 'gcs bucket'
-                ? item?.status
-                : 'N/A',
+              ? item?.status
+              : item?.status === 'Completed' || item.status === 'Failed'
+              ? item?.status
+              : item?.fileSource == 'Wikipedia' ||
+                item?.fileSource == 'youtube' ||
+                item?.fileSource == 'gcs bucket'
+              ? item?.status
+              : 'N/A',
             model: item?.model ?? model,
             id: uuidv4(),
             source_url: item?.url != 'None' && item?.url != '' ? item.url : '',
@@ -381,8 +382,8 @@ const FileTable: React.FC<FileTableProps> = ({ isExpanded, connectionStatus, set
           language: item?.language ?? '',
           processingProgress:
             item?.processed_chunk != undefined &&
-              item?.total_chunks != undefined &&
-              !isNaN(Math.floor((item?.processed_chunk / item?.total_chunks) * 100))
+            item?.total_chunks != undefined &&
+            !isNaN(Math.floor((item?.processed_chunk / item?.total_chunks) * 100))
               ? Math.floor((item?.processed_chunk / item?.total_chunks) * 100)
               : undefined,
           // total_pages: item?.total_pages ?? 0,
@@ -577,6 +578,11 @@ const FileTable: React.FC<FileTableProps> = ({ isExpanded, connectionStatus, set
     getFilteredRowModel: getFilteredRowModel(),
     getPaginationRowModel: getPaginationRowModel(),
     onColumnFiltersChange: setColumnFilters,
+    // initialState: {
+    //   pagination: {
+    //     pageSize: pageSizeCalculation < 0 ? 9 : pageSizeCalculation,
+    //   },
+    // },
     state: {
       columnFilters,
       rowSelection,
@@ -622,8 +628,7 @@ const FileTable: React.FC<FileTableProps> = ({ isExpanded, connectionStatus, set
 
   const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
     table.getColumn('status')?.setFilterValue(e.target.checked);
-    // if (!table.getCanNextPage() || table.getRowCount()) {
-    if (!table.getCanNextPage()) {
+    if (!table.getCanNextPage() || table.getRowCount()) {
       table.setPageIndex(0);
     }
   };
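
One behavioural note on the restored condition, assuming a TanStack Table v8 release that exposes table.getRowCount(): that call returns a number, so !table.getCanNextPage() || table.getRowCount() is truthy whenever the table holds any rows, and the handler jumps back to the first page on essentially every status-filter toggle, not only when the current page would run past the end. A sketch of the restored handler with that spelled out (names taken from the diff):

    const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
      table.getColumn('status')?.setFilterValue(e.target.checked);
      // getRowCount() is numeric, so this branch is taken whenever rows exist,
      // not only when pagination has run out of next pages.
      if (!table.getCanNextPage() || table.getRowCount()) {
        table.setPageIndex(0);
      }
    };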

frontend/src/types.ts

Lines changed: 0 additions & 7 deletions
@@ -395,7 +395,6 @@ export interface chatInfoMessage extends Partial<Messages> {
   response_time: number;
   chunk_ids: chunk[];
   total_tokens: number;
-  mode:string;
 }
 
 export interface eventResponsetypes {
@@ -522,9 +521,3 @@ export interface Origin {
   vertical: Vertical;
   horizontal: Horizontal;
 }
-
-export type ColumnFiltersState = ColumnFilter[];
-export interface ColumnFilter {
-  id: string;
-  value: unknown;
-}

frontend/src/utils/Constants.ts

Lines changed: 17 additions & 17 deletions
@@ -37,25 +37,25 @@ export const llms =
   process.env?.LLM_MODELS?.trim() != ''
     ? process.env.LLM_MODELS?.split(',')
     : [
-        'diffbot',
-        'openai-gpt-3.5',
-        'openai-gpt-4o',
-        'gemini-1.0-pro',
-        'gemini-1.5-pro',
-        'LLM_MODEL_CONFIG_azure-ai-gpt-35',
-        'LLM_MODEL_CONFIG_azure-ai-gpt-4o',
-        'LLM_MODEL_CONFIG_ollama_llama3',
-        'LLM_MODEL_CONFIG_groq-llama3-70b',
-        'LLM_MODEL_CONFIG_anthropic-claude-3-5-sonnet',
-        'LLM_MODEL_CONFIG_fireworks-llama-v3-70b',
-        'LLM_MODEL_CONFIG_bedrock-claude-3-5-sonnet',
-      ];
+      'diffbot',
+      'openai-gpt-3.5',
+      'openai-gpt-4o',
+      'gemini-1.0-pro',
+      'gemini-1.5-pro',
+      'LLM_MODEL_CONFIG_azure-ai-gpt-35',
+      'LLM_MODEL_CONFIG_azure-ai-gpt-4o',
+      'LLM_MODEL_CONFIG_ollama_llama3',
+      'LLM_MODEL_CONFIG_groq-llama3-70b',
+      'LLM_MODEL_CONFIG_anthropic-claude-3-5-sonnet',
+      'LLM_MODEL_CONFIG_fireworks-llama-v3-70b',
+      'LLM_MODEL_CONFIG_bedrock-claude-3-5-sonnet',
+    ];
 
 export const defaultLLM = llms?.includes('openai-gpt-3.5')
   ? 'openai-gpt-3.5'
   : llms?.includes('gemini-1.0-pro')
-    ? 'gemini-1.0-pro'
-    : 'diffbot';
+  ? 'gemini-1.0-pro'
+  : 'diffbot';
 
 export const chunkSize = process.env.CHUNK_SIZE ? parseInt(process.env.CHUNK_SIZE) : 1 * 1024 * 1024;
 export const timeperpage = process.env.TIME_PER_PAGE ? parseInt(process.env.TIME_PER_PAGE) : 50;
@@ -157,8 +157,8 @@ export const buttonCaptions = {
 };
 
 export const ChatModeOptions = [
-  { Icon: VisualizeBloomIcon, value: 'vector' },
-  { Icon: 'abc', value: 'graph+vector' },
+  { Icon: VisualizeBloomIcon, value: 'graph+vector' },
+  { Icon: 'abc', value: 'vector' },
 ];
 
 export const taskParam: string[] = ['update_similarity_graph', 'create_fulltext_index'];
