diff --git a/.github/workflows/react.yml b/.github/workflows/react.yml
new file mode 100644
index 0000000..6eb2654
--- /dev/null
+++ b/.github/workflows/react.yml
@@ -0,0 +1,31 @@
+name: Deploy React App
+
+on:
+ push:
+ branches:
+ - main # run on pushes to the main branch
+
+jobs:
+ build-and-deploy:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout the code
+ uses: actions/checkout@v3
+
+ - name: Set up Node.js
+ uses: actions/setup-node@v3
+ with:
+ node-version: '16'
+
+ - name: Install dependencies
+ run: yarn
+
+ - name: Build the React app
+ run: yarn build
+
+ - name: Deploy to GitHub Pages
+ uses: peaceiris/actions-gh-pages@v3
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ publish_dir: ./build
diff --git a/src/App.js b/src/App.js
index 7ef3183..5f3ad2d 100644
--- a/src/App.js
+++ b/src/App.js
@@ -1,9 +1,29 @@
-import React, { useState, useEffect } from 'react';
-import {
- Paper, TextField, Button, Typography,
- List, ListItem, IconButton, Box, Select, MenuItem, FormControl, InputLabel, Slider, Dialog, DialogTitle, DialogContent, DialogActions,
- Switch, FormControlLabel, Accordion, AccordionSummary, AccordionDetails,
- AppBar, Toolbar
+import React, {useEffect, useState} from 'react';
+import {
+ Accordion,
+ AccordionDetails,
+ AccordionSummary,
+ AppBar,
+ Box,
+ Button,
+ Dialog,
+ DialogActions,
+ DialogContent,
+ DialogTitle,
+ FormControl,
+ FormControlLabel,
+ IconButton,
+ InputLabel,
+ List,
+ ListItem,
+ MenuItem,
+ Paper,
+ Select,
+ Slider,
+ Switch,
+ TextField,
+ Toolbar,
+ Typography
} from '@mui/material';
import DeleteIcon from '@mui/icons-material/Delete';
import AddIcon from '@mui/icons-material/Add';
@@ -11,821 +31,849 @@ import SaveIcon from '@mui/icons-material/Save';
import DownloadIcon from '@mui/icons-material/Download';
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
import axios from 'axios';
-import { v4 as uuidv4 } from 'uuid';
-import { Panel, PanelGroup, PanelResizeHandle } from 'react-resizable-panels';
+import {v4 as uuidv4} from 'uuid';
+import {Panel, PanelGroup, PanelResizeHandle} from 'react-resizable-panels';
function App() {
- const [modelConfigs, setModelConfigs] = useState([
- {
- id: Date.now(),
- provider: 'openai',
- model: 'gpt-3.5-turbo',
- temperature: 0.7,
- maxTokens: 1000,
- apiKey: '',
- endpoint: 'https://api.openai.com/v1/chat/completions',
- prompts: [{ id: Date.now(), systemPrompt: '', userPrompt: '', output: '' }]
- }
- ]);
- const [variables, setVariables] = useState([]);
- const [savedPrompts, setSavedPrompts] = useState([]);
- const [savedModels, setSavedModels] = useState([]);
- const [savedVariables, setSavedVariables] = useState([]);
- const [openSaveModelDialog, setOpenSaveModelDialog] = useState(false);
- const [openSaveVariablesDialog, setOpenSaveVariablesDialog] = useState(false);
- const [openSavePromptDialog, setOpenSavePromptDialog] = useState(false);
- const [modelNameToSave, setModelNameToSave] = useState('');
- const [variablesNameToSave, setVariablesNameToSave] = useState('');
- const [promptNameToSave, setPromptNameToSave] = useState('');
- const [modelToSave, setModelToSave] = useState(null);
- const [promptToSave, setPromptToSave] = useState(null);
- const [globalSystemPrompt, setGlobalSystemPrompt] = useState('');
- const [globalUserPrompt, setGlobalUserPrompt] = useState('');
- const [useGlobalPrompt, setUseGlobalPrompt] = useState(false);
- const [selectedReviewModel, setSelectedReviewModel] = useState('');
- const [promptReviewSuggestion, setPromptReviewSuggestion] = useState('');
-
- useEffect(() => {
- const saved = localStorage.getItem('savedPrompts');
- if (saved) setSavedPrompts(JSON.parse(saved));
- const savedModelsData = localStorage.getItem('savedModels');
- if (savedModelsData) setSavedModels(JSON.parse(savedModelsData));
- const savedVariablesData = localStorage.getItem('savedVariables');
- if (savedVariablesData) setSavedVariables(JSON.parse(savedVariablesData));
- }, []);
-
- const handleAddVariable = () => setVariables([...variables, { name: '', value: '' }]);
-
- const handleVariableChange = (index, field, value) => {
- const newVariables = [...variables];
- newVariables[index][field] = value;
- setVariables(newVariables);
- };
-
- const handleDeleteVariable = (index) => {
- const newVariables = variables.filter((_, i) => i !== index);
- setVariables(newVariables);
- };
-
- const handleModelConfigChange = (id, field, value) => {
- const newConfigs = modelConfigs.map(config =>
- config.id === id ? {...config, [field]: value} : config
- );
- setModelConfigs(newConfigs);
- };
-
- const handleAddModel = () => {
- const newModel = {
- id: Date.now(),
- provider: 'openai',
- model: 'gpt-3.5-turbo',
- temperature: 0.7,
- maxTokens: 1000,
- apiKey: '',
- endpoint: 'https://api.openai.com/v1/chat/completions',
- prompts: [{ id: Date.now(), systemPrompt: '', userPrompt: '', output: '' }]
+ const ProviderCanQueryModelList = ["llamaedge"];
+
+ const [modelConfigs, setModelConfigs] = useState([{
+ id: Date.now(),
+ provider: 'openai',
+ model: 'gpt-3.5-turbo',
+ temperature: 0.7,
+ maxTokens: 1000,
+ apiKey: '',
+ endpoint: 'https://api.openai.com/v1/chat/completions',
+ prompts: [{id: Date.now(), systemPrompt: '', userPrompt: '', output: ''}]
+ }]);
+ const [variables, setVariables] = useState([]);
+ const [savedPrompts, setSavedPrompts] = useState([]);
+ const [savedModels, setSavedModels] = useState([]);
+ const [savedVariables, setSavedVariables] = useState([]);
+ const [openSaveModelDialog, setOpenSaveModelDialog] = useState(false);
+ const [openSaveVariablesDialog, setOpenSaveVariablesDialog] = useState(false);
+ const [openSavePromptDialog, setOpenSavePromptDialog] = useState(false);
+ const [modelNameToSave, setModelNameToSave] = useState('');
+ const [variablesNameToSave, setVariablesNameToSave] = useState('');
+ const [promptNameToSave, setPromptNameToSave] = useState('');
+ const [modelToSave, setModelToSave] = useState(null);
+ const [promptToSave, setPromptToSave] = useState(null);
+ const [globalSystemPrompt, setGlobalSystemPrompt] = useState('');
+ const [globalUserPrompt, setGlobalUserPrompt] = useState('');
+ const [useGlobalPrompt, setUseGlobalPrompt] = useState(false);
+ const [selectedReviewModel, setSelectedReviewModel] = useState('');
+ const [promptReviewSuggestion, setPromptReviewSuggestion] = useState('');
+
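+ // Cache of model ids fetched from each config's endpoint, keyed by model config id.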
+ const [modelsList, setModelsList] = useState({});
+
+ useEffect(() => {
+ const saved = localStorage.getItem('savedPrompts');
+ if (saved) setSavedPrompts(JSON.parse(saved));
+ const savedModelsData = localStorage.getItem('savedModels');
+ if (savedModelsData) setSavedModels(JSON.parse(savedModelsData));
+ const savedVariablesData = localStorage.getItem('savedVariables');
+ if (savedVariablesData) setSavedVariables(JSON.parse(savedVariablesData));
+ }, []);
+
+ const handleAddVariable = () => setVariables([...variables, {name: '', value: ''}]);
+
+ const handleVariableChange = (index, field, value) => {
+ const newVariables = [...variables];
+ newVariables[index][field] = value;
+ setVariables(newVariables);
+ };
+
+ const handleDeleteVariable = (index) => {
+ const newVariables = variables.filter((_, i) => i !== index);
+ setVariables(newVariables);
};
- setModelConfigs([...modelConfigs, newModel]);
- };
-
- const handleDeleteModel = (id) => {
- const newConfigs = modelConfigs.filter(config => config.id !== id);
- setModelConfigs(newConfigs);
- };
-
- const handleAddPrompt = (modelId) => {
- const model = modelConfigs.find(m => m.id === modelId);
- if (model.prompts[model.prompts.length - 1].output) {
- const newConfigs = modelConfigs.map(config => {
- if (config.id === modelId) {
- return {
- ...config,
- prompts: [...config.prompts, { id: Date.now(), userPrompt: '', output: '' }]
- };
+
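+ // Query an OpenAI-compatible /models route (e.g. LlamaEdge) and cache the returned model ids for this config.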
+ const queryModel = async (id, url) => {
+ try {
+ let baseUrl = url.split('/').slice(0, 4).join('/');
+ const query = await fetch(baseUrl + "/models");
+ const queryData = await query.json();
+ // Functional update so overlapping queries don't overwrite each other's cached entries.
+ setModelsList(prev => ({...prev, [id]: queryData.data.map(model => model.id)}));
+ } catch (e) {
+ console.error(e);
}
- return config;
- });
- setModelConfigs(newConfigs);
}
- };
-
- const handleRemoveLastTurn = (modelId) => {
- const newConfigs = modelConfigs.map(config => {
- if (config.id === modelId && config.prompts.length > 1) {
- return {
- ...config,
- prompts: config.prompts.slice(0, -1)
- };
- }
- return config;
- });
- setModelConfigs(newConfigs);
- };
-
- const handlePromptChange = (modelId, promptId, field, value) => {
- const newConfigs = modelConfigs.map(config => {
- if (config.id === modelId) {
- return {
- ...config,
- prompts: config.prompts.map(prompt =>
- prompt.id === promptId ? {...prompt, [field]: value} : prompt
- )
+
+ const handleModelConfigChange = (id, field, value) => {
+ const newConfigs = modelConfigs.map(config => {
+ if (config.id === id) {
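+ // Refresh the cached model list whenever the endpoint changes for a provider that supports listing models.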
+ if (field === "endpoint" && ProviderCanQueryModelList.includes(config.provider) && value) {
+ queryModel(id, value)
+ }
+ return {...config, [field]: value}
+ } else {
+ return config
+ }
+ });
+ setModelConfigs(newConfigs);
+ };
+
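+ // Merge several config fields into a single model config in one state update.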
+ const handleMultipleModelConfigChange = (id, updates) => {
+ const newConfigs = modelConfigs.map(config => config.id === id ? {...config, ...updates} : config);
+ setModelConfigs(newConfigs);
+ };
+
+ const handleAddModel = () => {
+ const newModel = {
+ id: Date.now(),
+ provider: 'openai',
+ model: 'gpt-3.5-turbo',
+ temperature: 0.7,
+ maxTokens: 1000,
+ apiKey: '',
+ endpoint: 'https://api.openai.com/v1/chat/completions',
+ prompts: [{id: Date.now(), systemPrompt: '', userPrompt: '', output: ''}]
};
- }
- return config;
- });
- setModelConfigs(newConfigs);
- };
-
- const openSavePromptDialogHandler = (modelId, promptId) => {
- setPromptToSave({ modelId, promptId });
- setPromptNameToSave('');
- setOpenSavePromptDialog(true);
- };
-
- const savePrompt = () => {
- if (promptNameToSave) {
- let promptContent;
- if (useGlobalPrompt) {
- promptContent = { systemPrompt: globalSystemPrompt, userPrompt: globalUserPrompt };
- } else if (promptToSave) {
- const { modelId, promptId } = promptToSave;
+ setModelConfigs([...modelConfigs, newModel]);
+ };
+
+ const handleDeleteModel = (id) => {
+ const newConfigs = modelConfigs.filter(config => config.id !== id);
+ setModelConfigs(newConfigs);
+ };
+
+ const handleAddPrompt = (modelId) => {
const model = modelConfigs.find(m => m.id === modelId);
- const prompt = model.prompts.find(p => p.id === promptId);
- promptContent = { systemPrompt: prompt.systemPrompt, userPrompt: prompt.userPrompt };
- }
- const newSavedPrompts = [...savedPrompts, { name: promptNameToSave, content: promptContent }];
- setSavedPrompts(newSavedPrompts);
- localStorage.setItem('savedPrompts', JSON.stringify(newSavedPrompts));
- setOpenSavePromptDialog(false);
- }
- };
-
- const loadPrompt = (savedPrompt) => {
- if (useGlobalPrompt) {
- setGlobalSystemPrompt(savedPrompt.content.systemPrompt);
- setGlobalUserPrompt(savedPrompt.content.userPrompt);
- } else {
- const newConfigs = modelConfigs.map(config => ({
- ...config,
- prompts: config.prompts.map(prompt => ({
- ...prompt,
- systemPrompt: savedPrompt.content.systemPrompt,
- userPrompt: savedPrompt.content.userPrompt
- }))
- }));
- setModelConfigs(newConfigs);
- }
- };
-
- const removeSavedPrompt = (index) => {
- const newSavedPrompts = savedPrompts.filter((_, i) => i !== index);
- setSavedPrompts(newSavedPrompts);
- localStorage.setItem('savedPrompts', JSON.stringify(newSavedPrompts));
- };
-
- const openSaveModelDialogHandler = (model) => {
- setModelToSave(model);
- setModelNameToSave('');
- setOpenSaveModelDialog(true);
- };
-
- const saveModel = () => {
- if (modelNameToSave && modelToSave) {
- const modelToSaveWithoutId = {...modelToSave, id: undefined};
- const newSavedModels = [...savedModels, { name: modelNameToSave, config: modelToSaveWithoutId }];
- setSavedModels(newSavedModels);
- localStorage.setItem('savedModels', JSON.stringify(newSavedModels));
- setOpenSaveModelDialog(false);
- }
- };
-
- const loadModel = (savedModel, modelId) => {
- const newConfigs = modelConfigs.map(config => {
- if (config.id === modelId) {
- return {
- ...config,
- ...savedModel.config,
- id: config.id,
- prompts: config.prompts
- };
- }
- return config;
- });
- setModelConfigs(newConfigs);
- };
-
- const removeSavedModel = (index) => {
- const newSavedModels = savedModels.filter((_, i) => i !== index);
- setSavedModels(newSavedModels);
- localStorage.setItem('savedModels', JSON.stringify(newSavedModels));
- };
-
- const openSaveVariablesDialogHandler = () => {
- setVariablesNameToSave('');
- setOpenSaveVariablesDialog(true);
- };
-
- const saveVariables = () => {
- if (variablesNameToSave) {
- const newSavedVariables = [...savedVariables, { name: variablesNameToSave, variables: variables }];
- setSavedVariables(newSavedVariables);
- localStorage.setItem('savedVariables', JSON.stringify(newSavedVariables));
- setOpenSaveVariablesDialog(false);
- }
- };
-
- const loadVariables = (savedVariableSet) => {
- setVariables(savedVariableSet.variables);
- };
-
- const removeSavedVariables = (index) => {
- const newSavedVariables = savedVariables.filter((_, i) => i !== index);
- setSavedVariables(newSavedVariables);
- localStorage.setItem('savedVariables', JSON.stringify(newSavedVariables));
- };
-
- const toggleGlobalPrompt = () => {
- setUseGlobalPrompt(!useGlobalPrompt);
- };
-
- const runPrompt = async (modelId, promptId) => {
- const model = modelConfigs.find(m => m.id === modelId);
- const prompt = model.prompts.find(p => p.id === promptId);
- let messages = [];
-
- if (promptId === model.prompts[0].id && useGlobalPrompt) {
- messages = [
- { role: 'system', content: globalSystemPrompt },
- { role: 'user', content: globalUserPrompt }
- ];
- } else {
- const previousPrompts = model.prompts.slice(0, model.prompts.findIndex(p => p.id === promptId) + 1);
- messages = previousPrompts.flatMap((p, index) => {
- if (index === 0 && !useGlobalPrompt) {
- return [
- { role: 'system', content: p.systemPrompt },
- { role: 'user', content: p.userPrompt }
- ];
+ if (model.prompts[model.prompts.length - 1].output) {
+ const newConfigs = modelConfigs.map(config => {
+ if (config.id === modelId) {
+ return {
+ ...config, prompts: [...config.prompts, {id: Date.now(), userPrompt: '', output: ''}]
+ };
+ }
+ return config;
+ });
+ setModelConfigs(newConfigs);
+ }
+ };
+
+
+ const handlePromptChange = (modelId, promptId, field, value) => {
+ const newConfigs = modelConfigs.map(config => {
+ if (config.id === modelId) {
+ return {
+ ...config,
+ prompts: config.prompts.map(prompt => prompt.id === promptId ? {...prompt, [field]: value} : prompt)
+ };
+ }
+ return config;
+ });
+ setModelConfigs(newConfigs);
+ };
+
+ const handleRemoveLastTurn = (modelId) => {
+ const newConfigs = modelConfigs.map(config => {
+ if (config.id === modelId && config.prompts.length > 1) {
+ return {
+ ...config, prompts: config.prompts.slice(0, -1)
+ };
+ }
+ return config;
+ });
+ setModelConfigs(newConfigs);
+ };
+
+ const openSavePromptDialogHandler = (modelId, promptId) => {
+ setPromptToSave({modelId, promptId});
+ setPromptNameToSave('');
+ setOpenSavePromptDialog(true);
+ };
+
+ const savePrompt = () => {
+ if (promptNameToSave) {
+ let promptContent;
+ if (useGlobalPrompt) {
+ promptContent = {systemPrompt: globalSystemPrompt, userPrompt: globalUserPrompt};
+ } else if (promptToSave) {
+ const {modelId, promptId} = promptToSave;
+ const model = modelConfigs.find(m => m.id === modelId);
+ const prompt = model.prompts.find(p => p.id === promptId);
+ promptContent = {systemPrompt: prompt.systemPrompt, userPrompt: prompt.userPrompt};
+ }
+ const newSavedPrompts = [...savedPrompts, {name: promptNameToSave, content: promptContent}];
+ setSavedPrompts(newSavedPrompts);
+ localStorage.setItem('savedPrompts', JSON.stringify(newSavedPrompts));
+ setOpenSavePromptDialog(false);
+ }
+ };
+
+ const loadPrompt = (savedPrompt) => {
+ if (useGlobalPrompt) {
+ setGlobalSystemPrompt(savedPrompt.content.systemPrompt);
+ setGlobalUserPrompt(savedPrompt.content.userPrompt);
} else {
- return [
- { role: 'user', content: p.userPrompt },
- ...(p.output ? [{ role: 'assistant', content: p.output }] : [])
- ];
+ const newConfigs = modelConfigs.map(config => ({
+ ...config, prompts: config.prompts.map(prompt => ({
+ ...prompt,
+ systemPrompt: savedPrompt.content.systemPrompt,
+ userPrompt: savedPrompt.content.userPrompt
+ }))
+ }));
+ setModelConfigs(newConfigs);
}
- });
- }
+ };
+
+ const removeSavedPrompt = (index) => {
+ const newSavedPrompts = savedPrompts.filter((_, i) => i !== index);
+ setSavedPrompts(newSavedPrompts);
+ localStorage.setItem('savedPrompts', JSON.stringify(newSavedPrompts));
+ };
+
+ const openSaveModelDialogHandler = (model) => {
+ setModelToSave(model);
+ setModelNameToSave('');
+ setOpenSaveModelDialog(true);
+ };
+
+ const saveModel = () => {
+ if (modelNameToSave && modelToSave) {
+ const modelToSaveWithoutId = {...modelToSave, id: undefined};
+ const newSavedModels = [...savedModels, {name: modelNameToSave, config: modelToSaveWithoutId}];
+ setSavedModels(newSavedModels);
+ localStorage.setItem('savedModels', JSON.stringify(newSavedModels));
+ setOpenSaveModelDialog(false);
+ }
+ };
- // Apply variables
- messages = messages.map(message => ({
- ...message,
- content: variables.reduce((content, variable) =>
- content.replace(new RegExp(`\\{${variable.name}\\}`, 'g'), variable.value),
- message.content
- )
- }));
-
- try {
- let response;
- switch (model.provider) {
- case 'openai':
- response = await axios.post(model.endpoint, {
- model: model.model,
- messages: messages,
- temperature: model.temperature,
- max_tokens: model.maxTokens,
- }, {
- headers: { 'Authorization': `Bearer ${model.apiKey}` }
- });
- break;
- case 'anthropic':
- // Implement Anthropic API call here
- break;
- case 'azure':
- response = await axios.post(
- `https://${model.resourceName}.openai.azure.com/openai/deployments/${model.deploymentId}/chat/completions?api-version=2023-05-15`,
- {
- messages: messages,
- temperature: model.temperature,
- max_tokens: model.maxTokens,
- },
- {
- headers: { 'api-key': model.apiKey }
+ const loadModel = (savedModel, modelId) => {
+ const newConfigs = modelConfigs.map(config => {
+ if (config.id === modelId) {
+ return {
+ ...config, ...savedModel.config, id: config.id, prompts: config.prompts
+ };
}
- );
- break;
- case 'bedrock':
- // This is a placeholder and won't work as-is
- // You'll need to use AWS SDK for JavaScript v3 for this
- console.log('Amazon Bedrock API call not implemented');
- break;
- default:
- throw new Error('Unknown provider');
- }
-
- const output = response.data.choices[0].message.content;
- setModelConfigs(prevConfigs => prevConfigs.map(config => {
- if (config.id === modelId) {
- return {
- ...config,
- prompts: config.prompts.map(p =>
- p.id === promptId ? {...p, output} : p
- )
- };
+ return config;
+ });
+ setModelConfigs(newConfigs);
+ };
+
+ const removeSavedModel = (index) => {
+ const newSavedModels = savedModels.filter((_, i) => i !== index);
+ setSavedModels(newSavedModels);
+ localStorage.setItem('savedModels', JSON.stringify(newSavedModels));
+ };
+
+ const openSaveVariablesDialogHandler = () => {
+ setVariablesNameToSave('');
+ setOpenSaveVariablesDialog(true);
+ };
+
+ const saveVariables = () => {
+ if (variablesNameToSave) {
+ const newSavedVariables = [...savedVariables, {name: variablesNameToSave, variables: variables}];
+ setSavedVariables(newSavedVariables);
+ localStorage.setItem('savedVariables', JSON.stringify(newSavedVariables));
+ setOpenSaveVariablesDialog(false);
}
- return config;
- }));
- } catch (error) {
- console.error('Error calling API:', error);
- const errorOutput = `Error: ${error.message}`;
- setModelConfigs(prevConfigs => prevConfigs.map(config => {
- if (config.id === modelId) {
- return {
- ...config,
- prompts: config.prompts.map(p =>
- p.id === promptId ? {...p, output: errorOutput} : p
- )
- };
+ };
+
+ const loadVariables = (savedVariableSet) => {
+ setVariables(savedVariableSet.variables);
+ };
+
+ const removeSavedVariables = (index) => {
+ const newSavedVariables = savedVariables.filter((_, i) => i !== index);
+ setSavedVariables(newSavedVariables);
+ localStorage.setItem('savedVariables', JSON.stringify(newSavedVariables));
+ };
+
+ const toggleGlobalPrompt = () => {
+ setUseGlobalPrompt(!useGlobalPrompt);
+ };
+
+ const runPrompt = async (modelId, promptId) => {
+ const model = modelConfigs.find(m => m.id === modelId);
+ let messages = [];
+
+ if (promptId === model.prompts[0].id && useGlobalPrompt) {
+ messages = [{role: 'system', content: globalSystemPrompt}, {role: 'user', content: globalUserPrompt}];
+ } else {
+ const previousPrompts = model.prompts.slice(0, model.prompts.findIndex(p => p.id === promptId) + 1);
+ messages = previousPrompts.flatMap((p, index) => {
+ if (index === 0 && !useGlobalPrompt) {
+ return [{role: 'system', content: p.systemPrompt}, {role: 'user', content: p.userPrompt}];
+ } else {
+ return [{role: 'user', content: p.userPrompt}, ...(p.output ? [{
+ role: 'assistant', content: p.output
+ }] : [])];
+ }
+ });
}
- return config;
- }));
- }
- };
- const runAllPrompts = async () => {
- for (const config of modelConfigs) {
- for (const prompt of config.prompts) {
- await runPrompt(config.id, prompt.id);
- }
- }
- };
+ // Apply variables
+ messages = messages.map(message => ({
+ ...message,
+ content: variables.reduce((content, variable) => content.replace(new RegExp(`\\{${variable.name}\\}`, 'g'), variable.value), message.content)
+ }));
+
+ try {
+ let response;
+ switch (model.provider) {
+ case 'openai':
+ case 'llamaedge':
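+ // LlamaEdge serves an OpenAI-compatible chat completions API, so it shares the OpenAI request path.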
+ response = await axios.post(model.endpoint, {
+ model: model.model,
+ messages: messages,
+ temperature: model.temperature,
+ max_tokens: model.maxTokens,
+ }, {
+ headers: {'Authorization': `Bearer ${model.apiKey}`}
+ });
+ break;
+ case 'anthropic':
+ // Implement Anthropic API call here
+ break;
+ case 'azure':
+ response = await axios.post(`https://${model.resourceName}.openai.azure.com/openai/deployments/${model.deploymentId}/chat/completions?api-version=2023-05-15`, {
+ messages: messages, temperature: model.temperature, max_tokens: model.maxTokens,
+ }, {
+ headers: {'api-key': model.apiKey}
+ });
+ break;
+ case 'bedrock':
+ // This is a placeholder and won't work as-is
+ // You'll need to use AWS SDK for JavaScript v3 for this
+ console.log('Amazon Bedrock API call not implemented');
+ break;
+ default:
+ throw new Error('Unknown provider');
+ }
- const reviewPrompt = async () => {
- if (!selectedReviewModel) return;
+ const output = response.data.choices[0].message.content;
+ setModelConfigs(prevConfigs => prevConfigs.map(config => {
+ if (config.id === modelId) {
+ return {
+ ...config, prompts: config.prompts.map(p => p.id === promptId ? {...p, output} : p)
+ };
+ }
+ return config;
+ }));
+ } catch (error) {
+ console.error('Error calling API:', error);
+ const errorOutput = `Error: ${error.message}`;
+ setModelConfigs(prevConfigs => prevConfigs.map(config => {
+ if (config.id === modelId) {
+ return {
+ ...config, prompts: config.prompts.map(p => p.id === promptId ? {...p, output: errorOutput} : p)
+ };
+ }
+ return config;
+ }));
+ }
+ };
- const model = savedModels.find(m => m.name === selectedReviewModel)?.config;
- if (!model) return;
+ const runAllPrompts = async () => {
+ for (const config of modelConfigs) {
+ for (const prompt of config.prompts) {
+ await runPrompt(config.id, prompt.id);
+ }
+ }
+ };
+
+ const reviewPrompt = async () => {
+ if (!selectedReviewModel) return;
- const prompt = `Please review and suggest improvements for the following prompt:
+ const model = savedModels.find(m => m.name === selectedReviewModel)?.config;
+ if (!model) return;
+
+ const prompt = `Please review and suggest improvements for the following prompt:
System Prompt: ${globalSystemPrompt}
User Prompt: ${globalUserPrompt}
Provide concise suggestions to improve the prompt's effectiveness.`;
- try {
- const response = await axios.post(model.endpoint, {
- model: model.model,
- messages: [{ role: 'user', content: prompt }],
- temperature: model.temperature,
- max_tokens: model.maxTokens,
- }, {
- headers: { 'Authorization': `Bearer ${model.apiKey}` }
- });
- setPromptReviewSuggestion(response.data.choices[0].message.content);
- } catch (error) {
- console.error('Error reviewing prompt:', error);
- setPromptReviewSuggestion(`Error reviewing prompt: ${error.message}`);
- }
- };
-
- const saveConversationData = () => {
- const conversationData = modelConfigs.map(config => {
- const modelName = savedModels.find(m => m.config.model === config.model)?.name ||
- `${config.provider}-Endpoint-${config.endpoint.split('/').pop()}-${config.model}-temp-${config.temperature}`;
-
- const conversations = config.prompts.flatMap(prompt => [
- { role: "system", content: prompt.systemPrompt },
- { role: "user", content: prompt.userPrompt },
- { role: "assistant", content: prompt.output }
- ]);
-
- return {
- conversations,
- model: modelName
- };
- });
-
- const jsonData = JSON.stringify(conversationData, null, 2);
- const blob = new Blob([jsonData], { type: 'application/json' });
- const url = URL.createObjectURL(blob);
- const link = document.createElement('a');
- link.href = url;
- link.download = `conversation-${uuidv4()}.json`;
- document.body.appendChild(link);
- link.click();
- document.body.removeChild(link);
- };
-
- return (
-
- Prompt Engineering Tool
-
-
-
-
- Saved Prompts
-
- {savedPrompts.map((savedPrompt, index) => (
-
-
- removeSavedPrompt(index)}>
-
- ))}
-
-
-
-
-
-
-
- }
- label="Use Global Prompt"
- />
- {useGlobalPrompt && (
- <>
- setGlobalSystemPrompt(e.target.value)}
- variant="outlined"
- sx={{ mt: 2, mb: 2 }}
- />
- setGlobalUserPrompt(e.target.value)}
- variant="outlined"
- sx={{ mb: 2 }}
- />
- >
- )}
-
-
-
-
-
-
- {modelConfigs.map((config) => (
-
-
-
- Model
-
-
- {!useGlobalPrompt ? (
- config.prompts.map((prompt, index) => (
-
- Turn {index + 1}
- {index === 0 && (
- handlePromptChange(config.id, prompt.id, 'systemPrompt', e.target.value)}
- variant="outlined"
- sx={{ mb: 2 }}
- />
- )}
- handlePromptChange(config.id, prompt.id, 'userPrompt', e.target.value)}
- variant="outlined"
- sx={{ mb: 2 }}
+ try {
+ const response = await axios.post(model.endpoint, {
+ model: model.model,
+ messages: [{role: 'user', content: prompt}],
+ temperature: model.temperature,
+ max_tokens: model.maxTokens,
+ }, {
+ headers: {'Authorization': `Bearer ${model.apiKey}`}
+ });
+ setPromptReviewSuggestion(response.data.choices[0].message.content);
+ } catch (error) {
+ console.error('Error reviewing prompt:', error);
+ setPromptReviewSuggestion(`Error reviewing prompt: ${error.message}`);
+ }
+ };
+
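+ // Download each model's turns as a JSON file of {conversations, model} records.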
+ const saveConversationData = () => {
+ const conversationData = modelConfigs.map(config => {
+ const modelName = savedModels.find(m => m.config.model === config.model)?.name || `${config.provider}-Endpoint-${config.endpoint.split('/').pop()}-${config.model}-temp-${config.temperature}`;
+
+ const conversations = config.prompts.flatMap(prompt => [{
+ role: "system", content: prompt.systemPrompt
+ }, {role: "user", content: prompt.userPrompt}, {role: "assistant", content: prompt.output}]);
+
+ return {
+ conversations, model: modelName
+ };
+ });
+
+ const jsonData = JSON.stringify(conversationData, null, 2);
+ const blob = new Blob([jsonData], {type: 'application/json'});
+ const url = URL.createObjectURL(blob);
+ const link = document.createElement('a');
+ link.href = url;
+ link.download = `conversation-${uuidv4()}.json`;
+ document.body.appendChild(link);
+ link.click();
+ document.body.removeChild(link);
+ };
+
+ return (
+ Prompt Engineering Tool
+
+
+
+
+ Saved Prompts
+
+ {savedPrompts.map((savedPrompt, index) => (
+
+
+ removeSavedPrompt(index)}>
+ ))}
+
+
+
+
+
+
+
+ }
+ label="Use Global Prompt"
/>
-
-
-
+ {useGlobalPrompt && (<>
+ setGlobalSystemPrompt(e.target.value)}
+ variant="outlined"
+ sx={{mt: 2, mb: 2}}
+ />
+ setGlobalUserPrompt(e.target.value)}
+ variant="outlined"
+ sx={{mb: 2}}
+ />
+ >)}
+
+
+
-
-
- ))
- ) : (
-
- )}
- {!useGlobalPrompt && (
-
- {config.prompts[config.prompts.length - 1].output && (
-
- )}
- {config.prompts.length > 1 && (
-
+
+ {modelConfigs.map((config) => (
+
+
+ Model
+
+
+ {!useGlobalPrompt ? (config.prompts.map((prompt, index) => (
+
+ Turn {index + 1}
+ {index === 0 && ( handlePromptChange(config.id, prompt.id, 'systemPrompt', e.target.value)}
+ variant="outlined"
+ sx={{mb: 2}}
+ />)}
+ handlePromptChange(config.id, prompt.id, 'userPrompt', e.target.value)}
+ variant="outlined"
+ sx={{mb: 2}}
+ />
+
+
+
+
+
+ ))) : ()}
+ {!useGlobalPrompt && (
+ {config.prompts[config.prompts.length - 1].output && ()}
+ {config.prompts.length > 1 && ()}
+ )}
+
+ ))}
+
+
+
- )}
- )}
-
-
- ))}
-
-
- }>
- Add Model
+
+
+
+
+
+
+ Variables
+
+ {variables.map((variable, index) => (
+
+ handleVariableChange(index, 'name', e.target.value)}
+ sx={{mb: 1}}
+ />
+ handleVariableChange(index, 'value', e.target.value)}
+ />
+ handleDeleteVariable(index)}
+ sx={{position: 'absolute', top: 0, right: 0}}
+ >
+
+
+
+ ))}
+
+
+
+
+
+
+
+ Saved Variables
+
+ {savedVariables.map((savedVarSet, index) => (
+
+
+ removeSavedVariables(index)}>
+ ))}
+
+
+
+ Model Configurations
+ {modelConfigs.map((config) => (
+
+ Provider
+
+
+ {config.provider === 'azure' && (<>
+ handleModelConfigChange(config.id, 'resourceName', e.target.value)}
+ />
+ handleModelConfigChange(config.id, 'deploymentId', e.target.value)}
+ />
+ >)}
+
+ {config.provider === 'bedrock' && (<>
+ handleModelConfigChange(config.id, 'region', e.target.value)}
+ />
+ handleModelConfigChange(config.id, 'modelId', e.target.value)}
+ />
+ >)}
+
+ {ProviderCanQueryModelList.includes(config.provider) ? <>
+ handleModelConfigChange(config.id, 'endpoint', e.target.value)}
+ />
+ handleModelConfigChange(config.id, 'apiKey', e.target.value)}
+ type="password"
+ />
+
+ Model
+
+
+ > : handleModelConfigChange(config.id, 'model', e.target.value)}
+ />}
+
+ {!ProviderCanQueryModelList.includes(config.provider) ? <>
+ handleModelConfigChange(config.id, 'apiKey', e.target.value)}
+ type="password"
+ />
+ handleModelConfigChange(config.id, 'endpoint', e.target.value)}
+ />
+ > : ""}
+ handleModelConfigChange(config.id, 'maxTokens', parseInt(e.target.value))}
+ />
+ Temperature: {config.temperature}
+ handleModelConfigChange(config.id, 'temperature', newValue)}
+ min={0} max={1} step={0.1}
+ />
+
+
+
+
+ ))}
+
+
+ }
+ aria-controls="saved-models-content"
+ id="saved-models-header"
+ >
+ Saved Models
+
+
+ {savedModels.map((savedModel, index) => (
+ {savedModel.name}
+ Provider: {savedModel.config.provider}
+ Model: {savedModel.config.model}
+ Temperature: {savedModel.config.temperature}
+ Max Tokens: {savedModel.config.maxTokens}
+
+
+
+
+ ))}
+
+
+
+
+
+
+
+
+
+
+ Review Model
+
+
+
-
-
-
-
-
-
-
- Variables
-
- {variables.map((variable, index) => (
-
-
- handleVariableChange(index, 'name', e.target.value)}
- sx={{ mb: 1 }}
- />
- handleVariableChange(index, 'value', e.target.value)}
- />
- handleDeleteVariable(index)}
- sx={{ position: 'absolute', top: 0, right: 0 }}
- >
-
-
-
-
- ))}
-
-
- }>
- Add Variable
-
- }>
- Save Variables
-
-
-
-
- Saved Variables
-
- {savedVariables.map((savedVarSet, index) => (
-
-
- removeSavedVariables(index)}>
-
- ))}
-
-
-
- Model Configurations
- {modelConfigs.map((config) => (
-
-
- Provider
-
-
- handleModelConfigChange(config.id, 'model', e.target.value)}
- />
- {config.provider === 'azure' && (
- <>
- handleModelConfigChange(config.id, 'resourceName', e.target.value)}
- />
- handleModelConfigChange(config.id, 'deploymentId', e.target.value)}
- />
- >
- )}
- {config.provider === 'bedrock' && (
- <>
- handleModelConfigChange(config.id, 'region', e.target.value)}
- />
- handleModelConfigChange(config.id, 'modelId', e.target.value)}
- />
- >
- )}
- handleModelConfigChange(config.id, 'apiKey', e.target.value)}
- type="password"
- />
- handleModelConfigChange(config.id, 'endpoint', e.target.value)}
- />
- handleModelConfigChange(config.id, 'maxTokens', parseInt(e.target.value))}
- />
- Temperature: {config.temperature}
- handleModelConfigChange(config.id, 'temperature', newValue)}
- min={0} max={1} step={0.1}
- />
-
-
-
-
-
- ))}
-
- }
- aria-controls="saved-models-content"
- id="saved-models-header"
- >
- Saved Models
-
-
- {savedModels.map((savedModel, index) => (
-
- {savedModel.name}
- Provider: {savedModel.config.provider}
- Model: {savedModel.config.model}
- Temperature: {savedModel.config.temperature}
- Max Tokens: {savedModel.config.maxTokens}
-
-
-
-
-
- ))}
-
-
-
-
-
-
-
-
-
-
- Review Model
-
-
-
- {promptReviewSuggestion}
- }>
- Save Conversation
-
-
-
-
-
-
-
- );
+ {promptReviewSuggestion}
+
+
+
+
+
+
+ );
}
export default App;
\ No newline at end of file