@@ -41,6 +41,7 @@ import { LLM_CODE_TEMPLATE_EXAMPLES, LLM_CODE_TEMPLATE_OPTIONS } from "./LLM/llm
import KernButton from "@/submodules/react-components/components/kern-button/KernButton";
import { MemoIconAlertTriangleFilled, MemoIconArrowLeft, MemoIconCircleCheckFilled } from "@/submodules/react-components/components/kern-icons/icons";
import { LookupListWithOnClick } from "@/src/types/components/projects/projectId/lookup-lists";
+import { InfoButton } from "@/submodules/react-components/components/InfoButton";

const EDITOR_OPTIONS = { theme: 'vs-light', language: 'python', readOnly: false };

@@ -386,10 +387,12 @@ export default function AttributeCalculation() {
selectedOption={(option) => setAdditionalConfigTmp(p => ({ ...p, llmIdentifier: option }))}
disabled={currentAttribute.state == AttributeState.USABLE}
/>
-<label className="block text-sm font-medium text-gray-900 whitespace-nowrap">Api Key</label>
+<label className={"block text-sm font-medium text-gray-900 whitespace-nowrap" + (additionalConfigTmp?.llmIdentifier != 'Privatemode AI' ? "" : " line-through")}>Api Key</label>

-<input type="text" disabled={currentAttribute.state == AttributeState.USABLE} value={additionalConfigTmp?.llmConfig.apiKey || ""} onInput={(e: any) => setAdditionalConfigTmp(p => ({ ...p, llmConfig: { ...additionalConfigTmp.llmConfig, apiKey: e.target.value } }))}
-className="h-8 text-sm border-gray-300 rounded-md placeholder-italic w-full border text-gray-700 pl-4 placeholder:text-gray-400 focus:outline-none focus:ring-2 focus:ring-gray-300 focus:ring-offset-2 focus:ring-offset-gray-100 disabled:opacity-50" />
+{additionalConfigTmp?.llmIdentifier != 'Privatemode AI' ?
+<input type="text" disabled={currentAttribute.state == AttributeState.USABLE} value={additionalConfigTmp?.llmConfig.apiKey || ""} onInput={(e: any) => setAdditionalConfigTmp(p => ({ ...p, llmConfig: { ...additionalConfigTmp.llmConfig, apiKey: e.target.value } }))}
+className="h-8 text-sm border-gray-300 rounded-md placeholder-italic w-full border text-gray-700 pl-4 placeholder:text-gray-400 focus:outline-none focus:ring-2 focus:ring-gray-300 focus:ring-offset-2 focus:ring-offset-gray-100 disabled:opacity-50" />
+: <InfoButton content="Set in backend" divPosition="right" infoButtonSize="sm" />}
</div>}
</div>
<div className="text-sm leading-5 font-medium text-gray-700 inline-block">Attributes</div>
@@ -3,6 +3,7 @@ import OpenAI from "./models/OpenAI"
import Azure from "./models/Azure"
import { LLMConfigProps } from "./types";
import AzureFoundry from "./models/AzureFoundry";
+import PrivatemodeAi from "./models/PrivatemodeAi";

export function LLMConfig(props: LLMConfigProps) {
switch (props.llmIdentifier) {
@@ -12,6 +13,8 @@ export function LLMConfig(props: LLMConfigProps) {
return <Azure disabled={props.disabled} llmConfig={props.llmConfig} setLlmConfig={props.setLlmConfig} onlyEssential={props.onlyEssential} projectId={props.projectId} />
case 'Azure Foundry':
return <AzureFoundry disabled={props.disabled} llmConfig={props.llmConfig} setLlmConfig={props.setLlmConfig} onlyEssential={props.onlyEssential} projectId={props.projectId} />
+case 'Privatemode AI':
+return <PrivatemodeAi disabled={props.disabled} llmConfig={props.llmConfig} setLlmConfig={props.setLlmConfig} onlyEssential={props.onlyEssential} projectId={props.projectId} />
default:
return null
}
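
For context, a minimal usage sketch (not part of the diff; the surrounding state setter and values are assumed) showing how the new switch branch is reached once a caller passes the new provider name:

// Hypothetical caller – 'Privatemode AI' now resolves to the PrivatemodeAi form.
<LLMConfig
    llmIdentifier="Privatemode AI"
    llmConfig={{ model: "ibnzterrell/Meta-Llama-3.3-70B-Instruct-AWQ-INT4", temperature: 0.7 }}
    setLlmConfig={(cfg) => setLlmConfig(cfg)} // setLlmConfig is assumed to exist in the caller
    onlyEssential={false}
/>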
@@ -19,6 +19,7 @@ import { jsonCopy } from "@/submodules/javascript-functions/general";
import { TEMPLATE_EXAMPLES, TEMPLATE_OPTIONS } from "./llmTemplates";
import { LLM_PROVIDER_OPTIONS, postProcessLLMPlaygroundRecordData } from "@/src/util/components/projects/projectId/settings/attribute-calculation-helper";
import { MemoIconHandClick, MemoIconPlayCardStar, MemoIconPlayerPlay, MemoIconRefresh, MemoIconTerminal } from "@/submodules/react-components/components/kern-icons/icons";
+import { InfoButton } from "@/submodules/react-components/components/InfoButton";

const ACCEPT_BUTTON = { buttonCaption: "Use current values for attribute", useButton: true };
const DISPLAY_STATES = [AttributeState.AUTOMATICALLY_CREATED, AttributeState.UPLOADED, AttributeState.USABLE]
@@ -176,10 +177,12 @@ export default function LLMPlaygroundModal() {
options={LLM_PROVIDER_OPTIONS}
dropdownWidth="w-64"
selectedOption={(option) => setFullLlmConfig(p => ({ ...p, llmIdentifier: option }))}
-/><label className="block text-sm font-medium text-gray-900 whitespace-nowrap">Api Key</label>
+/><label className={"block text-sm font-medium text-gray-900 whitespace-nowrap" + (fullLlmConfig?.llmIdentifier != 'Privatemode AI' ? "" : " line-through")}>Api Key</label>

-<input type="text" value={fullLlmConfig?.llmConfig.apiKey || ""} onInput={(e: any) => setFullLlmConfig(p => ({ ...p, llmConfig: { ...fullLlmConfig.llmConfig, apiKey: e.target.value } }))}
-className="h-8 text-sm border-gray-300 rounded-md placeholder-italic w-full border text-gray-700 pl-4 placeholder:text-gray-400 focus:outline-none focus:ring-2 focus:ring-gray-300 focus:ring-offset-2 focus:ring-offset-gray-100" />
+{fullLlmConfig?.llmIdentifier != 'Privatemode AI' ?
+<input type="text" value={fullLlmConfig?.llmConfig.apiKey || ""} onInput={(e: any) => setFullLlmConfig(p => ({ ...p, llmConfig: { ...fullLlmConfig.llmConfig, apiKey: e.target.value } }))}
+className="h-8 text-sm border-gray-300 rounded-md placeholder-italic w-full border text-gray-700 pl-4 placeholder:text-gray-400 focus:outline-none focus:ring-2 focus:ring-gray-300 focus:ring-offset-2 focus:ring-offset-gray-100" />
+: <InfoButton content="Set in backend" divPosition="right" infoButtonSize="sm" />}

</div>}
<LLMResponseConfig attributeId={modal.attributeId} fullLlmConfig={fullLlmConfig} setFullLlmConfig={setFullLlmConfig} noPlayground keepConfigOpen />
@@ -0,0 +1,79 @@
import { useEffect } from "react";
import KernDropdown from "@/submodules/react-components/components/KernDropdown";
import { InfoButton } from "@/submodules/react-components/components/InfoButton";
import { InputWithSlider } from "@/submodules/react-components/components/InputWithSlider";
import { LLmPropsPrivatemodeAI } from "../types";


export const MODEL_MAP_FULL_NAME = {
'Meta-Llama-3.3-70B': "ibnzterrell/Meta-Llama-3.3-70B-Instruct-AWQ-INT4",
'gemma-3-27b': "leon-se/gemma-3-27b-it-fp8-dynamic",
}
const REVERSE_MAP_FULL_NAME = Object.fromEntries(Object.entries(MODEL_MAP_FULL_NAME).map(([key, value]) => [value, key]));
export const MODEL_OPTIONS = Object.keys(MODEL_MAP_FULL_NAME);

export default function PrivatemodeAi(props: LLmPropsPrivatemodeAI) {
useEffect(() => {
if (props.llmConfig.model && !MODEL_OPTIONS.includes(props.llmConfig.model)) {
props.setLlmConfig({ ...props.llmConfig, model: MODEL_MAP_FULL_NAME[MODEL_OPTIONS[0]] })
}
}, []);
return (
<div className='flex flex-col'>
{props.onlyEssential ?
null : <div className="mb-2 flex items-center">
<span className="text-sm font-medium text-gray-900 line-through">API Key </span>
<InfoButton content="Set in backend" divPosition="right" infoButtonSize="sm" />
</div>}
<div>
<label className="block mb-2 text-sm font-medium text-gray-900">Model</label>
<KernDropdown
buttonName={props.llmConfig.model ? REVERSE_MAP_FULL_NAME[props.llmConfig.model] : 'Select model'}
options={MODEL_OPTIONS}
selectedOption={(option) => props.setLlmConfig({ ...props.llmConfig, model: MODEL_MAP_FULL_NAME[option] })}
disabled={props.disabled}
/>
</div>
{props.onlyEssential ? null : <>
<InputWithSlider
label="Temperature"
value={props.llmConfig.temperature}
min={0}
max={2}
step={0.01}
onChange={(value) => props.setLlmConfig({ ...props.llmConfig, temperature: value })}
disabled={props.disabled}
/>

<InputWithSlider
label="Top P"
value={props.llmConfig.topP}
min={0}
max={1}
step={0.01}
onChange={(value) => props.setLlmConfig({ ...props.llmConfig, topP: value })}
disabled={props.disabled}
/>
<InputWithSlider
label='Frequency penalty'
value={props.llmConfig.frequencyPenalty}
min={-2}
max={2}
step={0.01}
onChange={(value) => props.setLlmConfig({ ...props.llmConfig, frequencyPenalty: value })}
disabled={props.disabled}
/>
<InputWithSlider
label='Presence penalty'
value={props.llmConfig.presencePenalty}
min={-2}
max={2}
step={0.01}
onChange={(value) => props.setLlmConfig({ ...props.llmConfig, presencePenalty: value })}
disabled={props.disabled}
/>
</>}

</div>
)
}
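
For orientation, a small sketch (not part of the diff) of the name mapping the new component relies on: MODEL_OPTIONS holds the short display keys, while llmConfig.model stores the full model identifier and REVERSE_MAP_FULL_NAME turns it back into the dropdown caption.

// Illustrative only – mirrors MODEL_MAP_FULL_NAME / REVERSE_MAP_FULL_NAME defined above.
const shortName = 'gemma-3-27b';
const fullName = MODEL_MAP_FULL_NAME[shortName];   // "leon-se/gemma-3-27b-it-fp8-dynamic" – stored in llmConfig.model
const caption = REVERSE_MAP_FULL_NAME[fullName];   // "gemma-3-27b" – shown as the dropdown button name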
@@ -30,6 +30,14 @@ export type LLmPropsOpenAI = {
disabled?: boolean;
}

+export type LLmPropsPrivatemodeAI = {
+llmConfig: any;
+setLlmConfig: (llmConfig: any) => void;
+onlyEssential?: boolean;
+projectId?: string;
+disabled?: boolean;
+}


export type LLmPropsAzure = {
llmConfig: any;
@@ -56,7 +56,7 @@ export default function AddNewEmbeddingModal() {

useEffect(() => {
prepareSuggestions();
-checkIfPlatformHasToken();
+// checkIfPlatformHasToken();
}, []);

useEffect(() => {
@@ -84,7 +84,7 @@ export default function AddNewEmbeddingModal() {
const suggestionListFiltered = suggestionList.map((suggestion: any) => {
const suggestionCopy = { ...suggestion };
const applicability = JSON.parse(suggestionCopy.applicability);
-if ((granularity.value == EmbeddingType.ON_ATTRIBUTE && applicability.attribute) || (granularity.value == EmbeddingType.ON_TOKEN && applicability.token)) {
+if ((granularity.value == EmbeddingType.ON_ATTRIBUTE && applicability.attribute) /*|| (granularity.value == EmbeddingType.ON_TOKEN && applicability.token)*/) {
suggestionCopy.forceHidden = false;
} else {
suggestionCopy.forceHidden = true;
@@ -99,11 +99,13 @@

function checkIfAttributeHasToken() {
const attribute = useableEmbedableAttributes.find((a) => a.id == targetAttribute.id);
-if (attribute?.dataType == DataTypeEnum.EMBEDDING_LIST) {
-setGranularityArray(GRANULARITY_TYPES_ARRAY.filter((g) => g.value != EmbeddingType.ON_TOKEN));
-} else {
-checkIfPlatformHasToken();
-}
+setGranularityArray(GRANULARITY_TYPES_ARRAY);
+
+// if (attribute?.dataType == DataTypeEnum.EMBEDDING_LIST) {
+// setGranularityArray(GRANULARITY_TYPES_ARRAY.filter((g) => g.value != EmbeddingType.ON_TOKEN));
+// } else {
+// checkIfPlatformHasToken();
+// }
}

function changePlatformOrGranularity() {
@@ -131,18 +133,21 @@
acceptButtonCopy.disabled = checkIfCreateEmbeddingIsDisabled({ platform, model, apiToken, termsAccepted, embeddings, targetAttribute, granularity, engine, url, version, embeddingPlatforms });
setAcceptButton(acceptButtonCopy);
setTermsAccepted(false);
-setModel(null);
+if (savePlatform == PlatformType.PRIVATEMODE_AI) {
+console.log("Private Mode AI platform selected, setting model to default.");
+setModel("intfloat/multilingual-e5-large-instruct");
+} else setModel(null);
setApiToken('');
}

-function checkIfPlatformHasToken() {
-if (!platform) return;
-if (platform.name == platformNamesDict[PlatformType.OPEN_AI] || platform.name == platformNamesDict[PlatformType.AZURE]) {
-setGranularityArray(GRANULARITY_TYPES_ARRAY.filter((g) => g.value != EmbeddingType.ON_TOKEN));
-} else {
-setGranularityArray(GRANULARITY_TYPES_ARRAY);
-}
-}
+// function checkIfPlatformHasToken() {
+// if (!platform) return;
+// if (platform.name == platformNamesDict[PlatformType.OPEN_AI] || platform.name == platformNamesDict[PlatformType.AZURE]) {
+// setGranularityArray(GRANULARITY_TYPES_ARRAY.filter((g) => g.value != EmbeddingType.ON_TOKEN));
+// } else {
+// setGranularityArray(GRANULARITY_TYPES_ARRAY);
+// }
+// }

const prepareAzureData = useCallback(() => {
const getAzureUrl = localStorage.getItem('azureUrls');
@@ -175,7 +180,8 @@
platform: platform.platform,
termsText: gdprText.current != null ? gdprText.current.innerText : null,
termsAccepted: termsAccepted,
-embeddingType: granularity.value == EmbeddingType.ON_TOKEN ? EmbeddingType.ON_TOKEN : EmbeddingType.ON_ATTRIBUTE,
+// embeddingType: granularity.value == EmbeddingType.ON_TOKEN ? EmbeddingType.ON_TOKEN : EmbeddingType.ON_ATTRIBUTE,
+embeddingType: EmbeddingType.ON_ATTRIBUTE,
filterAttributes: filteredAttributes
}

@@ -191,6 +197,8 @@
config.type = DEFAULT_AZURE_TYPE;
config.version = version;
prepareAzureData();
+} else if (platform.name == platformNamesDict[PlatformType.PRIVATEMODE_AI]) {
+config.model = model;
}
createEmbeddingPost(projectId, targetAttribute.id, JSON.stringify(config), (res) => { });
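
As a rough illustration (values assumed, not taken from the PR), the config serialized here for the Privatemode AI platform would look roughly like this:

// Hypothetical payload – shape inferred from the hunks above.
const exampleConfig = {
    platform: "privatemode-ai",                       // assumed platform.platform value, matching PlatformType.PRIVATEMODE_AI
    termsText: "…",                                   // innerText of the rendered terms label
    termsAccepted: true,
    embeddingType: EmbeddingType.ON_ATTRIBUTE,        // token-level granularity is disabled in this PR
    filterAttributes: filteredAttributes,
    model: "intfloat/multilingual-e5-large-instruct", // default set in changePlatformOrGranularity
};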

@@ -290,13 +298,20 @@
<SuggestionsAzure options={azureVersions} selectedOption={(option: string) => setVersion(option)} name="Version" tooltip="The latest version of the Azure OpenAI service can also be found here." />
</>}
</>}
+{platform && platform.name == platformNamesDict[PlatformType.PRIVATEMODE_AI] && <>
+<Tooltip content={TOOLTIPS_DICT.PROJECT_SETTINGS.EMBEDDINGS.MODEL} placement="right" color="invert">
+<span className="card-title mb-0 label-text flex"><span className="cursor-help underline filtersUnderline">Model</span></span>
+</Tooltip>
+<input defaultValue="intfloat/multilingual-e5-large-instruct" disabled className="h-9 w-full text-sm border-gray-300 rounded-md placeholder-italic border text-gray-900 pl-4 focus:outline-none focus:ring-2 focus:ring-gray-300 focus:ring-offset-2 focus:ring-offset-gray-100 disabled:opacity-50" />
+</>}
</div>
-{platform && (platform.name == platformNamesDict[PlatformType.OPEN_AI] || platform.name == platformNamesDict[PlatformType.AZURE]) && <div className="text-center mt-3">
+{platform && (platform.name == platformNamesDict[PlatformType.OPEN_AI] || platform.name == platformNamesDict[PlatformType.AZURE] || platform.name == platformNamesDict[PlatformType.PRIVATEMODE_AI]) && <div className="text-center mt-3">
<div className="border border-gray-300 text-xs text-gray-500 p-2.5 rounded-lg text-justify">
<label ref={gdprText} className="text-gray-700">
{selectedPlatform.splitTerms[0]}
{platform.name == platformNamesDict[PlatformType.OPEN_AI] && <a href={selectedPlatform.link} target="_blank" className="underline">openai terms of service.</a>}
{platform.name == platformNamesDict[PlatformType.AZURE] && <a href={selectedPlatform.link} target="_blank" className="underline">azure terms of service.</a>}
+{platform.name == platformNamesDict[PlatformType.PRIVATEMODE_AI] && <a href={selectedPlatform.link} target="_blank" className="underline">private mode ai terms of service.</a>}
<div>{selectedPlatform.splitTerms[1]}</div>
</label>
</div>
@@ -31,7 +31,7 @@ export type EmbeddingWithOnClick = Embedding & {

export enum EmbeddingType {
ON_ATTRIBUTE = "ON_ATTRIBUTE",
-ON_TOKEN = "ON_TOKEN"
+// ON_TOKEN = "ON_TOKEN" //currently removed since it doesn't seem to be used but kept code wise for easy reenabling
};

export type EmbeddingPlatform = {
@@ -46,7 +46,8 @@
export enum PlatformType {
HUGGING_FACE = "huggingface",
OPEN_AI = "openai",
-AZURE = "azure"
+AZURE = "azure",
+PRIVATEMODE_AI = "privatemode-ai"
}

export type RecommendedEncoder = {
@@ -47,7 +47,8 @@ export function postProcessRecordByRecordId(record: Record): Record {
export const LLM_PROVIDER_OPTIONS = [
'Open AI',
'Azure',
-'Azure Foundry'
+'Azure Foundry',
+'Privatemode AI'
];

export function postProcessLLMPlaygroundRecordData(recordList: any[]): any[] {