5 files changed (+101, -16 lines)
New file: environment variables example (.env template)

# !! SERVER SIDE ONLY !!
# Keep all your API keys secret. Use them only on the server side.

# TODO: ADD to both your production and local env files.
# Langbase API key for your user or org account.
# How to get this API key: https://langbase.com/docs/api-reference/api-keys
LANGBASE_API_KEY=

# TODO: ADD, LOCAL ONLY. Add the following only to local env files.
# These keys are needed for local pipe runs, for the providers you use.
# On Langbase itself, add the key to your LLM keysets instead.
# Read more: Langbase LLM keysets https://langbase.com/docs/features/keysets
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
COHERE_API_KEY=
FIREWORKS_API_KEY=
GOOGLE_API_KEY=
GROQ_API_KEY=
MISTRAL_API_KEY=
PERPLEXITY_API_KEY=
TOGETHER_API_KEY=
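These keys are read from the environment at run time, so a quick sanity check before running pipes locally can save a confusing provider error. Below is a minimal sketch, assuming dotenv is installed; the file name and the list of required keys are illustrative and not part of BaseAI.

// check-env.ts (hypothetical helper, not generated by BaseAI)
import 'dotenv/config';

// LANGBASE_API_KEY is always needed; add the provider keys you actually use locally.
const required = ['LANGBASE_API_KEY', 'OPENAI_API_KEY'];

const missing = required.filter(name => !process.env[name]);
if (missing.length > 0) {
  console.error(`Missing environment variables: ${missing.join(', ')}`);
  process.exit(1);
}
console.log('All required API keys are set.');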
New file: .gitignore

# baseai
**/.baseai/
# env file
.env
New file: baseai/baseai.config.ts

export const config = {
  log: {
    isEnabled: true,
    isEnabledInProd: true,
    logSensitiveData: false,
    pipe: true,
    'pipe.completion': true,
    'pipe.request': true,
    'pipe.response': true,
    'pipe.request.prodOptions': false,
    'pipe.request.localOptions': false,
    tool: false,
    memory: false,
  },
  memory: {
    useLocalEmbeddings: false,
  },
  envFilePath: '.env',
  env: {
    NODE_ENV: 'production',
    LANGBASE_API_KEY: process.env.LANGBASE_API_KEY,
    OPENAI_API_KEY: process.env.OPENAI_API_KEY,
  },
};
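The config above keeps logging on even in production and relies on hosted embeddings. For local debugging the same shape can be tuned the other way; the sketch below is illustrative only and assumes the flags behave as their names suggest. It is shown as a separate export so it does not replace the config above.

// Illustrative development-oriented variant of the same config shape (assumption, not from the repo).
export const devConfig = {
  log: {
    isEnabled: true,
    isEnabledInProd: false,   // keep production quiet
    logSensitiveData: false,  // never log keys or raw payloads
    pipe: true,
    tool: true,               // also trace tool calls while debugging
    memory: true,             // and memory retrieval
  },
  memory: {
    useLocalEmbeddings: true, // embed locally instead of calling a hosted provider
  },
  envFilePath: '.env',
  env: {
    NODE_ENV: 'development',
    LANGBASE_API_KEY: process.env.LANGBASE_API_KEY,
    OPENAI_API_KEY: process.env.OPENAI_API_KEY,
  },
};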
New file: baseai/pipes/summary.ts

import { PipeI } from '@baseai/core';

const buildPipe = (): PipeI => ({
  apiKey: process.env.LANGBASE_API_KEY,
  name: 'summary-testing',
  description: '',
  status: 'private',
  model: 'openai:gpt-4o-mini',
  stream: true,
  json: false,
  store: true,
  moderate: true,
  top_p: 1,
  max_tokens: 1000,
  temperature: 0.7,
  presence_penalty: 1,
  frequency_penalty: 1,
  stop: [],
  tool_choice: 'auto',
  parallel_tool_calls: false,
  messages: [
    {
      role: 'system',
      content: `You are a helpful AI assistant. Make everything less wordy.`,
    },
  ],
  variables: [],
  memory: [],
  tools: [],
});

export default buildPipe;
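The pipe ships with empty variables, memory, and tools. As an illustration of how a prompt variable could be wired in, the sketch below assumes Langbase's {{variable}} templating and a {name, value} entry shape; check the pipe schema in @baseai/core before relying on it.

// Sketch only: same factory with one prompt variable (entry shape assumed, not verified).
const buildPipeWithTone = (): PipeI => ({
  ...buildPipe(),
  name: 'summary-testing-tone',
  messages: [
    {
      role: 'system',
      // {{tone}} is assumed to be substituted at run time by the pipe's variable handling.
      content: `You are a helpful AI assistant. Make everything less wordy, in a {{tone}} tone.`,
    },
  ],
  variables: [{ name: 'tone', value: 'neutral' }],
});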
Changed file: run script (the previously commented-out example is enabled and wired to the new pipe and config)

-// import {Pipe} from '@baseai/core';
-// import pipeSummary from '../baseai/pipes/summary';
+import { Pipe } from '@baseai/core';
+import 'dotenv/config';
+import { config } from './baseai/baseai.config';
+import pipeSummary from './baseai/pipes/summary';

-// const pipe = new Pipe(pipeSummary());
+const pipe = new Pipe({
+  ...pipeSummary(),
+  config,
+});

-// async function main() {
-//   const userMsg = 'Who is an AI Engineer?';
+async function main() {
+  const userMsg = 'Who is an AI Engineer?';

-//   const response = await pipe.run({
-//     messages: [
-//       {
-//         role: 'user',
-//         content: userMsg,
-//       },
-//     ],
-//   });
-//   console.log('response: ', response);
-// }
+  const response = await pipe.run({
+    messages: [
+      {
+        role: 'user',
+        content: userMsg,
+      },
+    ],
+  });
+}

-// main();
+main();
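As written, main() discards the run result. Since the pipe is defined with stream: true, here is a sketch for consuming the output instead, assuming @baseai/core's streaming helpers (getRunner and the stream field on the run result) behave as shown in the BaseAI docs.

// Sketch: stream the completion to stdout instead of discarding it.
// Assumes getRunner is exported by @baseai/core and reuses the `pipe` defined above.
import { getRunner } from '@baseai/core';

async function mainStreaming() {
  const { stream } = await pipe.run({
    messages: [{ role: 'user', content: 'Who is an AI Engineer?' }],
    stream: true,
  });

  const runner = getRunner(stream);
  runner.on('content', chunk => process.stdout.write(chunk));
  runner.on('end', () => process.stdout.write('\n'));
  runner.on('error', error => console.error('Stream error:', error));
}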