Skip to content

Commit e0e8257

Browse files
uyriquyriq
andauthored
yet another basic demo for openai's node.js client (#2)
Co-authored-by: uyriq <uyriq@local+buntu>
1 parent 5465fed commit e0e8257

File tree

4 files changed

+104
-1
lines changed

4 files changed

+104
-1
lines changed

examples/node-js/.env.sample

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,3 @@
1-
# Create a file called ".env" and add this env var:
1+
# Create a file called ".env" and add these env vars:
22
OPENAI_API_KEY=...insertme...
3+
PROXY_PATH=...do `wrangler publish` and place working url of your worker there...

examples/node-js/README.md

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
### Example client usage
2+
3+
With `example.js`, you can test how openai-caching-proxy-worker works by running `wrangler dev` in one terminal
4+
and `cd examples/node-js && node example.js` in another, then you can see in your local environment
5+
how open-ai requests are executed using redis caching.
6+
If you want to publish your local instance of openai-caching-proxy-worker to Cloudflare after running `wrangler publish`
7+
you will get a working url; add it to your local `.env` file as PROXY_PATH, and then you can try `example2.js`,
8+
it has command line options, note that order is important
9+
10+
1. option model < text-ada-001 | text-davinci-003 >
11+
2. the cache option — pass `cacheYES` to allow cached responses, anything else to force a refresh
12+
3. option prompt - your question to chatGPT <string>
13+
14+
so execute like this
15+
`node example2.js text-davinci-003 cacheNO 'is any difference between white dogs and black mice?'`
16+
17+
```markdown
18+
R fresh:
19+
Q:
20+
is any difference between white dogs and black mice?
21+
R:
22+
Yes, there is a difference between white dogs and black mice. White...
23+
```

examples/node-js/example2.js

Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
// yet another basic demo usage for openai's node.js client with command line args and non-local worker.
2+
// Run with:
3+
// node example2.js
4+
// With example.js, you can test how openai-caching-proxy-worker works by running `wrangler dev` in one terminal and
5+
// `cd examples/node-js && node example.js` in another, then you can see in your local environment how requests
6+
// to open-ai are executed using redis caching.
7+
// If you want to publish your local instance of openai-caching-proxy-worker to Cloudflare after running `wrangler publish`
8+
// you will get a working url, add it in your local variable .env PROXY_PATH and then you can try `example2.js`, it has command line options, note that order is important:
9+
// 1. option model < text-ada-001 | text-davinci-003 | ...any suported by openAI >
10+
// 2. the 'cacheYES' option <boolean> : string|any
11+
// 3. option prompt - your question to chatGPT <string>
12+
13+
import * as dotenv from 'dotenv';
14+
import { Configuration, OpenAIApi } from 'openai';
15+
16+
dotenv.config();
17+
18+
const apiKey = process.env.OPENAI_API_KEY;
19+
const proxyPath = process.env.PROXY_PATH;
20+
if (!apiKey) {
21+
console.error('Error: OPENAI_API_KEY must be set');
22+
process.exit(1);
23+
}
24+
25+
// cli args are following: 1. option model < text-ada-001 | text-davinci-003 > 2. option 'cacheYES' <boolean> : string|any 3. option prompt - question to chatGPT <string>
26+
const args = process.argv.slice(2);
27+
let model = '';
28+
const prompt = '' || String(args.at(-1));
29+
const cache = String(args.at(1)) === 'cacheYES' ? true : false;
30+
31+
const configuration = new Configuration({
32+
apiKey,
33+
// Set this to your local instance or Cloudflare deployment after doing `wrangler publish` and got cloudflare worker url to local .env PROXY_PATH variable
34+
basePath: proxyPath || `http://localhost:8787/proxy`,
35+
baseOptions: {
36+
headers: {
37+
// Cache responses for 3600 seconds (1 hour)
38+
'X-Proxy-TTL': 3600,
39+
// If you need to force refresh cache, you can uncomment below:
40+
'X-Proxy-Refresh': !cache,
41+
},
42+
},
43+
});
44+
const openai = new OpenAIApi(configuration);
45+
46+
switch (args[0]) {
47+
case 'text-ada-001':
48+
// cheap
49+
model = 'text-ada-001';
50+
break;
51+
case 'text-davinci-003':
52+
// overprice
53+
model = 'text-davinci-003';
54+
break;
55+
default:
56+
break;
57+
}
58+
59+
const makeSampleRequests = async () => {
60+
const completionOpts = {
61+
model: model || 'text-ada-001',
62+
prompt: prompt || 'HOW MUCH IS 2 + 2 * 2 ?',
63+
};
64+
const completion = await openai.createCompletion(completionOpts);
65+
console.log(
66+
`${cache ? 'R from chache' : 'R fresh'}:\nQ:\n ${
67+
completionOpts.prompt
68+
}\nR:\n${completion.data.choices[0].text.trim()}`,
69+
);
70+
};
71+
72+
const main = async () => {
73+
// The first time these requests are made, they should
74+
// be proxied as-is to OpenAI API:
75+
await makeSampleRequests();
76+
};
77+
78+
main();

examples/node-js/package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,3 +15,4 @@
1515
"openai": "^3.1.0"
1616
}
1717
}
18+

0 commit comments

Comments
 (0)