-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcli-app-commonjs.js
More file actions
495 lines (421 loc) · 15.8 KB
/
cli-app-commonjs.js
File metadata and controls
495 lines (421 loc) · 15.8 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
const readline = require('readline');
const { Pinecone } = require('@pinecone-database/pinecone');

// --- Configuration ---
// SECURITY FIXME: this API key was committed to source control and must be
// treated as compromised — rotate it in the Pinecone dashboard and then delete
// the hard-coded fallback. Prefer supplying it via the PINECONE_API_KEY env var.
const PINECONE_API_KEY =
  process.env.PINECONE_API_KEY ??
  'pcsk_52AbGy_CDjDs6zTiABmKYor3xnxxh6qkYQFQEwsNmg9XCQwprpfv4NCWmk6TDnJ3jTECE4';
// NOTE: 'code-embedddings' (triple "d") matches the host URL below — it is the
// real index name; do not "fix" the spelling without renaming the index.
const PINECONE_INDEX = 'code-embedddings';
const PINECONE_HOST = 'https://code-embedddings-r7rx8or.svc.aped-4627-b74a.pinecone.io';
const PINECONE_DIMENSION = 768; // presumably matches the embedding model's output size — TODO confirm
const PINECONE_METRIC = 'cosine';
const PINECONE_CLOUD = 'aws';
const PINECONE_REGION = 'us-east-1';
const OLLAMA_URL = 'http://localhost:11434/api/generate';
const OLLAMA_MODEL = 'deepseek-r1:8b';

// Shared readline interface for all interactive prompts; closed on menu option 5.
const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout
});
/**
 * Entry point: start the interactive menu loop.
 *
 * The original body was a byte-for-byte duplicate of showMenu(); delegating
 * keeps a single source of truth for the menu and its dispatch logic.
 */
async function main() {
  await showMenu();
}
/**
 * Test connectivity to Pinecone: list indexes, and if the configured index
 * exists, print its statistics.
 * @throws {Error} propagated from the Pinecone SDK on connection failure
 */
async function testPineconeConnection() {
  console.log('Testing Pinecone connection...');
  const pc = new Pinecone({
    apiKey: PINECONE_API_KEY
  });
  console.log('Listing indexes...');
  const indexes = await pc.listIndexes();
  console.log('Available indexes:', JSON.stringify(indexes, null, 2));
  // Check if our index exists before asking for its stats.
  const indexExists = indexes.indexes?.some(idx => idx.name === PINECONE_INDEX);
  if (indexExists) {
    console.log(`Index '${PINECONE_INDEX}' exists!`);
    const index = pc.index(PINECONE_INDEX);
    const stats = await index.describeIndexStats();
    console.log('Index statistics:', JSON.stringify(stats, null, 2));
    console.log(`Total vectors in index: ${stats.totalRecordCount}`);
  } else {
    console.log(`Index '${PINECONE_INDEX}' does not exist. Please create it in the Pinecone dashboard.`);
  }
  console.log('Pinecone connection successful!');
}

/**
 * Test connectivity to the local Ollama server with a one-sentence prompt.
 * @throws {Error} on a non-2xx HTTP response or network failure
 */
async function testOllamaConnection() {
  console.log('Testing Ollama connection...');
  // node-fetch is ESM-only, so load it lazily via dynamic import.
  const fetch = (...args) => import('node-fetch').then(({ default: fetch }) => fetch(...args));
  const response = await fetch(OLLAMA_URL, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
      model: OLLAMA_MODEL,
      prompt: 'Say hello and introduce yourself in one sentence',
      stream: false
    }),
  });
  if (!response.ok) {
    throw new Error(`HTTP error! Status: ${response.status}`);
  }
  const data = await response.json();
  console.log('Ollama response:', data.response);
}

/**
 * Print the interactive menu, read one choice, dispatch it, then loop.
 * Every branch re-enters showMenu() except option 5, which exits the process.
 */
async function showMenu() {
  console.log('\n------ Code AI Assistant CLI (Simple Mode) ------');
  console.log('1. Test Pinecone Connection');
  console.log('2. Test Ollama Connection');
  console.log('3. Index a File');
  console.log('4. Query Similar Code');
  console.log('5. Exit');
  console.log('----------------------------------------------\n');
  rl.question('Choose an option (1-5): ', async (answer) => {
    switch (answer) {
      case '1':
        try {
          await testPineconeConnection();
        } catch (error) {
          console.error('Failed to connect to Pinecone:', error);
        }
        await showMenu();
        break;
      case '2':
        try {
          await testOllamaConnection();
        } catch (error) {
          console.error('Failed to connect to Ollama:', error);
          console.log('Make sure Ollama is running and the deepseek-r1:8b model is available');
          console.log('Install Ollama from https://ollama.ai/download');
          console.log('Then run: ollama pull deepseek-r1:8b');
        }
        await showMenu();
        break;
      case '3':
        // Index a file
        rl.question('Enter path to the file you want to index: ', async (filePath) => {
          try {
            await indexFile(filePath);
          } catch (error) {
            console.error('Error indexing file:', error);
          }
          await showMenu();
        });
        break;
      case '4':
        // Query similar code
        rl.question('Enter path to the file with code you want to find similar code for: ', async (filePath) => {
          try {
            if (!fs.existsSync(filePath)) {
              throw new Error(`File not found: ${filePath}`);
            }
            const code = fs.readFileSync(filePath, 'utf8');
            rl.question('Enter your question about this code: ', async (question) => {
              try {
                await querySimilarCode(code, question);
              } catch (error) {
                console.error('Error querying similar code:', error);
              }
              await showMenu();
            });
          } catch (error) {
            console.error('Error reading file:', error);
            await showMenu();
          }
        });
        break;
      case '5':
        console.log('Goodbye!');
        rl.close();
        process.exit(0);
        break;
      default:
        console.log('Invalid option. Please try again.');
        await showMenu();
        break;
    }
  });
}
// Cached pipeline so the model is only downloaded/initialized once per
// process — both indexFile() and querySimilarCode() call loadTransformers().
let cachedEmbeddingPipeline = null;

/**
 * Load (once) and return the feature-extraction pipeline used for embeddings.
 * Subsequent calls return the cached pipeline without reloading the model.
 * @returns {Promise<Function>} a @xenova/transformers feature-extraction pipeline
 * @throws {Error} if the model cannot be loaded (original error kept as `cause`)
 */
async function loadTransformers() {
  if (cachedEmbeddingPipeline !== null) {
    return cachedEmbeddingPipeline;
  }
  try {
    console.log('Loading transformers...');
    // @xenova/transformers is ESM-only; load it lazily from this CommonJS file.
    const { pipeline } = await import('@xenova/transformers');
    console.log('Initializing CodeT5-small model...');
    cachedEmbeddingPipeline = await pipeline('feature-extraction', 'Salesforce/codet5-small');
    console.log('Model loaded successfully!');
    return cachedEmbeddingPipeline;
  } catch (error) {
    console.error('Error loading transformers:', error);
    // Wrap with a stable message but preserve the underlying failure.
    throw new Error('Failed to load the embedding model', { cause: error });
  }
}
/**
 * Generate a dense embedding vector for the given text.
 * @param {string} text - content to embed
 * @param {Function} embeddingModel - feature-extraction pipeline; called as
 *   embeddingModel(text, opts) and expected to resolve to { data: TypedArray }
 * @returns {Promise<number[]>} the embedding as a plain number array
 * @throws {Error} if the model call fails (original error kept as `cause`)
 */
async function generateEmbedding(text, embeddingModel) {
  try {
    // Mean-pool token embeddings and normalize so cosine similarity is meaningful.
    const output = await embeddingModel(text, { pooling: 'mean', normalize: true });
    // Convert the typed array to a plain array for JSON-friendly storage.
    return Array.from(output.data);
  } catch (error) {
    console.error('Error generating embedding:', error);
    // Preserve the underlying failure for diagnostics instead of discarding it.
    throw new Error('Failed to generate embedding', { cause: error });
  }
}
/**
 * Split file content into line-based chunks of at most 100 lines each.
 * @param {string} text - full file content
 * @param {string} filePath - stored in each chunk's metadata
 * @param {string} language - stored in each chunk's metadata
 * @returns {Array<{content: string, filePath: string, startLine: number,
 *   endLine: number, language: string}>} chunks with 0-based inclusive
 *   startLine/endLine indices
 */
function splitIntoChunks(text, filePath, language) {
  const lines = text.split('\n');
  const chunks = [];
  const maxChunkSize = 100; // Maximum lines per chunk

  let currentChunk = [];
  let startLine = 0;
  for (let i = 0; i < lines.length; i++) {
    currentChunk.push(lines[i]);
    // Flush a full chunk and start the next one at the following line.
    if (currentChunk.length >= maxChunkSize) {
      chunks.push({
        content: currentChunk.join('\n'),
        filePath: filePath,
        startLine: startLine,
        endLine: i,
        language: language
      });
      currentChunk = [];
      startLine = i + 1;
    }
  }
  // BUG FIX: a trailing remainder shorter than the old 10-line minimum was
  // silently dropped, so the tail of most files never got indexed. Keep any
  // remainder that contains non-whitespace content.
  if (currentChunk.length > 0 && currentChunk.join('\n').trim() !== '') {
    chunks.push({
      content: currentChunk.join('\n'),
      filePath: filePath,
      startLine: startLine,
      endLine: lines.length - 1,
      language: language
    });
  }
  return chunks;
}
/**
 * Index one file into Pinecone: read it, chunk it, embed each chunk, and
 * upsert all vectors in a single batch.
 * @param {string} filePath - path of the file to index
 * @throws {Error} if the file is missing or any embedding/Pinecone step fails
 */
async function indexFile(filePath) {
  try {
    if (!fs.existsSync(filePath)) {
      throw new Error(`File not found: ${filePath}`);
    }
    // Create Pinecone client and get a handle on the target index.
    const pc = new Pinecone({
      apiKey: PINECONE_API_KEY
    });
    const index = pc.index(PINECONE_INDEX);
    // Load the embedding model.
    const embeddingModel = await loadTransformers();

    const fileContent = fs.readFileSync(filePath, 'utf8');
    const language = path.extname(filePath).substring(1); // extension without the dot
    console.log(`Indexing file: ${filePath}`);
    console.log(`Language detected: ${language}`);

    const chunks = splitIntoChunks(fileContent, filePath, language);
    console.log(`Split file into ${chunks.length} chunks`);

    // Embed sequentially (the model call dominates), but collect the vectors
    // and upsert once — a single network round-trip instead of one per chunk.
    const vectors = [];
    for (const chunk of chunks) {
      // Deterministic id: the same file + line range always maps to the same
      // vector, so re-indexing a file overwrites rather than duplicates.
      const id = crypto.createHash('md5').update(`${chunk.filePath}:${chunk.startLine}-${chunk.endLine}`).digest('hex');
      console.log(`Generating embedding for lines ${chunk.startLine}-${chunk.endLine}...`);
      const embedding = await generateEmbedding(chunk.content, embeddingModel);
      vectors.push({
        id,
        values: embedding,
        metadata: chunk
      });
    }
    if (vectors.length > 0) {
      console.log('Storing embeddings in Pinecone...');
      await index.upsert(vectors);
    }

    console.log(`Successfully indexed file: ${filePath}`);
    // Report updated index statistics.
    const stats = await index.describeIndexStats();
    console.log(`Total vectors in index: ${stats.totalRecordCount}`);
  } catch (error) {
    console.error(`Error indexing file ${filePath}:`, error);
    throw error;
  }
}
/**
 * Find code similar to `code` in the Pinecone index and ask Ollama to answer
 * `question` using the matched chunks as context.
 * @param {string} code - query code snippet to embed and search with
 * @param {string} question - user question answered against the matches
 * @throws {Error} on embedding, Pinecone, or Ollama failures (logged, rethrown)
 */
async function querySimilarCode(code, question) {
  try {
    // Client, index handle, and embedding model.
    const pinecone = new Pinecone({ apiKey: PINECONE_API_KEY });
    const codeIndex = pinecone.index(PINECONE_INDEX);
    const embeddingModel = await loadTransformers();

    console.log('Generating embedding for the query code...');
    const queryVector = await generateEmbedding(code, embeddingModel);

    console.log('Querying similar code from Pinecone...');
    const searchResults = await codeIndex.query({
      vector: queryVector,
      topK: 5,
      includeMetadata: true
    });

    // Build the prompt context from the matched chunks (or a fallback note).
    const matches = searchResults.matches ?? [];
    let context;
    if (matches.length > 0) {
      const sections = matches.map(({ metadata }) =>
        `From ${metadata.filePath} (lines ${metadata.startLine}-${metadata.endLine}):\n${metadata.content}`
      );
      context = sections.join('\n\n');
      console.log(`Found ${matches.length} similar code segments`);
    } else {
      context = 'No similar code found in the indexed files.';
      console.log('No similar code found');
    }

    // Ask Ollama, providing the retrieved context plus the user's question.
    console.log('Asking Ollama for explanation...');
    const doFetch = (...args) => import('node-fetch').then(({ default: fetch }) => fetch(...args));
    const ollamaResponse = await doFetch(OLLAMA_URL, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        model: OLLAMA_MODEL,
        prompt: `You are a programming assistant. Based on this context:\n\n${context}\n\nPlease answer: ${question}`,
        stream: false
      }),
    });
    if (!ollamaResponse.ok) {
      throw new Error(`HTTP error! Status: ${ollamaResponse.status}`);
    }

    const answer = await ollamaResponse.json();
    console.log('\n------ AI Response ------');
    console.log(answer.response);
    console.log('-------------------------\n');
  } catch (error) {
    console.error('Error querying similar code:', error);
    throw error;
  }
}
// Run the application: start the interactive menu loop; the process stays
// alive on the readline interface until the user picks option 5 (Exit).
main();