diff --git a/src/main/cache/CacheManager.ts b/src/main/cache/CacheManager.ts
new file mode 100644
index 0000000..e97e039
--- /dev/null
+++ b/src/main/cache/CacheManager.ts
@@ -0,0 +1,257 @@
+import { QueryCache } from './QueryCache'
+import { QueryNormalizer } from './QueryNormalizer'
+import { QueryResult } from '../database/interface'
+import { CacheConfig, CacheStats, CacheMetadata, DEFAULT_CACHE_CONFIG } from './types'
+
+export class CacheManager {
+  private static instance: CacheManager | null = null
+  private queryCache: QueryCache
+  private config: CacheConfig
+  private enabled: boolean = true
+
+  private constructor(config: Partial<CacheConfig> = {}) {
+    this.config = { ...DEFAULT_CACHE_CONFIG, ...config }
+    this.queryCache = new QueryCache(this.config)
+  }
+
+  static getInstance(config?: Partial<CacheConfig>): CacheManager {
+    if (!CacheManager.instance) {
+      CacheManager.instance = new CacheManager(config)
+    }
+    return CacheManager.instance
+  }
+
+  /**
+   * Get cached query result
+   */
+  async getCachedResult(
+    query: string,
+    connectionId: string,
+    database?: string,
+    sessionId?: string
+  ): Promise<QueryResult | null> {
+    if (!this.enabled || !QueryNormalizer.shouldCache(query)) {
+      return null
+    }
+
+    const cacheKey = QueryNormalizer.generateCacheKey(query, connectionId, database, sessionId)
+    const isSessionIndependent = QueryNormalizer.isSessionIndependentQuery(query)
+
+    const result = await this.queryCache.get(cacheKey)
+
+    // Enhanced logging for cache strategy monitoring
+    if (result) {
+      console.log(
+        `[CACHE HIT] ${isSessionIndependent ? 'Session-Independent' : 'Session-Dependent'} query cache hit`,
+        {
+          queryType: QueryNormalizer.normalize(query).queryType,
+          isSessionIndependent,
+          sessionId: isSessionIndependent ? 'N/A' : sessionId,
+          cacheKey: cacheKey.substring(0, 16) + '...'
+        }
+      )
+    }
+
+    return result
+  }
+
+  /**
+   * Cache query result
+   */
+  async cacheResult(
+    query: string,
+    result: QueryResult,
+    connectionId: string,
+    database?: string,
+    sessionId?: string
+  ): Promise<void> {
+    if (!this.enabled || !QueryNormalizer.shouldCache(query)) {
+      return
+    }
+
+    // Don't cache failed queries
+    if (!result.success) {
+      return
+    }
+
+    const normalizedQuery = QueryNormalizer.normalize(query)
+    const cacheKey = QueryNormalizer.generateCacheKey(query, connectionId, database, sessionId)
+    const isSessionIndependent = QueryNormalizer.isSessionIndependentQuery(query)
+
+    const metadata: CacheMetadata = {
+      connectionId,
+      originalQuery: query,
+      queryHash: normalizedQuery.hash,
+      hitCount: 0,
+      lastAccessed: Date.now(),
+      tables: normalizedQuery.tables,
+      queryType: normalizedQuery.queryType,
+      sessionId: isSessionIndependent ? undefined : sessionId,
+      isSessionIndependent
+    }
+
+    await this.queryCache.set(cacheKey, result, metadata)
+
+    // Enhanced logging for cache strategy monitoring
+    console.log(
+      `[CACHE STORE] ${isSessionIndependent ? 'Session-Independent' : 'Session-Dependent'} query cached`,
+      {
+        queryType: normalizedQuery.queryType,
+        isSessionIndependent,
+        sessionId: isSessionIndependent ? 'N/A' : sessionId,
+        tables: normalizedQuery.tables,
+        cacheKey: cacheKey.substring(0, 16) + '...'
+      }
+    )
+  }
+
+  /**
+   * Invalidate cache entries by table name
+   */
+  invalidateTable(tableName: string): number {
+    console.log(`Invalidating cache for table: ${tableName}`)
+    return this.queryCache.invalidateByTable(tableName)
+  }
+
+  /**
+   * Invalidate cache entries by connection
+   */
+  invalidateConnection(connectionId: string): number {
+    console.log(`Invalidating cache for connection: ${connectionId}`)
+    return this.queryCache.invalidateByConnection(connectionId)
+  }
+
+  /**
+   * Invalidate cache based on query type
+   */
+  invalidateByQueryType(query: string, connectionId: string, _database?: string): void {
+    const normalizedQuery = QueryNormalizer.normalize(query)
+
+    switch (normalizedQuery.queryType) {
+      case 'DDL':
+        // DDL operations might affect schema, invalidate all for this connection
+        this.invalidateConnection(connectionId)
+        break
+
+      case 'DML':
+        // DML operations affect specific tables
+        normalizedQuery.tables.forEach((table) => {
+          this.invalidateTable(table)
+        })
+        break
+
+      default:
+        // No invalidation needed for SELECT queries
+        break
+    }
+  }
+
+  /**
+   * Clear all cache entries
+   */
+  clearAll(): void {
+    console.log('Clearing all cache entries')
+    this.queryCache.clear()
+  }
+
+  /**
+   * Get cache statistics
+   */
+  getStats(): CacheStats {
+    return this.queryCache.getStats()
+  }
+
+  /**
+   * Get cache configuration
+   */
+  getConfig(): CacheConfig {
+    return { ...this.config }
+  }
+
+  /**
+   * Update cache configuration
+   */
+  updateConfig(newConfig: Partial<CacheConfig>): void {
+    this.config = { ...this.config, ...newConfig }
+    this.queryCache.updateConfig(this.config)
+  }
+
+  /**
+   * Enable or disable caching
+   */
+  setEnabled(enabled: boolean): void {
+    this.enabled = enabled
+    if (!enabled) {
+      this.clearAll()
+    }
+    console.log(`Query caching ${enabled ? 'enabled' : 'disabled'}`)
+  }
+
+  /**
+   * Check if caching is enabled
+   */
+  isEnabled(): boolean {
+    return this.enabled
+  }
+
+  /**
+   * Get cache entry details for debugging
+   */
+  getCacheEntryInfo(
+    query: string,
+    connectionId: string,
+    database?: string,
+    sessionId?: string
+  ): any {
+    const cacheKey = QueryNormalizer.generateCacheKey(query, connectionId, database, sessionId)
+    const normalizedQuery = QueryNormalizer.normalize(query)
+    const isSessionIndependent = QueryNormalizer.isSessionIndependentQuery(query)
+
+    return {
+      cacheKey,
+      normalizedQuery,
+      shouldCache: QueryNormalizer.shouldCache(query),
+      isSessionIndependent,
+      isEnabled: this.enabled
+    }
+  }
+
+  /**
+   * Dispose of cache manager and clean up resources
+   */
+  dispose(): void {
+    this.queryCache.dispose()
+    CacheManager.instance = null
+  }
+
+  /**
+   * Export cache statistics for monitoring
+   */
+  exportMetrics(): any {
+    const stats = this.getStats()
+    const config = this.getConfig()
+
+    return {
+      timestamp: new Date().toISOString(),
+      enabled: this.enabled,
+      stats,
+      config: {
+        maxSize: config.maxSize,
+        maxMemory: config.maxMemory,
+        defaultTTL: config.defaultTTL,
+        compressionThreshold: config.compressionThreshold,
+        enableCompression: config.enableCompression
+      },
+      memoryUsage: {
+        used: stats.totalMemory,
+        limit: config.maxMemory,
+        utilization: stats.totalMemory / config.maxMemory
+      },
+      performance: {
+        hitRatio: stats.hitRatio,
+        totalQueries: stats.hits + stats.misses,
+        cacheEfficiency: stats.hits > 0 ? stats.hits / (stats.hits + stats.misses) : 0
+      }
+    }
+  }
+}
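Taken together, CacheManager is a read-through cache keyed on normalized query text plus connection context. A minimal sketch of the intended call pattern (the connection/database identifiers here are hypothetical, and `execute` stands in for a real database manager):

import { CacheManager } from './src/main/cache'
import { QueryResult } from './src/main/database/interface'

async function queryWithCache(
  sql: string,
  execute: (sql: string) => Promise<QueryResult>
): Promise<QueryResult> {
  const cache = CacheManager.getInstance()
  const connectionId = 'conn-1' // hypothetical connection id
  const database = 'analytics' // hypothetical database name

  // Read-through: consult the cache first, fall back to the executor
  const hit = await cache.getCachedResult(sql, connectionId, database)
  if (hit) return hit

  const result = await execute(sql)
  if (result.success) {
    await cache.cacheResult(sql, result, connectionId, database)
    // DDL/DML statements also invalidate affected entries (no-op for SELECT)
    cache.invalidateByQueryType(sql, connectionId, database)
  }
  return result
}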
diff --git a/src/main/cache/QueryCache.ts b/src/main/cache/QueryCache.ts
new file mode 100644
index 0000000..9feae99
--- /dev/null
+++ b/src/main/cache/QueryCache.ts
@@ -0,0 +1,285 @@
+import * as zlib from 'zlib'
+import { promisify } from 'util'
+import { QueryResult } from '../database/interface'
+import { CacheEntry, CacheConfig, CacheStats, CacheMetadata, DEFAULT_CACHE_CONFIG } from './types'
+
+const gzip = promisify(zlib.gzip)
+const gunzip = promisify(zlib.gunzip)
+
+export class QueryCache {
+  private cache: Map<string, CacheEntry> = new Map()
+  private accessOrder: string[] = []
+  private config: CacheConfig
+  private stats: CacheStats
+  private cleanupInterval: NodeJS.Timeout | null = null
+
+  constructor(config: Partial<CacheConfig> = {}) {
+    this.config = { ...DEFAULT_CACHE_CONFIG, ...config }
+    this.stats = {
+      hits: 0,
+      misses: 0,
+      evictions: 0,
+      totalEntries: 0,
+      totalMemory: 0,
+      hitRatio: 0
+    }
+
+    // Start periodic cleanup
+    this.startCleanup()
+  }
+
+  async get(key: string): Promise<QueryResult | null> {
+    const entry = this.cache.get(key)
+
+    if (!entry) {
+      this.stats.misses++
+      this.updateHitRatio()
+      return null
+    }
+
+    // Check TTL
+    if (Date.now() > entry.timestamp + entry.ttl) {
+      this.delete(key)
+      this.stats.misses++
+      this.updateHitRatio()
+      return null
+    }
+
+    // Update access order
+    this.updateAccessOrder(key)
+    entry.metadata.lastAccessed = Date.now()
+    entry.metadata.hitCount++
+
+    this.stats.hits++
+    this.updateHitRatio()
+
+    // Return decompressed result
+    if (entry.isCompressed && entry.compressedData) {
+      try {
+        const decompressed = await gunzip(entry.compressedData)
+        return JSON.parse(decompressed.toString())
+      } catch (error) {
+        console.error('Failed to decompress cache entry:', error)
+        this.delete(key)
+        return null
+      }
+    }
+
+    // Return cloned result to prevent mutation
+    return entry.result ? this.cloneResult(entry.result) : null
+  }
+
+  async set(key: string, result: QueryResult, metadata: CacheMetadata): Promise<void> {
+    // Calculate size
+    const size = this.estimateSize(result)
+
+    // Check if we should compress
+    const shouldCompress = this.config.enableCompression && size > this.config.compressionThreshold
+
+    let entry: CacheEntry
+
+    if (shouldCompress) {
+      try {
+        const compressed = await gzip(JSON.stringify(result))
+        entry = {
+          key,
+          result: null,
+          compressedData: compressed,
+          isCompressed: true,
+          timestamp: Date.now(),
+          ttl: this.config.defaultTTL,
+          size: compressed.length,
+          metadata: { ...metadata, lastAccessed: Date.now(), hitCount: 0 }
+        }
+      } catch (error) {
+        console.error('Failed to compress cache entry:', error)
+        // Fall back to uncompressed storage
+        entry = this.createUncompressedEntry(key, result, metadata, size)
+      }
+    } else {
+      entry = this.createUncompressedEntry(key, result, metadata, size)
+    }
+
+    // Remove existing entry if it exists
+    if (this.cache.has(key)) {
+      this.delete(key)
+    }
+
+    // Check size constraints before adding
+    await this.ensureCapacity(entry.size)
+
+    // Add to cache
+    this.cache.set(key, entry)
+    this.updateAccessOrder(key)
+
+    // Update stats
+    this.stats.totalEntries = this.cache.size
+    this.stats.totalMemory += entry.size
+  }
+
+  delete(key: string): boolean {
+    const entry = this.cache.get(key)
+    if (!entry) return false
+
+    this.cache.delete(key)
+    this.removeFromAccessOrder(key)
+    this.stats.totalMemory -= entry.size
+    this.stats.totalEntries = this.cache.size
+
+    return true
+  }
+
+  clear(): void {
+    this.cache.clear()
+    this.accessOrder = []
+    this.stats.totalEntries = 0
+    this.stats.totalMemory = 0
+  }
+
+  invalidateByTable(tableName: string): number {
+    let invalidated = 0
+
+    for (const [key, entry] of this.cache.entries()) {
+      if (entry.metadata.tables.includes(tableName)) {
+        this.delete(key)
+        invalidated++
+      }
+    }
+
+    return invalidated
+  }
+
+  invalidateByConnection(connectionId: string): number {
+    let invalidated = 0
+
+    for (const [key, entry] of this.cache.entries()) {
+      if (entry.metadata.connectionId === connectionId) {
+        this.delete(key)
+        invalidated++
+      }
+    }
+
+    return invalidated
+  }
+
+  getStats(): CacheStats {
+    return { ...this.stats }
+  }
+
+  getConfig(): CacheConfig {
+    return { ...this.config }
+  }
+
+  updateConfig(newConfig: Partial<CacheConfig>): void {
+    this.config = { ...this.config, ...newConfig }
+  }
+
+  private createUncompressedEntry(
+    key: string,
+    result: QueryResult,
+    metadata: CacheMetadata,
+    size: number
+  ): CacheEntry {
+    return {
+      key,
+      result: this.cloneResult(result),
+      compressedData: null,
+      isCompressed: false,
+      timestamp: Date.now(),
+      ttl: this.config.defaultTTL,
+      size,
+      metadata: { ...metadata, lastAccessed: Date.now(), hitCount: 0 }
+    }
+  }
+
+  private async ensureCapacity(newEntrySize: number): Promise<void> {
+    // Check memory limit
+    while (this.stats.totalMemory + newEntrySize > this.config.maxMemory && this.cache.size > 0) {
+      await this.evictLRU()
+    }
+
+    // Check size limit
+    while (this.cache.size >= this.config.maxSize && this.cache.size > 0) {
+      await this.evictLRU()
+    }
+  }
+
+  private async evictLRU(): Promise<void> {
+    if (this.accessOrder.length === 0) return
+
+    const lruKey = this.accessOrder[0]
+    this.delete(lruKey)
+    this.stats.evictions++
+  }
+
+  private updateAccessOrder(key: string): void {
+    this.removeFromAccessOrder(key)
+    this.accessOrder.push(key)
+  }
+
+  private removeFromAccessOrder(key: string): void {
+    const index = this.accessOrder.indexOf(key)
+    if (index > -1) {
+      this.accessOrder.splice(index, 1)
+    }
+  }
+
+  private estimateSize(result: QueryResult): number {
+    // Base size for QueryResult metadata
+    let size = 1024
+
+    // Add size of data array
+    if (result.data && Array.isArray(result.data)) {
+      size += JSON.stringify(result.data).length * 2 // Account for object overhead
+    }
+
+    return size
+  }
+
+  private cloneResult(result: QueryResult): QueryResult {
+    return {
+      ...result,
+      data: result.data ? result.data.map((row) => ({ ...row })) : undefined
+    }
+  }
+
+  private updateHitRatio(): void {
+    const total = this.stats.hits + this.stats.misses
+    this.stats.hitRatio = total > 0 ? this.stats.hits / total : 0
+  }
+
+  private startCleanup(): void {
+    // Run cleanup every 5 minutes
+    this.cleanupInterval = setInterval(
+      () => {
+        this.cleanup()
+      },
+      5 * 60 * 1000
+    )
+  }
+
+  private cleanup(): void {
+    const now = Date.now()
+    const expiredKeys: string[] = []
+
+    for (const [key, entry] of this.cache.entries()) {
+      if (now > entry.timestamp + entry.ttl) {
+        expiredKeys.push(key)
+      }
+    }
+
+    for (const key of expiredKeys) {
+      this.delete(key)
+    }
+
+    console.log(`Cache cleanup: removed ${expiredKeys.length} expired entries`)
+  }
+
+  dispose(): void {
+    if (this.cleanupInterval) {
+      clearInterval(this.cleanupInterval)
+      this.cleanupInterval = null
+    }
+    this.clear()
+  }
+}
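Note the eviction policy above: accessOrder keeps the least-recently-used key at the front, ensureCapacity evicts from there, and expired entries are also dropped lazily on get(). A small sketch of that behaviour, assuming a QueryResult-shaped literal is acceptable (the real type lives in src/main/database/interface):

import { QueryCache } from './src/main/cache/QueryCache'

async function lruDemo() {
  const cache = new QueryCache({ maxSize: 2, defaultTTL: 60_000 })
  const meta = (table: string) => ({
    connectionId: 'conn-1', // hypothetical id
    originalQuery: `SELECT * FROM ${table}`,
    queryHash: table,
    hitCount: 0,
    lastAccessed: Date.now(),
    tables: [table],
    queryType: 'SELECT' as const
  })

  await cache.set('a', { success: true, data: [] } as any, meta('users'))
  await cache.set('b', { success: true, data: [] } as any, meta('orders'))
  await cache.get('a') // 'a' is now most recently used
  await cache.set('c', { success: true, data: [] } as any, meta('events'))

  console.log(await cache.get('b')) // null: 'b' was the LRU entry and got evicted
  cache.dispose() // stops the 5-minute cleanup interval
}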
diff --git a/src/main/cache/QueryNormalizer.ts b/src/main/cache/QueryNormalizer.ts
new file mode 100644
index 0000000..e8790e3
--- /dev/null
+++ b/src/main/cache/QueryNormalizer.ts
@@ -0,0 +1,269 @@
+import * as crypto from 'crypto'
+
+export interface NormalizedQuery {
+  normalized: string
+  tables: string[]
+  queryType: 'SELECT' | 'DDL' | 'DML' | 'OTHER'
+  hash: string
+}
+
+export class QueryNormalizer {
+  /**
+   * Normalize a SQL query for consistent caching
+   */
+  static normalize(query: string): NormalizedQuery {
+    // Remove comments and extra whitespace
+    let normalized = this.removeComments(query)
+    normalized = this.normalizeWhitespace(normalized)
+
+    // Convert to uppercase for case-insensitive comparison
+    normalized = normalized.toUpperCase()
+
+    // Extract tables and determine query type
+    const tables = this.extractTables(normalized)
+    const queryType = this.determineQueryType(normalized)
+
+    // Generate hash
+    const hash = this.generateHash(normalized)
+
+    return {
+      normalized,
+      tables,
+      queryType,
+      hash
+    }
+  }
+
+  /**
+   * Check if a query can be cached session-independently
+   */
+  static isSessionIndependentQuery(query: string): boolean {
+    const normalized = this.normalize(query)
+
+    // Only SELECT queries can be session-independent
+    if (normalized.queryType !== 'SELECT') {
+      return false
+    }
+
+    const upperQuery = normalized.normalized
+
+    // Session-dependent patterns to exclude
+    const sessionDependentPatterns = [
+      // User or session-specific functions
+      'USER()',
+      'CURRENT_USER',
+      'SESSION_USER',
+      'CONNECTION_ID()',
+      'LAST_INSERT_ID()',
+
+      // Temporary tables
+      '#',
+      'TEMPORARY',
+
+      // Variables
+      '@',
+      'SET @',
+      'SELECT @',
+
+      // Time-sensitive functions (already covered in shouldCache)
+      'NOW()',
+      'CURRENT_TIMESTAMP',
+      'CURRENT_DATE',
+      'CURRENT_TIME',
+      'RAND()',
+      'RANDOM()',
+      'UUID()',
+      'NEWID()'
+    ]
+
+    // Check for session-dependent patterns
+    const hasSessionDependentContent = sessionDependentPatterns.some((pattern) =>
+      upperQuery.includes(pattern)
+    )
+
+    if (hasSessionDependentContent) {
+      return false
+    }
+
+    // Check for table browsing patterns (simple SELECT * with basic conditions)
+    const isTableBrowsing =
+      // Simple SELECT * queries
+      (upperQuery.match(/^SELECT \* FROM/) !== null ||
+        // SELECT with specific columns from single table
+        upperQuery.match(/^SELECT [^()]+FROM [^\s,;()]+$/) !== null) &&
+      // With basic WHERE conditions (no subqueries or complex joins)
+      (!upperQuery.includes('(') || upperQuery.match(/WHERE [^()]+$/) !== null) &&
+      // No complex operations
+      !upperQuery.includes('JOIN') &&
+      !upperQuery.includes('UNION') &&
+      !upperQuery.includes('CASE') &&
+      !upperQuery.includes('EXISTS')
+
+    return isTableBrowsing
+  }
+
+  /**
+   * Generate a cache key from query and connection parameters
+   */
+  static generateCacheKey(
+    query: string,
+    connectionId: string,
+    database?: string,
+    sessionId?: string
+  ): string {
+    const normalized = this.normalize(query)
+
+    // Check if this query can be cached session-independently
+    const isSessionIndependent = this.isSessionIndependentQuery(query)
+
+    let contextData: string
+    if (isSessionIndependent) {
+      // For session-independent queries, exclude sessionId from cache key
+      contextData = `${connectionId}:${database || 'default'}`
+    } else {
+      // For session-dependent queries, include sessionId
+      contextData = `${connectionId}:${database || 'default'}:${sessionId || 'default'}`
+    }
+
+    return crypto
+      .createHash('sha256')
+      .update(`${normalized.normalized}:${contextData}`)
+      .digest('hex')
+  }
+
+  /**
+   * Check if a query should be cached
+   */
+  static shouldCache(query: string): boolean {
+    const normalized = this.normalize(query)
+
+    // Only cache SELECT queries
+    if (normalized.queryType !== 'SELECT') {
+      return false
+    }
+
+    // Don't cache queries with time-sensitive functions
+    const timeSensitiveFunctions = [
+      'NOW()',
+      'CURRENT_TIMESTAMP',
+      'CURRENT_DATE',
+      'CURRENT_TIME',
+      'RAND()',
+      'RANDOM()',
+      'UUID()',
+      'NEWID()'
+    ]
+
+    const upperQuery = normalized.normalized
+    return !timeSensitiveFunctions.some((func) => upperQuery.includes(func))
+  }
+
+  private static removeComments(query: string): string {
+    // Remove single line comments (-- comment)
+    query = query.replace(/--.*$/gm, '')
+
+    // Remove multi-line comments (/* comment */)
+    query = query.replace(/\/\*[\s\S]*?\*\//g, '')
+
+    return query
+  }
+
+  private static normalizeWhitespace(query: string): string {
+    // Replace multiple whitespace characters with single space
+    return query.replace(/\s+/g, ' ').trim()
+  }
+
+  private static extractTables(normalizedQuery: string): string[] {
+    const tables: Set<string> = new Set()
+
+    // Simple regex patterns to extract table names
+    // This is a basic implementation - a full SQL parser would be more accurate
+
+    // FROM clause
+    const fromMatches = normalizedQuery.match(/\bFROM\s+([^\s,;()]+)/gi)
+    if (fromMatches) {
+      fromMatches.forEach((match) => {
+        const tableName = match.replace(/^FROM\s+/i, '').trim()
+        tables.add(this.cleanTableName(tableName))
+      })
+    }
+
+    // JOIN clauses
+    const joinMatches = normalizedQuery.match(/\bJOIN\s+([^\s,;()]+)/gi)
+    if (joinMatches) {
+      joinMatches.forEach((match) => {
+        const tableName = match.replace(/^JOIN\s+/i, '').trim()
+        tables.add(this.cleanTableName(tableName))
+      })
+    }
+
+    // UPDATE clauses
+    const updateMatches = normalizedQuery.match(/\bUPDATE\s+([^\s,;()]+)/gi)
+    if (updateMatches) {
+      updateMatches.forEach((match) => {
+        const tableName = match.replace(/^UPDATE\s+/i, '').trim()
+        tables.add(this.cleanTableName(tableName))
+      })
+    }
+
+    // INSERT INTO clauses
+    const insertMatches = normalizedQuery.match(/\bINSERT\s+INTO\s+([^\s,;()]+)/gi)
+    if (insertMatches) {
+      insertMatches.forEach((match) => {
+        const tableName = match.replace(/^INSERT\s+INTO\s+/i, '').trim()
+        tables.add(this.cleanTableName(tableName))
+      })
+    }
+
+    // DELETE FROM clauses
+    const deleteMatches = normalizedQuery.match(/\bDELETE\s+FROM\s+([^\s,;()]+)/gi)
+    if (deleteMatches) {
+      deleteMatches.forEach((match) => {
+        const tableName = match.replace(/^DELETE\s+FROM\s+/i, '').trim()
+        tables.add(this.cleanTableName(tableName))
+      })
+    }
+
+    return Array.from(tables).filter(Boolean)
+  }
+
+  private static cleanTableName(tableName: string): string {
+    // Remove quotes and backticks
+    tableName = tableName.replace(/[`"'[\]]/g, '')
+
+    // Remove database prefix if present (keep only table name for cache invalidation)
+    const parts = tableName.split('.')
+    return parts.length > 1 ? parts[parts.length - 1] : tableName
+  }
+
+  private static determineQueryType(normalizedQuery: string): 'SELECT' | 'DDL' | 'DML' | 'OTHER' {
+    const trimmed = normalizedQuery.trim()
+
+    if (trimmed.startsWith('SELECT') || trimmed.startsWith('WITH')) {
+      return 'SELECT'
+    }
+
+    if (
+      trimmed.startsWith('INSERT') ||
+      trimmed.startsWith('UPDATE') ||
+      trimmed.startsWith('DELETE')
+    ) {
+      return 'DML'
+    }
+
+    if (
+      trimmed.startsWith('CREATE') ||
+      trimmed.startsWith('DROP') ||
+      trimmed.startsWith('ALTER') ||
+      trimmed.startsWith('TRUNCATE')
+    ) {
+      return 'DDL'
+    }
+
+    return 'OTHER'
+  }
+
+  private static generateHash(normalizedQuery: string): string {
+    return crypto.createHash('md5').update(normalizedQuery).digest('hex')
+  }
+}
diff --git a/src/main/cache/index.ts b/src/main/cache/index.ts
new file mode 100644
index 0000000..7b12481
--- /dev/null
+++ b/src/main/cache/index.ts
@@ -0,0 +1,4 @@
+export { QueryCache } from './QueryCache'
+export { QueryNormalizer } from './QueryNormalizer'
+export { CacheManager } from './CacheManager'
+export * from './types'
diff --git a/src/main/cache/types.ts b/src/main/cache/types.ts
new file mode 100644
index 0000000..9e7eaa9
--- /dev/null
+++ b/src/main/cache/types.ts
@@ -0,0 +1,51 @@
+import { QueryResult } from '../database/interface'
+
+export interface CacheEntry {
+  key: string
+  result: QueryResult | null
+  compressedData: Buffer | null
+  isCompressed: boolean
+  timestamp: number
+  ttl: number
+  size: number
+  metadata: CacheMetadata
+}
+
+export interface CacheMetadata {
+  connectionId: string
+  originalQuery: string
+  queryHash: string
+  hitCount: number
+  lastAccessed: number
+  tables: string[]
+  queryType: 'SELECT' | 'DDL' | 'DML' | 'OTHER'
+  sessionId?: string
+  isSessionIndependent?: boolean
+}
+
+export interface CacheConfig {
+  maxSize: number // Maximum number of entries
+  maxMemory: number // Maximum memory in bytes
+  defaultTTL: number // Default TTL in milliseconds
+  compressionThreshold: number // Size threshold for compression in bytes
+  enableCompression: boolean
+  enablePersistence: boolean
+}
+
+export interface CacheStats {
+  hits: number
+  misses: number
+  evictions: number
+  totalEntries: number
+  totalMemory: number
+  hitRatio: number
+}
+
+export const DEFAULT_CACHE_CONFIG: CacheConfig = {
+  maxSize: 1000,
+  maxMemory: 100 * 1024 * 1024, // 100MB
+  defaultTTL: 5 * 60 * 1000, // 5 minutes
+  compressionThreshold: 1024 * 1024, // 1MB
+  enableCompression: true,
+  enablePersistence: false
+}
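With the cache module complete, the normalizer's three decisions can be seen end to end: whether to cache at all, whether the entry can be shared across sessions, and what key it gets. A short sketch (the expected values in comments follow from the code above, not from observed output):

import { QueryNormalizer } from './src/main/cache/QueryNormalizer'

const sql = 'SELECT * FROM users -- browsing\nWHERE active = 1'

const norm = QueryNormalizer.normalize(sql)
// norm.queryType === 'SELECT'; norm.tables === ['USERS'] (tables are extracted
// after upper-casing, so invalidation must match on the upper-cased name)

QueryNormalizer.shouldCache(sql) // true: SELECT with no time-sensitive functions
QueryNormalizer.isSessionIndependentQuery(sql) // true: simple single-table browse

// Session-independent queries omit sessionId from the key, so two sessions
// share one cache entry:
const k1 = QueryNormalizer.generateCacheKey(sql, 'conn-1', 'app', 'session-A')
const k2 = QueryNormalizer.generateCacheKey(sql, 'conn-1', 'app', 'session-B')
// k1 === k2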
diff --git a/src/main/database/base.ts b/src/main/database/base.ts
index 1baec37..03bf8d7 100644
--- a/src/main/database/base.ts
+++ b/src/main/database/base.ts
@@ -15,10 +15,16 @@ import {
   TableQueryOptions,
   TableFilter
 } from './interface'
+import { CacheManager } from '../cache'
 
 export abstract class BaseDatabaseManager implements DatabaseManagerInterface {
   protected connections: Map<string, any> = new Map()
   protected readonlyConnections: Set<string> = new Set()
+  protected cacheManager: CacheManager
+
+  constructor() {
+    this.cacheManager = CacheManager.getInstance()
+  }
 
   abstract connect(config: DatabaseConfig, connectionId: string): Promise<{ success: boolean; message: string }>
   abstract disconnect(connectionId: string): Promise<{ success: boolean; message: string }>
@@ -98,7 +104,52 @@ export abstract class BaseDatabaseManager implements DatabaseManagerInterface {
     }
   }
 
-  abstract query(connectionId: string, sql: string): Promise<QueryResult>
+  abstract executeQuery(connectionId: string, sql: string, sessionId?: string): Promise<QueryResult>
+
+  async query(connectionId: string, sql: string, sessionId?: string): Promise<QueryResult> {
+    try {
+      // Get connection info for cache context
+      const connectionInfo = this.getConnectionInfo(connectionId)
+      const database = connectionInfo?.database
+
+      // Try to get cached result first
+      const cachedResult = await this.cacheManager.getCachedResult(
+        sql,
+        connectionId,
+        database,
+        sessionId
+      )
+      if (cachedResult) {
+        console.log('Cache hit for query:', sql.substring(0, 100) + '...')
+        return cachedResult
+      }
+
+      console.log('Cache miss for query:', sql.substring(0, 100) + '...')
+
+      // Execute the query
+      const result = await this.executeQuery(connectionId, sql, sessionId)
+
+      // Cache the result if successful
+      if (result.success) {
+        await this.cacheManager.cacheResult(sql, result, connectionId, database, sessionId)
+      }
+
+      // Handle cache invalidation for DDL/DML queries
+      if (result.success && (result.isDDL || result.isDML)) {
+        this.cacheManager.invalidateByQueryType(sql, connectionId, database)
+      }
+
+      return result
+    } catch (error) {
+      console.error('Query execution error:', error)
+      return this.createQueryResult(
+        false,
+        'Query execution failed',
+        undefined,
+        error instanceof Error ? error.message : 'Unknown error'
+      )
+    }
+  }
 
   async cancelQuery(
     connectionId: string,
@@ -146,7 +197,7 @@ export abstract class BaseDatabaseManager implements DatabaseManagerInterface {
       sql += ` OFFSET ${offset}`
     }
 
-    return this.query(connectionId, sql, sessionId)
+    return this.executeQuery(connectionId, sql, sessionId)
   }
 
   protected buildWhereClause(filter: TableFilter): string {
@@ -209,7 +260,7 @@ export abstract class BaseDatabaseManager implements DatabaseManagerInterface {
       .join(', ')
 
     const finalSql = `INSERT INTO ${qualifiedTable} (${columns.join(', ')}) VALUES (${escapedValues})`
-    const result = await this.query(connectionId, finalSql)
+    const result = await this.executeQuery(connectionId, finalSql)
 
     return result as InsertResult
   }
@@ -239,7 +290,7 @@ export abstract class BaseDatabaseManager implements DatabaseManagerInterface {
     const qualifiedTable = database ? `${database}.${table}` : table
    const sql = `UPDATE ${qualifiedTable} SET ${setClauses.join(', ')} WHERE ${whereClauses.join(' AND ')}`
 
-    const result = await this.query(connectionId, sql)
+    const result = await this.executeQuery(connectionId, sql)
 
     return result as UpdateResult
   }
@@ -264,7 +315,7 @@ export abstract class BaseDatabaseManager implements DatabaseManagerInterface {
     const qualifiedTable = database ? `${database}.${table}` : table
     const sql = `DELETE FROM ${qualifiedTable} WHERE ${whereClauses.join(' AND ')}`
 
-    const result = await this.query(connectionId, sql)
+    const result = await this.executeQuery(connectionId, sql)
 
     return result as DeleteResult
   }
@@ -326,7 +377,7 @@ export abstract class BaseDatabaseManager implements DatabaseManagerInterface {
     if (hasTransactionSupport) {
       // Try to use transactions
       try {
-        await this.query(connectionId, 'BEGIN TRANSACTION')
+        await this.executeQuery(connectionId, 'BEGIN TRANSACTION')
 
         const results: QueryResult[] = []
         let allSuccess = true
@@ -373,7 +424,7 @@ export abstract class BaseDatabaseManager implements DatabaseManagerInterface {
         }
 
         if (allSuccess) {
-          await this.query(connectionId, 'COMMIT')
+          await this.executeQuery(connectionId, 'COMMIT')
 
           // Don't try to fetch updated data - let the client handle refreshing
           let updatedData: any[] | undefined = undefined
@@ -384,7 +435,7 @@ export abstract class BaseDatabaseManager implements DatabaseManagerInterface {
             data: updatedData
           }
         } else {
-          await this.query(connectionId, 'ROLLBACK')
+          await this.executeQuery(connectionId, 'ROLLBACK')
           return {
             success: false,
             results,
@@ -394,7 +445,7 @@ export abstract class BaseDatabaseManager implements DatabaseManagerInterface {
       } catch (error) {
         // Try to rollback if possible
         try {
-          await this.query(connectionId, 'ROLLBACK')
+          await this.executeQuery(connectionId, 'ROLLBACK')
         } catch (rollbackError) {
           // Ignore rollback errors
         }
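The base class now follows a template-method split: the public query() adds the cache layer, subclasses implement executeQuery(), and internal helpers (insert/update/delete, the transaction path) call executeQuery() directly so writes and control statements never hit the cache. A compressed sketch of what a concrete manager supplies (SqliteManager and its driver call are hypothetical; the real implementations are clickhouse.ts and friends):

class SqliteManager extends BaseDatabaseManager {
  // connect/disconnect and other abstract members omitted for brevity
  async executeQuery(connectionId: string, sql: string): Promise<QueryResult> {
    const db = this.connections.get(connectionId)
    const rows = db.prepare(sql).all() // stand-in driver call, illustration only
    return this.createQueryResult(true, `Returned ${rows.length} rows`, rows)
  }
}

// Callers keep using manager.query(sql): cache lookup first, executeQuery()
// on a miss, then cacheResult()/invalidateByQueryType() on success.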
diff --git a/src/main/database/clickhouse.ts b/src/main/database/clickhouse.ts
index 209ad22..6cde5aa 100644
--- a/src/main/database/clickhouse.ts
+++ b/src/main/database/clickhouse.ts
@@ -10,6 +10,7 @@ import {
   TableQueryOptions,
   TableFilter
 } from './interface'
+import { logger } from '../utils/logger'
 
 interface ClickHouseConfig {
   host: string
@@ -34,6 +35,32 @@ class ClickHouseManager extends BaseDatabaseManager {
   protected connections: Map<string, any> = new Map()
   private activeQueries: Map<string, AbortController> = new Map() // Track active queries by queryId
 
+  constructor() {
+    super()
+    // Configure ClickHouse-specific cache settings
+    this.cacheManager.updateConfig({
+      defaultTTL: 10 * 60 * 1000, // 10 minutes for ClickHouse (longer than default)
+      compressionThreshold: 512 * 1024, // 512KB - ClickHouse can return large results
+      enableCompression: true
+    })
+  }
+
+  // Override query method to add ClickHouse-specific cache logic
+  async query(connectionId: string, sql: string, sessionId?: string): Promise<QueryResult> {
+    // For ClickHouse SYSTEM queries, don't cache results as they change frequently
+    const upperSql = sql.trim().toUpperCase()
+    if (
+      upperSql.startsWith('SHOW') ||
+      upperSql.startsWith('DESCRIBE') ||
+      upperSql.startsWith('SYSTEM')
+    ) {
+      return this.executeQuery(connectionId, sql, sessionId)
+    }
+
+    // Use base class caching for other queries
+    return super.query(connectionId, sql, sessionId)
+  }
+
   async connect(config: DatabaseConfig, connectionId: string): Promise<{ success: boolean; message: string }> {
     try {
       // Check if connection already exists
@@ -139,7 +166,10 @@ class ClickHouseManager extends BaseDatabaseManager {
     }
   }
 
-  async query(connectionId: string, sql: string, sessionId?: string): Promise<QueryResult> {
+  async executeQuery(connectionId: string, sql: string, sessionId?: string): Promise<QueryResult> {
+    const startTime = Date.now()
+    const timestamp = new Date().toISOString()
+
     try {
       const connection = this.connections.get(connectionId)
       if (!connection || !connection.isConnected) {
@@ -149,6 +179,12 @@ class ClickHouseManager extends BaseDatabaseManager {
       // Validate read-only queries
       const validation = this.validateReadOnlyQuery(connectionId, sql)
       if (!validation.valid) {
+        logger.warn(`[${timestamp}] ClickHouse read-only query validation failed`, {
+          connectionId,
+          sql: sql.slice(0, 100) + (sql.length > 100 ? '...' : ''),
+          error: validation.error,
+          timestamp
+        })
         return this.createQueryResult(false, validation.error || 'Query not allowed')
       }
 
@@ -160,13 +196,41 @@ class ClickHouseManager extends BaseDatabaseManager {
       const isDDL = queryType === QueryType.DDL
       const isDML = [QueryType.INSERT, QueryType.UPDATE, QueryType.DELETE].includes(queryType)
 
+      logger.debug(`[${timestamp}] ClickHouse query details`, {
+        connectionId,
+        queryType,
+        isDDL,
+        isDML,
+        sessionId,
+        host: connection.config.host,
+        database: connection.config.database,
+        timestamp
+      })
+
       if (isDDL || isDML) {
+        logger.info(`[${timestamp}] Executing ClickHouse command`, {
+          connectionId,
+          queryType,
+          sessionId,
+          sql: sql.slice(0, 200) + (sql.length > 200 ? '...' : ''),
+          timestamp
+        })
+
         // Use command() for DDL/DML queries that don't return data
         await connection.client.command({
           query: sql,
           query_id: sessionId || undefined
         })
 
+        const duration = Date.now() - startTime
+        const completionTimestamp = new Date().toISOString()
+        logger.info(`[${completionTimestamp}] ClickHouse command completed in ${duration}ms`, {
+          queryType,
+          sessionId,
+          duration,
+          timestamp: completionTimestamp
+        })
+
         return this.createQueryResult(
           true,
           'Command executed successfully',
@@ -176,6 +240,14 @@ class ClickHouseManager extends BaseDatabaseManager {
           isDML ? 1 : 0 // For DML, we don't get affected rows from ClickHouse easily
         )
       } else {
+        logger.info(`[${timestamp}] Executing ClickHouse query`, {
+          connectionId,
+          queryType,
+          sessionId,
+          sql: sql.slice(0, 200) + (sql.length > 200 ? '...' : ''),
+          timestamp
+        })
+
         // Use query() for SELECT and data-returning queries
 
         const abortController = new AbortController()
@@ -208,6 +280,16 @@ class ClickHouseManager extends BaseDatabaseManager {
           this.activeQueries.delete(sessionId)
         }
 
+        const duration = Date.now() - startTime
+        const completionTimestamp = new Date().toISOString()
+        logger.info(`[${completionTimestamp}] ClickHouse query completed in ${duration}ms`, {
+          queryType,
+          rowCount: data.length,
+          sessionId,
+          duration,
+          timestamp: completionTimestamp
+        })
+
         return this.createQueryResult(
           true,
           `Query executed successfully. Returned ${data.length} rows.`,
@@ -222,10 +304,27 @@ class ClickHouseManager extends BaseDatabaseManager {
         this.activeQueries.delete(sessionId)
       }
 
-      console.error('ClickHouse query error:', error)
+      const duration = Date.now() - startTime
+      const errorTimestamp = new Date().toISOString()
+
+      logger.error(`[${errorTimestamp}] ClickHouse query error after ${duration}ms`, {
+        error: error instanceof Error ? error.message : 'Unknown error',
+        connectionId,
+        sessionId,
+        duration,
+        sql: sql.slice(0, 200) + (sql.length > 200 ? '...' : ''),
+        timestamp: errorTimestamp
+      })
 
       // Check if it was cancelled
       if (error instanceof Error && error.name === 'AbortError') {
+        logger.info(`[${errorTimestamp}] ClickHouse query was cancelled`, {
+          connectionId,
+          sessionId,
+          duration,
+          timestamp: errorTimestamp
+        })
+
         return this.createQueryResult(
           false,
           'Query was cancelled',
@@ -322,13 +421,13 @@ class ClickHouseManager extends BaseDatabaseManager {
     }
 
     // Execute the main query
-    const result = await this.query(connectionId, sql, sessionId)
+    const result = await this.executeQuery(connectionId, sql, sessionId)
 
     // If successful and we have pagination, get the total count
     if (result.success && (limit || offset)) {
       try {
         const countSql = `SELECT count() as total ${baseQuery}`
-        const countResult = await this.query(connectionId, countSql)
+        const countResult = await this.executeQuery(connectionId, countSql)
 
         if (countResult.success && countResult.data && countResult.data[0]) {
           result.totalRows = Number(countResult.data[0].total)
@@ -383,7 +482,7 @@ class ClickHouseManager extends BaseDatabaseManager {
     connectionId: string
  ): Promise<{ success: boolean; databases?: string[]; message: string }> {
     try {
-      const result = await this.query(connectionId, 'SHOW DATABASES')
+      const result = await this.executeQuery(connectionId, 'SHOW DATABASES')
       if (result.success && result.data) {
         const databases = result.data.map((row: any) => row.name || row.database || row[0])
         return {
@@ -418,7 +517,7 @@ class ClickHouseManager extends BaseDatabaseManager {
       }
 
       console.log('DEBUG: Executing query:', query)
-      const result = await this.query(connectionId, query)
+      const result = await this.executeQuery(connectionId, query)
 
       console.log('DEBUG: Query result:', result)
 
@@ -455,7 +554,7 @@ class ClickHouseManager extends BaseDatabaseManager {
   ): Promise<{ success: boolean; schema?: any[]; message: string }> {
     try {
       const fullTableName = database ? `${database}.${tableName}` : tableName
-      const result = await this.query(connectionId, `DESCRIBE ${fullTableName}`)
+      const result = await this.executeQuery(connectionId, `DESCRIBE ${fullTableName}`)
       if (result.success && result.data) {
         return {
           success: true,
@@ -532,8 +631,7 @@ class ClickHouseManager extends BaseDatabaseManager {
 
       // Execute the ALTER TABLE UPDATE command
       await connection.client.command({
-        query: sql,
-        session_id: sessionId
+        query: sql
       })
 
       return this.createQueryResult(
@@ -570,7 +668,7 @@ class ClickHouseManager extends BaseDatabaseManager {
   async supportsTransactions(connectionId: string): Promise<boolean> {
     try {
       // Check if experimental transactions are enabled
-      const result = await this.query(
+      const result = await this.executeQuery(
         connectionId,
         "SELECT value FROM system.settings WHERE name = 'allow_experimental_transactions'"
       )
@@ -615,7 +713,7 @@ class ClickHouseManager extends BaseDatabaseManager {
         ORDER BY position
       `
 
-      const pkResult = await this.query(connectionId, pkQuery)
+      const pkResult = await this.executeQuery(connectionId, pkQuery)
 
       const primaryKeys =
         pkResult.success && pkResult.data ? pkResult.data.map((row) => row.name) : []
diff --git a/src/main/database/manager.ts b/src/main/database/manager.ts
index b8a789a..144905e 100644
--- a/src/main/database/manager.ts
+++ b/src/main/database/manager.ts
@@ -14,6 +14,7 @@ import {
   TableQueryOptions
 } from './interface'
 import { DatabaseManagerFactory } from './factory'
+import { logger } from '../utils/logger'
 
 class DatabaseManager {
   private factory: DatabaseManagerFactory
@@ -145,6 +146,9 @@ class DatabaseManager {
   }
 
   async query(connectionId: string, sql: string, sessionId?: string): Promise<QueryResult> {
+    const startTime = Date.now()
+    const timestamp = new Date().toISOString()
+
     try {
       if (!this.activeConnection || this.activeConnection.id !== connectionId) {
         return {
@@ -154,10 +158,42 @@ class DatabaseManager {
         }
       }
 
+      logger.info(`[${timestamp}] Executing query on connection ${connectionId}`, {
+        sql: sql.slice(0, 200) + (sql.length > 200 ? '...' : ''),
+        sessionId,
+        connectionType: this.activeConnection.type,
+        timestamp
+      })
+
       // Execute query using the specific manager
-      return await this.activeConnection.manager.query(connectionId, sql, sessionId)
+      const result = await this.activeConnection.manager.query(connectionId, sql, sessionId)
+
+      const duration = Date.now() - startTime
+      const completionTimestamp = new Date().toISOString()
+
+      logger.info(`[${completionTimestamp}] Query completed in ${duration}ms`, {
+        success: result.success,
+        rowCount: result.data?.length || 0,
+        sessionId,
+        duration,
+        executionTime: `${startTime} - ${Date.now()}`,
+        timestamp: completionTimestamp
+      })
+
+      return result
     } catch (error) {
-      console.error('Database query error:', error)
+      const duration = Date.now() - startTime
+      const errorTimestamp = new Date().toISOString()
+
+      logger.error(`[${errorTimestamp}] Database query error after ${duration}ms`, {
+        error: error instanceof Error ? error.message : 'Unknown error',
+        connectionId,
+        sessionId,
+        duration,
+        sql: sql.slice(0, 200) + (sql.length > 200 ? '...' : ''),
+        timestamp: errorTimestamp
+      })
+
       return {
         success: false,
         message: 'Query execution failed',
diff --git a/src/main/index.ts b/src/main/index.ts
index c13c436..de549a4 100644
--- a/src/main/index.ts
+++ b/src/main/index.ts
@@ -6,6 +6,7 @@ import { DatabaseManager } from './database/manager'
 import { DatabaseConfig, TableQueryOptions } from './database/interface'
 import { LangChainAgent } from './llm/langchainAgent'
 import QueryHistoryService from './services/QueryHistoryService'
+import { CacheManager } from './cache'
 import * as fs from 'fs'
 
 function createWindow(): void {
@@ -50,6 +51,9 @@ const queryHistoryService = new QueryHistoryService()
 // Initialize AI agent
 const aiAgent = new LangChainAgent(databaseManager, secureStorage)
 
+// Initialize cache manager
+const cacheManager = CacheManager.getInstance()
+
 app.whenReady().then(() => {
   // Set the app name for macOS menu bar
   app.setName('DataPup')
@@ -644,3 +648,94 @@ ipcMain.handle(
     }
   }
 )
+
+// IPC handlers for cache management
+ipcMain.handle('cache:getStats', async () => {
+  try {
+    const stats = cacheManager.getStats()
+    return { success: true, stats }
+  } catch (error) {
+    console.error('Error getting cache stats:', error)
+    return { success: false, stats: null }
+  }
+})
+
+ipcMain.handle('cache:getConfig', async () => {
+  try {
+    const config = cacheManager.getConfig()
+    return { success: true, config }
+  } catch (error) {
+    console.error('Error getting cache config:', error)
+    return { success: false, config: null }
+  }
+})
+
+ipcMain.handle('cache:updateConfig', async (_, newConfig) => {
+  try {
+    cacheManager.updateConfig(newConfig)
+    return { success: true }
+  } catch (error) {
+    console.error('Error updating cache config:', error)
+    return { success: false }
+  }
+})
+
+ipcMain.handle('cache:clear', async () => {
+  try {
+    cacheManager.clearAll()
+    return { success: true }
+  } catch (error) {
+    console.error('Error clearing cache:', error)
+    return { success: false }
+  }
+})
+
+ipcMain.handle('cache:setEnabled', async (_, enabled: boolean) => {
+  try {
+    cacheManager.setEnabled(enabled)
+    return { success: true }
+  } catch (error) {
+    console.error('Error setting cache enabled state:', error)
+    return { success: false }
+  }
+})
+
+ipcMain.handle('cache:isEnabled', async () => {
+  try {
+    const enabled = cacheManager.isEnabled()
+    return { success: true, enabled }
+  } catch (error) {
+    console.error('Error checking cache enabled state:', error)
+    return { success: false, enabled: false }
+  }
+})
+
+ipcMain.handle('cache:invalidateTable', async (_, tableName: string) => {
+  try {
+    const count = cacheManager.invalidateTable(tableName)
+    return { success: true, invalidatedCount: count }
+  } catch (error) {
+    console.error('Error invalidating table cache:', error)
+    return { success: false, invalidatedCount: 0 }
+  }
+})
+
+ipcMain.handle('cache:invalidateConnection', async (_, connectionId: string) => {
+  try {
+    const count = cacheManager.invalidateConnection(connectionId)
+    return { success: true, invalidatedCount: count }
+  } catch (error) {
+    console.error('Error invalidating connection cache:', error)
+    return { success: false, invalidatedCount: 0 }
+  }
+})
+
+ipcMain.handle('cache:getMetrics', async () => {
+  try {
+    const metrics = cacheManager.exportMetrics()
+    return { success: true, metrics }
+  } catch (error) {
+    console.error('Error getting cache metrics:', error)
+    return { success: false, metrics: null }
+  }
+})
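Renderer code reaches these handlers through the preload bridge added below. A sketch of typical usage from an async context in the renderer (the threshold and TTL values are illustrative, not recommendations from the patch):

const { success, stats } = await window.api.cache.getStats()
if (success && stats.hitRatio < 0.2) {
  // widen the TTL when hits are rare
  await window.api.cache.updateConfig({ defaultTTL: 15 * 60 * 1000 })
}
// after a write performed outside the cached path:
await window.api.cache.invalidateTable('users')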
diff --git a/src/preload/index.ts b/src/preload/index.ts
index 4957742..2d2d112 100644
--- a/src/preload/index.ts
+++ b/src/preload/index.ts
@@ -87,6 +87,18 @@ const api = {
     get: (filter?: any) => ipcRenderer.invoke('saved-queries:get', filter),
     update: (id: number, updates: any) => ipcRenderer.invoke('saved-queries:update', id, updates),
     delete: (id: number) => ipcRenderer.invoke('saved-queries:delete', id)
+  },
+  cache: {
+    getStats: () => ipcRenderer.invoke('cache:getStats'),
+    getConfig: () => ipcRenderer.invoke('cache:getConfig'),
+    updateConfig: (config: any) => ipcRenderer.invoke('cache:updateConfig', config),
+    clear: () => ipcRenderer.invoke('cache:clear'),
+    setEnabled: (enabled: boolean) => ipcRenderer.invoke('cache:setEnabled', enabled),
+    isEnabled: () => ipcRenderer.invoke('cache:isEnabled'),
+    invalidateTable: (tableName: string) => ipcRenderer.invoke('cache:invalidateTable', tableName),
+    invalidateConnection: (connectionId: string) =>
+      ipcRenderer.invoke('cache:invalidateConnection', connectionId),
+    getMetrics: () => ipcRenderer.invoke('cache:getMetrics')
   }
 }
diff --git a/src/renderer/components/ActiveConnectionLayout/ActiveConnectionLayout.tsx b/src/renderer/components/ActiveConnectionLayout/ActiveConnectionLayout.tsx
index 0ac01d5..bfeaa0c 100644
--- a/src/renderer/components/ActiveConnectionLayout/ActiveConnectionLayout.tsx
+++ b/src/renderer/components/ActiveConnectionLayout/ActiveConnectionLayout.tsx
@@ -19,8 +19,8 @@ export function ActiveConnectionLayout({
   onDisconnect
 }: ActiveConnectionLayoutProps) {
   const [isReadOnly, setIsReadOnly] = useState(false)
-  const queryWorkspaceRef = useRef(null)
   const newTabHandlerRef = useRef<(() => void) | null>(null)
+  const queryWorkspaceRef = useRef(null)
 
   useEffect(() => {
     const checkReadOnly = async () => {
@@ -117,7 +117,6 @@ export function ActiveConnectionLayout({
         <QueryWorkspace
-          ref={queryWorkspaceRef}
           onNewTabHandlerReady={(handler) => {
             newTabHandlerRef.current = handler
diff --git a/src/renderer/components/CacheMonitor/CacheMonitor.tsx b/src/renderer/components/CacheMonitor/CacheMonitor.tsx
new file mode 100644
index 0000000..9ab6664
--- /dev/null
+++ b/src/renderer/components/CacheMonitor/CacheMonitor.tsx
@@ -0,0 +1,247 @@
+import React, { useState, useEffect } from 'react'
+import { Box, Text, Button, Flex, Badge, Card, Switch } from '@radix-ui/themes'
+import { ReloadIcon, TrashIcon } from '@radix-ui/react-icons'
+
+interface CacheStats {
+  hits: number
+  misses: number
+  evictions: number
+  totalEntries: number
+  totalMemory: number
+  hitRatio: number
+}
+
+interface CacheConfig {
+  maxSize: number
+  maxMemory: number
+  defaultTTL: number
+  compressionThreshold: number
+  enableCompression: boolean
+  enablePersistence: boolean
+}
+
+export function CacheMonitor() {
+  const [stats, setStats] = useState<CacheStats | null>(null)
+  const [config, setConfig] = useState<CacheConfig | null>(null)
+  const [enabled, setEnabled] = useState(true)
+  const [loading, setLoading] = useState(false)
+
+  const fetchCacheData = async () => {
+    setLoading(true)
+    try {
+      const [statsResult, configResult, enabledResult] = await Promise.all([
+        window.api.cache.getStats(),
+        window.api.cache.getConfig(),
+        window.api.cache.isEnabled()
+      ])
+
+      if (statsResult.success) setStats(statsResult.stats)
+      if (configResult.success) setConfig(configResult.config)
+      if (enabledResult.success) setEnabled(enabledResult.enabled)
+    } catch (error) {
+      console.error('Failed to fetch cache data:', error)
+    } finally {
+      setLoading(false)
+    }
+  }
+
+  const handleToggleEnabled = async (newEnabled: boolean) => {
+    try {
+      const result = await window.api.cache.setEnabled(newEnabled)
+      if (result.success) {
+        setEnabled(newEnabled)
+        await fetchCacheData() // Refresh stats
+      }
+    } catch (error) {
+      console.error('Failed to toggle cache:', error)
+    }
+  }
+
+  const handleClearCache = async () => {
+    try {
+      const result = await window.api.cache.clear()
+      if (result.success) {
+        await fetchCacheData() // Refresh stats
+      }
+    } catch (error) {
+      console.error('Failed to clear cache:', error)
+    }
+  }
+
+  const formatBytes = (bytes: number): string => {
+    if (bytes === 0) return '0 B'
+    const k = 1024
+    const sizes = ['B', 'KB', 'MB', 'GB']
+    const i = Math.floor(Math.log(bytes) / Math.log(k))
+    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i]
+  }
+
+  const formatDuration = (ms: number): string => {
+    const minutes = Math.floor(ms / 60000)
+    const seconds = Math.floor((ms % 60000) / 1000)
+    return `${minutes}m ${seconds}s`
+  }
+
+  useEffect(() => {
+    fetchCacheData()
+
+    // Auto-refresh every 5 seconds
+    const interval = setInterval(fetchCacheData, 5000)
+    return () => clearInterval(interval)
+  }, [])
+
+  if (loading && !stats) {
+    return (
+      <Box p="4">
+        <Text>Loading cache statistics...</Text>
+      </Box>
+    )
+  }
+
+  return (
+    <Box p="4">
+      <Card>
+        <Flex direction="column" gap="3">
+          <Flex justify="between" align="center">
+            <Text size="4" weight="bold">
+              Query Cache Monitor
+            </Text>
+            <Flex gap="2">
+              <Button variant="soft" size="1" onClick={fetchCacheData}>
+                <ReloadIcon />
+              </Button>
+              <Button variant="soft" color="red" size="1" onClick={handleClearCache}>
+                <TrashIcon />
+              </Button>
+            </Flex>
+          </Flex>
+
+          <Flex justify="between" align="center">
+            <Text size="2">Cache Enabled</Text>
+            <Switch checked={enabled} onCheckedChange={handleToggleEnabled} />
+          </Flex>
+
+          {stats && (
+            <>
+              <Flex justify="between">
+                <Text size="2">Hit Ratio</Text>
+                <Badge
+                  color={stats.hitRatio > 0.5 ? 'green' : stats.hitRatio > 0.2 ? 'yellow' : 'red'}
+                >
+                  {(stats.hitRatio * 100).toFixed(1)}%
+                </Badge>
+              </Flex>
+
+              <Flex justify="between">
+                <Text size="2">Hits</Text>
+                <Text size="2">{stats.hits.toLocaleString()}</Text>
+              </Flex>
+
+              <Flex justify="between">
+                <Text size="2">Misses</Text>
+                <Text size="2">{stats.misses.toLocaleString()}</Text>
+              </Flex>
+
+              <Flex justify="between">
+                <Text size="2">Total Entries</Text>
+                <Text size="2">{stats.totalEntries.toLocaleString()}</Text>
+              </Flex>
+
+              <Flex justify="between">
+                <Text size="2">Memory Usage</Text>
+                <Text size="2">{formatBytes(stats.totalMemory)}</Text>
+              </Flex>
+
+              <Flex justify="between">
+                <Text size="2">Evictions</Text>
+                <Text size="2">{stats.evictions.toLocaleString()}</Text>
+              </Flex>
+            </>
+          )}
+
+          {config && (
+            <>
+              <Text size="3" weight="bold">
+                Configuration
+              </Text>
+
+              <Flex justify="between">
+                <Text size="2">Max Entries</Text>
+                <Text size="2">{config.maxSize.toLocaleString()}</Text>
+              </Flex>
+
+              <Flex justify="between">
+                <Text size="2">Max Memory</Text>
+                <Text size="2">{formatBytes(config.maxMemory)}</Text>
+              </Flex>
+
+              <Flex justify="between">
+                <Text size="2">Default TTL</Text>
+                <Text size="2">{formatDuration(config.defaultTTL)}</Text>
+              </Flex>
+
+              <Flex justify="between">
+                <Text size="2">Compression</Text>
+                <Badge color={config.enableCompression ? 'green' : 'gray'}>
+                  {config.enableCompression ? 'Enabled' : 'Disabled'}
+                </Badge>
+              </Flex>
+            </>
+          )}
+        </Flex>
+      </Card>
+
+      {stats && config && (
+        <Card mt="3">
+          <Flex direction="column" gap="2">
+            <Text size="3" weight="bold">
+              Performance Insights
+            </Text>
+
+            {stats.hitRatio < 0.2 && (
+              <Text size="2" color="red">
+                • Low cache hit ratio - consider increasing TTL or checking query patterns
+              </Text>
+            )}
+
+            {stats.evictions > stats.hits * 0.1 && (
+              <Text size="2" color="yellow">
+                • High eviction rate - consider increasing max size or memory limit
+              </Text>
+            )}
+
+            {stats.totalMemory / config.maxMemory > 0.8 && (
+              <Text size="2" color="yellow">
+                • Memory usage is high - cache may start evicting entries soon
+              </Text>
+            )}
+
+            {stats.hitRatio > 0.5 && stats.evictions < stats.hits * 0.05 && (
+              <Text size="2" color="green">
+                • Cache is performing well!
+              </Text>
+            )}
+          </Flex>
+        </Card>
+      )}
+    </Box>
+  )
+}
diff --git a/src/renderer/components/CacheMonitor/index.ts b/src/renderer/components/CacheMonitor/index.ts
new file mode 100644
index 0000000..822c297
--- /dev/null
+++ b/src/renderer/components/CacheMonitor/index.ts
@@ -0,0 +1 @@
+export { CacheMonitor } from './CacheMonitor'
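The diff does not show where CacheMonitor is mounted; a minimal sketch of wiring it into an existing settings or debug view (placement hypothetical):

import { CacheMonitor } from './components/CacheMonitor'

export function DebugPanel() {
  // The component auto-refreshes its own data every 5 seconds once mounted
  return <CacheMonitor />
}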