1 change: 1 addition & 0 deletions .gitignore
@@ -1 +1,2 @@
memmachine-n8n-nodes-memmachine-*.tgz
**/.DS_Store
54 changes: 26 additions & 28 deletions nodes/Memory/MemMachineMemory.ts
@@ -162,7 +162,6 @@ export class MemMachineMemory {
};

// DEBUG: Log request body
console.log('[MemMachineMemory] DEBUG - Search Body:', JSON.stringify(searchBody, null, 2));
this.config.logger?.info('[MemMachineMemory] DEBUG - Search Body', searchBody);

const headers = {
@@ -245,7 +244,7 @@
} catch (e) {
// Not JSON
}

const apiResponseBody = JSON.stringify(data, null, 2);

// Complete API call span
@@ -266,19 +265,19 @@
'http.status_code': response.status,
'http.status_text': response.statusText,
});

// DEBUG: Log successful response body to debug data format issues
if (response.ok) {
console.log('[MemMachineMemory] DEBUG - Search API Success Response:', apiResponseBody);
this.config.logger?.info('[MemMachineMemory] DEBUG - Search API Success Response:', apiResponseBody);
}

if (!response.ok) {
console.error('[MemMachineMemory] DEBUG - Search API Error:', {
this.config.logger?.error('[MemMachineMemory] DEBUG - Search API Error:', {
status: response.status,
statusText: response.statusText,
body: responseText // Log the raw text
});

const error = new Error(`MemMachine API error: ${response.status} ${response.statusText} - ${responseText}`);
searchSpan && this.config.tracer?.endSpanWithError(searchSpan, error);
throw error;
@@ -350,8 +349,8 @@
}

// DEBUG: Log extracted memory counts
console.log(`[MemMachineMemory] DEBUG - Extracted Memories: Total=${rawMemories.length}, Episodic=${rawEpisodicMemory.length}, Semantic=${rawSemanticMemory.length}`);
this.config.logger?.info(`[MemMachineMemory] DEBUG - Extracted Memories: Total=${rawMemories.length}, Episodic=${rawEpisodicMemory.length}, Semantic=${rawSemanticMemory.length}`);

// Add response metrics to span
searchSpan && this.config.tracer?.addAttributes(searchSpan, {
'memmachine.response.total_count': rawMemories.length,
@@ -401,13 +400,13 @@
if (memory && memory.content && typeof memory.content === 'string' && memory.content.trim() !== '') {
const content = memory.content;
const producer = memory.producer_id || memory.producer || '';

// Determine if this is a user message or agent message based on producer
// Check against configured userId array
const isUserMessage = this.config.userId.some((uid: string) =>
producer && producer.includes(uid)
) || (memory.producer_role === 'user'); // Also check role if available

if (isUserMessage) {
messages.push({
type: 'human',
@@ -423,7 +422,7 @@
}
}
// Handle nested structure (Legacy/Alternative format)
else if (memory && Array.isArray(memory.messages) && memory.messages.length > 0) {
else if (memory && Array.isArray(memory.messages) && memory.messages.length > 0) {
const message = memory.messages[0];
if (message.content && message.content.trim() !== '') {
// Determine if this is a user message or agent message based on producer
@@ -452,8 +451,8 @@
// Sort by timestamp if available, otherwise maintain order
// Limit to contextWindowLength most recent messages
const recentMessages = messages.slice(-this.config.contextWindowLength!);

console.log(`[MemMachineMemory] DEBUG - Final Messages: Total=${messages.length}, Returned=${recentMessages.length}`);
this.config.logger?.info(`[MemMachineMemory] DEBUG - Final Messages: Total=${messages.length}, Returned=${recentMessages.length}`);

this.config.logger?.info('loadMemoryVariables - Retrieved messages', {
totalMessages: messages.length,
@@ -660,9 +659,8 @@
},
],
};

// DEBUG: Log store body
console.log('[MemMachineMemory] DEBUG - Store Body:', JSON.stringify(storeBody, null, 2));
this.config.logger?.info('[MemMachineMemory] DEBUG - Store Body', storeBody);

const headers = {
@@ -849,7 +847,7 @@
* Format memory using template and return as system message
*/
private formatTemplatedMemory(rawEpisodicMemory: any[], rawProfileMemory: any[], rawSemanticMemory: any[], episodeSummary: string[]): MemoryVariables {
console.log('[MemMachineMemory] Formatting templated memory', {
this.config.logger?.info('[MemMachineMemory] Formatting templated memory', {
episodicCount: rawEpisodicMemory.length,
profileCount: rawProfileMemory.length,
semanticCount: rawSemanticMemory.length,
@@ -860,15 +858,15 @@
const flattenedMemories: EpisodicMemoryItem[] = [];
const seenEpisodes = new Set<string>();

console.log('[MemMachineMemory] DEBUG - Processing episodic memories. Raw count:', rawEpisodicMemory.length);
this.config.logger?.info('[MemMachineMemory] DEBUG - Processing episodic memories. Raw count:', rawEpisodicMemory.length);

if (Array.isArray(rawEpisodicMemory)) {
for (const group of rawEpisodicMemory) {
// Handle both nested array (groups) and flat object structures
const items = Array.isArray(group) ? group : [group];

if (Array.isArray(group)) {
console.log('[MemMachineMemory] DEBUG - Processing memory group size:', group.length);
this.config.logger?.info('[MemMachineMemory] DEBUG - Processing memory group size:', group.length);
}

for (const item of items) {
@@ -885,7 +883,7 @@
seenEpisodes.add(episodeKey);

// DEBUG: Log accepted episode
// console.log('[MemMachineMemory] DEBUG - Adding episode:', content.substring(0, 30) + '...');
// this.config.logger?.info('[MemMachineMemory] DEBUG - Adding episode:', content.substring(0, 30) + '...');

flattenedMemories.push({
episode_content: content,
@@ -900,16 +898,16 @@
user_metadata: item.user_metadata,
});
} else {
console.log('[MemMachineMemory] DEBUG - Duplicate episode skipped:', episodeKey.substring(0, 50) + '...');
this.config.logger?.info('[MemMachineMemory] DEBUG - Duplicate episode skipped:', episodeKey.substring(0, 50) + '...');
}
} else {
console.log('[MemMachineMemory] DEBUG - Invalid or empty episode item encountered:', JSON.stringify(item).substring(0, 100));
this.config.logger?.info('[MemMachineMemory] DEBUG - Invalid or empty episode item encountered:', JSON.stringify(item).substring(0, 100));
}
}
}
}

console.log('[MemMachineMemory] DEBUG - Final flattened memories count:', flattenedMemories.length);
this.config.logger?.info('[MemMachineMemory] DEBUG - Final flattened memories count:', flattenedMemories.length);

// Transform profile memory to expected structure with deduplication
const profileMemoryFacts: any[] = [];
@@ -949,7 +947,7 @@
entities: {},
};

console.log('[MemMachineMemory] DEBUG - Profile/Semantic Memory Processing:', {
this.config.logger?.info('[MemMachineMemory] DEBUG - Profile/Semantic Memory Processing:', {
rawProfileCount: Array.isArray(rawProfileMemory) ? rawProfileMemory.length : 0,
dedupedProfileCount: profileMemoryFacts.length,
dedupedSemanticCount: deduplicatedSemanticMemory.length
@@ -959,8 +957,8 @@
const historyCount = this.config.historyCount !== undefined ? this.config.historyCount : 5;
const shortTermCount = this.config.shortTermCount !== undefined ? this.config.shortTermCount : 10;
const categorized = categorizeMemories(flattenedMemories, historyCount, shortTermCount);

console.log('[MemMachineMemory] DEBUG - Memory Categorization:', {
this.config.logger?.info('[MemMachineMemory] DEBUG - Memory Categorization:', {
totalFlattened: flattenedMemories.length,
historyCountConfig: historyCount,
shortTermCountConfig: shortTermCount,
@@ -978,8 +976,8 @@
episodeSummary,
);

console.log('[MemMachineMemory] Templated context length:', contextText.length);
console.log('[MemMachineMemory] Rendered context preview:', contextText.substring(0, 500));
this.config.logger?.info('[MemMachineMemory] Templated context length:', contextText.length);
this.config.logger?.info('[MemMachineMemory] Rendered context preview:', contextText.substring(0, 500));

// Return formatted context as a system message (n8n compatible format)
return {
24 changes: 12 additions & 12 deletions nodes/Memory/Memory.node.ts
@@ -577,11 +577,11 @@ export class MemMachine implements INodeType {
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();
const returnData: INodeExecutionData[] = [];

// Get mode first to determine if operation parameter is available
const mode = this.getNodeParameter('mode', 0, 'manual') as string;
let operation = '';

// Only get operation parameter if mode is 'manual' (it's only visible in manual mode)
if (mode === 'manual') {
try {
@@ -1342,28 +1342,28 @@ export class MemMachine implements INodeType {
let tracingEnabled = false;
let traceFormat: 'json' | 'human' = 'json';
let traceVerbosity: 'minimal' | 'normal' | 'verbose' = 'minimal';

try {
tracingEnabled = this.getNodeParameter('tracingEnabled', itemIndex, false) as boolean;
} catch (error) {
// Default to false if parameter can't be retrieved
tracingEnabled = false;
}

try {
traceFormat = this.getNodeParameter('traceFormat', itemIndex, 'json') as 'json' | 'human';
} catch (error) {
// Default to 'json' if parameter can't be retrieved
traceFormat = 'json';
}

try {
traceVerbosity = this.getNodeParameter('traceVerbosity', itemIndex, 'minimal') as 'minimal' | 'normal' | 'verbose';
} catch (error) {
// Default to 'minimal' if parameter can't be retrieved
traceVerbosity = 'minimal';
}

const cloudTracer = new MemoryTracer({
enabled: tracingEnabled,
format: traceFormat,
@@ -1466,15 +1466,15 @@
{ itemIndex }
);
}

let contextWindowLength: number;
try {
contextWindowLength = this.getNodeParameter('contextWindowLength', itemIndex, 10) as number;
} catch (error) {
// Use default value if parameter can't be retrieved
contextWindowLength = 10;
}

// Extract template parameters with conditional access
// Note: memoryContextTemplate and memoryTemplateOptions are only visible when enableMemoryTemplate is true
let enableTemplate: boolean;
@@ -1484,12 +1484,12 @@
// If parameter can't be retrieved, default to false
enableTemplate = false;
}

let contextTemplate = '';
let templateOptions: IDataObject = {};
let historyCount = 5;
let shortTermCount = 10;

// Only get template parameters if template is enabled (they're only visible when enabled)
if (enableTemplate) {
try {
@@ -1548,14 +1548,14 @@
const legacyTracer: MemoryTracer | undefined = undefined;
let exportToJaeger = false;
let jaegerEndpoint = 'http://jaeger:4318/v1/traces';

try {
exportToJaeger = this.getNodeParameter('exportToJaeger', itemIndex, false) as boolean;
} catch (error) {
// Default to false if parameter can't be retrieved
exportToJaeger = false;
}

try {
jaegerEndpoint = this.getNodeParameter('jaegerOtlpEndpoint', itemIndex, 'http://jaeger:4318/v1/traces') as string;
} catch (error) {
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "@memmachine/n8n-nodes-memmachine",
"version": "2.0.0",
"version": "2.0.2",
"description": "n8n community node for MemMachine conversational memory integration with AI Agents",
"author": {
"name": "Christian Kniep",
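For reference, the pattern these hunks converge on — an optionally injected logger replacing direct `console.log`/`console.error` calls — looks roughly like the sketch below. The `Logger` shape and config fields are assumptions inferred from the calls visible in the diff, not the project's actual type definitions.

```typescript
// Minimal sketch of the optional-logger pattern (assumed types, not the real ones).
interface Logger {
	info(message: string, meta?: unknown): void;
	error(message: string, meta?: unknown): void;
}

interface MemMachineMemoryConfig {
	logger?: Logger; // optional: absent when no logger is injected
	userId: string[];
	contextWindowLength?: number;
}

class MemMachineMemorySketch {
	constructor(private config: MemMachineMemoryConfig) {}

	search(query: string): void {
		const searchBody = { query, users: this.config.userId };
		// Optional chaining makes the call a no-op when no logger is configured,
		// rather than writing unconditionally to stdout as console.log would.
		this.config.logger?.info('[MemMachineMemory] DEBUG - Search Body', searchBody);
	}
}
```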