import os
-from typing import Literal
+from typing import Any, Literal

import yaml
from dotenv import load_dotenv

load_dotenv()


-def load_yaml_settings():
-    config_path = os.getenv("APP_CONFIG_FILE", "config.yaml")
-    if os.path.exists(config_path):
-        with open(config_path) as f:
-            return yaml.safe_load(f) or {}
-    return {}
+# Model configuration mapping
+MODEL_CONFIGS = {
+    "gpt-4o": {"provider": "openai", "embedding_dimensions": None},
+    "gpt-4o-mini": {"provider": "openai", "embedding_dimensions": None},
+    "gpt-4": {"provider": "openai", "embedding_dimensions": None},
+    "gpt-3.5-turbo": {"provider": "openai", "embedding_dimensions": None},
+    "text-embedding-3-small": {"provider": "openai", "embedding_dimensions": 1536},
+    "text-embedding-3-large": {"provider": "openai", "embedding_dimensions": 3072},
+    "text-embedding-ada-002": {"provider": "openai", "embedding_dimensions": 1536},
+    "claude-3-opus-20240229": {"provider": "anthropic", "embedding_dimensions": None},
+    "claude-3-sonnet-20240229": {"provider": "anthropic", "embedding_dimensions": None},
+    "claude-3-haiku-20240307": {"provider": "anthropic", "embedding_dimensions": None},
+    "claude-3-5-sonnet-20240620": {
+        "provider": "anthropic",
+        "embedding_dimensions": None,
+    },
+    "claude-3-5-sonnet-20241022": {
+        "provider": "anthropic",
+        "embedding_dimensions": None,
+    },
+    "claude-3-5-haiku-20241022": {
+        "provider": "anthropic",
+        "embedding_dimensions": None,
+    },
+    "claude-3-7-sonnet-20250219": {
+        "provider": "anthropic",
+        "embedding_dimensions": None,
+    },
+    "claude-3-7-sonnet-latest": {"provider": "anthropic", "embedding_dimensions": None},
+    "claude-3-5-sonnet-latest": {"provider": "anthropic", "embedding_dimensions": None},
+    "claude-3-5-haiku-latest": {"provider": "anthropic", "embedding_dimensions": None},
+    "claude-3-opus-latest": {"provider": "anthropic", "embedding_dimensions": None},
+    "o1": {"provider": "openai", "embedding_dimensions": None},
+    "o1-mini": {"provider": "openai", "embedding_dimensions": None},
+    "o3-mini": {"provider": "openai", "embedding_dimensions": None},
+}


class Settings(BaseSettings):
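
For a quick sense of how this mapping resolves, here is a minimal, illustrative lookup (a sketch only; the import path `agent_memory_server.config` is assumed from the package name used elsewhere in this diff):

# Illustrative only: direct lookups against MODEL_CONFIGS.
from agent_memory_server.config import MODEL_CONFIGS

chat = MODEL_CONFIGS["gpt-4o"]
embed = MODEL_CONFIGS["text-embedding-3-large"]

assert chat["provider"] == "openai"
assert chat["embedding_dimensions"] is None       # chat models carry no dimension hint
assert embed["embedding_dimensions"] == 3072      # embedding models do
assert MODEL_CONFIGS.get("unknown-model") is None  # the properties added further down fall back to {}
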
@@ -28,55 +58,19 @@ class Settings(BaseSettings):
     port: int = 8000
     mcp_port: int = 9000

-    # Long-term memory backend configuration
-    long_term_memory_backend: str = (
-        "redis"  # redis, chroma, pinecone, weaviate, qdrant, etc.
+    # Vector store factory configuration
+    # Python dotted path to function that returns VectorStore or VectorStoreAdapter
+    # Function signature: (embeddings: Embeddings) -> Union[VectorStore, VectorStoreAdapter]
+    # Examples:
+    # - "agent_memory_server.vectorstore_factory.create_redis_vectorstore"
+    # - "my_module.my_vectorstore_factory"
+    # - "my_package.adapters.create_custom_adapter"
+    vectorstore_factory: str = (
+        "agent_memory_server.vectorstore_factory.create_redis_vectorstore"
    )

-    # Redis backend settings (existing)
-    # redis_url already defined above
-
-    # Chroma backend settings
-    chroma_host: str = "localhost"
-    chroma_port: int = 8000
-    chroma_collection_name: str = "agent_memory"
-    chroma_persist_directory: str | None = None
-
-    # Pinecone backend settings
-    pinecone_api_key: str | None = None
-    pinecone_environment: str | None = None
-    pinecone_index_name: str = "agent-memory"
-
-    # Weaviate backend settings
-    weaviate_url: str = "http://localhost:8080"
-    weaviate_api_key: str | None = None
-    weaviate_class_name: str = "AgentMemory"
-
-    # Qdrant backend settings
-    qdrant_url: str = "http://localhost:6333"
-    qdrant_api_key: str | None = None
-    qdrant_collection_name: str = "agent_memory"
-
-    # Milvus backend settings
-    milvus_host: str = "localhost"
-    milvus_port: int = 19530
-    milvus_collection_name: str = "agent_memory"
-    milvus_user: str | None = None
-    milvus_password: str | None = None
-
-    # PostgreSQL/PGVector backend settings
-    postgres_url: str | None = None
-    postgres_table_name: str = "agent_memory"
-
-    # LanceDB backend settings
-    lancedb_uri: str = "./lancedb"
-    lancedb_table_name: str = "agent_memory"
-
-    # OpenSearch backend settings
-    opensearch_url: str = "http://localhost:9200"
-    opensearch_username: str | None = None
-    opensearch_password: str | None = None
-    opensearch_index_name: str = "agent-memory"
+    # RedisVL configuration (used by default Redis factory)
+    redisvl_index_name: str = "memory_records"

     # The server indexes messages in long-term memory by default. If this
     # setting is enabled, we also extract discrete memories from message text
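
To illustrate the documented factory contract, a user-supplied implementation might look roughly like the following (a sketch only; `my_module` is the hypothetical dotted path from the comment above, and the LangChain imports are assumptions, not part of this change):

# my_module.py -- hypothetical factory referenced via
#   vectorstore_factory = "my_module.my_vectorstore_factory"
from langchain_core.embeddings import Embeddings
from langchain_core.vectorstores import InMemoryVectorStore, VectorStore


def my_vectorstore_factory(embeddings: Embeddings) -> VectorStore:
    # The server hands in its configured embeddings; the factory only has to
    # return a LangChain VectorStore (or a VectorStoreAdapter wrapping one).
    return InMemoryVectorStore(embedding=embeddings)
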
@@ -95,10 +89,9 @@ class Settings(BaseSettings):
     ner_model: str = "dbmdz/bert-large-cased-finetuned-conll03-english"
     enable_ner: bool = True

-    # RedisVL Settings (kept for backwards compatibility)
+    # RedisVL Settings
     redisvl_distance_metric: str = "COSINE"
     redisvl_vector_dimensions: str = "1536"
-    redisvl_index_name: str = "memory_idx"
     redisvl_index_prefix: str = "memory_idx"

     # Docket settings
@@ -122,8 +115,54 @@ class Settings(BaseSettings):
     class Config:
         env_file = ".env"
         env_file_encoding = "utf-8"
+        extra = "ignore"  # Ignore extra environment variables
+
+    @property
+    def generation_model_config(self) -> dict[str, Any]:
+        """Get configuration for the generation model."""
+        return MODEL_CONFIGS.get(self.generation_model, {})
+
+    @property
+    def embedding_model_config(self) -> dict[str, Any]:
+        """Get configuration for the embedding model."""
+        return MODEL_CONFIGS.get(self.embedding_model, {})
+
+    def load_yaml_config(self, config_path: str) -> dict[str, Any]:
+        """Load configuration from YAML file."""
+        if not os.path.exists(config_path):
+            return {}
+        with open(config_path) as f:
+            return yaml.safe_load(f) or {}
+
+
+settings = Settings()
+
+
+def get_config():
+    """Get configuration from environment and settings files."""
+    config_data = {}
+
+    # If REDIS_MEMORY_CONFIG is set, load config from file
+    config_file = os.getenv("REDIS_MEMORY_CONFIG")
+    if config_file:
+        try:
+            with open(config_file) as f:
+                if config_file.endswith((".yaml", ".yml")):
+                    config_data = yaml.safe_load(f) or {}
+                else:
+                    # Assume JSON
+                    import json
+
+                    config_data = json.load(f) or {}
+        except FileNotFoundError:
+            print(f"Warning: Config file {config_file} not found")
+        except Exception as e:
+            print(f"Warning: Error loading config file {config_file}: {e}")

+    # Environment variables override file config
+    for key, value in os.environ.items():
+        if key.startswith("REDIS_MEMORY_"):
+            config_key = key[13:].lower()  # Remove REDIS_MEMORY_ prefix
+            config_data[config_key] = value

-    # Load YAML config first, then let env vars override
-    yaml_settings = load_yaml_settings()
-    settings = Settings(**yaml_settings)
+    return config_data
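
As a usage sketch of the new `get_config()` precedence (file values are loaded first, then `REDIS_MEMORY_*` environment variables override them); the file name and import path below are illustrative assumptions:

import os

from agent_memory_server.config import get_config  # import path assumed

# Illustrative environment: point at a YAML file, then override one of its keys.
os.environ["REDIS_MEMORY_CONFIG"] = "memory-config.yaml"  # e.g. contains "port: 8000"
os.environ["REDIS_MEMORY_PORT"] = "9001"                  # env var wins over the file

config = get_config()
# REDIS_MEMORY_* variables are prefix-stripped and lowercased, and their values
# stay strings, so config["port"] == "9001" here regardless of the file value.
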