Skip to content

Commit eae6c63

Browse files
committed
feat(cli): add ReMeCli class with interactive chat functionality
1 parent bba5094 commit eae6c63

File tree

16 files changed: +680 additions, -512 deletions

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -109,7 +109,7 @@ Repository = "https://github.com/agentscope-ai/ReMe"
109109
[project.scripts]
110110
reme = "reme_ai.main:main"
111111
reme2 = "reme.reme:main"
112-
remefs = "reme.reme_fs:main"
112+
remecli = "reme.reme_cli:main"
113113

114114
[tool.pytest.ini_options]
115115
asyncio_default_fixture_loop_scope = "function"

reme/__init__.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
from . import tool
77
from . import workflow
88
from .reme import ReMe
9+
from .reme_cli import ReMeCli
910
from .reme_fs import ReMeFs
1011

1112
__all__ = [
@@ -15,10 +16,11 @@
1516
"tool",
1617
"workflow",
1718
"ReMe",
19+
"ReMeCli",
1820
"ReMeFs",
1921
]
2022

21-
__version__ = "0.3.0.0a4"
23+
__version__ = "0.3.0.0a5"
2224

2325

2426
"""

reme/agent/chat/fs_cli.py

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,6 @@ def __init__(
2020
context_window_tokens: int = 128000,
2121
reserve_tokens: int = 36000,
2222
keep_recent_tokens: int = 20000,
23-
hybrid_enabled: bool = True,
24-
hybrid_vector_weight: float = 0.7,
25-
hybrid_text_weight: float = 0.3,
26-
hybrid_candidate_multiplier: float = 3.0,
2723
**kwargs,
2824
):
2925
super().__init__(**kwargs)
@@ -32,10 +28,6 @@ def __init__(
3228
self.context_window_tokens: int = context_window_tokens
3329
self.reserve_tokens: int = reserve_tokens
3430
self.keep_recent_tokens: int = keep_recent_tokens
35-
self.hybrid_enabled: bool = hybrid_enabled
36-
self.hybrid_vector_weight: float = hybrid_vector_weight
37-
self.hybrid_text_weight: float = hybrid_text_weight
38-
self.hybrid_candidate_multiplier: float = hybrid_candidate_multiplier
3931

4032
self.messages: list[Message] = []
4133
self.previous_summary: str = ""
@@ -168,7 +160,7 @@ async def execute(self):
168160
messages = await self.build_messages()
169161
for i, message in enumerate(messages):
170162
role = message.name or message.role
171-
logger.info(f"[{self.__class__.__name__}] role={role} {message.simple_dump(as_dict=False)}")
163+
logger.info(f"[{self.__class__.__name__}] msg[{i}] role={role} {message.simple_dump(as_dict=False)}")
172164

173165
t_tools, messages, success = await self.react(messages, self.tools)
174166

reme/agent/fs/fs_summarizer.py

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,13 +29,29 @@ async def build_messages(self) -> list[Message]:
2929
role=Role.USER,
3030
content=self.prompt_format(
3131
"user_message_default",
32-
conversation=format_messages(messages, add_index=False),
3332
working_dir=self.working_dir,
3433
date=date_str,
3534
memory_dir=self.memory_dir,
3635
),
3736
),
3837
)
38+
39+
elif self.version == "v1":
40+
conversation = format_messages(messages, add_index=False)
41+
messages = [
42+
Message(
43+
role=Role.USER,
44+
content=f"<conversation>\n{conversation}\n</conversation>\n"
45+
+ self.prompt_format(
46+
"user_message_default",
47+
conversation=format_messages(messages, add_index=False),
48+
working_dir=self.working_dir,
49+
date=date_str,
50+
memory_dir=self.memory_dir,
51+
),
52+
),
53+
]
54+
3955
else:
4056
messages.extend(
4157
[

reme/config/fs.yaml

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,13 +18,15 @@ embedding_models:
1818
memory_stores:
1919
default:
2020
backend: sqlite
21-
store_name: test_hybrid
21+
store_name: reme
2222
embedding_model: default
2323
fts_enabled: true
24+
vector_enabled: false
2425

2526
file_watchers:
2627
default:
2728
backend: full
29+
memory_store: default
2830
watch_paths: [".reme", ".reme/memory"]
2931
suffix_filters: [".md"]
3032
recursive: false

reme/core/application.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ def __init__(
2424
llm_base_url: str | None = None,
2525
embedding_api_key: str | None = None,
2626
embedding_base_url: str | None = None,
27+
working_dir: str | None = None,
2728
config_path: str | None = None,
2829
enable_logo: bool = True,
2930
log_to_console: bool = True,
@@ -44,6 +45,7 @@ def __init__(
4445
embedding_base_url=embedding_base_url,
4546
service_config=None,
4647
parser=parser,
48+
working_dir=working_dir,
4749
config_path=config_path,
4850
enable_logo=enable_logo,
4951
log_to_console=log_to_console,

reme/core/context/service_context.py

Lines changed: 56 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
import os
44
from concurrent.futures import ThreadPoolExecutor
5+
from pathlib import Path
56
from typing import TYPE_CHECKING
67

78
from loguru import logger
@@ -34,6 +35,7 @@ def __init__(
3435
embedding_base_url: str | None = None,
3536
service_config: ServiceConfig | None = None,
3637
parser: type[PydanticConfigParser] | None = None,
38+
working_dir: str | None = None,
3739
config_path: str | None = None,
3840
enable_logo: bool = True,
3941
log_to_console: bool = True,
@@ -74,13 +76,23 @@ def __init__(
7476
self._update_section_config(kwargs, "memory_stores", **default_memory_store_config)
7577
if default_file_watcher_config:
7678
self._update_section_config(kwargs, "file_watchers", **default_file_watcher_config)
77-
kwargs["enable_logo"] = enable_logo
78-
kwargs["log_to_console"] = log_to_console
79+
80+
kwargs.update(
81+
{
82+
"enable_logo": enable_logo,
83+
"log_to_console": log_to_console,
84+
"working_dir": working_dir,
85+
},
86+
)
7987
logger.info(f"update with args: {input_args} kwargs: {kwargs}")
8088
service_config = parser.parse_args(*input_args, **kwargs)
8189

8290
self.service_config: ServiceConfig = service_config
8391
init_logger(log_to_console=self.service_config.log_to_console)
92+
logger.info(f"ReMe Config: {service_config.model_dump_json()}")
93+
94+
if self.service_config.working_dir:
95+
Path(self.service_config.working_dir).mkdir(parents=True, exist_ok=True)
8496

8597
if self.service_config.enable_logo:
8698
print_logo(service_config=self.service_config)
@@ -147,41 +159,58 @@ def _build_flows(self):
147159
async def start(self):
148160
"""Start the service context by initializing all configured components."""
149161
for name, config in self.service_config.llms.items():
150-
self.llms[name] = R.llms[config.backend](model_name=config.model_name, **config.model_extra)
162+
if config.backend not in R.llms:
163+
logger.warning(f"LLM backend {config.backend} is not supported.")
164+
else:
165+
self.llms[name] = R.llms[config.backend](model_name=config.model_name, **config.model_extra)
151166

152167
for name, config in self.service_config.embedding_models.items():
153-
self.embedding_models[name] = R.embedding_models[config.backend](
154-
model_name=config.model_name,
155-
**config.model_extra,
156-
)
168+
if config.backend not in R.embedding_models:
169+
logger.warning(f"Embedding model backend {config.backend} is not supported.")
170+
else:
171+
self.embedding_models[name] = R.embedding_models[config.backend](
172+
model_name=config.model_name,
173+
**config.model_extra,
174+
)
157175

158176
for name, config in self.service_config.token_counters.items():
159-
self.token_counters[name] = R.token_counters[config.backend](
160-
model_name=config.model_name,
161-
**config.model_extra,
162-
)
177+
if config.backend not in R.token_counters:
178+
logger.warning(f"Token counter backend {config.backend} is not supported.")
179+
else:
180+
self.token_counters[name] = R.token_counters[config.backend](
181+
model_name=config.model_name,
182+
**config.model_extra,
183+
)
163184

164185
for name, config in self.service_config.vector_stores.items():
165-
# Extract config dict and replace special fields with actual instances
166-
config_dict = config.model_dump(exclude={"backend", "embedding_model"})
167-
config_dict["embedding_model"] = self.embedding_models[config.embedding_model]
168-
config_dict["thread_pool"] = self.thread_pool
169-
self.vector_stores[name] = R.vector_stores[config.backend](**config_dict)
170-
await self.vector_stores[name].create_collection(config.collection_name)
186+
if config.backend not in R.vector_stores:
187+
logger.warning(f"Vector store backend {config.backend} is not supported.")
188+
else:
189+
# Extract config dict and replace special fields with actual instances
190+
config_dict = config.model_dump(exclude={"backend", "embedding_model"})
191+
config_dict["embedding_model"] = self.embedding_models[config.embedding_model]
192+
config_dict["thread_pool"] = self.thread_pool
193+
self.vector_stores[name] = R.vector_stores[config.backend](**config_dict)
194+
await self.vector_stores[name].create_collection(config.collection_name)
171195

172196
for name, config in self.service_config.memory_stores.items():
173-
# Extract config dict and replace embedding_model string with actual instance
174-
config_dict = config.model_dump(exclude={"backend", "embedding_model"})
175-
config_dict["embedding_model"] = self.embedding_models[config.embedding_model]
176-
self.memory_stores[name] = R.memory_stores[config.backend](**config_dict)
177-
await self.memory_stores[name].start()
197+
if config.backend not in R.memory_stores:
198+
logger.warning(f"Memory store backend {config.backend} is not supported.")
199+
else:
200+
# Extract config dict and replace embedding_model string with actual instance
201+
config_dict = config.model_dump(exclude={"backend", "embedding_model"})
202+
config_dict["embedding_model"] = self.embedding_models[config.embedding_model]
203+
self.memory_stores[name] = R.memory_stores[config.backend](**config_dict)
204+
await self.memory_stores[name].start()
178205

179206
for name, config in self.service_config.file_watchers.items():
180-
# Extract config dict and replace memory_store string with actual instance
181-
config_dict = config.model_dump(exclude={"backend", "memory_store"})
182-
config_dict["memory_store"] = self.memory_stores[config.memory_store]
183-
self.file_watchers[name] = R.file_watchers[config.backend](**config_dict)
184-
await self.file_watchers[name].start()
207+
if config.backend not in R.file_watchers:
208+
logger.warning(f"File watcher backend {config.backend} is not supported.")
209+
else:
210+
config_dict = config.model_dump(exclude={"backend", "memory_store"})
211+
config_dict["memory_store"] = self.memory_stores[config.memory_store]
212+
self.file_watchers[name] = R.file_watchers[config.backend](**config_dict)
213+
await self.file_watchers[name].start()
185214

186215
if self.service_config.mcp_servers:
187216
await self.prepare_mcp_servers()

reme/core/file_watcher/full_file_watcher.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@
55
"""
66

77
import asyncio
8-
import os
98
from pathlib import Path
109

1110
from loguru import logger

reme/core/memory_store/base_memory_store.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ def __init__(
1515
self,
1616
store_name: str,
1717
embedding_model: BaseEmbeddingModel,
18+
vector_enabled: bool = False,
1819
fts_enabled: bool = True,
1920
**kwargs,
2021
):
@@ -23,14 +24,17 @@ def __init__(
2324
# Only allow alphanumeric characters and underscores
2425
if not re.match(r"^[a-zA-Z0-9_]+$", store_name):
2526
raise ValueError(f"Invalid '{store_name}'. Only alphanumeric characters and underscores are allowed.")
27+
28+
# Ensure at least one search method is enabled
29+
if not vector_enabled and not fts_enabled:
30+
raise ValueError("At least one of vector_enabled or fts_enabled must be True.")
31+
2632
self.store_name: str = store_name
2733
self.embedding_model: BaseEmbeddingModel = embedding_model
34+
self.vector_enabled: bool = vector_enabled
2835
self.fts_enabled: bool = fts_enabled
2936
self.kwargs: dict = kwargs
3037

31-
self.vector_available = False
32-
self.fts_available = False
33-
3438
@property
3539
def embedding_dim(self) -> int:
3640
"""Get the embedding model's dimensionality."""

0 commit comments

Comments (0)