Skip to content

Commit 58f215d

Browse files
authored
Merge pull request #158 from nextcloud/feat/memories
feat(chat): Add optional memories input for core:text2text:chat
2 parents cbdc6e2 + 0eb727b commit 58f215d

File tree

2 files changed

+18
-11
lines changed

2 files changed

+18
-11
lines changed

lib/chat.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,9 @@ def __call__(
2525
self,
2626
inputs: dict[str, Any],
2727
) -> dict[str, str]:
28+
system_prompt = inputs['system_prompt']
29+
if inputs.get('memories'):
30+
system_prompt += "\n\nYou can remember things from other conversations with the user. If they are relevant, take into account the following memories: \n" + "\n\n".join(inputs['memories']) + "\n\n"
2831
return {'output': self.runnable.invoke(
29-
[(message['role'], message['content']) for message in [json.loads(message) for message in inputs['history']]] + [('human', inputs['input'])]
32+
[('human', system_prompt)] + [(message['role'], message['content']) for message in [json.loads(message) for message in inputs['history']]] + [('human', inputs['input'])]
3033
).content}

lib/main.py

Lines changed: 14 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,8 @@
1717
from fastapi import FastAPI
1818
from nc_py_api import AsyncNextcloudApp, NextcloudApp, NextcloudException
1919
from nc_py_api.ex_app import LogLvl, persistent_storage, run_app, set_handlers
20-
from nc_py_api.ex_app.providers.task_processing import TaskProcessingProvider, ShapeEnumValue
20+
from nc_py_api.ex_app.providers.task_processing import ShapeDescriptor, ShapeType, TaskProcessingProvider, \
21+
ShapeEnumValue
2122

2223
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', handlers=[logging.StreamHandler()])
2324
logger = logging.getLogger(__name__)
@@ -157,15 +158,18 @@ async def enabled_handler(enabled: bool, nc: AsyncNextcloudApp) -> str:
157158
name="Local Large language Model: " + model,
158159
task_type=task,
159160
expected_runtime=30,
160-
input_shape_enum_values= {
161-
"tone": [
162-
ShapeEnumValue(name= "Friendlier", value= "friendlier"),
163-
ShapeEnumValue(name= "More formal", value= "more formal"),
164-
ShapeEnumValue(name= "Funnier", value= "funnier"),
165-
ShapeEnumValue(name= "More casual", value= "more casual"),
166-
ShapeEnumValue(name= "More urgent", value= "more urgent"),
167-
],
168-
} if task == "core:text2text:changetone" else {}
161+
input_shape_enum_values= {
162+
"tone": [
163+
ShapeEnumValue(name= "Friendlier", value= "friendlier"),
164+
ShapeEnumValue(name= "More formal", value= "more formal"),
165+
ShapeEnumValue(name= "Funnier", value= "funnier"),
166+
ShapeEnumValue(name= "More casual", value= "more casual"),
167+
ShapeEnumValue(name= "More urgent", value= "more urgent"),
168+
],
169+
} if task == "core:text2text:changetone" else {},
170+
optional_input_shape=[
171+
ShapeDescriptor(name="memories", description="Memories to inject into the prompt", shape_type=ShapeType.LIST_OF_TEXTS)
172+
] if task == "core:text2text:chat" else [],
169173
)
170174
await nc.providers.task_processing.register(provider)
171175
log(nc, LogLvl.INFO, f"Registered {task_processor_name}")

0 commit comments

Comments (0)