|
from typing import Optional

from ....env import ContanstTable, CONFIG, LOG
from ....models.blob import OpenAICompatibleMessage
from ....models.response import ProactiveTopicData
from ....models.utils import Promise
from ...profile import get_user_profiles, truncate_profiles
from ...project import get_project_profile_config
from ...status import append_user_status, get_user_statuses
from .detect_interest import detect_chat_interest
from .predict_new_topics import predict_new_topics
| 11 | + |
| 12 | +# from .types import |
| 13 | + |
| 14 | + |
def pack_timeline_prompt(timeline: str, language: str) -> str:
    """Wrap a predicted plot timeline in a role-play instruction header.

    Returns the Chinese template when ``language`` is ``"zh"``; any other
    value falls through to the English template.
    """
    if language == "zh":
        return (
            "## 下面是你的剧本,如果我没有主动提供话题的话,参考下面剧情推动我们的对话:"
            f"\n{timeline}##"
        )
    return (
        "## Here is your script, if I don't provide a topic, "
        "please refer to the following plot to drive our conversation: "
        f"\n{timeline}##"
    )
| 20 | + |
| 21 | + |
async def process_messages(
    user_id: str,
    project_id: str,
    messages: list[OpenAICompatibleMessage],
    agent_context: Optional[str] = None,
    prefer_topics: Optional[list[str]] = None,
    topk: Optional[int] = None,
    max_token_size: Optional[int] = None,
    only_topics: Optional[list[str]] = None,
    max_subtopic_size: Optional[int] = None,
    topic_limits: Optional[dict[str, int]] = None,
) -> Promise[ProactiveTopicData]:
    """Predict a proactive role-play topic from the recent conversation.

    Pipeline: load the project's profile config, detect chat interest in
    ``messages``, fetch prior plot statuses and the user's (truncated)
    profiles, then predict new plot topics.  The decision context is
    persisted as a ``roleplay_plot_status`` status row and the packed
    timeline is returned as the topic prompt.

    Args:
        user_id: Owner of the statuses/profiles read and written.
        project_id: Project whose profile config drives detection.
        messages: Recent OpenAI-compatible chat messages to analyze.
        agent_context: Optional extra agent/persona context passed to the
            topic predictor.
        prefer_topics, topk, max_token_size, only_topics,
        max_subtopic_size, topic_limits: Pass-through filters for
            ``truncate_profiles``.

    Returns:
        Promise resolving to ``ProactiveTopicData(action="new_topic", ...)``,
        or the first failed intermediate Promise (propagated as-is, per the
        codebase's error-forwarding convention).
    """
    p = await get_project_profile_config(project_id)
    if not p.ok():
        return p
    project_profiles = p.data()
    # TODO(review): hard-coded to Chinese for now; restore the configured
    # language once verified:
    #   USE_LANGUAGE = project_profiles.language or CONFIG.language
    USE_LANGUAGE = "zh"

    interest = await detect_chat_interest(
        project_id,
        messages,
        profile_config=project_profiles,
    )
    if not interest.ok():
        return interest
    interest_data = interest.data()
    # NOTE(review): interest gating is currently disabled — we predict a new
    # topic regardless of interest_data["action"].  The disabled behavior
    # persisted the interest status and returned
    # ProactiveTopicData(action="continue") when the action was not
    # "new_topic".

    latests_statuses = await get_user_statuses(
        user_id, project_id, type=ContanstTable.roleplay_plot_status
    )
    if not latests_statuses.ok():
        return latests_statuses
    latests_statuses_data = latests_statuses.data()

    p = await get_user_profiles(user_id, project_id)
    if not p.ok():
        return p
    p = await truncate_profiles(
        p.data(),
        prefer_topics=prefer_topics,
        topk=topk,
        max_token_size=max_token_size,
        only_topics=only_topics,
        max_subtopic_size=max_subtopic_size,
        topic_limits=topic_limits,
    )
    if not p.ok():
        return p
    user_profiles_data = p.data()
    use_user_profiles = user_profiles_data.profiles
    # Render each profile as a "topic::sub_topic: content" line for the
    # prompt.  (Loop variable renamed from `p`, which shadowed the Promise
    # variable above.)
    user_context = "\n".join(
        f"{prof.attributes.get('topic')}::{prof.attributes.get('sub_topic')}: {prof.content}"
        for prof in use_user_profiles
    )

    p = await predict_new_topics(
        project_id,
        messages,
        latests_statuses_data,
        user_context,
        agent_context,
        project_profiles,
    )
    if not p.ok():
        return p
    plot = p.data()
    # Persist the full decision context so subsequent calls can read the
    # plot history back via get_user_statuses above.
    await append_user_status(
        user_id,
        project_id,
        ContanstTable.roleplay_plot_status,
        {
            "interest": interest_data,
            "new_topic": plot,
            "chats": [m.model_dump() for m in messages],
        },
    )

    return Promise.resolve(
        ProactiveTopicData(
            action="new_topic",
            topic_prompt=pack_timeline_prompt(plot["timeline"], USE_LANGUAGE),
        )
    )
0 commit comments