|
1 | | -import asyncio |
2 | 1 | import logging |
3 | | -from urllib.parse import urljoin |
4 | 2 |
|
5 | | -import aiohttp |
6 | | -import streamlit as st |
7 | | - |
8 | | -from backend.schemas import azure_openai as azure_openai_schemas |
| 3 | +from frontend.pages import misc as misc_pages |
9 | 4 |
|
# Module-level logger named after this module (standard `logging` convention);
# level is configured later by the page entry point via its `log_level` argument.
logger = logging.getLogger(__name__)
11 | 6 |
|
12 | 7 |
|
13 | | -async def http_get(url: str) -> dict: |
14 | | - async with aiohttp.ClientSession() as session: |
15 | | - async with session.get(url) as response: |
16 | | - response.raise_for_status() |
17 | | - return await response.json() |
18 | | - |
19 | | - |
20 | | -async def http_post(url: str, data: dict) -> dict: |
21 | | - async with aiohttp.ClientSession() as session: |
22 | | - async with session.post( |
23 | | - url=url, |
24 | | - json=data, |
25 | | - ) as response: |
26 | | - response.raise_for_status() |
27 | | - return await response.json() |
28 | | - |
29 | | - |
def start(
    solution_name: str,
    backend_url: str,
    log_level: int,
):
    """Route to a page implementation selected by solution name.

    The "hoge" solution is a placeholder with no dedicated page yet; any
    other solution name falls back to the generic misc page.

    Args:
        solution_name: Identifier of the solution to render.
        backend_url: Base URL of the backend API, forwarded to the page.
        log_level: Logging level, forwarded to the page.
    """
    # add solution-specific logic here
    if solution_name == "hoge":
        # Placeholder branch: nothing to render for this solution yet.
        return
    misc_pages.start(
        backend_url=backend_url,
        log_level=log_level,
    )