Skip to content

Commit cfdc506

Browse files
committed
Add placeholder for sidepanel back in and adjust health check.
1 parent 639ceac commit cfdc506

File tree

27 files changed

+1592
-101
lines changed

27 files changed

+1592
-101
lines changed

apps/fluster/src-python/fluster_sidecar_api/core/api/v1/ai/chat/ai_chat.py renamed to apps/fluster/src-python/fluster_sidecar_api/core/api/v1/ai/chat/general.py

File renamed without changes.

apps/fluster/src-python/fluster_sidecar_api/core/api/v1/ai/chat/note_chat.py renamed to apps/fluster/src-python/fluster_sidecar_api/core/api/v1/ai/chat/note.py

File renamed without changes.
Lines changed: 76 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,76 @@
from typing import List

from flask_restful import Resource, reqparse
from flusterpy.core.static.database_tables import DatabaseTable
from flusterpy.features.db.methods.get_table import get_database, get_database_dir
from langchain_ollama.llms import OllamaLLM


class SyncAi(Resource):
    """GET /ai/sync — (re)build the AI vector store from the user's notes.

    NOTE(review): flask_restful dispatches resource methods synchronously; an
    ``async def get`` returns an un-awaited coroutine under plain Flask/WSGI —
    confirm the app runs under an async-aware setup before shipping.
    """

    # Tables that must be rebuilt from scratch on every sync.
    tables_to_drop: List[DatabaseTable] = [DatabaseTable.Vector]

    async def drop_tables(self):
        """Drop every table listed in ``tables_to_drop`` so a sync re-creates it."""
        db = get_database()
        for table in self.tables_to_drop:
            db.drop_table(str(table))

    async def get(self):
        """Parse sync parameters and kick off the vector-store rebuild.

        Returns a 400 with an error payload when the request arguments are
        missing or of the wrong type; otherwise a placeholder success string.
        """
        parser = reqparse.RequestParser()
        parser.add_argument(
            "model",
            required=True,
            type=str,
            help="The embedding model to use with Ollama.",
        )
        parser.add_argument(
            "notes_directory",
            required=True,
            type=str,
            help="The absolute path to the user's notes directory.",
        )
        # reqparse's `type` must be a callable converting ONE value; `List[str]`
        # is not callable. List inputs use `type=str` with `action="append"`.
        parser.add_argument(
            "parsable_files",
            required=True,
            type=str,
            action="append",
            help="A list of absolute paths to all parsable files.",
        )
        parser.add_argument(
            "temperature",
            required=False,
            type=float,  # was List[str]; temperature is a single float
            help="The temperature to use for the embedding model.",
        )
        parser.add_argument(
            "top_k",
            required=False,
            type=int,  # `int | None` is not a converter; optional args default to None
            help="The top_k to use for the embedding model.",
        )
        parser.add_argument(
            "url_override",
            required=False,
            type=str,  # was int; this is a connection URL
            help="The ollama connection url to use.",
        )
        parser.add_argument(
            "top_p",
            required=False,
            type=float,  # was `float | None`; optional args default to None anyway
            help="The top_p to use for the embedding model.",
        )
        try:
            args = parser.parse_args()
        except Exception as e:
            # Handle parsing errors (e.g., missing required field or wrong type)
            return {"message": "Invalid request data.", "error": str(e)}, 400
        # Single LLM handle; the original built this AND an undefined
        # `Ollama(...)` (NameError — only OllamaLLM is imported).
        model = OllamaLLM(
            model=args["model"],
            temperature=args["temperature"],
            top_k=args["top_k"],
            top_p=args["top_p"],
            base_url=args["url_override"],
        )
        # TODO(review): `model`, `notes_directory` and `parsable_files` are not
        # used yet — the actual indexing work is still to be implemented.
        return "Syncing..."

apps/fluster/src-python/fluster_sidecar_api/features/ai/chat/methods/get_chat_messages.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,5 +5,4 @@
55
def get_chat_messages(chat_id: str):
    # Fetch every stored message belonging to one chat and return the result
    # as a pandas DataFrame (presumably a lancedb-style table — the search
    # handle exposes `.to_pandas()`).
    # NOTE(review): the filter interpolates chat_id unquoted into the query
    # string — confirm the backing store parses this as intended and that the
    # id cannot carry query syntax (injection risk).
    message_table = get_table(DatabaseTable.AiChatMessage)
    matching_rows = message_table.search(f"chat_id={chat_id}")
    return matching_rows.to_pandas()

apps/fluster/src-python/fluster_sidecar_api/main.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,9 @@
33
from flask_cors import CORS
44
from flask_restful import Api
55

6-
from core.api.v1.ai.chat.ai_chat import AiGeneralChatRoute
7-
from core.api.v1.ai.chat.note_chat import SingleNoteChat
6+
from core.api.v1.ai.chat.general import AiGeneralChatRoute
7+
from core.api.v1.ai.chat.note import SingleNoteChat
8+
from core.api.v1.ai.sync.index import SyncAi
89

910

1011
app = Flask(__name__)
@@ -15,6 +16,7 @@
1516

1617
api.add_resource(AiGeneralChatRoute, "/ai/chat/general")
1718
api.add_resource(SingleNoteChat, "/ai/chat/note")
19+
api.add_resource(SyncAi, "/ai/sync")
1820

1921

2022
@app.after_request

apps/fluster/src-tauri/src/core/database/tables/table_paths.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -81,4 +81,6 @@ pub enum DatabaseTables {
8181
// -- Whiteboard --
8282
#[strum(to_string = "whiteboard")]
8383
Whiteboard,
84+
#[strum(to_string = "vector")]
85+
Vector,
8486
}

apps/fluster/src-tauri/src/core/utils/initialize/initialize_database.rs

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -257,6 +257,13 @@ pub async fn initialize_database() -> FlusterResult<()> {
257257
entity: WhiteboardEntity::arrow_schema(None),
258258
set_indices: None,
259259
},
260+
// -- Vector Store --
261+
TableInitData {
262+
table: DatabaseTables::Vector,
263+
// Use any schema here since it will just be overwritten.
264+
entity: WhiteboardEntity::arrow_schema(None),
265+
set_indices: None,
266+
},
260267
];
261268
if let Ok(db_path) = get_database_path() {
262269
let db = connect(db_path.to_str().unwrap())

apps/fluster/src-tauri/src/features/ai/commands/get_local_ollama_models.rs

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,10 +10,21 @@ pub struct LocalModelData {
1010
pub size: u64,
1111
}
1212

13+
#[derive(Serialize, Deserialize, specta::Type)]
14+
pub struct OllamaConnectionData {
15+
url: String,
16+
port: u16,
17+
}
18+
1319
#[tauri::command]
1420
#[specta::specta]
15-
pub async fn get_local_ollama_models() -> FlusterResult<Vec<LocalModelData>> {
16-
let ollama = Ollama::default();
21+
pub async fn get_local_ollama_models(
22+
connection_data: Option<OllamaConnectionData>,
23+
) -> FlusterResult<Vec<LocalModelData>> {
24+
let ollama = match connection_data {
25+
None => Ollama::default(),
26+
Some(cd) => Ollama::new(cd.url, cd.port),
27+
};
1728
let res = ollama
1829
.list_local_models()
1930
.await

apps/fluster/src-tauri/src/features/health/get_health_report.rs

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,12 +16,13 @@ pub struct DesktopHealthReport {
1616
}
1717

1818
pub async fn database_tables_exist(db: &FlusterDb<'_>) -> bool {
19-
let table_names = db.table_names().execute().await;
20-
if table_names.is_err() {
21-
false
19+
if let Ok(table_names) = db.table_names().execute().await {
20+
let vec_string = DatabaseTables::Vector.to_string();
21+
DatabaseTables::iter()
22+
.filter(|x| x.to_string() != vec_string)
23+
.all(|x| table_names.contains(&x.to_string()))
2224
} else {
23-
let d = table_names.unwrap();
24-
DatabaseTables::iter().all(|x| d.contains(&x.to_string()))
25+
false
2526
}
2627
}
2728

apps/fluster/src/core/lib/bindings.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -660,9 +660,9 @@ async addAiChatRequest(chatId: string, ai: AiSyncSettings, chatInput: AiChatMess
660660
else return { status: "error", error: e as any };
661661
}
662662
},
663-
async getLocalOllamaModels() : Promise<Result<LocalModelData[], FlusterError>> {
663+
async getLocalOllamaModels(connectionData: OllamaConnectionData | null) : Promise<Result<LocalModelData[], FlusterError>> {
664664
try {
665-
return { status: "ok", data: await TAURI_INVOKE("get_local_ollama_models") };
665+
return { status: "ok", data: await TAURI_INVOKE("get_local_ollama_models", { connectionData }) };
666666
} catch (e) {
667667
if(e instanceof Error) throw e;
668668
else return { status: "error", error: e as any };
@@ -1050,6 +1050,7 @@ file_path: string; raw_body: string; ctime: string;
10501050
*/
10511051
last_read: string; vec: number[] }
10521052
export type NoteSummary = { title: string; file_path: string }
1053+
export type OllamaConnectionData = { url: string; port: number }
10531054
export type PaginationProps = { per_page: string; page_number: string }
10541055
export type PlotlyTheme = "ggplot2" | "seaborn" | "simple_white" | "plotly" | "plotly_white" | "plotly_dark" | "presentation" | "xgridoff" | "ygridoff" | "gridon" | "none"
10551056
export type RecentlyAccessedNoteData = { last_read: string; file_path: string }

0 commit comments

Comments
 (0)