Skip to content

Commit 56a2ba9

Browse files
author
Daniele Briggi
committed
Initial commit
0 parents  commit 56a2ba9

File tree

4 files changed

+176

-0

lines changed

.github/workflows/test.yaml

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
---
# CI workflow: exercise this action end-to-end against a real corpus
# (the SQLite Cloud docs repository) on every push.
name: Test

on:
  push:

jobs:
  test:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      # Clone a realistic set of documentation files for the action to index.
      - name: Clone the SQLite Cloud docs repository
        run: git clone https://github.com/sqlitecloud/docs
        shell: bash

      # Run the action from this repository's root (./) against the clone.
      - uses: ./
        with:
          connection_string: ${{ secrets.CONNECTION_STRING }}
          base_url: https://docs.sqlitecloud.io/docs/
          database: aisearch-action-test.sqlite
          source_files: docs

action.yaml

Lines changed: 84 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,84 @@
---
name: "SQLite AI - AI search for documents"
description: "Parses document files to create a database on your SQLite Cloud project."
author: "SQLite AI Team"

inputs:
  connection_string:
    description: The SQLite Cloud project connection string.
    required: true
  base_url:
    description: Your website's documentation base url.
    required: true
  database:
    description: The name of the database to use on SQLite Cloud, just remember to create first a database on your project!
    required: true
  # Fix: DATABASE_PATH below referenced `inputs.database_path`, which was
  # never declared and therefore always expanded to an empty string.
  # Declared here (no default, so existing callers see identical behavior).
  # NOTE(review): confirm the path of the database file sqlite-rag produces
  # and set it as the default.
  database_path:
    description: The path of the local SQLite database file to upload.
    required: false
  source_files:
    description: The path of the files, by default it will parse every file recursively starting from the working directory.
    required: false
    # This default is expanded by the shell inside the run step below,
    # not by GitHub Actions.
    default: $(pwd)
  hf_model_id:
    description: The Hugging Face model ID to use for generating embeddings.
    required: false
    default: "Qwen/Qwen3-Embedding-0.6B-GGUF"
  hf_gguf_file:
    description: The GGUF file name for the Hugging Face model.
    required: false
    default: "Qwen3-Embedding-0.6B-f16.gguf"
  hf_gguf_update_date:
    description: The date to force update the GGUF model cache, format YYYYMMDD.
    required: false
    # Fix: was "202507014" (9 digits) — not a valid YYYYMMDD value.
    default: "20250714"
  hf_model_local_path:
    description: The local path to store the downloaded Hugging Face model.
    required: false
    default: "./models"

branding:
  icon: "search"
  color: "blue"

runs:
  using: "composite"
  steps:
    # Make the bundled scripts in src/ callable from later steps.
    - name: Set GitHub Path
      run: echo "${{ github.action_path }}/src" >> $GITHUB_PATH
      shell: bash

    - name: Set up Python
      uses: actions/setup-python@v5

    - name: Install SQLite RAG
      run: |
        python -m pip install --upgrade pip
        pip install -i https://test.pypi.org/simple/ sqlite-rag
      shell: bash

    # Cache the downloaded model between workflow runs.
    - name: Restore GGUF model cache
      uses: actions/cache@v4
      id: cache-model
      with:
        path: ${{ inputs.hf_model_local_path }}
        # Change the hf_gguf_update_date input to force update the cache.
        key: gguf-${{ inputs.hf_model_id }}-${{ inputs.hf_gguf_file }}-${{ inputs.hf_gguf_update_date }}
        # Fix: the old restore-key repeated the full primary key (date
        # included) plus "-", so it could never match a pre-existing cache
        # the primary key didn't already match. Dropping the date lets a
        # stale cache be restored and then re-saved under the new key.
        restore-keys: |
          gguf-${{ inputs.hf_model_id }}-${{ inputs.hf_gguf_file }}-

    - name: Parse documents and create the database
      # Fix: the original used `{{ inputs.source_files }}` and
      # `{{ inputs.base_url }}` (missing `$`), which GitHub Actions does not
      # expand — the shell received the braces literally.
      run: |
        sqlite-rag download-model "${{ inputs.hf_model_id }}" "${{ inputs.hf_gguf_file }}" --local-dir="${{ inputs.hf_model_local_path }}"
        sqlite-rag add \
          --recursive "${{ inputs.source_files }}" \
          --metadata '{"base_url": "${{ inputs.base_url }}"}'
      shell: bash

    - name: Upload the database to SQLite Cloud
      run: bash $GITHUB_ACTION_PATH/upload.sh
      env:
        GITHUB_ACTION_PATH: ${{ github.action_path }}
        CONNECTION_STRING: ${{ inputs.connection_string }}
        DATABASE_PATH: ${{ inputs.database_path }}
        DATABASE: ${{ inputs.database }}
      shell: bash

chinook.sqlite

866 KB
Binary file not shown.

upload.sh

Lines changed: 72 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,72 @@
#!/bin/bash
#
# Upload a local SQLite database file to a SQLite Cloud project via the
# Weblite HTTP API: request a signed upload URL, PUT the file, then PATCH
# the database record to point at the uploaded object.
#
# === Inputs (environment variables) ===
# CONNECTION_STRING: The SQLite Cloud project connection string
#                    (sqlitecloud://host[:port]/...?apikey=...).
# DATABASE: The name of the database to use on SQLite Cloud.
# DATABASE_PATH: The path to the SQLite database file to upload.

echo "Database: ${DATABASE:-}"

if [[ ! "${CONNECTION_STRING:-}" =~ ^sqlitecloud:// ]]; then
  # Security fix: do NOT echo the connection string itself — it embeds the
  # API key and this output lands in CI logs.
  echo "incorrect project connection string (expected sqlitecloud:// scheme)" >&2
  exit 1
fi

if [[ -z "${DATABASE:-}" ]]; then
  echo "database input is empty" >&2
  exit 1
fi

# Robustness: fail early with a clear message instead of letting curl choke
# on a missing/empty file path during the PUT below.
if [[ -z "${DATABASE_PATH:-}" || ! -f "${DATABASE_PATH}" ]]; then
  echo "database file not found: '${DATABASE_PATH:-}'" >&2
  exit 1
fi

# Derive the HTTPS API base from the connection string: keep the "//host"
# field, drop the sqlitecloud scheme and any trailing ":port" (the web API
# is served on the default HTTPS port).
API_BASE="https:$(echo "${CONNECTION_STRING}" | awk -F ':' '{print $2}')"
# Extract the apikey query parameter from the connection string.
API_KEY=$(echo "${CONNECTION_STRING}" | awk -F 'apikey=' '{print $2}' | awk -F '&' '{print $1}')

if [[ -z "${API_KEY}" ]]; then
  echo "API key not found in project connection string" >&2
  exit 1
fi

# --- 1. Get upload URL from v2/storage/databases/singlepart ---
STORAGE_URL="${API_BASE}/v2/storage/databases/singlepart"
STORAGE_RES=$(curl -s "${STORAGE_URL}" \
  -H "Authorization: Bearer ${API_KEY}" \
  -H 'accept: application/json')

# --- 2. Extract data.url from JSON response ---
UPLOAD_URL=""
if command -v jq >/dev/null 2>&1; then
  UPLOAD_URL=$(echo "${STORAGE_RES}" | jq -r '.data.url')
else
  # Fix: previously UPLOAD_URL was simply left unset when jq was missing,
  # producing a misleading "Failed to get upload URL" error. Fall back to a
  # best-effort extraction of the first "url" value in the JSON payload.
  UPLOAD_URL=$(echo "${STORAGE_RES}" \
    | sed -n 's/.*"url"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/p' \
    | head -n 1)
fi

if [[ -z "${UPLOAD_URL}" || "${UPLOAD_URL}" == "null" ]]; then
  echo "Failed to get upload URL: ${STORAGE_RES}" >&2
  exit 1
fi

# --- 3. Upload the database file ---
# Fix: the original checked $? after a plain curl, but curl exits 0 on HTTP
# 4xx/5xx unless -f/--fail is given — failed uploads passed silently.
if ! curl -s -f -X PUT --data-binary @"${DATABASE_PATH}" \
  -H 'Content-Type: application/octet-stream' \
  "${UPLOAD_URL}"; then
  echo "Failed to upload database file" >&2
  exit 1
fi

# --- 4. Add or replace the database via Weblite ---
PATCH_URL="${API_BASE}/v2/weblite/${DATABASE}"
PATCH_BODY="{\"location\":\"${UPLOAD_URL}\"}"
PATCH_RES=$(curl --compressed -s -X PATCH "${PATCH_URL}" \
  -H 'Content-Type: application/json' \
  -H "Authorization: Bearer ${API_KEY}" \
  -H 'accept: application/json' \
  -d "${PATCH_BODY}")

echo "${PATCH_RES}"
# NOTE(review): a bare substring match on "error" is fragile — it fires on
# any payload containing the word. Kept for compatibility with the API's
# current error shape; confirm whether the response has a structured error
# field to test instead.
if [[ "${PATCH_RES}" =~ error ]]; then
  echo "Error updating SQLite Cloud database: ${PATCH_RES}" >&2
  exit 1
fi

echo "Database uploaded successfully."

0 commit comments

Comments
 (0)