diff --git a/.github/workflows/build-and-release.yml b/.github/workflows/build-and-release.yml
index 62e90f580..c90987b38 100644
--- a/.github/workflows/build-and-release.yml
+++ b/.github/workflows/build-and-release.yml
@@ -353,3 +353,63 @@ jobs:
           releaseDraft: false
           prerelease: false
           args: ${{ matrix.args }}
+
+
+  # Create tar.gz archives for AUR (Arch Linux)
+  create-aur-tarball:
+    needs: [publish-tauri]
+    runs-on: ubuntu-22.04
+    permissions:
+      contents: write
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Get version from tag
+        id: get_version
+        run: |
+          if [ "${{ github.event_name }}" = "release" ]; then
+            TAG="${{ github.event.release.tag_name }}"
+          else
+            TAG="${{ github.event.inputs.tag }}"
+          fi
+          VERSION="${TAG#v}"
+          echo "version=${VERSION}" >> $GITHUB_OUTPUT
+          echo "tag=${TAG}" >> $GITHUB_OUTPUT
+
+      - name: Download Linux deb from release
+        run: |
+          VERSION="${{ steps.get_version.outputs.version }}"
+          TAG="${{ steps.get_version.outputs.tag }}"
+
+          # Wait for release assets to be available
+          sleep 30
+
+          # Download the deb package
+          curl -fLo pictopy.deb "https://github.com/AOSSIE-Org/PictoPy/releases/download/${TAG}/picto-py_${VERSION}_amd64.deb" || \
+          curl -fLo pictopy.deb "https://github.com/AOSSIE-Org/PictoPy/releases/download/${TAG}/PictoPy_${VERSION}_amd64.deb"
+
+      - name: Extract and repackage as tar.gz
+        run: |
+          VERSION="${{ steps.get_version.outputs.version }}"
+
+          # Extract deb package
+          mkdir -p extract
+          dpkg-deb -x pictopy.deb extract/
+
+          # Create tar.gz with proper structure
+          cd extract
+          tar -czvf "../pictopy_${VERSION}_amd64.tar.gz" .
+          cd ..
+
+          # Calculate checksum
+          sha256sum "pictopy_${VERSION}_amd64.tar.gz" > "pictopy_${VERSION}_amd64.tar.gz.sha256"
+
+      - name: Upload tar.gz to release
+        uses: softprops/action-gh-release@v1
+        with:
+          tag_name: ${{ steps.get_version.outputs.tag }}
+          files: |
+            pictopy_${{ steps.get_version.outputs.version }}_amd64.tar.gz
+            pictopy_${{ steps.get_version.outputs.version }}_amd64.tar.gz.sha256
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/publish-aur.yml b/.github/workflows/publish-aur.yml
new file mode 100644
index 000000000..28516f34c
--- /dev/null
+++ b/.github/workflows/publish-aur.yml
@@ -0,0 +1,162 @@
+name: Publish to AUR
+
+on:
+  release:
+    types: [published]
+  workflow_dispatch:
+    inputs:
+      tag:
+        description: "Tag name for the release (e.g., v1.1.0)"
+        required: true
+        type: string
+
+jobs:
+  publish-aur:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Get version from tag
+        id: get_version
+        run: |
+          if [ "${{ github.event_name }}" = "release" ]; then
+            TAG="${{ github.event.release.tag_name }}"
+          else
+            TAG="${{ github.event.inputs.tag }}"
+          fi
+          # Remove 'v' prefix if present
+          VERSION="${TAG#v}"
+          echo "version=${VERSION}" >> $GITHUB_OUTPUT
+          echo "tag=${TAG}" >> $GITHUB_OUTPUT
+
+      - name: Wait for release assets
+        run: |
+          echo "Waiting for release assets to be available..."
+          sleep 60
+
+      - name: Calculate SHA256 checksums
+        id: checksums
+        run: |
+          VERSION="${{ steps.get_version.outputs.version }}"
+          BASE_URL="https://github.com/AOSSIE-Org/PictoPy/releases/download/v${VERSION}"
+
+          # Download and calculate checksums for x86_64
+          echo "Downloading x86_64 tarball..."
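+          # If a tarball is not published for this release (e.g., no aarch64
+          # artifact), the steps below fall back to the literal value SKIP;
+          # the "Update PKGBUILD" step leaves the existing checksum untouched
+          # when it sees SKIP.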
+          if curl -fLo pictopy_amd64.tar.gz "${BASE_URL}/pictopy_${VERSION}_amd64.tar.gz"; then
+            SHA256_X86_64=$(sha256sum pictopy_amd64.tar.gz | cut -d' ' -f1)
+            echo "sha256_x86_64=${SHA256_X86_64}" >> $GITHUB_OUTPUT
+          else
+            echo "sha256_x86_64=SKIP" >> $GITHUB_OUTPUT
+          fi
+
+          # Download and calculate checksums for aarch64
+          echo "Downloading aarch64 tarball..."
+          if curl -fLo pictopy_arm64.tar.gz "${BASE_URL}/pictopy_${VERSION}_arm64.tar.gz"; then
+            SHA256_AARCH64=$(sha256sum pictopy_arm64.tar.gz | cut -d' ' -f1)
+            echo "sha256_aarch64=${SHA256_AARCH64}" >> $GITHUB_OUTPUT
+          else
+            echo "sha256_aarch64=SKIP" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Update PKGBUILD
+        run: |
+          VERSION="${{ steps.get_version.outputs.version }}"
+          SHA256_X86_64="${{ steps.checksums.outputs.sha256_x86_64 }}"
+          SHA256_AARCH64="${{ steps.checksums.outputs.sha256_aarch64 }}"
+
+          cd aur
+
+          # Update version in PKGBUILD
+          sed -i "s/^pkgver=.*/pkgver=${VERSION}/" PKGBUILD
+          sed -i "s/^pkgrel=.*/pkgrel=1/" PKGBUILD
+
+          # Update source URLs
+          sed -i "s|pictopy_[0-9.]*_amd64|pictopy_${VERSION}_amd64|g" PKGBUILD
+          sed -i "s|pictopy_[0-9.]*_arm64|pictopy_${VERSION}_arm64|g" PKGBUILD
+          sed -i "s|/v[0-9.]*/|/v${VERSION}/|g" PKGBUILD
+
+          # Update checksums
+          if [ "${SHA256_X86_64}" != "SKIP" ]; then
+            sed -i "s/^sha256sums_x86_64=.*/sha256sums_x86_64=('${SHA256_X86_64}')/" PKGBUILD
+          fi
+          if [ "${SHA256_AARCH64}" != "SKIP" ]; then
+            sed -i "s/^sha256sums_aarch64=.*/sha256sums_aarch64=('${SHA256_AARCH64}')/" PKGBUILD
+          fi
+
+          cat PKGBUILD
+
+      - name: Generate .SRCINFO
+        run: |
+          cd aur
+
+          VERSION="${{ steps.get_version.outputs.version }}"
+          SHA256_X86_64="${{ steps.checksums.outputs.sha256_x86_64 }}"
+          SHA256_AARCH64="${{ steps.checksums.outputs.sha256_aarch64 }}"
+
+          cat > .SRCINFO << EOF
+              pkgbase = pictopy
+                  pkgdesc = A privacy-focused photo management application with AI-powered tagging and face recognition
+                  pkgver = ${VERSION}
+                  pkgrel = 1
+                  url = https://github.com/AOSSIE-Org/PictoPy
+                  install = pictopy.install
+                  arch = x86_64
+                  arch = aarch64
+                  license = MIT
+                  makedepends = rust
+                  makedepends = cargo
+                  makedepends = nodejs
+                  makedepends = npm
+                  makedepends = python
+                  makedepends = python-pip
+                  makedepends = pyinstaller
+                  makedepends = webkit2gtk-4.1
+                  makedepends = base-devel
+                  makedepends = curl
+                  makedepends = wget
+                  makedepends = file
+                  makedepends = openssl
+                  makedepends = appmenu-gtk-module
+                  makedepends = librsvg
+                  depends = webkit2gtk-4.1
+                  depends = gtk3
+                  depends = glib2
+                  depends = cairo
+                  depends = pango
+                  depends = gdk-pixbuf2
+                  depends = libsoup3
+                  depends = openssl
+                  depends = hicolor-icon-theme
+                  optdepends = python-onnxruntime: For AI model inference
+                  optdepends = python-opencv: For image processing
+                  optdepends = python-numpy: For numerical operations
+                  options = !strip
+                  options = !emptydirs
+                  source_x86_64 = pictopy-${VERSION}-x86_64.tar.gz::https://github.com/AOSSIE-Org/PictoPy/releases/download/v${VERSION}/pictopy_${VERSION}_amd64.tar.gz
+                  sha256sums_x86_64 = ${SHA256_X86_64}
+                  source_aarch64 = pictopy-${VERSION}-aarch64.tar.gz::https://github.com/AOSSIE-Org/PictoPy/releases/download/v${VERSION}/pictopy_${VERSION}_arm64.tar.gz
+                  sha256sums_aarch64 = ${SHA256_AARCH64}
+
+              pkgname = pictopy
+          EOF
+
+          # Remove leading whitespace
+          sed -i 's/^    //' .SRCINFO
+
+          cat .SRCINFO
+
+      - name: Publish to AUR
+        uses: KSXGitHub/github-actions-deploy-aur@v3.0.1
+        with:
+          pkgname: pictopy
+          pkgbuild: ./aur/PKGBUILD
+          commit_username: ${{ secrets.AUR_USERNAME }}
+          commit_email: ${{ secrets.AUR_EMAIL }}
+          ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
+          commit_message: "Update to version ${{ steps.get_version.outputs.version }}"
+          ssh_keyscan_types: ed25519
+          force_push: true
+          assets: |
+            ./aur/pictopy.install
+            ./aur/.SRCINFO
diff --git a/README.md b/README.md
index 595889480..dbf643161 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,36 @@
 
 PictoPy is an advanced desktop gallery application that combines the power of Tauri, React, and Rust for the frontend with a Python backend for sophisticated image analysis and management.
 
+## Installation
+
+### Arch Linux (AUR)
+
+PictoPy is available on the Arch User Repository (AUR) for Arch-based distributions (Arch, Manjaro, EndeavourOS, etc.):
+
+```bash
+# Using yay
+yay -S pictopy
+
+# Using paru
+paru -S pictopy
+
+# Using pikaur
+pikaur -S pictopy
+```
+
+For the development version built from source:
+```bash
+yay -S pictopy-git
+```
+
+### Other Linux Distributions
+
+Download the AppImage or .deb package from the [Releases](https://github.com/AOSSIE-Org/PictoPy/releases) page.
+
+### Windows & macOS
+
+Download the installer from the [Releases](https://github.com/AOSSIE-Org/PictoPy/releases) page.
+
 # Want to Contribute? 😄
 
 
diff --git a/aur/.SRCINFO b/aur/.SRCINFO
new file mode 100644
index 000000000..5ef354a1c
--- /dev/null
+++ b/aur/.SRCINFO
@@ -0,0 +1,44 @@
+pkgbase = pictopy
+    pkgdesc = A privacy-focused photo management application with AI-powered tagging and face recognition
+    pkgver = 1.1.0
+    pkgrel = 1
+    url = https://github.com/AOSSIE-Org/PictoPy
+    install = pictopy.install
+    arch = x86_64
+    arch = aarch64
+    license = MIT
+    makedepends = rust
+    makedepends = cargo
+    makedepends = nodejs
+    makedepends = npm
+    makedepends = python
+    makedepends = python-pip
+    makedepends = pyinstaller
+    makedepends = webkit2gtk-4.1
+    makedepends = base-devel
+    makedepends = curl
+    makedepends = wget
+    makedepends = file
+    makedepends = openssl
+    makedepends = appmenu-gtk-module
+    makedepends = librsvg
+    depends = webkit2gtk-4.1
+    depends = gtk3
+    depends = glib2
+    depends = cairo
+    depends = pango
+    depends = gdk-pixbuf2
+    depends = libsoup3
+    depends = openssl
+    depends = hicolor-icon-theme
+    optdepends = python-onnxruntime: For AI model inference
+    optdepends = python-opencv: For image processing
+    optdepends = python-numpy: For numerical operations
+    options = !strip
+    options = !emptydirs
+    source_x86_64 = pictopy-1.1.0-x86_64.tar.gz::https://github.com/AOSSIE-Org/PictoPy/releases/download/v1.1.0/pictopy_1.1.0_amd64.tar.gz
+    sha256sums_x86_64 = SKIP
+    source_aarch64 = pictopy-1.1.0-aarch64.tar.gz::https://github.com/AOSSIE-Org/PictoPy/releases/download/v1.1.0/pictopy_1.1.0_arm64.tar.gz
+    sha256sums_aarch64 = SKIP
+
+pkgname = pictopy
diff --git a/aur/PKGBUILD b/aur/PKGBUILD
new file mode 100644
index 000000000..b14905656
--- /dev/null
+++ b/aur/PKGBUILD
@@ -0,0 +1,85 @@
+# Maintainer: AOSSIE
+# Contributor: PictoPy Team
+
+pkgname=pictopy
+pkgver=1.1.0
+pkgrel=1
+pkgdesc="A privacy-focused photo management application with AI-powered tagging and face recognition"
+arch=('x86_64' 'aarch64')
+url="https://github.com/AOSSIE-Org/PictoPy"
+license=('MIT')
+depends=(
+    'webkit2gtk-4.1'
+    'gtk3'
+    'glib2'
+    'cairo'
+    'pango'
+    'gdk-pixbuf2'
+    'libsoup3'
+    'openssl'
+    'hicolor-icon-theme'
+)
+makedepends=(
+    'rust'
+    'cargo'
+    'nodejs'
+    'npm'
+    'python'
+    'python-pip'
+    'pyinstaller'
+    'webkit2gtk-4.1'
+    'base-devel'
+    'curl'
+    'wget'
+    'file'
+    'openssl'
+    'appmenu-gtk-module'
+    'librsvg'
+)
+optdepends=(
+    'python-onnxruntime: For AI model inference'
+    'python-opencv: For image processing'
+    'python-numpy: For numerical operations'
+)
+options=('!strip' '!emptydirs')
+install=${pkgname}.install
+source_x86_64=("${pkgname}-${pkgver}-x86_64.tar.gz::${url}/releases/download/v${pkgver}/pictopy_${pkgver}_amd64.tar.gz")
+source_aarch64=("${pkgname}-${pkgver}-aarch64.tar.gz::${url}/releases/download/v${pkgver}/pictopy_${pkgver}_arm64.tar.gz")
+sha256sums_x86_64=('SKIP')
+sha256sums_aarch64=('SKIP')
+
+package() {
+    cd "${srcdir}"
+
+    # Install the main application binary
+    install -Dm755 "usr/bin/picto-py" "${pkgdir}/usr/bin/pictopy"
+
+    # Install libraries and resources
+    if [ -d "usr/lib" ]; then
+        cp -r usr/lib "${pkgdir}/usr/"
+    fi
+
+    # Install desktop entry
+    install -Dm644 "usr/share/applications/picto-py.desktop" \
+        "${pkgdir}/usr/share/applications/pictopy.desktop"
+
+    # Update desktop entry to use correct binary name
+    sed -i 's/Exec=picto-py/Exec=pictopy/g' "${pkgdir}/usr/share/applications/pictopy.desktop"
+
+    # Install icons
+    for size in 32x32 128x128 256x256; do
+        if [ -f "usr/share/icons/hicolor/${size}/apps/picto-py.png" ]; then
+            install -Dm644 "usr/share/icons/hicolor/${size}/apps/picto-py.png" \
+                "${pkgdir}/usr/share/icons/hicolor/${size}/apps/pictopy.png"
+        fi
+    done
+
+    # Install scalable icon if available
+    if [ -f "usr/share/icons/hicolor/scalable/apps/picto-py.svg" ]; then
+        install -Dm644 "usr/share/icons/hicolor/scalable/apps/picto-py.svg" \
+            "${pkgdir}/usr/share/icons/hicolor/scalable/apps/pictopy.svg"
+    fi
+
+    # Install license
+    install -Dm644 "${srcdir}/../LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE" 2>/dev/null || true
+}
diff --git a/aur/PKGBUILD-git b/aur/PKGBUILD-git
new file mode 100644
index 000000000..c52bebdb3
--- /dev/null
+++ b/aur/PKGBUILD-git
@@ -0,0 +1,164 @@
+# Maintainer: AOSSIE
+# Contributor: PictoPy Team
+
+pkgname=pictopy-git
+pkgver=1.1.0
+pkgrel=1
+pkgdesc="A privacy-focused photo management application with AI-powered tagging and face recognition (git version)"
+arch=('x86_64' 'aarch64')
+url="https://github.com/AOSSIE-Org/PictoPy"
+license=('MIT')
+depends=(
+    'webkit2gtk-4.1'
+    'gtk3'
+    'glib2'
+    'cairo'
+    'pango'
+    'gdk-pixbuf2'
+    'libsoup3'
+    'openssl'
+    'hicolor-icon-theme'
+)
+makedepends=(
+    'git'
+    'rust'
+    'cargo'
+    'nodejs'
+    'npm'
+    'python'
+    'python-pip'
+    'python-virtualenv'
+    'webkit2gtk-4.1'
+    'base-devel'
+    'curl'
+    'wget'
+    'file'
+    'openssl'
+    'appmenu-gtk-module'
+    'librsvg'
+)
+optdepends=(
+    'python-onnxruntime: For AI model inference'
+    'python-opencv: For image processing'
+    'python-numpy: For numerical operations'
+)
+provides=('pictopy')
+conflicts=('pictopy')
+options=('!strip' '!emptydirs')
+install=pictopy.install
+source=("${pkgname}::git+${url}.git")
+sha256sums=('SKIP')
+
+pkgver() {
+    cd "${srcdir}/${pkgname}"
+    git describe --tags --long 2>/dev/null | sed 's/^v//;s/\([^-]*-g\)/r\1/;s/-/./g' || echo "1.1.0"
+}
+
+build() {
+    cd "${srcdir}/${pkgname}"
+
+    # Build backend server
+    echo "Building backend server..."
+    cd backend
+    python -m venv venv
+    source venv/bin/activate
+    pip install --upgrade pip
+    pip install -r requirements.txt
+    pip install pyinstaller
+    pyinstaller main.py --name PictoPy_Server --onedir --distpath dist
+    mkdir -p dist/PictoPy_Server/images
+    cp -r app dist/PictoPy_Server/
+    deactivate
+    cd ..
+
+    # Build sync microservice
+    echo "Building sync microservice..."
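+    # Mirrors the backend build above: an isolated venv, a PyInstaller
+    # one-dir bundle, then the app/ sources copied next to the executable.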
+    cd sync-microservice
+    python -m venv venv
+    source venv/bin/activate
+    pip install --upgrade pip
+    pip install -r requirements.txt
+    pip install pyinstaller
+    pyinstaller main.py --name PictoPy_Sync --onedir --distpath dist
+    cp -r app dist/PictoPy_Sync/
+    deactivate
+    cd ..
+
+    # Build frontend with Tauri
+    echo "Building frontend..."
+    cd frontend
+    npm install
+    npm run build
+    cd src-tauri
+    cargo build --release
+    cd ../..
+}
+
+package() {
+    cd "${srcdir}/${pkgname}"
+
+    # Install the main application binary
+    install -Dm755 "frontend/src-tauri/target/release/picto-py" \
+        "${pkgdir}/usr/bin/pictopy"
+
+    # Install backend resources
+    install -dm755 "${pkgdir}/usr/lib/pictopy/resources/backend"
+    cp -r backend/dist/PictoPy_Server/* "${pkgdir}/usr/lib/pictopy/resources/backend/"
+
+    # Install sync microservice resources
+    install -dm755 "${pkgdir}/usr/lib/pictopy/resources/sync-microservice"
+    cp -r sync-microservice/dist/PictoPy_Sync/* "${pkgdir}/usr/lib/pictopy/resources/sync-microservice/"
+
+    # Set permissions for resources
+    chmod -R 755 "${pkgdir}/usr/lib/pictopy/resources"
+
+    # Install desktop entry
+    install -Dm644 /dev/stdin "${pkgdir}/usr/share/applications/pictopy.desktop" << EOF
+[Desktop Entry]
+Name=PictoPy
+Comment=Privacy-focused photo management with AI-powered tagging
+Exec=pictopy
+Icon=pictopy
+Terminal=false
+Type=Application
+Categories=Graphics;Photography;Viewer;
+Keywords=photo;image;gallery;ai;tagging;face;recognition;
+StartupWMClass=PictoPy
+EOF
+
+    # Install icons
+    install -Dm644 "frontend/src-tauri/icons/32x32.png" \
+        "${pkgdir}/usr/share/icons/hicolor/32x32/apps/pictopy.png"
+    install -Dm644 "frontend/src-tauri/icons/128x128.png" \
+        "${pkgdir}/usr/share/icons/hicolor/128x128/apps/pictopy.png"
+    install -Dm644 "frontend/src-tauri/icons/128x128@2x.png" \
+        "${pkgdir}/usr/share/icons/hicolor/256x256/apps/pictopy.png"
+    install -Dm644 "frontend/src-tauri/icons/icon.png" \
+        "${pkgdir}/usr/share/icons/hicolor/512x512/apps/pictopy.png"
+
+    # Install license
+    install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE" 2>/dev/null || \
+        install -Dm644 /dev/stdin "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE" << 'EOF'
+MIT License
+
+Copyright (c) AOSSIE
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+EOF
+}
diff --git a/aur/README.md b/aur/README.md
new file mode 100644
index 000000000..a9a47e0d8
--- /dev/null
+++ b/aur/README.md
@@ -0,0 +1,126 @@
+# PictoPy AUR Package
+
+This directory contains the files needed to publish PictoPy to the [Arch User Repository (AUR)](https://aur.archlinux.org/).
+
+## Package Variants
+
+### `pictopy` (Binary Package)
+The main package that installs pre-built binaries from GitHub releases. This is the recommended option for most users.
+
+### `pictopy-git` (Source Package)
+Builds PictoPy from the latest git source. Use this if you want the bleeding-edge version or need to make modifications.
+
+## Installation
+
+### Using an AUR Helper (Recommended)
+
+```bash
+# Using yay
+yay -S pictopy
+
+# Using paru
+paru -S pictopy
+
+# Using pikaur
+pikaur -S pictopy
+```
+
+### Manual Installation
+
+```bash
+# Clone the AUR repository
+git clone https://aur.archlinux.org/pictopy.git
+cd pictopy
+
+# Build and install
+makepkg -si
+```
+
+### Installing from Git Source
+
+```bash
+# Using yay
+yay -S pictopy-git
+
+# Or manually
+git clone https://aur.archlinux.org/pictopy-git.git
+cd pictopy-git
+makepkg -si
+```
+
+## Dependencies
+
+### Runtime Dependencies
+- `webkit2gtk-4.1` - WebKit rendering engine
+- `gtk3` - GTK+ 3 toolkit
+- `glib2` - GLib library
+- `cairo` - 2D graphics library
+- `pango` - Text rendering
+- `gdk-pixbuf2` - Image loading
+- `libsoup3` - HTTP library
+- `openssl` - Cryptography
+- `hicolor-icon-theme` - Icon theme
+
+### Optional Dependencies
+- `python-onnxruntime` - For AI model inference
+- `python-opencv` - For image processing
+- `python-numpy` - For numerical operations
+
+## Updating the AUR Package
+
+The package is automatically updated via GitHub Actions when a new release is published. To manually update:
+
+1. Update the `pkgver` in `PKGBUILD`
+2. Update the source URLs if needed
+3. Regenerate checksums: `updpkgsums`
+4. Regenerate `.SRCINFO`: `makepkg --printsrcinfo > .SRCINFO`
+5. Commit and push to AUR
+
+## GitHub Actions Setup
+
+To enable automatic AUR publishing, add these secrets to your GitHub repository:
+
+- `AUR_USERNAME` - Your AUR username
+- `AUR_EMAIL` - Your AUR email
+- `AUR_SSH_PRIVATE_KEY` - SSH private key registered with AUR
+
+### Generating SSH Key for AUR
+
+```bash
+# Generate a new SSH key
+ssh-keygen -t ed25519 -C "your-email@example.com" -f aur_key
+
+# Add the public key to your AUR account
+cat aur_key.pub
+# Copy this to: https://aur.archlinux.org/account/YOUR_USERNAME/edit
+
+# Add the private key as a GitHub secret (AUR_SSH_PRIVATE_KEY)
+cat aur_key
+```
+
+## Troubleshooting
+
+### Build Fails with Missing Dependencies
+```bash
+# Install all build dependencies
+sudo pacman -S --needed base-devel rust cargo nodejs npm python python-pip webkit2gtk-4.1
+```
+
+### Application Won't Start
+```bash
+# Check for missing libraries
+ldd /usr/bin/pictopy | grep "not found"
+
+# Install missing dependencies
+sudo pacman -S webkit2gtk-4.1 gtk3
+```
+
+### AI Features Not Working
+```bash
+# Install optional AI dependencies
+sudo pacman -S python-onnxruntime python-opencv python-numpy
+```
+
+## License
+
+MIT License - See the main repository for details.
diff --git a/aur/pictopy.install b/aur/pictopy.install
new file mode 100644
index 000000000..b9f8a9f13
--- /dev/null
+++ b/aur/pictopy.install
@@ -0,0 +1,47 @@
+# PictoPy post-install hooks for Arch Linux
+
+post_install() {
+    echo "==> PictoPy has been installed successfully!"
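+    # Informational output first; the icon and desktop-database caches are
+    # refreshed at the end of this hook.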
+    echo ""
+    echo "==> To start PictoPy, run: pictopy"
+    echo "==> Or find it in your application menu."
+    echo ""
+    echo "==> Note: On first run, PictoPy will download required AI models."
+    echo "==> This may take a few minutes depending on your internet connection."
+    echo ""
+
+    # Update icon cache
+    if [ -x /usr/bin/gtk-update-icon-cache ]; then
+        gtk-update-icon-cache -q -t -f /usr/share/icons/hicolor
+    fi
+
+    # Update desktop database
+    if [ -x /usr/bin/update-desktop-database ]; then
+        update-desktop-database -q /usr/share/applications
+    fi
+}
+
+post_upgrade() {
+    post_install
+    echo "==> PictoPy has been upgraded to version $1"
+}
+
+pre_remove() {
+    echo "==> Removing PictoPy..."
+}
+
+post_remove() {
+    # Update icon cache
+    if [ -x /usr/bin/gtk-update-icon-cache ]; then
+        gtk-update-icon-cache -q -t -f /usr/share/icons/hicolor
+    fi
+
+    # Update desktop database
+    if [ -x /usr/bin/update-desktop-database ]; then
+        update-desktop-database -q /usr/share/applications
+    fi
+
+    echo "==> PictoPy has been removed."
+    echo "==> User data in ~/.local/share/pictopy has been preserved."
+    echo "==> To remove all data, run: rm -rf ~/.local/share/pictopy"
+}
diff --git a/backend/app/database/albums.py b/backend/app/database/albums.py
index b9e5b149a..28790012e 100644
--- a/backend/app/database/albums.py
+++ b/backend/app/database/albums.py
@@ -1,13 +1,20 @@
-import sqlite3
 import bcrypt
-from app.config.settings import DATABASE_PATH
-from app.database.connection import get_db_connection
+from typing import List, Optional, Tuple
+
+from app.database.connection import (
+    get_db_connection,
+    get_db_transaction,
+    get_db_write_transaction,
+)
+from app.logging.setup_logging import get_logger
+
+# Initialize logger
+logger = get_logger(__name__)
 
 
 def db_create_albums_table() -> None:
-    conn = None
-    try:
-        conn = sqlite3.connect(DATABASE_PATH)
+    """Create the albums table if it doesn't exist."""
+    with get_db_transaction() as conn:
         cursor = conn.cursor()
         cursor.execute(
             """
@@ -20,16 +27,11 @@ def db_create_albums_table() -> None:
             )
             """
         )
-        conn.commit()
-    finally:
-        if conn is not None:
-            conn.close()
 
 
 def db_create_album_images_table() -> None:
-    conn = None
-    try:
-        conn = sqlite3.connect(DATABASE_PATH)
+    """Create the album_images junction table if it doesn't exist."""
+    with get_db_transaction() as conn:
         cursor = conn.cursor()
         cursor.execute(
             """
@@ -42,46 +44,59 @@ def db_create_album_images_table() -> None:
             )
             """
         )
-        conn.commit()
-    finally:
-        if conn is not None:
-            conn.close()
 
 
-def db_get_all_albums(show_hidden: bool = False):
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+def db_get_all_albums(show_hidden: bool = False) -> List[Tuple]:
+    """
+    Get all albums from the database.
+
+    Args:
+        show_hidden: Whether to include hidden albums
+
+    Returns:
+        List of album tuples
+    """
+    with get_db_connection() as conn:
+        cursor = conn.cursor()
         if show_hidden:
             cursor.execute("SELECT * FROM albums")
         else:
             cursor.execute("SELECT * FROM albums WHERE is_hidden = 0")
-        albums = cursor.fetchall()
-        return albums
-    finally:
-        conn.close()
+        return cursor.fetchall()
+
+
+def db_get_album_by_name(name: str) -> Optional[Tuple]:
+    """
+    Get an album by its name.
-def db_get_album_by_name(name: str):
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+
+    Args:
+        name: Album name to search for
+
+    Returns:
+        Album tuple if found, None otherwise
+    """
+    with get_db_connection() as conn:
+        cursor = conn.cursor()
         cursor.execute("SELECT * FROM albums WHERE album_name = ?", (name,))
         album = cursor.fetchone()
         return album if album else None
-    finally:
-        conn.close()
 
 
-def db_get_album(album_id: str):
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+def db_get_album(album_id: str) -> Optional[Tuple]:
+    """
+    Get an album by its ID.
+
+    Args:
+        album_id: Album ID to search for
+
+    Returns:
+        Album tuple if found, None otherwise
+    """
+    with get_db_connection() as conn:
+        cursor = conn.cursor()
         cursor.execute("SELECT * FROM albums WHERE album_id = ?", (album_id,))
         album = cursor.fetchone()
         return album if album else None
-    finally:
-        conn.close()
 
 
 def db_insert_album(
@@ -90,10 +105,19 @@ def db_insert_album(
     description: str = "",
     is_hidden: bool = False,
     password: str = None,
-):
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+) -> None:
+    """
+    Insert a new album into the database.
+
+    Args:
+        album_id: Unique album ID
+        album_name: Album name
+        description: Album description
+        is_hidden: Whether the album is hidden
+        password: Optional password for protected albums
+    """
+    with get_db_write_transaction() as conn:
+        cursor = conn.cursor()
         password_hash = None
         if password:
             password_hash = bcrypt.hashpw(
@@ -106,9 +130,6 @@ def db_insert_album(
             """,
             (album_id, album_name, description, int(is_hidden), password_hash),
         )
-        conn.commit()
-    finally:
-        conn.close()
 
 
 def db_update_album(
@@ -117,10 +138,19 @@ def db_update_album(
     description: str,
     is_hidden: bool,
     password: str = None,
-):
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+) -> None:
+    """
+    Update an existing album.
+
+    Args:
+        album_id: Album ID to update
+        album_name: New album name
+        description: New description
+        is_hidden: New hidden status
+        password: New password (None to keep existing)
+    """
+    with get_db_write_transaction() as conn:
+        cursor = conn.cursor()
         if password is not None:
             # Update with new password
             password_hash = bcrypt.hashpw(
@@ -144,32 +174,51 @@ def db_update_album(
             """,
             (album_name, description, int(is_hidden), album_id),
         )
-        conn.commit()
-    finally:
-        conn.close()
 
 
-def db_delete_album(album_id: str):
-    with get_db_connection() as conn:
+def db_delete_album(album_id: str) -> None:
+    """
+    Delete an album from the database.
+
+    Args:
+        album_id: Album ID to delete
+    """
+    with get_db_write_transaction() as conn:
         cursor = conn.cursor()
         cursor.execute("DELETE FROM albums WHERE album_id = ?", (album_id,))
 
 
-def db_get_album_images(album_id: str):
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+def db_get_album_images(album_id: str) -> List[str]:
+    """
+    Get all image IDs in an album.
+
+    Args:
+        album_id: Album ID to get images for
+
+    Returns:
+        List of image IDs
+    """
+    with get_db_connection() as conn:
+        cursor = conn.cursor()
         cursor.execute(
             "SELECT image_id FROM album_images WHERE album_id = ?", (album_id,)
         )
        images = cursor.fetchall()
        return [img[0] for img in images]
-    finally:
-        conn.close()
 
 
-def db_add_images_to_album(album_id: str, image_ids: list[str]):
-    with get_db_connection() as conn:
+def db_add_images_to_album(album_id: str, image_ids: List[str]) -> None:
+    """
+    Add images to an album.
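+    The whole batch is written inside a single serialized write transaction.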
+
+    Args:
+        album_id: Album ID to add images to
+        image_ids: List of image IDs to add
+
+    Raises:
+        ValueError: If none of the provided image IDs exist
+    """
+    with get_db_write_transaction() as conn:
         cursor = conn.cursor()
 
         query = (
@@ -187,8 +236,18 @@ def db_add_images_to_album(album_id: str, image_ids: list[str]):
             raise ValueError("None of the provided image IDs exist in the database.")
 
 
-def db_remove_image_from_album(album_id: str, image_id: str):
-    with get_db_connection() as conn:
+def db_remove_image_from_album(album_id: str, image_id: str) -> None:
+    """
+    Remove a single image from an album.
+
+    Args:
+        album_id: Album ID to remove image from
+        image_id: Image ID to remove
+
+    Raises:
+        ValueError: If the image is not in the album
+    """
+    with get_db_write_transaction() as conn:
         cursor = conn.cursor()
 
         cursor.execute(
@@ -206,23 +265,35 @@ def db_remove_image_from_album(album_id: str, image_id: str):
             raise ValueError("Image not found in the specified album")
 
 
-def db_remove_images_from_album(album_id: str, image_ids: list[str]):
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+def db_remove_images_from_album(album_id: str, image_ids: List[str]) -> None:
+    """
+    Remove multiple images from an album.
+
+    Args:
+        album_id: Album ID to remove images from
+        image_ids: List of image IDs to remove
+    """
+    with get_db_write_transaction() as conn:
+        cursor = conn.cursor()
         cursor.executemany(
             "DELETE FROM album_images WHERE album_id = ? AND image_id = ?",
             [(album_id, img_id) for img_id in image_ids],
         )
-        conn.commit()
-    finally:
-        conn.close()
 
 
 def verify_album_password(album_id: str, password: str) -> bool:
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+    """
+    Verify the password for a protected album.
+
+    Args:
+        album_id: Album ID to verify password for
+        password: Password to verify
+
+    Returns:
+        True if password is correct, False otherwise
+    """
+    with get_db_connection() as conn:
+        cursor = conn.cursor()
         cursor.execute(
             "SELECT password_hash FROM albums WHERE album_id = ?", (album_id,)
         )
@@ -230,5 +301,3 @@ def verify_album_password(album_id: str, password: str) -> bool:
         if not row or not row[0]:
             return False
         return bcrypt.checkpw(password.encode("utf-8"), row[0].encode("utf-8"))
-    finally:
-        conn.close()
diff --git a/backend/app/database/connection.py b/backend/app/database/connection.py
index 599526dc1..1d6232f68 100644
--- a/backend/app/database/connection.py
+++ b/backend/app/database/connection.py
@@ -1,32 +1,247 @@
+"""
+Thread-safe SQLite connection management for FastAPI backend.
+
+This module provides thread-safe database connection handling using:
+1. threading.local() for per-thread connection storage
+2. Context managers for automatic resource cleanup
+3. Connection pooling with proper isolation
+
+SQLite's threading model requires each thread to have its own connection
+to avoid race conditions and data corruption during concurrent operations.
+"""
+
 import sqlite3
+import threading
 from contextlib import contextmanager
-from typing import Generator
+from typing import Generator, Optional
 
 from app.config.settings import DATABASE_PATH
+from app.logging.setup_logging import get_logger
+
+# Initialize logger
+logger = get_logger(__name__)
+
+# Thread-local storage for database connections
+_thread_local = threading.local()
+
+# Lock for write operations to serialize database writes
+_write_lock = threading.Lock()
+
+
+class DatabaseConnectionManager:
+    """
+    Thread-safe SQLite connection manager.
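+
+    A single module-level instance is shared by the context-manager helpers
+    defined at the bottom of this module.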
+
+    Provides per-thread connections with proper lifecycle management.
+    Each thread gets its own connection to avoid SQLite threading issues.
+    """
+
+    def __init__(self, database_path: str = DATABASE_PATH):
+        self.database_path = database_path
+        self._local = threading.local()
+
+    def _get_connection(self) -> sqlite3.Connection:
+        """
+        Get or create a connection for the current thread.
+
+        Returns:
+            sqlite3.Connection: Thread-local database connection
+        """
+        if not hasattr(self._local, 'connection') or self._local.connection is None:
+            self._local.connection = self._create_connection()
+            logger.debug(f"Created new connection for thread {threading.current_thread().name}")
+        return self._local.connection
+
+    def _create_connection(self) -> sqlite3.Connection:
+        """
+        Create a new SQLite connection with proper configuration.
+
+        Returns:
+            sqlite3.Connection: Configured database connection
+        """
+        conn = sqlite3.connect(
+            self.database_path,
+            timeout=30.0,  # Wait up to 30 seconds for locks
+            isolation_level=None,  # Autocommit mode, we handle transactions manually
+        )
+
+        # Enable WAL mode for better concurrent read performance
+        conn.execute("PRAGMA journal_mode=WAL;")
+
+        # Enforce all integrity constraints
+        conn.execute("PRAGMA foreign_keys = ON;")
+        conn.execute("PRAGMA ignore_check_constraints = OFF;")
+        conn.execute("PRAGMA recursive_triggers = ON;")
+        conn.execute("PRAGMA defer_foreign_keys = OFF;")
+        conn.execute("PRAGMA case_sensitive_like = ON;")
+
+        # Optimize for concurrent access
+        conn.execute("PRAGMA busy_timeout = 30000;")  # 30 second busy timeout
+        conn.execute("PRAGMA synchronous = NORMAL;")  # Balance safety and speed
+
+        return conn
+
+    def close_connection(self) -> None:
+        """Close the connection for the current thread if it exists."""
+        if hasattr(self._local, 'connection') and self._local.connection is not None:
+            try:
+                self._local.connection.close()
+                logger.debug(f"Closed connection for thread {threading.current_thread().name}")
+            except Exception as e:
+                logger.warning(f"Error closing connection: {e}")
+            finally:
+                self._local.connection = None
+
+    @contextmanager
+    def get_connection(self) -> Generator[sqlite3.Connection, None, None]:
+        """
+        Context manager for getting a thread-safe database connection.
+
+        The connection is reused within the same thread but properly
+        isolated between different threads.
+
+        Yields:
+            sqlite3.Connection: Thread-local database connection
+        """
+        conn = self._get_connection()
+        try:
+            yield conn
+        except Exception as e:
+            logger.error(f"Database error: {e}")
+            raise
+
+    @contextmanager
+    def transaction(self) -> Generator[sqlite3.Connection, None, None]:
+        """
+        Context manager for database transactions with automatic commit/rollback.
+
+        Provides:
+        - Automatic transaction begin
+        - Commit on success
+        - Rollback on failure
+        - Thread-safe connection handling
+
+        Yields:
+            sqlite3.Connection: Thread-local database connection within a transaction
+        """
+        conn = self._get_connection()
+        try:
+            conn.execute("BEGIN IMMEDIATE")  # Acquire write lock immediately
+            yield conn
+            conn.execute("COMMIT")
+        except Exception as e:
+            conn.execute("ROLLBACK")
+            logger.error(f"Transaction rolled back due to error: {e}")
+            raise
+
+    @contextmanager
+    def write_transaction(self) -> Generator[sqlite3.Connection, None, None]:
+        """
+        Context manager for serialized write operations.
+
+        Uses a global lock to ensure only one write operation happens at a time,
+        preventing database lock contention issues.
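+
+        Reads on other threads can still proceed concurrently thanks to WAL
+        mode; only writers are serialized by this lock.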
+
+        Yields:
+            sqlite3.Connection: Thread-local database connection with write lock
+        """
+        with _write_lock:
+            with self.transaction() as conn:
+                yield conn
+
+
+# Global connection manager instance
+_connection_manager = DatabaseConnectionManager()
 
 
 @contextmanager
 def get_db_connection() -> Generator[sqlite3.Connection, None, None]:
     """
-    SQLite connection context manager with all integrity constraints enforced.
+    Get a thread-safe database connection.
+
+    This is the primary interface for obtaining database connections.
+    Each thread gets its own connection, ensuring thread safety.
+
+    Yields:
+        sqlite3.Connection: Thread-local database connection
+
+    Example:
+        with get_db_connection() as conn:
+            cursor = conn.cursor()
+            cursor.execute("SELECT * FROM images")
+            results = cursor.fetchall()
+    """
+    with _connection_manager.get_connection() as conn:
+        yield conn
+
 
-    - Enables all major relational integrity PRAGMAs
-    - Works for both single and multi-step transactions
-    - Automatically commits on success or rolls back on failure
+@contextmanager
+def get_db_transaction() -> Generator[sqlite3.Connection, None, None]:
+    """
+    Get a database connection with automatic transaction management.
+
+    Automatically commits on success or rolls back on failure.
+
+    Yields:
+        sqlite3.Connection: Thread-local database connection in a transaction
+
+    Example:
+        with get_db_transaction() as conn:
+            cursor = conn.cursor()
+            cursor.execute("INSERT INTO images ...")
+            cursor.execute("INSERT INTO image_classes ...")
+            # Automatically committed if no exception
     """
-    conn = sqlite3.connect(DATABASE_PATH)
+    with _connection_manager.transaction() as conn:
+        yield conn
 
-    # --- Strict enforcement of all relational and logical rules ---
-    conn.execute("PRAGMA foreign_keys = ON;")  # Enforce FK constraints
-    conn.execute("PRAGMA ignore_check_constraints = OFF;")  # Enforce CHECK constraints
-    conn.execute("PRAGMA recursive_triggers = ON;")  # Allow nested triggers
-    conn.execute("PRAGMA defer_foreign_keys = OFF;")  # Immediate FK checking
-    conn.execute("PRAGMA case_sensitive_like = ON;")  # Make LIKE case-sensitive
 
-    try:
+@contextmanager
+def get_db_write_transaction() -> Generator[sqlite3.Connection, None, None]:
+    """
+    Get a serialized write transaction.
+
+    Use this for write operations that need to be serialized across threads
+    to prevent lock contention.
+
+    Yields:
+        sqlite3.Connection: Thread-local database connection with write lock
+
+    Example:
+        with get_db_write_transaction() as conn:
+            cursor = conn.cursor()
+            cursor.execute("UPDATE images SET ...")
+            # Write lock released after context exits
+    """
+    with _connection_manager.write_transaction() as conn:
         yield conn
-        conn.commit()
-    except Exception:
-        conn.rollback()
-        raise
-    finally:
-        conn.close()
+
+
+def close_thread_connection() -> None:
+    """
+    Close the database connection for the current thread.
+
+    Call this when a thread is about to terminate to clean up resources.
+    """
+    _connection_manager.close_connection()
+
+
+def get_new_connection() -> sqlite3.Connection:
+    """
+    Create a new standalone database connection.
+
+    Use this only when you need a connection outside of the thread-local
+    management system (e.g., for background tasks or process pools).
+
+    The caller is responsible for closing this connection.
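+
+    Example:
+        conn = get_new_connection()
+        try:
+            conn.execute("SELECT 1")
+        finally:
+            conn.close()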
+
+    Returns:
+        sqlite3.Connection: New database connection
+    """
+    conn = sqlite3.connect(
+        DATABASE_PATH,
+        timeout=30.0,
+    )
+    conn.execute("PRAGMA foreign_keys = ON;")
+    conn.execute("PRAGMA journal_mode=WAL;")
+    conn.execute("PRAGMA busy_timeout = 30000;")
+    return conn
diff --git a/backend/app/database/face_clusters.py b/backend/app/database/face_clusters.py
index ceac7f556..5cca3f4f3 100644
--- a/backend/app/database/face_clusters.py
+++ b/backend/app/database/face_clusters.py
@@ -1,6 +1,16 @@
 import sqlite3
+import json
 from typing import Optional, List, Dict, TypedDict, Union
-from app.config.settings import DATABASE_PATH
+
+from app.database.connection import (
+    get_db_connection,
+    get_db_transaction,
+    get_db_write_transaction,
+)
+from app.logging.setup_logging import get_logger
+
+# Initialize logger
+logger = get_logger(__name__)
 
 # Type definitions
 ClusterId = str
@@ -20,9 +30,7 @@ class ClusterData(TypedDict):
 
 def db_create_clusters_table() -> None:
     """Create the face_clusters table if it doesn't exist."""
-    conn = None
-    try:
-        conn = sqlite3.connect(DATABASE_PATH)
+    with get_db_transaction() as conn:
         cursor = conn.cursor()
         cursor.execute(
             """
@@ -33,10 +41,6 @@ def db_create_clusters_table() -> None:
             )
             """
         )
-        conn.commit()
-    finally:
-        if conn is not None:
-            conn.close()
 
 
 def db_delete_all_clusters(cursor: Optional[sqlite3.Cursor] = None) -> int:
@@ -49,25 +53,16 @@ def db_delete_all_clusters(cursor: Optional[sqlite3.Cursor] = None) -> int:
     Returns:
         Number of deleted clusters
     """
-    own_connection = cursor is None
-    if own_connection:
-        conn = sqlite3.connect(DATABASE_PATH)
-        cursor = conn.cursor()
-
-    try:
+    if cursor is not None:
+        # Use provided cursor (external transaction management)
         cursor.execute("DELETE FROM face_clusters")
-        deleted_count = cursor.rowcount
-        if own_connection:
-            conn.commit()
-        return deleted_count
-    except Exception:
-        if own_connection:
-            conn.rollback()
-        print("Error deleting all clusters.")
-        raise
-    finally:
-        if own_connection:
-            conn.close()
+        return cursor.rowcount
+    else:
+        # Use our own transaction
+        with get_db_write_transaction() as conn:
+            cur = conn.cursor()
+            cur.execute("DELETE FROM face_clusters")
+            return cur.rowcount
 
 
 def db_insert_clusters_batch(
@@ -86,23 +81,19 @@ def db_insert_clusters_batch(
     if not clusters:
         return []
 
-    own_connection = cursor is None
-    if own_connection:
-        conn = sqlite3.connect(DATABASE_PATH)
-        cursor = conn.cursor()
-
-    try:
-        cluster_ids = []
-        insert_data = []
+    cluster_ids = []
+    insert_data = []
 
-        for cluster in clusters:
-            cluster_id = cluster.get("cluster_id")
-            cluster_name = cluster.get("cluster_name")
-            face_image_base64 = cluster.get("face_image_base64")
+    for cluster in clusters:
+        cluster_id = cluster.get("cluster_id")
+        cluster_name = cluster.get("cluster_name")
+        face_image_base64 = cluster.get("face_image_base64")
 
-            insert_data.append((cluster_id, cluster_name, face_image_base64))
-            cluster_ids.append(cluster_id)
+        insert_data.append((cluster_id, cluster_name, face_image_base64))
+        cluster_ids.append(cluster_id)
 
+    if cursor is not None:
+        # Use provided cursor (external transaction management)
         cursor.executemany(
             """
             INSERT INTO face_clusters (cluster_id, cluster_name, face_image_base64)
@@ -110,17 +101,19 @@ def db_insert_clusters_batch(
             """,
             insert_data,
         )
+    else:
+        # Use our own transaction
+        with get_db_write_transaction() as conn:
+            cur = conn.cursor()
+            cur.executemany(
+                """
+                INSERT INTO face_clusters (cluster_id, cluster_name, face_image_base64)
+                VALUES (?, ?, ?)
+ """, + insert_data, + ) - if own_connection: - conn.commit() - return cluster_ids - except Exception: - if own_connection: - conn.rollback() - raise - finally: - if own_connection: - conn.close() + return cluster_ids def db_get_cluster_by_id(cluster_id: ClusterId) -> Optional[ClusterData]: @@ -133,10 +126,9 @@ def db_get_cluster_by_id(cluster_id: ClusterId) -> Optional[ClusterData]: Returns: ClusterData if found, None otherwise """ - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() + with get_db_connection() as conn: + cursor = conn.cursor() - try: cursor.execute( "SELECT cluster_id, cluster_name, face_image_base64 FROM face_clusters WHERE cluster_id = ?", (cluster_id,), @@ -149,8 +141,6 @@ def db_get_cluster_by_id(cluster_id: ClusterId) -> Optional[ClusterData]: cluster_id=row[0], cluster_name=row[1], face_image_base64=row[2] ) return None - finally: - conn.close() def db_get_all_clusters() -> List[ClusterData]: @@ -160,10 +150,9 @@ def db_get_all_clusters() -> List[ClusterData]: Returns: List of ClusterData objects """ - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() + with get_db_connection() as conn: + cursor = conn.cursor() - try: cursor.execute( "SELECT cluster_id, cluster_name, face_image_base64 FROM face_clusters ORDER BY cluster_id" ) @@ -179,8 +168,6 @@ def db_get_all_clusters() -> List[ClusterData]: ) return clusters - finally: - conn.close() def db_update_cluster( @@ -194,42 +181,31 @@ def db_update_cluster( Args: cluster_id: The ID of the cluster to update cluster_name: New cluster name (optional) - conn: Optional existing database connection. If None, creates a new connection. + conn: Optional existing database connection (deprecated, kept for compatibility) Returns: True if the cluster was updated, False if not found """ - # Use provided connection or create a new one - own_connection = conn is None - if own_connection: - conn = sqlite3.connect(DATABASE_PATH) - - cursor = conn.cursor() + # Build the update query dynamically based on provided parameters + update_fields = [] + update_values = [] - try: - # Build the update query dynamically based on provided parameters - update_fields = [] - update_values = [] + if cluster_name is not None: + update_fields.append("cluster_name = ?") + update_values.append(cluster_name) - if cluster_name is not None: - update_fields.append("cluster_name = ?") - update_values.append(cluster_name) + if not update_fields: + return False - if not update_fields: - return False - - update_values.append(cluster_id) + update_values.append(cluster_id) + with get_db_write_transaction() as db_conn: + cursor = db_conn.cursor() cursor.execute( f"UPDATE face_clusters SET {', '.join(update_fields)} WHERE cluster_id = ?", update_values, ) - - updated = cursor.rowcount > 0 - conn.commit() - return updated - finally: - conn.close() + return cursor.rowcount > 0 def db_get_all_clusters_with_face_counts() -> ( @@ -241,10 +217,9 @@ def db_get_all_clusters_with_face_counts() -> ( Returns: List of dictionaries containing cluster_id, cluster_name, face_count, and face_image_base64 """ - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() + with get_db_connection() as conn: + cursor = conn.cursor() - try: cursor.execute( """ SELECT @@ -274,8 +249,6 @@ def db_get_all_clusters_with_face_counts() -> ( ) return clusters - finally: - conn.close() def db_get_images_by_cluster_id( @@ -290,10 +263,9 @@ def db_get_images_by_cluster_id( Returns: List of dictionaries containing image data with face information """ - conn = 
sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() + with get_db_connection() as conn: + cursor = conn.cursor() - try: cursor.execute( """ SELECT DISTINCT @@ -326,8 +298,6 @@ def db_get_images_by_cluster_id( bbox_json, ) = row - import json - metadata_dict = json.loads(metadata) if metadata else None # Parse bbox JSON if it exists bbox = None @@ -347,5 +317,3 @@ def db_get_images_by_cluster_id( ) return images - finally: - conn.close() diff --git a/backend/app/database/faces.py b/backend/app/database/faces.py index 0e43f7117..98ad66bcd 100644 --- a/backend/app/database/faces.py +++ b/backend/app/database/faces.py @@ -2,7 +2,16 @@ import json import numpy as np from typing import Optional, List, Dict, Union, TypedDict -from app.config.settings import DATABASE_PATH + +from app.database.connection import ( + get_db_connection, + get_db_transaction, + get_db_write_transaction, +) +from app.logging.setup_logging import get_logger + +# Initialize logger +logger = get_logger(__name__) # Type definitions FaceId = int @@ -27,10 +36,8 @@ class FaceData(TypedDict): def db_create_faces_table() -> None: - conn = None - try: - conn = sqlite3.connect(DATABASE_PATH) - conn.execute("PRAGMA foreign_keys = ON") + """Create the faces table if it doesn't exist.""" + with get_db_transaction() as conn: cursor = conn.cursor() cursor.execute( """ @@ -46,10 +53,6 @@ def db_create_faces_table() -> None: ) """ ) - conn.commit() - finally: - if conn is not None: - conn.close() def db_insert_face_embeddings( @@ -62,18 +65,19 @@ def db_insert_face_embeddings( """ Insert face embeddings with additional metadata. - Args: image_id: ID of the image this face belongs to embeddings: Face embedding vector (numpy array) confidence: Confidence score for face detection (optional) bbox: Bounding box coordinates as dict with keys: x, y, width, height (optional) cluster_id: ID of the face cluster this face belongs to (optional) + + Returns: + The face_id of the inserted record """ - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() + with get_db_write_transaction() as conn: + cursor = conn.cursor() - try: embeddings_json = json.dumps([emb.tolist() for emb in embeddings]) # Convert bbox to JSON string if provided @@ -87,11 +91,7 @@ def db_insert_face_embeddings( (image_id, cluster_id, embeddings_json, confidence, bbox_json), ) - face_id = cursor.lastrowid - conn.commit() - return face_id - finally: - conn.close() + return cursor.lastrowid def db_insert_face_embeddings_by_image_id( @@ -110,8 +110,10 @@ def db_insert_face_embeddings_by_image_id( confidence: Confidence score(s) for face detection (optional) bbox: Bounding box coordinates or list of bounding boxes (optional) cluster_id: Cluster ID(s) for the face(s) (optional) - """ + Returns: + Single face_id or list of face_ids + """ # Handle multiple faces in one image if ( isinstance(embeddings, list) @@ -142,10 +144,10 @@ def db_insert_face_embeddings_by_image_id( def get_all_face_embeddings(): - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() + """Get all face embeddings with associated image data.""" + with get_db_connection() as conn: + cursor = conn.cursor() - try: cursor.execute( """ SELECT @@ -212,8 +214,6 @@ def get_all_face_embeddings(): # Sort by path images.sort(key=lambda x: x["path"]) return images - finally: - conn.close() def db_get_faces_unassigned_clusters() -> List[Dict[str, Union[FaceId, FaceEmbedding]]]: @@ -223,10 +223,9 @@ def db_get_faces_unassigned_clusters() -> List[Dict[str, Union[FaceId, FaceEmbed Returns: List of 
dictionaries containing face_id and embeddings (as numpy array) """ - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() + with get_db_connection() as conn: + cursor = conn.cursor() - try: cursor.execute("SELECT face_id, embeddings FROM faces WHERE cluster_id IS NULL") rows = cursor.fetchall() @@ -239,8 +238,6 @@ def db_get_faces_unassigned_clusters() -> List[Dict[str, Union[FaceId, FaceEmbed faces.append({"face_id": face_id, "embeddings": embeddings}) return faces - finally: - conn.close() def db_get_all_faces_with_cluster_names() -> ( @@ -252,10 +249,9 @@ def db_get_all_faces_with_cluster_names() -> ( Returns: List of dictionaries containing face_id, embeddings (as numpy array), and cluster_name """ - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() + with get_db_connection() as conn: + cursor = conn.cursor() - try: cursor.execute( """ SELECT f.face_id, f.embeddings, fc.cluster_name @@ -281,8 +277,6 @@ def db_get_all_faces_with_cluster_names() -> ( ) return faces - finally: - conn.close() def db_update_face_cluster_ids_batch( @@ -307,19 +301,15 @@ def db_update_face_cluster_ids_batch( if not face_cluster_mapping: return - own_connection = cursor is None - if own_connection: - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() - - try: - # Prepare update data as tuples (cluster_id, face_id) - update_data = [] - for mapping in face_cluster_mapping: - face_id = mapping.get("face_id") - cluster_id = mapping.get("cluster_id") - update_data.append((cluster_id, face_id)) + # Prepare update data as tuples (cluster_id, face_id) + update_data = [] + for mapping in face_cluster_mapping: + face_id = mapping.get("face_id") + cluster_id = mapping.get("cluster_id") + update_data.append((cluster_id, face_id)) + if cursor is not None: + # Use provided cursor (external transaction management) cursor.executemany( """ UPDATE faces @@ -328,17 +318,18 @@ def db_update_face_cluster_ids_batch( """, update_data, ) - - if own_connection: - conn.commit() - except Exception: - if own_connection: - conn.rollback() - print("Error updating face cluster IDs in batch.") - raise - finally: - if own_connection: - conn.close() + else: + # Use our own transaction + with get_db_write_transaction() as conn: + cur = conn.cursor() + cur.executemany( + """ + UPDATE faces + SET cluster_id = ? + WHERE face_id = ? 
+ """, + update_data, + ) def db_get_cluster_mean_embeddings() -> List[Dict[str, Union[str, FaceEmbedding]]]: @@ -349,10 +340,9 @@ def db_get_cluster_mean_embeddings() -> List[Dict[str, Union[str, FaceEmbedding] List of dictionaries containing cluster_id and mean_embedding (as numpy array) Only returns clusters that have at least one face assigned """ - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() + with get_db_connection() as conn: + cursor = conn.cursor() - try: cursor.execute( """ SELECT f.cluster_id, f.embeddings @@ -390,5 +380,3 @@ def db_get_cluster_mean_embeddings() -> List[Dict[str, Union[str, FaceEmbedding] ) return cluster_means - finally: - conn.close() diff --git a/backend/app/database/folders.py b/backend/app/database/folders.py index 3a2ac976d..9123ee2a7 100644 --- a/backend/app/database/folders.py +++ b/backend/app/database/folders.py @@ -1,8 +1,16 @@ -import sqlite3 import os import uuid from typing import List, Tuple, Dict, Optional -from app.config.settings import DATABASE_PATH + +from app.database.connection import ( + get_db_connection, + get_db_transaction, + get_db_write_transaction, +) +from app.logging.setup_logging import get_logger + +# Initialize logger +logger = get_logger(__name__) # Type definitions FolderId = str @@ -13,9 +21,8 @@ def db_create_folders_table() -> None: - conn = None - try: - conn = sqlite3.connect(DATABASE_PATH) + """Create the folders table if it doesn't exist.""" + with get_db_transaction() as conn: cursor = conn.cursor() cursor.execute( """ @@ -30,32 +37,20 @@ def db_create_folders_table() -> None: ) """ ) - conn.commit() - finally: - if conn is not None: - conn.close() def db_insert_folders_batch(folders_data: List[FolderData]) -> None: """ Insert multiple folders in a single database transaction. folders_data: list of tuples (folder_id, folder_path, - parent_folder_id,last_modified_time, AI_Tagging, taggingCompleted) + parent_folder_id, last_modified_time, AI_Tagging, taggingCompleted) """ - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() - - try: + with get_db_write_transaction() as conn: + cursor = conn.cursor() cursor.executemany( """INSERT OR IGNORE INTO folders (folder_id, folder_path, parent_folder_id, last_modified_time, AI_Tagging, taggingCompleted) VALUES (?, ?, ?, ?, ?, ?)""", folders_data, ) - conn.commit() - except Exception as e: - conn.rollback() - raise e - finally: - conn.close() def db_insert_folder( @@ -65,13 +60,25 @@ def db_insert_folder( taggingCompleted: Optional[bool] = None, folder_id: Optional[FolderId] = None, ) -> FolderId: - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() + """ + Insert a single folder into the database. 
-    try:
-        abs_folder_path = os.path.abspath(folder_path)
-        if not os.path.isdir(abs_folder_path):
-            raise ValueError(f"Error: '{folder_path}' is not a valid directory.")
+    Args:
+        folder_path: Path to the folder
+        parent_folder_id: ID of the parent folder (optional)
+        AI_Tagging: Whether AI tagging is enabled
+        taggingCompleted: Whether tagging is completed
+        folder_id: Custom folder ID (optional, auto-generated if not provided)
+
+    Returns:
+        The folder ID of the inserted or existing folder
+    """
+    abs_folder_path = os.path.abspath(folder_path)
+    if not os.path.isdir(abs_folder_path):
+        raise ValueError(f"Error: '{folder_path}' is not a valid directory.")
+
+    with get_db_write_transaction() as conn:
+        cursor = conn.cursor()
         cursor.execute(
             "SELECT folder_id FROM folders WHERE folder_path = ?",
@@ -100,16 +107,13 @@ def db_insert_folder(
             ),
         )

-        conn.commit()
         return folder_id
-    finally:
-        conn.close()


 def db_get_folder_id_from_path(folder_path: FolderPath) -> Optional[FolderId]:
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+    """Get folder ID from folder path."""
+    with get_db_connection() as conn:
+        cursor = conn.cursor()
         abs_folder_path = os.path.abspath(folder_path)
         cursor.execute(
             "SELECT folder_id FROM folders WHERE folder_path = ?",
@@ -117,39 +121,36 @@ def db_get_folder_id_from_path(folder_path: FolderPath) -> Optional[FolderId]:
         )
         result = cursor.fetchone()
         return result[0] if result else None
-    finally:
-        conn.close()


 def db_get_folder_path_from_id(folder_id: FolderId) -> Optional[FolderPath]:
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+    """Get folder path from folder ID."""
+    with get_db_connection() as conn:
+        cursor = conn.cursor()
         cursor.execute(
             "SELECT folder_path FROM folders WHERE folder_id = ?",
             (folder_id,),
         )
         result = cursor.fetchone()
         return result[0] if result else None
-    finally:
-        conn.close()


 def db_get_all_folders() -> List[FolderPath]:
-    with sqlite3.connect(DATABASE_PATH) as conn:
-        rows = conn.execute("SELECT folder_path FROM folders").fetchall()
+    """Get all folder paths from the database."""
+    with get_db_connection() as conn:
+        cursor = conn.cursor()
+        cursor.execute("SELECT folder_path FROM folders")
+        rows = cursor.fetchall()
         return [row[0] for row in rows] if rows else []


 def db_get_all_folder_ids() -> List[FolderId]:
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+    """Get all folder IDs from the database."""
+    with get_db_connection() as conn:
+        cursor = conn.cursor()
         cursor.execute("SELECT folder_id from folders")
         rows = cursor.fetchall()
         return [row[0] for row in rows] if rows else []
-    finally:
-        conn.close()


 def db_delete_folders_batch(folder_ids: List[FolderId]) -> int:
@@ -161,13 +162,8 @@ def db_delete_folders_batch(folder_ids: List[FolderId]) -> int:
     if not folder_ids:
         return 0

-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-
-    try:
-        # Enable foreign keys for cascading deletes
-        cursor.execute("PRAGMA foreign_keys = ON;")
-        conn.commit()
+    with get_db_write_transaction() as conn:
+        cursor = conn.cursor()

         # Create placeholders for the IN clause
         placeholders = ",".join("?" * len(folder_ids))
@@ -177,25 +173,16 @@ def db_delete_folders_batch(folder_ids: List[FolderId]) -> int:
             folder_ids,
         )

-        deleted_count = cursor.rowcount
-        conn.commit()
-        return deleted_count
-    except Exception as e:
-        conn.rollback()
-        raise e
-    finally:
-        conn.close()
+        return cursor.rowcount


 def db_delete_folder(folder_path: FolderPath) -> None:
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
-        abs_folder_path = os.path.abspath(folder_path)
-        cursor.execute(
-            "PRAGMA foreign_keys = ON;"
-        )  # Important for deleting rows in image_id_mapping and images table because they reference this folder_id
-        conn.commit()
+    """Delete a folder from the database by path."""
+    abs_folder_path = os.path.abspath(folder_path)
+
+    with get_db_write_transaction() as conn:
+        cursor = conn.cursor()
+
         cursor.execute(
             "SELECT folder_id FROM folders WHERE folder_path = ?",
             (abs_folder_path,),
@@ -212,10 +199,6 @@ def db_delete_folder(folder_path: FolderPath) -> None:
             (abs_folder_path,),
         )

-        conn.commit()
-    finally:
-        conn.close()
-

 def db_update_parent_ids_for_subtree(
     root_folder_path: FolderPath, folder_map: FolderMap
@@ -225,9 +208,8 @@
     Only updates folders whose parent_folder_id is NULL.
     folder_map: dict mapping folder_path to tuple of (folder_id, parent_id)
     """
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+    with get_db_write_transaction() as conn:
+        cursor = conn.cursor()
         for folder_path, (folder_id, parent_id) in folder_map.items():
             if parent_id:
                 cursor.execute(
@@ -238,9 +220,6 @@
                     """,
                     (parent_id, folder_path),
                 )
-        conn.commit()
-    finally:
-        conn.close()


 def db_folder_exists(folder_path: FolderPath) -> bool:
@@ -248,17 +227,14 @@ def db_folder_exists(folder_path: FolderPath) -> bool:
     Check if a folder exists in the database.
     Returns True if the folder exists, False otherwise.
     """
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+    with get_db_connection() as conn:
+        cursor = conn.cursor()
         abs_path = os.path.abspath(folder_path)
         cursor.execute(
             "SELECT folder_id FROM folders WHERE folder_path = ?", (abs_path,)
         )
         result = cursor.fetchone()
         return bool(result)
-    finally:
-        conn.close()


 def db_find_parent_folder_id(folder_path: FolderPath) -> Optional[FolderId]:
@@ -270,16 +246,13 @@ def db_find_parent_folder_id(folder_path: FolderPath) -> Optional[FolderId]:
     if not parent_path or parent_path == folder_path:
         # Root directory
         return None
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
-    try:
+    with get_db_connection() as conn:
+        cursor = conn.cursor()
         cursor.execute(
             "SELECT folder_id FROM folders WHERE folder_path = ?", (parent_path,)
         )
         result = cursor.fetchone()
         return result[0] if result else None
-    finally:
-        conn.close()


 def db_update_ai_tagging_batch(
@@ -294,10 +267,9 @@
     ...
     if not folder_ids:
         return 0

-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
+    with get_db_write_transaction() as conn:
+        cursor = conn.cursor()

-    try:
         # Create placeholders for the IN clause
         placeholders = ",".join("?" * len(folder_ids))
@@ -306,14 +278,7 @@ def db_update_ai_tagging_batch(
             [ai_tagging_enabled] + folder_ids,
         )

-        updated_count = cursor.rowcount
-        conn.commit()
-        return updated_count
-    except Exception as e:
-        conn.rollback()
-        raise e
-    finally:
-        conn.close()
+        return cursor.rowcount


 def db_enable_ai_tagging_batch(folder_ids: List[FolderId]) -> int:
@@ -336,10 +301,9 @@ def db_disable_ai_tagging_batch(folder_ids: List[FolderId]) -> int:

 def db_get_folder_ids_by_path_prefix(root_path: str) -> List[FolderIdPath]:
     """Get all folder IDs and paths whose path starts with the given root path."""
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
+    with get_db_connection() as conn:
+        cursor = conn.cursor()

-    try:
         # Use path LIKE with wildcard to match all subfolders
         cursor.execute(
             """
@@ -350,8 +314,6 @@ def db_get_folder_ids_by_path_prefix(root_path: str) -> List[FolderIdPath]:
         )

         return cursor.fetchall()
-    finally:
-        conn.close()


 def db_get_folder_ids_by_paths(
@@ -369,10 +331,9 @@
     ...
     if not folder_paths:
         return {}

-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
+    with get_db_connection() as conn:
+        cursor = conn.cursor()

-    try:
         # Convert all paths to absolute paths
         abs_paths = [os.path.abspath(path) for path in folder_paths]
@@ -390,8 +351,6 @@ def db_get_folder_ids_by_paths(
         path_to_id = {folder_path: folder_id for folder_path, folder_id in results}
         return path_to_id
-    finally:
-        conn.close()


 def db_get_all_folder_details() -> (
@@ -402,10 +361,9 @@
     last_modified_time, AI_Tagging, and taggingCompleted.
     Returns list of tuples with all folder information.
     """
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
+    with get_db_connection() as conn:
+        cursor = conn.cursor()

-    try:
         cursor.execute(
             """
             SELECT folder_id, folder_path, parent_folder_id, last_modified_time, AI_Tagging, taggingCompleted
@@ -414,8 +372,6 @@ def db_get_all_folder_details() -> (
             """
         )
         return cursor.fetchall()
-    finally:
-        conn.close()


 def db_get_direct_child_folders(parent_folder_id: str) -> List[Tuple[str, str]]:
@@ -423,10 +379,9 @@ def db_get_direct_child_folders(parent_folder_id: str) -> List[Tuple[str, str]]:
     """
     Get all direct child folders (not subfolders) for a given parent folder.
     Returns list of tuples (folder_id, folder_path).
     """
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
+    with get_db_connection() as conn:
+        cursor = conn.cursor()

-    try:
         cursor.execute(
             """
             SELECT folder_id, folder_path FROM folders
@@ -436,5 +391,3 @@ def db_get_direct_child_folders(parent_folder_id: str) -> List[Tuple[str, str]]:
         )

         return cursor.fetchall()
-    finally:
-        conn.close()
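Note: the context managers used throughout this diff come from app/database/connection.py, which is not part of the patch. Inferring from usage, reads rely on an automatic close, writes expect commit-on-success and rollback-plus-reraise on error, and the deleted `PRAGMA foreign_keys = ON` statements suggest the pragma moved into this shared layer. A minimal sketch of what such a module might look like; the lock, the pragma placement, and every other implementation detail here are assumptions, not the project's actual code:

    import sqlite3
    import threading
    from contextlib import contextmanager

    from app.config.settings import DATABASE_PATH  # assumed to still exist

    _write_lock = threading.Lock()  # assumption: writers are serialized in-process


    @contextmanager
    def get_db_connection():
        """Read path: yield a connection with FKs enforced; always close it."""
        conn = sqlite3.connect(DATABASE_PATH)
        conn.execute("PRAGMA foreign_keys = ON")
        try:
            yield conn
        finally:
            conn.close()


    @contextmanager
    def get_db_transaction():
        """Commit on success, roll back and re-raise on error, always close."""
        conn = sqlite3.connect(DATABASE_PATH)
        conn.execute("PRAGMA foreign_keys = ON")
        try:
            yield conn
            conn.commit()
        except Exception:
            conn.rollback()
            raise
        finally:
            conn.close()


    @contextmanager
    def get_db_write_transaction():
        """Like get_db_transaction, but serializes writers behind one lock."""
        with _write_lock:
            with get_db_transaction() as conn:
                yield conn

Under these assumptions, the rollback/close boilerplate removed below becomes redundant, and any helper that must return False instead of raising still needs its own try/except around the with block, since the manager re-raises after rolling back.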
""" - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() + with get_db_connection() as conn: + cursor = conn.cursor() - try: cursor.execute( """ SELECT folder_id, folder_path FROM folders @@ -436,5 +391,3 @@ def db_get_direct_child_folders(parent_folder_id: str) -> List[Tuple[str, str]]: ) return cursor.fetchall() - finally: - conn.close() diff --git a/backend/app/database/images.py b/backend/app/database/images.py index ec9541a56..c46732503 100644 --- a/backend/app/database/images.py +++ b/backend/app/database/images.py @@ -1,10 +1,11 @@ # Standard library imports -import sqlite3 from typing import Any, List, Mapping, Tuple, TypedDict, Union # App-specific imports -from app.config.settings import ( - DATABASE_PATH, +from app.database.connection import ( + get_db_connection, + get_db_transaction, + get_db_write_transaction, ) from app.logging.setup_logging import get_logger @@ -42,48 +43,39 @@ class UntaggedImageRecord(TypedDict): ImageClassPair = Tuple[ImageId, ClassId] -def _connect() -> sqlite3.Connection: - conn = sqlite3.connect(DATABASE_PATH) - # Ensure ON DELETE CASCADE and other FKs are enforced - conn.execute("PRAGMA foreign_keys = ON") - return conn - - def db_create_images_table() -> None: - conn = _connect() - cursor = conn.cursor() + """Create the images and image_classes tables if they don't exist.""" + with get_db_transaction() as conn: + cursor = conn.cursor() - # Create new images table with merged fields - cursor.execute( + # Create new images table with merged fields + cursor.execute( + """ + CREATE TABLE IF NOT EXISTS images ( + id TEXT PRIMARY KEY, + path VARCHAR UNIQUE, + folder_id INTEGER, + thumbnailPath TEXT UNIQUE, + metadata TEXT, + isTagged BOOLEAN DEFAULT 0, + isFavourite BOOLEAN DEFAULT 0, + FOREIGN KEY (folder_id) REFERENCES folders(folder_id) ON DELETE CASCADE + ) """ - CREATE TABLE IF NOT EXISTS images ( - id TEXT PRIMARY KEY, - path VARCHAR UNIQUE, - folder_id INTEGER, - thumbnailPath TEXT UNIQUE, - metadata TEXT, - isTagged BOOLEAN DEFAULT 0, - isFavourite BOOLEAN DEFAULT 0, - FOREIGN KEY (folder_id) REFERENCES folders(folder_id) ON DELETE CASCADE ) - """ - ) - # Create new image_classes junction table - cursor.execute( + # Create new image_classes junction table + cursor.execute( + """ + CREATE TABLE IF NOT EXISTS image_classes ( + image_id TEXT, + class_id INTEGER, + PRIMARY KEY (image_id, class_id), + FOREIGN KEY (image_id) REFERENCES images(id) ON DELETE CASCADE, + FOREIGN KEY (class_id) REFERENCES mappings(class_id) ON DELETE CASCADE + ) """ - CREATE TABLE IF NOT EXISTS image_classes ( - image_id TEXT, - class_id INTEGER, - PRIMARY KEY (image_id, class_id), - FOREIGN KEY (image_id) REFERENCES images(id) ON DELETE CASCADE, - FOREIGN KEY (class_id) REFERENCES mappings(class_id) ON DELETE CASCADE ) - """ - ) - - conn.commit() - conn.close() def db_bulk_insert_images(image_records: List[ImageRecord]) -> bool: @@ -91,33 +83,28 @@ def db_bulk_insert_images(image_records: List[ImageRecord]) -> bool: if not image_records: return True - conn = _connect() - cursor = conn.cursor() - try: - cursor.executemany( - """ - INSERT INTO images (id, path, folder_id, thumbnailPath, metadata, isTagged) - VALUES (:id, :path, :folder_id, :thumbnailPath, :metadata, :isTagged) - ON CONFLICT(path) DO UPDATE SET - folder_id=excluded.folder_id, - thumbnailPath=excluded.thumbnailPath, - metadata=excluded.metadata, - isTagged=CASE - WHEN excluded.isTagged THEN 1 - ELSE images.isTagged - END - """, - image_records, - ) - conn.commit() + with get_db_write_transaction() 
as conn: + cursor = conn.cursor() + cursor.executemany( + """ + INSERT INTO images (id, path, folder_id, thumbnailPath, metadata, isTagged) + VALUES (:id, :path, :folder_id, :thumbnailPath, :metadata, :isTagged) + ON CONFLICT(path) DO UPDATE SET + folder_id=excluded.folder_id, + thumbnailPath=excluded.thumbnailPath, + metadata=excluded.metadata, + isTagged=CASE + WHEN excluded.isTagged THEN 1 + ELSE images.isTagged + END + """, + image_records, + ) return True except Exception as e: logger.error(f"Error inserting image records: {e}") - conn.rollback() return False - finally: - conn.close() def db_get_all_images(tagged: Union[bool, None] = None) -> List[dict]: @@ -131,87 +118,84 @@ def db_get_all_images(tagged: Union[bool, None] = None) -> List[dict]: Returns: List of dictionaries containing all image data including tags """ - conn = _connect() - cursor = conn.cursor() - try: - # Build the query with optional WHERE clause - query = """ - SELECT - i.id, - i.path, - i.folder_id, - i.thumbnailPath, - i.metadata, - i.isTagged, - i.isFavourite, - m.name as tag_name - FROM images i - LEFT JOIN image_classes ic ON i.id = ic.image_id - LEFT JOIN mappings m ON ic.class_id = m.class_id - """ - - params = [] - if tagged is not None: - query += " WHERE i.isTagged = ?" - params.append(tagged) - - query += " ORDER BY i.path, m.name" - - cursor.execute(query, params) - - results = cursor.fetchall() - - # Group results by image ID - images_dict = {} - for ( - image_id, - path, - folder_id, - thumbnail_path, - metadata, - is_tagged, - is_favourite, - tag_name, - ) in results: - if image_id not in images_dict: - # Safely parse metadata JSON -> dict - from app.utils.images import image_util_parse_metadata - - metadata_dict = image_util_parse_metadata(metadata) - - images_dict[image_id] = { - "id": image_id, - "path": path, - "folder_id": str(folder_id), - "thumbnailPath": thumbnail_path, - "metadata": metadata_dict, - "isTagged": bool(is_tagged), - "isFavourite": bool(is_favourite), - "tags": [], - } - - # Add tag if it exists (avoid duplicates) - if tag_name and tag_name not in images_dict[image_id]["tags"]: - images_dict[image_id]["tags"].append(tag_name) - - # Convert to list and set tags to None if empty - images = [] - for image_data in images_dict.values(): - if not image_data["tags"]: - image_data["tags"] = None - images.append(image_data) - - # Sort by path - images.sort(key=lambda x: x["path"]) + with get_db_connection() as conn: + cursor = conn.cursor() + + # Build the query with optional WHERE clause + query = """ + SELECT + i.id, + i.path, + i.folder_id, + i.thumbnailPath, + i.metadata, + i.isTagged, + i.isFavourite, + m.name as tag_name + FROM images i + LEFT JOIN image_classes ic ON i.id = ic.image_id + LEFT JOIN mappings m ON ic.class_id = m.class_id + """ - return images + params = [] + if tagged is not None: + query += " WHERE i.isTagged = ?" 
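Note on db_get_all_images above: the two LEFT JOINs fan each image out into one row per tag (or a single row with a NULL tag), and the grouping loop folds those rows back into one record per image. A small illustration with invented values:

    # Rows as returned by the LEFT JOIN query, one per (image, tag) pair:
    rows = [
        ("img-1", "/photos/a.jpg", 7, "/thumbs/a.jpg", "{}", 1, 0, "cat"),
        ("img-1", "/photos/a.jpg", 7, "/thumbs/a.jpg", "{}", 1, 0, "person"),
        ("img-2", "/photos/b.jpg", 7, "/thumbs/b.jpg", "{}", 0, 0, None),
    ]
    # After the grouping loop:
    #   img-1 -> {"id": "img-1", ..., "tags": ["cat", "person"]}
    #   img-2 -> {"id": "img-2", ..., "tags": None}  (empty tag list normalized to None)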


 def db_get_untagged_images() -> List[UntaggedImageRecord]:
@@ -224,10 +208,9 @@ def db_get_untagged_images() -> List[UntaggedImageRecord]:
     Returns:
         List of dictionaries containing image data: id, path, folder_id, thumbnailPath, metadata
     """
-    conn = _connect()
-    cursor = conn.cursor()
+    with get_db_connection() as conn:
+        cursor = conn.cursor()

-    try:
         cursor.execute(
             """
             SELECT i.id, i.path, i.folder_id, i.thumbnailPath, i.metadata
@@ -257,9 +240,6 @@ def db_get_untagged_images() -> List[UntaggedImageRecord]:

         return untagged_images

-    finally:
-        conn.close()
-

 def db_update_image_tagged_status(image_id: ImageId, is_tagged: bool = True) -> bool:
     """
@@ -272,22 +252,17 @@ def db_update_image_tagged_status(image_id: ImageId, is_tagged: bool = True) -> bool:
     Returns:
         True if update was successful, False otherwise
     """
-    conn = _connect()
-    cursor = conn.cursor()
-    try:
-        cursor.execute(
-            "UPDATE images SET isTagged = ? WHERE id = ?",
-            (is_tagged, image_id),
-        )
-        conn.commit()
-        return cursor.rowcount > 0
+    try:
+        with get_db_write_transaction() as conn:
+            cursor = conn.cursor()
+            cursor.execute(
+                "UPDATE images SET isTagged = ? WHERE id = ?",
+                (is_tagged, image_id),
+            )
+            return cursor.rowcount > 0
     except Exception as e:
         logger.error(f"Error updating image tagged status: {e}")
-        conn.rollback()
         return False
-    finally:
-        conn.close()


 def db_insert_image_classes_batch(image_class_pairs: List[ImageClassPair]) -> bool:
@@ -303,25 +278,20 @@ def db_insert_image_classes_batch(image_class_pairs: List[ImageClassPair]) -> bool:
     ...
     if not image_class_pairs:
         return True

-    conn = _connect()
-    cursor = conn.cursor()
-    try:
-        cursor.executemany(
-            """
-            INSERT OR IGNORE INTO image_classes (image_id, class_id)
-            VALUES (?, ?)
-            """,
-            image_class_pairs,
-        )
-        conn.commit()
-        return True
+    try:
+        with get_db_write_transaction() as conn:
+            cursor = conn.cursor()
+            cursor.executemany(
+                """
+                INSERT OR IGNORE INTO image_classes (image_id, class_id)
+                VALUES (?, ?)
+                """,
+                image_class_pairs,
+            )
+            return True
     except Exception as e:
         logger.error(f"Error inserting image classes: {e}")
-        conn.rollback()
         return False
-    finally:
-        conn.close()


 def db_get_images_by_folder_ids(
@@ -339,26 +309,24 @@
     ...
     if not folder_ids:
         return []

-    conn = _connect()
-    cursor = conn.cursor()
-    try:
-        # Create placeholders for the IN clause
-        placeholders = ",".join("?" for _ in folder_ids)
-        cursor.execute(
-            f"""
-            SELECT id, path, thumbnailPath
-            FROM images
-            WHERE folder_id IN ({placeholders})
-            """,
-            folder_ids,
-        )
-        return cursor.fetchall()
+    try:
+        with get_db_connection() as conn:
+            cursor = conn.cursor()
+
+            # Create placeholders for the IN clause
+            placeholders = ",".join("?" for _ in folder_ids)
+            cursor.execute(
+                f"""
+                SELECT id, path, thumbnailPath
+                FROM images
+                WHERE folder_id IN ({placeholders})
+                """,
+                folder_ids,
+            )
+            return cursor.fetchall()
     except Exception as e:
         logger.error(f"Error getting images by folder IDs: {e}")
         return []
-    finally:
-        conn.close()


 def db_delete_images_by_ids(image_ids: List[ImageId]) -> bool:
@@ -375,47 +343,50 @@ def db_delete_images_by_ids(image_ids: List[ImageId]) -> bool:
     ...
     if not image_ids:
         return True

-    conn = _connect()
-    cursor = conn.cursor()
-    try:
-        # Create placeholders for the IN clause
-        placeholders = ",".join("?" for _ in image_ids)
-        cursor.execute(
-            f"DELETE FROM images WHERE id IN ({placeholders})",
-            image_ids,
-        )
-        conn.commit()
-        logger.info(f"Deleted {cursor.rowcount} obsolete image(s) from database")
-        return True
+    try:
+        with get_db_write_transaction() as conn:
+            cursor = conn.cursor()
+
+            # Create placeholders for the IN clause
+            placeholders = ",".join("?" for _ in image_ids)
+            cursor.execute(
+                f"DELETE FROM images WHERE id IN ({placeholders})",
+                image_ids,
+            )
+            logger.info(f"Deleted {cursor.rowcount} obsolete image(s) from database")
+            return True
     except Exception as e:
         logger.error(f"Error deleting images: {e}")
-        conn.rollback()
         return False
-    finally:
-        conn.close()


 def db_toggle_image_favourite_status(image_id: str) -> bool:
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
+    """
+    Toggle the favourite status of an image.
+
+    Args:
+        image_id: ID of the image to toggle
+
+    Returns:
+        True if toggle was successful, False otherwise
+    """
     try:
-        cursor.execute("SELECT id FROM images WHERE id = ?", (image_id,))
-        if not cursor.fetchone():
-            return False
-        cursor.execute(
-            """
-            UPDATE images
-            SET isFavourite = CASE WHEN isFavourite = 1 THEN 0 ELSE 1 END
-            WHERE id = ?
-            """,
-            (image_id,),
-        )
-        conn.commit()
-        return cursor.rowcount > 0
+        with get_db_write_transaction() as conn:
+            cursor = conn.cursor()
+
+            cursor.execute("SELECT id FROM images WHERE id = ?", (image_id,))
+            if not cursor.fetchone():
+                return False
+
+            cursor.execute(
+                """
+                UPDATE images
+                SET isFavourite = CASE WHEN isFavourite = 1 THEN 0 ELSE 1 END
+                WHERE id = ?
+                """,
+                (image_id,),
+            )
+            return cursor.rowcount > 0
     except Exception as e:
         logger.error(f"Database error: {e}")
-        conn.rollback()
         return False
-    finally:
-        conn.close()
+ """, + (image_id,), + ) + return cursor.rowcount > 0 except Exception as e: logger.error(f"Database error: {e}") - conn.rollback() return False - finally: - conn.close() diff --git a/backend/app/database/metadata.py b/backend/app/database/metadata.py index d431f6e2b..f49834d3b 100644 --- a/backend/app/database/metadata.py +++ b/backend/app/database/metadata.py @@ -2,14 +2,21 @@ import sqlite3 import json from typing import Optional, Dict, Any -from app.config.settings import DATABASE_PATH + +from app.database.connection import ( + get_db_connection, + get_db_transaction, + get_db_write_transaction, +) +from app.logging.setup_logging import get_logger + +# Initialize logger +logger = get_logger(__name__) def db_create_metadata_table() -> None: """Create the metadata table if it doesn't exist.""" - conn = None - try: - conn = sqlite3.connect(DATABASE_PATH) + with get_db_transaction() as conn: cursor = conn.cursor() cursor.execute( """ @@ -24,11 +31,6 @@ def db_create_metadata_table() -> None: if cursor.fetchone()[0] == 0: cursor.execute("INSERT INTO metadata (metadata) VALUES (?)", ("{}",)) - conn.commit() - finally: - if conn is not None: - conn.close() - def db_get_metadata() -> Optional[Dict[str, Any]]: """ @@ -37,10 +39,9 @@ def db_get_metadata() -> Optional[Dict[str, Any]]: Returns: Dictionary containing metadata, or None if not found """ - conn = sqlite3.connect(DATABASE_PATH) - cursor = conn.cursor() + with get_db_connection() as conn: + cursor = conn.cursor() - try: cursor.execute("SELECT metadata FROM metadata LIMIT 1") row = cursor.fetchone() @@ -51,8 +52,6 @@ def db_get_metadata() -> Optional[Dict[str, Any]]: except json.JSONDecodeError: return None return None - finally: - conn.close() def db_update_metadata( @@ -63,33 +62,22 @@ def db_update_metadata( Args: metadata: Dictionary containing metadata to store - cursor: Optional existing database cursor. If None, creates a new connection. 
diff --git a/backend/app/database/yolo_mapping.py b/backend/app/database/yolo_mapping.py
index af5c18927..06639ee26 100644
--- a/backend/app/database/yolo_mapping.py
+++ b/backend/app/database/yolo_mapping.py
@@ -1,24 +1,22 @@
-import sqlite3
-from app.config.settings import DATABASE_PATH
+from app.database.connection import get_db_transaction
 from app.utils.YOLO import class_names
+from app.logging.setup_logging import get_logger

+# Initialize logger
+logger = get_logger(__name__)

-def db_create_YOLO_classes_table():
-    # print current directory:
-    import os
-
-    print(os.getcwd())
-    conn = None
-    try:
-        conn = sqlite3.connect(DATABASE_PATH)
+
+def db_create_YOLO_classes_table() -> None:
+    """Create the YOLO class mappings table and populate it with class names."""
+    with get_db_transaction() as conn:
         cursor = conn.cursor()
         cursor.execute(
             """
-                CREATE TABLE IF NOT EXISTS mappings (
+            CREATE TABLE IF NOT EXISTS mappings (
                 class_id INTEGER PRIMARY KEY,
                 name VARCHAR NOT NULL
-                )
-                """
+            )
+            """
         )
         for class_id, name in enumerate(class_names):
             cursor.execute(
@@ -28,8 +26,3 @@ def db_create_YOLO_classes_table():
                     name,
                 ),  # Keep class_id as integer to match image_classes.class_id
             )
-
-        conn.commit()
-    finally:
-        if conn is not None:
-            conn.close()
diff --git a/backend/app/utils/face_clusters.py b/backend/app/utils/face_clusters.py
index 4c373c981..4b2058e2b 100644
--- a/backend/app/utils/face_clusters.py
+++ b/backend/app/utils/face_clusters.py
@@ -607,31 +607,33 @@ def _update_cluster_face_image(
     Returns:
         True if update was successful, False otherwise
     """
-    own_connection = cursor is None
-    if own_connection:
-        conn = sqlite3.connect(DATABASE_PATH)
-        cursor = conn.cursor()
-
-    try:
-        cursor.execute(
-            "UPDATE face_clusters SET face_image_base64 = ? WHERE cluster_id = ?",
-            (face_image_base64, cluster_id),
-        )
-        success = cursor.rowcount > 0
-        if own_connection:
-            conn.commit()
-        return success
-    except Exception as e:
-        logger.error(f"Error updating face image for cluster {cluster_id}: {e}")
-        if own_connection:
-            conn.rollback()
+    if cursor is not None:
+        # Use provided cursor (external transaction management)
+        try:
+            cursor.execute(
+                "UPDATE face_clusters SET face_image_base64 = ? WHERE cluster_id = ?",
+                (face_image_base64, cluster_id),
+            )
+            return cursor.rowcount > 0
+        except Exception as e:
+            logger.error(f"Error updating face image for cluster {cluster_id}: {e}")
+            raise
+    else:
+        # Use thread-safe connection manager
+        from app.database.connection import get_db_write_transaction
+
+        try:
+            with get_db_write_transaction() as conn:
+                cur = conn.cursor()
+                cur.execute(
+                    "UPDATE face_clusters SET face_image_base64 = ? WHERE cluster_id = ?",
+                    (face_image_base64, cluster_id),
+                )
+                return cur.rowcount > 0
+        except Exception as e:
+            logger.error(f"Error updating face image for cluster {cluster_id}: {e}")
             return False
-        raise
-    finally:
-        if own_connection:
-            conn.close()
-

 def _get_cluster_face_data(
     cluster_uuid: str, cursor: sqlite3.Cursor
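_update_cluster_face_image keeps the same dual-mode shape as db_update_metadata, with one deliberate asymmetry: given a caller's cursor it re-raises so the enclosing transaction fails as a unit, while standalone it logs and returns False. A usage sketch; the keyword names and sample values are assumptions, since the full signature sits above this hunk:

    from app.database.connection import get_db_write_transaction

    # Standalone: opens its own write transaction; returns False on failure.
    _update_cluster_face_image(cluster_id="cluster-42", face_image_base64="...")

    # Caller-managed: errors propagate, so every write in the block is atomic.
    with get_db_write_transaction() as conn:
        cur = conn.cursor()
        _update_cluster_face_image(
            cluster_id="cluster-42", face_image_base64="...", cursor=cur
        )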
diff --git a/backend/test.py b/backend/test.py
index 8f9b5da22..ff6381ca6 100644
--- a/backend/test.py
+++ b/backend/test.py
@@ -1,35 +1,33 @@
-import sqlite3
 import json
 import numpy as np
 from sklearn.cluster import DBSCAN
-from app.config.settings import DATABASE_PATH
+from app.database.connection import get_db_connection


 def get_all_face_embeddings():
-    conn = sqlite3.connect(DATABASE_PATH)
-    cursor = conn.cursor()
+    with get_db_connection() as conn:
+        cursor = conn.cursor()

-    cursor.execute("SELECT image_id, embeddings FROM faces")
-    results = cursor.fetchall()
+        cursor.execute("SELECT image_id, embeddings FROM faces")
+        results = cursor.fetchall()

-    all_embeddings = []
-    image_paths = []
-    skipped_images = []
-    for image_id, embeddings_json in results:
-        cursor.execute("SELECT path FROM image_id_mapping WHERE id = ?", (image_id,))
-        image_path = cursor.fetchone()[0]
-        embeddings = np.array(json.loads(embeddings_json))
+        all_embeddings = []
+        image_paths = []
+        skipped_images = []
+        for image_id, embeddings_json in results:
+            cursor.execute("SELECT path FROM image_id_mapping WHERE id = ?", (image_id,))
+            image_path = cursor.fetchone()[0]
+            embeddings = np.array(json.loads(embeddings_json))

-        # Skip images with more than 10 faces
-        if len(embeddings) > 10:
-            skipped_images.append(image_path)
-            continue
+            # Skip images with more than 10 faces
+            if len(embeddings) > 10:
+                skipped_images.append(image_path)
+                continue

-        all_embeddings.extend(embeddings)
-        image_paths.extend([image_path] * len(embeddings))
+            all_embeddings.extend(embeddings)
+            image_paths.extend([image_path] * len(embeddings))

-    conn.close()
-    return np.array(all_embeddings), image_paths, skipped_images
+        return np.array(all_embeddings), image_paths, skipped_images


 def main():
diff --git a/frontend/src-tauri/tauri.conf.json b/frontend/src-tauri/tauri.conf.json
index 8ad815dfd..b56801605 100644
--- a/frontend/src-tauri/tauri.conf.json
+++ b/frontend/src-tauri/tauri.conf.json
@@ -7,11 +7,17 @@
   },
   "bundle": {
     "active": true,
-    "targets": ["nsis", "deb", "app"],
+    "targets": "all",
     "createUpdaterArtifacts": true,
     "linux": {
       "deb": {
         "postInstallScript": "./postinstall.sh"
+      },
+      "appimage": {
+        "bundleMediaFramework": true
+      },
+      "rpm": {
+        "release": "1"
       }
     },
     "icon": [