diff --git a/README.md b/README.md
index 9b6983c..7177e3d 100644
--- a/README.md
+++ b/README.md
@@ -35,6 +35,7 @@ docker run -d \
   -e DB_FILE=/config/api.db `#optional` \
   -e INVALIDATE_HOURS=24 `#optional` \
   -e PAT=token `#optional` \
+  -e URL=http://localhost:8000 `#optional` \
   -p 8000:8000 \
   -v /path/to/lsio-api/config:/config \
   --restart unless-stopped \
diff --git a/readme-vars.yml b/readme-vars.yml
index e3863eb..aade1b4 100644
--- a/readme-vars.yml
+++ b/readme-vars.yml
@@ -40,6 +40,7 @@ full_custom_readme: |
     -e DB_FILE=/config/api.db `#optional` \
     -e INVALIDATE_HOURS=24 `#optional` \
     -e PAT=token `#optional` \
+    -e URL=http://localhost:8000 `#optional` \
     -p 8000:8000 \
     -v /path/to/lsio-api/config:/config \
     --restart unless-stopped \
diff --git a/root/app/api.py b/root/app/api.py
index e0a13ec..e176b6a 100644
--- a/root/app/api.py
+++ b/root/app/api.py
@@ -1,17 +1,49 @@
-from fastapi import FastAPI
+from fastapi import FastAPI, HTTPException
+from fastapi.openapi.docs import get_swagger_ui_html
+from fastapi.responses import JSONResponse
+from fastapi.staticfiles import StaticFiles
 from keyvaluestore import KeyValueStore
-from models import ImagesResponse, ImagesData
+from models import ImagesResponse
+from pydantic import ValidationError
+import json
+import os
+import traceback

-api = FastAPI(docs_url="/")
+URL = os.environ.get("URL", "http://localhost:8000")
+api = FastAPI(docs_url=None, redoc_url=None, version="1.0", title="LinuxServer API", servers=[{"url": URL}])
+api.mount("/static", StaticFiles(directory="static"), name="static")
+
+
+@api.get("/", include_in_schema=False)
+async def swagger_ui_html():
+    return get_swagger_ui_html(openapi_url="/openapi.json", title="LinuxServer API", swagger_favicon_url="/static/logo.png")

 @api.get("/health", summary="Get the health status")
 async def health():
     return "Success"

-@api.get("/api/v1/images", response_model=ImagesResponse, summary="Get a list of images")
-async def images():
+async def get_images():
     with KeyValueStore() as kv:
-        return ImagesResponse(status="OK", data=ImagesData.model_validate_json(kv["images"]))
+        return kv["images"]
+
+@api.get("/api/v1/images", response_model=ImagesResponse, summary="Get a list of images", response_model_exclude_none=True)
+async def images(include_config: bool = False, include_deprecated: bool = False):
+    try:
+        response = await get_images()
+        image_response = ImagesResponse.model_validate_json(response)
+        if not include_deprecated:
+            image_response.exclude_deprecated()
+        if not include_config:
+            image_response.exclude_config()
+        return image_response
+    except ValidationError:
+        print(traceback.format_exc())
+        response = await get_images()
+        content = json.loads(response)
+        return JSONResponse(content=content)
+    except Exception:
+        print(traceback.format_exc())
+        raise HTTPException(status_code=404, detail="Not found")

 if __name__ == "__main__":
     api.run()
diff --git a/root/app/keyvaluestore.py b/root/app/keyvaluestore.py
index 3322021..396393c 100644
--- a/root/app/keyvaluestore.py
+++ b/root/app/keyvaluestore.py
@@ -2,15 +2,27 @@
 import sqlite3

 DB_FILE = os.environ.get("DB_FILE", "/config/api.db")

+# Increment to drop tables and start over
+DB_SCHEMA_VERSION = 1
+def set_db_schema():
+    conn = sqlite3.connect(DB_FILE)
+    conn.execute("CREATE TABLE IF NOT EXISTS db_schema (key TEXT UNIQUE, version INTEGER DEFAULT 0)")
+    is_updated = conn.execute(f"SELECT 1 FROM db_schema WHERE version = {DB_SCHEMA_VERSION}").fetchone() is not None
+    if not is_updated:
+        conn.execute(f"DROP TABLE IF EXISTS kv")
+    conn.execute(f"REPLACE INTO db_schema (key, version) VALUES('schema_version', {DB_SCHEMA_VERSION})")
conn.execute(f"REPLACE INTO db_schema (key, version) VALUES('schema_version', {DB_SCHEMA_VERSION})") + conn.commit() + conn.close() + class KeyValueStore(dict): def __init__( self, invalidate_hours=24, readonly=True): self.invalidate_hours = invalidate_hours self.readonly = readonly if not readonly: self.conn = sqlite3.connect(DB_FILE) - self.conn.execute("CREATE TABLE IF NOT EXISTS kv (key TEXT UNIQUE, value TEXT, updated_at TEXT)") + self.conn.execute("CREATE TABLE IF NOT EXISTS kv (key TEXT UNIQUE, value TEXT, updated_at TEXT, schema_version INTEGER)") self.conn.commit() self.conn.close() def __enter__(self): @@ -21,10 +33,13 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.conn.commit() self.conn.close() def __contains__(self, key): - return self.conn.execute(f"SELECT 1 FROM kv WHERE key = '{key}' AND updated_at >= DATETIME('now', '-{self.invalidate_hours} hours')").fetchone() is not None + where_clause = "" if self.invalidate_hours == 0 else f" AND updated_at >= DATETIME('now', '-{self.invalidate_hours} hours', 'utc')" + return self.conn.execute(f"SELECT 1 FROM kv WHERE key = '{key}' {where_clause}").fetchone() is not None def __getitem__(self, key): item = self.conn.execute("SELECT value FROM kv WHERE key = ?", (key,)).fetchone() return item[0] if item else None - def __setitem__(self, key, value): - self.conn.execute("REPLACE INTO kv (key, value, updated_at) VALUES (?,?, CURRENT_TIMESTAMP)", (key, value)) + def set_value(self, key, value, schema_version): + self.conn.execute("REPLACE INTO kv (key, value, updated_at, schema_version) VALUES (?, ?, DATETIME('now', 'utc'), ?)", (key, value, schema_version)) self.conn.commit() + def is_current_schema(self, key, schema_version): + return self.conn.execute(f"SELECT 1 FROM kv WHERE key = '{key}' AND schema_version = {schema_version}").fetchone() is not None diff --git a/root/app/lsio_github.py b/root/app/lsio_github.py new file mode 100644 index 0000000..822fcaf --- /dev/null +++ b/root/app/lsio_github.py @@ -0,0 +1,38 @@ +from github import Auth +from github import Github + +import os +import yaml + +PAT = os.environ.get("PAT", None) +GH_AUTH = Auth.Token(PAT) if PAT else None +GH = Github(auth=GH_AUTH) + + +def get_repos(): + org = GH.get_organization("linuxserver") + return org.get_repos() + +def get_file(repo, branch, path, is_yaml=False): + try: + content = repo.get_contents(path, ref=branch).decoded_content + return yaml.load(content, Loader=yaml.CLoader) if is_yaml else content + except: + return None + +def get_last_stable_release(repo): + for release in repo.get_releases(): + if release.prerelease: + continue + return release.tag_name, str(release.published_at) + return "latest", str(repo.pushed_at) + +def get_readme_vars(repo): + return (get_file(repo, "master", "readme-vars.yml", is_yaml=True) or + get_file(repo, "main", "readme-vars.yml", is_yaml=True) or + get_file(repo, "develop", "readme-vars.yml", is_yaml=True) or + get_file(repo, "nightly", "readme-vars.yml", is_yaml=True)) + +def print_rate_limit(): + ratelimit = GH.get_rate_limit().core + print(f"Github ratelimit - {ratelimit.remaining}/{ratelimit.limit} resets at {ratelimit.reset}") diff --git a/root/app/models.py b/root/app/models.py index 5d13269..d0f2956 100644 --- a/root/app/models.py +++ b/root/app/models.py @@ -1,12 +1,111 @@ from pydantic import BaseModel +# Increment when updating schema +IMAGES_SCHEMA_VERSION = 1 + + +class Tag(BaseModel): + tag: str + desc: str + +class Architecture(BaseModel): + arch: str + tag: str + +class Changelog(BaseModel): + date: 
+    desc: str
+
+class Volume(BaseModel):
+    path: str
+    host_path: str
+    desc: str
+    optional: bool
+
+class Port(BaseModel):
+    external: str
+    internal: str
+    desc: str
+    optional: bool
+
+class EnvVar(BaseModel):
+    name: str
+    value: str
+    desc: str
+    optional: bool
+
+class EnvVar(BaseModel):
+    name: str
+    value: str
+    desc: str
+    optional: bool
+
+class Custom(BaseModel):
+    name: str
+    name_compose: str
+    value: str | list[str]
+    desc: str
+    optional: bool
+
+class SecurityOpt(BaseModel):
+    run_var: str
+    compose_var: str
+    desc: str
+    optional: bool
+
+class Device(BaseModel):
+    path: str
+    host_path: str
+    desc: str
+    optional: bool
+
+class Cap(BaseModel):
+    cap_add: str
+    desc: str
+    optional: bool
+
+class Hostname(BaseModel):
+    hostname: str
+    desc: str
+    optional: bool
+
+class MacAddress(BaseModel):
+    mac_address: str
+    desc: str
+    optional: bool
+
+class Config(BaseModel):
+    application_setup: str | None = None
+    readonly_supported: bool | None = None
+    nonroot_supported: bool | None = None
+    privileged: bool | None = None
+    networking: str | None = None
+    hostname: Hostname | None = None
+    mac_address: MacAddress | None = None
+    env_vars: list[EnvVar] | None = None
+    volumes: list[Volume] | None = None
+    ports: list[Port] | None = None
+    custom: list[Custom] | None = None
+    security_opt: list[SecurityOpt] | None = None
+    devices: list[Device] | None = None
+    caps: list[Cap] | None = None


 class Image(BaseModel):
     name: str
+    github_url: str
+    project_url: str | None = None
+    project_logo: str | None = None
+    description: str
     version: str
+    version_timestamp: str
     category: str
     stable: bool
     deprecated: bool
+    stars: int
+    tags: list[Tag]
+    architectures: list[Architecture]
+    changelog: list[Changelog] | None = None
+    config: Config | None = None

 class Repository(BaseModel):
     linuxserver: list[Image]
@@ -16,4 +115,13 @@ class ImagesData(BaseModel):

 class ImagesResponse(BaseModel):
     status: str
+    last_updated: str
     data: ImagesData
+
+    def exclude_config(self):
+        for image in self.data.repositories.linuxserver:
+            image.config = None
+
+    def exclude_deprecated(self):
+        images = self.data.repositories.linuxserver
+        self.data.repositories.linuxserver = list(filter(lambda image: not image.deprecated, images))
diff --git a/root/app/static/logo.png b/root/app/static/logo.png
new file mode 100644
index 0000000..ccd9666
Binary files /dev/null and b/root/app/static/logo.png differ
diff --git a/root/app/updater.py b/root/app/updater.py
index e6d9f22..c387321 100644
--- a/root/app/updater.py
+++ b/root/app/updater.py
@@ -1,70 +1,225 @@
-from github import Auth
-from github import Github
-from keyvaluestore import KeyValueStore
-from models import Image, Repository, ImagesData
+import lsio_github as gh
+from keyvaluestore import KeyValueStore, set_db_schema
+from models import Architecture, Changelog, Tag, EnvVar, Volume, Port, Config
+from models import Custom, SecurityOpt, Device, Cap, Hostname, MacAddress, Image
+from models import Repository, ImagesData, ImagesResponse, IMAGES_SCHEMA_VERSION
 import datetime
 import os
-import threading
 import time
-import yaml

 CI = os.environ.get("CI", None)
 INVALIDATE_HOURS = int(os.environ.get("INVALIDATE_HOURS", "24"))
-PAT = os.environ.get("PAT", None)
-
-
-def get_repos():
-    auth = Auth.Token(PAT) if PAT else None
-    gh = Github(auth=auth)
-    org = gh.get_organization("linuxserver")
-    repos = org.get_repos()
-    return [repo for repo in repos if repo.full_name.startswith("linuxserver/docker-")
-            and not repo.full_name.startswith("linuxserver/docker-baseimage-")
-            and (repo.description is None or "DEPRECATED" not in repo.description)]
-
-def get_vars(repo, branch):
-    try:
-        content = repo.get_contents("readme-vars.yml", ref=branch).decoded_content
-        return yaml.load(content, Loader=yaml.CLoader)
-    except:
+
+
+def get_tags(readme_vars):
+    if "development_versions_items" not in readme_vars:
+        return [Tag(tag="latest", desc="Stable releases")], True
+    tags = []
+    stable = False
+    for item in readme_vars["development_versions_items"]:
+        if item["tag"] == "latest":
+            stable = True
+        tags.append(Tag(tag=item["tag"], desc=item["desc"]))
+    return tags, stable
+
+def get_architectures(readme_vars):
+    if "available_architectures" not in readme_vars:
+        return [Architecture(arch="arch_x86_64", tag="amd64-latest")]
+    archs = []
+    for item in readme_vars["available_architectures"]:
+        archs.append(Architecture(arch=item["arch"][8:-3], tag=item["tag"]))
+    return archs
+
+def get_changelogs(readme_vars):
+    if "changelogs" not in readme_vars:
         return None
+    changelogs = []
+    for item in readme_vars["changelogs"][0:3]:
+        changelogs.append(Changelog(date=item["date"][0:-1], desc=item["desc"]))
+    return changelogs
+
+def get_description(readme_vars):
+    description = readme_vars.get("project_blurb", "No description")
+    description = description.replace("\n", " ").strip(" \t\n\r")
+    if "project_name" in readme_vars:
+        description = description.replace("[{{ project_name|capitalize }}]", readme_vars["project_name"])
+        description = description.replace("[{{ project_name }}]", readme_vars["project_name"])
+    if "project_url" in readme_vars:
+        description = description.replace("({{ project_url }})", "")
+    return description
+
+def get_env_vars(readme_vars):
+    env_vars = []
+    if readme_vars.get("common_param_env_vars_enabled", False):
+        env_vars.extend([
+            EnvVar(name="PUID", value="1000", desc="User ID", optional=False),
+            EnvVar(name="PGID", value="1000", desc="Group ID", optional=False),
+            EnvVar(name="TZ", value="Etc/UTC", desc="Timezone", optional=False),
+        ])
+    if "param_env_vars" in readme_vars:
+        for item in readme_vars["param_env_vars"]:
+            env_vars.append(EnvVar(name=item["env_var"], value=item["env_value"], desc=item["desc"], optional=False))
+    if "opt_param_env_vars" in readme_vars:
+        for item in readme_vars["opt_param_env_vars"]:
+            env_vars.append(EnvVar(name=item["env_var"], value=item["env_value"], desc=item["desc"], optional=True))
+    return env_vars if env_vars else None
+
+def get_volumes(readme_vars):
+    volumes = []
+    if "param_volumes" in readme_vars:
+        for item in readme_vars["param_volumes"]:
+            volumes.append(Volume(path=item["vol_path"], host_path=item["vol_host_path"], desc=item["desc"], optional=False))
+    if "opt_param_volumes" in readme_vars:
+        for item in readme_vars["opt_param_volumes"]:
+            volumes.append(Volume(path=item["vol_path"], host_path=item["vol_host_path"], desc=item["desc"], optional=True))
+    return volumes if volumes else None
+
+def get_ports(readme_vars):
+    ports = []
+    if "param_ports" in readme_vars:
+        for item in readme_vars["param_ports"]:
+            ports.append(Port(external=item["external_port"], internal=item["internal_port"], desc=item["port_desc"], optional=False))
+    if "opt_param_ports" in readme_vars:
+        for item in readme_vars["opt_param_ports"]:
+            ports.append(Port(external=item["external_port"], internal=item["internal_port"], desc=item["port_desc"], optional=True))
+    return ports if ports else None
+
+def get_custom(readme_vars):
+    custom = []
+    if "custom_params" in readme_vars:
+        for item in readme_vars["custom_params"]:
+            custom.append(Custom(name=item["name"], name_compose=item["name_compose"], value=item["value"], desc=item["desc"], optional=False))
+    if "opt_custom_params" in readme_vars:
+        for item in readme_vars["opt_custom_params"]:
+            custom.append(Custom(name=item["name"], name_compose=item["name_compose"], value=item["value"], desc=item["desc"], optional=True))
+    return custom if custom else None
+
+def get_security_opt(readme_vars):
+    security_opts = []
+    if "security_opt_param_vars" in readme_vars:
+        for item in readme_vars["security_opt_param_vars"]:
+            security_opts.append(SecurityOpt(run_var=item["run_var"], compose_var=item["compose_var"], desc=item["desc"], optional=False))
+    if "opt_security_opt_param_vars" in readme_vars:
+        for item in readme_vars["opt_security_opt_param_vars"]:
+            security_opts.append(SecurityOpt(run_var=item["run_var"], compose_var=item["compose_var"], desc=item["desc"], optional=True))
+    return security_opts if security_opts else None

-def get_state():
-    images = []
-    repos = get_repos()
-    for repo in sorted(repos, key=lambda repo: repo.full_name):
-        readme_vars = get_vars(repo, "master") or get_vars(repo, "main") or get_vars(repo, "develop") or get_vars(repo, "nightly")
-        if not readme_vars or "'project_deprecation_status': True" in str(readme_vars):
-            continue
-        categories = readme_vars.get("project_categories", "")
-        if "Internal" in categories:
-            continue
-        version = "latest" if "development_versions_items" not in readme_vars else readme_vars["development_versions_items"][0]["tag"]
-        images.append(Image(
-            name=repo.full_name.replace("linuxserver/docker-", ""),
-            version=version,
-            category=categories,
-            stable=version == "latest",
-            deprecated=False
-        ))
-    return ImagesData(repositories=Repository(linuxserver=images)).model_dump_json()
+def get_devices(readme_vars):
+    devices = []
+    if "param_devices" in readme_vars:
+        for item in readme_vars["param_devices"]:
+            devices.append(Device(path=item["device_path"], host_path=item["device_host_path"], desc=item["desc"], optional=False))
+    if "opt_param_devices" in readme_vars:
+        for item in readme_vars["opt_param_devices"]:
+            devices.append(Device(path=item["device_path"], host_path=item["device_host_path"], desc=item["desc"], optional=True))
+    return devices if devices else None
+
+def get_caps(readme_vars):
+    caps = []
+    if "cap_add_param_vars" in readme_vars:
+        for item in readme_vars["cap_add_param_vars"]:
+            caps.append(Cap(cap_add=item["cap_add_var"], desc=item["desc"], optional=False))
+    if "opt_cap_add_param_vars" in readme_vars:
+        for item in readme_vars["opt_cap_add_param_vars"]:
+            caps.append(Cap(cap_add=item["cap_add_var"], desc=item["desc"], optional=True))
+    return caps if caps else None
+
+def get_hostname(readme_vars):
+    include_hostname = readme_vars.get("param_usage_include_hostname", False)
+    if not include_hostname:
+        return None
+    optional = include_hostname == "optional"
+    hostname = readme_vars.get("param_hostname", False).replace("{{ project_name }}", readme_vars["project_name"])
+    return Hostname(hostname=hostname, desc=readme_vars.get("param_hostname_desc", ""), optional=optional)
+
+def get_mac_address(readme_vars):
+    include_mac_address = readme_vars.get("param_usage_include_mac_address", False)
+    if not include_mac_address:
+        return None
+    optional = include_mac_address == "optional"
+    hostname = readme_vars.get("param_mac_address", False)
+    return MacAddress(mac_address=hostname, desc=readme_vars.get("param_mac_address_desc", ""), optional=optional)
+
+def get_image(repo):
+    print(f"Processing {repo.name}")
+    if not repo.name.startswith("docker-") or repo.name.startswith("docker-baseimage-"):
+        return None
+    readme_vars = gh.get_readme_vars(repo)
+    if not readme_vars:
+        return None
+    categories = readme_vars.get("project_categories", "")
+    if "Internal" in categories:
+        return None
+    tags, stable = get_tags(readme_vars)
+    deprecated = readme_vars.get("project_deprecation_status", False)
+    version, version_timestamp = gh.get_last_stable_release(repo)
+    application_setup = None
+    if readme_vars.get("app_setup_block_enabled", False):
+        application_setup = f"{repo.html_url}?tab=readme-ov-file#application-setup"
+    config = Config(
+        application_setup=application_setup,
+        readonly_supported=readme_vars.get("readonly_supported", None),
+        nonroot_supported=readme_vars.get("nonroot_supported", None),
+        privileged=readme_vars.get("privileged", None),
+        networking=readme_vars.get("param_net", None),
+        hostname=get_hostname(readme_vars),
+        mac_address=get_mac_address(readme_vars),
+        env_vars=get_env_vars(readme_vars),
+        volumes=get_volumes(readme_vars),
+        ports=get_ports(readme_vars),
+        custom=get_custom(readme_vars),
+        security_opt=get_security_opt(readme_vars),
+        devices=get_devices(readme_vars),
+        caps=get_caps(readme_vars),
+    )
+    return Image(
+        name=repo.name.replace("docker-", ""),
+        github_url=repo.html_url,
+        stars=repo.stargazers_count,
+        project_url=readme_vars.get("project_url", None),
+        project_logo=readme_vars.get("project_logo", None),
+        description=get_description(readme_vars),
+        version=version,
+        version_timestamp=version_timestamp,
+        category=categories,
+        stable=stable,
+        deprecated=deprecated,
+        tags=tags,
+        architectures=get_architectures(readme_vars),
+        changelog=get_changelogs(readme_vars),
+        config=config,
+    )

 def update_images():
     with KeyValueStore(invalidate_hours=INVALIDATE_HOURS, readonly=False) as kv:
-        if "images" in kv or CI == "1":
+        is_current_schema = kv.is_current_schema("images", IMAGES_SCHEMA_VERSION)
+        if ("images" in kv and is_current_schema) or CI == "1":
             print(f"{datetime.datetime.now()} - skipped - already updated")
             return
         print(f"{datetime.datetime.now()} - updating images")
-        kv["images"] = get_state()
+        images = []
+        repos = gh.get_repos()
+        for repo in sorted(repos, key=lambda repo: repo.name):
+            image = get_image(repo)
+            if not image:
+                continue
+            images.append(image)
+
+        data = ImagesData(repositories=Repository(linuxserver=images))
+        last_updated = datetime.datetime.now(datetime.timezone.utc).isoformat(' ', 'seconds')
+        response = ImagesResponse(status="OK", last_updated=last_updated, data=data)
+        new_state = response.model_dump_json(exclude_none=True)
+        kv.set_value("images", new_state, IMAGES_SCHEMA_VERSION)
         print(f"{datetime.datetime.now()} - updated images")

-class UpdateImages(threading.Thread):
-    def run(self,*args,**kwargs):
-        while True:
-            update_images()
-            time.sleep(INVALIDATE_HOURS*60*60)
+def main():
+    set_db_schema()
+    while True:
+        gh.print_rate_limit()
+        update_images()
+        gh.print_rate_limit()
+        time.sleep(INVALIDATE_HOURS*60*60)

 if __name__ == "__main__":
-    update_images_thread = UpdateImages()
-    update_images_thread.start()
+    main()
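
Not part of the patch itself: a minimal sketch of how a client might exercise the reworked /api/v1/images endpoint once this change is deployed. The endpoint path, the include_config and include_deprecated query parameters, the response fields, and the default base URL of http://localhost:8000 all come from the diff above; the use of Python's standard-library urllib for the request is an assumption.

import json
import urllib.parse
import urllib.request

# Base URL matches the default of the new URL env var in api.py; adjust for your deployment.
BASE_URL = "http://localhost:8000"

# Request the full payload, including per-image config blocks and deprecated images.
params = urllib.parse.urlencode({"include_config": "true", "include_deprecated": "true"})
with urllib.request.urlopen(f"{BASE_URL}/api/v1/images?{params}") as resp:
    payload = json.load(resp)

print(payload["status"], payload["last_updated"])
for image in payload["data"]["repositories"]["linuxserver"]:
    print(image["name"], image["version"], image["stars"])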