diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml new file mode 100644 index 0000000..82f8dbd --- /dev/null +++ b/.github/workflows/python-publish.yml @@ -0,0 +1,70 @@ +# This workflow will upload a Python Package to PyPI when a release is created +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +name: Upload Python Package + +on: + release: + types: [published] + +permissions: + contents: read + +jobs: + release-build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + python-version: "3.x" + + - name: Build release distributions + run: | + # NOTE: put your own distribution build steps here. + python -m pip install build + python -m build + + - name: Upload distributions + uses: actions/upload-artifact@v4 + with: + name: release-dists + path: dist/ + + pypi-publish: + runs-on: ubuntu-latest + needs: + - release-build + permissions: + # IMPORTANT: this permission is mandatory for trusted publishing + id-token: write + + # Dedicated environments with protections for publishing are strongly recommended. 
+ # For more information, see: https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment#deployment-protection-rules + environment: + name: pypi + # OPTIONAL: uncomment and update to include your PyPI project URL in the deployment status: + # url: https://pypi.org/p/YOURPROJECT + # + # ALTERNATIVE: if your GitHub Release name is the PyPI project version string + # ALTERNATIVE: exactly, uncomment the following line instead: + # url: https://pypi.org/project/YOURPROJECT/${{ github.event.release.name }} + + steps: + - name: Retrieve release distributions + uses: actions/download-artifact@v4 + with: + name: release-dists + path: dist/ + + - name: Publish release distributions to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + packages-dir: dist/ diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..a5778c3 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,10 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "type": "shell", + "label": "Run chatbot example", + "command": "python3 examples/chatbot.py" + } + ] +} \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index edbe330..c528f8d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,8 @@ addopts = "--strict-markers" markers = [ "slow: marks tests as slow (deselect with '-m \"not slow\"')", "lmstudio: marks tests as needing LM Studio (deselect with '-m \"not lmstudio\"')", - "wip: marks tests as a work-in-progress (select with '-m \"wip\"')" + "wip: marks tests as a work-in-progress (select with '-m \"wip\"')", + "asyncio: marks tests as asyncio-based", ] # Warnings should only be emitted when being specifically tested filterwarnings = [ @@ -102,7 +103,5 @@ filterwarnings = [ log_format = "%(asctime)s %(levelname)s %(message)s" log_date_format = "%Y-%m-%d %H:%M:%S" -# Each async test case gets a fresh event loop by default -asyncio_default_fixture_loop_scope = "function" 
[tool.coverage.run] relative_files = true diff --git a/src/lmstudio/__init__.py b/src/lmstudio/__init__.py index 957df80..836f6bf 100644 --- a/src/lmstudio/__init__.py +++ b/src/lmstudio/__init__.py @@ -17,6 +17,14 @@ from .schemas import * from .history import * from .json_api import * +from .json_api import ( + LMStudioPredictionError, + LMStudioModelLoadError, + LMStudioInputValidationError, + LMStudioPredictionTimeoutError, + LMStudioPredictionCancelledError, + LMStudioPredictionRuntimeError, +) from .async_api import * from .sync_api import * diff --git a/src/lmstudio/json_api.py b/src/lmstudio/json_api.py index 610f953..011be49 100644 --- a/src/lmstudio/json_api.py +++ b/src/lmstudio/json_api.py @@ -418,10 +418,30 @@ def __init__(self, message: str) -> None: super().__init__(message, None) @sdk_public_type class LMStudioPredictionError(LMStudioServerError): """Problems reported by the LM Studio instance during a model prediction.""" +@sdk_public_type +class LMStudioModelLoadError(LMStudioPredictionError): + """Raised when a model fails to load for a prediction.""" + +@sdk_public_type +class LMStudioInputValidationError(LMStudioPredictionError): + """Raised when input to a prediction is invalid (e.g., bad prompt, bad parameters).""" + +@sdk_public_type +class LMStudioPredictionTimeoutError(LMStudioPredictionError): + """Raised when a prediction times out before completion.""" + +@sdk_public_type +class LMStudioPredictionCancelledError(LMStudioPredictionError): + """Raised when a prediction is cancelled before completion.""" + +@sdk_public_type +class LMStudioPredictionRuntimeError(LMStudioPredictionError): + """Raised for unexpected runtime errors during prediction.""" + @sdk_public_type class LMStudioClientError(LMStudioError): diff --git a/tests/async/test_embedding_async.py b/tests/async/test_embedding_async.py index 53e52cf..afa40a5 100644 --- a/tests/async/test_embedding_async.py +++ b/tests/async/test_embedding_async.py @@ -8,7 +8,7 
@@ from lmstudio import AsyncClient, EmbeddingLoadModelConfig, LMStudioModelNotFoundError -from ..support import ( +from tests.support import ( EXPECTED_EMBEDDING, EXPECTED_EMBEDDING_CONTEXT_LENGTH, EXPECTED_EMBEDDING_ID, diff --git a/tests/async/test_images_async.py b/tests/async/test_images_async.py index 8535076..ad794b0 100644 --- a/tests/async/test_images_async.py +++ b/tests/async/test_images_async.py @@ -9,7 +9,7 @@ from lmstudio import AsyncClient, Chat, FileHandle, LMStudioServerError -from ..support import ( +from tests.support import ( EXPECTED_VLM_ID, IMAGE_FILEPATH, SHORT_PREDICTION_CONFIG, diff --git a/tests/async/test_inference_async.py b/tests/async/test_inference_async.py index f8b16ac..937cb21 100644 --- a/tests/async/test_inference_async.py +++ b/tests/async/test_inference_async.py @@ -28,7 +28,7 @@ ToolCallRequest, ) -from ..support import ( +from tests.support import ( ADDITION_TOOL_SPEC, EXPECTED_LLM_ID, GBNF_GRAMMAR, diff --git a/tests/async/test_llm_async.py b/tests/async/test_llm_async.py index 6e94a0f..0b21722 100644 --- a/tests/async/test_llm_async.py +++ b/tests/async/test_llm_async.py @@ -13,7 +13,7 @@ history, ) -from ..support import EXPECTED_LLM, EXPECTED_LLM_ID, check_sdk_error +from tests.support import EXPECTED_LLM, EXPECTED_LLM_ID, check_sdk_error @pytest.mark.asyncio diff --git a/tests/async/test_model_catalog_async.py b/tests/async/test_model_catalog_async.py index 662c371..4758db8 100644 --- a/tests/async/test_model_catalog_async.py +++ b/tests/async/test_model_catalog_async.py @@ -12,7 +12,7 @@ from lmstudio import AsyncClient, LMStudioModelNotFoundError, LMStudioServerError from lmstudio.json_api import DownloadedModelBase, ModelHandleBase -from ..support import ( +from tests.support import ( LLM_LOAD_CONFIG, EXPECTED_LLM, EXPECTED_LLM_ID, diff --git a/tests/async/test_model_handles_async.py b/tests/async/test_model_handles_async.py index f744522..eb4691d 100644 --- a/tests/async/test_model_handles_async.py +++ 
b/tests/async/test_model_handles_async.py @@ -12,7 +12,7 @@ from lmstudio import AsyncClient, PredictionResult -from ..support import ( +from tests.support import ( EXPECTED_EMBEDDING, EXPECTED_EMBEDDING_ID, EXPECTED_EMBEDDING_LENGTH, diff --git a/tests/async/test_repository_async.py b/tests/async/test_repository_async.py index ae6337e..06f7deb 100644 --- a/tests/async/test_repository_async.py +++ b/tests/async/test_repository_async.py @@ -7,7 +7,7 @@ from lmstudio import AsyncClient, LMStudioClientError -from ..support import SMALL_LLM_SEARCH_TERM +from tests.support import SMALL_LLM_SEARCH_TERM # N.B. We can maybe provide a reference list for what should be available diff --git a/tests/conftest.py b/tests/conftest.py index c3850a3..08f0edf 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,5 +2,27 @@ import pytest + # Ensure support module assertions provide failure details pytest.register_assert_rewrite("tests.support") + +# Session-scoped fixture for required model loading +import asyncio +import sys + +@pytest.fixture(scope="session", autouse=True) +def load_required_models(): + """Load required models at the start of the test session.""" + # Only run if LM Studio is accessible + try: + from tests.load_models import reload_models + asyncio.run(reload_models()) + except Exception as e: + print(f"[Fixture] Skipping model loading: {e}", file=sys.stderr) + yield + # Optionally unload models at the end of the session + try: + from tests.unload_models import unload_models + asyncio.run(unload_models()) + except Exception as e: + print(f"[Fixture] Skipping model unloading: {e}", file=sys.stderr) diff --git a/tests/load_models.py b/tests/load_models.py index 24b033e..b0377cf 100644 --- a/tests/load_models.py +++ b/tests/load_models.py @@ -6,7 +6,7 @@ import lmstudio as lms -from .support import ( +from tests.support import ( EXPECTED_EMBEDDING_ID, EXPECTED_LLM_ID, EXPECTED_VLM_ID, diff --git a/tests/sync/test_embedding_sync.py 
b/tests/sync/test_embedding_sync.py index 223dabe..46001fb 100644 --- a/tests/sync/test_embedding_sync.py +++ b/tests/sync/test_embedding_sync.py @@ -15,7 +15,7 @@ from lmstudio import Client, EmbeddingLoadModelConfig, LMStudioModelNotFoundError -from ..support import ( +from tests.support import ( EXPECTED_EMBEDDING, EXPECTED_EMBEDDING_CONTEXT_LENGTH, EXPECTED_EMBEDDING_ID, diff --git a/tests/sync/test_images_sync.py b/tests/sync/test_images_sync.py index b944b40..c372123 100644 --- a/tests/sync/test_images_sync.py +++ b/tests/sync/test_images_sync.py @@ -16,7 +16,7 @@ from lmstudio import Client, Chat, FileHandle, LMStudioServerError -from ..support import ( +from tests.support import ( EXPECTED_VLM_ID, IMAGE_FILEPATH, SHORT_PREDICTION_CONFIG, diff --git a/tests/sync/test_inference_sync.py b/tests/sync/test_inference_sync.py index b519baf..66198ed 100644 --- a/tests/sync/test_inference_sync.py +++ b/tests/sync/test_inference_sync.py @@ -35,7 +35,7 @@ ToolCallRequest, ) -from ..support import ( +from tests.support import ( ADDITION_TOOL_SPEC, EXPECTED_LLM_ID, GBNF_GRAMMAR, diff --git a/tests/sync/test_llm_sync.py b/tests/sync/test_llm_sync.py index 3b99cab..a90a668 100644 --- a/tests/sync/test_llm_sync.py +++ b/tests/sync/test_llm_sync.py @@ -20,7 +20,7 @@ history, ) -from ..support import EXPECTED_LLM, EXPECTED_LLM_ID, check_sdk_error +from tests.support import EXPECTED_LLM, EXPECTED_LLM_ID, check_sdk_error @pytest.mark.lmstudio diff --git a/tests/sync/test_model_catalog_sync.py b/tests/sync/test_model_catalog_sync.py index 77ffecd..b9cbb71 100644 --- a/tests/sync/test_model_catalog_sync.py +++ b/tests/sync/test_model_catalog_sync.py @@ -19,7 +19,7 @@ from lmstudio import Client, LMStudioModelNotFoundError, LMStudioServerError from lmstudio.json_api import DownloadedModelBase, ModelHandleBase -from ..support import ( +from tests.support import ( LLM_LOAD_CONFIG, EXPECTED_LLM, EXPECTED_LLM_ID, diff --git a/tests/sync/test_model_handles_sync.py 
b/tests/sync/test_model_handles_sync.py index 2977210..be188ad 100644 --- a/tests/sync/test_model_handles_sync.py +++ b/tests/sync/test_model_handles_sync.py @@ -19,7 +19,7 @@ from lmstudio import Client, PredictionResult -from ..support import ( +from tests.support import ( EXPECTED_EMBEDDING, EXPECTED_EMBEDDING_ID, EXPECTED_EMBEDDING_LENGTH, diff --git a/tests/sync/test_repository_sync.py b/tests/sync/test_repository_sync.py index 55dd6f2..2f1d595 100644 --- a/tests/sync/test_repository_sync.py +++ b/tests/sync/test_repository_sync.py @@ -14,7 +14,7 @@ from lmstudio import Client, LMStudioClientError -from ..support import SMALL_LLM_SEARCH_TERM +from tests.support import SMALL_LLM_SEARCH_TERM # N.B. We can maybe provide a reference list for what should be available diff --git a/tests/test_convenience_api.py b/tests/test_convenience_api.py index 05eaf87..f144593 100644 --- a/tests/test_convenience_api.py +++ b/tests/test_convenience_api.py @@ -7,7 +7,7 @@ import pytest -from .support import ( +from tests.support import ( EXPECTED_EMBEDDING_ID, EXPECTED_LLM_ID, EXPECTED_VLM_ID, diff --git a/tests/test_history.py b/tests/test_history.py index ce20483..000274d 100644 --- a/tests/test_history.py +++ b/tests/test_history.py @@ -35,7 +35,7 @@ ToolCallResultDataDict, ) -from .support import IMAGE_FILEPATH, check_sdk_error +from tests.support import IMAGE_FILEPATH, check_sdk_error INPUT_ENTRIES: list[DictObject] = [ # Entries with multi-word keys mix snake_case and camelCase diff --git a/tests/test_inference.py b/tests/test_inference.py index 0472b10..4905a98 100644 --- a/tests/test_inference.py +++ b/tests/test_inference.py @@ -20,7 +20,7 @@ from lmstudio.json_api import ChatResponseEndpoint from lmstudio._sdk_models import LlmToolParameters -from .support import ( +from tests.support import ( ADDITION_TOOL_SPEC, EXPECTED_LLM_ID, MAX_PREDICTED_TOKENS, diff --git a/tests/test_logging.py b/tests/test_logging.py index 0ea5c2e..9b18530 100644 --- a/tests/test_logging.py +++ 
b/tests/test_logging.py @@ -8,7 +8,7 @@ from lmstudio import AsyncClient -from .support import InvalidEndpoint +from tests.support import InvalidEndpoint @pytest.mark.asyncio diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 28b8e99..ba71455 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -19,7 +19,7 @@ ModelSpecifierQueryDict, ) -from .support import EXPECTED_LLM_ID +from tests.support import EXPECTED_LLM_ID def test_lists_of_lists_rejected() -> None: diff --git a/tests/test_session_errors.py b/tests/test_session_errors.py index f4e3e2a..a3576cb 100644 --- a/tests/test_session_errors.py +++ b/tests/test_session_errors.py @@ -22,7 +22,7 @@ SyncSessionSystem, ) -from .support import ( +from tests.support import ( EXPECT_TB_TRUNCATION, InvalidEndpoint, nonresponsive_api_host, @@ -31,7 +31,7 @@ check_unfiltered_error, ) -from .support.lmstudio import ErrFunc +from tests.support.lmstudio import ErrFunc async def check_call_errors_async(session: _AsyncSession) -> None: diff --git a/tests/test_sessions.py b/tests/test_sessions.py index e93c87c..9959d14 100644 --- a/tests/test_sessions.py +++ b/tests/test_sessions.py @@ -24,7 +24,7 @@ from lmstudio._ws_impl import AsyncTaskManager from lmstudio._ws_thread import AsyncWebsocketThread -from .support import LOCAL_API_HOST +from tests.support import LOCAL_API_HOST async def check_connected_async_session(session: _AsyncSession) -> None: diff --git a/tests/test_timeouts.py b/tests/test_timeouts.py index d27a370..ad5fa2d 100644 --- a/tests/test_timeouts.py +++ b/tests/test_timeouts.py @@ -16,7 +16,7 @@ ) from lmstudio.sync_api import _DEFAULT_TIMEOUT -from .support import EXPECTED_LLM_ID +from tests.support import EXPECTED_LLM_ID # Sync only, as async API uses standard async timeout constructs like anyio.move_on_after diff --git a/tests/test_traceback_filtering.py b/tests/test_traceback_filtering.py index 28df001..f51a894 100644 --- a/tests/test_traceback_filtering.py +++ 
b/tests/test_traceback_filtering.py @@ -7,8 +7,8 @@ from lmstudio.sdk_api import sdk_callback_invocation from lmstudio._logging import new_logger -from .support import check_sdk_error, check_unfiltered_error -from .support.lmstudio import ( +from tests.support import check_sdk_error, check_unfiltered_error +from tests.support.lmstudio import ( TestCoro, TestFunc, SYNC_API, diff --git a/tests/unload_models.py b/tests/unload_models.py index d8fcdfd..c8e7fd2 100644 --- a/tests/unload_models.py +++ b/tests/unload_models.py @@ -3,7 +3,7 @@ import asyncio import lmstudio as lms -from .support import ( +from tests.support import ( EXPECTED_EMBEDDING_ID, EXPECTED_LLM_ID, EXPECTED_VLM_ID,