61 changes: 6 additions & 55 deletions .devcontainer/devcontainer.json
@@ -1,56 +1,7 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/docker-existing-docker-compose
{
"name": "Existing Docker Compose (Extend)",
// Update the 'dockerComposeFile' list if you have more compose files or use different names.
// The .devcontainer/docker-compose.yml file contains any overrides you need/want to make.
"dockerComposeFile": [
"../docker-compose.yaml",
"docker-compose.yml"
],
// The 'service' property is the name of the service for the container that VS Code should
// use. Update this value and .devcontainer/docker-compose.yml to the real service name.
"service": "app",
// The optional 'workspaceFolder' property is the path VS Code should open by default when
// connected. This is typically a file mount in .devcontainer/docker-compose.yml
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
// Features to add to the dev container. More info: https://containers.dev/features.
// "features": {},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [
80,
443,
3306,
8080,
9000
],
"mounts": [
"type=bind,source=${env:SSH_AUTH_SOCK},target=/ssh-agent"
],
"containerEnv": {
"SSH_AUTH_SOCK": "/ssh-agent"
},
// Uncomment the next line if you want start specific services in your Docker Compose config.
// "runServices": [],
// Uncomment the next line if you want to keep your containers running after VS Code shuts down.
"shutdownAction": "stopCompose",
"onCreateCommand": "python3 -m pip install -q -e .[dev]",
"features": {
"ghcr.io/devcontainers/features/git:1": {},
"ghcr.io/devcontainers/features/docker-in-docker:2": {},
"ghcr.io/devcontainers/features/github-cli:1": {},
},
// Configure tool-specific properties.
"customizations": {
"vscode": {
"extensions": [
"ms-python.python"
]
}
},
"remoteEnv": {
"LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}"
}
// Uncomment to connect as an existing user other than the container default. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "devcontainer"
}
"image": "mcr.microsoft.com/devcontainers/typescript-node:0-18",
Comment (Contributor Author): we just need pixi and docker now

Comment (Contributor Author): nodejs is in here for LLM tooling, we can remove if that's not needed

"features": {
"ghcr.io/devcontainers/features/docker-in-docker:2": {}
},
"postCreateCommand": "curl -fsSL https://pixi.sh/install.sh | bash && echo 'export PATH=\"$HOME/.pixi/bin:$PATH\"' >> ~/.bashrc"
}
3 changes: 1 addition & 2 deletions .pre-commit-config.yaml
@@ -30,7 +30,7 @@ repos:
rev: 25.1.0 # matching versions in pyproject.toml and github actions
hooks:
- id: black
args: ["--check", "-v", "src", "tests", "--diff"] # --required-version is conflicting with pre-commit
args: ["-v", "src", "tests", "--diff"] # --required-version is conflicting with pre-commit
Comment (Contributor Author): makes pre-commit much more useful by applying the formatting changes

- repo: https://github.com/PyCQA/flake8
rev: 7.3.0
hooks:
@@ -50,7 +50,6 @@ repos:
- --max-complexity=62
- --max-line-length=127
- --statistics
- --per-file-ignores=datajoint/diagram.py:C901
files: src/ # a lot of files in tests are not compliant
- repo: https://github.com/rhysd/actionlint
rev: v1.7.7
4 changes: 4 additions & 0 deletions activate.sh
@@ -0,0 +1,4 @@
#! /usr/bin/bash
# This script registers dot plugins so that we can use graphviz
# to write png images
dot -c
Comment on lines +1 to +4 (Contributor Author): The dot tool used by Graphviz relies on a post-installation step to register its different file-format backends; this script performs that step. Without it, the Graphviz-related tests fail in a fresh environment.
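As an aside (not part of this PR), one way to confirm that the `dot -c` registration took effect is to render a trivial graph to PNG; the helper below is a hypothetical sketch using only the standard library:

# Hypothetical check, not part of the diff: verify that the local `dot`
# binary can render PNG output once `dot -c` has registered its plugins.
import subprocess


def graphviz_png_available() -> bool:
    """Return True if `dot` can render a trivial graph to PNG."""
    try:
        subprocess.run(
            ["dot", "-Tpng", "-o", "/dev/null"],
            input=b"digraph g { a -> b }",
            check=True,
            capture_output=True,
        )
        return True
    except (OSError, subprocess.CalledProcessError):
        # OSError: dot is not installed; CalledProcessError: png backend missing.
        return False


if __name__ == "__main__":
    print("png backend registered:", graphviz_png_available())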

3,629 changes: 3,507 additions & 122 deletions pixi.lock

Large diffs are not rendered by default.

8 changes: 6 additions & 2 deletions pyproject.toml
@@ -22,7 +22,7 @@ dependencies = [
"urllib3",
"setuptools",
]
requires-python = ">=3.9,<4.0"
requires-python = ">=3.9,<3.14"
Comment (Contributor Author): Linux tests fail on Python 3.14 due to attempts to pickle things that should not be pickled (locks). Fixing this architecturally requires quite a bit more work, so we avoid Python 3.14 for now.
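For context, a minimal sketch (not from this PR) of the failure mode described above: multiprocessing start methods that do not fork the parent outright must pickle whatever they send to worker processes, and thread locks are not picklable.

# Minimal sketch, not part of the diff: a thread lock cannot be pickled, so any
# object that carries one breaks multiprocessing start methods that serialize
# task arguments before handing them to worker processes.
import pickle
import threading

lock = threading.Lock()
try:
    pickle.dumps(lock)
except TypeError as err:
    print(err)  # e.g. "cannot pickle '_thread.lock' object"

The macOS skip added in tests/test_autopopulate.py below works around the same constraint for the spawn start method.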

Comment (Member): 3.9 has reached its end of life.

Comment (Contributor Author): 👍 then let's move to 3.11

Comment (Contributor Author): as a lower bound

authors = [
{name = "Dimitri Yatsenko", email = "[email protected]"},
{name = "Thinh Nguyen", email = "[email protected]"},
@@ -125,7 +125,7 @@ JUPYTER_PASSWORD="datajoint"

[tool.pixi.workspace]
channels = ["conda-forge"]
platforms = ["linux-64"]
platforms = ["linux-64", "osx-arm64", "linux-aarch64"]

[tool.pixi.pypi-dependencies]
datajoint = { path = ".", editable = true }
@@ -138,4 +138,8 @@ test = { features = ["test"], solve-group = "default" }
[tool.pixi.tasks]

[tool.pixi.dependencies]
python = ">=3.9,<3.14"
graphviz = ">=13.1.2,<14"

[tool.pixi.activation]
scripts=["activate.sh"]
2 changes: 1 addition & 1 deletion src/datajoint/diagram.py
@@ -28,7 +28,7 @@
logger = logging.getLogger(__name__.split(".")[0])


if not diagram_active:
if not diagram_active: # noqa: C901

class Diagram:
"""
116 changes: 112 additions & 4 deletions tests/conftest.py
@@ -45,6 +45,57 @@ def pytest_configure(config):
pass


@pytest.fixture
def clean_autopopulate(experiment, trial, ephys):
"""
Explicit cleanup fixture for autopopulate tests.

Cleans experiment/trial/ephys tables after test completes.
Tests must explicitly request this fixture to get cleanup.
"""
yield
# Cleanup after test - delete in reverse dependency order
ephys.delete()
trial.delete()
experiment.delete()


@pytest.fixture
def clean_jobs(schema_any):
"""
Explicit cleanup fixture for jobs tests.

Cleans jobs table before test runs.
Tests must explicitly request this fixture to get cleanup.
"""
try:
schema_any.jobs.delete()
except DataJointError:
pass
yield


@pytest.fixture
def clean_test_tables(test, test_extra, test_no_extra):
"""
Explicit cleanup fixture for relation tests using test tables.

Ensures test table has lookup data and restores clean state after test.
Tests must explicitly request this fixture to get cleanup.
"""
# Ensure lookup data exists before test
if not test:
test.insert(test.contents, skip_duplicates=True)

yield

# Restore original state after test
test.delete()
test.insert(test.contents, skip_duplicates=True)
test_extra.delete()
test_no_extra.delete()


# Global container registry for cleanup
_active_containers = set()
_docker_client = None
@@ -547,7 +598,7 @@ def mock_cache(tmpdir_factory):
dj.config["cache"] = og_cache


@pytest.fixture
@pytest.fixture(scope="module")
def schema_any(connection_test, prefix):
schema_any = dj.Schema(
prefix + "_test1", schema.LOCALS_ANY, connection=connection_test
@@ -603,6 +654,63 @@ def schema_any(connection_test, prefix):
schema_any.drop()


@pytest.fixture
def schema_any_fresh(connection_test, prefix):
"""Function-scoped schema_any for tests that need fresh schema state."""
schema_any = dj.Schema(
prefix + "_test1_fresh", schema.LOCALS_ANY, connection=connection_test
)
assert schema.LOCALS_ANY, "LOCALS_ANY is empty"
try:
schema_any.jobs.delete()
except DataJointError:
pass
schema_any(schema.TTest)
schema_any(schema.TTest2)
schema_any(schema.TTest3)
schema_any(schema.NullableNumbers)
schema_any(schema.TTestExtra)
schema_any(schema.TTestNoExtra)
schema_any(schema.Auto)
schema_any(schema.User)
schema_any(schema.Subject)
schema_any(schema.Language)
schema_any(schema.Experiment)
schema_any(schema.Trial)
schema_any(schema.Ephys)
schema_any(schema.Image)
schema_any(schema.UberTrash)
schema_any(schema.UnterTrash)
schema_any(schema.SimpleSource)
schema_any(schema.SigIntTable)
schema_any(schema.SigTermTable)
schema_any(schema.DjExceptionName)
schema_any(schema.ErrorClass)
schema_any(schema.DecimalPrimaryKey)
schema_any(schema.IndexRich)
schema_any(schema.ThingA)
schema_any(schema.ThingB)
schema_any(schema.ThingC)
schema_any(schema.ThingD)
schema_any(schema.ThingE)
schema_any(schema.Parent)
schema_any(schema.Child)
schema_any(schema.ComplexParent)
schema_any(schema.ComplexChild)
schema_any(schema.SubjectA)
schema_any(schema.SessionA)
schema_any(schema.SessionStatusA)
schema_any(schema.SessionDateA)
schema_any(schema.Stimulus)
schema_any(schema.Longblob)
yield schema_any
try:
schema_any.jobs.delete()
except DataJointError:
pass
schema_any.drop()


@pytest.fixture
def thing_tables(schema_any):
a = schema.ThingA()
@@ -623,7 +731,7 @@ def thing_tables(schema_any):
yield a, b, c, d, e


@pytest.fixture
@pytest.fixture(scope="module")
def schema_simp(connection_test, prefix):
schema = dj.Schema(
prefix + "_relational", schema_simple.LOCALS_SIMPLE, connection=connection_test
@@ -653,7 +761,7 @@ def schema_simp(connection_test, prefix):
schema.drop()


@pytest.fixture
@pytest.fixture(scope="module")
def schema_adv(connection_test, prefix):
schema = dj.Schema(
prefix + "_advanced",
@@ -694,7 +802,7 @@ def schema_ext(
schema.drop()


@pytest.fixture
@pytest.fixture(scope="module")
def schema_uuid(connection_test, prefix):
schema = dj.Schema(
prefix + "_test1",
12 changes: 6 additions & 6 deletions tests/test_alter.py
@@ -14,12 +14,12 @@


@pytest.fixture
def schema_alter(connection_test, schema_any):
# Overwrite Experiment and Parent nodes
schema_any(Experiment, context=LOCALS_ALTER)
schema_any(Parent, context=LOCALS_ALTER)
yield schema_any
schema_any.drop()
def schema_alter(connection_test, schema_any_fresh):
# Overwrite Experiment and Parent nodes using fresh schema
schema_any_fresh(Experiment, context=LOCALS_ALTER)
schema_any_fresh(Parent, context=LOCALS_ALTER)
yield schema_any_fresh
schema_any_fresh.drop()


class TestAlter:
24 changes: 16 additions & 8 deletions tests/test_autopopulate.py
@@ -1,3 +1,5 @@
import platform

import pymysql
import pytest

@@ -7,7 +9,7 @@
from . import schema


def test_populate(trial, subject, experiment, ephys, channel):
def test_populate(clean_autopopulate, trial, subject, experiment, ephys, channel):
# test simple populate
assert subject, "root tables are empty"
assert not experiment, "table already filled?"
@@ -33,7 +35,7 @@ def test_populate(trial, subject, experiment, ephys, channel):
assert channel


def test_populate_with_success_count(subject, experiment, trial):
def test_populate_with_success_count(clean_autopopulate, subject, experiment, trial):
# test simple populate
assert subject, "root tables are empty"
assert not experiment, "table already filled?"
@@ -51,7 +53,7 @@ def test_populate_with_success_count(subject, experiment, trial):
assert len(trial.key_source & trial) == success_count


def test_populate_key_list(subject, experiment, trial):
def test_populate_key_list(clean_autopopulate, subject, experiment, trial):
# test simple populate
assert subject, "root tables are empty"
assert not experiment, "table already filled?"
@@ -63,7 +65,9 @@ def test_populate_key_list(subject, experiment, trial):
assert n == ret["success_count"]


def test_populate_exclude_error_and_ignore_jobs(schema_any, subject, experiment):
def test_populate_exclude_error_and_ignore_jobs(
clean_autopopulate, schema_any, subject, experiment
):
# test simple populate
assert subject, "root tables are empty"
assert not experiment, "table already filled?"
@@ -79,23 +83,27 @@ def test_populate_exclude_error_and_ignore_jobs(schema_any, subject, experiment)
assert len(experiment.key_source & experiment) == len(experiment.key_source) - 2


def test_allow_direct_insert(subject, experiment):
def test_allow_direct_insert(clean_autopopulate, subject, experiment):
assert subject, "root tables are empty"
key = subject.fetch("KEY", limit=1)[0]
key["experiment_id"] = 1000
key["experiment_date"] = "2018-10-30"
experiment.insert1(key, allow_direct_insert=True)


@pytest.mark.skipif(
platform.system() == "Darwin",
reason="multiprocessing with spawn method (macOS default) cannot pickle thread locks",
)
@pytest.mark.parametrize("processes", [None, 2])
def test_multi_processing(subject, experiment, processes):
def test_multi_processing(clean_autopopulate, subject, experiment, processes):
assert subject, "root tables are empty"
assert not experiment, "table already filled?"
experiment.populate(processes=None)
experiment.populate(processes=processes)
assert len(experiment) == len(subject) * experiment.fake_experiments_per_subject


def test_allow_insert(subject, experiment):
def test_allow_insert(clean_autopopulate, subject, experiment):
assert subject, "root tables are empty"
key = subject.fetch("KEY")[0]
key["experiment_id"] = 1001
11 changes: 11 additions & 0 deletions tests/test_cascading_delete.py
@@ -8,6 +8,17 @@

@pytest.fixture
def schema_simp_pop(schema_simp):
# Clean up tables first to ensure fresh state with module-scoped schema
# Delete in reverse dependency order
Profile().delete()
Website().delete()
G().delete()
E().delete()
D().delete()
B().delete()
L().delete()
A().delete()

A().insert(A.contents, skip_duplicates=True)
L().insert(L.contents, skip_duplicates=True)
B().populate()
Expand Down