3 changes: 3 additions & 0 deletions pytest.ini
@@ -0,0 +1,3 @@
[pytest]
markers =
    happy_path: marks tests as part of the primary success path (end-to-end test)
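With this marker registered, end-to-end tests can be tagged and then selected from the command line. A minimal sketch of how a test might opt in (the test name and body are illustrative, not part of this change):

import pytest

@pytest.mark.happy_path
def test_primary_success_path():
    # Illustrative placeholder; the real happy-path coverage lives in tests/integration.
    assert True

Running pytest -m happy_path would then execute only the tests carrying this marker.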
2 changes: 2 additions & 0 deletions requirements_test.txt
@@ -1 +1,3 @@
pytest
python-dotenv==1.1.1
psycopg2-binary==2.9.10
3 changes: 3 additions & 0 deletions tests/__init__.py
@@ -0,0 +1,3 @@
# This file can be empty; it just marks the directory as a Python package.

__version__ = "0.1.0"
3 changes: 3 additions & 0 deletions tests/integration/__init__.py
@@ -0,0 +1,3 @@
# This file can be empty; it just marks the directory as a Python package.

__version__ = "0.1.0"
67 changes: 67 additions & 0 deletions tests/integration/config.py
@@ -0,0 +1,67 @@
# minds/tests/integration/config.py
import logging
import os

from dotenv import load_dotenv

# ===================================================================
# 1. CONFIGURATION
# ===================================================================

# Set up basic logging to output INFO level messages.
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
)

# Load environment variables from a .env file if it exists.
load_dotenv()

# --- API and Schema Configuration ---
MINDS_API_BASE_URL = os.getenv(
    "MINDS_API_BASE_URL", "https://minds-terabase.dev.mdb.ai"
)
MINDS_OPENAPI_SPEC_URL = os.getenv(
    "MINDS_OPENAPI_SPEC_URL", f"{MINDS_API_BASE_URL.strip('/')}/openapi.json"
)
AUTH_TOKEN = os.getenv("MINDS_API_TOKEN", "remove me when auth is implemented")

# --- DATASOURCE CONFIGURATIONS ---
DATASOURCE_CONFIGS = []

# --- PostgreSQL Configuration (Reads from your existing PG_ environment variables) ---
POSTGRES_CONFIG = {
    "host": os.getenv("PG_HOST", "samples.mindsdb.com"),
    "port": int(os.getenv("PG_PORT", 5432)),
    "user": os.getenv("PG_USER", "demo_user"),
    "password": os.getenv("PG_PASSWORD", "demo_password"),
    "database": os.getenv("PG_DB_NAME", "demo"),
    "schema": os.getenv("PG_SCHEMA", "demo"),
}
if all(POSTGRES_CONFIG.values()):
    DATASOURCE_CONFIGS.append(
        {
            "engine": "postgres",
            "name_prefix": "test_pg_ds",
            "connection_data": POSTGRES_CONFIG,
            "sample_table": "house_sales",  # A known table in the demo PG database
        }
    )

# --- Snowflake Configuration (Only enabled if all credentials are provided) ---
SNOWFLAKE_CONFIG = {
    "account": os.getenv("SNOWFLAKE_ACCOUNT"),
    "user": os.getenv("SNOWFLAKE_USER"),
    "password": os.getenv("SNOWFLAKE_PASSWORD"),
    "schema": os.getenv("SNOWFLAKE_SCHEMA"),
    "database": os.getenv("SNOWFLAKE_DATABASE"),
    "warehouse": os.getenv("SNOWFLAKE_WAREHOUSE"),
}
if all(SNOWFLAKE_CONFIG.values()):
    DATASOURCE_CONFIGS.append(
        {
            "engine": "snowflake",
            "name_prefix": "test-sf-ds",
            "connection_data": SNOWFLAKE_CONFIG,
            "sample_table": "CUSTOMER",
        }
    )
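Because DATASOURCE_CONFIGS is populated only when a datasource's credentials are complete, tests can parametrize over whatever is available at run time. A rough sketch, assuming the config module is importable from the repository root (the test name and assertions are illustrative, not part of this change):

import pytest

from tests.integration import config

@pytest.mark.parametrize(
    "ds_config",
    config.DATASOURCE_CONFIGS,
    ids=[c["name_prefix"] for c in config.DATASOURCE_CONFIGS],
)
def test_datasource_config_is_complete(ds_config):
    # Every enabled datasource entry should name an engine and carry non-empty connection data.
    assert ds_config["engine"] in ("postgres", "snowflake")
    assert all(ds_config["connection_data"].values())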
113 changes: 113 additions & 0 deletions tests/integration/conftest.py
@@ -0,0 +1,113 @@
import logging
import pytest
import time
import psycopg2

from minds.client import Client
from minds.exceptions import ObjectNotFound
from . import config

logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
)


@pytest.fixture(scope="session")
def db_ground_truth():
"""
Connects directly to the PG database to fetch the true max values for assertions,
making tests resilient to data changes.
"""
conn = None
try:
# Use a dictionary of connection parameters that psycopg2 expects
conn_params = {
"dbname": config.POSTGRES_CONFIG["database"],
"user": config.POSTGRES_CONFIG["user"],
"password": config.POSTGRES_CONFIG["password"],
"host": config.POSTGRES_CONFIG["host"],
"port": config.POSTGRES_CONFIG["port"],
}
conn = psycopg2.connect(**conn_params)
cur = conn.cursor()

# Fully qualify the table name with its schema ('demo_data') to resolve the "relation does not exist" error.
cur.execute("SELECT MAX(rental_price) FROM demo_data.home_rentals;")
max_rental_price = cur.fetchone()[0]

cur.close()
logging.info(
f"Fetched ground truth from DB: max_rental_price={max_rental_price}"
)
return {"max_rental_price": max_rental_price}
except Exception as e:
pytest.skip(f"Could not connect to PG database to get ground truth: {e}")
finally:
if conn:
conn.close()


@pytest.fixture(scope="session")
def sdk_client() -> Client:
"""Initialize a Minds Client using centralized config."""
logging.info(f"Connecting to Minds API at {config.MINDS_API_BASE_URL}")
client = Client(api_key=config.AUTH_TOKEN, base_url=config.MINDS_API_BASE_URL)
return client


@pytest.fixture(scope="function")
def sdk_datasource(sdk_client: Client):
"""Creates a temporary datasource for test use and cleans it up."""
timestamp = str(int(time.time()))[-6:]
ds_base = config.DATASOURCE_CONFIGS[0]
ds_name = f"{ds_base['name_prefix']}_{timestamp}"

ds_config = {
"name": ds_name,
"engine": ds_base["engine"],
"connection_data": ds_base["connection_data"],
"description": "Temporary test datasource",
}

logging.info(f"Creating test datasource {ds_name}")
ds = sdk_client.datasources.create(**ds_config)

yield ds # make datasource available to tests

# Cleanup
logging.info(f"Dropping test datasource {ds_name}")
try:
sdk_client.datasources.drop(ds_name)
except ObjectNotFound:
logging.warning(f"Datasource {ds_name} already removed.")


@pytest.fixture(scope="function")
def sdk_mind(sdk_client: Client, sdk_datasource):
"""Creates a temporary mind for test use and cleans it up."""
timestamp = int(time.time())
mind_name = f"test_mind_{timestamp}"

logging.info(
f"Creating test mind {mind_name} with datasource {sdk_datasource.name}"
)
# Create the mind with 'home_rentals' from the start.
sdk_client.minds.create(
mind_name,
datasources=[{"name": sdk_datasource.name, "tables": ["home_rentals"]}],
provider="openai",
)

yield mind_name

# Cleanup
mind_name_upd = f"{mind_name}_upd"
logging.info(f"Dropping test mind {mind_name_upd}")
try:
sdk_client.minds.drop(mind_name_upd)
except ObjectNotFound:
# Also try dropping the original name in case the test failed before the update
try:
sdk_client.minds.drop(mind_name)
except ObjectNotFound:
logging.warning(f"Mind {mind_name} or {mind_name_upd} already removed.")