
Commit bef3145

Only pass TILEDB_REST_TOKEN to test_cloud during CI (#327)
Only pass TILEDB_REST_TOKEN to test_cloud and Notebook tests during CI
1 parent 9d3cb2f commit bef3145

File tree: 3 files changed (+20 / -12 lines)

.github/workflows/ci-python.yml

Lines changed: 2 additions & 1 deletion
@@ -36,9 +36,10 @@ jobs:
           #pip install -e .
           #pytest
           pip install -r test/ipynb/requirements.txt
+          export TILEDB_REST_TOKEN=$TILEDB_CLOUD_HELPER_VAR
           pytest --nbmake test/ipynb
         env:
-          TILEDB_REST_TOKEN: ${{ secrets.TILEDB_CLOUD_HELPER_VAR }}
+          TILEDB_CLOUD_HELPER_VAR: ${{ secrets.TILEDB_CLOUD_HELPER_VAR }}
         shell: bash -el {0}
       - name: Check tiledb-vector-search version
         run: |
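With this change the repository secret reaches the job only as TILEDB_CLOUD_HELPER_VAR, and the notebook step re-exports it as TILEDB_REST_TOKEN inside its own shell, so the token is scoped to that step rather than the whole job. A minimal sketch of what the re-exported variable enables inside a notebook under test, assuming the notebooks authenticate from the environment (their code is not part of this diff):

import os

import tiledb.cloud

# Hypothetical notebook cell: log in with the token exported by the CI step above.
# Reading TILEDB_REST_TOKEN here is an assumption about how the notebooks authenticate.
token = os.environ.get("TILEDB_REST_TOKEN")
if not token:
    raise RuntimeError("TILEDB_REST_TOKEN is not set")
tiledb.cloud.login(token=token)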

apis/python/test/common.py

Lines changed: 5 additions & 2 deletions
@@ -329,10 +329,13 @@ def random_name(name: str) -> str:
 
 
 def check_training_input_vectors(
-    index_uri: str, expected_training_sample_size: int, expected_dimensions: int
+    index_uri: str,
+    expected_training_sample_size: int,
+    expected_dimensions: int,
+    config=None,
 ):
     training_input_vectors_uri = f"{index_uri}/{storage_formats[STORAGE_VERSION]['TRAINING_INPUT_VECTORS_ARRAY_NAME']}"
-    with tiledb.open(training_input_vectors_uri, mode="r") as src_array:
+    with tiledb.open(training_input_vectors_uri, mode="r", config=config) as src_array:
         training_input_vectors = np.transpose(src_array[:, :]["values"])
         assert training_input_vectors.shape[0] == expected_training_sample_size
         assert training_input_vectors.shape[1] == expected_dimensions
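Because config defaults to None, existing callers that check local arrays are unaffected, while the cloud tests can now forward their REST configuration to tiledb.open. A usage sketch under assumptions: the helper is imported from the test suite's common module, and the URI and sizes are placeholders:

import tiledb.cloud

from common import check_training_input_vectors  # assumed import path within apis/python/test

check_training_input_vectors(
    index_uri="tiledb://my-namespace/my-index",  # placeholder cloud URI
    expected_training_sample_size=1000,          # placeholder values
    expected_dimensions=128,
    config=tiledb.cloud.Config().dict(),         # REST credentials forwarded to tiledb.open
)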

apis/python/test/test_cloud.py

Lines changed: 13 additions & 9 deletions
@@ -18,9 +18,10 @@ class CloudTests(unittest.TestCase):
 
     @classmethod
     def setUpClass(cls):
-        if not os.getenv("TILEDB_REST_TOKEN"):
-            raise ValueError("TILEDB_REST_TOKEN not set")
-        tiledb.cloud.login(token=os.getenv("TILEDB_REST_TOKEN"))
+        token = os.getenv("TILEDB_REST_TOKEN")
+        if os.getenv("TILEDB_CLOUD_HELPER_VAR"):
+            token = os.getenv("TILEDB_CLOUD_HELPER_VAR")
+        tiledb.cloud.login(token=token)
         namespace, storage_path, _ = groups._default_ns_path_cred()
         storage_path = storage_path.replace("//", "/").replace("/", "//", 1)
         rand_name = random_name("vector_search")
@@ -58,7 +59,9 @@ def test_cloud_flat(self):
             mode=Mode.BATCH,
         )
         tiledb_index_uri = groups.info(index_uri).tiledb_uri
-        index = vs.flat_index.FlatIndex(uri=tiledb_index_uri)
+        index = vs.flat_index.FlatIndex(
+            uri=tiledb_index_uri, config=tiledb.cloud.Config().dict()
+        )
 
         _, result_i = index.query(queries, k=k)
         assert accuracy(result_i, gt_i) > MINIMUM_ACCURACY
@@ -91,7 +94,10 @@ def test_cloud_ivf_flat(self):
         )
 
         tiledb_index_uri = groups.info(index_uri).tiledb_uri
-        index = vs.ivf_flat_index.IVFFlatIndex(uri=tiledb_index_uri)
+        index = vs.ivf_flat_index.IVFFlatIndex(
+            uri=tiledb_index_uri,
+            config=tiledb.cloud.Config().dict(),
+        )
 
         _, result_i = index.query(queries, k=k, nprobe=nprobe)
         assert accuracy(result_i, gt_i) > MINIMUM_ACCURACY
@@ -183,16 +189,14 @@ def test_cloud_ivf_flat_random_sampling(self):
             training_sample_size=training_sample_size,
             max_sampling_tasks=max_sampling_tasks,
             config=tiledb.cloud.Config().dict(),
-            # TODO Re-enable.
-            # This is temporarily disabled due to an incompatibility of new ingestion code and previous
-            # UDF library releases.
-            # mode=Mode.BATCH,
+            mode=Mode.BATCH,
         )
 
         check_training_input_vectors(
             index_uri=index_uri,
             expected_training_sample_size=training_sample_size,
             expected_dimensions=queries.shape[1],
+            config=tiledb.cloud.Config().dict(),
         )
 
         _, result_i = index.query(queries, k=k, nprobe=nprobe)
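Taken together, the test changes let the cloud suite authenticate with either environment variable and pass the REST configuration explicitly when opening cloud-registered indexes, rather than relying on ambient credentials. A condensed sketch of the pattern, assuming vs aliases tiledb.vector_search as in the test file and using a placeholder index URI:

import os

import tiledb.cloud
import tiledb.vector_search as vs  # assumed to be the "vs" alias used in the tests

# Prefer the CI-provided secret, fall back to the conventional variable.
token = os.getenv("TILEDB_CLOUD_HELPER_VAR") or os.getenv("TILEDB_REST_TOKEN")
tiledb.cloud.login(token=token)

# Pass the REST config explicitly instead of relying on the default one.
config = tiledb.cloud.Config().dict()
tiledb_index_uri = "tiledb://my-namespace/my-index"  # placeholder; the tests obtain this from groups.info()
index = vs.flat_index.FlatIndex(uri=tiledb_index_uri, config=config)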
