Skip to content

Commit c0eb558

Browse files
committed
Add tests to evaluate the new metatypes
1 parent 341f827 commit c0eb558

File tree

14 files changed

+202
-8
lines changed

14 files changed

+202
-8
lines changed

tests/hub/dataplugin/conftest.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,10 @@
11
"""
2-
Fixtures for mocking different types of plugin types
2+
Fixtures for mocking different types of plugin types,
3+
along with miscellaneous infrastructure
34
"""
45

6+
import asyncio
7+
58
from pathlib import Path
69
import functools
710
import http.server
@@ -15,20 +18,19 @@
1518

1619
import pytest
1720

18-
1921
logger = logging.getLogger(__name__)
2022

2123

2224
@pytest.fixture(scope="session")
2325
def temporary_mock_data(tmp_path_factory):
2426
"""
25-
Generates a subset of random binary files for populating the mock data hosting
27+
Generates binary files for populating the mock data hosting
2628
"""
2729
set_file_size_bytes = [1024, 2048, 4096, 8192]
2830

2931
temp_directory_name = "submarine"
3032
temp_directory = tmp_path_factory.mktemp(temp_directory_name)
31-
num_data_files = random.randint(3, 7)
33+
num_data_files = 15
3234

3335
for file_index, file_size_bytes in enumerate(random.choices(set_file_size_bytes, k=num_data_files)):
3436
random_binary_filename = f"dleaf{file_index}"
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
{
2+
"version": "1.0",
3+
"dumper": {
4+
"data_url": [],
5+
"uncompress": false,
6+
"release": "version:get_release"
7+
},
8+
"uploader": {
9+
"parser": "parser:size_loader",
10+
"on_duplicates": "error"
11+
}
12+
}
Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
def size_mapping(cls):
    """
    Mock elasticsearch mapping for the size loader.
    """
    keyword_field = {"type": "keyword"}
    properties = {"name": keyword_field, "size": keyword_field}
    return {"associatedWith": {"properties": properties}}
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
"""
2+
Mock handler for parsing data associated with our plugin
3+
"""
4+
5+
from pathlib import Path
6+
import uuid
7+
8+
9+
def size_loader(data_folder):
    """
    Iterates over the data folder and generates documents of the following structure:
    {
        "name": <filename>
        "size": <filesize>
    }

    Each document additionally carries a random UUID4 string under ``_id``.

    Only regular files are reported: sub-directories discovered during the
    recursive walk are skipped, since a directory's ``st_size`` is not a
    meaningful data-file size.
    """
    for file_path_object in Path(data_folder).rglob("*"):
        # Skip directories (and anything else that is not a regular file)
        if not file_path_object.is_file():
            continue
        document = {
            "_id": str(uuid.uuid4()),
            "name": file_path_object.name,
            # file_path_object is already a Path; no re-wrapping needed
            "size": file_path_object.stat().st_size,
        }
        yield document
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
def get_release(self):
    """Mock release hook: always reports the Unix epoch as the release date."""
    return "1970-01-01"
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
{
2+
"version": "1.0",
3+
"dumper": {
4+
"data_url": [],
5+
"uncompress": false,
6+
"release": "version:get_release",
7+
"disabled": true
8+
},
9+
"uploader": {
10+
"parser": "parser:size_loader",
11+
"on_duplicates": "error"
12+
}
13+
}
Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
def size_mapping(cls):
    """
    Mock elasticsearch mapping for the size loader.

    Returns the index mapping used by the hub when indexing documents
    produced by ``size_loader``: both ``name`` and ``size`` are stored
    as non-analyzed ``keyword`` fields under ``associatedWith``.
    """
    # NOTE(review): "size" is mapped as keyword even though the parser emits
    # an integer file size -- confirm this is intentional for the mock.
    elasticsearch_mapping = {
        "associatedWith": {"properties": {"name": {"type": "keyword"}, "size": {"type": "keyword"}}}
    }
    return elasticsearch_mapping
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
"""
2+
Mock handler for parsing data associated with our plugin
3+
"""
4+
5+
from pathlib import Path
6+
import uuid
7+
8+
9+
def size_loader(data_folder):
    """
    Iterates over the data folder and generates documents of the following structure:
    {
        "name": <filename>
        "size": <filesize>
    }

    Each document additionally carries a random UUID4 string under ``_id``.

    Only regular files are reported: sub-directories discovered during the
    recursive walk are skipped, since a directory's ``st_size`` is not a
    meaningful data-file size.
    """
    for file_path_object in Path(data_folder).rglob("*"):
        # Skip directories (and anything else that is not a regular file)
        if not file_path_object.is_file():
            continue
        document = {
            "_id": str(uuid.uuid4()),
            "name": file_path_object.name,
            # file_path_object is already a Path; no re-wrapping needed
            "size": file_path_object.stat().st_size,
        }
        yield document
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
def get_release(self):
    """
    Mock version handler returning a fixed, deterministic release date.
    """
    epoch_date = "1970-01-01"
    return epoch_date
Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
"""
2+
Integration test for evaluating the various pieces
3+
required to execute a dump job within our hub
4+
"""
5+
6+
import asyncio
7+
from pathlib import Path
8+
from types import SimpleNamespace
9+
10+
import pytest
11+
12+
from biothings import config
13+
from biothings.hub.dataload.dumper import DumperManager
14+
from biothings.utils.manager import JobManager
15+
from biothings.hub.dataload.uploader import UploaderManager
16+
from biothings.hub.dataplugin.assistant import LocalAssistant
17+
from biothings.hub.dataplugin.manager import DataPluginManager
18+
from biothings.utils import hub_db
19+
20+
plugin_designs = ["single_uploader_plugin", "multiple_uploader_plugin"]
21+
22+
23+
@pytest.mark.asyncio
@pytest.mark.parametrize("plugin", plugin_designs, indirect=True)
async def test_job_dump_operation(plugin):
    """
    Integration test: register a local data plugin with the hub and run a
    full dump over it via the DumperManager.

    Parametrized indirectly over ``plugin_designs``, so the ``plugin``
    fixture yields one plugin directory per design
    ("single_uploader_plugin" / "multiple_uploader_plugin").
    """
    # Point the hub database layer at the test configuration before any
    # manager touches it.
    hub_db.setup(config)

    # construct our manager instances
    job_manager = JobManager(
        loop=asyncio.get_running_loop(),
        process_queue=None,
        thread_queue=None,
        max_memory_usage=None,
        num_workers=None,
        num_threads=None,
        auto_recycle=True,
    )

    # LocalAssistant uses class-level managers; only the dumper manager gets
    # a real JobManager since only dumping is exercised here.
    LocalAssistant.data_plugin_manager = DataPluginManager(job_manager=None)
    LocalAssistant.uploader_manager = UploaderManager(job_manager=None)
    LocalAssistant.dumper_manager = DumperManager(job_manager=job_manager, poll_schedule=None, datasource_path=None)

    plugin_name = plugin.name
    assistant_url = f"local://{plugin_name}"
    assistant_instance = LocalAssistant(assistant_url)

    # Reset any stale registration for this plugin, then register it fresh
    # in the hub's internal data_plugin collection.
    data_plugin_entry = hub_db.get_data_plugin()
    data_plugin_entry.remove({"_id": assistant_instance.plugin_name})
    plugin_entry = {
        "_id": assistant_instance.plugin_name,
        "plugin": {
            "url": assistant_url,
            "type": assistant_instance.plugin_type,
            "active": True,
        },
        "download": {"data_folder": str(Path(plugin))},
    }

    data_plugin_entry.insert_one(plugin_entry)

    # Loading the plugin registers its dumper/uploader classes with the
    # class-level managers assigned above.
    plugin_loader = assistant_instance.loader
    plugin_loader.load_plugin()

    # NOTE(review): current_plugin is constructed but never used below, and
    # in_plugin_dir is always False here (plugin_name is a non-None string)
    # -- confirm whether this namespace was meant to be passed somewhere.
    current_plugin = SimpleNamespace(
        plugin_name=plugin_name,
        data_plugin_dir=plugin,
        in_plugin_dir=plugin_name is None,
    )

    # Kick off a dump across all registered sources and wait for completion.
    await assistant_instance.dumper_manager.dump_all()

0 commit comments

Comments
 (0)