Skip to content

Commit 14d5039

Browse files
committed
pytest skip
1 parent 39fedd9 commit 14d5039

File tree

1 file changed

+132
-130
lines changed

1 file changed

+132
-130
lines changed
Lines changed: 132 additions & 130 deletions
Original file line numberDiff line numberDiff line change
@@ -1,130 +1,132 @@
1-
# import os
2-
# import unittest
3-
# from unittest import mock
4-
# from unittest.mock import MagicMock, patch, Mock
5-
6-
# from faker import Faker
7-
# from google.cloud import storage
8-
9-
# from test_shared.test_utils.database_utils import default_db_url
10-
# from main import (
11-
# get_latest_datasets_without_validation_reports,
12-
# get_datasets_for_validation,
13-
# update_validation_report,
14-
# )
15-
16-
# faker = Faker()
17-
18-
19-
# def _create_storage_blob(name, metadata):
20-
# """Create a mock storage blob."""
21-
# blob = MagicMock(spec=storage.Blob)
22-
# blob.metadata = metadata
23-
# blob.name = name
24-
# blob.patch = Mock(return_value=None)
25-
# return blob
26-
27-
28-
# class TestUpdateReportProcessor(unittest.TestCase):
29-
# def test_get_latest_datasets(self):
30-
# """Test get_latest_datasets function."""
31-
# session = MagicMock()
32-
# session.query.return_value.filter.return_value.all = MagicMock()
33-
# get_latest_datasets_without_validation_reports(session, "1.0.1")
34-
# session.query.assert_called_once()
35-
36-
# @patch("google.cloud.storage.Client")
37-
# def test_get_datasets_for_validation(self, mock_client):
38-
# """Test get_datasets_for_validation function"""
39-
# test_dataset_id = "dataset1"
40-
# test_feed_id = "feed1"
41-
42-
# def create_dataset_blob(name, exists):
43-
# mock_dataset_blob = Mock(spec=storage.Blob)
44-
# mock_dataset_blob.exists.return_value = exists
45-
# mock_dataset_blob.name = name
46-
# return mock_dataset_blob
47-
48-
# # Setup mock storage client and bucket
49-
# mock_bucket = Mock()
50-
# mock_client.return_value.bucket.return_value = mock_bucket
51-
52-
# # Setup mock blobs and existence results
53-
# mock_dataset_blob_exists = create_dataset_blob(
54-
# f"{test_feed_id}/{test_dataset_id}/{test_dataset_id}.zip", True
55-
# )
56-
# mock_dataset_blob_not_exists = create_dataset_blob(
57-
# f"{test_feed_id}/{test_dataset_id}1/{test_dataset_id}1.zip", False
58-
# )
59-
60-
# mock_bucket.blob.side_effect = lambda name: {
61-
# f"{test_feed_id}/{test_dataset_id}/{test_dataset_id}.zip": mock_dataset_blob_exists,
62-
# f"{test_feed_id}/{test_dataset_id}1/{test_dataset_id}1.zip": mock_dataset_blob_not_exists,
63-
# }[name]
64-
65-
# # Input parameters
66-
# nonexistent_dataset = (test_feed_id, f"{test_dataset_id}2")
67-
# latest_datasets = [
68-
# (test_feed_id, test_dataset_id),
69-
# (test_feed_id, f"{test_dataset_id}1"),
70-
# nonexistent_dataset,
71-
# ]
72-
73-
# result = get_datasets_for_validation(latest_datasets)
74-
75-
# # Assertions
76-
# self.assertEqual(len(result), 1)
77-
# mock_dataset_blob_exists.exists.assert_called_once()
78-
# mock_dataset_blob_not_exists.exists.assert_called_once()
79-
# # Only the existing dataset should be returned
80-
# self.assertEqual(result[0][0], test_feed_id)
81-
# self.assertEqual(result[0][1], test_dataset_id)
82-
83-
# @mock.patch.dict(
84-
# os.environ,
85-
# {
86-
# "FEEDS_DATABASE_URL": default_db_url,
87-
# "WEB_VALIDATOR_URL": faker.url(),
88-
# "MAX_RETRY": "2",
89-
# "BATCH_SIZE": "2",
90-
# "SLEEP_TIME": "0",
91-
# },
92-
# )
93-
# @patch(
94-
# "main.get_latest_datasets_without_validation_reports",
95-
# autospec=True,
96-
# return_value=[("feed1", "dataset1")],
97-
# )
98-
# @patch(
99-
# "main.get_datasets_for_validation",
100-
# autospec=True,
101-
# return_value=[("feed1", "dataset1")],
102-
# )
103-
# @patch("google.cloud.storage.Blob", autospec=True)
104-
# @patch("requests.get", autospec=True)
105-
# @patch("google.cloud.storage.Client", autospec=True)
106-
# @patch("main.Logger", autospec=True)
107-
# @patch("google.cloud.workflows_v1.WorkflowsClient", autospec=True)
108-
# @patch("google.cloud.workflows.executions_v1.ExecutionsClient", autospec=True)
109-
# @patch("google.cloud.workflows.executions_v1.Execution", autospec=True)
110-
# def test_update_validation_report(
111-
# self,
112-
# execution_mock,
113-
# executions_client_mock,
114-
# workflows_client_mock,
115-
# mock_logger,
116-
# mock_client,
117-
# mock_get,
118-
# mock_blob,
119-
# mock_get_latest_datasets,
120-
# mock_get_datasets_for_validation,
121-
# ):
122-
# """Test update_validation_report function."""
123-
# mock_get.return_value.json.return_value = {"version": "1.0.1"}
124-
# mock_request = MagicMock()
125-
# mock_request.get_json.return_value = {"validator_url": faker.url()}
126-
# response = update_validation_report(mock_request)
127-
# self.assertTrue("message" in response[0])
128-
# self.assertTrue("dataset_workflow_triggered" in response[0])
129-
# self.assertEqual(response[1], 200)
130-
# self.assertEqual(response[0]["dataset_workflow_triggered"], ["dataset1"])
1+
# Standard library
import os
import unittest
from unittest import mock
from unittest.mock import MagicMock, patch, Mock

# Third-party
import pytest  # FIX: required by the @pytest.mark.skip decorators below; was missing (NameError at import time)
from faker import Faker
from google.cloud import storage

# Local / project
from test_shared.test_utils.database_utils import default_db_url
from main import (
    get_latest_datasets_without_validation_reports,
    get_datasets_for_validation,
    update_validation_report,
)

# Shared Faker instance used to generate fake URLs for test fixtures.
faker = Faker()
17+
18+
19+
def _create_storage_blob(name, metadata):
    """Build a MagicMock standing in for a google.cloud storage blob.

    The mock is spec'd against ``storage.Blob``, carries the given ``name``
    and ``metadata``, and exposes a no-op ``patch()`` method so callers can
    persist metadata changes without touching real GCS.
    """
    mock_blob = MagicMock(spec=storage.Blob)
    mock_blob.name = name
    mock_blob.metadata = metadata
    mock_blob.patch = Mock(return_value=None)
    return mock_blob
26+
27+
28+
class TestUpdateReportProcessor(unittest.TestCase):
    """Tests for the validation-report update pipeline in main.py."""

    # NOTE(review): use unittest.skip rather than @pytest.mark.skip — pytest
    # was never imported in this module (NameError at class-creation time),
    # and unittest.skip works under both the unittest and pytest runners.
    @unittest.skip("Skip this test for now")
    def test_get_latest_datasets(self):
        """get_latest_datasets_without_validation_reports issues one query on the session."""
        session = MagicMock()
        session.query.return_value.filter.return_value.all = MagicMock()
        get_latest_datasets_without_validation_reports(session, "1.0.1")
        session.query.assert_called_once()

    @unittest.skip("Skip this test for now")
    @patch("google.cloud.storage.Client")
    def test_get_datasets_for_validation(self, mock_client):
        """Only datasets whose zip blob exists in storage are kept for validation."""
        test_dataset_id = "dataset1"
        test_feed_id = "feed1"

        def create_dataset_blob(name, exists):
            # Mock blob whose exists() reports the given availability.
            mock_dataset_blob = Mock(spec=storage.Blob)
            mock_dataset_blob.exists.return_value = exists
            mock_dataset_blob.name = name
            return mock_dataset_blob

        # Setup mock storage client and bucket
        mock_bucket = Mock()
        mock_client.return_value.bucket.return_value = mock_bucket

        # Setup mock blobs and existence results
        mock_dataset_blob_exists = create_dataset_blob(
            f"{test_feed_id}/{test_dataset_id}/{test_dataset_id}.zip", True
        )
        mock_dataset_blob_not_exists = create_dataset_blob(
            f"{test_feed_id}/{test_dataset_id}1/{test_dataset_id}1.zip", False
        )

        # bucket.blob(name) resolves to the matching mock; an unknown name
        # (e.g. the nonexistent dataset) would raise KeyError if looked up.
        mock_bucket.blob.side_effect = lambda name: {
            f"{test_feed_id}/{test_dataset_id}/{test_dataset_id}.zip": mock_dataset_blob_exists,
            f"{test_feed_id}/{test_dataset_id}1/{test_dataset_id}1.zip": mock_dataset_blob_not_exists,
        }[name]

        # Input parameters
        nonexistent_dataset = (test_feed_id, f"{test_dataset_id}2")
        latest_datasets = [
            (test_feed_id, test_dataset_id),
            (test_feed_id, f"{test_dataset_id}1"),
            nonexistent_dataset,
        ]

        result = get_datasets_for_validation(latest_datasets)

        # Assertions
        self.assertEqual(len(result), 1)
        mock_dataset_blob_exists.exists.assert_called_once()
        mock_dataset_blob_not_exists.exists.assert_called_once()
        # Only the existing dataset should be returned
        self.assertEqual(result[0][0], test_feed_id)
        self.assertEqual(result[0][1], test_dataset_id)

    # Environment expected by main.update_validation_report; SLEEP_TIME=0
    # keeps the retry loop from delaying the test.
    @mock.patch.dict(
        os.environ,
        {
            "FEEDS_DATABASE_URL": default_db_url,
            "WEB_VALIDATOR_URL": faker.url(),
            "MAX_RETRY": "2",
            "BATCH_SIZE": "2",
            "SLEEP_TIME": "0",
        },
    )
    @patch(
        "main.get_latest_datasets_without_validation_reports",
        autospec=True,
        return_value=[("feed1", "dataset1")],
    )
    @patch(
        "main.get_datasets_for_validation",
        autospec=True,
        return_value=[("feed1", "dataset1")],
    )
    @patch("google.cloud.storage.Blob", autospec=True)
    @patch("requests.get", autospec=True)
    @patch("google.cloud.storage.Client", autospec=True)
    @patch("main.Logger", autospec=True)
    @patch("google.cloud.workflows_v1.WorkflowsClient", autospec=True)
    @patch("google.cloud.workflows.executions_v1.ExecutionsClient", autospec=True)
    @patch("google.cloud.workflows.executions_v1.Execution", autospec=True)
    def test_update_validation_report(
        self,
        # Mock parameters arrive bottom-up relative to the decorator stack.
        execution_mock,
        executions_client_mock,
        workflows_client_mock,
        mock_logger,
        mock_client,
        mock_get,
        mock_blob,
        mock_get_latest_datasets,
        mock_get_datasets_for_validation,
    ):
        """update_validation_report returns a 200 response listing the triggered dataset workflows."""
        # Validator version endpoint response.
        mock_get.return_value.json.return_value = {"version": "1.0.1"}
        mock_request = MagicMock()
        mock_request.get_json.return_value = {"validator_url": faker.url()}
        response = update_validation_report(mock_request)
        # response is a (body_dict, status_code) pair.
        self.assertTrue("message" in response[0])
        self.assertTrue("dataset_workflow_triggered" in response[0])
        self.assertEqual(response[1], 200)
        self.assertEqual(response[0]["dataset_workflow_triggered"], ["dataset1"])

0 commit comments

Comments
 (0)