@@ -1,5 +1,5 @@
 import logging
-from datetime import timedelta, datetime
+from datetime import timedelta, datetime, timezone
 import pytest

 # pylint: disable-next=import-private-name
@@ -112,35 +112,39 @@ def test_dbfs_fixture(make_mounted_location):
 def test_remove_after_tag_jobs(ws, env_or_skip, make_job):
     new_job = make_job(spark_conf=_SPARK_CONF)
     created_job = ws.jobs.get(new_job.job_id)
-    assert "RemoveAfter" in created_job.settings.tags
+    job_tags = created_job.settings.tags
+    assert "RemoveAfter" in job_tags

-    purge_time = datetime.strptime(created_job.settings.tags.get("RemoveAfter"), "%Y%m%d%H")
-    assert purge_time - datetime.utcnow() < timedelta(hours=1, minutes=15)
+    purge_time = datetime.strptime(job_tags["RemoveAfter"], "%Y%m%d%H").replace(tzinfo=timezone.utc)
+    assert (purge_time - datetime.now(timezone.utc)) < (TEST_RESOURCE_PURGE_TIMEOUT + timedelta(hours=1))  # noqa: F405


 def test_remove_after_tag_clusters(ws, env_or_skip, make_cluster):
     new_cluster = make_cluster(single_node=True, instance_pool_id=env_or_skip('TEST_INSTANCE_POOL_ID'))
     created_cluster = ws.clusters.get(new_cluster.cluster_id)
-    assert "RemoveAfter" in created_cluster.custom_tags
-    purge_time = datetime.strptime(created_cluster.custom_tags.get("RemoveAfter"), "%Y%m%d%H")
-    assert purge_time - datetime.utcnow() < timedelta(hours=1, minutes=15)
+    cluster_tags = created_cluster.custom_tags
+    assert "RemoveAfter" in cluster_tags
+    purge_time = datetime.strptime(cluster_tags["RemoveAfter"], "%Y%m%d%H").replace(tzinfo=timezone.utc)
+    assert (purge_time - datetime.now(timezone.utc)) < (TEST_RESOURCE_PURGE_TIMEOUT + timedelta(hours=1))  # noqa: F405


 def test_remove_after_tag_warehouse(ws, env_or_skip, make_warehouse):
     new_warehouse = make_warehouse()
     created_warehouse = ws.warehouses.get(new_warehouse.response.id)
-    custom_tags = created_warehouse.tags.as_dict()
-    assert 'RemoveAfter' in custom_tags.get("custom_tags")[0]["key"]
-    purge_time = datetime.strptime(custom_tags.get("custom_tags")[0]["value"], "%Y%m%d%H")
-    assert purge_time - datetime.utcnow() < timedelta(hours=1, minutes=15)
+    warehouse_tags = created_warehouse.tags.as_dict()
+    assert warehouse_tags["custom_tags"][0]["key"] == "RemoveAfter"
+    remove_after_tag = warehouse_tags["custom_tags"][0]["value"]
+    purge_time = datetime.strptime(remove_after_tag, "%Y%m%d%H").replace(tzinfo=timezone.utc)
+    assert (purge_time - datetime.now(timezone.utc)) < (TEST_RESOURCE_PURGE_TIMEOUT + timedelta(hours=1))  # noqa: F405


 def test_remove_after_tag_instance_pool(ws, make_instance_pool):
     new_instance_pool = make_instance_pool()
     created_instance_pool = ws.instance_pools.get(new_instance_pool.instance_pool_id)
-    assert "RemoveAfter" in created_instance_pool.custom_tags
-    purge_time = datetime.strptime(created_instance_pool.custom_tags.get("RemoveAfter"), "%Y%m%d%H")
-    assert purge_time - datetime.utcnow() < timedelta(hours=1, minutes=15)
+    pool_tags = created_instance_pool.custom_tags
+    assert "RemoveAfter" in pool_tags
+    purge_time = datetime.strptime(pool_tags["RemoveAfter"], "%Y%m%d%H").replace(tzinfo=timezone.utc)
+    assert (purge_time - datetime.now(timezone.utc)) < (TEST_RESOURCE_PURGE_TIMEOUT + timedelta(hours=1))  # noqa: F405


 def test_remove_after_property_table(ws, make_table, sql_backend):
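
All four tests now apply the same timezone-aware check: parse the hour-resolution RemoveAfter tag, pin it to UTC, and require it to fall within TEST_RESOURCE_PURGE_TIMEOUT plus an hour of slack. A minimal sketch of that pattern as a standalone helper follows; the helper name and the one-hour placeholder value are illustrative only, not part of this change.

from datetime import datetime, timedelta, timezone

# Placeholder for the constant the tests pull in via a star import (hence the
# `# noqa: F405` in the diff); one hour is an assumed figure, not the real value.
TEST_RESOURCE_PURGE_TIMEOUT = timedelta(hours=1)


def assert_purges_within_timeout(remove_after: str) -> None:
    # The RemoveAfter tag is an hour-resolution UTC timestamp, e.g. "2024093015".
    # Parsing it naively and then attaching timezone.utc keeps the comparison
    # timezone-aware, unlike the removed datetime.utcnow() variant.
    purge_time = datetime.strptime(remove_after, "%Y%m%d%H").replace(tzinfo=timezone.utc)
    # Allow the purge timeout plus an hour of slack, matching the assertions above.
    assert (purge_time - datetime.now(timezone.utc)) < (TEST_RESOURCE_PURGE_TIMEOUT + timedelta(hours=1))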