Skip to content

Commit 8f3b8a3

Browse files
gost-serbSerhii Betin
andauthored
Add tests for oval generation (#1100)
Also, updated preflight and run-tests pipeline. Resolves: AlmaLinux/build-system#413 --------- Co-authored-by: Serhii Betin <[email protected]>
1 parent 2cd1d0f commit 8f3b8a3

File tree

8 files changed

+4462
-2
lines changed

8 files changed

+4462
-2
lines changed

.github/workflows/preflight.yml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,13 @@ jobs:
4848
repository: AlmaLinux/albs-sign-file
4949
path: albs-sign-file
5050

51+
- name: Checkout oval-processor
52+
uses: actions/checkout@v4
53+
with:
54+
repository: AlmaLinux/oval-processor
55+
path: oval-processor
56+
ssh-key: ${{ secrets.OVAL_PROCESSOR_PRIVKEY }}
57+
5158
- name: Set up Docker Buildx
5259
# https://github.com/marketplace/actions/docker-setup-buildx
5360
uses: docker/setup-buildx-action@v3
@@ -85,6 +92,7 @@ jobs:
8592
mkdir -p $REPORTS_DIR
8693
mkdir -p ../{alts,albs-frontend,albs-node,albs-sign-node,alma-tests-cacher}
8794
ln -sf tests/test-vars.env vars.env
95+
ln -sf oval-processor/almalinux almalinux
8896
8997
- name: Start services
9098
run: docker compose up -d test_db sign_file

.github/workflows/run-tests.yml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,13 @@ jobs:
2525
repository: AlmaLinux/albs-sign-file
2626
path: albs-sign-file
2727

28+
- name: Checkout oval-processor
29+
uses: actions/checkout@v4
30+
with:
31+
repository: AlmaLinux/oval-processor
32+
path: oval-processor
33+
ssh-key: ${{ secrets.OVAL_PROCESSOR_PRIVKEY }}
34+
2835
- name: Set up Docker Buildx
2936
# https://github.com/marketplace/actions/docker-setup-buildx
3037
uses: docker/setup-buildx-action@v3
@@ -49,6 +56,7 @@ jobs:
4956
mkdir -p $REPORTS_DIR
5057
mkdir -p ../{alts,albs-frontend,albs-node,albs-sign-node,alma-tests-cacher}
5158
ln -sf tests/test-vars.env vars.env
59+
ln -sf oval-processor/almalinux almalinux
5260
5361
- name: Start services
5462
run: docker compose up -d test_db sign_file
Lines changed: 158 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,158 @@
1+
import datetime
2+
import json
3+
import logging
4+
import os
5+
import sys
6+
from enum import Enum
7+
from pathlib import Path
8+
9+
import sqlalchemy
10+
from dotenv import load_dotenv
11+
from sqlalchemy import create_engine, select
12+
from sqlalchemy.dialects.postgresql import JSONB
13+
from sqlalchemy.inspection import inspect
14+
from sqlalchemy.orm import Session
15+
16+
sys.path.append(str(Path(__file__).parents[1]))
17+
load_dotenv('vars.env')
18+
19+
20+
from alws.models import NewErrataRecord, NewErrataPackage, Platform
21+
22+
23+
def serialize_model(
    instance, seen=None, exclude_models=None, exclude_columns=None
):
    """Recursively serialize a SQLAlchemy model instance to a JSON-safe dict.

    Args:
        instance: Mapped model instance to serialize.
        seen: Set of already-visited instances, used to break relationship
            cycles; a revisited instance is emitted as an ``{"id": ...}`` stub.
        exclude_models: Model classes to skip entirely (serialized as ``None``
            for scalar relationships, ``[]`` for collections).
        exclude_columns: Column keys whose values are replaced with ``None``.

    Returns:
        dict with column values (Enums as their names, datetimes as ISO
        strings, JSONB payloads dumped to JSON strings) plus recursively
        serialized relationships, or ``None`` when ``instance`` itself is
        excluded.
    """
    seen = set() if seen is None else seen
    exclude_models = [] if exclude_models is None else exclude_models
    exclude_columns = [] if exclude_columns is None else exclude_columns

    if type(instance) in exclude_models:
        return None

    # Break cycles: emit a minimal stub for anything already serialized.
    if instance in seen:
        return {"id": getattr(instance, "id", None)}

    seen.add(instance)

    logging.info('Serializing %s', instance.__class__.__name__)

    data = {}
    for column in inspect(instance).mapper.column_attrs:
        if column.key in exclude_columns:
            data[column.key] = None
            continue

        value = getattr(instance, column.key)

        if isinstance(value, Enum):
            data[column.key] = value.name
        elif isinstance(value, datetime.datetime):
            data[column.key] = value.isoformat()
        elif isinstance(column.expression.type, JSONB):
            # Store JSONB payloads as strings so round-tripping is lossless.
            data[column.key] = json.dumps(value)
        else:
            data[column.key] = value

    for relationship in inspect(instance).mapper.relationships:
        related_value = getattr(instance, relationship.key)
        if related_value is None:
            data[relationship.key] = None
        elif relationship.uselist:
            # Fix: guard against empty collections before peeking at the
            # first element (previously raised IndexError on []).
            if related_value and type(related_value[0]) in exclude_models:
                data[relationship.key] = []
                continue
            # Fix: propagate exclude_columns into the recursion for
            # consistency with exclude_models (it was silently dropped).
            data[relationship.key] = [
                serialize_model(item, seen, exclude_models, exclude_columns)
                for item in related_value
            ]
        else:
            data[relationship.key] = serialize_model(
                related_value, seen, exclude_models, exclude_columns
            )

    return data
75+
76+
77+
def deserialize_model(cls, data):
    """Rebuild a ``cls`` instance from a dict produced by ``serialize_model``.

    Column values are converted back from their serialized forms (Enum names,
    ISO datetime strings, JSON-encoded JSONB payloads); relationship payloads
    are deserialized recursively. ``None`` data yields a bare instance.
    """
    instance = cls()

    if data is None:
        return instance

    for column in inspect(cls).columns:
        raw = data.get(column.key)
        if raw is None:
            # Missing/null columns are left at their defaults.
            continue

        col_type = column.type
        if isinstance(col_type, sqlalchemy.Enum):
            # Serialized as the member name; look it back up on the class.
            raw = col_type.python_type[raw]
        elif isinstance(col_type, sqlalchemy.DateTime):
            raw = datetime.datetime.fromisoformat(raw)
        elif isinstance(col_type, JSONB) and isinstance(raw, str):
            raw = json.loads(raw)

        setattr(instance, column.key, raw)

    for relationship in inspect(cls).relationships:
        payload = data.get(relationship.key)
        if payload is None:
            continue

        target_cls = relationship.mapper.class_
        if isinstance(payload, dict):
            setattr(
                instance,
                relationship.key,
                deserialize_model(target_cls, payload),
            )
        elif isinstance(payload, list):
            setattr(
                instance,
                relationship.key,
                [deserialize_model(target_cls, item) for item in payload],
            )

    return instance
118+
119+
120+
if __name__ == '__main__':
    # Export a fixed set of NewErrataRecord rows to a JSON sample file used
    # by the OVAL-generation tests.
    logging.basicConfig(level=logging.INFO, handlers=[logging.StreamHandler()])

    sqlalchemy_url = os.getenv('SQLALCHEMY_URL')
    # Fail fast with a clear message instead of an obscure create_engine
    # error when the URL is missing.
    if not sqlalchemy_url:
        logging.error('SQLALCHEMY_URL environment variable is not set')
        sys.exit(1)

    engine = create_engine(sqlalchemy_url, echo=False)
    # Context manager guarantees the session (and its connection) is closed;
    # previously the session was never closed.
    with Session(engine) as session:
        stmt = select(NewErrataRecord).where(
            NewErrataRecord.id.in_((
                'ALSA-2025:0281',
                'ALSA-2025:0325',
                'ALSA-2024:9644',
                'ALSA-2024:6964',
            ))
        )
        errata_records = session.execute(stmt).scalars().fetchall()

        # Serialize while the session is open so lazy-loaded relationships
        # can still be fetched.
        serialized_records = [
            serialize_model(
                rec,
                exclude_models=[NewErrataPackage, Platform],
                exclude_columns=[
                    'last_release_log',
                    'original_title',
                    'original_criteria',
                    'original_tests',
                    'original_objects',
                    'original_states',
                    'original_variables',
                ],
            )
            for rec in errata_records
        ]

    dst_path = (
        Path(__file__).parents[1] / 'tests/samples/new_errata_records.json'
    )
    with dst_path.open('w', encoding='utf-8') as f:
        json.dump(serialized_records, f, indent=4)

tests/README.md

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,9 @@
55
`fixtures/` - a directory with pytest fixtures, new module should be also added in `conftest.pytest_plugins`
66

77
`mock_classes.py` - a module which contains a base class with an `httpx` request method, setup logic for each test suite, and HTTP status codes
8+
9+
`../scripts/serialize_new_errata_rec.py` - a helper module for exporting/importing NewErrataRecords from a DB to a JSON file.
10+
811
## How to run tests locally
912
1. Adjust variables in `vars.env`
1013
```
@@ -29,7 +32,20 @@
2932
docker compose up -d sign_file
3033
```
3134

32-
4. Run `pytest` within `web_server_tests` container
35+
4. Update `tests/samples/new_errata_records.json`:
36+
Needed only on major changes in `NewErrataRecord` model that affect OVAL generation.
37+
38+
Note: In order to export data you'll need a DB with some data,\
38+
ideally the data should be as similar to production as possible
40+
```bash
41+
# set SQLALCHEMY_URL if it was not done in step 1
42+
# export SQLALCHEMY_URL="postgresql+psycopg2://<your-db-address>"
43+
python3 scripts/serialize_new_errata_rec.py
44+
```
45+
Also, you'll need to update `tests/samples/test_oval.xml`.\
46+
For that uncomment block inside the test case in `tests/test_oval/test_oval_generation.py` file
47+
48+
5. Run `pytest` within `web_server_tests` container
3349
```bash
3450
docker compose run --rm web_server_tests pytest -v
3551
```

tests/fixtures/errata.py

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,15 @@
11
import datetime
2+
from pathlib import Path
23
import typing
4+
import json
35

46
import pytest
57
from sqlalchemy.ext.asyncio import AsyncSession
68

79
from alws.crud.errata import create_errata_record, create_new_errata_record
810
from alws.dramatiq.errata import create_new_errata
9-
from alws.schemas.errata_schema import BaseErrataRecord
11+
from alws.models import NewErrataRecord
12+
from scripts.serialize_new_errata_rec import deserialize_model
1013

1114

1215
@pytest.fixture(
@@ -128,6 +131,7 @@ async def func(*args, **kwargs):
128131
await async_session.commit()
129132
yield
130133

134+
131135
@pytest.fixture
132136
def pulp_updateinfos():
133137
return [
@@ -1222,3 +1226,25 @@ def pulp_updateinfos():
12221226
"reboot_suggested": False,
12231227
},
12241228
]
1229+
1230+
1231+
@pytest.fixture
def new_errata_records_samples():
    """Load serialized NewErrataRecord samples and rebuild model instances."""
    samples_path = (
        Path(__file__).parents[1] / 'samples/new_errata_records.json'
    )
    with samples_path.open(encoding='utf-8') as fd:
        raw_records = json.load(fd)

    return [deserialize_model(NewErrataRecord, rec) for rec in raw_records]
1241+
1242+
1243+
@pytest.fixture
def oval_sample():
    """Return the reference OVAL XML document as bytes."""
    sample_path = Path(__file__).parents[1] / 'samples/test_oval.xml'
    with sample_path.open(encoding='utf-8') as fd:
        content = fd.read()

    return content.encode()

tests/samples/new_errata_records.json

Lines changed: 550 additions & 0 deletions
Large diffs are not rendered by default.

0 commit comments

Comments
 (0)