Commit d12aabf

Integration of GitHub Actions workflows (#2)
1 parent 99e9f97 commit d12aabf

File tree

21 files changed: +373 −180 lines changed


.flake8

Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
+[flake8]
+max-line-length = 100
+extend-ignore = E203
+exclude =
+    .git,
+    __pycache__,
+    venv,
+    .venv,
+    .mypy_cache,
+    .pytest_cache

.github/workflows/ci.yml

Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
+name: CI
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+
+jobs:
+  lint-test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.10"
+
+      - name: Create .env file
+        run: |
+          echo "DATABASE_URL=sqlite:///:memory:" >> .env
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
+          pip install pytest pytest-cov flake8 mypy
+
+      - name: Lint with flake8
+        run: |
+          flake8 app tests
+
+      - name: Type check with mypy
+        run: |
+          mypy app
+
+      - name: Run tests
+        run: |
+          pytest --cov=app --cov-report=term-missing
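
The three quality gates in this workflow (flake8, mypy, pytest with coverage) can also be run locally before pushing. Below is a minimal sketch of a helper script that mirrors those steps, assuming the tools are installed in the active virtual environment; the script itself is not part of this commit.

# check.py - hypothetical local runner mirroring the CI steps above.
import subprocess
import sys

# Same commands as the "Lint", "Type check" and "Run tests" steps in ci.yml.
CHECKS = [
    ["flake8", "app", "tests"],
    ["mypy", "app"],
    ["pytest", "--cov=app", "--cov-report=term-missing"],
]

for cmd in CHECKS:
    print("==> " + " ".join(cmd))
    if subprocess.run(cmd).returncode != 0:
        sys.exit(1)  # fail fast, like a failing CI step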

.github/workflows/release.yml

Lines changed: 80 additions & 0 deletions
@@ -0,0 +1,80 @@
+name: Release
+
+on:
+  workflow_dispatch:
+    inputs:
+      version:
+        description: "Version number (e.g., 1.2.3)"
+        required: true
+
+
+env:
+  DOCKER_IMAGE_NAME: apex-dispatcher-api
+  DOCKER_REGISTRY_REGION: eu-nl
+  DOCKER_REGISTRY_URL: swr.eu-nl.otc.t-systems.com
+  DOCKER_REGISTRY_BUCKET: apex
+
+jobs:
+  publish-docker:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - name: Get release version
+        env:
+          TAG_VERSION: ${{ inputs.version }}
+        run: |
+          echo "RELEASE_VERSION=${TAG_VERSION#v*}" >> $GITHUB_ENV
+          echo $RELEASE_VERSION
+      - name: Prepare envs
+        run: |
+          echo "IMAGE_NAME=${DOCKER_IMAGE_NAME}:${RELEASE_VERSION}" >> $GITHUB_ENV
+          echo "IMAGE_TAG_RELEASE=${DOCKER_REGISTRY_URL}/${DOCKER_REGISTRY_BUCKET}/${DOCKER_IMAGE_NAME}:${RELEASE_VERSION}" >> $GITHUB_ENV
+          echo "IMAGE_TAG_LATEST=${DOCKER_REGISTRY_URL}/${DOCKER_REGISTRY_BUCKET}/${DOCKER_IMAGE_NAME}:latest" >> $GITHUB_ENV
+      - name: Build docker image
+        run: |
+          docker build . --tag $IMAGE_NAME
+      - name: Tag the image
+        run: |
+          docker tag $IMAGE_NAME $IMAGE_TAG_RELEASE
+          docker tag $IMAGE_NAME $IMAGE_TAG_LATEST
+      - name: Docker login
+        env:
+          OS_ACCESS_KEY: ${{ secrets.OS_ACCESS_KEY }}
+          OS_SECRET_KEY: ${{ secrets.OS_SECRET_KEY }}
+        run: |
+          LOGIN_KEY=`printf "$OS_ACCESS_KEY" | openssl dgst -binary -sha256 -hmac "$OS_SECRET_KEY" | od -An -vtx1 | sed 's/[ \n]//g' | sed 'N;s/\n//'`
+          docker login -u ${DOCKER_REGISTRY_REGION}@${OS_ACCESS_KEY} -p ${LOGIN_KEY} ${DOCKER_REGISTRY_URL}
+      - name: Push image to registry
+        run: |
+          docker push $IMAGE_TAG_RELEASE
+          docker push $IMAGE_TAG_LATEST
+      - name: Clean up the local images
+        run: |
+          docker rmi $IMAGE_TAG_RELEASE
+          docker rmi $IMAGE_TAG_LATEST
+          docker rmi $IMAGE_NAME
+
+  github-release:
+    runs-on: ubuntu-latest
+    needs: [publish-docker]
+    permissions:
+      contents: write
+      packages: write
+    steps:
+      - name: Create Git Tag
+        run: |
+          git config user.name "github-actions[bot]"
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+          git tag -a v${{ github.event.inputs.version }} -m "Release v${{ github.event.inputs.version }}"
+          git push origin v${{ github.event.inputs.version }}
+
+      - name: Create GitHub Release
+        uses: softprops/action-gh-release@v2
+        with:
+          tag_name: v${{ github.event.inputs.version }}
+          name: Release v${{ github.event.inputs.version }}
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+
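
The LOGIN_KEY computed in the Docker login step is an HMAC-SHA256 of the access key, keyed with the secret key and rendered as lowercase hex; the od/sed pipeline only strips whitespace from the openssl output. A sketch of the same derivation in Python, with placeholder argument names, shown only to clarify the shell one-liner:

import hashlib
import hmac


def registry_login_key(access_key: str, secret_key: str) -> str:
    # Mirrors: printf "$OS_ACCESS_KEY" | openssl dgst -binary -sha256 -hmac "$OS_SECRET_KEY" | od -An -vtx1 | sed ...
    # The access key is the message, the secret key is the HMAC key; the result is a hex digest.
    return hmac.new(secret_key.encode(), access_key.encode(), hashlib.sha256).hexdigest()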

app/config/logger.py

Lines changed: 5 additions & 4 deletions
@@ -25,11 +25,12 @@
         "uvicorn.error": {"level": "INFO"},
         "uvicorn.access": {"level": "INFO"},
         # custom API loggers
-        "app.routers": {"level": "DEBUG"}, # all your routers
-        "app.services": {"level": "DEBUG"}, # all your services
-        "app.platforms": {"level": "DEBUG"}, # all platform implementations
+        "app.routers": {"level": "DEBUG"},  # all your routers
+        "app.services": {"level": "DEBUG"},  # all your services
+        "app.platforms": {"level": "DEBUG"},  # all platform implementations
     },
 }
 
+
 def setup_logging():
-    logging.config.dictConfig(LOGGING_CONFIG)
+    logging.config.dictConfig(LOGGING_CONFIG)

app/config/settings.py

Lines changed: 23 additions & 11 deletions
@@ -1,19 +1,31 @@
-from pydantic import AnyHttpUrl, ConfigDict, Field
-from pydantic_settings import BaseSettings
+from pydantic import AnyHttpUrl, Field
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
 
 class Settings(BaseSettings):
-    app_name: str = Field("", json_schema_extra={"env": "APP_NAME"})
-    app_description: str = Field("", json_schema_extra={"env":"APP_DESCRIPTION"})
-    env: str = Field("development", json_schema_extra={"env":"APP_ENV"})
+    app_name: str = Field(
+        default="APEx Disatpcher API", json_schema_extra={"env": "APP_NAME"}
+    )
+    app_description: str = Field(
+        default="API description for the APEx Dispatcher",
+        json_schema_extra={"env": "APP_DESCRIPTION"},
+    )
+    env: str = Field(default="development", json_schema_extra={"env": "APP_ENV"})
 
     # Keycloak / OIDC
-    keycloak_server_url: AnyHttpUrl = Field(None, json_schema_extra={"env":"KEYCLOAK_SERVER_URL"})
-    keycloak_realm: str = Field(None, json_schema_extra={"env":"KEYCLOAK_REALM"})
-    keycloak_client_id: str = Field(None, json_schema_extra={"env":"KEYCLOAK_CLIENT_ID"})
-    keycloak_client_secret: str | None = Field(None, json_schema_extra={"env":"KEYCLOAK_CLIENT_SECRET"})
-
+    keycloak_server_url: AnyHttpUrl = Field(
+        default=AnyHttpUrl("https://localhost"),
+        json_schema_extra={"env": "KEYCLOAK_SERVER_URL"},
+    )
+    keycloak_realm: str = Field(default="", json_schema_extra={"env": "KEYCLOAK_REALM"})
+    keycloak_client_id: str = Field(
+        default="", json_schema_extra={"env": "KEYCLOAK_CLIENT_ID"}
+    )
+    keycloak_client_secret: str | None = Field(
+        default="", json_schema_extra={"env": "KEYCLOAK_CLIENT_SECRET"}
+    )
 
-    model_config = ConfigDict(
+    model_config = SettingsConfigDict(
         env_file=".env",
         env_file_encoding="utf-8",
         extra="allow",
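
In pydantic-settings v2, SettingsConfigDict replaces the plain ConfigDict so that env_file and the related options are actually honoured. A minimal consumption sketch, assuming the module is importable as app.config.settings (not part of this commit); values come from .env or the process environment, otherwise the defaults above apply:

# Illustrative only; the import path mirrors the file shown above.
from app.config.settings import Settings

settings = Settings()                # loads .env (if present) plus the environment
print(settings.app_name)             # falls back to the default when unset
print(settings.keycloak_server_url)  # AnyHttpUrl, defaults to https://localhost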

app/database/db.py

Lines changed: 1 addition & 1 deletion
@@ -39,4 +39,4 @@ def get_db():
         db.rollback()
         raise
     finally:
-        db.close()
+        db.close()
Lines changed: 53 additions & 28 deletions
@@ -1,51 +1,76 @@
 import datetime
 import logging
-from typing import List
-from sqlalchemy import Column, DateTime, Enum, Integer, String
+from typing import List, Optional
+from sqlalchemy import DateTime, Enum, Integer, String
 from app.database.db import Base
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session, Mapped, mapped_column
 
-from app.schemas import ProcessingStatusEnum
+from app.schemas import ProcessTypeEnum, ProcessingStatusEnum
 
 
 logger = logging.getLogger(__name__)
 
 
 class ProcessingJobRecord(Base):
-    __tablename__ = 'processing_jobs'
-
-    id = Column(Integer, primary_key=True, index=True, autoincrement=True)
-    title = Column(String, index=True)
-    label = Column(String, index=True)
-    status = Column(Enum(ProcessingStatusEnum), index=True)
-    user_id = Column(String, index=True)
-    platform_job_id = Column(String, index=True)
-    parameters = Column(String, index=False)
-    result_link = Column(String, index=False)
-    service_record = Column(String, index=True)
-    created = Column(DateTime, default=datetime.datetime.utcnow, index=True)
-    updated = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow, index=True)
-
-
-
-def save_job_to_db(db_session: Session, job: ProcessingJobRecord) -> ProcessingJobRecord:
+    __tablename__ = "processing_jobs"
+
+    id: Mapped[int] = mapped_column(
+        Integer, primary_key=True, index=True, autoincrement=True
+    )
+    title: Mapped[str] = mapped_column(String, index=True)
+    label: Mapped[ProcessTypeEnum] = mapped_column(Enum(ProcessTypeEnum), index=True)
+    status: Mapped[ProcessingStatusEnum] = mapped_column(
+        Enum(ProcessingStatusEnum), index=True
+    )
+    user_id: Mapped[str] = mapped_column(String, index=True)
+    platform_job_id: Mapped[Optional[str]] = mapped_column(String, index=True)
+    parameters: Mapped[Optional[str]] = mapped_column(String, index=False)
+    result_link: Mapped[Optional[str]] = mapped_column(String, index=False)
+    service_record: Mapped[Optional[str]] = mapped_column(String, index=True)
+    created: Mapped[datetime.datetime] = mapped_column(
+        DateTime, default=datetime.datetime.utcnow, index=True
+    )
+    updated: Mapped[datetime.datetime] = mapped_column(
+        DateTime,
+        default=datetime.datetime.utcnow,
+        onupdate=datetime.datetime.utcnow,
+        index=True,
+    )
+
+
+def save_job_to_db(
+    db_session: Session, job: ProcessingJobRecord
+) -> ProcessingJobRecord:
     """
     Save a processing job record to the database and update the ID of the job.
-
+
     :param db_session: The database session to use for saving the job.
     :param job: The ProcessingJobRecord instance to save.
-    """
+    """
     db_session.add(job)
     db_session.commit()
    db_session.refresh(job)  # Refresh to get the ID after commit
-    logger.debug("Processing job saved with ID: {job.id}")
+    logger.debug("Processing job saved with ID: {job.id}")
     return job
-
+
 
 def get_jobs_by_user_id(database: Session, user_id: str) -> List[ProcessingJobRecord]:
     logger.info(f"Retrieving all processing jobs for user {user_id}")
-    return database.query(ProcessingJobRecord).filter(ProcessingJobRecord.user_id == user_id).all()
+    return (
+        database.query(ProcessingJobRecord)
+        .filter(ProcessingJobRecord.user_id == user_id)
+        .all()
+    )
+
 
-def get_job_by_user_id(database: Session, job_id: int, user_id: str) -> ProcessingJobRecord:
+def get_job_by_user_id(
+    database: Session, job_id: int, user_id: str
+) -> Optional[ProcessingJobRecord]:
     logger.info(f"Retrieving processing job with ID {job_id} for user {user_id}")
-    return database.query(ProcessingJobRecord).filter(ProcessingJobRecord.id == job_id, ProcessingJobRecord.user_id == user_id).first()
+    return (
+        database.query(ProcessingJobRecord)
+        .filter(
+            ProcessingJobRecord.id == job_id, ProcessingJobRecord.user_id == user_id
+        )
+        .first()
+    )
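
The model now uses the SQLAlchemy 2.0 annotated declarative style: Mapped[...] plus mapped_column() replaces Column, and Optional[...] annotations mark the corresponding columns as nullable. A self-contained toy sketch of that pattern, using its own DeclarativeBase and model rather than the ProcessingJobRecord from this commit:

from typing import Optional

from sqlalchemy import Integer, String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class ToyJob(Base):
    __tablename__ = "toy_jobs"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    title: Mapped[str] = mapped_column(String, index=True)      # non-Optional -> NOT NULL
    result_link: Mapped[Optional[str]] = mapped_column(String)  # Optional -> nullable


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(ToyJob(title="demo"))
    session.commit()
    print(session.query(ToyJob).filter(ToyJob.title == "demo").first().id)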

app/main.py

Lines changed: 2 additions & 2 deletions
@@ -12,7 +12,7 @@
 app = FastAPI(
     title=settings.app_name,
     description=settings.app_description,
-    version="1.0.0",
+    version="1.0.0",
 )
 
 # Register Keycloak - must be done after FastAPI app creation
@@ -21,4 +21,4 @@
 # include routers
 app.include_router(jobs_status.router)
 app.include_router(unit_jobs.router)
-app.include_router(health.router)
+app.include_router(health.router)

app/platforms/base.py

Lines changed: 1 addition & 1 deletion
@@ -19,4 +19,4 @@ def execute_job(self, title: str, details: ServiceDetails, parameters: dict) ->
         :param parameters: The parameters required for the job execution.
         :return: Return the ID of the job that was created
         """
-        pass
+        pass

app/platforms/dispatcher.py

Lines changed: 17 additions & 9 deletions
@@ -1,7 +1,6 @@
-
 import importlib
 import logging
-import app.platforms.implementations
+import app.platforms.implementations
 import pkgutil
 from typing import Dict, Type
 from app.platforms.base import BaseProcessingPlatform
@@ -11,28 +10,37 @@
 
 logger = logging.getLogger(__name__)
 
+
 def load_processing_platforms():
     """Dynamically load all processing platform implementations."""
-    for _, module_name, _ in pkgutil.iter_modules(app.platforms.implementations.__path__):
+    for _, module_name, _ in pkgutil.iter_modules(
+        app.platforms.implementations.__path__
+    ):
         importlib.import_module(f"app.platforms.implementations.{module_name}")
 
-def register_processing_platform(service_type: ProcessTypeEnum, cls: Type[BaseProcessingPlatform]):
-    """"Register a new processing platform class for a specific service type.
-
+
+def register_processing_platform(
+    service_type: ProcessTypeEnum, cls: Type[BaseProcessingPlatform]
+):
+    """ "Register a new processing platform class for a specific service type.
+
     :param service_type: The type of service for which to register the platform.
     :param cls: The class that implements BaseProcessingPlatform.
     """
-    logger.debug(f"Registering processing platform with class {cls} for service type: {service_type}")
+    logger.debug(
+        f"Registering processing platform with class {cls} for service type: {service_type}"
+    )
     PROCESSING_PLATFORMS[service_type] = cls
 
+
 def get_processing_platform(service_type: ProcessTypeEnum) -> BaseProcessingPlatform:
     """
     Factory function to get the appropriate processing platform based on the service type.
-
+
     :param service_type: The type of service for which to get the processing platform.
     :return: An instance of a class that implements BaseProcessingPlatform.
     """
     try:
         return PROCESSING_PLATFORMS[service_type]()
     except KeyError:
-        raise ValueError(f"Unsupported service type: {service_type}")
+        raise ValueError(f"Unsupported service type: {service_type}")
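
Because register_processing_platform runs when a module under app.platforms.implementations is imported, adding a platform only requires dropping a module into that package. A hedged sketch of what such a module might look like; the file name, the enum member, and the simplified execute_job signature are assumptions, not taken from this commit:

# Hypothetical file: app/platforms/implementations/dummy.py
from app.platforms.base import BaseProcessingPlatform
from app.platforms.dispatcher import register_processing_platform
from app.schemas import ProcessTypeEnum


class DummyPlatform(BaseProcessingPlatform):
    def execute_job(self, title, details, parameters):
        # A real implementation would submit the job to the remote platform
        # and return the ID it was given there.
        return "dummy-job-id"


# Runs on import, so load_processing_platforms() registers it automatically.
register_processing_platform(ProcessTypeEnum.OPENEO, DummyPlatform)  # enum member assumed

A caller would then obtain an instance via get_processing_platform(ProcessTypeEnum.OPENEO) and invoke execute_job on it.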
