Skip to content

Commit 9ce44a3

Browse files
authored
Merge pull request #15 from GSA-TTS/tts-changes-to-keep
TTS changes to keep
2 parents be7569c + fa41f31 commit 9ce44a3

File tree

9 files changed

+1868
-1366
lines changed

9 files changed

+1868
-1366
lines changed

.github/workflows/build_docker_images.yml

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@ on:
3131
push:
3232
branches:
3333
- main
34+
- pic-stable
3435
tags: [v*]
3536

3637
jobs:
@@ -40,13 +41,13 @@ jobs:
4041
fail-fast: false
4142
matrix:
4243
include:
43-
- image_name: sartography/spiffworkflow-frontend
44+
- image_name: gsa-tts/spiffworkflow-frontend
4445
context: spiffworkflow-frontend
4546
description: "Frontend component of SpiffWorkflow, a software development platform for building, running, and monitoring executable diagrams"
46-
- image_name: sartography/spiffworkflow-backend
47+
- image_name: gsa-tts/spiffworkflow-backend
4748
context: spiffworkflow-backend
4849
description: "Backend component of SpiffWorkflow, a software development platform for building, running, and monitoring executable diagrams"
49-
- image_name: sartography/connector-proxy-demo
50+
- image_name: gsa-tts/connector-proxy-demo
5051
context: connector-proxy-demo
5152
description: "Connector proxy component of SpiffWorkflow, providing integration capabilities for external services"
5253

spiffworkflow-backend/bin/boot_server_in_docker

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,9 @@ if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then
102102
uv run python bin/refresh_all_caches.py
103103
fi
104104

105+
# uv run python bin/bootstrap.py
106+
## TODO: turn back on
107+
105108
log_info "Starting gunicorn server"
106109
export IS_GUNICORN="true"
107110
# THIS MUST BE THE LAST COMMAND!

spiffworkflow-backend/bin/wait_for_db_schema_migrations

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,8 @@ while true; do
1616
break
1717
else
1818
echo "Waiting for db migrations to finish"
19-
echo "current revision: ${current_db_migration_head}"
20-
echo "head revision: ${current_db_migration_revision}"
19+
echo "current revision: ${current_db_migration_revision}"
20+
echo "head revision: ${current_db_migration_head}"
2121
sleep 2
2222
fi
2323
done
Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
"""empty message
2+
3+
Revision ID: 8b258f77eafe
4+
Revises: 4efc3d8655be
5+
Create Date: 2026-01-12 16:59:26.261161
6+
7+
"""
8+
9+
from alembic import op
10+
import sqlalchemy as sa
11+
from sqlalchemy.dialects import mysql
12+
13+
# revision identifiers, used by Alembic.
14+
revision = "8b258f77eafe"
15+
down_revision = "4efc3d8655be"
16+
branch_labels = None
17+
depends_on = None
18+
19+
20+
def upgrade():
21+
# ### commands auto generated by Alembic - please adjust! ###
22+
with op.batch_alter_table("task_draft_data", schema=None) as batch_op:
23+
batch_op.add_column(sa.Column("task_guid", sa.String(length=36), nullable=False))
24+
batch_op.drop_constraint("process_instance_task_definition_pk", type_="primary")
25+
batch_op.drop_constraint("process_instance_task_definition_unique", type_="unique")
26+
27+
28+
batch_op.create_unique_constraint(
29+
"process_instance_task_unique", ["process_instance_id", "task_guid"]
30+
)
31+
batch_op.create_primary_key(
32+
"process_instance_task_pk", ["process_instance_id", "task_guid"]
33+
)
34+
35+
batch_op.drop_index("ix_task_draft_data_task_definition_id_path")
36+
batch_op.create_index("ix_task_draft_data_task_guid", ["task_guid"], unique=False)
37+
38+
batch_op.create_foreign_key("task_draft_data_task_guid_fk", "task", ["task_guid"], ["guid"])
39+
batch_op.drop_column("task_definition_id_path")
40+
41+
# ### end Alembic commands ###
42+
43+
44+
def downgrade():
45+
# ### commands auto generated by Alembic - please adjust! ###
46+
with op.batch_alter_table("task_draft_data", schema=None) as batch_op:
47+
batch_op.add_column(
48+
sa.Column(
49+
"task_definition_id_path", mysql.VARCHAR(length=255), nullable=False
50+
)
51+
)
52+
batch_op.drop_constraint(None, type_="foreignkey")
53+
batch_op.drop_constraint("process_instance_task_unique", type_="unique")
54+
batch_op.drop_index(batch_op.f("ix_task_draft_data_task_guid"))
55+
batch_op.create_unique_constraint(
56+
batch_op.f("process_instance_task_definition_unique"),
57+
["process_instance_id", "task_definition_id_path"],
58+
)
59+
batch_op.create_index(
60+
batch_op.f("ix_task_draft_data_task_definition_id_path"),
61+
["task_definition_id_path"],
62+
unique=False,
63+
)
64+
batch_op.drop_column("task_guid")
65+
66+
# ### end Alembic commands ###

spiffworkflow-backend/src/spiffworkflow_backend/models/task_draft_data.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -15,11 +15,12 @@
1515
from spiffworkflow_backend.models.db import db
1616
from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401
1717
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
18+
from spiffworkflow_backend.models.task import TaskModel
1819

1920

2021
class TaskDraftDataDict(TypedDict):
2122
process_instance_id: int
22-
task_definition_id_path: str
23+
task_guid: str
2324
saved_form_data_hash: str | None
2425

2526

@@ -29,20 +30,19 @@ class TaskDraftDataModel(SpiffworkflowBaseDBModel):
2930
__table_args__ = (
3031
UniqueConstraint(
3132
"process_instance_id",
32-
"task_definition_id_path",
33-
name="process_instance_task_definition_unique",
33+
"task_guid",
34+
name="process_instance_task_unique",
3435
),
3536
PrimaryKeyConstraint(
3637
"process_instance_id",
37-
"task_definition_id_path",
38-
name="process_instance_task_definition_pk",
38+
"task_guid",
39+
name="process_instance_task_pk",
3940
),
4041
)
4142

4243
process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False, index=True) # type: ignore
4344

44-
# a colon delimited path of bpmn_process_definition_ids for a given task
45-
task_definition_id_path: str = db.Column(db.String(255), nullable=False, index=True)
45+
task_guid: str = db.Column(ForeignKey(TaskModel.guid), nullable=False, index=True)
4646

4747
saved_form_data_hash: str | None = db.Column(db.String(255), nullable=True, index=True)
4848

@@ -66,7 +66,7 @@ def insert_or_update_task_draft_data_dict(cls, task_draft_data_dict: TaskDraftDa
6666
else:
6767
insert_stmt = postgres_insert(TaskDraftDataModel).values([task_draft_data_dict])
6868
on_duplicate_key_stmt = insert_stmt.on_conflict_do_update(
69-
index_elements=["process_instance_id", "task_definition_id_path"],
69+
index_elements=["process_instance_id", "task_guid"],
7070
set_={"saved_form_data_hash": task_draft_data_dict["saved_form_data_hash"]},
7171
)
7272
db.session.execute(on_duplicate_key_stmt)

spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -131,6 +131,7 @@ def token() -> Response:
131131
{
132132
"iss": f"{host_url}{url_for('openid.index')}",
133133
"aud": client_id,
134+
"azp": client_id[0],
134135
"iat": math.floor(time.time()),
135136
"exp": round(time.time()) + two_days,
136137
"sub": user_name,
@@ -145,6 +146,7 @@ def token() -> Response:
145146
"access_token": id_token,
146147
"id_token": id_token,
147148
"refresh_token": id_token,
149+
"token_type": "Bearer",
148150
}
149151
return make_response(jsonify(response), 200)
150152

spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -755,11 +755,9 @@ def task_save_draft(
755755
return make_response(jsonify({"ok": True}), 200)
756756

757757
task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id)
758-
full_bpmn_process_id_path = TaskService.full_bpmn_process_path(task_model.bpmn_process, "id")
759-
task_definition_id_path = f"{':'.join(map(str, full_bpmn_process_id_path))}:{task_model.task_definition_id}"
760758
task_draft_data_dict: TaskDraftDataDict = {
761759
"process_instance_id": process_instance.id,
762-
"task_definition_id_path": task_definition_id_path,
760+
"task_guid": task_guid,
763761
"saved_form_data_hash": None,
764762
}
765763

spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -656,15 +656,11 @@ def full_bpmn_process_path(cls, bpmn_process: BpmnProcessModel, definition_colum
656656
def task_draft_data_from_task_model(
657657
cls, task_model: TaskModel, create_if_not_exists: bool = False
658658
) -> TaskDraftDataModel | None:
659-
full_bpmn_process_id_path = cls.full_bpmn_process_path(task_model.bpmn_process, "id")
660-
task_definition_id_path = f"{':'.join(map(str, full_bpmn_process_id_path))}:{task_model.task_definition_id}"
661659
task_draft_data: TaskDraftDataModel | None = TaskDraftDataModel.query.filter_by(
662-
process_instance_id=task_model.process_instance_id, task_definition_id_path=task_definition_id_path
660+
process_instance_id=task_model.process_instance_id, task_guid=task_model.guid
663661
).first()
664662
if task_draft_data is None and create_if_not_exists:
665-
task_draft_data = TaskDraftDataModel(
666-
process_instance_id=task_model.process_instance_id, task_definition_id_path=task_definition_id_path
667-
)
663+
task_draft_data = TaskDraftDataModel(process_instance_id=task_model.process_instance_id, task_guid=task_model.guid)
668664
return task_draft_data
669665

670666
@classmethod

0 commit comments

Comments (0)