Skip to content

Commit 4db95cc

Browse files
chore: duplicate the uploaded assets for duplicated pages (#6311)
* chore: duplicate the uploaded assets in the entity
* chore: change the filtering logic
* chore: capture the exception
1 parent 6aa139a commit 4db95cc

File tree

4 files changed

+177
-1
lines changed

4 files changed

+177
-1
lines changed

apiserver/plane/app/views/page/base.py

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@
4040
from plane.bgtasks.page_transaction_task import page_transaction
4141
from plane.bgtasks.page_version_task import page_version
4242
from plane.bgtasks.recent_visited_task import recent_visited_task
43-
43+
from plane.bgtasks.copy_s3_object import copy_s3_objects
4444

4545
def unarchive_archive_page_and_descendants(page_id, archived_at):
4646
# Your SQL query
@@ -597,6 +597,16 @@ def post(self, request, slug, project_id, page_id):
597597
page_transaction.delay(
598598
{"description_html": page.description_html}, None, page.id
599599
)
600+
601+
# Copy the s3 objects uploaded in the page
602+
copy_s3_objects.delay(
603+
entity_name="PAGE",
604+
entity_identifier=page.id,
605+
project_id=project_id,
606+
slug=slug,
607+
user_id=request.user.id,
608+
)
609+
600610
page = (
601611
Page.objects.filter(pk=page.id)
602612
.annotate(
Lines changed: 150 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,150 @@
1+
# Python imports
2+
import uuid
3+
import base64
4+
import requests
5+
from bs4 import BeautifulSoup
6+
7+
# Django imports
8+
from django.conf import settings
9+
10+
# Module imports
11+
from plane.db.models import FileAsset, Page, Issue
12+
from plane.utils.exception_logger import log_exception
13+
from plane.settings.storage import S3Storage
14+
from celery import shared_task
15+
16+
17+
def get_entity_id_field(entity_type, entity_id):
    """Return the FK keyword argument linking a FileAsset to its parent entity.

    Maps a ``FileAsset.EntityTypeContext`` value to the name of the
    foreign-key field that should receive ``entity_id``. Unknown entity
    types yield an empty dict, so a caller's ``**`` expansion is a no-op.
    """
    field_by_type = {
        FileAsset.EntityTypeContext.WORKSPACE_LOGO: "workspace_id",
        FileAsset.EntityTypeContext.PROJECT_COVER: "project_id",
        FileAsset.EntityTypeContext.USER_AVATAR: "user_id",
        FileAsset.EntityTypeContext.USER_COVER: "user_id",
        FileAsset.EntityTypeContext.ISSUE_ATTACHMENT: "issue_id",
        FileAsset.EntityTypeContext.ISSUE_DESCRIPTION: "issue_id",
        FileAsset.EntityTypeContext.PAGE_DESCRIPTION: "page_id",
        FileAsset.EntityTypeContext.COMMENT_DESCRIPTION: "comment_id",
        FileAsset.EntityTypeContext.DRAFT_ISSUE_DESCRIPTION: "draft_issue_id",
    }
    field_name = field_by_type.get(entity_type)
    return {} if field_name is None else {field_name: entity_id}
32+
33+
34+
def extract_asset_ids(html, tag):
    """Collect the ``src`` attribute of every ``tag`` element in ``html``.

    Args:
        html: HTML fragment to scan (an entity's ``description_html``).
        tag: Tag name to search for, e.g. ``"image-component"``.

    Returns:
        List of non-empty ``src`` values; an empty list when parsing fails
        (the exception is logged, never raised).
    """
    try:
        soup = BeautifulSoup(html, "html.parser")
        # The loop variable must not shadow the ``tag`` parameter: the
        # original shadowed it and only worked because find_all()'s argument
        # is evaluated before the first loop binding.
        return [node.get("src") for node in soup.find_all(tag) if node.get("src")]
    except Exception as e:
        log_exception(e)
        return []
41+
42+
43+
def replace_asset_ids(html, tag, duplicated_assets):
    """Rewrite ``src`` attributes in ``html`` from old to new asset ids.

    Args:
        html: HTML fragment to rewrite.
        tag: Tag name whose ``src`` attributes are rewritten.
        duplicated_assets: Iterable of dicts with ``old_asset_id`` and
            ``new_asset_id`` keys (as built by ``copy_s3_objects``).

    Returns:
        The rewritten HTML, or the original ``html`` unchanged when parsing
        fails (the exception is logged, never raised).
    """
    try:
        # Build the lookup once — O(n + m) instead of the original
        # nested-loop O(n * m) scan over every (tag, asset) pair.
        new_id_by_old = {
            asset["old_asset_id"]: asset["new_asset_id"]
            for asset in duplicated_assets
        }
        soup = BeautifulSoup(html, "html.parser")
        for node in soup.find_all(tag):
            src = node.get("src")
            if src in new_id_by_old:
                node["src"] = new_id_by_old[src]
        return str(soup)
    except Exception as e:
        log_exception(e)
        return html
54+
55+
56+
def update_description(entity, duplicated_assets, tag):
    """Persist the entity's ``description_html`` with new asset ids swapped in.

    Delegates the rewriting to ``replace_asset_ids``, saves the entity, and
    returns the updated HTML so the caller can forward it to the live server.
    """
    new_html = replace_asset_ids(entity.description_html, tag, duplicated_assets)
    entity.description_html = new_html
    entity.save()
    return new_html
61+
62+
63+
# Get the description binary and description from the live server
def sync_with_external_service(entity_name, description_html):
    """Ask the live server to convert ``description_html`` to other formats.

    Posts to ``{LIVE_BASE_URL}/convert-document/`` and, on HTTP 200, returns
    the parsed JSON payload (expected keys: ``description`` and
    ``description_binary``). Returns ``{}`` on any non-200 response or
    request error (errors are logged, never raised).
    """
    try:
        payload = {
            "description_html": description_html,
            # Pages use the "rich" conversion variant; issues use "document".
            "variant": "rich" if entity_name == "PAGE" else "document",
        }
        response = requests.post(
            f"{settings.LIVE_BASE_URL}/convert-document/",
            json=payload,
            # A hung live server must not block the celery worker forever.
            timeout=30,
        )
        if response.status_code == 200:
            return response.json()
    except requests.RequestException as e:
        log_exception(e)
    return {}
80+
81+
82+
@shared_task
def copy_s3_objects(entity_name, entity_identifier, project_id, slug, user_id):
    """Duplicate the uploaded assets referenced by an entity's description.

    Steps:
        1. Extract asset ids from the entity's ``description_html``
           (``src`` of ``<image-component>`` tags).
        2. Duplicate each matching FileAsset row and copy the underlying
           S3 object to a fresh key.
        3. Rewrite ``description_html`` so it references the new asset ids.
        4. Ask the live server to regenerate ``description`` /
           ``description_binary`` for the updated HTML.

    Args:
        entity_name: ``"PAGE"`` or ``"ISSUE"``.
        entity_identifier: Primary key of the (already duplicated) entity.
        project_id: Project the assets belong to.
        slug: Workspace slug (currently unused; kept for task-signature
            stability with existing ``.delay(...)`` callers).
        user_id: User recorded as creator of the duplicated assets.

    This is a best-effort background task: any exception is logged and
    swallowed, and the task always returns ``None`` (the original returned
    ``None`` on success but ``[]`` on failure — now consistent).
    """
    try:
        model_class = {"PAGE": Page, "ISSUE": Issue}.get(entity_name)
        if not model_class:
            raise ValueError(f"Unsupported entity_name: {entity_name}")

        entity = model_class.objects.get(id=entity_identifier)
        asset_ids = extract_asset_ids(entity.description_html, "image-component")

        workspace = entity.workspace
        storage = S3Storage()
        original_assets = FileAsset.objects.filter(
            workspace=workspace, project_id=project_id, id__in=asset_ids
        )

        duplicated_assets = []
        for original_asset in original_assets:
            destination_key = (
                f"{workspace.id}/{uuid.uuid4().hex}-"
                f"{original_asset.attributes.get('name')}"
            )
            duplicated_asset = FileAsset.objects.create(
                attributes={
                    "name": original_asset.attributes.get("name"),
                    "type": original_asset.attributes.get("type"),
                    "size": original_asset.attributes.get("size"),
                },
                asset=destination_key,
                size=original_asset.size,
                workspace=workspace,
                created_by_id=user_id,
                entity_type=original_asset.entity_type,
                project_id=project_id,
                storage_metadata=original_asset.storage_metadata,
                # Attach the copy to the *new* entity, not the source one.
                **get_entity_id_field(original_asset.entity_type, entity_identifier),
            )
            storage.copy_object(original_asset.asset, destination_key)
            duplicated_assets.append(
                {
                    "new_asset_id": str(duplicated_asset.id),
                    "old_asset_id": str(original_asset.id),
                }
            )

        if duplicated_assets:
            FileAsset.objects.filter(
                pk__in=[item["new_asset_id"] for item in duplicated_assets]
            ).update(is_uploaded=True)
            updated_html = update_description(
                entity, duplicated_assets, "image-component"
            )
            external_data = sync_with_external_service(entity_name, updated_html)

            if external_data:
                entity.description = external_data.get("description")
                description_binary = external_data.get("description_binary")
                if description_binary is not None:
                    # b64decode(None) raises TypeError; the original crashed
                    # here (and skipped the save) when the key was absent.
                    entity.description_binary = base64.b64decode(description_binary)
                entity.save()
    except Exception as e:
        # Task boundary: log and swallow so celery does not retry forever.
        log_exception(e)

apiserver/plane/settings/common.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -336,6 +336,8 @@
336336
ADMIN_BASE_URL = os.environ.get("ADMIN_BASE_URL", None)
337337
SPACE_BASE_URL = os.environ.get("SPACE_BASE_URL", None)
338338
APP_BASE_URL = os.environ.get("APP_BASE_URL")
339+
LIVE_BASE_URL = os.environ.get("LIVE_BASE_URL")
340+
339341

340342
HARD_DELETE_AFTER_DAYS = int(os.environ.get("HARD_DELETE_AFTER_DAYS", 60))
341343

apiserver/plane/settings/storage.py

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -151,3 +151,17 @@ def get_object_metadata(self, object_name):
151151
"ETag": response.get("ETag"),
152152
"Metadata": response.get("Metadata", {}),
153153
}
154+
155+
def copy_object(self, object_name, new_object_name):
    """Duplicate an S3 object within the configured bucket.

    Returns the boto3 ``copy_object`` response on success, or ``None``
    when the copy fails (the ClientError is logged, not raised).
    """
    source = {"Bucket": self.aws_storage_bucket_name, "Key": object_name}
    try:
        return self.s3_client.copy_object(
            Bucket=self.aws_storage_bucket_name,
            CopySource=source,
            Key=new_object_name,
        )
    except ClientError as e:
        log_exception(e)
        return None

0 commit comments

Comments
 (0)