 # Python imports
 import io
+import logging
 import zipfile
 from typing import List
-import boto3
-from botocore.client import Config
 from uuid import UUID
 
 # Third party imports
 from celery import shared_task
 
 # Django imports
-from django.conf import settings
-from django.utils import timezone
 from django.db.models import Prefetch
+from django.utils import timezone
 
 # Module imports
-from plane.db.models import ExporterHistory, Issue, IssueRelation
+from plane.db.models import ExporterHistory, Issue, IssueRelation, StateGroup
+from plane.settings.storage import S3Storage
 from plane.utils.exception_logger import log_exception
-from plane.utils.exporters import Exporter, IssueExportSchema
+from plane.utils.porters import DataExporter, CSVFormatter, JSONFormatter, XLSXFormatter, IssueExportSerializer
+from plane.utils.filters import ComplexFilterBackend, IssueFilterSet
+from plane.utils.host import base_host
+from plane.utils.issue_filters import issue_filters
+
+# Logger
+logger = logging.getLogger("plane.worker")
+
+
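+# S3Storage and the filter backend expect a request object. The helpers
+# below fake just enough of a Django/DRF request to let them run inside
+# a Celery worker, where no real request exists.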
+class _FakeDjangoRequest:
+    def __init__(self):
+        from urllib.parse import urlparse
+
+        from django.http import QueryDict
+
+        self.GET = QueryDict(mutable=True)
+
+        # Get the public URL from environment variables
+        web_url = base_host(is_app=True, request=None)
+        parsed_url = urlparse(web_url)
+
+        # Add scheme and host attributes needed by S3Storage
+        self.scheme = parsed_url.scheme or "http"
+        self._host = parsed_url.netloc or "localhost"
+
+    def get_host(self):
+        return self._host
+
+
+class _FakeDRFRequest:
+    def __init__(self):
+        self._request = _FakeDjangoRequest()
+
+    @property
+    def query_params(self):
+        return self._request.GET
+
+
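+# Minimal stand-in for a DRF view: it exposes only the `filterset_class`
+# and `request` attributes that the filter backend needs.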
+class _ExportFilterView:
+    filterset_class = IssueFilterSet
+
+    def __init__(self, request):
+        self.request = request
 
 
 def create_zip_file(files: List[tuple[str, str | bytes]]) -> io.BytesIO:
@@ -39,83 +80,63 @@ def upload_to_s3(zip_file: io.BytesIO, workspace_id: UUID, token_id: str, slug:
     Upload a ZIP file to S3 and generate a presigned URL.
     """
     file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{str(timezone.now().date())}.zip"
-    expires_in = 7 * 24 * 60 * 60
 
-    if settings.USE_MINIO:
-        upload_s3 = boto3.client(
-            "s3",
-            endpoint_url=settings.AWS_S3_ENDPOINT_URL,
-            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-            config=Config(signature_version="s3v4"),
-        )
-        upload_s3.upload_fileobj(
-            zip_file,
-            settings.AWS_STORAGE_BUCKET_NAME,
-            file_name,
-            ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
-        )
+    logger.info("Uploading export file to S3: %s", file_name)
 
-        # Generate presigned url for the uploaded file with different base
-        presign_s3 = boto3.client(
-            "s3",
-            endpoint_url=(
-                f"{settings.AWS_S3_URL_PROTOCOL}//{str(settings.AWS_S3_CUSTOM_DOMAIN).replace('/uploads', '')}/"
-            ),
-            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-            config=Config(signature_version="s3v4"),
-        )
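+    # Keep the presigned download link valid for 7 days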
+    expires_in = 7 * 24 * 60 * 60
 
-        presigned_url = presign_s3.generate_presigned_url(
-            "get_object",
-            Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name},
-            ExpiresIn=expires_in,
-        )
-    else:
-        # If endpoint url is present, use it
-        if settings.AWS_S3_ENDPOINT_URL:
-            s3 = boto3.client(
-                "s3",
-                endpoint_url=settings.AWS_S3_ENDPOINT_URL,
-                aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-                aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-                config=Config(signature_version="s3v4"),
-            )
-        else:
-            s3 = boto3.client(
-                "s3",
-                region_name=settings.AWS_REGION,
-                aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-                aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-                config=Config(signature_version="s3v4"),
-            )
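+    # No request context is needed for the upload itself; a fake request
+    # is only required later, when the presigned URL is generated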
+    storage = S3Storage(request=None)
 
-        # Upload the file to S3
-        s3.upload_fileobj(
-            zip_file,
-            settings.AWS_STORAGE_BUCKET_NAME,
-            file_name,
-            ExtraArgs={"ContentType": "application/zip"},
-        )
+    # Upload the file to S3
+    is_uploaded = storage.upload_file(
+        file_obj=zip_file,
+        object_name=file_name,
+        content_type="application/zip",
+    )
+    if not is_uploaded:
+        logger.error("Failed to upload export file to S3: %s", file_name)
+        # Mark the export as failed so it does not stay stuck in "processing"
+        ExporterHistory.objects.filter(token=token_id).update(status="failed")
+        return
 
-        # Generate presigned url for the uploaded file
-        presigned_url = s3.generate_presigned_url(
-            "get_object",
-            Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name},
-            ExpiresIn=expires_in,
-        )
+    # Generate a presigned URL for the uploaded file. A fake request is
+    # used so S3Storage builds the URL against the public scheme/host.
+    fake_request = _FakeDjangoRequest()
+    storage = S3Storage(request=fake_request)
+
+    presigned_url = storage.generate_presigned_url(
+        file_name,
+        expiration=expires_in,
+        http_method="GET",
+        disposition="inline",
+        filename=file_name,
+    )
 
     exporter_instance = ExporterHistory.objects.get(token=token_id)
 
     # Update the exporter instance with the presigned url
     if presigned_url:
+        logger.info("Uploaded export file to S3: %s", file_name)
         exporter_instance.url = presigned_url
         exporter_instance.status = "completed"
         exporter_instance.key = file_name
     else:
         exporter_instance.status = "failed"
+        logger.error("Failed to generate presigned URL for export file: %s", file_name)
 
+    logger.info("Saving exporter instance for token %s", token_id)
     exporter_instance.save(update_fields=["status", "url", "key"])
 
 
@@ -127,21 +148,49 @@ def issue_export_task(
     token_id: str,
     multiple: bool,
     slug: str,
+    export_type: str | None = None,
 ):
     """
     Export issues from the workspace.
     provider (str): The provider to export the issues to csv | json | xlsx.
     token_id (str): The export object token id.
     multiple (bool): Whether to export the issues to multiple files per project.
+    export_type (str | None): The type of export (epic, intake, issue, etc.).
     """
+
+    logger.info("Export started for work items in projects %s in workspace %s", project_ids, workspace_id)
+
     try:
         exporter_instance = ExporterHistory.objects.get(token=token_id)
         exporter_instance.status = "processing"
         exporter_instance.save(update_fields=["status"])
 
-        # Build base queryset for issues
+        logger.info(
+            "Building base queryset for issues: workspace=%s, type=%s, export_type=%s",
+            workspace_id,
+            exporter_instance.type,
+            export_type,
+        )
+
+        # Build base queryset with export_type-specific manager and filters
+        if export_type == "epic":
+            # Use issue_and_epics_objects manager for epics with epic filter
+            base_queryset = Issue.issue_and_epics_objects.filter(type__is_epic=True)
+        elif export_type == "intake":
+            # Use objects manager for intake with triage state filter
+            base_queryset = Issue.objects.filter(state__group=StateGroup.TRIAGE.value)
+        elif export_type == "issue":
+            # Use issue_objects manager for regular issues (workitem, cycle, module, view)
+            base_queryset = Issue.issue_objects.all()
+        else:
+            # Default: use objects manager to export all types of issues (workspace export)
+            base_queryset = Issue.objects.all()
+
+        # Apply common filters
         workspace_issues = (
-            Issue.objects.filter(
+            base_queryset.filter(
                 workspace__id=workspace_id,
                 project_id__in=project_ids,
                 project__project_projectmember__member=exporter_instance.initiated_by_id,
@@ -152,11 +201,12 @@ def issue_export_task(
152201 "project" ,
153202 "workspace" ,
154203 "state" ,
204+ "type" ,
155205 "created_by" ,
156206 "estimate_point" ,
157207 )
158208 .prefetch_related (
159- "labels " ,
209+ "label_issue__label " ,
160210 "issue_cycle__cycle" ,
161211 "issue_module__module" ,
162212 "issue_comments" ,
@@ -178,33 +228,67 @@ def issue_export_task(
             )
         )
 
-        # Create exporter for the specified format
-        try:
-            exporter = Exporter(
-                format_type=provider,
-                schema_class=IssueExportSchema,
-                options={"list_joiner": ", "},
+        # Apply rich filters if present
+        rich_filters = exporter_instance.rich_filters
+        logger.info("Applying rich filters: %s", rich_filters)
+        if rich_filters:
+            backend = ComplexFilterBackend()
+            fake_request = _FakeDRFRequest()
+            view = _ExportFilterView(fake_request)
+            workspace_issues = backend.filter_queryset(
+                fake_request,
+                workspace_issues,
+                view,
+                filter_data=rich_filters,
             )
-        except ValueError as e:
-            # Invalid format type
-            exporter_instance = ExporterHistory.objects.get(token=token_id)
+
+        # Apply legacy filters if present
+        filters = exporter_instance.filters
+        logger.info("Applying legacy filters: %s", filters)
+        if filters:
+            filters = issue_filters(filters, "GET")
+            workspace_issues = workspace_issues.filter(**filters)
+
+        # Create exporter for the specified format
+        formatters = {"csv": CSVFormatter(), "json": JSONFormatter(), "xlsx": XLSXFormatter()}
+        if provider not in formatters:
             exporter_instance.status = "failed"
-            exporter_instance.reason = str(e)
+            exporter_instance.reason = f"Unsupported format: {provider}. Available: csv, json, xlsx"
             exporter_instance.save(update_fields=["status", "reason"])
             return
 
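+        # Serialize the issues with IssueExportSerializer and render the
+        # rows with the formatter for the requested file type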
+        formatter = formatters[provider]
+        exporter = DataExporter(IssueExportSerializer)
+
+        logger.info("Creating export files: multiple=%s, project_ids=%s", multiple, project_ids)
         files = []
         if multiple:
             # Export each project separately with its own queryset
             for project_id in project_ids:
                 project_issues = workspace_issues.filter(project_id=project_id)
-                export_filename = f"{slug}-{project_id}"
-                filename, content = exporter.export(export_filename, project_issues)
+                filename = f"{slug}-{project_id}.{formatter.extension}"
+                content = exporter.to_string(project_issues, formatter)
                 files.append((filename, content))
         else:
             # Export all issues in a single file
-            export_filename = f"{slug}-{workspace_id}"
-            filename, content = exporter.export(export_filename, workspace_issues)
+            filename = f"{slug}-{workspace_id}.{formatter.extension}"
+            content = exporter.to_string(workspace_issues, formatter)
             files.append((filename, content))
 
         zip_buffer = create_zip_file(files)