     ),
 )
 def assemble_preprod_artifact(
-    org_id,
-    project_id,
-    checksum,
-    chunks,
-    artifact_id,
-    **kwargs,
+    org_id: int,
+    project_id: int,
+    checksum: Any,
+    chunks: Any,
+    artifact_id: int,
+    **kwargs: Any,
 ) -> None:
     """
     Creates a preprod artifact from uploaded chunks.
@@ -241,6 +241,15 @@ def create_preprod_artifact(
         extras=extras,
     )
 
+    # TODO(preprod): add gating to only create if has quota
+    PreprodArtifactSizeMetrics.objects.get_or_create(
+        preprod_artifact=preprod_artifact,
+        metrics_artifact_type=PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT,
+        defaults={
+            "state": PreprodArtifactSizeMetrics.SizeAnalysisState.PENDING,
+        },
+    )
+
     logger.info(
         "Created preprod artifact row",
         extra={
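
For context on the `get_or_create` block added above: Django's `get_or_create` filters on the keyword arguments and only applies `defaults` when it has to insert a new row, so re-running artifact creation will not push an existing size-metrics row back to PENDING. A minimal self-contained sketch of that semantic, using a hypothetical in-memory store rather than the real `PreprodArtifactSizeMetrics` model:

# Hypothetical in-memory stand-in for Django's Model.objects.get_or_create();
# the real code operates on PreprodArtifactSizeMetrics rows.
_rows: dict[tuple[int, str], dict] = {}

def get_or_create(artifact_id: int, artifact_type: str, defaults: dict) -> tuple[dict, bool]:
    key = (artifact_id, artifact_type)
    if key in _rows:
        return _rows[key], False  # existing row: defaults are ignored
    row = {"artifact_id": artifact_id, "type": artifact_type, **defaults}
    _rows[key] = row
    return row, True  # new row: defaults applied

row, created = get_or_create(1, "MAIN_ARTIFACT", defaults={"state": "PENDING"})
assert created and row["state"] == "PENDING"
row["state"] = "COMPLETED"
_, created = get_or_create(1, "MAIN_ARTIFACT", defaults={"state": "PENDING"})
assert not created and _rows[(1, "MAIN_ARTIFACT")]["state"] == "COMPLETED"  # not reset

That idempotency is what makes the new row creation safe to retry alongside the artifact creation itself.
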
@@ -329,8 +338,9 @@ def _assemble_preprod_artifact_file(
 
 
 def _assemble_preprod_artifact_size_analysis(
-    assemble_result: AssembleResult, project, artifact_id, org_id
+    assemble_result: AssembleResult, project, artifact_id: int, org_id: int
 ):
+    preprod_artifact = None
     try:
         preprod_artifact = PreprodArtifact.objects.get(
             project=project,
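
The `preprod_artifact = None` initialization added above is presumably there so the failure handling introduced later in this function can reference the variable even if `PreprodArtifact.objects.get(...)` raises before the assignment lands, avoiding an `UnboundLocalError` that would mask the real error. A small stand-alone sketch of the pattern (the `lookup` helper is hypothetical, not part of this codebase):

def lookup(artifact_id: int) -> dict:
    # Hypothetical stand-in for PreprodArtifact.objects.get(); always fails here.
    raise LookupError(f"artifact {artifact_id} not found")

def process(artifact_id: int) -> None:
    artifact = None  # initialized up front so the handler below can inspect it safely
    try:
        artifact = lookup(artifact_id)
    except Exception:
        # Without the initialization, touching `artifact` here could raise
        # UnboundLocalError and hide the original failure.
        print("failed, artifact loaded:", artifact is not None)
        raise

try:
    process(42)
except LookupError:
    pass  # the original LookupError still propagates
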
@@ -357,35 +367,73 @@ def _assemble_preprod_artifact_size_analysis(
                 pass  # Ignore cleanup errors
         raise Exception(f"PreprodArtifact with id {artifact_id} does not exist")
 
-    size_analysis_results = SizeAnalysisResults.parse_raw(assemble_result.bundle_temp_file.read())
+    try:
+        size_analysis_results = SizeAnalysisResults.parse_raw(
+            assemble_result.bundle_temp_file.read()
+        )
+
+        with transaction.atomic(router.db_for_write(PreprodArtifactSizeMetrics)):
+            # TODO(preprod): parse this from the treemap json and handle other artifact types
+            size_metrics, created = PreprodArtifactSizeMetrics.objects.update_or_create(
+                preprod_artifact=preprod_artifact,
+                metrics_artifact_type=PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT,
+                defaults={
+                    "analysis_file_id": assemble_result.bundle.id,
+                    "min_install_size": None,  # No min value at this time
+                    "max_install_size": size_analysis_results.install_size,
+                    "min_download_size": None,  # No min value at this time
+                    "max_download_size": size_analysis_results.download_size,
+                    "state": PreprodArtifactSizeMetrics.SizeAnalysisState.COMPLETED,
+                },
+            )
 
-    # Update size metrics in its own transaction
-    with transaction.atomic(router.db_for_write(PreprodArtifactSizeMetrics)):
-        size_metrics, created = PreprodArtifactSizeMetrics.objects.update_or_create(
-            preprod_artifact=preprod_artifact,
-            defaults={
+        logger.info(
+            "Created or updated preprod artifact size metrics with analysis file",
+            extra={
+                "preprod_artifact_id": preprod_artifact.id,
+                "size_metrics_id": size_metrics.id,
                 "analysis_file_id": assemble_result.bundle.id,
-                "metrics_artifact_type": PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT,  # TODO: parse this from the treemap json
-                "min_install_size": None,  # No min value at this time
-                "max_install_size": size_analysis_results.install_size,
-                "min_download_size": None,  # No min value at this time
-                "max_download_size": size_analysis_results.download_size,
-                "state": PreprodArtifactSizeMetrics.SizeAnalysisState.COMPLETED,
+                "was_created": created,
+                "project_id": project.id,
+                "organization_id": org_id,
             },
         )
 
-        logger.info(
-            "Created or updated preprod artifact size metrics with analysis file",
-            extra={
-                "preprod_artifact_id": preprod_artifact.id,
-                "size_metrics_id": size_metrics.id,
-                "analysis_file_id": assemble_result.bundle.id,
-                "was_created": created,
-                "project_id": project.id,
-                "organization_id": org_id,
-            },
-        )
+    except Exception as e:
+        logger.exception(
+            "Failed to process size analysis results",
+            extra={
+                "preprod_artifact_id": artifact_id,
+                "project_id": project.id,
+                "organization_id": org_id,
+            },
+        )
+
+        with transaction.atomic(router.db_for_write(PreprodArtifactSizeMetrics)):
+            try:
+                PreprodArtifactSizeMetrics.objects.update_or_create(
+                    preprod_artifact=preprod_artifact,
+                    metrics_artifact_type=PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT,
+                    defaults={
+                        "state": PreprodArtifactSizeMetrics.SizeAnalysisState.FAILED,
+                        "error_code": PreprodArtifactSizeMetrics.ErrorCode.PROCESSING_ERROR,
+                        "error_message": str(e),
+                    },
+                )
+            except Exception:
+                logger.exception(
+                    "Failed to update preprod artifact size metrics",
+                    extra={
+                        "preprod_artifact_id": artifact_id,
+                        "project_id": project.id,
+                        "organization_id": org_id,
+                    },
+                )
+
+        # Re-raise to trigger further error handling if needed
+        raise
 
+    # Always trigger status check update (success or failure)
     create_preprod_status_check_task.apply_async(
         kwargs={
             "preprod_artifact_id": artifact_id,