@@ -315,7 +315,8 @@ useEffect(() => {
       setPollingInterval(null);
     }
     // Clear processing start time if completed or failed
-    if (audioFile && (audioFile.status === "completed" || audioFile.status === "failed")) {
+    const status = currentStatus || audioFile?.status;
+    if (status && (status === "completed" || status === "failed")) {
       setProcessingStartTime(null);
       setElapsedTime(0);
     }
@@ -353,7 +354,7 @@ useEffect(() => {
       fetchSpeakerMappings();
     }
     // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [audioFile]);
+  }, [audioFile?.id]);
 
   // Also clear and refetch speaker mappings when transcript changes (handles retranscription)
   useEffect(() => {
@@ -393,7 +394,7 @@ useEffect(() => {
         wavesurferRef.current = null;
       }
     };
-  }, [audioFile]);
+  }, [audioFile?.id, audioFile?.audio_path]);
 
   // Update current word index based on audio time
   useEffect(() => {
@@ -450,6 +451,44 @@ useEffect(() => {
     }
   }, [currentWordIndex]);
 
+  const fetchTranscriptOnly = async () => {
+    try {
+      const transcriptResponse = await fetch(
+        `/api/v1/transcription/${audioId}/transcript`,
+        {
+          headers: {
+            ...getAuthHeaders(),
+          },
+        },
+      );
+
+      if (transcriptResponse.ok) {
+        const transcriptData = await transcriptResponse.json();
+
+        // The API returns transcript data in a nested structure
+        if (transcriptData.transcript) {
+          // Check if transcript has segments or text
+          if (typeof transcriptData.transcript === "string") {
+            setTranscript({ text: transcriptData.transcript });
+          } else if (transcriptData.transcript.text) {
+            setTranscript({
+              text: transcriptData.transcript.text,
+              segments: transcriptData.transcript.segments,
+              word_segments: transcriptData.transcript.word_segments,
+            });
+          } else if (transcriptData.transcript.segments) {
+            setTranscript({
+              text: "",
+              segments: transcriptData.transcript.segments,
+            });
+          }
+        }
+      }
+    } catch (error) {
+      console.error("Error fetching transcript:", error);
+    }
+  };
+
   const fetchStatusOnly = async () => {
     try {
       const response = await fetch(`/api/v1/transcription/${audioId}`, {
@@ -465,9 +504,10 @@ useEffect(() => {
         // Only update the status state, not the entire audioFile
         setCurrentStatus(data.status);
 
-        // If status changed to completed, fetch full details
+        // If status changed to completed, update audioFile status and fetch transcript
         if (data.status === "completed" && previousStatus === "processing") {
-          await fetchAudioDetails();
+          setAudioFile(prev => prev ? { ...prev, status: "completed" } : null);
+          await fetchTranscriptOnly();
         }
       }
     } catch (error) {
@@ -1359,7 +1399,7 @@ useEffect(() => {
           </div>
         </div>
 
-        {audioFile.status === "completed" && transcript && (
+        {(currentStatus || audioFile.status) === "completed" && transcript && (
           <div className="bg-white dark:bg-gray-800 rounded-xl p-3 sm:p-6">
             {/* Header Section */}
             <div className="mb-3 sm:mb-6">
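
Note for readers of the diff: the new fetchTranscriptOnly branches on three possible shapes nested under the transcript field. A minimal TypeScript sketch of those shapes, inferred only from the branches above (the real backend schema is not shown in this diff, and any TranscriptSegment fields beyond those named in the code are hypothetical):

// Sketch only — inferred from the branches in fetchTranscriptOnly above.
// Segment field names other than those appearing in the diff are hypothetical.
interface TranscriptSegment {
  text?: string;
  start?: number; // hypothetical timing fields, not shown in the diff
  end?: number;
}

type TranscriptPayload =
  | string // plain-text transcript
  | { text: string; segments?: TranscriptSegment[]; word_segments?: TranscriptSegment[] }
  | { segments: TranscriptSegment[] }; // segments without top-level text

interface TranscriptResponse {
  transcript?: TranscriptPayload; // nested under "transcript", as the comment in the diff notes
}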