From c1ab3ec2c3e1b02f038acecdac4c6a1ffcd93407 Mon Sep 17 00:00:00 2001 From: Richie McIlroy <33632126+richiemcilroy@users.noreply.github.com> Date: Wed, 25 Jun 2025 13:28:12 +0100 Subject: [PATCH] feat: Import video to Cap, open in editor --- apps/desktop/src-tauri/src/importer.rs | 516 ++++++++++++++++++ apps/desktop/src-tauri/src/lib.rs | 16 +- .../src/routes/(window-chrome)/settings.tsx | 5 + .../(window-chrome)/settings/importer.tsx | 94 ++++ apps/desktop/src/routes/editor/Editor.tsx | 10 +- apps/desktop/src/routes/editor/Player.tsx | 15 +- apps/desktop/src/routes/editor/context.ts | 18 +- apps/desktop/src/utils/tauri.ts | 3 + 8 files changed, 654 insertions(+), 23 deletions(-) create mode 100644 apps/desktop/src-tauri/src/importer.rs create mode 100644 apps/desktop/src/routes/(window-chrome)/settings/importer.tsx diff --git a/apps/desktop/src-tauri/src/importer.rs b/apps/desktop/src-tauri/src/importer.rs new file mode 100644 index 000000000..3424b2767 --- /dev/null +++ b/apps/desktop/src-tauri/src/importer.rs @@ -0,0 +1,516 @@ +use crate::windows::{CapWindowId, ShowCapWindow}; +use cap_project::{ + AudioMeta, Platform, ProjectConfiguration, RecordingMeta, RecordingMetaInner, SingleSegment, + StudioRecordingMeta, VideoMeta, +}; +use cap_utils::ensure_dir; +use chrono::{DateTime, Local}; +use ffmpeg::{self, format::input as ffmpeg_input}; +use relative_path::RelativePathBuf; +use serde::{Deserialize, Serialize}; +use specta::Type; +use std::fs; +use std::path::{Path, PathBuf}; +use tauri::{AppHandle, Manager}; +use uuid::Uuid; + +#[derive(Debug, Serialize, Deserialize, Type)] +pub struct ImportProgress { + pub status: String, + pub progress: f64, + pub message: String, +} + +#[tauri::command] +#[specta::specta] +pub async fn import_video_file(app: AppHandle, video_path: String) -> Result { + if !Path::new(&video_path).exists() { + return Err("Video file not found".to_string()); + } + + let id = Uuid::new_v4().to_string(); + let recordings_dir = app + .path() + .app_data_dir() + .map_err(|e| format!("Failed to get app data dir: {}", e))? + .join("recordings"); + + let project_dir = recordings_dir.join(format!("{}.cap", id)); + + let allowed_exts = ["mp4", "mov", "webm", "m4v"]; + let ext = Path::new(&video_path) + .extension() + .and_then(|e| e.to_str()) + .map(|s| s.to_ascii_lowercase()); + + if ext + .as_deref() + .map(|e| !allowed_exts.contains(&e)) + .unwrap_or(true) + { + return Err(format!( + "Unsupported video format. 
Supported formats are: {}", + allowed_exts.join(", ") + )); + } + + ensure_dir(&project_dir).map_err(|e| format!("Failed to create project directory: {}", e))?; + let content_dir = project_dir.join("content"); + ensure_dir(&content_dir).map_err(|e| format!("Failed to create content directory: {}", e))?; + + let video_output_path = content_dir.join("display.mp4"); + let audio_output_path = content_dir.join("audio-mic.m4a"); + + let metadata = get_video_metadata(&video_path)?; + + convert_and_extract(&video_path, &video_output_path, &audio_output_path).await?; + + let now: DateTime = Local::now(); + let pretty_name = format!("Import {}", now.format("%Y-%m-%d at %H.%M.%S")); + + let meta = RecordingMeta { + platform: Some(Platform::default()), + project_path: project_dir.clone(), + pretty_name, + sharing: None, + inner: RecordingMetaInner::Studio(StudioRecordingMeta::SingleSegment { + segment: SingleSegment { + display: VideoMeta { + path: RelativePathBuf::from("content/display.mp4"), + fps: metadata.fps, + start_time: None, + }, + camera: None, + audio: if audio_output_path.exists() { + Some(AudioMeta { + path: RelativePathBuf::from("content/audio-mic.m4a"), + start_time: None, + }) + } else { + None + }, + cursor: None, + }, + }), + }; + + meta.save_for_project() + .map_err(|e| format!("Failed to save project metadata: {:?}", e))?; + + let project_config = ProjectConfiguration::default(); + project_config + .write(&project_dir) + .map_err(|e| format!("Failed to write project config: {}", e))?; + + ShowCapWindow::Editor { + project_path: project_dir.clone(), + } + .show(&app) + .await + .map_err(|e| format!("Failed to open editor: {}", e))?; + + Ok(project_dir.to_string_lossy().to_string()) +} + +#[derive(Debug)] +struct VideoMetadata { + fps: u32, + width: u32, + height: u32, + duration: f64, +} + +fn get_video_metadata(video_path: &str) -> Result { + ffmpeg::init().map_err(|e| format!("Failed to initialise ffmpeg: {e}"))?; + + let ictx = ffmpeg_input(video_path).map_err(|e| format!("Failed to open input video: {e}"))?; + + let video_stream = ictx + .streams() + .best(ffmpeg::media::Type::Video) + .ok_or_else(|| "No video stream found".to_string())?; + + let frame_rate = video_stream.avg_frame_rate(); + let fps = if frame_rate.denominator() != 0 { + ((frame_rate.numerator() as f64) / (frame_rate.denominator() as f64)).round() as u32 + } else { + 30 + }; + + let codec_ctx = ffmpeg::codec::context::Context::from_parameters(video_stream.parameters()) + .map_err(|e| format!("Unable to read codec parameters: {e}"))?; + let video_decoder = codec_ctx + .decoder() + .video() + .map_err(|e| format!("Unable to create decoder: {e}"))?; + + let width = video_decoder.width(); + let height = video_decoder.height(); + + let duration = ictx.duration() as f64 / 1_000_000.0; + + Ok(VideoMetadata { + fps, + width, + height, + duration, + }) +} + +async fn convert_and_extract( + input_path: &str, + video_output_path: &PathBuf, + audio_output_path: &PathBuf, +) -> Result<(), String> { + let input_path = input_path.to_owned(); + let video_output_path = video_output_path.clone(); + let audio_output_path = audio_output_path.clone(); + + tokio::task::spawn_blocking(move || -> Result<(), String> { + ffmpeg::init().map_err(|e| format!("Failed to initialise ffmpeg: {e}"))?; + + if video_output_path.exists() { + std::fs::remove_file(&video_output_path).ok(); + } + + let input_ext = Path::new(&input_path) + .extension() + .and_then(|ext| ext.to_str()) + .unwrap_or(""); + + if input_ext.eq_ignore_ascii_case("mp4") { + 
std::fs::copy(&input_path, &video_output_path) + .map_err(|e| format!("Failed to copy video file: {e}"))?; + } else { + match remux_to_mp4(&input_path, &video_output_path) { + Ok(()) => { + println!("Successfully remuxed video to MP4"); + } + Err(e) => { + println!("Remux failed: {}, falling back to transcoding", e); + transcode_to_mp4(&input_path, &video_output_path)?; + } + } + } + + if audio_output_path.exists() { + std::fs::remove_file(&audio_output_path).ok(); + } + + if let Err(e) = extract_audio(&input_path, &audio_output_path) { + eprintln!("Audio extraction failed: {} — continuing without audio", e); + let _ = std::fs::remove_file(&audio_output_path); + } + + Ok(()) + }) + .await + .map_err(|e| format!("Join error: {e}"))??; + + Ok(()) +} + +fn remux_to_mp4(input: &str, output_path: &Path) -> Result<(), String> { + use std::collections::HashMap; + + let mut ictx = ffmpeg_input(input).map_err(|e| format!("Failed to open input: {e}"))?; + let mut octx = + ffmpeg::format::output(output_path).map_err(|e| format!("Failed to create output: {e}"))?; + + let mut stream_mapping: HashMap = HashMap::new(); + let mut last_dts: HashMap> = HashMap::new(); + + for (stream_index, istream) in ictx.streams().enumerate() { + match istream.parameters().medium() { + ffmpeg::media::Type::Video | ffmpeg::media::Type::Audio => { + let codec_id = istream.parameters().id(); + + let needs_transcode = match istream.parameters().medium() { + ffmpeg::media::Type::Video => { + matches!( + codec_id, + ffmpeg::codec::Id::VP8 + | ffmpeg::codec::Id::VP9 + | ffmpeg::codec::Id::AV1 + | ffmpeg::codec::Id::THEORA + ) + } + ffmpeg::media::Type::Audio => { + matches!( + codec_id, + ffmpeg::codec::Id::VORBIS + | ffmpeg::codec::Id::OPUS + | ffmpeg::codec::Id::FLAC + ) + } + _ => false, + }; + + if needs_transcode { + return Err(format!( + "Input uses codec {:?} which requires transcoding. 
Please use a compatible format or convert first.", + codec_id + )); + } + + let encoder = ffmpeg::codec::encoder::find(codec_id) + .ok_or_else(|| format!("No encoder found for codec id {:?}", codec_id))?; + + let out_index = { + let mut ostream = octx + .add_stream(encoder) + .map_err(|e| format!("Unable to add stream: {e}"))?; + + ostream.set_parameters(istream.parameters()); + + ostream.index() + }; + + stream_mapping.insert(stream_index, out_index); + last_dts.insert(out_index, None); + } + _ => {} + } + } + + octx.set_metadata(ictx.metadata().to_owned()); + octx.write_header() + .map_err(|e| format!("Failed to write header: {e}"))?; + + for (istream, mut packet) in ictx.packets() { + if let Some(&out_index) = stream_mapping.get(&istream.index()) { + let in_tb = istream.time_base(); + let out_tb = { + let out_stream = octx.stream(out_index).unwrap(); + out_stream.time_base() + }; + + packet.set_stream(out_index); + packet.rescale_ts(in_tb, out_tb); + + if let Some(dts) = packet.dts() { + if let Some(last) = last_dts.get(&out_index).and_then(|&d| d) { + if dts <= last { + packet.set_dts(Some(last + 1)); + } + } + last_dts.insert(out_index, packet.dts()); + } + + packet + .write_interleaved(&mut octx) + .map_err(|e| format!("Error writing packet: {e}"))?; + } + } + + octx.write_trailer() + .map_err(|e| format!("Failed to write trailer: {e}"))?; + Ok(()) +} + +fn transcode_to_mp4(input: &str, output_path: &Path) -> Result<(), String> { + use ffmpeg::{codec, format, frame, media}; + + let mut ictx = ffmpeg_input(input).map_err(|e| format!("Failed to open input: {e}"))?; + let input_stream = ictx + .streams() + .best(media::Type::Video) + .ok_or_else(|| "No video stream found".to_string())?; + let video_stream_index = input_stream.index(); + + let mut decoder = codec::context::Context::from_parameters(input_stream.parameters()) + .map_err(|e| format!("Failed to create decoder context: {e}"))? 
+ .decoder() + .video() + .map_err(|e| format!("Failed to open video decoder: {e}"))?; + + let h264 = codec::encoder::find(codec::Id::H264) + .ok_or_else(|| "H264 encoder not found in FFmpeg build".to_string())?; + let mut enc_ctx = codec::Context::new_with_codec(h264); + let mut encoder = enc_ctx.encoder().video().map_err(|e| e.to_string())?; + + encoder.set_width(decoder.width()); + encoder.set_height(decoder.height()); + encoder.set_format(decoder.format()); + + let frame_rate = input_stream.avg_frame_rate(); + encoder.set_frame_rate(Some(frame_rate)); + encoder.set_time_base(input_stream.time_base()); + + let pixels = decoder.width() as u64 * decoder.height() as u64; + let fps = if frame_rate.denominator() != 0 { + (frame_rate.numerator() as f64 / frame_rate.denominator() as f64) + } else { + 30.0 + }; + let base_bitrate = 4_000_000; + let bitrate = (base_bitrate as f64 * (pixels as f64 / (1920.0 * 1080.0)) * (fps / 30.0)) as i64; + encoder.set_bit_rate(bitrate.max(1_000_000) as usize); + + let mut encoder = encoder.open().map_err(|e| format!("Encoder open: {e}"))?; + + let mut octx = + format::output(output_path).map_err(|e| format!("Failed to create output: {e}"))?; + + let mut audio_stream_mapping = std::collections::HashMap::new(); + for (i, stream) in ictx.streams().enumerate() { + if stream.parameters().medium() == media::Type::Audio { + let codec_id = stream.parameters().id(); + + if codec_id != codec::Id::AAC { + continue; + } + + if let Some(enc) = codec::encoder::find(codec_id) { + let out_index = { + let mut ostream = octx + .add_stream(enc) + .map_err(|e| format!("Unable to add audio stream: {e}"))?; + + ostream.set_parameters(stream.parameters()); + ostream.index() + }; + + audio_stream_mapping.insert(i, out_index); + } + } + } + + let video_out_index = { + let mut ostream = octx + .add_stream(h264) + .map_err(|e| format!("Unable to add video stream: {e}"))?; + + ostream.set_parameters(&encoder); + ostream.index() + }; + + octx.set_metadata(ictx.metadata().to_owned()); + octx.write_header() + .map_err(|e| format!("Failed to write header: {e}"))?; + + let mut decoded = frame::Video::empty(); + let mut encoded = ffmpeg::Packet::empty(); + + for (stream, packet) in ictx.packets() { + if stream.index() == video_stream_index { + decoder.send_packet(&packet).map_err(|e| e.to_string())?; + while decoder.receive_frame(&mut decoded).is_ok() { + encoder.send_frame(&decoded).map_err(|e| e.to_string())?; + while encoder.receive_packet(&mut encoded).is_ok() { + encoded.set_stream(video_out_index); + encoded.rescale_ts( + encoder.time_base(), + octx.stream(video_out_index).unwrap().time_base(), + ); + encoded + .write_interleaved(&mut octx) + .map_err(|e| format!("Error writing video packet: {e}"))?; + } + } + } else if let Some(&out_index) = audio_stream_mapping.get(&stream.index()) { + let mut packet = packet.clone(); + packet.set_stream(out_index); + packet.rescale_ts( + stream.time_base(), + octx.stream(out_index).unwrap().time_base(), + ); + packet + .write_interleaved(&mut octx) + .map_err(|e| format!("Error writing audio packet: {e}"))?; + } + } + + encoder.send_eof().ok(); + while encoder.receive_packet(&mut encoded).is_ok() { + encoded.set_stream(video_out_index); + encoded.rescale_ts( + encoder.time_base(), + octx.stream(video_out_index).unwrap().time_base(), + ); + encoded + .write_interleaved(&mut octx) + .map_err(|e| format!("Error writing video packet: {e}"))?; + } + + octx.write_trailer() + .map_err(|e| format!("Failed to write trailer: {e}"))?; + + Ok(()) +} + +fn 
extract_audio(input: &str, output_path: &Path) -> Result<(), String> { + use ffmpeg::{codec, media}; + + let mut ictx = ffmpeg_input(input).map_err(|e| format!("Failed to open input: {e}"))?; + let audio_stream = ictx + .streams() + .best(media::Type::Audio) + .ok_or_else(|| "No audio stream found".to_string())?; + + let codec_id = audio_stream.parameters().id(); + + if codec_id == codec::Id::AAC { + let stream_index = audio_stream.index(); + remux_single_audio_stream(&mut ictx, stream_index, output_path) + } else { + Err(format!( + "Unsupported audio codec {:?}. Only AAC is currently supported for import.", + codec_id + )) + } +} + +fn remux_single_audio_stream( + ictx: &mut ffmpeg::format::context::Input, + audio_stream_index: usize, + output_path: &Path, +) -> Result<(), String> { + use ffmpeg::format; + + if output_path.exists() { + std::fs::remove_file(output_path).ok(); + } + + let mut octx = + format::output(output_path).map_err(|e| format!("Failed to create output: {e}"))?; + + let audio_stream = ictx + .stream(audio_stream_index) + .ok_or_else(|| "Invalid audio stream index".to_string())?; + + let encoder = ffmpeg::codec::encoder::find(audio_stream.parameters().id()) + .ok_or_else(|| "No encoder for codec".to_string())?; + + let out_index = { + let mut ostream = octx + .add_stream(encoder) + .map_err(|e| format!("Unable to add stream: {e}"))?; + + ostream.set_parameters(audio_stream.parameters()); + ostream.index() + }; + + octx.write_header() + .map_err(|e| format!("Failed to write header: {e}"))?; + + let in_tb = audio_stream.time_base(); + let out_tb = octx.stream(out_index).unwrap().time_base(); + + for (istream, mut packet) in ictx.packets() { + if istream.index() != audio_stream_index { + continue; + } + + packet.set_stream(out_index); + packet.rescale_ts(in_tb, out_tb); + packet + .write_interleaved(&mut octx) + .map_err(|e| format!("Error writing packet: {e}"))?; + } + + octx.write_trailer() + .map_err(|e| format!("Failed to write trailer: {e}"))?; + + Ok(()) +} diff --git a/apps/desktop/src-tauri/src/lib.rs b/apps/desktop/src-tauri/src/lib.rs index 35340ed9d..8d096207a 100644 --- a/apps/desktop/src-tauri/src/lib.rs +++ b/apps/desktop/src-tauri/src/lib.rs @@ -1,22 +1,21 @@ mod audio; +mod audio_meter; mod auth; mod camera; mod captions; mod deeplink_actions; +mod editor_window; +mod export; +mod fake_window; mod flags; mod general_settings; mod hotkeys; +mod importer; mod notifications; mod permissions; mod platform; -mod recording; -// mod resource; -mod audio_meter; -mod editor_window; -mod export; -mod fake_window; -// mod live_state; mod presets; +mod recording; mod tray; mod upload; mod web_api; @@ -1825,7 +1824,8 @@ pub async fn run(recording_logging_handle: LoggingHandle) { captions::download_whisper_model, captions::check_model_exists, captions::delete_whisper_model, - captions::export_captions_srt + captions::export_captions_srt, + importer::import_video_file, ]) .events(tauri_specta::collect_events![ RecordingOptionsChanged, diff --git a/apps/desktop/src/routes/(window-chrome)/settings.tsx b/apps/desktop/src/routes/(window-chrome)/settings.tsx index 5bf375911..a545db637 100644 --- a/apps/desktop/src/routes/(window-chrome)/settings.tsx +++ b/apps/desktop/src/routes/(window-chrome)/settings.tsx @@ -46,6 +46,11 @@ export default function Settings(props: RouteSectionProps) { name: "Integrations", icon: IconLucideUnplug, }, + { + href: "importer", + name: "Importer", + icon: IconCapFile, + }, { href: "license", name: "License", diff --git 
a/apps/desktop/src/routes/(window-chrome)/settings/importer.tsx b/apps/desktop/src/routes/(window-chrome)/settings/importer.tsx new file mode 100644 index 000000000..3a92067aa --- /dev/null +++ b/apps/desktop/src/routes/(window-chrome)/settings/importer.tsx @@ -0,0 +1,94 @@ +import { Button } from "@cap/ui-solid"; +import { open } from "@tauri-apps/plugin-dialog"; +import { createSignal, Show } from "solid-js"; +import toast from "solid-toast"; +import { commands } from "~/utils/tauri"; + +export default function ImporterSettings() { + const [isImporting, setIsImporting] = createSignal(false); + const [progress, setProgress] = createSignal(""); + + const handleImport = async (filePath: string) => { + try { + setIsImporting(true); + setProgress("Processing video file..."); + + const projectPath = await commands.importVideoFile(filePath); + + toast.success("Video imported successfully!"); + setProgress(""); + } catch (error) { + console.error("Import error:", error); + toast.error(`Failed to import video: ${error}`); + setProgress(""); + } finally { + setIsImporting(false); + } + }; + + const handleFileSelect = async () => { + try { + const selected = await open({ + multiple: false, + filters: [ + { + name: "Video", + extensions: ["mp4", "mov", "webm", "m4v"], + }, + ], + }); + + if (selected) { + await handleImport(selected as string); + } + } catch (error) { + console.error("File selection error:", error); + toast.error("Failed to select file"); + } + }; + + return ( +
+    <div class="flex flex-col w-full h-full p-4 space-y-4">
+      {/* Markup reconstructed from the surviving text; the element structure,
+          class names and button label are assumptions — the original JSX tags
+          were lost when the patch was extracted. */}
+      <div class="flex flex-col space-y-1">
+        <h2 class="text-lg font-medium">Import Videos</h2>
+        <p class="text-sm text-gray-400">
+          Import existing video files into Cap to edit them with the Cap
+          Editor. Most common video formats are supported including .mp4,
+          .mov, .webm and .m4v.
+        </p>
+      </div>
+      <div class="flex flex-col items-center justify-center gap-3 p-8 rounded-lg border border-dashed">
+        <p class="text-sm">
+          {isImporting() ? progress() : "Select a video file to import"}
+        </p>
+        <Button onClick={handleFileSelect} disabled={isImporting()}>
+          {isImporting() ? "Importing..." : "Select Video File"}
+        </Button>
+      </div>
+    </div>
+ ); +} diff --git a/apps/desktop/src/routes/editor/Editor.tsx b/apps/desktop/src/routes/editor/Editor.tsx index f2cea828e..3652b8551 100644 --- a/apps/desktop/src/routes/editor/Editor.tsx +++ b/apps/desktop/src/routes/editor/Editor.tsx @@ -73,24 +73,24 @@ export function Editor() { } function Inner() { - const { project, editorState, setEditorState } = useEditorContext(); + const { project, editorState, setEditorState, videoFps } = useEditorContext(); onMount(() => events.editorStateChanged.listen((e) => { renderFrame.clear(); - setEditorState("playbackTime", e.payload.playhead_position / FPS); + setEditorState("playbackTime", e.payload.playhead_position / videoFps()); }) ); const renderFrame = throttle((time: number) => { if (!editorState.playing) { events.renderFrameEvent.emit({ - frame_number: Math.max(Math.floor(time * FPS), 0), - fps: FPS, + frame_number: Math.max(Math.floor(time * videoFps()), 0), + fps: videoFps(), resolution_base: OUTPUT_SIZE, }); } - }, 1000 / FPS); + }, 1000 / videoFps()); const frameNumberToRender = createMemo(() => { const preview = editorState.previewTime; diff --git a/apps/desktop/src/routes/editor/Player.tsx b/apps/desktop/src/routes/editor/Player.tsx index 53d26224a..239429d86 100644 --- a/apps/desktop/src/routes/editor/Player.tsx +++ b/apps/desktop/src/routes/editor/Player.tsx @@ -32,6 +32,7 @@ export function Player() { setEditorState, zoomOutLimit, setProject, + videoFps, } = useEditorContext(); // Load captions on mount @@ -120,15 +121,17 @@ export function Player() { await commands.stopPlayback(); setEditorState("playbackTime", 0); await commands.seekTo(0); - await commands.startPlayback(FPS, OUTPUT_SIZE); + await commands.startPlayback(videoFps(), OUTPUT_SIZE); setEditorState("playing", true); } else if (editorState.playing) { await commands.stopPlayback(); setEditorState("playing", false); } else { // Ensure we seek to the current playback time before starting playback - await commands.seekTo(Math.floor(editorState.playbackTime * FPS)); - await commands.startPlayback(FPS, OUTPUT_SIZE); + await commands.seekTo( + Math.floor(editorState.playbackTime * videoFps()) + ); + await commands.startPlayback(videoFps(), OUTPUT_SIZE); setEditorState("playing", true); } if (editorState.playing) setEditorState("previewTime", null); @@ -146,7 +149,9 @@ export function Player() { if (!editorState.playing) { if (prevTime !== null) setEditorState("playbackTime", prevTime); - await commands.seekTo(Math.floor(editorState.playbackTime * FPS)); + await commands.seekTo( + Math.floor(editorState.playbackTime * videoFps()) + ); } await handlePlayPauseClick(); @@ -320,8 +325,8 @@ function PreviewCanvas() { return (
{(currentFrame) => { diff --git a/apps/desktop/src/routes/editor/context.ts b/apps/desktop/src/routes/editor/context.ts index 4239f0aaf..77c0016f7 100644 --- a/apps/desktop/src/routes/editor/context.ts +++ b/apps/desktop/src/routes/editor/context.ts @@ -11,6 +11,7 @@ import { createEffect, createResource, createSignal, + createMemo, on, } from "solid-js"; import { createStore, produce, reconcile, unwrap } from "solid-js/store"; @@ -63,6 +64,17 @@ export const [EditorContextProvider, useEditorContext] = createContextProvider( props.editorInstance.savedProjectConfig ); + const videoFps = createMemo(() => { + const meta = props.meta(); + if ("display" in meta) { + return meta.display.fps || 30; + } + if ("segments" in meta && meta.segments.length > 0) { + return meta.segments[0].display.fps || 30; + } + return 30; + }); + const projectActions = { splitClipSegment: (time: number) => { setProject( @@ -231,7 +243,6 @@ export const [EditorContextProvider, useEditorContext] = createContextProvider( | { type: "zoom"; index: number } | { type: "clip"; index: number }, transform: { - // visible seconds zoom: zoomOutLimit(), updateZoom(z: number, origin: number) { const { zoom, position } = updateZoom( @@ -250,7 +261,6 @@ export const [EditorContextProvider, useEditorContext] = createContextProvider( transform.setPosition(position); }); }, - // number of seconds of leftmost point position: 0, setPosition(p: number) { setEditorState( @@ -288,9 +298,9 @@ export const [EditorContextProvider, useEditorContext] = createContextProvider( zoomOutLimit, exportState, setExportState, + videoFps, }; }, - // biome-ignore lint/style/noNonNullAssertion: it's ok null! ); @@ -384,8 +394,6 @@ export const [EditorInstanceContextProvider, useEditorInstanceContext] = function createStoreHistory( ...[state, setState]: ReturnType> ) { - // not working properly yet - // const getDelta = captureStoreUpdates(state); const [pauseCount, setPauseCount] = createSignal(0); diff --git a/apps/desktop/src/utils/tauri.ts b/apps/desktop/src/utils/tauri.ts index bc6cff9ae..7559d9838 100644 --- a/apps/desktop/src/utils/tauri.ts +++ b/apps/desktop/src/utils/tauri.ts @@ -226,6 +226,9 @@ async deleteWhisperModel(modelPath: string) : Promise { */ async exportCaptionsSrt(videoId: string) : Promise { return await TAURI_INVOKE("export_captions_srt", { videoId }); +}, +async importVideoFile(videoPath: string) : Promise { + return await TAURI_INVOKE("import_video_file", { videoPath }); } }
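
A minimal usage sketch (not part of the diff): calling the new binding from elsewhere in the
frontend, assuming the `commands` object exported by apps/desktop/src/utils/tauri.ts; the
helper name and the way the result is handled are illustrative.

    import { commands } from "~/utils/tauri";

    // Import a video file into a new .cap project and open it in the editor.
    // Mirrors the pattern used in settings/importer.tsx above: the command
    // resolves with the created project directory and is wrapped in try/catch
    // so a rejected invoke surfaces as an error message.
    async function importAndEdit(videoPath: string) {
      try {
        const projectPath = await commands.importVideoFile(videoPath);
        console.log("Created project at", projectPath);
      } catch (error) {
        console.error("Import failed:", error);
      }
    }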