diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index c73854a6a5..cb8f8d87f5 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -143,7 +143,7 @@ jobs: - target: aarch64-apple-darwin runner: macos-latest-xlarge - target: x86_64-pc-windows-msvc - runner: windows-latest + runner: windows-latest-8-cores env: TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }} TURBO_TEAM: ${{ secrets.TURBO_TEAM }} diff --git a/apps/desktop/src-tauri/Cargo.toml b/apps/desktop/src-tauri/Cargo.toml index b0a1ba8b8c..67ab6b2890 100644 --- a/apps/desktop/src-tauri/Cargo.toml +++ b/apps/desktop/src-tauri/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cap-desktop" -version = "0.4.1" +version = "0.4.2" description = "Beautiful screen recordings, owned by you." authors = ["you"] edition = "2024" diff --git a/apps/desktop/src-tauri/src/lib.rs b/apps/desktop/src-tauri/src/lib.rs index de11b751d1..d1d3418e10 100644 --- a/apps/desktop/src-tauri/src/lib.rs +++ b/apps/desktop/src-tauri/src/lib.rs @@ -2223,6 +2223,13 @@ async fn reset_microphone_permissions(_app: AppHandle) -> Result<(), ()> { Ok(()) } +#[tauri::command] +#[specta::specta] +#[instrument(skip(app))] +async fn clear_presets(app: AppHandle) -> Result<(), String> { + presets::PresetsStore::clear(&app) +} + #[tauri::command] #[specta::specta] #[instrument(skip(app))] @@ -2604,6 +2611,7 @@ pub async fn run(recording_logging_handle: LoggingHandle, logs_dir: PathBuf) { hotkeys::set_hotkey, reset_camera_permissions, reset_microphone_permissions, + clear_presets, is_camera_window_open, seek_to, get_display_frame_for_cropping, diff --git a/apps/desktop/src-tauri/src/presets.rs b/apps/desktop/src-tauri/src/presets.rs index bc2130bfef..3b0c3d0669 100644 --- a/apps/desktop/src-tauri/src/presets.rs +++ b/apps/desktop/src-tauri/src/presets.rs @@ -23,20 +23,28 @@ pub struct Preset { impl PresetsStore { fn get(app: &AppHandle) -> Result, String> { match app.store("store").map(|s| s.get("presets")) { - Ok(Some(store)) => { - // Handle potential deserialization errors gracefully - match serde_json::from_value(store) { - Ok(settings) => Ok(Some(settings)), - Err(_) => { - error!("Failed to deserialize presets store"); - Ok(None) - } + Ok(Some(store)) => match serde_json::from_value(store.clone()) { + Ok(settings) => Ok(Some(settings)), + Err(e) => { + error!( + "Failed to deserialize presets store: {}. Raw value: {}", + e, + serde_json::to_string_pretty(&store).unwrap_or_default() + ); + Ok(None) } - } + }, _ => Ok(None), } } + pub fn clear(app: &AppHandle) -> Result<(), String> { + let store = app.store("store").map_err(|e| e.to_string())?; + store.delete("presets"); + store.save().map_err(|e| e.to_string())?; + Ok(()) + } + pub fn get_default_preset(app: &AppHandle) -> Result, String> { let Some(this) = Self::get(app)? 
else { return Ok(None); diff --git a/apps/desktop/src/routes/editor/ConfigSidebar.tsx b/apps/desktop/src/routes/editor/ConfigSidebar.tsx index 97c57d636b..8765313716 100644 --- a/apps/desktop/src/routes/editor/ConfigSidebar.tsx +++ b/apps/desktop/src/routes/editor/ConfigSidebar.tsx @@ -1262,7 +1262,7 @@ function BackgroundConfig(props: { scrollRef: HTMLDivElement }) { photoUrl.replace("file://", ""), ); - debouncedSetProject(rawPath); + setWallpaperSource(rawPath); } catch (_err) { toast.error("Failed to set wallpaper"); } @@ -1324,17 +1324,14 @@ function BackgroundConfig(props: { scrollRef: HTMLDivElement }) { let fileInput!: HTMLInputElement; - // Optimize the debounced set project function - const debouncedSetProject = (wallpaperPath: string) => { + const setWallpaperSource = (wallpaperPath: string) => { const resumeHistory = projectHistory.pause(); - queueMicrotask(() => { - batch(() => { - setProject("background", "source", { - type: "wallpaper", - path: wallpaperPath, - } as const); - resumeHistory(); - }); + batch(() => { + setProject("background", "source", { + type: "wallpaper", + path: wallpaperPath, + } as const); + resumeHistory(); }); }; @@ -1600,7 +1597,7 @@ function BackgroundConfig(props: { scrollRef: HTMLDivElement }) { // Get the raw path without any URL prefixes - debouncedSetProject(wallpaper.rawPath); + setWallpaperSource(wallpaper.rawPath); ensurePaddingForBackground(); } catch (_err) { diff --git a/apps/desktop/src/routes/editor/Player.tsx b/apps/desktop/src/routes/editor/Player.tsx index 099089ccc6..f8b7353b06 100644 --- a/apps/desktop/src/routes/editor/Player.tsx +++ b/apps/desktop/src/routes/editor/Player.tsx @@ -73,22 +73,7 @@ export function PlayerContent() { end: segment.end, text: segment.text, })), - settings: { - enabled: captionsStore.state.settings.enabled, - font: captionsStore.state.settings.font, - size: captionsStore.state.settings.size, - color: captionsStore.state.settings.color, - backgroundColor: captionsStore.state.settings.backgroundColor, - backgroundOpacity: captionsStore.state.settings.backgroundOpacity, - position: captionsStore.state.settings.position, - italic: captionsStore.state.settings.italic, - outline: captionsStore.state.settings.outline, - outlineColor: captionsStore.state.settings.outlineColor, - exportWithSubtitles: - captionsStore.state.settings.exportWithSubtitles, - highlightColor: captionsStore.state.settings.highlightColor, - fadeDuration: captionsStore.state.settings.fadeDuration, - }, + settings: { ...captionsStore.state.settings }, }; // Update the project with captions data diff --git a/apps/desktop/src/routes/editor/PresetsDropdown.tsx b/apps/desktop/src/routes/editor/PresetsDropdown.tsx index eed4ee26ab..1df533ace4 100644 --- a/apps/desktop/src/routes/editor/PresetsDropdown.tsx +++ b/apps/desktop/src/routes/editor/PresetsDropdown.tsx @@ -49,7 +49,7 @@ export function PresetsDropdown() { setShowSettings(false); const normalizedConfig = normalizeProject({ ...preset.config, - timeline: project.timeline, + timeline: project.timeline ?? 
null, clips: project.clips, }); setProject(reconcile(normalizedConfig)); diff --git a/apps/desktop/src/routes/screenshot-editor/popovers/BackgroundSettingsPopover.tsx b/apps/desktop/src/routes/screenshot-editor/popovers/BackgroundSettingsPopover.tsx index 9bbf394746..7e500d8ac7 100644 --- a/apps/desktop/src/routes/screenshot-editor/popovers/BackgroundSettingsPopover.tsx +++ b/apps/desktop/src/routes/screenshot-editor/popovers/BackgroundSettingsPopover.tsx @@ -170,17 +170,14 @@ export function BackgroundSettingsPopover() { setProject("background", "source", source); }; - // Debounced set project for history - const debouncedSetProject = (wallpaperPath: string) => { + const setWallpaperSource = (wallpaperPath: string) => { const resumeHistory = projectHistory.pause(); - queueMicrotask(() => { - batch(() => { - setProject("background", "source", { - type: "wallpaper", - path: wallpaperPath, - } as const); - resumeHistory(); - }); + batch(() => { + setProject("background", "source", { + type: "wallpaper", + path: wallpaperPath, + } as const); + resumeHistory(); }); }; @@ -319,7 +316,7 @@ export function BackgroundSettingsPopover() { (w) => w.url === photoUrl, ); if (wallpaper) { - debouncedSetProject(wallpaper.rawPath); + setWallpaperSource(wallpaper.rawPath); ensurePaddingForBackground(); } }} diff --git a/apps/desktop/src/utils/createPresets.ts b/apps/desktop/src/utils/createPresets.ts index 1da8476695..24050703b7 100644 --- a/apps/desktop/src/utils/createPresets.ts +++ b/apps/desktop/src/utils/createPresets.ts @@ -25,23 +25,28 @@ export function createPresets() { return { query, createPreset: async (preset: CreatePreset) => { - const config = { ...preset.config }; - // @ts-expect-error we reeeally don't want the timeline in the preset - config.timeline = undefined; - config.clips = undefined; + const config = { + ...preset.config, + timeline: null, + clips: [], + }; await updatePresets((store) => { store.presets.push({ name: preset.name, config }); - store.default = preset.default ? store.presets.length : store.default; + store.default = preset.default + ? store.presets.length - 1 + : store.default; }); }, deletePreset: (index: number) => updatePresets((store) => { store.presets.splice(index, 1); - store.default = - index > store.presets.length - 1 - ? store.presets.length - 1 - : store.default; + if (store.default === null) return; + if (index === store.default) { + store.default = store.presets.length > 0 ? 
0 : null; + } else if (index < store.default) { + store.default = store.default - 1; + } }), setDefault: (index: number) => updatePresets((store) => { diff --git a/apps/desktop/src/utils/tauri.ts b/apps/desktop/src/utils/tauri.ts index 433b5b9c79..5cb79a0dad 100644 --- a/apps/desktop/src/utils/tauri.ts +++ b/apps/desktop/src/utils/tauri.ts @@ -194,6 +194,9 @@ async resetCameraPermissions() : Promise { async resetMicrophonePermissions() : Promise { return await TAURI_INVOKE("reset_microphone_permissions"); }, +async clearPresets() : Promise { + return await TAURI_INVOKE("clear_presets"); +}, async isCameraWindowOpen() : Promise { return await TAURI_INVOKE("is_camera_window_open"); }, @@ -380,15 +383,15 @@ export type AnnotationType = "arrow" | "circle" | "rectangle" | "text" | "mask" export type AppTheme = "system" | "light" | "dark" export type AspectRatio = "wide" | "vertical" | "square" | "classic" | "tall" export type Audio = { duration: number; sample_rate: number; channels: number; start_time: number } -export type AudioConfiguration = { mute: boolean; improve: boolean; micVolumeDb?: number; micStereoMode?: StereoMode; systemVolumeDb?: number } +export type AudioConfiguration = { mute: boolean; improve: boolean; micVolumeDb: number; micStereoMode: StereoMode; systemVolumeDb: number } export type AudioInputLevelChange = number export type AudioMeta = { path: string; start_time?: number | null; device_id?: string | null } export type AuthSecret = { api_key: string } | { token: string; expires: number } export type AuthStore = { secret: AuthSecret; user_id: string | null; plan: Plan | null; intercom_hash: string | null; organizations?: Organization[] } -export type BackgroundConfiguration = { source: BackgroundSource; blur: number; padding: number; rounding: number; roundingType?: CornerStyle; inset: number; crop: Crop | null; shadow?: number; advancedShadow?: ShadowConfiguration | null; border?: BorderConfiguration | null } +export type BackgroundConfiguration = { source: BackgroundSource; blur: number; padding: number; rounding: number; roundingType: CornerStyle; inset: number; crop: Crop | null; shadow: number; advancedShadow: ShadowConfiguration | null; border: BorderConfiguration | null } export type BackgroundSource = { type: "wallpaper"; path: string | null } | { type: "image"; path: string | null } | { type: "color"; value: [number, number, number]; alpha?: number } | { type: "gradient"; from: [number, number, number]; to: [number, number, number]; angle?: number } export type BorderConfiguration = { enabled: boolean; width: number; color: [number, number, number]; opacity: number } -export type Camera = { hide: boolean; mirror: boolean; position: CameraPosition; size: number; zoomSize: number | null; rounding?: number; shadow?: number; advancedShadow?: ShadowConfiguration | null; shape?: CameraShape; roundingType?: CornerStyle } +export type Camera = { hide: boolean; mirror: boolean; position: CameraPosition; size: number; zoomSize: number | null; rounding: number; shadow: number; advancedShadow: ShadowConfiguration | null; shape: CameraShape; roundingType: CornerStyle } export type CameraInfo = { device_id: string; model_id: ModelIDType | null; display_name: string } export type CameraPosition = { x: CameraXPosition; y: CameraYPosition } export type CameraPreviewShape = "round" | "square" | "full" @@ -398,7 +401,7 @@ export type CameraXPosition = "left" | "center" | "right" export type CameraYPosition = "top" | "bottom" export type CaptionData = { segments: CaptionSegment[]; 
settings: CaptionSettings | null } export type CaptionSegment = { id: string; start: number; end: number; text: string; words?: CaptionWord[] } -export type CaptionSettings = { enabled: boolean; font: string; size: number; color: string; backgroundColor: string; backgroundOpacity: number; position?: string; italic: boolean; fontWeight?: number; outline: boolean; outlineColor: string; exportWithSubtitles: boolean; highlightColor?: string; fadeDuration?: number; lingerDuration?: number; wordTransitionDuration?: number; activeWordHighlight?: boolean } +export type CaptionSettings = { enabled: boolean; font: string; size: number; color: string; backgroundColor: string; backgroundOpacity: number; position: string; italic: boolean; fontWeight: number; outline: boolean; outlineColor: string; exportWithSubtitles: boolean; highlightColor: string; fadeDuration: number; lingerDuration: number; wordTransitionDuration: number; activeWordHighlight: boolean } export type CaptionWord = { text: string; start: number; end: number } export type CaptionsData = { segments: CaptionSegment[]; settings: CaptionSettings } export type CaptureDisplay = { id: DisplayId; name: string; refresh_rate: number } @@ -414,7 +417,7 @@ export type CurrentRecording = { target: CurrentRecordingTarget; mode: Recording export type CurrentRecordingChanged = null export type CurrentRecordingTarget = { window: { id: WindowId; bounds: LogicalBounds | null } } | { screen: { id: DisplayId } } | { area: { screen: DisplayId; bounds: LogicalBounds } } export type CursorAnimationStyle = "slow" | "mellow" | "custom" -export type CursorConfiguration = { hide?: boolean; hideWhenIdle?: boolean; hideWhenIdleDelay?: number; size: number; type: CursorType; animationStyle: CursorAnimationStyle; tension: number; mass: number; friction: number; raw?: boolean; motionBlur?: number; useSvg?: boolean } +export type CursorConfiguration = { hide: boolean; hideWhenIdle: boolean; hideWhenIdleDelay: number; size: number; type: CursorType; animationStyle: CursorAnimationStyle; tension: number; mass: number; friction: number; raw: boolean; motionBlur: number; useSvg: boolean } export type CursorMeta = { imagePath: string; hotspot: XY; shape?: string | null } export type CursorType = "auto" | "pointer" | "circle" export type Cursors = { [key in string]: string } | { [key in string]: CursorMeta } @@ -483,7 +486,7 @@ export type PostDeletionBehaviour = "doNothing" | "reopenRecordingWindow" export type PostStudioRecordingBehaviour = "openEditor" | "showOverlay" export type Preset = { name: string; config: ProjectConfiguration } export type PresetsStore = { presets: Preset[]; default: number | null } -export type ProjectConfiguration = { aspectRatio: AspectRatio | null; background: BackgroundConfiguration; camera: Camera; audio: AudioConfiguration; cursor: CursorConfiguration; hotkeys: HotkeysConfiguration; timeline?: TimelineConfiguration | null; captions?: CaptionsData | null; clips?: ClipConfiguration[]; annotations?: Annotation[] } +export type ProjectConfiguration = { aspectRatio: AspectRatio | null; background: BackgroundConfiguration; camera: Camera; audio: AudioConfiguration; cursor: CursorConfiguration; hotkeys: HotkeysConfiguration; timeline: TimelineConfiguration | null; captions: CaptionsData | null; clips: ClipConfiguration[]; annotations: Annotation[] } export type ProjectRecordingsMeta = { segments: SegmentRecordings[] } export type RecordingAction = "Started" | "InvalidAuthentication" | "UpgradeRequired" export type RecordingDeleted = { path: string 
} diff --git a/apps/media-server/src/__tests__/index.test.ts b/apps/media-server/src/__tests__/index.test.ts index c87f4fc382..0eda76841e 100644 --- a/apps/media-server/src/__tests__/index.test.ts +++ b/apps/media-server/src/__tests__/index.test.ts @@ -10,7 +10,7 @@ describe("GET /", () => { expect(data).toEqual({ name: "@cap/media-server", version: "1.0.0", - endpoints: ["/health", "/audio/check", "/audio/extract"], + endpoints: ["/health", "/audio/status", "/audio/check", "/audio/extract"], }); }); }); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 2007755225..012869016a 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -46,7 +46,6 @@ impl Renderer { )?); let mut max_duration = recordings.duration(); - // Check camera duration if it exists if let Some(camera_path) = meta.camera_path() && let Ok(camera_duration) = recordings.get_source_duration(&recording_meta.path(&camera_path)) @@ -143,7 +142,7 @@ impl Renderer { break; } } - let frame = frame_renderer + match frame_renderer .render( current.segment_frames, current.uniforms, @@ -151,8 +150,14 @@ impl Renderer { &mut layers, ) .await - .unwrap(); - (self.frame_cb)(frame); + { + Ok(frame) => { + (self.frame_cb)(frame); + } + Err(e) => { + tracing::error!(error = %e, "Failed to render frame in editor"); + } + } let _ = current.finished.send(()); } @@ -182,7 +187,6 @@ impl RendererHandle { } pub async fn stop(&self) { - // Send a stop message to the renderer let (tx, rx) = oneshot::channel(); if self .tx @@ -190,9 +194,8 @@ impl RendererHandle { .await .is_err() { - println!("Failed to send stop message to renderer"); + tracing::warn!("Failed to send stop message to renderer"); } - // Wait for the renderer to acknowledge the stop let _ = rx.await; } } diff --git a/crates/project/src/configuration.rs b/crates/project/src/configuration.rs index 7de49265b8..f3883914e5 100644 --- a/crates/project/src/configuration.rs +++ b/crates/project/src/configuration.rs @@ -205,37 +205,34 @@ impl Crop { } #[derive(Type, Serialize, Deserialize, Clone, Debug)] +#[serde(default)] pub struct ShadowConfiguration { - pub size: f32, // Overall shadow size (0-100) - pub opacity: f32, // Shadow opacity (0-100) - pub blur: f32, // Shadow blur amount (0-100) + pub size: f32, + pub opacity: f32, + pub blur: f32, } #[derive(Type, Serialize, Deserialize, Clone, Debug, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "camelCase", default)] pub struct BorderConfiguration { pub enabled: bool, - pub width: f32, // Border width in pixels - pub color: Color, // Border color (RGB) - pub opacity: f32, // Border opacity (0-100) + pub width: f32, + pub color: Color, + pub opacity: f32, } #[derive(Type, Serialize, Deserialize, Clone, Debug)] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "camelCase", default)] pub struct BackgroundConfiguration { pub source: BackgroundSource, pub blur: f64, pub padding: f64, pub rounding: f64, - #[serde(default)] pub rounding_type: CornerStyle, pub inset: u32, pub crop: Option, - #[serde(default)] pub shadow: f32, - #[serde(default)] pub advanced_shadow: Option, - #[serde(default)] pub border: Option, } @@ -285,14 +282,14 @@ pub enum CameraYPosition { } #[derive(Type, Serialize, Deserialize, Clone, Debug, Default)] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "camelCase", default)] pub struct CameraPosition { pub x: CameraXPosition, pub y: CameraYPosition, } #[derive(Debug, Clone, Serialize, Deserialize, Type)] -#[serde(rename_all = "camelCase")] 
+#[serde(rename_all = "camelCase", default)] pub struct Camera { pub hide: bool, pub mirror: bool, @@ -300,15 +297,12 @@ pub struct Camera { pub size: f32, #[serde(alias = "zoom_size")] pub zoom_size: Option, - #[serde(default = "Camera::default_rounding")] pub rounding: f32, - #[serde(default)] pub shadow: f32, - #[serde(alias = "advanced_shadow", default)] + #[serde(alias = "advanced_shadow")] pub advanced_shadow: Option, - #[serde(default)] pub shape: CameraShape, - #[serde(alias = "rounding_type", default)] + #[serde(alias = "rounding_type")] pub rounding_type: CornerStyle, } @@ -371,15 +365,12 @@ pub enum StereoMode { } #[derive(Type, Serialize, Deserialize, Clone, Debug, PartialEq)] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "camelCase", default)] pub struct AudioConfiguration { pub mute: bool, pub improve: bool, - #[serde(default)] pub mic_volume_db: f32, - #[serde(default)] pub mic_stereo_mode: StereoMode, - #[serde(default)] pub system_volume_db: f32, } @@ -440,13 +431,10 @@ impl CursorAnimationStyle { } #[derive(Type, Serialize, Deserialize, Clone, Debug)] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "camelCase", default)] pub struct CursorConfiguration { - #[serde(default)] pub hide: bool, - #[serde(default)] pub hide_when_idle: bool, - #[serde(default = "CursorConfiguration::default_hide_when_idle_delay")] pub hide_when_idle_delay: f32, pub size: u32, r#type: CursorType, @@ -454,18 +442,11 @@ pub struct CursorConfiguration { pub tension: f32, pub mass: f32, pub friction: f32, - #[serde(default = "CursorConfiguration::default_raw")] pub raw: bool, - #[serde(default)] pub motion_blur: f32, - #[serde(default = "yes")] pub use_svg: bool, } -fn yes() -> bool { - true -} - impl Default for CursorConfiguration { fn default() -> Self { let animation_style = CursorAnimationStyle::default(); @@ -479,7 +460,7 @@ impl Default for CursorConfiguration { tension: 65.0, mass: 1.8, friction: 16.0, - raw: false, + raw: true, motion_blur: 0.5, use_svg: true, }; @@ -494,10 +475,6 @@ impl Default for CursorConfiguration { } } impl CursorConfiguration { - fn default_raw() -> bool { - true - } - fn default_hide_when_idle_delay() -> f32 { 2.0 } @@ -508,7 +485,7 @@ impl CursorConfiguration { } #[derive(Type, Serialize, Deserialize, Clone, Debug, Default)] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "camelCase", default)] pub struct HotkeysConfiguration { show: bool, } @@ -777,7 +754,7 @@ pub enum CaptionPosition { } #[derive(Type, Serialize, Deserialize, Clone, Debug)] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "camelCase", default)] pub struct CaptionSettings { pub enabled: bool, pub font: String, @@ -787,40 +764,24 @@ pub struct CaptionSettings { pub background_color: String, #[serde(alias = "backgroundOpacity")] pub background_opacity: u32, - #[serde(default)] pub position: String, pub italic: bool, - #[serde(alias = "fontWeight", default = "CaptionSettings::default_font_weight")] + #[serde(alias = "fontWeight")] pub font_weight: u32, pub outline: bool, #[serde(alias = "outlineColor")] pub outline_color: String, #[serde(alias = "exportWithSubtitles")] pub export_with_subtitles: bool, - #[serde( - alias = "highlightColor", - default = "CaptionSettings::default_highlight_color" - )] + #[serde(alias = "highlightColor")] pub highlight_color: String, - #[serde( - alias = "fadeDuration", - default = "CaptionSettings::default_fade_duration" - )] + #[serde(alias = "fadeDuration")] pub fade_duration: f32, - #[serde( - alias = "lingerDuration", - 
default = "CaptionSettings::default_linger_duration" - )] + #[serde(alias = "lingerDuration")] pub linger_duration: f32, - #[serde( - alias = "wordTransitionDuration", - default = "CaptionSettings::default_word_transition_duration" - )] + #[serde(alias = "wordTransitionDuration")] pub word_transition_duration: f32, - #[serde( - alias = "activeWordHighlight", - default = "CaptionSettings::default_active_word_highlight" - )] + #[serde(alias = "activeWordHighlight")] pub active_word_highlight: bool, } @@ -892,7 +853,7 @@ pub struct ClipOffsets { } #[derive(Type, Serialize, Deserialize, Clone, Debug, Default)] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "camelCase", default)] pub struct ClipConfiguration { pub index: u32, pub offsets: ClipOffsets, @@ -1020,7 +981,7 @@ impl Annotation { } #[derive(Type, Serialize, Deserialize, Clone, Debug, Default)] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "camelCase", default)] pub struct ProjectConfiguration { pub aspect_ratio: Option, pub background: BackgroundConfiguration, @@ -1028,15 +989,11 @@ pub struct ProjectConfiguration { pub audio: AudioConfiguration, pub cursor: CursorConfiguration, pub hotkeys: HotkeysConfiguration, - #[serde(default)] pub timeline: Option, - #[serde(default)] pub captions: Option, - #[serde(default)] pub clips: Vec, - #[serde(default)] pub annotations: Vec, - #[serde(default, skip_serializing)] + #[serde(skip_serializing)] pub hidden_text_segments: Vec, } diff --git a/crates/recording/src/output_pipeline/core.rs b/crates/recording/src/output_pipeline/core.rs index fb1cc05fec..ed150eb3e2 100644 --- a/crates/recording/src/output_pipeline/core.rs +++ b/crates/recording/src/output_pipeline/core.rs @@ -29,113 +29,32 @@ const CONSECUTIVE_ANOMALY_ERROR_THRESHOLD: u64 = 30; const LARGE_BACKWARD_JUMP_SECS: f64 = 1.0; const LARGE_FORWARD_JUMP_SECS: f64 = 5.0; -struct AudioDriftTracker { - baseline_offset_secs: Option, - drift_warning_logged: bool, - first_frame_timestamp_secs: Option, - last_valid_wall_clock_secs: f64, - resync_count: u32, +struct AudioTimestampGenerator { + sample_rate: u32, + total_samples: u64, } -const AUDIO_WALL_CLOCK_TOLERANCE_SECS: f64 = 0.1; const VIDEO_WALL_CLOCK_TOLERANCE_SECS: f64 = 0.1; -const AUDIO_LARGE_FORWARD_JUMP_SECS: f64 = 5.0; -impl AudioDriftTracker { - fn new() -> Self { +impl AudioTimestampGenerator { + fn new(sample_rate: u32) -> Self { Self { - baseline_offset_secs: None, - drift_warning_logged: false, - first_frame_timestamp_secs: None, - last_valid_wall_clock_secs: 0.0, - resync_count: 0, + sample_rate, + total_samples: 0, } } - fn calculate_timestamp( - &mut self, - frame_timestamp_secs: f64, - wall_clock_secs: f64, - ) -> Option { - if frame_timestamp_secs < 0.0 { - return None; - } - - let first_timestamp = *self - .first_frame_timestamp_secs - .get_or_insert(frame_timestamp_secs); - let frame_elapsed_secs = frame_timestamp_secs - first_timestamp; - - if frame_elapsed_secs < 0.0 { - return None; - } - - if frame_elapsed_secs > wall_clock_secs + AUDIO_LARGE_FORWARD_JUMP_SECS { - self.resync_count += 1; - warn!( - frame_elapsed_secs, - wall_clock_secs, - forward_jump_secs = frame_elapsed_secs - wall_clock_secs, - resync_count = self.resync_count, - "Audio timestamp jumped forward (system sleep/wake?), resyncing to wall clock" - ); - - self.first_frame_timestamp_secs = Some(frame_timestamp_secs - wall_clock_secs); - self.baseline_offset_secs = None; - self.drift_warning_logged = false; - - self.last_valid_wall_clock_secs = wall_clock_secs; - return 
Some(Duration::from_secs_f64(wall_clock_secs)); - } - - if frame_elapsed_secs > wall_clock_secs + AUDIO_WALL_CLOCK_TOLERANCE_SECS { - return None; - } - - if wall_clock_secs >= 2.0 && frame_elapsed_secs >= 2.0 { - if self.baseline_offset_secs.is_none() { - let offset = frame_elapsed_secs - wall_clock_secs; - debug!( - wall_clock_secs, - frame_elapsed_secs, - baseline_offset_secs = offset, - "Capturing audio baseline offset after warmup" - ); - self.baseline_offset_secs = Some(offset); - } - - let baseline = self.baseline_offset_secs.unwrap_or(0.0); - let adjusted_frame_elapsed = frame_elapsed_secs - baseline; - let drift_ratio = if adjusted_frame_elapsed > 0.0 { - wall_clock_secs / adjusted_frame_elapsed - } else { - 1.0 - }; - - if !(0.90..=1.10).contains(&drift_ratio) && !self.drift_warning_logged { - warn!( - drift_ratio, - wall_clock_secs, - adjusted_frame_elapsed, - baseline, - "Significant audio clock drift detected" - ); - self.drift_warning_logged = true; - } - - self.last_valid_wall_clock_secs = wall_clock_secs; - let corrected_secs = adjusted_frame_elapsed * drift_ratio; - return Some(Duration::from_secs_f64(corrected_secs.max(0.0))); - } - - self.last_valid_wall_clock_secs = wall_clock_secs; - Some(Duration::from_secs_f64(frame_elapsed_secs.max(0.0))) + fn next_timestamp(&mut self, frame_samples: u64) -> Duration { + let timestamp_secs = self.total_samples as f64 / self.sample_rate as f64; + self.total_samples += frame_samples; + Duration::from_secs_f64(timestamp_secs) } } struct VideoDriftTracker { baseline_offset_secs: Option, capped_frame_count: u64, + drift_warning_logged: bool, } impl VideoDriftTracker { @@ -143,6 +62,7 @@ impl VideoDriftTracker { Self { baseline_offset_secs: None, capped_frame_count: 0, + drift_warning_logged: false, } } @@ -184,13 +104,16 @@ impl VideoDriftTracker { }; let corrected_secs = if !(0.95..=1.05).contains(&drift_ratio) { - warn!( - drift_ratio, - wall_clock_secs, - adjusted_camera_secs, - baseline, - "Extreme video clock drift detected after baseline correction, clamping" - ); + if !self.drift_warning_logged { + warn!( + drift_ratio, + wall_clock_secs, + adjusted_camera_secs, + baseline, + "Extreme video clock drift detected after baseline correction, clamping" + ); + self.drift_warning_logged = true; + } let clamped_ratio = drift_ratio.clamp(0.95, 1.05); adjusted_camera_secs * clamped_ratio } else { @@ -471,6 +394,60 @@ impl SharedPauseState { } } +struct SharedWallClockPauseInner { + pause_started_at: Option, + total_pause_duration: Duration, +} + +#[derive(Clone)] +pub struct SharedWallClockPause { + flag: Arc, + inner: Arc>, +} + +impl SharedWallClockPause { + pub fn new(flag: Arc) -> Self { + Self { + flag, + inner: Arc::new(std::sync::Mutex::new(SharedWallClockPauseInner { + pause_started_at: None, + total_pause_duration: Duration::ZERO, + })), + } + } + + pub fn check(&self) -> (bool, Duration) { + let is_paused = self.flag.load(Ordering::Acquire); + let mut inner = match self.inner.lock() { + Ok(guard) => guard, + Err(poisoned) => poisoned.into_inner(), + }; + + if is_paused { + if inner.pause_started_at.is_none() { + inner.pause_started_at = Some(std::time::Instant::now()); + } + } else if let Some(started) = inner.pause_started_at.take() { + let delta = started.elapsed(); + inner.total_pause_duration = inner.total_pause_duration.saturating_add(delta); + debug!( + pause_delta_ms = delta.as_millis(), + total_pause_ms = inner.total_pause_duration.as_millis(), + "Shared pause state: resumed" + ); + } + + (is_paused, 
inner.total_pause_duration) + } + + pub fn total_pause_duration(&self) -> Duration { + match self.inner.lock() { + Ok(guard) => guard.total_pause_duration, + Err(poisoned) => poisoned.into_inner().total_pause_duration, + } + } +} + pub struct OnceSender(Option>); impl OnceSender { @@ -652,6 +629,8 @@ impl OutputPipelineBuilder> { ) .await?; + let shared_pause = SharedWallClockPause::new(build_ctx.pause_flag.clone()); + spawn_video_encoder( &mut setup_ctx, video_source, @@ -660,6 +639,7 @@ impl OutputPipelineBuilder> { build_ctx.stop_token.clone(), muxer.clone(), timestamps, + shared_pause.clone(), ); finish_build( @@ -671,7 +651,7 @@ impl OutputPipelineBuilder> { build_ctx.done_tx, None, &path, - build_ctx.pause_flag.clone(), + shared_pause, ) .await?; @@ -727,6 +707,8 @@ impl OutputPipelineBuilder { ) .await?; + let shared_pause = SharedWallClockPause::new(build_ctx.pause_flag.clone()); + finish_build( setup_ctx, audio, @@ -736,7 +718,7 @@ impl OutputPipelineBuilder { build_ctx.done_tx, Some(first_tx), &path, - build_ctx.pause_flag.clone(), + shared_pause, ) .await?; @@ -791,7 +773,7 @@ async fn finish_build( done_tx: oneshot::Sender>, first_tx: Option>, path: &Path, - pause_flag: Arc, + shared_pause: SharedWallClockPause, ) -> anyhow::Result<()> { if let Some(audio) = audio { audio.configure( @@ -800,7 +782,7 @@ async fn finish_build( stop_token.clone(), timestamps, first_tx, - pause_flag, + shared_pause, ); } @@ -885,6 +867,7 @@ async fn setup_muxer( Ok(muxer) } +#[allow(clippy::too_many_arguments)] fn spawn_video_encoder, TVideo: VideoSource>( setup_ctx: &mut SetupCtx, mut video_source: TVideo, @@ -893,6 +876,7 @@ fn spawn_video_encoder, TVideo: V stop_token: CancellationToken, muxer: Arc>, timestamps: Timestamps, + shared_pause: SharedWallClockPause, ) { setup_ctx.tasks().spawn("capture-video", { let stop_token = stop_token.clone(); @@ -916,10 +900,18 @@ fn spawn_video_encoder, TVideo: V let mut frame_count = 0u64; let mut anomaly_tracker = TimestampAnomalyTracker::new("video"); let mut drift_tracker = VideoDriftTracker::new(); + let mut dropped_during_pause: u64 = 0; let res = stop_token .run_until_cancelled(async { while let Some(frame) = video_rx.next().await { + let (is_paused, total_pause_duration) = shared_pause.check(); + + if is_paused { + dropped_during_pause += 1; + continue; + } + frame_count += 1; let timestamp = frame.timestamp(); @@ -938,7 +930,8 @@ fn spawn_video_encoder, TVideo: V } }; - let wall_clock_elapsed = timestamps.instant().elapsed(); + let raw_wall_clock = timestamps.instant().elapsed(); + let wall_clock_elapsed = raw_wall_clock.saturating_sub(total_pause_duration); let duration = drift_tracker.calculate_timestamp(raw_duration, wall_clock_elapsed); if frame_count.is_multiple_of(300) { @@ -954,6 +947,7 @@ fn spawn_video_encoder, TVideo: V corrected_secs = duration.as_secs_f64(), drift_ratio, baseline_offset = drift_tracker.baseline_offset_secs, + total_pause_ms = total_pause_duration.as_millis(), "Video drift correction status" ); } @@ -1006,7 +1000,9 @@ fn spawn_video_encoder, TVideo: V } }; - let wall_clock_elapsed = timestamps.instant().elapsed(); + let raw_wall_clock = timestamps.instant().elapsed(); + let total_pause = shared_pause.total_pause_duration(); + let wall_clock_elapsed = raw_wall_clock.saturating_sub(total_pause); let duration = drift_tracker.calculate_timestamp(raw_duration, wall_clock_elapsed); match muxer.lock().await.send_video_frame(frame, duration) { @@ -1029,6 +1025,16 @@ fn spawn_video_encoder, TVideo: V } } + let final_pause_duration = 
shared_pause.total_pause_duration(); + + if dropped_during_pause > 0 { + debug!( + dropped_during_pause, + total_pause_ms = final_pause_duration.as_millis(), + "Video frames dropped during pause" + ); + } + anomaly_tracker.log_stats_if_notable(); if drift_tracker.capped_frame_count() > 0 { debug!( @@ -1067,7 +1073,7 @@ impl PreparedAudioSources { stop_token: CancellationToken, timestamps: Timestamps, mut first_tx: Option>, - pause_flag: Arc, + shared_pause: SharedWallClockPause, ) { let sample_rate = self.audio_info.sample_rate; @@ -1075,82 +1081,44 @@ impl PreparedAudioSources { let stop_token = stop_token.child_token(); let muxer = muxer.clone(); async move { - let mut anomaly_tracker = TimestampAnomalyTracker::new("audio"); - let mut drift_tracker = AudioDriftTracker::new(); - let mut total_samples: u64 = 0; + let mut timestamp_generator = AudioTimestampGenerator::new(sample_rate); let mut dropped_during_pause: u64 = 0; - let mut pause_start: Option = None; - let mut total_pause_duration = Duration::ZERO; + let mut frame_count: u64 = 0; let res = stop_token .run_until_cancelled(async { while let Some(frame) = self.audio_rx.next().await { - let is_paused = pause_flag.load(Ordering::Acquire); + let (is_paused, total_pause_duration) = shared_pause.check(); if is_paused { - if pause_start.is_none() { - pause_start = Some(std::time::Instant::now()); - } dropped_during_pause += 1; continue; } - if let Some(start) = pause_start.take() { - let pause_duration = start.elapsed(); - total_pause_duration = - total_pause_duration.saturating_add(pause_duration); - debug!( - pause_duration_ms = pause_duration.as_millis(), - total_pause_duration_ms = total_pause_duration.as_millis(), - "Audio resumed after pause" - ); - } - if let Some(first_tx) = first_tx.take() { let _ = first_tx.send(frame.timestamp); } let frame_samples = frame.inner.samples() as u64; - total_samples += frame_samples; - - let _ = anomaly_tracker.process_timestamp(frame.timestamp, timestamps); - - let raw_wall_clock = timestamps.instant().elapsed(); - let effective_wall_clock = - raw_wall_clock.saturating_sub(total_pause_duration); - let wall_clock_secs = effective_wall_clock.as_secs_f64(); - - let frame_timestamp_secs = - frame.timestamp.signed_duration_since_secs(timestamps); - - if wall_clock_secs >= 5.0 && (wall_clock_secs as u64).is_multiple_of(10) - { - let total_input_duration_secs = - total_samples as f64 / sample_rate as f64; - let drift_ratio = if frame_timestamp_secs > 0.0 { - wall_clock_secs / frame_timestamp_secs - } else { - 1.0 - }; + frame_count += 1; + + let timestamp = timestamp_generator.next_timestamp(frame_samples); + + if frame_count.is_multiple_of(500) { + let raw_wall_clock = timestamps.instant().elapsed(); + let effective_wall_clock = + raw_wall_clock.saturating_sub(total_pause_duration); debug!( - wall_clock_secs, - total_input_duration_secs, - frame_timestamp_secs, - drift_ratio, - total_samples, - baseline_offset = drift_tracker.baseline_offset_secs, + wall_clock_secs = effective_wall_clock.as_secs_f64(), + sample_based_secs = timestamp.as_secs_f64(), + total_samples = timestamp_generator.total_samples, + frame_count, total_pause_ms = total_pause_duration.as_millis(), - "Audio drift correction status" + "Audio timestamp status" ); } - let timestamp = drift_tracker - .calculate_timestamp(frame_timestamp_secs, wall_clock_secs); - - if let Some(timestamp) = timestamp - && let Err(e) = - muxer.lock().await.send_audio_frame(frame, timestamp) - { + if let Err(e) = muxer.lock().await.send_audio_frame(frame, 
timestamp) { error!("Audio encoder: {e}"); } } @@ -1158,16 +1126,16 @@ impl PreparedAudioSources { }) .await; + let final_pause_duration = shared_pause.total_pause_duration(); + if dropped_during_pause > 0 { debug!( dropped_during_pause, - total_pause_ms = total_pause_duration.as_millis(), + total_pause_ms = final_pause_duration.as_millis(), "Audio frames dropped during pause (not counted in samples)" ); } - anomaly_tracker.log_stats_if_notable(); - for source in &mut self.erased_audio_sources { let _ = (source.stop_fn)(source.inner.as_mut()).await; } @@ -1533,182 +1501,120 @@ pub trait VideoMuxer: Muxer { mod tests { use super::*; - mod audio_drift_tracker { + mod audio_timestamp_generator { use super::*; #[test] - fn returns_frame_based_time_during_warmup() { - let mut tracker = AudioDriftTracker::new(); - let frame_timestamp = 1.0; - let wall_clock = 1.5; - let result = tracker - .calculate_timestamp(frame_timestamp, wall_clock) - .expect("Should not be capped when frame time < wall clock"); - let expected = Duration::ZERO; - assert!( - (result.as_secs_f64() - expected.as_secs_f64()).abs() < 0.001, - "First frame should have ~0s timestamp, got {:.3}s", - result.as_secs_f64() - ); - assert!( - tracker.baseline_offset_secs.is_none(), - "Baseline should not be set during warmup" + fn first_timestamp_is_zero() { + let mut generator = AudioTimestampGenerator::new(48000); + let result = generator.next_timestamp(960); + assert_eq!( + result, + Duration::ZERO, + "First frame should have 0s timestamp" ); } #[test] - fn captures_baseline_after_warmup() { - let mut tracker = AudioDriftTracker::new(); - let buffer_delay = 0.05; - - tracker.calculate_timestamp(0.0, 0.0); - - let frame_timestamp = 2.0 + buffer_delay; - let wall_clock = 2.0; - tracker.calculate_timestamp(frame_timestamp, wall_clock); - - assert!(tracker.baseline_offset_secs.is_some()); - let baseline = tracker.baseline_offset_secs.unwrap(); - assert!( - (baseline - buffer_delay).abs() < 0.001, - "Baseline should be ~{buffer_delay:.3}s, got {baseline:.3}s" + fn tracks_samples_correctly() { + let mut generator = AudioTimestampGenerator::new(48000); + generator.next_timestamp(960); + assert_eq!( + generator.total_samples, 960, + "Should track samples after first call" ); + + generator.next_timestamp(960); + assert_eq!(generator.total_samples, 1920, "Should accumulate samples"); } #[test] - fn applies_drift_correction_after_warmup() { - let mut tracker = AudioDriftTracker::new(); - let buffer_delay = 0.05; - - tracker.calculate_timestamp(0.0, 0.0); - - let frame_timestamp_1 = 2.0 + buffer_delay; - let wall_clock_1 = 2.0; - tracker.calculate_timestamp(frame_timestamp_1, wall_clock_1); + fn calculates_timestamp_from_samples() { + let sample_rate = 48000; + let mut generator = AudioTimestampGenerator::new(sample_rate); + let samples_per_frame = 960; - let frame_timestamp_2 = 10.0 + buffer_delay; - let wall_clock_2 = 10.0; - let result = tracker - .calculate_timestamp(frame_timestamp_2, wall_clock_2) - .expect("Should not be capped"); + generator.next_timestamp(samples_per_frame); + let second = generator.next_timestamp(samples_per_frame); - let expected = Duration::from_secs_f64(wall_clock_2); + let expected_secs = samples_per_frame as f64 / sample_rate as f64; assert!( - (result.as_secs_f64() - expected.as_secs_f64()).abs() < 0.1, - "Expected drift-corrected time ~{:.3}s, got {:.3}s", - expected.as_secs_f64(), - result.as_secs_f64() + (second.as_secs_f64() - expected_secs).abs() < 0.0001, + "Expected {expected_secs:.6}s, got {:.6}s", + 
second.as_secs_f64() ); } #[test] fn continuous_timestamps_no_gaps() { - let mut tracker = AudioDriftTracker::new(); + let sample_rate = 48000; + let mut generator = AudioTimestampGenerator::new(sample_rate); + let samples_per_frame = 960; let mut last_timestamp = Duration::ZERO; for i in 0..100 { - let frame_timestamp = i as f64 * 0.02; - let wall_clock = i as f64 * 0.02; - if let Some(result) = tracker.calculate_timestamp(frame_timestamp, wall_clock) { - if i > 0 { - let gap = result.as_secs_f64() - last_timestamp.as_secs_f64(); - assert!( - (0.0..0.05).contains(&gap), - "Gap between frames should be small: {gap:.3}s at frame {i}" - ); - } - - last_timestamp = result; + let result = generator.next_timestamp(samples_per_frame); + if i > 0 { + let gap = result.as_secs_f64() - last_timestamp.as_secs_f64(); + let expected_gap = samples_per_frame as f64 / sample_rate as f64; + assert!( + (gap - expected_gap).abs() < 0.0001, + "Gap between frames should be {expected_gap:.6}s, got {gap:.6}s at frame {i}" + ); } + last_timestamp = result; } } #[test] - fn continuous_across_warmup_boundary() { - let mut tracker = AudioDriftTracker::new(); - - tracker.calculate_timestamp(0.0, 0.0); + fn handles_variable_frame_sizes() { + let sample_rate = 48000; + let mut generator = AudioTimestampGenerator::new(sample_rate); - let frame_timestamp_1 = 2.0; - let wall_clock_1 = 2.1; - let result1 = tracker - .calculate_timestamp(frame_timestamp_1, wall_clock_1) - .expect("Should not be capped"); + generator.next_timestamp(480); + let second = generator.next_timestamp(960); + let third = generator.next_timestamp(1920); - let frame_timestamp_2 = 2.02; - let wall_clock_2 = 2.12; - let result2 = tracker - .calculate_timestamp(frame_timestamp_2, wall_clock_2) - .expect("Should not be capped"); + let expected_second = 480.0 / sample_rate as f64; + let expected_third = (480.0 + 960.0) / sample_rate as f64; - let gap = result2.as_secs_f64() - result1.as_secs_f64(); - let expected_gap = 0.02; assert!( - (gap - expected_gap).abs() < 0.01, - "Gap across warmup boundary should be continuous: expected {expected_gap:.3}s, got {gap:.3}s" + (second.as_secs_f64() - expected_second).abs() < 0.0001, + "Second timestamp: expected {expected_second:.6}s, got {:.6}s", + second.as_secs_f64() + ); + assert!( + (third.as_secs_f64() - expected_third).abs() < 0.0001, + "Third timestamp: expected {expected_third:.6}s, got {:.6}s", + third.as_secs_f64() ); } #[test] - fn simulates_real_world_scenario_with_drift() { - let mut tracker = AudioDriftTracker::new(); - let initial_offset = 0.05; - let drift_rate = 0.004; + fn simulates_long_recording() { + let sample_rate = 48000; + let mut generator = AudioTimestampGenerator::new(sample_rate); + let samples_per_frame = 960u64; + let frames_per_second = sample_rate as u64 / samples_per_frame; + let duration_secs = 3600u64; + let total_frames = frames_per_second * duration_secs; - let mut frame_time = initial_offset; - let mut wall_time = 0.0; - let step = 0.5; - - while wall_time < 60.0 { - if let Some(result) = tracker.calculate_timestamp(frame_time, wall_time) { - if wall_time >= 2.0 { - let error = (result.as_secs_f64() - wall_time).abs(); - assert!( - error < 0.5, - "At wall_time={:.1}s: result {:.3}s should be close to wall clock", - wall_time, - result.as_secs_f64() - ); - } - } - - wall_time += step; - frame_time += step * (1.0 + drift_rate); + let mut last_timestamp = Duration::ZERO; + for _ in 0..total_frames { + last_timestamp = generator.next_timestamp(samples_per_frame); } - } - - #[test] 
- fn preserves_baseline_across_multiple_calls() { - let mut tracker = AudioDriftTracker::new(); - - tracker.calculate_timestamp(0.0, 0.0); - tracker.calculate_timestamp(2.1, 2.0); - - let first_baseline = tracker.baseline_offset_secs; - - tracker.calculate_timestamp(10.1, 10.0); - - assert_eq!( - first_baseline, tracker.baseline_offset_secs, - "Baseline should not change after initial capture" - ); - } - - #[test] - fn rejects_negative_timestamps() { - let mut tracker = AudioDriftTracker::new(); - let result = tracker.calculate_timestamp(-1.0, 1.0); - assert!(result.is_none(), "Negative timestamps should be rejected"); - } - #[test] - fn rejects_timestamps_too_far_ahead_of_wall_clock() { - let mut tracker = AudioDriftTracker::new(); - tracker.calculate_timestamp(0.0, 0.0); - let result = tracker.calculate_timestamp(5.0, 1.0); + let expected_secs = + ((total_frames - 1) * samples_per_frame) as f64 / sample_rate as f64; assert!( - result.is_none(), - "Timestamps too far ahead of wall clock should be rejected" + (last_timestamp.as_secs_f64() - expected_secs).abs() < 0.001, + "After 1 hour: expected {expected_secs:.3}s, got {:.3}s", + last_timestamp.as_secs_f64() + ); + assert_eq!( + generator.total_samples, + total_frames * samples_per_frame, + "Total samples should equal total_frames * samples_per_frame" ); } } diff --git a/crates/recording/src/output_pipeline/win.rs b/crates/recording/src/output_pipeline/win.rs index 71d5a127c2..9da08a5d48 100644 --- a/crates/recording/src/output_pipeline/win.rs +++ b/crates/recording/src/output_pipeline/win.rs @@ -8,7 +8,7 @@ use std::{ path::PathBuf, sync::{ Arc, Mutex, - atomic::{AtomicBool, Ordering}, + atomic::AtomicBool, mpsc::{RecvTimeoutError, SyncSender, TrySendError, sync_channel}, }, time::Duration, @@ -96,76 +96,10 @@ use windows::{ }, }; -struct PauseTracker { - flag: Arc, - paused_at: Option, - offset: Duration, -} - -impl PauseTracker { - fn new(flag: Arc) -> Self { - Self { - flag, - paused_at: None, - offset: Duration::ZERO, - } - } - - fn adjust(&mut self, timestamp: Duration) -> anyhow::Result> { - if self.flag.load(Ordering::Acquire) { - if self.paused_at.is_none() { - self.paused_at = Some(timestamp); - } - return Ok(None); - } - - if let Some(start) = self.paused_at.take() { - let delta = match timestamp.checked_sub(start) { - Some(d) => d, - None => { - warn!( - resume_at = ?start, - current = ?timestamp, - "Timestamp anomaly: frame timestamp went backward during unpause (clock skew?), treating as zero delta" - ); - Duration::ZERO - } - }; - - self.offset = match self.offset.checked_add(delta) { - Some(o) => o, - None => { - warn!( - offset = ?self.offset, - delta = ?delta, - "Timestamp anomaly: pause offset overflow, clamping to MAX" - ); - Duration::MAX - } - }; - } - - let adjusted = match timestamp.checked_sub(self.offset) { - Some(t) => t, - None => { - warn!( - timestamp = ?timestamp, - offset = ?self.offset, - "Timestamp anomaly: adjusted timestamp underflow (clock skew?), using zero" - ); - Duration::ZERO - } - }; - - Ok(Some(adjusted)) - } -} - pub struct WindowsMuxer { video_tx: SyncSender>, output: Arc>, audio_encoder: Option, - pause: PauseTracker, frame_drops: FrameDropTracker, } @@ -188,7 +122,7 @@ impl Muxer for WindowsMuxer { output_path: PathBuf, video_config: Option, audio_config: Option, - pause_flag: Arc, + _pause_flag: Arc, tasks: &mut TaskPool, ) -> anyhow::Result where @@ -539,7 +473,6 @@ impl Muxer for WindowsMuxer { video_tx, output, audio_encoder, - pause: PauseTracker::new(pause_flag), frame_drops: 
FrameDropTracker::new(), }) } @@ -573,17 +506,15 @@ impl VideoMuxer for WindowsMuxer { frame: Self::VideoFrame, timestamp: Duration, ) -> anyhow::Result<()> { - if let Some(timestamp) = self.pause.adjust(timestamp)? { - match self.video_tx.try_send(Some((frame.frame, timestamp))) { - Ok(()) => { - self.frame_drops.record_frame(); - } - Err(TrySendError::Full(_)) => { - self.frame_drops.record_drop(); - } - Err(TrySendError::Disconnected(_)) => { - trace!("Windows MP4 encoder channel disconnected"); - } + match self.video_tx.try_send(Some((frame.frame, timestamp))) { + Ok(()) => { + self.frame_drops.record_frame(); + } + Err(TrySendError::Full(_)) => { + self.frame_drops.record_drop(); + } + Err(TrySendError::Disconnected(_)) => { + trace!("Windows MP4 encoder channel disconnected"); } } @@ -593,8 +524,7 @@ impl VideoMuxer for WindowsMuxer { impl AudioMuxer for WindowsMuxer { fn send_audio_frame(&mut self, frame: AudioFrame, timestamp: Duration) -> anyhow::Result<()> { - if let Some(timestamp) = self.pause.adjust(timestamp)? - && let Some(encoder) = self.audio_encoder.as_mut() + if let Some(encoder) = self.audio_encoder.as_mut() && let Ok(mut output) = self.output.lock() { encoder.send_frame(frame.inner, timestamp, &mut output)?; @@ -658,7 +588,6 @@ pub struct WindowsCameraMuxer { video_tx: SyncSender>, output: Arc>, audio_encoder: Option, - pause: PauseTracker, frame_drops: FrameDropTracker, } @@ -700,7 +629,7 @@ impl Muxer for WindowsCameraMuxer { output_path: PathBuf, video_config: Option, audio_config: Option, - pause_flag: Arc, + _pause_flag: Arc, tasks: &mut TaskPool, ) -> anyhow::Result where @@ -1073,7 +1002,6 @@ impl Muxer for WindowsCameraMuxer { video_tx, output, audio_encoder, - pause: PauseTracker::new(pause_flag), frame_drops: FrameDropTracker::new(), }) } @@ -1107,17 +1035,15 @@ impl VideoMuxer for WindowsCameraMuxer { frame: Self::VideoFrame, timestamp: Duration, ) -> anyhow::Result<()> { - if let Some(timestamp) = self.pause.adjust(timestamp)? { - match self.video_tx.try_send(Some((frame, timestamp))) { - Ok(()) => { - self.frame_drops.record_frame(); - } - Err(TrySendError::Full(_)) => { - self.frame_drops.record_drop(); - } - Err(TrySendError::Disconnected(_)) => { - trace!("Windows MP4 camera encoder channel disconnected"); - } + match self.video_tx.try_send(Some((frame, timestamp))) { + Ok(()) => { + self.frame_drops.record_frame(); + } + Err(TrySendError::Full(_)) => { + self.frame_drops.record_drop(); + } + Err(TrySendError::Disconnected(_)) => { + trace!("Windows MP4 camera encoder channel disconnected"); } } @@ -1127,8 +1053,7 @@ impl VideoMuxer for WindowsCameraMuxer { impl AudioMuxer for WindowsCameraMuxer { fn send_audio_frame(&mut self, frame: AudioFrame, timestamp: Duration) -> anyhow::Result<()> { - if let Some(timestamp) = self.pause.adjust(timestamp)? 
- && let Some(encoder) = self.audio_encoder.as_mut() + if let Some(encoder) = self.audio_encoder.as_mut() && let Ok(mut output) = self.output.lock() { encoder.send_frame(frame.inner, timestamp, &mut output)?; diff --git a/crates/rendering/src/decoder/avassetreader.rs b/crates/rendering/src/decoder/avassetreader.rs index 6f05ba6b74..55295a8602 100644 --- a/crates/rendering/src/decoder/avassetreader.rs +++ b/crates/rendering/src/decoder/avassetreader.rs @@ -601,7 +601,7 @@ impl AVAssetReaderDecoder { *last_sent_frame.borrow_mut() = Some(data.clone()); let _ = req.sender.send(data.to_decoded_frame()); } else { - const MAX_FALLBACK_DISTANCE: u32 = 10; + const MAX_FALLBACK_DISTANCE: u32 = 30; let nearest = cache .range(..=req.frame) @@ -655,12 +655,20 @@ impl AVAssetReaderDecoder { } let mut unfulfilled_count = 0u32; + let decoder_returned_no_frames = frames_iterated == 0; for req in pending_requests.drain(..) { if let Some(cached) = cache.get(&req.frame) { let data = cached.data().clone(); let _ = req.sender.send(data.to_decoded_frame()); } else { const MAX_FALLBACK_DISTANCE: u32 = 10; + const MAX_FALLBACK_DISTANCE_EOF: u32 = 60; + + let fallback_distance = if decoder_returned_no_frames { + MAX_FALLBACK_DISTANCE_EOF + } else { + MAX_FALLBACK_DISTANCE + }; let nearest = cache .range(..=req.frame) @@ -669,7 +677,7 @@ impl AVAssetReaderDecoder { if let Some((&frame_num, cached)) = nearest { let distance = req.frame.abs_diff(frame_num); - if distance <= MAX_FALLBACK_DISTANCE { + if distance <= fallback_distance { let _ = req.sender.send(cached.data().to_decoded_frame()); } else { unfulfilled_count += 1;
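
Illustrative sketch (not part of the patch) of the sample-count timestamping that the new AudioTimestampGenerator in crates/recording/src/output_pipeline/core.rs uses in place of the removed AudioDriftTracker: each audio frame's timestamp is derived from how many samples have already been emitted, so it cannot drift against the audio stream and needs no wall-clock comparison. The SampleClock name and the main() driver below are illustrative only.

use std::time::Duration;

struct SampleClock {
    sample_rate: u32,
    total_samples: u64,
}

impl SampleClock {
    fn new(sample_rate: u32) -> Self {
        Self { sample_rate, total_samples: 0 }
    }

    // Returns the timestamp of the frame being pushed, then advances the clock
    // by that frame's sample count.
    fn next_timestamp(&mut self, frame_samples: u64) -> Duration {
        let secs = self.total_samples as f64 / self.sample_rate as f64;
        self.total_samples += frame_samples;
        Duration::from_secs_f64(secs)
    }
}

fn main() {
    let mut clock = SampleClock::new(48_000);
    // 960 samples per frame at 48 kHz => 20 ms per frame.
    assert_eq!(clock.next_timestamp(960), Duration::ZERO);
    let second = clock.next_timestamp(960);
    assert!((second.as_secs_f64() - 0.02).abs() < 1e-9);
    println!("second frame at {:?}", second);
}

Because timestamps come from the sample counter rather than capture-time clocks, pauses are handled simply by dropping frames while paused: the counter stops advancing, so playback time stays continuous.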
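
A trimmed sketch of the SharedWallClockPause introduced in the same file: one tracker is cloned into both the audio and video tasks so they subtract the same accumulated pause time from their wall-clock measurements. The WallClockPause name, the tuple-based inner state, and the main() demo are simplifications, not the diff's exact implementation (which also logs on resume and recovers poisoned locks).

use std::sync::{
    atomic::{AtomicBool, Ordering},
    Arc, Mutex,
};
use std::time::{Duration, Instant};

#[derive(Clone)]
struct WallClockPause {
    flag: Arc<AtomicBool>,
    // (pause_started_at, total_pause_duration)
    inner: Arc<Mutex<(Option<Instant>, Duration)>>,
}

impl WallClockPause {
    fn new(flag: Arc<AtomicBool>) -> Self {
        Self { flag, inner: Arc::new(Mutex::new((None, Duration::ZERO))) }
    }

    // Returns (is_paused, total_pause_so_far), folding a finished pause
    // interval into the accumulator on resume.
    fn check(&self) -> (bool, Duration) {
        let is_paused = self.flag.load(Ordering::Acquire);
        let mut inner = self.inner.lock().unwrap();
        if is_paused {
            inner.0.get_or_insert_with(Instant::now);
        } else if let Some(started) = inner.0.take() {
            inner.1 += started.elapsed();
        }
        (is_paused, inner.1)
    }
}

fn main() {
    let flag = Arc::new(AtomicBool::new(false));
    let pause = WallClockPause::new(flag.clone());
    let start = Instant::now();

    flag.store(true, Ordering::Release);
    pause.check(); // marks the start of the pause
    std::thread::sleep(Duration::from_millis(50));
    flag.store(false, Ordering::Release);

    let (_, paused_for) = pause.check();
    // Producers compute effective wall-clock time as elapsed minus total pause,
    // matching the saturating_sub(total_pause_duration) calls in the diff.
    let effective = start.elapsed().saturating_sub(paused_for);
    println!("paused for {:?}, effective elapsed {:?}", paused_for, effective);
}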
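
The configuration.rs changes replace many per-field #[serde(default)] attributes with a single container-level #[serde(default)], which is why the generated TypeScript types lose their optional markers while old configs still deserialize. A minimal sketch of that pattern, assuming serde (with the derive feature) and serde_json as dependencies; ExampleAudioConfiguration is a cut-down stand-in, not the real AudioConfiguration.

use serde::{Deserialize, Serialize};

// Container-level `default` requires the struct to implement Default; any
// field missing from the input JSON is taken from Default::default().
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase", default)]
struct ExampleAudioConfiguration {
    mute: bool,
    improve: bool,
    mic_volume_db: f32,
    system_volume_db: f32,
}

impl Default for ExampleAudioConfiguration {
    fn default() -> Self {
        Self { mute: false, improve: false, mic_volume_db: 0.0, system_volume_db: 0.0 }
    }
}

fn main() -> Result<(), serde_json::Error> {
    // An old config that predates the volume fields still parses; the missing
    // fields fall back to the Default impl.
    let old_json = r#"{ "mute": true, "improve": false }"#;
    let cfg: ExampleAudioConfiguration = serde_json::from_str(old_json)?;
    println!("{cfg:?}");
    Ok(())
}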
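
A sketch of the widened cache-fallback rule in crates/rendering/src/decoder/avassetreader.rs: when the decoder produced no frames for a pass (likely end of stream), a cached frame up to 60 frames away may answer a pending request, otherwise only 10. The nearest_fallback name, the String payload, and the backward-only lookup are illustrative assumptions; the real cache holds decoded frame data and serves it through request senders.

use std::collections::BTreeMap;

fn nearest_fallback(
    cache: &BTreeMap<u32, String>,
    requested: u32,
    decoder_returned_no_frames: bool,
) -> Option<&String> {
    // Allow a much larger fallback distance when the decoder is exhausted.
    let max_distance: u32 = if decoder_returned_no_frames { 60 } else { 10 };
    cache
        .range(..=requested)
        .next_back()
        .and_then(|(&frame, data)| (requested.abs_diff(frame) <= max_distance).then_some(data))
}

fn main() {
    let mut cache = BTreeMap::new();
    cache.insert(100u32, "frame 100".to_string());
    // 40 frames away: only served when the decoder hit end of stream.
    assert!(nearest_fallback(&cache, 140, false).is_none());
    assert!(nearest_fallback(&cache, 140, true).is_some());
    println!("ok");
}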