From abcab8d6ab1233f9881e3177a3e284743c7e2944 Mon Sep 17 00:00:00 2001 From: TheNexter Date: Tue, 18 Nov 2025 17:58:47 +0100 Subject: [PATCH 01/10] Audio recording --- .../MemoEditor/ActionButton/InsertMenu.tsx | 112 +++++++++++++----- .../InsertMenu/useAudioRecorder.ts | 103 ++++++++++++++++ 2 files changed, 186 insertions(+), 29 deletions(-) create mode 100644 web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts diff --git a/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx b/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx index 6de3991b59178..234cddff3e6be 100644 --- a/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx +++ b/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx @@ -1,8 +1,9 @@ import { LatLng } from "leaflet"; import { uniqBy } from "lodash-es"; -import { FileIcon, LinkIcon, LoaderIcon, MapPinIcon, Maximize2Icon, MoreHorizontalIcon, PlusIcon } from "lucide-react"; +import { FileIcon, LinkIcon, LoaderIcon, MapPinIcon, Maximize2Icon, MicIcon, MoreHorizontalIcon, PlusIcon, XIcon } from "lucide-react"; import { observer } from "mobx-react-lite"; import { useContext, useState } from "react"; +import { toast } from "react-hot-toast"; import { Button } from "@/components/ui/button"; import { DropdownMenu, @@ -13,12 +14,14 @@ import { DropdownMenuSubTrigger, DropdownMenuTrigger, } from "@/components/ui/dropdown-menu"; +import { attachmentStore } from "@/store"; import { Attachment } from "@/types/proto/api/v1/attachment_service"; import { Location, MemoRelation } from "@/types/proto/api/v1/memo_service"; import { useTranslate } from "@/utils/i18n"; import { MemoEditorContext } from "../types"; import { LinkMemoDialog } from "./InsertMenu/LinkMemoDialog"; import { LocationDialog } from "./InsertMenu/LocationDialog"; +import { useAudioRecorder } from "./InsertMenu/useAudioRecorder"; import { useFileUpload } from "./InsertMenu/useFileUpload"; import { useLinkMemo } from "./InsertMenu/useLinkMemo"; import { useLocation } from "./InsertMenu/useLocation"; @@ -52,6 +55,7 @@ const InsertMenu = observer((props: Props) => { }); const location = useLocation(props.location); + const audioRecorder = useAudioRecorder(); const isUploading = uploadingFlag || props.isUploading; @@ -112,41 +116,91 @@ const InsertMenu = observer((props: Props) => { }); }; + const handleStopRecording = async () => { + try { + const blob = await audioRecorder.stopRecording(); + const filename = `recording-${Date.now()}.webm`; + const file = new File([blob], filename, { type: "audio/webm" }); + const { name, size, type } = file; + const buffer = new Uint8Array(await file.arrayBuffer()); + + const attachment = await attachmentStore.createAttachment({ + attachment: Attachment.fromPartial({ + filename: name, + size, + type, + content: buffer, + }), + attachmentId: "", + }); + context.setAttachmentList([...context.attachmentList, attachment]); + } catch (error: any) { + console.error("Failed to upload audio recording:", error); + toast.error(error.details || "Failed to upload audio recording"); + } + }; + return ( <> - + {audioRecorder.isRecording ? ( + + ) : ( + + )} - - - {t("common.upload")} - - setLinkDialogOpen(true)}> - - {t("tooltip.link-memo")} - - - - {t("tooltip.select-location")} - - {/* View submenu with Focus Mode */} - - - - {t("common.more")} - - - - - {t("editor.focus-mode")} - ⌘⇧F + {audioRecorder.isRecording ? ( + <> + +
+ Stop Recording + + + + Cancel Recording + + + ) : ( + <> + + + {t("common.upload")} + + setLinkDialogOpen(true)}> + + {t("tooltip.link-memo")} + + + + {t("tooltip.select-location")} + + + + Record Audio - - + {/* View submenu with Focus Mode */} + + + + {t("common.more")} + + + + + {t("editor.focus-mode")} + ⌘⇧F + + + + + )} diff --git a/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts b/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts new file mode 100644 index 0000000000000..b8da4f39c69ab --- /dev/null +++ b/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts @@ -0,0 +1,103 @@ +import { useRef, useState } from "react"; + +interface AudioRecorderState { + isRecording: boolean; + isPaused: boolean; + recordingTime: number; + mediaRecorder: MediaRecorder | null; +} + +export const useAudioRecorder = () => { + const [state, setState] = useState({ + isRecording: false, + isPaused: false, + recordingTime: 0, + mediaRecorder: null, + }); + const chunksRef = useRef([]); + const timerRef = useRef(null); + + const startRecording = async () => { + try { + const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); + const mediaRecorder = new MediaRecorder(stream); + chunksRef.current = []; + + mediaRecorder.ondataavailable = (e: BlobEvent) => { + if (e.data.size > 0) { + chunksRef.current.push(e.data); + } + }; + + mediaRecorder.start(); + setState((prev: AudioRecorderState) => ({ ...prev, isRecording: true, mediaRecorder })); + + timerRef.current = window.setInterval(() => { + setState((prev: AudioRecorderState) => ({ ...prev, recordingTime: prev.recordingTime + 1 })); + }, 1000); + } catch (error) { + console.error("Error accessing microphone:", error); + throw error; + } + }; + + const stopRecording = (): Promise => { + return new Promise((resolve, reject) => { + const { mediaRecorder } = state; + if (!mediaRecorder) { + reject(new Error("No active recording")); + return; + } + + mediaRecorder.onstop = () => { + const blob = new Blob(chunksRef.current, { type: "audio/webm" }); + chunksRef.current = []; + resolve(blob); + }; + + mediaRecorder.stop(); + mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop()); + + if (timerRef.current) { + clearInterval(timerRef.current); + timerRef.current = null; + } + + setState({ + isRecording: false, + isPaused: false, + recordingTime: 0, + mediaRecorder: null, + }); + }); + }; + + const cancelRecording = () => { + const { mediaRecorder } = state; + if (mediaRecorder) { + mediaRecorder.stop(); + mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop()); + } + + if (timerRef.current) { + clearInterval(timerRef.current); + timerRef.current = null; + } + + chunksRef.current = []; + setState({ + isRecording: false, + isPaused: false, + recordingTime: 0, + mediaRecorder: null, + }); + }; + + return { + isRecording: state.isRecording, + recordingTime: state.recordingTime, + startRecording, + stopRecording, + cancelRecording, + }; +}; From 9027ece3956ef1d13335eedf94b95d3f173c975e Mon Sep 17 00:00:00 2001 From: TheNexter Date: Tue, 18 Nov 2025 18:13:29 +0100 Subject: [PATCH 02/10] Better design audio record button --- .../MemoEditor/ActionButton/InsertMenu.tsx | 112 +++++++++--------- .../InsertMenu/useAudioRecorder.ts | 22 +++- 2 files changed, 75 insertions(+), 59 deletions(-) diff --git a/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx b/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx index 
234cddff3e6be..949554ae89dcb 100644 --- a/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx +++ b/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx @@ -142,67 +142,63 @@ const InsertMenu = observer((props: Props) => { return ( <> - - - {audioRecorder.isRecording ? ( - - ) : ( + {audioRecorder.isRecording ? ( +
+
+
+          {new Date(audioRecorder.recordingTime * 1000).toISOString().substr(14, 5)}
+
+
+
+
+
+ ) : ( + + - )} - - - {audioRecorder.isRecording ? ( - <> - -
- Stop Recording - - - - Cancel Recording - - - ) : ( - <> - - - {t("common.upload")} - - setLinkDialogOpen(true)}> - - {t("tooltip.link-memo")} - - - - {t("tooltip.select-location")} - - - - Record Audio - - {/* View submenu with Focus Mode */} - - - - {t("common.more")} - - - - - {t("editor.focus-mode")} - ⌘⇧F - - - - - )} - - + + + + + {t("common.upload")} + + setLinkDialogOpen(true)}> + + {t("tooltip.link-memo")} + + + + {t("tooltip.select-location")} + + + + Record Audio + + {/* View submenu with Focus Mode */} + + + + {t("common.more")} + + + + + {t("editor.focus-mode")} + ⌘⇧F + + + + + + )} {/* Hidden file input */} { setState((prev: AudioRecorderState) => ({ ...prev, isRecording: true, mediaRecorder })); timerRef.current = window.setInterval(() => { - setState((prev: AudioRecorderState) => ({ ...prev, recordingTime: prev.recordingTime + 1 })); + setState((prev) => { + if (prev.isPaused) { + return prev; + } + return { ...prev, recordingTime: prev.recordingTime + 1 }; + }); }, 1000); } catch (error) { console.error("Error accessing microphone:", error); @@ -93,11 +98,26 @@ export const useAudioRecorder = () => { }); }; + const togglePause = () => { + const { mediaRecorder, isPaused } = state; + if (!mediaRecorder) return; + + if (isPaused) { + mediaRecorder.resume(); + } else { + mediaRecorder.pause(); + } + + setState((prev) => ({ ...prev, isPaused: !prev.isPaused })); + }; + return { isRecording: state.isRecording, + isPaused: state.isPaused, recordingTime: state.recordingTime, startRecording, stopRecording, cancelRecording, + togglePause, }; }; From bbc82f9fe52d8cb423bd1612b3d353cd11bfb95b Mon Sep 17 00:00:00 2001 From: TheNexter Date: Tue, 18 Nov 2025 18:32:27 +0100 Subject: [PATCH 03/10] Custom audio player that match design of memos --- web/src/components/AudioPlayer.tsx | 121 ++++++++++++++++++++++++++ web/src/components/MemoAttachment.tsx | 3 +- 2 files changed, 123 insertions(+), 1 deletion(-) create mode 100644 web/src/components/AudioPlayer.tsx diff --git a/web/src/components/AudioPlayer.tsx b/web/src/components/AudioPlayer.tsx new file mode 100644 index 0000000000000..59590fecebeec --- /dev/null +++ b/web/src/components/AudioPlayer.tsx @@ -0,0 +1,121 @@ +import { PauseIcon, PlayIcon } from "lucide-react"; +import { useEffect, useRef, useState } from "react"; + +interface Props { + src: string; + className?: string; +} + +const AudioPlayer = ({ src, className = "" }: Props) => { + const audioRef = useRef(null); + const [isPlaying, setIsPlaying] = useState(false); + const [currentTime, setCurrentTime] = useState(0); + const [duration, setDuration] = useState(0); + const [isLoading, setIsLoading] = useState(true); + + useEffect(() => { + const audio = audioRef.current; + if (!audio) return; + + const handleLoadedMetadata = () => { + setDuration(audio.duration); + setIsLoading(false); + }; + + const handleTimeUpdate = () => { + setCurrentTime(audio.currentTime); + }; + + const handleEnded = () => { + setIsPlaying(false); + setCurrentTime(0); + }; + + const handleLoadedData = () => { + // For files without proper duration in metadata, + // try to get it after some data is loaded + if (audio.duration && !isNaN(audio.duration) && audio.duration !== Infinity) { + setDuration(audio.duration); + setIsLoading(false); + } + }; + + audio.addEventListener("loadedmetadata", handleLoadedMetadata); + audio.addEventListener("loadeddata", handleLoadedData); + audio.addEventListener("timeupdate", handleTimeUpdate); + audio.addEventListener("ended", handleEnded); + + return () => { + 
audio.removeEventListener("loadedmetadata", handleLoadedMetadata);
+      audio.removeEventListener("loadeddata", handleLoadedData);
+      audio.removeEventListener("timeupdate", handleTimeUpdate);
+      audio.removeEventListener("ended", handleEnded);
+    };
+  }, []);
+
+  const togglePlayPause = () => {
+    const audio = audioRef.current;
+    if (!audio) return;
+
+    if (isPlaying) {
+      audio.pause();
+    } else {
+      audio.play();
+    }
+    setIsPlaying(!isPlaying);
+  };
+
+  const handleSeek = (e: React.ChangeEvent<HTMLInputElement>) => {
+    const audio = audioRef.current;
+    if (!audio) return;
+
+    const newTime = parseFloat(e.target.value);
+    audio.currentTime = newTime;
+    setCurrentTime(newTime);
+  };
+
+  const formatTime = (time: number): string => {
+    if (!isFinite(time) || isNaN(time)) return "0:00";
+
+    const minutes = Math.floor(time / 60);
+    const seconds = Math.floor(time % 60);
+    return `${minutes}:${seconds.toString().padStart(2, "0")}`;
+  };
+
+  return (
+
+
+ ); +}; + +export default AudioPlayer; diff --git a/web/src/components/MemoAttachment.tsx b/web/src/components/MemoAttachment.tsx index 2c6e318b5165c..1833614b336e1 100644 --- a/web/src/components/MemoAttachment.tsx +++ b/web/src/components/MemoAttachment.tsx @@ -1,6 +1,7 @@ import { Attachment } from "@/types/proto/api/v1/attachment_service"; import { getAttachmentUrl, isMidiFile } from "@/utils/attachment"; import AttachmentIcon from "./AttachmentIcon"; +import AudioPlayer from "./AudioPlayer"; interface Props { attachment: Attachment; @@ -20,7 +21,7 @@ const MemoAttachment: React.FC = (props: Props) => { className={`w-auto flex flex-row justify-start items-center text-muted-foreground hover:text-foreground hover:bg-accent rounded px-2 py-1 transition-colors ${className}`} > {attachment.type.startsWith("audio") && !isMidiFile(attachment.type) ? ( - + ) : ( <> From 61a314254b1eab81da6ccd00f83fb614a76969f4 Mon Sep 17 00:00:00 2001 From: TheNexter Date: Tue, 18 Nov 2025 20:06:05 +0100 Subject: [PATCH 04/10] Audio player redesign, better looking version --- web/src/components/AudioPlayer.tsx | 59 ++++++++++++++++-------------- 1 file changed, 31 insertions(+), 28 deletions(-) diff --git a/web/src/components/AudioPlayer.tsx b/web/src/components/AudioPlayer.tsx index 59590fecebeec..f55fd13247206 100644 --- a/web/src/components/AudioPlayer.tsx +++ b/web/src/components/AudioPlayer.tsx @@ -1,5 +1,6 @@ import { PauseIcon, PlayIcon } from "lucide-react"; import { useEffect, useRef, useState } from "react"; +import { Button } from "@/components/ui/button"; interface Props { src: string; @@ -18,12 +19,17 @@ const AudioPlayer = ({ src, className = "" }: Props) => { if (!audio) return; const handleLoadedMetadata = () => { - setDuration(audio.duration); + if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) { + setDuration(audio.duration); + } setIsLoading(false); }; const handleTimeUpdate = () => { setCurrentTime(audio.currentTime); + if (duration === 0 && audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) { + setDuration(audio.duration); + } }; const handleEnded = () => { @@ -32,9 +38,9 @@ const AudioPlayer = ({ src, className = "" }: Props) => { }; const handleLoadedData = () => { - // For files without proper duration in metadata, + // For files without proper duration in metadata, // try to get it after some data is loaded - if (audio.duration && !isNaN(audio.duration) && audio.duration !== Infinity) { + if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) { setDuration(audio.duration); setIsLoading(false); } @@ -86,34 +92,31 @@ const AudioPlayer = ({ src, className = "" }: Props) => {
); }; -export default AudioPlayer; +export default AudioPlayer; \ No newline at end of file From 02b36d5baed07b515609f511fd789e6e0e5e95a3 Mon Sep 17 00:00:00 2001 From: TheNexter Date: Wed, 19 Nov 2025 08:19:29 +0100 Subject: [PATCH 07/10] Lint fix (sorry) --- web/src/components/AudioPlayer.tsx | 210 ++++++++--------- .../InsertMenu/useAudioRecorder.ts | 218 +++++++++--------- 2 files changed, 214 insertions(+), 214 deletions(-) diff --git a/web/src/components/AudioPlayer.tsx b/web/src/components/AudioPlayer.tsx index fdc6031ddc50a..a0d9277dc8b87 100644 --- a/web/src/components/AudioPlayer.tsx +++ b/web/src/components/AudioPlayer.tsx @@ -3,120 +3,120 @@ import { useEffect, useRef, useState } from "react"; import { Button } from "@/components/ui/button"; interface Props { - src: string; - className?: string; + src: string; + className?: string; } const AudioPlayer = ({ src, className = "" }: Props) => { - const audioRef = useRef(null); - const [isPlaying, setIsPlaying] = useState(false); - const [currentTime, setCurrentTime] = useState(0); - const [duration, setDuration] = useState(0); - const [isLoading, setIsLoading] = useState(true); - - useEffect(() => { - const audio = audioRef.current; - if (!audio) return; - - const handleLoadedMetadata = () => { - if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) { - setDuration(audio.duration); - } - setIsLoading(false); - }; - - const handleTimeUpdate = () => { - setCurrentTime(audio.currentTime); - if (duration === 0 && audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) { - setDuration(audio.duration); - } - }; - - const handleEnded = () => { - setIsPlaying(false); - setCurrentTime(0); - }; - - const handleLoadedData = () => { - // For files without proper duration in metadata, - // try to get it after some data is loaded - if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) { - setDuration(audio.duration); - setIsLoading(false); - } - }; - - audio.addEventListener("loadedmetadata", handleLoadedMetadata); - audio.addEventListener("loadeddata", handleLoadedData); - audio.addEventListener("timeupdate", handleTimeUpdate); - audio.addEventListener("ended", handleEnded); - - return () => { - audio.removeEventListener("loadedmetadata", handleLoadedMetadata); - audio.removeEventListener("loadeddata", handleLoadedData); - audio.removeEventListener("timeupdate", handleTimeUpdate); - audio.removeEventListener("ended", handleEnded); - }; - }, []); - - const togglePlayPause = () => { - const audio = audioRef.current; - if (!audio) return; - - if (isPlaying) { - audio.pause(); - } else { - audio.play(); - } - setIsPlaying(!isPlaying); + const audioRef = useRef(null); + const [isPlaying, setIsPlaying] = useState(false); + const [currentTime, setCurrentTime] = useState(0); + const [duration, setDuration] = useState(0); + const [isLoading, setIsLoading] = useState(true); + + useEffect(() => { + const audio = audioRef.current; + if (!audio) return; + + const handleLoadedMetadata = () => { + if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) { + setDuration(audio.duration); + } + setIsLoading(false); }; - const handleSeek = (e: React.ChangeEvent) => { - const audio = audioRef.current; - if (!audio) return; - - const newTime = parseFloat(e.target.value); - audio.currentTime = newTime; - setCurrentTime(newTime); + const handleTimeUpdate = () => { + setCurrentTime(audio.currentTime); + if (duration === 0 && audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) { + 
setDuration(audio.duration); + } }; - const formatTime = (time: number): string => { - if (!isFinite(time) || isNaN(time)) return "0:00"; + const handleEnded = () => { + setIsPlaying(false); + setCurrentTime(0); + }; - const minutes = Math.floor(time / 60); - const seconds = Math.floor(time % 60); - return `${minutes}:${seconds.toString().padStart(2, "0")}`; + const handleLoadedData = () => { + // For files without proper duration in metadata, + // try to get it after some data is loaded + if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) { + setDuration(audio.duration); + setIsLoading(false); + } }; - return ( -
-
- ); + audio.addEventListener("loadedmetadata", handleLoadedMetadata); + audio.addEventListener("loadeddata", handleLoadedData); + audio.addEventListener("timeupdate", handleTimeUpdate); + audio.addEventListener("ended", handleEnded); + + return () => { + audio.removeEventListener("loadedmetadata", handleLoadedMetadata); + audio.removeEventListener("loadeddata", handleLoadedData); + audio.removeEventListener("timeupdate", handleTimeUpdate); + audio.removeEventListener("ended", handleEnded); + }; + }, []); + + const togglePlayPause = () => { + const audio = audioRef.current; + if (!audio) return; + + if (isPlaying) { + audio.pause(); + } else { + audio.play(); + } + setIsPlaying(!isPlaying); + }; + + const handleSeek = (e: React.ChangeEvent) => { + const audio = audioRef.current; + if (!audio) return; + + const newTime = parseFloat(e.target.value); + audio.currentTime = newTime; + setCurrentTime(newTime); + }; + + const formatTime = (time: number): string => { + if (!isFinite(time) || isNaN(time)) return "0:00"; + + const minutes = Math.floor(time / 60); + const seconds = Math.floor(time % 60); + return `${minutes}:${seconds.toString().padStart(2, "0")}`; + }; + + return ( +
+
+ ); }; -export default AudioPlayer; \ No newline at end of file +export default AudioPlayer; diff --git a/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts b/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts index d8f671d7f0fd1..9888bf76c4129 100644 --- a/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts +++ b/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts @@ -1,123 +1,123 @@ import { useRef, useState } from "react"; interface AudioRecorderState { - isRecording: boolean; - isPaused: boolean; - recordingTime: number; - mediaRecorder: MediaRecorder | null; + isRecording: boolean; + isPaused: boolean; + recordingTime: number; + mediaRecorder: MediaRecorder | null; } export const useAudioRecorder = () => { - const [state, setState] = useState({ - isRecording: false, - isPaused: false, - recordingTime: 0, - mediaRecorder: null, - }); - const chunksRef = useRef([]); - const timerRef = useRef(null); - - const startRecording = async () => { - try { - const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); - const mediaRecorder = new MediaRecorder(stream); - chunksRef.current = []; - - mediaRecorder.ondataavailable = (e: BlobEvent) => { - if (e.data.size > 0) { - chunksRef.current.push(e.data); - } - }; - - mediaRecorder.start(); - setState((prev: AudioRecorderState) => ({ ...prev, isRecording: true, mediaRecorder })); - - timerRef.current = window.setInterval(() => { - setState((prev) => { - if (prev.isPaused) { - return prev; - } - return { ...prev, recordingTime: prev.recordingTime + 1 }; - }); - }, 1000); - } catch (error) { - console.error("Error accessing microphone:", error); - throw error; - } - }; - - const stopRecording = (): Promise => { - return new Promise((resolve, reject) => { - const { mediaRecorder } = state; - if (!mediaRecorder) { - reject(new Error("No active recording")); - return; - } - - mediaRecorder.onstop = () => { - const blob = new Blob(chunksRef.current, { type: "audio/webm" }); - chunksRef.current = []; - resolve(blob); - }; - - mediaRecorder.stop(); - mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop()); - - if (timerRef.current) { - clearInterval(timerRef.current); - timerRef.current = null; - } - - setState({ - isRecording: false, - isPaused: false, - recordingTime: 0, - mediaRecorder: null, - }); - }); - }; - - const cancelRecording = () => { - const { mediaRecorder } = state; - if (mediaRecorder) { - mediaRecorder.stop(); - mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop()); + const [state, setState] = useState({ + isRecording: false, + isPaused: false, + recordingTime: 0, + mediaRecorder: null, + }); + const chunksRef = useRef([]); + const timerRef = useRef(null); + + const startRecording = async () => { + try { + const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); + const mediaRecorder = new MediaRecorder(stream); + chunksRef.current = []; + + mediaRecorder.ondataavailable = (e: BlobEvent) => { + if (e.data.size > 0) { + chunksRef.current.push(e.data); } + }; - if (timerRef.current) { - clearInterval(timerRef.current); - timerRef.current = null; - } + mediaRecorder.start(); + setState((prev: AudioRecorderState) => ({ ...prev, isRecording: true, mediaRecorder })); - chunksRef.current = []; - setState({ - isRecording: false, - isPaused: false, - recordingTime: 0, - mediaRecorder: null, + timerRef.current = window.setInterval(() => { + setState((prev) => { + if 
(prev.isPaused) { + return prev; + } + return { ...prev, recordingTime: prev.recordingTime + 1 }; }); - }; + }, 1000); + } catch (error) { + console.error("Error accessing microphone:", error); + throw error; + } + }; + + const stopRecording = (): Promise => { + return new Promise((resolve, reject) => { + const { mediaRecorder } = state; + if (!mediaRecorder) { + reject(new Error("No active recording")); + return; + } + + mediaRecorder.onstop = () => { + const blob = new Blob(chunksRef.current, { type: "audio/webm" }); + chunksRef.current = []; + resolve(blob); + }; - const togglePause = () => { - const { mediaRecorder, isPaused } = state; - if (!mediaRecorder) return; + mediaRecorder.stop(); + mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop()); - if (isPaused) { - mediaRecorder.resume(); - } else { - mediaRecorder.pause(); - } + if (timerRef.current) { + clearInterval(timerRef.current); + timerRef.current = null; + } - setState((prev) => ({ ...prev, isPaused: !prev.isPaused })); - }; - - return { - isRecording: state.isRecording, - isPaused: state.isPaused, - recordingTime: state.recordingTime, - startRecording, - stopRecording, - cancelRecording, - togglePause, - }; + setState({ + isRecording: false, + isPaused: false, + recordingTime: 0, + mediaRecorder: null, + }); + }); + }; + + const cancelRecording = () => { + const { mediaRecorder } = state; + if (mediaRecorder) { + mediaRecorder.stop(); + mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop()); + } + + if (timerRef.current) { + clearInterval(timerRef.current); + timerRef.current = null; + } + + chunksRef.current = []; + setState({ + isRecording: false, + isPaused: false, + recordingTime: 0, + mediaRecorder: null, + }); + }; + + const togglePause = () => { + const { mediaRecorder, isPaused } = state; + if (!mediaRecorder) return; + + if (isPaused) { + mediaRecorder.resume(); + } else { + mediaRecorder.pause(); + } + + setState((prev) => ({ ...prev, isPaused: !prev.isPaused })); + }; + + return { + isRecording: state.isRecording, + isPaused: state.isPaused, + recordingTime: state.recordingTime, + startRecording, + stopRecording, + cancelRecording, + togglePause, + }; }; From 1cd79734b8e5b465681d01bb61691545fe781b7c Mon Sep 17 00:00:00 2001 From: TheNexter Date: Wed, 19 Nov 2025 12:53:59 +0100 Subject: [PATCH 08/10] Apply suggestion made by Copilot --- web/src/components/AudioPlayer.tsx | 18 ++++++++++++------ .../MemoEditor/ActionButton/InsertMenu.tsx | 4 ++-- web/src/locales/en.json | 5 +++-- 3 files changed, 17 insertions(+), 10 deletions(-) diff --git a/web/src/components/AudioPlayer.tsx b/web/src/components/AudioPlayer.tsx index a0d9277dc8b87..c05dbc4cc6b00 100644 --- a/web/src/components/AudioPlayer.tsx +++ b/web/src/components/AudioPlayer.tsx @@ -27,8 +27,8 @@ const AudioPlayer = ({ src, className = "" }: Props) => { const handleTimeUpdate = () => { setCurrentTime(audio.currentTime); - if (duration === 0 && audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) { - setDuration(audio.duration); + if (audio.duration && !isNaN(audio.duration) && isFinite(audio.duration)) { + setDuration((prev) => (prev === 0 ? 
audio.duration : prev)); } }; @@ -59,16 +59,22 @@ const AudioPlayer = ({ src, className = "" }: Props) => { }; }, []); - const togglePlayPause = () => { + const togglePlayPause = async () => { const audio = audioRef.current; if (!audio) return; if (isPlaying) { audio.pause(); + setIsPlaying(false); } else { - audio.play(); + try { + await audio.play(); + setIsPlaying(true); + } catch (error) { + console.error("Failed to play audio:", error); + setIsPlaying(false); + } } - setIsPlaying(!isPlaying); }; const handleSeek = (e: React.ChangeEvent) => { @@ -112,7 +118,7 @@ const AudioPlayer = ({ src, className = "" }: Props) => { value={currentTime} onChange={handleSeek} disabled={isLoading || !duration} - className="flex-1 h-1 bg-muted hover:bg-background/50 hover:bg-background/50 rounded-lg appearance-none cursor-pointer disabled:opacity-50 [&::-webkit-slider-thumb]:appearance-none [&::-webkit-slider-thumb]:w-3 [&::-webkit-slider-thumb]:h-3 [&::-webkit-slider-thumb]:rounded-full [&::-webkit-slider-thumb]:bg-primary [&::-moz-range-thumb]:w-3 [&::-moz-range-thumb]:h-3 [&::-moz-range-thumb]:rounded-full [&::-moz-range-thumb]:bg-primary [&::-moz-range-thumb]:border-0" + className="flex-1 h-1 bg-muted hover:bg-background/50 rounded-lg appearance-none cursor-pointer disabled:opacity-50 [&::-webkit-slider-thumb]:appearance-none [&::-webkit-slider-thumb]:w-3 [&::-webkit-slider-thumb]:h-3 [&::-webkit-slider-thumb]:rounded-full [&::-webkit-slider-thumb]:bg-primary [&::-moz-range-thumb]:w-3 [&::-moz-range-thumb]:h-3 [&::-moz-range-thumb]:rounded-full [&::-moz-range-thumb]:bg-primary [&::-moz-range-thumb]:border-0" />
diff --git a/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx b/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx index 949554ae89dcb..1fea285b325aa 100644 --- a/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx +++ b/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx @@ -146,7 +146,7 @@ const InsertMenu = observer((props: Props) => {
- {new Date(audioRecorder.recordingTime * 1000).toISOString().substr(14, 5)} + {new Date(audioRecorder.recordingTime * 1000).toISOString().substring(14, 19)}
- -
+ + + + {formatTime(currentTime)} / {formatTime(duration)} +
); }; diff --git a/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx b/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx index 1fea285b325aa..ed320b4fe3ec1 100644 --- a/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx +++ b/web/src/components/MemoEditor/ActionButton/InsertMenu.tsx @@ -1,6 +1,18 @@ import { LatLng } from "leaflet"; import { uniqBy } from "lodash-es"; -import { FileIcon, LinkIcon, LoaderIcon, MapPinIcon, Maximize2Icon, MicIcon, MoreHorizontalIcon, PlusIcon, XIcon } from "lucide-react"; +import { + FileIcon, + LinkIcon, + LoaderIcon, + MapPinIcon, + Maximize2Icon, + MicIcon, + MoreHorizontalIcon, + PauseIcon, + PlayIcon, + PlusIcon, + XIcon, +} from "lucide-react"; import { observer } from "mobx-react-lite"; import { useContext, useState } from "react"; import { toast } from "react-hot-toast"; @@ -136,7 +148,7 @@ const InsertMenu = observer((props: Props) => { context.setAttachmentList([...context.attachmentList, attachment]); } catch (error: any) { console.error("Failed to upload audio recording:", error); - toast.error(error.details || "Failed to upload audio recording"); + toast.error(error.message || "Failed to upload audio recording"); } }; @@ -148,13 +160,31 @@ const InsertMenu = observer((props: Props) => {
{new Date(audioRecorder.recordingTime * 1000).toISOString().substring(14, 19)}
- - -
diff --git a/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts b/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts index 9888bf76c4129..28c130b89a1a3 100644 --- a/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts +++ b/web/src/components/MemoEditor/ActionButton/InsertMenu/useAudioRecorder.ts @@ -1,46 +1,60 @@ -import { useRef, useState } from "react"; - -interface AudioRecorderState { - isRecording: boolean; - isPaused: boolean; - recordingTime: number; - mediaRecorder: MediaRecorder | null; -} +import { useEffect, useRef, useState } from "react"; export const useAudioRecorder = () => { - const [state, setState] = useState({ - isRecording: false, - isPaused: false, - recordingTime: 0, - mediaRecorder: null, - }); + const [isRecording, setIsRecording] = useState(false); + const [isPaused, setIsPaused] = useState(false); + const [recordingTime, setRecordingTime] = useState(0); + const chunksRef = useRef([]); const timerRef = useRef(null); + const durationRef = useRef(0); + const mediaRecorderRef = useRef(null); + + useEffect(() => { + return () => { + if (mediaRecorderRef.current) { + mediaRecorderRef.current.stream.getTracks().forEach((track) => track.stop()); + mediaRecorderRef.current = null; + } + if (timerRef.current) { + clearInterval(timerRef.current); + timerRef.current = null; + } + }; + }, []); const startRecording = async () => { + let stream: MediaStream | null = null; try { - const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); - const mediaRecorder = new MediaRecorder(stream); + stream = await navigator.mediaDevices.getUserMedia({ audio: true }); + const recorder = new MediaRecorder(stream); chunksRef.current = []; + durationRef.current = 0; + setRecordingTime(0); - mediaRecorder.ondataavailable = (e: BlobEvent) => { + recorder.ondataavailable = (e: BlobEvent) => { if (e.data.size > 0) { chunksRef.current.push(e.data); } }; - mediaRecorder.start(); - setState((prev: AudioRecorderState) => ({ ...prev, isRecording: true, mediaRecorder })); + recorder.start(); + mediaRecorderRef.current = recorder; + + setIsRecording(true); + setIsPaused(false); timerRef.current = window.setInterval(() => { - setState((prev) => { - if (prev.isPaused) { - return prev; - } - return { ...prev, recordingTime: prev.recordingTime + 1 }; - }); + if (!mediaRecorderRef.current || mediaRecorderRef.current.state === "paused") { + return; + } + durationRef.current += 1; + setRecordingTime(durationRef.current); }, 1000); } catch (error) { + if (stream) { + stream.getTracks().forEach((track) => track.stop()); + } console.error("Error accessing microphone:", error); throw error; } @@ -48,73 +62,92 @@ export const useAudioRecorder = () => { const stopRecording = (): Promise => { return new Promise((resolve, reject) => { - const { mediaRecorder } = state; - if (!mediaRecorder) { + // Cleanup timer immediately to prevent further updates + if (timerRef.current) { + clearInterval(timerRef.current); + timerRef.current = null; + } + + const recorder = mediaRecorderRef.current; + if (!recorder) { reject(new Error("No active recording")); return; } - mediaRecorder.onstop = () => { + let isResolved = false; + + const finalize = () => { + if (isResolved) return; + isResolved = true; + const blob = new Blob(chunksRef.current, { type: "audio/webm" }); chunksRef.current = []; + durationRef.current = 0; + + setIsRecording(false); + setIsPaused(false); + setRecordingTime(0); + + mediaRecorderRef.current = null; + resolve(blob); }; - 
mediaRecorder.stop(); - mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop()); + recorder.onstop = finalize; - if (timerRef.current) { - clearInterval(timerRef.current); - timerRef.current = null; + try { + recorder.stop(); + recorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop()); + } catch (error) { + // Ignore errors during stop, as we'll finalize anyway + console.warn("Error stopping media recorder:", error); } - setState({ - isRecording: false, - isPaused: false, - recordingTime: 0, - mediaRecorder: null, - }); + // Safety timeout in case onstop never fires + setTimeout(finalize, 1000); }); }; const cancelRecording = () => { - const { mediaRecorder } = state; - if (mediaRecorder) { - mediaRecorder.stop(); - mediaRecorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop()); - } - + // Cleanup timer immediately if (timerRef.current) { clearInterval(timerRef.current); timerRef.current = null; } + const recorder = mediaRecorderRef.current; + if (recorder) { + recorder.stop(); + recorder.stream.getTracks().forEach((track: MediaStreamTrack) => track.stop()); + } + chunksRef.current = []; - setState({ - isRecording: false, - isPaused: false, - recordingTime: 0, - mediaRecorder: null, - }); + durationRef.current = 0; + + setIsRecording(false); + setIsPaused(false); + setRecordingTime(0); + + mediaRecorderRef.current = null; }; const togglePause = () => { - const { mediaRecorder, isPaused } = state; - if (!mediaRecorder) return; + const recorder = mediaRecorderRef.current; + if (!recorder) return; if (isPaused) { - mediaRecorder.resume(); + recorder.resume(); + setIsPaused(false); } else { - mediaRecorder.pause(); + recorder.pause(); + setIsPaused(true); } - - setState((prev) => ({ ...prev, isPaused: !prev.isPaused })); }; return { - isRecording: state.isRecording, - isPaused: state.isPaused, - recordingTime: state.recordingTime, + isRecording, + isPaused, + recordingTime, startRecording, stopRecording, cancelRecording, diff --git a/web/src/index.css b/web/src/index.css index 72dc407114554..c4824043cd5dd 100644 --- a/web/src/index.css +++ b/web/src/index.css @@ -10,6 +10,7 @@ * { @apply border-border outline-none ring-0; } + body { @apply bg-background text-foreground; }