@@ -15,6 +15,8 @@ import reducer_function from "./Dispatch";
1515import ScoreDisplay from "./components/ScoreDisplay" ;
1616import { SynthesizeButton } from "./components/SynthesizeButton" ;
1717import Icon from 'react-native-vector-icons/Feather' ;
18+ import { ChromaMaker } from "./utils/features" ;
19+ import FontAwesome from 'react-native-vector-icons/FontAwesome' ;
1820
1921// Define the main application component
2022export default function App ( ) {
@@ -61,6 +63,56 @@ export default function App() {
6163 setSessionToken ( newToken )
6264 } , [ ] ) ;
6365
66+ // Initialize the chroma state as an array of 12 zeros (used to capture chroma vector at each chunk of audio).
67+ const [ chroma , setChroma ] = useState < number [ ] > ( new Array ( 12 ) . fill ( 0 ) ) ;
68+ const [ started , setStarted ] = useState ( false ) ; // state used to determine user selects live microphone option or not
69+
70+ useEffect ( ( ) => {
71+ let audioCtx : AudioContext ; // Declare a reference to the AudioContext, which manages all audio processing
72+ let micStream : MediaStream ; // Declare a reference to the MediaStream from the user's microphone
73+
74+ const initAudio = async ( ) => {
75+ try {
76+ micStream = await navigator . mediaDevices . getUserMedia ( { audio : true } ) ; // Request access to user's microphone
77+ audioCtx = new AudioContext ( ) ; // Create a new AudioContext for audio processing
78+ await audioCtx . audioWorklet . addModule ( '../utils/mic-processor.js' ) ; // Load the custom AudioWorkletProcessor
79+ const source = audioCtx . createMediaStreamSource ( micStream ) ; // Create a source node from the microphone stream
80+ const workletNode = new AudioWorkletNode ( audioCtx , 'mic-processor' ) ; // Create an AudioWorkletNode linked to our custom 'mic-processor'
81+ source . connect ( workletNode ) ; // Connect the mic source to the worklet
82+ workletNode . connect ( audioCtx . destination ) ; // connect worklet to output
83+
84+ // Initialize the ChromaMaker for extracting chroma features
85+ const n_fft = 4096 ;
86+ const chromaMaker = new ChromaMaker ( audioCtx . sampleRate , n_fft ) ;
87+
88+ // Handle incoming audio chunks from the worklet
89+ workletNode . port . onmessage = ( event ) => {
90+ const audioChunk = event . data as Float32Array ;
91+ try {
92+ // Extract chroma features and update state
93+ const chromaResult = chromaMaker . insert ( audioChunk ) ;
94+ setChroma ( chromaResult ) ;
95+ } catch ( e ) {
96+ console . error ( 'Chroma extraction error:' , e ) ;
97+ }
98+ } ;
99+ } catch ( err ) {
100+ console . error ( 'Failed to initialize audio:' , err ) ;
101+ }
102+ } ;
103+ // If "started" state is true, initialize audio processing
104+ if ( started ) {
105+ initAudio ( ) ;
106+ }
107+
108+ // Cleanup: when the component unmounts or `started` becomes false,
109+ // stop the microphone stream and close the audio context to free up resources
110+ return ( ) => {
111+ if ( micStream ) micStream . getTracks ( ) . forEach ( ( track ) => track . stop ( ) ) ;
112+ if ( audioCtx ) audioCtx . close ( ) ;
113+ } ;
114+ } , [ started ] ) ;
115+
64116 ////////////////////////////////////////////////////////////////////////////////
65117 // The lines below were modified, copied and pasted out of the audio recorder object
66118 // (which never really needed a UI).
@@ -70,112 +122,112 @@ export default function App() {
70122
71123 // Audio-related states and refs
72124 // State for whether we have microphone permissions - is set to true on first trip to playmode
73- const [ permission , setPermission ] = useState ( false ) ;
74- // Assorted audio-related objects in need of reference
75- // Tend to be re-created upon starting a recording
76- const mediaRecorder = useRef < MediaRecorder > (
77- new MediaRecorder ( new MediaStream ( ) ) ,
78- ) ;
79- const [ stream , setStream ] = useState < MediaStream > ( new MediaStream ( ) ) ;
80- const [ audioChunks , setAudioChunks ] = useState < Blob [ ] > ( [ ] ) ;
125+ // const [permission, setPermission] = useState(false);
126+ // // Assorted audio-related objects in need of reference
127+ // // Tend to be re-created upon starting a recording
128+ // const mediaRecorder = useRef<MediaRecorder>(
129+ // new MediaRecorder(new MediaStream()),
130+ // );
131+ // const [stream, setStream] = useState<MediaStream>(new MediaStream());
132+ // const [audioChunks, setAudioChunks] = useState<Blob[]>([]);
81133
82- const audioContextRef = useRef < any > ( null ) ;
83- const analyserRef = useRef < any > ( null ) ;
84- const dataArrayRef = useRef < any > ( null ) ;
85- const startTimeRef = useRef < any > ( null ) ;
134+ // const audioContextRef = useRef<any>(null);
135+ // const analyserRef = useRef<any>(null);
136+ // const dataArrayRef = useRef<any>(null);
137+ // const startTimeRef = useRef<any>(null);
86138
87- // Audio-related functions
88- /////////////////////////////////////////////////////////
89- // This function sends a synchronization request and updates the state with the result
90- const UPDATE_INTERVAL = 100 ;
139+ // // Audio-related functions
140+ // // ///////////////////////////////////////////////////////
141+ // // This function sends a synchronization request and updates the state with the result
142+ // const UPDATE_INTERVAL = 100;
91143
92- const getAPIData = async ( ) => {
93- analyserRef . current ?. getByteTimeDomainData ( dataArrayRef . current ) ;
94- const {
95- playback_rate : newPlayRate ,
96- estimated_position : estimated_position ,
97- } = await synchronize ( state . sessionToken , Array . from ( dataArrayRef . current ) , state . timestamp ) ;
144+ // const getAPIData = async () => {
145+ // analyserRef.current?.getByteTimeDomainData(dataArrayRef.current);
146+ // const {
147+ // playback_rate: newPlayRate,
148+ // estimated_position: estimated_position,
149+ // } = await synchronize(state.sessionToken, Array.from(dataArrayRef.current), state.timestamp);
98150
99- dispatch ( {
100- type : "increment" ,
101- time : estimated_position ,
102- rate : newPlayRate ,
103- } ) ;
104- }
151+ // dispatch({
152+ // type: "increment",
153+ // time: estimated_position,
154+ // rate: newPlayRate,
155+ // });
156+ // }
105157
106- // This function established new recording instances when re-entering play mode
107- const startRecording = async ( ) => {
108- // It's possible some of these can be removed; not sure which relate to the
109- // making of the recorded object we don't need and which relate to the
110- // buffer we send to the backend.
111- startTimeRef . current = Date . now ( ) ;
112- //create new Media recorder instance using the stream
113- const media = new MediaRecorder ( stream , { mimeType : "audio/webm" } ) ;
114- //set the MediaRecorder instance to the mediaRecorder ref
115- mediaRecorder . current = media ;
116- //invokes the start method to start the recording process
117- mediaRecorder . current . start ( ) ;
118- let localAudioChunks : Blob [ ] = [ ] ;
119- mediaRecorder . current . ondataavailable = ( event ) => {
120- if ( typeof event . data === "undefined" ) return ;
121- if ( event . data . size === 0 ) return ;
122- localAudioChunks . push ( event . data ) ;
123- } ;
124- setAudioChunks ( localAudioChunks ) ;
158+ // // This function established new recording instances when re-entering play mode
159+ // const startRecording = async () => {
160+ // // It's possible some of these can be removed; not sure which relate to the
161+ // // making of the recorded object we don't need and which relate to the
162+ // // buffer we send to the backend.
163+ // startTimeRef.current = Date.now();
164+ // //create new Media recorder instance using the stream
165+ // const media = new MediaRecorder(stream, { mimeType: "audio/webm" });
166+ // //set the MediaRecorder instance to the mediaRecorder ref
167+ // mediaRecorder.current = media;
168+ // //invokes the start method to start the recording process
169+ // mediaRecorder.current.start();
170+ // let localAudioChunks: Blob[] = [];
171+ // mediaRecorder.current.ondataavailable = (event) => {
172+ // if (typeof event.data === "undefined") return;
173+ // if (event.data.size === 0) return;
174+ // localAudioChunks.push(event.data);
175+ // };
176+ // setAudioChunks(localAudioChunks);
125177
126- audioContextRef . current = new window . AudioContext ( ) ;
127- const source = audioContextRef . current . createMediaStreamSource ( stream ) ;
128- analyserRef . current = audioContextRef . current . createAnalyser ( ) ;
129- analyserRef . current . fftSize = 2048 ;
130- source . connect ( analyserRef . current ) ;
178+ // audioContextRef.current = new window.AudioContext();
179+ // const source = audioContextRef.current.createMediaStreamSource(stream);
180+ // analyserRef.current = audioContextRef.current.createAnalyser();
181+ // analyserRef.current.fftSize = 2048;
182+ // source.connect(analyserRef.current);
131183
132- const bufferLength = analyserRef . current . frequencyBinCount ;
133- dataArrayRef . current = new Uint8Array ( bufferLength ) ;
184+ // const bufferLength = analyserRef.current.frequencyBinCount;
185+ // dataArrayRef.current = new Uint8Array(bufferLength);
134186
135- getAPIData ( ) ; // run the first call
136- } ;
187+ // getAPIData(); // run the first call
188+ // };
137189
138- //stops the recording instance
139- const stopRecording = ( ) => {
140- mediaRecorder . current . stop ( ) ;
141- audioContextRef . current ?. close ( ) ;
142- } ;
190+ // // stops the recording instance
191+ // const stopRecording = () => {
192+ // mediaRecorder.current.stop();
193+ // audioContextRef.current?.close();
194+ // };
143195
144- // Function to get permission to use browser microphone
145- const getMicrophonePermission = async ( ) => {
146- if ( "MediaRecorder" in window ) {
147- try {
148- const streamData = await navigator . mediaDevices . getUserMedia ( {
149- audio : true ,
150- video : false ,
151- } ) ;
152- setPermission ( true ) ;
153- setStream ( streamData ) ;
154- } catch ( err ) {
155- alert ( ( err as Error ) . message ) ;
156- }
157- } else {
158- alert ( "The MediaRecorder API is not supported in your browser." ) ;
159- }
160- } ;
196+ // // Function to get permission to use browser microphone
197+ // const getMicrophonePermission = async () => {
198+ // if ("MediaRecorder" in window) {
199+ // try {
200+ // const streamData = await navigator.mediaDevices.getUserMedia({
201+ // audio: true,
202+ // video: false,
203+ // });
204+ // setPermission(true);
205+ // setStream(streamData);
206+ // } catch (err) {
207+ // alert((err as Error).message);
208+ // }
209+ // } else {
210+ // alert("The MediaRecorder API is not supported in your browser.");
211+ // }
212+ // };
161213
162- /////////////////////////////////////////////
163- // Audio-related effects
164- // Get microphone permission on first time entering play state
165- useEffect ( ( ) => {
166- if ( ! permission ) getMicrophonePermission ( ) ;
167- } , [ state . inPlayMode ] ) ;
214+ // // ///////////////////////////////////////////
215+ // // Audio-related effects
216+ // // Get microphone permission on first time entering play state
217+ // useEffect(() => {
218+ // if (!permission) getMicrophonePermission();
219+ // }, [state.inPlayMode]);
168220
169- // Start and stop recording when player is or isn't playing
170- useEffect ( ( ) => {
171- if ( state . playing ) startRecording ( ) ;
172- else stopRecording ( ) ;
173- } , [ state . playing ] ) ;
221+ // // Start and stop recording when player is or isn't playing
222+ // useEffect(() => {
223+ // if (state.playing) startRecording();
224+ // else stopRecording();
225+ // }, [state.playing]);
174226
175- // Keep synchronizing while playing
176- useEffect ( ( ) => {
177- if ( state . playing ) setTimeout ( getAPIData , UPDATE_INTERVAL ) ;
178- } , [ state . timestamp ] )
227+ // // Keep synchronizing while playing
228+ // useEffect(() => {
229+ // if (state.playing) setTimeout(getAPIData, UPDATE_INTERVAL);
230+ // }, [state.timestamp])
179231
180232 // State to conditionally render the style type of the components (can only be "light" or "dark")
181233 const [ theme , setTheme ] = useState < "light" | "dark" > ( "light" ) ;
@@ -255,7 +307,6 @@ export default function App() {
255307  // Boolean used for dynamic display (row or column)
256308 const isSmallScreen = width < 960 ;
257309
258-
259310 ////////////////////////////////////////////////////////////////////////////////
260311 // Render the component's UI
261312 ////////////////////////////////////////////////////////////////////////////////
@@ -264,10 +315,20 @@ export default function App() {
264315 { /* Header with image */ }
265316 < Animated . View style = { [ styles . menu_bar , { backgroundColor : menubarBackgroundColor , height : isSmallScreen ? 40 : 80 } ] } >
266317 < Image source = { require ( './assets/companion.png' ) } style = { [ styles . logo , { height : isSmallScreen ? 30 : 100 , width : isSmallScreen ? 100 : 200 } ] } />
267- < TouchableOpacity onPress = { toggleTheme } >
268- < Icon name = { theme === 'light' ? 'sun' : 'moon' } size = { isSmallScreen ? 15 : 30 } color = "white" />
269- </ TouchableOpacity >
318+ < View style = { { flexDirection : 'row' , alignItems : 'center' , gap : 10 } } >
319+ < TouchableOpacity onPress = { ( ) => setStarted ( ! started ) } >
320+ < FontAwesome
321+ name = { started ? 'microphone' : 'microphone-slash' }
322+ size = { isSmallScreen ? 15 : 30 }
323+ color = "white"
324+ />
325+ </ TouchableOpacity >
326+ < TouchableOpacity onPress = { toggleTheme } >
327+ < Icon name = { theme === 'light' ? 'sun' : 'moon' } size = { isSmallScreen ? 15 : 30 } color = "white" />
328+ </ TouchableOpacity >
270329
330+ </ View >
331+
271332 </ Animated . View >
272333
273334 { /* Provides safe area insets for mobile devices */ }
@@ -326,41 +387,17 @@ export default function App() {
326387 < ScoreDisplay state = { state } dispatch = { dispatch } />
327388 </ Animated . View >
328389 </ ScrollView >
329-
330-
331390 </ View >
332-
333391 { /* Footer display for status */ }
334392 < StatusBar style = "auto" />
335393 { /* Automatically adjusts the status bar style */ }
336394 </ ScrollView >
337395 </ Animated . View >
338396 < AudioPlayer state = { state } menuStyle = { { backgroundColor : menubarBackgroundColor } } />
339-
340397 </ SafeAreaView >
341398 ) ;
342399}
343400
344- // Theme-based styles (not needed since we have animated API to do light and dark transitions smoother)
345- // const themeStyles = {
346- // light: {
347- // container: { backgroundColor: '#F5F5F5' },
348- // menu_bar: { backgroundColor: '#2C3E50' },
349- // sidebar: { backgroundColor: '#ECF0F1' },
350- // mainContent: { backgroundColor: '#FFFFFF' },
351- // text: { color: "#2C3E50", fontWeight: "bold"} as TextStyle, // use for typscirpt syntax
352- // button: { backgroundColor: "#2C3E50"}
353- // },
354- // dark: {
355- // container: { backgroundColor: '#0F0F0F' },
356- // menu_bar: { backgroundColor: '#1A252F' },
357- // sidebar: { backgroundColor: '#4A627A' },
358- // mainContent: { backgroundColor: '#6B87A3' },
359- // text: { color: '#ffffff', fontWeight: "bold"} as TextStyle, // use for typscirpt syntax
360- // button: { backgroundColor: "#ffffff"}
361- // },
362- // };
363-
364401// Define styles for the components using StyleSheet
365402const styles = StyleSheet . create ( {
366403
0 commit comments