Skip to content

Commit c83df8a

Browse files
authored
Merge pull request #10 from nipunaw/sprint-2
Sprint 2 (Beta Build)
2 parents 193a06d + 6d120d3 commit c83df8a

File tree

10 files changed

+1396
-332
lines changed

10 files changed

+1396
-332
lines changed

package-lock.json

Lines changed: 1065 additions & 236 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,19 @@
44
"description": "FPGA and software crossover to bring your microphone clarity and fidelity",
55
"main": "public/main.js",
66
"dependencies": {
7+
"@joe_six/duarte-watanabe-peak-detection": "^0.2.0",
8+
"@joe_six/smoothed-z-score-peak-signal-detection": "^0.1.2",
79
"@testing-library/jest-dom": "^5.11.4",
810
"@testing-library/react": "^11.1.0",
911
"@testing-library/user-event": "^12.1.10",
1012
"@types/jest": "^27.4.0",
1113
"@types/node": "^17.0.10",
1214
"@types/react": "^17.0.38",
1315
"bootstrap": "^5.1.3",
16+
"fft-js": "^0.0.12",
17+
"fft-windowing": "^0.1.4",
18+
"fili": "^2.0.3",
19+
"nodeplotlib": "^1.0.0",
1420
"pitchfinder": "^2.3.2",
1521
"python-shell": "^3.0.1",
1622
"react": "^17.0.2",

public/main.js

Lines changed: 23 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,8 @@
22
const { app, BrowserWindow, Menu, ipcMain } = require("electron");
33
const fs = require("fs");
44
const { resolve } = require("path");
5-
const { GetPitchValue } = require("./main/audioProcess");
5+
const { electron } = require("process");
6+
const { GetPitchValue, fftAnalysis } = require("./main/audioProcess");
67

78
function createWindow() {
89
// Create the browser window.
@@ -60,13 +61,27 @@ function createWindow() {
6061
}
6162
);
6263

63-
ipcMain.on("process-audio", (event, float32Array) => {
64-
const pitch = GetPitchValue(float32Array);
65-
console.log(`Value: ${pitch}`);
66-
win.webContents.send(
67-
"audio-finished",
68-
`Sent data over UART if FPGA is connected`
69-
);
64+
65+
ipcMain.on("process-audio", (event, rawRecordedData, sampleRate) => {
  try {
    // Pitch method is deprecated; kept temporarily for parity with the old
    // pipeline.  NOTE(review): the result is unused -- confirm it can go.
    const pitch = GetPitchValue(rawRecordedData);

    const noiseTaps = fftAnalysis(rawRecordedData, sampleRate);

    // status flag, human-readable message, FIR taps for the renderer.
    win.webContents.send(
      "audio-finished",
      true,
      `Sent information over UART if connected`,
      noiseTaps
    );
  } catch (err) {
    // BUG FIX: the bare `catch {}` swallowed the error entirely, making
    // processing failures undiagnosable.  Log it to the main-process
    // console before reporting failure to the renderer.
    console.error("process-audio failed:", err);
    win.webContents.send(
      "audio-finished",
      false,
      `An error has occurred.`, // BUG FIX: typo "occured" in user-facing text
      []
    );
  }
});
7186
}
7287

public/main/audioProcess.js

Lines changed: 115 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,125 @@
11
const { app, BrowserWindow, Menu, ipcMain } = require("electron");
22
const Pitchfinder = require("pitchfinder");
3+
// FIR and FFT Analysis
4+
const fft = require('fft-js').fft;
5+
const fftUtil = require('fft-js').util;
6+
const windowing = require('fft-windowing');
7+
const Fili = require('fili');
8+
// Smoothing and Plots
9+
const smoothed_z_score = require("@joe_six/smoothed-z-score-peak-signal-detection");
10+
const detect_peaks = require("@joe_six/duarte-watanabe-peak-detection")
11+
const { plot, Plot } = require("nodeplotlib");
312

4-
const GetPitchValue = (arrayData) => {
13+
// Upper bound on samples fed into the FFT (2^18).
const MAX_FFT_SAMPLES = 262144;

// Runs a Hann-windowed FFT over the raw recording and derives FIR filter
// taps for the detected low-frequency noise.
//   rawRecordedData - time-domain samples of a single audio channel
//   sampleRate      - sample rate of the recording in Hz
// Returns the combined band-stop taps produced by firFilterTaps.
const fftAnalysis = (rawRecordedData, sampleRate) => {
  const windowedRecordedData = windowing.hann(rawRecordedData);

  // BUG FIX: fft-js requires an input length that is an exact power of two.
  // The original `slice(0, 262144)` only capped the length, so any
  // recording that was not exactly a power of two made fft() throw.
  // Truncate to the largest power of two that fits, capped at
  // MAX_FFT_SAMPLES.
  const usable = Math.min(windowedRecordedData.length, MAX_FFT_SAMPLES);
  if (usable < 2) {
    throw new Error("fftAnalysis: recording too short for FFT");
  }
  const fftSize = 2 ** Math.floor(Math.log2(usable));
  const phasors = fft(windowedRecordedData.slice(0, fftSize));

  // Sample rate is only used for the frequency step / axis length.
  const frequencies = fftUtil.fftFreq(phasors, sampleRate);
  const magnitudes = fftUtil.fftMag(phasors);

  console.log("Sample Rate: " + sampleRate);
  console.log("Length of raw data: " + rawRecordedData.length);
  console.log("Length of windowed data: " + windowedRecordedData.length);
  console.log("Length of phasors: " + phasors.length);
  console.log("Length of frequencies: " + frequencies.length);
  console.log("Length of magnitudes: " + magnitudes.length);
  console.log("");

  return firFilterTaps(frequencies, magnitudes, sampleRate);
};
28+
29+
// Chooses the peak-smoothing strategy and produces the combined FIR taps
// for the spectrum.  Currently delegates straight to noiseRemoval with a
// fixed filter order of 101.
// TO-DO: Continue testing smoothing methods, for now electing Watanabe method
//const peaksSmoothed = smoothed_z_score(magnitudes, {lag: 40, threshold: 4.5, influence: 0.2});
const firFilterTaps = (frequencies, magnitudes, sampleRate) => {
  return noiseRemoval(frequencies, magnitudes, 101, sampleRate);
};
36+
37+
// Finds peak indices in the magnitude spectrum with the Duarte-Watanabe
// detector.  mpdVal is the minimum peak distance in bins.
const identifyPeaks = (magnitudes, mpdVal) => {
  // The 16th-largest magnitude serves as the height threshold; this
  // assumes the spectrum holds no more than 15 peaks of interest.
  const descendingMagnitudes = [...magnitudes].sort((a, b) => b - a);
  const heightThreshold = descendingMagnitudes[15];
  return detect_peaks(magnitudes, { mpd: mpdVal, mph: heightThreshold });
};
42+
43+
// Helper function for graphing/testing
44+
// Helper function for graphing/testing: plots magnitudes against the
// frequency axis via nodeplotlib, optionally log10-scaling the frequencies.
const graphFrequencies = (frequencies, magnitudes, logScale, plotType) => {
  const xValues =
    logScale == true
      ? frequencies.map((freq) => Math.log10(freq))
      : frequencies;

  plot([
    {
      x: xValues,
      y: magnitudes,
      type: plotType,
    },
  ]);
};
64+
65+
// Computes Kaiser-Bessel band-stop FIR coefficients via Fili.
//   filterOrder - filter order (must be odd)
//   sampleRate  - sampling frequency in Hz
//   lowerFreq   - rise edge of the stop band (0 for lowpass)
//   upperFreq   - fall edge of the stop band (Fs/2 for highpass)
//   attenuation - stop-band attenuation in dB
const bandstopTaps = (filterOrder, sampleRate, lowerFreq, upperFreq, attenuation) => {
  const firCalculator = new Fili.FirCoeffs();
  return firCalculator.kbFilter({
    order: filterOrder,
    Fs: sampleRate,
    Fa: lowerFreq,
    Fb: upperFreq,
    Att: attenuation,
  });
};
76+
77+
// Builds combined band-stop FIR taps attenuating detected low-frequency
// (< 80 Hz) noise peaks.
//   frequencies - FFT bin frequencies in Hz
//   magnitudes  - FFT bin magnitudes (parallel to frequencies)
//   filterOrder - FIR order forwarded to bandstopTaps (must be odd)
//   sampleRate  - sample rate in Hz
// Returns the element-wise sum of all band-stop taps, or an empty array
// when no noisy peaks were found.
const noiseRemoval = (frequencies, magnitudes, filterOrder, sampleRate) => {
  const peaksWatanabeIndices = identifyPeaks(magnitudes, 50);

  // Keep only peaks below 80 Hz -- treated as noise rather than voice.
  const noisyFrequencies = [];
  for (const peakIndex of peaksWatanabeIndices) {
    if (frequencies[peakIndex] < 80) {
      noisyFrequencies.push(Math.round(frequencies[peakIndex]));
    }
  }

  // One +/-5 Hz band-stop per noisy frequency, attenuated by 5 dB, with
  // the lower edge clamped at 0 Hz (same as the original if/else split).
  const bands = noisyFrequencies.map((freq) =>
    bandstopTaps(filterOrder, sampleRate, Math.max(freq - 5, 0), freq + 5, 5)
  );

  // BUG FIX: Array.prototype.reduce without an initial value throws a
  // TypeError on an empty array, so a clean recording (no sub-80 Hz peaks)
  // was reported as a processing error.  Return an empty tap list instead.
  if (bands.length === 0) {
    return [];
  }

  // Element-wise sum, safely assuming independence of the bands.
  // NOTE(review): summing N near-unity-gain band-stops scales the passband
  // gain by N -- presumably intentional downstream; confirm.
  const sumTaps = (acc, taps) => acc.map((value, i) => value + taps[i]);
  return bands.reduce(sumTaps);
};
102+
103+
// Debug helper: logs every frequency whose magnitude exceeds the
// 16th-largest magnitude in the spectrum, then a blank line.
const generalAnalysis = (frequencies, magnitudes) => {
  const descendingMagnitudes = [...magnitudes].sort((a, b) => b - a);
  const threshold = descendingMagnitudes[15];

  frequencies.forEach((freq, i) => {
    if (magnitudes[i] > threshold) {
      console.log("General Frequency (Hz): "+ freq + ", Noise Magnitude: "+ magnitudes[i]);
    }
  });
  console.log("");
};
114+
115+
// Deprecated analysis method using pitch values instead of FFT
116+
// Deprecated analysis method using pitch values instead of FFT.
// Runs pitchfinder's AMDF detector over the recording and returns its
// result.  NOTE(review): presumably null when no pitch is found -- confirm
// against the pitchfinder documentation.
const GetPitchValue = (recordedData) => {
  const detectPitch = Pitchfinder.AMDF();
  return detectPitch(recordedData);
};
9121

10122
module.exports = {
11123
GetPitchValue: GetPitchValue,
124+
fftAnalysis: fftAnalysis
12125
};

src/components/AudioRecord.tsx

Lines changed: 69 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -2,13 +2,16 @@ import { useEffect, useState } from "react";
22
import "../css/MainContent.css";
33
import AudioDeviceList from "./AudioDeviceList";
44
import { writeSerial } from "functions/serial";
5+
import Loading from "./Loading";
56
const electron = window.require("electron");
67

78
enum AudioState {
8-
Loading,
9-
Recording,
10-
Ready,
119
Idle,
10+
Ready,
11+
Recording,
12+
Processing,
13+
Error,
14+
Finished,
1215
}
1316

1417
interface AudioDevice extends MediaStreamConstraints {
@@ -23,39 +26,68 @@ var constraints: AudioDevice = {
2326
},
2427
};
2528

29+
const RECORD_MS_TIME = 5500;
30+
31+
const handleStart = (event: Event) => {
32+
setTimeout(function () {
33+
let eventRecorder = event.target as MediaRecorder;
34+
eventRecorder.stop();
35+
}, RECORD_MS_TIME);
36+
};
37+
38+
const handleStop = (setRecorderState: () => void, updateState: () => void) => {
39+
setRecorderState();
40+
updateState();
41+
};
42+
2643
const handleDataAvailable = (event: BlobEvent) => {
2744
let audioCtx = new AudioContext();
2845
event.data.arrayBuffer().then((arrayBuf) => {
2946
audioCtx.decodeAudioData(arrayBuf).then((buffer) => {
30-
const float32Array = buffer.getChannelData(0); // get a single channel of sound
31-
electron.ipcRenderer.send("process-audio", float32Array);
32-
const data = new Uint8Array([104, 101, 108, 108, 111]); // hello
33-
writeSerial(data);
47+
//sample rate is 48kHz for my device
48+
const rawRecordedData = buffer.getChannelData(0); // get a single channel of sound
49+
const sampleRate = audioCtx.sampleRate;
50+
electron.ipcRenderer.send("process-audio", rawRecordedData, sampleRate);
3451
});
3552
});
3653
};
3754

3855
export default function AudioRecord() {
39-
//const audioRef = useRef<HTMLAudioElement>(null);
4056
const [state, setState] = useState<AudioState>(AudioState.Idle);
41-
const [mediaStream, setMediaStream] = useState<MediaStream>();
4257
const [recorder, setRecorder] = useState<MediaRecorder>();
4358
const [selectedDevice, setSelectedDevice] = useState<MediaDeviceInfo>();
4459
const [feedbackMsg, setFeedbackMsg] = useState<string | null>();
4560

4661
useEffect(() => {
47-
electron.ipcRenderer.on("audio-finished", (event, message) => {
48-
setState(AudioState.Ready);
49-
setFeedbackMsg(message);
50-
});
62+
electron.ipcRenderer.on(
63+
"audio-finished",
64+
(event, status, message, data) => {
65+
console.log(status);
66+
if (status === true) {
67+
setState(AudioState.Finished);
68+
setFeedbackMsg(message);
69+
writeSerial(data).then((serialStatus) => {
70+
console.log(serialStatus);
71+
});
72+
} else {
73+
setState(AudioState.Error);
74+
}
75+
}
76+
);
5177
}, []);
5278

5379
function handleSuccess(stream: MediaStream) {
54-
setMediaStream(stream);
5580
const options = { mimeType: "audio/webm" };
5681
const _recorder = new MediaRecorder(stream, options);
82+
_recorder.onstart = handleStart;
83+
_recorder.onstop = () =>
84+
handleStop(
85+
() => setRecorder(undefined),
86+
() => setState(AudioState.Processing)
87+
);
88+
_recorder.ondataavailable = handleDataAvailable;
89+
_recorder.start();
5790
setRecorder(_recorder);
58-
//audioRef.current.srcObject = stream;
5991
}
6092

6193
function handleError(error: Error) {
@@ -73,13 +105,6 @@ export default function AudioRecord() {
73105
}
74106
}, [selectedDevice]);
75107

76-
useEffect(() => {
77-
if (recorder) {
78-
recorder.ondataavailable = handleDataAvailable;
79-
recorder.start();
80-
}
81-
}, [recorder]);
82-
83108
const handleClick = () => {
84109
if (state === AudioState.Ready && selectedDevice) {
85110
setFeedbackMsg(null);
@@ -89,15 +114,6 @@ export default function AudioRecord() {
89114
.getUserMedia(constraints)
90115
.then(handleSuccess)
91116
.catch(handleError);
92-
} else if (state === AudioState.Recording) {
93-
setState(AudioState.Loading);
94-
if (recorder) {
95-
recorder.stop();
96-
setRecorder(undefined);
97-
}
98-
if (mediaStream) {
99-
setMediaStream(undefined);
100-
}
101117
}
102118
};
103119

@@ -112,18 +128,29 @@ export default function AudioRecord() {
112128
break;
113129
}
114130
case AudioState.Recording: {
115-
buttonDisplay = "Click to Stop";
116-
isDisabled = false;
131+
buttonDisplay = "Recording...";
132+
isDisabled = true;
117133
break;
118134
}
119135
case AudioState.Ready: {
120136
buttonDisplay = "Click to Start";
121137
isDisabled = false;
122138
break;
123139
}
124-
case AudioState.Loading: {
140+
case AudioState.Processing: {
125141
buttonDisplay = "Processing...";
126142
isDisabled = true;
143+
break;
144+
}
145+
case AudioState.Finished: {
146+
buttonDisplay = "Finished";
147+
isDisabled = true;
148+
break;
149+
}
150+
case AudioState.Error: {
151+
buttonDisplay = "Error";
152+
isDisabled = true;
153+
break;
127154
}
128155
}
129156
return (
@@ -136,10 +163,12 @@ export default function AudioRecord() {
136163
const getStateMessage = (): string | null => {
137164
if (state === AudioState.Idle) {
138165
return "Please select an audio device";
139-
} else if (state === AudioState.Loading) {
166+
} else if (state === AudioState.Processing) {
140167
return "Processing...";
141168
} else if (state === AudioState.Recording) {
142169
return "Recording...";
170+
} else if (state === AudioState.Error) {
171+
return "An error occured while performing test. Please try again.";
143172
}
144173
return null;
145174
};
@@ -150,12 +179,11 @@ export default function AudioRecord() {
150179
{getStateMessage()}
151180
{feedbackMsg}
152181
</div>
153-
<AudioDeviceList selectDevice={setSelectedDevice} />
154-
{/*
155-
<div className="media-player">
156-
<audio id="gum-local" ref={audioRef} controls autoPlay></audio>
157-
</div>
158-
*/}
182+
{state == AudioState.Recording || state == AudioState.Processing ? (
183+
<Loading />
184+
) : (
185+
<AudioDeviceList selectDevice={setSelectedDevice} />
186+
)}
159187
{getButton()}
160188
</div>
161189
);

0 commit comments

Comments
 (0)