Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
157 changes: 123 additions & 34 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,47 +41,136 @@ To add to your project, run
```sh
yarn add @spotify/basic-pitch
```
or
```sh
npm install @spotify/basic-pitch
```

From there you can look at `src/inference.test.ts` for examples of how to use Basic Pitch. To summarize how to use it,
From there you can look at `src/inference.test.ts` for examples of how to use Basic Pitch.

For instance, here's a detailed example of a TypeScript function called `audioToNoteEvents` that allows you to manipulate the detected notes through an input callback function (see `onSuccess`):

```typescript
const audioCtx = new AudioContext();
let audioBuffer = undefined;

audioCtx.decodeAudioData(
fs.readFileSync(/* Path to audio file */),
async (_audioBuffer: AudioBuffer) => {
audioBuffer = _audioBuffer;
},
() => {},
);

while (audioBuffer === undefined) {
await new Promise(r => setTimeout(r, 1));
// audioToNoteEvents.ts

import { BasicPitch, noteFramesToTime, addPitchBendsToNoteEvents, outputToNotesPoly } from "@spotify/basic-pitch";

/**
* Converts audio to note events using the BasicPitch library.
*
* @param audioURL - The URL of the audio file.
* @param onSuccess - A callback function called when the conversion is successful. It receives an array of note events.
* @param onError - An optional callback function called if an error occurs during the conversion.
* @param onProgress - An optional callback function called to get progress percentage value.
* @param options - Optional detection options.
* @returns A promise that resolves when the conversion is complete.
*/
export default async function audioToNoteEvents(
  audioURL: string,
  onSuccess: onSuccessCallback,
  onError?: onErrorCallback,
  onProgress?: onProgressCallback,
  options?: detectionOptions
): Promise<void> {
  // BasicPitch expects 22050 Hz input; the AudioContext resamples on decode.
  const audioContext = new AudioContext({ sampleRate: 22050 });

  // Accumulators for the model's per-window outputs.
  const frames: number[][] = [];
  const onsets: number[][] = [];
  const contours: number[][] = [];

  try {
    const response = await fetch(audioURL);
    // fetch() only rejects on network failure — surface HTTP errors (404, 500, …)
    // explicitly instead of letting decodeAudioData fail with a cryptic message.
    if (!response.ok) {
      throw new Error(`Failed to fetch audio: ${response.status} ${response.statusText}`);
    }
    const arrayBuffer = await response.arrayBuffer();
    const decodedData = await audioContext.decodeAudioData(arrayBuffer);

    // Instantiate the BasicPitch model
    const basicPitch = new BasicPitch("https://raw.githubusercontent.com/spotify/basic-pitch-ts/main/model/model.json");

    // Evaluate the model on the decoded audio data
    await basicPitch.evaluateModel(
      decodedData,
      (frame: number[][], onset: number[][], contour: number[][]) => {
        // Collect the frame, onset, and contour data
        frames.push(...frame);
        onsets.push(...onset);
        contours.push(...contour);
      },
      (pct: number) => {
        if (onProgress) {
          onProgress(pct);
        }
      }
    );

    // Destructure the options with default values
    const {
      onsetThresh = 0.5,
      frameThresh = 0.3,
      minNoteLen = 5,
      inferOnsets = true,
      maxFreq = null,
      minFreq = null,
      melodiaTrick = true,
      energyTolerance = 11,
    } = options || {};

    // Convert the collected data to note events
    const notes = noteFramesToTime(
      addPitchBendsToNoteEvents(
        contours,
        outputToNotesPoly(frames, onsets, onsetThresh, frameThresh, minNoteLen, inferOnsets, maxFreq, minFreq, melodiaTrick, energyTolerance)
      )
    );
    const noteEvents = notes.map((n) => ({
      pitch: n.pitchMidi,
      duration: n.durationSeconds,
      onset: n.startTimeSeconds,
      pitchBends: n.pitchBends,
      velocity: n.amplitude,
    }));

    // Sort the note events by onset time and pitch
    noteEvents.sort((a, b) => a.onset - b.onset || a.pitch - b.pitch);

    // Call the success callback with the resulting note events
    onSuccess(noteEvents);
  } catch (error) {
    // Call the error callback if provided
    if (onError) {
      onError(error);
    }
  } finally {
    // Release the audio hardware resources on every path — browsers limit
    // the number of concurrently live AudioContexts, so leaking them makes
    // later calls fail. Fire-and-forget: callers don't need to wait for it.
    void audioContext.close();
  }
}

const basicPitch = new BasicPitch(model);
await basicPitch.evaluateModel(
audioBuffer as unknown as AudioBuffer,
(f: number[][], o: number[][], c: number[][]) => {
frames.push(...f);
onsets.push(...o);
contours.push(...c);
},
(p: number) => {
pct = p;
},
);

const notes = noteFramesToTime(
addPitchBendsToNoteEvents(
contours,
outputToNotesPoly(frames, onsets, 0.25, 0.25, 5),
),
);
// Define note event
type NoteEvent = {
pitch: number;
onset: number;
duration: number;
velocity?: number;
pitchBends?: number[];
};

// Define the options for audio detection
type detectionOptions = {
onsetThresh?: number;
frameThresh?: number;
minNoteLen?: number;
inferOnsets?: boolean;
maxFreq?: number | null;
minFreq?: number | null;
melodiaTrick?: boolean;
energyTolerance?: number;
};

// Define the callback types
type onSuccessCallback = (notes: NoteEvent[]) => void;
type onErrorCallback = (error: any) => void;
type onProgressCallback = (percent: number) => void;

```

You can then use `notes` in your application however you wish!
You can then use this function in your application however you wish!

### Scripts

Expand Down