Commit 4642657

Initial work on the ScriptProcessorNode
1 parent d92903a commit 4642657

8 files changed: +291 −3 lines changed

examples/script_processor.rs

Lines changed: 47 additions & 0 deletions
@@ -0,0 +1,47 @@
+use rand::Rng;
+
+use web_audio_api::context::{
+    AudioContext, AudioContextLatencyCategory, AudioContextOptions, BaseAudioContext,
+};
+use web_audio_api::node::{AudioNode, AudioScheduledSourceNode};
+
+// ScriptProcessorNode example
+//
+// `cargo run --release --example script_processor`
+//
+// If you are on Linux and use ALSA as audio backend, you might want to run
+// the example with the `WEB_AUDIO_LATENCY=playback` env variable which will
+// increase the buffer size to 1024
+//
+// `WEB_AUDIO_LATENCY=playback cargo run --release --example script_processor`
+fn main() {
+    env_logger::init();
+
+    let latency_hint = match std::env::var("WEB_AUDIO_LATENCY").as_deref() {
+        Ok("playback") => AudioContextLatencyCategory::Playback,
+        _ => AudioContextLatencyCategory::default(),
+    };
+
+    let context = AudioContext::new(AudioContextOptions {
+        latency_hint,
+        ..AudioContextOptions::default()
+    });
+
+    let node = context.create_script_processor(128, 1, 1);
+    node.set_onaudioprocess(|e| {
+        let mut rng = rand::thread_rng();
+        e.output_buffer
+            .get_channel_data_mut(0)
+            .iter_mut()
+            .zip(e.input_buffer.get_channel_data(0))
+            .for_each(|(o, i)| *o = *i + rng.gen_range(-0.3..0.3));
+    });
+
+    let mut src = context.create_oscillator();
+    src.frequency().set_value(400.);
+    src.start();
+    src.connect(&node);
+    node.connect(&context.destination());
+
+    std::thread::sleep(std::time::Duration::from_millis(5000));
+}

src/context/base.rs

Lines changed: 16 additions & 0 deletions
@@ -214,6 +214,22 @@ pub trait BaseAudioContext {
         PeriodicWave::new(self.base(), options)
     }
 
+    /// Creates a `ScriptProcessorNode` for custom audio processing (deprecated)
+    #[must_use]
+    fn create_script_processor(
+        &self,
+        buffer_size: usize,
+        number_of_input_channels: usize,
+        number_of_output_channels: usize,
+    ) -> node::ScriptProcessorNode {
+        node::ScriptProcessorNode::new(
+            self.base(),
+            buffer_size,
+            number_of_input_channels,
+            number_of_output_channels,
+        )
+    }
+
     /// Creates an `StereoPannerNode` to pan a stereo output
     #[must_use]
     fn create_stereo_panner(&self) -> node::StereoPannerNode {
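
For orientation, here is a minimal sketch of how the new factory is meant to be called from user code, mirroring the call in examples/script_processor.rs above. It only illustrates the call shape (no source is connected, so the pass-through handler produces silence), and argument validation is still a TODO in this commit:

use web_audio_api::context::{AudioContext, AudioContextOptions, BaseAudioContext};
use web_audio_api::node::AudioNode;

fn main() {
    let context = AudioContext::new(AudioContextOptions::default());

    // (buffer_size, number_of_input_channels, number_of_output_channels)
    let node = context.create_script_processor(128, 1, 1);

    // copy the input channel straight to the output channel
    node.set_onaudioprocess(|e| {
        e.output_buffer
            .get_channel_data_mut(0)
            .copy_from_slice(e.input_buffer.get_channel_data(0));
    });

    node.connect(&context.destination());
}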

src/context/mod.rs

Lines changed: 1 addition & 0 deletions
@@ -90,6 +90,7 @@ impl From<u8> for AudioContextState {
 /// This object allows for communication with the render thread and dynamic lifetime management.
 //
 // The only way to construct this object is by calling [`BaseAudioContext::register`]
+#[derive(Clone)]
 pub struct AudioContextRegistration {
     /// the audio context in which nodes and connections lives
     context: ConcreteBaseAudioContext,

src/events.rs

Lines changed: 22 additions & 0 deletions
@@ -26,6 +26,7 @@ pub(crate) enum EventType {
     Diagnostics,
     Message(AudioNodeId),
     Complete,
+    AudioProcessing(AudioNodeId),
 }
 
 /// The Error Event interface
@@ -40,6 +41,19 @@ pub struct ErrorEvent {
     pub event: Event,
 }
 
+/// The AudioProcessingEvent interface
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct AudioProcessingEvent {
+    /// The input buffer
+    pub input_buffer: AudioBuffer,
+    /// The output buffer
+    pub output_buffer: AudioBuffer,
+    /// The time when the audio will be played in the same time coordinate system as the
+    /// AudioContext's currentTime.
+    pub playback_time: f64,
+}
+
 /// The OfflineAudioCompletionEvent Event interface
 #[non_exhaustive]
 #[derive(Debug)]
@@ -59,6 +73,7 @@ pub(crate) enum EventPayload {
     Message(Box<dyn Any + Send + 'static>),
     AudioContextState(AudioContextState),
     Complete(AudioBuffer),
+    AudioProcessing(AudioProcessingEvent),
 }
 
 #[derive(Debug)]
@@ -123,6 +138,13 @@ impl EventDispatch {
             payload: EventPayload::Complete(buffer),
         }
     }
+
+    pub fn audio_processing(id: AudioNodeId, value: AudioProcessingEvent) -> Self {
+        EventDispatch {
+            type_: EventType::AudioProcessing(id),
+            payload: EventPayload::AudioProcessing(value),
+        }
+    }
 }
 
 pub(crate) enum EventHandler {
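
The three public fields of AudioProcessingEvent are what an onaudioprocess callback interacts with. A hedged sketch of such a handler, written as a free function so the field usage is visible in one place; the fixed gain and the println are illustrative only, not part of this commit:

use web_audio_api::node::ScriptProcessorNode;

// Illustrative helper: scale channel 0 of the input by `gain` and log when the
// resulting block is scheduled to be heard.
fn install_gain_handler(node: &ScriptProcessorNode, gain: f32) {
    node.set_onaudioprocess(move |e| {
        println!("block plays at t = {:.3} s", e.playback_time);
        e.output_buffer
            .get_channel_data_mut(0)
            .iter_mut()
            .zip(e.input_buffer.get_channel_data(0))
            .for_each(|(o, i)| *o = *i * gain);
    });
}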

src/lib.rs

Lines changed: 1 addition & 1 deletion
@@ -31,7 +31,7 @@ pub mod media_streams;
 pub mod node;
 
 mod events;
-pub use events::{ErrorEvent, Event, OfflineAudioCompletionEvent};
+pub use events::*;
 
 mod message_port;
 pub use message_port::MessagePort;

src/node/mod.rs

Lines changed: 2 additions & 0 deletions
@@ -47,6 +47,8 @@ mod oscillator;
 pub use oscillator::*;
 mod panner;
 pub use panner::*;
+mod script_processor;
+pub use script_processor::*;
 mod stereo_panner;
 pub use stereo_panner::*;
 mod waveshaper;

src/node/script_processor.rs

Lines changed: 182 additions & 0 deletions
@@ -0,0 +1,182 @@
+use crate::context::{AudioContextRegistration, BaseAudioContext};
+use crate::events::{AudioProcessingEvent, EventHandler, EventPayload, EventType};
+use crate::node::{ChannelCountMode, ChannelInterpretation};
+use crate::render::{
+    AudioParamValues, AudioProcessor, AudioRenderQuantum, AudioWorkletGlobalScope,
+};
+use crate::{AudioBuffer, RENDER_QUANTUM_SIZE};
+
+use super::{AudioNode, ChannelConfig, ChannelConfigOptions};
+
+use std::any::Any;
+
+/// An AudioNode which can generate, process, or analyse audio directly using a script (deprecated)
+#[derive(Debug)]
+pub struct ScriptProcessorNode {
+    registration: AudioContextRegistration,
+    channel_config: ChannelConfig,
+    // bufferSize MUST be one of the following values: 256, 512, 1024, 2048, 4096, 8192, 16384
+    buffer_size: usize,
+}
+
+impl AudioNode for ScriptProcessorNode {
+    fn registration(&self) -> &AudioContextRegistration {
+        &self.registration
+    }
+
+    fn channel_config(&self) -> &ChannelConfig {
+        &self.channel_config
+    }
+
+    fn number_of_inputs(&self) -> usize {
+        1
+    }
+
+    fn number_of_outputs(&self) -> usize {
+        1
+    }
+
+    // TODO channel config constraints
+}
+
+impl ScriptProcessorNode {
+    pub(crate) fn new<C: BaseAudioContext>(
+        context: &C,
+        buffer_size: usize,
+        number_of_input_channels: usize,
+        number_of_output_channels: usize,
+    ) -> Self {
+        // TODO assert valid arguments
+
+        context.base().register(move |registration| {
+            let render = ScriptProcessorRenderer {
+                buffer: None,
+                buffer_size,
+                number_of_output_channels,
+            };
+
+            let channel_config = ChannelConfigOptions {
+                count: number_of_input_channels,
+                count_mode: ChannelCountMode::Explicit,
+                interpretation: ChannelInterpretation::Speakers,
+            };
+
+            let node = ScriptProcessorNode {
+                registration,
+                channel_config: channel_config.into(),
+                buffer_size,
+            };
+
+            (node, Box::new(render))
+        })
+    }
+
+    pub fn buffer_size(&self) -> usize {
+        self.buffer_size
+    }
+
+    /// Register callback to run when the AudioProcessingEvent is dispatched
+    ///
+    /// The event handler processes audio from the input (if any) by accessing the audio data from
+    /// the inputBuffer attribute. The audio data which is the result of the processing (or the
+    /// synthesized data if there are no inputs) is then placed into the outputBuffer.
+    ///
+    /// Only a single event handler is active at any time. Calling this method multiple times will
+    /// override the previous event handler.
+    pub fn set_onaudioprocess<F: FnMut(&mut AudioProcessingEvent) + Send + 'static>(
+        &self,
+        mut callback: F,
+    ) {
+        let registration = self.registration.clone();
+        let callback = move |v| {
+            let mut payload = match v {
+                EventPayload::AudioProcessing(v) => v,
+                _ => unreachable!(),
+            };
+            callback(&mut payload);
+            registration.post_message(payload.output_buffer);
+        };
+
+        self.context().set_event_handler(
+            EventType::AudioProcessing(self.registration().id()),
+            EventHandler::Multiple(Box::new(callback)),
+        );
+    }
+
+    /// Unset the callback to run when the AudioProcessingEvent is dispatched
+    pub fn clear_onaudioprocess(&self) {
+        self.context()
+            .clear_event_handler(EventType::AudioProcessing(self.registration().id()));
+    }
+}
+
+struct ScriptProcessorRenderer {
+    buffer: Option<AudioRenderQuantum>, // TODO buffer_size
+    buffer_size: usize,
+    number_of_output_channels: usize,
+}
+
+// SAFETY:
+// AudioRenderQuantums are not Send but we promise the `buffer` is None before we ship it to the
+// render thread.
+#[allow(clippy::non_send_fields_in_send_ty)]
+unsafe impl Send for ScriptProcessorRenderer {}
+
+impl AudioProcessor for ScriptProcessorRenderer {
+    fn process(
+        &mut self,
+        inputs: &[AudioRenderQuantum],
+        outputs: &mut [AudioRenderQuantum],
+        _params: AudioParamValues<'_>,
+        scope: &AudioWorkletGlobalScope,
+    ) -> bool {
+        // single input/output node
+        let input = &inputs[0];
+        let output = &mut outputs[0];
+
+        let mut silence = input.clone();
+        silence.make_silent();
+        if let Some(buffer) = self.buffer.replace(silence) {
+            *output = buffer;
+        }
+
+        // TODO buffer_size
+        let input_samples = input.channels().iter().map(|c| c.to_vec()).collect();
+        let input_buffer = AudioBuffer::from(input_samples, scope.sample_rate);
+        let output_samples = vec![vec![0.; RENDER_QUANTUM_SIZE]; self.number_of_output_channels];
+        let output_buffer = AudioBuffer::from(output_samples, scope.sample_rate);
+
+        let playback_time =
+            scope.current_time + (RENDER_QUANTUM_SIZE as f32 / scope.sample_rate) as f64; // TODO
+        scope.send_audio_processing_event(input_buffer, output_buffer, playback_time);
+
+        true // TODO - spec says false but that seems weird
+    }
+
+    fn onmessage(&mut self, msg: &mut dyn Any) {
+        if let Some(buffer) = msg.downcast_mut::<AudioBuffer>() {
+            if let Some(render_quantum) = &mut self.buffer {
+                buffer
+                    .channels()
+                    .iter()
+                    .zip(render_quantum.channels_mut())
+                    .for_each(|(i, o)| o.copy_from_slice(i.as_slice())); // TODO bounds check
+            }
+            return;
+        };
+
+        log::warn!("ScriptProcessorRenderer: Dropping incoming message {msg:?}");
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::context::OfflineAudioContext;
+    use float_eq::assert_float_eq;
+
+    #[test]
+    fn test() {
+        // TODO how to test?
+    }
+}

src/render/processor.rs

Lines changed: 20 additions & 2 deletions
@@ -1,7 +1,7 @@
 //! Audio processing code that runs on the audio rendering thread
 use crate::context::{AudioNodeId, AudioParamId};
-use crate::events::{ErrorEvent, EventDispatch};
-use crate::{Event, RENDER_QUANTUM_SIZE};
+use crate::events::{AudioProcessingEvent, ErrorEvent, EventDispatch};
+use crate::{AudioBuffer, Event, RENDER_QUANTUM_SIZE};
 
 use super::{graph::Node, AudioRenderQuantum, NodeCollection};
 
@@ -57,6 +57,24 @@ impl AudioWorkletGlobalScope {
             .try_send(EventDispatch::ended(self.node_id.get()));
     }
 
+    pub(crate) fn send_audio_processing_event(
+        &self,
+        input_buffer: AudioBuffer,
+        output_buffer: AudioBuffer,
+        playback_time: f64,
+    ) {
+        if let Some(sender) = self.event_sender.as_ref() {
+            // sending could fail if the channel is saturated or the main thread is shutting down
+            let event = AudioProcessingEvent {
+                input_buffer,
+                output_buffer,
+                playback_time,
+            };
+            let dispatch = EventDispatch::audio_processing(self.node_id.get(), event);
+            let _ = sender.try_send(dispatch);
+        }
+    }
+
     pub(crate) fn report_error(&self, error: Box<dyn Any + Send>) {
         pub fn type_name_of_val<T: ?Sized>(_val: &T) -> &'static str {
             std::any::type_name::<T>()
