
Commit 9760dd0

update inline examples (#388) (#403)
- Update inline examples to start the audio context on a user gesture, either by playing a sound (soundFile.play() / oscillator.start()) or by explicitly calling userStartAudio() (#388)
- Use let in examples instead of var, but not const, as discussed in processing/p5.js#3877, where it was decided to use let exclusively in examples
- Update styles for consistency with other p5 inline examples

Some examples use soundFile.play() or oscillator.start() rather than a potentially redundant call to userStartAudio(). It might be worth making that redundant call anyway, because the difference between methods that call "start" on a WebAudio node (thus enabling audio on a user gesture) and those that do not is pretty opaque.
1 parent 7ce64b9 commit 9760dd0

22 files changed: +1010 −769 lines
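
The commit message above names two ways to unlock audio under browser autoplay policies: implicitly, by starting a WebAudio node inside a gesture handler, or explicitly, via userStartAudio(). A minimal sketch of both patterns side by side — not part of this commit; it assumes p5.js with p5.sound loaded and the assets/beat.mp3 file used throughout the examples below:

let osc, soundFile;

function preload() {
  soundFile = loadSound('assets/beat.mp3');
}

function setup() {
  createCanvas(100, 100);
  osc = new p5.Oscillator('sine');
}

function mousePressed() {
  // Explicit: resume the suspended AudioContext on this user gesture.
  userStartAudio();
  // Implicit: calling start()/play() on a node inside a gesture handler
  // also unlocks audio, which is what makes the call above "potentially
  // redundant" in the commit message's terms.
  osc.start();
  soundFile.play();
}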

fragments/before.frag

Lines changed: 32 additions & 26 deletions
@@ -1,40 +1,46 @@
 /**
- *  p5.sound extends p5 with <a href="http://caniuse.com/audio-api"
+ *  <p>p5.sound extends p5 with <a href="http://caniuse.com/audio-api"
  *  target="_blank">Web Audio</a> functionality including audio input,
  *  playback, analysis and synthesis.
- *  <br/><br/>
- *  <a href="#/p5.SoundFile"><b>p5.SoundFile</b></a>: Load and play sound files.<br/>
- *  <a href="#/p5.Amplitude"><b>p5.Amplitude</b></a>: Get the current volume of a sound.<br/>
- *  <a href="#/p5.AudioIn"><b>p5.AudioIn</b></a>: Get sound from an input source, typically
- *  a computer microphone.<br/>
- *  <a href="#/p5.FFT"><b>p5.FFT</b></a>: Analyze the frequency of sound. Returns
- *  results from the frequency spectrum or time domain (waveform).<br/>
- *  <a href="#/p5.Oscillator"><b>p5.Oscillator</b></a>: Generate Sine,
+ *  </p>
+ *  <ul>
+ *  <li><a href="#/p5.SoundFile"><b>p5.SoundFile</b></a>: Load and play sound files.</li>
+ *  <li><a href="#/p5.Amplitude"><b>p5.Amplitude</b></a>: Get the current volume of a sound.</li>
+ *  <li><a href="#/p5.AudioIn"><b>p5.AudioIn</b></a>: Get sound from an input source, typically
+ *  a computer microphone.</li>
+ *  <li><a href="#/p5.FFT"><b>p5.FFT</b></a>: Analyze the frequency of sound. Returns
+ *  results from the frequency spectrum or time domain (waveform).</li>
+ *  <li><a href="#/p5.Oscillator"><b>p5.Oscillator</b></a>: Generate Sine,
  *  Triangle, Square and Sawtooth waveforms. Base class of
- *  <a href="#/p5.Noise">p5.Noise</a> and <a href="#/p5.Pulse">p5.Pulse</a>.
- *  <br/>
- *  <a href="#/p5.Envelope"><b>p5.Envelope</b></a>: An Envelope is a series
+ *  <li><a href="#/p5.Noise">p5.Noise</a> and <a href="#/p5.Pulse">p5.Pulse</a>.
+ *  </li>
+ *  <li>
+ *  <a href="#/p5.MonoSynth">p5.MonoSynth</a> and <a href="#/p5.PolySynth">p5.PolySynth</a>: Play musical notes
+ *  </li>
+ *  <li><a href="#/p5.Envelope"><b>p5.Envelope</b></a>: An Envelope is a series
  *  of fades over time. Often used to control an object's
  *  output gain level as an "ADSR Envelope" (Attack, Decay,
- *  Sustain, Release). Can also modulate other parameters.<br/>
- *  <a href="#/p5.Delay"><b>p5.Delay</b></a>: A delay effect with
- *  parameters for feedback, delayTime, and lowpass filter.<br/>
- *  <a href="#/p5.Filter"><b>p5.Filter</b></a>: Filter the frequency range of a
+ *  Sustain, Release). Can also modulate other parameters.</li>
+ *  <li><a href="#/p5.Delay"><b>p5.Delay</b></a>: A delay effect with
+ *  parameters for feedback, delayTime, and lowpass filter.</li>
+ *  <li><a href="#/p5.Filter"><b>p5.Filter</b></a>: Filter the frequency range of a
  *  sound.
- *  <br/>
- *  <a href="#/p5.Reverb"><b>p5.Reverb</b></a>: Add reverb to a sound by specifying
- *  duration and decay. <br/>
- *  <b><a href="#/p5.Convolver">p5.Convolver</a>:</b> Extends
+ *  </li>
+ *  <li><a href="#/p5.Reverb"><b>p5.Reverb</b></a>: Add reverb to a sound by specifying
+ *  duration and decay. </li>
+ *  <b><li><a href="#/p5.Convolver">p5.Convolver</a>:</b> Extends
  *  <a href="#/p5.Reverb">p5.Reverb</a> to simulate the sound of real
- *  physical spaces through convolution.<br/>
- *  <b><a href="#/p5.SoundRecorder">p5.SoundRecorder</a></b>: Record sound for playback
+ *  physical spaces through convolution.</li>
+ *  <b><li><a href="#/p5.SoundRecorder">p5.SoundRecorder</a></b>: Record sound for playback
  *  / save the .wav file.
- *  <b><a href="#/p5.Phrase">p5.Phrase</a></b>, <b><a href="#/p5.Part">p5.Part</a></b> and
+ *  <b><li><a href="#/p5.SoundLoop">p5.SoundLoop</a>, <a href="#/p5.Phrase">p5.Phrase</a></b>, <b><a href="#/p5.Part">p5.Part</a></b> and
  *  <b><a href="#/p5.Score">p5.Score</a></b>: Compose musical sequences.
- *  <br/><br/>
- *  p5.sound is on <a href="https://github.com/therewasaguy/p5.sound/">GitHub</a>.
+ *  </li>
+ *  <li><a href="#/p5/userStartAudio">userStartAudio</a>: Enable audio in a
+ *  browser- and user-friendly way.</a>
+ *  <p>p5.sound is on <a href="https://github.com/therewasaguy/p5.sound/">GitHub</a>.
  *  Download the latest version
- *  <a href="https://github.com/therewasaguy/p5.sound/blob/master/lib/p5.sound.js">here</a>.
+ *  <a href="https://github.com/therewasaguy/p5.sound/blob/master/lib/p5.sound.js">here</a>.</p>
  *
  *  @module p5.sound
  *  @submodule p5.sound

src/amplitude.js

Lines changed: 52 additions & 31 deletions
@@ -17,32 +17,34 @@ define(function (require) {
  *  amplitude readings (defaults to 0)
  *  @example
  *  <div><code>
- *  var sound, amplitude, cnv;
+ *  let sound, amplitude;
  *
  *  function preload(){
  *    sound = loadSound('assets/beat.mp3');
  *  }
  *  function setup() {
- *    cnv = createCanvas(100,100);
+ *    let cnv = createCanvas(100,100);
+ *    cnv.mouseClicked(toggleSound);
  *    amplitude = new p5.Amplitude();
- *
- *    // start / stop the sound when canvas is clicked
- *    cnv.mouseClicked(function() {
- *      if (sound.isPlaying() ){
- *        sound.stop();
- *      } else {
- *        sound.play();
- *      }
- *    });
  *  }
+ *
  *  function draw() {
- *    background(0);
- *    fill(255);
- *    var level = amplitude.getLevel();
- *    var size = map(level, 0, 1, 0, 200);
+ *    background(220);
+ *    text('tap to play', 20, 20);
+ *
+ *    let level = amplitude.getLevel();
+ *    let size = map(level, 0, 1, 0, 200);
  *    ellipse(width/2, height/2, size, size);
  *  }
  *
+ *  function toggleSound() {
+ *    if (sound.isPlaying() ){
+ *      sound.stop();
+ *    } else {
+ *      sound.play();
+ *    }
+ *  }
+ *
  *  </code></div>
  */
 p5.Amplitude = function(smoothing) {
@@ -117,21 +119,30 @@ define(function (require) {
  *    sound2 = loadSound('assets/drum.mp3');
  *  }
  *  function setup(){
+ *    cnv = createCanvas(100, 100);
+ *    cnv.mouseClicked(toggleSound);
+ *
  *    amplitude = new p5.Amplitude();
- *    sound1.play();
- *    sound2.play();
  *    amplitude.setInput(sound2);
  *  }
+ *
  *  function draw() {
- *    background(0);
- *    fill(255);
- *    var level = amplitude.getLevel();
- *    var size = map(level, 0, 1, 0, 200);
+ *    background(220);
+ *    text('tap to play', 20, 20);
+ *
+ *    let level = amplitude.getLevel();
+ *    let size = map(level, 0, 1, 0, 200);
  *    ellipse(width/2, height/2, size, size);
  *  }
- *  function mouseClicked(){
- *    sound1.stop();
- *    sound2.stop();
+ *
+ *  function toggleSound(){
+ *    if (sound1.isPlaying() && sound2.isPlaying()) {
+ *      sound1.stop();
+ *      sound2.stop();
+ *    } else {
+ *      sound1.play();
+ *      sound2.play();
+ *    }
  *  }
  *  </code></div>
  */
@@ -197,19 +208,29 @@ define(function (require) {
  *  function preload(){
  *    sound = loadSound('assets/beat.mp3');
  *  }
+ *
  *  function setup() {
+ *    let cnv = createCanvas(100, 100);
+ *    cnv.mouseClicked(toggleSound);
  *    amplitude = new p5.Amplitude();
- *    sound.play();
  *  }
+ *
  *  function draw() {
- *    background(0);
- *    fill(255);
- *    var level = amplitude.getLevel();
- *    var size = map(level, 0, 1, 0, 200);
+ *    background(220, 150);
+ *    textAlign(CENTER);
+ *    text('tap to play', width/2, 20);
+ *
+ *    let level = amplitude.getLevel();
+ *    let size = map(level, 0, 1, 0, 200);
  *    ellipse(width/2, height/2, size, size);
  *  }
- *  function mouseClicked(){
- *    sound.stop();
+ *
+ *  function toggleSound(){
+ *    if (sound.isPlaying()) {
+ *      sound.stop();
+ *    } else {
+ *      sound.play();
+ *    }
  *  }
  *  </code></div>
  */
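
None of the updated examples pass the smoothing argument that the first hunk's doc text mentions ("amplitude readings (defaults to 0)"). A hedged sketch of that parameter, reusing the assets/beat.mp3 asset from the examples above — the 0.9 value is illustrative, not from this diff:

let sound, amplitude;

function preload() {
  sound = loadSound('assets/beat.mp3');
}

function setup() {
  let cnv = createCanvas(100, 100);
  cnv.mouseClicked(function() {
    if (sound.isPlaying()) {
      sound.stop();
    } else {
      sound.play();
    }
  });
  // 0.9 averages recent readings for a steadier level;
  // 0 (the default) reports raw, jumpier values.
  amplitude = new p5.Amplitude(0.9);
}

function draw() {
  background(220);
  text('tap to play', 20, 20);
  let size = map(amplitude.getLevel(), 0, 1, 0, 200);
  ellipse(width / 2, height / 2, size, size);
}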

src/audiocontext.js

Lines changed: 36 additions & 24 deletions
@@ -19,6 +19,7 @@ define(['startaudiocontext', 'Tone/core/Context', 'Tone/core/Tone'], function (S
  *  <p>Some browsers require users to startAudioContext
  *  with a user gesture, such as touchStarted in the example below.</p>
  *
+ *  @for p5
  *  @method getAudioContext
  *  @return {Object}    AudioContext for this sketch
  *  @example
@@ -50,44 +51,55 @@ define(['startaudiocontext', 'Tone/core/Context', 'Tone/core/Tone'], function (S
 
 
 /**
- *  <p>It is a good practice to give users control over starting audio playback.
- *  This practice is enforced by Google Chrome's autoplay policy as of r70
- *  (<a href="https://goo.gl/7K7WLu">info</a>), iOS Safari, and other browsers.
- *  </p>
+ *  <p>It is not only a good practice to give users control over starting
+ *  audio. This policy is enforced by many web browsers, including iOS and
+ *  <a href="https://goo.gl/7K7WLu" title="Google Chrome's autoplay
+ *  policy">Google Chrome</a>, which create the Web Audio API's
+ *  <a href="https://developer.mozilla.org/en-US/docs/Web/API/AudioContext"
+ *  title="Audio Context @ MDN">Audio Context</a>
+ *  in a suspended state.</p>
  *
- *  <p>
- *  userStartAudio() starts the <a href="https://developer.mozilla.org/en-US/docs/Web/API/AudioContext"
- *  target="_blank" title="Audio Context @ MDN">Audio Context</a> on a user gesture. It utilizes
- *  the <a href="https://github.com/tambien/StartAudioContext">StartAudioContext</a> library by
- *  Yotam Mann (MIT Licence, 2016). Read more at https://github.com/tambien/StartAudioContext.
- *  </p>
+ *  <p>In these browser-specific policies, sound will not play until a user
+ *  interaction event (i.e. <code>mousePressed()</code>) explicitly resumes
+ *  the AudioContext, or starts an audio node. This can be accomplished by
+ *  calling <code>start()</code> on a <code>p5.Oscillator</code>,
+ *  <code> play()</code> on a <code>p5.SoundFile</code>, or simply
+ *  <code>userStartAudio()</code>.</p>
  *
- *  <p>Starting the audio context on a user gesture can be as simple as <code>userStartAudio()</code>.
- *  Optional parameters let you decide on a specific element that will start the audio context,
- *  and/or call a function once the audio context is started.</p>
+ *  <p><code>userStartAudio()</code> starts the AudioContext on a user
+ *  gesture. The default behavior will enable audio on any
+ *  mouseUp or touchEnd event. It can also be placed in a specific
+ *  interaction function, such as <code>mousePressed()</code> as in the
+ *  example below. This method utilizes
+ *  <a href="https://github.com/tambien/StartAudioContext">StartAudioContext
+ *  </a>, a library by Yotam Mann (MIT Licence, 2016).</p>
  *  @param  {Element|Array}   [element(s)] This argument can be an Element,
  *                                Selector String, NodeList, p5.Element,
  *                                jQuery Element, or an Array of any of those.
- *  @param  {Function} [callback] Callback to invoke when the AudioContext has started
- *  @return {Promise}            Returns a Promise which is resolved when
+ *  @param  {Function} [callback] Callback to invoke when the AudioContext
+ *                                has started
+ *  @return {Promise}            Returns a Promise that resolves when
  *                                       the AudioContext state is 'running'
  *  @method userStartAudio
  *  @for p5
  *  @example
  *  <div><code>
  *  function setup() {
- *    var myDiv = createDiv('click to start audio');
- *    myDiv.position(0, 0);
+ *    // mimics the autoplay policy
+ *    getAudioContext().suspend();
  *
- *    var mySynth = new p5.MonoSynth();
+ *    let mySynth = new p5.MonoSynth();
  *
- *    // This won't play until the context has started
+ *    // This won't play until the context has resumed
  *    mySynth.play('A6');
- *
- *    // Start the audio context on a click/touch event
- *    userStartAudio().then(function() {
- *      myDiv.remove();
- *    });
+ *  }
+ *  function draw() {
+ *    background(220);
+ *    textAlign(CENTER, CENTER);
+ *    text(getAudioContext().state, width/2, height/2);
+ *  }
+ *  function mousePressed() {
+ *    userStartAudio();
  *  }
  *  </code></div>
  */
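
The updated docs keep userStartAudio()'s optional element(s) and callback parameters but no longer demonstrate them. A short sketch of that form, based only on the signature documented above — the button label and its removal are illustrative:

let startButton;

function setup() {
  createCanvas(100, 100);
  // mimic the autoplay policy, as in the example above
  getAudioContext().suspend();
  startButton = createButton('start audio');
  // Only a gesture on this specific element starts the context; the
  // callback (and the returned Promise) fire once its state is 'running'.
  userStartAudio(startButton, function() {
    startButton.remove();
  });
}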

src/audioin.js

Lines changed: 43 additions & 12 deletions
@@ -29,15 +29,24 @@ define(function (require) {
  *  currently allow microphone access.
  *  @example
  *  <div><code>
- *  var mic;
- *  function setup(){
- *    mic = new p5.AudioIn()
+ *  let mic;
+ *
+ *  function setup(){
+ *    let cnv = createCanvas(100, 100);
+ *    cnv.mousePressed(userStartAudio);
+ *    textAlign(CENTER);
+ *    mic = new p5.AudioIn();
  *    mic.start();
  *  }
+ *
  *  function draw(){
  *    background(0);
+ *    fill(255);
+ *    text('tap to start', width/2, 20);
+ *
  *    micLevel = mic.getLevel();
- *    ellipse(width/2, constrain(height-micLevel*height*5, 0, height), 10, 10);
+ *    let y = height - micLevel * height;
+ *    ellipse(width/2, y, 10, 10);
  *  }
  *  </code></div>
  */
@@ -267,18 +276,21 @@ define(function (require) {
  *  to the enumerateDevices() method
  *  @example
  *  <div><code>
- *  var audiograb;
+ *  let audioIn;
  *
  *  function setup(){
- *    //new audioIn
- *    audioGrab = new p5.AudioIn();
+ *    text('getting sources...', 0, 20);
+ *    audioIn = new p5.AudioIn();
+ *    audioIn.getSources(gotSources);
+ *  }
  *
- *    audioGrab.getSources(function(deviceList) {
- *      //print out the array of available sources
- *      console.log(deviceList);
+ *  function gotSources(deviceList) {
+ *    if (deviceList.length > 0) {
  *      //set the source to the first item in the deviceList array
- *      audioGrab.setSource(0);
- *    });
+ *      audioIn.setSource(0);
+ *      let currentSource = deviceList[audioIn.currentSource];
+ *      text('set source to: ' + currentSource.deviceId, 5, 20, width);
+ *    }
  *  }
  *  </code></div>
  */
@@ -316,6 +328,25 @@ define(function (require) {
  *  @method setSource
  *  @for p5.AudioIn
  *  @param {number} num position of input source in the array
+ *  @example
+ *  <div><code>
+ *  let audioIn;
+ *
+ *  function setup(){
+ *    text('getting sources...', 0, 20);
+ *    audioIn = new p5.AudioIn();
+ *    audioIn.getSources(gotSources);
+ *  }
+ *
+ *  function gotSources(deviceList) {
+ *    if (deviceList.length > 0) {
+ *      //set the source to the first item in the deviceList array
+ *      audioIn.setSource(0);
+ *      let currentSource = deviceList[audioIn.currentSource];
+ *      text('set source to: ' + currentSource.deviceId, 5, 20, width);
+ *    }
+ *  }
+ *  </code></div>
  */
 p5.AudioIn.prototype.setSource = function(num) {
   if (p5sound.inputSources.length > 0 && num < p5sound.inputSources.length) {
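
The new getSources()/setSource() examples select only the first device. A variant sketch that lists every available input instead — deviceId and label are standard MediaDeviceInfo fields, and label may be empty until the user grants microphone permission:

let audioIn;

function setup() {
  createCanvas(100, 100);
  audioIn = new p5.AudioIn();
  audioIn.getSources(gotSources);
}

function gotSources(deviceList) {
  background(220);
  for (let i = 0; i < deviceList.length; i++) {
    // Fall back to deviceId when the label is not yet available.
    let name = deviceList[i].label || deviceList[i].deviceId;
    text(i + ': ' + name, 5, 15 + i * 15, width);
  }
}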
