First draft!
commit 8754e1907b (parent 6a1182886d)
3 changed files with 286 additions and 0 deletions
BIN  content/blog/2022/03/audio-scope-with-p5js/amen.mp3  (new file)
Binary file not shown.
197  content/blog/2022/03/audio-scope-with-p5js/index.md  (new file)
@@ -0,0 +1,197 @@
---
title: "Making an Audio Scope with P5.js"
date: 2022-03-24T08:48:37-07:00
draft: true
tags: ["P5.js", "Programming", "Tech", "Web"]
resources:
- src: sketch.js
params:
  is_module: false
---

{{< audio id="amen" mp3="amen.mp3" >}}

This is a quick write-up to share with y'all a small project I've been working
on using [P5.js][p5] and [Web Audio][webaudio] to implement some audio
visualizations. By the end, we'll have something like this:

{{< p5_sketch id="oscilloscopeFinal" bordered=1 >}}

## Embedding an Audio File

HTML has the ability to [embed audio][mdn-audio-tag] in a page with the
`<audio>` tag. This one declares a single MP3 file as a source.

```html
<audio id="amen">
  <source src="amen.mp3" type="audio/mpeg">
</audio>
```

In this form, the `<audio>` element doesn't do anything except declare some
audio that can be played. It's invisible and the user can't interact with it or
control playback. That's fine, because I'm going to implement my own playback
control as part of my sketch below.


## Processing Audio with Web Audio

Web Audio uses a node-based paradigm to process audio. Audio flows from source
nodes, through a web of interconnected processing nodes, and out through
destination nodes.

Sources can be `<audio>` tags or realtime waveform generators; processing nodes
might be filters, gain adjustments, or more complex effects like reverb; and
destinations could be your computer's speakers or a file.
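
To make that concrete, here's a minimal graph that isn't part of this post's
sketch: a generated tone (an `OscillatorNode` source) running through a
`GainNode` and out to the speakers.

```js
// Illustration only: a tiny source -> processing -> destination graph.
const ctx = new AudioContext();

const osc = ctx.createOscillator();  // source: a generated sine wave
osc.frequency.value = 440;

const gain = ctx.createGain();       // processing: turn the volume down
gain.gain.value = 0.25;

osc.connect(gain).connect(ctx.destination);  // destination: the speakers
osc.start();
```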

Here's the entire code snippet that sets up the audio processing I need for the
sketch:

```js {linenostart=2}
let analyzerNode = null;
let samples = null;

let audioElement = (() => {
  return document.querySelector('audio#amen');
})();

let audioContext = (() => {
  const audioContext = new AudioContext();
  const track =
    audioContext.createMediaElementSource(audioElement);
  analyzerNode = audioContext.createAnalyser();

  track.connect(analyzerNode)
    .connect(audioContext.destination);

  return audioContext;
})();
```

The [`AudioContext`][mdn-audio-context] is the object that encapsulates the
entire node graph. On line 10, I create a new `AudioContext`.

On line 11, I create a [`MediaElementAudioSourceNode`][mdn-webaudio-media-source-tag]
with the `<audio>` element I declared on this page.

Next, line 13 creates an [`AnalyserNode`][mdn-analyzer-node]. Analyser nodes don't
affect the audio that flows through them. Instead, this node gives the sketch
access to the raw audio samples as they're passing through the AudioContext.
We'll use this to plot the waveform as the audio is playing!
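
As a rough sketch of what that access looks like (assuming the analyser's
default `fftSize` of 2048), you hand the node a typed array and it copies the
most recent time-domain samples into it:

```js
// Illustration only; the real sketch does this inside p.draw() further down.
const buffer = new Float32Array(analyzerNode.fftSize);  // fftSize defaults to 2048
analyzerNode.getFloatTimeDomainData(buffer);            // sample values roughly in [-1, 1]
```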

Line 15 hooks up the nodes in the graph. We connect the output of the source
node to the input of the analyzer node, and the output of the analyzer node to
the audio context's `destination` node that routes to the computer's speakers.
Our audio processing graph looks like this:

{{< railroad_diagram id="audioContextDiagram" >}}
return rr.Diagram(
  rr.Sequence(
    rr.Terminal("<audio>"),
    rr.Terminal("Analyzer"),
    rr.Terminal("destination")));
{{< / railroad_diagram >}}

By itself the AudioContext doesn't actually play any audio. I'll tackle that
next.

## Playing Audio

Next up is starting playback. The following snippet creates a Play button using
P5.js's DOM manipulation API, and hooks up the button's `click` event to start
and stop playback.

```js {linenostart=29}
const playPauseButton = p.createButton('Play');
playPauseButton.position(10, 10);

const playPauseButtonElement = playPauseButton.elt;
playPauseButtonElement.dataset.playing = 'false';
playPauseButtonElement.addEventListener('click', function() {
  if (audioContext.state === 'suspended') {
    audioContext.resume();
  }

  if (this.dataset.playing === 'false') {
    audioElement.play();
    this.dataset.playing = 'true';
    this.innerHTML = '<span>Pause</span>';
  } else if (this.dataset.playing === 'true') {
    audioElement.pause();
    this.dataset.playing = 'false';
    this.innerHTML = '<span>Play</span>';
  }
});
```
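
The `state === 'suspended'` check is there because browsers generally create an
`AudioContext` in the `suspended` state until the user interacts with the page;
calling `resume()` from inside a click handler satisfies that autoplay policy.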

Something I found odd while working with these audio components is that there
isn't a way to ask any of them whether audio is playing back at any given
moment. Instead it is up to the script to listen for the appropriate
[events][mdn-audio-tag-events] and track playback state itself.

If this snippet looks a little convoluted, that's why.

To track playback status, I decided to set a `playing` property on the button's
`dataset` that indicates whether to call `audioElement.play()` or
`audioElement.pause()`, and to set the label of the button appropriately.
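
As an aside, the same bookkeeping could be driven by the element's own `play`
and `pause` events instead of being toggled inside the click handler. A minimal
sketch of that variant (not what the code above does):

```js
// Alternative: let the <audio> element report when playback actually starts
// and stops, and mirror that state onto the button.
audioElement.addEventListener('play', () => {
  playPauseButtonElement.dataset.playing = 'true';
  playPauseButtonElement.innerHTML = '<span>Pause</span>';
});
audioElement.addEventListener('pause', () => {
  playPauseButtonElement.dataset.playing = 'false';
  playPauseButtonElement.innerHTML = '<span>Play</span>';
});
```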

The last bit of playback state tracking to do is to listen for when playback
stops because the end of the audio file was reached. I did that with the `ended`
event:

```js {linenostart=53}
audioElement.addEventListener('ended', function() {
  playPauseButtonElement.dataset.playing = 'false';
  playPauseButtonElement.innerHTML = '<span>Play</span>';
}, false);
```

This handler resets the `playing` flag and the label of the button.

## The Sketch

Now it's time to draw some waveforms! The main part of a P5 sketch is the `draw` method. Here's mine:

```js {linenostart=57}
const amplitude = p.height / 2;
const axis = p.height / 2;

const blue = p.color(24, 62, 140);
const purple = p.color(255, 0, 255);

p.background(255);

if (analyzerNode) {
  analyzerNode.getFloatTimeDomainData(samples);
}

for (let i = 0; i < samples.length; i++) {
  const sampleValue = samples[i];
  const absSampleValue = Math.abs(sampleValue);

  const weight = p.lerp(2, 12, 1.5 * absSampleValue);
  p.strokeWeight(sampleValue === 0 ? 1 : weight);
  p.stroke(p.lerpColor(blue, purple, absSampleValue));

  p.point(i, axis + amplitude * sampleValue);
}
```

The most interesting part of this function starts at line 66, where we get an array of samples from the analyzer node. The `samples` variable is a JavaScript `Float32Array`, with one element for each pixel of width.

```js {linenos=false}
samples = new Float32Array(p.width);
```
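
One caveat worth noting in passing (based on how `AnalyserNode` is specified,
not something this sketch runs into at typical widths): `getFloatTimeDomainData`
only fills as many elements as the analyser's `fftSize`, which defaults to 2048,
so on a canvas wider than that the rightmost samples would stay at zero. Raising
the FFT size is one way around it:

```js
// Hypothetical tweak, not in the original sketch: fftSize must be a power of
// two between 32 and 32768, so pick one at least as large as the canvas width.
analyzerNode.fftSize = 4096;
samples = new Float32Array(p.width);
```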

Once the sample data is populated from the analyzer, we can render it by
plotting each sample along the X axis, scaling its value to the height of the
sketch.
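
For example, the canvas in this sketch is 250 pixels tall, so `amplitude` and
`axis` are both 125, and a sample of +0.4 lands at y = 125 + 125 × 0.4 = 175,
a bit below the center line (canvas y coordinates grow downward).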

I also vary the weight (size) of each point and its color by interpolating
between two sizes and two colors based on the magnitude of the sample.

[p5]: https://p5js.org
[webaudio]: https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API
[mdn-audio-tag]: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/audio
[mdn-audio-tag-events]: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/audio#events
[mdn-audio-context]: https://developer.mozilla.org/en-US/docs/Web/API/AudioContext
[mdn-webaudio-media-source-tag]: https://developer.mozilla.org/en-US/docs/Web/API/MediaElementAudioSourceNode
[mdn-create-media-element-source]: https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/createMediaElementSource
[mdn-analyzer-node]: https://developer.mozilla.org/en-US/docs/Web/API/AnalyserNode

89  content/blog/2022/03/audio-scope-with-p5js/sketch.js  (new file)
@@ -0,0 +1,89 @@
const oscilloscopeFinal = p => {
  let analyzerNode = null;
  let samples = null;

  // Grab the <audio> element declared in the post body.
  let audioElement = (() => {
    return document.querySelector('audio#amen');
  })();

  // Build the Web Audio graph: <audio> source -> analyser -> speakers.
  let audioContext = (() => {
    const audioContext = new AudioContext();
    const track =
      audioContext.createMediaElementSource(audioElement);
    analyzerNode = audioContext.createAnalyser();

    track.connect(analyzerNode)
      .connect(audioContext.destination);

    return audioContext;
  })();

  p.setup = () => {
    // Size the canvas to its container and move it into place.
    const sketchContainer = document.querySelector('#oscilloscopeFinal');
    const canvasWidth = parseFloat(getComputedStyle(sketchContainer).width);
    let canvas = p.createCanvas(canvasWidth, 250);
    canvas.canvas.removeAttribute('style');
    sketchContainer.appendChild(canvas.canvas);

    p.pixelDensity(p.displayDensity());

    // One sample per pixel of width.
    samples = new Float32Array(p.width);

    const playPauseButton = p.createButton('Play');
    playPauseButton.position(10, 10);

    const playPauseButtonElement = playPauseButton.elt;
    playPauseButtonElement.dataset.playing = 'false';
    playPauseButtonElement.addEventListener('click', function() {
      if (audioContext.state === 'suspended') {
        audioContext.resume();
      }

      if (this.dataset.playing === 'false') {
        audioElement.play();
        this.dataset.playing = 'true';
        this.innerHTML = '<span>Pause</span>';
      } else if (this.dataset.playing === 'true') {
        audioElement.pause();
        this.dataset.playing = 'false';
        this.innerHTML = '<span>Play</span>';
      }
    });

    // Reset the button when the track plays through to the end.
    audioElement.addEventListener('ended', function() {
      playPauseButtonElement.dataset.playing = 'false';
      playPauseButtonElement.innerHTML = '<span>Play</span>';
    }, false);
  };

  p.draw = () => {
    const amplitude = p.height / 2;
    const axis = p.height / 2;

    const blue = p.color(24, 62, 140);
    const purple = p.color(255, 0, 255);

    p.background(255);

    if (analyzerNode) {
      analyzerNode.getFloatTimeDomainData(samples);
    }

    // Plot the waveform: one point per sample, weighted and colored by amplitude.
    for (let i = 0; i < samples.length; i++) {
      const sampleValue = samples[i];
      const absSampleValue = Math.abs(sampleValue);

      const weight = p.lerp(2, 12, 1.5 * absSampleValue);
      p.strokeWeight(sampleValue === 0 ? 1 : weight);
      p.stroke(p.lerpColor(blue, purple, absSampleValue));

      p.point(i, axis + amplitude * sampleValue);
    }
  };

  p.mouseClicked = () => {
    p.clear();
  };
};

new p5(oscilloscopeFinal, 'oscilloscopeFinal');