/* jshint node: true */
/* global document: false */
/* global HTMLVideoElement: false */
/* global HTMLAudioElement: false */
/* global window: false */
'use strict';
var raf = require('cog/raf');
/**
# rtc-audioproc
This is a small helper module that renders a waveform visualization of the
audio from an audio or video element, or from a getUserMedia stream, onto a
canvas. It uses the Web Audio API. This can be useful for displaying audio
visually, or for tracking down why a video or audio element does not behave
as you expect.
BROWSER SUPPORT:
* Chrome is the only browser that has support for Web Audio & WebRTC
* works in Chrome for getUserMedia(), Audio(), audio and video elements
* broken in Chrome for PeerConnection, see https://code.google.com/p/chromium/issues/detail?id=121673
## Usage with WebRTC
This was primarily written to work with the
[rtc-media](https://github.com/rtc-io/rtc-media) library so here's an
example of how it works there:
<<< examples/rtc-media.js
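If you don't have the bundled example handy, the shape is roughly as follows.
This is only a sketch: it assumes rtc-media emits a capture event carrying the
MediaStream, so check the rtc-media docs for the exact API:

    var media = require('rtc-media');
    var waveform = require('rtc-audioproc');
    var localvideo = media();

    // render the captured video into the page
    localvideo.render(document.body);

    // once capture completes, hand the MediaStream to the waveform
    localvideo.once('capture', function(stream) {
      waveform(document.querySelector('video'), { stream: stream });
    });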
## Usage with Audio interface
This example shows how to pipe an audio file into the waveform display.
The canvas will be added to the body element unless you provide a different
element to attach it to.
<<< examples/new-audio.js
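If you don't have the example handy, usage along these lines should work
(the audio file path is just a placeholder):

    var waveform = require('rtc-audioproc');

    // create an Audio instance for a local file and visualize it,
    // routing the audio to the speakers as well (play: true)
    var audio = new Audio('media/sample.mp3');
    waveform(audio, { play: true });
    audio.play();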
## Usage with a media element
This example shows how to create a waveform display for a video element.
It also shows how to attach the waveform to a separate element.
<<< examples/video-element.js
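Again as a sketch (the container id here is hypothetical):

    var waveform = require('rtc-audioproc');

    var video = document.querySelector('video');
    var container = document.getElementById('waveform-container');

    // draw the waveform into a separate container and keep the audio audible
    waveform(video, { attach: container, play: true });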
## Parameters for waveform
* target : media element or Audio instance
* opts:
  * width : the width of the canvas
  * height : the height of the canvas
  * stream : if you're using WebRTC, you need to hand in the MediaStream directly (see the sketch below)
  * play : if set to true, also route the audio to the output device
  * attach : element to which the canvas will be added as a child
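As a sketch of how the stream option fits together with getUserMedia (using
the prefixed Chrome APIs, per the browser support notes above):

    var waveform = require('rtc-audioproc');

    navigator.webkitGetUserMedia({ audio: true, video: true }, function(stream) {
      var video = document.createElement('video');

      // show the captured video and hand the stream to the waveform
      video.src = window.URL.createObjectURL(stream);
      video.autoplay = true;
      document.body.appendChild(video);

      waveform(video, { stream: stream, width: 640, height: 80 });
    }, function(err) {
      console.error('could not capture media: ', err);
    });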
## Running the examples
You can use [beefy](http://didact.us/beefy/) to run the examples, e.g.

    $ beefy examples/rtc-media.js
**/
module.exports = function(target, opts) {
  var canvas = document.createElement('canvas');
  var media = (target instanceof HTMLVideoElement ||
    target instanceof HTMLAudioElement) ?
      target :
      document.createElement('video');
  var attach = (opts || {}).attach;

  // attach the canvas to the DOM
  if (attach) {
    // if an attach element has been provided, use that
    attach.appendChild(canvas);
  } else if (target === media && media.parentNode) {
    // otherwise, append the canvas to the media element's parent
    media.parentNode.appendChild(canvas);
  } else {
    // fallback: append the canvas to the document body
    var body = document.getElementsByTagName('body')[0];
    body.appendChild(canvas);
  }

  // initialise the canvas width and height
  canvas.width = (opts || {}).width || 480;
  canvas.height = (opts || {}).height || 100;
  canvas.style.border = 'red 1px solid';

  // initialise the canvas pipeline once media metadata is available
  if (media.readyState >= 1) {
    createWaveform(canvas, media, opts);
  } else {
    media.addEventListener('loadedmetadata', function() {
      createWaveform(canvas, media, opts);
    });
  }

  return canvas;
};
/*
  ### createWaveform(canvas, media, opts)

  Push the audio through the Web Audio pipeline and render the waveform
  into the canvas on every animation frame.
*/
function createWaveform(canvas, media, opts) {
  var analyser;
  var audioContext;
  var mediaStreamSource;

  // initialise the MediaStream if one has been provided; accept either the
  // stream itself or an object that exposes a stream property
  var stream = (opts || {}).stream || null;
  if (stream && stream.stream) {
    stream = stream.stream;
  }

  var context = canvas.getContext('2d');

  // convert from stereo to mono
  function convertToMono(input) {
    // split the input into its two channels
    var splitter = audioContext.createChannelSplitter(2);

    // prepare a merger with 2 inputs
    var merger = audioContext.createChannelMerger(2);

    // connect the input stream to the splitter
    input.connect(splitter);

    // route the splitter's first channel to both merger inputs
    splitter.connect(merger, 0, 0);
    splitter.connect(merger, 0, 1);

    // return the merged (mono) stream
    return merger;
  }

  function draw() {
    var width, height, waveData, barCount, barHeight, loopStep, value;

    width = canvas.width;
    height = canvas.height;

    // create an array to hold the time domain samples
    waveData = new Uint8Array(analyser.fftSize);

    // retrieve the data; unsigned byte values centred on 128 (silence)
    analyser.getByteTimeDomainData(waveData);

    context.clearRect(0, 0, width, height);
    barCount = Math.round(width);
    loopStep = Math.floor(waveData.length / barCount);

    for (var i = 0; i < barCount; i++) {
      value = waveData[i * loopStep];

      // scale the deviation from silence to the canvas height, then
      // draw a bar centred on the middle of the canvas
      barHeight = (1.0 - value / 128) * (height / 2) + 1;
      context.fillRect(i, (height / 2) - barHeight, 1, 2 * barHeight);
    }

    raf(draw);
  }

  // create the audio context (prefixed in older Chrome builds)
  audioContext = new (window.AudioContext || window.webkitAudioContext)();

  // create a source node from the stream (WebRTC) or the media element
  if (stream) {
    mediaStreamSource = audioContext.createMediaStreamSource(stream);
  } else {
    mediaStreamSource = audioContext.createMediaElementSource(media);
  }

  // route the source through a mono converter
  var monoStream = convertToMono(mediaStreamSource);

  // route the mono stream into an analyser node
  analyser = audioContext.createAnalyser();
  monoStream.connect(analyser);

  // start the drawing loop
  draw();

  // route audio to the output device if playback has been requested
  if ((opts || {}).play) {
    analyser.connect(audioContext.destination);
  }
}