// sound.js
// Global Variables for Audio
var audioContext;
var sourceNode;
var javascriptNode;
var audioStream;
window.craicAudioContext = (function () {
// prefer the standard AudioContext, falling back to the webkit prefix
return window.AudioContext || window.webkitAudioContext;
})();
navigator.getMedia = (navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
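// For newer browsers the promise-based mediaDevices API replaces these
// prefixed variants. A minimal sketch (assumes navigator.mediaDevices is
// available; not wired into the code below):
//
// navigator.mediaDevices.getUserMedia({ audio: true })
//   .then(setupAudioNodes)
//   .catch(onError);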
$(document).ready(function() {
// 'socket' is assumed to be a socket.io client created elsewhere on the page
socket.on('audio', playSound);
try {
audioContext = new craicAudioContext();
}
catch(e) {
alert('Web Audio API is not supported in this browser');
}
// get the input audio stream and set up the nodes
try {
navigator.getMedia({audio:true}, setupAudioNodes, onError);
} catch (e) {
alert('getUserMedia threw exception: ' + e);
}
$("body").on('click', "#start_button",function(e) {
e.preventDefault();
// called every time a buffer of sampleSize samples has been acquired
javascriptNode.onaudioprocess = function (e) {
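// copy the Float32Array into a plain Array so it serializes cleanly over socket.io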
var bufferArray = Array.prototype.slice.call(e.inputBuffer.getChannelData(0));
socket.emit("audio", bufferArray);
};
});
// Stop recording by setting onaudioprocess to null
$("body").on('click', "#stop_button",function(e) {
e.preventDefault();
javascriptNode.onaudioprocess = null;
});
// Disable audio completely
$("body").on('click', "#disable_audio", function (e) {
e.preventDefault();
javascriptNode.onaudioprocess = null;
// MediaStream.stop() is deprecated; stop the individual tracks where possible
if (audioStream) {
(audioStream.getTracks ? audioStream.getTracks() : [audioStream]).forEach(function (t) { t.stop(); });
}
if (sourceNode) sourceNode.disconnect();
});
});
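// The 'audio' events above assume a server that relays samples between
// clients. A minimal hypothetical sketch with socket.io on Node.js (the
// actual server is not part of this file):
//
// var io = require('socket.io')(3000);
// io.on('connection', function (socket) {
//   socket.on('audio', function (bufferArray) {
//     // forward the raw sample array to every other connected client
//     socket.broadcast.emit('audio', bufferArray);
//   });
// });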
function onError(e) {
console.log(e);
}
function setupAudioNodes(stream) {
var sampleSize = 16384; // number of samples collected per onaudioprocess call
// decreasing this lowers latency; increasing it sends fewer, larger messages
audioStream = stream;
// The nodes are: sourceNode -> javascriptNode -> destination
// create a media stream source node from the microphone stream
sourceNode = audioContext.createMediaStreamSource(audioStream);
// Set up the script processor node - one input and one output channel, i.e. a mono microphone
// (createScriptProcessor is the current name; createJavaScriptNode was its older alias)
javascriptNode = audioContext.createScriptProcessor
? audioContext.createScriptProcessor(sampleSize, 1, 1)
: audioContext.createJavaScriptNode(sampleSize, 1, 1);
// connect the nodes together
sourceNode.connect(javascriptNode);
javascriptNode.connect(audioContext.destination);
}
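// ScriptProcessorNode is deprecated in current browsers; an AudioWorklet is
// the modern replacement. A rough sketch, assuming a hypothetical
// 'recorder-processor.js' worklet that posts each input buffer to its port
// (that file is not part of this repo):
//
// audioContext.audioWorklet.addModule('recorder-processor.js').then(function () {
//   var workletNode = new AudioWorkletNode(audioContext, 'recorder-processor');
//   sourceNode.connect(workletNode);
//   workletNode.port.onmessage = function (event) {
//     socket.emit('audio', Array.prototype.slice.call(event.data));
//   };
// });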
function playSound(receivedBuffer) {
// rebuild a Float32Array from the plain array received over the socket
// (the constructor never returns null, so no null check is needed)
var recording = new Float32Array(receivedBuffer);
// create an AudioBuffer from the recording
// NB: this assumes both clients run at the same audioContext.sampleRate;
// if the rates differ, playback will be pitch-shifted
var audioBuffer = audioContext.createBuffer(1, recording.length, audioContext.sampleRate);
audioBuffer.getChannelData(0).set(recording, 0);
// create a Buffer Source Node with this Buffer
var audioBufferNode = audioContext.createBufferSource();
audioBufferNode.buffer = audioBuffer;
console.log('recording buffer length ' + audioBufferNode.buffer.length);
// connect the node to the destination and play the audio
audioBufferNode.connect(audioContext.destination);
audioBufferNode.start(0);
}
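// The click handlers above assume markup along these lines (hypothetical;
// the page itself is not part of this file):
//
// <button id="start_button">Start</button>
// <button id="stop_button">Stop</button>
// <button id="disable_audio">Disable audio</button>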