index.html
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Audio Amplitude Visualization</title>
    <style>
      #visualization {
        width: 200px;
        height: 200px;
        background-color: #3498db;
        /* Animate the transform, which is what the script actually updates */
        transition: transform 0.2s ease-out;
        margin: 10px auto;
        border-radius: 100%;
        transform: scale(0.75);
      }
    </style>
  </head>
  <body>
    <input type="file" id="audioFileInput" accept="audio/*" />
    <div id="visualization"></div>
    <script>
      // Initialize the AudioContext once when the page loads
      // (webkitAudioContext covers older Safari releases)
      const audioContext = new (window.AudioContext ||
        window.webkitAudioContext)();
      let analyser;
      document
        .getElementById("audioFileInput")
        .addEventListener("change", handleFileSelect);
      let audioIsPlaying = false;
      let animationFrameId;
      let audioSource;
      let recognition;
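
      // Data flow: <input type="file"> → FileReader (ArrayBuffer) →
      // decodeAudioData → AudioBufferSourceNode → AnalyserNode → speakers.
      // The analyser taps the signal so each animation frame can read
      // frequency data without interrupting playback.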
      function handleFileSelect(event) {
        const file = event.target.files[0];
        // If a previous file is still playing, stop it; the source's
        // onended handler takes care of the remaining cleanup
        if (audioIsPlaying) {
          cancelAnimationFrame(animationFrameId);
          audioSource.stop();
          audioIsPlaying = false;
        }
        if (file) {
          // Browsers may create the context "suspended" (autoplay policy);
          // resuming inside this user-gesture handler unlocks it
          if (audioContext.state === "suspended") {
            audioContext.resume();
          }
          analyser = audioContext.createAnalyser();
          const visualizationElement = document.getElementById("visualization");
          const fileReader = new FileReader();
          fileReader.onload = function (e) {
            audioContext.decodeAudioData(e.target.result, function (buffer) {
              audioSource = audioContext.createBufferSource();
              audioSource.buffer = buffer;
              audioSource.connect(analyser);
              analyser.connect(audioContext.destination);
              analyser.fftSize = 256; // frequencyBinCount = fftSize / 2 = 128 bins
              const bufferLength = analyser.frequencyBinCount;
              const dataArray = new Uint8Array(bufferLength);
              // Start playing the audio
              audioSource.start();
              audioIsPlaying = true;
              // Clean up when playback finishes (or is stopped)
              audioSource.onended = () => {
                audioIsPlaying = false;
                cancelAnimationFrame(animationFrameId);
                if (recognition) recognition.stop();
              };
              // Set up speech recognition (vendor-prefixed in Chromium
              // browsers). Note: the Web Speech API transcribes microphone
              // input, not the decoded file, so this captures whatever the
              // mic hears while the audio plays.
              const SpeechRecognitionCtor =
                window.SpeechRecognition || window.webkitSpeechRecognition;
              if (SpeechRecognitionCtor) {
                recognition = new SpeechRecognitionCtor();
                recognition.continuous = true;
                recognition.interimResults = true;
                recognition.onresult = function (event) {
                  const transcript =
                    event.results[event.results.length - 1][0].transcript;
                  // Display the transcript or process it as needed
                  console.log(transcript);
                };
                recognition.onerror = function (event) {
                  console.error("Speech recognition error:", event.error);
                };
                recognition.onend = function () {
                  console.log("Speech recognition ended.");
                };
                // Start recognition once; calling start() on an already
                // running recognizer throws an InvalidStateError
                recognition.start();
              }
              // Update the visualization from the current frequency data
              function updateVisualization() {
                analyser.getByteFrequencyData(dataArray);
                // Average amplitude across all frequency bins (each 0-255)
                const averageAmplitude =
                  dataArray.reduce((sum, value) => sum + value, 0) /
                  bufferLength;
                // Scale the circle with the average amplitude
                visualizationElement.style.transform = `scale(${
                  averageAmplitude / 200
                })`;
                // Schedule the next frame
                animationFrameId = requestAnimationFrame(updateVisualization);
              }
              // Start the visualization loop
              animationFrameId = requestAnimationFrame(updateVisualization);
            });
          };
          fileReader.readAsArrayBuffer(file);
        }
      }
      // Stop playback manually; the source's onended handler then resets
      // state, cancels the animation loop, and stops recognition
      function stopAudio() {
        if (audioIsPlaying) {
          audioSource.stop();
        }
      }
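      // stopAudio() is not wired to any control above. A minimal sketch,
      // assuming a hypothetical <button id="stopButton"> were added to the
      // page (not part of the original markup):
      //   document.getElementById("stopButton")
      //     .addEventListener("click", stopAudio);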
    </script>
  </body>
</html>