HTML5 Audio API示例
汇总记录,非常感谢各位老师、大牛们的分享:
示例一和示例二 参考尚硅谷老师的视频教程;
示例三参考链接:https://blog.csdn.net/u011354613/article/details/51272907
示例一:纯js播放自定义音乐
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>首页</title>
<style>
</style>
</head>
<body>
<button id="button">两只老虎</button>
<script type="text/javascript">
window.AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext;
(function () {
    // Bail out early when the browser has no Web Audio implementation.
    if (!window.AudioContext) {
        alert("当前浏览器不支持哟!");
        return;
    }
    var playButton = document.getElementById("button");
    var audioCtx = new AudioContext();
    // Frequencies (Hz) of the melody's notes; one note is played per hover.
    var noteFrequencies = [261.63, 293.66, 329.63, 261.63, 261.63, 293.66, 329.63, 261.63, 329.63,
        349.23, 392.00, 329.63, 349.23, 329.00, 329.00, 440.00, 329.00, 349.23, 329.63, 261.63, 329.00,
        440.00, 329.00, 349.23, 329.63, 261.63, 293.66, 196.00, 261.63, 293.66, 196.00, 261.63];
    var noteIndex = 0;
    playButton.addEventListener('mouseenter', function () {
        // Wrap back to the first note once the melody has finished.
        var frequency = noteFrequencies[noteIndex % noteFrequencies.length];
        noteIndex++;
        // The oscillator produces the tone; the gain node controls its volume.
        var oscillator = audioCtx.createOscillator();
        var gainNode = audioCtx.createGain();
        // Wire the graph: oscillator -> gain -> speakers.
        oscillator.connect(gainNode);
        gainNode.connect(audioCtx.destination);
        // Pure sine wave at the note's pitch.
        oscillator.type = "sine";
        oscillator.frequency.value = frequency;
        // Quick linear fade-in (0 -> 1 over 10 ms) to avoid a click on start...
        gainNode.gain.setValueAtTime(0, audioCtx.currentTime);
        gainNode.gain.linearRampToValueAtTime(1, audioCtx.currentTime + 0.01);
        oscillator.start(audioCtx.currentTime);
        // ...then an exponential fade-out down to 0.01 within half a second,
        // and stop the oscillator when the fade-out completes.
        gainNode.gain.exponentialRampToValueAtTime(0.01, audioCtx.currentTime + 0.5);
        oscillator.stop(audioCtx.currentTime + 0.5);
    });
})();
</script>
</body>
</html>
示例二:钢琴八音盒
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>钢琴8音盒</title>
<style>
html,body{
margin: 0;
padding: 0;
}
.keys{
margin: 20px auto;
background: #F8F8F8;
padding: 10px;
font-size: 0;
text-align: center;
}
.item-key{
float: left;
width: 40px;
height: 160px;
background: #fff;
border: 1px solid #333;
z-index: 1;
}
.active{
box-shadow: 0 -2px 3px #333 inset;
background: linear-gradient(to bottom,#fff,#ddd);
}
.area{
display: inline-block;
}
.black{
background: #333333;
color: #fff;
height: 80px;
width: 20px;
margin-left: -11px;
margin-right: -11px;
z-index: 2;
position: relative;
}
.black .active{
box-shadow: 0 -2px 3px #fff inset;
background: linear-gradient(#000,#ddd);
}
</style>
</head>
<body>
<div class="keys">
</div>
<script src="./assets/global/plugins/jquery.min.js" type="text/javascript"></script>
<script>
// Shared AudioContext used for every key press on the keyboard.
var audioCtx = new AudioContext();
// Four octaves of 12 semitone frequencies (Hz), C major layout starting at
// middle C (261.63 Hz); each inner array is one octave rendered as one key group.
const voiceList = [
[261.63,277.18,293.66,311.13,329.63,349.23,369.99,392,415.3,440,466.16,493.88],
[523.25,554.37,587.33,622.25,659.26,698.46,739.99,783.99,830.61,880,932.33,987.77],
[1046.5,1108.7,1174.7,1244.5,1318.5,1396.9,1480,1568,1661.2,1760,1864.7,1975.5],
[2093,2217.5,2349.3,2489,2637,2793.8,2960,3136,3322.4,3520,3729.3,3951.1]
];
/**
 * Renders the keyboard: one .area span per octave, one .item-key anchor per
 * semitone, with the note's frequency stored in data-frequency.
 * Fixes: collapses the duplicated branch (only the class differed) and uses
 * strict comparison via a Set instead of a chain of `==` tests.
 */
function buildKey(){
    // Semitone indexes within an octave that are black keys (C#, D#, F#, G#, A#).
    const blackKeyIndexes = new Set([1, 3, 6, 8, 10]);
    let html = [];
    voiceList.forEach(octave => {
        html.push('<span class="area">');
        octave.forEach((frequency, index) => {
            const cls = blackKeyIndexes.has(index) ? 'item-key black' : 'item-key';
            html.push(`<a href="javascript:;" class="${cls}" data-frequency="${frequency}">${index + 1}</a>`);
        });
        html.push('</span>');
    });
    $('.keys').html(html.join(''));
}
/**
 * Mouse bindings for desktop browsers: pressing a key plays its note, and
 * dragging across keys while the button is held plays each key entered
 * (glissando). Releasing the button anywhere stops all sounding keys.
 *
 * Fixes the original wiring, which re-bound mousemove/mouseenter/mouseleave/
 * mouseup handlers on `document` on EVERY mousedown without ever removing
 * them all (unbounded handler accumulation), and called `$(this).off()` on a
 * key element even though the handlers were delegated on `document` (no-op).
 */
function bind(){
    // True while a mouse button is held down on the keyboard.
    let isPressed = false;
    $(document).on('mousedown', '.item-key', function () {
        isPressed = true;
        showVoice.call(this);
        // Prevent text selection / native drag while playing.
        return false;
    }).on('mouseenter', '.item-key', function () {
        if (isPressed) {
            showVoice.call(this);
        }
    }).on('mouseleave', '.item-key', function () {
        if (isPressed) {
            stopVoice.call(this);
        }
    }).on('mouseup', function () {
        isPressed = false;
        // Release every key that is still sounding, wherever the pointer is.
        $('.item-key.active').each(function () {
            stopVoice.call(this);
        });
    });
}
/**
 * Touch bindings for mobile browsers: touching a key plays its note and
 * lifting the finger (or having the touch cancelled) stops it.
 *
 * Fixes the original wiring, which re-bound mouse handlers on `document` on
 * every touchstart — dead code on touch devices (mouseenter/mouseleave are
 * not emitted for touches) that leaked listeners which were never removed.
 */
function bindM(){
    $(document).on('touchstart', '.item-key', function () {
        showVoice.call(this);
    }).on('touchend touchcancel', '.item-key', function () {
        stopVoice.call(this);
    });
}
/**
 * Starts the tone for the key bound to `this` (a .item-key element).
 * The oscillator and gain node are stored on the DOM element itself so that
 * stopVoice() can later fade out and release exactly this key's tone.
 */
function showVoice(){
    var frequency = $(this).data('frequency');
    $(this).addClass('active');
    // Silence and retire any tone still attached to this key from a
    // previous press that was never released.
    if (this.gainNode) {
        this.gainNode.gain.setValueAtTime(0, audioCtx.currentTime);
    }
    if (this.oscillator) {
        this.oscillator.stop(audioCtx.currentTime + 1);
    }
    // Build a fresh oscillator -> gain -> speakers chain for this press.
    var osc = audioCtx.createOscillator();
    var gain = audioCtx.createGain();
    osc.connect(gain);
    gain.connect(audioCtx.destination);
    // Sine wave at this key's pitch.
    osc.type = "sine";
    osc.frequency.value = frequency;
    // Start silent, then fade in linearly over 10 ms to avoid a click.
    gain.gain.setValueAtTime(0, audioCtx.currentTime);
    osc.start(audioCtx.currentTime);
    gain.gain.linearRampToValueAtTime(1, audioCtx.currentTime + 0.01);
    this.oscillator = osc;
    this.gainNode = gain;
}
/**
 * Releases the key bound to `this`: fades its tone out exponentially over
 * 0.6 s, schedules the oscillator to stop at the same moment, and detaches
 * the audio nodes from the element.
 */
function stopVoice(){
    $(this).removeClass('active');
    var gain = this.gainNode;
    var osc = this.oscillator;
    if (gain) {
        // Exponential decay towards (near) silence — 0.001, since an
        // exponential ramp can never reach exactly 0.
        gain.gain.exponentialRampToValueAtTime(0.001, audioCtx.currentTime + 0.6);
    }
    if (osc) {
        // Stop the tone once the fade-out has completed.
        osc.stop(audioCtx.currentTime + 0.6);
    }
    this.oscillator = this.gainNode = null;
    $(this).off('mouseenter').off('mouseleave');
}
console.log(navigator.userAgent);
buildKey();
// Pick mouse or touch bindings from the user-agent string.
var u = navigator.userAgent;
var isAndroid = u.indexOf('Android') > -1 || u.indexOf('Adr') > -1; // Android devices
// iOS UA strings look like "(iPhone; CPU iPhone OS 16_0 like Mac OS X)".
// The original pattern said "Mac Os X" (wrong case in a case-sensitive
// regex) and therefore NEVER matched, so iOS always got the mouse bindings;
// match case-insensitively instead.
var isioS = /\(i[^;]+;( U;)? CPU.+Mac OS X/i.test(u); // iOS devices
if (isAndroid || isioS) {
    bindM();
} else {
    bind();
}
</script>
</body>
</html>
示例三:选择文件播放
html代码:
<!DOCTYPE html>
<html>
<head>
<meta name="description" content="HTML5 Audio Spectrum Visualizer">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>HTML5 Audio API showcase | Audio visualizer</title>
<link type="text/css" rel="stylesheet" href="style.css">
</head>
<body>
<div id="wrapper">
<div id="fileWrapper" class="file_wrapper">
<div id="info">
HTML5 Audio API showcase | An Audio Viusalizer
</div>
<label for="uploadedFile">Drag&drop or select a file to play:</label>
<input type="file" id="uploadedFile">
</div>
<div id="visualizer_wrapper">
<canvas id='canvas' width="800" height="350"></canvas>
</div>
</div>
<footer>
<small>Star me on <a href="https://github.com/Wayou/HTML5_Audio_Visualizer" target="_blank">GitHub</a></small>
</footer>
<script type="text/javascript" src="html5_audio_visualizer.js"></script>
</body>
</html>
js代码:
/**
* An audio spectrum visualizer built with HTML5 Audio API
* Author:Wayou
* License: MIT
* Feb 15, 2014
*/
// Boot the visualizer once the page (and its DOM elements) has finished loading.
window.onload = function() {
new Visualizer().ini();
};
/**
 * Audio spectrum visualizer state container.
 * All fields start at their "idle" values; they are filled in once the user
 * selects or drops an audio file.
 */
var Visualizer = function() {
    this.file = null;               // File object currently selected/dropped
    this.fileName = null;           // display name of that file
    this.audioContext = null;       // created in _prepareAPI()
    this.source = null;             // active AudioBufferSourceNode while playing
    this.infoUpdateId = null;       // setTimeout id driving the "..." animation
    this.animationId = null;        // requestAnimationFrame id for the meters
    this.status = 0;                // 1 while a sound is playing, 0 otherwise
    this.forceStop = false;         // set when a new file interrupts playback
    this.allCapsReachBottom = false;
    // Remember the initial banner markup so it can be restored later.
    this.info = document.getElementById('info').innerHTML;
};
Visualizer.prototype = {
// Entry point: resolve vendor-prefixed browser APIs, then wire up the
// file-input and drag-and-drop UI.
ini: function() {
this._prepareAPI();
this._addEventListner();
},
_prepareAPI: function() {
// Normalize vendor-prefixed implementations onto the standard names so the
// rest of the code can use AudioContext and (request|cancel)AnimationFrame directly.
window.AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.msAudioContext;
window.requestAnimationFrame = window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame || window.msRequestAnimationFrame;
window.cancelAnimationFrame = window.cancelAnimationFrame || window.webkitCancelAnimationFrame || window.mozCancelAnimationFrame || window.msCancelAnimationFrame;
try {
this.audioContext = new AudioContext();
} catch (e) {
// Surface the failure in the UI instead of dying silently; this.audioContext
// stays null and the event handlers check for that before starting.
this._updateInfo('!Your browser does not support AudioContext', false);
console.log(e);
}
},
// Wires the file <input> and the canvas drag-and-drop area to the visualizer.
_addEventListner: function() {
var that = this,
audioInput = document.getElementById('uploadedFile'),
dropContainer = document.getElementsByTagName("canvas")[0];
// Start the visualizer when a file is picked via the file input.
audioInput.onchange = function() {
if (that.audioContext===null) {return;};
// onchange also fires when the selection dialog is cancelled; in that case
// files is empty and there is nothing to do.
if (audioInput.files.length !== 0) {
// Only the first selected file is processed.
that.file = audioInput.files[0];
that.fileName = that.file.name;
if (that.status === 1) {
// A sound is still playing while a new file arrives: flag a forced stop
// so _audioEnd keeps the "playing" status for the new file.
that.forceStop = true;
};
document.getElementById('fileWrapper').style.opacity = 1;
that._updateInfo('Uploading', true);
// The file is ready: read, decode and visualize it.
that._start();
};
};
// Drag-and-drop support on the canvas.
dropContainer.addEventListener("dragenter", function() {
document.getElementById('fileWrapper').style.opacity = 1;
that._updateInfo('Drop it on the page', true);
}, false);
dropContainer.addEventListener("dragover", function(e) {
e.stopPropagation();
e.preventDefault();
// Show the "copy" cursor while dragging over the drop target.
e.dataTransfer.dropEffect = 'copy';
}, false);
dropContainer.addEventListener("dragleave", function() {
document.getElementById('fileWrapper').style.opacity = 0.2;
that._updateInfo(that.info, false);
}, false);
dropContainer.addEventListener("drop", function(e) {
e.stopPropagation();
e.preventDefault();
if (that.audioContext===null) {return;};
document.getElementById('fileWrapper').style.opacity = 1;
that._updateInfo('Uploading', true);
// Take the dropped file and start the visualizer, mirroring onchange above.
that.file = e.dataTransfer.files[0];
if (that.status === 1) {
document.getElementById('fileWrapper').style.opacity = 1;
that.forceStop = true;
};
that.fileName = that.file.name;
that._start();
}, false);
},
_start: function() {
//read and decode the file into audio array buffer
var that = this,
file = this.file,
fr = new FileReader();
fr.onload = function(e) {
var fileResult = e.target.result;
var audioContext = that.audioContext;
if (audioContext === null) {
return;
};
that._updateInfo('Decoding the audio', true);
audioContext.decodeAudioData(fileResult, function(buffer) {
that._updateInfo('Decode succussfully,start the visualizer', true);
that._visualize(audioContext, buffer);
}, function(e) {
that._updateInfo('!Fail to decode the file', false);
console.error(e);
});
};
fr.onerror = function(e) {
that._updateInfo('!Fail to read the file', false);
console.error(e);
};
//assign the file to the reader
this._updateInfo('Starting read the file', true);
fr.readAsArrayBuffer(file);
},
_visualize: function(audioContext, buffer) {
var audioBufferSouceNode = audioContext.createBufferSource(),
analyser = audioContext.createAnalyser(),
that = this;
//connect the source to the analyser
audioBufferSouceNode.connect(analyser);
//connect the analyser to the destination(the speaker), or we won't hear the sound
analyser.connect(audioContext.destination);
//then assign the buffer to the buffer source node
audioBufferSouceNode.buffer = buffer;
//play the source
if (!audioBufferSouceNode.start) {
audioBufferSouceNode.start = audioBufferSouceNode.noteOn //in old browsers use noteOn method
audioBufferSouceNode.stop = audioBufferSouceNode.noteOff //in old browsers use noteOff method
};
//stop the previous sound if any
if (this.animationId !== null) {
cancelAnimationFrame(this.animationId);
}
if (this.source !== null) {
this.source.stop(0);
}
audioBufferSouceNode.start(0);
this.status = 1;
this.source = audioBufferSouceNode;
audioBufferSouceNode.onended = function() {
that._audioEnd(that);
};
this._updateInfo('Playing ' + this.fileName, false);
this.info = 'Playing ' + this.fileName;
document.getElementById('fileWrapper').style.opacity = 0.2;
this._drawSpectrum(analyser);
},
_drawSpectrum: function(analyser) {
var that = this,
canvas = document.getElementById('canvas'),
cwidth = canvas.width,
cheight = canvas.height - 2,
meterWidth = 10, //width of the meters in the spectrum
gap = 2, //gap between meters
capHeight = 2,
capStyle = '#fff',
meterNum = 800 / (10 + 2), //count of the meters
capYPositionArray = []; ////store the vertical position of hte caps for the preivous frame
ctx = canvas.getContext('2d'),
gradient = ctx.createLinearGradient(0, 0, 0, 300);
gradient.addColorStop(1, '#0f0');
gradient.addColorStop(0.5, '#ff0');
gradient.addColorStop(0, '#f00');
var drawMeter = function() {
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
if (that.status === 0) {
//fix when some sounds end the value still not back to zero
for (var i = array.length - 1; i >= 0; i--) {
array[i] = 0;
};
allCapsReachBottom = true;
for (var i = capYPositionArray.length - 1; i >= 0; i--) {
allCapsReachBottom = allCapsReachBottom && (capYPositionArray[i] === 0);
};
if (allCapsReachBottom) {
cancelAnimationFrame(that.animationId); //since the sound is stoped and animation finished, stop the requestAnimation to prevent potential memory leak,THIS IS VERY IMPORTANT!
return;
};
};
var step = Math.round(array.length / meterNum); //sample limited data from the total array
ctx.clearRect(0, 0, cwidth, cheight);
for (var i = 0; i < meterNum; i++) {
var value = array[i * step];
if (capYPositionArray.length < Math.round(meterNum)) {
capYPositionArray.push(value);
};
ctx.fillStyle = capStyle;
//draw the cap, with transition effect
if (value < capYPositionArray[i]) {
ctx.fillRect(i * 12, cheight - (--capYPositionArray[i]), meterWidth, capHeight);
} else {
ctx.fillRect(i * 12, cheight - value, meterWidth, capHeight);
capYPositionArray[i] = value;
};
ctx.fillStyle = gradient; //set the filllStyle to gradient for a better look
ctx.fillRect(i * 12 /*meterWidth+gap*/ , cheight - value + capHeight, meterWidth, cheight); //the meter
}
that.animationId = requestAnimationFrame(drawMeter);
}
this.animationId = requestAnimationFrame(drawMeter);
},
_audioEnd: function(instance) {
if (this.forceStop) {
this.forceStop = false;
this.status = 1;
return;
};
this.status = 0;
var text = 'HTML5 Audio API showcase | An Audio Viusalizer';
document.getElementById('fileWrapper').style.opacity = 1;
document.getElementById('info').innerHTML = text;
instance.info = text;
document.getElementById('uploadedFile').value = '';
},
_updateInfo: function(text, processing) {
var infoBar = document.getElementById('info'),
dots = '...',
i = 0,
that = this;
infoBar.innerHTML = text + dots.substring(0, i++);
if (this.infoUpdateId !== null) {
clearTimeout(this.infoUpdateId);
};
if (processing) {
//animate dots at the end of the info text
var animateDot = function() {
if (i > 3) {
i = 0
};
infoBar.innerHTML = text + dots.substring(0, i++);
that.infoUpdateId = setTimeout(animateDot, 250);
}
this.infoUpdateId = setTimeout(animateDot, 250);
};
}
}
css代码:
html, body {
margin: 0;
font-family: arial, "Microsoft YaHei";
background-color: #272822;
color: #FEFEFE;
}
#fileWrapper{
transition:all 0.5s ease;
}
#fileWrapper:hover{
opacity: 1!important;
}
#visualizer_wrapper{
text-align: center;
}
footer{
position: fixed;
bottom: 2px;
color:#aaa;
}
相关推荐
HTML5 API调用示例是现代网页开发中的关键部分,它极大地扩展了Web应用程序的功能,使其可以实现更多原生应用的特性。W3C(万维网联盟)是定义HTML5标准的权威组织,它的API规范为开发者提供了丰富的工具集,以创建...
这个“html5 audio demo”是一个示例,展示了如何使用HTML5的`<audio>`标签来创建自定义样式的音频播放器,并实现播放控制功能。 一、HTML5 `<audio>` 标签 `<audio>`标签是HTML5引入的新元素,用于在网页上嵌入...
本压缩包“HTML5经典示例”包含7个精心挑选的示例,旨在展示HTML5的强大之处,以及如何通过它来实现各种华丽的视觉效果。 1. **Canvas画布** HTML5的Canvas元素提供了在网页上动态绘制图形的能力。通过JavaScript...
以上只是HTML5 API中的一部分,实际的文档会更详细地介绍每个API的用法、示例代码和兼容性信息。"html5参考手册.chm"应该包含了这些内容,对于开发者来说,是一个宝贵的资源。而"提醒.txt"可能是关于如何使用或注意...
AudioJS是一个基于HTML5 Audio API的JavaScript库,旨在帮助开发者更方便地控制网页中的音乐播放器,包括开关声音、播放、暂停等操作。在这个主题中,我们将深入探讨HTML5的Audio元素以及AudioJS库在实现网页音乐...
这个"bang-audio-master"压缩包很可能包含了实现一个自定义HTML5 Audio播放器的源代码或者示例。 在JavaScript开发中,HTML5 Audio API是一个重要的部分,它提供了一系列的接口和方法,允许我们控制音频的播放、...
HTML5是超文本标记语言(HyperText Markup Language)的第五个重大版本,它引入了许多新的API和HTML元素,旨在提高网页...文件可能包含了更详尽的HTML5规范、示例代码和最佳实践,对于深入理解和应用HTML5非常有帮助。
这个“最新Html5中文API”CHM文件很可能是包含了上述所有或部分API的详细说明和示例,对于开发者来说,是一个非常有价值的参考资料。有了这个中文版的API,中国开发者可以更轻松地查找和学习HTML5的相关信息,提升...
HTML5的Audio API是现代网页开发中用于处理音频内容的核心技术。audio.js是一个JavaScript库,它扩展了HTML5 Audio的功能,使得在网页上播放、控制音频变得更加简单和灵活。在这个项目中,我们将深入探讨如何使用...
这个"HEML5 AUDIO"的案例源码提供了一个实践学习的机会,让你深入了解如何使用HTML5的Audio API来处理和控制音频播放。 首先,我们要知道HTML5 Audio的核心是`<audio>`标签。这是一个内建的HTML元素,用于嵌入音频...
在压缩包中,"HTML5 资料"可能包含了关于HTML5新标签的详细解释、移动端开发的示例代码、教程或者相关工具的介绍。通过学习这些资料,开发者可以深入理解HTML5的新特性和如何将其应用于移动端开发,从而提升开发效率...
这份中文帮助文档详细解释了这些API的使用方法、示例代码以及常见问题解答,对于HTML5开发者来说是一份宝贵的参考资料。通过学习和实践,开发者可以充分利用HTML5 API来创建功能强大、用户体验优秀的现代网页应用。
此外,HTML5还支持本地存储(Web Storage)、离线存储(App Cache)、拖放功能(Drag and Drop)以及媒体元素(如`<audio>`和`<video>`),使得在浏览器中处理多媒体内容变得更加便捷。 **CSS3.0** 则是层叠样式表...
此外,离线储存、拖放功能、画布绘图、SVG矢量图、WebRTC(实时通信)、Web Audio API等都是HTML5的重要组成部分,它们共同构建了一个更强大、更灵活的Web开发环境。 此中文手册应该详细涵盖了这些知识点,并可能...
这个“宇宙最全html5官方Javascript API文档”不仅包含了以上提到的API,还可能包括更多细节和示例,帮助开发者深入理解并熟练运用HTML5的JavaScript功能。无论你是初学者还是经验丰富的开发者,这份文档都能作为一...
高保真空间音频API示例该存储库包含使用示例项目。 这些示例项目扩展了您可以在找到的API演练指南。 我们希望您的贡献! 如果您有兴趣提交自己的示例应用程序,请针对此存储库打开请求请求。examples目录 :smiling_...
#### 三、HTML5 API示例 ##### 1. 2D绘图API - 这个API与HTML5的`<canvas>`元素结合使用,可以实现在网页上绘制各种图形、动画等功能。 ##### 2. 离线Web应用缓存API - 该API支持Web应用在离线状态下运行,通过...
此外,HTML5还引入了Web Socket API,实现了双向通信,使实时通信如聊天应用、在线游戏等变得更加容易。 总的来说,“HTML5示例汇总”将涵盖以上提及的各种特性,并通过实例展示如何在实际项目中应用它们。初学者...
在这个"HTML5视频播放器美化"项目中,`index.html`可能是实际的示例页面,`readme.htm`可能包含了项目介绍和使用说明,而`开优网络.txt`和`.url`文件可能与项目开发者或提供者有关。`dist`目录通常包含编译或打包后...
14. **Biolab Disaster**:这个名字暗示了一个生物实验室主题的游戏,利用HTML5的Gamepad API和Audio API增加沉浸感。 15. **Google Gravity**:可能是一个模拟重力效果的实验,利用HTML5的MutationObserver API...