怎么把这个实时录音原生js代码变成vue代码(vue2.0那种)?
<!DOCTYPE html><html>
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
<!-- Fixed meta name: the original "apple-mobile-web-capable" is not recognized
     by Safari; the correct name is "apple-mobile-web-app-capable". -->
<meta name="apple-mobile-web-app-capable" content="yes">
<title>录音并传递给后台</title>
</head>
<body>
<button id="intercomBegin">开始对讲</button>
<button id="intercomEnd">关闭对讲</button>
</body>
<script type="text/javascript">
var begin = document.getElementById('intercomBegin');
var end = document.getElementById('intercomEnd');
var ws = null; // WebSocket connection to the audio backend
var record = null; // Recorder instance that wraps the microphone stream
var timeInte; // interval handle started in useWebSocket, cleared in Recorder.stop
// Keep the Recorder instance in the module-level `record` variable so the
// button handlers and the WebSocket callbacks can reach it.
function init(rec) {
record = rec;
}
// Recorder: wraps a microphone MediaStream, downsamples the captured audio to
// 16 kHz / 16-bit mono PCM, and streams it over the module-level WebSocket
// `ws` in packets of at most 1024 bytes.
var Recorder = function (stream) {
    var sampleBits = 16;
    var sampleRate = 16000;
    var context = new AudioContext();
    var audioInput = context.createMediaStreamSource(stream);
    // NOTE(review): ScriptProcessorNode is deprecated in favor of
    // AudioWorklet; kept here for the widest browser compatibility.
    var recorder = context.createScriptProcessor(4096, 1, 1);
    var audioData = {
        size: 0, // total number of buffered samples
        buffer: [], // captured Float32 chunks awaiting encoding
        // Use the AudioContext's real capture rate instead of the original
        // hard-coded 48000 — browsers commonly run at 44100 as well.
        inputSampleRate: context.sampleRate,
        inputSampleBits: 16, // bit depth of the captured samples
        outputSampleRate: sampleRate, // target rate sent to the server
        outputSampleBits: sampleBits, // target bit depth sent to the server
        // Drop all buffered audio.
        clear: function () {
            this.buffer = [];
            this.size = 0;
        },
        // Append one capture buffer. Copy it: the engine reuses the
        // underlying channel buffer between onaudioprocess callbacks.
        input: function (data) {
            this.buffer.push(new Float32Array(data));
            this.size += data.length;
        },
        // Merge the buffered chunks, then naively downsample by keeping
        // every `compression`-th sample.
        compress: function () {
            var data = new Float32Array(this.size);
            var offset = 0;
            for (var i = 0; i < this.buffer.length; i++) {
                data.set(this.buffer[i], offset);
                offset += this.buffer[i].length;
            }
            // Math.floor (not parseInt / raw division): a fractional typed
            // array length throws a RangeError, and 4096-sample buffers are
            // not always divisible by the compression ratio (e.g. 48000/16000 = 3).
            var compression = Math.max(1, Math.floor(this.inputSampleRate / this.outputSampleRate));
            var length = Math.floor(data.length / compression);
            var result = new Float32Array(length);
            var index = 0;
            var j = 0;
            while (index < length) {
                result[index] = data[j];
                j += compression;
                index++;
            }
            return result;
        },
        // Encode the buffered audio as raw little-endian signed 16-bit PCM.
        // No container/format header is added; the server handles that.
        encodePCM: function () {
            var bytes = this.compress();
            var dataLength = bytes.length * (this.outputSampleBits / 8);
            var buffer = new ArrayBuffer(dataLength);
            var data = new DataView(buffer);
            var offset = 0;
            for (var i = 0; i < bytes.length; i++, offset += 2) {
                // Clamp to [-1, 1] before scaling into the int16 range.
                var s = Math.max(-1, Math.min(1, bytes[i]));
                data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
            }
            return new Blob([data]);
        }
    };
    // Read the encoded PCM back as bytes and push it to the WebSocket in
    // packets of at most 1024 bytes, then drop the buffered audio.
    var sendData = function () {
        var reader = new FileReader();
        reader.onload = function (e) {
            var arr = new Int8Array(e.target.result);
            for (var pos = 0; pos < arr.length; pos += 1024) {
                // Sending on a CONNECTING/CLOSED socket throws; guard it.
                if (ws && ws.readyState === WebSocket.OPEN) {
                    ws.send(arr.subarray(pos, pos + 1024));
                }
            }
        };
        reader.readAsArrayBuffer(audioData.encodePCM());
        audioData.clear();
    };
    // Start capturing: microphone -> processor -> destination.
    this.start = function () {
        audioInput.connect(recorder);
        recorder.connect(context.destination);
    };
    // Stop capturing, detach the nodes and clear any pending interval.
    this.stop = function () {
        recorder.disconnect();
        audioInput.disconnect(); // also detach the mic source (was leaked)
        window.clearInterval(timeInte);
        audioData.clear();
    };
    // Return the currently buffered audio as a PCM Blob.
    this.getBlob = function () {
        return audioData.encodePCM();
    };
    // Discard any buffered audio without stopping capture.
    this.clear = function () {
        audioData.clear();
    };
    // Invoked by the browser for every 4096-sample capture buffer.
    recorder.onaudioprocess = function (e) {
        audioData.input(e.inputBuffer.getChannelData(0));
        sendData();
    };
};
/*
* WebSocket
*/
// Open the audio WebSocket; once connected, send the audio_lock command and
// start the recorder.
function useWebSocket() {
    ws = new WebSocket("wss://api.tl.supremind.cloud");
    ws.binaryType = 'arraybuffer'; // we transmit raw binary packets
    ws.onopen = function (event) {
        console.log('连接成功');
        let obj = { "action": "audio_lock", "data": [{ "projectJid": "fe843627233020c110101c8f7e85ba53", "guid": "12c00001363b21cf", "playVolume": 20 }], "requestId": "cf3253b2-e491-4ce0-bf66-4a5bc36d46a1" };
        ws.send(JSON.stringify(obj));
        // Starting once is enough: the original re-ran record.start() every
        // 300 ms, but reconnecting identical AudioNodes is a no-op per the
        // Web Audio spec, so the interval only wasted work.
        if (record) {
            record.start();
        }
    };
    ws.onmessage = function (msg) {
        console.info(msg);
    };
    // The original handler swallowed errors silently; surface them.
    ws.onerror = function (err) {
        console.error('WebSocket error:', err);
    };
}
/*
* 开始对讲
*/
// Ask for microphone access; on success, create the Recorder and open the
// WebSocket. Prefers the modern promise-based navigator.mediaDevices API and
// falls back to the deprecated vendor-prefixed navigator.getUserMedia.
begin.onclick = function () {
    var onStream = function (mediaStream) {
        init(new Recorder(mediaStream));
        console.log('开始对讲');
        useWebSocket();
    };
    var onError = function (error) {
        switch (error.message || error.name) {
            case 'PERMISSION_DENIED':
            case 'PermissionDeniedError':
            case 'NotAllowedError': // modern API name for a denied prompt
                console.info('用户拒绝提供信息。');
                break;
            case 'NOT_SUPPORTED_ERROR':
            case 'NotSupportedError':
                console.info('浏览器不支持硬件设备。');
                break;
            case 'MANDATORY_UNSATISFIED_ERROR':
            case 'MandatoryUnsatisfiedError':
                console.info('无法发现指定的硬件设备。');
                break;
            default:
                console.info('无法打开麦克风。异常信息:' + (error.code || error.name));
                break;
        }
    };
    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        navigator.mediaDevices.getUserMedia({ audio: true }).then(onStream).catch(onError);
        return;
    }
    // Legacy fallback for old browsers.
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
        navigator.mozGetUserMedia || navigator.msGetUserMedia;
    if (!navigator.getUserMedia) {
        alert('浏览器不支持音频输入');
        return;
    }
    navigator.getUserMedia({ audio: true }, onStream, onError);
};
/*
* 关闭对讲
*/
// Stop capturing and tell the backend to release the audio lock.
end.onclick = function () {
    if (!ws) {
        return;
    }
    if (record) {
        record.stop();
    }
    // (The original allocated an unused `tmparr` Int8Array here; removed.)
    let obj = { "action": "audio_unlock", "data": [{ "projectJid": "fe843627233020c110101c8f7e85ba53", "guid": "12c00001363b21cf", "playVolume": 80 }], "requestId": "cf3253b2-e491-4ce0-bf66-4a5bc36d46a1" };
    if (ws.readyState === WebSocket.OPEN) {
        ws.send(JSON.stringify(obj));
    }
    console.log('关闭对讲');
};
</script>
</html>
回答:
1. 提取 JS 代码到 src/assets/record.js,去除 DOM 操作,改为导出(export)beginRecord 和 stopRecord 两个函数。
// The DOM button lookups from the original page were removed here; the Vue
// component binds its own click handlers to beginRecord/stopRecord instead.
var ws = null // WebSocket connection to the audio backend
var record = null // Recorder instance that wraps the microphone stream
var timeInte // interval handle started in useWebSocket, cleared in Recorder.stop
// Keep the Recorder instance in the module-level `record` variable so the
// exported start/stop functions can reach it.
function init(rec) {
record = rec
}
// Recorder: wraps a microphone MediaStream, downsamples the captured audio to
// 16 kHz / 16-bit mono PCM, and streams it over the module-level WebSocket
// `ws` in packets of at most 1024 bytes.
var Recorder = function(stream) {
  var sampleBits = 16
  var sampleRate = 16000
  var context = new AudioContext()
  var audioInput = context.createMediaStreamSource(stream)
  // NOTE(review): ScriptProcessorNode is deprecated in favor of AudioWorklet;
  // kept here for the widest browser compatibility.
  var recorder = context.createScriptProcessor(4096, 1, 1)
  var audioData = {
    size: 0, // total number of buffered samples
    buffer: [], // captured Float32 chunks awaiting encoding
    // Use the AudioContext's real capture rate instead of the original
    // hard-coded 48000 — browsers commonly run at 44100 as well.
    inputSampleRate: context.sampleRate,
    inputSampleBits: 16, // bit depth of the captured samples
    outputSampleRate: sampleRate, // target rate sent to the server
    outputSampleBits: sampleBits, // target bit depth sent to the server
    // Drop all buffered audio.
    clear: function() {
      this.buffer = []
      this.size = 0
    },
    // Append one capture buffer. Copy it: the engine reuses the underlying
    // channel buffer between onaudioprocess callbacks.
    input: function(data) {
      this.buffer.push(new Float32Array(data))
      this.size += data.length
    },
    // Merge the buffered chunks, then naively downsample by keeping every
    // `compression`-th sample.
    compress: function() {
      var data = new Float32Array(this.size)
      var offset = 0
      for (var i = 0; i < this.buffer.length; i++) {
        data.set(this.buffer[i], offset)
        offset += this.buffer[i].length
      }
      // Math.floor (not parseInt / raw division): a fractional typed-array
      // length throws a RangeError, and 4096-sample buffers are not always
      // divisible by the compression ratio (e.g. 48000/16000 = 3).
      var compression = Math.max(1, Math.floor(this.inputSampleRate / this.outputSampleRate))
      var length = Math.floor(data.length / compression)
      var result = new Float32Array(length)
      var index = 0
      var j = 0
      while (index < length) {
        result[index] = data[j]
        j += compression
        index++
      }
      return result
    },
    // Encode the buffered audio as raw little-endian signed 16-bit PCM.
    // No container/format header is added; the server handles that.
    encodePCM: function() {
      var bytes = this.compress()
      var dataLength = bytes.length * (this.outputSampleBits / 8)
      var buffer = new ArrayBuffer(dataLength)
      var data = new DataView(buffer)
      var offset = 0
      for (var i = 0; i < bytes.length; i++, offset += 2) {
        // Clamp to [-1, 1] before scaling into the int16 range.
        var s = Math.max(-1, Math.min(1, bytes[i]))
        data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true)
      }
      return new Blob([data])
    }
  }
  // Read the encoded PCM back as bytes and push it to the WebSocket in
  // packets of at most 1024 bytes, then drop the buffered audio.
  var sendData = function() {
    var reader = new FileReader()
    reader.onload = function(e) {
      var arr = new Int8Array(e.target.result)
      for (var pos = 0; pos < arr.length; pos += 1024) {
        // Sending on a CONNECTING/CLOSED socket throws; guard it.
        if (ws && ws.readyState === WebSocket.OPEN) {
          ws.send(arr.subarray(pos, pos + 1024))
        }
      }
    }
    reader.readAsArrayBuffer(audioData.encodePCM())
    audioData.clear()
  }
  // Start capturing: microphone -> processor -> destination.
  this.start = function() {
    audioInput.connect(recorder)
    recorder.connect(context.destination)
  }
  // Stop capturing, detach the nodes and clear any pending interval.
  this.stop = function() {
    recorder.disconnect()
    audioInput.disconnect() // also detach the mic source (was leaked)
    window.clearInterval(timeInte)
    audioData.clear()
  }
  // Return the currently buffered audio as a PCM Blob.
  this.getBlob = function() {
    return audioData.encodePCM()
  }
  // Discard any buffered audio without stopping capture.
  this.clear = function() {
    audioData.clear()
  }
  // Invoked by the browser for every 4096-sample capture buffer.
  recorder.onaudioprocess = function(e) {
    audioData.input(e.inputBuffer.getChannelData(0))
    sendData()
  }
}
/*
* WebSocket
*/
// Open the audio WebSocket; once connected, send the audio_lock command and
// start the recorder.
function useWebSocket() {
  ws = new WebSocket('wss://api.tl.supremind.cloud')
  ws.binaryType = 'arraybuffer' // we transmit raw binary packets
  ws.onopen = function(event) {
    console.log('连接成功')
    const obj = { 'action': 'audio_lock', 'data': [{ 'projectJid': 'fe843627233020c110101c8f7e85ba53', 'guid': '12c00001363b21cf', 'playVolume': 20 }], 'requestId': 'cf3253b2-e491-4ce0-bf66-4a5bc36d46a1' }
    ws.send(JSON.stringify(obj))
    // Starting once is enough: the original re-ran record.start() every
    // 300 ms, but reconnecting identical AudioNodes is a no-op per the
    // Web Audio spec, so the interval only wasted work.
    if (record) {
      record.start()
    }
  }
  ws.onmessage = function(msg) {
    console.info(msg)
  }
  // The original handler swallowed errors silently; surface them.
  ws.onerror = function(err) {
    console.error('WebSocket error:', err)
  }
}
/*
* 开始对讲
*/
// Ask for microphone access; on success, create the Recorder and open the
// WebSocket. Prefers the modern promise-based navigator.mediaDevices API and
// falls back to the deprecated vendor-prefixed navigator.getUserMedia.
export function beginRecord() {
  var onStream = function(mediaStream) {
    init(new Recorder(mediaStream))
    console.log('开始对讲')
    useWebSocket()
  }
  var onError = function(error) {
    switch (error.message || error.name) {
      case 'PERMISSION_DENIED':
      case 'PermissionDeniedError':
      case 'NotAllowedError': // modern API name for a denied prompt
        console.info('用户拒绝提供信息。')
        break
      case 'NOT_SUPPORTED_ERROR':
      case 'NotSupportedError':
        console.info('浏览器不支持硬件设备。')
        break
      case 'MANDATORY_UNSATISFIED_ERROR':
      case 'MandatoryUnsatisfiedError':
        console.info('无法发现指定的硬件设备。')
        break
      default:
        console.info('无法打开麦克风。异常信息:' + (error.code || error.name))
        break
    }
  }
  if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
    navigator.mediaDevices.getUserMedia({ audio: true }).then(onStream).catch(onError)
    return
  }
  // Legacy fallback for old browsers.
  navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia || navigator.msGetUserMedia
  if (!navigator.getUserMedia) {
    alert('浏览器不支持音频输入')
    return
  }
  navigator.getUserMedia({ audio: true }, onStream, onError)
}
/*
* 关闭对讲
*/
// Stop capturing and tell the backend to release the audio lock.
export function stopRecord() {
  if (!ws) {
    return
  }
  if (record) {
    record.stop()
  }
  // (The original declared an unused `tmparr` Int8Array here; removed.)
  const obj = { 'action': 'audio_unlock', 'data': [{ 'projectJid': 'fe843627233020c110101c8f7e85ba53', 'guid': '12c00001363b21cf', 'playVolume': 80 }], 'requestId': 'cf3253b2-e491-4ce0-bf66-4a5bc36d46a1' }
  if (ws.readyState === WebSocket.OPEN) {
    ws.send(JSON.stringify(obj))
  }
  console.log('关闭对讲')
}
2. 在vue代码中这样写
<template>
  <div>
    <button @click="beginRecord">开始对讲</button>
    <button @click="stopRecord">关闭对讲</button>
  </div>
</template>
<script>
// Import the recording helpers extracted to assets/record.js.
// Use the "@" webpack alias (resolves to /src): a bare 'src/assets/...'
// path would be resolved as a node_modules package name and fail.
import { beginRecord, stopRecord } from '@/assets/record.js'
export default {
  methods: {
    // Expose the imported functions directly as component methods so the
    // template can bind them with @click — no wrapper indirection needed.
    beginRecord,
    stopRecord
  }
}
</script>
3. 代码中的问题
原代码 stopRecord(关闭对讲)函数中声明的 tmparr(new Int8Array(1024))从未被使用,变量的意义不明,建议直接删除。
希望这个解答可以帮到你!
以上是 怎么把这个实时录音原生js代码变成vue代码(vue2.0那种)? 的全部内容, 来源链接: utcz.com/p/935268.html