怎么把这个实时录音原生js代码变成vue代码(vue2.0那种)?-灵析社区

kunkun小黑子

// Real-time intercom demo: capture microphone audio, down-sample it to
// 16 kHz / 16-bit PCM, and stream it to the backend over a WebSocket in
// 1024-byte chunks.
//
// NOTE(review): several spans of the scraped original were destroyed by HTML
// extraction (everything between a literal '<' and the next '>' was eaten).
// The bodies of compress()/encodePCM() and the interior of the chunked-send
// loop below are reconstructed from the standard recorder pattern the
// surviving fragments match — verify against the original source.

var begin = document.getElementById('intercomBegin');
var end = document.getElementById('intercomEnd');

var ws = null;      // WebSocket connection to the backend
var record = null;  // Recorder instance wrapping the audio pipeline
var timeInte;       // interval id that periodically (re)starts the recorder

function init(rec) {
  record = rec;
}

// Recorder: wraps an AudioContext/ScriptProcessor pipeline around a
// getUserMedia stream. Buffers Float32 samples, decimates them from the
// input rate to 16 kHz, encodes little-endian 16-bit PCM, and pushes the
// result over the WebSocket.
var Recorder = function (stream) {
  var sampleBits = 16;
  var sampleRate = 16000;
  var context = new AudioContext();
  var audioInput = context.createMediaStreamSource(stream);
  var recorder = context.createScriptProcessor(4096, 1, 1);

  var audioData = {
    size: 0,
    buffer: [],
    inputSampleRate: 48000,       // input sample rate
    inputSampleBits: 16,          // input sample depth
    outputSampleRate: sampleRate, // output sample rate
    oututSampleBits: sampleBits,  // output sample depth (original typo kept: callers use this key)
    clear: function () {
      this.buffer = [];
      this.size = 0;
    },
    input: function (data) {
      this.buffer.push(new Float32Array(data));
      this.size += data.length;
    },
    // Merge the buffered chunks, then decimate by the ratio of input to
    // output sample rate (simple sample dropping, no filtering).
    // NOTE(review): body reconstructed — stripped in the scraped source.
    compress: function () {
      var data = new Float32Array(this.size);
      var offset = 0;
      for (var i = 0; i < this.buffer.length; i++) {
        data.set(this.buffer[i], offset);
        offset += this.buffer[i].length;
      }
      var compression = parseInt(this.inputSampleRate / this.outputSampleRate, 10);
      var length = data.length / compression;
      var result = new Float32Array(length);
      var index = 0;
      var j = 0;
      while (index < length) {
        result[index] = data[j];
        j += compression;
        index++;
      }
      return result;
    },
    // Encode the compressed samples as little-endian 16-bit signed PCM and
    // return them as a Blob.
    // NOTE(review): body reconstructed — stripped in the scraped source.
    encodePCM: function () {
      var bits = Math.min(this.inputSampleBits, this.oututSampleBits);
      var bytes = this.compress();
      var dataLength = bytes.length * (bits / 8);
      var buffer = new ArrayBuffer(dataLength);
      var data = new DataView(buffer);
      var offset = 0;
      for (var i = 0; i < bytes.length; i++, offset += 2) {
        // Clamp to [-1, 1] before scaling to the int16 range.
        var s = Math.max(-1, Math.min(1, bytes[i]));
        data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
      }
      return new Blob([data]);
    }
  };

  // Read the encoded PCM blob back as an ArrayBuffer and send it over the
  // WebSocket in 1024-byte Int8Array chunks (the final chunk may be shorter).
  var sendData = function () {
    var reader = new FileReader();
    reader.onload = e => {
      var outbuffer = e.target.result;
      console.log('文件读取后的结果', outbuffer);
      var arr = new Int8Array(outbuffer);
      console.log('转化的二进制数据', arr);
      if (arr.length > 0) {
        var tmparr = new Int8Array(1024);
        var j = 0;
        // NOTE(review): loop interior reconstructed from the surviving
        // fragments ("= 1024", the else branch, the tail-send guard).
        for (var i = 0; i < arr.byteLength; i++) {
          tmparr[j++] = arr[i];
          if (((i + 1) % 1024) === 0) {
            ws.send(tmparr);
            if (arr.byteLength - i - 1 >= 1024) {
              tmparr = new Int8Array(1024);
            } else {
              // Last partial chunk: size it to the remaining bytes.
              tmparr = new Int8Array(arr.byteLength - i - 1);
            }
            j = 0;
          }
          // Flush the trailing partial chunk once the last byte is copied.
          if ((i + 1 === arr.byteLength) && ((i + 1) % 1024) !== 0) {
            ws.send(tmparr);
          }
        }
      }
    };
    reader.readAsArrayBuffer(audioData.encodePCM());
    audioData.clear();
  };

  // Connect the microphone source through the processor node.
  this.start = function () {
    audioInput.connect(recorder);
    recorder.connect(context.destination);
  };

  // Disconnect, stop the periodic restart timer, and drop buffered audio.
  this.stop = function () {
    recorder.disconnect();
    window.clearInterval(timeInte);
    audioData.clear();
  };

  this.getBlob = function () {
    return audioData.encodePCM();
  };

  this.clear = function () {
    audioData.clear();
  };

  // Fires for every 4096-sample buffer; accumulate and send immediately.
  recorder.onaudioprocess = function (e) {
    console.log('测试一下', e);
    var inputBuffer = e.inputBuffer.getChannelData(0);
    audioData.input(inputBuffer);
    console.log('显示', inputBuffer);
    sendData();
  };
};

/*
 * WebSocket setup: on open, send the audio_lock command and start the
 * recorder on a 300 ms interval.
 */
function useWebSocket() {
  ws = new WebSocket("wss://api.tl.supremind.cloud");
  ws.binaryType = 'arraybuffer'; // we transmit ArrayBuffer-typed data
  ws.onopen = function (event) {
    console.log('连接成功');
    let obj = {
      "action": "audio_lock",
      "data": [{
        "projectJid": "fe843627233020c110101c8f7e85ba53",
        "guid": "12c00001363b21cf",
        "playVolume": 20
      }],
      "requestId": "cf3253b2-e491-4ce0-bf66-4a5bc36d46a1"
    };
    ws.send(JSON.stringify(obj));
    timeInte = setInterval(function () {
      record.start();
    }, 300);
  };
  ws.onmessage = function (msg) {
    console.info(msg);
  };
  ws.onerror = function (err) {
  };
}

/*
 * Start intercom: request microphone access, build the Recorder, then open
 * the WebSocket.
 */
begin.onclick = function () {
  navigator.getUserMedia = navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia;
  if (!navigator.getUserMedia) {
    alert('浏览器不支持音频输入');
  } else {
    navigator.getUserMedia({ audio: true },
      function (mediaStream) {
        console.log('ddddd', mediaStream);
        init(new Recorder(mediaStream));
        console.log('开始对讲');
        useWebSocket();
      },
      function (error) {
        // Map the various vendor-specific error names to log messages.
        switch (error.message || error.name) {
          case 'PERMISSION_DENIED':
          case 'PermissionDeniedError':
            console.info('用户拒绝提供信息。');
            break;
          case 'NOT_SUPPORTED_ERROR':
          case 'NotSupportedError':
            console.info('浏览器不支持硬件设备。');
            break;
          case 'MANDATORY_UNSATISFIED_ERROR':
          case 'MandatoryUnsatisfiedError':
            console.info('无法发现指定的硬件设备。');
            break;
          default:
            console.info('无法打开麦克风。异常信息:' + (error.code || error.name));
            break;
        }
      }
    );
  }
};

/*
 * Stop intercom: stop the recorder and send the audio_unlock command.
 */
end.onclick = function () {
  if (ws) {
    record.stop();
    let obj = {
      "action": "audio_unlock",
      "data": [{
        "projectJid": "fe843627233020c110101c8f7e85ba53",
        "guid": "12c00001363b21cf",
        "playVolume": 80
      }],
      "requestId": "cf3253b2-e491-4ce0-bf66-4a5bc36d46a1"
    };
    ws.send(JSON.stringify(obj));
    console.log('关闭对讲');
  }
};

阅读量:12

点赞量:0

问AI
// Step 1: extract the recording logic into src/assets/record.js, drop all
// direct DOM access, and export beginRecord / stopRecord for Vue to call.
//
// NOTE(review): the scraped answer suffered the same HTML-extraction damage
// as the question (text between '<' and '>' stripped); compress()/encodePCM()
// and the send loop are reconstructed from the standard recorder pattern —
// verify against the original source.

var ws = null      // WebSocket connection to the backend
var record = null  // Recorder instance handling the audio stream
var timeInte       // interval id that periodically (re)starts the recorder

function init(rec) {
  record = rec
}

// Recorder: AudioContext/ScriptProcessor pipeline over a getUserMedia
// stream; buffers samples, decimates to 16 kHz, encodes 16-bit PCM, and
// streams it over the WebSocket in 1024-byte chunks.
var Recorder = function(stream) {
  var sampleBits = 16
  var sampleRate = 16000
  var context = new AudioContext()
  var audioInput = context.createMediaStreamSource(stream)
  var recorder = context.createScriptProcessor(4096, 1, 1)

  var audioData = {
    size: 0,
    buffer: [],
    inputSampleRate: 48000,       // input sample rate
    inputSampleBits: 16,          // input sample depth
    outputSampleRate: sampleRate, // output sample rate
    oututSampleBits: sampleBits,  // output sample depth (original typo kept: callers use this key)
    clear: function() {
      this.buffer = []
      this.size = 0
    },
    input: function(data) {
      this.buffer.push(new Float32Array(data))
      this.size += data.length
    },
    // Merge buffered chunks, then decimate by inputSampleRate/outputSampleRate.
    // NOTE(review): body reconstructed — stripped in the scraped source.
    compress: function() {
      var data = new Float32Array(this.size)
      var offset = 0
      for (var i = 0; i < this.buffer.length; i++) {
        data.set(this.buffer[i], offset)
        offset += this.buffer[i].length
      }
      var compression = parseInt(this.inputSampleRate / this.outputSampleRate, 10)
      var length = data.length / compression
      var result = new Float32Array(length)
      var index = 0
      var j = 0
      while (index < length) {
        result[index] = data[j]
        j += compression
        index++
      }
      return result
    },
    // Encode compressed samples as little-endian 16-bit signed PCM.
    // NOTE(review): body reconstructed — stripped in the scraped source.
    encodePCM: function() {
      var bits = Math.min(this.inputSampleBits, this.oututSampleBits)
      var bytes = this.compress()
      var dataLength = bytes.length * (bits / 8)
      var buffer = new ArrayBuffer(dataLength)
      var data = new DataView(buffer)
      var offset = 0
      for (var i = 0; i < bytes.length; i++, offset += 2) {
        // Clamp to [-1, 1] before scaling to the int16 range.
        var s = Math.max(-1, Math.min(1, bytes[i]))
        data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true)
      }
      return new Blob([data])
    }
  }

  // Read the encoded PCM blob and push it over the WebSocket in 1024-byte
  // Int8Array chunks (the final chunk may be shorter).
  var sendData = function() {
    var reader = new FileReader()
    reader.onload = e => {
      var outbuffer = e.target.result
      console.log('文件读取后的结果', outbuffer)
      var arr = new Int8Array(outbuffer)
      console.log('转化的二进制数据', arr)
      if (arr.length > 0) {
        var tmparr = new Int8Array(1024)
        var j = 0
        // NOTE(review): loop interior reconstructed from surviving fragments.
        for (var i = 0; i < arr.byteLength; i++) {
          tmparr[j++] = arr[i]
          if (((i + 1) % 1024) === 0) {
            ws.send(tmparr)
            if (arr.byteLength - i - 1 >= 1024) {
              tmparr = new Int8Array(1024)
            } else {
              // Last partial chunk: size it to the remaining bytes.
              tmparr = new Int8Array(arr.byteLength - i - 1)
            }
            j = 0
          }
          // Flush the trailing partial chunk once the last byte is copied.
          if ((i + 1 === arr.byteLength) && ((i + 1) % 1024) !== 0) {
            ws.send(tmparr)
          }
        }
      }
    }
    reader.readAsArrayBuffer(audioData.encodePCM())
    audioData.clear()
  }

  this.start = function() {
    audioInput.connect(recorder)
    recorder.connect(context.destination)
  }

  this.stop = function() {
    recorder.disconnect()
    window.clearInterval(timeInte)
    audioData.clear()
  }

  this.getBlob = function() {
    return audioData.encodePCM()
  }

  this.clear = function() {
    audioData.clear()
  }

  // Fires for every 4096-sample buffer; accumulate and send immediately.
  recorder.onaudioprocess = function(e) {
    console.log('测试一下', e)
    var inputBuffer = e.inputBuffer.getChannelData(0)
    audioData.input(inputBuffer)
    console.log('显示', inputBuffer)
    sendData()
  }
}

/*
 * WebSocket setup: on open, send the audio_lock command and start the
 * recorder on a 300 ms interval.
 */
function useWebSocket() {
  ws = new WebSocket('wss://api.tl.supremind.cloud')
  ws.binaryType = 'arraybuffer' // we transmit ArrayBuffer-typed data
  ws.onopen = function(event) {
    console.log('连接成功')
    const obj = {
      'action': 'audio_lock',
      'data': [{
        'projectJid': 'fe843627233020c110101c8f7e85ba53',
        'guid': '12c00001363b21cf',
        'playVolume': 20
      }],
      'requestId': 'cf3253b2-e491-4ce0-bf66-4a5bc36d46a1'
    }
    ws.send(JSON.stringify(obj))
    timeInte = setInterval(function() {
      record.start()
    }, 300)
  }
  ws.onmessage = function(msg) {
    console.info(msg)
  }
  ws.onerror = function(err) {
  }
}

/*
 * Start intercom (replaces begin.onclick): request microphone access,
 * build the Recorder, then open the WebSocket.
 */
export function beginRecord() {
  navigator.getUserMedia = navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia
  if (!navigator.getUserMedia) {
    alert('浏览器不支持音频输入')
  } else {
    navigator.getUserMedia({ audio: true },
      function(mediaStream) {
        console.log('ddddd', mediaStream)
        init(new Recorder(mediaStream))
        console.log('开始对讲')
        useWebSocket()
      },
      function(error) {
        // Map the various vendor-specific error names to log messages.
        switch (error.message || error.name) {
          case 'PERMISSION_DENIED':
          case 'PermissionDeniedError':
            console.info('用户拒绝提供信息。')
            break
          case 'NOT_SUPPORTED_ERROR':
          case 'NotSupportedError':
            console.info('浏览器不支持硬件设备。')
            break
          case 'MANDATORY_UNSATISFIED_ERROR':
          case 'MandatoryUnsatisfiedError':
            console.info('无法发现指定的硬件设备。')
            break
          default:
            console.info('无法打开麦克风。异常信息:' + (error.code || error.name))
            break
        }
      }
    )
  }
}

/*
 * Stop intercom (replaces end.onclick): stop the recorder and send the
 * audio_unlock command.
 * The original answer also allocated `var tmparr = new Int8Array(1024)`
 * here; it was never used (the variable the question's screenshot asked
 * about), so it has been removed.
 */
export function stopRecord() {
  if (ws) {
    record.stop()
    const obj = {
      'action': 'audio_unlock',
      'data': [{
        'projectJid': 'fe843627233020c110101c8f7e85ba53',
        'guid': '12c00001363b21cf',
        'playVolume': 80
      }],
      'requestId': 'cf3253b2-e491-4ce0-bf66-4a5bc36d46a1'
    }
    ws.send(JSON.stringify(obj))
    console.log('关闭对讲')
  }
}

// Step 2: use it from a Vue 2 component:
//
// <template>
//   <div>
//     <button @click="beginRecord">开始对讲</button>
//     <button @click="stopRecord">关闭对讲</button>
//   </div>
// </template>
//
// <script>
// import { beginRecord, stopRecord } from '@/assets/record.js'
// export default {
//   methods: {
//     beginRecord() { beginRecord() },
//     stopRecord() { stopRecord() }
//   }
// }
// </script>
代码中的问题:如下图所示,stopRecord(原 end.onclick)里的 var tmparr = new Int8Array(1024) 声明后从未被使用,意义不明,可以安全删除。 "image.png" (https://wmlx-new-image.oss-cn-shanghai.aliyuncs.com/images/20241025/d2aef2a1f5212649597dccf6296077d0.png) 希望这个解答可以帮到你!