
Implementing Audio Recording and Spectrum Visualization

Approach

Get access to the audio input device

Capture the spectrum (waveform) data while recording

Draw the spectrum chart

Implementation

Wrap the device logic in loadDevices.js


/** Whether recording is supported */
const recordingSupport = () => {
  let scope = navigator.mediaDevices || {};
  if (!scope.getUserMedia) {
    scope = navigator;
    scope.getUserMedia ||
      (scope.getUserMedia =
        scope.webkitGetUserMedia ||
        scope.mozGetUserMedia ||
        scope.msGetUserMedia);
  }
  if (!scope.getUserMedia) {
    return false;
  }
  return scope;
};

// Ask for microphone permission
export const getUserMediaPermission = () => {
  return new Promise((resolve, reject) => {
    const mediaDevices = recordingSupport();
    if (mediaDevices && mediaDevices.getUserMedia) {
      const constraints = { audio: true };
      mediaDevices.getUserMedia(constraints).then(resolve, reject);
    } else {
      reject(false); // the browser does not support recording
    }
  });
};

function checkMime() {
  const types = [
    "audio/mpeg",
    "audio/webm",
    "audio/mp4",
    "audio/wav",
    "audio/ogg",
    "audio/flac",
    "audio/m4a",
    "audio/mp3",
    "audio/mpga",
    "audio/oga",
  ];
  let first;
  for (const type of types) {
    // Check which MIME type the current browser supports
    const supported = MediaRecorder.isTypeSupported(type);
    if (supported && !first) {
      console.log("Is " + type + " supported? " + (supported ? "Yes!" : "Nope :("));
      first = type;
    }
  }
  return first;
}

let streams = [];
let stopDraw = false;

/** Release resources */
export const devicesDispose = () => {
  console.log("devicesDispose - releasing resources");
  stopDraw = true;
  streams.forEach((s) => {
    s.getTracks().forEach((track) => track.stop());
  });
};

export const getAudioContext = () =>
  window.AudioContext ||
  window.webkitAudioContext ||
  window.mozAudioContext ||
  window.msAudioContext;

export default function loadDevices(options = {}) {
  const { readover = () => {}, change = () => {}, stop = () => {} } = options;
  let analyser;
  let mediaRecorder;
  let dataArray;
  let audioChunks = [];
  try {
    const draw = () => {
      if (stopDraw) return;
      requestAnimationFrame(draw);
      analyser.getByteTimeDomainData(dataArray);
      change(dataArray);
    };
    const mimeType = checkMime();
    getUserMediaPermission()
      .then((stream) => {
        streams.push(stream);
        // Create the recorder
        mediaRecorder = new MediaRecorder(stream, { mimeType });
        // Collect audio chunks whenever data is available, to assemble the audio file later
        mediaRecorder.addEventListener("dataavailable", (event) => {
          console.log("mediaRecorder-dataavailable:", event);
          audioChunks.push(event.data);
        });
        // // Listen for the start of recording
        // mediaRecorder.addEventListener("start", () => {
        //   console.log("mediaRecorder-start");
        //   audioChunks = [];
        // });
        // Recording finished callback
        mediaRecorder.addEventListener("stop", () => {
          console.log("mediaRecorder-end:", audioChunks);
          const audioBlob = new Blob(audioChunks, { type: "audio/mp4" }); // wav / webm / mp4
          stop(audioBlob);
          audioChunks = []; // clear the chunks for the next recording
        });
        // Read the audio data
        const AudioContextCtor = getAudioContext();
        const audioContext = new AudioContextCtor();
        const source = audioContext.createMediaStreamSource(stream);
        // Use AnalyserNode.getByteTimeDomainData to read the waveform of the audio data
        analyser = audioContext.createAnalyser();
        // fftSize controls the level of detail
        analyser.fftSize = 2048;
        const bufferLength = analyser.frequencyBinCount;
        dataArray = new Uint8Array(bufferLength);
        // Connect the stream to the analyser
        source.connect(analyser);
        draw();
        readover(mediaRecorder);
      })
      .catch((err) => {
        console.log("stream-error", err);
      });
  } catch (err) {
    console.log("mediaDevices-error", err);
  }
}
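
Before moving on to the Vue example, here is a minimal plain-JavaScript sketch of how the module above could be wired up on its own; the import path, the #record button and the console logging are illustrative placeholders, only loadDevices and devicesDispose come from the module itself.

import loadDevices, { devicesDispose } from "./loadDevices";

let recorder;

// readover hands us the MediaRecorder once the stream is ready
loadDevices({
  readover: (r) => (recorder = r),
  change: (dataArray) => console.log("waveform frame of", dataArray.length, "samples"),
  stop: (blob) => console.log("recorded blob:", blob.size, "bytes"),
});

// hypothetical toggle button
document.querySelector("#record").addEventListener("click", () => {
  if (!recorder) return;
  if (recorder.state === "recording") {
    recorder.stop();
  } else {
    recorder.start();
  }
});

// release the tracks when leaving the page
window.addEventListener("beforeunload", devicesDispose);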

Example

import { ref, onMounted, onUnmounted } from "vue";
import loadDevices, {
  devicesDispose,
  getAudioContext,
} from "../compositions/VerbalChat/loadDevices";

let mediaRecorder;
const speak = ref(false);

// Called when recording stops
const uploadAudio = (blob) => {
  // Follow-up handling of the recorded data, e.g. uploading it
  // const formData = new FormData();
  // formData.append("file", blob);
  // then post the formData to your API
};

// Drawing callback
const draw = ({ data }) => {
  // Call the child component's draw method and pass the data along
  // verCanvas.value && verCanvas.value.draw({ data });
};

const btnClick = () => {
  if (!speak.value) {
    console.log("start recording");
    speak.value = true;
    mediaRecorder && mediaRecorder.start();
  } else {
    console.log("stop recording");
    speak.value = false;
    mediaRecorder && mediaRecorder.stop();
  }
};

onMounted(() => {
  loadDevices({
    readover: (r) => (mediaRecorder = r),
    change: (dataArray) => {
      if (speak.value) {
        // only draw while recording
        draw({ data: dataArray });
      }
    },
    stop: (blob) => uploadAudio(blob),
  });
});

onUnmounted(() => devicesDispose());
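
The parent component's template is not included in the original; as a rough sketch (assuming the spectrum component from the next section is imported as VerbalCanvas, and that `const verCanvas = ref()` is added so the commented-out verCanvas.value.draw call can work), it could look like this:

<template>
  <!-- spectrum canvas (the child component shown below) -->
  <VerbalCanvas ref="verCanvas" />
  <!-- toggle recording on and off -->
  <button @click="btnClick">{{ speak ? "Stop recording" : "Start recording" }}</button>
</template>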

Drawing the spectrum chart

<template>
  <canvas class="VerbalCanvas" ref="canvasRef"></canvas>
</template>

<script setup>
import { onMounted, ref, watch } from "vue";

let ctx, canvas;
const canvasRef = ref();

// type: "top" | "center" | "bottom", used by drawWave to position the curve
const draw = ({ data, type = "center" }) => {
  if (!canvasRef.value) return;
  canvas = canvasRef.value;
  canvas.height = parseFloat(getComputedStyle(canvas)["height"]);
  canvas.width = parseFloat(getComputedStyle(canvas)["width"]);
  ctx = canvas.getContext("2d");
  // drawWave(ctx, canvas, type, data);
  // drawLoop(ctx, canvas, type, data);
  drawCircle(ctx, canvas, type, data);
};

const clear = () => {
  try {
    ctx.clearRect(0, 0, canvas.width, canvas.height);
  } catch (er) {
    console.log("er", er);
  }
};

defineExpose({ draw, clear });
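
Because draw() sizes the canvas from its computed CSS width and height, the canvas element needs explicit dimensions. A minimal sketch (the concrete values are placeholders):

<style scoped>
/* draw() reads getComputedStyle(), so the canvas must have CSS dimensions */
.VerbalCanvas {
  width: 100%;
  height: 300px;
}
</style>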

Drawing the waveform curve

/** Draw the waveform curve */
const drawWave = (ctx, canvas, type, data) => {
  const waveH = 150; // height of the wave area
  const obj = {
    top: 0,
    center: canvas.height / 2,
    bottom: canvas.height - waveH,
  };
  const initY = obj[type];
  const dataArray = data || [];
  ctx.fillStyle = "rgba(200, 200, 200, 0)";
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  ctx.lineWidth = 1;
  ctx.strokeStyle = "#0077FF"; // "rgb(0, 0, 0)"
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  ctx.beginPath();
  const sliceWidth = (canvas.width * 1.0) / dataArray.length;
  let x = 0;
  for (let i = 0; i < dataArray.length; i++) {
    const v = dataArray[i] / 128.0; // 128 is the zero line of the byte time-domain data
    // const y = (v * canvas.height) / 2;
    const y = (v * waveH) / 2 + initY;
    if (i === 0) {
      ctx.moveTo(x, y);
    } else {
      ctx.lineTo(x, y);
    }
    x += sliceWidth;
  }
  // ctx.lineTo(canvas.width, canvas.height / 2);
  ctx.lineTo(canvas.width, waveH / 2 + initY);
  ctx.stroke();
};
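
To preview the curve without a live microphone, the function can be fed a mock array. getByteTimeDomainData produces byte values in the range 0-255 with 128 as the silence line, so the sine-plus-noise signal below is only an illustrative stand-in:

// mock time-domain data: a gentle sine wave around the 128 midline with a little noise
const mockData = Array.from({ length: 2048 }, (_, i) =>
  Math.round(128 + 40 * Math.sin(i / 20) + (Math.random() - 0.5) * 10)
);
drawWave(ctx, canvas, "center", mockData);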

Drawing the audio ring

/** Draw the audio ring */
const drawLoop = (ctx, canvas, type, data) => {
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  const cX = canvas.width / 2;
  const cY = canvas.height / 2;
  const r = 100;
  const basel = Math.floor(data.length / 360);
  for (let i = 0; i < 360; i++) {
    const value = (data[i * basel] / 60) * 8; // mock data: value = Math.random() * 100
    ctx.beginPath();
    ctx.lineWidth = 2;
    ctx.strokeStyle = "#08a3ef";
    ctx.moveTo(cX, cY);
    // end point: R * cos(PI / 180 * angle), -R * sin(PI / 180 * angle)
    ctx.lineTo(
      Math.cos((i / 180) * Math.PI) * (r + value) + cX,
      -Math.sin((i / 180) * Math.PI) * (r + value) + cY
    );
    ctx.stroke();
  }
  // Draw a filled circle in the middle to cover the inner ends of the lines
  ctx.beginPath();
  ctx.lineWidth = 1;
  ctx.arc(cX, cY, r, 0, 2 * Math.PI, false);
  ctx.fillStyle = "#000";
  ctx.stroke();
  ctx.fill();
};
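
Each spoke runs from the ring centre to a point at distance r + value in the direction of the i-th degree. The coordinate conversion used in the loop could also be pulled into a small helper; polarToXY here is a hypothetical name, not part of the original code:

// Hypothetical helper: convert an angle in degrees and a radius into canvas
// coordinates around the centre (the canvas y-axis points down, hence -sin)
const polarToXY = (deg, radius, cX, cY) => ({
  x: Math.cos((deg / 180) * Math.PI) * radius + cX,
  y: -Math.sin((deg / 180) * Math.PI) * radius + cY,
});

// e.g. inside the loop:
// const { x, y } = polarToXY(i, r + value, cX, cY);
// ctx.lineTo(x, y);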

Drawing concentric circles

/** Draw concentric circles */
const drawCircle = (ctx, canvas, type, data) => {
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  const cX = canvas.width / 2;
  const cY = canvas.height / 2;
  for (let i = 0; i < data.length; i += 4) {
    // average each group of four samples and use it as a radius
    const v = (data[i] + data[i + 1] + data[i + 2] + data[i + 3]) / 4;
    const r = v * 0.5;
    // mock data: const r = Math.random() * 100;
    ctx.beginPath();
    ctx.lineWidth = 1;
    ctx.arc(cX, cY, r, 0, 2 * Math.PI, false);
    ctx.strokeStyle = "#c46868";
    ctx.stroke();
  }
};