diff --git a/src/store/modules/index.ts b/src/store/modules/index.ts
index 25ab4f1..ab079aa 100644
--- a/src/store/modules/index.ts
+++ b/src/store/modules/index.ts
@@ -4,3 +4,4 @@ export * from './user'
 export * from './prompt'
 export * from './settings'
 export * from './auth'
+export * from './text'
diff --git a/src/store/modules/text/helper.ts b/src/store/modules/text/helper.ts
new file mode 100644
index 0000000..af42bc5
--- /dev/null
+++ b/src/store/modules/text/helper.ts
@@ -0,0 +1,26 @@
+import { ss } from '@/utils/storage'
+
+const LOCAL_NAME = 'textStorage'
+
+export interface TextInfo {
+  pormat: string
+}
+
+export interface TextState {
+  textInfo: TextInfo
+}
+
+export function defaultSetting(): TextState {
+  return {
+    textInfo: { pormat: '' },
+  }
+}
+
+export function getLocalState(): TextState {
+  const localSetting: TextState | undefined = ss.get(LOCAL_NAME)
+  return { ...defaultSetting(), ...localSetting }
+}
+
+export function setLocalState(setting: TextState): void {
+  ss.set(LOCAL_NAME, setting)
+}
diff --git a/src/store/modules/text/index.ts b/src/store/modules/text/index.ts
new file mode 100644
index 0000000..f51e809
--- /dev/null
+++ b/src/store/modules/text/index.ts
@@ -0,0 +1,22 @@
+import { defineStore } from 'pinia'
+import type { TextInfo, TextState } from './helper'
+import { defaultSetting, getLocalState, setLocalState } from './helper'
+
+export const useTextStore = defineStore('text-store', {
+  state: (): TextState => getLocalState(),
+  actions: {
+    updateTextInfo(textInfo: Partial<TextInfo>) {
+      this.textInfo = { ...this.textInfo, ...textInfo }
+      this.recordState()
+    },
+
+    resetTextInfo() {
+      this.textInfo = { ...defaultSetting().textInfo }
+      this.recordState()
+    },
+
+    recordState() {
+      setLocalState(this.$state)
+    },
+  },
+})
diff --git a/src/utils/latRecorder.js b/src/utils/latRecorder.js
index c683198..e70e641 100644
--- a/src/utils/latRecorder.js
+++ b/src/utils/latRecorder.js
@@ -1,8 +1,8 @@
 /* eslint-disable no-undef */
+import CryptoJS from 'crypto-js'
 const APPID = '2eda6c2e'
 const API_SECRET = 'MDEyMzE5YTc5YmQ5NjMwOTU1MWY4N2Y2'
 const API_KEY = '12ec1f9d113932575fc4b114a2f60ffd'
-import CryptoJS from 'crypto-js'
 // import Worker from './transcode.worker.js'
 const transWorker = new Worker(new URL('./transcode.worker.js', import.meta.url))
 let startTime = ''
@@ -42,7 +42,7 @@ const IatRecorder = class {
     this.resultText = ''
     // wpgs下的听写结果需要中间状态辅助记录
     this.resultTextTemp = ''
-    transWorker.onmessage = function(event) {
+    transWorker.onmessage = function (event) {
       // console.log("构造方法中",self.audioData)
       self.audioData.push(...event.data)
     }
@@ -50,73 +50,80 @@ const IatRecorder = class {
   // 修改录音听写状态
   setStatus(status) {
-    this.onWillStatusChange &&
-      this.status !== status &&
-      this.onWillStatusChange(this.status, status)
+    this.onWillStatusChange
+      && this.status !== status
+      && this.onWillStatusChange(this.status, status)
     this.status = status
   }
+
   setResultText({ resultText, resultTextTemp } = {}) {
     this.onTextChange && this.onTextChange(resultTextTemp || resultText || '')
     resultText !== undefined && (this.resultText = resultText)
     resultTextTemp !== undefined && (this.resultTextTemp = resultTextTemp)
   }
+
   // 修改听写参数
   setParams({ language, accent } = {}) {
     language && (this.language = language)
     accent && (this.accent = accent)
   }
+
   // 连接websocket
   connectWebSocket() {
-    return getWebSocketUrl().then(url => {
+    return getWebSocketUrl().then((url) => {
       let iatWS
       if ('WebSocket' in window) {
         iatWS = new WebSocket(url)
-      } else if ('MozWebSocket' in window) {
+      }
+      else if ('MozWebSocket' in window) {
         iatWS = new MozWebSocket(url)
-      } else {
+      }
+      else {
         alert('浏览器不支持WebSocket')
         return
       }
       this.webSocket = iatWS
       this.setStatus('init')
-      iatWS.onopen = e => {
+      iatWS.onopen = (e) => {
         this.setStatus('ing')
         // 重新开始录音
         setTimeout(() => {
           this.webSocketSend()
         }, 500)
       }
-      iatWS.onmessage = e => {
+      iatWS.onmessage = (e) => {
         this.result(e.data)
       }
-      iatWS.onerror = e => {
+      iatWS.onerror = (e) => {
         this.recorderStop()
       }
-      iatWS.onclose = e => {
+      iatWS.onclose = (e) => {
         endTime = Date.parse(new Date())
         // console.log('持续时间', endTime - startTime)
         this.recorderStop()
       }
     })
   }
+
   // 初始化浏览器录音
   recorderInit() {
-    navigator.getUserMedia =
-      navigator.getUserMedia ||
-      navigator.webkitGetUserMedia ||
-      navigator.mozGetUserMedia ||
-      navigator.msGetUserMedia
+    navigator.getUserMedia
+      = navigator.getUserMedia
+      || navigator.webkitGetUserMedia
+      || navigator.mozGetUserMedia
+      || navigator.msGetUserMedia
 
     // 创建音频环境
     try {
-      this.audioContext = new (window.AudioContext ||
-        window.webkitAudioContext)()
+      this.audioContext = new (window.AudioContext
+        || window.webkitAudioContext)()
       this.audioContext.resume()
       if (!this.audioContext) {
         alert('浏览器不支持webAudioApi相关接口')
         return
       }
-    } catch (e) {
+    }
+    catch (e) {
       if (!this.audioContext) {
         alert('浏览器不支持webAudioApi相关接口')
         return
@@ -130,34 +137,37 @@ const IatRecorder = class {
         audio: true,
         video: false,
       })
-        .then(stream => {
+        .then((stream) => {
           getMediaSuccess(stream)
         })
-        .catch(e => {
+        .catch((e) => {
           getMediaFail(e)
         })
-    } else if (navigator.getUserMedia) {
+    }
+    else if (navigator.getUserMedia) {
       navigator.getUserMedia(
         {
           audio: true,
           video: false,
         },
-        stream => {
+        (stream) => {
           getMediaSuccess(stream)
         },
-        function(e) {
+        (e) => {
           getMediaFail1(e)
         },
       )
-    } else {
+    }
+    else {
       if (
-        navigator.userAgent.toLowerCase().match(/chrome/) &&
-        location.origin.indexOf('https://') < 0
+        navigator.userAgent.toLowerCase().match(/chrome/)
+        && !location.origin.includes('https://')
       ) {
         alert(
           'chrome下获取浏览器录音功能,因为安全性问题,需要在localhost或127.0.0.1或https下才能获取权限',
         )
-      } else {
+      }
+      else {
         alert('无法获取浏览器录音功能,请升级软件版本')
       }
       this.voiceDialog = 0
@@ -165,12 +175,12 @@ const IatRecorder = class {
       return
     }
     // 获取浏览器录音权限成功的回调
-    const getMediaSuccess = stream => {
+    const getMediaSuccess = (stream) => {
       this.voiceDialog = 1
       // console.log('getMediaSuccess')
       // 创建一个用于通过JavaScript直接处理音频
       this.scriptProcessor = this.audioContext.createScriptProcessor(0, 1, 1)
-      this.scriptProcessor.onaudioprocess = e => {
+      this.scriptProcessor.onaudioprocess = (e) => {
         // 去处理音频数据
         if (this.status === 'ing') {
           // console.log(transWorker)
@@ -186,37 +196,38 @@ const IatRecorder = class {
       this.connectWebSocket()
     }
 
-    const getMediaFail = e => {
+    const getMediaFail = (e) => {
       alert(e)
       this.voiceDialog = 0
       // console.log(e)
       this.audioContext && this.audioContext.close()
       this.audioContext = undefined
       // 关闭websocket
-      if (this.webSocket && this.webSocket.readyState === 1) {
+      if (this.webSocket && this.webSocket.readyState === 1)
         this.webSocket.close()
-      }
     }
 
-    const getMediaFail1 = e => {
+    const getMediaFail1 = (e) => {
       alert('请求麦克风失败,请添加权限!')
       this.voiceDialog = 0
       // console.log(e)
       this.audioContext && this.audioContext.close()
       this.audioContext = undefined
       // 关闭websocket
-      if (this.webSocket && this.webSocket.readyState === 1) {
+      if (this.webSocket && this.webSocket.readyState === 1)
         this.webSocket.close()
-      }
     }
   }
+
   recorderStart() {
     if (!this.audioContext) {
       this.recorderInit()
-    } else {
+    }
+    else {
       this.audioContext.resume()
       this.connectWebSocket()
     }
   }
+
   // 暂停录音
   recorderStop() {
     // safari下suspend后再次resume录音内容将是空白,设置safari下不做suspend
@@ -224,11 +235,12 @@ const IatRecorder = class {
       !(
         /Safari/.test(navigator.userAgent) && !/Chrome/.test(navigator.userAgen)
       )
-    ) {
+    )
       this.audioContext && this.audioContext.suspend()
-    }
+
     this.setStatus('end')
   }
+
   // 处理音频数据
   // transAudioData(audioData) {
   //   audioData = transAudioData.transaction(audioData)
@@ -239,25 +251,26 @@ const IatRecorder = class {
     let binary = ''
     const bytes = new Uint8Array(buffer)
     const len = bytes.byteLength
-    for (let i = 0; i < len; i++) {
+    for (let i = 0; i < len; i++)
       binary += String.fromCharCode(bytes[i])
-    }
+
     return window.btoa(binary)
   }
+
   // 向webSocket发送数据
   webSocketSend() {
-    if (this.webSocket.readyState !== 1) {
+    if (this.webSocket.readyState !== 1)
       return
-    }
+
     let audioData = this.audioData.splice(0, 1280)
     const params = {
       common: {
         app_id: this.appId,
       },
       business: {
-        language: this.language, //小语种可在控制台--语音听写(流式)--方言/语种处添加试用
+        language: this.language, // 小语种可在控制台--语音听写(流式)--方言/语种处添加试用
         domain: 'iat',
-        accent: this.accent, //中文方言可在控制台--语音听写(流式)--方言/语种处添加试用
+        accent: this.accent, // 中文方言可在控制台--语音听写(流式)--方言/语种处添加试用
         vad_eos: 5000,
         dwa: 'wpgs',
       },
@@ -282,7 +295,7 @@ const IatRecorder = class {
         return
       }
       if (this.audioData.length === 0) {
-        //点击暂停录音
+        // 点击暂停录音
        // console.log('自动关闭', this.status)
        if (this.status === 'end') {
          this.webSocket.send(
@@ -314,6 +327,7 @@ const IatRecorder = class {
       )
     }, 40)
   }
+
   result(resultData) {
     // 识别结束
     const jsonData = JSON.parse(resultData)
@@ -322,10 +336,10 @@ const IatRecorder = class {
       let str = ''
       const resultStr = ''
       const ws = data.ws
-      for (let i = 0; i < ws.length; i++) {
+      for (let i = 0; i < ws.length; i++)
         str = str + ws[i].cw[0].w
-      }
-      // console.log('识别的结果为:', str)
+
+      // console.log('', str)
       // 开启wpgs会有此字段(前提:在控制台开通动态修正功能)
       // 取值为 "apd"时表示该片结果是追加到前面的最终结果;取值为"rpl" 时表示替换前面的部分结果,替换范围为rg字段
       if (data.pgs) {
@@ -339,28 +353,29 @@ const IatRecorder = class {
         this.setResultText({
           resultTextTemp: this.resultText + str,
         })
-      } else {
+      }
+      else {
         this.setResultText({
           resultText: this.resultText + str,
         })
       }
     }
-    if (jsonData.code === 0 && jsonData.data.status === 2) {
+    if (jsonData.code === 0 && jsonData.data.status === 2)
       this.webSocket.close()
-    }
-    if (jsonData.code !== 0) {
+
+    if (jsonData.code !== 0)
       this.webSocket.close()
       // console.log(`${jsonData.code}:${jsonData.message}`)
-    }
   }
+
   start() {
     this.recorderStart()
     this.setResultText({ resultText: '', resultTextTemp: '' })
   }
+
   stop() {
     this.recorderStop()
   }
 }
 export default IatRecorder
-
diff --git a/src/utils/test.js b/src/utils/test.js
index a4715e6..38540e3 100644
--- a/src/utils/test.js
+++ b/src/utils/test.js
@@ -1,330 +1,350 @@
+import CryptoJS from 'crypto-js'
 const APPID = '2eda6c2e'
 const API_SECRET = 'MDEyMzE5YTc5YmQ5NjMwOTU1MWY4N2Y2'
 const API_KEY = '12ec1f9d113932575fc4b114a2f60ffd'
-import CryptoJS from 'crypto-js'
 // import Worker from './transcode.worker.js'
 // const transWorker = new Worker()
 const transWorker = new Worker(new URL('./transcode.worker.js', import.meta.url))
 console.log(transWorker)
-var startTime = ""
-var endTime = ""
+let startTime = ''
+let endTime = ''
 
-function getWebSocketUrl(){
+function getWebSocketUrl() {
   return new Promise((resolve, reject) => {
-    // 请求地址根据语种不同变化
-    var url = 'wss://iat-api.xfyun.cn/v2/iat'
-    var host = 'iat-api.xfyun.cn'
-    var apiKey = API_KEY
-    var apiSecret = API_SECRET
-    var date = new Date().toGMTString()
-    var algorithm = 'hmac-sha256'
-    var headers = 'host date request-line'
-    var signatureOrigin = `host: ${host}\ndate: ${date}\nGET /v2/iat HTTP/1.1`
-    var signatureSha = CryptoJS.HmacSHA256(signatureOrigin, apiSecret)
-    var signature = CryptoJS.enc.Base64.stringify(signatureSha)
-    var authorizationOrigin = `api_key="${apiKey}", algorithm="${algorithm}", headers="${headers}", signature="${signature}"`
-    var authorization = btoa(authorizationOrigin)
-    url = `${url}?authorization=${authorization}&date=${date}&host=${host}`
-    resolve(url)
-  })
+    // 请求地址根据语种不同变化
+    let url = 'wss://iat-api.xfyun.cn/v2/iat'
+    const host = 'iat-api.xfyun.cn'
+    const apiKey = API_KEY
+    const apiSecret = API_SECRET
+    const date = new Date().toGMTString()
+    const algorithm = 'hmac-sha256'
+    const headers = 'host date request-line'
+    const signatureOrigin = `host: ${host}\ndate: ${date}\nGET /v2/iat HTTP/1.1`
+    const signatureSha = CryptoJS.HmacSHA256(signatureOrigin, apiSecret)
+    const signature = CryptoJS.enc.Base64.stringify(signatureSha)
+    const authorizationOrigin = `api_key="${apiKey}", algorithm="${algorithm}", headers="${headers}", signature="${signature}"`
+    const authorization = btoa(authorizationOrigin)
+    url = `${url}?authorization=${authorization}&date=${date}&host=${host}`
+    console.log(date)
+    resolve(url)
+  })
 }
-const IatRecorder = class {
-  constructor({ language, accent, appId } = {}) {
-    let self = this
-    this.status = 'null'
-    this.language = language || 'zh_cn'
-    this.accent = accent || 'mandarin'
-    this.appId = appId || APPID
-    // 记录音频数据
-    this.audioData = []
-    // 记录听写结果
-    this.resultText = ''
-    // wpgs下的听写结果需要中间状态辅助记录
-    this.resultTextTemp = ''
-    transWorker.onmessage = function (event) {
-      // console.log("构造方法中",self.audioData)
-      self.audioData.push(...event.data)
-    }
-  }
-
-  // 修改录音听写状态
-  setStatus(status) {
-    this.onWillStatusChange && this.status !== status && this.onWillStatusChange(this.status, status)
-    this.status = status
-  }
-  setResultText({ resultText, resultTextTemp } = {}) {
-    this.onTextChange && this.onTextChange(resultTextTemp || resultText || '')
-    resultText !== undefined && (this.resultText = resultText)
-    resultTextTemp !== undefined && (this.resultTextTemp = resultTextTemp)
-  }
-  // 修改听写参数
-  setParams({ language, accent } = {}) {
-    language && (this.language = language)
-    accent && (this.accent = accent)
-  }
-  // 连接websocket
-  connectWebSocket() {
-    return getWebSocketUrl().then(url => {
-      let iatWS
-      if ('WebSocket' in window) {
-        iatWS = new WebSocket(url)
-      } else if ('MozWebSocket' in window) {
-        iatWS = new MozWebSocket(url)
-      } else {
-        alert('浏览器不支持WebSocket')
-        return
-      }
-      this.webSocket = iatWS
-      this.setStatus('init')
-      iatWS.onopen = e => {
-        this.setStatus('ing')
-        // 重新开始录音
-        setTimeout(() => {
-          this.webSocketSend()
-        }, 500)
-      }
-      iatWS.onmessage = e => {
-        this.result(e.data)
-      }
-      iatWS.onerror = e => {
-        this.recorderStop()
-      }
-      iatWS.onclose = e => {
-        endTime = Date.parse(new Date())
-        console.log("持续时间",endTime-startTime)
-        this.recorderStop()
-      }
-    })
-  }
-  // 初始化浏览器录音
-  recorderInit() {
-    navigator.getUserMedia =
-      navigator.getUserMedia ||
-      navigator.webkitGetUserMedia ||
-      navigator.mozGetUserMedia ||
-      navigator.msGetUserMedia
-
-    // 创建音频环境
-    try {
-      this.audioContext = new (window.AudioContext || window.webkitAudioContext)()
-      this.audioContext.resume()
-      if (!this.audioContext) {
-        alert('浏览器不支持webAudioApi相关接口')
-        return
-      }
-    } catch (e) {
-      if (!this.audioContext) {
-        alert('浏览器不支持webAudioApi相关接口')
-        return
-      }
-    }
-
-    // 获取浏览器录音权限
-    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
-      navigator.mediaDevices
-        .getUserMedia({
-          audio: true,
-          video: false,
-        })
-        .then(stream => {
-          getMediaSuccess(stream)
-        })
-        .catch(e => {
-          getMediaFail(e)
-        })
-    } else if (navigator.getUserMedia) {
-      navigator.getUserMedia(
-        {
-          audio: true,
-          video: false,
-        },
-        stream => {
-          getMediaSuccess(stream)
-        },
-        function(e) {
-          getMediaFail(e)
-        }
-      )
-    } else {
-      if (navigator.userAgent.toLowerCase().match(/chrome/) && location.origin.indexOf('https://') < 0) {
-        alert('chrome下获取浏览器录音功能,因为安全性问题,需要在localhost或127.0.0.1或https下才能获取权限')
-      } else {
-        alert('无法获取浏览器录音功能,请升级浏览器或使用chrome')
-      }
-      this.audioContext && this.audioContext.close()
-      return
-    }
-    // 获取浏览器录音权限成功的回调
-    let getMediaSuccess = stream => {
-      // 创建一个用于通过JavaScript直接处理音频
-      this.scriptProcessor = this.audioContext.createScriptProcessor(0, 1, 1)
-      this.scriptProcessor.onaudioprocess = e => {
-        // 去处理音频数据
-        if (this.status === 'ing') {
-          transWorker.postMessage(e.inputBuffer.getChannelData(0))
-          // this.audioData.push(e.inputBuffer.getChannelData(0))
-        }
-      }
-      // 创建一个新的MediaStreamAudioSourceNode 对象,使来自MediaStream的音频可以被播放和操作
-      this.mediaSource = this.audioContext.createMediaStreamSource(stream)
-      // 连接
-      this.mediaSource.connect(this.scriptProcessor)
-      this.scriptProcessor.connect(this.audioContext.destination)
-      this.connectWebSocket()
-    }
-
-    let getMediaFail = (e) => {
-      this.audioContext && this.audioContext.close()
-      this.audioContext = undefined
-      // 关闭websocket
-      if (this.webSocket && this.webSocket.readyState === 1) {
-        this.webSocket.close()
-      }
-    }
-  }
-  recorderStart() {
-    if (!this.audioContext) {
-      this.recorderInit()
-    } else {
-      this.audioContext.resume()
-      this.connectWebSocket()
-    }
-  }
-  // 暂停录音
-  recorderStop() {
-    // safari下suspend后再次resume录音内容将是空白,设置safari下不做suspend
-    if (!(/Safari/.test(navigator.userAgent) && !/Chrome/.test(navigator.userAgen))){
-      this.audioContext && this.audioContext.suspend()
-    }
-    this.setStatus('end')
-  }
-  // 处理音频数据
-  // transAudioData(audioData) {
-  //   audioData = transAudioData.transaction(audioData)
-  //   this.audioData.push(...audioData)
-  // }
-  // 对处理后的音频数据进行base64编码,
-  toBase64(buffer) {
-    var binary = ''
-    var bytes = new Uint8Array(buffer)
-    var len = bytes.byteLength
-    for (var i = 0; i < len; i++) {
-      binary += String.fromCharCode(bytes[i])
-    }
-    return window.btoa(binary)
-  }
-  // 向webSocket发送数据
-  webSocketSend() {
-    if (this.webSocket.readyState !== 1) {
-      return
-    }
-    let audioData = this.audioData.splice(0, 1280)
-    var params = {
-      common: {
-        app_id: this.appId,
-      },
-      business: {
-        language: this.language, //小语种可在控制台--语音听写(流式)--方言/语种处添加试用
-        domain: 'iat',
-        accent: this.accent, //中文方言可在控制台--语音听写(流式)--方言/语种处添加试用
-      },
-      data: {
-        status: 0,
-        format: 'audio/L16;rate=16000',
-        encoding: 'raw',
-        audio: this.toBase64(audioData),
-      },
-    }
-    console.log("参数language:",this.language)
-    console.log("参数accent:",this.accent)
-    this.webSocket.send(JSON.stringify(params))
-    startTime = Date.parse(new Date())
-    this.handlerInterval = setInterval(() => {
-      // websocket未连接
-      if (this.webSocket.readyState !== 1) {
-        console.log("websocket未连接")
-        this.audioData = []
-        clearInterval(this.handlerInterval)
-        return
-      }
-      if (this.audioData.length === 0) {
-        console.log("自动关闭",this.status)
-        if (this.status === 'end') {
-          this.webSocket.send(
-            JSON.stringify({
-              data: {
-                status: 2,
-                format: 'audio/L16;rate=16000',
-                encoding: 'raw',
-                audio: '',
-              },
-            })
-          )
-          this.audioData = []
-          clearInterval(this.handlerInterval)
-        }
-        return false
-      }
-      audioData = this.audioData.splice(0, 1280)
-      // 中间帧
-      this.webSocket.send(
-        JSON.stringify({
-          data: {
-            status: 1,
-            format: 'audio/L16;rate=16000',
-            encoding: 'raw',
-            audio: this.toBase64(audioData),
-          },
-        })
-      )
-    }, 40)
-  }
-  result(resultData) {
-    // 识别结束
-    let jsonData = JSON.parse(resultData)
-    if (jsonData.data && jsonData.data.result) {
-      let data = jsonData.data.result
-      let str = ''
-      let resultStr = ''
-      let ws = data.ws
-      for (let i = 0; i < ws.length; i++) {
-        str = str + ws[i].cw[0].w
-      }
-      console.log("识别的结果为:",str)
-      // 开启wpgs会有此字段(前提:在控制台开通动态修正功能)
-      // 取值为 "apd"时表示该片结果是追加到前面的最终结果;取值为"rpl" 时表示替换前面的部分结果,替换范围为rg字段
-      if (data.pgs) {
-        if (data.pgs === 'apd') {
-          // 将resultTextTemp同步给resultText
-          this.setResultText({
-            resultText: this.resultTextTemp,
-          })
-        }
-        // 将结果存储在resultTextTemp中
-        this.setResultText({
-          resultTextTemp: this.resultText + str,
-        })
-      } else {
-        this.setResultText({
-          resultText: this.resultText + str,
-        })
-      }
-    }
-    if (jsonData.code === 0 && jsonData.data.status === 2) {
-      this.webSocket.close()
-    }
-    if (jsonData.code !== 0) {
-      this.webSocket.close()
-      console.log(`${jsonData.code}:${jsonData.message}`)
-    }
-  }
-  start() {
-    this.recorderStart()
-    this.setResultText({ resultText: '', resultTextTemp: '' })
-  }
-  stop() {
-    this.recorderStop()
+const IatRecorder = class {
+  constructor({ language, accent, appId } = {}) {
+    const self = this
+    this.status = 'null'
+    this.language = language || 'zh_cn'
+    this.accent = accent || 'mandarin'
+    this.appId = appId || APPID
+    // 记录音频数据
+    this.audioData = []
+    // 记录听写结果
+    this.resultText = ''
+    // wpgs下的听写结果需要中间状态辅助记录
+    this.resultTextTemp = ''
+    transWorker.onmessage = function (event) {
+      // console.log("构造方法中",self.audioData)
+      self.audioData.push(...event.data)
     }
   }
-  export default IatRecorder
+  // 修改录音听写状态
+  setStatus(status) {
+    this.onWillStatusChange && this.status !== status && this.onWillStatusChange(this.status, status)
+    this.status = status
+  }
+
+  setResultText({ resultText, resultTextTemp } = {}) {
+    this.onTextChange && this.onTextChange(resultTextTemp || resultText || '')
+    resultText !== undefined && (this.resultText = resultText)
+    resultTextTemp !== undefined && (this.resultTextTemp = resultTextTemp)
+  }
+
+  // 修改听写参数
+  setParams({ language, accent } = {}) {
+    language && (this.language = language)
+    accent && (this.accent = accent)
+  }
+
+  // 连接websocket
+  connectWebSocket() {
+    return getWebSocketUrl().then((url) => {
+      let iatWS
+      if ('WebSocket' in window) {
+        iatWS = new WebSocket(url)
+      }
+      else if ('MozWebSocket' in window) {
+        iatWS = new MozWebSocket(url)
+      }
+      else {
+        alert('浏览器不支持WebSocket')
+        return
+      }
+      this.webSocket = iatWS
+      this.setStatus('init')
+      iatWS.onopen = (e) => {
+        this.setStatus('ing')
+        // 重新开始录音
+        setTimeout(() => {
+          this.webSocketSend()
+        }, 500)
+      }
+      iatWS.onmessage = (e) => {
+        this.result(e.data)
+      }
+      iatWS.onerror = (e) => {
+        this.recorderStop()
+      }
+      iatWS.onclose = (e) => {
+        console.log('关闭原因', e)
+        endTime = Date.parse(new Date())
+        console.log('持续时间', endTime - startTime)
+        this.recorderStop()
+      }
+    })
+  }
+
+  // 初始化浏览器录音
+  recorderInit() {
+    navigator.getUserMedia
+      = navigator.getUserMedia
+      || navigator.webkitGetUserMedia
+      || navigator.mozGetUserMedia
+      || navigator.msGetUserMedia
+
+    // 创建音频环境
+    try {
+      this.audioContext = new (window.AudioContext || window.webkitAudioContext)()
+      this.audioContext.resume()
+      if (!this.audioContext) {
+        alert('浏览器不支持webAudioApi相关接口')
+        return
+      }
+    }
+    catch (e) {
+      if (!this.audioContext) {
+        alert('浏览器不支持webAudioApi相关接口')
+        return
+      }
+    }
+
+    // 获取浏览器录音权限
+    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
+      navigator.mediaDevices
+        .getUserMedia({
+          audio: true,
+          video: false,
+        })
+        .then((stream) => {
+          getMediaSuccess(stream)
+        })
+        .catch((e) => {
+          getMediaFail(e)
+        })
+    }
+    else if (navigator.getUserMedia) {
+      navigator.getUserMedia(
+        {
+          audio: true,
+          video: false,
+        },
+        (stream) => {
+          getMediaSuccess(stream)
+        },
+        (e) => {
+          getMediaFail(e)
+        },
+      )
+    }
+    else {
+      if (navigator.userAgent.toLowerCase().match(/chrome/) && !location.origin.includes('https://'))
+        alert('chrome下获取浏览器录音功能,因为安全性问题,需要在localhost或127.0.0.1或https下才能获取权限')
+
+      else
+        alert('无法获取浏览器录音功能,请升级浏览器或使用chrome')
+
+      this.audioContext && this.audioContext.close()
+      return
+    }
+    // 获取浏览器录音权限成功的回调
+    let getMediaSuccess = (stream) => {
+      // 创建一个用于通过JavaScript直接处理音频
+      this.scriptProcessor = this.audioContext.createScriptProcessor(0, 1, 1)
+      this.scriptProcessor.onaudioprocess = (e) => {
+        // 去处理音频数据
+        if (this.status === 'ing')
+          transWorker.postMessage(e.inputBuffer.getChannelData(0))
+          // this.audioData.push(e.inputBuffer.getChannelData(0))
+      }
+      // 创建一个新的MediaStreamAudioSourceNode 对象,使来自MediaStream的音频可以被播放和操作
+      this.mediaSource = this.audioContext.createMediaStreamSource(stream)
+      // 连接
+      this.mediaSource.connect(this.scriptProcessor)
+      this.scriptProcessor.connect(this.audioContext.destination)
+      this.connectWebSocket()
+    }
+
+    let getMediaFail = (e) => {
+      this.audioContext && this.audioContext.close()
+      this.audioContext = undefined
+      // 关闭websocket
+      if (this.webSocket && this.webSocket.readyState === 1)
+        this.webSocket.close()
+    }
+  }
+
+  recorderStart() {
+    if (!this.audioContext) {
+      this.recorderInit()
+    }
+    else {
+      this.audioContext.resume()
+      this.connectWebSocket()
+    }
+  }
+
+  // 暂停录音
+  recorderStop() {
+    // safari下suspend后再次resume录音内容将是空白,设置safari下不做suspend
+    if (!(/Safari/.test(navigator.userAgent) && !/Chrome/.test(navigator.userAgent)))
+      this.audioContext && this.audioContext.suspend()
+
+    this.setStatus('end')
+  }
+
+  // 处理音频数据
+  transAudioData(audioData) {
+    audioData = transAudioData.transaction(audioData)
+    this.audioData.push(...audioData)
+  }
+
+  // 对处理后的音频数据进行base64编码,
+  toBase64(buffer) {
+    let binary = ''
+    const bytes = new Uint8Array(buffer)
+    const len = bytes.byteLength
+    for (let i = 0; i < len; i++)
+      binary += String.fromCharCode(bytes[i])
+
+    return window.btoa(binary)
+  }
+
+  // 向webSocket发送数据
+  webSocketSend() {
+    if (this.webSocket.readyState !== 1)
+      return
+
+    let audioData = this.audioData.splice(0, 1280)
+    const params = {
+      common: {
+        app_id: this.appId,
+      },
+      business: {
+        language: this.language, // 小语种可在控制台--语音听写(流式)--方言/语种处添加试用
+        domain: 'iat',
+        accent: this.accent, // 中文方言可在控制台--语音听写(流式)--方言/语种处添加试用
+      },
+      data: {
+        status: 0,
+        format: 'audio/L16;rate=16000',
+        encoding: 'raw',
+        audio: this.toBase64(audioData),
+      },
+    }
+    console.log('参数language:', this.language)
+    console.log('参数accent:', this.accent)
+    this.webSocket.send(JSON.stringify(params))
+    startTime = Date.parse(new Date())
+    this.handlerInterval = setInterval(() => {
+      // websocket未连接
+      if (this.webSocket.readyState !== 1) {
+        console.log('websocket未连接', this.webSocket)
+        this.audioData = []
+        clearInterval(this.handlerInterval)
+        return
+      }
+      if (this.audioData.length === 0) {
+        console.log('自动关闭', this.status)
+        if (this.status === 'end') {
+          this.webSocket.send(
+            JSON.stringify({
+              data: {
+                status: 2,
+                format: 'audio/L16;rate=16000',
+                encoding: 'raw',
+                audio: '',
+              },
+            }),
+          )
+          this.audioData = []
+          clearInterval(this.handlerInterval)
+        }
+        return false
+      }
+      audioData = this.audioData.splice(0, 1280)
+      // 中间帧
+      this.webSocket.send(
+        JSON.stringify({
+          data: {
+            status: 1,
+            format: 'audio/L16;rate=16000',
+            encoding: 'raw',
+            audio: this.toBase64(audioData),
+          },
+        }),
+      )
+    }, 40)
+  }
+
+  result(resultData) {
+    // 识别结束
+    const jsonData = JSON.parse(resultData)
+    if (jsonData.data && jsonData.data.result) {
+      const data = jsonData.data.result
+      let str = ''
+      const resultStr = ''
+      const ws = data.ws
+      for (let i = 0; i < ws.length; i++)
+        str = str + ws[i].cw[0].w
+
+      console.log('识别的结果为:', str)
+      // 开启wpgs会有此字段(前提:在控制台开通动态修正功能)
+      // 取值为 "apd"时表示该片结果是追加到前面的最终结果;取值为"rpl" 时表示替换前面的部分结果,替换范围为rg字段
+      if (data.pgs) {
+        if (data.pgs === 'apd') {
+          // 将resultTextTemp同步给resultText
+          this.setResultText({
+            resultText: this.resultTextTemp,
+          })
+        }
+        // 将结果存储在resultTextTemp中
+        this.setResultText({
+          resultTextTemp: this.resultText + str,
+        })
+      }
+      else {
+        this.setResultText({
+          resultText: this.resultText + str,
+        })
+      }
+    }
+    if (jsonData.code === 0 && jsonData.data.status === 2)
+      this.webSocket.close()
+
+    if (jsonData.code !== 0) {
+      this.webSocket.close()
+      console.log(`${jsonData.code}:${jsonData.message}`)
+    }
+  }
+
+  start() {
+    this.recorderStart()
+    this.setResultText({ resultText: '', resultTextTemp: '' })
+  }
+
+  stop() {
+    this.recorderStop()
+  }
+}
+
+export default IatRecorder
diff --git a/src/utils/transcode.worker.js b/src/utils/transcode.worker.js
index e4f0acb..e8890ab 100644
--- a/src/utils/transcode.worker.js
+++ b/src/utils/transcode.worker.js
@@ -1,38 +1,78 @@
-self.onmessage = function(e){
-  transAudioData.transcode(e.data)
+// self.onmessage = function(e){
+//   transAudioData.transcode(e.data)
+// }
+// let transAudioData = {
+//   transcode(audioData) {
+//     let output = transAudioData.to16kHz(audioData)
+//     output = transAudioData.to16BitPCM(output)
+//     output = Array.from(new Uint8Array(output.buffer))
+//     self.postMessage(output)
+//   },
+//   to16kHz(audioData) {
+//     var data = new Float32Array(audioData)
+//     var fitCount = Math.round(data.length * (16000 / 44100))
+//     var newData = new Float32Array(fitCount)
+//     var springFactor = (data.length - 1) / (fitCount - 1)
+//     newData[0] = data[0]
+//     for (let i = 1; i < fitCount - 1; i++) {
+//       var tmp = i * springFactor
+//       var before = Math.floor(tmp).toFixed()
+//       var after = Math.ceil(tmp).toFixed()
+//       var atPoint = tmp - before
+//       newData[i] = data[before] + (data[after] - data[before]) * atPoint
+//     }
+//     newData[fitCount - 1] = data[data.length - 1]
+//     return newData
+//   },
+//   to16BitPCM(input) {
+//     var dataLength = input.length * (16 / 8)
+//     var dataBuffer = new ArrayBuffer(dataLength)
+//     var dataView = new DataView(dataBuffer)
+//     var offset = 0
+//     for (var i = 0; i < input.length; i++, offset += 2) {
+//       var s = Math.max(-1, Math.min(1, input[i]))
+//       dataView.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true)
+//     }
+//     return dataView
+//   },
+// }
+
+self.onmessage = function (e) {
+  transAudioData.transcode(e.data)
 }
-let transAudioData = {
-  transcode(audioData) {
-    let output = transAudioData.to16kHz(audioData)
-    output = transAudioData.to16BitPCM(output)
-    output = Array.from(new Uint8Array(output.buffer))
-    self.postMessage(output)
-  },
-  to16kHz(audioData) {
-    var data = new Float32Array(audioData)
-    var fitCount = Math.round(data.length * (16000 / 44100))
-    var newData = new Float32Array(fitCount)
-    var springFactor = (data.length - 1) / (fitCount - 1)
-    newData[0] = data[0]
-    for (let i = 1; i < fitCount - 1; i++) {
-      var tmp = i * springFactor
-      var before = Math.floor(tmp).toFixed()
-      var after = Math.ceil(tmp).toFixed()
-      var atPoint = tmp - before
-      newData[i] = data[before] + (data[after] - data[before]) * atPoint
-    }
-    newData[fitCount - 1] = data[data.length - 1]
-    return newData
-  },
-  to16BitPCM(input) {
-    var dataLength = input.length * (16 / 8)
-    var dataBuffer = new ArrayBuffer(dataLength)
-    var dataView = new DataView(dataBuffer)
-    var offset = 0
-    for (var i = 0; i < input.length; i++, offset += 2) {
-      var s = Math.max(-1, Math.min(1, input[i]))
-      dataView.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true)
-    }
-    return dataView
-  },
+const transAudioData = {
+  transcode(audioData) {
+    let output = transAudioData.to16kHz(audioData)
+    output = transAudioData.to16BitPCM(output)
+    output = Array.from(new Uint8Array(output.buffer))
+    self.postMessage(output)
+    // return output
+  },
+  to16kHz(audioData) {
+    const data = new Float32Array(audioData)
+    const fitCount = Math.round(data.length * (16000 / 44100))
+    const newData = new Float32Array(fitCount)
+    const springFactor = (data.length - 1) / (fitCount - 1)
+    newData[0] = data[0]
+    for (let i = 1; i < fitCount - 1; i++) {
+      const tmp = i * springFactor
+      const before = Math.floor(tmp).toFixed()
+      const after = Math.ceil(tmp).toFixed()
+      const atPoint = tmp - before
+      newData[i] = data[before] + (data[after] - data[before]) * atPoint
+    }
+    newData[fitCount - 1] = data[data.length - 1]
+    return newData
+  },
+  to16BitPCM(input) {
+    const dataLength = input.length * (16 / 8)
+    const dataBuffer = new ArrayBuffer(dataLength)
+    const dataView = new DataView(dataBuffer)
+    let offset = 0
+    for (let i = 0; i < input.length; i++, offset += 2) {
+      const s = Math.max(-1, Math.min(1, input[i]))
+      dataView.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true)
+    }
+    return dataView
+  },
 }
diff --git a/src/views/chat/index.vue b/src/views/chat/index.vue
index 29b147d..bf9a92e 100644
--- a/src/views/chat/index.vue
+++ b/src/views/chat/index.vue
@@ -1,123 +1,127 @@
-
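
Editor's note (not part of the diff): a minimal usage sketch of how a consumer could wire the pieces added above together — the recorder's `onTextChange` callback feeding the new persisted text store. The import paths (`@/` alias) and the surrounding component context are assumptions for illustration only; the real wiring lives in the truncated `src/views/chat/index.vue` change.

```ts
// Hypothetical consumer — illustrative sketch, not taken from this PR's Vue changes.
import IatRecorder from '@/utils/latRecorder'
import { useTextStore } from '@/store/modules/text'

const textStore = useTextStore()
const recorder = new IatRecorder({ language: 'zh_cn', accent: 'mandarin' })

// Stream dictation results into the persisted text store as they arrive.
// `pormat` is the field name declared in TextInfo (helper.ts above).
recorder.onTextChange = (text: string) => {
  textStore.updateTextInfo({ pormat: text })
}

recorder.start() // request the microphone and open the dictation WebSocket
// ...later, e.g. when the user releases the mic button:
recorder.stop()
```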