瀏覽代碼

节拍器 客户端 合成

黄琪勇 9 月之前
父節點
當前提交
faa891b4bf
共有 5 個文件被更改,包括 57 次插入和 17 次刪除
  1. 二進制
      src/assets/tick.wav
  2. 二進制
      src/assets/tock.wav
  3. +2 -1
      src/state.ts
  4. +11 -15
      src/utils/crunker.ts
  5. +44 -1
      src/view/audio-list/index.tsx

二進制
src/assets/tick.wav


二進制
src/assets/tock.wav


+ 2 - 1
src/state.ts

@@ -1568,8 +1568,9 @@ function initMusicSource(data: any, tracks: string[], partIndex: number, workRec
     state.mingSong = fanSongObj?.solmizationFileUrl
     state.mingSongGirl = fanSongObj?.femaleSolmizationFileUrl
   }
+   /*  目前 管乐迷没有用到 后台生成的节拍器 */
   // 当使用节拍器的时候才加载节拍器音频
-  if(state.isMixBeat) {
+  if(state.isMixBeat && false) {
     Object.assign(state.beatSong, {
       music: musicObj?.audioBeatMixUrl,
       accompany: accompanyObj?.audioBeatMixUrl,

+ 11 - 15
src/utils/crunker.ts

@@ -3,7 +3,7 @@ interface CrunkerConstructorOptions {
    concurrentNetworkRequests: number
 }
 
-type CrunkerInputTypes = string | File | Blob
+type CrunkerInputTypes = string | File | Blob | undefined
 
 export default class Crunker {
    private readonly _sampleRate: number
@@ -23,8 +23,8 @@ export default class Crunker {
    /**
     *转换url等类型为buffer
     */
-   async fetchAudio(...filepaths: CrunkerInputTypes[]): Promise<AudioBuffer[]> {
-      const buffers: AudioBuffer[] = []
+   async fetchAudio(...filepaths: CrunkerInputTypes[]): Promise<(AudioBuffer | undefined)[]> {
+      const buffers: (AudioBuffer | undefined)[] = []
       const groups = Math.ceil(filepaths.length / this._concurrentNetworkRequests)
       for (let i = 0; i < groups; i++) {
          const group = filepaths.slice(i * this._concurrentNetworkRequests, (i + 1) * this._concurrentNetworkRequests)
@@ -32,9 +32,12 @@ export default class Crunker {
       }
       return buffers
    }
-   private async _fetchAudio(...filepaths: CrunkerInputTypes[]): Promise<AudioBuffer[]> {
+   private async _fetchAudio(...filepaths: CrunkerInputTypes[]): Promise<(AudioBuffer | undefined)[]> {
       return await Promise.all(
          filepaths.map(async filepath => {
+            if (!filepath) {
+               return Promise.resolve(undefined)
+            }
             let buffer: ArrayBuffer
             if (filepath instanceof File || filepath instanceof Blob) {
                buffer = await filepath.arrayBuffer()
@@ -74,24 +77,17 @@ export default class Crunker {
       }
       const output = this._context.createBuffer(this._maxNumberOfChannels(buffers), this._sampleRate * this._maxDuration(buffers), this._sampleRate)
       buffers.forEach((buffer, index) => {
+         const offsetNum = Math.round(times[index] * this._sampleRate) //时间偏差
          for (let channelNumber = 0; channelNumber < buffer.numberOfChannels; channelNumber++) {
             const outputData = output.getChannelData(channelNumber)
             const bufferData = buffer.getChannelData(channelNumber)
-            const offsetNum = Math.round(times[index] * this._sampleRate) //时间偏差
-            for (let i = buffer.getChannelData(channelNumber).length - 1; i >= 0; i--) {
-               outputData[i + offsetNum] += bufferData[i]
+            for (let i = bufferData.length - 1; i >= 0; i--) {
                // 当合并大于1或者小于-1的时候可能会爆音  所以这里取最大值和最小值
-               if (outputData[i + offsetNum] > 1) {
-                  outputData[i + offsetNum] = 1
-               }
-               if (outputData[i + offsetNum] < -1) {
-                  outputData[i + offsetNum] = -1
-               }
+               const combinedValue = outputData[i + offsetNum] + bufferData[i]
+               outputData[i + offsetNum] = Math.max(-1, Math.min(1, combinedValue))
             }
-            output.getChannelData(channelNumber).set(outputData)
          }
       })
-
       return output
    }
    /**

+ 44 - 1
src/view/audio-list/index.tsx

@@ -14,6 +14,10 @@ import { evaluatingData } from "/src/view/evaluating";
 import { cloudToggleState } from "/src/helpers/midiPlay"
 import { storeData } from "/src/store";
 import { handleStartTick } from "../tick";
+import Crunker from "/src/utils/crunker"
+import tickMp3 from "/src/assets/tick.wav"
+import tockMp3 from "/src/assets/tock.wav"
+import { metronomeData } from "/src/helpers/metronome";
 
 export const audioData = reactive({
 	songEle: null as HTMLAudioElement | null, // 原生
@@ -334,6 +338,43 @@ export default defineComponent({
 		function loadBeatAudio(){
 			return Promise.all([createAudio(state.beatSong.music), createAudio(state.beatSong.accompany), createAudio(state.beatSong.fanSong), createAudio(state.beatSong.banSong), createAudio(state.beatSong.mingSong), createAudio(state.beatSong.mingSongGirl)])
 		}
+		// 合成节拍器资源
+		async function mergeBeatAudio(){
+			let beatMusic, beatAccompany
+			console.time("音频合成时间")
+			try{
+				const crunker = new Crunker()
+				console.time("音频加载时间")
+				const [musicBuff, accompanyBuff, tickBuff, tockBuff] = await crunker.fetchAudio(state.music?`${state.music}?v=${Date.now()}`:null, state.accompany?`${state.accompany}?v=${Date.now()}`:null, tickMp3, tockMp3)
+				console.timeEnd("音频加载时间")
+				// 计算音频空白时间
+				const silenceDuration = musicBuff&&!state.isEvxml ? crunker.calculateSilenceDuration(musicBuff) : 0
+				const silenceBgDuration = accompanyBuff&&!state.isEvxml ? crunker.calculateSilenceDuration(accompanyBuff) : 0
+				console.log(`音频空白时间:${silenceDuration};${silenceBgDuration}`)
+				const beats:AudioBuffer[] = []
+				const beatsTime:number[] = []
+				const beatsBgTime:number[] = []
+				metronomeData.metroMeasure.map(measures=>{
+					measures.map((item:any)=>{
+						beats.push(item.index===0?tickBuff!:tockBuff!)
+						beatsTime.push(item.time + silenceDuration) // xml 计算的时候 加上空白的时间
+						beatsBgTime.push(item.time + silenceBgDuration) // xml 计算的时候 加上空白的时间 没有背景不赋值
+					})
+				})
+				console.time("音频合并时间")
+				const musicBuffMeg = musicBuff && crunker.mergeAudioBuffers([musicBuff,...beats],[0,...beatsTime])
+				const accompanyBuffMeg = accompanyBuff && crunker.mergeAudioBuffers([accompanyBuff,...beats],[0,...beatsBgTime])
+				console.timeEnd("音频合并时间")
+				console.time("音频audioDom生成时间")
+				beatMusic = musicBuffMeg && crunker.exportAudioElement(musicBuffMeg)
+				beatAccompany = accompanyBuffMeg && crunker.exportAudioElement(accompanyBuffMeg)
+				console.timeEnd("音频audioDom生成时间")
+			}catch(err){
+				console.log(err)
+			}
+			console.timeEnd("音频合成时间")
+			return [beatMusic, beatAccompany]
+		}
 		onMounted(async () => {
 			// 预览的时候不走音频加载逻辑
 			if(state.isPreView){
@@ -384,7 +425,9 @@ export default defineComponent({
 					mingSongGirl.addEventListener("ended", onEnded);
 				}
 				// 处理带节拍器的音源
-				const [beatMusic, beatAccompany, beatFanSong, beatBanSong, beatMingSong, beatMingSongGirl] = await loadBeatAudio()
+				//const [beatMusic, beatAccompany, beatFanSong, beatBanSong, beatMingSong, beatMingSongGirl] = await loadBeatAudio()
+				// 客户端合成节拍器
+				const [beatMusic, beatAccompany, beatFanSong, beatBanSong, beatMingSong, beatMingSongGirl] = await mergeBeatAudio()
 				Object.assign(audioData.songCollection, {
 					beatSongEle:beatMusic,
 					beatBackgroundEle:beatAccompany,