@@ -14,6 +14,10 @@ import { evaluatingData } from "/src/view/evaluating";
import { cloudToggleState } from "/src/helpers/midiPlay"
import { storeData } from "/src/store";
import { handleStartTick } from "../tick";
+import Crunker from "/src/utils/crunker"
+import tickMp3 from "/src/assets/tick.wav"
+import tockMp3 from "/src/assets/tock.wav"
+import { metronomeData } from "/src/helpers/metronome";
export const audioData = reactive({
songEle: null as HTMLAudioElement | null, // native <audio> element
@@ -334,6 +338,43 @@ export default defineComponent({
function loadBeatAudio(){
return Promise.all([createAudio(state.beatSong.music), createAudio(state.beatSong.accompany), createAudio(state.beatSong.fanSong), createAudio(state.beatSong.banSong), createAudio(state.beatSong.mingSong), createAudio(state.beatSong.mingSongGirl)])
}
+ // Synthesize the metronome-enabled audio resources
+ async function mergeBeatAudio(){
+ let beatMusic, beatAccompany
+ console.time("audio synthesis time")
+ try{
+ const crunker = new Crunker()
+ console.time("audio fetch time")
+ const [musicBuff, accompanyBuff, tickBuff, tockBuff] = await crunker.fetchAudio(state.music?`${state.music}?v=${Date.now()}`:null, state.accompany?`${state.accompany}?v=${Date.now()}`:null, tickMp3, tockMp3)
+ console.timeEnd("audio fetch time")
+ // Calculate the leading silence in each track
+ const silenceDuration = musicBuff&&!state.isEvxml ? crunker.calculateSilenceDuration(musicBuff) : 0
+ const silenceBgDuration = accompanyBuff&&!state.isEvxml ? crunker.calculateSilenceDuration(accompanyBuff) : 0
+ console.log(`leading silence: ${silenceDuration}; ${silenceBgDuration}`)
+ const beats:AudioBuffer[] = []
+ const beatsTime:number[] = []
+ const beatsBgTime:number[] = []
+ metronomeData.metroMeasure.map(measures=>{
+ measures.map((item:any)=>{
+ beats.push(item.index===0?tickBuff!:tockBuff!)
+ beatsTime.push(item.time + silenceDuration) // offset the beat time computed from the xml by the leading silence
+ beatsBgTime.push(item.time + silenceBgDuration) // same offset for the accompaniment; unused when there is no accompaniment track
+ })
+ })
+ console.time("audio merge time")
+ const musicBuffMeg = musicBuff && crunker.mergeAudioBuffers([musicBuff,...beats],[0,...beatsTime])
+ const accompanyBuffMeg = accompanyBuff && crunker.mergeAudioBuffers([accompanyBuff,...beats],[0,...beatsBgTime])
+ console.timeEnd("audio merge time")
+ console.time("audio element creation time")
+ beatMusic = musicBuffMeg && crunker.exportAudioElement(musicBuffMeg)
+ beatAccompany = accompanyBuffMeg && crunker.exportAudioElement(accompanyBuffMeg)
+ console.timeEnd("audio element creation time")
+ }catch(err){
+ console.log(err)
+ }
+ console.timeEnd("audio synthesis time")
+ return [beatMusic, beatAccompany]
+ }
onMounted(async () => {
// Skip the audio loading logic in preview mode
if(state.isPreView){
@@ -384,7 +425,9 @@ export default defineComponent({
mingSongGirl.addEventListener("ended", onEnded);
}
// Handle the metronome-enabled audio sources
- const [beatMusic, beatAccompany, beatFanSong, beatBanSong, beatMingSong, beatMingSongGirl] = await loadBeatAudio()
+ //const [beatMusic, beatAccompany, beatFanSong, beatBanSong, beatMingSong, beatMingSongGirl] = await loadBeatAudio()
+ // Synthesize the metronome on the client instead
+ const [beatMusic, beatAccompany, beatFanSong, beatBanSong, beatMingSong, beatMingSongGirl] = await mergeBeatAudio()
Object.assign(audioData.songCollection, {
beatSongEle:beatMusic,
beatBackgroundEle:beatAccompany,