@@ -18,6 +18,7 @@ import Crunker from "/src/utils/crunker"
import tickMp3 from "/src/assets/tick.wav"
import tockMp3 from "/src/assets/tock.wav"
import { metronomeData } from "/src/helpers/metronome";
+import { showToast } from "vant"

export const audioData = reactive({
songEle: null as HTMLAudioElement | null, // native audio element
@@ -44,7 +45,10 @@ export const audioData = reactive({
mingSongGirlEle: null as HTMLAudioElement | null,
beatMingSongEle: null as HTMLAudioElement | null,
beatMingSongGirlEle: null as HTMLAudioElement | null
- }
+ },
+ combineIndex: -1, // index of the currently playing full-score audio track
+ combineMusics: {} as Record<string, any>, // audio URLs
+ combineMusicEles:[] as {key:number, value:HTMLAudioElement, beatValue:HTMLAudioElement|null}[] // cached audio elements; the oldest is removed once more than 4 are stored
});
const midiRef = ref();
/** Play or pause */
@@ -207,8 +211,78 @@ export const changeMingSongType = () =>{
}
}

-// Handle loading the metronome audio
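+// Creates an HTMLAudioElement for the given URL; resolves to null when no src is provided or the audio fails to load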
+const createAudio = (src?: string): Promise<HTMLAudioElement | null> => {
+ if(!src){
+ return Promise.resolve(null)
+ }
+ return new Promise((resolve) => {
+ const a = new Audio(src + '?v=' + Date.now());
+ a.onloadedmetadata = () => {
+ resolve(a);
+ };
+ a.onerror = () => {
+ resolve(null);
+ };
+ // If the user switches to another browser tab before the resource has loaded, the browser may block loading, so onloadedmetadata never fires and the page stays stuck in loading; handle that case here
+ if (document.visibilityState === 'visible') {
+ a.load();
+ } else {
+ const onVisibilityChange = () => {
+ if (document.visibilityState === 'visible') {
+ document.removeEventListener('visibilitychange', onVisibilityChange);
+ a.load();
+ }
+ };
+ document.addEventListener('visibilitychange', onVisibilityChange);
+ }
+ });
+};
+
+// Merge the metronome (tick/tock) resources into the music audio
let CrunkerInstance: Crunker
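+// Returns an HTMLAudioElement with the metronome clicks mixed in, or undefined when mixing is disabled, no music URL is given, or merging fails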
+async function mergeBeatAudio(music?:string){
+ let beatMusic
+ if(!state.isMixBeat) {
+ return beatMusic
+ }
+ if(!music){
+ return beatMusic
+ }
+ console.time("音频合成时间")
+ try{
+ /* Audio mixing */
+ if(!CrunkerInstance){
+ CrunkerInstance = new Crunker()
+ }
+ console.time("音频加载时间")
+ const [musicBuff, tickBuff, tockBuff] = await CrunkerInstance.fetchAudio(music?`${music}?v=${Date.now()}`:undefined, tickMp3, tockMp3)
+ console.timeEnd("音频加载时间")
+ // Calculate the leading silence duration of the audio
+ const silenceDuration = musicBuff&&!state.isEvxml ? CrunkerInstance.calculateSilenceDuration(musicBuff) : 0
+ console.log(`音频空白时间:${silenceDuration}`)
+ const beats:AudioBuffer[] = []
+ const beatsTime:number[] = []
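+ // Collect one tick/tock buffer and its start time for every beat of every measure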
+ metronomeData.metroMeasure.map(measures=>{
+ measures.map((item:any)=>{
+ beats.push(item.index===0?tickBuff!:tockBuff!)
+ beatsTime.push(item.time + silenceDuration) // add the leading-silence offset to the beat times calculated from the XML
+ })
+ })
+ console.time("音频合并时间")
+ const musicBuffMeg = musicBuff && CrunkerInstance.mergeAudioBuffers([musicBuff,...beats],[0,...beatsTime])
+ console.timeEnd("音频合并时间")
+ console.time("音频audioDom生成时间")
+ beatMusic = musicBuffMeg && CrunkerInstance.exportAudioElement(musicBuffMeg)
+ console.timeEnd("音频audioDom生成时间")
+ }catch(err){
+ console.log(err)
+ }
+ console.timeEnd("音频合成时间")
+ return beatMusic
+}
+
+
+// Handle loading the metronome audio
export const handleLoadBeatMusic = async () => {
if(metronomeData.disable) {
return
@@ -251,30 +325,7 @@ export const handleLoadBeatMusic = async () => {
}
state.loadingText = "音频资源加载中,请稍后…"
state.isLoading = true
- /* Audio mixing */
- if(!CrunkerInstance){
- CrunkerInstance = new Crunker()
- }
- console.time("音频加载时间")
- const [audioBuffer, tickBuff, tockBuff] = await CrunkerInstance.fetchAudio(`${currentMusic}?v=${Date.now()}`, tickMp3, tockMp3)
- console.timeEnd("音频加载时间")
- // Calculate the leading silence duration of the audio
- const silenceDuration = audioBuffer&&!state.isEvxml ? CrunkerInstance.calculateSilenceDuration(audioBuffer) : 0
- console.log(`音频空白时间:${silenceDuration}`)
- const beats:AudioBuffer[] = []
- const beatsTime:number[] = []
- metronomeData.metroMeasure.map(measures=>{
- measures.map((item:any)=>{
- beats.push(item.index===0?tickBuff!:tockBuff!)
- beatsTime.push(item.time+silenceDuration) // only add the leading silence for pieces that are not from 妙极客
- })
- })
- console.time("音频合并时间")
- const musicBuffMeg = audioBuffer && CrunkerInstance.mergeAudioBuffers([audioBuffer!,...beats],[0,...beatsTime])
- console.timeEnd("音频合并时间")
- console.time("音频audioDom生成时间")
- const musicAudio = musicBuffMeg && CrunkerInstance.exportAudioElement(musicBuffMeg) as any
- console.timeEnd("音频audioDom生成时间")
+ const musicAudio = await mergeBeatAudio(currentMusic) as any
const playEleObj = {
"play_music":"beatSongEle",
"play_background":"beatBackgroundEle",
@@ -318,6 +369,78 @@ export const handleLoadBeatMusic = async () => {
state.isLoading = false
}

+// Switch to the selected voice track and configure the current audio
+export async function changeCombineAudio (combineIndex: number){
+ // Clicking the currently selected track again deselects the original sound
+ if(combineIndex === audioData.combineIndex){
+ audioData.combineIndex = -1
+ state.playSource = "background"
+ state.music = ""
+ // When the metronome is enabled, re-merge the metronome audio after switching back to the accompaniment
+ await handleLoadBeatMusic()
+ // When there is no accompaniment (background) audio file
+ if(!state.accompany) {
+ state.noMusicSource = true
+ }
+ return
+ }
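+ // Otherwise switch to the selected track's original-sound audio, creating and caching the elements if needed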
+ state.loadingText = "音频资源加载中,请稍后…";
+ state.isLoading = true;
+ const musicUrl = audioData.combineMusics[combineIndex]
+ // Use the cached elements if present, otherwise load them
+ const cacheMusicIndex = audioData.combineMusicEles.findIndex(ele => {
+ return ele.key === combineIndex
+ })
+ const cacheMusic = audioData.combineMusicEles[cacheMusicIndex]
+ if(cacheMusic?.value){
+ audioData.songCollection.songEle = cacheMusic.value
+ audioData.songCollection.beatSongEle = cacheMusic.beatValue
+ // After a cache hit, move the entry to the end of the list so that eviction follows the order of last use
+ const itemMusic = audioData.combineMusicEles.splice(cacheMusicIndex, 1)
+ audioData.combineMusicEles.push(...itemMusic)
+ }else{
+ const music = await createAudio(musicUrl)
+ const beatMusic = await mergeBeatAudio(musicUrl) as any
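+ // beatMusic is only produced when metronome mixing is enabled, so it may be empty here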
+ // When there is no accompaniment (background) audio element, the playback events need to be bound here
+ if(!audioData.songCollection.backgroundEle){
+ if(music){
+ music.addEventListener("play", onPlay);
+ music.addEventListener("ended", onEnded);
+ }
+ }
+ // Unbind the playback events from the accompaniment beat audio and bind them to the current original-sound beat audio, so the events still fire even when there is no accompaniment beat track
+ if(beatMusic){
+ audioData.songCollection.beatBackgroundEle?.removeEventListener("play", onPlay)
+ audioData.songCollection.beatBackgroundEle?.removeEventListener("ended", onEnded)
+ beatMusic.addEventListener("play", onPlay);
+ beatMusic.addEventListener("ended", onEnded);
+ }
+ audioData.combineMusicEles.push({
+ key: combineIndex,
+ value: music!,
+ beatValue: beatMusic!
+ })
+ // Keep at most 4 cached entries; when the limit is exceeded, drop the oldest one
+ if(audioData.combineMusicEles.length > 4){
+ audioData.combineMusicEles.splice(0,1)
+ }
+ audioData.songCollection.songEle = music
+ audioData.songCollection.beatSongEle = beatMusic!
+ }
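+ // Record the newly selected track and switch the play source to the original sound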
+ audioData.combineIndex = combineIndex
+ state.music = musicUrl
+ state.playSource = "music"
+ // When there is no accompaniment (background) audio file
+ if(!state.accompany) {
+ state.noMusicSource = false
+ }
+ showToast({
+ message: "已开启原声",
+ position: "top",
+ className: "selectionToast",
+ });
+ state.isLoading = false;
+}
export default defineComponent({
name: "audio-list",
setup() {
@@ -352,33 +475,6 @@ export default defineComponent({
}
);

- const createAudio = (src?: string): Promise<HTMLAudioElement | null> => {
- if(!src){
- return Promise.resolve(null)
- }
- return new Promise((resolve) => {
- const a = new Audio(src + '?v=' + Date.now());
- a.onloadedmetadata = () => {
- resolve(a);
- };
- a.onerror = () => {
- resolve(null);
- };
- // If the user switches to another browser tab before the resource has loaded, the browser may block loading, so onloadedmetadata never fires and the page stays stuck in loading; handle that case here
- if (document.visibilityState === 'visible') {
- a.load();
- } else {
- const onVisibilityChange = () => {
- if (document.visibilityState === 'visible') {
- document.removeEventListener('visibilitychange', onVisibilityChange);
- a.load();
- }
- };
- document.addEventListener('visibilitychange', onVisibilityChange);
- }
- });
- };
-
/**
* #11046
* The click sound and the vanishing dot can get out of sync, likely because some Android phones do not start playback immediately, so wait until the audio reports playback progress before starting the metronome