TIANYONG 10 months ago
parent
commit
3a4e07ee1e

BIN
src/assets/tick.mp3


BIN
src/assets/tock.mp3


+ 7 - 3
src/helpers/metronome.ts

@@ -9,8 +9,8 @@ import { browser } from "/src/utils/index";
 import state from "/src/state";
 import { Howl } from "howler";
 import tockAndTick from "/src/constant/tockAndTick.json";
-import tickWav from "/src/assets/tick.wav";
-import tockWav from "/src/assets/tock.wav";
+import tickWav from "/src/assets/tick.mp3";
+import tockWav from "/src/assets/tock.mp3";
 
 type IOptions = {
 	speed: number;
@@ -119,7 +119,7 @@ class Metronome {
 		// 	this.source2 = this.loadAudio2();
 		// }
 		// metronomeData.initPlayerState = true;
-
+		if(metronomeData.initPlayerState) return
 		Promise.all([this.createAudio(tickWav), this.createAudio(tockWav)]).then(
 			([tick, tock]) => {
 				if (tick) {
@@ -179,6 +179,10 @@ class Metronome {
 	};
 	// Play
 	playAudio = () => {
+		/* When the beat-merged audio is ready, the mode is practise and playback is not MIDI, skip the metronome's own playback */
+		if (state.audioBetaDone && state.modeType === "practise" && state.playMode !== "MIDI") {
+			return
+		}
 		if (!metronomeData.initPlayerState || state.playState === 'paused') return;
 		const beatVolume = state.setting.beatVolume / 100
 		// this.source = metronomeData.activeMetro?.index === 0 ? this.source1 : this.source2;

+ 27 - 14
src/page-instrument/header-top/index.tsx

@@ -53,7 +53,7 @@ export const headTopData = reactive({
       // If the latency-check resources are still loading, show a hint
       if (!evaluatingData.jsonLoadDone) {
         evaluatingData.jsonLoading = true;
-        showToast("资源加载中,请稍后");
+        state.audioDone && showToast("资源加载中,请稍后");  // only prompt after the audio resources have finished loading
         return;
       }
 
@@ -132,6 +132,17 @@ export default defineComponent({
         display: true,
       };
     });
+    /** Metronome button */
+    const metronomeBtn = computed(() => {
+      // hide while not in show mode (e.g. during mode selection)
+      if (headTopData.modeType !== "show") return { display: false, disabled: true };
+      // disabled while audio is playing
+      if (state.playState === "play") return { display: true, disabled: true };
+      return {
+        disabled: false,
+        display: true,
+      };
+    });
 
     /** Fingering button */
     const fingeringBtn = computed(() => {
@@ -479,19 +490,21 @@ export default defineComponent({
               <img style={{ display: state.playSource === "music" ? "none" : "" }} class={styles.iconBtn} src={headImg(`background.svg`)} />
               <span>{state.playSource === "music" ? "原声" : "伴奏"}</span>
             </div>
-            {state.modeType !== "evaluating" && (
-              <div
-                class={[styles.btn]}
-                onClick={async () => {
-                  metronomeData.disable = !metronomeData.disable;
-                  metronomeData.metro?.initPlayer();
-                }}
-              >
-                <img style={{ display: metronomeData.disable ? "block" : "none" }} class={styles.iconBtn} src={headImg("tickoff.svg")} />
-                <img style={{ display: !metronomeData.disable ? "block" : "none" }} class={styles.iconBtn} src={headImg("tickon.svg")} />
-                <span style={{ whiteSpace: "nowrap" }}>节拍器</span>
-              </div>
-            )}
+            {
+              state.modeType !== "evaluating" && 
+                <div
+                  style={{ display: metronomeBtn.value.display ? "" : "none" }}
+                  class={[styles.btn, metronomeBtn.value.disabled && styles.disabled]}
+                  onClick={async () => {
+                    metronomeData.disable = !metronomeData.disable;
+                    metronomeData.metro?.initPlayer();
+                  }}
+                >
+                  <img style={{ display: metronomeData.disable ? "block" : "none" }} class={styles.iconBtn} src={headImg("tickoff.svg")} />
+                  <img style={{ display: !metronomeData.disable ? "block" : "none" }} class={styles.iconBtn} src={headImg("tickon.svg")} />
+                  <span style={{ whiteSpace: "nowrap" }}>节拍器</span>
+                </div>               
+            }
             <div id={state.platform === IPlatform.PC ? "teacherTop-2" : "studnetT-2"} style={{ display: selectBtn.value.display ? "" : "none" }} class={[styles.btn, selectBtn.value.disabled && styles.disabled]} onClick={() => handleChangeSection()}>
               <img style={{ display: state.section.length === 0 ? "" : "none" }} class={styles.iconBtn} src={headImg(`section0.svg`)} />
               <img style={{ display: state.section.length === 1 ? "" : "none" }} class={styles.iconBtn} src={headImg(`section1.svg`)} />

+ 3 - 2
src/page-instrument/header-top/settting/index.tsx

@@ -142,7 +142,8 @@ export default defineComponent({
 									extra: () => <Switch v-model={state.setting.eyeProtection}></Switch>,
 								}}
 							</Cell>
-							<Cell
+							{/* The metronome volume setting has been commented out, so the related code is commented out as well: state.setting.beatVolume = state.setting.beatVolume || 50 */}
+							{/* <Cell
 								title="节拍器音量"
 								class={styles.sliderWrap}
 								center
@@ -161,7 +162,7 @@ export default defineComponent({
 										</Slider>
 									),
 								}}
-							</Cell>							
+							</Cell>							 */}
 							<div class={styles.btnsbar}>
 								{/* <div class={styles.btn} onClick={downPng}>
 									<img src={iconDown} />

+ 3 - 2
src/page-instrument/view-detail/index.tsx

@@ -32,7 +32,7 @@ import { setCustomGradual, setCustomNoteRealValue } from "/src/helpers/customMus
 import { usePageVisibility } from "@vant/use";
 import { initMidi } from "/src/helpers/midiPlay"
 import TheAudio from "/src/components/the-audio"
-import tickWav from "/src/assets/tick.wav";
+import tickWav from "/src/assets/tick.mp3";
 import Title from "../header-top/title";
 
 const DelayCheck = defineAsyncComponent(() =>
@@ -110,7 +110,8 @@ export default defineComponent({
       const settting = store.get("musicscoresetting");
       if (settting) {
         state.setting = settting;
-        state.setting.beatVolume = state.setting.beatVolume || 50
+        //state.setting.beatVolume = state.setting.beatVolume || 50
+        state.setting.beatVolume = 50
         if (state.setting.camera) {
           const res = await api_openCamera();
           // 没有授权

+ 17 - 1
src/state.ts

@@ -1,5 +1,5 @@
 import { closeToast, showToast } from "vant";
-import { nextTick, reactive } from "vue";
+import { nextTick, reactive, watch } from "vue";
 import { OpenSheetMusicDisplay } from "../osmd-extended/src";
 import { metronomeData } from "./helpers/metronome";
 import { GradualNote, GradualTimes, GradualVersion } from "./type";
@@ -14,6 +14,7 @@ import { verifyCanRepeat, getDuration } from "./helpers/formateMusic";
 import { getMusicSheetDetail } from "./utils/baseApi"
 import { getQuery } from "/src/utils/queryString";
 import { followData } from "/src/view/follow-practice/index"
+import { changeSongSourceByBate } from "/src/view/audio-list"
 
 const query: any = getQuery();
 
@@ -448,6 +449,8 @@ const state = reactive({
   midiSectionStart: 0,
   /** Whether the audio file has finished loading */
   audioDone: false,
+  /** Whether the beat (metronome) audio merged and loaded successfully */
+  audioBetaDone: false,
   /** Score SVG DOM node */
   osmdSvgDom: null as any,
   /** Scrolling container DOM */
@@ -661,6 +664,14 @@ export const skipNotePlay = async (itemIndex: number, isStart = false) => {
   }
 };
 
+/* Restore the audio source */
+watch(()=>state.playState,()=>{
+  // When playback pauses in practise mode (non-MIDI) and the beat-merged audio is ready, switch back to the plain source
+  if (state.audioBetaDone && state.playState==='paused' && state.modeType === "practise" && state.playMode !== "MIDI") {
+    console.log("还原音源")
+    changeSongSourceByBate(true)
+  }
+})
 /**
  * Toggle the score playback state
  * @param playState optional, defaults to undefined; the state to switch to, play: playing, paused: paused
@@ -671,6 +682,11 @@ export const togglePlay = async (playState?: "play" | "paused", sourceType?: str
     if (sourceType !== 'courseware') showToast('音频资源加载中,请稍后')
     return
   }
+  // Before playing, in practise mode (non-MIDI) with the beat-merged audio ready, pick the source according to whether the metronome is disabled
+  if (state.audioBetaDone && (playState ? playState : state.playState === "paused" ? "play" : "paused") ==='play' && state.modeType === "practise" && state.playMode !== "MIDI") {
+    console.log("设置音源")
+    changeSongSourceByBate(metronomeData.disable)
+  }
   // midi播放
   if (state.isAppPlay) {
     if (playState === "paused") {

+ 196 - 0
src/utils/crunker.ts

@@ -0,0 +1,196 @@
+interface CrunkerConstructorOptions {
+   sampleRate: number
+   concurrentNetworkRequests: number
+}
+
+type CrunkerInputTypes = string | File | Blob
+
+export default class Crunker {
+   private readonly _sampleRate: number
+   private readonly _concurrentNetworkRequests: number
+   private readonly _context: AudioContext
+
+   constructor({ sampleRate, concurrentNetworkRequests = 200 }: Partial<CrunkerConstructorOptions> = {}) {
+      this._context = this._createContext(sampleRate)
+      sampleRate ||= this._context.sampleRate
+      this._sampleRate = sampleRate
+      this._concurrentNetworkRequests = concurrentNetworkRequests
+   }
+   private _createContext(sampleRate = 44_100): AudioContext {
+      window.AudioContext = window.AudioContext || (window as any).webkitAudioContext || (window as any).mozAudioContext
+      return new AudioContext({ sampleRate })
+   }
+   /**
+    * Convert URLs, Files, or Blobs into AudioBuffers
+    */
+   async fetchAudio(...filepaths: CrunkerInputTypes[]): Promise<AudioBuffer[]> {
+      const buffers: AudioBuffer[] = []
+      const groups = Math.ceil(filepaths.length / this._concurrentNetworkRequests)
+      for (let i = 0; i < groups; i++) {
+         const group = filepaths.slice(i * this._concurrentNetworkRequests, (i + 1) * this._concurrentNetworkRequests)
+         buffers.push(...(await this._fetchAudio(...group)))
+      }
+      return buffers
+   }
+   private async _fetchAudio(...filepaths: CrunkerInputTypes[]): Promise<AudioBuffer[]> {
+      return await Promise.all(
+         filepaths.map(async filepath => {
+            let buffer: ArrayBuffer
+            if (filepath instanceof File || filepath instanceof Blob) {
+               buffer = await filepath.arrayBuffer()
+            } else {
+               buffer = await fetch(filepath).then(response => {
+                  if (response.headers.has("Content-Type") && !response.headers.get("Content-Type")!.includes("audio/")) {
+                     console.warn(
+                        `Crunker: Attempted to fetch an audio file, but its MIME type is \`${
+                           response.headers.get("Content-Type")!.split(";")[0]
+                        }\`. We'll try and continue anyway. (file: "${filepath}")`
+                     )
+                  }
+                  return response.arrayBuffer()
+               })
+            }
+            /* Gotcha: older Safari versions do not support the promise form of decodeAudioData, so use the callback style here */
+            return await new Promise((res, rej) => {
+               this._context.decodeAudioData(
+                  buffer,
+                  buffer => {
+                     res(buffer)
+                  },
+                  err => {
+                     rej(err)
+                  }
+               )
+            })
+         })
+      )
+   }
+   /**
+    * Merge audio buffers at the given start times (in seconds)
+    */
+   mergeAudioBuffers(buffers: AudioBuffer[], times: number[]): AudioBuffer {
+      if (buffers.length !== times.length) {
+         throw new Error("buffer数量和times数量必须一致")
+      }
+      const output = this._context.createBuffer(this._maxNumberOfChannels(buffers), this._sampleRate * this._maxDuration(buffers), this._sampleRate)
+      buffers.forEach((buffer, index) => {
+         for (let channelNumber = 0; channelNumber < buffer.numberOfChannels; channelNumber++) {
+            const outputData = output.getChannelData(channelNumber)
+            const bufferData = buffer.getChannelData(channelNumber)
+            const offsetNum = Math.round(times[index] * this._sampleRate) // start offset in samples
+            for (let i = buffer.getChannelData(channelNumber).length - 1; i >= 0; i--) {
+               outputData[i + offsetNum] += bufferData[i]
+               // Clamp to [-1, 1]: summed samples outside this range would clip and pop
+               if (outputData[i + offsetNum] > 1) {
+                  outputData[i + offsetNum] = 1
+               }
+               if (outputData[i + offsetNum] < -1) {
+                  outputData[i + offsetNum] = -1
+               }
+            }
+            output.getChannelData(channelNumber).set(outputData)
+         }
+      })
+
+      return output
+   }
+   /**
+    * 根据buffer导出audio标签
+    */
+   exportAudioElement(buffer: AudioBuffer, type = "audio/mp3"): HTMLAudioElement {
+      const recorded = this._interleave(buffer)
+      const dataview = this._writeHeaders(recorded, buffer.numberOfChannels, buffer.sampleRate)
+      const audioBlob = new Blob([dataview], { type })
+      return this._renderAudioElement(audioBlob)
+   }
+   /**
+    * Measure the leading silence of the audio (in seconds)
+    */
+   calculateSilenceDuration(buffer: AudioBuffer) {
+      const threshold = 0.01 // silence threshold: samples below this are treated as silence
+      const sampleRate = buffer.sampleRate
+      const channelData = buffer.getChannelData(0) // only inspect the first channel
+      let silenceDuration = 0
+      for (let i = 0; i < channelData.length; i++) {
+         if (Math.abs(channelData[i]) > threshold) {
+            break
+         }
+         silenceDuration++
+      }
+      // convert the sample count to seconds
+      silenceDuration = silenceDuration / sampleRate
+      return silenceDuration
+   }
+   private _maxNumberOfChannels(buffers: AudioBuffer[]): number {
+      return Math.max(...buffers.map(buffer => buffer.numberOfChannels))
+   }
+   private _maxDuration(buffers: AudioBuffer[]): number {
+      return Math.max(...buffers.map(buffer => buffer.duration))
+   }
+   private _interleave(input: AudioBuffer): Float32Array {
+      if (input.numberOfChannels === 1) {
+         return input.getChannelData(0)
+      }
+      const channels = []
+      for (let i = 0; i < input.numberOfChannels; i++) {
+         channels.push(input.getChannelData(i))
+      }
+      const length = channels.reduce((prev, channelData) => prev + channelData.length, 0)
+      const result = new Float32Array(length)
+      let index = 0
+      let inputIndex = 0
+      while (index < length) {
+         channels.forEach(channelData => {
+            result[index++] = channelData[inputIndex]
+         })
+         inputIndex++
+      }
+      return result
+   }
+   private _renderAudioElement(blob: Blob): HTMLAudioElement {
+      const audio = document.createElement("audio")
+      audio.src = this._renderURL(blob)
+      audio.load()
+      return audio
+   }
+   private _renderURL(blob: Blob): string {
+      return (window.URL || window.webkitURL).createObjectURL(blob)
+   }
+   private _writeHeaders(buffer: Float32Array, numOfChannels: number, sampleRate: number): DataView {
+      const bitDepth = 16
+      const bytesPerSample = bitDepth / 8
+      const sampleSize = numOfChannels * bytesPerSample
+      const fileHeaderSize = 8
+      const chunkHeaderSize = 36
+      const chunkDataSize = buffer.length * bytesPerSample
+      const chunkTotalSize = chunkHeaderSize + chunkDataSize
+      const arrayBuffer = new ArrayBuffer(fileHeaderSize + chunkTotalSize)
+      const view = new DataView(arrayBuffer)
+      this._writeString(view, 0, "RIFF")
+      view.setUint32(4, chunkTotalSize, true)
+      this._writeString(view, 8, "WAVE")
+      this._writeString(view, 12, "fmt ")
+      view.setUint32(16, 16, true)
+      view.setUint16(20, 1, true)
+      view.setUint16(22, numOfChannels, true)
+      view.setUint32(24, sampleRate, true)
+      view.setUint32(28, sampleRate * sampleSize, true)
+      view.setUint16(32, sampleSize, true)
+      view.setUint16(34, bitDepth, true)
+      this._writeString(view, 36, "data")
+      view.setUint32(40, chunkDataSize, true)
+      return this._floatTo16BitPCM(view, buffer, fileHeaderSize + chunkHeaderSize)
+   }
+   private _floatTo16BitPCM(dataview: DataView, buffer: Float32Array, offset: number): DataView {
+      for (let i = 0; i < buffer.length; i++, offset += 2) {
+         const tmp = Math.max(-1, Math.min(1, buffer[i]))
+         dataview.setInt16(offset, tmp < 0 ? tmp * 0x8000 : tmp * 0x7fff, true)
+      }
+      return dataview
+   }
+   private _writeString(dataview: DataView, offset: number, header: string): void {
+      for (let i = 0; i < header.length; i++) {
+         dataview.setUint8(offset + i, header.charCodeAt(i))
+      }
+   }
+}
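
For reference, a minimal usage sketch of this new Crunker utility (to be run inside an async function; the song URL and the beat offsets below are illustrative, not taken from this commit): decode the click sample and a music track, overlay clicks at the desired beat times, and export the mix as a playable audio element.

const crunker = new Crunker();
// fetchAudio decodes each input (URL, File or Blob) into an AudioBuffer
const [tick, music] = await crunker.fetchAudio(tickWav, "/assets/song.mp3"); // hypothetical song URL
// overlay a click at 0 s, 0.5 s and 1 s on top of the music (offsets are in seconds)
const merged = crunker.mergeAudioBuffers([music, tick, tick, tick], [0, 0, 0.5, 1]);
// the mixed buffer is written out as WAV data wrapped in an <audio> element
const audioEl = crunker.exportAudioElement(merged);
audioEl.play();

This is essentially what loadMergeAudioBetas in src/view/audio-list/index.tsx below does, using the real beat times from metronomeData and the leading-silence offset returned by calculateSilenceDuration.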

BIN
src/view/audio-list/img/icon_loading_head.png


BIN
src/view/audio-list/img/icon_loading_img.png


+ 61 - 0
src/view/audio-list/index.module.less

@@ -4,4 +4,65 @@
     bottom: 0;
     width: 100%;
     z-index: -1000000;
+}
+
+.loading {
+    position: fixed;
+    left: 0;
+    top: 0;
+    right: 0;
+    bottom: 0;
+    display: flex;
+    justify-content: center;
+    align-items: center;
+    z-index: 10000;
+    background: rgba(0, 0, 0, .6);
+}
+
+.loadingWrap {
+    position: relative;
+    width: 295px;
+    padding: 21px 17px;
+    background: rgba(135, 135, 135, .72);
+    border-radius: 24px;
+
+    .loadingIcon {
+        position: absolute;
+        left: 50%;
+        top: -35px;
+        transform: translateX(-50%);
+        width: 216px;
+    }
+
+    .loadingTip {
+        position: absolute;
+        left: 50%;
+        transform: translateX(-50%);
+        bottom: -35px;
+        color: #fff;
+        font-size: 13px;
+        font-weight: 400;
+    }
+
+    :global {
+        .van-progress {
+            height: 7px;
+        }
+
+        .van-progress__portion {
+            background: linear-gradient(180deg, #3CD6F9 0%, #1CACF1 100%);
+            border: 1px solid rgba(255, 255, 255, .5);
+        }
+
+        .van-progress__pivot {
+            top: 0;
+            color: transparent;
+            background-color: transparent;
+            width: 35px;
+            height: 37px;
+            background-image: url('./img/icon_loading_head.png');
+            background-repeat: no-repeat;
+            background-size: 100% 100%;
+        }
+    }
 }

+ 135 - 32
src/view/audio-list/index.tsx

@@ -12,10 +12,22 @@ import state, { IPlayState, onEnded, onPlay } from "/src/state";
 import { api_playProgress, api_cloudTimeUpdae, api_cloudplayed, api_remove_cloudplayed, api_remove_cloudTimeUpdae } from "/src/helpers/communication";
 import { evaluatingData } from "/src/view/evaluating";
 import { cloudToggleState } from "/src/helpers/midiPlay"
+import { metronomeData } from "../../helpers/metronome";
+import Crunker from "../../utils/crunker"
+const crunker = new Crunker()
+import tickWav from "/src/assets/tick.mp3";
+import tockWav from "/src/assets/tock.mp3";
+import Loading from "./loading"
 
 export const audioData = reactive({
 	songEle: null as unknown as HTMLAudioElement,
 	backgroundEle: null as unknown as HTMLAudioElement,
+	songCollection: {  // collection of audio sources; beatSongEle and bateBackgroundEle are the beat-merged (metronome) sources
+		songEle: null as unknown as HTMLAudioElement,
+		backgroundEle: null as unknown as HTMLAudioElement,
+		beatSongEle: null as unknown as HTMLAudioElement,
+		bateBackgroundEle: null as unknown as HTMLAudioElement,
+	},
 	midiRender: false,
 	progress: 0, // MIDI playback progress (seconds)
 	duration: 0 // total audio duration (seconds)
@@ -112,6 +124,37 @@ export const detectTheNumberOfSoundSources = () => {
 	return total;
 };
 
+/** Switch between the plain and beat-merged audio sources */
+export const changeSongSourceByBate = (isDisBate:boolean) => {
+	// isDisBate true: switch to the source without beats; false: switch to the beat-merged source
+	if(audioData.songCollection.songEle && audioData.songCollection.beatSongEle){
+		const songEleCurrentTime = audioData.songEle.currentTime
+		console.log("当前音乐时间:",songEleCurrentTime)
+		if(isDisBate){
+			audioData.songEle = audioData.songCollection.songEle
+			audioData.songEle.currentTime = songEleCurrentTime
+			if(audioData.songCollection.backgroundEle){
+				audioData.backgroundEle = audioData.songCollection.backgroundEle
+				audioData.backgroundEle.currentTime = songEleCurrentTime
+			}
+		}else{
+			audioData.songEle = audioData.songCollection.beatSongEle
+			audioData.songEle.currentTime = songEleCurrentTime
+			if(audioData.songCollection.bateBackgroundEle){
+				audioData.backgroundEle = audioData.songCollection.bateBackgroundEle
+				audioData.backgroundEle.currentTime = songEleCurrentTime
+			}
+		}
+	}
+	// Mute or unmute according to the selected play source
+	if (state.playSource === "music") {
+		audioData.songEle && (audioData.songEle.muted = false);
+		audioData.backgroundEle && (audioData.backgroundEle.muted = true);
+	} else {
+		audioData.songEle && (audioData.songEle.muted = true);
+		audioData.backgroundEle && (audioData.backgroundEle.muted = false);
+	}
+}
 export default defineComponent({
 	name: "audio-list",
 	setup() {
@@ -202,32 +245,89 @@ export default defineComponent({
 				onEnded();
 			}
 		}
-
-		onMounted(() => {
+		// Merge the metronome clicks into the audio sources
+		function loadMergeAudioBetas() {
+			console.time("音频加载时间")
+			const audioList = [state.music+'?v='+Date.now()]
+			if(state.accompany){   // some pieces have no accompaniment track
+				audioList.push(state.accompany+'?v='+Date.now()) 
+			}
+			return crunker.fetchAudio(tickWav, tockWav, ...audioList).then(([tickWavBuff,tockWavBuff,musicBuff,accompanyBuff])=>{
+				console.timeEnd("音频加载时间")
+				// measure the leading silence of each track
+				const silenceDuration = crunker.calculateSilenceDuration(musicBuff)
+				const silenceBgDuration = accompanyBuff && crunker.calculateSilenceDuration(accompanyBuff) 
+				console.log(`音频空白时间:${silenceDuration},${silenceBgDuration}`)
+				const beats:AudioBuffer[] = []
+				const beatsTime:number[] = []
+				const beatsBgTime:number[] = []
+				metronomeData.metroMeasure.map(Measures=>{
+					Measures.map((item:any)=>{
+						beats.push(item.index===0?tickWavBuff:tockWavBuff)
+						beatsTime.push(item.time + silenceDuration) // add the leading silence to the beat time computed from the XML
+						accompanyBuff && beatsBgTime.push(item.time + silenceBgDuration) // add the leading silence to the beat time computed from the XML
+					})
+				})
+				// merge
+				console.time("音频合并时间")
+				const musicBuffMeg = crunker.mergeAudioBuffers([musicBuff,...beats],[0,...beatsTime])
+				const accompanyBuffMeg = accompanyBuff && crunker.mergeAudioBuffers([accompanyBuff,...beats],[0,...beatsBgTime])
+				console.timeEnd("音频合并时间")
+				return [musicBuffMeg,accompanyBuffMeg]
+			}).then(([musicBuffMeg,accompanyBuffMeg])=>{
+				console.time("音频audioDom生成时间")
+				const musicAudio = crunker.exportAudioElement(musicBuffMeg)
+				const accompanyAudio = accompanyBuffMeg && crunker.exportAudioElement(accompanyBuffMeg)
+				console.timeEnd("音频audioDom生成时间")
+				return [musicAudio,accompanyAudio]
+			})
+		}
+		// Load the plain (no-beat) audio sources
+		function loadAudio(){
+			return Promise.all([createAudio(state.music), createAudio(state.accompany)])
+		}
+		onMounted(async () => {
 			if (state.playMode !== "MIDI") {
-				Promise.all([createAudio(state.music), createAudio(state.accompany)]).then(
-					([music, accompany]) => {
-						state.audioDone = true;
-						// console.log(music, accompany);
-						if (music) {
-							audioData.songEle = music;
-						}
-						if (accompany) {
-							audioData.backgroundEle = accompany;
-						}
-						if (audioData.songEle) {
-							audioData.songEle.addEventListener("play", onPlay);
-							audioData.songEle.addEventListener("ended", onEnded);
-							accompany && (accompany.muted = true);
-						} else if (audioData.backgroundEle) {
-							audioData.backgroundEle.addEventListener("play", onPlay);
-							audioData.backgroundEle.addEventListener("ended", onEnded);
-						}
+				console.time("加载资源耗时")
+				// sources without beats
+				const [music, accompany] = await loadAudio()
+				try {
+					// beat-merged sources
+					const [musicAudio,accompanyAudio] = await loadMergeAudioBetas()
+					console.log("音频合成成功66666666")
+					state.audioBetaDone = true;
+					if (musicAudio) {
+						musicAudio.addEventListener("play", onPlay);
+						musicAudio.addEventListener("ended", onEnded);
+						accompanyAudio && (accompanyAudio.muted = true);
+					} else if (accompanyAudio) {
+						accompanyAudio.addEventListener("play", onPlay);
+						accompanyAudio.addEventListener("ended", onEnded);
 					}
-				);
-
+					Object.assign(audioData.songCollection,{
+						songEle:music,
+						backgroundEle:accompany,
+						beatSongEle:musicAudio,
+						bateBackgroundEle:accompanyAudio
+					})
+				} catch (err) {
+					console.log("音频合成失败7777777:",err)
+				}
+				state.audioDone = true;
+				audioData.backgroundEle = accompany!;
+				audioData.songEle = music!;
+				if (music) {
+					music.addEventListener("play", onPlay);
+					music.addEventListener("ended", onEnded);
+					accompany && (accompany.muted = true);
+				} else if (accompany) {
+					accompany.addEventListener("play", onPlay);
+					accompany.addEventListener("ended", onEnded);
+				}
+				console.timeEnd("加载资源耗时")
 				api_playProgress(progress);
 			} else {
+				state.audioDone = true;
 				const songEndTime = state.times[state.times.length - 1 || 0]?.endtime || 0
 				audioData.duration = songEndTime
 				// listen for MIDI playback progress
@@ -243,16 +343,19 @@ export default defineComponent({
 
 		// console.log(state.playMode, state.midiUrl);
 		return () => (
-			<div class={styles.audioList}>
-				{state.playMode === "MIDI" && state.speed != 0 && (
-					<iframe
-						style={{ display: "none" }}
-						ref={midiRef}
-						src={`/midi/index.html`}
-						onLoad={handleLoad}
-					/>
-				)}
-			</div>
+			<>
+				<Loading/>
+				<div class={styles.audioList}>
+					{state.playMode === "MIDI" && state.speed != 0 && (
+						<iframe
+							style={{ display: "none" }}
+							ref={midiRef}
+							src={`/midi/index.html`}
+							onLoad={handleLoad}
+						/>
+					)}
+				</div>
+			</>
 		);
 	},
 });

+ 39 - 0
src/view/audio-list/loading.tsx

@@ -0,0 +1,39 @@
+import { defineComponent, ref } from "vue"
+import icon_loading_img from "./img/icon_loading_img.png"
+import { Progress } from "vant"
+import styles from "./index.module.less"
+import state from "/src/state"
+
+export default defineComponent({
+   name: "loading",
+   setup() {
+      function fakeLoadingProgress(duration = 2000, callback: (num: number) => void) {
+         let startTime = Date.now()
+         let progress = 0
+         const timer = setInterval(() => {
+            let timePassed = Date.now() - startTime
+            if (timePassed >= duration) {
+               clearInterval(timer)
+               callback(96) // stop the fake progress at 96%
+               return
+            }
+            progress = Math.min(100, (timePassed / duration) * 100)
+            callback(progress)
+         }, 300)
+      }
+      const loadingProress = ref(0)
+      fakeLoadingProgress(2000, num => {
+         loadingProress.value = num
+      })
+      return () =>
+         !state.audioDone && (
+            <div class={styles.loading}>
+               <div class={styles.loadingWrap}>
+                  <img class={styles.loadingIcon} src={icon_loading_img} />
+                  <Progress percentage={loadingProress.value} />
+                  <div class={styles.loadingTip}>音频资源加载中,请稍后</div>
+               </div>
+            </div>
+         )
+   }
+})

+ 11 - 11
src/view/tick/index.tsx

@@ -5,8 +5,8 @@ import { Popup } from "vant";
 import styles from "./index.module.less";
 import state from "/src/state";
 import { browser } from "/src/utils/index";
-import tickWav from "/src/assets/tick.wav";
-import tockWav from "/src/assets/tock.wav";
+import tickWav from "/src/assets/tick.mp3";
+import tockWav from "/src/assets/tock.mp3";
 
 const browserInfo = browser();
 export const tickData = reactive({
@@ -53,7 +53,7 @@ const audioData = reactive({
 
 const createAudio = (src: string): Promise<HTMLAudioElement | null> => {
 	return new Promise((resolve) => {
-		const a = new Audio(src + '?v=' + Date.now());
+		const a = new Audio(src);
 		a.load();
 		a.onloadedmetadata = () => {
 			resolve(a);
@@ -79,15 +79,15 @@ export const handleStartTick = async () => {
 	tickData.show = true;
 	tickData.tickEnd = false;
 	if (tickData.state !== "ok") {
-		tickData.source1 = new Howl({
-			src: tockAndTick.tick,
-			// On iOS, force HTML5 audio, otherwise some system versions are silent on the first playback
-			html5: browserInfo.ios,
-		});
+		// tickData.source1 = new Howl({
+		// 	src: tockAndTick.tick,
+		// 	// On iOS, force HTML5 audio, otherwise some system versions are silent on the first playback
+		// 	html5: browserInfo.ios,
+		// });
 
-		tickData.source2 = new Howl({
-			src: tockAndTick.tock,
-		});
+		// tickData.source2 = new Howl({
+		// 	src: tockAndTick.tock,
+		// });
 		tickData.state = "ok";
 	}
 	tickData.index = 0;

+ 2 - 2
vite.config.ts

@@ -78,8 +78,8 @@ export default defineConfig({
         // target: "https://kt.colexiu.com",
         // target: "https://test.resource.colexiu.com", // 内容平台开发环境,内容平台开发,需在url链接上加上isCbs=true
         // target: "https://dev.resource.colexiu.com",
-        // target: "https://test.kt.colexiu.com",
-        target: "https://mec.colexiu.com",
+        target: "https://test.kt.colexiu.com",
+        //target: "https://mec.colexiu.com",
         changeOrigin: true,
         rewrite: (path) => path.replace(/^\/instrument/, ""),
       },