
Merge remote-tracking branch 'origin/hqyNewVersion' into feature/0429-music

# Conflicts:
#	src/views/music-library/music-sheet/component/music-list.tsx
yuanliang 1 year ago
parent
commit
763729d31e

+ 0 - 1
components.d.ts

@@ -37,7 +37,6 @@ declare module '@vue/runtime-core' {
     NRadio: typeof import('naive-ui')['NRadio']
     NRadioGroup: typeof import('naive-ui')['NRadioGroup']
     NSpace: typeof import('naive-ui')['NSpace']
-    NSpin: typeof import('naive-ui')['NSpin']
     NTooltip: typeof import('naive-ui')['NTooltip']
     Recharge: typeof import('./src/components/Lockscreen/Recharge.vue')['default']
     RouterError: typeof import('./src/components/RouterError/RouterError.vue')['default']

+ 11 - 0
src/views/music-library/api.ts

@@ -91,6 +91,17 @@ export const musicSheetImg = (params: object) => {
 }
 
 /**
+ * @description: Generate metronome-mixed audio for a music sheet
+ */
+export const musicSheetAddMix = (params: object) => {
+  return request({
+    url: '/cbs-app/musicSheet/addMix',
+    method: 'post',
+    data: params
+  } as any)
+}
+
+/**
  * @description: Delete a music sheet
  */
 export const musicSheetRemove = (params: object) => {
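A minimal usage sketch of the new musicSheetAddMix helper. The payload field names mirror the call site added in musiceBeatTime/index.tsx below; the wrapper function name and the '@' import alias (assumed to map to src/ as in vite.config.ts) are illustrative only:

import { musicSheetAddMix } from '@/views/music-library/api'

// Hypothetical wrapper: field names come from handleBeatTimes in musiceBeatTime/index.tsx.
export async function submitBeatTimes(
  sheetId: string,
  beatTime: number[],
  singBeatTime: number[],
  mingBeatTime: number[]
) {
  await musicSheetAddMix({
    id: sheetId,
    playTimeList: beatTime,
    singTimeList: singBeatTime,
    solmizationTimeList: mingBeatTime
  })
}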

+ 16 - 0
src/views/music-library/music-sheet/component/music-list.tsx

@@ -43,6 +43,7 @@ import MusicCreateImg from '../modal/music-create-img'
 import TheTooltip from '@components/TheTooltip'
 import { HelpCircleOutline } from '@vicons/ionicons5'
 import {musicalInstrumentPage} from "@views/system-manage/subject-manage/api";
+import MusiceBeatTime from "../modal/musiceBeatTime"
 
 export default defineComponent({
   name: 'music-list',
@@ -91,6 +92,7 @@ export default defineComponent({
       userIdDisable: true, // owner
       userIdData: [] as any, // owner option list
       productOpen: false,
+      beatTimeOpen: false,
       productItem: {} as any
     })
 
@@ -320,6 +322,17 @@ export default defineComponent({
                   type="primary"
                   size="small"
                   text
+                  onClick={() => {
+                    state.productItem = row
+                    state.beatTimeOpen = true
+                  }}
+                >
+                  生成节拍器音频
+                </NButton>
+                <NButton
+                  type="primary"
+                  size="small"
+                  text
                   disabled={!!row.status}
                   onClick={() => onRmove(row)}
                   v-auth="musicSheet/remove1753457445635645442"
@@ -894,6 +907,9 @@ export default defineComponent({
             }}
           />
         </NModal>
+        {
+          state.beatTimeOpen && <MusiceBeatTime id={ state.productItem.id } onClose={()=>{state.beatTimeOpen = false}}></MusiceBeatTime>
+        }
       </div>
     )
   }

+ 204 - 0
src/views/music-library/music-sheet/crunker/crunker.ts

@@ -0,0 +1,204 @@
+interface CrunkerConstructorOptions {
+   sampleRate: number
+   concurrentNetworkRequests: number
+}
+
+type CrunkerInputTypes = string | File | Blob
+
+export default class Crunker {
+   private readonly _sampleRate: number
+   private readonly _concurrentNetworkRequests: number
+   private readonly _context: AudioContext
+
+   constructor({ sampleRate, concurrentNetworkRequests = 200 }: Partial<CrunkerConstructorOptions> = {}) {
+      this._context = this._createContext(sampleRate)
+      sampleRate ||= this._context.sampleRate
+      this._sampleRate = sampleRate
+      this._concurrentNetworkRequests = concurrentNetworkRequests
+   }
+   private _createContext(sampleRate = 44_100): AudioContext {
+      window.AudioContext = window.AudioContext || (window as any).webkitAudioContext || (window as any).mozAudioContext
+      return new AudioContext({ sampleRate })
+   }
+   /**
+    * Convert URL / File / Blob inputs into AudioBuffers
+    */
+   async fetchAudio(...filepaths: CrunkerInputTypes[]): Promise<AudioBuffer[]> {
+      const buffers: AudioBuffer[] = []
+      const groups = Math.ceil(filepaths.length / this._concurrentNetworkRequests)
+      for (let i = 0; i < groups; i++) {
+         const group = filepaths.slice(i * this._concurrentNetworkRequests, (i + 1) * this._concurrentNetworkRequests)
+         buffers.push(...(await this._fetchAudio(...group)))
+      }
+      return buffers
+   }
+   private async _fetchAudio(...filepaths: CrunkerInputTypes[]): Promise<AudioBuffer[]> {
+      return await Promise.all(
+         filepaths.map(async filepath => {
+            let buffer: ArrayBuffer
+            if (filepath instanceof File || filepath instanceof Blob) {
+               buffer = await filepath.arrayBuffer()
+            } else {
+               buffer = await fetch(filepath).then(response => {
+                  if (response.headers.has("Content-Type") && !response.headers.get("Content-Type")!.includes("audio/")) {
+                     console.warn(
+                        `Crunker: Attempted to fetch an audio file, but its MIME type is \`${
+                           response.headers.get("Content-Type")!.split(";")[0]
+                        }\`. We'll try and continue anyway. (file: "${filepath}")`
+                     )
+                  }
+                  return response.arrayBuffer()
+               })
+            }
+            /* Gotcha: older Safari versions do not return a Promise from decodeAudioData, so use the callback form here */
+            return await new Promise((res, rej) => {
+               this._context.decodeAudioData(
+                  buffer,
+                  buffer => {
+                     res(buffer)
+                  },
+                  err => {
+                     rej(err)
+                  }
+               )
+            })
+         })
+      )
+   }
+   /**
+    * Merge audio buffers at the given time offsets (in seconds)
+    */
+   mergeAudioBuffers(buffers: AudioBuffer[], times: number[]): AudioBuffer {
+      if (buffers.length !== times.length) {
+         throw new Error("buffer数量和times数量必须一致")
+      }
+      const output = this._context.createBuffer(this._maxNumberOfChannels(buffers), this._sampleRate * this._maxDuration(buffers), this._sampleRate)
+      buffers.forEach((buffer, index) => {
+         for (let channelNumber = 0; channelNumber < buffer.numberOfChannels; channelNumber++) {
+            const outputData = output.getChannelData(channelNumber)
+            const bufferData = buffer.getChannelData(channelNumber)
+            const offsetNum = Math.round(times[index] * this._sampleRate) // offset in samples
+            for (let i = buffer.getChannelData(channelNumber).length - 1; i >= 0; i--) {
+               outputData[i + offsetNum] += bufferData[i]
+               // Summed samples above 1 or below -1 can clip, so clamp them to the valid range
+               if (outputData[i + offsetNum] > 1) {
+                  outputData[i + offsetNum] = 1
+               }
+               if (outputData[i + offsetNum] < -1) {
+                  outputData[i + offsetNum] = -1
+               }
+            }
+            output.getChannelData(channelNumber).set(outputData)
+         }
+      })
+
+      return output
+   }
+   /**
+    * Export an <audio> element from a buffer
+    */
+   exportAudioElement(buffer: AudioBuffer, type = "audio/mp3"): HTMLAudioElement {
+      const recorded = this._interleave(buffer)
+      const dataview = this._writeHeaders(recorded, buffer.numberOfChannels, buffer.sampleRate)
+      const audioBlob = new Blob([dataview], { type })
+      return this._renderAudioElement(audioBlob)
+   }
+   /**
+    * Calculate the leading silence of a buffer, in seconds
+    */
+   calculateSilenceDuration(buffer: AudioBuffer) {
+      const threshold = 0.01 // silence threshold: samples below this are treated as silence
+      const sampleRate = buffer.sampleRate
+      const channelData = buffer.getChannelData(0) // only inspect the first channel
+      let silenceDuration = 0
+      for (let i = 0; i < channelData.length; i++) {
+         if (Math.abs(channelData[i]) > threshold) {
+            break
+         }
+         silenceDuration++
+      }
+      // Convert the sample count to seconds
+      silenceDuration = silenceDuration / sampleRate
+      return silenceDuration
+   }
+   /**
+    * Convert an AudioBuffer to a Blob
+    */
+   audioBuffToBlob(buffer: AudioBuffer, type = "audio/mp3") {
+      const recorded = this._interleave(buffer)
+      const dataview = this._writeHeaders(recorded, buffer.numberOfChannels, buffer.sampleRate)
+      return new Blob([dataview], { type })
+   }
+   private _maxNumberOfChannels(buffers: AudioBuffer[]): number {
+      return Math.max(...buffers.map(buffer => buffer.numberOfChannels))
+   }
+   private _maxDuration(buffers: AudioBuffer[]): number {
+      return Math.max(...buffers.map(buffer => buffer.duration))
+   }
+   private _interleave(input: AudioBuffer): Float32Array {
+      if (input.numberOfChannels === 1) {
+         return input.getChannelData(0)
+      }
+      const channels = []
+      for (let i = 0; i < input.numberOfChannels; i++) {
+         channels.push(input.getChannelData(i))
+      }
+      const length = channels.reduce((prev, channelData) => prev + channelData.length, 0)
+      const result = new Float32Array(length)
+      let index = 0
+      let inputIndex = 0
+      while (index < length) {
+         channels.forEach(channelData => {
+            result[index++] = channelData[inputIndex]
+         })
+         inputIndex++
+      }
+      return result
+   }
+   private _renderAudioElement(blob: Blob): HTMLAudioElement {
+      const audio = document.createElement("audio")
+      audio.src = this._renderURL(blob)
+      audio.load()
+      return audio
+   }
+   private _renderURL(blob: Blob): string {
+      return (window.URL || window.webkitURL).createObjectURL(blob)
+   }
+   private _writeHeaders(buffer: Float32Array, numOfChannels: number, sampleRate: number): DataView {
+      const bitDepth = 16
+      const bytesPerSample = bitDepth / 8
+      const sampleSize = numOfChannels * bytesPerSample
+      const fileHeaderSize = 8
+      const chunkHeaderSize = 36
+      const chunkDataSize = buffer.length * bytesPerSample
+      const chunkTotalSize = chunkHeaderSize + chunkDataSize
+      const arrayBuffer = new ArrayBuffer(fileHeaderSize + chunkTotalSize)
+      const view = new DataView(arrayBuffer)
+      this._writeString(view, 0, "RIFF")
+      view.setUint32(4, chunkTotalSize, true) // RIFF chunk size
+      this._writeString(view, 8, "WAVE")
+      this._writeString(view, 12, "fmt ")
+      view.setUint32(16, 16, true) // fmt sub-chunk size (16 for PCM)
+      view.setUint16(20, 1, true) // audio format: 1 = linear PCM
+      view.setUint16(22, numOfChannels, true) // channel count
+      view.setUint32(24, sampleRate, true) // sample rate
+      view.setUint32(28, sampleRate * sampleSize, true) // byte rate
+      view.setUint16(32, sampleSize, true) // block align
+      view.setUint16(34, bitDepth, true) // bits per sample
+      this._writeString(view, 36, "data")
+      view.setUint32(40, chunkDataSize, true) // data sub-chunk size
+      return this._floatTo16BitPCM(view, buffer, fileHeaderSize + chunkHeaderSize)
+   }
+   private _floatTo16BitPCM(dataview: DataView, buffer: Float32Array, offset: number): DataView {
+      for (let i = 0; i < buffer.length; i++, offset += 2) {
+         const tmp = Math.max(-1, Math.min(1, buffer[i]))
+         dataview.setInt16(offset, tmp < 0 ? tmp * 0x8000 : tmp * 0x7fff, true)
+      }
+      return dataview
+   }
+   private _writeString(dataview: DataView, offset: number, header: string): void {
+      for (let i = 0; i < header.length; i++) {
+         dataview.setUint8(offset + i, header.charCodeAt(i))
+      }
+   }
+}
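For reference, a minimal sketch of how this class can be driven; the URLs and the one-second offset are placeholders, and crunker/index.ts below is the real consumer in this commit:

import Crunker from './crunker'

const crunker = new Crunker()

async function overlayTick(audioUrl: string, tickUrl: string): Promise<Blob> {
  // Decode both sources into AudioBuffers (File/Blob inputs work too).
  const [audio, tick] = await crunker.fetchAudio(audioUrl, tickUrl)
  // Place the tick one second after the leading silence of the main track.
  const offset = crunker.calculateSilenceDuration(audio) + 1
  const merged = crunker.mergeAudioBuffers([audio, tick], [0, offset])
  // Note: the default "audio/mp3" type is only a label; the bytes are 16-bit PCM WAV.
  return crunker.audioBuffToBlob(merged)
}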

+ 139 - 0
src/views/music-library/music-sheet/crunker/index.ts

@@ -0,0 +1,139 @@
+/**
+ * Mix metronome beats into music-sheet audio and upload the result
+ */
+import Crunker from './crunker'
+import tickMp3 from './tick.mp3'
+import tockMp3 from './tock.mp3'
+import { getUploadSign, onOnlyFileUpload } from '@/utils/oss-file-upload'
+import { ref } from 'vue'
+
+const crunker = new Crunker()
+
+type musicSheetType = {
+  audioFileUrl: string
+  audioBeatMixUrl: null | string
+  solmizationFileUrl: null | string
+  solmizationBeatUrl: null | string
+}
+
+type taskAudioType = {
+  obj: musicSheetType
+  type: 'audioFileUrl' | 'solmizationFileUrl'
+  audioBuff?: AudioBuffer
+}[]
+
+// Metronome state
+export const beatState = {
+  times: [] as number[][],
+  totalIndex: ref(0), // total number of audio files to process
+  currentIndex: ref(0) // number of audio files processed so far
+}
+
+// Metronome sound sources
+let tickMp3Buff: null | AudioBuffer = null
+let tockMp3Buff: null | AudioBuffer = null
+
+export default async function audioMergeBeats({
+  musicSheetAccompanimentList,
+  musicSheetSoundList
+}: {
+  musicSheetAccompanimentList: musicSheetType[]
+  musicSheetSoundList: musicSheetType[]
+}) {
+  if (!beatState.times.length) return
+  try {
+    if (musicSheetSoundList.length + musicSheetAccompanimentList.length > 0) {
+      // Flatten the lists into a task queue
+      const taskAudio: taskAudioType = []
+      ;[...musicSheetSoundList, ...musicSheetAccompanimentList].map((item) => {
+        taskAudio.push({
+          obj: item,
+          type: 'audioFileUrl'
+        })
+        item.solmizationFileUrl && // queue the solmization track as well when present
+          taskAudio.push({
+            obj: item,
+            type: 'solmizationFileUrl'
+          })
+      })
+      beatState.totalIndex.value = taskAudio.length
+      /* Load the metronome samples */
+      if (!tickMp3Buff || !tockMp3Buff) {
+        const [tickMp3Bf, tockMp3Bf] = await crunker.fetchAudio(tickMp3, tockMp3)
+        tickMp3Buff = tickMp3Bf
+        tockMp3Buff = tockMp3Bf
+      }
+      /* Fetch and decode all audio files */
+      await Promise.all(
+        taskAudio.map(async (item) => {
+          const [audioBuff] = await crunker.fetchAudio(item.obj[item.type]!)
+          item.audioBuff = audioBuff
+        })
+      )
+      /* Mix and upload asynchronously */
+      await new Promise((res) => {
+        /* Mix in the beats */
+        taskAudio.map(async (item) => {
+          const audioBlob = mergeBeats(item.audioBuff!)
+          const url = await uploadFile(audioBlob)
+          item.obj[item.type == 'audioFileUrl' ? 'audioBeatMixUrl' : 'solmizationBeatUrl'] = url
+          beatState.currentIndex.value++
+          if (beatState.currentIndex.value >= beatState.totalIndex.value) {
+            res(null)
+          }
+        })
+      })
+    }
+  } catch (err) {
+    console.log('处理音频合成上传失败', err)
+  }
+  // Reset state
+  beatState.currentIndex.value = 0
+  beatState.totalIndex.value = 0
+  beatState.times = []
+}
+
+// Mix beats into the given buffer and return the result as a Blob
+function mergeBeats(audioBuff: AudioBuffer) {
+  // Measure the leading silence so the beats can be offset to the audible start
+  const silenceDuration = crunker.calculateSilenceDuration(audioBuff)
+  const beats: AudioBuffer[] = []
+  const currentTimes: number[] = []
+  beatState.times.map((items) => {
+    items.map((time, index) => {
+      beats.push(index === 0 ? tickMp3Buff! : tockMp3Buff!)
+      currentTimes.push(time + silenceDuration)
+    })
+  })
+  // Merge the audio with the beat samples
+  const mergeAudioBuff = crunker.mergeAudioBuffers([audioBuff, ...beats], [0, ...currentTimes])
+  // Convert to a Blob
+  return crunker.audioBuffToBlob(mergeAudioBuff)
+}
+
+/**
+ * Upload a file
+ */
+async function uploadFile(audioBlob: Blob) {
+  const filename = `${new Date().getTime()}.mp3`
+  const { data } = await getUploadSign({
+    filename,
+    bucketName: 'cloud-coach',
+    postData: {
+      filename,
+      acl: 'public-read',
+      key: filename,
+      unknowValueField: []
+    }
+  })
+  const url = await onOnlyFileUpload('', {
+    KSSAccessKeyId: data.KSSAccessKeyId,
+    acl: 'public-read',
+    file: audioBlob,
+    key: filename,
+    name: filename,
+    policy: data.policy,
+    signature: data.signature
+  })
+  return url
+}
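A hedged sketch of how this module is expected to be driven. The beat times and URL are made-up values, and the import path assumes a caller sitting next to the crunker folder; in this commit the real times come from the embedded instrument page via the musiceBeatTime modal:

import audioMergeBeats, { beatState } from './crunker'

async function demo() {
  // Each inner array is one measure; index 0 gets the tick sample, the rest get tocks.
  beatState.times = [
    [0, 0.5, 1.0, 1.5],
    [2.0, 2.5, 3.0, 3.5]
  ]
  const accompaniment = {
    audioFileUrl: 'https://example.com/accompaniment.mp3', // placeholder URL
    audioBeatMixUrl: null,
    solmizationFileUrl: null,
    solmizationBeatUrl: null
  }
  await audioMergeBeats({
    musicSheetAccompanimentList: [accompaniment],
    musicSheetSoundList: []
  })
  // On success, audioBeatMixUrl now points to the uploaded metronome mix.
  console.log(accompaniment.audioBeatMixUrl)
}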

BIN
src/views/music-library/music-sheet/crunker/tick.mp3


BIN
src/views/music-library/music-sheet/crunker/tock.mp3


+ 20 - 0
src/views/music-library/music-sheet/modal/musiceBeatTime/index.module.less

@@ -0,0 +1,20 @@
+.musiceBeatTime {
+  position: fixed;
+  top: 0;
+  left: 0;
+  width: 100vw;
+  height: 100vh;
+  z-index: 10000;
+  background-color: rgba(255, 255, 255, 0.9);
+  .tit{
+    position: absolute;
+    left: 50%;
+    top: 50%;
+    transform: translate(-50%, -50%);
+    font-size: 20px;
+    line-height: 20px;
+  }
+  .iframe{
+    opacity: 0;
+  }
+}

+ 70 - 0
src/views/music-library/music-sheet/modal/musiceBeatTime/index.tsx

@@ -0,0 +1,70 @@
+import { defineComponent, onMounted, onUnmounted, ref } from 'vue'
+import { useUserStore } from '@/store/modules/user'
+import styles from "./index.module.less"
+import { musicSheetAddMix } from '../../../api'
+import { useMessage } from 'naive-ui'
+
+export default defineComponent({
+  name: 'musiceBeatTime',
+  props: {
+    id: {
+      type: String
+    }
+  },
+  emits: ['close'],
+  setup(props, {emit}) {
+    const message = useMessage()
+    onMounted(() => {
+      window.addEventListener('message', handleBeatRes)
+    })
+
+    onUnmounted(() => {
+      window.removeEventListener('message', handleBeatRes)
+    })
+    function handleBeatRes(res: MessageEvent) {
+      const data = res.data
+      if (data?.api === 'webApi_beatTimes') {
+        iframeShow.value = false
+        handleBeatTimes(data)
+      }
+    }
+    async function handleBeatTimes(data:any){
+      try {
+        const { beatTime, singBeatTime, mingBeatTime} = JSON.parse(data.data)
+        await musicSheetAddMix({
+          id: props.id,
+          playTimeList: beatTime,
+          singTimeList: singBeatTime,
+          solmizationTimeList: mingBeatTime
+        })
+        message.success('生成成功')
+        emit("close")
+      }catch (err){
+        console.log('🚀 ~ 音频合成失败', err)
+        message.error('生成失败')
+        emit("close")
+      }
+    }
+    const iframeShow = ref(true)
+    const userStore = useUserStore()
+    const token = userStore.getToken
+    const apiUrls = {
+      'dev': 'https://dev.kt.colexiu.com',
+      'test': 'https://test.lexiaoya.cn',
+      'online': 'https://mec.colexiu.com'
+    }
+    const environment = location.origin.includes('//dev') ? 'dev' : location.origin.includes('//test') ? 'test' : location.origin.includes('//mec.colexiu') ? 'online' : 'dev'
+    const apiUrl = apiUrls[environment]
+    const prefix = /(localhost|192)/.test(location.host) ? 'https://dev.kt.colexiu.com/' : apiUrl
+    let src = prefix + `/instrument/?_t=${Date.now()}&id=${props.id}&Authorization=${token}&isCbs=true&isbeatTimes=true&musicRenderType=staff`
+    //let src = "http://192.168.3.122:3000/instrument.html" + `?_t=${Date.now()}&id=${props.id}&Authorization=${token}&isCbs=true&isbeatTimes=true&musicRenderType=staff`
+    return () => (
+      <div class={styles.musiceBeatTime}>
+        <div class={styles.tit}>节拍器音频生成中...</div>
+        {
+          iframeShow.value && <iframe class={styles.iframe} width={'667px'} height={'375px'} frameborder="0" src={src}></iframe>
+        }
+      </div>
+    )
+  }
+})
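The component above only consumes a postMessage reply from the embedded score page. For clarity, a hedged sketch of the message shape it listens for; the field names are taken from handleBeatRes/handleBeatTimes above, the values are placeholders, and the real payload is produced by the external /instrument/ page, which is not part of this commit:

// Posted from inside the iframe when beat extraction finishes.
window.parent.postMessage(
  {
    api: 'webApi_beatTimes',
    data: JSON.stringify({
      beatTime: [0, 0.5, 1.0],      // -> playTimeList
      singBeatTime: [0, 0.5, 1.0],  // -> singTimeList
      mingBeatTime: [0, 0.5, 1.0]   // -> solmizationTimeList
    })
  },
  '*'
)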

+ 1 - 1
vite.config.ts

@@ -19,7 +19,7 @@ function pathResolve(dir: string) {
 }
 
 // const proxyUrl = 'https://dev.lexiaoya.cn'
-// const proxyUrl = 'http://127.0.0.1:7293/'
+//const proxyUrl = 'http://127.0.0.1:7293/'
 // const proxyUrl = 'https://resource.colexiu.com/'
 const proxyUrl = 'https://dev.resource.colexiu.com'
 // https://test.resource.colexiu.com/