@@ -0,0 +1,259 @@
+package com.yonge.nettty.dto;
+
+import java.util.Comparator;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.stream.Collectors;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import be.tarsos.dsp.AudioEvent;
+import be.tarsos.dsp.pitch.PitchDetectionHandler;
+import be.tarsos.dsp.pitch.PitchDetectionResult;
+import be.tarsos.dsp.pitch.PitchDetector;
+import be.tarsos.dsp.pitch.PitchProcessor;
+
+import com.yonge.audio.utils.ArrayUtil;
+import com.yonge.nettty.entity.MusicXmlBasicInfo;
+import com.yonge.nettty.entity.MusicXmlNote;
+import com.yonge.nettty.entity.NoteAnalysis;
+import com.yonge.netty.server.processor.WaveformWriter;
+
+/**
+ * User channel context - per-connection state used to evaluate a player's
+ * audio stream against the loaded MusicXML score.
+ */
+public class UserChannelContext implements PitchDetectionHandler {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(UserChannelContext.class);
+
+	// Mapping between song ID and its parsed MusicXML data
+	private ConcurrentHashMap<Integer, MusicXmlBasicInfo> songMusicXmlMap = new ConcurrentHashMap<Integer, MusicXmlBasicInfo>();
+
+	private WaveformWriter waveFileProcessor;
+
+	// Analysis state of the note currently being processed
+	private NoteAnalysis processingNote = new NoteAnalysis(0, 0);
+
+	// Raw bytes received on the channel but not yet consumed
+	private byte[] channelBufferBytes = new byte[0];
+
+	// Float samples carried over between pitch-detection passes
+	private float[] handlerBufferBytes = new float[0];
+
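+	// FFT_YIN pitch detector from TarsosDSP, configured for 44.1 kHz input and a
+	// 4096-sample analysis window. The window size trades latency for accuracy on
+	// low notes; the 44100 here must match the sample rate of the incoming stream.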
+	private PitchDetector pitchDetector = PitchProcessor.PitchEstimationAlgorithm.FFT_YIN.getDetector(44100, 1024 * 4);
+
+	public ConcurrentHashMap<Integer, MusicXmlBasicInfo> getSongMusicXmlMap() {
+		return songMusicXmlMap;
+	}
+
+	public void setSongMusicXmlMap(ConcurrentHashMap<Integer, MusicXmlBasicInfo> songMusicXmlMap) {
+		this.songMusicXmlMap = songMusicXmlMap;
+	}
+
+	public WaveformWriter getWaveFileProcessor() {
+		return waveFileProcessor;
+	}
+
+	public void setWaveFileProcessor(WaveformWriter waveFileProcessor) {
+		this.waveFileProcessor = waveFileProcessor;
+	}
+
+	public NoteAnalysis getProcessingNote() {
+		return processingNote;
+	}
+
+	public void setProcessingNote(NoteAnalysis processingNote) {
+		this.processingNote = processingNote;
+	}
+
+	/** Reset all per-session state so the channel can start a fresh evaluation. */
+	public void resetUserInfo() {
+		waveFileProcessor = null;
+		processingNote = new NoteAnalysis(0, 0);
+		channelBufferBytes = new byte[0];
+		handlerBufferBytes = new float[0];
+	}
+
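+	/**
+	 * Look up the MusicXML note at the current processing index.
+	 *
+	 * @param songId song to read from; when null, the first loaded song is used
+	 * @return the note at {@code processingNote.getIndex()}, or null when no
+	 *         score is loaded or the index is out of range
+	 */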
+ public MusicXmlNote getCurrentMusicNote(Integer songId) {
|
|
|
+ if (songMusicXmlMap.size() == 0) {
|
|
|
+ return null;
|
|
|
+ }
|
|
|
+ MusicXmlBasicInfo musicXmlBasicInfo = null;
|
|
|
+ if (songId == null) {
|
|
|
+ musicXmlBasicInfo = songMusicXmlMap.values().stream().findFirst().get();
|
|
|
+ } else {
|
|
|
+ musicXmlBasicInfo = songMusicXmlMap.get(songId);
|
|
|
+ }
|
|
|
+
|
|
|
+ if (musicXmlBasicInfo != null && processingNote.getIndex() <= getTotalMusicNoteIndexNum(null)) {
|
|
|
+ return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == processingNote.getIndex()).findFirst().get();
|
|
|
+ }
|
|
|
+
|
|
|
+ return null;
|
|
|
+ }
|
|
|
+
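+	/**
+	 * @return the highest musical-note index in the score (not the note count),
+	 *         or -1 when no score is loaded
+	 */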
+	public int getTotalMusicNoteIndexNum(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = null;
+		if (songId == null) {
+			musicXmlBasicInfo = songMusicXmlMap.values().stream().findFirst().orElse(null);
+		} else {
+			musicXmlBasicInfo = songMusicXmlMap.get(songId);
+		}
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMusicalNotesIndex()).distinct().max(Integer::compareTo).orElse(-1);
+		}
+
+		return -1;
+	}
+
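+	/**
+	 * Collect the notes belonging to the measure (section) currently being
+	 * processed, ordered by note index.
+	 */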
+	public List<MusicXmlNote> getCurrentMusicSection(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = null;
+		if (songId == null) {
+			musicXmlBasicInfo = songMusicXmlMap.values().stream().findFirst().orElse(null);
+		} else {
+			musicXmlBasicInfo = songMusicXmlMap.get(songId);
+		}
+
+		if (musicXmlBasicInfo != null) {
+			// Match on the measure index; the original compared the note index against
+			// the section index, which cannot select a whole section.
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMeasureIndex() == processingNote.getSectionIndex())
+					.sorted(Comparator.comparing(MusicXmlNote::getMusicalNotesIndex)).collect(Collectors.toList());
+		}
+
+		return null;
+	}
+
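+	/**
+	 * @return the highest measure index in the score, or -1 when no score is loaded
+	 */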
+	public int getTotalMusicSectionIndexNum(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = null;
+		if (songId == null) {
+			musicXmlBasicInfo = songMusicXmlMap.values().stream().findFirst().orElse(null);
+		} else {
+			musicXmlBasicInfo = songMusicXmlMap.get(songId);
+		}
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMeasureIndex()).distinct().max(Integer::compareTo).orElse(-1);
+		}
+
+		return -1;
+	}
+
+	public byte[] getChannelBufferBytes() {
+		return channelBufferBytes;
+	}
+
+	public void setChannelBufferBytes(byte[] channelBufferBytes) {
+		this.channelBufferBytes = channelBufferBytes;
+	}
+
+	public float[] getHandlerBufferBytes() {
+		return handlerBufferBytes;
+	}
+
+	public void setHandlerBufferBytes(float[] handlerBufferBytes) {
+		this.handlerBufferBytes = handlerBufferBytes;
+	}
+
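+	/**
+	 * TarsosDSP callback, invoked once per analysis buffer. Each call adds the
+	 * buffer's duration to the note under analysis and, when a pitch was
+	 * detected, accumulates it so an average can be taken. Once the accumulated
+	 * duration reaches the note's expected duration, the average pitch is logged
+	 * and analysis advances to the next note.
+	 */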
+	@Override
+	public void handlePitch(PitchDetectionResult pitchDetectionResult, AudioEvent audioEvent) {
+
+		// Buffer duration in milliseconds; the halving presumably compensates for
+		// the dispatcher's 50% buffer overlap.
+		double durationTime = 1000 * (audioEvent.getFloatBuffer().length) / audioEvent.getSampleRate() / 2;
+
+		float pitch = pitchDetectionResult.getPitch();
+
+		//LOGGER.info("pitch:{} timeStamp:{} endTimeStamp:{} durationTime:{}", pitch, audioEvent.getTimeStamp(), audioEvent.getEndTimeStamp(), durationTime);
+
+		// Fetch the score note currently expected
+		MusicXmlNote musicXmlNote = getCurrentMusicNote(null);
+
+		if (musicXmlNote == null) {
+			return;
+		}
+
+		// Fetch the note analysis in progress
+		NoteAnalysis noteAnalysis = getProcessingNote();
+		if (noteAnalysis == null) {
+			noteAnalysis = new NoteAnalysis(musicXmlNote.getMusicalNotesIndex(), musicXmlNote.getMeasureIndex());
+		}
+
+		double noteDurationTime = noteAnalysis.getDurationTime() + durationTime;
+		noteAnalysis.setDurationTime(noteDurationTime);
+
+		// -1 means the detector found no pitch in this buffer
+		if (pitch != -1) {
+			noteAnalysis.setChunks(noteAnalysis.getChunks() + 1);
+			noteAnalysis.setTotalPitch(noteAnalysis.getTotalPitch() + pitch);
+		}
+
+		setProcessingNote(noteAnalysis);
+
+		if (noteAnalysis.getIndex() <= getTotalMusicNoteIndexNum(null) && noteDurationTime >= musicXmlNote.getDuration()) {
+
+			// Guard against division by zero when no pitch was ever detected
+			if (noteAnalysis.getChunks() > 0) {
+				noteAnalysis.setAvgPitch(noteAnalysis.getTotalPitch() / noteAnalysis.getChunks());
+			}
+
+			LOGGER.info("note index [{}] expected frequency:{} detected frequency:{} duration:{}", noteAnalysis.getIndex(), musicXmlNote.getFrequency(),
+					noteAnalysis.getAvgPitch(), noteAnalysis.getDurationTime());
+
+			// Move on to the next note
+			setProcessingNote(new NoteAnalysis(musicXmlNote.getMusicalNotesIndex() + 1, musicXmlNote.getMeasureIndex()));
+		}
+
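+		// The disabled block below is an alternative approach: instead of
+		// aggregating per callback, it buffers the note's raw samples and runs
+		// pitch detection over the completed note in one pass via getPitch(...).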
+		/*// Grab the float samples
+		float[] bufferBytes = audioEvent.getFloatBuffer();
+
+		// Append them to the note's accumulated data
+		float[] totalNoteBytes = ArrayUtil.mergeFloat(getHandlerBufferBytes(), bufferBytes);
+		setHandlerBufferBytes(totalNoteBytes);
+
+		// Data length of the current note. Formula: bytes/second = sample rate (Hz) x (bits per sample / 8) x channels
+		int length = (int) (44100 * (16 / 8) * 1 * musicXmlNote.getDuration() / 1000);
+
+		if (noteAnalysis.getIndex() <= getTotalMusicNoteIndexNum(null) && totalNoteBytes.length >= length) {
+			// Process the current note
+			float[] noteFloatData = new float[length];
+			System.arraycopy(totalNoteBytes, 0, noteFloatData, 0, length);
+			// Keep the data not yet consumed
+			setHandlerBufferBytes(ArrayUtil.extractFloat(totalNoteBytes, length - 1, totalNoteBytes.length - 1));
+
+			// Detect the note's frequency
+			float npitch = getPitch(noteFloatData, audioEvent.getBufferSize());
+
+			LOGGER.info("note {}: expected frequency:{} detected frequency:{}", noteAnalysis.getIndex(), musicXmlNote.getFrequency(), npitch);
+
+			// Move on to the next note
+			setProcessingNote(noteAnalysis = new NoteAnalysis(musicXmlNote.getMusicalNotesIndex() + 1, musicXmlNote.getMeasureIndex()));
+		}*/
+	}
+
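+	/**
+	 * Run the pitch detector over an arbitrary-length sample array: the input is
+	 * zero-padded to a whole number of detector windows, each window is analysed
+	 * separately, and the detected pitches are averaged.
+	 */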
+	private float getPitch(float[] audioBuffer, int bufferSize) {
+
+		// Zero-pad so the buffer length is an exact multiple of the window size
+		// (the original padded by length % bufferSize, which only yields a whole
+		// multiple in special cases)
+		int blankNum = (bufferSize - audioBuffer.length % bufferSize) % bufferSize;
+		float[] zeroBytes = new float[blankNum];
+
+		audioBuffer = ArrayUtil.mergeFloat(audioBuffer, zeroBytes);
+
+		int times = audioBuffer.length / bufferSize;
+
+		float totalPitch = 0f;
+		int detectedTimes = 0;
+
+		for (int i = 0; i < times; i++) {
+			float[] window = ArrayUtil.extractFloat(audioBuffer, i * bufferSize, (i + 1) * bufferSize);
+			float pitch = pitchDetector.getPitch(window).getPitch();
+			if (pitch == -1) {
+				// No pitch found in this window; skip it
+				continue;
+			}
+			totalPitch += pitch;
+			detectedTimes++;
+		}
+
+		// Average only over the windows that actually yielded a pitch
+		return detectedTimes == 0 ? -1 : totalPitch / detectedTimes;
+	}
+
+}