
Merge remote-tracking branch 'origin/master'

# Conflicts:
#	mec-im/src/main/java/com/ym/service/Impl/RoomServiceImpl.java
zouxuan, 3 years ago
Commit f5d0a514d9
32 changed files with 2,623 additions and 592 deletions
  1. + 0 - 67  audio-analysis/src/main/java/com/yonge/nettty/dto/HardLevelEnum.java
  2. + 1 - 1  audio-analysis/src/main/java/com/yonge/netty/dto/ChunkAnalysis.java
  3. + 133 - 0  audio-analysis/src/main/java/com/yonge/netty/dto/HardLevelEnum.java
  4. + 3 - 13  audio-analysis/src/main/java/com/yonge/netty/dto/NoteAnalysis.java
  5. + 73 - 0  audio-analysis/src/main/java/com/yonge/netty/dto/NoteFrequencyRange.java
  6. + 28 - 0  audio-analysis/src/main/java/com/yonge/netty/dto/NotePlayResult.java
  7. + 1 - 1  audio-analysis/src/main/java/com/yonge/netty/dto/SectionAnalysis.java
  8. + 927 - 0  audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext.java
  9. + 58 - 40  audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext2.java
  10. + 841 - 0  audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext3.java
  11. + 1 - 1  audio-analysis/src/main/java/com/yonge/netty/dto/WebSocketResponse.java
  12. + 1 - 1  audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlBasicInfo.java
  13. + 23 - 1  audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlNote.java
  14. + 1 - 1  audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlSection.java
  15. + 2 - 2  audio-analysis/src/main/java/com/yonge/netty/server/NettyServer.java
  16. + 0 - 10  audio-analysis/src/main/java/com/yonge/netty/server/handler/message/BinaryMessageHandler.java
  17. + 7 - 121  audio-analysis/src/main/java/com/yonge/netty/server/handler/message/BinaryWebSocketFrameHandler.java
  18. + 12 - 0  audio-analysis/src/main/java/com/yonge/netty/server/handler/message/MessageHandler.java
  19. + 0 - 5  audio-analysis/src/main/java/com/yonge/netty/server/handler/message/TextMessageHandler.java
  20. + 29 - 182  audio-analysis/src/main/java/com/yonge/netty/server/handler/message/TextWebSocketHandler.java
  21. + 349 - 0  audio-analysis/src/main/java/com/yonge/netty/server/service/AudioCompareHandler.java
  22. + 0 - 135  audio-analysis/src/main/java/com/yonge/netty/server/service/CompareHandler.java
  23. + 10 - 4  audio-analysis/src/main/java/com/yonge/netty/server/service/PitchDetectionHandler.java
  24. + 1 - 1  audio-analysis/src/main/java/com/yonge/netty/server/service/UserChannelContextService.java
  25. + 1 - 1  audio-analysis/src/main/resources/bootstrap-test.properties
  26. + 54 - 0  mec-biz/src/main/java/com/ym/mec/biz/dal/entity/SysMusicScore.java
  27. + 31 - 0  mec-biz/src/main/java/com/ym/mec/biz/dal/entity/SysMusicScoreAccompaniment.java
  28. + 7 - 2  mec-biz/src/main/resources/config/mybatis/SysMusicScoreAccompanimentMapper.xml
  29. + 14 - 2  mec-biz/src/main/resources/config/mybatis/SysMusicScoreMapper.xml
  30. + 3 - 0  mec-common/common-core/src/main/java/com/ym/mec/common/dal/CustomEnumTypeHandler.java
  31. + 0 - 1  mec-im/pom.xml
  32. + 12 - 0  pom.xml

+ 0 - 67
audio-analysis/src/main/java/com/yonge/nettty/dto/HardLevelEnum.java

@@ -1,67 +0,0 @@
-package com.yonge.nettty.dto;
-
-import com.ym.mec.common.enums.BaseEnum;
-
-public enum HardLevelEnum implements BaseEnum<String, HardLevelEnum> {
-	BEGINNER("入门级", 5, 5, 50, 60, 10), ADVANCED("进阶级", 5, 5, 50, 50, 10), PERFORMER("大师级", 5, 5, 50, 60, 10);
-
-	private String msg;
-
-	private int amplitudeThreshold;
-
-	private int frequencyOffset;
-
-	private int tempoOffsetOfPercent;
-
-	private int integrityRange;
-
-	private int notPlayRange;
-
-	/**
-	 * 
-	 * @param msg
-	 * @param amplitudeThreshold 振幅阈值
-	 * @param frequencyOffset 频率法制
-	 * @param tempoOffsetOfPercent 节奏偏移量百分比(在当前范围内节奏才算正确)
-	 * @param integrityRange 完成度范围
-	 * @param notPlayRange 未演奏的范围
-	 */
-	HardLevelEnum(String msg, int amplitudeThreshold, int frequencyOffset, int tempoOffsetOfPercent, int integrityRange, int notPlayRange) {
-		this.msg = msg;
-		this.amplitudeThreshold = amplitudeThreshold;
-		this.frequencyOffset = frequencyOffset;
-		this.tempoOffsetOfPercent = tempoOffsetOfPercent;
-		this.integrityRange = integrityRange;
-		this.notPlayRange = notPlayRange;
-	}
-
-	public String getMsg() {
-		return msg;
-	}
-
-	public int getAmplitudeThreshold() {
-		return amplitudeThreshold;
-	}
-
-	public int getFrequencyOffset() {
-		return frequencyOffset;
-	}
-
-	public int getTempoOffsetOfPercent() {
-		return tempoOffsetOfPercent;
-	}
-
-	public int getIntegrityRange() {
-		return integrityRange;
-	}
-
-	public int getNotPlayRange() {
-		return notPlayRange;
-	}
-
-	@Override
-	public String getCode() {
-		return this.name();
-	}
-
-}

+ 1 - 1
audio-analysis/src/main/java/com/yonge/nettty/dto/ChunkAnalysis.java → audio-analysis/src/main/java/com/yonge/netty/dto/ChunkAnalysis.java

@@ -1,4 +1,4 @@
-package com.yonge.nettty.dto;
+package com.yonge.netty.dto;
 
 public class ChunkAnalysis {
 

+ 133 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/HardLevelEnum.java

@@ -0,0 +1,133 @@
+package com.yonge.netty.dto;
+
+import com.ym.mec.common.enums.BaseEnum;
+
+public enum HardLevelEnum implements BaseEnum<String, HardLevelEnum> {
+	/**
+	 * 入门级, 振幅阈值, 频率阈值 <br>
+	 * 节奏有效范围(1分音符), 节奏有效范围(2分音符), 节奏有效范围(4分音符), 节奏有效范围(8分音符), 节奏有效范围(16分音符), 节奏有效范围(32分音符)<br>
+	 * 完成度范围, 未演奏的范围
+	 */
+	BEGINNER("入门级", 3, 5, 5, 5, 10, 10, 13, 15, 60, 10), 
+	/**
+	 * 进阶级, 振幅阈值, 频率阈值 <br>
+	 * 节奏有效范围(1分音符), 节奏有效范围(2分音符), 节奏有效范围(4分音符), 节奏有效范围(8分音符), 节奏有效范围(16分音符), 节奏有效范围(32分音符)<br>
+	 * 完成度范围, 未演奏的范围
+	 */
+	ADVANCED("进阶级", 3, 5, 8, 8, 15, 15, 30, 30, 80, 10),
+	//ADVANCED("进阶级", 3, 5, 50, 50, 50, 50, 50, 5, 80, 10),
+	/**
+	 * 大师级, 振幅阈值, 频率阈值 <br>
+	 * 节奏有效范围(1分音符), 节奏有效范围(2分音符), 节奏有效范围(4分音符), 节奏有效范围(8分音符), 节奏有效范围(16分音符), 节奏有效范围(32分音符)<br>
+	 * 完成度范围, 未演奏的范围
+	 */
+	PERFORMER("大师级", 3, 3, 5, 5, 10, 10, 25, 25, 90, 20);
+
+	private String msg;
+
+	private int amplitudeThreshold;
+
+	private int frequencyThreshold;
+
+	private int tempoEffectiveRangeOf1;
+
+	private int tempoEffectiveRangeOf2;
+
+	private int tempoEffectiveRangeOf4;
+
+	private int tempoEffectiveRangeOf8;
+
+	private int tempoEffectiveRangeOf16;
+
+	private int tempoEffectiveRangeOf32;
+
+	private int integrityRange;
+
+	private int notPlayRange;
+
+	/**
+	 * 
+	 * @param msg
+	 * @param amplitudeThreshold 振幅阈值
+	 * @param frequencyThreshold 频率阈值
+	 * @param tempoEffectiveRangeOf1 节奏偏移量百分比(在当前范围内节奏才算正确)
+	 * @param tempoEffectiveRangeOf2 节奏偏移量百分比(在当前范围内节奏才算正确)
+	 * @param tempoEffectiveRangeOf4 节奏偏移量百分比(在当前范围内节奏才算正确)
+	 * @param tempoEffectiveRangeOf8 节奏偏移量百分比(在当前范围内节奏才算正确)
+	 * @param tempoEffectiveRangeOf16 节奏偏移量百分比(在当前范围内节奏才算正确)
+	 * @param tempoEffectiveRangeOf32 节奏偏移量百分比(在当前范围内节奏才算正确)
+	 * @param integrityRange 完成度范围
+	 * @param notPlayRange 未演奏的范围
+	 */
+	HardLevelEnum(String msg, int amplitudeThreshold, int frequencyThreshold, int tempoEffectiveRangeOf1, int tempoEffectiveRangeOf2,
+			int tempoEffectiveRangeOf4, int tempoEffectiveRangeOf8, int tempoEffectiveRangeOf16, int tempoEffectiveRangeOf32, int integrityRange,
+			int notPlayRange) {
+		this.msg = msg;
+		this.amplitudeThreshold = amplitudeThreshold;
+		this.frequencyThreshold = frequencyThreshold;
+		this.tempoEffectiveRangeOf1 = tempoEffectiveRangeOf1;
+		this.tempoEffectiveRangeOf2 = tempoEffectiveRangeOf2;
+		this.tempoEffectiveRangeOf4 = tempoEffectiveRangeOf4;
+		this.tempoEffectiveRangeOf8 = tempoEffectiveRangeOf8;
+		this.tempoEffectiveRangeOf16 = tempoEffectiveRangeOf16;
+		this.tempoEffectiveRangeOf32 = tempoEffectiveRangeOf32;
+		this.integrityRange = integrityRange;
+		this.notPlayRange = notPlayRange;
+	}
+
+	public String getMsg() {
+		return msg;
+	}
+
+	public int getAmplitudeThreshold() {
+		return amplitudeThreshold;
+	}
+
+	public int getFrequencyThreshold() {
+		return frequencyThreshold;
+	}
+
+	public int getTempoEffectiveRange(int denominator) {
+		
+		int tempoEffectiveRange = 0;
+		
+		switch (denominator) {
+		case 1:
+			tempoEffectiveRange = tempoEffectiveRangeOf1;
+			break;
+		case 2:
+			tempoEffectiveRange = tempoEffectiveRangeOf2;
+			break;
+		case 4:
+			tempoEffectiveRange = tempoEffectiveRangeOf4;
+			break;
+		case 8:
+			tempoEffectiveRange = tempoEffectiveRangeOf8;
+			break;
+		case 16:
+			tempoEffectiveRange = tempoEffectiveRangeOf16;
+			break;
+		case 32:
+			tempoEffectiveRange = tempoEffectiveRangeOf32;
+			break;
+
+		default:
+			break;
+		}
+		return tempoEffectiveRange;
+	}
+
+	public int getIntegrityRange() {
+		return integrityRange;
+	}
+
+	public int getNotPlayRange() {
+		return notPlayRange;
+	}
+
+	@Override
+	public String getCode() {
+		return this.name();
+	}
+
+}
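
The new enum replaces the single tempoOffsetOfPercent with a per-denominator tolerance. Below is a minimal usage sketch, assuming the enum above is on the classpath; the demo class name, note duration and denominator are illustrative, and the arithmetic mirrors the floatingRange expression used later in UserChannelContext.evaluateForNote.

package com.yonge.netty.dto;

/**
 * Hypothetical usage sketch (not part of the commit): derives the tempo
 * tolerance window for one note from the per-denominator range percentage.
 */
public class HardLevelEnumDemo {

	public static void main(String[] args) {
		HardLevelEnum level = HardLevelEnum.valueOf("ADVANCED");

		double noteDuration = 500; // assumed note duration in ms
		int denominator = 4;       // quarter note

		// Percentage tolerance for this note type (15 for ADVANCED quarter notes)
		int rangePercent = level.getTempoEffectiveRange(denominator);

		// Absolute tolerance in ms on each side of the expected onset,
		// mirroring: duration * getTempoEffectiveRange(denominator) / 100
		double floatingRange = noteDuration * rangePercent / 100;

		System.out.println(level.getMsg() + " tolerance: +/-" + floatingRange + " ms");
	}
}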

+ 3 - 13
audio-analysis/src/main/java/com/yonge/nettty/dto/NoteAnalysis.java → audio-analysis/src/main/java/com/yonge/netty/dto/NoteAnalysis.java

@@ -1,4 +1,4 @@
-package com.yonge.nettty.dto;
+package com.yonge.netty.dto;
 
 import com.ym.mec.common.enums.BaseEnum;
 
@@ -31,8 +31,6 @@ public class NoteAnalysis {
 
 	private double endTime;
 	
-	private double standardDurationTime;
-
 	private double durationTime;
 
 	private int frequency;
@@ -53,8 +51,8 @@ public class NoteAnalysis {
 
 	private boolean ignore;
 	
-	public NoteAnalysis(int index, int sectionIndex, int frequency, double standardDurationTime) {
-		this.standardDurationTime = standardDurationTime;
+	public NoteAnalysis(int index, int sectionIndex, int frequency, double durationTime) {
+		this.durationTime = durationTime;
 		this.index = index;
 		this.sectionIndex = sectionIndex;
 		this.frequency = frequency;
@@ -99,14 +97,6 @@ public class NoteAnalysis {
 		this.durationTime = durationTime;
 	}
 
-	public double getStandardDurationTime() {
-		return standardDurationTime;
-	}
-
-	public void setStandardDurationTime(double standardDurationTime) {
-		this.standardDurationTime = standardDurationTime;
-	}
-
 	public double getPlayFrequency() {
 		return playFrequency;
 	}

+ 73 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/NoteFrequencyRange.java

@@ -0,0 +1,73 @@
+package com.yonge.netty.dto;
+
+import java.math.BigDecimal;
+
+/**
+ * 一个音符的频率范围,包含最大值和最小值
+ */
+public class NoteFrequencyRange {
+
+	private double minFrequency;
+
+	private double maxFrequency;
+
+	public NoteFrequencyRange(double standardFrequecy, double frequency) {
+		int midiNoteSize = 128;
+		double[] midiNoteFrequencies = new double[midiNoteSize];
+
+		for (int x = 0; x < midiNoteSize; ++x) {
+			midiNoteFrequencies[x] = new BigDecimal(standardFrequecy).multiply(
+					new BigDecimal(Math.pow(2, new BigDecimal(x - 69).divide(new BigDecimal(12), 6, BigDecimal.ROUND_HALF_UP).doubleValue()))).doubleValue();
+
+			if(frequency <= 0){
+				continue;
+			}
+			
+			if (midiNoteFrequencies[x] >= frequency) {
+				if (midiNoteFrequencies[x] - frequency > frequency - midiNoteFrequencies[x - 1]) {
+					// frequency演奏的是上一个音符
+					maxFrequency = midiNoteFrequencies[x - 1] + (midiNoteFrequencies[x] - midiNoteFrequencies[x - 1]) / 2;
+					minFrequency = midiNoteFrequencies[x - 1] - (midiNoteFrequencies[x - 1] - midiNoteFrequencies[x - 2]) / 2;
+				} else {
+					// frequency演奏的是当前音符
+					midiNoteFrequencies[x + 1] = new BigDecimal(standardFrequecy).multiply(
+							new BigDecimal(Math.pow(2, new BigDecimal((x + 1) - 69).divide(new BigDecimal(12), 6, BigDecimal.ROUND_HALF_UP).doubleValue())))
+							.doubleValue();
+					maxFrequency = midiNoteFrequencies[x] + (midiNoteFrequencies[x + 1] - midiNoteFrequencies[x]) / 2;
+					minFrequency = midiNoteFrequencies[x] - (midiNoteFrequencies[x] - midiNoteFrequencies[x - 1]) / 2;
+				}
+				break;
+			}
+		}
+	}
+
+	public NoteFrequencyRange(double frequency) {
+		new NoteFrequencyRange(442, frequency);
+	}
+
+	public double getMinFrequency() {
+		return minFrequency;
+	}
+
+	public void setMinFrequency(double minFrequency) {
+		this.minFrequency = minFrequency;
+	}
+
+	public double getMaxFrequency() {
+		return maxFrequency;
+	}
+
+	public void setMaxFrequency(double maxFrequency) {
+		this.maxFrequency = maxFrequency;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (obj instanceof NoteFrequencyRange) {
+			NoteFrequencyRange nfr = (NoteFrequencyRange) obj;
+			return this.minFrequency == nfr.minFrequency && this.maxFrequency == nfr.maxFrequency;
+		}
+		return false;
+	}
+
+}
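
A minimal usage sketch, assuming the class above: the two-argument constructor derives the acceptance band around the expected pitch, which is the same containment check UserChannelContext.queryNoteFrequency performs. The demo class name and the frequency values are illustrative.

package com.yonge.netty.dto;

/**
 * Hypothetical usage sketch (not part of the commit): checks whether a
 * detected pitch lies inside the frequency band of the expected note.
 */
public class NoteFrequencyRangeDemo {

	public static void main(String[] args) {
		double standardFrequency = 442; // tuning reference, as in UserChannelContext
		double expectedFrequency = 440; // assumed frequency of the score note
		double playedFrequency = 452;   // assumed pitch reported by the detector

		NoteFrequencyRange range = new NoteFrequencyRange(standardFrequency, expectedFrequency);

		boolean inTune = range.getMinFrequency() <= playedFrequency
				&& playedFrequency <= range.getMaxFrequency();

		System.out.println("[" + range.getMinFrequency() + ", " + range.getMaxFrequency()
				+ "] contains " + playedFrequency + " -> " + inTune);
	}
}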

+ 28 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/NotePlayResult.java

@@ -0,0 +1,28 @@
+package com.yonge.netty.dto;
+
+public class NotePlayResult {
+
+	private boolean status;
+	
+	private double migrationRate;
+	
+	public NotePlayResult() {
+		// TODO Auto-generated constructor stub
+	}
+
+	public boolean getStatus() {
+		return status;
+	}
+
+	public void setStatus(boolean status) {
+		this.status = status;
+	}
+
+	public double getMigrationRate() {
+		return migrationRate;
+	}
+
+	public void setMigrationRate(double migrationRate) {
+		this.migrationRate = migrationRate;
+	}
+}

+ 1 - 1
audio-analysis/src/main/java/com/yonge/nettty/dto/SectionAnalysis.java → audio-analysis/src/main/java/com/yonge/netty/dto/SectionAnalysis.java

@@ -1,4 +1,4 @@
-package com.yonge.nettty.dto;
+package com.yonge.netty.dto;
 
 import org.apache.commons.lang3.builder.ToStringBuilder;
 

+ 927 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext.java

@@ -0,0 +1,927 @@
+package com.yonge.netty.dto;
+
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import be.tarsos.dsp.pitch.FastYin;
+
+import com.yonge.audio.analysis.Signals;
+import com.yonge.audio.analysis.detector.YINPitchDetector;
+import com.yonge.audio.utils.ArrayUtil;
+import com.yonge.netty.dto.NoteAnalysis.NoteErrorType;
+import com.yonge.netty.entity.MusicXmlBasicInfo;
+import com.yonge.netty.entity.MusicXmlNote;
+import com.yonge.netty.entity.MusicXmlSection;
+import com.yonge.netty.server.processor.WaveformWriter;
+
+/**
+ * 用户通道上下文
+ */
+public class UserChannelContext {
+	
+	private final static Logger LOGGER = LoggerFactory.getLogger(UserChannelContext3.class);
+	
+	//打击乐
+	private final static List<Integer> percussionList = Arrays.asList(23, 113);
+	
+	private String user;
+	
+	private double standardFrequecy = 442;
+	
+	private int offsetMS;
+	
+	private double dynamicOffset;
+	
+	private String platform;
+	
+	private Long recordId;
+	
+	private Integer subjectId;
+	
+	private int beatDuration;
+	
+	private int beatByteLength;
+	
+	private boolean delayProcessed;
+	
+	// 曲目与musicxml对应关系
+	private ConcurrentHashMap<Integer, MusicXmlBasicInfo> songMusicXmlMap = new ConcurrentHashMap<Integer, MusicXmlBasicInfo>();
+
+	private WaveformWriter waveFileProcessor;
+
+	private NoteAnalysis processingNote = new NoteAnalysis(0, 0, -1);
+	
+	private AtomicInteger evaluatingSectionIndex = new AtomicInteger(0);
+	
+	private List<NoteAnalysis> doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
+	
+	private List<SectionAnalysis> doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
+	
+	private List<ChunkAnalysis> totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+	
+	private byte[] channelBufferBytes = new byte[0];
+	
+	private double playTime;
+	
+	private double receivedTime;
+	
+	private HardLevelEnum hardLevel = HardLevelEnum.ADVANCED;
+	
+	private boolean handlerSwitch;
+	
+	private NotePlayResult queryNoteFrequency(MusicXmlNote xmlNote, double playFrequency) {
+
+		NotePlayResult result = new NotePlayResult();
+
+		boolean status = false;
+		double migrationRate = 0;
+
+		if (Math.round(xmlNote.getFrequency()) == Math.round(playFrequency)) {
+			status = true;
+			migrationRate = 0;
+		} else {
+			NoteFrequencyRange noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, xmlNote.getFrequency());
+
+			if (noteFrequencyRange.getMinFrequency() > playFrequency || playFrequency > noteFrequencyRange.getMaxFrequency()) {
+				status = false;
+			} else {
+
+				status = true;
+
+				if (Math.round(playFrequency) < Math.round(xmlNote.getFrequency())) {
+					double min = Math.abs(xmlNote.getFrequency() - noteFrequencyRange.getMinFrequency()) / 2;
+					migrationRate = Math.abs(playFrequency - xmlNote.getFrequency()) / min;
+				} else {
+					double max = Math.abs(xmlNote.getFrequency() - noteFrequencyRange.getMaxFrequency()) / 2;
+					migrationRate = Math.abs(playFrequency - xmlNote.getFrequency()) / max;
+				}
+			}
+		}
+
+		result.setStatus(status);
+		result.setMigrationRate(migrationRate);
+
+		return result;
+	}
+	
+	public void init(String platform, String heardLevel, int subjectId, int beatDuration) {
+		this.platform = platform;
+		this.subjectId = subjectId;
+		this.beatDuration = beatDuration;
+		this.beatByteLength = WaveformWriter.SAMPLE_RATE * WaveformWriter.BITS_PER_SAMPLE / 8 * beatDuration / 1000;
+		hardLevel = HardLevelEnum.valueOf(heardLevel);
+	}
+	
+	public byte[] skipMetronome(byte[] datas) {
+		if (beatByteLength > 0) {
+			if (datas.length <= beatByteLength) {
+				beatByteLength -= datas.length;
+				return new byte[0];
+			}
+			if(beatByteLength % 2 != 0){
+				beatByteLength++;
+			}
+			datas = ArrayUtil.extractByte(datas, beatByteLength, datas.length - 1);
+			beatByteLength = 0;
+		}
+		return datas;
+	}
+	
+	public void setUser(String user) {
+		this.user = user;
+	}
+
+	public Long getRecordId() {
+		return recordId;
+	}
+
+	public void setRecordId(Long recordId) {
+		this.recordId = recordId;
+	}
+
+	public boolean getHandlerSwitch() {
+		return handlerSwitch;
+	}
+
+	public void setHandlerSwitch(boolean handlerSwitch) {
+		this.handlerSwitch = handlerSwitch;
+	}
+
+	public int getOffsetMS() {
+		return offsetMS;
+	}
+
+	public void setOffsetMS(int offsetMS) {
+		this.offsetMS = offsetMS;
+	}
+
+	public HardLevelEnum getHardLevel() {
+		return hardLevel;
+	}
+
+	public ConcurrentHashMap<Integer, MusicXmlBasicInfo> getSongMusicXmlMap() {
+		return songMusicXmlMap;
+	}
+
+	public WaveformWriter getWaveFileProcessor() {
+		return waveFileProcessor;
+	}
+
+	public void setWaveFileProcessor(WaveformWriter waveFileProcessor) {
+		this.waveFileProcessor = waveFileProcessor;
+	}
+
+	public NoteAnalysis getProcessingNote() {
+		return processingNote;
+	}
+
+	public void setProcessingNote(NoteAnalysis processingNote) {
+		this.processingNote = processingNote;
+	}
+	
+	public List<SectionAnalysis> getDoneSectionAnalysisList() {
+		return doneSectionAnalysisList;
+	}
+
+	public List<NoteAnalysis> getDoneNoteAnalysisList() {
+		return doneNoteAnalysisList;
+	}
+
+	public void resetUserInfo() {
+		beatByteLength = WaveformWriter.SAMPLE_RATE * WaveformWriter.BITS_PER_SAMPLE / 8 * beatDuration / 1000;
+		waveFileProcessor = null;
+		processingNote = new NoteAnalysis(0,0,-1);
+		evaluatingSectionIndex = new AtomicInteger(0);
+		channelBufferBytes = new byte[0];
+		doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
+		doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
+		totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+		recordId = null;
+		playTime = 0;
+		receivedTime = 0;
+		delayProcessed = false;
+		dynamicOffset = 0;
+		handlerSwitch = false;
+	}
+	
+	public MusicXmlBasicInfo getMusicXmlBasicInfo(Integer songId){
+		MusicXmlBasicInfo musicXmlBasicInfo = null;
+		if (songId == null) {
+			musicXmlBasicInfo = songMusicXmlMap.values().stream().findFirst().get();
+		} else {
+			musicXmlBasicInfo = songMusicXmlMap.get(songId);
+		}
+		return musicXmlBasicInfo;
+	}
+	
+	public MusicXmlSection getCurrentMusicSection(Integer songId, int sectionIndex){
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+		return musicXmlBasicInfo.getMusicXmlSectionMap().get(sectionIndex);
+	}
+
+	public MusicXmlNote getCurrentMusicNote(Integer songId, Integer noteIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		if(noteIndex == null){
+			noteIndex = processingNote.getMusicalNotesIndex();
+		}
+		final int index = noteIndex;
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		int totalNoteIndex = getTotalMusicNoteIndex(null);
+		if (musicXmlBasicInfo != null && index <= totalNoteIndex) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == index).findFirst().get();
+		}
+
+		return null;
+	}
+
+	public int getTotalMusicNoteIndex(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMusicalNotesIndex()).distinct().max(Integer::compareTo).get();
+		}
+
+		return -1;
+	}
+
+	public List<MusicXmlNote> getCurrentMusicSection(Integer songId, Integer sectionIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		if(sectionIndex == null){
+			sectionIndex = processingNote.getSectionIndex();
+		}
+		final int index = sectionIndex;
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == index)
+					.sorted(Comparator.comparing(MusicXmlNote::getMusicalNotesIndex)).collect(Collectors.toList());
+		}
+
+		return null;
+	}
+
+	public int getTotalMusicSectionSize(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return (int) musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMeasureIndex()).distinct().count();
+		}
+
+		return -1;
+	}
+	
+	public int getMusicSectionIndex(Integer songId, int musicXmlNoteIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		
+		if(getTotalMusicNoteIndex(null) < musicXmlNoteIndex){
+			return -1;
+		}
+		
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == musicXmlNoteIndex).findFirst().get().getMeasureIndex();
+		}
+
+		return -1;
+	}
+	
+	public byte[] getChannelBufferBytes() {
+		return channelBufferBytes;
+	}
+
+	public void setChannelBufferBytes(byte[] channelBufferBytes) {
+		this.channelBufferBytes = channelBufferBytes;
+	}
+
+	public AtomicInteger getEvaluatingSectionIndex() {
+		return evaluatingSectionIndex;
+	}
+
+	public void handle(float[] samples, AudioFormat audioFormat){
+		
+		//YINPitchDetector frequencyDetector = new YINPitchDetector(samples.length , audioFormat.getSampleRate());
+		//int playFrequency = (int) frequencyDetector.getFrequency(samples);
+		
+		FastYin detector = new FastYin(audioFormat.getSampleRate(), samples.length);
+		int playFrequency = -1;
+		if(!percussionList.contains(subjectId)){
+			playFrequency = (int)detector.getPitch(samples).getPitch();
+		}
+		
+		int splDb = (int) Signals.soundPressureLevel(samples);
+		int power = (int) Signals.power(samples);
+		int amplitude = (int) Signals.norm(samples);
+		//float rms = Signals.rms(samples);
+		
+		double durationTime = 1000 * (samples.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
+		
+		receivedTime += durationTime;
+		
+		if(receivedTime < offsetMS){
+			return;
+		}
+		
+		playTime += durationTime;
+		
+		// 获取当前音符信息
+		MusicXmlNote musicXmlNote = getCurrentMusicNote(null,null);
+
+		if (musicXmlNote == null) {
+			return;
+		}
+		
+		//取出当前处理中的音符信息
+		NoteAnalysis noteAnalysis = getProcessingNote();
+		if(noteAnalysis == null || noteAnalysis.getDurationTime() == 0) {
+			noteAnalysis = new NoteAnalysis(musicXmlNote.getMusicalNotesIndex(), musicXmlNote.getMeasureIndex(), (int)musicXmlNote.getFrequency(), musicXmlNote.getDuration());
+		}
+		
+		evaluatingSectionIndex.set(noteAnalysis.getSectionIndex());
+		
+		if (noteAnalysis.getMusicalNotesIndex() >= 0 && noteAnalysis.getMusicalNotesIndex() <= getTotalMusicNoteIndex(null)) {
+			
+			LOGGER.info("user:{}  delayProcessed:{}  dynamicOffset:{}  Frequency:{}  splDb:{}  amplitude:{}  time:{}", user, delayProcessed, dynamicOffset, playFrequency, splDb, amplitude, playTime);
+			
+			ChunkAnalysis chunkAnalysis = new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude);
+			
+			if(totalChunkAnalysisList.size() > 0){
+				if(totalChunkAnalysisList.get(totalChunkAnalysisList.size() - 1).getAmplitude() + 2 < chunkAnalysis.getAmplitude()){
+					chunkAnalysis.setPeak(true);//只针对打击乐
+				}
+			}
+			totalChunkAnalysisList.add(chunkAnalysis);
+			
+			boolean flag = false;
+			if(percussionList.contains(subjectId)){
+				flag = chunkAnalysis.getAmplitude() > hardLevel.getAmplitudeThreshold();
+			}else{
+				flag = chunkAnalysis.getFrequency() > 100;
+			}
+			
+			if(delayProcessed == false && flag){
+				
+				delayProcessed = true;
+				
+				//计算延迟偏移值
+				//playTime = musicXmlNote.getTimeStamp() + durationTime;
+				dynamicOffset = chunkAnalysis.getStartTime() - musicXmlNote.getTimeStamp();
+				if(100 * dynamicOffset / musicXmlNote.getDuration() > (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()))){
+					dynamicOffset = 0;
+				}
+			}
+			
+			if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset)) {
+
+				if (musicXmlNote.getDontEvaluating()) {
+					noteAnalysis.setIgnore(true);
+				}
+				
+				//判断节奏(音符持续时间内有不间断的音高,就节奏正确)
+				boolean tempo = true;
+				if (percussionList.contains(subjectId)) {
+					noteAnalysis.setPlayFrequency(-1);
+					tempo = computeTempoWithAmplitude2(musicXmlNote);
+				}else{
+					noteAnalysis.setPlayFrequency(computeFrequency(musicXmlNote));
+					tempo = computeTempoWithFrequency(musicXmlNote);
+				}
+				
+				noteAnalysis.setTempo(tempo);
+				
+				evaluateForNote(musicXmlNote, noteAnalysis);
+
+				LOGGER.info("当前音符下标[{}] 预计频率:{} 实际频率:{} 节奏:{}", noteAnalysis.getMusicalNotesIndex(), musicXmlNote.getFrequency(), noteAnalysis.getPlayFrequency(),
+						noteAnalysis.isTempo());
+				
+				doneNoteAnalysisList.add(noteAnalysis);
+				
+				// 准备处理下一个音符
+				int nextNoteIndex = musicXmlNote.getMusicalNotesIndex() + 1;
+				float nextNoteFrequence = -1;
+				double standDuration = 0;
+				MusicXmlNote nextMusicXmlNote = getCurrentMusicNote(null, nextNoteIndex);
+				if(nextMusicXmlNote != null){
+					nextNoteFrequence = nextMusicXmlNote.getFrequency();
+					standDuration = nextMusicXmlNote.getDuration();
+				}
+				
+				NoteAnalysis nextNoteAnalysis = new NoteAnalysis(nextNoteIndex, getMusicSectionIndex(null, nextNoteIndex), (int)nextNoteFrequence, standDuration);
+
+				noteAnalysis = nextNoteAnalysis;
+
+			}
+
+			setProcessingNote(noteAnalysis);
+		}
+		
+	}
+	
+
+	public int evaluateForSection(int sectionIndex, int subjectId){
+
+		int score = -1;
+		if(doneSectionAnalysisList.size() >= getTotalMusicSectionSize(null)){
+			return score;
+		}
+		
+		//取出当前小节的所有音符
+		List<NoteAnalysis> noteAnalysisList = doneNoteAnalysisList.stream().filter(t -> t.getSectionIndex() == sectionIndex).collect(Collectors.toList());
+		
+		long ignoreSize = noteAnalysisList.stream().filter(t -> t.isIgnore()).count();
+
+		SectionAnalysis sectionAnalysis = new SectionAnalysis();
+		sectionAnalysis.setIndex(sectionIndex);
+		sectionAnalysis.setNoteNum(noteAnalysisList.size());
+		sectionAnalysis.setIsIngore(ignoreSize == noteAnalysisList.size());
+		
+		//判断是否需要评分
+		MusicXmlSection musicXmlSection = getCurrentMusicSection(null, sectionIndex);
+		if(noteAnalysisList.size() == musicXmlSection.getNoteNum()){
+			//取出需要评测的音符
+			List<NoteAnalysis>  noteList = noteAnalysisList.stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList());
+			
+			if(noteList != null && noteList.size() > 0){
+				score = noteList.stream().mapToInt(t -> t.getScore()).sum() / noteList.size();
+			}
+			sectionAnalysis.setDurationTime(noteAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+			sectionAnalysis.setScore(score);
+
+			LOGGER.info("小节评分:{}",sectionAnalysis);
+			doneSectionAnalysisList.add(sectionAnalysis);
+		}
+		
+		return score;
+	}
+	
+	public Map<String, Integer> evaluateForMusic() {
+
+		Map<String, Integer> result = new HashMap<String, Integer>();
+		
+		result.put("playTime", (int) doneNoteAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+		
+		// 取出需要评测的音符
+		List<NoteAnalysis> noteAnalysisList = doneNoteAnalysisList.stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList());
+
+		if (noteAnalysisList != null && noteAnalysisList.size() > 0) {
+			int intonationScore = 0;
+			int tempoScore = 0;
+			int integrityScore = 0;
+			int socre = 0;
+
+			for (NoteAnalysis note : noteAnalysisList) {
+				intonationScore += note.getIntonationScore();
+				tempoScore += note.getTempoScore();
+				integrityScore += note.getIntegrityScore();
+				socre += note.getScore();
+			}
+
+			tempoScore = tempoScore / noteAnalysisList.size();
+			intonationScore = intonationScore / noteAnalysisList.size();
+			integrityScore = integrityScore / noteAnalysisList.size();
+
+			result.put("cadence", tempoScore);
+			result.put("intonation", intonationScore);
+			result.put("integrity", integrityScore);
+	        result.put("recordId", recordId.intValue());
+
+			int score = socre / noteAnalysisList.size();
+
+			// 平均得分
+			if (getMusicXmlBasicInfo(null).getSubjectId() == 23 || getMusicXmlBasicInfo(null).getSubjectId() == 113) {
+				score = tempoScore;
+			}
+			result.put("score", score);
+		}
+		return result;
+	}
+	
+
+	public void evaluateForNote(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis) {
+		
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		
+		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+		
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
+		
+		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
+		
+		double durationTime = chunkAnalysisList.get(chunkAnalysisList.size() - 1).getEndTime() - chunkAnalysisList.get(0).getStartTime();
+		
+		double playDurationTime = 0;
+		
+		if (percussionList.contains(subjectId)) {
+			if (noteAnalysis.getFrequency() == -1) {// 休止符
+				if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}else{
+				int beatTimes = (int) chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count();
+				LOGGER.info("Amplitude:{}  beatTimes:{}  Denominator:{}",chunkAnalysisList.stream().map(t -> t.getAmplitude()).collect(Collectors.toList()), beatTimes, musicXmlNote.getDenominator());
+				if(beatTimes == 0){
+					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
+				}else if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}
+		} else {
+			
+			NotePlayResult notePlayResult = queryNoteFrequency(musicXmlNote, noteAnalysis.getPlayFrequency());
+			
+			if (noteAnalysis.getFrequency() == -1) {// 休止符
+
+				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= 100).mapToDouble(t -> t.getDurationTime()).sum();
+
+				if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else if (playDurationTime * 100 / durationTime < hardLevel.getIntegrityRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
+				} else if (notePlayResult.getStatus() == false) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			} else {
+				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() > 100 && t.getFrequency() < 2000)
+						.mapToDouble(t -> t.getDurationTime()).sum();
+
+				if (playDurationTime * 100 / durationTime < hardLevel.getNotPlayRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
+					LOGGER.info("未演奏:{}", playDurationTime * 100 / durationTime);
+				} else if (playDurationTime * 100 / durationTime < hardLevel.getIntegrityRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
+					LOGGER.info("完整度不足:{}", playDurationTime * 100 / durationTime);
+				} else if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else if (notePlayResult.getStatus() == false) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}
+			
+		}
+
+		// 计算音分
+		int tempoScore = 0;
+		int integrityScore = 0;
+		int intonationScore = 100 - new BigDecimal(Math.abs(YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getPlayFrequency())
+				- YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getFrequency()))).multiply(new BigDecimal(20)).divide(new BigDecimal(17), BigDecimal.ROUND_UP)
+				.setScale(0, BigDecimal.ROUND_UP).intValue();
+		if (intonationScore < 0) {
+			intonationScore = 0;
+		} else if (intonationScore > 100) {
+			intonationScore = 100;
+		}
+
+		if (noteAnalysis.getMusicalErrorType() == NoteErrorType.NOT_PLAY) {
+			intonationScore = 0;
+		} else {
+
+			if (noteAnalysis.isTempo()) {
+				tempoScore = 100;
+				noteAnalysis.setTempoScore(tempoScore);
+			}
+
+			integrityScore = (int) (playDurationTime * 100 * 100 / hardLevel.getIntegrityRange() / durationTime);
+			if (integrityScore > 100) {
+				integrityScore = 100;
+			}
+			noteAnalysis.setIntegrityScore(integrityScore);
+		}
+		noteAnalysis.setIntonationScore(intonationScore);
+		if (percussionList.contains(subjectId)) {
+			noteAnalysis.setScore(tempoScore);
+		} else {
+			noteAnalysis.setScore(new BigDecimal(intonationScore + tempoScore + integrityScore).divide(new BigDecimal(3), 2).setScale(0, BigDecimal.ROUND_UP)
+					.intValue());
+		}
+	}
+	
+	private int computeFrequency(MusicXmlNote musicXmlNote) {
+		
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		
+		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+		
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
+		
+		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
+
+		//根据完整度取部分有效信号
+		int elementSize = chunkAnalysisList.size() * hardLevel.getIntegrityRange() / 100;
+		chunkAnalysisList = chunkAnalysisList.subList(0, elementSize);
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			return -1;
+		}
+		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		LOGGER.info("-------startTime:{}  endTime:{}------", firstChunkAnalysis.getStartTime(), chunkAnalysisList.get(chunkAnalysisList.size() - 1)
+				.getEndTime());
+		
+		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
+		
+		List<Integer> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency()).filter(t -> t.doubleValue() > 100 && t.doubleValue() < 2000)
+				.collect(Collectors.toList());
+		
+		if (chunkFrequencyList.size() == 0) {
+			return -1;
+		}
+
+		int frequency = (int) (chunkFrequencyList.stream().mapToInt(t -> t).sum() / chunkFrequencyList.size());
+
+		return frequency;
+	}
+	
+	/**
+	 * 时值范围内有且只有一个音,且不能间断,且在合理范围内需开始演奏
+	 * 与上一个音相同时,2个音之间需要间断
+	 * @param musicXmlNote
+	 * @return
+	 */
+	private boolean computeTempoWithFrequency(MusicXmlNote musicXmlNote){
+		
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		
+		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+		
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
+		
+		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
+		
+		//根据完整度取部分有效信号
+		int elementSize = chunkAnalysisList.size() * hardLevel.getIntegrityRange() / 100;
+		List<ChunkAnalysis> chunkList = chunkAnalysisList.subList(0, elementSize);
+		
+		if(chunkList == null || chunkList.size() == 0){
+			return false;
+		}
+		
+		if (musicXmlNote.getFrequency() == -1) {// 休止符
+			return chunkList.stream().filter(t -> t.getFrequency() > 100).count() <= 1;
+		}
+		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) == Double.doubleToLongBits(firstChunkAnalysis.getStartTime())).findFirst();
+
+		ChunkAnalysis lastChunkAnalysis = null;
+		if (chunkAnalysisOptional.isPresent()) {
+			lastChunkAnalysis = chunkAnalysisOptional.get();
+		}
+		
+		if(lastChunkAnalysis == null){
+			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
+		}
+		
+		/*List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
+		
+		if(chunkList.size() == 0){
+			return false;
+		}*/
+		
+		NoteFrequencyRange noteFrequencyRange = null;
+		ChunkAnalysis chunkAnalysis = null;
+		boolean tempo = false;
+		boolean isContinue = true;
+		int unplayedSize = 0;
+		int firstPeakIndex = -1;
+		for (int i = 0; i < chunkList.size(); i++) {
+			chunkAnalysis = chunkList.get(i);
+			if (chunkAnalysis != null) {
+				if (chunkAnalysis.getFrequency() > 100) {
+					
+					tempo = true;
+					if (firstPeakIndex == -1) {
+						firstPeakIndex = i;
+						noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, chunkAnalysis.getFrequency());
+					} else if (noteFrequencyRange.getMinFrequency() > chunkAnalysis.getFrequency()
+							|| chunkAnalysis.getFrequency() > noteFrequencyRange.getMaxFrequency()) {
+						// 判断是否是同一个音
+						tempo = false;
+						LOGGER.info("节奏错误原因:不是同一个音[{}]:{}-{}", chunkAnalysis.getFrequency(), noteFrequencyRange.getMinFrequency(), noteFrequencyRange.getMaxFrequency());
+						break;
+					}
+					if (isContinue == false) {
+						if ((i + 1) / chunkAnalysisList.size() < hardLevel.getIntegrityRange()) {
+							if (unplayedSize > 0) {
+								tempo = false;
+								LOGGER.info("节奏错误原因:信号不连续");
+								break;
+							}
+						}
+					}
+				} else {
+					if (tempo == true) {
+						isContinue = false;
+						unplayedSize++;
+					}
+				}
+			}
+		}
+		
+		if (tempo) {
+			// 判断进入时间点
+			if(firstPeakIndex * 100 /chunkAnalysisList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())){
+				tempo = false;
+				LOGGER.info("节奏错误原因:进入时间点太晚");
+			}else{
+				//判断是否与上一个音延续下来的
+				if(firstChunkAnalysis.getFrequency() > 100 && lastChunkAnalysis.getFrequency() > 100){
+					tempo = new NoteFrequencyRange(standardFrequecy, firstChunkAnalysis.getFrequency()).equals(new NoteFrequencyRange(standardFrequecy, lastChunkAnalysis.getFrequency())) == false;
+					if(tempo == false){
+						LOGGER.info("节奏错误原因:上一个音延续下来导致的");
+					}
+				}
+			}
+		}
+		
+		return tempo;
+	}
+	
+	private boolean computeTempoWithAmplitude2(MusicXmlNote musicXmlNote) {
+
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		
+		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+		
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
+		
+		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
+		
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			return false;
+		}
+		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		LOGGER.info("-------startTime:{}  endTime:{}------", firstChunkAnalysis.getStartTime(), chunkAnalysisList.get(chunkAnalysisList.size() - 1)
+				.getEndTime());
+
+		if (musicXmlNote.getFrequency() == -1) {// 休止符
+			return chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count() <= 0;
+		}
+		
+		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) == Double.doubleToLongBits(firstChunkAnalysis.getStartTime())).findFirst();
+
+		ChunkAnalysis lastChunkAnalysis = null;
+		if (chunkAnalysisOptional.isPresent()) {
+			lastChunkAnalysis = chunkAnalysisOptional.get();
+		}
+		if(lastChunkAnalysis == null){
+			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
+		}
+		
+		List<Integer> chunkAmplitudeList = chunkAnalysisList.stream().map(ChunkAnalysis::getAmplitude).collect(Collectors.toList());
+
+		chunkAmplitudeList.add(0, lastChunkAnalysis.getAmplitude());
+		
+		LOGGER.info("--Amplitude:{}  Denominator:{}",chunkAmplitudeList.stream().map(t -> t).collect(Collectors.toList()), musicXmlNote.getDenominator());
+		
+		// 检测是否有多个波峰
+		boolean tempo = false;
+		boolean isContinue = true;
+		int firstPeakIndex = -1;
+		int peakSize = 0;
+		for (int i = 1; i < chunkAmplitudeList.size(); i++) {
+			if (chunkAmplitudeList.get(i) > hardLevel.getAmplitudeThreshold() && chunkAmplitudeList.get(i) > chunkAmplitudeList.get(i - 1) + 1) {
+				tempo = true;
+				if(firstPeakIndex == -1){
+					firstPeakIndex = i;
+					peakSize++;
+				}
+				if (isContinue == false) {
+					tempo = false;
+					peakSize++;
+					break;
+				}
+			} else {
+				if (tempo == true) {
+					isContinue = false;
+				}
+			}
+		}
+		
+		if(peakSize == 0){
+			tempo = lastChunkAnalysis.isPeak();
+		}else if(peakSize == 1){
+			tempo = true;
+		}else{
+			tempo = false;
+		}
+		
+		if (tempo) {
+			// 判断进入时间点
+			if((firstPeakIndex - 1) * 100 /chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) * 2){
+				LOGGER.info("超过范围:{}", (firstPeakIndex - 1) * 100 /chunkAmplitudeList.size());
+				tempo = false;
+			}
+		}
+		
+		return tempo;
+	}
+	
+	private double queryFirstNoteStartTime(List<ChunkAnalysis> chunkAnalysisList, MusicXmlNote musicXmlNote) {
+		
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			return musicXmlNote.getTimeStamp() + dynamicOffset;
+		}
+		
+		if (percussionList.contains(subjectId)) {
+			Optional<ChunkAnalysis> optional = chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).findFirst();
+			if(optional.isPresent()){
+				return optional.get().getStartTime();
+			}else{
+				return musicXmlNote.getTimeStamp() + dynamicOffset;
+			}
+		}
+		
+		//判断是否与上一个音是同一个音符
+		if(musicXmlNote.getMusicalNotesIndex() > 0){
+			MusicXmlNote preMusicXmlNote = getCurrentMusicNote(null, musicXmlNote.getMusicalNotesIndex() - 1);
+			if((int)preMusicXmlNote.getFrequency() == (int)musicXmlNote.getFrequency()){
+				Optional<ChunkAnalysis> optional = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= 100).findFirst();
+				if(optional.isPresent()){
+					return optional.get().getEndTime();
+				}else{
+					return musicXmlNote.getTimeStamp() + dynamicOffset;
+				}
+			}
+		}
+
+		NoteFrequencyRange standardNote = new NoteFrequencyRange(standardFrequecy, musicXmlNote.getFrequency());
+
+		NoteFrequencyRange noteFrequencyRange = null;
+
+		for (ChunkAnalysis ca : chunkAnalysisList) {
+			noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, ca.getFrequency());
+			if (standardNote.equals(noteFrequencyRange)) {
+				return ca.getStartTime();
+			}
+		}
+
+		//return chunkAnalysisList.get(chunkAnalysisList.size() - 1).getEndTime();
+		return musicXmlNote.getTimeStamp() + dynamicOffset;
+	}
+	
+	public static void main(String[] args) {
+		double[] midi = new double[128];;
+		int standardPitch = 440; // a is 440 hz...
+		for (int x = 0; x < midi.length; ++x)
+		{
+		   midi[x] = new BigDecimal(standardPitch).multiply(new BigDecimal(Math.pow(2, new BigDecimal(x-69).divide(new BigDecimal(12),6,BigDecimal.ROUND_HALF_UP).doubleValue()))).doubleValue();
+		   System.out.println("x=" + x +"  "+ midi[x]);
+		}
+		
+	}
+	
+}
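
A condensed sketch of the per-note score combination implemented in evaluateForNote above: the percussion subjects (23, 113) keep only the tempo score, while other subjects take the rounded-up average of intonation, tempo and integrity. The class name, the combine helper and the sample sub-scores are illustrative, not part of the commit.

package com.yonge.netty.dto;

import java.math.BigDecimal;
import java.util.Arrays;
import java.util.List;

/**
 * Hypothetical sketch (not part of the commit) of the final per-note score
 * rule used in UserChannelContext.evaluateForNote.
 */
public class NoteScoreDemo {

	private static final List<Integer> PERCUSSION_SUBJECTS = Arrays.asList(23, 113);

	static int combine(int subjectId, int intonationScore, int tempoScore, int integrityScore) {
		if (PERCUSSION_SUBJECTS.contains(subjectId)) {
			// Percussion is scored on rhythm only
			return tempoScore;
		}
		// Average of the three sub-scores, rounded up as in the original code
		return new BigDecimal(intonationScore + tempoScore + integrityScore)
				.divide(new BigDecimal(3), 2, BigDecimal.ROUND_HALF_UP)
				.setScale(0, BigDecimal.ROUND_UP).intValue();
	}

	public static void main(String[] args) {
		System.out.println(combine(5, 80, 100, 90));  // melodic subject -> 90
		System.out.println(combine(23, 80, 100, 90)); // percussion subject -> 100
	}
}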

+ 58 - 40
audio-analysis/src/main/java/com/yonge/nettty/dto/UserChannelContext.java → audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext2.java

@@ -1,4 +1,4 @@
-package com.yonge.nettty.dto;
+package com.yonge.netty.dto;
 
 import java.math.BigDecimal;
 import java.util.ArrayList;
@@ -19,20 +19,22 @@ import org.slf4j.LoggerFactory;
 import com.yonge.audio.analysis.Signals;
 import com.yonge.audio.analysis.detector.YINPitchDetector;
 import com.yonge.audio.utils.ArrayUtil;
-import com.yonge.nettty.dto.NoteAnalysis.NoteErrorType;
-import com.yonge.nettty.entity.MusicXmlBasicInfo;
-import com.yonge.nettty.entity.MusicXmlNote;
-import com.yonge.nettty.entity.MusicXmlSection;
+import com.yonge.netty.dto.NoteAnalysis.NoteErrorType;
+import com.yonge.netty.entity.MusicXmlBasicInfo;
+import com.yonge.netty.entity.MusicXmlNote;
+import com.yonge.netty.entity.MusicXmlSection;
 import com.yonge.netty.server.processor.WaveformWriter;
 
 /**
  * 用户通道上下文
  */
-public class UserChannelContext {
+public class UserChannelContext2 {
 	
 	private final static Logger LOGGER = LoggerFactory.getLogger(UserChannelContext.class);
 	
-	private final double offsetMS = 300;
+	private int offsetMS = 350;
+	
+	private String platform;
 	
 	private Long recordId;
 	
@@ -67,14 +69,15 @@ public class UserChannelContext {
 	
 	private HardLevelEnum hardLevel = HardLevelEnum.ADVANCED;
 	
-	public void init(String heardLevel, int subjectId, int beatDuration) {
+	public void init(String platform, String heardLevel, int subjectId, int beatDuration) {
+		this.platform = platform;
 		this.subjectId = subjectId;
 		this.beatDuration = beatDuration;
 		this.beatByteLength = WaveformWriter.SAMPLE_RATE * WaveformWriter.BITS_PER_SAMPLE / 8 * beatDuration / 1000;
 		hardLevel = HardLevelEnum.valueOf(heardLevel);
 	}
 	
-	public byte[] skipHeader(byte[] datas) {
+	public byte[] skipMetronome(byte[] datas) {
 		if (beatByteLength > 0) {
 			if (datas.length <= beatByteLength) {
 				beatByteLength -= datas.length;
@@ -97,6 +100,18 @@ public class UserChannelContext {
 		this.recordId = recordId;
 	}
 
+	public int getOffsetMS() {
+		return offsetMS;
+	}
+
+	public void setOffsetMS(int offsetMS) {
+		this.offsetMS = offsetMS;
+	}
+
+	public HardLevelEnum getHardLevel() {
+		return hardLevel;
+	}
+
 	public ConcurrentHashMap<Integer, MusicXmlBasicInfo> getSongMusicXmlMap() {
 		return songMusicXmlMap;
 	}
@@ -254,11 +269,16 @@ public class UserChannelContext {
 		int splDb = (int) Signals.soundPressureLevel(samples);
 		int power = (int) Signals.power(samples);
 		int amplitude = (int) Signals.norm(samples);
+		float rms = Signals.rms(samples);
 		
 		double durationTime = 1000 * (samples.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
 		
 		receivedTime += durationTime;
 		
+		/*if(offsetMS == 0){
+			return;
+		}*/
+		
 		if(receivedTime < offsetMS){
 			return;
 		}
@@ -287,7 +307,8 @@ public class UserChannelContext {
 				LOGGER.info("------ Frequency:{}  splDb:{}  Power:{}  amplitude:{} time:{}------", playFrequency, splDb, power, amplitude, playTime);
 				
 				ChunkAnalysis lastChunkAnalysis = new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude);
-				if(Math.abs(chunkAnalysisList.get(chunkAnalysisList.size() - 1).getFrequency() - lastChunkAnalysis.getFrequency()) > hardLevel.getFrequencyOffset()){
+				
+				if(Math.abs(chunkAnalysisList.get(chunkAnalysisList.size() - 1).getFrequency() - lastChunkAnalysis.getFrequency()) > hardLevel.getFrequencyThreshold()){
 					lastChunkAnalysis.setFrequency(-1);
 				}
 				if(chunkAnalysisList.get(chunkAnalysisList.size() - 1).getAmplitude() + 2 < lastChunkAnalysis.getAmplitude()){
@@ -310,21 +331,21 @@ public class UserChannelContext {
 					
 				}
 				
-				noteAnalysis.setPlayFrequency(computeFrequency(chunkAnalysisList, lastChunkAnalysis, hardLevel.getFrequencyOffset()));
+				noteAnalysis.setPlayFrequency(computeFrequency(chunkAnalysisList, lastChunkAnalysis, hardLevel.getFrequencyThreshold()));
 				
 				//判断节奏(音符持续时间内有不间断的音高,就节奏正确)
 				boolean tempo = true;
-				if (subjectId == 23) {
+				if (subjectId == 23 || subjectId == 113) {
 					if (musicXmlNote.getFrequency() == -1) {// 休止符
 						tempo = chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count() <= 0;
 					}else{
-						tempo = computeTempoWithAmplitude2(chunkAnalysisList, lastChunkAnalysis);
+						tempo = computeTempoWithAmplitude2(musicXmlNote, chunkAnalysisList, lastChunkAnalysis);
 					}
 				}else{
 					if (musicXmlNote.getFrequency() == -1) {// 休止符
 						tempo = chunkAnalysisList.stream().filter(t -> t.getFrequency() > 100).count() <= 1;
 					}else{
-						tempo = computeTempoWithFrequency(chunkAnalysisList, lastChunkAnalysis);
+						tempo = computeTempoWithFrequency(musicXmlNote, chunkAnalysisList, lastChunkAnalysis);
 					}
 				}
 				
@@ -359,13 +380,7 @@ public class UserChannelContext {
 
 			} else {
 				
-				/*double skip = 0;
-				if (firstNoteIndexPerSectionList.contains(noteAnalysis.getMusicalNotesIndex())) {
-					skip = offsetMSOfSection;
-				}*/
-				//skip = noteAnalysis.getStandardDurationTime() * 0.2;
-				
-				LOGGER.info("Frequency:{}  splDb:{}  Power:{}  amplitude:{}", playFrequency, splDb, power, amplitude);
+				LOGGER.info("Frequency:{}  splDb:{}  Power:{}  amplitude:{}  rms:{}", playFrequency, splDb, power, amplitude, rms);
 				
 				chunkAnalysisList.add(new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude));
 				
@@ -447,7 +462,7 @@ public class UserChannelContext {
 			int score = socre / noteAnalysisList.size();
 
 			// 平均得分
-			if (getMusicXmlBasicInfo(null).getSubjectId() == 23) {
+			if (getMusicXmlBasicInfo(null).getSubjectId() == 23 || getMusicXmlBasicInfo(null).getSubjectId() == 113) {
 				score = tempoScore;
 			}
 			result.put("score", score);
@@ -460,7 +475,7 @@ public class UserChannelContext {
 
 		double playDurationTime = 0;
 		
-		if (subjectId == 23) {
+		if (subjectId == 23 || subjectId == 113) {
 			if (noteAnalysis.getFrequency() == -1) {// 休止符
 				if (!noteAnalysis.isTempo()) {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
@@ -469,7 +484,7 @@ public class UserChannelContext {
 				}
 			}else{
 				int beatTimes = (int) chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count();
-				
+				LOGGER.info("Amplitude:{}  beatTimes:{}",chunkAnalysisList.stream().map(t -> t.getAmplitude()).collect(Collectors.toList()),beatTimes);
 				if(beatTimes == 0){
 					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
 				}else if (!noteAnalysis.isTempo()) {
@@ -487,7 +502,7 @@ public class UserChannelContext {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
 				} else if (playDurationTime * 100 / noteAnalysis.getDurationTime() < hardLevel.getIntegrityRange()) {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
-				} else if (Math.abs(noteAnalysis.getFrequency() - noteAnalysis.getPlayFrequency()) > hardLevel.getFrequencyOffset()) {
+				} else if (Math.abs(noteAnalysis.getFrequency() - noteAnalysis.getPlayFrequency()) > hardLevel.getFrequencyThreshold()) {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
 				} else {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
@@ -502,7 +517,7 @@ public class UserChannelContext {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
 				} else if (!noteAnalysis.isTempo()) {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
-				} else if (Math.abs(noteAnalysis.getFrequency() - noteAnalysis.getPlayFrequency()) > hardLevel.getFrequencyOffset()) {
+				} else if (Math.abs(noteAnalysis.getFrequency() - noteAnalysis.getPlayFrequency()) > hardLevel.getFrequencyThreshold()) {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
 				} else {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
@@ -514,7 +529,7 @@ public class UserChannelContext {
 		int tempoScore = 0;
 		int integrityScore = 0;
 		int intonationScore = 100 - new BigDecimal(Math.abs(YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getPlayFrequency())
-				- YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getFrequency()))).multiply(new BigDecimal(10)).divide(new BigDecimal(17), BigDecimal.ROUND_UP)
+				- YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getFrequency()))).multiply(new BigDecimal(20)).divide(new BigDecimal(17), BigDecimal.ROUND_UP)
 				.setScale(0, BigDecimal.ROUND_UP).intValue();
 		if (intonationScore < 0) {
 			intonationScore = 0;
@@ -540,7 +555,7 @@ public class UserChannelContext {
 			noteAnalysis.setIntegrityScore(integrityScore);
 		}
 		noteAnalysis.setIntonationScore(intonationScore);
-		if (subjectId == 23) {
+		if (subjectId == 23 || subjectId == 113) {
 			noteAnalysis.setScore(tempoScore);
 		} else {
 			noteAnalysis.setScore(new BigDecimal(intonationScore + tempoScore + integrityScore).divide(new BigDecimal(3), 2).setScale(0, BigDecimal.ROUND_UP)
@@ -619,7 +634,7 @@ public class UserChannelContext {
 		}
 
 		if (chunkFrequencyList.size() < 3) {
-			frequency = (int)(chunkFrequencyList.stream().collect(Collectors.summingDouble(t -> t)) / chunkFrequencyList.size());
+			frequency = (int)chunkFrequencyList.get(chunkFrequencyList.size() - 1);
 		}
 		
 		if(frequency < 100){
@@ -629,7 +644,7 @@ public class UserChannelContext {
 		return frequency;
 	}
 	
-	private boolean computeTempoWithFrequency(List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis){
+	private boolean computeTempoWithFrequency(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis){
 		
 		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
 		
@@ -638,7 +653,7 @@ public class UserChannelContext {
 			double lastFrequency = lastChunkAnalysis.getFrequency();
 			Iterator<ChunkAnalysis> iterable = chunkList.iterator();
 			while (iterable.hasNext()) {
-				if (Math.abs(lastFrequency - iterable.next().getFrequency()) > hardLevel.getFrequencyOffset()) {
+				if (Math.abs(lastFrequency - iterable.next().getFrequency()) > hardLevel.getFrequencyThreshold()) {
 					break;
 				}
 				iterable.remove();
@@ -686,7 +701,7 @@ public class UserChannelContext {
 		
 		if (tempo) {
 			// Check the onset time
-			if((chunkAnalysisList.size() - chunkList.size() + firstPeakIndex) * 100 /chunkAnalysisList.size() > hardLevel.getTempoOffsetOfPercent()){
+			if((chunkAnalysisList.size() - chunkList.size() + firstPeakIndex) * 100 /chunkAnalysisList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())){
 				tempo = false;
 			}
 		}
@@ -694,13 +709,13 @@ public class UserChannelContext {
 		return tempo;
 	}
 	
-	private boolean computeTempoWithAmplitude2(List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis) {
+	private boolean computeTempoWithAmplitude2(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis) {
 
 		List<Integer> chunkAmplitudeList = chunkAnalysisList.stream().map(ChunkAnalysis::getAmplitude).collect(Collectors.toList());
 
-		if (chunkAmplitudeList.size() <= 3) {
+		/*if (chunkAmplitudeList.size() <= 3) {
 			return chunkAmplitudeList.stream().filter(t -> t.floatValue() > hardLevel.getAmplitudeThreshold()).count() > 0;
-		}
+		}*/
 		
 		chunkAmplitudeList.add(0, lastChunkAnalysis.getAmplitude());
 		
@@ -738,7 +753,7 @@ public class UserChannelContext {
 		
 		if (tempo) {
 			// Check the onset time
-			if((firstPeakIndex - 1) * 100 /chunkAmplitudeList.size() > hardLevel.getTempoOffsetOfPercent()){
+			if((firstPeakIndex - 1) * 100 /chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())){
 				tempo = false;
 			}
 		}
@@ -746,7 +761,7 @@ public class UserChannelContext {
 		return tempo;
 	}
 	
-	private boolean computeTempoWithAmplitude(List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis) {
+	private boolean computeTempoWithAmplitude(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis) {
 
 		boolean tempo = false;
 
@@ -796,7 +811,7 @@ public class UserChannelContext {
 
 		// Check whether the entry was delayed
 		if (tempo == true) {
-			if (minPeakIndex * 100 / chunkAmplitudeList.size() > hardLevel.getTempoOffsetOfPercent() && chunkAmplitudeList.size() > 3) {
+			if (minPeakIndex * 100 / chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) && chunkAmplitudeList.size() > 3) {
 				tempo = false;
 			}
 		}
@@ -805,7 +820,7 @@ public class UserChannelContext {
 	}
 	
 	public static void main(String[] args) {
-		UserChannelContext context = new UserChannelContext();
+		UserChannelContext2 context = new UserChannelContext2();
 		
 		//int[] frequencys = {286,291,291,291,291,291,291};
 		int[] frequencys = {312,43,295,294,294,295};
@@ -817,8 +832,11 @@ public class UserChannelContext {
 			chunkAnalysisList.add(new ChunkAnalysis(f, 0, 0));
 		}
 		
+		MusicXmlNote musicXmlNote = new MusicXmlNote();
+		musicXmlNote.setDenominator(1);
+		
 		//System.out.println(context.computeFrequency(chunkAnalysisList, lastChunkAnalysis, 5));
-		System.out.println(context.computeTempoWithFrequency(chunkAnalysisList, lastChunkAnalysis));
+		System.out.println(context.computeTempoWithFrequency(musicXmlNote, chunkAnalysisList, lastChunkAnalysis));
 	}
 	
 }
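
A minimal, self-contained sketch of the cent-based intonation score touched in the hunks above. It assumes YINPitchDetector.hertzToAbsoluteCent is the usual 1200 * log2(f / fRef) conversion; only the difference between two converted values is used, so the reference frequency cancels and a plain log2 of the frequency ratio is enough here. The 20/17 factor and the [0, 100] clamp mirror the replacement lines in the diff.

// Hedged sketch: deduct 20/17 of a point per cent of pitch error, clamp to [0, 100].
public class IntonationScoreSketch {

	// Cents between two frequencies (positive when the played pitch is sharp).
	static double centsBetween(double playedHz, double expectedHz) {
		return 1200.0 * Math.log(playedHz / expectedHz) / Math.log(2);
	}

	static int intonationScore(double playedHz, double expectedHz) {
		double cents = Math.abs(centsBetween(playedHz, expectedHz));
		int score = 100 - (int) Math.ceil(cents * 20.0 / 17.0);
		return Math.max(0, Math.min(100, score));
	}

	public static void main(String[] args) {
		// Playing 452 Hz against an expected 440 Hz is about 47 cents sharp and scores roughly 45.
		System.out.println(intonationScore(452, 440));
	}
}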

+ 841 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext3.java

@@ -0,0 +1,841 @@
+package com.yonge.netty.dto;
+
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.yonge.audio.analysis.Signals;
+import com.yonge.audio.analysis.detector.YINPitchDetector;
+import com.yonge.audio.utils.ArrayUtil;
+import com.yonge.netty.dto.NoteAnalysis.NoteErrorType;
+import com.yonge.netty.entity.MusicXmlBasicInfo;
+import com.yonge.netty.entity.MusicXmlNote;
+import com.yonge.netty.entity.MusicXmlSection;
+import com.yonge.netty.server.processor.WaveformWriter;
+
+/**
+ * User channel context
+ */
+public class UserChannelContext3 {
+	
+	private final static Logger LOGGER = LoggerFactory.getLogger(UserChannelContext3.class);
+	
+	private double standardFrequecy = 442;
+	
+	private int offsetMS;
+	
+	private double dynamicOffset;
+	
+	private String platform;
+	
+	private Long recordId;
+	
+	private Integer subjectId;
+	
+	private int beatDuration;
+	
+	private int beatByteLength;
+	
+	private boolean delayProcessed;
+	
+	// Mapping from song id to its MusicXML data
+	private ConcurrentHashMap<Integer, MusicXmlBasicInfo> songMusicXmlMap = new ConcurrentHashMap<Integer, MusicXmlBasicInfo>();
+
+	private WaveformWriter waveFileProcessor;
+
+	private NoteAnalysis processingNote = new NoteAnalysis(0, 0, -1);
+	
+	private AtomicInteger evaluatingSectionIndex = new AtomicInteger(0);
+	
+	private List<NoteAnalysis> doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
+	
+	private List<SectionAnalysis> doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
+	
+	private List<ChunkAnalysis> totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+	
+	private byte[] channelBufferBytes = new byte[0];
+	
+	private double playTime;
+	
+	private double receivedTime;
+	
+	private HardLevelEnum hardLevel = HardLevelEnum.ADVANCED;
+	
+	private NotePlayResult queryNoteFrequency(MusicXmlNote xmlNote, double playFrequency) {
+
+		NotePlayResult result = new NotePlayResult();
+
+		boolean status = false;
+		double migrationRate = 0;
+
+		if (Math.round(xmlNote.getFrequency()) == Math.round(playFrequency)) {
+			status = true;
+			migrationRate = 0;
+		} else {
+			NoteFrequencyRange noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, xmlNote.getFrequency());
+
+			if (noteFrequencyRange.getMinFrequency() > playFrequency || playFrequency > noteFrequencyRange.getMaxFrequency()) {
+				status = false;
+			} else {
+
+				status = true;
+
+				if (Math.round(playFrequency) < Math.round(xmlNote.getFrequency())) {
+					double min = Math.abs(xmlNote.getFrequency() - noteFrequencyRange.getMinFrequency()) / 2;
+					migrationRate = Math.abs(playFrequency - xmlNote.getFrequency()) / min;
+				} else {
+					double max = Math.abs(xmlNote.getFrequency() - noteFrequencyRange.getMaxFrequency()) / 2;
+					migrationRate = Math.abs(playFrequency - xmlNote.getFrequency()) / max;
+				}
+			}
+		}
+
+		result.setStatus(status);
+		result.setMigrationRate(migrationRate);
+
+		return result;
+	}
+	
+	public void init(String platform, String heardLevel, int subjectId, int beatDuration) {
+		this.platform = platform;
+		this.subjectId = subjectId;
+		this.beatDuration = beatDuration;
+		this.beatByteLength = WaveformWriter.SAMPLE_RATE * WaveformWriter.BITS_PER_SAMPLE / 8 * beatDuration / 1000;
+		hardLevel = HardLevelEnum.valueOf(heardLevel);
+	}
+	
+	public byte[] skipMetronome(byte[] datas) {
+		if (beatByteLength > 0) {
+			if (datas.length <= beatByteLength) {
+				beatByteLength -= datas.length;
+				return new byte[0];
+			}
+			if(beatByteLength % 2 != 0){
+				beatByteLength++;
+			}
+			datas = ArrayUtil.extractByte(datas, beatByteLength, datas.length - 1);
+			beatByteLength = 0;
+		}
+		return datas;
+	}
+	
+	public Long getRecordId() {
+		return recordId;
+	}
+
+	public void setRecordId(Long recordId) {
+		this.recordId = recordId;
+	}
+
+	public int getOffsetMS() {
+		return offsetMS;
+	}
+
+	public void setOffsetMS(int offsetMS) {
+		this.offsetMS = offsetMS;
+	}
+
+	public HardLevelEnum getHardLevel() {
+		return hardLevel;
+	}
+
+	public ConcurrentHashMap<Integer, MusicXmlBasicInfo> getSongMusicXmlMap() {
+		return songMusicXmlMap;
+	}
+
+	public WaveformWriter getWaveFileProcessor() {
+		return waveFileProcessor;
+	}
+
+	public void setWaveFileProcessor(WaveformWriter waveFileProcessor) {
+		this.waveFileProcessor = waveFileProcessor;
+	}
+
+	public NoteAnalysis getProcessingNote() {
+		return processingNote;
+	}
+
+	public void setProcessingNote(NoteAnalysis processingNote) {
+		this.processingNote = processingNote;
+	}
+	
+	public List<SectionAnalysis> getDoneSectionAnalysisList() {
+		return doneSectionAnalysisList;
+	}
+
+	public List<NoteAnalysis> getDoneNoteAnalysisList() {
+		return doneNoteAnalysisList;
+	}
+
+	public void resetUserInfo() {
+		beatByteLength = WaveformWriter.SAMPLE_RATE * WaveformWriter.BITS_PER_SAMPLE / 8 * beatDuration / 1000;
+		waveFileProcessor = null;
+		processingNote = new NoteAnalysis(0,0,-1);
+		evaluatingSectionIndex = new AtomicInteger(0);
+		channelBufferBytes = new byte[0];
+		doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
+		doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
+		totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+		recordId = null;
+		playTime = 0;
+		receivedTime = 0;
+		delayProcessed = false;
+		dynamicOffset = 0;
+	}
+	
+	public MusicXmlBasicInfo getMusicXmlBasicInfo(Integer songId){
+		MusicXmlBasicInfo musicXmlBasicInfo = null;
+		if (songId == null) {
+			musicXmlBasicInfo = songMusicXmlMap.values().stream().findFirst().get();
+		} else {
+			musicXmlBasicInfo = songMusicXmlMap.get(songId);
+		}
+		return musicXmlBasicInfo;
+	}
+	
+	public MusicXmlSection getCurrentMusicSection(Integer songId, int sectionIndex){
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+		return musicXmlBasicInfo.getMusicXmlSectionMap().get(sectionIndex);
+	}
+
+	public MusicXmlNote getCurrentMusicNote(Integer songId, Integer noteIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		if(noteIndex == null){
+			noteIndex = processingNote.getMusicalNotesIndex();
+		}
+		final int index = noteIndex;
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null && index <= getTotalMusicNoteIndex(null)) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == index).findFirst().get();
+		}
+
+		return null;
+	}
+
+	public int getTotalMusicNoteIndex(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMusicalNotesIndex()).distinct().max(Integer::compareTo).get();
+		}
+
+		return -1;
+	}
+
+	public List<MusicXmlNote> getCurrentMusicSection(Integer songId, Integer sectionIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return null;
+		}
+		if(sectionIndex == null){
+			sectionIndex = processingNote.getSectionIndex();
+		}
+		final int index = sectionIndex;
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == index)
+					.sorted(Comparator.comparing(MusicXmlNote::getMusicalNotesIndex)).collect(Collectors.toList());
+		}
+
+		return null;
+	}
+
+	public int getTotalMusicSectionSize(Integer songId) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return (int) musicXmlBasicInfo.getMusicXmlInfos().stream().map(t -> t.getMeasureIndex()).distinct().count();
+		}
+
+		return -1;
+	}
+	
+	public int getMusicSectionIndex(Integer songId, int musicXmlNoteIndex) {
+		if (songMusicXmlMap.size() == 0) {
+			return -1;
+		}
+		
+		if(getTotalMusicNoteIndex(null) < musicXmlNoteIndex){
+			return -1;
+		}
+		
+		MusicXmlBasicInfo musicXmlBasicInfo = getMusicXmlBasicInfo(songId);
+
+		if (musicXmlBasicInfo != null) {
+			return musicXmlBasicInfo.getMusicXmlInfos().stream().filter(t -> t.getMusicalNotesIndex() == musicXmlNoteIndex).findFirst().get().getMeasureIndex();
+		}
+
+		return -1;
+	}
+	
+	public byte[] getChannelBufferBytes() {
+		return channelBufferBytes;
+	}
+
+	public void setChannelBufferBytes(byte[] channelBufferBytes) {
+		this.channelBufferBytes = channelBufferBytes;
+	}
+
+	public AtomicInteger getEvaluatingSectionIndex() {
+		return evaluatingSectionIndex;
+	}
+
+	public void handle(float[] samples, AudioFormat audioFormat){
+		
+		YINPitchDetector frequencyDetector = new YINPitchDetector(samples.length , audioFormat.getSampleRate());
+
+		int playFrequency = (int) frequencyDetector.getFrequency(samples);
+		int splDb = (int) Signals.soundPressureLevel(samples);
+		int power = (int) Signals.power(samples);
+		int amplitude = (int) Signals.norm(samples);
+		float rms = Signals.rms(samples);
+		
+		double durationTime = 1000 * (samples.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
+		
+		receivedTime += durationTime;
+		
+		if(receivedTime < offsetMS){
+			return;
+		}
+		
+		playTime += durationTime;
+		
+		// Get the current note information
+		MusicXmlNote musicXmlNote = getCurrentMusicNote(null,null);
+
+		if (musicXmlNote == null) {
+			return;
+		}
+		
+		// Fetch the note currently being processed
+		NoteAnalysis noteAnalysis = getProcessingNote();
+		if(noteAnalysis == null || noteAnalysis.getDurationTime() == 0) {
+			noteAnalysis = new NoteAnalysis(musicXmlNote.getMusicalNotesIndex(), musicXmlNote.getMeasureIndex(), (int)musicXmlNote.getFrequency(), musicXmlNote.getDuration());
+		}
+		
+		evaluatingSectionIndex.set(noteAnalysis.getSectionIndex());
+		
+		if (noteAnalysis.getMusicalNotesIndex() >= 0 && noteAnalysis.getMusicalNotesIndex() <= getTotalMusicNoteIndex(null)) {
+			
+			LOGGER.info("delayPrcessed:{} dynamicOffset:{}  Frequency:{}  splDb:{}  Power:{}  amplitude:{}  rms:{}  time:{}", delayProcessed, dynamicOffset, playFrequency, splDb, power, amplitude, rms, playTime);
+			
+			ChunkAnalysis chunkAnalysis = new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude);
+			
+			if(totalChunkAnalysisList.size() > 0){
+				if(totalChunkAnalysisList.get(totalChunkAnalysisList.size() - 1).getAmplitude() + 2 < chunkAnalysis.getAmplitude()){
+					chunkAnalysis.setPeak(true);// percussion only
+				}
+			}
+			totalChunkAnalysisList.add(chunkAnalysis);
+			
+			if(delayProcessed == false && chunkAnalysis.getFrequency() > 100){
+				
+				delayProcessed = true;
+				// Compute the delay offset
+				//playTime = musicXmlNote.getTimeStamp() + durationTime;
+				dynamicOffset = chunkAnalysis.getStartTime() - musicXmlNote.getTimeStamp();
+				/*if(100 * dynamicOffset / musicXmlNote.getDuration() > (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()))){
+					dynamicOffset = 0;
+				}*/
+			}
+			
+			if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset)) {
+
+				if (musicXmlNote.getDontEvaluating()) {
+					noteAnalysis.setIgnore(true);
+				}
+				
+				noteAnalysis.setPlayFrequency(computeFrequency(musicXmlNote));
+				
+				// Judge the rhythm (the tempo counts as correct if an uninterrupted pitch is present for the note's duration)
+				boolean tempo = true;
+				if (subjectId == 23 || subjectId == 113) {
+					tempo = computeTempoWithAmplitude2(musicXmlNote);
+				}else{
+					tempo = computeTempoWithFrequency(musicXmlNote);
+				}
+				
+				noteAnalysis.setTempo(tempo);
+				
+				evaluateForNote(musicXmlNote, noteAnalysis);
+
+				LOGGER.info("当前音符下标[{}] 预计频率:{} 实际频率:{} 节奏:{}", noteAnalysis.getMusicalNotesIndex(), musicXmlNote.getFrequency(), noteAnalysis.getPlayFrequency(),
+						noteAnalysis.isTempo());
+				
+				doneNoteAnalysisList.add(noteAnalysis);
+				
+				// Prepare to process the next note
+				int nextNoteIndex = musicXmlNote.getMusicalNotesIndex() + 1;
+				float nextNoteFrequence = -1;
+				double standDuration = 0;
+				MusicXmlNote nextMusicXmlNote = getCurrentMusicNote(null, nextNoteIndex);
+				if(nextMusicXmlNote != null){
+					nextNoteFrequence = nextMusicXmlNote.getFrequency();
+					standDuration = nextMusicXmlNote.getDuration();
+				}
+				
+				NoteAnalysis nextNoteAnalysis = new NoteAnalysis(nextNoteIndex, getMusicSectionIndex(null, nextNoteIndex), (int)nextNoteFrequence, standDuration);
+
+				noteAnalysis = nextNoteAnalysis;
+
+			}
+
+			setProcessingNote(noteAnalysis);
+		}
+		
+	}
+	
+
+	public int evaluateForSection(int sectionIndex, int subjectId){
+
+		int score = -1;
+		if(doneSectionAnalysisList.size() >= getTotalMusicSectionSize(null)){
+			return score;
+		}
+		
+		// Fetch all notes of the current measure
+		List<NoteAnalysis> noteAnalysisList = doneNoteAnalysisList.stream().filter(t -> t.getSectionIndex() == sectionIndex).collect(Collectors.toList());
+		
+		long ignoreSize = noteAnalysisList.stream().filter(t -> t.isIgnore()).count();
+
+		SectionAnalysis sectionAnalysis = new SectionAnalysis();
+		sectionAnalysis.setIndex(sectionIndex);
+		sectionAnalysis.setNoteNum(noteAnalysisList.size());
+		sectionAnalysis.setIsIngore(ignoreSize == noteAnalysisList.size());
+		
+		// Check whether this measure should be scored
+		MusicXmlSection musicXmlSection = getCurrentMusicSection(null, sectionIndex);
+		if(noteAnalysisList.size() == musicXmlSection.getNoteNum()){
+			// Fetch the notes that need to be evaluated
+			List<NoteAnalysis>  noteList = noteAnalysisList.stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList());
+			
+			if(noteList != null && noteList.size() > 0){
+				score = noteList.stream().mapToInt(t -> t.getScore()).sum() / noteList.size();
+			}
+			sectionAnalysis.setDurationTime(noteAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+			sectionAnalysis.setScore(score);
+
+			LOGGER.info("小节评分:{}",sectionAnalysis);
+			doneSectionAnalysisList.add(sectionAnalysis);
+		}
+		
+		return score;
+	}
+	
+	public Map<String, Integer> evaluateForMusic() {
+
+		Map<String, Integer> result = new HashMap<String, Integer>();
+		
+		result.put("playTime", (int) doneNoteAnalysisList.stream().mapToDouble(t -> t.getDurationTime()).sum());
+		
+		// Fetch the notes that need to be evaluated
+		List<NoteAnalysis> noteAnalysisList = doneNoteAnalysisList.stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList());
+
+		if (noteAnalysisList != null && noteAnalysisList.size() > 0) {
+			int intonationScore = 0;
+			int tempoScore = 0;
+			int integrityScore = 0;
+			int totalScore = 0;
+
+			for (NoteAnalysis note : noteAnalysisList) {
+				intonationScore += note.getIntonationScore();
+				tempoScore += note.getTempoScore();
+				integrityScore += note.getIntegrityScore();
+				totalScore += note.getScore();
+			}
+
+			tempoScore = tempoScore / noteAnalysisList.size();
+			intonationScore = intonationScore / noteAnalysisList.size();
+			integrityScore = integrityScore / noteAnalysisList.size();
+
+			result.put("cadence", tempoScore);
+			result.put("intonation", intonationScore);
+			result.put("integrity", integrityScore);
+	        result.put("recordId", recordId.intValue());
+
+			int score = totalScore / noteAnalysisList.size();
+
+			// Average score
+			if (getMusicXmlBasicInfo(null).getSubjectId() == 23 || getMusicXmlBasicInfo(null).getSubjectId() == 113) {
+				score = tempoScore;
+			}
+			result.put("score", score);
+		}
+		return result;
+	}
+	
+
+	public void evaluateForNote(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis) {
+		
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		double endTime = musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+
+		// Keep only part of the signal as valid, based on the integrity range
+		int elementSize = chunkAnalysisList.size() * (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())) / 100;
+		chunkAnalysisList = chunkAnalysisList.subList(0, elementSize);
+		
+		double playDurationTime = 0;
+		
+		if (subjectId == 23 || subjectId == 113) {
+			if (noteAnalysis.getFrequency() == -1) {// rest
+				if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}else{
+				int beatTimes = (int) chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count();
+				LOGGER.info("Amplitude:{}  beatTimes:{}  Denominator:{}",chunkAnalysisList.stream().map(t -> t.getAmplitude()).collect(Collectors.toList()), beatTimes, musicXmlNote.getDenominator());
+				if(beatTimes == 0){
+					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
+				}else if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}
+		} else {
+			
+			NotePlayResult notePlayResult = queryNoteFrequency(musicXmlNote, noteAnalysis.getPlayFrequency());
+			
+			if (noteAnalysis.getFrequency() == -1) {// rest
+
+				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= 100).mapToDouble(t -> t.getDurationTime()).sum();
+
+				if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else if (playDurationTime * 100 / noteAnalysis.getDurationTime() < hardLevel.getIntegrityRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
+				} else if (notePlayResult.getStatus() == false) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			} else {
+				playDurationTime = chunkAnalysisList.stream().filter(t -> t.getFrequency() > 100 && t.getFrequency() < 2000)
+						.mapToDouble(t -> t.getDurationTime()).sum();
+
+				if (playDurationTime * 100 / noteAnalysis.getDurationTime() < hardLevel.getNotPlayRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
+				} else if (playDurationTime * 100 / noteAnalysis.getDurationTime() < hardLevel.getIntegrityRange()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
+				} else if (!noteAnalysis.isTempo()) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				} else if (notePlayResult.getStatus() == false) {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
+				} else {
+					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				}
+			}
+		}
+
+		// Compute cents (intonation score)
+		int tempoScore = 0;
+		int integrityScore = 0;
+		int intonationScore = 100 - new BigDecimal(Math.abs(YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getPlayFrequency())
+				- YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getFrequency()))).multiply(new BigDecimal(20)).divide(new BigDecimal(17), BigDecimal.ROUND_UP)
+				.setScale(0, BigDecimal.ROUND_UP).intValue();
+		if (intonationScore < 0) {
+			intonationScore = 0;
+		} else if (intonationScore > 100) {
+			intonationScore = 100;
+		}
+
+		if (noteAnalysis.getMusicalErrorType() == NoteErrorType.NOT_PLAY) {
+			intonationScore = 0;
+		} else {
+
+			if (noteAnalysis.isTempo()) {
+				tempoScore = 100;
+				noteAnalysis.setTempoScore(tempoScore);
+			}
+
+			double durationPercent = playDurationTime / noteAnalysis.getDurationTime();
+			if (durationPercent >= 0.7) {
+				integrityScore = 100;
+			} else if (durationPercent < 0.7 && durationPercent >= 0.5) {
+				integrityScore = 50;
+			}
+			noteAnalysis.setIntegrityScore(integrityScore);
+		}
+		noteAnalysis.setIntonationScore(intonationScore);
+		if (subjectId == 23 || subjectId == 113) {
+			noteAnalysis.setScore(tempoScore);
+		} else {
+			noteAnalysis.setScore(new BigDecimal(intonationScore + tempoScore + integrityScore).divide(new BigDecimal(3), 2).setScale(0, BigDecimal.ROUND_UP)
+					.intValue());
+		}
+	}
+	
+	private int computeFrequency(MusicXmlNote musicXmlNote) {
+		
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		double endTime = musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+		
+		// Keep only part of the signal as valid, based on the integrity range
+		int elementSize = chunkAnalysisList.size() * (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())) / 100;
+		chunkAnalysisList = chunkAnalysisList.subList(0, elementSize);
+		
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			return -1;
+		}
+		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		LOGGER.info("-------startTime:{}  endTime:{}------", firstChunkAnalysis.getStartTime(), chunkAnalysisList.get(chunkAnalysisList.size() - 1)
+				.getEndTime());
+		
+		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
+		
+		List<Integer> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency()).filter(t -> t.doubleValue() > 100 && t.doubleValue() < 2000)
+				.collect(Collectors.toList());
+		
+		if (chunkFrequencyList.size() == 0) {
+			return -1;
+		}
+
+		int frequency = (int) (chunkFrequencyList.stream().mapToInt(t -> t).sum() / chunkFrequencyList.size());
+
+		return frequency;
+	}
+	
+	/**
+	 * Within the note's duration there must be one and only one pitch, it must not be interrupted, and it must start within the allowed onset range.
+	 * When the pitch is the same as the previous note's, there must be a break between the two notes.
+	 * @param musicXmlNote
+	 * @return
+	 */
+	private boolean computeTempoWithFrequency(MusicXmlNote musicXmlNote){
+		
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		double endTime = musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+
+		// Keep only part of the signal as valid, based on the integrity range
+		int elementSize = chunkAnalysisList.size() * (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())) / 100;
+		chunkAnalysisList = chunkAnalysisList.subList(0, elementSize);
+		
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			return false;
+		}
+		
+		if (musicXmlNote.getFrequency() == -1) {// rest
+			return chunkAnalysisList.stream().filter(t -> t.getFrequency() > 100).count() <= 1;
+		}
+		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) == Double.doubleToLongBits(firstChunkAnalysis.getStartTime())).findFirst();
+
+		ChunkAnalysis lastChunkAnalysis = null;
+		if (chunkAnalysisOptional.isPresent()) {
+			lastChunkAnalysis = chunkAnalysisOptional.get();
+		}
+		
+		if(lastChunkAnalysis == null){
+			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
+		}
+		
+		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
+		
+		if(chunkList.size() == 0){
+			return false;
+		}
+		
+		NoteFrequencyRange noteFrequencyRange = null;
+		ChunkAnalysis chunkAnalysis = null;
+		boolean tempo = false;
+		boolean isContinue = true;
+		int unplayedSize = 0;
+		int firstPeakIndex = -1;
+		for (int i = 0; i < chunkList.size(); i++) {
+			chunkAnalysis = chunkList.get(i);
+			if (chunkAnalysis != null) {
+				if (chunkAnalysis.getFrequency() > 100) {
+					
+					tempo = true;
+					if (firstPeakIndex == -1) {
+						firstPeakIndex = i;
+						noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, chunkAnalysis.getFrequency());
+					} else if (noteFrequencyRange.getMinFrequency() > chunkAnalysis.getFrequency()
+							|| chunkAnalysis.getFrequency() > noteFrequencyRange.getMaxFrequency()) {
+						// Check whether it is still the same note
+						tempo = false;
+						LOGGER.info("节奏错误原因:不是同一个音[{}]:{}-{}", chunkAnalysis.getFrequency(), noteFrequencyRange.getMinFrequency(), noteFrequencyRange.getMaxFrequency());
+						break;
+					}
+					if (isContinue == false) {
+						if ((i + 1) * 100 / chunkAnalysisList.size() < hardLevel.getIntegrityRange()) {
+							if (unplayedSize > 0) {
+								tempo = false;
+								LOGGER.info("节奏错误原因:信号不连续");
+								break;
+							}
+						}
+					}
+				} else {
+					if (tempo == true) {
+						isContinue = false;
+						unplayedSize++;
+					}
+				}
+			}
+		}
+		
+		if (tempo) {
+			// Check the onset time
+			if(firstPeakIndex * 100 /chunkAnalysisList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())){
+				tempo = false;
+				LOGGER.info("节奏错误原因:进入时间点太晚");
+			}
+			if(tempo){
+				// Check whether the pitch is just carried over from the previous note
+				if(firstChunkAnalysis.getFrequency() > 100 && lastChunkAnalysis.getFrequency() > 100){
+					tempo = new NoteFrequencyRange(standardFrequecy, firstChunkAnalysis.getFrequency()).equals(new NoteFrequencyRange(standardFrequecy, lastChunkAnalysis.getFrequency())) == false;
+					if (tempo == false) {
+						LOGGER.info("节奏错误原因:上一个音延续下来导致的");
+					}
+				}
+			}
+		}
+		
+		return tempo;
+	}
+	
+	private boolean computeTempoWithAmplitude2(MusicXmlNote musicXmlNote) {
+
+		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) / 100;
+		
+		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
+		
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
+		
+		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
+		
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			return false;
+		}
+
+		if (musicXmlNote.getFrequency() == -1) {// rest
+			return chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count() <= 0;
+		}
+		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) == Double.doubleToLongBits(firstChunkAnalysis.getStartTime())).findFirst();
+
+		ChunkAnalysis lastChunkAnalysis = null;
+		if (chunkAnalysisOptional.isPresent()) {
+			lastChunkAnalysis = chunkAnalysisOptional.get();
+		}
+		if(lastChunkAnalysis == null){
+			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
+		}
+		
+		List<Integer> chunkAmplitudeList = chunkAnalysisList.stream().map(ChunkAnalysis::getAmplitude).collect(Collectors.toList());
+
+		chunkAmplitudeList.add(0, lastChunkAnalysis.getAmplitude());
+		
+		// Check whether there is more than one amplitude peak
+		boolean tempo = false;
+		boolean isContinue = true;
+		int firstPeakIndex = -1;
+		int peakSize = 0;
+		for (int i = 1; i < chunkAmplitudeList.size(); i++) {
+			if (chunkAmplitudeList.get(i) > hardLevel.getAmplitudeThreshold() && chunkAmplitudeList.get(i) > chunkAmplitudeList.get(i - 1) + 2) {
+				tempo = true;
+				if(firstPeakIndex == -1){
+					firstPeakIndex = i;
+					peakSize++;
+				}
+				if (isContinue == false) {
+					tempo = false;
+					peakSize++;
+					break;
+				}
+			} else {
+				if (tempo == true) {
+					isContinue = false;
+				}
+			}
+		}
+		
+		if(peakSize == 0){
+			tempo = lastChunkAnalysis.isPeak();
+		}else if(peakSize == 1){
+			tempo = true;
+		}else{
+			tempo = false;
+		}
+		
+		if (tempo) {
+			// Check the onset time
+			if((firstPeakIndex - 1) * 100 /chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) * 2){
+				LOGGER.info("超过范围:{}", (firstPeakIndex - 1) * 100 /chunkAmplitudeList.size());
+				tempo = false;
+			}
+		}
+		
+		return tempo;
+	}
+	
+	private double queryFirstNoteStartTime(List<ChunkAnalysis> chunkAnalysisList, MusicXmlNote musicXmlNote) {
+		
+		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
+			return musicXmlNote.getTimeStamp() + dynamicOffset;
+		}
+
+		NoteFrequencyRange standardNote = new NoteFrequencyRange(standardFrequecy, musicXmlNote.getFrequency());
+
+		NoteFrequencyRange noteFrequencyRange = null;
+
+		for (ChunkAnalysis ca : chunkAnalysisList) {
+			noteFrequencyRange = new NoteFrequencyRange(standardFrequecy, ca.getFrequency());
+			if (standardNote.equals(noteFrequencyRange)) {
+				return ca.getStartTime();
+			}
+		}
+
+		return chunkAnalysisList.get(chunkAnalysisList.size() - 1).getEndTime();
+	}
+	
+	public static void main(String[] args) {
+		
+		NoteFrequencyRange range = new NoteFrequencyRange(440, 466);
+		
+		System.out.println("Min:" + range.getMinFrequency() + "  Max:" + range.getMaxFrequency());
+		
+	}
+	
+}
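
UserChannelContext3 above repeatedly wraps a raw frequency in NoteFrequencyRange(standardFrequecy, f) and compares two ranges with equals() to decide whether two readings belong to the same note. The class body is not shown in this excerpt, so the following is only an assumption about its behaviour: snap the frequency to the nearest equal-tempered semitone relative to the tuning reference and allow a quarter tone (±50 cents) on each side.

// Assumption-only sketch of a semitone band like the one NoteFrequencyRange appears to provide.
public class NoteBandSketch {

	// Nearest equal-tempered semitone relative to the tuning reference (A4, e.g. 442 Hz).
	static long nearestSemitone(double standardA, double frequency) {
		return Math.round(12.0 * Math.log(frequency / standardA) / Math.log(2));
	}

	// [min, max] of a ±50-cent band around that semitone.
	static double[] band(double standardA, double frequency) {
		double center = standardA * Math.pow(2, nearestSemitone(standardA, frequency) / 12.0);
		return new double[] { center * Math.pow(2, -0.5 / 12.0), center * Math.pow(2, 0.5 / 12.0) };
	}

	public static void main(String[] args) {
		double[] b = band(440, 466); // same inputs as the main() in UserChannelContext3
		System.out.println("Min:" + b[0] + "  Max:" + b[1]);
	}
}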

+ 1 - 1
audio-analysis/src/main/java/com/yonge/nettty/dto/WebSocketResponse.java → audio-analysis/src/main/java/com/yonge/netty/dto/WebSocketResponse.java

@@ -1,4 +1,4 @@
-package com.yonge.nettty.dto;
+package com.yonge.netty.dto;
 
 import org.springframework.http.HttpStatus;
 

+ 1 - 1
audio-analysis/src/main/java/com/yonge/nettty/entity/MusicXmlBasicInfo.java → audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlBasicInfo.java

@@ -1,4 +1,4 @@
-package com.yonge.nettty.entity;
+package com.yonge.netty.entity;
 
 import java.util.ArrayList;
 import java.util.HashMap;

+ 23 - 1
audio-analysis/src/main/java/com/yonge/nettty/entity/MusicXmlNote.java → audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlNote.java

@@ -1,4 +1,4 @@
-package com.yonge.nettty.entity;
+package com.yonge.netty.entity;
 
 /**
  * Note information
@@ -17,6 +17,9 @@ public class MusicXmlNote {
 	// Frequency of the next sound (not of the next note in the score)
 	private float nextFrequency;
 
+	// Frequency of the previous sound (not of the previous note in the score)
+	private float prevFrequency;
+
 	// Index of the measure containing this note (0-based)
 	private int measureIndex;
 
@@ -25,6 +28,9 @@ public class MusicXmlNote {
 
 	// Index of this note within the whole score (0-based)
 	private int musicalNotesIndex;
+	
+	// Note value denominator (1 = whole, 2 = half, 4 = quarter, ...)
+	private int denominator;
 
 	public double getTimeStamp() {
 		return timeStamp;
@@ -50,6 +56,14 @@ public class MusicXmlNote {
 		this.frequency = frequency;
 	}
 
+	public float getPrevFrequency() {
+		return prevFrequency;
+	}
+
+	public void setPrevFrequency(float prevFrequency) {
+		this.prevFrequency = prevFrequency;
+	}
+
 	public float getNextFrequency() {
 		return nextFrequency;
 	}
@@ -81,4 +95,12 @@ public class MusicXmlNote {
 	public void setMusicalNotesIndex(int musicalNotesIndex) {
 		this.musicalNotesIndex = musicalNotesIndex;
 	}
+
+	public int getDenominator() {
+		return denominator;
+	}
+
+	public void setDenominator(int denominator) {
+		this.denominator = denominator;
+	}
 }
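
The new denominator field feeds hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) in the analysis code above. As a reminder of what the denominator encodes, here is an illustrative sketch of the usual duration relation; the quarter-note-beat assumption and the helper name are not part of the codebase.

// Illustrative only: note-value denominator vs. duration at a given tempo,
// assuming the beat is a quarter note.
public class NoteDurationSketch {

	static double durationMs(int denominator, double bpm) {
		double quarterMs = 60_000.0 / bpm;     // one beat at the given tempo
		return quarterMs * 4.0 / denominator;  // 1 = whole, 2 = half, 4 = quarter, 8 = eighth, ...
	}

	public static void main(String[] args) {
		System.out.println(durationMs(8, 120)); // an eighth note at 120 BPM lasts 250 ms
	}
}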

+ 1 - 1
audio-analysis/src/main/java/com/yonge/nettty/entity/MusicXmlSection.java → audio-analysis/src/main/java/com/yonge/netty/entity/MusicXmlSection.java

@@ -1,4 +1,4 @@
-package com.yonge.nettty.entity;
+package com.yonge.netty.entity;
 
 /**
  * Measure information

+ 2 - 2
audio-analysis/src/main/java/com/yonge/netty/server/NettyServer.java

@@ -51,9 +51,9 @@ public class NettyServer {
 	 */
 	private String webSocketPath = "/audioAnalysis";
 
-	private EventLoopGroup bossGroup = new NioEventLoopGroup();
+	private EventLoopGroup bossGroup = new NioEventLoopGroup(1);
 
-	private EventLoopGroup workGroup = new NioEventLoopGroup();
+	private EventLoopGroup workGroup = new NioEventLoopGroup(5);
 
 	@Autowired
 	private NettyServerHandler nettyServerHandler;
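
The NettyServer change pins the acceptor (boss) group to one thread and the worker group to five: the boss loop only accepts connections, while the workers service the accepted WebSocket channels' I/O. A hedged sketch of the standard Netty wiring these two groups plug into; the real pipeline and handler setup in NettyServer is omitted.

import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;

public class EventLoopSizingSketch {
	public static void main(String[] args) {
		EventLoopGroup boss = new NioEventLoopGroup(1);    // accepts connections only
		EventLoopGroup workers = new NioEventLoopGroup(5); // handles channel I/O
		ServerBootstrap bootstrap = new ServerBootstrap()
				.group(boss, workers)
				.channel(NioServerSocketChannel.class);
		// childHandler(...) and bind(...) omitted; see NettyServer for the real setup.
		boss.shutdownGracefully();
		workers.shutdownGracefully();
	}
}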

+ 0 - 10
audio-analysis/src/main/java/com/yonge/netty/server/handler/message/BinaryMessageHandler.java

@@ -1,10 +0,0 @@
-package com.yonge.netty.server.handler.message;
-
-import io.netty.channel.Channel;
-
-public interface BinaryMessageHandler {
-	
-	String getAction();
-
-	boolean handler(String user, Channel channel, byte[] bytes);
-}

+ 7 - 121
audio-analysis/src/main/java/com/yonge/netty/server/handler/message/BinaryWebSocketFrameHandler.java

@@ -8,15 +8,9 @@ import io.netty.channel.ChannelHandlerContext;
 import io.netty.channel.SimpleChannelInboundHandler;
 import io.netty.handler.codec.http.websocketx.BinaryWebSocketFrame;
 
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.stream.Collectors;
 
-import javax.sound.sampled.AudioFormat;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.BeansException;
@@ -26,15 +20,8 @@ import org.springframework.context.ApplicationContext;
 import org.springframework.context.ApplicationContextAware;
 import org.springframework.stereotype.Component;
 
-import com.yonge.audio.analysis.AudioFloatConverter;
-import com.yonge.audio.utils.ArrayUtil;
-import com.yonge.nettty.dto.UserChannelContext;
-import com.yonge.nettty.dto.WebSocketResponse;
-import com.yonge.nettty.entity.MusicXmlBasicInfo;
 import com.yonge.netty.server.handler.ChannelContextConstants;
 import com.yonge.netty.server.handler.NettyChannelManager;
-import com.yonge.netty.server.processor.WaveformWriter;
-import com.yonge.netty.server.service.UserChannelContextService;
 
 @Component
 @ChannelHandler.Sharable
@@ -45,45 +32,10 @@ public class BinaryWebSocketFrameHandler extends SimpleChannelInboundHandler<Bin
 	@Autowired
 	private NettyChannelManager nettyChannelManager;
 
-	@Autowired
-	private UserChannelContextService userChannelContextService;
-	
 	private ApplicationContext applicationContext;
 	
-	private Map<String, BinaryMessageHandler> handlerMap;
-
-	/**
-	 * @describe 采样率
-	 */
-	private float sampleRate = 44100;
-
-	/**
-	 * 每个采样大小(Bit)
-	 */
-	private int bitsPerSample = 16;
-
-	/**
-	 * 通道数
-	 */
-	private int channels = 1;
-
-	/**
-	 * @describe 采样大小
-	 */
-	private int bufferSize = 1024 * 4;
+	private Map<String, MessageHandler> handlerMap;
 
-	private boolean signed = true;
-
-	private boolean bigEndian = false;
-
-	private AudioFormat audioFormat = new AudioFormat(sampleRate, bitsPerSample, channels, signed, bigEndian);
-
-	private AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
-
-	private String tmpFileDir = "/mdata/soundCompare/";
-	
-	private SimpleDateFormat sdf =new SimpleDateFormat("yyMMddHHmmSS");
-	
 	@Override
 	protected void channelRead0(ChannelHandlerContext ctx, BinaryWebSocketFrame frame) throws Exception {
 
@@ -101,78 +53,12 @@ public class BinaryWebSocketFrameHandler extends SimpleChannelInboundHandler<Bin
 			if(handlerMap == null){
 				LOGGER.error("消息处理器没有初始化");
 			}
-			BinaryMessageHandler handler = handlerMap.get(action);
+			MessageHandler handler = handlerMap.get(action);
 			
-			switch (action) {
-			case "PITCH_DETECTION":
-				handler.handler(user, channel, datas);
-				break;
-			case "SOUND_COMPARE":
-				
-				UserChannelContext channelContext = userChannelContextService.getChannelContext(channel);
-
-				if (channelContext == null) {
-					return;
-				}
-				
-				// 写录音文件
-				WaveformWriter waveFileProcessor = channelContext.getWaveFileProcessor();
-				if (waveFileProcessor == null) {
-					File file = new File(tmpFileDir + user + "_" + sdf.format(new Date()) + ".wav");
-					waveFileProcessor = new WaveformWriter(file.getAbsolutePath());
-					channelContext.setWaveFileProcessor(waveFileProcessor);
-				}
-				waveFileProcessor.process(datas);
-				
-				datas = channelContext.skipHeader(datas);
-
-				if (datas.length == 0) {
-					return;
-				}
-				
-				channelContext.setChannelBufferBytes(ArrayUtil.mergeByte(channelContext.getChannelBufferBytes(), datas));
-				
-				int totalLength = channelContext.getChannelBufferBytes().length;
-				
-				while (totalLength >= bufferSize) {
-					byte[] bufferData = ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), 0, bufferSize - 1);
-
-					if (bufferSize != totalLength) {
-						channelContext.setChannelBufferBytes(ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), bufferSize, totalLength - 1));
-					} else {
-						channelContext.setChannelBufferBytes(new byte[0]);
-					}
-
-					float[] sampleFloats = new float[bufferSize / 2];
-
-					converter.toFloatArray(bufferData, sampleFloats);
-
-					channelContext.handle(sampleFloats, audioFormat);
-
-					MusicXmlBasicInfo musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
-					int sectionIndex = channelContext.getEvaluatingSectionIndex().get();
-
-					// 评分
-					int score = channelContext.evaluateForSection(sectionIndex, musicXmlBasicInfo.getSubjectId());
-					if (score >= 0) {
-
-						Map<String, Object> params = new HashMap<String, Object>();
-						params.put("score", score);
-						params.put("measureIndex", sectionIndex);
-
-						WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("measureScore", params);
-
-						nettyChannelManager.sendTextMessage(user, resp);
-					}
-
-					totalLength = channelContext.getChannelBufferBytes().length;
-				}
-				break;
-
-			default:
-				break;
+			if(handler != null){
+				handler.handleBinaryMessage(user, channel, datas);
 			}
-
+			
 		} finally {
 			buf.release();
 		}
@@ -180,8 +66,8 @@ public class BinaryWebSocketFrameHandler extends SimpleChannelInboundHandler<Bin
 
 	@Override
 	public void afterPropertiesSet() throws Exception {
-		handlerMap = applicationContext.getBeansOfType(BinaryMessageHandler.class).values().stream()
-				.collect(Collectors.toMap(BinaryMessageHandler::getAction, t -> t));
+		handlerMap = applicationContext.getBeansOfType(MessageHandler.class).values().stream()
+				.collect(Collectors.toMap(MessageHandler::getAction, t -> t));
 	}
 
 	@Override
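
The PCM buffering logic removed from this handler (and now owned by AudioCompareHandler) works as follows: accumulate the raw 16-bit little-endian mono PCM, cut it into 4096-byte windows, convert each window to floats and feed it to the analysis context. A simplified sketch using only java.nio instead of the project's ArrayUtil/AudioFloatConverter helpers:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;

public class PcmBufferingSketch {

	private static final int BUFFER_SIZE = 1024 * 4; // bytes per analysis window
	private byte[] pending = new byte[0];

	public void onBinaryFrame(byte[] datas) {
		// Append the new frame to whatever is left over from the previous one.
		byte[] merged = Arrays.copyOf(pending, pending.length + datas.length);
		System.arraycopy(datas, 0, merged, pending.length, datas.length);
		pending = merged;

		int offset = 0;
		while (pending.length - offset >= BUFFER_SIZE) {
			float[] samples = toFloats(pending, offset, BUFFER_SIZE);
			offset += BUFFER_SIZE;
			// channelContext.handle(samples, audioFormat) would run here
		}
		pending = Arrays.copyOfRange(pending, offset, pending.length);
	}

	// 16-bit signed little-endian PCM -> floats in [-1, 1] (2048 samples per window).
	private static float[] toFloats(byte[] bytes, int offset, int length) {
		ByteBuffer buf = ByteBuffer.wrap(bytes, offset, length).order(ByteOrder.LITTLE_ENDIAN);
		float[] out = new float[length / 2];
		for (int i = 0; i < out.length; i++) {
			out[i] = buf.getShort() / 32768f;
		}
		return out;
	}
}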

+ 12 - 0
audio-analysis/src/main/java/com/yonge/netty/server/handler/message/MessageHandler.java

@@ -0,0 +1,12 @@
+package com.yonge.netty.server.handler.message;
+
+import io.netty.channel.Channel;
+
+public interface MessageHandler {
+	
+	String getAction();
+	
+	boolean handleTextMessage(String user, Channel channel, String text);
+
+	boolean handleBinaryMessage(String user, Channel channel, byte[] bytes);
+}
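
For reference, a hypothetical implementation of the new MessageHandler interface (not part of the codebase), showing how the action string returned by getAction() keys the dispatch map that both frame handlers build via applicationContext.getBeansOfType(MessageHandler.class) and Collectors.toMap(MessageHandler::getAction, t -> t):

package com.yonge.netty.server.handler.message;

import io.netty.channel.Channel;
import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;
import org.springframework.stereotype.Component;

@Component
public class EchoMessageHandler implements MessageHandler {

	@Override
	public String getAction() {
		return "ECHO"; // hypothetical action, not one used by the real server
	}

	@Override
	public boolean handleTextMessage(String user, Channel channel, String text) {
		// Echo the text frame straight back to the client.
		channel.writeAndFlush(new TextWebSocketFrame(text));
		return true;
	}

	@Override
	public boolean handleBinaryMessage(String user, Channel channel, byte[] bytes) {
		return false; // this example only handles text frames
	}
}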

+ 0 - 5
audio-analysis/src/main/java/com/yonge/netty/server/handler/message/TextMessageHandler.java

@@ -1,5 +0,0 @@
-package com.yonge.netty.server.handler.message;
-
-public interface TextMessageHandler {
-
-}

+ 29 - 182
audio-analysis/src/main/java/com/yonge/netty/server/handler/message/TextWebSocketHandler.java

@@ -6,59 +6,35 @@ import io.netty.channel.ChannelHandlerContext;
 import io.netty.channel.SimpleChannelInboundHandler;
 import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;
 
-import java.math.BigDecimal;
-import java.util.Comparator;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
-import java.util.Objects;
-import java.util.Map.Entry;
 import java.util.stream.Collectors;
 
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.beans.BeansException;
+import org.springframework.beans.factory.InitializingBean;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
 import org.springframework.stereotype.Component;
 
-import com.alibaba.fastjson.JSON;
-import com.alibaba.fastjson.JSONObject;
 import com.alibaba.fastjson.JSONPath;
-import com.ym.mec.biz.dal.entity.SysMusicCompareRecord;
-import com.ym.mec.biz.dal.enums.DeviceTypeEnum;
-import com.ym.mec.biz.dal.enums.FeatureType;
-import com.ym.mec.biz.service.SysMusicCompareRecordService;
-import com.ym.mec.thirdparty.storage.StoragePluginContext;
-import com.ym.mec.thirdparty.storage.provider.KS3StoragePlugin;
-import com.ym.mec.util.upload.UploadUtil;
-import com.yonge.nettty.dto.SectionAnalysis;
-import com.yonge.nettty.dto.UserChannelContext;
-import com.yonge.nettty.dto.WebSocketResponse;
-import com.yonge.nettty.entity.MusicXmlBasicInfo;
-import com.yonge.nettty.entity.MusicXmlNote;
 import com.yonge.netty.server.handler.ChannelContextConstants;
 import com.yonge.netty.server.handler.NettyChannelManager;
-import com.yonge.netty.server.processor.WaveformWriter;
-import com.yonge.netty.server.service.UserChannelContextService;
 
 @Component
 @ChannelHandler.Sharable
-public class TextWebSocketHandler extends SimpleChannelInboundHandler<TextWebSocketFrame> {
+public class TextWebSocketHandler extends SimpleChannelInboundHandler<TextWebSocketFrame> implements ApplicationContextAware,InitializingBean {
 
 	private static final Logger LOGGER = LoggerFactory.getLogger(TextWebSocketHandler.class);
 
 	@Autowired
-	private SysMusicCompareRecordService sysMusicCompareRecordService;
-
-    @Autowired
-    private StoragePluginContext storagePluginContext;
-
-	@Autowired
-	private UserChannelContextService userChannelContextService;
-
-	@Autowired
 	private NettyChannelManager nettyChannelManager;
+	
+	private ApplicationContext applicationContext;
+	
+	private Map<String, MessageHandler> handlerMap;
 
 	@Override
 	protected void channelRead0(ChannelHandlerContext ctx, TextWebSocketFrame frame) throws Exception {
@@ -69,160 +45,31 @@ public class TextWebSocketHandler extends SimpleChannelInboundHandler<TextWebSoc
 		
 		LOGGER.info("接收到客户端的消息内容:{}", jsonMsg);
 		
-		String type = (String) JSONPath.extract(jsonMsg, "$.header.type");
+		String action = (String) JSONPath.extract(jsonMsg, "$.header.type");
 		
-		if(StringUtils.isNoneBlank(type)){
-			channel.attr(ChannelContextConstants.CHANNEL_ATTR_KEY_ACTION).set(type);
-		}
-		
-		if (StringUtils.equals(type, "PITCH_DETECTION")) {// 校音
+		if(StringUtils.isNoneBlank(action)){
+			channel.attr(ChannelContextConstants.CHANNEL_ATTR_KEY_ACTION).set(action);
 			
-			return;
-		} else if (StringUtils.equals(type, "SOUND_COMPARE")) {// 评测
-			String command = (String) JSONPath.extract(jsonMsg, "$.header.commond");
-
-			JSONObject dataObj = (JSONObject) JSONPath.extract(jsonMsg, "$.body");
-
-			UserChannelContext channelContext = userChannelContextService.getChannelContext(channel);
+			if(handlerMap == null){
+				LOGGER.error("消息处理器没有初始化");
+			}
+			MessageHandler handler = handlerMap.get(action);
 			
-			MusicXmlBasicInfo musicXmlBasicInfo = null;
-
-			switch (command) {
-			case "musicXml": // 同步music xml信息
-
-				musicXmlBasicInfo = JSONObject.toJavaObject(dataObj, MusicXmlBasicInfo.class);
-
-				userChannelContextService.remove(channel);
-
-				if (channelContext == null) {
-					channelContext = new UserChannelContext();
-				}
-
-				channelContext.getSongMusicXmlMap().put(musicXmlBasicInfo.getExamSongId(), musicXmlBasicInfo);
-				channelContext.init(musicXmlBasicInfo.getHeardLevel(), musicXmlBasicInfo.getSubjectId(), musicXmlBasicInfo.getBeatLength());
-
-				userChannelContextService.register(channel, channelContext);
-
-				break;
-			case "recordStart": // 开始评测
-
-				// 清空缓存信息
-				channelContext.resetUserInfo();
-				
-				musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
-
-				if (musicXmlBasicInfo != null) {
-					Date date = new Date();
-					SysMusicCompareRecord sysMusicCompareRecord = new SysMusicCompareRecord(FeatureType.CLOUD_STUDY_EVALUATION);
-					sysMusicCompareRecord.setCreateTime(date);
-					sysMusicCompareRecord.setUserId(Integer.parseInt(nettyChannelManager.getUser(channel)));
-					sysMusicCompareRecord.setSysMusicScoreId(musicXmlBasicInfo.getExamSongId());
-					sysMusicCompareRecord.setBehaviorId(musicXmlBasicInfo.getBehaviorId());
-					//sysMusicCompareRecord.setClientId();
-					sysMusicCompareRecord.setDeviceType(DeviceTypeEnum.valueOf(musicXmlBasicInfo.getPlatform()));
-					sysMusicCompareRecord.setSpeed(musicXmlBasicInfo.getSpeed());
-					
-					MusicXmlNote musicXmlNote = musicXmlBasicInfo.getMusicXmlInfos().stream().max(Comparator.comparing(MusicXmlNote::getTimeStamp)).get();
-					sysMusicCompareRecord.setSourceTime((float) ((musicXmlNote.getTimeStamp()+musicXmlNote.getDuration())/1000));
-					sysMusicCompareRecordService.insert(sysMusicCompareRecord);
-					channelContext.setRecordId(sysMusicCompareRecord.getId());
-				}
-				break;
-			case "recordEnd": // 结束评测
-			case "recordCancel": // 取消评测
-				if (channelContext == null) {
-					return;
-				}
-
-				WaveformWriter waveFileProcessor = channelContext.getWaveFileProcessor();
-				if (waveFileProcessor != null) {
-					// 写文件头
-					waveFileProcessor.processingFinished();
-				}
-
-				if (StringUtils.equals(command, "recordEnd")) {
-					// 生成评测报告
-					Map<String, Object> params = new HashMap<String, Object>();
-
-					Map<String, Integer> scoreMap = channelContext.evaluateForMusic();
-					for (Entry<String, Integer> entry : scoreMap.entrySet()) {
-						params.put(entry.getKey(), entry.getValue());
-					}
-					
-					//保存评测结果
-					Long recordId = channelContext.getRecordId();
-					SysMusicCompareRecord sysMusicCompareRecord = sysMusicCompareRecordService.get(recordId);
-					if(sysMusicCompareRecord != null){
-						musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
-						
-						if (scoreMap != null && scoreMap.size() > 1) {
-							sysMusicCompareRecord.setScore(new BigDecimal(scoreMap.get("score")));
-							sysMusicCompareRecord.setIntonation(new BigDecimal(scoreMap.get("intonation")));
-							sysMusicCompareRecord.setIntegrity(new BigDecimal(scoreMap.get("integrity")));
-							sysMusicCompareRecord.setCadence(new BigDecimal(scoreMap.get("cadence")));
-							sysMusicCompareRecord.setPlayTime(scoreMap.get("playTime") / 1000);
-						}
-						sysMusicCompareRecord.setFeature(FeatureType.CLOUD_STUDY_EVALUATION);
-
-			            String url = null;
-			            try {
-			                String folder = UploadUtil.getFileFloder();
-			                url = storagePluginContext.asyncUploadFile(KS3StoragePlugin.PLUGIN_NAME,"soundCompare/" + folder, waveFileProcessor.getFile(), true);
-			            } catch (Exception e) {
-			                LOGGER.error("录音文件上传失败:{}", e);
-			            }
-						sysMusicCompareRecord.setRecordFilePath(url);
-						//sysMusicCompareRecord.setVideoFilePath(videoFilePath);
-
-						Map<String, Object> scoreData = new HashMap<>();
-						List<SectionAnalysis> sectionAnalysisList = channelContext.getDoneSectionAnalysisList();
-						sectionAnalysisList = sectionAnalysisList.stream().filter(t -> t.isIngore() == false).collect(Collectors.toList());
-						scoreData.put("userMeasureScore", sectionAnalysisList.stream().collect(Collectors.toMap(SectionAnalysis :: getIndex, t -> t)));
-
-						Map<String, Object> musicalNotesPlayStats = new HashMap<>();
-						musicalNotesPlayStats.put("detailId", musicXmlBasicInfo.getDetailId());
-						musicalNotesPlayStats.put("examSongId", musicXmlBasicInfo.getExamSongId());
-						musicalNotesPlayStats.put("xmlUrl", musicXmlBasicInfo.getXmlUrl());
-						
-						musicalNotesPlayStats.put("notesData", channelContext.getDoneNoteAnalysisList().stream().filter(t -> t.isIgnore() == false).collect(Collectors.toList()));
-						scoreData.put("musicalNotesPlayStats", musicalNotesPlayStats);
-						sysMusicCompareRecord.setScoreData(JSON.toJSONString(scoreData));
-						
-						sysMusicCompareRecordService.saveMusicCompareData(sysMusicCompareRecord);
-					}
-					
-					WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("overall", params);
-
-					nettyChannelManager.sendTextMessage(nettyChannelManager.getUser(channel), resp);
-				}
-
-				// 清空缓存信息
-				channelContext.resetUserInfo();
-
-				break;
-			case "proxyMessage": // ???
-
-				break;
-			case "videoUpload": // 上传音频
-				SysMusicCompareRecord musicCompareRecord = null;
-				if (dataObj.containsKey("recordId")) {
-					musicCompareRecord = sysMusicCompareRecordService.get(dataObj.getLong("recordId"));
-				}
-				if (Objects.nonNull(musicCompareRecord) && dataObj.containsKey("filePath")) {
-					musicCompareRecord.setVideoFilePath(dataObj.getString("filePath"));
-					sysMusicCompareRecordService.update(musicCompareRecord);
-				} else {
-					musicCompareRecord.setVideoFilePath(musicCompareRecord.getRecordFilePath());
-					sysMusicCompareRecordService.update(musicCompareRecord);
-				}
-				
-				break;
-
-			default:
-				// 非法请求
-				break;
+			if(handler != null){
+				handler.handleTextMessage(nettyChannelManager.getUser(channel), channel, jsonMsg);
 			}
 		}
 	}
 
+	@Override
+	public void afterPropertiesSet() throws Exception {
+		handlerMap = applicationContext.getBeansOfType(MessageHandler.class).values().stream()
+				.collect(Collectors.toMap(MessageHandler::getAction, t -> t));
+	}
+
+	@Override
+	public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
+		this.applicationContext = applicationContext;
+	}
+
 }
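
A hedged sketch of the message envelope the rewritten text handler dispatches on. The paths $.header.type, $.header.commond (the spelling used in the code) and $.body come from the handlers above; the concrete body content is illustrative only.

import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.JSONPath;

public class EnvelopeSketch {
	public static void main(String[] args) {
		String jsonMsg = "{\"header\":{\"type\":\"SOUND_COMPARE\",\"commond\":\"recordStart\"},\"body\":{}}";

		String action = (String) JSONPath.extract(jsonMsg, "$.header.type");     // selects the MessageHandler bean
		String command = (String) JSONPath.extract(jsonMsg, "$.header.commond"); // selects the case inside AudioCompareHandler
		JSONObject body = (JSONObject) JSONPath.extract(jsonMsg, "$.body");      // payload, e.g. MusicXmlBasicInfo for "musicXml"

		System.out.println(action + " / " + command + " / " + body);
	}
}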

+ 349 - 0
audio-analysis/src/main/java/com/yonge/netty/server/service/AudioCompareHandler.java

@@ -0,0 +1,349 @@
+package com.yonge.netty.server.service;
+
+import io.netty.channel.Channel;
+
+import java.io.File;
+import java.math.BigDecimal;
+import java.text.SimpleDateFormat;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.JSONObject;
+import com.alibaba.fastjson.JSONPath;
+import com.ym.mec.biz.dal.entity.SysMusicCompareRecord;
+import com.ym.mec.biz.dal.enums.DeviceTypeEnum;
+import com.ym.mec.biz.dal.enums.FeatureType;
+import com.ym.mec.biz.dal.enums.HeardLevelEnum;
+import com.ym.mec.biz.service.SysMusicCompareRecordService;
+import com.ym.mec.thirdparty.storage.StoragePluginContext;
+import com.ym.mec.thirdparty.storage.provider.KS3StoragePlugin;
+import com.ym.mec.util.upload.UploadUtil;
+import com.yonge.audio.analysis.AudioFloatConverter;
+import com.yonge.audio.utils.ArrayUtil;
+import com.yonge.netty.dto.SectionAnalysis;
+import com.yonge.netty.dto.UserChannelContext;
+import com.yonge.netty.dto.WebSocketResponse;
+import com.yonge.netty.entity.MusicXmlBasicInfo;
+import com.yonge.netty.entity.MusicXmlNote;
+import com.yonge.netty.server.handler.NettyChannelManager;
+import com.yonge.netty.server.handler.message.MessageHandler;
+import com.yonge.netty.server.processor.WaveformWriter;
+
+@Component
+public class AudioCompareHandler implements MessageHandler {
+	
+	private static final Logger LOGGER = LoggerFactory.getLogger(AudioCompareHandler.class);
+
+	@Autowired
+	private UserChannelContextService userChannelContextService;
+
+	@Autowired
+	private NettyChannelManager nettyChannelManager;
+	
+	@Autowired
+	private SysMusicCompareRecordService sysMusicCompareRecordService;
+
+    @Autowired
+    private StoragePluginContext storagePluginContext;
+
+	/**
+	 * @describe Sample rate (Hz)
+	 */
+	private float sampleRate = 44100;
+
+	/**
+	 * Size of each sample (bits)
+	 */
+	private int bitsPerSample = 16;
+
+	/**
+	 * Number of channels
+	 */
+	private int channels = 1;
+
+	/**
+	 * @describe Analysis buffer size (bytes)
+	 */
+	private int bufferSize = 1024 * 4;
+
+	private boolean signed = true;
+
+	private boolean bigEndian = false;
+
+	private AudioFormat audioFormat = new AudioFormat(sampleRate, bitsPerSample, channels, signed, bigEndian);
+
+	private AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
+
+	private String tmpFileDir = "/mdata/soundCompare/";
+
+	private SimpleDateFormat sdf = new SimpleDateFormat("yyMMddHHmmSS");
+	
+	@Override
+	public String getAction() {
+		return "SOUND_COMPARE";
+	}
+
+	@Override
+	public boolean handleTextMessage(String user, Channel channel, String jsonMsg) {
+		
+		String command = (String) JSONPath.extract(jsonMsg, "$.header.commond");
+
+		JSONObject dataObj = (JSONObject) JSONPath.extract(jsonMsg, "$.body");
+		
+		UserChannelContext channelContext = userChannelContextService.getChannelContext(channel);
+		
+		MusicXmlBasicInfo musicXmlBasicInfo = null;
+
+		switch (command) {
+		case "musicXml": // 同步music xml信息
+			
+			musicXmlBasicInfo = JSONObject.toJavaObject(dataObj, MusicXmlBasicInfo.class);
+			
+			userChannelContextService.remove(channel);
+
+			channelContext = new UserChannelContext();
+			
+			channelContext.setHandlerSwitch(false);
+
+			channelContext.getSongMusicXmlMap().put(musicXmlBasicInfo.getExamSongId(), musicXmlBasicInfo);
+			channelContext.init(musicXmlBasicInfo.getPlatform(), musicXmlBasicInfo.getHeardLevel(), musicXmlBasicInfo.getSubjectId(), musicXmlBasicInfo.getBeatLength());
+			channelContext.setUser(user);
+			
+			userChannelContextService.register(channel, channelContext);
+
+			break;
+		case "recordStart": // 开始评测
+
+			// 清空缓存信息
+			channelContext.resetUserInfo();
+			
+			channelContext.setHandlerSwitch(false);
+			
+			musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
+
+			if (musicXmlBasicInfo != null) {
+				Date date = new Date();
+				SysMusicCompareRecord sysMusicCompareRecord = new SysMusicCompareRecord(FeatureType.CLOUD_STUDY_EVALUATION);
+				sysMusicCompareRecord.setCreateTime(date);
+				sysMusicCompareRecord.setUserId(Integer.parseInt(user));
+				sysMusicCompareRecord.setSysMusicScoreId(musicXmlBasicInfo.getExamSongId());
+				sysMusicCompareRecord.setBehaviorId(musicXmlBasicInfo.getBehaviorId());
+				//sysMusicCompareRecord.setClientId();
+				sysMusicCompareRecord.setDeviceType(DeviceTypeEnum.valueOf(musicXmlBasicInfo.getPlatform()));
+				sysMusicCompareRecord.setSpeed(musicXmlBasicInfo.getSpeed());
+				
+				MusicXmlNote musicXmlNote = musicXmlBasicInfo.getMusicXmlInfos().stream().max(Comparator.comparing(MusicXmlNote::getTimeStamp)).get();
+				sysMusicCompareRecord.setSourceTime((float) ((musicXmlNote.getTimeStamp()+musicXmlNote.getDuration())/1000));
+				sysMusicCompareRecordService.insert(sysMusicCompareRecord);
+				channelContext.setRecordId(sysMusicCompareRecord.getId());
+			}
+			break;
+		case "recordEnd": // 结束评测
+		case "recordCancel": // 取消评测
+			if (channelContext == null) {
+				return false;
+			}
+			
+			channelContext.setHandlerSwitch(false);
+
+			WaveformWriter waveFileProcessor = channelContext.getWaveFileProcessor();
+			if (waveFileProcessor != null) {
+				// 写文件头
+				waveFileProcessor.processingFinished();
+			}
+
+			if (StringUtils.equals(command, "recordEnd")) {
+				// 生成评测报告
+				Map<String, Object> params = new HashMap<String, Object>();
+
+				Map<String, Integer> scoreMap = channelContext.evaluateForMusic();
+				for (Entry<String, Integer> entry : scoreMap.entrySet()) {
+					params.put(entry.getKey(), entry.getValue());
+				}
+				
+				//保存评测结果
+				Long recordId = channelContext.getRecordId();
+				SysMusicCompareRecord sysMusicCompareRecord = sysMusicCompareRecordService.get(recordId);
+				if(sysMusicCompareRecord != null){
+					musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
+					
+					if (scoreMap != null && scoreMap.size() > 1) {
+						sysMusicCompareRecord.setScore(new BigDecimal(scoreMap.get("score")));
+						sysMusicCompareRecord.setIntonation(new BigDecimal(scoreMap.get("intonation")));
+						sysMusicCompareRecord.setIntegrity(new BigDecimal(scoreMap.get("integrity")));
+						sysMusicCompareRecord.setCadence(new BigDecimal(scoreMap.get("cadence")));
+						sysMusicCompareRecord.setPlayTime(scoreMap.get("playTime") / 1000);
+					}
+					sysMusicCompareRecord.setFeature(FeatureType.CLOUD_STUDY_EVALUATION);
+
+		            String url = null;
+		            try {
+		                String folder = UploadUtil.getFileFloder();
+		                url = storagePluginContext.asyncUploadFile(KS3StoragePlugin.PLUGIN_NAME,"soundCompare/" + folder, waveFileProcessor.getFile(), true);
+		            } catch (Exception e) {
+		                LOGGER.error("录音文件上传失败", e);
+		            }
+					sysMusicCompareRecord.setRecordFilePath(url);
+					//sysMusicCompareRecord.setVideoFilePath(videoFilePath);
+
+					Map<String, Object> scoreData = new HashMap<>();
+					List<SectionAnalysis> sectionAnalysisList = channelContext.getDoneSectionAnalysisList();
+					sectionAnalysisList = sectionAnalysisList.stream().filter(t -> !t.isIngore()).collect(Collectors.toList());
+					scoreData.put("userMeasureScore", sectionAnalysisList.stream().collect(Collectors.toMap(SectionAnalysis::getIndex, t -> t)));
+
+					Map<String, Object> musicalNotesPlayStats = new HashMap<>();
+					musicalNotesPlayStats.put("detailId", musicXmlBasicInfo.getDetailId());
+					musicalNotesPlayStats.put("examSongId", musicXmlBasicInfo.getExamSongId());
+					musicalNotesPlayStats.put("xmlUrl", musicXmlBasicInfo.getXmlUrl());
+					
+					musicalNotesPlayStats.put("notesData", channelContext.getDoneNoteAnalysisList().stream().filter(t -> !t.isIgnore()).collect(Collectors.toList()));
+					scoreData.put("musicalNotesPlayStats", musicalNotesPlayStats);
+					sysMusicCompareRecord.setScoreData(JSON.toJSONString(scoreData));
+					
+					sysMusicCompareRecord.setHeardLevel(HeardLevelEnum.valueOf(channelContext.getHardLevel().name()));
+					
+					sysMusicCompareRecordService.saveMusicCompareData(sysMusicCompareRecord);
+				}
+				
+				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("overall", params);
+
+				nettyChannelManager.sendTextMessage(user, resp);
+			}
+
+			// 清空缓存信息
+			channelContext.resetUserInfo();
+
+			break;
+		case "audioPlayStart": // 音频开始播放,记录播放偏移时间并开启评测
+			
+			Integer offsetTime = dataObj.getInteger("offsetTime");
+			if(offsetTime != null){
+				channelContext.setOffsetMS(offsetTime);
+				channelContext.setHandlerSwitch(true);
+			}
+
+			break;
+		case "videoUpload": // 上传视频
+			SysMusicCompareRecord musicCompareRecord = null;
+			if (dataObj.containsKey("recordId")) {
+				musicCompareRecord = sysMusicCompareRecordService.get(dataObj.getLong("recordId"));
+			}
+			if (Objects.nonNull(musicCompareRecord)) {
+				if (dataObj.containsKey("filePath")) {
+					musicCompareRecord.setVideoFilePath(dataObj.getString("filePath"));
+				} else {
+					// 未上传视频时,默认使用录音文件地址,避免空指针
+					musicCompareRecord.setVideoFilePath(musicCompareRecord.getRecordFilePath());
+				}
+				sysMusicCompareRecordService.update(musicCompareRecord);
+			}
+			
+			break;
+
+		default:
+			// 非法请求
+			break;
+		}
+		return true;
+	}
+
+	@Override
+	public boolean handleBinaryMessage(String user, Channel channel, byte[] datas) {
+		
+		UserChannelContext channelContext = userChannelContextService.getChannelContext(channel);
+
+		if (channelContext == null) {
+			return false;
+		}
+
+		// 写录音文件
+		WaveformWriter waveFileProcessor = channelContext.getWaveFileProcessor();
+		if (waveFileProcessor == null) {
+			File file = new File(tmpFileDir + user + "_" + sdf.format(new Date()) + ".wav");
+			waveFileProcessor = new WaveformWriter(file.getAbsolutePath());
+			channelContext.setWaveFileProcessor(waveFileProcessor);
+		}
+		waveFileProcessor.process(datas);
+		
+		datas = channelContext.skipMetronome(datas);
+
+		if (datas.length == 0) {
+			return false;
+		}
+
+		channelContext.setChannelBufferBytes(ArrayUtil.mergeByte(channelContext.getChannelBufferBytes(), datas));
+
+		int totalLength = channelContext.getChannelBufferBytes().length;
+		
+		if (!channelContext.getHandlerSwitch()) {
+			return false;
+		}
+		
+		if (channelContext.getOffsetMS() > 0) {
+			int beatByteLength = WaveformWriter.SAMPLE_RATE * WaveformWriter.BITS_PER_SAMPLE / 8 * channelContext.getOffsetMS() / 1000;
+			
+			if(totalLength > beatByteLength){
+				channelContext.setChannelBufferBytes(ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), beatByteLength, totalLength - 1));
+				channelContext.setOffsetMS(0);
+			}else{
+				return false;
+			}
+		}
+		
+		totalLength = channelContext.getChannelBufferBytes().length;
+		if(totalLength % 2 != 0){
+			totalLength--;
+		}
+		
+
+		while (totalLength >= bufferSize) {
+			byte[] bufferData = ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), 0, bufferSize - 1);
+
+			if (bufferSize != totalLength) {
+				channelContext.setChannelBufferBytes(ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), bufferSize, totalLength - 1));
+			} else {
+				channelContext.setChannelBufferBytes(new byte[0]);
+			}
+
+			float[] sampleFloats = new float[bufferSize / 2];
+
+			converter.toFloatArray(bufferData, sampleFloats);
+
+			channelContext.handle(sampleFloats, audioFormat);
+
+			MusicXmlBasicInfo musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
+			int sectionIndex = channelContext.getEvaluatingSectionIndex().get();
+
+			// 评分
+			int score = channelContext.evaluateForSection(sectionIndex, musicXmlBasicInfo.getSubjectId());
+			if (score >= 0) {
+
+				Map<String, Object> params = new HashMap<String, Object>();
+				params.put("score", score);
+				params.put("measureIndex", sectionIndex);
+
+				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("measureScore", params);
+
+				nettyChannelManager.sendTextMessage(user, resp);
+			}
+
+			totalLength = channelContext.getChannelBufferBytes().length;
+		}
+
+		return true;
+	}
+
+}
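
In handleBinaryMessage above, analysis only starts once handlerSwitch is on, and the first offsetMS milliseconds of audio reported through audioPlayStart are discarded before the buffer is cut into fixed windows. A worked sketch of that byte arithmetic with illustrative numbers (the OffsetMathSketch class is not part of the commit; the constants mirror the handler's 44100 Hz / 16-bit mono fields):

public class OffsetMathSketch {

	public static void main(String[] args) {
		int sampleRate = 44100;   // samples per second (handler's sampleRate field)
		int bitsPerSample = 16;   // 2 bytes per sample (handler's bitsPerSample field)
		int offsetMs = 500;       // playback offset reported by the client, illustrative value

		// Bytes discarded before evaluation begins (same formula as the handler):
		int beatByteLength = sampleRate * bitsPerSample / 8 * offsetMs / 1000;
		System.out.println(beatByteLength); // 44100 * 2 * 500 / 1000 = 44100 bytes

		// The remaining stream is consumed in bufferSize-byte windows, each of which
		// becomes bufferSize / 2 float samples for pitch and section analysis.
		int bufferSize = 1024 * 4;
		System.out.println(bufferSize / 2); // 2048 samples per analysis window
	}
}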

+ 0 - 135
audio-analysis/src/main/java/com/yonge/netty/server/service/CompareHandler.java

@@ -1,135 +0,0 @@
-package com.yonge.netty.server.service;
-
-import io.netty.channel.Channel;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.sound.sampled.AudioFormat;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-
-import com.yonge.audio.analysis.AudioFloatConverter;
-import com.yonge.audio.utils.ArrayUtil;
-import com.yonge.nettty.dto.UserChannelContext;
-import com.yonge.nettty.dto.WebSocketResponse;
-import com.yonge.nettty.entity.MusicXmlBasicInfo;
-import com.yonge.netty.server.handler.NettyChannelManager;
-import com.yonge.netty.server.handler.message.BinaryMessageHandler;
-import com.yonge.netty.server.processor.WaveformWriter;
-
-@Component
-public class CompareHandler implements BinaryMessageHandler {
-
-	@Autowired
-	private UserChannelContextService userChannelContextService;
-
-	@Autowired
-	private NettyChannelManager nettyChannelManager;
-
-	/**
-	 * @describe 采样率
-	 */
-	private float sampleRate = 44100;
-
-	/**
-	 * 每个采样大小(Bit)
-	 */
-	private int bitsPerSample = 16;
-
-	/**
-	 * 通道数
-	 */
-	private int channels = 1;
-
-	/**
-	 * @describe 采样大小
-	 */
-	private int bufferSize = 1024 * 4;
-
-	private boolean signed = true;
-
-	private boolean bigEndian = false;
-
-	private AudioFormat audioFormat = new AudioFormat(sampleRate, bitsPerSample, channels, signed, bigEndian);
-
-	private AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
-
-	private String tmpFileDir = "e:/soundRecords/";
-
-	private SimpleDateFormat sdf = new SimpleDateFormat("yyMMddHHmmSS");
-
-	@Override
-	public String getAction() {
-		return "SOUND_COMPARE";
-	}
-
-	@Override
-	public boolean handler(String user, Channel channel, byte[] datas) {
-		UserChannelContext channelContext = userChannelContextService.getChannelContext(channel);
-
-		if (channelContext == null) {
-			return false;
-		}
-
-		// 写录音文件
-		WaveformWriter waveFileProcessor = channelContext.getWaveFileProcessor();
-		if (waveFileProcessor == null) {
-			File file = new File(tmpFileDir + user + "_" + sdf.format(new Date()) + ".wav");
-			waveFileProcessor = new WaveformWriter(file.getAbsolutePath());
-			channelContext.setWaveFileProcessor(waveFileProcessor);
-		}
-		waveFileProcessor.process(datas);
-
-		datas = channelContext.skipHeader(datas);
-
-		if (datas.length == 0) {
-			return false;
-		}
-
-		channelContext.setChannelBufferBytes(ArrayUtil.mergeByte(channelContext.getChannelBufferBytes(), datas));
-
-		int totalLength = channelContext.getChannelBufferBytes().length;
-
-		while (totalLength >= bufferSize) {
-			byte[] bufferData = ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), 0, bufferSize - 1);
-
-			if (bufferSize != totalLength) {
-				channelContext.setChannelBufferBytes(ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), bufferSize, totalLength - 1));
-			} else {
-				channelContext.setChannelBufferBytes(new byte[0]);
-			}
-
-			float[] sampleFloats = new float[bufferSize / 2];
-
-			converter.toFloatArray(bufferData, sampleFloats);
-
-			channelContext.handle(sampleFloats, audioFormat);
-
-			MusicXmlBasicInfo musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
-			int sectionIndex = channelContext.getEvaluatingSectionIndex().get();
-
-			// 评分
-			int score = channelContext.evaluateForSection(sectionIndex, musicXmlBasicInfo.getSubjectId());
-			if (score >= 0) {
-
-				Map<String, Object> params = new HashMap<String, Object>();
-				params.put("score", score);
-				params.put("measureIndex", sectionIndex);
-
-				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("measureScore", params);
-
-				nettyChannelManager.sendTextMessage(user, resp);
-			}
-
-			totalLength = channelContext.getChannelBufferBytes().length;
-		}
-
-		return true;
-	}
-
-}

+ 10 - 4
audio-analysis/src/main/java/com/yonge/netty/server/service/PitchDetectionHandler.java

@@ -14,12 +14,12 @@ import org.springframework.stereotype.Component;
 
 import com.yonge.audio.analysis.AudioFloatConverter;
 import com.yonge.audio.analysis.detector.YINPitchDetector;
-import com.yonge.nettty.dto.WebSocketResponse;
+import com.yonge.netty.dto.WebSocketResponse;
 import com.yonge.netty.server.handler.NettyChannelManager;
-import com.yonge.netty.server.handler.message.BinaryMessageHandler;
+import com.yonge.netty.server.handler.message.MessageHandler;
 
 @Component
-public class PitchDetectionHandler implements BinaryMessageHandler {
+public class PitchDetectionHandler implements MessageHandler {
 	
 	private final static Logger LOGGER = LoggerFactory.getLogger(PitchDetectionHandler.class);
 
@@ -55,7 +55,13 @@ public class PitchDetectionHandler implements BinaryMessageHandler {
 	}
 
 	@Override
-	public boolean handler(String userId, Channel channel, byte[] bytes) {
+	public boolean handleTextMessage(String user, Channel channel, String text) {
+
+		return true;
+	}
+
+	@Override
+	public boolean handleBinaryMessage(String userId, Channel channel, byte[] bytes) {
 
 		float[] samples = new float[bytes.length / 2];
 

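The refactored PitchDetectionHandler converts each binary frame into bytes.length / 2 float samples through AudioFloatConverter before running pitch detection. A rough, self-contained equivalent of that conversion step, assuming the signed 16-bit little-endian mono PCM format configured in these handlers (the Pcm16ToFloatSketch class is illustrative; the project's AudioFloatConverter remains the actual implementation):

public class Pcm16ToFloatSketch {

	// Convert little-endian signed 16-bit PCM bytes into floats normalized to [-1, 1).
	public static float[] toFloatArray(byte[] pcm) {
		float[] samples = new float[pcm.length / 2];
		for (int i = 0; i < samples.length; i++) {
			int lo = pcm[2 * i] & 0xFF;      // low byte
			int hi = pcm[2 * i + 1];         // high byte carries the sign
			short value = (short) ((hi << 8) | lo);
			samples[i] = value / 32768f;
		}
		return samples;
	}

	public static void main(String[] args) {
		byte[] pcm = { 0, 0, (byte) 0xFF, 0x7F, 0x00, (byte) 0x80 }; // raw samples 0, 32767, -32768
		for (float sample : toFloatArray(pcm)) {
			System.out.println(sample);
		}
	}
}
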
+ 1 - 1
audio-analysis/src/main/java/com/yonge/netty/server/service/UserChannelContextService.java

@@ -6,7 +6,7 @@ import java.util.concurrent.ConcurrentHashMap;
 
 import org.springframework.stereotype.Component;
 
-import com.yonge.nettty.dto.UserChannelContext;
+import com.yonge.netty.dto.UserChannelContext;
 
 @Component
 public class UserChannelContextService {

+ 1 - 1
audio-analysis/src/main/resources/bootstrap-test.properties

@@ -1,7 +1,7 @@
 #\u6307\u5b9a\u5f00\u53d1\u73af\u5883
 #spring.profiles.active=dev
 #\u670d\u52a1\u5668\u5730\u5740
-spring.cloud.nacos.config.server-addr=47.114.1.200:8848
+spring.cloud.nacos.config.server-addr=47.114.176.40:8848
 #\u9ed8\u8ba4\u4e3aPublic\u547d\u540d\u7a7a\u95f4,\u53ef\u4ee5\u7701\u7565\u4e0d\u5199
 spring.cloud.nacos.config.namespace=46f06363-b9d6-46f0-9cd7-7b33dcf26bb0
 #\u6307\u5b9a\u914d\u7f6e\u7fa4\u7ec4 --\u5982\u679c\u662fPublic\u547d\u540d\u7a7a\u95f4 \u5219\u53ef\u4ee5\u7701\u7565\u7fa4\u7ec4\u914d\u7f6e

+ 54 - 0
mec-biz/src/main/java/com/ym/mec/biz/dal/entity/SysMusicScore.java

@@ -2,12 +2,36 @@ package com.ym.mec.biz.dal.entity;
 
 import com.ym.mec.biz.dal.enums.ClientTypeEnum;
 import com.ym.mec.biz.dal.enums.ExamSongTypeEnum;
+import com.ym.mec.common.enums.AccessSource;
+import com.ym.mec.common.enums.BaseEnum;
+
 import org.apache.commons.lang3.builder.ToStringBuilder;
 
 /**
  * 对应数据库表(sys_music_score):
  */
 public class SysMusicScore {
+	
+	public enum PlayMode  implements BaseEnum<String, PlayMode> {
+		
+		MP3("MP3播放"),XML("XML播放");
+		
+		private String desc;
+		
+		private PlayMode(String desc) {
+			this.desc = desc;
+		}
+
+		@Override
+		public String getCode() {
+			return this.name();
+		}
+
+		public String getDesc() {
+			return desc;
+		}
+		
+	}
 
 	/**  */
 	private Integer id;
@@ -79,6 +103,12 @@ public class SysMusicScore {
 	private Integer showFlag = 0;
 
 	private Boolean isOpenMetronome;
+	
+	private String museScoreUrl;
+	
+	private String museScoreMemo;
+	
+	private PlayMode playMode;
 
 	private String organName;
 
@@ -114,6 +144,14 @@ public class SysMusicScore {
 		this.isOpenMetronome = isOpenMetronome;
 	}
 
+	public PlayMode getPlayMode() {
+		return playMode;
+	}
+
+	public void setPlayMode(PlayMode playMode) {
+		this.playMode = playMode;
+	}
+
 	public String getAccompanimentUrl() {
 		return accompanimentUrl;
 	}
@@ -298,6 +336,22 @@ public class SysMusicScore {
 		return this.createTime;
 	}
 
+	public String getMuseScoreUrl() {
+		return museScoreUrl;
+	}
+
+	public void setMuseScoreUrl(String museScoreUrl) {
+		this.museScoreUrl = museScoreUrl;
+	}
+
+	public String getMuseScoreMemo() {
+		return museScoreMemo;
+	}
+
+	public void setMuseScoreMemo(String museScoreMemo) {
+		this.museScoreMemo = museScoreMemo;
+	}
+
 	@Override
 	public String toString() {
 		return ToStringBuilder.reflectionToString(this);

+ 31 - 0
mec-biz/src/main/java/com/ym/mec/biz/dal/entity/SysMusicScoreAccompaniment.java

@@ -1,5 +1,6 @@
 package com.ym.mec.biz.dal.entity;
 
+import com.ym.mec.biz.dal.entity.SysMusicScore.PlayMode;
 import com.ym.mec.biz.dal.enums.ClientTypeEnum;
 
 import org.apache.commons.lang3.builder.ToStringBuilder;
@@ -76,6 +77,12 @@ public class SysMusicScoreAccompaniment {
 	private String renderFrom;
 	
 	private boolean enableEvaluation;
+	
+	private String museScoreUrl;
+	
+	private String museScoreMemo;
+	
+	private PlayMode playMode;
 
 	public String getMetronomeUrl() {
 		return metronomeUrl;
@@ -277,6 +284,30 @@ public class SysMusicScoreAccompaniment {
 		this.metronomeMp3Url = metronomeMp3Url;
 	}
 
+	public String getMuseScoreUrl() {
+		return museScoreUrl;
+	}
+
+	public void setMuseScoreUrl(String museScoreUrl) {
+		this.museScoreUrl = museScoreUrl;
+	}
+
+	public String getMuseScoreMemo() {
+		return museScoreMemo;
+	}
+
+	public void setMuseScoreMemo(String museScoreMemo) {
+		this.museScoreMemo = museScoreMemo;
+	}
+
+	public PlayMode getPlayMode() {
+		return playMode;
+	}
+
+	public void setPlayMode(PlayMode playMode) {
+		this.playMode = playMode;
+	}
+
 	@Override
 	public String toString() {
 		return ToStringBuilder.reflectionToString(this);

+ 7 - 2
mec-biz/src/main/resources/config/mybatis/SysMusicScoreAccompanimentMapper.xml

@@ -32,6 +32,9 @@
 		<result column="render_from_" property="renderFrom" />
 		<result column="enable_evaluation_" property="enableEvaluation" />
 		<result column="client_type_" property="clientType" typeHandler="com.ym.mec.common.dal.CustomEnumTypeHandler"/>
+		<result column="play_mode_" property="playMode" typeHandler="com.ym.mec.common.dal.CustomEnumTypeHandler" />
+		<result column="muse_score_url_" property="museScoreUrl" />
+		<result column="muse_score_memo_" property="museScoreMemo" />
 	</resultMap>
 
 	<delete id="deleteBySongId">
@@ -128,7 +131,8 @@
 	
 	<!-- 分页查询 -->
 	<select id="queryPage" resultMap="SysMusicScoreAccompaniment" parameterType="map">
-		SELECT sesa.*,ses.name_,ses.type_,ses.url_,s.name_ subject_name_,sesc.name_ categories_name_,sesc.id_ categories_id_,sesc.parent_id_ parent_categories_id_,ses.client_type_,ses.rank_ids_,ses.render_from_,ses.enable_evaluation_,ses.metronome_url_
+		SELECT sesa.*,ses.name_,ses.type_,ses.url_,s.name_ subject_name_,sesc.name_ categories_name_,sesc.id_ categories_id_,sesc.parent_id_ parent_categories_id_,ses.client_type_,ses.rank_ids_,ses.render_from_,
+		ses.enable_evaluation_,ses.metronome_url_,ses.muse_score_url_,ses.muse_score_memo_,ses.play_mode_
 		FROM sys_music_score ses
 		LEFT JOIN sys_music_score_accompaniment sesa ON ses.id_ = sesa.exam_song_id_
 		LEFT JOIN sys_music_score_categories sesc ON sesc.id_ = ses.music_score_categories_id_
@@ -163,7 +167,8 @@
 		</where>
 	</select>
 	<select id="queryAccPage" resultMap="SysMusicScoreAccompaniment">
-		SELECT sesa.*,ses.name_,ses.type_,ses.url_,s.name_ subject_name_,sesc.name_ categories_name_,sesc.id_ categories_id_,sesc.parent_id_ parent_categories_id_,ses.client_type_,ses.enable_evaluation_,ses.metronome_url_
+		SELECT sesa.*,ses.name_,ses.type_,ses.url_,s.name_ subject_name_,sesc.name_ categories_name_,sesc.id_ categories_id_,sesc.parent_id_ parent_categories_id_,ses.client_type_,
+		ses.enable_evaluation_,ses.metronome_url_,ses.muse_score_url_,ses.muse_score_memo_,ses.play_mode_
 		FROM sys_music_score ses
 		LEFT JOIN sys_music_score_accompaniment sesa ON ses.id_ = sesa.exam_song_id_
 		LEFT JOIN sys_music_score_categories sesc ON sesc.id_ = ses.music_score_categories_id_

+ 14 - 2
mec-biz/src/main/resources/config/mybatis/SysMusicScoreMapper.xml

@@ -29,6 +29,9 @@
 		<result column="render_from_" property="renderFrom" />
 		<result column="enable_evaluation_" property="enableEvaluation" />
 		<result column="subject_id_" property="subjectId" />
+		<result column="play_mode_" property="playMode" typeHandler="com.ym.mec.common.dal.CustomEnumTypeHandler" />
+		<result column="muse_score_url_" property="museScoreUrl" />
+		<result column="muse_score_memo_" property="museScoreMemo" />
 		<result column="client_type_" property="clientType" typeHandler="com.ym.mec.common.dal.CustomEnumTypeHandler"/>
 		<result column="update_time_" property="updateTime" />
 		<result column="create_time_" property="createTime" />
@@ -47,10 +50,10 @@
 	<!-- 向数据库增加一条记录 -->
 	<insert id="insert" parameterType="com.ym.mec.biz.dal.entity.SysMusicScore" useGeneratedKeys="true" keyColumn="id" keyProperty="id">
 		INSERT INTO sys_music_score (music_score_categories_id_,name_,type_,speed_,url_,metronome_url_,create_user_id_,order_,
-		                             update_time_,create_time_,client_type_,rank_ids_,render_from_,enable_evaluation_,show_flag_)
+		                             update_time_,create_time_,client_type_,rank_ids_,render_from_,enable_evaluation_,show_flag_,play_mode_,muse_score_url_,muse_score_memo_)
 		VALUES(#{musicScoreCategoriesId},#{name},#{type,typeHandler=com.ym.mec.common.dal.CustomEnumTypeHandler},
 		       #{speed},#{url},#{metronomeUrl},#{createUserId},#{order},NOW(),NOW(),#{clientType,typeHandler=com.ym.mec.common.dal.CustomEnumTypeHandler},
-		       #{rankIds},#{renderFrom},#{enableEvaluation},#{showFlag})
+		       #{rankIds},#{renderFrom},#{enableEvaluation},#{showFlag},#{playMode},#{museScoreUrl},#{museScoreMemo})
 	</insert>
 
 	<!-- 根据主键查询一条记录 -->
@@ -95,6 +98,15 @@
 		<if test="speed != null">
 			speed_ = #{speed},
 		</if>
+		<if test="playMode != null">
+			play_mode_ = #{playMode,typeHandler=com.ym.mec.common.dal.CustomEnumTypeHandler},
+		</if>
+		<if test="museScoreUrl != null">
+			muse_score_url_ = #{museScoreUrl},
+		</if>
+		<if test="museScoreMemo != null">
+			muse_score_memo_ = #{museScoreMemo},
+		</if>
 			update_time_ = NOW()
 	</set>
 		WHERE id_ = #{id}

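The insert and update statements above now carry the three new columns play_mode_, muse_score_url_ and muse_score_memo_. A short usage sketch built only on the setters added to SysMusicScore in this commit (the PlayModeUsageSketch class name, the URL and the memo text are placeholders; persisting the entity still goes through the project's existing DAO/service layer, which is not shown here):

import com.ym.mec.biz.dal.entity.SysMusicScore;

public class PlayModeUsageSketch {

	public static void main(String[] args) {
		SysMusicScore score = new SysMusicScore();
		score.setPlayMode(SysMusicScore.PlayMode.XML);            // play_mode_
		score.setMuseScoreUrl("https://example.com/demo.mscz");   // muse_score_url_ (placeholder)
		score.setMuseScoreMemo("demo memo");                      // muse_score_memo_ (placeholder)

		// PlayMode.getCode() returns name(), so the value stored for play_mode_
		// is the enum name, e.g. "XML".
		System.out.println(score);
	}
}
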
+ 3 - 0
mec-common/common-core/src/main/java/com/ym/mec/common/dal/CustomEnumTypeHandler.java

@@ -50,6 +50,9 @@ public class CustomEnumTypeHandler extends BaseTypeHandler<BaseEnum> {
 			return null;
 		}
 		Object code = null;
+		if(type == null){
+			return null;
+		}
 		for (BaseEnum enumBaseInterface : type.getEnumConstants()) {
 
 			code = enumBaseInterface.getCode();

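The null check added above lets CustomEnumTypeHandler return null instead of failing when no concrete enum type was resolved; otherwise it walks type.getEnumConstants() and reads each constant's getCode() to match against the column value. A sketch of an enum this handler can map, modeled on the PlayMode enum introduced earlier in the commit (the RenderMode name and its constants are illustrative only; BaseEnum comes from the project):

import com.ym.mec.common.enums.BaseEnum;

public enum RenderMode implements BaseEnum<String, RenderMode> {

	STAFF("五线谱"), JIANPU("简谱");

	private String desc;

	private RenderMode(String desc) {
		this.desc = desc;
	}

	@Override
	public String getCode() {
		// Value written to the database column and matched by CustomEnumTypeHandler.
		return this.name();
	}

	public String getDesc() {
		return desc;
	}
}
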
+ 0 - 1
mec-im/pom.xml

@@ -100,7 +100,6 @@
     <dependency>
       <groupId>io.netty</groupId>
       <artifactId>netty-all</artifactId>
-      <version>4.1.24.Final</version>
     </dependency>
   </dependencies>
 

+ 12 - 0
pom.xml

@@ -30,6 +30,12 @@
 				<version>${spring-boot.version}</version>
 				<type>pom</type>
 				<scope>import</scope>
+				<exclusions>
+					<exclusion>
+						<groupId>org.springframework.boot</groupId>
+						<artifactId>spring-boot-starter-log4j2</artifactId>
+					</exclusion>
+				</exclusions>
 			</dependency>
 
 			<dependency>
@@ -38,6 +44,12 @@
 				<version>${spring-cloud.version}</version>
 				<type>pom</type>
 				<scope>import</scope>
+				<exclusions>
+					<exclusion>
+						<groupId>org.springframework.boot</groupId>
+						<artifactId>spring-boot-starter-log4j2</artifactId>
+					</exclusion>
+				</exclusions>
 			</dependency>
 
 			<dependency>