Browse Source

测评代码优化

yonge 6 months ago
parent
commit
46edbae8a0

+ 15 - 3
audio-analysis/src/main/java/com/yonge/audio/analysis/Signals.java

@@ -15,17 +15,29 @@ public class Signals {
 		return mean;
 	}
 
-	public static double energy(float[] signal) {
-		double totalEnergy = 0;
+	public static float energy(float[] signal) {
+		float totalEnergy = 0;
 		for (int i = 0; i < signal.length; i++)
 			totalEnergy += Math.pow(signal[i], 2);
 		return totalEnergy;
 	}
 
-	public static double power(float[] signal) {
+	public static float power(float[] signal) {
 		return energy(signal) / signal.length;
 	}
 
+    public static float[] energyEnvelope(float[] audioData, int frameSize) {
+        float[] energyEnvelope = new float[audioData.length / frameSize];
+        for (int i = 0; i < energyEnvelope.length; i++) {
+            float sum = 0;
+            for (int j = 0; j < frameSize; j++) {
+                sum += audioData[i * frameSize + j] * audioData[i * frameSize + j];
+            }
+            energyEnvelope[i] = sum / frameSize;
+        }
+        return energyEnvelope;
+    }
+	
 	public static float norm(float[] signal) {
 		return (float) Math.sqrt(energy(signal));
 	}

+ 95 - 0
audio-analysis/src/main/java/com/yonge/audio/analysis/detector/PercussionRhythmDetector.java

@@ -0,0 +1,95 @@
+package com.yonge.audio.analysis.detector;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class PercussionRhythmDetector {
+	
+	private final static Logger LOGGER = LoggerFactory.getLogger(PercussionRhythmDetector.class);
+	
+	private float[] energyEnvelope;
+	
+	private float sampleRate;
+	
+	private int frameSize;
+	
+	private final float FACTOR = 4.5f;
+
+	public PercussionRhythmDetector(float[] energyEnvelope, float sampleRate, int frameSize) {
+		this.energyEnvelope = energyEnvelope;
+		this.sampleRate = sampleRate;
+		this.frameSize = frameSize;
+	}
+	
+	private float[] calculateEnergyEnvelopeDifference(float[] energyEnvelope) {
+        float[] diff = new float[energyEnvelope.length - 1];
+        for (int i = 0; i < diff.length; i++) {
+            diff[i] = energyEnvelope[i + 1] - energyEnvelope[i];
+        }
+        return diff;
+    }
+    
+	private float calculateMean(float[] energyEnvelope) {
+        float sum = 0;
+        for (float value : energyEnvelope) {
+            sum += value;
+        }
+        return sum / energyEnvelope.length;
+    }
+
+	private float calculateStandardDeviation(float[] energyEnvelope, float mean) {
+        float sum = 0;
+        for (float value : energyEnvelope) {
+            sum += (value - mean) * (value - mean);
+        }
+        return (float) Math.sqrt(sum / energyEnvelope.length);
+    }
+    
+	private List<Integer> detectPercussionRhythm(float[] diff, float mean, float stdDev, float factor) {
+        List<Integer> beatPositions = new ArrayList<>();
+        float dynamicThreshold = mean + factor * stdDev;
+        LOGGER.debug("size:{} dynamicThreshold:{} DIFF:{}", diff.length, dynamicThreshold, Arrays.toString(diff));
+        
+        for (int i = 1; i < diff.length; i++) {
+            if (diff[i] > dynamicThreshold && diff[i - 1] <= dynamicThreshold) {
+                beatPositions.add(i - 1);
+            }
+        }
+        return beatPositions;
+    }
+
+	private Map<Integer, Double> convertPositionsToTime(List<Integer> positions, int frameSize, float sampleRate) {
+		Map<Integer, Double> map = new HashMap<Integer, Double>();
+
+		int intervalTime = 150;
+		double lastTime = Math.negateExact(intervalTime);
+		
+        for (int position : positions) {
+            double time = (position * frameSize * 1000) / sampleRate;
+            if(time - lastTime >= intervalTime) {
+            	map.put(position, time);
+            	LOGGER.debug("Detected beats at times (millisecond): {}", time);
+            	
+            	lastTime = time;
+            }
+        }
+        return map;
+    }
+    
+    public Map<Integer, Double> detect(){
+    	
+        float[] diff = calculateEnergyEnvelopeDifference(energyEnvelope);
+        float mean = calculateMean(diff);
+        float stdDev = calculateStandardDeviation(diff, mean);
+        
+        List<Integer> beatPositions = detectPercussionRhythm(diff, mean, stdDev, FACTOR);
+        
+        return convertPositionsToTime(beatPositions, frameSize, sampleRate);
+    }
+}

+ 1 - 1
audio-analysis/src/main/java/com/yonge/audio/analysis/detector/YINPitchDetector.java

@@ -233,6 +233,6 @@ public class YINPitchDetector {
     }
     
     public static void main(String[] args) {
-    	System.out.println(YINPitchDetector.getDeviationCent(353, 342));
+    	System.out.println(YINPitchDetector.getDeviationCent(-1, 342));
 	}
 }

+ 10 - 17
audio-analysis/src/main/java/com/yonge/netty/dto/ChunkAnalysis.java

@@ -1,7 +1,6 @@
 package com.yonge.netty.dto;
 
 import org.apache.commons.lang3.builder.ToStringBuilder;
-import org.apache.logging.log4j.util.StringBuilders;
 
 public class ChunkAnalysis {
 
@@ -11,32 +10,26 @@ public class ChunkAnalysis {
 
 	private double durationTime;
 
-	private int frequency;
+	private float frequency;
 
 	private double splDb;
 
-	private double power;
+	private float[] energyEnvelop;
 	
 	private int amplitude;
 	
 	private boolean isPeak;
 
-	public ChunkAnalysis(double startTime, double endTime, int frequency, double splDb, double power, int amplitude) {
+	public ChunkAnalysis(double startTime, double endTime, float frequency, double splDb, float[] energyEnvelop, int amplitude) {
 		this.startTime = startTime;
 		this.endTime = endTime;
 		this.frequency = frequency;
 		this.splDb = splDb;
-		this.power = power;
+		this.energyEnvelop = energyEnvelop;
 		this.amplitude = amplitude;
 		this.durationTime = endTime - startTime;
 	}
 
-	public ChunkAnalysis(int frequency, double splDb, double power) {
-		this.frequency = frequency;
-		this.splDb = splDb;
-		this.power = power;
-	}
-
 	public double getStartTime() {
 		return startTime;
 	}
@@ -61,11 +54,11 @@ public class ChunkAnalysis {
 		this.durationTime = durationTime;
 	}
 
-	public int getFrequency() {
+	public float getFrequency() {
 		return frequency;
 	}
 
-	public void setFrequency(int frequency) {
+	public void setFrequency(float frequency) {
 		this.frequency = frequency;
 	}
 
@@ -77,12 +70,12 @@ public class ChunkAnalysis {
 		this.splDb = splDb;
 	}
 
-	public double getPower() {
-		return power;
+	public float[] getEnergyEnvelop() {
+		return energyEnvelop;
 	}
 
-	public void setPower(double power) {
-		this.power = power;
+	public void setEnergyEnvelop(float[] energyEnvelop) {
+		this.energyEnvelop = energyEnvelop;
 	}
 
 	public int getAmplitude() {

+ 8 - 8
audio-analysis/src/main/java/com/yonge/netty/dto/NoteAnalysis.java

@@ -33,7 +33,7 @@ public class NoteAnalysis {
 	
 	private double durationTime;
 
-	private int frequency;
+	private float frequency;
 	
 	private double dBSPL;
 	
@@ -43,7 +43,7 @@ public class NoteAnalysis {
 	
 	private double decibels;
 
-	private int playFrequency = -1;
+	private float playFrequency = -1;
 
 	//节奏状态(0-节奏错误,1-音高识别的节奏,2-声压识别的节奏,3-振幅识别的节奏,99-其他)
 	private int tempoStatus = 0;
@@ -62,7 +62,7 @@ public class NoteAnalysis {
 	
 	private int measureRenderIndex;
 	
-	public NoteAnalysis(int measureRenderIndex, int index, int sectionIndex, int frequency, double durationTime) {
+	public NoteAnalysis(int measureRenderIndex, int index, int sectionIndex, float frequency, double durationTime) {
 		this.measureRenderIndex = measureRenderIndex;
 		this.durationTime = durationTime;
 		this.index = index;
@@ -70,7 +70,7 @@ public class NoteAnalysis {
 		this.frequency = frequency;
 	}
 
-	public NoteAnalysis(double startTime, double endTime, int playFrequency) {
+	public NoteAnalysis(double startTime, double endTime, float playFrequency) {
 		this.startTime = startTime;
 		this.endTime = endTime;
 		this.durationTime = endTime - startTime;
@@ -109,19 +109,19 @@ public class NoteAnalysis {
 		this.durationTime = durationTime;
 	}
 
-	public double getPlayFrequency() {
+	public float getPlayFrequency() {
 		return playFrequency;
 	}
 
-	public void setPlayFrequency(int playFrequency) {
+	public void setPlayFrequency(float playFrequency) {
 		this.playFrequency = playFrequency;
 	}
 
-	public int getFrequency() {
+	public float getFrequency() {
 		return frequency;
 	}
 
-	public void setFrequency(int frequency) {
+	public void setFrequency(float frequency) {
 		this.frequency = frequency;
 	}
 

+ 370 - 338
audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext.java

@@ -14,12 +14,14 @@ import java.util.stream.Collectors;
 
 import javax.sound.sampled.AudioFormat;
 
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.math3.analysis.function.Gaussian;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.yonge.audio.analysis.AudioFloatConverter;
 import com.yonge.audio.analysis.Signals;
+import com.yonge.audio.analysis.detector.PercussionRhythmDetector;
 import com.yonge.audio.analysis.detector.YINPitchDetector;
 import com.yonge.netty.dto.NoteAnalysis.NoteErrorType;
 import com.yonge.netty.entity.MusicXmlBasicInfo;
@@ -42,8 +44,16 @@ public class UserChannelContext {
 	private final static int MIN_FREQUECY = 43;
 
 	private final static int MAX_FREQUECY = 2000;
+	
+	private AudioFormat audioFormat = new AudioFormat(44100, 16, 1, true, false);
+	
+	private int bufferSize = 1024;
+	
+	private int frameSize = 128;
 
 	private FastYin detector;
+	
+	private AudioFloatConverter converter;
 
 	private String user;
 
@@ -81,7 +91,9 @@ public class UserChannelContext {
 	private List<SectionAnalysis> doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
 
 	private List<ChunkAnalysis> totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
-
+	
+	private List<Float> floatSamples = new ArrayList<Float>();
+	
 	private byte[] channelBufferBytes = new byte[0];
 
 	private double playTime;
@@ -125,15 +137,12 @@ public class UserChannelContext {
 		return result;
 	}
 
-	public void init(MusicXmlBasicInfo musicXmlBasicInfo, float sampleRate, int bufferSize) {
+	public void init(MusicXmlBasicInfo musicXmlBasicInfo) {
 		this.platform = musicXmlBasicInfo.getPlatform();
 		this.subjectId = musicXmlBasicInfo.getSubjectId();
 		this.beatDuration = musicXmlBasicInfo.getBeatLength();
 		this.hardLevel = HardLevelEnum.valueOf(musicXmlBasicInfo.getHeardLevel());
 		this.evaluationCriteria = musicXmlBasicInfo.getEvaluationCriteria();
-		if (detector == null) {
-			detector = new FastYin(sampleRate, bufferSize);
-		}
 	}
 
 	public void setUser(String user) {
@@ -220,6 +229,7 @@ public class UserChannelContext {
 		doneNoteAnalysisList = new ArrayList<NoteAnalysis>();
 		doneSectionAnalysisList = new ArrayList<SectionAnalysis>();
 		totalChunkAnalysisList = new ArrayList<ChunkAnalysis>();
+		floatSamples = new ArrayList<Float>();
 		recordId = null;
 		playTime = 0;
 		delayProcessed = false;
@@ -335,7 +345,27 @@ public class UserChannelContext {
 		return evaluatingSectionIndex;
 	}
 
-	public void handle(float[] samples, AudioFormat audioFormat) {
+	public int getBufferSize() {
+		return bufferSize;
+	}
+
+	public void handle(byte[] datas) {
+		
+		if(converter == null) {
+			converter = AudioFloatConverter.getConverter(audioFormat);
+		}
+		
+		float[] samples = new float[getBufferSize()];
+
+		converter.toFloatArray(datas, samples);
+		
+		for(float f : samples) {
+			floatSamples.add(f);
+		}
+		
+		if (detector == null) {
+			detector = new FastYin(audioFormat.getSampleRate(), getBufferSize());
+		}
 
 		// YINPitchDetector frequencyDetector = new YINPitchDetector(samples.length , audioFormat.getSampleRate());
 		// int playFrequency = (int) frequencyDetector.getFrequency(samples);
@@ -345,23 +375,16 @@ public class UserChannelContext {
 			playFrequency = (int) detector.getPitch(samples).getPitch();
 		}
 
-		double splDb = Signals.soundPressureLevel(samples);
-		double power = Signals.power(samples);
+		double splDb = 0;
+		float[] energyEnvelop = Signals.energyEnvelope(samples, frameSize);
 		int amplitude = (int) Signals.norm(samples);
 		
-		int decibels = (int) Signals.decibels(samples);
-		if (StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.AMPLITUDE.getCode())) {
-			amplitude = (int) Signals.norm(samples);
-		} else if (StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.DECIBELS.getCode())) {
-			amplitude = (int) Signals.decibels(samples);
-			amplitude = amplitude >= 60 ? amplitude : 0;
-		}
-		// float rms = Signals.rms(samples);
-
+		int decibels = 0;
+		
 		double durationTime = 1000 * (samples.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
 
 		playTime += durationTime;
-
+		
 		// 获取当前音符信息
 		MusicXmlNote musicXmlNote = getCurrentMusicNote(null, null);
 
@@ -380,32 +403,44 @@ public class UserChannelContext {
 
 		if (noteAnalysis.getMusicalNotesIndex() >= 0 && noteAnalysis.getMusicalNotesIndex() <= getTotalMusicNoteIndex(null)) {
 
-			LOGGER.debug("user:{}  delayProcessed:{}  dynamicOffset:{}  Frequency:{}  splDb:{}  power:{}  amplitude:{} decibels:{}  endtime:{}", user,
-					delayProcessed, dynamicOffset, playFrequency, splDb, power, amplitude, decibels, playTime);
-
-			ChunkAnalysis chunkAnalysis = new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude);
+			ChunkAnalysis chunkAnalysis = new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, energyEnvelop, amplitude);
 
 			if (totalChunkAnalysisList.size() > 0) {
 				if (totalChunkAnalysisList.get(totalChunkAnalysisList.size() - 1).getAmplitude() + 2 < chunkAnalysis.getAmplitude()) {
 					chunkAnalysis.setPeak(true);// 只针对打击乐
 				}
 			}
+			
+			//处理泛音
+			float basicFrequency = -1;
+			if(totalChunkAnalysisList.size() > 0) {
+				basicFrequency = totalChunkAnalysisList.get(totalChunkAnalysisList.size() - 1).getFrequency();
+			}
+			chunkAnalysis.setFrequency(handleHarmonic(basicFrequency, playFrequency));
+			
 			totalChunkAnalysisList.add(chunkAnalysis);
 
-			if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp())) {
+			LOGGER.debug("user:{}  delayProcessed:{}  dynamicOffset:{}  Frequency:{}  splDb:{}  amplitude:{} decibels:{} endtime:{}", user,
+					delayProcessed, dynamicOffset, chunkAnalysis.getFrequency(), splDb, amplitude, decibels, playTime);
+
+			if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + getOffsetMS() + beatDuration)) {
 
-				//musicXmlNote.setTimeStamp(musicXmlNote.getTimeStamp() + micDelayMS);
+				musicXmlNote.setTimeStamp(musicXmlNote.getTimeStamp() + getOffsetMS() + beatDuration);
+				
+				if(musicXmlNote.getFrequency() <= 0) {
+					musicXmlNote.setDontEvaluating(true);
+				}
 
 				if (musicXmlNote.getDontEvaluating()) {
 					noteAnalysis.setIgnore(true);
 				}
 				
 				List<ChunkAnalysis> nodeChunkAnalysisList = extract(musicXmlNote, noteAnalysis);
-
+				
 				// 判断节奏(音符持续时间内有不间断的音高,就节奏正确)
 				if (!StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
 					noteAnalysis.setPlayFrequency(-1);
-					noteAnalysis.setTempoStatus(computeTempoWithAmplitude2(musicXmlNote, noteAnalysis, nodeChunkAnalysisList));
+					noteAnalysis.setTempoStatus(computeTempoWithAmplitude(musicXmlNote, noteAnalysis, nodeChunkAnalysisList));
 				} else {
 					noteAnalysis.setPlayFrequency(computeFrequency(musicXmlNote, nodeChunkAnalysisList));
 					noteAnalysis.setTempoStatus(computeTempoWithFrequency(musicXmlNote, noteAnalysis, nodeChunkAnalysisList));
@@ -532,24 +567,7 @@ public class UserChannelContext {
 
 		double playDurationTime = 0;
 
-		if (!StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
-			if (noteAnalysis.getFrequency() == -1) {// 休止符
-				if (noteAnalysis.getTempoStatus() == 0) {
-					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
-				} else {
-					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
-				}
-			} else {
-				int beatTimes = (int) chunkAnalysisList.stream().filter(t -> t.getAmplitude() > 0).count();
-				if (beatTimes == 0) {
-					noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
-				} else if (noteAnalysis.getTempoStatus() == 0) {
-					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
-				} else {
-					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
-				}
-			}
-		} else {
+		if (StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
 
 			NotePlayResult notePlayResult = queryNoteFrequency(musicXmlNote, noteAnalysis.getPlayFrequency());
 
@@ -623,9 +641,9 @@ public class UserChannelContext {
 		}
 	}
 
-	private int computeFrequency(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList) {
+	private float computeFrequency(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList) {
 
-		if (chunkAnalysisList == null || chunkAnalysisList.size() == 0) {
+		if (chunkAnalysisList == null || chunkAnalysisList.size() == 0 || musicXmlNote.getDontEvaluating()) {
 			return -1;
 		}
 
@@ -633,14 +651,14 @@ public class UserChannelContext {
 
 		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
 
-		List<Integer> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency())
+		List<Float> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency())
 				.filter(t -> t.doubleValue() > MIN_FREQUECY && t.doubleValue() < MAX_FREQUECY).collect(Collectors.toList());
 
 		if (chunkFrequencyList.size() == 0) {
 			return -1;
 		}
 
-		int frequency = (int) (chunkFrequencyList.stream().mapToInt(t -> t).sum() / chunkFrequencyList.size());
+		float frequency = chunkFrequencyList.stream().reduce(0f, (a, b) -> a + b) / chunkFrequencyList.size();
 		
 		return frequency;
 	}
@@ -653,248 +671,125 @@ public class UserChannelContext {
 	 */
 	private int computeTempoWithFrequency(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> chunkAnalysisList) {
 		
+		if (chunkAnalysisList == null || chunkAnalysisList.size() == 0 || musicXmlNote.getDontEvaluating()) {
+			return -1;
+		}
+		
 		LOGGER.debug("---------------------TEMPO----------------------");
 		
-		double avgPower = chunkAnalysisList.stream().collect(Collectors.averagingDouble(ChunkAnalysis::getSplDb));
-		noteAnalysis.setDBSPL(avgPower);
-
-		if (noteAnalysis.getTempoStatus() == 0 || chunkAnalysisList == null || chunkAnalysisList.size() == 0) {
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		Map<Integer, Double> frequencyRhythmMap = queryRhythmsByFrequency(musicXmlNote, noteAnalysis, chunkAnalysisList);
+		
+		if(frequencyRhythmMap.size() > 1) {
+			LOGGER.debug("根据音高检测到[{}]个断点,分别在[{}]", frequencyRhythmMap.size(), frequencyRhythmMap.values().stream().map(value -> value + firstChunkAnalysis.getStartTime()).map(Object::toString).collect(Collectors.joining(",")));
 			return 0;
 		}
-
-		reduceNoise(chunkAnalysisList, EvaluationCriteriaEnum.FREQUENCY);
-
-		if (musicXmlNote.getFrequency() == -1) {// 休止符
-			return chunkAnalysisList.stream().filter(t -> t.getFrequency() > MIN_FREQUECY).count() <= 1 ? 1 : 0;
-		}
-
-		// 将信号分堆归类
-		Map<Integer, Integer> signalGrouping = new HashMap<Integer, Integer>();
-		for (int i = 1; i < chunkAnalysisList.size(); i++) {
-			if (isSamePitch(chunkAnalysisList.get(i).getFrequency(), chunkAnalysisList.get(i - 1).getFrequency())) {
-				if (signalGrouping.get(chunkAnalysisList.get(i - 1).getFrequency()) == null) {
-					signalGrouping.put(chunkAnalysisList.get(i - 1).getFrequency(), 1);
-				} else {
-					signalGrouping.put(chunkAnalysisList.get(i - 1).getFrequency(), signalGrouping.get(chunkAnalysisList.get(i - 1).getFrequency()) + 1);
-				}
-			} else {
-				signalGrouping.put(chunkAnalysisList.get(i - 1).getFrequency(), 1);
-			}
-		}
-
-		int maxTimes = 0, avgFrequency = 0;
-
-		for (Entry<Integer, Integer> entry : signalGrouping.entrySet()) {
-			if (entry.getValue() > maxTimes) {
-				maxTimes = entry.getValue();
-				avgFrequency = entry.getKey();
-			}
-		}
-
-		LOGGER.debug("当前音符时值范围内平均音高[{}] 声压[{}]", avgFrequency, avgPower);
-
-		int firstBeatIndex = -1;
+		
 		double firstBeatTime = 0;
-
-		int depthThreshold = 2;
-		int peakNum = 0;
-		int continueNums = 0;
-		boolean isContinue = false;
-		// 检测音高是否间断
-		for (int i = 0; i < chunkAnalysisList.size(); i++) {
-			if (firstBeatIndex == -1) {
-				if (chunkAnalysisList.get(i).getFrequency() > MIN_FREQUECY || musicXmlNote.getFrequency() == -1) {
-					firstBeatIndex = i;
-					firstBeatTime = chunkAnalysisList.get(i).getStartTime();
-				}
+		
+		if(frequencyRhythmMap.size() == 1) {
+			// 判断进入时间点
+			for(Entry<Integer, Double> entry : frequencyRhythmMap.entrySet()) {
+				firstBeatTime = entry.getValue();
 			}
-			if (!isSamePitch(avgFrequency, chunkAnalysisList.get(i).getFrequency())) {
-				++continueNums;
-				if (continueNums >= depthThreshold) {
-					if (isContinue == false) {
-						LOGGER.debug("范围内查询到异常音高信号,StartTime:{}  CurrentFrequency:{} AvgFrequency:{}", chunkAnalysisList.get(i).getStartTime(), chunkAnalysisList.get(i).getFrequency(), avgFrequency);
-						peakNum++;
-						isContinue = true;
-					}
-				}
-			} else {
-				continueNums = 0;
-				isContinue = false;
+			
+			if ((firstBeatTime - firstChunkAnalysis.getStartTime()) * 100 / musicXmlNote.getDuration() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(),
+					musicXmlNote.getDuration()) * 2) {
+				LOGGER.debug("节奏错误原因:进入时间点[{}]太晚", firstBeatTime);
+				return 0;
 			}
-		}
-
-		if (peakNum > 0) {
-			LOGGER.debug("已找到节奏起始点,根据音高又检测到[{}]个断点", peakNum);
+			LOGGER.debug("找到节奏点StartTime:{}", firstBeatTime);
+		} else {
+			LOGGER.debug("没有找到节奏点");
 			return 0;
 		}
+
+		/**
+		//根据能量包络再进行分析节奏
+		Map<Integer, Double> energyRrhythmMap = queryRhythmsByEnergyEnvelope(musicXmlNote, noteAnalysis, chunkAnalysisList);
 		
-		// 判断进入时间点
-		if (firstBeatIndex != -1 && firstBeatIndex * 100 / (chunkAnalysisList.size() - 1) > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration())) {
-			LOGGER.debug("节奏(音高)错误原因:进入时间点[{}]太晚", firstBeatTime);
+		if (energyRrhythmMap.size() > 1) {
+			LOGGER.debug("根据能量包络检测到[{}]个节奏点,分别在[{}]", energyRrhythmMap.size(), energyRrhythmMap.values().stream().map(value -> value + firstChunkAnalysis.getStartTime()).map(Object::toString).collect(Collectors.joining(",")));
 			return 0;
 		}
 		
-		//判断是否与上一个音延续下来的
-
-		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
-
-		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream()
-				.filter(t -> Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(firstChunkAnalysis.getStartTime()))
-				.reduce((first, second) -> second);
-
-		ChunkAnalysis lastChunkAnalysis = null;
-		if (chunkAnalysisOptional.isPresent()) {
-			lastChunkAnalysis = chunkAnalysisOptional.get();
-		}
+		if(energyRrhythmMap.size() == 1) {
+			for(Entry<Integer, Double> entry : energyRrhythmMap.entrySet()) {
+				firstBeatTime = entry.getValue();
+			}
+			
+			firstBeatTime = firstChunkAnalysis.getStartTime() + firstBeatTime;
 
-		if (lastChunkAnalysis == null) {
-			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, -40, 0, 0);
-		}
-		
-		//只有音高节奏点才需要检测这个条件
-		if(noteAnalysis.getTempoStatus() == 1) {
-			if(firstChunkAnalysis.getFrequency() > MIN_FREQUECY && lastChunkAnalysis.getFrequency() > MIN_FREQUECY){
-				if(isSamePitch(firstChunkAnalysis.getFrequency(), lastChunkAnalysis.getFrequency())){
-					LOGGER.debug("节奏错误原因:上一个音[{}]延续下来导致的", lastChunkAnalysis.getFrequency());
-					return 0;
-				}
+			// 判断进入时间点
+			if ((firstBeatTime - firstChunkAnalysis.getStartTime()) * 100 / musicXmlNote.getDuration() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(),
+					musicXmlNote.getDuration()) * 2) {
+				LOGGER.debug("节奏错误原因:进入时间点[{}]太晚", firstBeatTime);
+				return 0;
 			}
 		}
 		
-		continueNums = 0;
-		firstBeatIndex = -1;
-		firstBeatTime = 0;
-		// 判断过程中声音是否有起伏
-		
-		//获取上一个音符的声压等级
-		Optional<NoteAnalysis> preNoteAnalysisOptinal = doneNoteAnalysisList.stream().filter(t -> t.getIndex() == musicXmlNote.getMusicalNotesIndex() - 1).findFirst();
-		if(preNoteAnalysisOptinal.isPresent()) {
-			avgPower = preNoteAnalysisOptinal.get().getDBSPL();
-		}
-		
-		chunkAnalysisList.add(0, lastChunkAnalysis);
-		Map<Integer, ChunkAnalysis> peakMap = detectPeaks(chunkAnalysisList, avgPower + 2);
-		peakNum = peakMap.size();
-		if (peakNum > 0) {
-			firstBeatIndex = peakMap.keySet().stream().min(Integer::compare).get();
-			firstBeatTime = peakMap.get(firstBeatIndex).getStartTime();
-		}
-
-		if (peakNum > 1) {
-			LOGGER.debug("根据声压检测到[{}]个断点", peakNum);
+		if(energyRrhythmMap.size() == 0 && frequencyRhythmMap.size() == 0) {
+			LOGGER.debug("节奏错误原因:没有找到节奏点");
 			return 0;
 		}
-
-		// 判断进入时间点
-		if (firstBeatIndex * 100 / (chunkAnalysisList.size() - 1) > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration())) {
-			LOGGER.debug("节奏(声压)错误原因:进入时间点[{}]太晚", firstBeatTime);
-			return 0;
+		*/
+		
+		if(dynamicOffset == 0) {
+			dynamicOffset = firstBeatTime - firstChunkAnalysis.getStartTime();
 		}
-
+		
 		return 99;
 	}
+	
 
-	private int computeTempoWithAmplitude2(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> chunkAnalysisList) {
+	private int computeTempoWithAmplitude(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> chunkAnalysisList) {
 
-		if (chunkAnalysisList == null || chunkAnalysisList.size() == 0) {
+		if (chunkAnalysisList == null || chunkAnalysisList.size() == 0 || musicXmlNote.getDontEvaluating()) {
 			return 0;
 		}
-		//计算平均振幅
-		double avgAmplitude = chunkAnalysisList.stream().collect(Collectors.averagingDouble(ChunkAnalysis::getAmplitude));
 		
-		noteAnalysis.setAmplitude(avgAmplitude);
-		noteAnalysis.setDBSPL(chunkAnalysisList.stream().collect(Collectors.averagingDouble(ChunkAnalysis::getSplDb)));
-
-		reduceNoise(chunkAnalysisList, EvaluationCriteriaEnum.AMPLITUDE);
+		//Map<Integer, Double> rhythmMap = queryRhythmsByEnergyEnvelope(musicXmlNote, noteAnalysis, chunkAnalysisList);
 		
-		if (musicXmlNote.getFrequency() == -1) {// 休止符
-
-			LOGGER.debug("--Amplitude:{}  Denominator:{}", chunkAnalysisList.stream().map(t -> t.getAmplitude()).collect(Collectors.toList()),
-					musicXmlNote.getDenominator());
-			return chunkAnalysisList.stream().filter(t -> t.getAmplitude() > 0).count() <= 0 ? 3 : 0;
+		Map<Integer, Double> rhythmMap = queryRhythmsByAmplitude(musicXmlNote, noteAnalysis, chunkAnalysisList);
+		
+		if(rhythmMap.size() != 1) {
+			ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+			LOGGER.debug("根据能量包络检测到[{}]个断点,分别在[{}]", rhythmMap.size(), rhythmMap.values().stream().map(value -> value + firstChunkAnalysis.getStartTime()).map(Object::toString).collect(Collectors.joining(",")));
+			
+			if(rhythmMap.size() > 1) {
+				noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+			}else {
+				noteAnalysis.setMusicalErrorType(NoteErrorType.NOT_PLAY);
+			}
+			return 0;
 		}
-
+		
 		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
-
-		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream()
-				.filter(t -> Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(firstChunkAnalysis.getStartTime()))
-				.reduce((first, second) -> second);
-
-		ChunkAnalysis lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, -40, 0, 0);
 		
-		if (chunkAnalysisOptional.isPresent()) {
-			lastChunkAnalysis = chunkAnalysisOptional.get();
-		}
-
-		chunkAnalysisList.add(0, lastChunkAnalysis);
-
-		// 检测是否有多个波峰
-		boolean tempo = false;
-		int peakNum = 0;
-		int firstBeatIndex = -1;
+		// 判断进入时间点
 		double firstBeatTime = 0;
-
-		/**
-		boolean isContinue = false;
-		int firstPeakIndex = -1;
-		int firstPeakValue = 0;
-		// int range = hardLevel.getAmplitudeThreshold();
-		int range = 5;
-
-		if (StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.DECIBELS.getCode())) {
-			range = 50;
+		for(Entry<Integer, Double> entry : rhythmMap.entrySet()) {
+			firstBeatTime = entry.getValue();
 		}
-
-		for (int i = 1; i < chunkAmplitudeList.size(); i++) {
-			if (chunkAmplitudeList.get(i - 1) + range >= chunkAmplitudeList.get(i)) {
-				isContinue = false;
-				continue;
-			}
-
-			if (isContinue == false && chunkAmplitudeList.get(i - 1) + range < chunkAmplitudeList.get(i)) {
-				isContinue = true;
-				peakSize++;
-
-				if (firstPeakIndex == -1) {
-					firstPeakIndex = i;
-					firstPeakValue = chunkAmplitudeList.get(i);
-				}
-			}
-		}*/
 		
-		//获取上一个音符的振幅
-		Optional<NoteAnalysis> preNoteAnalysisOptinal = doneNoteAnalysisList.stream().filter(t -> t.getIndex() == musicXmlNote.getMusicalNotesIndex() - 1).findFirst();
-		if(preNoteAnalysisOptinal.isPresent()) {
-			avgAmplitude = preNoteAnalysisOptinal.get().getDBSPL();
-		}
+		firstBeatTime = firstChunkAnalysis.getStartTime() + firstBeatTime;
 		
-		Map<Integer, ChunkAnalysis> peakMap = detectPeaks(chunkAnalysisList, avgAmplitude);
-		peakNum = peakMap.size();
-		if (peakNum > 0) {
-			firstBeatIndex = peakMap.keySet().stream().min(Integer::compare).get();
-			firstBeatTime = peakMap.get(firstBeatIndex).getStartTime();
-		}
-
-		if (peakNum == 0) {
-			tempo = lastChunkAnalysis.isPeak();
-		} else if (peakNum == 1) {
-			tempo = true;
-		} else {
-			tempo = false;
-			LOGGER.debug("有多个波峰");
-		}
-
-		if (tempo) {
-			// 判断进入时间点
-			if (firstBeatIndex * 100 / (chunkAnalysisList.size() - 1) > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(),
-					musicXmlNote.getDuration())) {
-				LOGGER.debug("节奏错误原因:进入时间点[{}]太晚", firstBeatTime);
-				tempo = false;
-			}
+		if ((firstBeatTime - firstChunkAnalysis.getStartTime()) * 100 / musicXmlNote.getDuration() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(),
+				musicXmlNote.getDuration()) * 2) {
+			LOGGER.debug("节奏错误原因:进入时间点[{}]太晚", firstBeatTime);
+			
+			noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+			return 0;
 		}
 		
-
-		return tempo == false ? 0 : 3;
+		LOGGER.debug("找到节奏点StartTime:{}", firstBeatTime);
+		
+		noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+		
+		dynamicOffset = firstBeatTime - firstChunkAnalysis.getStartTime();
+		
+		return 3;
 	}
 
 	private double queryFirstNoteStartTime(List<ChunkAnalysis> chunkAnalysisList, MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis) {
@@ -903,6 +798,10 @@ public class UserChannelContext {
 			LOGGER.debug("找不到数据,StartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
 			return musicXmlNote.getTimeStamp() + dynamicOffset;
 		}
+		
+		if (!StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
+			return Math.max(chunkAnalysisList.get(0).getStartTime(), 0);
+		}
 
 		double onsetStartTime = 0;
 		double preNoteAvgPower = 0;
@@ -933,7 +832,7 @@ public class UserChannelContext {
 		
 		if (StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
 			//有音高,声压才有效
-			double preNoteAvgFrequency = -1;
+			float preNoteAvgFrequency = -1;
 			
 			if(preNoteAnalysisOptinal.isPresent()) {
 				preNoteAvgPower = preNoteAnalysisOptinal.get().getDBSPL();
@@ -977,7 +876,7 @@ public class UserChannelContext {
 				}
 			}
 			
-			peakMap = detectPeaks(chunkAnalysisList, preNoteAvgPower + 2);
+			peakMap = detectPeaks(chunkAnalysisList, preNoteAvgPower + 1);
 			
 		} else {
 
@@ -1005,6 +904,7 @@ public class UserChannelContext {
 		// return chunkAnalysisList.get(chunkAnalysisList.size() - 1).getEndTime();
 		return Math.max(musicXmlNote.getTimeStamp() + dynamicOffset, onsetStartTime);
 	}
+	
 
 	
 	private void reduceNoise(List<ChunkAnalysis> chunkAnalysisList, EvaluationCriteriaEnum criteria) {
@@ -1035,17 +935,20 @@ public class UserChannelContext {
 
 	}
 
-	private boolean isSamePitch(double frequency1, double frequency2) {
-		if (frequency1 == frequency2) {
+	private boolean isSamePitch(float basicFrequency, float frequency) {
+		if (new BigDecimal(basicFrequency + "").equals(new BigDecimal(frequency + ""))) {
 			return true;
 		}
-		return (frequency1 != -1 || frequency2 != -1) && Math.abs(YINPitchDetector.getDeviationCent(frequency1, frequency2)) < 50;
+		
+		frequency = handleHarmonic(basicFrequency, frequency);
+		
+		return (basicFrequency != -1 && frequency != -1) && Math.abs(YINPitchDetector.getDeviationCent(basicFrequency, frequency)) < 50;
 	}
 
 	private Map<Integer, ChunkAnalysis> detectPeaks(List<ChunkAnalysis> signalList, double threshold) {
 		Map<Integer, ChunkAnalysis> peaks = new HashMap<Integer, ChunkAnalysis>();
 		
-		movingAverageSmooth(signalList, 10);
+		LOGGER.debug("平均声压值:[{}]", threshold);
 		
 		/**
 		int waveCrestNums = 0,waveTroughNums = 0;
@@ -1074,30 +977,18 @@ public class UserChannelContext {
 		
 		int continueSignalThreshold = 2;
 		int continueNums = 0;
-		int disconnectNums = 0;
-		boolean isContinue = false;
 
 		if (StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
-			for (int i = 0; i < signalList.size(); i++) {
-				//如果连续2个超过平均值
-				if(Math.abs(threshold - signalList.get(i).getSplDb()) > 0) {
+			for (int i = 1; i < signalList.size(); i++) {
+				//如果连续2个下降超过平均值
+				if(threshold - signalList.get(i).getSplDb() < 0 && signalList.get(i).getSplDb() > signalList.get(i - 1).getSplDb()) {
 					++continueNums;
-					if(continueNums >= continueSignalThreshold) {
-						disconnectNums = 0;
-						if(continueNums != i+1) {
-							if(isContinue == false) {
-								peaks.put(i, signalList.get(i));
-								LOGGER.debug("【过程】范围内查询到声压信号,StartTime:{} CurrentSplDb:{} Threshold:{}" , signalList.get(i).getStartTime(), signalList.get(i).getSplDb(), threshold);
-								isContinue = true;
-							}
-						}
-					}
 				} else {
-					disconnectNums++;
-					if(disconnectNums >= continueSignalThreshold) {
-						continueNums = 0;
-						isContinue = false;
+					if(continueNums >= continueSignalThreshold) {
+						peaks.put(i, signalList.get(i));
+						LOGGER.debug("【过程】范围内查询到声压信号,StartTime:{} CurrentSplDb:{} Threshold:{}" , signalList.get(i).getStartTime(), signalList.get(i - 1).getSplDb(), threshold);
 					}
+					continueNums = 0;
 				}
 			}
 		} else {
@@ -1119,83 +1010,224 @@ public class UserChannelContext {
 		return peaks;
 	}
 	
-	private List<ChunkAnalysis> extract(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis) {
+	
+	private Map<Integer,Double> queryRhythmsByEnergyEnvelope(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> datas){
 		
-		LOGGER.debug("---------------------Extract Data----------------------");
-
-		double floatingRange = musicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration()) / 100;
+		LOGGER.debug("------------利用能量包络寻找节奏点[{} - {}]------------", noteAnalysis.getStartTime(), noteAnalysis.getEndTime());
+		
+		
+		int start = (int) (noteAnalysis.getStartTime() / ((1 * 1000) / audioFormat.getSampleRate()));
+		int end = (int) (noteAnalysis.getEndTime() * audioFormat.getSampleRate() / 1 / 1000);
+		
+		//取信号
+		float[] fSamples = new float[end - start];
+		int j = 0;
+		for(int i = start; i < end; i++) {
+			fSamples[j] = floatSamples.get(i);
+			j++;
+		}
+		
+		float[] energyEnvelope = Signals.energyEnvelope(fSamples, frameSize);
+		
+		/**
+		float[] energyEnvelope = new float[datas.size() * getBufferSize() / frameSize];
+		
+		for(int i = 0; i < datas.size(); i++) {
+			for(int j = 0; j < datas.get(i).getEnergyEnvelop().length; j++) {
+				energyEnvelope[i * datas.get(i).getEnergyEnvelop().length + j] = datas.get(i).getEnergyEnvelop()[j];
+			}
+		}
+		*/
+		PercussionRhythmDetector percussionRhythmDetector = new PercussionRhythmDetector(energyEnvelope, audioFormat.getSampleRate(), frameSize);
+		
+		Map<Integer, Double> rhythMap = percussionRhythmDetector.detect();
+		
+		return rhythMap;
+	}
+	
+	private Map<Integer,Double> queryRhythmsByAmplitude(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> datas){
+		
+		Map<Integer, Double> rhythMap = new HashMap<Integer, Double>();
 
-		double endTime = musicXmlNote.getTimeStamp() + dynamicOffset + floatingRange;
-		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		reduceNoise(datas, EvaluationCriteriaEnum.AMPLITUDE);
 		
-		LOGGER.debug("在范围内寻找起始点——floatingRange:{}  modified [ {} - {} ]", floatingRange, startTime, endTime);
+		int amplitudeThreshold = 2;
+		int beatContinueNum = 0;
+		int intervalTime = 150;
+		ChunkAnalysis chunkAnalysis = null;
+		double rhythmTime = -1;
+		int peakIndex = 0;
+		int continueNumThreshold = 0;
+		
+		for (int i = 0; i < datas.size(); i++) {
+			chunkAnalysis = datas.get(i);
 
-		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream()
-				.filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime)
-						&& Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime))
-				.collect(Collectors.toList());
+			if (chunkAnalysis.getAmplitude() >= amplitudeThreshold) {
+				beatContinueNum++;
+				if (beatContinueNum == 1) {
+					rhythmTime = i * bufferSize * 1000 / audioFormat.getSampleRate();
+				}
+				
+				if (beatContinueNum > continueNumThreshold) {
+					if (rhythMap.size() == 0 || rhythmTime - rhythMap.get(peakIndex) > intervalTime) {
+						peakIndex++;
+						rhythMap.put(peakIndex, rhythmTime);
+					}
+				}
 
+			} else {
+				beatContinueNum = 0;
+			}
+		}
+		
+		return rhythMap;
+	}
+	
+	
+	private Map<Integer, Double> queryRhythmsByFrequency(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> datas){
+		LOGGER.debug("------------利用频率寻找节奏点------------");
 		/**
-		 * 过程中找到了起始点都可以作为节奏点,后面再出现波峰波谷就表明节奏错误;否则,后面需要出现一个波峰或波谷
-		 * 是否要加上前一个音符的最后一个信号???
-		 */
-		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote, noteAnalysis);
-		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
+		//获取上一个音符
+		Optional<NoteAnalysis> preNoteAnalysisOptinal = doneNoteAnalysisList.stream().filter(t -> t.getIndex() == musicXmlNote.getMusicalNotesIndex() - 1).findFirst();
+		
+		double preNoteAvgFrequency = -1;
+		if(preNoteAnalysisOptinal.isPresent()) {
+			preNoteAvgFrequency = preNoteAnalysisOptinal.get().getPlayFrequency();
+		}
+		
+		LOGGER.debug("上一个音符的平均音高[{}]", preNoteAvgFrequency);
+		*/
 		
-		LOGGER.debug("在范围内寻找到起始点,调整后的信号时值范围[ {} - {} ]", correctedStartTime, correctedEndTime);
+		//获取上一个信号
+		ChunkAnalysis firstChunkAnalysis = datas.get(0);
+		
+		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream()
+				.filter(t -> Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(firstChunkAnalysis.getStartTime()))
+				.reduce((first, second) -> second);
 
-		if (correctedStartTime != startTime) {
-			if (startTime < 0) {
-				dynamicOffset = correctedStartTime;
-			} else {
-				dynamicOffset = correctedStartTime - startTime;
+		ChunkAnalysis lastChunkAnalysis = null;
+		if (chunkAnalysisOptional.isPresent()) {
+			lastChunkAnalysis = chunkAnalysisOptional.get();
+		}
+
+		if (lastChunkAnalysis == null) {
+			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, new float[0], 0);
+		}
+		
+		float preNoteAvgFrequency = lastChunkAnalysis.getFrequency();
+		
+		ChunkAnalysis chunkAnalysis = null;
+		Map<Integer, Double> rhythMap = new HashMap<Integer, Double>();
+
+		int intervalTime = 150;
+		double lastestRhythmTime = Math.negateExact(intervalTime);
+
+		int silenceContinueNum = 0;
+		int beatContinueNum = 0;
+		double rhythmTime = -1;
+		int peakIndex = 0;
+		int continueNumThreshold = 0;
+		
+		for(int i = 0; i < datas.size(); i++) {
+			
+			chunkAnalysis = datas.get(i);
+			
+			//若不是休止符
+			if(musicXmlNote.getFrequency() >= MIN_FREQUECY) {
+				if(chunkAnalysis.getFrequency() < MIN_FREQUECY) {
+					
+					silenceContinueNum++;
+					beatContinueNum = 0;
+					
+					if(silenceContinueNum > continueNumThreshold) {
+						preNoteAvgFrequency = chunkAnalysis.getFrequency();
+					}
+				}else {
+					
+					silenceContinueNum = 0;
+					
+					if(preNoteAvgFrequency < MIN_FREQUECY || !isSamePitch(preNoteAvgFrequency, chunkAnalysis.getFrequency())) {
+
+						if (beatContinueNum == 0) {
+							rhythmTime = chunkAnalysis.getStartTime();
+						}
+						
+						beatContinueNum++;
+						if (beatContinueNum > continueNumThreshold) {
+							if (chunkAnalysis.getStartTime() - lastestRhythmTime > intervalTime) {
+
+								lastestRhythmTime = rhythmTime;
+								peakIndex++;
+								
+								if(peakIndex == 1 || lastestRhythmTime - rhythMap.get(peakIndex - 1) > intervalTime) {
+									rhythMap.put(peakIndex, lastestRhythmTime);
+									LOGGER.debug("范围内查询到音高信号,preNoteFrequency:{} peakIndex:{} EndTime:{}", preNoteAvgFrequency, peakIndex, lastestRhythmTime);
+								}
+
+								preNoteAvgFrequency = chunkAnalysis.getFrequency();
+								beatContinueNum = 0;
+							}
+						}
+					}else {
+						beatContinueNum = 0;
+					}
+					
+				}
 			}
+			
+			
 		}
+		
+		return rhythMap;
+	}
+	
+	private List<ChunkAnalysis> extract(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis) {
+		
+		LOGGER.debug("---------------------Extract Data----------------------");
+		
+		int range = hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration());
+		
+		double floatingRange = musicXmlNote.getDuration() * range / 100;
 
-		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime)
-				&& Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
+		double startTime = musicXmlNote.getTimeStamp() + dynamicOffset - floatingRange;
+		
+		double endTime = startTime + musicXmlNote.getDuration();
+		
+		LOGGER.debug("当前音符有效信号时值[{}]偏移[{}]后的范围[ {} - {} ]", musicXmlNote.getDuration(), floatingRange, startTime, endTime);
+		
+		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream()
+				.filter(t -> Double.doubleToLongBits(t.getEndTime()) >= Double.doubleToLongBits(startTime)
+						&& Double.doubleToLongBits(t.getStartTime()) <= Double.doubleToLongBits(endTime))
+				.collect(Collectors.toList());
 
 		// 根据完整度取部分有效信号
-		int elementSize = chunkAnalysisList.size() * (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration())) / 100;
+		int elementSize = chunkAnalysisList.size() * (100 - range) / 100;
 		
 		List<ChunkAnalysis> datas = chunkAnalysisList.subList(0, elementSize);
 		
-		LOGGER.debug("根据用户的评测难度,当前音符有效的信号时值范围[ {} - {} ]", chunkAnalysisList.get(0).getStartTime(), chunkAnalysisList.get(elementSize - 1).getEndTime());
+		noteAnalysis.setStartTime(datas.get(0).getStartTime());
+		noteAnalysis.setEndTime(datas.get(elementSize - 1).getEndTime());
 		
+		LOGGER.debug("根据用户的评测难度[{}],当前音符有效的信号时值的取值范围[ {} - {} ]", range, datas.get(0).getStartTime(), datas.get(elementSize - 1).getEndTime());
+
 		return datas;
 	}
 	
-    public void gaussianSmooth(List<ChunkAnalysis> signalList, int windowSize) {
-        double sigma = windowSize / 3.0;
-        Gaussian gaussian = new Gaussian(1.0, windowSize / 2.0, sigma);
-        double[] kernel = new double[windowSize];
-        for (int i = 0; i < windowSize; i++) {
-            kernel[i] = gaussian.value(i);
-        }
-
-        for (int i = 0; i < signalList.size(); i++) {
-            double sum = 0;
-            double weightSum = 0;
-            for (int j = 0; j < windowSize; j++) {
-                int index = i + j - windowSize / 2;
-                if (index >= 0 && index < signalList.size()) {
-                    sum += signalList.get(index).getSplDb() * kernel[j];
-                    weightSum += kernel[j];
-                }
-            }
-            signalList.get(i).setSplDb(sum / weightSum);
-        }
-    }
-    
-    public void movingAverageSmooth(List<ChunkAnalysis> signalList, int windowSize) {
-        for (int i = 0; i < signalList.size(); i++) {
-            int start = Math.max(0, i - windowSize / 2);
-            int end = Math.min(signalList.size(), i + windowSize / 2);
-            double sum = 0;
-            for (int j = start; j < end; j++) {
-                sum += signalList.get(j).getSplDb();
-            }
-            signalList.get(i).setSplDb(sum / (end - start));
-        }
-    }
+	private float handleHarmonic(float basicFrequency, float frequency) {
+		
+		if (basicFrequency > frequency) {
+			return frequency;
+		}
+		
+		float threshold = 0.02f;
+		// 处理泛音
+		int roundedRatio = Math.round(frequency / basicFrequency);
+		float ratio = frequency / basicFrequency;
+		if (roundedRatio >= 2 && Math.abs(ratio - roundedRatio) <= threshold) {
+			return frequency / roundedRatio;
+		}
+		
+		return frequency;
+	}
+	
 }

+ 28 - 71
audio-analysis/src/main/java/com/yonge/netty/server/service/AudioCompareHandler.java

@@ -1,5 +1,27 @@
 package com.yonge.netty.server.service;
 
+import java.io.File;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.text.SimpleDateFormat;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import javax.sound.sampled.AudioInputStream;
+import javax.sound.sampled.AudioSystem;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
 import com.alibaba.fastjson.JSON;
 import com.alibaba.fastjson.JSONObject;
 import com.alibaba.fastjson.JSONPath;
@@ -10,11 +32,7 @@ import com.ym.mec.biz.dal.enums.DeviceTypeEnum;
 import com.ym.mec.biz.dal.enums.FeatureType;
 import com.ym.mec.biz.dal.enums.HeardLevelEnum;
 import com.ym.mec.biz.service.SysMusicCompareRecordService;
-import com.ym.mec.thirdparty.storage.StoragePluginContext;
-import com.ym.mec.thirdparty.storage.provider.KS3StoragePlugin;
-import com.ym.mec.util.date.DateUtil;
 import com.ym.mec.util.upload.UploadUtil;
-import com.yonge.audio.analysis.AudioFloatConverter;
 import com.yonge.audio.utils.ArrayUtil;
 import com.yonge.netty.dto.SectionAnalysis;
 import com.yonge.netty.dto.UserChannelContext;
@@ -24,20 +42,8 @@ import com.yonge.netty.entity.MusicXmlNote;
 import com.yonge.netty.server.handler.NettyChannelManager;
 import com.yonge.netty.server.handler.message.MessageHandler;
 import com.yonge.netty.server.processor.WaveformWriter;
-import io.netty.channel.Channel;
-import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
 
-import javax.sound.sampled.AudioFormat;
-import java.io.File;
-import java.math.BigDecimal;
-import java.text.SimpleDateFormat;
-import java.util.*;
-import java.util.Map.Entry;
-import java.util.stream.Collectors;
+import io.netty.channel.Channel;
 
 @Component
 public class AudioCompareHandler implements MessageHandler {
@@ -54,34 +60,6 @@ public class AudioCompareHandler implements MessageHandler {
 	
 	@Autowired
 	private SysMusicCompareRecordService sysMusicCompareRecordService;
-	
-	/**
-	 * @describe 采样率
-	 */
-	private float sampleRate = 44100;
-
-	/**
-	 * 每个采样大小(Bit)
-	 */
-	private int bitsPerSample = 16;
-
-	/**
-	 * 通道数
-	 */
-	private int channels = 1;
-
-	/**
-	 * @describe 采样大小
-	 */
-	private int bufferSize = 1024 * 1;
-
-	private boolean signed = true;
-
-	private boolean bigEndian = false;
-
-	private AudioFormat audioFormat = new AudioFormat(sampleRate, bitsPerSample, channels, signed, bigEndian);
-
-	private AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
 
 	private String tmpFileDir = "/mdata/soundCompare/";
 
@@ -115,7 +93,7 @@ public class AudioCompareHandler implements MessageHandler {
 			channelContext.setHandlerSwitch(false);
 
 			channelContext.getSongMusicXmlMap().put(musicXmlBasicInfo.getExamSongId(), musicXmlBasicInfo);
-			channelContext.init(musicXmlBasicInfo, audioFormat.getSampleRate(), bufferSize / 2);
+			channelContext.init(musicXmlBasicInfo);
 			channelContext.setUser(user);
 			
 			userChannelContextService.register(channel, channelContext);
@@ -283,12 +261,6 @@ public class AudioCompareHandler implements MessageHandler {
 		}
 		waveFileProcessor.process(datas);
 		
-		/*datas = channelContext.skipMetronome(datas);
-
-		if (datas.length == 0) {
-			return false;
-		}*/
-
 		channelContext.setChannelBufferBytes(ArrayUtil.mergeByte(channelContext.getChannelBufferBytes(), datas));
 
 		int totalLength = channelContext.getChannelBufferBytes().length;
@@ -297,6 +269,7 @@ public class AudioCompareHandler implements MessageHandler {
 			return false;
 		}
 		
+		/**
 		if (channelContext.getOffsetMS() + channelContext.getBeatDuration() > 0) {
 			int beatByteLength = (int) (audioFormat.getSampleRate() * audioFormat.getSampleSizeInBits() / 8 * (channelContext.getOffsetMS() + channelContext.getBeatDuration()) / 1000);
 			
@@ -317,7 +290,9 @@ public class AudioCompareHandler implements MessageHandler {
 		}
 		
 		totalLength = channelContext.getChannelBufferBytes().length;
+		*/
 		
+		int bufferSize = channelContext.getBufferSize() * 2;
 
 		while (totalLength >= bufferSize) {
 			byte[] bufferData = ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), 0, bufferSize - 1);
@@ -328,13 +303,7 @@ public class AudioCompareHandler implements MessageHandler {
 				channelContext.setChannelBufferBytes(new byte[0]);
 			}
 
-			float[] sampleFloats = new float[bufferSize / 2];
-
-			converter.toFloatArray(bufferData, sampleFloats);
-			
-			//hanning(sampleFloats);
-
-			channelContext.handle(sampleFloats, audioFormat);
+			channelContext.handle(bufferData);
 
 			MusicXmlBasicInfo musicXmlBasicInfo = channelContext.getMusicXmlBasicInfo(null);
 			int sectionIndex = channelContext.getEvaluatingSectionIndex().get();
@@ -359,16 +328,4 @@ public class AudioCompareHandler implements MessageHandler {
 		return true;
 	}
 
-	
-	public static void hamming(float[] samples) {
-		for (int i = 0; i < samples.length; i++) {
-			samples[i] *= (0.54f - 0.46f * Math.cos((2 * Math.PI) * i / (samples.length - 1)));
-		}
-	}
-	
-	public static void hanning(float[] samples) {
-		for (int i = 0; i < samples.length; i++) {
-			samples[i] *= 0.5 * (1 + Math.cos((2 * Math.PI) * i / (samples.length - 1)));
-		}
-	}
 }

+ 2 - 13
audio-analysis/src/main/resources/logback-spring.xml

@@ -27,18 +27,7 @@
 		</encoder>
 	</appender>
 
-	<appender name="plumelog" class="com.plumelog.logback.appender.RedisAppender">
-		<appName>mec-audio</appName>
-		<redisHost>10.206.0.13:26379,10.206.0.16:26379,10.206.0.8:26379</redisHost>
-		<redisAuth>dyym</redisAuth>
-		<model>sentinel</model>
-		<masterName>mymaster</masterName>
-		<env>prod</env>
-		<redisDb>10</redisDb>
-		<runModel>2</runModel>
-	</appender>
-
-	<logger name="com.yonge" level="info" />
+	<logger name="com.yonge" level="debug" />
 
 	<!--开发环境:打印控制台 -->
 	<springProfile name="dev">
@@ -48,7 +37,7 @@
 		</root>
 	</springProfile>
 	
-	<springProfile name="test">
+	<springProfile name="local">
 		<root level="info">
 			<appender-ref ref="stdout" />
 			<appender-ref ref="file" />

+ 2 - 2
audio-analysis/src/test/java/com/yonge/netty/client/NettyClient.java

@@ -89,11 +89,11 @@ public class NettyClient {
 			String step2 = "{\"header\":{\"commond\":\"recordStart\",\"type\":\"SOUND_COMPARE\",\"status\":200}}";
 			channel.writeAndFlush(new TextWebSocketFrame(step2));
 			
-			String step3 = "{\"body\":{\"micDelay\":-1,\"offsetTime\":2274},\"uuid\":\"1662715309875118846\",\"header\":{\"commond\":\"audioPlayStart\",\"type\":\"SOUND_COMPARE\"}}";
+			String step3 = "{\"body\":{\"micDelay\":122,\"offsetTime\":431},\"uuid\":\"1662715309875118846\",\"header\":{\"commond\":\"audioPlayStart\",\"type\":\"SOUND_COMPARE\"}}";
 			channel.writeAndFlush(new TextWebSocketFrame(step3));
 			//51. "micDelay\":122,\"offsetTime\":431
 			//step4 发送wav
-			String fileName = "/2265947_2406111127832.wav";
+			String fileName = "/100254_2406060919766_打击乐51.wav";
 			AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(FileUtils.toFile(WebSocketClientHandler.class.getResource(fileName)));
 			
 			AudioFormat baseFormat = audioInputStream.getFormat();

File diff suppressed because it is too large
+ 0 - 0
audio-analysis/src/test/resoures/9550.json


Some files were not shown because too many files changed in this diff