yonge 4 months ago
parent
commit
4a6caec5ac

+ 4 - 4
audio-analysis/src/main/java/com/yonge/netty/dto/ChunkAnalysis.java

@@ -16,11 +16,11 @@ public class ChunkAnalysis {
 
 	private float[] energyEnvelop;
 	
-	private int amplitude;
+	private float amplitude;
 	
 	private boolean isPeak;
 
-	public ChunkAnalysis(double startTime, double endTime, float frequency, double splDb, float[] energyEnvelop, int amplitude) {
+	public ChunkAnalysis(double startTime, double endTime, float frequency, double splDb, float[] energyEnvelop, float amplitude) {
 		this.startTime = startTime;
 		this.endTime = endTime;
 		this.frequency = frequency;
@@ -78,11 +78,11 @@ public class ChunkAnalysis {
 		this.energyEnvelop = energyEnvelop;
 	}
 
-	public int getAmplitude() {
+	public float getAmplitude() {
 		return amplitude;
 	}
 
-	public void setAmplitude(int amplitude) {
+	public void setAmplitude(float amplitude) {
 		this.amplitude = amplitude;
 	}
 

+ 1 - 1
audio-analysis/src/main/java/com/yonge/netty/dto/HardLevelEnum.java

@@ -9,7 +9,7 @@ public enum HardLevelEnum implements BaseEnum<String, HardLevelEnum> {
 	 * Completion range, unplayed range
 	 */
 	//BEGINNER("入门级", 3, 5, 5, 5, 10, 10, 13, 15, 60, 10), 
-	BEGINNER("入门级", 3, 10, 10, 15, 15, 22, 22, 75, 10), 
+	BEGINNER("入门级", 3, 10, 10, 15, 22, 30, 35, 75, 10), 
 	/**
 	 * Advanced level, amplitude threshold <br>
 	 * Effective tempo range (whole note), effective tempo range (half note), effective tempo range (quarter note), effective tempo range (8th note), effective tempo range (16th note), effective tempo range (32nd note)<br>

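Reading the changed tuple against the constructor parameters documented in the Javadoc above, the BEGINNER change widens the tempo tolerance for short notes. A minimal annotated reading (the variable names are illustrative assumptions, not the enum's real field names):

	// How the new BEGINNER arguments appear to map, following the Javadoc order:
	int amplitudeThreshold = 3;
	int[] tempoEffectiveRange = {10, 10, 15, 22, 30, 35}; // whole, half, quarter, 8th, 16th, 32nd note
	// (previously {10, 10, 15, 15, 22, 22} — the 8th/16th/32nd ranges grew to 22/30/35)
	int integrityRange = 75; // completion range
	int notPlayRange = 10;   // unplayed range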
+ 10 - 0
audio-analysis/src/main/java/com/yonge/netty/dto/NoteAnalysis.java

@@ -62,6 +62,8 @@ public class NoteAnalysis {
 	
 	private int measureRenderIndex;
 	
+	private double rhythmStartTime;
+	
 	public NoteAnalysis(int measureRenderIndex, int index, int sectionIndex, float frequency, double durationTime) {
 		this.measureRenderIndex = measureRenderIndex;
 		this.durationTime = durationTime;
@@ -237,4 +239,12 @@ public class NoteAnalysis {
 		return index;
 	}
 
+	public double getRhythmStartTime() {
+		return rhythmStartTime;
+	}
+
+	public void setRhythmStartTime(double rhythmStartTime) {
+		this.rhythmStartTime = rhythmStartTime;
+	}
+
 }

+ 230 - 72
audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext.java

@@ -14,7 +14,6 @@ import java.util.stream.Collectors;
 
 import javax.sound.sampled.AudioFormat;
 
-import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -75,7 +74,7 @@ public class UserChannelContext {
 
 	private float beatDuration;
 
-	private boolean delayProcessed;
+	private boolean isProcessedDynamicOffset;
 
 	// Mapping between songs and musicxml
 	private ConcurrentHashMap<Integer, MusicXmlBasicInfo> songMusicXmlMap = new ConcurrentHashMap<Integer, MusicXmlBasicInfo>();
@@ -232,7 +231,7 @@ public class UserChannelContext {
 		floatSamples = new ArrayList<Float>();
 		recordId = null;
 		playTime = 0;
-		delayProcessed = false;
+		isProcessedDynamicOffset = false;
 		offsetMS = 0;
 		dynamicOffset = 0;
 		handlerSwitch = false;
@@ -270,7 +269,7 @@ public class UserChannelContext {
 
 		return null;
 	}
-
+	
 	public int getTotalMusicNoteIndex(Integer songId) {
 		if (songMusicXmlMap.size() == 0) {
 			return -1;
@@ -351,6 +350,10 @@ public class UserChannelContext {
 
 	public void handle(byte[] datas) {
 		
+		//applyLowPassFilter(datas, 3000, audioFormat.getSampleRate());
+		//applyMovingAverageFilter(datas, 5);
+		//datas = applyNoiseGate(datas, 1000);
+		
 		if(converter == null) {
 			converter = AudioFloatConverter.getConverter(audioFormat);
 		}
@@ -377,7 +380,8 @@ public class UserChannelContext {
 
 		double splDb = 0;
 		float[] energyEnvelop = Signals.energyEnvelope(samples, frameSize);
-		int amplitude = (int) Signals.norm(samples);
+		float amplitude = Signals.norm(samples);
+		float power = Signals.power(samples);
 		
 		int decibels = 0;
 		
@@ -417,20 +421,17 @@ public class UserChannelContext {
 				basicFrequency = totalChunkAnalysisList.get(totalChunkAnalysisList.size() - 1).getFrequency();
 			}
 			chunkAnalysis.setFrequency(handleHarmonic(basicFrequency, playFrequency));
+			chunkAnalysis.setFrequency(handleHarmonic(musicXmlNote.getFrequency(), playFrequency));
 			
 			totalChunkAnalysisList.add(chunkAnalysis);
 
-			LOGGER.debug("user:{}  delayProcessed:{}  dynamicOffset:{}  Frequency:{}  splDb:{}  amplitude:{} decibels:{} endtime:{}", user,
-					delayProcessed, dynamicOffset, chunkAnalysis.getFrequency(), splDb, amplitude, decibels, playTime);
+			LOGGER.debug("user:{}  isProcessedDynamicOffset:{}  dynamicOffset:{}  Frequency:{}  splDb:{}  amplitude:{} power:{} decibels:{} endtime:{}", user,
+					isProcessedDynamicOffset, dynamicOffset, chunkAnalysis.getFrequency(), splDb, amplitude, power, decibels, playTime);
 
 			if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + getOffsetMS() + beatDuration)) {
 
 				musicXmlNote.setTimeStamp(musicXmlNote.getTimeStamp() + getOffsetMS() + beatDuration);
 				
-				if(musicXmlNote.getFrequency() <= 0) {
-					musicXmlNote.setDontEvaluating(true);
-				}
-
 				if (musicXmlNote.getDontEvaluating()) {
 					noteAnalysis.setIgnore(true);
 				}
@@ -439,16 +440,16 @@ public class UserChannelContext {
 				
 				// Determine the rhythm (if the pitch is uninterrupted for the note's duration, the rhythm is correct)
 				if (!StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())) {
-					noteAnalysis.setPlayFrequency(-1);
 					noteAnalysis.setTempoStatus(computeTempoWithAmplitude(musicXmlNote, noteAnalysis, nodeChunkAnalysisList));
+					noteAnalysis.setPlayFrequency(-1);
 				} else {
-					noteAnalysis.setPlayFrequency(computeFrequency(musicXmlNote, nodeChunkAnalysisList));
 					noteAnalysis.setTempoStatus(computeTempoWithFrequency(musicXmlNote, noteAnalysis, nodeChunkAnalysisList));
+					noteAnalysis.setPlayFrequency(computeFrequency(musicXmlNote, noteAnalysis, nodeChunkAnalysisList));
 				}
 
 				evaluateForNote(musicXmlNote, noteAnalysis, nodeChunkAnalysisList);// Score the current note
 
-				LOGGER.debug("当前音符下标[{}] 预计频率:{} 实际频率:{} 节奏:{}", noteAnalysis.getMusicalNotesIndex(), musicXmlNote.getFrequency(),
+				LOGGER.debug("当前小节[{}] 音符下标[{}] 预计频率:{} 实际频率:{} 节奏:{}", musicXmlNote.getMeasureIndex(), noteAnalysis.getMusicalNotesIndex(), musicXmlNote.getFrequency(),
 						noteAnalysis.getPlayFrequency(), noteAnalysis.getTempoStatus());
 
 				doneNoteAnalysisList.add(noteAnalysis);
@@ -641,15 +642,33 @@ public class UserChannelContext {
 		}
 	}
 
-	private float computeFrequency(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList) {
+	private float computeFrequency(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> chunkAnalysisList) {
 
 		if (chunkAnalysisList == null || chunkAnalysisList.size() == 0 || musicXmlNote.getDontEvaluating()) {
 			return -1;
 		}
 
-		reduceNoise(chunkAnalysisList, EvaluationCriteriaEnum.FREQUENCY);
+		List<ChunkAnalysis> chunkList = null;
+		
+		/**
+		if(noteAnalysis.getRhythmStartTime() > 0) {
+			chunkList = chunkAnalysisList.stream().filter(t -> t.getStartTime() >= noteAnalysis.getRhythmStartTime()).collect(Collectors.toList());
+		} else {
+			chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
+		}*/
+		
+		chunkAnalysisList = totalChunkAnalysisList.stream()
+				.filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(noteAnalysis.getRhythmStartTime())
+						&& Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(noteAnalysis.getRhythmStartTime() + musicXmlNote.getDuration()))
+				.collect(Collectors.toList());
 
-		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
+		reduceNoise(chunkAnalysisList, EvaluationCriteriaEnum.FREQUENCY);
+		
+		int range = hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration());
+		
+		chunkList = chunkAnalysisList.subList(0, chunkAnalysisList.size() * (100 - range) / 100);
+		
+		LOGGER.debug("根据节奏点[{}],取当前音符有效的信号范围[ {} - {} ]计算评率", noteAnalysis.getRhythmStartTime(), chunkList.get(0).getStartTime(), chunkList.get(chunkList.size() - 1).getEndTime());
 
 		List<Float> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency())
 				.filter(t -> t.doubleValue() > MIN_FREQUECY && t.doubleValue() < MAX_FREQUECY).collect(Collectors.toList());
@@ -679,10 +698,23 @@ public class UserChannelContext {
 		
 		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
 		
+		noteAnalysis.setRhythmStartTime(musicXmlNote.getTimeStamp() + dynamicOffset);// default value
+		
 		Map<Integer, Double> frequencyRhythmMap = queryRhythmsByFrequency(musicXmlNote, noteAnalysis, chunkAnalysisList);
+
+		if(musicXmlNote.getFrequency() < MIN_FREQUECY) {
+			if(frequencyRhythmMap.size() == 0) {
+				noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				return 99;
+			} else {
+				noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				LOGGER.debug("根据能量包络检测到[{}]个断点,分别在[{}]", frequencyRhythmMap.size(), frequencyRhythmMap.values().stream().map(value -> value).map(Object::toString).collect(Collectors.joining(",")));
+				return 0;
+			}
+		}
 		
 		if(frequencyRhythmMap.size() > 1) {
-			LOGGER.debug("根据音高检测到[{}]个断点,分别在[{}]", frequencyRhythmMap.size(), frequencyRhythmMap.values().stream().map(value -> value + firstChunkAnalysis.getStartTime()).map(Object::toString).collect(Collectors.joining(",")));
+			LOGGER.debug("根据音高检测到[{}]个断点,分别在[{}]", frequencyRhythmMap.size(), frequencyRhythmMap.values().stream().map(value -> value).map(Object::toString).collect(Collectors.joining(",")));
 			return 0;
 		}
 		
@@ -694,6 +726,8 @@ public class UserChannelContext {
 				firstBeatTime = entry.getValue();
 			}
 			
+			noteAnalysis.setRhythmStartTime(firstBeatTime);
+			
 			if ((firstBeatTime - firstChunkAnalysis.getStartTime()) * 100 / musicXmlNote.getDuration() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(),
 					musicXmlNote.getDuration()) * 2) {
 				LOGGER.debug("节奏错误原因:进入时间点[{}]太晚", firstBeatTime);
@@ -735,8 +769,11 @@ public class UserChannelContext {
 		}
 		*/
 		
-		if(dynamicOffset == 0) {
-			dynamicOffset = firstBeatTime - firstChunkAnalysis.getStartTime();
+		if(isProcessedDynamicOffset == false) {
+
+			double startTime = musicXmlNote.getTimeStamp() + dynamicOffset;
+			dynamicOffset = firstBeatTime - startTime;
+			isProcessedDynamicOffset = true;
 		}
 		
 		return 99;
@@ -753,8 +790,20 @@ public class UserChannelContext {
 		
 		Map<Integer, Double> rhythmMap = queryRhythmsByAmplitude(musicXmlNote, noteAnalysis, chunkAnalysisList);
 		
+		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
+		
+		if(musicXmlNote.getFrequency() < MIN_FREQUECY) {
+			if(rhythmMap.size() == 0) {
+				noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
+				return 3;
+			} else {
+				noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
+				LOGGER.debug("根据能量包络检测到[{}]个断点,分别在[{}]", rhythmMap.size(), rhythmMap.values().stream().map(value -> value + firstChunkAnalysis.getStartTime()).map(Object::toString).collect(Collectors.joining(",")));
+				return 0;
+			}
+		}
+		
 		if(rhythmMap.size() != 1) {
-			ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
 			LOGGER.debug("根据能量包络检测到[{}]个断点,分别在[{}]", rhythmMap.size(), rhythmMap.values().stream().map(value -> value + firstChunkAnalysis.getStartTime()).map(Object::toString).collect(Collectors.joining(",")));
 			
 			if(rhythmMap.size() > 1) {
@@ -765,8 +814,6 @@ public class UserChannelContext {
 			return 0;
 		}
 		
-		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
-		
 		// Determine the entry time point
 		double firstBeatTime = 0;
 		for(Entry<Integer, Double> entry : rhythmMap.entrySet()) {
@@ -787,7 +834,12 @@ public class UserChannelContext {
 		
 		noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
 		
-		dynamicOffset = firstBeatTime - firstChunkAnalysis.getStartTime();
+		if(isProcessedDynamicOffset == false) {
+
+			double startTime = musicXmlNote.getTimeStamp() + dynamicOffset;
+			dynamicOffset = firstBeatTime - startTime;
+			isProcessedDynamicOffset = true;
+		}
 		
 		return 3;
 	}
@@ -905,6 +957,77 @@ public class UserChannelContext {
 		return Math.max(musicXmlNote.getTimeStamp() + dynamicOffset, onsetStartTime);
 	}
 	
+	private void applyLowPassFilter(byte[] buffer, int cutoffFrequency, float sampleRate) {
+        int sampleSize = 2; // 16-bit audio
+        int length = buffer.length / sampleSize;
+        double rc = 1.0 / (cutoffFrequency * 2 * Math.PI);
+        double dt = 1.0 / sampleRate;
+        double alpha = dt / (rc + dt);
+        short[] samples = new short[length];
+
+        // Convert the byte array (16-bit little-endian) to shorts
+        for (int i = 0; i < length; i++) {
+            samples[i] = (short) ((buffer[2 * i + 1] << 8) | (buffer[2 * i] & 0xff));
+        }
+
+        // Apply the first-order low-pass filter in place
+        for (int i = 1; i < length; i++) {
+            samples[i] = (short) (samples[i - 1] + alpha * (samples[i] - samples[i - 1]));
+        }
+
+        // Convert the filtered shorts back to bytes
+        for (int i = 0; i < length; i++) {
+            buffer[2 * i] = (byte) (samples[i] & 0xff);
+            buffer[2 * i + 1] = (byte) ((samples[i] >> 8) & 0xff);
+        }
+    }
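The loop above is a first-order exponential (RC) low-pass: when index i is processed, samples[i-1] already holds the filtered value, so each step computes, in the usual notation,

	y[i] = y[i-1] + \alpha \, (x[i] - y[i-1]), \qquad \alpha = \frac{\Delta t}{RC + \Delta t}, \qquad RC = \frac{1}{2 \pi f_c}

with f_c the cutoffFrequency argument and \Delta t = 1 / sampleRate.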
+	
+	private static byte[] applyNoiseGate(byte[] buffer, int threshold) {
+        byte[] filteredBuffer = new byte[buffer.length];
+        for (int i = 0; i < buffer.length; i++) {
+            if (Math.abs(buffer[i]) < threshold) {
+                filteredBuffer[i] = 0; // mute the sample
+            } else {
+                filteredBuffer[i] = buffer[i];
+            }
+        }
+        return filteredBuffer;
+    }
+	
+	// Moving-average filter
+    private void applyMovingAverageFilter(byte[] buffer, int windowSize) {
+        int sampleSize = 2; // 16-bit audio
+        int length = buffer.length / sampleSize;
+        short[] samples = new short[length];
+        
+        // Convert the byte array to shorts
+        for (int i = 0; i < length; i++) {
+            samples[i] = (short) ((buffer[2 * i + 1] << 8) | (buffer[2 * i] & 0xff));
+        }
+
+        short[] filteredSamples = new short[length];
+
+        // Apply the moving-average filter over a symmetric window
+        for (int i = 0; i < length; i++) {
+            int sum = 0;
+            int count = 0;
+            for (int j = i - windowSize / 2; j <= i + windowSize / 2; j++) {
+                if (j >= 0 && j < length) {
+                    sum += samples[j];
+                    count++;
+                }
+            }
+            filteredSamples[i] = (short) (sum / count);
+        }
+
+        // Convert the filtered shorts back to bytes
+        for (int i = 0; i < length; i++) {
+            buffer[2 * i] = (byte) (filteredSamples[i] & 0xff);
+            buffer[2 * i + 1] = (byte) ((filteredSamples[i] >> 8) & 0xff);
+        }
+    }
+	
+	
 
 	
 	private void reduceNoise(List<ChunkAnalysis> chunkAnalysisList, EvaluationCriteriaEnum criteria) {
@@ -940,8 +1063,6 @@ public class UserChannelContext {
 			return true;
 		}
 		
-		frequency = handleHarmonic(basicFrequency, frequency);
-		
 		return (basicFrequency != -1 && frequency != -1) && Math.abs(YINPitchDetector.getDeviationCent(basicFrequency, frequency)) < 50;
 	}
 
@@ -1049,15 +1170,16 @@ public class UserChannelContext {
 		
 		Map<Integer, Double> rhythMap = new HashMap<Integer, Double>();
 
-		reduceNoise(datas, EvaluationCriteriaEnum.AMPLITUDE);
+		//reduceNoise(datas, EvaluationCriteriaEnum.AMPLITUDE);
 		
-		int amplitudeThreshold = 2;
+		float amplitudeThreshold = 0.5f;
 		int beatContinueNum = 0;
-		int intervalTime = 150;
+		int intervalTime = 50;
 		ChunkAnalysis chunkAnalysis = null;
 		double rhythmTime = -1;
 		int peakIndex = 0;
 		int continueNumThreshold = 0;
+		boolean isContinue = false;
 		
 		for (int i = 0; i < datas.size(); i++) {
 			chunkAnalysis = datas.get(i);
@@ -1068,15 +1190,17 @@ public class UserChannelContext {
 					rhythmTime = i * bufferSize * 1000 / audioFormat.getSampleRate();
 				}
 				
-				if (beatContinueNum > continueNumThreshold) {
+				if (beatContinueNum > continueNumThreshold && isContinue == false) {
 					if (rhythMap.size() == 0 || rhythmTime - rhythMap.get(peakIndex) > intervalTime) {
 						peakIndex++;
 						rhythMap.put(peakIndex, rhythmTime);
+						isContinue = true;
 					}
 				}
 
 			} else {
 				beatContinueNum = 0;
+				isContinue = false;
 			}
 		}
 		
@@ -1086,17 +1210,18 @@ public class UserChannelContext {
 	
 	private Map<Integer, Double> queryRhythmsByFrequency(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis, List<ChunkAnalysis> datas){
 		LOGGER.debug("------------利用频率寻找节奏点------------");
+
 		/**
 		// Get the previous note
 		Optional<NoteAnalysis> preNoteAnalysisOptinal = doneNoteAnalysisList.stream().filter(t -> t.getIndex() == musicXmlNote.getMusicalNotesIndex() - 1).findFirst();
 		
-		double preNoteAvgFrequency = -1;
+		float preNoteAvgFrequency = -1;
 		if(preNoteAnalysisOptinal.isPresent()) {
 			preNoteAvgFrequency = preNoteAnalysisOptinal.get().getPlayFrequency();
 		}
 		
 		LOGGER.debug("上一个音符的平均音高[{}]", preNoteAvgFrequency);
-		*/
+		**/
 		
 		// Get the previous signal
 		ChunkAnalysis firstChunkAnalysis = datas.get(0);
@@ -1116,6 +1241,7 @@ public class UserChannelContext {
 		
 		float preNoteAvgFrequency = lastChunkAnalysis.getFrequency();
 		
+		
 		ChunkAnalysis chunkAnalysis = null;
 		Map<Integer, Double> rhythMap = new HashMap<Integer, Double>();
 
@@ -1128,59 +1254,58 @@ public class UserChannelContext {
 		int peakIndex = 0;
 		int continueNumThreshold = 0;
 		
-		for(int i = 0; i < datas.size(); i++) {
-			
+		for (int i = 0; i < datas.size(); i++) {
+
 			chunkAnalysis = datas.get(i);
-			
-			// If not a rest
-			if(musicXmlNote.getFrequency() >= MIN_FREQUECY) {
-				if(chunkAnalysis.getFrequency() < MIN_FREQUECY) {
-					
-					silenceContinueNum++;
-					beatContinueNum = 0;
-					
-					if(silenceContinueNum > continueNumThreshold) {
-						preNoteAvgFrequency = chunkAnalysis.getFrequency();
+
+			// If not a rest
+			if (chunkAnalysis.getFrequency() < MIN_FREQUECY) {
+
+				silenceContinueNum++;
+				beatContinueNum = 0;
+
+				if (silenceContinueNum > continueNumThreshold) {
+					preNoteAvgFrequency = chunkAnalysis.getFrequency();
+				}
+			} else {
+
+				silenceContinueNum = 0;
+
+				if (preNoteAvgFrequency < MIN_FREQUECY || !isSamePitch(preNoteAvgFrequency, chunkAnalysis.getFrequency()) || musicXmlNote.getFrequency() < MIN_FREQUECY) {
+
+					if (beatContinueNum == 0) {
+						rhythmTime = chunkAnalysis.getStartTime();
 					}
-				}else {
-					
-					silenceContinueNum = 0;
-					
-					if(preNoteAvgFrequency < MIN_FREQUECY || !isSamePitch(preNoteAvgFrequency, chunkAnalysis.getFrequency())) {
 
-						if (beatContinueNum == 0) {
-							rhythmTime = chunkAnalysis.getStartTime();
-						}
-						
-						beatContinueNum++;
-						if (beatContinueNum > continueNumThreshold) {
-							if (chunkAnalysis.getStartTime() - lastestRhythmTime > intervalTime) {
+					beatContinueNum++;
+					if (beatContinueNum > continueNumThreshold) {
+						if (chunkAnalysis.getStartTime() - lastestRhythmTime > intervalTime) {
+
+							lastestRhythmTime = rhythmTime;
 
-								lastestRhythmTime = rhythmTime;
+							if (peakIndex == 0 || lastestRhythmTime - rhythMap.get(peakIndex) > intervalTime) {
 								peakIndex++;
-								
-								if(peakIndex == 1 || lastestRhythmTime - rhythMap.get(peakIndex - 1) > intervalTime) {
-									rhythMap.put(peakIndex, lastestRhythmTime);
-									LOGGER.debug("范围内查询到音高信号,preNoteFrequency:{} peakIndex:{} EndTime:{}", preNoteAvgFrequency, peakIndex, lastestRhythmTime);
-								}
-
-								preNoteAvgFrequency = chunkAnalysis.getFrequency();
-								beatContinueNum = 0;
+								rhythMap.put(peakIndex, lastestRhythmTime);
+								LOGGER.debug("范围内查询到音高信号,preNoteFrequency:{} peakIndex:{} EndTime:{}", preNoteAvgFrequency, peakIndex, lastestRhythmTime);
 							}
+
+							preNoteAvgFrequency = chunkAnalysis.getFrequency();
+							beatContinueNum = 0;
 						}
-					}else {
-						beatContinueNum = 0;
 					}
-					
+				} else {
+					beatContinueNum = 0;
 				}
+
 			}
+		}
 			
 			
-		}
 		
 		return rhythMap;
 	}
 	
+	
 	private List<ChunkAnalysis> extract(MusicXmlNote musicXmlNote, NoteAnalysis noteAnalysis) {
 		
 		LOGGER.debug("---------------------Extract Data----------------------");
@@ -1193,11 +1318,20 @@ public class UserChannelContext {
 		
 		double endTime = startTime + musicXmlNote.getDuration();
 		
-		LOGGER.debug("当前音符有效信号时值[{}]偏移[{}]后的范围[ {} - {} ]", musicXmlNote.getDuration(), floatingRange, startTime, endTime);
+		/**
+		// Next note info
+		MusicXmlNote nextMusicXmlNote = getCurrentMusicNote(null, musicXmlNote.getMusicalNotesIndex() + 1);
+		if(nextMusicXmlNote != null) {
+			endTime = endTime - nextMusicXmlNote.getDuration() * hardLevel.getTempoEffectiveRange(nextMusicXmlNote.getDenominator(), nextMusicXmlNote.getDuration()) / 100;
+		}**/
+		
+		double finalEndTime = endTime;
+		
+		LOGGER.debug("当前音符有效信号时值[{}]偏移[{}]后的范围[ {} - {} ]", musicXmlNote.getDuration(), floatingRange, startTime, finalEndTime);
 		
 		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream()
 				.filter(t -> Double.doubleToLongBits(t.getEndTime()) >= Double.doubleToLongBits(startTime)
-						&& Double.doubleToLongBits(t.getStartTime()) <= Double.doubleToLongBits(endTime))
+						&& Double.doubleToLongBits(t.getStartTime()) <= Double.doubleToLongBits(finalEndTime))
 				.collect(Collectors.toList());
 
 		// Take part of the valid signal according to the completeness setting
@@ -1213,7 +1347,7 @@ public class UserChannelContext {
 		return datas;
 	}
 	
-	private float handleHarmonic(float basicFrequency, float frequency) {
+	private float handleHarmonic2(float basicFrequency, float frequency) {
 		
 		if (basicFrequency > frequency) {
 			return frequency;
@@ -1230,4 +1364,28 @@ public class UserChannelContext {
 		return frequency;
 	}
 	
+	private float handleHarmonic(float basicFrequency, float frequency) {
+		
+		if(frequency < MIN_FREQUECY || basicFrequency < MIN_FREQUECY) {
+			return frequency;
+		}
+		
+		int roundedRatio = 0;
+		float targetFrequency = frequency;
+		
+		if(basicFrequency > frequency) {
+			roundedRatio = Math.round(basicFrequency / frequency);
+			targetFrequency = frequency * roundedRatio;
+		}else {
+			roundedRatio = Math.round(frequency / basicFrequency);
+			targetFrequency = frequency / roundedRatio;
+		}
+		
+		if(isSamePitch(basicFrequency, targetFrequency)) {
+			return targetFrequency;
+		}
+		
+		return frequency;
+	}
+	
 }
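The rewritten handleHarmonic folds the detected frequency toward the reference by the nearest integer harmonic ratio, and keeps the folded value only if it lands within isSamePitch's 50-cent tolerance. A short trace with illustrative values (not from the commit):

	float basic = 440f;
	float detected = 1318f;                   // ~3rd harmonic of 440 Hz
	int ratio = Math.round(detected / basic); // = 3
	float folded = detected / ratio;          // = 439.33 Hz, ~2.6 cents from 440
	// handleHarmonic(basic, detected) would therefore return 439.33f.
	// For detected = 550f the ratio rounds to 1, the fold is a no-op, and the
	// ~386-cent deviation means the original 550f is returned unchanged.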