yonge, 3 years ago
Parent
Commit
0cd62a6c1a

+ 77 - 12
audio-analysis/src/main/java/com/yonge/nettty/dto/HardLevelEnum.java

@@ -3,15 +3,42 @@ package com.yonge.nettty.dto;
 import com.ym.mec.common.enums.BaseEnum;
 
 public enum HardLevelEnum implements BaseEnum<String, HardLevelEnum> {
-	BEGINNER("入门级", 5, 5, 50, 60, 10), ADVANCED("进阶级", 5, 5, 50, 50, 10), PERFORMER("大师级", 5, 5, 50, 60, 10);
+	/**
+	 * Beginner level: amplitude threshold, frequency threshold, <br>
+	 * tempo effective range for whole, half, quarter, eighth, sixteenth and thirty-second notes, <br>
+	 * integrity range, not-played range
+	 */
+	BEGINNER("入门级", 5, 10, 20, 20, 30, 30, 50, 50, 60, 10), 
+	/**
+	 * Advanced level: amplitude threshold, frequency threshold, <br>
+	 * tempo effective range for whole, half, quarter, eighth, sixteenth and thirty-second notes, <br>
+	 * integrity range, not-played range
+	 */
+	ADVANCED("进阶级", 5, 5, 10, 10, 15, 15, 20, 20, 80, 10), 
+	/**
+	 * Master level: amplitude threshold, frequency threshold, <br>
+	 * tempo effective range for whole, half, quarter, eighth, sixteenth and thirty-second notes, <br>
+	 * integrity range, not-played range
+	 */
+	PERFORMER("大师级", 5, 3, 5, 5, 10, 10, 15, 15, 90, 20);
 
 	private String msg;
 
 	private int amplitudeThreshold;
 
-	private int frequencyOffset;
+	private int frequencyThreshold;
+
+	private int tempoEffectiveRangeOf1;
+
+	private int tempoEffectiveRangeOf2;
+
+	private int tempoEffectiveRangeOf4;
+
+	private int tempoEffectiveRangeOf8;
+
+	private int tempoEffectiveRangeOf16;
 
-	private int tempoOffsetOfPercent;
+	private int tempoEffectiveRangeOf32;
 
 	private int integrityRange;
 
@@ -21,16 +48,28 @@ public enum HardLevelEnum implements BaseEnum<String, HardLevelEnum> {
 	 * 
 	 * @param msg
 	 * @param amplitudeThreshold amplitude threshold
-	 * @param frequencyOffset frequency threshold
-	 * @param tempoOffsetOfPercent tempo offset percentage (the tempo counts as correct only within this range)
+	 * @param frequencyThreshold frequency threshold
+	 * @param tempoEffectiveRangeOf1 tempo offset percentage for whole notes (the tempo counts as correct only within this range)
+	 * @param tempoEffectiveRangeOf2 tempo offset percentage for half notes (the tempo counts as correct only within this range)
+	 * @param tempoEffectiveRangeOf4 tempo offset percentage for quarter notes (the tempo counts as correct only within this range)
+	 * @param tempoEffectiveRangeOf8 tempo offset percentage for eighth notes (the tempo counts as correct only within this range)
+	 * @param tempoEffectiveRangeOf16 tempo offset percentage for sixteenth notes (the tempo counts as correct only within this range)
+	 * @param tempoEffectiveRangeOf32 tempo offset percentage for thirty-second notes (the tempo counts as correct only within this range)
 	 * @param integrityRange integrity (completion) range
 	 * @param notPlayRange not-played range
 	 */
-	HardLevelEnum(String msg, int amplitudeThreshold, int frequencyOffset, int tempoOffsetOfPercent, int integrityRange, int notPlayRange) {
+	HardLevelEnum(String msg, int amplitudeThreshold, int frequencyThreshold, int tempoEffectiveRangeOf1, int tempoEffectiveRangeOf2,
+			int tempoEffectiveRangeOf4, int tempoEffectiveRangeOf8, int tempoEffectiveRangeOf16, int tempoEffectiveRangeOf32, int integrityRange,
+			int notPlayRange) {
 		this.msg = msg;
 		this.amplitudeThreshold = amplitudeThreshold;
-		this.frequencyOffset = frequencyOffset;
-		this.tempoOffsetOfPercent = tempoOffsetOfPercent;
+		this.frequencyThreshold = frequencyThreshold;
+		this.tempoEffectiveRangeOf1 = tempoEffectiveRangeOf1;
+		this.tempoEffectiveRangeOf2 = tempoEffectiveRangeOf2;
+		this.tempoEffectiveRangeOf4 = tempoEffectiveRangeOf4;
+		this.tempoEffectiveRangeOf8 = tempoEffectiveRangeOf8;
+		this.tempoEffectiveRangeOf16 = tempoEffectiveRangeOf16;
+		this.tempoEffectiveRangeOf32 = tempoEffectiveRangeOf32;
 		this.integrityRange = integrityRange;
 		this.notPlayRange = notPlayRange;
 	}
@@ -43,12 +82,38 @@ public enum HardLevelEnum implements BaseEnum<String, HardLevelEnum> {
 		return amplitudeThreshold;
 	}
 
-	public int getFrequencyOffset() {
-		return frequencyOffset;
+	public int getFrequencyThreshold() {
+		return frequencyThreshold;
 	}
 
-	public int getTempoOffsetOfPercent() {
-		return tempoOffsetOfPercent;
+	public int getTempoEffectiveRange(int denominator) {
+		
+		int tempoEffectiveRange = 0;
+		
+		switch (denominator) {
+		case 1:
+			tempoEffectiveRange = tempoEffectiveRangeOf1;
+			break;
+		case 2:
+			tempoEffectiveRange = tempoEffectiveRangeOf2;
+			break;
+		case 4:
+			tempoEffectiveRange = tempoEffectiveRangeOf4;
+			break;
+		case 8:
+			tempoEffectiveRange = tempoEffectiveRangeOf8;
+			break;
+		case 16:
+			tempoEffectiveRange = tempoEffectiveRangeOf16;
+			break;
+		case 32:
+			tempoEffectiveRange = tempoEffectiveRangeOf32;
+			break;
+
+		default:
+			break;
+		}
+		return tempoEffectiveRange;
 	}
 
 	public int getIntegrityRange() {
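
The single tempoOffsetOfPercent value is replaced by a per-note-value lookup, getTempoEffectiveRange(int denominator). Below is a minimal sketch of how that lookup behaves, using the constants defined above (a hypothetical demo class, not part of the commit; an unsupported denominator falls through the default branch and yields 0, i.e. no entry offset is tolerated):

import com.yonge.nettty.dto.HardLevelEnum;

public class TempoRangeDemo {
	public static void main(String[] args) {
		// Values come straight from the enum constants above.
		System.out.println(HardLevelEnum.BEGINNER.getTempoEffectiveRange(1));   // 20 (whole note)
		System.out.println(HardLevelEnum.BEGINNER.getTempoEffectiveRange(16));  // 50 (sixteenth note)
		System.out.println(HardLevelEnum.PERFORMER.getTempoEffectiveRange(16)); // 15
		System.out.println(HardLevelEnum.PERFORMER.getTempoEffectiveRange(3));  // 0 (unsupported denominator)
	}
}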

+ 24 - 37
audio-analysis/src/main/java/com/yonge/nettty/dto/UserChannelContext.java

@@ -32,7 +32,7 @@ public class UserChannelContext {
 	
 	private final static Logger LOGGER = LoggerFactory.getLogger(UserChannelContext.class);
 	
-	private int offsetMS = 0;
+	private int offsetMS = 300;
 	
 	private Long recordId;
 	
@@ -42,8 +42,6 @@ public class UserChannelContext {
 	
 	private int beatByteLength;
 	
-	private boolean handleSwitch;
-	
 	// mapping between the piece and its musicxml
 	private ConcurrentHashMap<Integer, MusicXmlBasicInfo> songMusicXmlMap = new ConcurrentHashMap<Integer, MusicXmlBasicInfo>();
 
@@ -149,7 +147,6 @@ public class UserChannelContext {
 		receivedTime = 0;
 		offsetMS = 0;
 		lastChunkAnalysisList = new ArrayList<ChunkAnalysis>();
-		handleSwitch = false;
 	}
 	
 	public MusicXmlBasicInfo getMusicXmlBasicInfo(Integer songId){
@@ -270,14 +267,6 @@ public class UserChannelContext {
 		
 		double durationTime = 1000 * (samples.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
 		
-		if(handleSwitch == false && rms > 0.01){
-			handleSwitch = true;
-		}
-		
-		if(handleSwitch == false){
-			return;
-		}
-		
 		receivedTime += durationTime;
 		
 		if(receivedTime <= offsetMS){
@@ -308,7 +297,8 @@ public class UserChannelContext {
 				LOGGER.info("------ Frequency:{}  splDb:{}  Power:{}  amplitude:{} time:{}------", playFrequency, splDb, power, amplitude, playTime);
 				
 				ChunkAnalysis lastChunkAnalysis = new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude);
-				if(Math.abs(chunkAnalysisList.get(chunkAnalysisList.size() - 1).getFrequency() - lastChunkAnalysis.getFrequency()) > hardLevel.getFrequencyOffset()){
+				
+				if(Math.abs(chunkAnalysisList.get(chunkAnalysisList.size() - 1).getFrequency() - lastChunkAnalysis.getFrequency()) > hardLevel.getFrequencyThreshold()){
 					lastChunkAnalysis.setFrequency(-1);
 				}
 				if(chunkAnalysisList.get(chunkAnalysisList.size() - 1).getAmplitude() + 2 < lastChunkAnalysis.getAmplitude()){
@@ -331,21 +321,21 @@ public class UserChannelContext {
 					
 				}
 				
-				noteAnalysis.setPlayFrequency(computeFrequency(chunkAnalysisList, lastChunkAnalysis, hardLevel.getFrequencyOffset()));
+				noteAnalysis.setPlayFrequency(computeFrequency(chunkAnalysisList, lastChunkAnalysis, hardLevel.getFrequencyThreshold()));
 				
 				// Tempo check (if the pitch is uninterrupted for the note's duration, the tempo is correct)
 				boolean tempo = true;
-				if (subjectId == 23) {
+				if (subjectId == 23 || subjectId == 113) {
 					if (musicXmlNote.getFrequency() == -1) {// rest
 						tempo = chunkAnalysisList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count() <= 0;
 					}else{
-						tempo = computeTempoWithAmplitude2(chunkAnalysisList, lastChunkAnalysis);
+						tempo = computeTempoWithAmplitude2(musicXmlNote, chunkAnalysisList, lastChunkAnalysis);
 					}
 				}else{
 					if (musicXmlNote.getFrequency() == -1) {// rest
 						tempo = chunkAnalysisList.stream().filter(t -> t.getFrequency() > 100).count() <= 1;
 					}else{
-						tempo = computeTempoWithFrequency(chunkAnalysisList, lastChunkAnalysis);
+						tempo = computeTempoWithFrequency(musicXmlNote, chunkAnalysisList, lastChunkAnalysis);
 					}
 				}
 				
@@ -380,12 +370,6 @@ public class UserChannelContext {
 
 			} else {
 				
-				/*double skip = 0;
-				if (firstNoteIndexPerSectionList.contains(noteAnalysis.getMusicalNotesIndex())) {
-					skip = offsetMSOfSection;
-				}*/
-				//skip = noteAnalysis.getStandardDurationTime() * 0.2;
-				
 				LOGGER.info("Frequency:{}  splDb:{}  Power:{}  amplitude:{}  rms:{}", playFrequency, splDb, power, amplitude, rms);
 				
 				chunkAnalysisList.add(new ChunkAnalysis(playTime - durationTime, playTime, playFrequency, splDb, power, amplitude));
@@ -468,7 +452,7 @@ public class UserChannelContext {
 			int score = socre / noteAnalysisList.size();
 
 			// average score
-			if (getMusicXmlBasicInfo(null).getSubjectId() == 23) {
+			if (getMusicXmlBasicInfo(null).getSubjectId() == 23 || getMusicXmlBasicInfo(null).getSubjectId() == 113) {
 				score = tempoScore;
 			}
 			result.put("score", score);
@@ -481,7 +465,7 @@ public class UserChannelContext {
 
 		double playDurationTime = 0;
 		
-		if (subjectId == 23) {
+		if (subjectId == 23 || subjectId == 113) {
 			if (noteAnalysis.getFrequency() == -1) {// rest
 				if (!noteAnalysis.isTempo()) {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
@@ -508,7 +492,7 @@ public class UserChannelContext {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
 				} else if (playDurationTime * 100 / noteAnalysis.getDurationTime() < hardLevel.getIntegrityRange()) {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
-				} else if (Math.abs(noteAnalysis.getFrequency() - noteAnalysis.getPlayFrequency()) > hardLevel.getFrequencyOffset()) {
+				} else if (Math.abs(noteAnalysis.getFrequency() - noteAnalysis.getPlayFrequency()) > hardLevel.getFrequencyThreshold()) {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
 				} else {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
@@ -523,7 +507,7 @@ public class UserChannelContext {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.INTEGRITY_WRONG);
 				} else if (!noteAnalysis.isTempo()) {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.CADENCE_WRONG);
-				} else if (Math.abs(noteAnalysis.getFrequency() - noteAnalysis.getPlayFrequency()) > hardLevel.getFrequencyOffset()) {
+				} else if (Math.abs(noteAnalysis.getFrequency() - noteAnalysis.getPlayFrequency()) > hardLevel.getFrequencyThreshold()) {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.INTONATION_WRONG);
 				} else {
 					noteAnalysis.setMusicalErrorType(NoteErrorType.RIGHT);
@@ -535,7 +519,7 @@ public class UserChannelContext {
 		int tempoScore = 0;
 		int integrityScore = 0;
 		int intonationScore = 100 - new BigDecimal(Math.abs(YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getPlayFrequency())
-				- YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getFrequency()))).multiply(new BigDecimal(10)).divide(new BigDecimal(17), BigDecimal.ROUND_UP)
+				- YINPitchDetector.hertzToAbsoluteCent(noteAnalysis.getFrequency()))).multiply(new BigDecimal(20)).divide(new BigDecimal(17), BigDecimal.ROUND_UP)
 				.setScale(0, BigDecimal.ROUND_UP).intValue();
 		if (intonationScore < 0) {
 			intonationScore = 0;
@@ -561,7 +545,7 @@ public class UserChannelContext {
 			noteAnalysis.setIntegrityScore(integrityScore);
 		}
 		noteAnalysis.setIntonationScore(intonationScore);
-		if (subjectId == 23) {
+		if (subjectId == 23 || subjectId == 113) {
 			noteAnalysis.setScore(tempoScore);
 		} else {
 			noteAnalysis.setScore(new BigDecimal(intonationScore + tempoScore + integrityScore).divide(new BigDecimal(3), 2).setScale(0, BigDecimal.ROUND_UP)
@@ -650,7 +634,7 @@ public class UserChannelContext {
 		return frequency;
 	}
 	
-	private boolean computeTempoWithFrequency(List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis){
+	private boolean computeTempoWithFrequency(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis){
 		
 		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
 		
@@ -659,7 +643,7 @@ public class UserChannelContext {
 			double lastFrequency = lastChunkAnalysis.getFrequency();
 			Iterator<ChunkAnalysis> iterable = chunkList.iterator();
 			while (iterable.hasNext()) {
-				if (Math.abs(lastFrequency - iterable.next().getFrequency()) > hardLevel.getFrequencyOffset()) {
+				if (Math.abs(lastFrequency - iterable.next().getFrequency()) > hardLevel.getFrequencyThreshold()) {
 					break;
 				}
 				iterable.remove();
@@ -707,7 +691,7 @@ public class UserChannelContext {
 		
 		if (tempo) {
 			// check the entry time point
-			if((chunkAnalysisList.size() - chunkList.size() + firstPeakIndex) * 100 /chunkAnalysisList.size() > hardLevel.getTempoOffsetOfPercent()){
+			if((chunkAnalysisList.size() - chunkList.size() + firstPeakIndex) * 100 /chunkAnalysisList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())){
 				tempo = false;
 			}
 		}
@@ -715,7 +699,7 @@ public class UserChannelContext {
 		return tempo;
 	}
 	
-	private boolean computeTempoWithAmplitude2(List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis) {
+	private boolean computeTempoWithAmplitude2(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis) {
 
 		List<Integer> chunkAmplitudeList = chunkAnalysisList.stream().map(ChunkAnalysis::getAmplitude).collect(Collectors.toList());
 
@@ -759,7 +743,7 @@ public class UserChannelContext {
 		
 		if (tempo) {
 			// check the entry time point
-			if((firstPeakIndex - 1) * 100 /chunkAmplitudeList.size() > hardLevel.getTempoOffsetOfPercent()){
+			if((firstPeakIndex - 1) * 100 /chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator())){
 				tempo = false;
 			}
 		}
@@ -767,7 +751,7 @@ public class UserChannelContext {
 		return tempo;
 	}
 	
-	private boolean computeTempoWithAmplitude(List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis) {
+	private boolean computeTempoWithAmplitude(MusicXmlNote musicXmlNote, List<ChunkAnalysis> chunkAnalysisList, ChunkAnalysis lastChunkAnalysis) {
 
 		boolean tempo = false;
 
@@ -817,7 +801,7 @@ public class UserChannelContext {
 
 		// detect whether the entry was delayed
 		if (tempo == true) {
-			if (minPeakIndex * 100 / chunkAmplitudeList.size() > hardLevel.getTempoOffsetOfPercent() && chunkAmplitudeList.size() > 3) {
+			if (minPeakIndex * 100 / chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator()) && chunkAmplitudeList.size() > 3) {
 				tempo = false;
 			}
 		}
@@ -838,8 +822,11 @@ public class UserChannelContext {
 			chunkAnalysisList.add(new ChunkAnalysis(f, 0, 0));
 		}
 		
+		MusicXmlNote musicXmlNote = new MusicXmlNote();
+		musicXmlNote.setDenominator(1);
+		
 		//System.out.println(context.computeFrequency(chunkAnalysisList, lastChunkAnalysis, 5));
-		System.out.println(context.computeTempoWithFrequency(chunkAnalysisList, lastChunkAnalysis));
+		System.out.println(context.computeTempoWithFrequency(musicXmlNote, chunkAnalysisList, lastChunkAnalysis));
 	}
 	
 }
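
The intonation score above now multiplies the cent deviation by 20 instead of 10 (same divisor of 17), so each cent off pitch costs roughly twice as many points. The following is a worked recomputation of that one expression with a hypothetical stand-in for the hertz-to-cent conversion (only the arithmetic of the changed line is illustrated, not the project's YINPitchDetector):

import java.math.BigDecimal;
import java.math.RoundingMode;

public class IntonationScoreDemo {

	// Hypothetical stand-in for the hertz-to-absolute-cent conversion (reference C-1 ≈ 8.1758 Hz).
	static double hertzToAbsoluteCent(double hz) {
		return 1200.0 * Math.log(hz / 8.1758) / Math.log(2.0);
	}

	// Mirrors: 100 - |cents(play) - cents(target)| * 20 / 17, rounded up, floored at 0.
	static int intonationScore(double playHz, double targetHz) {
		double centDiff = Math.abs(hertzToAbsoluteCent(playHz) - hertzToAbsoluteCent(targetHz));
		int score = 100 - new BigDecimal(centDiff)
				.multiply(new BigDecimal(20))
				.divide(new BigDecimal(17), RoundingMode.UP)
				.setScale(0, RoundingMode.UP)
				.intValue();
		return Math.max(score, 0);
	}

	public static void main(String[] args) {
		// ~50 cents (a quarter tone) off now scores about 41; with the old factor of 10 it was about 70.
		System.out.println(intonationScore(452.9, 440.0));
	}
}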

+ 11 - 0
audio-analysis/src/main/java/com/yonge/nettty/entity/MusicXmlNote.java

@@ -25,6 +25,9 @@ public class MusicXmlNote {
 
 	// index of the current note within the whole score (starting from 0)
 	private int musicalNotesIndex;
+	
+	// note value denominator (1 = whole note, 2 = half, 4 = quarter, ...)
+	private int denominator;
 
 	public double getTimeStamp() {
 		return timeStamp;
@@ -81,4 +84,12 @@ public class MusicXmlNote {
 	public void setMusicalNotesIndex(int musicalNotesIndex) {
 		this.musicalNotesIndex = musicalNotesIndex;
 	}
+
+	public int getDenominator() {
+		return denominator;
+	}
+
+	public void setDenominator(int denominator) {
+		this.denominator = denominator;
+	}
 }
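
The new denominator field carries the note value that the reworked tempo checks pass into HardLevelEnum.getTempoEffectiveRange(...); presumably it is populated when the MusicXML is parsed. A minimal wiring sketch under that assumption (hypothetical demo class; the values in the comments come from the enum constants above):

import com.yonge.nettty.dto.HardLevelEnum;
import com.yonge.nettty.entity.MusicXmlNote;

public class DenominatorWiringDemo {
	public static void main(String[] args) {
		MusicXmlNote note = new MusicXmlNote();
		note.setDenominator(8); // eighth note

		// A beginner is allowed an entry offset of up to 30% of the note's duration,
		// while the master level only tolerates 10%.
		System.out.println(HardLevelEnum.BEGINNER.getTempoEffectiveRange(note.getDenominator()));  // 30
		System.out.println(HardLevelEnum.PERFORMER.getTempoEffectiveRange(note.getDenominator())); // 10
	}
}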