Commit 94316c53b8 by yonge, 2 years ago

+ 5 - 5
audio-analysis/src/main/java/com/yonge/netty/dto/ChunkAnalysis.java

@@ -8,7 +8,7 @@ public class ChunkAnalysis {
 
 	private double durationTime;
 
-	private int frequency;
+	private double frequency;
 
 	private int splDb;
 
@@ -18,7 +18,7 @@ public class ChunkAnalysis {
 	
 	private boolean isPeak;
 
-	public ChunkAnalysis(double startTime, double endTime, int frequency, int splDb, int power, int amplitude) {
+	public ChunkAnalysis(double startTime, double endTime, double frequency, int splDb, int power, int amplitude) {
 		this.startTime = startTime;
 		this.endTime = endTime;
 		this.frequency = frequency;
@@ -28,7 +28,7 @@ public class ChunkAnalysis {
 		this.durationTime = endTime - startTime;
 	}
 
-	public ChunkAnalysis(int frequency, int splDb, int power) {
+	public ChunkAnalysis(double frequency, int splDb, int power) {
 		this.frequency = frequency;
 		this.splDb = splDb;
 		this.power = power;
@@ -58,11 +58,11 @@ public class ChunkAnalysis {
 		this.durationTime = durationTime;
 	}
 
-	public int getFrequency() {
+	public double getFrequency() {
 		return frequency;
 	}
 
-	public void setFrequency(int frequency) {
+	public void setFrequency(double frequency) {
 		this.frequency = frequency;
 	}
 

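The whole point of this file's change is widening frequency from int to double through the field, both constructors, and the accessors. A minimal illustration of why fractional Hz matters once pitches are compared in cents; this is not part of the commit, and the class and method names are made up:

public final class PitchPrecisionSketch {
    // Cents offset between a detected pitch and a target pitch:
    // 1200 * log2(detected / target).
    static double centsOff(double detectedHz, double targetHz) {
        return 1200.0 * Math.log(detectedHz / targetHz) / Math.log(2.0);
    }

    public static void main(String[] args) {
        double detected = 110.6;                              // hypothetical detector output in Hz
        System.out.println(centsOff(detected, 110.0));        // roughly 9.4 cents sharp
        System.out.println(centsOff((int) detected, 110.0));  // 0.0 once truncated to int
    }
}

With an int field every detected value in [110, 111) collapses onto 110 Hz, so small intonation errors on low notes become invisible; keeping the double preserves them.
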
+ 31 - 11
audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext.java

@@ -15,6 +15,7 @@ import java.util.stream.Collectors;
 
 import javax.sound.sampled.AudioFormat;
 
+import com.yonge.cooleshow.common.enums.HardLevelEnum;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -539,7 +540,7 @@ public class UserChannelContext {
 		
 		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
 		
-		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote, floatingRange);
 		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
 		
 		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
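A side note on the filters in this hunk (they are unchanged by the commit): comparing Double.doubleToLongBits values with >= orders the same way as the numeric values as long as both operands are non-negative, which holds for the start/end times used here. A tiny sketch of the equivalent, more direct comparison:

public class TimeWindowFilterSketch {
    public static void main(String[] args) {
        double start = 1.25, chunkStart = 1.50;

        // As written in UserChannelContext (bit-pattern comparison):
        boolean a = Double.doubleToLongBits(chunkStart) >= Double.doubleToLongBits(start);

        // Equivalent for the non-negative time values used here, and clearer:
        boolean b = Double.compare(chunkStart, start) >= 0;

        System.out.println(a + " " + b); // true true
    }
}
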
@@ -674,7 +675,7 @@ public class UserChannelContext {
 		
 		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
 		
-		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote, floatingRange);
 		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
 		
 		//重新计算延迟
@@ -697,14 +698,14 @@ public class UserChannelContext {
 		
 		List<ChunkAnalysis> chunkList = new ArrayList<ChunkAnalysis>(chunkAnalysisList);
 		
-		List<Integer> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency()).filter(t -> t.doubleValue() > MusicalInstrumentsPitchRange.get(subjectId, standardFrequecy).getMinPitch() && t.doubleValue() < MusicalInstrumentsPitchRange.get(subjectId, standardFrequecy).getMaxPitch())
+		List<Double> chunkFrequencyList = chunkList.stream().map(t -> t.getFrequency()).filter(t -> t.doubleValue() > MusicalInstrumentsPitchRange.get(subjectId, standardFrequecy).getMinPitch() && t.doubleValue() < MusicalInstrumentsPitchRange.get(subjectId, standardFrequecy).getMaxPitch())
 				.collect(Collectors.toList());
 		
 		if (chunkFrequencyList.size() == 0) {
 			return -1;
 		}
 
-		int frequency = (int) (chunkFrequencyList.stream().mapToInt(t -> t).sum() / chunkFrequencyList.size());
+		int frequency = (int) (chunkFrequencyList.stream().mapToDouble(t -> t).sum() / chunkFrequencyList.size());
 
 		return frequency;
 	}
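With getFrequency() now returning double, the mapped list above becomes List<Double>, so the averaging stream has to switch from mapToInt to mapToDouble; the result is still truncated to int at the end. A standalone sketch of the same averaging over a hypothetical list of in-range frequencies:

import java.util.Arrays;
import java.util.List;

public class FrequencyAverageSketch {
    public static void main(String[] args) {
        // Hypothetical in-range chunk frequencies in Hz
        List<Double> chunkFrequencyList = Arrays.asList(439.7, 440.2, 441.0);

        // Mirrors the corrected code path: sum as double, divide by count, truncate once.
        int frequency = (int) (chunkFrequencyList.stream()
                .mapToDouble(t -> t)
                .sum() / chunkFrequencyList.size());

        System.out.println(frequency); // 440
    }
}

mapToDouble(Double::doubleValue).average() would express the same thing, but the sum/size form above mirrors the hunk.
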
@@ -724,7 +725,7 @@ public class UserChannelContext {
 		
 		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
 		
-		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote, floatingRange);
 		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
 		
 		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
@@ -894,7 +895,7 @@ public class UserChannelContext {
 		
 		List<ChunkAnalysis> chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(startTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(endTime)).collect(Collectors.toList());
 		
-		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote);
+		double correctedStartTime = queryFirstNoteStartTime(chunkAnalysisList, musicXmlNote, floatingRange);
 		double correctedEndTime = correctedStartTime + musicXmlNote.getDuration();
 		
 		chunkAnalysisList = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getStartTime()) >= Double.doubleToLongBits(correctedStartTime) && Double.doubleToLongBits(t.getEndTime()) <= Double.doubleToLongBits(correctedEndTime)).collect(Collectors.toList());
@@ -980,7 +981,7 @@ public class UserChannelContext {
 		return tempo;
 	}
 	
-	private double queryFirstNoteStartTime(List<ChunkAnalysis> chunkAnalysisList, MusicXmlNote musicXmlNote) {
+	private double queryFirstNoteStartTime(List<ChunkAnalysis> chunkAnalysisList, MusicXmlNote musicXmlNote, double floatingRange) {
 		
 		if(chunkAnalysisList == null || chunkAnalysisList.size() == 0){
 			LOGGER.debug("[查询第一个音]找不到数据,correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
@@ -1003,10 +1004,29 @@ public class UserChannelContext {
 			MusicXmlNote preMusicXmlNote = getCurrentMusicNote(null, musicXmlNote.getMusicalNotesIndex() - 1);
 			if((int)preMusicXmlNote.getFrequency() == (int)musicXmlNote.getFrequency()){
 				Optional<ChunkAnalysis> optional = chunkAnalysisList.stream().filter(t -> t.getFrequency() <= MusicalInstrumentsPitchRange.get(subjectId, standardFrequecy).getMinPitch()).findFirst();
-				if(optional.isPresent()){
-					LOGGER.debug("[查询第一个音]与上一个音同音,有断开,correctedStartTime:{}", optional.get().getEndTime());
-					return optional.get().getEndTime();
-				}else{
+				if (optional.isPresent()) {// 与上一个音同音,有断开
+					
+					double restNotes = optional.get().getEndTime();
+
+					optional = chunkAnalysisList
+							.stream()
+							.filter(t -> t.getStartTime() >= restNotes && t.getFrequency() > MusicalInstrumentsPitchRange.get(subjectId, standardFrequecy).getMinPitch()
+									&& t.getFrequency() < MusicalInstrumentsPitchRange.get(subjectId, standardFrequecy).getMaxPitch()).findFirst();
+					if(optional.isPresent()){
+						LOGGER.debug("[查询第一个音]与上一个音同音,有断开,correctedStartTime:{}", optional.get().getStartTime());
+						return optional.get().getStartTime();
+					}else{
+						
+						if((int)musicXmlNote.getFrequency() == -1){
+							LOGGER.debug("[查询第一个音]与上一个音同音,有断开,correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
+							return musicXmlNote.getTimeStamp() + dynamicOffset;
+						}else{
+							LOGGER.debug("[查询第一个音]与上一个音同音,有断开,correctedStartTime:{}", musicXmlNote.getTimeStamp() + floatingRange + dynamicOffset);
+							return musicXmlNote.getTimeStamp() + floatingRange + dynamicOffset;
+						}
+					}
+
+				} else {
 					LOGGER.debug("[查询第一个音]与上一个音同音,未断开,correctedStartTime:{}", musicXmlNote.getTimeStamp() + dynamicOffset);
 					return musicXmlNote.getTimeStamp() + dynamicOffset;
 				}

+ 3 - 3
audio-analysis/src/test/java/com/yonge/netty/client/NettyClient.java

@@ -84,17 +84,17 @@ public class NettyClient {
 	        handler.handshakeFuture().sync();
 	        
 			//step1发送xml
-			String step1 = FileUtils.readFileToString(FileUtils.toFile(WebSocketClientHandler.class.getResource("/扬基歌88.json")));
+			String step1 = FileUtils.readFileToString(FileUtils.toFile(WebSocketClientHandler.class.getResource("/悬崖上的金鱼姬1 速度160.json")));
 			channel.writeAndFlush(new TextWebSocketFrame(step1));
 			
 			String step2 = "{\"header\":{\"commond\":\"recordStart\",\"type\":\"SOUND_COMPARE\",\"status\":200}}";
 			channel.writeAndFlush(new TextWebSocketFrame(step2));
 			
-			String step3 = "{\"body\":{\"offsetTime\":89},\"uuid\":\"1657779786620650261\",\"header\":{\"commond\":\"audioPlayStart\",\"type\":\"SOUND_COMPARE\"}}";
+			String step3 = "{\"body\":{\"offsetTime\":1302},\"uuid\":\"1657779786620650261\",\"header\":{\"commond\":\"audioPlayStart\",\"type\":\"SOUND_COMPARE\"}}";
 			channel.writeAndFlush(new TextWebSocketFrame(step3));
 			
 			//step4 发送wav
-			String fileName = "/扬基歌_速度88.wav";
+			String fileName = "/悬崖上的金鱼姬1 速度160.wav";
 			AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(FileUtils.toFile(WebSocketClientHandler.class.getResource(fileName)));
 			
 			AudioFormat baseFormat = audioInputStream.getFormat();
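Only the test fixture changes here: 扬基歌 at speed 88 is replaced by 悬崖上的金鱼姬1 at speed 160, and offsetTime is adjusted from 89 to 1302 for the new recording. The handshake itself is unchanged: push the score JSON, send recordStart, send audioPlayStart with the measured offset, then stream the WAV. A sketch of that sequence against an already-connected channel; the file path and method name are placeholders, the misspelled field name commond is copied verbatim from the frames above, and the uuid field is omitted:

import java.io.File;
import org.apache.commons.io.FileUtils;
import io.netty.channel.Channel;
import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;

public class SoundCompareHandshakeSketch {
    static void start(Channel channel, File scoreJson, long offsetTime) throws Exception {
        // step1: push the score JSON to the server
        channel.writeAndFlush(new TextWebSocketFrame(FileUtils.readFileToString(scoreJson)));

        // step2: announce that recording is starting
        channel.writeAndFlush(new TextWebSocketFrame(
                "{\"header\":{\"commond\":\"recordStart\",\"type\":\"SOUND_COMPARE\",\"status\":200}}"));

        // step3: report when playback actually started
        channel.writeAndFlush(new TextWebSocketFrame(
                "{\"body\":{\"offsetTime\":" + offsetTime + "},"
                + "\"header\":{\"commond\":\"audioPlayStart\",\"type\":\"SOUND_COMPARE\"}}"));

        // step4 (not shown here): stream the WAV as binary frames, as NettyClient does above.
    }
}
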

File diff suppressed because it is too large
+ 0 - 0
audio-analysis/src/test/resources/悬崖上的金鱼姬1 速度160.json


Some files were not shown because too many files changed in this diff