yonge 2 years ago
Parent commit
1e3f7d7862

+ 3 - 3
audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext.java

@@ -366,8 +366,6 @@ public class UserChannelContext {
 			return;
 		}
 		
-		musicXmlNote.setTimeStamp(musicXmlNote.getTimeStamp() + micDelayMS);
-		
 		// Fetch the note currently being processed
 		NoteAnalysis noteAnalysis = getProcessingNote();
 		if(noteAnalysis == null || noteAnalysis.getDurationTime() == 0) {
@@ -406,7 +404,9 @@ public class UserChannelContext {
 				}
 			}*/
 			
-			if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset)) {
+			if (playTime >= (musicXmlNote.getDuration() + micDelayMS + musicXmlNote.getTimeStamp() + dynamicOffset)) {
+				
+				musicXmlNote.setTimeStamp(musicXmlNote.getTimeStamp() + micDelayMS);
 
 				if (musicXmlNote.getDontEvaluating()) {
 					noteAnalysis.setIgnore(true);

+ 14 - 3
audio-analysis/src/main/java/com/yonge/netty/dto/WebSocketResponse.java

@@ -13,8 +13,8 @@ public class WebSocketResponse<T> {
 		this.body = body;
 	}
 
-	public WebSocketResponse(String command, T body) {
-		this.header = new Head(command, HttpStatus.OK.value());
+	public WebSocketResponse(String type, String command, T body) {
+		this.header = new Head(type, command, HttpStatus.OK.value());
 		this.body = body;
 	}
 
@@ -37,12 +37,15 @@ public class WebSocketResponse<T> {
 	public static class Head {
 		private int status = HttpStatus.OK.value();
 		private String commond = "";
+		
+		private String type;
 
 		public Head() {
 
 		}
 
-		public Head(String commond, int status) {
+		public Head(String type, String commond, int status) {
+			this.type = type;
 			this.commond = commond;
 			this.status = status;
 		}
@@ -63,5 +66,13 @@ public class WebSocketResponse<T> {
 			this.commond = commond;
 		}
 
+		public String getType() {
+			return type;
+		}
+
+		public void setType(String type) {
+			this.type = type;
+		}
+
 	}
 }
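
The new three-argument constructor carries a type field in the response header alongside the command, so a client can tell which handler (action) produced a message. A minimal usage sketch, assuming fastjson (already used elsewhere in this module) for serialization; the "SOUND_COMPARE" action string and the parameter values are made up for illustration:

	import java.util.HashMap;
	import java.util.Map;

	import com.alibaba.fastjson.JSON;
	import com.yonge.netty.dto.WebSocketResponse;

	public class WebSocketResponseSketch {

		public static void main(String[] args) {
			Map<String, Object> params = new HashMap<>();
			params.put("frequency", 440);

			// In the real handlers the first argument is getAction(); "SOUND_COMPARE" is a stand-in here.
			WebSocketResponse<Map<String, Object>> resp =
					new WebSocketResponse<>("SOUND_COMPARE", "checking", params);

			// Expected JSON shape (field names taken from the diff):
			// {"body":{"frequency":440},"header":{"commond":"checking","status":200,"type":"SOUND_COMPARE"}}
			System.out.println(JSON.toJSONString(resp));
		}
	}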

+ 13 - 7
audio-analysis/src/main/java/com/yonge/netty/server/handler/NettyServerHandler.java

@@ -1,12 +1,5 @@
 package com.yonge.netty.server.handler;
 
-import io.netty.channel.Channel;
-import io.netty.channel.ChannelHandler;
-import io.netty.channel.ChannelHandlerContext;
-import io.netty.channel.ChannelInboundHandlerAdapter;
-import io.netty.handler.codec.http.HttpHeaders;
-import io.netty.handler.codec.http.websocketx.WebSocketServerProtocolHandler;
-
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -14,8 +7,16 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.oauth2.common.OAuth2AccessToken;
 import org.springframework.stereotype.Component;
 
+import com.yonge.netty.server.service.DelayCheckHandler;
 import com.yonge.netty.server.service.UserChannelContextService;
 
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandler;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelInboundHandlerAdapter;
+import io.netty.handler.codec.http.HttpHeaders;
+import io.netty.handler.codec.http.websocketx.WebSocketServerProtocolHandler;
+
 @Component
 @ChannelHandler.Sharable
 public class NettyServerHandler extends ChannelInboundHandlerAdapter {
@@ -27,6 +28,9 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter {
 	
 	@Autowired
 	private UserChannelContextService userChannelContextService;
+	
+	@Autowired
+	private DelayCheckHandler delayCheckHandler;
 
 	@Override
 	public void channelActive(ChannelHandlerContext ctx) {
@@ -38,9 +42,11 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter {
 	public void channelUnregistered(ChannelHandlerContext ctx) {
 		
 		userChannelContextService.remove(ctx.channel());
+		delayCheckHandler.getUserABCMap().remove(ctx.channel());
 		
 		// Remove from the channel manager
 		channelManager.remove(ctx.channel());
+		
 	}
 
 	@Override

+ 3 - 3
audio-analysis/src/main/java/com/yonge/netty/server/service/AudioCompareHandler.java

@@ -71,7 +71,7 @@ public class AudioCompareHandler implements MessageHandler {
 	/**
 	 * @describe 采样大小
 	 */
-	private int bufferSize = 1024 * 2;
+	private int bufferSize = 1024 * 1;
 
 	private boolean signed = true;
 
@@ -222,7 +222,7 @@ public class AudioCompareHandler implements MessageHandler {
 				
 				params.put("totalPlayTimeOfCurrentDate", totalPlayTimeOfCurrentDate);
 				
-				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("overall", params);
+				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>(getAction(), "overall", params);
 
 				nettyChannelManager.sendTextMessage(user, resp);
 			}
@@ -346,7 +346,7 @@ public class AudioCompareHandler implements MessageHandler {
 				params.put("measureIndex", sectionIndex);
 				params.put("measureRenderIndex", channelContext.getCurrentMusicSection(null, sectionIndex).getMeasureRenderIndex());
 
-				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("measureScore", params);
+				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>(getAction(), "measureScore", params);
 
 				nettyChannelManager.sendTextMessage(user, resp);
 			}

+ 168 - 24
audio-analysis/src/main/java/com/yonge/netty/server/service/DelayCheckHandler.java

@@ -1,28 +1,37 @@
 package com.yonge.netty.server.service;
 
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
 
 import javax.sound.sampled.AudioFormat;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
 
 import com.alibaba.fastjson.JSONPath;
 import com.yonge.audio.analysis.AudioFloatConverter;
 import com.yonge.audio.analysis.detector.YINPitchDetector;
+import com.yonge.audio.utils.ArrayUtil;
 import com.yonge.netty.dto.WebSocketResponse;
 import com.yonge.netty.server.handler.NettyChannelManager;
 import com.yonge.netty.server.handler.message.MessageHandler;
+import com.yonge.netty.server.processor.WaveformWriter;
 
 import io.netty.channel.Channel;
 
+@Service
 public class DelayCheckHandler implements MessageHandler {
-	
+
 	private final static Logger LOGGER = LoggerFactory.getLogger(DelayCheckHandler.class);
-	
-	private final static int MIN_FREQUECY = 43;
+
+	private final static int MIN_FREQUECY = 3000;
 
 	/**
 	 * @describe 采样率
@@ -39,6 +48,8 @@ public class DelayCheckHandler implements MessageHandler {
 	 */
 	private int channels = 1;
 	
+	private int bufferSize = 1024 * 1;
+
 	private boolean signed = true;
 
 	private boolean bigEndian = false;
@@ -46,9 +57,13 @@ public class DelayCheckHandler implements MessageHandler {
 	private AudioFormat audioFormat = new AudioFormat(sampleRate, bitsPerSample, channels, signed, bigEndian);
 
 	private AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
-	
-	private double playTime;
-	
+
+	private ConcurrentMap<Channel, UserContext> userABCMap = new ConcurrentHashMap<Channel, UserContext>();
+
+	private String tmpFileDir = "/mdata/soundCompare/";
+
+	private SimpleDateFormat sdf = new SimpleDateFormat("yyMMddHHmmSS");
+
 	@Autowired
 	private NettyChannelManager nettyChannelManager;
 
@@ -59,51 +74,180 @@ public class DelayCheckHandler implements MessageHandler {
 
 	@Override
 	public boolean handleTextMessage(String userId, Channel channel, String jsonMsg) {
-		
+
 		String command = (String) JSONPath.extract(jsonMsg, "$.header.commond");
-		
+
+		UserContext userContext = null;
+
 		switch (command) {
+		case "recordStart":
+
+			userContext = new UserContext(0, false);
+
+			userABCMap.put(channel, userContext);
+
+			break;
 		case "recordEnd":
-			
+
+			userContext = userABCMap.get(channel);
+
+			if (userContext == null) {
+				userContext = new UserContext(0, false);
+			}
+
+			WaveformWriter waveFileProcessor = userContext.getWaveformWriter();
+			if (waveFileProcessor != null) {
+				// Write the file header
+				waveFileProcessor.processingFinished();
+			}
+
+			userContext = userABCMap.get(channel);
+
+			if (userContext == null) {
+				userContext = new UserContext(0, false);
+			}
+
 			Map<String, Object> params = new HashMap<String, Object>();
-			params.put("firstNoteDelayDuration", playTime);
+			params.put("firstNoteDelayDuration", userContext.getDelayDuration());
 
-			WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>(getAction(), params);
+			WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>(getAction(), command, params);
 
 			nettyChannelManager.sendTextMessage(userId, resp);
+
+			userContext.setDelayDuration(0);
+			userContext.setIsOver(false);
+			userContext.setWaveformWriter(null);
+			userContext.setChannelBufferBytes(new byte[0]);
+			userABCMap.put(channel, userContext);
 			break;
 
 		default:
 			break;
 		}
-		
+
 		return true;
 	}
 
 	@Override
 	public boolean handleBinaryMessage(String userId, Channel channel, byte[] bytes) {
-		
-		float[] samples = new float[bytes.length / 2];
 
-		if (samples.length == 0) {
-			return false;
-		}
+		UserContext userContext = userABCMap.get(channel);
 
-		converter.toFloatArray(bytes, samples);
+		if (userContext == null) {
+			userContext = new UserContext(0, false);
+		}
 
-		YINPitchDetector frequencyDetector = new YINPitchDetector(samples.length, audioFormat.getSampleRate());
+		// Write the recording file
+		WaveformWriter waveFileProcessor = userContext.getWaveformWriter();
+		if (waveFileProcessor == null) {
+			File file = new File(tmpFileDir + userId + "_CHECK_" + sdf.format(new Date()) + ".wav");
+			waveFileProcessor = new WaveformWriter(file.getAbsolutePath());
+			userContext.setWaveformWriter(waveFileProcessor);
+			
+			userABCMap.put(channel, userContext);
+		}
+		waveFileProcessor.process(bytes);
 
-		int playFrequency = (int) frequencyDetector.getFrequency(samples);
-		
-		if(playFrequency > MIN_FREQUECY) {
+		if (userContext.isOver) {
 			return true;
 		}
 		
-		double durationTime = 1000 * (samples.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
+		userContext.setChannelBufferBytes(ArrayUtil.mergeByte(userContext.getChannelBufferBytes(), bytes));
+
+		int totalLength = userContext.getChannelBufferBytes().length;
 		
-		playTime += durationTime;
+		while (totalLength >= bufferSize) {
+			byte[] bufferData = ArrayUtil.extractByte(userContext.getChannelBufferBytes(), 0, bufferSize - 1);
+
+			if (bufferSize != totalLength) {
+				userContext.setChannelBufferBytes(ArrayUtil.extractByte(userContext.getChannelBufferBytes(), bufferSize, totalLength - 1));
+			} else {
+				userContext.setChannelBufferBytes(new byte[0]);
+			}
+
+			float[] sampleFloats = new float[bufferSize / 2];
+
+			converter.toFloatArray(bufferData, sampleFloats);
+			
+			YINPitchDetector frequencyDetector = new YINPitchDetector(sampleFloats.length, audioFormat.getSampleRate());
+
+			int playFrequency = (int) frequencyDetector.getFrequency(sampleFloats);
+
+			// int amplitude = (int) Signals.decibels(samples);
+
+			if (playFrequency > MIN_FREQUECY) {
+
+				userContext.setIsOver(true);
+				userABCMap.put(channel, userContext);
+				return true;
+			}
+
+			double durationTime = 1000 * (sampleFloats.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
+
+			double playTime = userContext.delayDuration;
+
+			playTime += durationTime;
+
+			System.out.println("DurationTime:"+ durationTime +"	 playFrequency:" + playFrequency + "  PlayTime:" + playTime);
+
+			userContext.setDelayDuration(playTime);
+
+			totalLength = userContext.getChannelBufferBytes().length;
+		}
 
 		return true;
 	}
 
+	public ConcurrentMap<Channel, UserContext> getUserABCMap() {
+		return userABCMap;
+	}
+
+	class UserContext {
+
+		public UserContext(double delayDuration, boolean isOver) {
+			this.delayDuration = delayDuration;
+			this.isOver = isOver;
+		}
+
+		private double delayDuration;
+
+		private boolean isOver;
+		
+		private byte[] channelBufferBytes = new byte[0];
+
+		private WaveformWriter waveformWriter;
+
+		public double getDelayDuration() {
+			return delayDuration;
+		}
+
+		public void setDelayDuration(double delayDuration) {
+			this.delayDuration = delayDuration;
+		}
+
+		public boolean isOver() {
+			return isOver;
+		}
+
+		public void setIsOver(boolean isOver) {
+			this.isOver = isOver;
+		}
+
+		public byte[] getChannelBufferBytes() {
+			return channelBufferBytes;
+		}
+
+		public void setChannelBufferBytes(byte[] channelBufferBytes) {
+			this.channelBufferBytes = channelBufferBytes;
+		}
+
+		public WaveformWriter getWaveformWriter() {
+			return waveformWriter;
+		}
+
+		public void setWaveformWriter(WaveformWriter waveformWriter) {
+			this.waveformWriter = waveformWriter;
+		}
+	}
+
 }
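
The rewritten handleBinaryMessage buffers incoming PCM bytes per channel and analyses them in fixed 1024-byte chunks, accumulating each chunk's duration into delayDuration until the YIN detector reports a frequency above MIN_FREQUECY. A standalone sketch of the per-chunk duration arithmetic, assuming signed 16-bit mono PCM at 44100 Hz (the sampleRate field's value is not visible in this diff, so 44100 is an assumption):

	public class ChunkDurationSketch {

		public static void main(String[] args) {
			int bufferSize = 1024;      // bytes per analysis chunk (1024 * 1 in the handler)
			int bitsPerSample = 16;     // signed 16-bit samples
			float sampleRate = 44100f;  // assumed sample rate

			int sampleCount = bufferSize / (bitsPerSample / 8); // 512 samples per chunk

			// Mirrors: 1000 * (sampleFloats.length * 2) / sampleRate / (sampleSizeInBits / 8)
			double durationMs = 1000.0 * (sampleCount * 2) / sampleRate / (bitsPerSample / 8);

			// Roughly 11.6 ms of audio per chunk; delayDuration grows by this amount
			// for every chunk analysed before the first detected note.
			System.out.printf("%d samples ≈ %.2f ms per chunk%n", sampleCount, durationMs);
		}
	}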

+ 1 - 1
audio-analysis/src/main/java/com/yonge/netty/server/service/PitchDetectionHandler.java

@@ -80,7 +80,7 @@ public class PitchDetectionHandler implements MessageHandler {
 		Map<String, Object> params = new HashMap<String, Object>();
 		params.put("frequency", playFrequency);
 
-		WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("checking", params);
+		WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>(getAction(), "checking", params);
 
 		nettyChannelManager.sendTextMessage(userId, resp);