
Merge remote-tracking branch 'origin/saas' into saas

zouxuan 2 years ago
parent
commit
798a8a4e9b

+ 1 - 1
audio-analysis/src/main/java/com/yonge/netty/dto/HardLevelEnum.java

@@ -22,7 +22,7 @@ public enum HardLevelEnum implements BaseEnum<String, HardLevelEnum> {
 	 * Tempo effective range (whole note), tempo effective range (half note), tempo effective range (quarter note), tempo effective range (eighth note), tempo effective range (16th note), tempo effective range (32nd note)<br>
 	 * Completion range, unplayed range
 	 */
-	PERFORMER("大师级", 3, 3, 5, 5, 10, 10, 13, 15, 95, 10);
+	PERFORMER("大师级", 3, 3, 3, 5, 10, 10, 13, 15, 95, 10);
 
 	private String msg;
 

+ 53 - 26
audio-analysis/src/main/java/com/yonge/netty/dto/UserChannelContext.java

@@ -2,6 +2,7 @@ package com.yonge.netty.dto;
 
 import java.math.BigDecimal;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
@@ -50,6 +51,8 @@ public class UserChannelContext {
 	
 	private float offsetMS;
 	
+	private float micDelayMS;
+	
 	private double dynamicOffset;
 	
 	private String platform;
@@ -161,6 +164,14 @@ public class UserChannelContext {
 		this.offsetMS = offsetMS;
 	}
 
+	public float getMicDelayMS() {
+		return micDelayMS;
+	}
+
+	public void setMicDelayMS(float micDelayMS) {
+		this.micDelayMS = micDelayMS;
+	}
+
 	public float getBeatDuration() {
 		return beatDuration;
 	}
@@ -377,16 +388,14 @@ public class UserChannelContext {
 			}
 			totalChunkAnalysisList.add(chunkAnalysis);
 			
-			boolean flag = false; // whether a valid signal was received
-			if(!StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())){
-				flag = chunkAnalysis.getAmplitude() > hardLevel.getAmplitudeThreshold();
+			// whether a valid signal was received
+			/*if(!StringUtils.equalsIgnoreCase(evaluationCriteria, EvaluationCriteriaEnum.FREQUENCY.getCode())){
+				delayProcessed = chunkAnalysis.getAmplitude() > hardLevel.getAmplitudeThreshold();
 			}else{
-				flag = chunkAnalysis.getFrequency() > MIN_FREQUECY && chunkAnalysis.getFrequency() < MAX_FREQUECY;
+				delayProcessed = chunkAnalysis.getFrequency() > MIN_FREQUECY && chunkAnalysis.getFrequency() < MAX_FREQUECY;
 			}
 			
-			if(delayProcessed == false && flag){
-				
-				delayProcessed = true;
+			if(delayProcessed){
 				
 				// compute the delay offset value
 				//playTime = musicXmlNote.getTimeStamp() + durationTime;
@@ -394,9 +403,11 @@ public class UserChannelContext {
 				if(100 * dynamicOffset / musicXmlNote.getDuration() > (100 - hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration()))){
 					dynamicOffset = 0;
 				}
-			}
+			}*/
 			
-			if (playTime >= (musicXmlNote.getDuration() + musicXmlNote.getTimeStamp() + dynamicOffset)) {
+			if (playTime >= (musicXmlNote.getDuration() + micDelayMS + musicXmlNote.getTimeStamp() + dynamicOffset)) {
+				
+				musicXmlNote.setTimeStamp(musicXmlNote.getTimeStamp() + micDelayMS);
 
 				if (musicXmlNote.getDontEvaluating()) {
 					noteAnalysis.setIgnore(true);
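
The two added lines above push each note's evaluation window back by micDelayMS and then shift the note's timestamp by the same amount, so downstream per-chunk comparisons line up with the delayed audio stream. A worked illustration with hypothetical numbers:

public class MicDelayWindowDemo {
	public static void main(String[] args) {
		// illustrative values: note scheduled at 1000 ms, 500 ms long, measured mic delay 150 ms
		double timeStamp = 1000, duration = 500, micDelayMS = 150, dynamicOffset = 0;
		double windowEnd = duration + micDelayMS + timeStamp + dynamicOffset;
		System.out.println(windowEnd); // 1650.0 -- scoring waits until playTime reaches this
		// afterwards the note's stored timestamp becomes 1000 + 150 = 1150 ms,
		// aligning subsequent per-chunk comparisons with the delayed audio
	}
}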
@@ -732,7 +743,7 @@ public class UserChannelContext {
 		
 		double firstChunkStartTime = firstChunkAnalysis.getStartTime();
 		
-		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) < Double.doubleToLongBits(firstChunkStartTime)).findFirst();
+		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) < Double.doubleToLongBits(firstChunkStartTime)).reduce((first, second) -> second);
 
 		ChunkAnalysis lastChunkAnalysis = null;
 		if (chunkAnalysisOptional.isPresent()) {
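
This hunk, and the matching one further down, replace findFirst() with reduce((first, second) -> second): on an ordered stream the reduction keeps overwriting its accumulator, so it yields the last chunk that ends before the note start (the chunk immediately preceding the note) rather than the first. A minimal sketch of the idiom; the list and cutoff here are illustrative, not from the codebase:

import java.util.List;
import java.util.Optional;

public class LastMatchDemo {
	public static void main(String[] args) {
		List<Double> endTimes = List.of(10.0, 20.0, 30.0, 40.0);
		double noteStart = 35.0;
		// findFirst() would return 10.0; the reduction keeps the last match, 30.0,
		// i.e. the chunk immediately preceding the note start
		Optional<Double> lastBefore = endTimes.stream()
				.filter(t -> t < noteStart)
				.reduce((first, second) -> second);
		System.out.println(lastBefore.orElse(-1.0)); // 30.0
	}
}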
@@ -879,6 +890,8 @@ public class UserChannelContext {
 			return false;
 		}
 		
+		reduceNoise(chunkList, EvaluationCriteriaEnum.AMPLITUDE);
+		
 		ChunkAnalysis firstChunkAnalysis = chunkAnalysisList.get(0);
 		
 		LOGGER.debug("-------startTime:{}  endTime:{}------", firstChunkAnalysis.getStartTime(), chunkList.get(chunkList.size() - 1)
@@ -890,18 +903,15 @@ public class UserChannelContext {
 			return chunkList.stream().filter(t -> t.getAmplitude() > hardLevel.getAmplitudeThreshold()).count() <= 0;
 		}
 		
-		//Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) < Double.doubleToLongBits(firstChunkAnalysis.getStartTime())).findFirst();
 		Optional<ChunkAnalysis> chunkAnalysisOptional = totalChunkAnalysisList.stream().filter(t -> Double.doubleToLongBits(t.getEndTime()) < Double.doubleToLongBits(firstChunkAnalysis.getStartTime())).reduce((first, second) -> second);
 
-		ChunkAnalysis lastChunkAnalysis = null;
+		ChunkAnalysis lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
 		if (chunkAnalysisOptional.isPresent()) {
 			lastChunkAnalysis = chunkAnalysisOptional.get();
 		}
-		if(lastChunkAnalysis == null){
-			lastChunkAnalysis = new ChunkAnalysis(0, 0, -1, 0, 0, 0);
-		}
 		
 		List<Integer> chunkAmplitudeList = chunkList.stream().map(ChunkAnalysis::getAmplitude).collect(Collectors.toList());
+		
 
 		chunkAmplitudeList.add(0, lastChunkAnalysis.getAmplitude());
 		
@@ -913,14 +923,17 @@ public class UserChannelContext {
 		int firstPeakIndex = -1;
 		int firstPeakValue = 0;
 		int peakSize = 0;
+		
+		//int range = hardLevel.getAmplitudeThreshold();
+		int range = 10;
 
 		for (int i = 1; i < chunkAmplitudeList.size(); i++) {
-			if (chunkAmplitudeList.get(i - 1) + hardLevel.getAmplitudeThreshold() >= chunkAmplitudeList.get(i)) {
+			if (chunkAmplitudeList.get(i - 1) + range >= chunkAmplitudeList.get(i)) {
 				isContinue = false;
 				continue;
 			}
 
-			if(isContinue == false && chunkAmplitudeList.get(i - 1) + hardLevel.getAmplitudeThreshold() < chunkAmplitudeList.get(i)){
+			if(isContinue == false && chunkAmplitudeList.get(i - 1) + range < chunkAmplitudeList.get(i)){
 				isContinue = true;
 				peakSize++;
 				
@@ -966,7 +979,7 @@ public class UserChannelContext {
 		
 		if (tempo) {
 			// check the entry time point
-			if((firstPeakIndex - 1) * 100 /chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration()) * 2){
+			if((firstPeakIndex - 1) * 100 /chunkAmplitudeList.size() > hardLevel.getTempoEffectiveRange(musicXmlNote.getDenominator(), musicXmlNote.getDuration())){
 				LOGGER.debug("超过范围:{}", (firstPeakIndex - 1) * 100 /chunkAmplitudeList.size());
 				tempo = false;
 			}
@@ -1025,16 +1038,30 @@ public class UserChannelContext {
 		//return chunkAnalysisList.get(chunkAnalysisList.size() - 1).getEndTime();
 		return musicXmlNote.getTimeStamp() + dynamicOffset;
 	}
+
+	private void reduceNoise(List<ChunkAnalysis> chunkAnalysisList, EvaluationCriteriaEnum criteria) {
+
+		ChunkAnalysis chunkAnalysis = null;
+
+		for (int i = 1; i < chunkAnalysisList.size(); i++) {
+			if (i < chunkAnalysisList.size() - 1) {
+				chunkAnalysis = chunkAnalysisList.get(i);
+
+				if (EvaluationCriteriaEnum.AMPLITUDE == criteria) {
+					if (chunkAnalysisList.get(i - 1).getAmplitude() == 0 && chunkAnalysisList.get(i + 1).getAmplitude() == 0
+							&& chunkAnalysis.getAmplitude() > 0) {
+						
+						chunkAnalysis.setAmplitude(0);
+						//chunkAnalysisList.set(i, chunkAnalysis);
+					}
+				}
+			}
+		}
+		
+	}
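
The new reduceNoise pass is a one-dimensional despeckle: a chunk whose amplitude is non-zero while both neighbours are zero is treated as an isolated spike and zeroed (only the AMPLITUDE branch is implemented; the criteria parameter leaves room for a FREQUENCY variant). A standalone sketch of the same filter over plain ints, assuming that reading of the code:

import java.util.Arrays;

public class DespeckleDemo {
	public static void main(String[] args) {
		int[] amplitudes = {0, 7, 0, 3, 4, 0};
		// zero out isolated non-zero samples surrounded by silence on both sides
		for (int i = 1; i < amplitudes.length - 1; i++) {
			if (amplitudes[i - 1] == 0 && amplitudes[i + 1] == 0 && amplitudes[i] > 0) {
				amplitudes[i] = 0;
			}
		}
		System.out.println(Arrays.toString(amplitudes)); // [0, 0, 0, 3, 4, 0]
	}
}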
+	
 	
 	public static void main(String[] args) {
-		double[] midi = new double[128];;
-		int standardPitch = 440; // a is 440 hz...
-		for (int x = 0; x < midi.length; ++x)
-		{
-			// transposition
-		   midi[x] = new BigDecimal(standardPitch).multiply(new BigDecimal(Math.pow(2, new BigDecimal(x-69).divide(new BigDecimal(12),6,BigDecimal.ROUND_HALF_UP).doubleValue()))).doubleValue();
-		   System.out.println("x=" + x +"  "+ midi[x]);
-		}
 		
 	}
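
The main() body removed here was a debug dump of the equal-tempered MIDI pitch table, f(x) = 440 · 2^((x − 69) / 12) with A4 = 440 Hz at MIDI index 69. For reference, a plain sketch of what the deleted loop computed, without the BigDecimal ceremony:

public class MidiTable {
	public static void main(String[] args) {
		double standardPitch = 440.0; // A4, MIDI note 69
		for (int x = 0; x < 128; x++) {
			// equal temperament: each semitone is a factor of 2^(1/12)
			double frequency = standardPitch * Math.pow(2, (x - 69) / 12.0);
			System.out.println("x=" + x + "  " + frequency);
		}
	}
}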
 	

+ 14 - 3
audio-analysis/src/main/java/com/yonge/netty/dto/WebSocketResponse.java

@@ -13,8 +13,8 @@ public class WebSocketResponse<T> {
 		this.body = body;
 	}
 
-	public WebSocketResponse(String command, T body) {
-		this.header = new Head(command, HttpStatus.OK.value());
+	public WebSocketResponse(String type, String command, T body) {
+		this.header = new Head(type, command, HttpStatus.OK.value());
 		this.body = body;
 	}
 
@@ -37,12 +37,15 @@ public class WebSocketResponse<T> {
 	public static class Head {
 		private int status = HttpStatus.OK.value();
 		private String commond = "";
+		
+		private String type;
 
 		public Head() {
 
 		}
 
-		public Head(String commond, int status) {
+		public Head(String type, String commond, int status) {
+			this.type = type;
 			this.commond = commond;
 			this.status = status;
 		}
@@ -63,5 +66,13 @@ public class WebSocketResponse<T> {
 			this.commond = commond;
 		}
 
+		public String getType() {
+			return type;
+		}
+
+		public void setType(String type) {
+			this.type = type;
+		}
+
 	}
 }
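
With the new type field, every outgoing frame carries the handler action alongside the command (note the field is spelled commond in the source, so that spelling is what appears on the wire). A sketch of the new three-argument construction and, judging from the NettyClient test frames further down, the JSON it presumably serializes to:

// inside a MessageHandler whose getAction() returns e.g. "SOUND_COMPARE":
Map<String, Object> params = new HashMap<>();
params.put("frequency", 442);
WebSocketResponse<Map<String, Object>> resp =
		new WebSocketResponse<>("SOUND_COMPARE", "checking", params);
// presumed wire format (default JSON bean serialization assumed):
// {"header":{"commond":"checking","status":200,"type":"SOUND_COMPARE"},"body":{"frequency":442}}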

+ 13 - 7
audio-analysis/src/main/java/com/yonge/netty/server/handler/NettyServerHandler.java

@@ -1,12 +1,5 @@
 package com.yonge.netty.server.handler;
 
-import io.netty.channel.Channel;
-import io.netty.channel.ChannelHandler;
-import io.netty.channel.ChannelHandlerContext;
-import io.netty.channel.ChannelInboundHandlerAdapter;
-import io.netty.handler.codec.http.HttpHeaders;
-import io.netty.handler.codec.http.websocketx.WebSocketServerProtocolHandler;
-
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -14,8 +7,16 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.oauth2.common.OAuth2AccessToken;
 import org.springframework.stereotype.Component;
 
+import com.yonge.netty.server.service.DelayCheckHandler;
 import com.yonge.netty.server.service.UserChannelContextService;
 
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandler;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelInboundHandlerAdapter;
+import io.netty.handler.codec.http.HttpHeaders;
+import io.netty.handler.codec.http.websocketx.WebSocketServerProtocolHandler;
+
 @Component
 @ChannelHandler.Sharable
 public class NettyServerHandler extends ChannelInboundHandlerAdapter {
@@ -27,6 +28,9 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter {
 	
 	@Autowired
 	private UserChannelContextService userChannelContextService;
+	
+	@Autowired
+	private DelayCheckHandler delayCheckHandler;
 
 	@Override
 	public void channelActive(ChannelHandlerContext ctx) {
@@ -38,9 +42,11 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter {
 	public void channelUnregistered(ChannelHandlerContext ctx) {
 		
 		userChannelContextService.remove(ctx.channel());
+		delayCheckHandler.getUserABCMap().remove(ctx.channel());
 		
 		// remove from the channel manager
 		channelManager.remove(ctx.channel());
+		
 	}
 
 	@Override

+ 8 - 8
audio-analysis/src/main/java/com/yonge/netty/server/service/AudioCompareHandler.java

@@ -71,7 +71,7 @@ public class AudioCompareHandler implements MessageHandler {
 	/**
 	 * @describe sample buffer size
 	 */
-	private int bufferSize = 1024 * 2;
+	private int bufferSize = 1024 * 1;
 
 	private boolean signed = true;
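
Halving bufferSize from 2048 to 1024 bytes halves the analysis window: at 44.1 kHz, 16-bit mono, 1024 bytes is 512 samples, roughly 11.6 ms per chunk, trading pitch resolution for lower evaluation latency. A quick sanity check of that figure:

public class ChunkDurationCheck {
	public static void main(String[] args) {
		int bufferSize = 1024;       // bytes per analysis chunk
		float sampleRate = 44100f;   // Hz
		int bytesPerSample = 16 / 8; // 16-bit mono
		double chunkMs = 1000.0 * (bufferSize / bytesPerSample) / sampleRate;
		System.out.println(chunkMs); // ≈ 11.61 ms
	}
}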
 
@@ -222,7 +222,7 @@ public class AudioCompareHandler implements MessageHandler {
 				
 				params.put("totalPlayTimeOfCurrentDate", totalPlayTimeOfCurrentDate);
 				
-				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("overall", params);
+				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>(getAction(), "overall", params);
 
 				nettyChannelManager.sendTextMessage(user, resp);
 			}
@@ -232,12 +232,12 @@ public class AudioCompareHandler implements MessageHandler {
 
 			break;
 		case "audioPlayStart": // ???
-			
+
 			Integer offsetTime = dataObj.getInteger("offsetTime");
-			if(offsetTime != null){
-				channelContext.setOffsetMS(offsetTime);
-				channelContext.setHandlerSwitch(true);
-			}
+			Integer micDelay = dataObj.getInteger("micDelay");
+			channelContext.setMicDelayMS(micDelay + offsetTime);
+			channelContext.setOffsetMS(0);
+			channelContext.setHandlerSwitch(true);
 
 			break;
 		case "videoUpload": // 上传音频
@@ -343,7 +343,7 @@ public class AudioCompareHandler implements MessageHandler {
 				params.put("measureIndex", sectionIndex);
 				params.put("measureRenderIndex", channelContext.getCurrentMusicSection(null, sectionIndex).getMeasureRenderIndex());
 
-				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("measureScore", params);
+				WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>(getAction(), "measureScore", params);
 
 				nettyChannelManager.sendTextMessage(user, resp);
 			}

+ 268 - 0
audio-analysis/src/main/java/com/yonge/netty/server/service/DelayCheckHandler.java

@@ -0,0 +1,268 @@
+package com.yonge.netty.server.service;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+import javax.sound.sampled.AudioFormat;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import com.alibaba.fastjson.JSONObject;
+import com.alibaba.fastjson.JSONPath;
+import com.yonge.audio.analysis.AudioFloatConverter;
+import com.yonge.audio.analysis.detector.YINPitchDetector;
+import com.yonge.audio.utils.ArrayUtil;
+import com.yonge.netty.dto.NoteFrequencyRange;
+import com.yonge.netty.dto.WebSocketResponse;
+import com.yonge.netty.server.handler.NettyChannelManager;
+import com.yonge.netty.server.handler.message.MessageHandler;
+import com.yonge.netty.server.processor.WaveformWriter;
+
+import io.netty.channel.Channel;
+
+@Service
+public class DelayCheckHandler implements MessageHandler {
+
+	private final static Logger LOGGER = LoggerFactory.getLogger(DelayCheckHandler.class);
+
+	private int standardFrequecy = 3000;
+
+	/**
+	 * @describe sample rate
+	 */
+	private float sampleRate = 44100;
+
+	/**
+	 * Size of each sample (bits)
+	 */
+	private int bitsPerSample = 16;
+
+	/**
+	 * Number of channels
+	 */
+	private int channels = 1;
+	
+	private int bufferSize = 1024 * 1;
+
+	private boolean signed = true;
+
+	private boolean bigEndian = false;
+
+	private AudioFormat audioFormat = new AudioFormat(sampleRate, bitsPerSample, channels, signed, bigEndian);
+
+	private AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
+	
+	private boolean isRecWav = false;
+
+	private ConcurrentMap<Channel, UserContext> userABCMap = new ConcurrentHashMap<Channel, UserContext>();
+
+	private String tmpFileDir = "/mdata/soundCompare/";
+
+	private SimpleDateFormat sdf = new SimpleDateFormat("yyMMddHHmmSS");
+
+	@Autowired
+	private NettyChannelManager nettyChannelManager;
+
+	@Override
+	public String getAction() {
+		return "DELAY_CHECK";
+	}
+
+	@Override
+	public boolean handleTextMessage(String userId, Channel channel, String jsonMsg) {
+
+		String command = (String) JSONPath.extract(jsonMsg, "$.header.commond");
+
+		UserContext userContext = null;
+
+		switch (command) {
+		case "recordStart":
+
+			userContext = new UserContext(0, false);
+
+			userABCMap.put(channel, userContext);
+			
+			JSONObject dataObj = (JSONObject) JSONPath.extract(jsonMsg, "$.body");
+			
+			if(dataObj.get("HZ") != null) {
+				String hzStr = dataObj.get("HZ").toString();
+				standardFrequecy = Integer.parseInt(hzStr);
+			}
+
+			break;
+		case "recordEnd":
+
+			userContext = userABCMap.get(channel);
+
+			if (userContext == null) {
+				userContext = new UserContext(0, false);
+			}
+
+			WaveformWriter waveFileProcessor = userContext.getWaveformWriter();
+			if (waveFileProcessor != null) {
+				// write the WAV file header
+				waveFileProcessor.processingFinished();
+			}
+
+			userContext = userABCMap.get(channel);
+
+			if (userContext == null) {
+				userContext = new UserContext(0, false);
+			}
+
+			Map<String, Object> params = new HashMap<String, Object>();
+			params.put("firstNoteDelayDuration", userContext.getDelayDuration());
+
+			WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>(getAction(), command, params);
+
+			nettyChannelManager.sendTextMessage(userId, resp);
+
+			userContext.setDelayDuration(0);
+			userContext.setIsOver(false);
+			userContext.setWaveformWriter(null);
+			userContext.setChannelBufferBytes(new byte[0]);
+			userABCMap.put(channel, userContext);
+			break;
+
+		default:
+			break;
+		}
+
+		return true;
+	}
+
+	@Override
+	public boolean handleBinaryMessage(String userId, Channel channel, byte[] bytes) {
+
+		UserContext userContext = userABCMap.get(channel);
+
+		if (userContext == null) {
+			userContext = new UserContext(0, false);
+		}
+
+		// write the recording to a wav file
+		if (isRecWav) {
+			WaveformWriter waveFileProcessor = userContext.getWaveformWriter();
+			if (waveFileProcessor == null) {
+				File file = new File(tmpFileDir + userId + "_CHECK_" + sdf.format(new Date()) + ".wav");
+				waveFileProcessor = new WaveformWriter(file.getAbsolutePath());
+				userContext.setWaveformWriter(waveFileProcessor);
+
+				userABCMap.put(channel, userContext);
+			}
+			waveFileProcessor.process(bytes);
+		}
+
+		if (userContext.isOver) {
+			return true;
+		}
+		
+		userContext.setChannelBufferBytes(ArrayUtil.mergeByte(userContext.getChannelBufferBytes(), bytes));
+
+		int totalLength = userContext.getChannelBufferBytes().length;
+		
+		while (totalLength >= bufferSize) {
+			byte[] bufferData = ArrayUtil.extractByte(userContext.getChannelBufferBytes(), 0, bufferSize - 1);
+
+			if (bufferSize != totalLength) {
+				userContext.setChannelBufferBytes(ArrayUtil.extractByte(userContext.getChannelBufferBytes(), bufferSize, totalLength - 1));
+			} else {
+				userContext.setChannelBufferBytes(new byte[0]);
+			}
+
+			float[] sampleFloats = new float[bufferSize / 2];
+
+			converter.toFloatArray(bufferData, sampleFloats);
+			
+			YINPitchDetector frequencyDetector = new YINPitchDetector(sampleFloats.length, audioFormat.getSampleRate());
+
+			int playFrequency = (int) frequencyDetector.getFrequency(sampleFloats);
+
+			// int amplitude = (int) Signals.decibels(samples);
+
+			double durationTime = 1000 * (sampleFloats.length * 2) / audioFormat.getSampleRate() / (audioFormat.getSampleSizeInBits() / 8);
+
+			double playTime = userContext.delayDuration;
+
+			playTime += durationTime;
+
+			LOGGER.info("DurationTime:{}	 playFrequency:{}  PlayTime:{}" ,durationTime,playFrequency,playTime);
+			
+			NoteFrequencyRange nfr = new NoteFrequencyRange(440, playFrequency);
+			
+			if (nfr.getMinFrequency() < standardFrequecy && nfr.getMaxFrequency() > standardFrequecy) {
+
+				userContext.setIsOver(true);
+				userABCMap.put(channel, userContext);
+				return true;
+			}
+
+			userContext.setDelayDuration(playTime);
+
+			totalLength = userContext.getChannelBufferBytes().length;
+		}
+
+		return true;
+	}
+
+	public ConcurrentMap<Channel, UserContext> getUserABCMap() {
+		return userABCMap;
+	}
+
+	class UserContext {
+
+		public UserContext(double delayDuration, boolean isOver) {
+			this.delayDuration = delayDuration;
+			this.isOver = isOver;
+		}
+
+		private double delayDuration;
+
+		private boolean isOver;
+		
+		private byte[] channelBufferBytes = new byte[0];
+
+		private WaveformWriter waveformWriter;
+
+		public double getDelayDuration() {
+			return delayDuration;
+		}
+
+		public void setDelayDuration(double delayDuration) {
+			this.delayDuration = delayDuration;
+		}
+
+		public boolean isOver() {
+			return isOver;
+		}
+
+		public void setIsOver(boolean isOver) {
+			this.isOver = isOver;
+		}
+
+		public byte[] getChannelBufferBytes() {
+			return channelBufferBytes;
+		}
+
+		public void setChannelBufferBytes(byte[] channelBufferBytes) {
+			this.channelBufferBytes = channelBufferBytes;
+		}
+
+		public WaveformWriter getWaveformWriter() {
+			return waveformWriter;
+		}
+
+		public void setWaveformWriter(WaveformWriter waveformWriter) {
+			this.waveformWriter = waveformWriter;
+		}
+	}
+
+}
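
Taken together, the handler measures capture delay: the client plays a reference tone (default 3000 Hz, overridable via the HZ field of recordStart), streams raw PCM, and the server accumulates elapsed audio time per chunk until YIN pitch detection brackets the reference frequency, then reports the accumulated time as firstNoteDelayDuration on recordEnd. The measurement granularity is one chunk (~11.6 ms with the 1024-byte buffer). A sketch of the expected exchange, modeled on the NettyClient test frames below and assuming frames are routed to this handler by header.type matching getAction(); values are illustrative:

// 1. announce the reference tone
String recordStart = "{\"header\":{\"commond\":\"recordStart\",\"type\":\"DELAY_CHECK\",\"status\":200},\"body\":{\"HZ\":3000}}";
channel.writeAndFlush(new TextWebSocketFrame(recordStart));
// 2. stream binary PCM frames (16-bit mono, 44.1 kHz) while the tone plays
// 3. finish; the server replies with the measured delay in milliseconds:
//    {"header":{"type":"DELAY_CHECK","commond":"recordEnd","status":200},
//     "body":{"firstNoteDelayDuration":127.9}}
String recordEnd = "{\"header\":{\"commond\":\"recordEnd\",\"type\":\"DELAY_CHECK\",\"status\":200}}";
channel.writeAndFlush(new TextWebSocketFrame(recordEnd));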

+ 1 - 1
audio-analysis/src/main/java/com/yonge/netty/server/service/PitchDetectionHandler.java

@@ -80,7 +80,7 @@ public class PitchDetectionHandler implements MessageHandler {
 		Map<String, Object> params = new HashMap<String, Object>();
 		params.put("frequency", playFrequency);
 
-		WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>("checking", params);
+		WebSocketResponse<Map<String, Object>> resp = new WebSocketResponse<Map<String, Object>>(getAction(), "checking", params);
 
 		nettyChannelManager.sendTextMessage(userId, resp);
 

+ 1 - 1
audio-analysis/src/main/resources/logback-spring.xml

@@ -27,7 +27,7 @@
 		</encoder>
 	</appender>
 
-	<logger name="com.yonge" level="debug" />
+	<logger name="com.yonge" level="info" />
 
 	<!--开发环境:打印控制台 -->
 	<springProfile name="dev">

+ 2 - 2
audio-analysis/src/test/java/com/yonge/netty/client/NettyClient.java

@@ -90,11 +90,11 @@ public class NettyClient {
 			String step2 = "{\"header\":{\"commond\":\"recordStart\",\"type\":\"SOUND_COMPARE\",\"status\":200}}";
 			channel.writeAndFlush(new TextWebSocketFrame(step2));
 			
-			String step3 = "{\"body\":{\"offsetTime\":113},\"uuid\":\"1662715309875118846\",\"header\":{\"commond\":\"audioPlayStart\",\"type\":\"SOUND_COMPARE\"}}";
+			String step3 = "{\"body\":{\"micDelay\":18,\"offsetTime\":178},\"uuid\":\"1662715309875118846\",\"header\":{\"commond\":\"audioPlayStart\",\"type\":\"SOUND_COMPARE\"}}";
 			channel.writeAndFlush(new TextWebSocketFrame(step3));
 			
 			//step4 发送wav
-			String fileName = "/9550.wav";
+			String fileName = "/84.wav";
 			AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(FileUtils.toFile(WebSocketClientHandler.class.getResource(fileName)));
 			
 			AudioFormat baseFormat = audioInputStream.getFormat();

Changes are not shown because the file is too large.
+ 0 - 0
audio-analysis/src/test/resoures/9550.json


BIN
audio-analysis/src/test/resoures/9550.wav


+ 9 - 9
mec-biz/src/main/java/com/ym/mec/biz/dal/dto/MusicScoreSubjectDto.java

@@ -6,8 +6,8 @@ import java.util.List;
 
 public class MusicScoreSubjectDto {
 
-    @ApiModelProperty(value = "声部",required = false)
-    private Integer subjectId;
+//    @ApiModelProperty(value = "声部",required = false)
+//    private Integer subjectId;
 
     @ApiModelProperty(value = "学员编号",required = false)
     private List<Integer> userIdList;
@@ -42,13 +42,13 @@ public class MusicScoreSubjectDto {
         this.musicScoreHomeworkDtoList = musicScoreHomeworkDtoList;
     }
 
-    public Integer getSubjectId() {
-        return subjectId;
-    }
-
-    public void setSubjectId(Integer subjectId) {
-        this.subjectId = subjectId;
-    }
+//    public Integer getSubjectId() {
+//        return subjectId;
+//    }
+//
+//    public void setSubjectId(Integer subjectId) {
+//        this.subjectId = subjectId;
+//    }
 
     public List<Integer> getUserIdList() {
         return userIdList;

+ 1 - 1
mec-biz/src/main/java/com/ym/mec/biz/service/impl/ExtracurricularExercisesServiceImpl.java

@@ -143,7 +143,7 @@ public class ExtracurricularExercisesServiceImpl extends BaseServiceImpl<Long, E
                     throw new BizException("有部分声部未选择曲目");
                 }
                 for (StudentLessonTrainingDetailWrapper.AddStudentLessonTrainingDetail studentLessonTrainingDetail : musicScoreSubjectDto.getStudentLessonTrainingDetails()) {
-                    studentLessonTrainingDetail.setSubjectId(musicScoreSubjectDto.getSubjectId());
+                    studentLessonTrainingDetail.setSubjectId(0);
                 }
                 // 作业详情
                 studentLessonTrainingDetailList.addAll(studentLessonTrainingDetailService

+ 1 - 1
mec-biz/src/main/java/com/ym/mec/biz/service/impl/TeacherAttendanceServiceImpl.java

@@ -422,7 +422,7 @@ public class TeacherAttendanceServiceImpl extends BaseServiceImpl<Long, TeacherA
                             }
 
                             for (StudentLessonTrainingDetailWrapper.AddStudentLessonTrainingDetail studentLessonTrainingDetail : scoreSubjectDto.getStudentLessonTrainingDetails()) {
-                                studentLessonTrainingDetail.setSubjectId(scoreSubjectDto.getSubjectId());
+                                studentLessonTrainingDetail.setSubjectId(0);
                             }
                             studentLessonTrainingDetails.addAll(studentLessonTrainingDetailService
                                                                     .homeWorkDetail(scoreSubjectDto.getStudentLessonTrainingDetails(), userIdList,

Too many files were changed in this commit, so some files are not shown.