yonge 3 years ago
Parent
Current commit
9a6835aed7

+ 17 - 6
audio-analysis/src/main/java/com/yonge/nettty/dto/UserChannelContext.java

@@ -27,7 +27,9 @@ public class UserChannelContext {
 	private AtomicInteger currentMusicSectionIndex = new AtomicInteger(0);
 
 	// 缓存字节数据
-	private byte[] bufferBytes = new byte[0];
+	private byte[] channelBufferBytes = new byte[0];
+	
+	private byte[] noteBufferBytes = new byte[0];
 
 	public ConcurrentHashMap<Integer, MusicXmlBasicInfo> getSongMusicXmlMap() {
 		return songMusicXmlMap;
@@ -66,7 +68,8 @@ public class UserChannelContext {
 		waveFileProcessor = null;
 		currentNoteIndex.set(0);
 		currentMusicSectionIndex.set(0);
-		bufferBytes = new byte[0];
+		channelBufferBytes = new byte[0];
+		noteBufferBytes = new byte[0];
 	}
 
 	public MusicXmlNote getCurrentMusicNote(Integer songId) {
@@ -146,12 +149,20 @@ public class UserChannelContext {
 		return -1;
 	}
 
-	public byte[] getBufferBytes() {
-		return bufferBytes;
+	public byte[] getChannelBufferBytes() {
+		return channelBufferBytes;
 	}
 
-	public void setBufferBytes(byte[] bufferBytes) {
-		this.bufferBytes = bufferBytes;
+	public void setChannelBufferBytes(byte[] channelBufferBytes) {
+		this.channelBufferBytes = channelBufferBytes;
+	}
+
+	public byte[] getNoteBufferBytes() {
+		return noteBufferBytes;
+	}
+
+	public void setNoteBufferBytes(byte[] noteBufferBytes) {
+		this.noteBufferBytes = noteBufferBytes;
 	}
 
 }
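
The change above splits the old bufferBytes field into two buffers: channelBufferBytes collects raw audio arriving on the WebSocket channel, while noteBufferBytes collects the bytes belonging to the note currently being evaluated. A minimal sketch of the intended append-and-drain usage, assuming the accessors shown in this diff; mergeBytes is a hypothetical stand-in for ArrayUtil.mergeByte and drainThreshold is an illustrative parameter, neither is part of the commit.

    // Sketch only: append-and-drain pattern for the two buffers introduced above.
    static byte[] mergeBytes(byte[] a, byte[] b) {
        // Hypothetical stand-in for ArrayUtil.mergeByte: concatenates two byte arrays.
        byte[] merged = new byte[a.length + b.length];
        System.arraycopy(a, 0, merged, 0, a.length);
        System.arraycopy(b, 0, merged, a.length, b.length);
        return merged;
    }

    static void onAudioFrame(UserChannelContext context, byte[] incoming, int drainThreshold) {
        // Append the new WebSocket payload to the per-channel buffer.
        byte[] total = mergeBytes(context.getChannelBufferBytes(), incoming);
        context.setChannelBufferBytes(total);

        if (total.length >= drainThreshold) {
            // Once enough data has accumulated, clear the channel buffer and hand the
            // drained bytes to the pitch-detection stage, which in turn fills
            // noteBufferBytes (see the handler diff below).
            context.setChannelBufferBytes(new byte[0]);
        }
    }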

+ 47 - 39
audio-analysis/src/main/java/com/yonge/netty/server/messagehandler/BinaryWebSocketFrameHandler.java

@@ -66,7 +66,7 @@ public class BinaryWebSocketFrameHandler extends SimpleChannelInboundHandler<Bin
 	/**
 	 * @describe 采样大小
 	 */
-	private int bufferSize = 1024 * 4;
+	private int bufferSize = 1024 * 2;
 	/**
 	 * @describe 帧覆盖大小
 	 */
@@ -114,7 +114,7 @@ public class BinaryWebSocketFrameHandler extends SimpleChannelInboundHandler<Bin
 		Channel channel = ctx.channel();
 
 		ByteBuf buf = frame.content().retain();
-		
+
 		try {
 			byte[] datas = ByteBufUtil.getBytes(buf);
 
@@ -138,58 +138,66 @@ public class BinaryWebSocketFrameHandler extends SimpleChannelInboundHandler<Bin
 			// LOGGER.info("服务器接收到的音频消息长度[{}]", datas.length);
 
 			// 粘合数据
-			byte[] totalBytes = ArrayUtil.mergeByte(channelContext.getBufferBytes(), datas);
-			channelContext.setBufferBytes(totalBytes);
+			byte[] totalBytes = ArrayUtil.mergeByte(channelContext.getChannelBufferBytes(), datas);
+			channelContext.setChannelBufferBytes(totalBytes);
 
-			// 获取当前音符信息
-			MusicXmlNote musicXmlNote = channelContext.getCurrentMusicNote(null);
+			if (totalBytes.length >= bufferSize * audioFormat.getFrameSize()) {
 
-			if (musicXmlNote == null) {
-				return;
-			}
+				// 剩余未处理的数据
+				channelContext.setChannelBufferBytes(new byte[0]);
 
-			// 计算当前音符的数据长度 公式:数据量(字节/秒)= 采样频率(Hz)× (采样位数(bit)/ 8) × 声道数
-			int length = (int) (audioFormat.getSampleRate() * (audioFormat.getSampleSizeInBits() / 8) * channels * musicXmlNote.getDuration() / 1000);
+				AudioDispatcher dispatcher = AudioDispatcherFactory.fromByteArray(totalBytes, audioFormat, bufferSize, overlap);
 
-			if (channelContext.getCurrentMusicNoteIndex() <= channelContext.getTotalMusicNoteIndexNum(null) && totalBytes.length >= length) {
-				// 处理当前音符
-				byte[] noteByteData = new byte[length];
-				System.arraycopy(totalBytes, 0, noteByteData, 0, length);
+				dispatcher.addAudioProcessor(new PitchProcessor(algorithm, sampleRate, bufferSize, new PitchDetectionHandler() {
 
-				float[] noteFloatData = new float[length / audioFormat.getFrameSize()];
+					@Override
+					public void handlePitch(PitchDetectionResult pitchDetectionResult, AudioEvent audioEvent) {
 
-				converter.toFloatArray(noteByteData, noteFloatData);
+						// 获取字节流
+						int byteOverlap = audioEvent.getOverlap() * audioFormat.getFrameSize();
+						int byteStepSize = audioEvent.getBufferSize() * audioFormat.getFrameSize() - byteOverlap;
+						byte[] bufferBytes = ArrayUtils.subarray(audioEvent.getByteBuffer(), byteOverlap, byteStepSize);
 
-				// 获取频率数据
-				float pitch = getPitch(noteFloatData, bufferSize);
+						// 粘合音符数据
+						byte[] totalNoteBytes = ArrayUtil.mergeByte(channelContext.getNoteBufferBytes(), bufferBytes);
+						channelContext.setNoteBufferBytes(totalNoteBytes);
 
-				LOGGER.info("第{}个音符的样本频率:{} 实际频率:{}", channelContext.getCurrentMusicNoteIndex(), musicXmlNote.getFrequency(), pitch);
+						LOGGER.info("新增字节数:{} 最新剩余字节长度:{} 频率:{}", bufferBytes.length, totalNoteBytes.length, pitchDetectionResult.getPitch());
 
-				// 准备处理下一个音符
-				channelContext.incrementMusicNoteIndex();
-				// 剩余未处理的数据
-				channelContext.setBufferBytes(ArrayUtil.extractByte(totalBytes, length, totalBytes.length - 1));
-			}
-			
+						// 获取当前音符信息
+						MusicXmlNote musicXmlNote = channelContext.getCurrentMusicNote(null);
+
+						if (musicXmlNote == null) {
+							return;
+						}
+
+						// 计算当前音符的数据长度 公式:数据量(字节/秒)= 采样频率(Hz)× (采样位数(bit)/ 8) × 声道数
+						int length = (int) (audioFormat.getSampleRate() * (audioFormat.getSampleSizeInBits() / 8) * channels * musicXmlNote.getDuration() / 1000);
 
-			AudioDispatcher dispatcher = AudioDispatcherFactory.fromByteArray(datas, audioFormat, bufferSize, overlap);
+						if (channelContext.getCurrentMusicNoteIndex() <= channelContext.getTotalMusicNoteIndexNum(null) && totalNoteBytes.length >= length) {
+							// 处理当前音符
+							byte[] noteByteData = new byte[length];
+							System.arraycopy(totalNoteBytes, 0, noteByteData, 0, length);
 
-			dispatcher.addAudioProcessor(new PitchProcessor(algorithm, sampleRate, bufferSize, new PitchDetectionHandler() {
+							float[] noteFloatData = new float[length / audioFormat.getFrameSize()];
 
-				@Override
-				public void handlePitch(PitchDetectionResult pitchDetectionResult, AudioEvent audioEvent) {
+							converter.toFloatArray(noteByteData, noteFloatData);
 
-					// 获取字节流
-					int byteOverlap = audioEvent.getOverlap() * audioFormat.getFrameSize();
-					int byteStepSize = audioEvent.getBufferSize() * audioFormat.getFrameSize() - byteOverlap;
-					byte[] acceptDatas = ArrayUtils.subarray(audioEvent.getByteBuffer(), byteOverlap, byteStepSize);
+							// 获取频率数据
+							float pitch = getPitch(noteFloatData, bufferSize);
 
-					LOGGER.info("新增字节数:{} 最新剩余字节长度:{}", acceptDatas.length, totalBytes.length);
+							LOGGER.info("第{}个音符的样本频率:{} 实际频率:{}", channelContext.getCurrentMusicNoteIndex(), musicXmlNote.getFrequency(), pitch);
+
+							// 准备处理下一个音符
+							channelContext.incrementMusicNoteIndex();
+							// 剩余未处理的数据
+							channelContext.setNoteBufferBytes(ArrayUtil.extractByte(totalNoteBytes, length - 1, totalNoteBytes.length - 1));
+						}
+					}
+				}));
+				dispatcher.run();
+			}
 
-					
-				}
-			}));
-			dispatcher.run();
 		} finally {
 			buf.release();
 		}