@@ -66,11 +66,11 @@ public class BinaryWebSocketFrameHandler extends SimpleChannelInboundHandler<Bin
/**
* @describe Sampling size
*/
- private int sampleSize = 1024 * 4;
+ private int bufferSize = 1024 * 4;
/**
* @describe Frame overlap size
*/
- private int overlap = 256;
+ private int overlap = 0;
private boolean signed = true;
@@ -83,7 +83,7 @@ public class BinaryWebSocketFrameHandler extends SimpleChannelInboundHandler<Bin
private PitchEstimationAlgorithm algorithm = PitchProcessor.PitchEstimationAlgorithm.FFT_YIN;
- private PitchDetector pitchDetector = algorithm.getDetector(sampleRate, sampleSize);
+ private PitchDetector pitchDetector = algorithm.getDetector(sampleRate, bufferSize);
/**
* @describe Effective decibel level
@@ -114,75 +114,85 @@ public class BinaryWebSocketFrameHandler extends SimpleChannelInboundHandler<Bin
Channel channel = ctx.channel();
ByteBuf buf = frame.content().retain();
+
+ try {
+ byte[] datas = ByteBufUtil.getBytes(buf);
- byte[] datas = ByteBufUtil.getBytes(buf);
+ String user = nettyChannelManager.getUser(channel);
- String user = nettyChannelManager.getUser(channel);
+ UserChannelContext channelContext = userChannelContextService.getChannelContext(channel);
- UserChannelContext channelContext = userChannelContextService.getChannelContext(channel);
+ if (channelContext == null) {
+ return;
+ }
- if (channelContext == null) {
- return;
- }
-
- // Write the recording file
- WaveformWriter waveFileProcessor = channelContext.getWaveFileProcessor();
- if (waveFileProcessor == null) {
- File file = new File(tmpFileDir + user + "_" + System.currentTimeMillis() + ".wav");
- waveFileProcessor = new WaveformWriter(file.getAbsolutePath());
- channelContext.setWaveFileProcessor(waveFileProcessor);
- }
- waveFileProcessor.process(datas);
+ // Write the recording file
+ WaveformWriter waveFileProcessor = channelContext.getWaveFileProcessor();
+ if (waveFileProcessor == null) {
+ File file = new File(tmpFileDir + user + "_" + System.currentTimeMillis() + ".wav");
+ waveFileProcessor = new WaveformWriter(file.getAbsolutePath());
+ channelContext.setWaveFileProcessor(waveFileProcessor);
+ }
+ waveFileProcessor.process(datas);
- LOGGER.info("Length of the binary message received by the server [{}]", datas.length);
+ // LOGGER.info("Length of the audio message received by the server [{}]", datas.length);
- AudioDispatcher dispatcher = AudioDispatcherFactory.fromByteArray(datas, audioFormat, sampleSize, overlap);
+ // Concatenate with the buffered data
+ byte[] totalBytes = ArrayUtil.mergeByte(channelContext.getBufferBytes(), datas);
+ channelContext.setBufferBytes(totalBytes);
- dispatcher.addAudioProcessor(new PitchProcessor(algorithm, sampleRate, sampleSize, new PitchDetectionHandler() {
+ // Get the current note information
+ MusicXmlNote musicXmlNote = channelContext.getCurrentMusicNote(null);
- @Override
- public void handlePitch(PitchDetectionResult pitchDetectionResult, AudioEvent audioEvent) {
+ if (musicXmlNote == null) {
+ return;
+ }
- // Get the byte stream
- int byteOverlap = audioEvent.getOverlap() * audioFormat.getFrameSize();
- int byteStepSize = audioEvent.getBufferSize() * audioFormat.getFrameSize() - byteOverlap;
- byte[] acceptDatas = ArrayUtils.subarray(audioEvent.getByteBuffer(), byteOverlap, byteStepSize);
+ // Calculate the data length of the current note. Formula: data rate (bytes/second) = sample rate (Hz) × (sample size in bits / 8) × number of channels
+ int length = (int) (audioFormat.getSampleRate() * (audioFormat.getSampleSizeInBits() / 8) * channels * musicXmlNote.getDuration() / 1000);
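+ // Worked example (illustrative only; assumes 44.1 kHz, 16-bit, mono, and a 500 ms note): 44100 × (16 / 8) × 1 × 500 / 1000 = 44100 bytes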
- // Concatenate with the buffered data
- byte[] totalBytes = ArrayUtil.mergeByte(channelContext.getBufferBytes(), acceptDatas);
- channelContext.setBufferBytes(totalBytes);
+ if (channelContext.getCurrentMusicNoteIndex() <= channelContext.getTotalMusicNoteIndexNum(null) && totalBytes.length >= length) {
+ // Process the current note
+ byte[] noteByteData = new byte[length];
+ System.arraycopy(totalBytes, 0, noteByteData, 0, length);
- LOGGER.info("Newly added bytes: {} Remaining buffered bytes: {}", acceptDatas.length, totalBytes.length);
+ float[] noteFloatData = new float[length / audioFormat.getFrameSize()];
- // Get the current note information
- MusicXmlNote musicXmlNote = channelContext.getCurrentMusicNote(null);
+ converter.toFloatArray(noteByteData, noteFloatData);
- // Calculate the data length of the current note. Formula: data rate (bytes/second) = sample rate (Hz) × (sample size in bits / 8) × number of channels
- int length = (int) (audioFormat.getSampleRate() * (audioFormat.getSampleSizeInBits() / 8) * channels * musicXmlNote.getDuration() / 1000);
+ // Get the frequency data
+ float pitch = getPitch(noteFloatData, bufferSize);
- if (totalBytes.length >= length) {
- // Process the current note
- byte[] noteByteData = new byte[length];
- System.arraycopy(totalBytes, 0, noteByteData, 0, length);
+ LOGGER.info("Note {}: expected frequency: {} actual frequency: {}", channelContext.getCurrentMusicNoteIndex(), musicXmlNote.getFrequency(), pitch);
- float[] noteFloatData = new float[length / audioFormat.getFrameSize()];
+ // Prepare to process the next note
+ channelContext.incrementMusicNoteIndex();
+ // Remaining unprocessed data
+ channelContext.setBufferBytes(ArrayUtil.extractByte(totalBytes, length, totalBytes.length - 1));
+ }
+
- converter.toFloatArray(noteByteData, noteFloatData);
+ AudioDispatcher dispatcher = AudioDispatcherFactory.fromByteArray(datas, audioFormat, bufferSize, overlap);
- // Get the frequency data
- float pitch = getPitch(noteFloatData, sampleSize);
+ dispatcher.addAudioProcessor(new PitchProcessor(algorithm, sampleRate, bufferSize, new PitchDetectionHandler() {
- LOGGER.info("Note {}: expected frequency: {} actual frequency: {}", channelContext.getCurrentMusicNoteIndex(), musicXmlNote.getFrequency(), pitch);
+ @Override
+ public void handlePitch(PitchDetectionResult pitchDetectionResult, AudioEvent audioEvent) {
- // Prepare to process the next note
- channelContext.incrementMusicNoteIndex();
- // Remaining unprocessed data
- channelContext.setBufferBytes(ArrayUtil.extractByte(totalBytes, 0, length - 1));
- }
+ // Get the byte stream
+ int byteOverlap = audioEvent.getOverlap() * audioFormat.getFrameSize();
+ int byteStepSize = audioEvent.getBufferSize() * audioFormat.getFrameSize() - byteOverlap;
+ byte[] acceptDatas = ArrayUtils.subarray(audioEvent.getByteBuffer(), byteOverlap, byteStepSize);
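+ // Illustrative note (assumes 16-bit mono, i.e. a frame size of 2 bytes): with overlap = 0, byteOverlap = 0 and byteStepSize = 4096 * 2 = 8192, so acceptDatas spans the whole buffer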
- }
- }));
- dispatcher.run();
+ LOGGER.info("Newly added bytes: {} Remaining buffered bytes: {}", acceptDatas.length, totalBytes.length);
+
+
+ }
+ }));
+ dispatcher.run();
+ } finally {
+ buf.release();
+ }
}

private float getPitch(float[] audioBuffer, int bufferSize) {