package com.yonge;

import be.tarsos.dsp.AudioDispatcher;
import be.tarsos.dsp.AudioEvent;
import be.tarsos.dsp.AudioProcessor;
import be.tarsos.dsp.io.jvm.JVMAudioInputStream;
import be.tarsos.dsp.mfcc.MFCC;
import be.tarsos.dsp.pitch.FastYin;
import be.tarsos.dsp.pitch.PitchDetectionHandler;
import be.tarsos.dsp.pitch.PitchDetectionResult;
import be.tarsos.dsp.pitch.PitchProcessor;

import com.yonge.audio.analysis.AudioFloatConverter;
import com.yonge.audio.analysis.detector.YINPitchDetector;
import com.yonge.audio.utils.ArrayUtil;
import com.yonge.netty.server.processor.WaveformWriter;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.ArrayUtils;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;

import java.io.*;
import java.net.URL;
import java.util.Arrays;
import java.util.Date;
/**
 * Reads a mono 16-bit WAV resource, splits it into fixed-size windows, runs
 * FastYin pitch detection on each window and writes the processed audio back
 * out through a WaveformWriter.
 *
 * @author liujunchi
 * @date 2022-06-24
 */
public class Main {

    private final static int audioBufferSize = 2048;
    private final static int bufferOverlap = 1024;
    private final static int amountOfMelFilters = 20;
    private final static int amountOfCepstrumCoef = 30;
    private final static float lowerFilterFreq = 133.33f;
    private final static float upperFilterFreq = 8000f;
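    // Source/target PCM format: 44.1 kHz, 16-bit, mono, signed, little-endian.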
    private static AudioFormat audioFormat = new AudioFormat(44100, 16, 1, true, false);

    // private AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);

    public static void main(String[] args) {
        try {
            float sampleRate = 44100;
            // These two locals shadow the class-level constants of the same name.
            int audioBufferSize = 1024 * 2;
            int bufferOverlap = 0;

            AudioFloatConverter converter = AudioFloatConverter.getConverter(audioFormat);
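            // AudioFloatConverter is a project class; it is assumed here to convert the
            // little-endian 16-bit PCM bytes of the format above into floats in [-1, 1].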
            // Create an AudioInputStream from the bundled .wav resource
            URL soundURL = Main.class.getResource("/WAV.wav");
            AudioInputStream stream = AudioSystem.getAudioInputStream(soundURL);

            // final MFCC mfccProcessor = new MFCC(audioBufferSize, stream.getFormat().getSampleRate(),
            //         amountOfCepstrumCoef, amountOfMelFilters, lowerFilterFreq, upperFilterFreq);

            FastYin detector = new FastYin(sampleRate, audioBufferSize * 2);
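            // The detector window is audioBufferSize * 2 samples because every source
            // sample is duplicated below before the buffer is handed to the detector.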
            byte[] bytes = IOUtils.toByteArray(stream);
            AudioFormat format = stream.getFormat();
            // b and frequency are only used by the commented-out averaging code below.
            int b = 0;
            int frequency = 0;

            File file = new File("D:\\project\\cooleshow\\audio-analysis\\target\\wav1.wav");
            WaveformWriter waveFileProcessor = new WaveformWriter(file.getAbsolutePath());

            byte[] bytes1 = new byte[0];
            // for (int i = 0; i < bytes.length; i++) {
            //     if (i % 2 == 1) {
            //         System.out.println(bytes[i] + "----------" + bytes[i - 1]);
            //     }
            // }
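            // Windowed processing: take audioBufferSize samples (2 bytes each) from the
            // front of the buffer, duplicate every sample to double the window length,
            // run FastYin on the result, then advance by half a window. Note that
            // duplicating samples while keeping the 44.1 kHz sample rate stretches the
            // waveform 2x in time, so the reported pitch likely comes out at roughly
            // half the true frequency.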
            while (bytes.length > audioBufferSize * 2) {
                byte[] bufferData = ArrayUtil.extractByte(bytes, 0, audioBufferSize * 2 - 1);
                bytes1 = ArrayUtil.mergeByte(bytes1, bufferData);

                byte[] bytes2 = new byte[bytes1.length * 2];
                for (int i = 0; i < bytes1.length; i = i + 2) {
                    bytes2[(i + 1) * 2] = bytes2[i * 2] = bytes1[i];
                    bytes2[(i + 1) * 2 + 1] = bytes2[i * 2 + 1] = bytes1[i + 1];
                }
                // byte ff = bytes1[bytes1.length - 1];
                // for (int start = 0, end = bytes1.length - 2; start < end; start++, end--) {
                //     byte temp = bytes1[end];
                //     bytes1[end] = bytes1[start];
                //     bytes1[start] = temp;
                // }
                // bytes1[bytes1.length - 1] = ff;
                //
                // bytes1 = ArrayUtil.mergeByte(bufferData, bytes1);
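                // Only run detection once a full doubled window (audioBufferSize * 4 bytes,
                // i.e. audioBufferSize * 2 samples) has been assembled.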
                if (bytes2.length == audioBufferSize * 4) {
                    waveFileProcessor.process(bytes2);

                    float[] sampleFloats = new float[audioBufferSize * 2];
                    converter.toFloatArray(bytes2, sampleFloats);

                    int playFrequency = (int) detector.getPitch(sampleFloats).getPitch();
                    if (playFrequency != -1) {
                        System.out.println("play frequency is " + playFrequency);
                    }
                    bytes1 = new byte[0];
                }
                // YINPitchDetector frequencyDetector = new YINPitchDetector(sampleFloats.length, audioFormat.getSampleRate());
                //
                // playFrequency = (int) frequencyDetector.getFrequency(sampleFloats);
                //
                // System.out.println("frequencyDetector play frequency is " + playFrequency);

                // ArrayUtil.extractByte(channelContext.getChannelBufferBytes(), bufferSize, totalLength - 1)
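                // Drop the first audioBufferSize bytes (half a window) from the head of the
                // buffer, so consecutive detection windows overlap by 50% (assuming
                // ArrayUtil.extractByte returns the inclusive byte range [from, to]).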
                bytes = ArrayUtil.extractByte(bytes, audioBufferSize, bytes.length - 1);

                // if (b == 1) {
                //     frequency += playFrequency;
                //     System.out.println("play frequency is " + frequency / 2);
                //     b = 0;
                //     frequency = 0;
                // } else {
                //     frequency += playFrequency;
                //     b++;
                // }
            }
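            // WaveformWriter is a project class; processingFinished() is assumed to
            // finalize the output file (e.g. fill in the WAV header).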
            waveFileProcessor.processingFinished();

            // Convert into TarsosDSP API
            // JVMAudioInputStream audioStream = new JVMAudioInputStream(stream);
            // AudioDispatcher dispatcher = new AudioDispatcher(audioStream, audioBufferSize, bufferOverlap);
            // MyPitchDetector myPitchDetector = new MyPitchDetector();
            // dispatcher.addAudioProcessor(mfccProcessor);
            // dispatcher.addAudioProcessor(new AudioProcessor() {
            //     @Override
            //     public boolean process(AudioEvent audioEvent) {
            //         float[] mfccs = mfccProcessor.getMFCC();
            //
            //         // System.out.println(Arrays.toString(mfccs));
            //
            //         YINPitchDetector frequencyDetector = new YINPitchDetector(mfccs.length, sampleRate);
            //
            //         int playFrequency = (int) detector.getPitch(audioEvent.getFloatBuffer()).getPitch();
            //         // int playFrequency = (int) frequencyDetector.getFrequency(mfccs);
            //         System.out.println("play frequency is " + playFrequency);
            //         return true;
            //     }
            //
            //     @Override
            //     public void processingFinished() {
            //
            //     }
            // });
            // // dispatcher.addAudioProcessor(new MyPitchProcessor(PitchProcessor.PitchEstimationAlgorithm.FFT_YIN, sampleRate, audioBufferSize, myPitchDetector));
            // dispatcher.run();
        } catch (FileNotFoundException fne) {
            fne.printStackTrace();
        } catch (UnsupportedAudioFileException uafe) {
            uafe.printStackTrace();
        } catch (IOException ie) {
            ie.printStackTrace();
        }
    }
}

class MyPitchDetector implements PitchDetectionHandler {

    // Note: the pitch reported by this handler always came out at less than half
    // the expected value.
    @Override
    public void handlePitch(PitchDetectionResult pitchDetectionResult, AudioEvent audioEvent) {
        if (pitchDetectionResult.getPitch() != -1) {
            double timeStamp = audioEvent.getTimeStamp();
            float pitch = pitchDetectionResult.getPitch();
            float probability = pitchDetectionResult.getProbability();
            double rms = audioEvent.getRMS() * 100;
            String message = String.format("Pitch detected at %.2fs: %.2fHz ( %.2f probability, RMS: %.5f )\n",
                    timeStamp, pitch, probability, rms);
            System.out.println(message);
        }
    }
}
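
// Thin wrapper around TarsosDSP's PitchProcessor: process() simply delegates to the
// superclass and is presumably kept only as an extension point.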
class MyPitchProcessor extends PitchProcessor {

    /**
     * Initialize a new pitch processor.
     *
     * @param algorithm  An enum defining the algorithm.
     * @param sampleRate The sample rate of the buffer (Hz).
     * @param bufferSize The size of the buffer in samples.
     * @param handler    Handler that receives the detection results.
     */
    public MyPitchProcessor(PitchEstimationAlgorithm algorithm, float sampleRate, int bufferSize, PitchDetectionHandler handler) {
        super(algorithm, sampleRate, bufferSize, handler);
    }

    @Override
    public boolean process(AudioEvent audioEvent) {
        return super.process(audioEvent);
    }
}