
Modify the metronome beat playback implementation

Pq 10 months ago
parent commit ec4f965642

+ 725 - 0
metronome/src/main/java/com/cooleshow/metronome/Utils/AudioTrackManager2.java

@@ -0,0 +1,725 @@
+package com.cooleshow.metronome.Utils;
+
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+import android.media.PlaybackParams;
+import android.os.Build;
+import android.util.Log;
+
+import com.cooleshow.base.utils.ConvertUtils;
+import com.cooleshow.base.utils.LOG;
+import com.cooleshow.base.utils.Utils;
+import com.cooleshow.metronome.constants.MetronomeType;
+import com.cooleshow.metronome.constants.QuarterNoteRhythmType;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStream;
+
+import io.reactivex.rxjava3.android.schedulers.AndroidSchedulers;
+import io.reactivex.rxjava3.annotations.NonNull;
+import io.reactivex.rxjava3.core.Observable;
+import io.reactivex.rxjava3.core.ObservableEmitter;
+import io.reactivex.rxjava3.core.ObservableOnSubscribe;
+import io.reactivex.rxjava3.core.Observer;
+import io.reactivex.rxjava3.disposables.Disposable;
+import io.reactivex.rxjava3.schedulers.Schedulers;
+
+/**
+ * Author: pq. Date: 2022/10/26.
+ */
+public class AudioTrackManager2 {
+    private static final int DEFAULT_STREAM_TYPE = AudioManager.STREAM_MUSIC;
+    private static final int DEFAULT_PLAY_MODE = AudioTrack.MODE_STREAM;
+    private static final int DEFAULT_CHANNEL = AudioFormat.CHANNEL_OUT_MONO;
+
+
+    AudioTrack mAudioTrack;
+
+    /**
+     * Total length
+     **/
+    int length;
+    /**
+     * Whether the playback loop should keep running
+     */
+    private boolean ISPLAYSOUND = false;
+    private volatile static AudioTrackManager2 mInstance;
+
+    private float[] mWeghts;
+
+    private long totalDuration = -1;
+    private int minBufferSize;
+    private byte[][] mAllAudioData;
+
+    public static AudioTrackManager2 getInstance() {
+        if (mInstance == null) {
+            synchronized (AudioTrackManager2.class) {
+                if (mInstance == null) {
+                    mInstance = new AudioTrackManager2();
+                }
+            }
+        }
+        return mInstance;
+    }
+
+    public void init() {
+        minBufferSize = AudioTrack.getMinBufferSize(RATE, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
+        LOG.i("bufferSize:" + minBufferSize);
+        mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, RATE,
+                AudioFormat.CHANNEL_OUT_STEREO, // CHANNEL_CONFIGURATION_MONO,
+                AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
+    }
+
+    public static final int RATE = 44100;
+    public static final float MAX_VOLUME = 1f;
+
+
+    public void setWeight(int pos, float value) {
+        if (mWeghts == null) {
+            return;
+        }
+        if (value < 0f || value > 1.0f) {
+            return;
+        }
+        if (pos < mWeghts.length) {
+            LOG.i("setWeight:" + pos + "--value:" + value);
+            mWeghts[pos] = value * MAX_VOLUME;
+        }
+    }
+
+    public void setOnlyPlay(int pos) {
+        if (mWeghts == null) {
+            return;
+        }
+        if (pos < mWeghts.length) {
+            for (int i = 0; i < mWeghts.length; i++) {
+                mWeghts[i] = i == pos ? MAX_VOLUME : 0;
+            }
+        }
+    }
+
+    public void setPlaySpeed(float value) {
+        // Set the playback speed (clamped to [0.5, 2.5])
+        if (value < 0.5f) {
+            value = 0.5f;
+        }
+        if (value > 2.5f) {
+            value = 2.5f;
+        }
+        if (mAudioTrack != null) {
+            PlaybackParams playbackParams = null;
+//            int i = mAudioTrack.setPlaybackRate((int) (value * RATE));
+//            LOG.i("setPlaybackRate:"+i);
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+                playbackParams = mAudioTrack.getPlaybackParams();
+                playbackParams.setSpeed(value);
+                LOG.i("setPlaybackRate:" + value);
+                mAudioTrack.setPlaybackParams(playbackParams);
+            }
+        }
+    }
+
+    public void pause() {
+        if (mAudioTrack != null) {
+            mAudioTrack.pause();
+        }
+    }
+
+    public void initAudio(int[] musicRes) {
+        if (musicRes == null || musicRes.length == 0) {
+            return;
+        }
+        mAllAudioData = parseFile2(musicRes);
+    }
+
+    private QuarterNoteRhythmType currentRhythmType;
+    private MetronomeType currentBeatType;
+    private int currentSpeed;
+    private float currentNoteRate;
+
+    public void setCurrentNoteRate(int playSpeed, float currentNoteRate) {
+        this.currentSpeed = playSpeed;
+        this.currentNoteRate = currentNoteRate;
+    }
+
+    public void playBeat(QuarterNoteRhythmType rhythmType, MetronomeType metronomeType) {
+        if (ISPLAYSOUND) {
+            return;
+        }
+        this.currentRhythmType = rhythmType;
+        this.currentBeatType = metronomeType;
+        Observable.create(new ObservableOnSubscribe<String>() {
+                    @Override
+                    public void subscribe(@NonNull ObservableEmitter<String> emitter) throws Throwable {
+                        ISPLAYSOUND = true;
+                        mAudioTrack.play();
+                        int playPos = 0;
+                        while (ISPLAYSOUND) {
+                            byte[] audiodata;
+                            if (isTickOrTock(playPos)) {
+                                audiodata = mAllAudioData[0];
+                            } else {
+                                audiodata = mAllAudioData[1];
+                            }
+                            audiodata = countBySpeed(playPos, audiodata);
+                            mAudioTrack.write(audiodata, 0, audiodata.length);
+                            playPos++;
+                        }
+                    }
+                }).subscribeOn(Schedulers.newThread())
+                .observeOn(AndroidSchedulers.mainThread())
+                .subscribe(new Observer<String>() {
+                    @Override
+                    public void onSubscribe(@NonNull Disposable d) {
+
+                    }
+
+                    @Override
+                    public void onNext(@NonNull String s) {
+
+                    }
+
+                    @Override
+                    public void onError(@NonNull Throwable e) {
+                        e.printStackTrace();
+                    }
+
+                    @Override
+                    public void onComplete() {
+
+                    }
+                });
+    }
+
+    private byte[] countBySpeed(int playPos, byte[] audiodata) {
+        // Beat duration in ms: (60 / BPM) * 1000, scaled by the note rate and the rhythm fraction for this position.
+        int duration = (int) (1 / (currentSpeed / 60d) * 1000 * currentNoteRate * getRhythmParams(playPos));
+        LOG.i("Current beat duration (ms): " + duration);
+        float d = duration / 1000f;
+        // Expected PCM byte length = seconds * sample rate * channels (2) * bytes per 16-bit sample
+        // (AudioFormat.ENCODING_PCM_16BIT equals 2 and is used here as the bytes-per-sample factor).
+        int exceptAudioLength = (int) (d * (RATE * 2 * AudioFormat.ENCODING_PCM_16BIT));
+        byte[] lastLengthFromExcept = getLastLengthFromExcept(audiodata, exceptAudioLength);
+        LOG.i("lastLengthFromExcept:" + lastLengthFromExcept.length);
+        return lastLengthFromExcept;
+    }
+
+    private float getRhythmParams(int playPosition) {
+        float v = QuarterNoteRhythmType.countSingleNoteDuration(currentRhythmType, playPosition);
+        LOG.i("getRhythmParams:" + v);
+        return v;
+    }
+
+
+    private boolean isTickOrTock(int playPosition) {
+        if (currentRhythmType != QuarterNoteRhythmType.METRONOME_1_TYPE) {
+            return playPosition % currentRhythmType.getNoteCount() == 0;
+        }
+        if (currentBeatType.getValue() == MetronomeType.METRONOME_04_TYPE.getValue()) {
+            return false;
+        }
+        int beatValue = currentBeatType.getBeatValue();
+        return playPosition % beatValue == 0;
+    }
+
+    public void play2(String[] filePaths) {
+        if (ISPLAYSOUND) {
+            return;
+        }
+        Observable.create(new ObservableOnSubscribe<String>() {
+                    @Override
+                    public void subscribe(@NonNull ObservableEmitter<String> emitter) throws Throwable {
+                        mWeghts = new float[filePaths.length];
+                        mAllAudioData = parseFile(filePaths);
+                        WeightAudioMixer weightAudioMixer = new WeightAudioMixer(mWeghts);
+                        ISPLAYSOUND = true;
+                        mAudioTrack.play();
+                        byte[][] allAudioData2 = new byte[filePaths.length][];
+                        int bytesRead = 0;
+                        while (ISPLAYSOUND) {
+                            int result = 0;
+                            for (int i = 0; i < mAllAudioData.length; i++) {
+                                byte[] buffer = new byte[minBufferSize];
+                                result = splitByteArray(mAllAudioData[i], bytesRead, buffer);
+                                allAudioData2[i] = buffer;
+                            }
+                            bytesRead = result != -1 ? ++result : -1;
+                            LOG.i("bytesRead:" + bytesRead);
+                            byte[] audiodata = weightAudioMixer.mixRawAudioBytes(allAudioData2);
+                            mAudioTrack.write(audiodata, 0, minBufferSize);
+                            if (bytesRead == -1) {
+                                break;
+                            }
+                        }
+                    }
+                }).subscribeOn(Schedulers.newThread())
+                .observeOn(AndroidSchedulers.mainThread())
+                .subscribe(new Observer<String>() {
+                    @Override
+                    public void onSubscribe(@NonNull Disposable d) {
+
+                    }
+
+                    @Override
+                    public void onNext(@NonNull String s) {
+
+                    }
+
+                    @Override
+                    public void onError(@NonNull Throwable e) {
+                        e.printStackTrace();
+                    }
+
+                    @Override
+                    public void onComplete() {
+
+                    }
+                });
+    }
+
+    @androidx.annotation.NonNull
+    private byte[][] parseFile(String[] filePaths) {
+        byte[][] allAudioData = new byte[filePaths.length][];
+        LOG.i("filePaths:" + filePaths.length);
+        for (int i = 0; i < filePaths.length; i++) {
+            mWeghts[i] = MAX_VOLUME;
+            String filePath = filePaths[i];
+            byte[] bytes = readAudioDataFromFile(filePath);
+            LOG.i("readAudioDataFromFile:" + bytes.length);
+            allAudioData[i] = bytes;
+        }
+        makeLengthConsistent(allAudioData);
+        int byteLength = allAudioData[0].length;
+        countPlayTotalTime(RATE, 2, AudioFormat.ENCODING_PCM_16BIT, byteLength);
+        return allAudioData;
+    }
+
+    private byte[][] parseFile2(int[] resIds) {
+        byte[][] allAudioData = new byte[resIds.length][];
+        LOG.i("allAudioData:" + allAudioData.length);
+        for (int i = 0; i < resIds.length; i++) {
+//            mWeghts[i] = MAX_VOLUME;
+            byte[] bytes = readAudioDataFromRawFile(resIds[i]);
+            LOG.i("readAudioDataFromFile:" + bytes.length);
+            allAudioData[i] = bytes;
+        }
+//        makeLengthConsistent(allAudioData);
+        int byteLength = allAudioData[0].length;
+        countPlayTotalTime(RATE, 2, AudioFormat.ENCODING_PCM_16BIT, byteLength);
+        return allAudioData;
+    }
+
+    private void countPlayTotalTime(int rate, int channelConfig, int audioFormat, int byteLength) {
+        // One second of audio (bytes) = sample rate x channel count x bytes per sample.
+        LOG.i("countPlayTotalTime: rate:" + rate + "-channelConfig:" + channelConfig + "-audioFormat:" + audioFormat + "-byteLength:" + byteLength);
+        // Multiply before dividing so the result keeps millisecond precision.
+        totalDuration = byteLength * 1000L / ((long) rate * channelConfig * audioFormat);
+    }
+
+    private int splitByteArray(byte[] source, int startIndex, byte[] destination) {
+        if (startIndex * destination.length >= source.length) {
+            return -1;
+        }
+        int remainByteLength = source.length - startIndex * destination.length;
+        boolean isLast = remainByteLength <= destination.length;
+        if (isLast) {
+            System.arraycopy(source, startIndex * destination.length, destination, 0, remainByteLength);
+        } else {
+            System.arraycopy(source, startIndex * destination.length, destination, 0, destination.length);
+        }
+        if (isLast) {
+            return -1;
+        }
+        return startIndex;
+    }
+
+    public long getTotalDuration() {
+        return totalDuration;
+    }
+
+    public void seekPercent(float percent) {
+        if (getTotalDuration() == -1) {
+            return;
+        }
+        if (percent < 0 || percent > 1.0f) {
+            return;
+        }
+        long totalDuration = getTotalDuration();
+        LOG.i("seek percent:" + percent);
+        LOG.i("seek percent:" + totalDuration);
+        int posTime = (int) (totalDuration * percent);
+        LOG.i("seek posTime:" + posTime);
+        int seekToFrame = (int) (RATE * (posTime * 1.0f / 1000));  // convert the time offset to a frame count
+        LOG.i("seek seekToFrame:" + seekToFrame);
+        seekTo(seekToFrame);
+    }
+
+    private void seekTo(int seekToFrame) {
+        if (mAudioTrack != null) {
+            if (!isPlaying()) {
+                return;
+            }
+            LOG.i("seekTo:" + seekToFrame);
+
+            int currentFrame = mAudioTrack.getPlaybackHeadPosition();  // current playback head position
+            int targetFrame = currentFrame + seekToFrame;  // target playback head position
+            // Note: setPlaybackHeadPosition only takes effect on MODE_STATIC tracks that are stopped or paused.
+            mAudioTrack.setPlaybackHeadPosition(targetFrame);
+        }
+    }
+
+    private boolean isPlaying() {
+        if (mAudioTrack != null) {
+            return mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING;
+        }
+        return false;
+    }
+
+    public long getPlayProgress() {
+        if (mAudioTrack != null) {
+            int currentFrame = mAudioTrack.getPlaybackHeadPosition();
+            LOG.i("currentFrame:" + currentFrame);
+
+            int rate = RATE;
+            if (rate > 0) {
+                float playTime = currentFrame * 1.0f / rate;
+                long currentPlayTimeMs = (long) (1000 * playTime);
+                LOG.i("currentPlayTimeMs=" + currentPlayTimeMs);
+                return currentPlayTimeMs;
+            }
+        }
+        return -1;
+    }
+
+    /**
+     * Mix the given PCM files and play the result once.
+     *
+     * @param filePaths paths of the raw PCM files to mix
+     */
+    public void play(String[] filePaths) {
+        Observable.create(new ObservableOnSubscribe<String>() {
+                    @Override
+                    public void subscribe(@NonNull ObservableEmitter<String> emitter) throws Throwable {
+                        int bufferSize = AudioTrack.getMinBufferSize(RATE, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
+                        float[] weghts = new float[filePaths.length];
+                        byte[][] allAudioData = new byte[filePaths.length][];
+                        for (int i = 0; i < filePaths.length; i++) {
+                            weghts[i] = 1f;
+                            String filePath = filePaths[i];
+                            byte[] bytes = readAudioDataFromFile(filePath);
+                            allAudioData[i] = bytes;
+                        }
+                        WeightAudioMixer weightAudioMixer = new WeightAudioMixer(weghts);
+                        makeLengthConsistent(allAudioData);
+                        byte[] audiodata = weightAudioMixer.mixRawAudioBytes(allAudioData);
+                        mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, RATE,
+                                AudioFormat.CHANNEL_OUT_STEREO, // CHANNEL_CONFIGURATION_MONO,
+                                AudioFormat.ENCODING_PCM_16BIT, audiodata.length, AudioTrack.MODE_STATIC);
+                        mAudioTrack.write(audiodata, 0, audiodata.length);
+                        setPlaySpeed(2.0f);
+                        mAudioTrack.play();
+                    }
+                }).subscribeOn(Schedulers.newThread())
+                .observeOn(AndroidSchedulers.mainThread())
+                .subscribe(new Observer<String>() {
+                    @Override
+                    public void onSubscribe(@NonNull Disposable d) {
+
+                    }
+
+                    @Override
+                    public void onNext(@NonNull String s) {
+
+                    }
+
+                    @Override
+                    public void onError(@NonNull Throwable e) {
+                        e.printStackTrace();
+                    }
+
+                    @Override
+                    public void onComplete() {
+
+                    }
+                });
+        ISPLAYSOUND = true;
+    }
+
+    public byte[] getLastLengthFromExcept(byte[] originalAudio, int exceptLength) {
+        LOG.i("originalAudio:" + originalAudio.length);
+        LOG.i("exceptLength:" + exceptLength);
+        if (originalAudio.length == exceptLength) {
+            return originalAudio;
+        }
+        byte[] paddedBlock = new byte[exceptLength];
+        if (originalAudio.length > exceptLength) {
+            // trim to the expected length
+            System.arraycopy(originalAudio, 0, paddedBlock, 0, paddedBlock.length);
+        } else {
+            // pad the remainder with zeros (silence)
+            System.arraycopy(originalAudio, 0, paddedBlock, 0, originalAudio.length);
+        }
+        return paddedBlock;
+    }
+
+    public void makeLengthConsistent(byte[][] dataBlocks) {
+        // Find the length of the longest block
+        int maxLength = 0;
+        for (byte[] block : dataBlocks) {
+            if (block.length > maxLength) {
+                maxLength = block.length;
+            }
+        }
+
+        // Zero-pad the shorter blocks so all blocks have the same length
+        for (int i = 0; i < dataBlocks.length; i++) {
+            byte[] block = dataBlocks[i];
+            if (block.length < maxLength) {
+                byte[] paddedBlock = new byte[maxLength];
+                System.arraycopy(block, 0, paddedBlock, 0, block.length);
+                dataBlocks[i] = paddedBlock;
+            }
+        }
+    }
+
+    /**
+     * Additive (summing) mixer
+     *
+     * @author Darcy
+     */
+    private static class AddAudioMixer extends MultiAudioMixer {
+
+        @Override
+        public byte[] mixRawAudioBytes(byte[][] bMulRoadAudioes) {
+
+            if (bMulRoadAudioes == null || bMulRoadAudioes.length == 0)
+                return null;
+
+            byte[] realMixAudio = bMulRoadAudioes[0];
+
+            if (bMulRoadAudioes.length == 1)
+                return realMixAudio;
+
+//            for (int rw = 0; rw < bMulRoadAudioes.length; ++rw) {
+//                if (bMulRoadAudioes[rw].length != realMixAudio.length) {
+//                    Log.e("app", "column of the road of audio + " + rw + " is diffrent.");
+//                    return null;
+//                }
+//            }
+
+            //row: number of audio tracks being mixed
+            //column: number of 16-bit samples per track; all tracks must have the same sample count
+            int row = bMulRoadAudioes.length;
+            int coloum = realMixAudio.length / 2;
+            LOG.i("pq", "row:" + row);
+            LOG.i("pq", "coloum:" + coloum);
+            short[][] sMulRoadAudioes = new short[row][coloum];
+
+            //16-bit PCM samples are stored little-endian (low byte first, high byte second): bytes (X1Y1, X2Y2, X3Y3) decode to sample values ((Y1 * 256 + X1), (Y2 * 256 + X2), (Y3 * 256 + X3))
+            for (int r = 0; r < row; ++r) {
+                for (int c = 0; c < coloum; ++c) {
+                    sMulRoadAudioes[r][c] = (short) ((bMulRoadAudioes[r][c * 2] & 0xff) | (bMulRoadAudioes[r][c * 2 + 1] & 0xff) << 8);
+                }
+            }
+
+            short[] sMixAudio = new short[coloum];
+            int mixVal;
+            int sr = 0;
+            for (int sc = 0; sc < coloum; ++sc) {
+                mixVal = 0;
+                sr = 0;
+                //simple summation of all tracks
+                for (; sr < row; ++sr) {
+                    mixVal += sMulRoadAudioes[sr][sc];
+                }
+                //the sum can exceed Short.MAX_VALUE, so this cast may overflow (clip)
+                sMixAudio[sc] = (short) (mixVal);
+            }
+
+            //write each short back as a little-endian two-byte pair
+            for (sr = 0; sr < coloum; ++sr) {
+                realMixAudio[sr * 2] = (byte) (sMixAudio[sr] & 0x00FF);
+                realMixAudio[sr * 2 + 1] = (byte) ((sMixAudio[sr] & 0xFF00) >> 8);
+            }
+
+            return realMixAudio;
+        }
+
+    }
+
+
+    /**
+     * Averaging mixer
+     *
+     * @author Darcy
+     */
+    private static class AverageAudioMixer extends MultiAudioMixer {
+
+        @Override
+        public byte[] mixRawAudioBytes(byte[][] bMulRoadAudioes) {
+
+            if (bMulRoadAudioes == null || bMulRoadAudioes.length == 0)
+                return null;
+
+            byte[] realMixAudio = bMulRoadAudioes[0];
+
+            if (bMulRoadAudioes.length == 1)
+                return realMixAudio;
+
+            for (int rw = 0; rw < bMulRoadAudioes.length; ++rw) {
+                if (bMulRoadAudioes[rw].length != realMixAudio.length) {
+                    Log.e("app", "audio track " + rw + " has a different length.");
+                    return null;
+                }
+            }
+
+            int row = bMulRoadAudioes.length;
+            int coloum = realMixAudio.length / 2;
+            short[][] sMulRoadAudioes = new short[row][coloum];
+
+            for (int r = 0; r < row; ++r) {
+                for (int c = 0; c < coloum; ++c) {
+                    sMulRoadAudioes[r][c] = (short) ((bMulRoadAudioes[r][c * 2] & 0xff) | (bMulRoadAudioes[r][c * 2 + 1] & 0xff) << 8);
+                }
+            }
+
+            short[] sMixAudio = new short[coloum];
+            int mixVal;
+            int sr = 0;
+            for (int sc = 0; sc < coloum; ++sc) {
+                mixVal = 0;
+                sr = 0;
+                for (; sr < row; ++sr) {
+                    mixVal += sMulRoadAudioes[sr][sc];
+                }
+                sMixAudio[sc] = (short) (mixVal / row);
+            }
+
+            for (sr = 0; sr < coloum; ++sr) {
+                realMixAudio[sr * 2] = (byte) (sMixAudio[sr] & 0x00FF);
+                realMixAudio[sr * 2 + 1] = (byte) ((sMixAudio[sr] & 0xFF00) >> 8);
+            }
+
+            return realMixAudio;
+        }
+
+    }
+
+    /**
+     * Weighted mixer
+     *
+     * @author Darcy
+     */
+    private static class WeightAudioMixer extends MultiAudioMixer {
+        private float[] weights;
+
+        public WeightAudioMixer(float[] weights) {
+            this.weights = weights;
+        }
+
+        @Override
+        public byte[] mixRawAudioBytes(byte[][] bMulRoadAudioes) {
+
+            if (bMulRoadAudioes == null || bMulRoadAudioes.length == 0) {
+                return null;
+            }
+            LOG.i("bMulRoadAudioes length:" + bMulRoadAudioes.length);
+            if (weights == null || weights.length != bMulRoadAudioes.length) {
+                return null;
+            }
+
+            byte[] realMixAudio = bMulRoadAudioes[0];
+
+            if (bMulRoadAudioes.length == 1)
+                return realMixAudio;
+
+            for (int rw = 0; rw < bMulRoadAudioes.length; ++rw) {
+                LOG.i("mixRawAudioBytes:" + bMulRoadAudioes[rw].length);
+                if (bMulRoadAudioes[rw].length != realMixAudio.length) {
+                    Log.e("app", "audio track " + rw + " has a different length.");
+                    return null;
+                }
+            }
+
+            int row = bMulRoadAudioes.length;
+            int coloum = realMixAudio.length / 2;
+            short[][] sMulRoadAudioes = new short[row][coloum];
+
+            for (int r = 0; r < row; ++r) {
+                for (int c = 0; c < coloum; ++c) {
+                    sMulRoadAudioes[r][c] = (short) ((bMulRoadAudioes[r][c * 2] & 0xff) | (bMulRoadAudioes[r][c * 2 + 1] & 0xff) << 8);
+                }
+            }
+
+            short[] sMixAudio = new short[coloum];
+            int mixVal;
+            int sr = 0;
+            for (int sc = 0; sc < coloum; ++sc) {
+                mixVal = 0;
+                sr = 0;
+                for (; sr < row; ++sr) {
+                    mixVal += sMulRoadAudioes[sr][sc] * weights[sr];
+                }
+//                sMixAudio[sc] = (short) (mixVal);
+                sMixAudio[sc] = (short) (mixVal / row);
+            }
+
+            for (sr = 0; sr < coloum; ++sr) {
+                realMixAudio[sr * 2] = (byte) (sMixAudio[sr] & 0x00FF);
+                realMixAudio[sr * 2 + 1] = (byte) ((sMixAudio[sr] & 0xFF00) >> 8);
+            }
+
+            return realMixAudio;
+        }
+
+    }
+
+    private byte[] readAudioDataFromRawFile(int resId) {
+        try {
+            InputStream inputStream = Utils.getApp().getResources().openRawResource(resId);
+            ByteArrayOutputStream byteArrayOutputStream = ConvertUtils.input2OutputStream(inputStream);
+            byte[] audiodata = byteArrayOutputStream.toByteArray();
+            byteArrayOutputStream.close();
+            return audiodata;
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+        return null;
+    }
+
+    private byte[] readAudioDataFromFile(String filePath) {
+        try {
+            File file = new File(filePath);
+            FileInputStream fileInputStream = new FileInputStream(file);
+            ByteArrayOutputStream byteArrayOutputStream = ConvertUtils.input2OutputStream(fileInputStream);
+            byte[] audiodata = byteArrayOutputStream.toByteArray();
+            byteArrayOutputStream.close();
+            return audiodata;
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+        return null;
+    }
+
+    public void stop() {
+        ISPLAYSOUND = false;
+        if (mAudioTrack != null) {
+            if (mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
+                mAudioTrack.pause();
+                mAudioTrack.flush();
+            }
+        }
+    }
+
+    public void release() {
+        if (mAllAudioData != null) {
+            mAllAudioData = null;
+        }
+        if (mAudioTrack != null) {
+            mAudioTrack.release();
+            mAudioTrack = null;
+        }
+    }
+}

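For orientation, here is a minimal usage sketch of the new AudioTrackManager2, following the call sequence this commit wires into PlayBeanManager further down. The demo class name, the R.raw.tick / R.raw.tock resource IDs and the 120 BPM value are placeholders, not part of this commit.

// Minimal lifecycle sketch (placeholder resource IDs and tempo; imports from
// com.cooleshow.metronome.Utils and .constants omitted for brevity).
public class MetronomeDemo {

    public void start() {
        AudioTrackManager2 manager = AudioTrackManager2.getInstance();

        // Create the streaming AudioTrack and decode the tick/tock samples once.
        manager.init();
        manager.initAudio(new int[]{R.raw.tick, R.raw.tock});

        // Push the tempo (BPM) and the note-rate factor before starting playback.
        manager.setCurrentNoteRate(120, 1.0f);

        // Starts a background loop that writes one trimmed/padded beat per iteration.
        manager.playBeat(QuarterNoteRhythmType.METRONOME_1_TYPE, MetronomeType.METRONOME_44_TYPE);
    }

    public void stop() {
        AudioTrackManager2 manager = AudioTrackManager2.getInstance();
        manager.stop();     // ends the write loop and pauses/flushes the track
        manager.release();  // frees the AudioTrack and the cached PCM data
    }
}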
+ 30 - 0
metronome/src/main/java/com/cooleshow/metronome/Utils/ComposeInfo.java

@@ -0,0 +1,30 @@
+package com.cooleshow.metronome.Utils;
+
+/**
+ * Audio composition (mix) info for a single track
+ *
+ */
+public class ComposeInfo {
+
+    /**
+     * Path of the source audio file
+     */
+    public String audioPath;
+
+    /**
+     * Path of the PCM file decoded from the audio
+     */
+    public String pcmPath;
+
+    /**
+     * Start offset of this track, in seconds
+     */
+    public float offsetSeconds;
+
+    /**
+     * Weight of this track in the mix
+     */
+    public float weight;
+
+
+}

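As a usage note, the offsetSeconds field above is turned into a leading-silence byte count inside MultiAudioMixer.mixAudios(List) below. A small sketch of that arithmetic, assuming the same 16-bit / 2-channel / 44100 Hz constants the mixer hard-codes (class name is illustrative):

// Sketch: how offsetSeconds maps to a leading-silence byte offset.
public final class OffsetMath {
    private static final int SAMPLE_RATE = 44100;
    private static final int CHANNELS = 2;
    private static final int BYTES_PER_SAMPLE = 16 / 8;

    static int offsetBytes(float offsetSeconds) {
        // bytes of silence = seconds * bytes-per-sample * channels * sample rate
        return (int) (offsetSeconds * BYTES_PER_SAMPLE * CHANNELS * SAMPLE_RATE);
    }

    public static void main(String[] args) {
        // 0.5 s of offset -> 0.5 * 2 * 2 * 44100 = 88200 bytes of silence
        System.out.println(offsetBytes(0.5f));
    }
}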
+ 458 - 0
metronome/src/main/java/com/cooleshow/metronome/Utils/MultiAudioMixer.java

@@ -0,0 +1,458 @@
+package com.cooleshow.metronome.Utils;
+
+import android.util.Log;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+public abstract class MultiAudioMixer {
+	
+	private OnAudioMixListener mOnAudioMixListener;
+
+	/**
+	 * Create the default mixer (additive)
+	 * @return
+     */
+	public static MultiAudioMixer createDefaultAudioMixer(){
+		return createAddAudioMixer();
+	}
+
+	/**
+	 * Create an additive (summing) mixer
+	 * @return
+	 */
+	public static MultiAudioMixer createAddAudioMixer(){
+		return new AddAudioMixer();
+	}
+
+	/**
+	 * Create an averaging mixer
+	 * @return
+     */
+	public static MultiAudioMixer createAverageAudioMixer(){
+		return new AverageAudioMixer();
+	}
+
+	/**
+	 * Create a weighted mixer
+	 * @param weights per-track mix weights
+	 * @return
+     */
+	public static MultiAudioMixer createWeightAudioMixer(float[] weights){
+		return new WeightAudioMixer(weights);
+	}
+
+	/**
+	 * Set the mix progress listener
+	 * @param l listener to notify during mixing
+     */
+	public void setOnAudioMixListener(OnAudioMixListener l){
+		this.mOnAudioMixListener = l;
+	}
+	
+
+	/**
+	 * Mix the given raw PCM audio files
+	 *
+	 * @param rawAudioFiles paths of the PCM files to mix
+     */
+	public void mixAudios(String[] rawAudioFiles){
+		
+		final int fileSize = rawAudioFiles.length;
+
+		FileInputStream[] audioFileStreams = new FileInputStream[fileSize];
+
+		FileInputStream inputStream;
+		byte[][] allAudioBytes = new byte[fileSize][];
+		boolean[] streamDoneArray = new boolean[fileSize];
+		final int bufferSize = 1024;
+		byte[] buffer = new byte[bufferSize];
+		int offset;
+		
+		try {
+			
+			for (int fileIndex = 0; fileIndex < fileSize; ++fileIndex) {
+				audioFileStreams[fileIndex] = new FileInputStream(rawAudioFiles[fileIndex]);
+			}
+
+			while(true){
+				
+				for(int streamIndex = 0 ; streamIndex < fileSize ; ++streamIndex){
+					
+					inputStream = audioFileStreams[streamIndex];
+					if(!streamDoneArray[streamIndex] && (offset = inputStream.read(buffer)) != -1){
+						allAudioBytes[streamIndex] = Arrays.copyOf(buffer,buffer.length);
+					}else{
+						streamDoneArray[streamIndex] = true;
+						allAudioBytes[streamIndex] = new byte[bufferSize];
+					}
+				}
+
+				byte[] mixBytes = mixRawAudioBytes(allAudioBytes);
+				if(mixBytes != null && mOnAudioMixListener != null){
+					mOnAudioMixListener.onMixing(mixBytes);
+				}
+				
+				boolean done = true;
+				for(boolean streamEnd : streamDoneArray){
+					if(!streamEnd){
+						done = false;
+					}
+				}
+				
+				if(done){
+					if(mOnAudioMixListener != null)
+						mOnAudioMixListener.onMixComplete();
+					break;
+				}
+			}
+			
+		} catch (IOException e) {
+			e.printStackTrace();
+			if(mOnAudioMixListener != null)
+				mOnAudioMixListener.onMixError(1);
+		}finally{
+			try {
+				for(FileInputStream in : audioFileStreams){
+					if(in != null)
+						in.close();
+				}
+			} catch (IOException e) {
+				e.printStackTrace();
+			}
+		}
+	}
+
+	/**
+	 * Mix audio tracks, each with an optional start offset
+	 *
+	 * @param infoList info for each track to mix
+     */
+	public void mixAudios(List<ComposeInfo> infoList){
+
+		if(infoList == null || infoList.size() <= 0) return;
+
+		final int fileSize = infoList.size();
+
+		FileInputStream[] audioFileStreams = new FileInputStream[fileSize];
+
+		FileInputStream inputStream;
+		byte[][] allAudioBytes = new byte[fileSize][];
+		boolean[] streamDoneArray = new boolean[fileSize];
+		final int bufferSize = 1024;
+		byte[] buffer = new byte[bufferSize];
+		int offset;
+
+		int[] audioOffset = new int[fileSize];
+		for(int i=0; i<fileSize; i++){
+			// leading-silence bytes = seconds * bytes-per-sample (16/8) * channels (2) * sample rate (44100)
+			audioOffset[i] = (int) (infoList.get(i).offsetSeconds * 16/8 * 2 * 44100);
+		}
+
+		try {
+
+			for (int fileIndex = 0; fileIndex < fileSize; ++fileIndex) {
+				audioFileStreams[fileIndex] = new FileInputStream(infoList.get(fileIndex).pcmPath);
+			}
+
+			while(true){
+
+				for(int streamIndex = 0 ; streamIndex < fileSize ; ++streamIndex){
+
+					inputStream = audioFileStreams[streamIndex];
+
+					//handle the leading silence required by this track's offset
+					int curOffset = audioOffset[streamIndex];
+					if(curOffset >= bufferSize){
+
+						//fill with silence
+						allAudioBytes[streamIndex] = new byte[bufferSize];
+
+						audioOffset[streamIndex] = curOffset - bufferSize;
+
+						continue;
+
+					}else if(curOffset > 0 && curOffset < bufferSize){
+
+						//fill part silence, part audio data read from the stream
+						byte[] data = new byte[bufferSize];
+
+						byte[] dataChild = new byte[bufferSize - curOffset];
+						inputStream.read(dataChild);
+
+						System.arraycopy(dataChild, 0, data, curOffset, dataChild.length);
+
+						allAudioBytes[streamIndex] = data;
+
+						audioOffset[streamIndex] = 0;
+
+						continue;
+
+					}
+
+					//read the next chunk from the file stream
+					if(!streamDoneArray[streamIndex] && (offset = inputStream.read(buffer)) != -1){
+						//use the audio data just read
+						allAudioBytes[streamIndex] = Arrays.copyOf(buffer,buffer.length);
+					}else{
+						//stream exhausted: fill with silence
+						streamDoneArray[streamIndex] = true;
+						allAudioBytes[streamIndex] = new byte[bufferSize];
+					}
+				}
+
+				//mix this chunk
+				byte[] mixBytes = mixRawAudioBytes(allAudioBytes);
+
+				if(mixBytes != null && mOnAudioMixListener != null){
+					mOnAudioMixListener.onMixing(mixBytes);
+				}
+
+				boolean done = true;
+				for(boolean streamEnd : streamDoneArray){
+					if(!streamEnd){
+						done = false;
+					}
+				}
+
+				if(done){
+					//all streams finished
+					if(mOnAudioMixListener != null)
+						mOnAudioMixListener.onMixComplete();
+					break;
+				}
+			}
+
+		} catch (Exception e) {
+			e.printStackTrace();
+
+			if(mOnAudioMixListener != null){
+				mOnAudioMixListener.onMixError(1);
+			}
+
+		}finally{
+			try {
+				for(FileInputStream in : audioFileStreams){
+					if(in != null)
+						in.close();
+				}
+			} catch (IOException e) {
+				e.printStackTrace();
+			}
+		}
+	}
+
+	/**
+	 * Mix one chunk of raw audio data, one buffer per track
+	 * @param data one byte buffer per track, all of equal length
+	 * @return the mixed buffer, or null if the input is invalid
+     */
+	public abstract byte[] mixRawAudioBytes(byte[][] data);
+
+	public interface OnAudioMixListener{
+		/**
+		 * Called as each mixed chunk is produced
+		 * @param mixBytes
+		 * @throws IOException
+         */
+		void onMixing(byte[] mixBytes) throws IOException;
+
+		/**
+		 * Called when mixing fails
+		 * @param errorCode
+         */
+		void onMixError(int errorCode);
+		
+		/**
+		 * Called when mixing completes
+		 */
+		void onMixComplete();
+	}
+	
+	/**
+	 * Additive (summing) mixer
+	 * @author Darcy
+	 */
+	private static class AddAudioMixer extends MultiAudioMixer{
+
+		@Override
+		public byte[] mixRawAudioBytes(byte[][] bMulRoadAudioes) {
+			
+			if (bMulRoadAudioes == null || bMulRoadAudioes.length == 0)
+				return null;
+
+			byte[] realMixAudio = bMulRoadAudioes[0];
+			
+			if(bMulRoadAudioes.length == 1)
+				return realMixAudio;
+			
+			for(int rw = 0 ; rw < bMulRoadAudioes.length ; ++rw){
+				if(bMulRoadAudioes[rw].length != realMixAudio.length){
+				Log.e("app", "audio track " + rw + " has a different length.");
+					return null;
+				}
+			}
+
+			//row: number of audio tracks being mixed
+			//column: number of 16-bit samples per track; all tracks must have the same sample count
+			int row = bMulRoadAudioes.length;
+			int coloum = realMixAudio.length / 2;
+			short[][] sMulRoadAudioes = new short[row][coloum];
+
+			//16-bit PCM samples are stored little-endian (low byte first, high byte second): bytes (X1Y1, X2Y2, X3Y3) decode to sample values ((Y1 * 256 + X1), (Y2 * 256 + X2), (Y3 * 256 + X3))
+			for (int r = 0; r < row; ++r) {
+				for (int c = 0; c < coloum; ++c) {
+					sMulRoadAudioes[r][c] = (short) ((bMulRoadAudioes[r][c * 2] & 0xff) | (bMulRoadAudioes[r][c * 2 + 1] & 0xff) << 8);
+				}
+			}
+
+			short[] sMixAudio = new short[coloum];
+			int mixVal;
+			int sr = 0;
+			for (int sc = 0; sc < coloum; ++sc) {
+				mixVal = 0;
+				sr = 0;
+				//simple summation of all tracks
+				for (; sr < row; ++sr) {
+					mixVal += sMulRoadAudioes[sr][sc];
+				}
+				//the sum can exceed Short.MAX_VALUE, so this cast may overflow (clip)
+				sMixAudio[sc] = (short) (mixVal);
+			}
+
+			//write each short back as a little-endian two-byte pair
+			for (sr = 0; sr < coloum; ++sr) {
+				realMixAudio[sr * 2] = (byte) (sMixAudio[sr] & 0x00FF);
+				realMixAudio[sr * 2 + 1] = (byte) ((sMixAudio[sr] & 0xFF00) >> 8);
+			}
+
+			return realMixAudio;
+		}
+		
+	}
+
+	/**
+	 * Averaging mixer
+	 * @author Darcy
+	 */
+	private static class AverageAudioMixer extends MultiAudioMixer{
+
+		@Override
+		public byte[] mixRawAudioBytes(byte[][] bMulRoadAudioes) {
+
+			if (bMulRoadAudioes == null || bMulRoadAudioes.length == 0)
+				return null;
+
+			byte[] realMixAudio = bMulRoadAudioes[0];
+
+			if(bMulRoadAudioes.length == 1)
+				return realMixAudio;
+
+			for(int rw = 0 ; rw < bMulRoadAudioes.length ; ++rw){
+				if(bMulRoadAudioes[rw].length != realMixAudio.length){
+				Log.e("app", "audio track " + rw + " has a different length.");
+					return null;
+				}
+			}
+
+			int row = bMulRoadAudioes.length;
+			int coloum = realMixAudio.length / 2;
+			short[][] sMulRoadAudioes = new short[row][coloum];
+
+			for (int r = 0; r < row; ++r) {
+				for (int c = 0; c < coloum; ++c) {
+					sMulRoadAudioes[r][c] = (short) ((bMulRoadAudioes[r][c * 2] & 0xff) | (bMulRoadAudioes[r][c * 2 + 1] & 0xff) << 8);
+				}
+			}
+
+			short[] sMixAudio = new short[coloum];
+			int mixVal;
+			int sr = 0;
+			for (int sc = 0; sc < coloum; ++sc) {
+				mixVal = 0;
+				sr = 0;
+				for (; sr < row; ++sr) {
+					mixVal += sMulRoadAudioes[sr][sc];
+				}
+				sMixAudio[sc] = (short) (mixVal / row);
+			}
+
+			for (sr = 0; sr < coloum; ++sr) {
+				realMixAudio[sr * 2] = (byte) (sMixAudio[sr] & 0x00FF);
+				realMixAudio[sr * 2 + 1] = (byte) ((sMixAudio[sr] & 0xFF00) >> 8);
+			}
+
+			return realMixAudio;
+		}
+
+	}
+
+	/**
+	 * Weighted mixer
+	 * @author Darcy
+	 */
+	private static class WeightAudioMixer extends MultiAudioMixer{
+		private float[] weights;
+
+		public WeightAudioMixer(float[] weights){
+			this.weights = weights;
+		}
+
+		@Override
+		public byte[] mixRawAudioBytes(byte[][] bMulRoadAudioes) {
+
+			if (bMulRoadAudioes == null || bMulRoadAudioes.length == 0){
+				return null;
+			}
+
+			if(weights == null || weights.length != bMulRoadAudioes.length){
+				return null;
+			}
+
+			byte[] realMixAudio = bMulRoadAudioes[0];
+
+			if(bMulRoadAudioes.length == 1)
+				return realMixAudio;
+
+			for(int rw = 0 ; rw < bMulRoadAudioes.length ; ++rw){
+				if(bMulRoadAudioes[rw].length != realMixAudio.length){
+				Log.e("app", "audio track " + rw + " has a different length.");
+					return null;
+				}
+			}
+
+			int row = bMulRoadAudioes.length;
+			int coloum = realMixAudio.length / 2;
+			short[][] sMulRoadAudioes = new short[row][coloum];
+
+			for (int r = 0; r < row; ++r) {
+				for (int c = 0; c < coloum; ++c) {
+					sMulRoadAudioes[r][c] = (short) ((bMulRoadAudioes[r][c * 2] & 0xff) | (bMulRoadAudioes[r][c * 2 + 1] & 0xff) << 8);
+				}
+			}
+
+			short[] sMixAudio = new short[coloum];
+			int mixVal;
+			int sr = 0;
+			for (int sc = 0; sc < coloum; ++sc) {
+				mixVal = 0;
+				sr = 0;
+				for (; sr < row; ++sr) {
+					mixVal += sMulRoadAudioes[sr][sc] * weights[sr];
+				}
+				sMixAudio[sc] = (short) (mixVal);
+			}
+
+			for (sr = 0; sr < coloum; ++sr) {
+				realMixAudio[sr * 2] = (byte) (sMixAudio[sr] & 0x00FF);
+				realMixAudio[sr * 2 + 1] = (byte) ((sMixAudio[sr] & 0xFF00) >> 8);
+			}
+
+			return realMixAudio;
+		}
+
+	}
+
+}
+

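To make the per-sample math of the mixers above easier to follow, here is a standalone sketch: it decodes little-endian 16-bit samples, applies per-track weights, and clamps to the short range before re-encoding. The clamp is an addition of this sketch; WeightAudioMixer above casts the weighted sum directly (which can wrap on overflow), and the copy of it inside AudioTrackManager2 divides the sum by the track count instead.

// Standalone illustration of the mixers' per-sample math (class name is illustrative).
public final class MixMathSketch {

    static short decodeSample(byte[] pcm, int sampleIndex) {
        // little-endian: low byte first, high byte second
        return (short) ((pcm[sampleIndex * 2] & 0xff) | (pcm[sampleIndex * 2 + 1] & 0xff) << 8);
    }

    static void encodeSample(byte[] pcm, int sampleIndex, short value) {
        pcm[sampleIndex * 2] = (byte) (value & 0x00FF);
        pcm[sampleIndex * 2 + 1] = (byte) ((value & 0xFF00) >> 8);
    }

    // Weighted mix of equal-length 16-bit PCM buffers, clamped instead of truncated.
    static byte[] mixWeighted(byte[][] tracks, float[] weights) {
        byte[] out = new byte[tracks[0].length];
        int samples = out.length / 2;
        for (int s = 0; s < samples; s++) {
            float sum = 0f;
            for (int t = 0; t < tracks.length; t++) {
                sum += decodeSample(tracks[t], s) * weights[t];
            }
            int clamped = Math.max(Short.MIN_VALUE, Math.min(Short.MAX_VALUE, Math.round(sum)));
            encodeSample(out, s, (short) clamped);
        }
        return out;
    }
}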
+ 11 - 4
metronome/src/main/java/com/cooleshow/metronome/Utils/PlayBeanManager.java

@@ -7,7 +7,6 @@ import android.os.Looper;
 
 import com.cooleshow.base.utils.LOG;
 import com.cooleshow.metronome.R;
-import com.cooleshow.metronome.adapter.MetronomeAdapter;
 import com.cooleshow.metronome.constants.MetronomeConfig;
 import com.cooleshow.metronome.constants.MetronomeType;
 import com.cooleshow.metronome.constants.QuarterNoteRhythmType;
@@ -114,6 +113,7 @@ public class PlayBeanManager {
     }
 
     public void reset() {
+        currentBeatRate = MetronomeConfig.DEFAULT_PLAY_RATE;
         playPosition = 0;
         isPlaying = false;
         isInit = false;
@@ -124,7 +124,8 @@ public class PlayBeanManager {
             pausePlay();
         }
         playPosition = 0;
-        SoundPoolUtils.getInstance().release();
+        AudioTrackManager2.getInstance().release();
+//        SoundPoolUtils.getInstance().release();
         mHandler.removeCallbacksAndMessages(null);
         isInit = false;
     }
@@ -138,6 +139,8 @@ public class PlayBeanManager {
             return;
         }
         reset();
+        AudioTrackManager2.getInstance().init();
+        AudioTrackManager2.getInstance().initAudio(musicFileRes);
         setBeat(MetronomeType.METRONOME_44_TYPE, QuarterNoteRhythmType.METRONOME_1_TYPE);
         SoundPoolUtils.getInstance().init(context.getApplicationContext(), musicFileRes);
         isInit = true;
@@ -148,6 +151,7 @@ public class PlayBeanManager {
         currentRhythmType = rhythmType;
         playPosition = 0;
         currentNoteRate = MetronomeType.getNoteSpeedValue(metronomeType);
+        AudioTrackManager2.getInstance().setCurrentNoteRate(currentBeatRate,currentNoteRate);
     }
 
     public int getBeatSymbolCount() {
@@ -199,6 +203,7 @@ public class PlayBeanManager {
 
     public void setCurrentBeatRate(int playBeatRate) {
         currentBeatRate = playBeatRate;
+        AudioTrackManager2.getInstance().setCurrentNoteRate(currentBeatRate,currentNoteRate);
     }
 
     public String getCurrentBeanType() {
@@ -219,8 +224,9 @@ public class PlayBeanManager {
     public void play() {
         stopPlay();
         resetStatus();
-        mHandler.post(mRunnable);
+//        mHandler.post(mRunnable);
         isPlaying = true;
+        AudioTrackManager2.getInstance().playBeat(currentRhythmType,currentBeatType);
     }
 
     private void  resetStatus(){
@@ -236,7 +242,8 @@ public class PlayBeanManager {
 
     private void stopPlay() {
         mHandler.removeCallbacksAndMessages(null);
-        SoundPoolUtils.getInstance().stop();
+        AudioTrackManager2.getInstance().stop();
+//        SoundPoolUtils.getInstance().stop();
     }
 
     private boolean isTickOrTock() {

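For context on the wiring above: setBeat and setCurrentBeatRate now push the tempo and note rate into AudioTrackManager2, whose countBySpeed turns them into a per-beat PCM byte length. A worked example of that arithmetic (the 90 BPM figure and the class name are illustrative only):

// Worked example of AudioTrackManager2.countBySpeed's arithmetic:
// beat duration (ms) = 60000 / BPM * noteRate * rhythmFraction;
// byte length = seconds * 44100 Hz * 2 channels * 2 bytes per sample.
public final class BeatMath {
    public static void main(String[] args) {
        int bpm = 90;                // example tempo pushed via setCurrentBeatRate
        float noteRate = 1.0f;       // MetronomeType.getNoteSpeedValue(...) for 4/4
        float rhythmFraction = 1.0f; // QuarterNoteRhythmType.countSingleNoteDuration(...)

        int durationMs = (int) (1 / (bpm / 60d) * 1000 * noteRate * rhythmFraction); // 666 ms
        int byteLength = (int) ((durationMs / 1000f) * 44100 * 2 * 2);               // ~117482 bytes

        System.out.println(durationMs + " ms -> " + byteLength + " bytes of 16-bit stereo PCM");
    }
}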
+ 11 - 11
metronome/src/main/java/com/cooleshow/metronome/constants/QuarterNoteRhythmType.java

@@ -7,13 +7,13 @@ import com.cooleshow.metronome.R;
  * Quarter-note rhythm types
  */
 public enum QuarterNoteRhythmType {
-    METRONOME_1_TYPE(1, 1,R.mipmap.icon_4_rhythm_1_normal, R.mipmap.icon_4_rhythm_1_select, ""),
-    METRONOME_2_TYPE(2, 2,R.mipmap.icon_4_rhythm_2_normal, R.mipmap.icon_4_rhythm_2_select, ""),
-    METRONOME_3_TYPE(3, 3,R.mipmap.icon_4_rhythm_3_normal, R.mipmap.icon_4_rhythm_3_select, ""),
-    METRONOME_4_TYPE(4, 4,R.mipmap.icon_4_rhythm_4_normal, R.mipmap.icon_4_rhythm_4_select, ""),
-    METRONOME_5_TYPE(5, 2,R.mipmap.icon_4_rhythm_5_normal, R.mipmap.icon_4_rhythm_5_select, ""),
-    METRONOME_6_TYPE(6, 2,R.mipmap.icon_4_rhythm_6_normal, R.mipmap.icon_4_rhythm_6_select, ""),
-    METRONOME_7_TYPE(7, 3,R.mipmap.icon_4_rhythm_7_normal, R.mipmap.icon_4_rhythm_7_select, "");
+    METRONOME_1_TYPE(1, 1, R.mipmap.icon_4_rhythm_1_normal, R.mipmap.icon_4_rhythm_1_select, ""),
+    METRONOME_2_TYPE(2, 2, R.mipmap.icon_4_rhythm_2_normal, R.mipmap.icon_4_rhythm_2_select, ""),
+    METRONOME_3_TYPE(3, 3, R.mipmap.icon_4_rhythm_3_normal, R.mipmap.icon_4_rhythm_3_select, ""),
+    METRONOME_4_TYPE(4, 4, R.mipmap.icon_4_rhythm_4_normal, R.mipmap.icon_4_rhythm_4_select, ""),
+    METRONOME_5_TYPE(5, 2, R.mipmap.icon_4_rhythm_5_normal, R.mipmap.icon_4_rhythm_5_select, ""),
+    METRONOME_6_TYPE(6, 2, R.mipmap.icon_4_rhythm_6_normal, R.mipmap.icon_4_rhythm_6_select, ""),
+    METRONOME_7_TYPE(7, 3, R.mipmap.icon_4_rhythm_7_normal, R.mipmap.icon_4_rhythm_7_select, "");
     private int value;
 
     private int noteCount;
@@ -21,7 +21,7 @@ public enum QuarterNoteRhythmType {
     private int selectImgRes;
     private String des;
 
-    private QuarterNoteRhythmType(int value, int noteCount,int imgRes, int selectImgRes, String des) {
+    private QuarterNoteRhythmType(int value, int noteCount, int imgRes, int selectImgRes, String des) {
         this.value = value;
         this.noteCount = noteCount;
         this.imgRes = imgRes;
@@ -69,7 +69,7 @@ public enum QuarterNoteRhythmType {
         }
         if (rhythmType == QuarterNoteRhythmType.METRONOME_5_TYPE) {
             //Accented triplet rhythm (the first note takes two thirds of the beat, the second one third)
-            if (pos % 2 == 1) {
+            if (pos % 2 == 0) {
                 return 0.666666f;
             } else {
                 return 0.333333f;
@@ -77,7 +77,7 @@ public enum QuarterNoteRhythmType {
         }
         if (rhythmType == QuarterNoteRhythmType.METRONOME_6_TYPE) {
             //
-            if (pos % 2 == 1) {
+            if (pos % 2 == 0) {
                 return 0.75f;
             } else {
                 return 0.25f;
@@ -85,7 +85,7 @@ public enum QuarterNoteRhythmType {
         }
         if (rhythmType == QuarterNoteRhythmType.METRONOME_7_TYPE) {
             //
-            if (pos % 3 == 1) {
+            if (pos % 3 == 0) {
                 return 0.5f;
             } else {
                 return 0.25f;
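The parity changes above mean the first note of each group now gets the longer duration fraction, which matches AudioTrackManager2.playBeat counting positions from 0. A small sketch of the resulting per-position fractions (class name is illustrative):

// Per-position duration fractions after this change, with positions starting at 0.
public final class RhythmFractionDemo {
    public static void main(String[] args) {
        // METRONOME_5_TYPE (accented pair): pos 0 -> 2/3 of the beat, pos 1 -> 1/3, repeating.
        for (int pos = 0; pos < 4; pos++) {
            System.out.println("type5 pos " + pos + " -> " + (pos % 2 == 0 ? 0.666666f : 0.333333f));
        }
        // METRONOME_7_TYPE (three notes): pos 0 -> 1/2 of the beat, pos 1 and 2 -> 1/4 each.
        for (int pos = 0; pos < 3; pos++) {
            System.out.println("type7 pos " + pos + " -> " + (pos % 3 == 0 ? 0.5f : 0.25f));
        }
    }
}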