
Player performance optimization

Steven 4 months ago
parent
commit
f30f30cafc

+ 76 - 98
KulexiuForStudent/KulexiuForStudent/Common/MediaMerge/AudioMerge/MergeAudioAnimation/KSSpectrumView.m

@@ -6,15 +6,12 @@
 //
 
 #import "KSSpectrumView.h"
-#import <Accelerate/Accelerate.h>
 
 @interface KSSpectrumView ()
 
 @property (nonatomic, strong) CAGradientLayer *combinedGradientLayer;
-
-@property (nonatomic, strong) CAShapeLayer *combinedMaskLayer;
-
-@property (nonatomic, assign) CGFloat xIncrement;
+@property (nonatomic, strong) dispatch_queue_t spectrumQueue;
+@property (atomic, assign) BOOL isCalculating;
 
 @end
 
@@ -40,17 +37,16 @@
 - (void)setupView {
     self.barWidth = 3.0;
     self.space = 1.0;
-    self.xIncrement = self.barWidth + self.space;
+    
+    self.spectrumQueue = dispatch_queue_create("com.kulexiu.spectrumQueue", DISPATCH_QUEUE_SERIAL);
     
     self.combinedGradientLayer = [CAGradientLayer layer];
     self.combinedGradientLayer.colors = @[
-        (__bridge id)HexRGBAlpha(0xffffff, 1.0f).CGColor,
-        (__bridge id)HexRGBAlpha(0xffffff, 1.0f).CGColor
+        (__bridge id)HexRGBAlpha(0xFFFFFF, 1.0f).CGColor,
+        (__bridge id)HexRGBAlpha(0xFFFFFF, 1.0f).CGColor
     ];
     self.combinedGradientLayer.locations = @[@0.6, @1.0];
     [self.layer addSublayer:self.combinedGradientLayer];
-    
-    self.combinedMaskLayer = [CAShapeLayer layer];
 }
 
 - (void)resetLayer {
@@ -58,108 +54,90 @@
 }
 
 - (void)setSpectra:(NSArray<NSArray<NSNumber *> *> *)spectra {
-    _spectra = spectra;
-    if (spectra) {
-        CGFloat viewHeight = self.bounds.size.height;
-        CGFloat viewWidth = self.bounds.size.width;
-        @weakObj(self);
-        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-            @strongObj(self);
-            self.isModify = YES;
-            NSUInteger spectraCount = [spectra[0] count];
-            NSMutableArray<NSNumber *> *combinedSpectrum = [NSMutableArray arrayWithCapacity:spectraCount];
-            // take the larger sample of the two channels
-            for (NSUInteger i = 0; i < spectraCount; i++) {
-                NSNumber *leftAmplitude = spectra[0][i];
-                NSNumber *rightAmplitude = spectra.count > 1 ? spectra[1][i] : @0;
-                CGFloat maxAmplitude = MAX(leftAmplitude.floatValue, rightAmplitude.floatValue);
-                [combinedSpectrum addObject:@(maxAmplitude)];
-            }
-            
-            CGFloat middleY = viewHeight / 2.0;
-            CGFloat barHeight = (viewHeight) / 2.0;
-            CGFloat cornerRadius = viewWidth / 2.0f;
-            UIBezierPath *combinedPath = [UIBezierPath bezierPath];
+    // Bail out on empty input, and drop this frame if a previous pass is still running
+    if (spectra.count == 0 || self.isCalculating) return;
+    
+    _spectra = [spectra copy];
+    
+    CGFloat viewHeight = self.bounds.size.height;
+    CGFloat viewWidth = self.bounds.size.width;
+    
+    @weakObj(self);
+    dispatch_async(self.spectrumQueue, ^{
+        @strongObj(self);
+        if (!self) return;
+        
+        self.isCalculating = YES;
+        self.isModify = YES;
+        
+        NSUInteger spectraCount = [spectra[0] count];
+        NSMutableArray<NSNumber *> *combinedSpectrum = [[NSMutableArray alloc] initWithCapacity:spectraCount];
+        UIBezierPath *combinedPath = [UIBezierPath bezierPath];
+        
+        CGFloat middleY = viewHeight / 2.0;
+        CGFloat barHeight = viewHeight / 2.0;
+        CGFloat cornerRadius = self.barWidth / 2.0f;
+        CGFloat xIncrement = self.barWidth + self.space;
+        
+        [self combineSpectraChannels:spectra intoArray:combinedSpectrum];
+        
+        [self createSpectrumPath:combinedPath
+                  withSpectrum:combinedSpectrum
+                     middleY:middleY
+                   barHeight:barHeight
+               cornerRadius:cornerRadius
+                xIncrement:xIncrement];
+        
+        dispatch_async(dispatch_get_main_queue(), ^{
+            if (!self.isModify) return;
             
-            // Left channel
-            for (NSUInteger i = 0; i < spectraCount; i++) {
-                CGFloat x = i * self.xIncrement + self.space;
-                CGFloat amplitudeValue = combinedSpectrum[i].floatValue;
-                CGFloat height = amplitudeValue * barHeight;
-                CGFloat y = middleY - height/2.0; // Centered vertically
-                
-                CGRect rect = CGRectMake(x, y, self.barWidth, height);
-                UIBezierPath *barPath = [UIBezierPath bezierPathWithRoundedRect:rect cornerRadius:cornerRadius];
-                [combinedPath appendPath:barPath];
-            }
+            CAShapeLayer *combinedMaskLayer = [CAShapeLayer layer];
+            combinedMaskLayer.path = combinedPath.CGPath;
+            self.combinedGradientLayer.frame = CGRectMake(0, 0, viewWidth, viewHeight);
+            self.combinedGradientLayer.mask = combinedMaskLayer;
             
-            dispatch_async(dispatch_get_main_queue(), ^{
-                self.combinedMaskLayer.path = combinedPath.CGPath;
-                self.combinedGradientLayer.frame = CGRectMake(0, 0, viewWidth, viewHeight);
-                self.combinedGradientLayer.mask = self.combinedMaskLayer;
-                self.isModify = NO;
-            });
+            self.isModify = NO;
+            self.isCalculating = NO;
         });
+    });
+}
+
+- (void)combineSpectraChannels:(NSArray<NSArray<NSNumber *> *> *)spectra
+                   intoArray:(NSMutableArray<NSNumber *> *)combinedSpectrum {
+    NSUInteger spectraCount = [spectra[0] count];
+    for (NSUInteger i = 0; i < spectraCount; i++) {
+        NSNumber *leftAmplitude = spectra[0][i];
+        NSNumber *rightAmplitude = spectra.count > 1 ? spectra[1][i] : @0;
+        CGFloat maxAmplitude = MAX(leftAmplitude.floatValue, rightAmplitude.floatValue);
+        [combinedSpectrum addObject:@(maxAmplitude)];
     }
 }
 
+- (void)createSpectrumPath:(UIBezierPath *)path
+             withSpectrum:(NSArray<NSNumber *> *)spectrum
+                 middleY:(CGFloat)middleY
+               barHeight:(CGFloat)barHeight
+            cornerRadius:(CGFloat)cornerRadius
+             xIncrement:(CGFloat)xIncrement {
+    [spectrum enumerateObjectsUsingBlock:^(NSNumber *amplitude, NSUInteger i, BOOL *stop) {
+        CGFloat x = i * xIncrement + self.space;
+        CGFloat height = amplitude.floatValue * barHeight;
+        CGFloat y = middleY - height/2.0;
+        
+        CGRect rect = CGRectMake(x, y, self.barWidth, height);
+        UIBezierPath *barPath = [UIBezierPath bezierPathWithRoundedRect:rect cornerRadius:cornerRadius];
+        [path appendPath:barPath];
+    }];
+}
+
 - (CGFloat)translateAmplitudeToYPosition:(float)amplitude {
     CGFloat barHeight = amplitude * self.bounds.size.height;
     return self.bounds.size.height - barHeight;
 }
 
 - (void)dealloc {
+    self.spectrumQueue = nil;
     NSLog(@"---- KSSpectrumView dealloc");
 }
-
-- (NSArray<NSNumber *> *)computeFFTWithPCMBuffer:(float *)pcmBuffer frameCount:(NSUInteger)frameCount {
-    NSUInteger log2n = log2(frameCount); // the FFT length must be a power of two
-    NSUInteger fftSize = 1 << log2n;    // the actual FFT point count
-    
-    // create the FFT setup
-    FFTSetup fftSetup = vDSP_create_fftsetup(log2n, FFT_RADIX2);
-    if (!fftSetup) {
-        NSLog(@"FFT Setup failed");
-        return nil;
-    }
-
-    // the input must be in complex form; create a SplitComplex to hold it
-    DSPSplitComplex splitComplex;
-    float *real = malloc(sizeof(float) * fftSize / 2);
-    float *imag = malloc(sizeof(float) * fftSize / 2);
-    splitComplex.realp = real;
-    splitComplex.imagp = imag;
-    
-    // fill the input and apply a window (e.g. a Hann window)
-    vDSP_ctoz((DSPComplex *)pcmBuffer, 2, &splitComplex, 1, fftSize / 2);
-    vDSP_hann_window(real, fftSize / 2, vDSP_HANN_NORM);
-
-    // run the FFT
-    vDSP_fft_zrip(fftSetup, &splitComplex, 1, log2n, FFT_FORWARD);
-
-    // compute the magnitudes (modulus)
-    float *amplitudes = malloc(sizeof(float) * fftSize / 2);
-    vDSP_zvmags(&splitComplex, 1, amplitudes, 1, fftSize / 2);
-    
-    // convert to dB (optional)
-    float scale = 1.0 / (2.0 * fftSize); // normalization
-    vDSP_vsmul(amplitudes, 1, &scale, amplitudes, 1, fftSize / 2);
-    vDSP_vdbcon(amplitudes, 1, &scale, amplitudes, 1, fftSize / 2, 0);
-
-    // convert the result to an NSArray
-    NSMutableArray<NSNumber *> *result = [NSMutableArray arrayWithCapacity:fftSize / 2];
-    for (NSUInteger i = 0; i < fftSize / 2; i++) {
-        [result addObject:@(amplitudes[i])];
-    }
-
-    // clean up
-    free(real);
-    free(imag);
-    free(amplitudes);
-    vDSP_destroy_fftsetup(fftSetup);
-
-    return result;
-}
 /*
 // Only override drawRect: if you perform custom drawing.
 // An empty implementation adversely affects performance during animation.

+ 274 - 140
KulexiuForStudent/KulexiuForStudent/Common/MediaMerge/AudioMerge/MergePlayer/KSMergeEnginePlayer.m

@@ -10,7 +10,6 @@
 #import <Accelerate/Accelerate.h>
 
 #define READ_FILE_LENGTH (8192)
-
 #define BUFFER_SIZE (2048)
 
 @interface KSMergeEnginePlayer ()
@@ -47,6 +46,12 @@
 
 @property (nonatomic, assign) BOOL isCanceled; // whether a cancel is in progress
 
+@property (nonatomic, strong) dispatch_group_t audioGroup;
+
+@property (nonatomic, strong) dispatch_queue_t analyzerQueue; // dedicated spectrum-analysis queue
+
+@property (nonatomic, strong) dispatch_queue_t mixerQueue;  // dedicated mixing queue
+
 @end
 
 
@@ -87,11 +92,11 @@
     } @finally {
         
     }
+    
     [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleInterruption:) name:AVAudioSessionInterruptionNotification object:audioSession];
     [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleRouteChange:) name:AVAudioSessionRouteChangeNotification object:audioSession];
 }
 
-
 - (void)retryInitEngine {
     // if the audio engine does not exist
     self.audioEngine = [[AVAudioEngine alloc] init];
@@ -128,14 +133,30 @@
 }
 
 - (void)prepareNativeSongWithUrl:(NSURL *)recordAudioUrl bgMusic:(NSURL *)bgMusicUrl {
+    self.audioGroup = dispatch_group_create();
     @weakObj(self);
-    dispatch_async(self.sourceQueue, ^{
+    dispatch_group_async(self.audioGroup, self.sourceQueue, ^{
         @strongObj(self);
         if (!self || self.isCanceled) {
             return;
         }
         [self loadAuidoFile:recordAudioUrl isBgm:NO];
+    });
+    
+    dispatch_group_async(self.audioGroup, self.sourceQueue, ^{
+        @strongObj(self);
+        if (!self || self.isCanceled) {
+            return;
+        }
         [self loadAuidoFile:bgMusicUrl isBgm:YES];
+    });
+    
+    // Run once both loads above have finished; note that if sourceQueue is
+    // serial, the two blocks still execute back-to-back rather than in parallel
+    dispatch_group_notify(self.audioGroup, self.sourceQueue, ^{
+        @strongObj(self);
+        if (!self || self.isCanceled) {
+            return;
+        }
         self.sampleRate = self.audioFile.fileFormat.sampleRate;
         [self configEngine];
         
@@ -161,20 +182,23 @@
     BOOL delegateRespondsToDidGenerateSpectrum = [self.delegate respondsToSelector:@selector(player:didGenerateSpectrum:)];
     AVAudioFormat *outputFormat = [self.audioEngine.mainMixerNode outputFormatForBus:0];
     [self.audioEngine.mainMixerNode removeTapOnBus:0];
+    
     @weakObj(self);
-    [self.audioEngine.mainMixerNode installTapOnBus:0 bufferSize:BUFFER_SIZE format:outputFormat block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
+    [self.audioEngine.mainMixerNode installTapOnBus:0 bufferSize:BUFFER_SIZE*4 format:outputFormat block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
+        // only analyze every second buffer to halve the analysis load
+        static int counter = 0;
+        if (++counter % 2 != 0) return;
         @strongObj(self);
         if (!self || !self.nodePlayer.isPlaying || self.isCanceled) {
             return;
         }
-        // submit the spectrum-analysis work to a background queue
-        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
+        // run the spectrum analysis on a dedicated serial queue
+        dispatch_async(self.analyzerQueue, ^{
             if (self.analyzer.isAnalise == NO) {
-                // analyze the audio buffer
                 NSArray<NSArray<NSNumber *> *> *spectra = [self.analyzer analyseWithBuffer:buffer];
                 
-                // hop back to the main thread to update the UI / notify the delegate
-                dispatch_async(dispatch_get_main_queue(), ^{
+                // dispatch_main_async_safe skips the hop when already on the main thread
+                dispatch_main_async_safe(^{
                     if (delegateRespondsToDidGenerateSpectrum) {
                         [self.delegate player:self didGenerateSpectrum:spectra];
                     }
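dispatch_main_async_safe above is a project macro that does not appear in this diff. A common definition (the SDWebImage convention) executes the block inline when already on the main queue and dispatches asynchronously otherwise; a sketch under that assumption:

    // Hypothetical reconstruction -- not part of this commit.
    #ifndef dispatch_main_async_safe
    #define dispatch_main_async_safe(block)                           \
        if (dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL) == \
            dispatch_queue_get_label(dispatch_get_main_queue())) {    \
            block();                                                  \
        } else {                                                      \
            dispatch_async(dispatch_get_main_queue(), block);         \
        }
    #endif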
@@ -202,13 +226,22 @@
     } @finally {
         if (error) {
             // report the failure to the delegate
-        }
-        else { // loaded successfully
+            if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerDidError:error:)]) {
+                [self.delegate enginePlayerDidError:self error:error];
+            }
+            // release anything that was already allocated
+            if (isBgm) {
+                self.bgAudioFile = nil;
+                self.bgAudioFormat = nil;
+            } else {
+                self.audioFile = nil;
+                self.audioFormat = nil;
+            }
+        } else {
             if (isBgm) {
                 self.bgAudioFile = audioFile;
                 self.bgAudioFormat = audioFormat;
-            }
-            else {
+            } else {
                 self.audioFile = audioFile;
                 self.audioFormat = audioFormat;
             }
@@ -234,118 +267,176 @@
     });
 }
 
-// preload the buffer
+// 1. Tighten thread control around buffer processing
 - (void)prepareBuffer:(AVAudioFramePosition)startPosition offset:(NSInteger)offsetTime mixStart:(AVAudioFramePosition)mixStartPosition {
-    
+    // use a barrier write to serialize access to the shared audio files
+    // (a barrier only differs from a plain async when the queue is concurrent)
     @weakObj(self);
-    dispatch_async(self.sourceQueue, ^{
+    dispatch_barrier_async(self.sourceQueue, ^{
         @strongObj(self);
-        if (!self || self.isCanceled) {
-            return;
-        }
-        if (!self.bgAudioFile || !self.audioFile) {
-            return;
-        }
-        AVAudioFramePosition minFrameCount = (AVAudioFramePosition)MIN(self.bgAudioFile.length, self.audioFile.length);
-        AVAudioFrameCount offsetFrame = labs(offsetTime)/1000.0 * self.audioFile.processingFormat.sampleRate;
-        if (minFrameCount <= startPosition) {
-            return;
-        }
-        AVAudioFrameCount frameToRead = minFrameCount - startPosition > READ_FILE_LENGTH ? READ_FILE_LENGTH : (AVAudioFrameCount)(minFrameCount - startPosition);
-        
+        [self processBufferWithStartPosition:startPosition offset:offsetTime mixStart:mixStartPosition];
+    });
+}
+
+// 2. Extract the actual processing into its own method
+- (void)processBufferWithStartPosition:(AVAudioFramePosition)startPosition
+                                offset:(NSInteger)offsetTime
+                              mixStart:(AVAudioFramePosition)mixStartPosition {
+    
+    if (!self || self.isCanceled) {
+        return;
+    }
+    if (!self.bgAudioFile || !self.audioFile) {
+        return;
+    }
+    // work out the common frame count
+    AVAudioFramePosition minFrameCount = (AVAudioFramePosition)MIN(self.bgAudioFile.length, self.audioFile.length);
+    AVAudioFrameCount offsetFrame = labs(offsetTime)/1000.0 * self.audioFile.processingFormat.sampleRate;
+    if (minFrameCount <= startPosition) {
+        return;
+    }
+    // work out how many frames to read on this pass
+    AVAudioFrameCount frameToRead = minFrameCount - startPosition > READ_FILE_LENGTH ? READ_FILE_LENGTH : (AVAudioFrameCount)(minFrameCount - startPosition);
+    // read the background-music data
+    @autoreleasepool {
         self.bgAudioFile.framePosition = startPosition;
-        AVAudioPCMBuffer *bgBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.bgAudioFile.processingFormat frameCapacity:frameToRead];
+        AVAudioPCMBuffer *bgBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.bgAudioFile.processingFormat
+                                                                   frameCapacity:frameToRead];
         bgBuffer.frameLength = frameToRead;
-        BOOL readSuccess = [self.bgAudioFile readIntoBuffer:bgBuffer frameCount:frameToRead error:nil];
-        if (!readSuccess) {
+        
+        if (![self.bgAudioFile readIntoBuffer:bgBuffer frameCount:frameToRead error:nil]) {
             return;
         }
-        AVAudioPCMBuffer *recordBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.audioFile.processingFormat frameCapacity:frameToRead];
+        
+        // read the recording data
+        AVAudioPCMBuffer *recordBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.audioFile.processingFormat
+                                                                       frameCapacity:frameToRead];
         recordBuffer.frameLength = frameToRead;
         
+        // apply the time offset
         if (offsetTime >= 0) { // the performance needs to start earlier
-            self.audioFile.framePosition = startPosition + offsetFrame;
-            AVAudioFrameCount audioReadFrame = frameToRead;
-            if (startPosition + offsetFrame + frameToRead > minFrameCount) { // if past the end
-                audioReadFrame = (AVAudioFrameCount)(minFrameCount - startPosition - offsetFrame);
-            }
-            if (audioReadFrame <= frameToRead) {
-                BOOL isSuccess = [self.audioFile readIntoBuffer:recordBuffer frameCount:audioReadFrame error:nil];
-                if (!isSuccess) {
-                    return;
-                }
-            }
+            [self processPositiveOffset:offsetTime
+                          startPosition:startPosition
+                            frameToRead:frameToRead
+                           recordBuffer:recordBuffer
+                          minFrameCount:minFrameCount
+                            offsetFrame:offsetFrame];
+        } else { // the performance needs to be delayed
+            [self processNegativeOffset:offsetTime
+                          startPosition:startPosition
+                            frameToRead:frameToRead
+                           recordBuffer:recordBuffer
+                          minFrameCount:minFrameCount
+                            offsetFrame:offsetFrame];
         }
-        else { // the performance needs to be delayed
-            AVAudioFramePosition audioFramePosition = startPosition - offsetFrame;
-            if (audioFramePosition > 0) {
-                self.audioFile.framePosition = audioFramePosition;
-                AVAudioFrameCount audioReadFrame = frameToRead;
-                if (audioFramePosition + frameToRead > minFrameCount) { // if past the end
-                    audioReadFrame = (AVAudioFrameCount)(minFrameCount - audioFramePosition);
-                }
-                // AVAudioFrameCount is an unsigned uint32_t
-                if (audioReadFrame <= frameToRead) {
-                    BOOL isSuccess = [self.audioFile readIntoBuffer:recordBuffer frameCount:audioReadFrame error:nil];
-                    if (!isSuccess) {
-                        return;
-                    }
-                }
-            }
-            else {
-                self.audioFile.framePosition = 0;
-                // only part of the data needs to be read
-                if (offsetFrame - startPosition < frameToRead) {
-                    AVAudioFrameCount readCount = (AVAudioFrameCount)(offsetFrame - startPosition);
-//                    NSLog(@"----need readCount --%u", readCount);
-                    AVAudioPCMBuffer *tempBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.audioFile.processingFormat frameCapacity:readCount];
-                    tempBuffer.frameLength = readCount;
-                    BOOL isSuccess = [self.audioFile readIntoBuffer:tempBuffer error:nil];
-                    if (!isSuccess) {
-                        return;
-                    }
-                    float *tempData = tempBuffer.floatChannelData[0];
-                    float *recordData = recordBuffer.floatChannelData[0];
-                    // copy the data into recordBuffer
-                    AVAudioFrameCount startFrame = frameToRead - readCount;
-                    for (AVAudioFrameCount i = 0; i < readCount; i++) {
-                        recordData[startFrame + i] = tempData[i];
-                    }
-                }
-            }
+        
+        // mix the two buffers
+        [self mixAudioBuffers:bgBuffer
+                 recordBuffer:recordBuffer
+                  frameToRead:frameToRead
+                startPosition:startPosition
+                     mixStart:mixStartPosition];
+    }
+}
+
+// 3. Handle a positive offset
+- (void)processPositiveOffset:(NSInteger)offsetTime
+                startPosition:(AVAudioFramePosition)startPosition
+                  frameToRead:(AVAudioFrameCount)frameToRead
+                 recordBuffer:(AVAudioPCMBuffer *)recordBuffer
+                minFrameCount:(AVAudioFramePosition)minFrameCount
+                  offsetFrame:(AVAudioFrameCount)offsetFrame {
+    
+    self.audioFile.framePosition = startPosition + offsetFrame;
+    AVAudioFrameCount audioReadFrame = frameToRead;
+    
+    if (startPosition + offsetFrame + frameToRead > minFrameCount) {
+        audioReadFrame = (AVAudioFrameCount)(minFrameCount - startPosition - offsetFrame);
+    }
+    
+    if (audioReadFrame <= frameToRead) {
+        [self.audioFile readIntoBuffer:recordBuffer frameCount:audioReadFrame error:nil];
+    }
+}
+
+// 4. Handle a negative offset
+- (void)processNegativeOffset:(NSInteger)offsetTime
+                startPosition:(AVAudioFramePosition)startPosition
+                  frameToRead:(AVAudioFrameCount)frameToRead
+                 recordBuffer:(AVAudioPCMBuffer *)recordBuffer
+                minFrameCount:(AVAudioFramePosition)minFrameCount
+                  offsetFrame:(AVAudioFrameCount)offsetFrame {
+    
+    AVAudioFramePosition audioFramePosition = startPosition - offsetFrame;
+    
+    if (audioFramePosition > 0) {
+        self.audioFile.framePosition = audioFramePosition;
+        AVAudioFrameCount audioReadFrame = frameToRead;
+        
+        if (audioFramePosition + frameToRead > minFrameCount) {
+            audioReadFrame = (AVAudioFrameCount)(minFrameCount - audioFramePosition);
         }
         
-        float *bgLeftChannel = bgBuffer.floatChannelData[0];
-        float *bgRightChannel = bgBuffer.floatChannelData[1];
-        if (bgBuffer.format.channelCount == 1) {
-            bgRightChannel = bgBuffer.floatChannelData[0];
+        if (audioReadFrame <= frameToRead) {
+            [self.audioFile readIntoBuffer:recordBuffer frameCount:audioReadFrame error:nil];
         }
-        // the recording file is mono
+    } else {
+        self.audioFile.framePosition = 0;
+        if (offsetFrame - startPosition < frameToRead) {
+            AVAudioFrameCount readCount = (AVAudioFrameCount)(offsetFrame - startPosition);
+            AVAudioPCMBuffer *tempBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.audioFile.processingFormat
+                                                                         frameCapacity:readCount];
+            tempBuffer.frameLength = readCount;
+            
+            if ([self.audioFile readIntoBuffer:tempBuffer error:nil]) {
+                float *tempData = tempBuffer.floatChannelData[0];
+                float *recordData = recordBuffer.floatChannelData[0];
+                AVAudioFrameCount startFrame = frameToRead - readCount;
+                memcpy(recordData + startFrame, tempData, readCount * sizeof(float));
+            }
+        }
+    }
+}
+
+// 5. Mixing
+- (void)mixAudioBuffers:(AVAudioPCMBuffer *)bgBuffer
+           recordBuffer:(AVAudioPCMBuffer *)recordBuffer
+            frameToRead:(AVAudioFrameCount)frameToRead
+          startPosition:(AVAudioFramePosition)startPosition
+               mixStart:(AVAudioFramePosition)mixStartPosition {
+    
+    // run the mix asynchronously on the dedicated mixer queue
+    dispatch_async(self.mixerQueue, ^{
+        float *bgLeftChannel = bgBuffer.floatChannelData[0];
+        float *bgRightChannel = bgBuffer.floatChannelData[bgBuffer.format.channelCount > 1 ? 1 : 0];
         float *recordLeftChannel = recordBuffer.floatChannelData[0];
         
         float *mixLeftChannel = self.mixBuffer.floatChannelData[0];
         float *mixRightChannel = self.mixBuffer.floatChannelData[1];
         
-        for (int frame = 0; frame < frameToRead; frame++) {
+        AVAudioFramePosition mixStartIndex = startPosition - mixStartPosition;
+        // bounds check: the whole frameToRead span must fit inside mixBuffer
+        if (mixStartIndex >= 0 && mixStartIndex + frameToRead <= self.mixBuffer.frameLength) {
+            // apply the background-music volume in place
+            vDSP_vsmul(bgLeftChannel, 1, &self->_bgVolume, bgLeftChannel, 1, frameToRead);
+            vDSP_vsmul(bgRightChannel, 1, &self->_bgVolume, bgRightChannel, 1, frameToRead);
             
-            AVAudioFramePosition mixIndex = frame + startPosition - mixStartPosition;
-            float leftChannel = (frame < bgBuffer.frameLength) ? bgLeftChannel[frame] : 0;
-            float rightChannel = (frame < bgBuffer.frameLength) ? bgRightChannel[frame] : 0;
+            // apply the recording volume in place
+            vDSP_vsmul(recordLeftChannel, 1, &self->_recordVolume, recordLeftChannel, 1, frameToRead);
             
-            float recordData = (frame < recordBuffer.frameLength) ? recordLeftChannel[frame] : 0;
+            // mix the left channel
+            vDSP_vadd(bgLeftChannel, 1, recordLeftChannel, 1, mixLeftChannel + mixStartIndex, 1, frameToRead);
             
-            float mixLeftData = [self mixChannelData:leftChannel bgVolume:self.bgVolume recordData:recordData recordVolume:self.recordVolume];
-            float mixRightData = [self mixChannelData:rightChannel bgVolume:self.bgVolume recordData:recordData recordVolume:self.recordVolume];
+            // mix the right channel (the recording is mono, so its single channel feeds both sides)
+            vDSP_vadd(bgRightChannel, 1, recordLeftChannel, 1, mixRightChannel + mixStartIndex, 1, frameToRead);
             
-            // guard against out-of-bounds writes
-            if (mixIndex >= 0 && mixIndex < self.mixBuffer.frameLength) {
-                mixLeftChannel[mixIndex] = fminf(fmaxf(mixLeftData, -1.0f), 1.0f);
-                mixRightChannel[mixIndex] = fminf(fmaxf(mixRightData, -1.0f), 1.0f);
-            }
+            // apply 0.5 gain, replacing the old divide-by-two in mixChannelData:
+            float scale = 0.5f;
+            vDSP_vsmul(mixLeftChannel + mixStartIndex, 1, &scale, mixLeftChannel + mixStartIndex, 1, frameToRead);
+            vDSP_vsmul(mixRightChannel + mixStartIndex, 1, &scale, mixRightChannel + mixStartIndex, 1, frameToRead);
         }
     });
 }
 
+
 - (void)scheduleBufferFromPosition:(AVAudioFramePosition)startPosition {
     [self resetMixBuffer];
     self.startPosition = startPosition;
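One behavioral note on the vectorized mix in mixAudioBuffers:... above: the removed per-sample loop clamped every mixed sample to [-1.0, 1.0], while the vDSP path only scales by 0.5. With volumes at or below 1.0 the scaled sum cannot clip, but if changeVolume:recordVolume: ever receives gains above 1.0, a clamp pass would restore the old guarantee; a sketch:

    // Hypothetical post-mix clamp, mirroring the old fminf/fmaxf behavior.
    float lo = -1.0f, hi = 1.0f;
    vDSP_vclip(mixLeftChannel  + mixStartIndex, 1, &lo, &hi,
               mixLeftChannel  + mixStartIndex, 1, frameToRead);
    vDSP_vclip(mixRightChannel + mixStartIndex, 1, &lo, &hi,
               mixRightChannel + mixStartIndex, 1, frameToRead);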
@@ -356,14 +447,8 @@
     }];
 }
 
-
-
-- (float)mixChannelData:(float)bgData bgVolume:(float)bgVolume recordData:(float)recordData recordVolume:(float)recordVolume {
-    return (bgData * bgVolume + recordData * recordVolume) / 2;
-}
-
 - (void)changeVolume:(float)bgVolume recordVolume:(float)recordVolume {
-//    NSLog(@"bg volume ---- %f,  record volume ---- %f", bgVolume, recordVolume);
+    //    NSLog(@"bg volume ---- %f,  record volume ---- %f", bgVolume, recordVolume);
     self.bgVolume = bgVolume;
     self.recordVolume = recordVolume;
 }
@@ -432,7 +517,7 @@
 }
 
 - (void)seekToTimePlay:(NSInteger)time {
-
+    
     if (self.audioEngine.isRunning == NO) {
         [self startEngine];
     }
@@ -483,7 +568,7 @@
     // seek to the target position
     self.currentFrame = startFrame;
     [self scheduleBufferFromPosition:startFrame];
-
+    
     if (needPlay) {
         [self.nodePlayer play];
         [self startTimer];
@@ -523,11 +608,25 @@
     return _sourceQueue;
 }
 
+- (dispatch_queue_t)analyzerQueue {
+    if (!_analyzerQueue) {
+        _analyzerQueue = dispatch_queue_create("com.ks.analyzer", DISPATCH_QUEUE_SERIAL);
+    }
+    return _analyzerQueue;
+}
+
+- (dispatch_queue_t)mixerQueue {
+    if (!_mixerQueue) {
+        _mixerQueue = dispatch_queue_create("com.ks.mixer", DISPATCH_QUEUE_SERIAL);
+    }
+    return _mixerQueue;
+}
+
 - (NSTimer *)timer {
-    
     if (!_timer) {
         __weak typeof(self)weakSelf = self;
-        _timer = [NSTimer scheduledTimerWithTimeInterval:0.01 repeats:YES block:^(NSTimer * _Nonnull timer) {
+        // the firing interval was raised from 0.01 s to 0.1 s to cut timer overhead
+        _timer = [NSTimer scheduledTimerWithTimeInterval:0.1 repeats:YES block:^(NSTimer * _Nonnull timer) {
             [weakSelf timeFunction];
         }];
         [[NSRunLoop currentRunLoop] addTimer:_timer forMode:NSRunLoopCommonModes];
@@ -552,11 +651,11 @@
     }
     else {
         // the timer now fires every 100 ms, so the mix buffer is scheduled on every tick
-        if (self.timeCount % 10 == 0) {
+//        if (self.timeCount % 5 == 0) {
             [self scheduleMixBuffer];
-        }
+//        }
         self.timeCount++;
-
+        
         if (self.delegate && [self.delegate respondsToSelector:@selector(updatePlayProgress:andTotalTime:andProgress:currentInterval:inPlayer:)]) {
             [self.delegate updatePlayProgress:currentTime andTotalTime:self.totalDuration andProgress:progress currentInterval:inteveral*1000 inPlayer:self];
         }
@@ -589,9 +688,9 @@
             NSLog(@"播放已停止");
         }
         double elapsedSamples = (double)currentFrame;
-
+        
         NSTimeInterval currentTime = elapsedSamples / self.sampleRate;
-//        NSLog(@"当前时间----- %f",currentTime*1000);
+        //        NSLog(@"当前时间----- %f",currentTime*1000);
         return currentTime*1000;
     }
     else {
@@ -612,32 +711,67 @@
 }
 
 - (void)releaseAudioResources {
-    if (self.audioEngine.isRunning) {
-        [self.audioEngine stop];
-        [self.audioEngine reset];
-    }
-    // 停止并移除 player node
-    if (self.nodePlayer) {
-        [self.nodePlayer stop];
-        [self.audioEngine detachNode:self.nodePlayer];
-        self.nodePlayer = nil;
-    }
-    if (self.analyzer) {
-        self.analyzer = nil;
-    }
-    
-    // 释放音频文件
-    self.audioFile = nil;
-    self.audioFormat = nil;
-    self.bgAudioFile = nil;
-    self.bgAudioFormat = nil;
-    self.mixBuffer = nil;
-    
-    // 释放音频引擎
-    self.audioEngine = nil;
-    
-    // 打印确认释放日志
-    NSLog(@"Audio resources successfully released.");
+    // use sync rather than async so the release has completed before returning
+    // (caution: if sourceQueue is serial, calling this from sourceQueue itself deadlocks)
+    dispatch_sync(self.sourceQueue, ^{
+        @try {
+            // 1. Stop all activity first
+            if (self.nodePlayer.isPlaying) {
+                [self.nodePlayer stop];
+            }
+            
+            if (self.audioEngine.isRunning) {
+                [self removeTap];
+                [self.audioEngine stop];
+                [self.audioEngine reset];
+            }
+            
+            // 2. Detach the player node
+            if (self.nodePlayer && self.audioEngine) {
+                @try {
+                    [self.audioEngine detachNode:self.nodePlayer];
+                } @catch (NSException *exception) {
+                    NSLog(@"Detach node exception: %@", exception);
+                }
+            }
+            
+            // 3. Hold the references in temporaries
+            AVAudioEngine *tempEngine = self.audioEngine;
+            AVAudioPlayerNode *tempPlayer = self.nodePlayer;
+            AVAudioFile *tempAudioFile = self.audioFile;
+            AVAudioFile *tempBgAudioFile = self.bgAudioFile;
+            AVAudioPCMBuffer *tempMixBuffer = self.mixBuffer;
+            
+            // 4. Clear the property references first
+            self.audioEngine = nil;
+            self.nodePlayer = nil;
+            self.analyzer = nil;
+            self.audioFile = nil;
+            self.audioFormat = nil;
+            self.bgAudioFile = nil;
+            self.bgAudioFormat = nil;
+            self.mixBuffer = nil;
+            
+            // 5. Release the resources while the temporaries are still in scope
+            @autoreleasepool {
+                tempEngine = nil;
+                tempPlayer = nil;
+                tempAudioFile = nil;
+                tempBgAudioFile = nil;
+                tempMixBuffer = nil;
+            }
+            
+            // 6. Reset playback state
+            self.isReady = NO;
+            self.currentFrame = 0;
+            self.startPosition = 0;
+            self.timeCount = 0;
+            
+        } @catch (NSException *exception) {
+            NSLog(@"Release resources exception: %@", exception);
+        } @finally {
+            NSLog(@"Audio resources release completed");
+        }
+    });
 }
 
 - (void)dealloc {