
Player optimization

Steven 2 months ago
parent
commit
c42f45c6de

+ 122 - 52
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergeAudioAnimation/KSRealtimeAnalyzer.m

@@ -22,6 +22,13 @@
 
 @property (nonatomic, assign) BOOL isCanceled;
 
+@property (nonatomic, strong) NSArray<NSValue *> *bandIndexRanges; // precomputed FFT-bin range per band
+
+@property (nonatomic, assign) float *windowBuffer;     // reusable Hann window
+@property (nonatomic, assign) float *amplitudesBuffer; // reusable FFT magnitude buffer
+
+@property (nonatomic, strong) dispatch_queue_t analyzeQueue; // serial analysis queue
+
 @end
 
 @implementation KSRealtimeAnalyzer
@@ -40,15 +47,29 @@
         _spectrumBuffer = [NSMutableArray array];
         [self setupBands];
         _cachedFrequencyWeights = [self createFrequencyWeights];
+
+        _windowBuffer = (float *)calloc(_fftSize, sizeof(float));
+        _amplitudesBuffer = (float *)calloc(_fftSize / 2, sizeof(float));
+
+        vDSP_hann_window(_windowBuffer, _fftSize, vDSP_HANN_NORM);
+
+        _analyzeQueue = dispatch_queue_create("com.kulexiu.audioanalyzer", DISPATCH_QUEUE_SERIAL);
     }
     return self;
 }
 
 - (void)dealloc {
     self.isCanceled = YES;
+    
+    // Drain any in-flight work before freeing the FFT buffers
+    dispatch_sync(self.analyzeQueue, ^{});
+    
+    self.analyzeQueue = nil;
+    
     vDSP_destroy_fftsetup(_fftSetup);
     free(_realp);
     free(_imagp);
+    free(_windowBuffer);
+    free(_amplitudesBuffer);
     NSLog(@"-------KSRealtimeAnalyzer dealloc");
 }
 
@@ -64,54 +85,94 @@
     }
     
     self.bands = [bands copy];
+    
+    // Precompute the FFT-bin index range for each frequency band
+    NSMutableArray *indexRanges = [NSMutableArray arrayWithCapacity:self.frequencyBands];
+    float bandWidth = 44100.0 / self.fftSize; // assumes a 44100 Hz sample rate
+    
+    for (NSDictionary *band in self.bands) {
+        NSInteger startIndex = round([band[@"lowerFrequency"] floatValue] / bandWidth);
+        NSInteger endIndex = MIN(round([band[@"upperFrequency"] floatValue] / bandWidth), self.fftSize / 2 - 1);
+        [indexRanges addObject:[NSValue valueWithRange:NSMakeRange(startIndex, endIndex - startIndex + 1)]];
+    }
+    self.bandIndexRanges = indexRanges;
 }
 
+// A synchronization lock is added in analyseWithBuffer
 - (NSArray<NSArray<NSNumber *> *> *)analyseWithBuffer:(AVAudioPCMBuffer *)buffer {
     if (self.isCanceled) {
         return [NSArray array];
     }
-    self.isAnalise = YES;
-    NSArray<NSArray<NSNumber *> *> *channelsAmplitudes = [self fftWithBuffer:buffer];
-    NSArray<NSNumber *> *aWeights = self.cachedFrequencyWeights;
-    if (self.spectrumBuffer.count == 0) {
-        for (NSInteger i = 0; i < channelsAmplitudes.count; i++) {
-            [self.spectrumBuffer addObject:[self emptyArrayOfCount:self.bands.count]];
-        }
-    }
     
-    NSMutableArray<NSArray<NSNumber *> *> *result = [NSMutableArray arrayWithArray:self.spectrumBuffer];
-    // Amplification factor
-    CGFloat amplificationFactor = 40.0; // adjust to the desired amplification
-    dispatch_apply(channelsAmplitudes.count, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^(size_t index) {
-        NSArray<NSNumber *> *amplitudes = channelsAmplitudes[index];
-        NSMutableArray<NSNumber *> *weightedAmplitudes = [NSMutableArray arrayWithCapacity:amplitudes.count];
-        
-        for (NSInteger i = 0; i < amplitudes.count; i++) {
-            weightedAmplitudes[i] = @(amplitudes[i].floatValue * aWeights[i].floatValue);
-        }
-        
-        NSMutableArray<NSNumber *> *spectrum = [NSMutableArray arrayWithCapacity:self.bands.count];
-        
-        for (NSDictionary *band in self.bands) {
-            float bandWidth = (float)buffer.format.sampleRate / self.fftSize;
-            [spectrum addObject:@([self findMaxAmplitudeForBand:band inAmplitudes:weightedAmplitudes withBandWidth:bandWidth] * amplificationFactor)];
-        }
-        
-        spectrum = [self highlightWaveform:spectrum];
-        
-        NSMutableArray<NSNumber *> *previousSpectrum = [self.spectrumBuffer[index] mutableCopy];
-        NSMutableArray<NSNumber *> *newSpectrum = [NSMutableArray arrayWithCapacity:spectrum.count];
-        
-        for (NSInteger i = 0; i < spectrum.count; i++) {
-            float smoothedValue = ([previousSpectrum[i] floatValue] * self.spectrumSmooth) + (spectrum[i].floatValue * (1 - self.spectrumSmooth));
-            newSpectrum[i] = @(smoothedValue);
+    @synchronized (self) {
+        self.isAnalise = YES;
+        @autoreleasepool {
+            NSArray<NSArray<NSNumber *> *> *channelsAmplitudes = [self fftWithBuffer:buffer];
+            NSArray<NSNumber *> *aWeights = self.cachedFrequencyWeights;
+            
+            if (self.spectrumBuffer.count == 0) {
+                for (NSInteger i = 0; i < channelsAmplitudes.count; i++) {
+                    [self.spectrumBuffer addObject:[self emptyArrayOfCount:self.bands.count]];
+                }
+            }
+            
+            NSMutableArray<NSArray<NSNumber *> *> *result = [NSMutableArray arrayWithArray:self.spectrumBuffer];
+            CGFloat amplificationFactor = 40.0;
+            
+            for (size_t index = 0; index < channelsAmplitudes.count; index++) {
+                dispatch_async(self.analyzeQueue, ^{
+                    NSArray<NSNumber *> *amplitudes = channelsAmplitudes[index];
+                    NSMutableArray<NSNumber *> *weightedAmplitudes = [NSMutableArray arrayWithCapacity:amplitudes.count];
+                    
+                    float *amplitudesBuffer = (float *)malloc(amplitudes.count * sizeof(float));
+                    float *weightsBuffer = (float *)malloc(amplitudes.count * sizeof(float));
+                    float *resultBuffer = (float *)malloc(amplitudes.count * sizeof(float));
+                    
+                    for (NSInteger i = 0; i < amplitudes.count; i++) {
+                        amplitudesBuffer[i] = amplitudes[i].floatValue;
+                        weightsBuffer[i] = aWeights[i].floatValue;
+                    }
+                    
+                    vDSP_vmul(amplitudesBuffer, 1, weightsBuffer, 1, resultBuffer, 1, (vDSP_Length)amplitudes.count);
+                    
+                    for (NSInteger i = 0; i < amplitudes.count; i++) {
+                        weightedAmplitudes[i] = @(resultBuffer[i]);
+                    }
+                    
+                    free(amplitudesBuffer);
+                    free(weightsBuffer);
+                    free(resultBuffer);
+                    
+                    NSMutableArray<NSNumber *> *spectrum = [NSMutableArray arrayWithCapacity:self.bands.count];
+                    
+                    for (NSDictionary *band in self.bands) {
+                        float bandWidth = (float)buffer.format.sampleRate / self.fftSize;
+                        [spectrum addObject:@([self findMaxAmplitudeForBand:band inAmplitudes:weightedAmplitudes withBandWidth:bandWidth] * amplificationFactor)];
+                    }
+                    
+                    spectrum = [self highlightWaveform:spectrum];
+                    
+                    NSMutableArray<NSNumber *> *previousSpectrum = [self.spectrumBuffer[index] mutableCopy];
+                    NSMutableArray<NSNumber *> *newSpectrum = [NSMutableArray arrayWithCapacity:spectrum.count];
+                    
+                    for (NSInteger i = 0; i < spectrum.count; i++) {
+                        float smoothedValue = ([previousSpectrum[i] floatValue] * self.spectrumSmooth) + (spectrum[i].floatValue * (1 - self.spectrumSmooth));
+                        newSpectrum[i] = @(smoothedValue);
+                    }
+                    
+                    // Apply the results on the serial analyze queue itself, so the
+                    // empty barrier sync below guarantees they are in place on return
+                    result[index] = [newSpectrum copy];
+                    self.spectrumBuffer[index] = [newSpectrum copy];
+                });
+            }
+            
+            // Empty barrier block: wait for all per-channel tasks to drain
+            dispatch_sync(self.analyzeQueue, ^{});
+            
+            self.isAnalise = NO;
+            return [result copy];
         }
-        
-        result[index] = [newSpectrum copy];
-        self.spectrumBuffer[index] = [newSpectrum copy];
-    });
-    self.isAnalise = NO;
-    return [result copy];
+    }
 }
 
 
@@ -145,10 +206,7 @@
     
     for (NSInteger i = 0; i < channelCount; i++) {
         float *channel = floatChannelData[i];
-        NSMutableData *windowData = [NSMutableData dataWithLength:self.fftSize * sizeof(float)];
-        float *window = windowData.mutableBytes;
-        vDSP_hann_window(window, self.fftSize, vDSP_HANN_NORM);
-        vDSP_vmul(channel, 1, window, 1, channel, 1, self.fftSize);
+        vDSP_vmul(channel, 1, self.windowBuffer, 1, channel, 1, self.fftSize);
         
         DSPSplitComplex fftInOut;
         fftInOut.realp = self.realp;
@@ -162,19 +220,15 @@
         vDSP_vsmul(fftInOut.realp, 1, &fftNormFactor, fftInOut.realp, 1, self.fftSize / 2);
         vDSP_vsmul(fftInOut.imagp, 1, &fftNormFactor, fftInOut.imagp, 1, self.fftSize / 2);
         
-        NSMutableArray<NSNumber *> *channelAmplitudes = [NSMutableArray arrayWithCapacity:self.fftSize / 2];
-        float *amplitudesArray = (float *)calloc(self.fftSize / 2, sizeof(float));
-        vDSP_zvabs(&fftInOut, 1, amplitudesArray, 1, self.fftSize / 2);
-        amplitudesArray[0] /= 2;
+        vDSP_zvabs(&fftInOut, 1, self.amplitudesBuffer, 1, self.fftSize / 2);
+        self.amplitudesBuffer[0] /= 2;
         
+        NSMutableArray<NSNumber *> *channelAmplitudes = [NSMutableArray arrayWithCapacity:self.fftSize / 2];
         for (NSInteger j = 0; j < self.fftSize / 2; j++) {
-            channelAmplitudes[j] = @(amplitudesArray[j]);
+            channelAmplitudes[j] = @(self.amplitudesBuffer[j]);
         }
         
         [amplitudes addObject:[channelAmplitudes copy]];
-//        free(fftInOut.realp);
-//        free(fftInOut.imagp);
-        free(amplitudesArray);
     }
     
     if (isInterleaved) {
@@ -259,4 +313,20 @@
     return array;
 }
 
+- (float)findMaxAmplitudeForBand:(NSInteger)bandIndex inAmplitudes:(NSArray<NSNumber *> *)amplitudes {
+    NSRange range = [self.bandIndexRanges[bandIndex] rangeValue];
+    float maxAmplitude = -FLT_MAX;
+    
+    for (NSInteger i = range.location; i < NSMaxRange(range); i++) {
+        float amplitude = [amplitudes[i] floatValue];
+        maxAmplitude = MAX(maxAmplitude, amplitude);
+    }
+    return maxAmplitude;
+}
+
+- (void)shutdown {
+    self.isCanceled = YES;
+    dispatch_sync(self.analyzeQueue, ^{});
+}
+
 @end
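
For reference, the band-range precomputation added above can be exercised standalone. A minimal sketch, assuming the same band dictionaries (lowerFrequency/upperFrequency keys) and the 44100 Hz sample rate the commit hard-codes; the helper names are illustrative, not part of the file:

    #import <Foundation/Foundation.h>
    #include <float.h>
    #include <math.h>

    // Precompute one NSRange of FFT-bin indices per band (done once, in setupBands).
    static NSArray<NSValue *> *BandIndexRanges(NSArray<NSDictionary *> *bands,
                                               NSUInteger fftSize, float sampleRate) {
        float binWidth = sampleRate / fftSize; // Hz covered by one FFT bin
        NSMutableArray<NSValue *> *ranges = [NSMutableArray arrayWithCapacity:bands.count];
        for (NSDictionary *band in bands) {
            NSInteger start = (NSInteger)roundf([band[@"lowerFrequency"] floatValue] / binWidth);
            NSInteger end = MIN((NSInteger)roundf([band[@"upperFrequency"] floatValue] / binWidth),
                                (NSInteger)(fftSize / 2 - 1));
            [ranges addObject:[NSValue valueWithRange:NSMakeRange(start, end - start + 1)]];
        }
        return [ranges copy];
    }

    // Per-frame hot path: a plain max-scan over the cached range, no frequency math.
    static float MaxAmplitudeInRange(NSRange range, NSArray<NSNumber *> *amplitudes) {
        float peak = -FLT_MAX;
        for (NSUInteger i = range.location; i < NSMaxRange(range); i++) {
            peak = MAX(peak, amplitudes[i].floatValue);
        }
        return peak;
    }

Rounding the band edges to bin indices once means the per-frame work is pure index arithmetic, which is the point of the bandIndexRanges cache.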

+ 16 - 0
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergeAudioAnimation/KSSpectrumView.m

@@ -11,6 +11,8 @@
 
 @property (nonatomic, strong) CAGradientLayer *combinedGradientLayer;
 
+@property (nonatomic, assign) NSUInteger currentTaskId;
+
 @end
 
 @implementation KSSpectrumView
@@ -52,11 +54,18 @@
 - (void)setSpectra:(NSArray<NSArray<NSNumber *> *> *)spectra {
     _spectra = spectra;
     if (spectra) {
+        // Generate a unique token, used to cancel stale tasks
+        NSUInteger taskId = arc4random();
+        _currentTaskId = taskId;
+
         CGFloat viewHeight = self.bounds.size.height;
         CGFloat viewWidth = self.bounds.size.width;
         @weakObj(self);
         dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
             @strongObj(self);
+            if (!self || self.currentTaskId != taskId) {
+                return;
+            }
             self.isModify = YES;
             NSUInteger spectraCount = [spectra[0] count];
             NSMutableArray<NSNumber *> *combinedSpectrum = [NSMutableArray arrayWithCapacity:spectraCount];
@@ -87,6 +96,9 @@
             }
             
             dispatch_async(dispatch_get_main_queue(), ^{
+                if (self.currentTaskId != taskId) {
+                    return;
+                }
                 CAShapeLayer *combinedMaskLayer = [CAShapeLayer layer];
                 combinedMaskLayer.path = combinedPath.CGPath;
                 self.combinedGradientLayer.frame = CGRectMake(0, 0, viewWidth, viewHeight);
@@ -103,6 +115,10 @@
 }
 
 - (void)dealloc {
+    _currentTaskId = 0;
+    // Remove the gradient layer
+    [self.combinedGradientLayer removeFromSuperlayer];
+    self.combinedGradientLayer = nil;
     NSLog(@"---- KSSpectrumView dealloc");
 }
 /*
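
The taskId guard above implements drop-stale-work rather than true cancellation: each call to setSpectra: mints a fresh token, and both the background pass and the main-queue commit re-check it before touching state. A minimal sketch of the same shape, with an illustrative method name and placeholder bodies (the file itself additionally pairs this with @weakObj/@strongObj so in-flight blocks do not retain the view):

    - (void)submitWork {
        NSUInteger taskId = arc4random();  // fresh token invalidates any older task
        self.currentTaskId = taskId;
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            if (self.currentTaskId != taskId) return;   // stale before it even started
            // ... expensive path building off the main thread ...
            dispatch_async(dispatch_get_main_queue(), ^{
                if (self.currentTaskId != taskId) return; // re-check before mutating layers
                // ... commit the CAShapeLayer mask and gradient frame ...
            });
        });
    }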

+ 272 - 147
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergePlayer/KSMergeEnginePlayer.m

@@ -10,7 +10,6 @@
 #import <Accelerate/Accelerate.h>
 
 #define READ_FILE_LENGTH (8192)
-
 #define BUFFER_SIZE (2048)
 
 @interface KSMergeEnginePlayer ()
@@ -47,6 +46,12 @@
 
 @property (nonatomic, assign) BOOL isCanceled; // whether cancellation is in progress
 
+@property (nonatomic, strong) dispatch_group_t audioGroup;
+
+@property (nonatomic, strong) dispatch_queue_t analyzerQueue; // dedicated analysis queue
+
+@property (nonatomic, strong) dispatch_queue_t mixerQueue;  // dedicated mixing queue
+
 @end
 
 
@@ -67,7 +72,9 @@
 
 - (void)configEngine {
     [self setupAudioSession];
-    
+    if (self.audioEngine) {
+        [self releaseAudioResources];
+    }
     self.audioEngine = [[AVAudioEngine alloc] init];
     self.nodePlayer = [[AVAudioPlayerNode alloc] init];
     
@@ -128,17 +135,34 @@
 }
 
 - (void)prepareNativeSongWithUrl:(NSURL *)recordAudioUrl bgMusic:(NSURL *)bgMusicUrl {
+    self.audioGroup = dispatch_group_create();
     @weakObj(self);
-    dispatch_async(self.sourceQueue, ^{
+    dispatch_group_async(self.audioGroup, self.sourceQueue, ^{
         @strongObj(self);
         if (!self || self.isCanceled) {
             return;
         }
         [self loadAuidoFile:recordAudioUrl isBgm:NO];
+    });
+    
+    dispatch_group_async(self.audioGroup, self.sourceQueue, ^{
+        @strongObj(self);
+        if (!self || self.isCanceled) {
+            return;
+        }
         [self loadAuidoFile:bgMusicUrl isBgm:YES];
+    });
+    
+    // Wait until both audio files have finished loading
+    dispatch_group_notify(self.audioGroup, self.sourceQueue, ^{
+        @strongObj(self);
+        if (!self || self.isCanceled) {
+            return;
+        }
         self.sampleRate = self.audioFile.fileFormat.sampleRate;
         [self configEngine];
         
+        
         AVAudioFormat *outputFormat = [self.audioEngine.mainMixerNode outputFormatForBus:0];
         [self.audioEngine connect:self.nodePlayer to:self.audioEngine.mainMixerNode format:outputFormat];
         if (self.needAnalyzer) {
@@ -161,20 +185,23 @@
     BOOL delegateRespondsToDidGenerateSpectrum = [self.delegate respondsToSelector:@selector(player:didGenerateSpectrum:)];
     AVAudioFormat *outputFormat = [self.audioEngine.mainMixerNode outputFormatForBus:0];
     [self.audioEngine.mainMixerNode removeTapOnBus:0];
+    
     @weakObj(self);
-    [self.audioEngine.mainMixerNode installTapOnBus:0 bufferSize:BUFFER_SIZE format:outputFormat block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
+    [self.audioEngine.mainMixerNode installTapOnBus:0 bufferSize:BUFFER_SIZE*4 format:outputFormat block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
+        // Analyze once for every 2 accumulated buffers
+        static int counter = 0;
+        if (++counter % 2 != 0) return;
         @strongObj(self);
         if (!self || !self.nodePlayer.isPlaying || self.isCanceled) {
             return;
         }
-        // Submit the spectrum analysis task to a background queue
-        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
+        // Run spectrum analysis on a dedicated serial queue
+        dispatch_async(self.analyzerQueue, ^{
             if (self.analyzer.isAnalise == NO) {
-                // Analyze the audio buffer
                 NSArray<NSArray<NSNumber *> *> *spectra = [self.analyzer analyseWithBuffer:buffer];
                 
-                // Hop back to the main thread to update the UI / call the delegate
-                dispatch_async(dispatch_get_main_queue(), ^{
+                // dispatch_main_async_safe avoids a redundant hop when already on main
+                dispatch_main_async_safe(^{
                     if (delegateRespondsToDidGenerateSpectrum) {
                         [self.delegate player:self didGenerateSpectrum:spectra];
                     }
@@ -202,13 +229,22 @@
     } @finally {
         if (error) {
             // Error callback
-        }
-        else { // loaded successfully
+            if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerDidError:error:)]) {
+                [self.delegate enginePlayerDidError:self error:error];
+            }
+            // Release anything already allocated
+            if (isBgm) {
+                self.bgAudioFile = nil;
+                self.bgAudioFormat = nil;
+            } else {
+                self.audioFile = nil;
+                self.audioFormat = nil;
+            }
+        } else {
             if (isBgm) {
                 self.bgAudioFile = audioFile;
                 self.bgAudioFormat = audioFormat;
-            }
-            else {
+            } else {
                 self.audioFile = audioFile;
                 self.audioFormat = audioFormat;
             }
@@ -234,118 +270,183 @@
     });
 }
 
-// Pre-load the buffer
+// 1. Tighten thread control around buffer processing
 - (void)prepareBuffer:(AVAudioFramePosition)startPosition offset:(NSInteger)offsetTime mixStart:(AVAudioFramePosition)mixStartPosition {
-    
+    // Use a barrier write to keep buffer state synchronized
     @weakObj(self);
-    dispatch_async(self.sourceQueue, ^{
+    dispatch_barrier_async(self.sourceQueue, ^{
         @strongObj(self);
-        if (!self || self.isCanceled) {
-            return;
-        }
-        if (!self.bgAudioFile || !self.audioFile) {
-            return;
-        }
-        AVAudioFramePosition minFrameCount = (AVAudioFramePosition)MIN(self.bgAudioFile.length, self.audioFile.length);
-        AVAudioFrameCount offsetFrame = labs(offsetTime)/1000.0 * self.audioFile.processingFormat.sampleRate;
-        if (minFrameCount <= startPosition) {
-            return;
-        }
-        AVAudioFrameCount frameToRead = minFrameCount - startPosition > READ_FILE_LENGTH ? READ_FILE_LENGTH : (AVAudioFrameCount)(minFrameCount - startPosition);
-        
+        [self processBufferWithStartPosition:startPosition offset:offsetTime mixStart:mixStartPosition];
+    });
+}
+
+// 2. Extract the processing logic into its own method
+- (void)processBufferWithStartPosition:(AVAudioFramePosition)startPosition
+                                offset:(NSInteger)offsetTime
+                              mixStart:(AVAudioFramePosition)mixStartPosition {
+    
+    if (!self || self.isCanceled) {
+        return;
+    }
+    if (!self.bgAudioFile || !self.audioFile) {
+        return;
+    }
+    // Compute the usable frame count
+    AVAudioFramePosition minFrameCount = (AVAudioFramePosition)MIN(self.bgAudioFile.length, self.audioFile.length);
+    AVAudioFrameCount offsetFrame = labs(offsetTime)/1000.0 * self.audioFile.processingFormat.sampleRate;
+    if (minFrameCount <= startPosition) {
+        return;
+    }
+    // Compute the number of frames to read
+    AVAudioFrameCount frameToRead = minFrameCount - startPosition > READ_FILE_LENGTH ? READ_FILE_LENGTH : (AVAudioFrameCount)(minFrameCount - startPosition);
+    // Read the background-music data
+    @autoreleasepool {
         self.bgAudioFile.framePosition = startPosition;
-        AVAudioPCMBuffer *bgBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.bgAudioFile.processingFormat frameCapacity:frameToRead];
+        AVAudioPCMBuffer *bgBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.bgAudioFile.processingFormat
+                                                                   frameCapacity:frameToRead];
         bgBuffer.frameLength = frameToRead;
-        BOOL readSuccess = [self.bgAudioFile readIntoBuffer:bgBuffer frameCount:frameToRead error:nil];
-        if (!readSuccess) {
+        
+        if (![self.bgAudioFile readIntoBuffer:bgBuffer frameCount:frameToRead error:nil]) {
             return;
         }
-        AVAudioPCMBuffer *recordBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.audioFile.processingFormat frameCapacity:frameToRead];
+        
+        // Read the recording data
+        AVAudioPCMBuffer *recordBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.audioFile.processingFormat
+                                                                       frameCapacity:frameToRead];
         recordBuffer.frameLength = frameToRead;
         
+        // Handle the offset time
         if (offsetTime >= 0) { // the performance needs to start earlier
-            self.audioFile.framePosition = startPosition + offsetFrame;
-            AVAudioFrameCount audioReadFrame = frameToRead;
-            if (startPosition + offsetFrame + frameToRead > minFrameCount) { // if it would overrun
-                audioReadFrame = (AVAudioFrameCount)(minFrameCount - startPosition - offsetFrame);
-            }
-            if (audioReadFrame <= frameToRead) {
-                BOOL isSuccess = [self.audioFile readIntoBuffer:recordBuffer frameCount:audioReadFrame error:nil];
-                if (!isSuccess) {
-                    return;
-                }
-            }
+            [self processPositiveOffset:offsetTime
+                          startPosition:startPosition
+                            frameToRead:frameToRead
+                           recordBuffer:recordBuffer
+                          minFrameCount:minFrameCount
+                            offsetFrame:offsetFrame];
+        } else { // the performance needs to be delayed
+            [self processNegativeOffset:offsetTime
+                          startPosition:startPosition
+                            frameToRead:frameToRead
+                           recordBuffer:recordBuffer
+                          minFrameCount:minFrameCount
+                            offsetFrame:offsetFrame];
         }
-        else { // the performance needs to be delayed
-            AVAudioFramePosition audioFramePosition = startPosition - offsetFrame;
-            if (audioFramePosition > 0) {
-                self.audioFile.framePosition = audioFramePosition;
-                AVAudioFrameCount audioReadFrame = frameToRead;
-                if (audioFramePosition + frameToRead > minFrameCount) { // if it would overrun
-                    audioReadFrame = (AVAudioFrameCount)(minFrameCount - audioFramePosition);
-                }
-                // AVAudioFrameCount is an unsigned uint32_t
-                if (audioReadFrame <= frameToRead) {
-                    BOOL isSuccess = [self.audioFile readIntoBuffer:recordBuffer frameCount:audioReadFrame error:nil];
-                    if (!isSuccess) {
-                        return;
-                    }
-                }
-            }
-            else {
-                self.audioFile.framePosition = 0;
-                // Only part of the data needs to be read
-                if (offsetFrame - startPosition < frameToRead) {
-                    AVAudioFrameCount readCount = (AVAudioFrameCount)(offsetFrame - startPosition);
-//                    NSLog(@"----need readCount --%u", readCount);
-                    AVAudioPCMBuffer *tempBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.audioFile.processingFormat frameCapacity:readCount];
-                    tempBuffer.frameLength = readCount;
-                    BOOL isSuccess = [self.audioFile readIntoBuffer:tempBuffer error:nil];
-                    if (!isSuccess) {
-                        return;
-                    }
-                    float *tempData = tempBuffer.floatChannelData[0];
-                    float *recordData = recordBuffer.floatChannelData[0];
-                    // Copy the data into recordBuffer
-                    AVAudioFrameCount startFrame = frameToRead - readCount;
-                    for (AVAudioFrameCount i = 0; i < readCount; i++) {
-                        recordData[startFrame + i] = tempData[i];
-                    }
-                }
+        
+        // Mix the two buffers
+        [self mixAudioBuffers:bgBuffer
+                 recordBuffer:recordBuffer
+                  frameToRead:frameToRead
+                startPosition:startPosition
+                     mixStart:mixStartPosition];
+    }
+}
+
+// 3. Handle a positive offset
+- (void)processPositiveOffset:(NSInteger)offsetTime
+                startPosition:(AVAudioFramePosition)startPosition
+                  frameToRead:(AVAudioFrameCount)frameToRead
+                 recordBuffer:(AVAudioPCMBuffer *)recordBuffer
+                minFrameCount:(AVAudioFramePosition)minFrameCount
+                  offsetFrame:(AVAudioFrameCount)offsetFrame {
+    
+    self.audioFile.framePosition = startPosition + offsetFrame;
+    AVAudioFrameCount audioReadFrame = frameToRead;
+    
+    if (startPosition + offsetFrame + frameToRead > minFrameCount) {
+        audioReadFrame = (AVAudioFrameCount)(minFrameCount - startPosition - offsetFrame);
+    }
+    
+    if (audioReadFrame <= frameToRead) {
+        [self.audioFile readIntoBuffer:recordBuffer frameCount:audioReadFrame error:nil];
+    }
+}
+
+// 4. Handle a negative offset
+- (void)processNegativeOffset:(NSInteger)offsetTime
+                startPosition:(AVAudioFramePosition)startPosition
+                  frameToRead:(AVAudioFrameCount)frameToRead
+                 recordBuffer:(AVAudioPCMBuffer *)recordBuffer
+                minFrameCount:(AVAudioFramePosition)minFrameCount
+                  offsetFrame:(AVAudioFrameCount)offsetFrame {
+    
+    AVAudioFramePosition audioFramePosition = startPosition - offsetFrame;
+    
+    if (audioFramePosition > 0) {
+        self.audioFile.framePosition = audioFramePosition;
+        AVAudioFrameCount audioReadFrame = frameToRead;
+        
+        if (audioFramePosition + frameToRead > minFrameCount) {
+            audioReadFrame = (AVAudioFrameCount)(minFrameCount - audioFramePosition);
+        }
+        
+        if (audioReadFrame <= frameToRead) {
+            [self.audioFile readIntoBuffer:recordBuffer frameCount:audioReadFrame error:nil];
+        }
+    } else {
+        self.audioFile.framePosition = 0;
+        if (offsetFrame - startPosition < frameToRead) {
+            AVAudioFrameCount readCount = (AVAudioFrameCount)(offsetFrame - startPosition);
+            AVAudioPCMBuffer *tempBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.audioFile.processingFormat
+                                                                         frameCapacity:readCount];
+            tempBuffer.frameLength = readCount;
+            
+            if ([self.audioFile readIntoBuffer:tempBuffer error:nil]) {
+                float *tempData = tempBuffer.floatChannelData[0];
+                float *recordData = recordBuffer.floatChannelData[0];
+                AVAudioFrameCount startFrame = frameToRead - readCount;
+                memcpy(recordData + startFrame, tempData, readCount * sizeof(float));
             }
         }
+    }
+}
+
+// 5. Mix the buffers
+- (void)mixAudioBuffers:(AVAudioPCMBuffer *)bgBuffer
+           recordBuffer:(AVAudioPCMBuffer *)recordBuffer
+            frameToRead:(AVAudioFrameCount)frameToRead
+          startPosition:(AVAudioFramePosition)startPosition
+               mixStart:(AVAudioFramePosition)mixStartPosition {
+    
+    // Bail out early on cancellation
+    if (self.isCanceled) return;
+    
+    // Use a weak reference to avoid a retain cycle
+    @weakObj(self);
+    dispatch_async(self.mixerQueue, ^{
+        @strongObj(self);
+        if (!self || self.isCanceled) return; // critical cancellation check
         
         float *bgLeftChannel = bgBuffer.floatChannelData[0];
-        float *bgRightChannel = bgBuffer.floatChannelData[1];
-        if (bgBuffer.format.channelCount == 1) {
-            bgRightChannel = bgBuffer.floatChannelData[0];
-        }
-        // The recording file is mono
+        float *bgRightChannel = bgBuffer.floatChannelData[bgBuffer.format.channelCount > 1 ? 1 : 0];
         float *recordLeftChannel = recordBuffer.floatChannelData[0];
         
         float *mixLeftChannel = self.mixBuffer.floatChannelData[0];
         float *mixRightChannel = self.mixBuffer.floatChannelData[1];
         
-        for (int frame = 0; frame < frameToRead; frame++) {
+        AVAudioFramePosition mixStartIndex = startPosition - mixStartPosition;
+        if (mixStartIndex >= 0 && mixStartIndex < self.mixBuffer.frameLength) {
+            // Apply the background-music volume
+            vDSP_vsmul(bgLeftChannel, 1, &self->_bgVolume, bgLeftChannel, 1, frameToRead);
+            vDSP_vsmul(bgRightChannel, 1, &self->_bgVolume, bgRightChannel, 1, frameToRead);
             
-            AVAudioFramePosition mixIndex = frame + startPosition - mixStartPosition;
-            float leftChannel = (frame < bgBuffer.frameLength) ? bgLeftChannel[frame] : 0;
-            float rightChannel = (frame < bgBuffer.frameLength) ? bgRightChannel[frame] : 0;
+            // Apply the recording volume
+            vDSP_vsmul(recordLeftChannel, 1, &self->_recordVolume, recordLeftChannel, 1, frameToRead);
             
-            float recordData = (frame < recordBuffer.frameLength) ? recordLeftChannel[frame] : 0;
+            // Mix the left channel
+            vDSP_vadd(bgLeftChannel, 1, recordLeftChannel, 1, mixLeftChannel + mixStartIndex, 1, frameToRead);
             
-            float mixLeftData = [self mixChannelData:leftChannel bgVolume:self.bgVolume recordData:recordData recordVolume:self.recordVolume];
-            float mixRightData = [self mixChannelData:rightChannel bgVolume:self.bgVolume recordData:recordData recordVolume:self.recordVolume];
+            // Mix the right channel
+            vDSP_vadd(bgRightChannel, 1, recordLeftChannel, 1, mixRightChannel + mixStartIndex, 1, frameToRead);
             
-            // Guard against out-of-bounds writes
-            if (mixIndex >= 0 && mixIndex < self.mixBuffer.frameLength) {
-                mixLeftChannel[mixIndex] = fminf(fmaxf(mixLeftData, -1.0f), 1.0f);
-                mixRightChannel[mixIndex] = fminf(fmaxf(mixRightData, -1.0f), 1.0f);
-            }
+            // Apply headroom gain
+            float scale = 0.5f;
+            vDSP_vsmul(mixLeftChannel + mixStartIndex, 1, &scale, mixLeftChannel + mixStartIndex, 1, frameToRead);
+            vDSP_vsmul(mixRightChannel + mixStartIndex, 1, &scale, mixRightChannel + mixStartIndex, 1, frameToRead);
         }
     });
 }
 
+
 - (void)scheduleBufferFromPosition:(AVAudioFramePosition)startPosition {
     [self resetMixBuffer];
     self.startPosition = startPosition;
@@ -356,14 +457,8 @@
     }];
 }
 
-
-
-- (float)mixChannelData:(float)bgData bgVolume:(float)bgVolume recordData:(float)recordData recordVolume:(float)recordVolume {
-    return (bgData * bgVolume + recordData * recordVolume) / 2;
-}
-
 - (void)changeVolume:(float)bgVolume recordVolume:(float)recordVolume {
-//    NSLog(@"bg volume ---- %f,  record volume ---- %f", bgVolume, recordVolume);
+    //    NSLog(@"bg volume ---- %f,  record volume ---- %f", bgVolume, recordVolume);
     self.bgVolume = bgVolume;
     self.recordVolume = recordVolume;
 }
@@ -385,8 +480,9 @@
     else if (type == AVAudioSessionInterruptionTypeEnded) {
         AVAudioSessionInterruptionOptions options = [info[AVAudioSessionInterruptionOptionKey] unsignedIntegerValue];
         if (options == AVAudioSessionInterruptionOptionShouldResume) {
-            //Handle Resume
+            // Correct order: 1. resume the session, then 2. rebuild the engine
             [self resumeAudioSession];
+            [self retryInitEngine];
             NSLog(@"---- 播放恢复");
         }
     }
@@ -394,10 +490,16 @@
 
 - (void)resumeAudioSession {
     NSError *error = nil;
-    [[AVAudioSession sharedInstance] setActive:YES error:&error];
-    if (error) {
-        NSLog(@"------ error desc %@", error.description);
+    // Deactivate first so the new configuration takes effect
+    [[AVAudioSession sharedInstance] setActive:NO error:nil];
+    
+    // Reactivate the session
+    if (![[AVAudioSession sharedInstance] setActive:YES error:&error]) {
+        NSLog(@"Failed to activate audio session: %@", error.localizedDescription);
     }
+    
+    // Brief delay so the session change takes effect
+    [NSThread sleepForTimeInterval:0.05]; // 50 ms delay
 }
 
 - (void)handleRouteChange:(NSNotification *)notification {
@@ -432,7 +534,7 @@
 }
 
 - (void)seekToTimePlay:(NSInteger)time {
-
+    
     if (self.audioEngine.isRunning == NO) {
         [self startEngine];
     }
@@ -483,7 +585,7 @@
     // Seek to the target position
     self.currentFrame = startFrame;
     [self scheduleBufferFromPosition:startFrame];
-
+    
     if (needPlay) {
         [self.nodePlayer play];
         [self startTimer];
@@ -511,8 +613,9 @@
 }
 
 - (void)stopTimer {
-    
-    [self.timer setFireDate:[NSDate distantFuture]]; // pause the timer
+    if (_timer) {
+        [self.timer setFireDate:[NSDate distantFuture]]; // pause the timer
+    }
 }
 
 #pragma mark ---- lazy loading
@@ -523,11 +626,25 @@
     return _sourceQueue;
 }
 
+- (dispatch_queue_t)analyzerQueue {
+    if (!_analyzerQueue) {
+        _analyzerQueue = dispatch_queue_create("com.ks.analyzer", DISPATCH_QUEUE_SERIAL);
+    }
+    return _analyzerQueue;
+}
+
+- (dispatch_queue_t)mixerQueue {
+    if (!_mixerQueue) {
+        _mixerQueue = dispatch_queue_create("com.ks.mixer", DISPATCH_QUEUE_SERIAL);
+    }
+    return _mixerQueue;
+}
+
 - (NSTimer *)timer {
-    
     if (!_timer) {
         __weak typeof(self)weakSelf = self;
-        _timer = [NSTimer scheduledTimerWithTimeInterval:0.01 repeats:YES block:^(NSTimer * _Nonnull timer) {
+        // Fire interval raised from 0.01 s to 0.1 s
+        _timer = [NSTimer scheduledTimerWithTimeInterval:0.1 repeats:YES block:^(NSTimer * _Nonnull timer) {
             [weakSelf timeFunction];
         }];
         [[NSRunLoop currentRunLoop] addTimer:_timer forMode:NSRunLoopCommonModes];
@@ -538,6 +655,7 @@
 
 - (void)timeFunction {
     if (self.isCanceled) {
+        [self stopTimer];
         return;
     }
     self.totalDuration = [self getTotalTime];
@@ -552,11 +670,11 @@
     }
     else {
         // The timer fires every 10 ms; the buffer is scheduled every 100 ms
-        if (self.timeCount % 10 == 0) {
+//        if (self.timeCount % 5 == 0) {
             [self scheduleMixBuffer];
-        }
+//        }
         self.timeCount++;
-
+        
         if (self.delegate && [self.delegate respondsToSelector:@selector(updatePlayProgress:andTotalTime:andProgress:currentInterval:inPlayer:)]) {
             [self.delegate updatePlayProgress:currentTime andTotalTime:self.totalDuration andProgress:progress currentInterval:inteveral*1000 inPlayer:self];
         }
@@ -589,9 +707,9 @@
             NSLog(@"Playback has stopped");
         }
         double elapsedSamples = (double)currentFrame;
-
+        
         NSTimeInterval currentTime = elapsedSamples / self.sampleRate;
-//        NSLog(@"current time ----- %f", currentTime*1000);
+        //        NSLog(@"current time ----- %f", currentTime*1000);
         return currentTime*1000;
     }
     else {
@@ -612,43 +730,50 @@
 }
 
 - (void)releaseAudioResources {
-    if (self.audioEngine.isRunning) {
-        [self.audioEngine stop];
-        [self.audioEngine reset];
-    }
-    // Stop and detach the player node
-    if (self.nodePlayer) {
-        [self.nodePlayer stop];
-        [self.audioEngine detachNode:self.nodePlayer];
-        self.nodePlayer = nil;
-    }
-    if (self.analyzer) {
-        self.analyzer = nil;
-    }
     
-    // Release the audio files
+    dispatch_sync(self.sourceQueue, ^{
+        if (!self) return;
+
+        if (self.nodePlayer.isPlaying) {
+            [self.nodePlayer stop];
+        }
+        if (self.audioEngine.isRunning) {
+            [self.audioEngine stop];
+            [self.audioEngine reset];
+        }
+        if (self.audioEngine && self.nodePlayer) {
+            [self.audioEngine disconnectNodeInput:self.nodePlayer];
+            [self.audioEngine detachNode:self.nodePlayer];
+        }
+       
+    });
     self.audioFile = nil;
-    self.audioFormat = nil;
     self.bgAudioFile = nil;
-    self.bgAudioFormat = nil;
     self.mixBuffer = nil;
-    
-    // Release the audio engine
-    self.audioEngine = nil;
-    
-    // Log confirmation that resources were released
-    NSLog(@"Audio resources successfully released.");
 }
 
 - (void)dealloc {
-    NSLog(@"---- KSMergeEnginePlayer dealloc");
+    // 1. Cancel all in-flight asynchronous work
+    self.isCanceled = YES;
+    // 2. Release audio resources
     [self releaseAudioResources];
-    // Stop and clean up the timer
-    if (_timer) {
-        [_timer invalidate];
-        _timer = nil;
-    }
+    // 3. Tear down the timer safely
+    dispatch_main_sync_safe(^{
+        [self stopTimer];
+        if (self->_timer) {
+            [self->_timer invalidate];
+            self->_timer = nil;
+        }
+    });
+    
+    dispatch_sync(self.sourceQueue, ^{});
+
+    // 4. Remove notification observers
     [[NSNotificationCenter defaultCenter] removeObserver:self];
+    
+    
+    
+    NSLog(@"---- KSMergeEnginePlayer dealloc");
 }
 
 @end
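
One behavioral difference in the mixing hunk is worth noting: the removed scalar loop computed (bg * bgVolume + record * recordVolume) / 2 and clamped each sample to [-1, 1], while the vDSP version applies the same 0.5 gain but no clamp. If clipping protection is still wanted, vDSP_vclip expresses it in the same vectorized style. A minimal sketch; the helper name and buffer parameters are illustrative, not part of the file:

    #import <Accelerate/Accelerate.h>

    // out[i] = clamp((bg[i] * bgVol + rec[i] * recVol) * 0.5, -1.0, 1.0)
    static void MixChannel(const float *bg, const float *rec, float *out,
                           float bgVol, float recVol, vDSP_Length n) {
        float half = 0.5f, lo = -1.0f, hi = 1.0f;
        vDSP_vsmul(bg, 1, &bgVol, out, 1, n);           // out  = bg * bgVolume
        vDSP_vsma(rec, 1, &recVol, out, 1, out, 1, n);  // out += rec * recordVolume
        vDSP_vsmul(out, 1, &half, out, 1, n);           // the same 0.5 headroom gain as the diff
        vDSP_vclip(out, 1, &lo, &hi, out, 1, n);        // restores the clamp the old loop had
    }

This keeps the mix branch-free while retaining the overflow guard of the scalar version; unlike the in-place code in the hunk, it also leaves the source buffers unmodified.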

+ 1 - 1
KulexiuForTeacher/Podfile.lock

@@ -337,7 +337,7 @@ SPEC CHECKSUMS:
   JPush: 88d6361fbec4be7c8b55b20b7fe1c292228f6bc2
   JXCategoryView: 262d503acea0b1278c79a1c25b7332ffaef4d518
   JXPagingView: afdd2e9af09c90160dd232b970d603cc6e7ddd0e
-  KSCloudAccompanyLibrary: e9edc50bba0b63b7b270389afbb31d6655255961
+  KSCloudAccompanyLibrary: a85e75727985fe95b2454585b2288e3430a0d43c
   KSToolsLibrary: c1630ddfedd31f005ed2d7dbe0162fc0eb546a99
   lottie-ios: a50d5c0160425cd4b01b852bb9578963e6d92d31
   Masonry: 678fab65091a9290e40e2832a55e7ab731aad201

+ 1 - 0
KulexiuForTeacher/Pods/Headers/Private/KSCloudAccompanyLibrary/AudioProcessHandle.hpp

@@ -0,0 +1 @@
+../../../../../../../WorkSpace/TargetModule/my-local-repo/KSCloudAccompanyLibrary/KSCloudAccompanyLibrary/Evaluate/AudioProcessHandle.hpp

+ 1 - 0
KulexiuForTeacher/Pods/Headers/Private/KSCloudAccompanyLibrary/EvaluateManager.h

@@ -0,0 +1 @@
+../../../../../../../WorkSpace/TargetModule/my-local-repo/KSCloudAccompanyLibrary/KSCloudAccompanyLibrary/Classes/Base/EvaluateManager.h

+ 1 - 0
KulexiuForTeacher/Pods/Headers/Public/KSCloudAccompanyLibrary/EvaluateManager.h

@@ -0,0 +1 @@
+../../../../../../../WorkSpace/TargetModule/my-local-repo/KSCloudAccompanyLibrary/KSCloudAccompanyLibrary/Classes/Base/EvaluateManager.h

+ 5 - 1
KulexiuForTeacher/Pods/Local Podspecs/KSCloudAccompanyLibrary.podspec.json

@@ -18,7 +18,10 @@
   "platforms": {
     "ios": "12.0"
   },
-  "source_files": "KSCloudAccompanyLibrary/Classes/**/*",
+  "source_files": [
+    "KSCloudAccompanyLibrary/Classes/**/*",
+    "KSCloudAccompanyLibrary/Evaluate/AudioProcessHandle.hpp"
+  ],
   "swift_versions": "5.0",
   "pod_target_xcconfig": {
     "VALID_ARCHS": "arm64"
@@ -28,6 +31,7 @@
   "vendored_frameworks": [
     "KSTunerLibrary.framework"
   ],
+  "vendored_libraries": "KSCloudAccompanyLibrary/Evaluate/libEvaluateLibrary.a",
   "dependencies": {
     "KSToolsLibrary": [
 

+ 1 - 1
KulexiuForTeacher/Pods/Manifest.lock

@@ -337,7 +337,7 @@ SPEC CHECKSUMS:
   JPush: 88d6361fbec4be7c8b55b20b7fe1c292228f6bc2
   JXCategoryView: 262d503acea0b1278c79a1c25b7332ffaef4d518
   JXPagingView: afdd2e9af09c90160dd232b970d603cc6e7ddd0e
-  KSCloudAccompanyLibrary: e9edc50bba0b63b7b270389afbb31d6655255961
+  KSCloudAccompanyLibrary: a85e75727985fe95b2454585b2288e3430a0d43c
   KSToolsLibrary: c1630ddfedd31f005ed2d7dbe0162fc0eb546a99
   lottie-ios: a50d5c0160425cd4b01b852bb9578963e6d92d31
   Masonry: 678fab65091a9290e40e2832a55e7ab731aad201

The file diff has been suppressed because it is too large
+ 872 - 877
KulexiuForTeacher/Pods/Pods.xcodeproj/project.pbxproj


The file diff has been suppressed because it is too large
+ 1 - 1
KulexiuForTeacher/Pods/Target Support Files/Pods-KulexiuForTeacher/Pods-KulexiuForTeacher.debug.xcconfig


The file diff has been suppressed because it is too large
+ 1 - 1
KulexiuForTeacher/Pods/Target Support Files/Pods-KulexiuForTeacher/Pods-KulexiuForTeacher.dev.xcconfig


The file diff has been suppressed because it is too large
+ 1 - 1
KulexiuForTeacher/Pods/Target Support Files/Pods-KulexiuForTeacher/Pods-KulexiuForTeacher.release.xcconfig


The file diff has been suppressed because it is too large
+ 1 - 1
KulexiuForTeacher/Pods/Target Support Files/Pods-KulexiuForTeacher/Pods-KulexiuForTeacher.test.xcconfig


Some files were not shown because too many files have changed