KSMergeEnginePlayer.m 19 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533
  1. //
  2. // KSMergeEnginePlayer.m
  3. // MutiPlayDemo
  4. //
  5. // Created by 王智 on 2024/6/17.
  6. //
  7. #import "KSMergeEnginePlayer.h"
  8. #import <AVFoundation/AVFoundation.h>
@interface KSMergeEnginePlayer ()
/** Progress timer; paused/resumed via fireDate (see startTimer/stopTimer), never invalidated here. */
@property (nonatomic, strong) NSTimer *timer;
/** Engine driving playback. */
@property (nonatomic, strong) AVAudioEngine *audioEngine;
/** Player node the mixed buffer is scheduled on. */
@property (nonatomic, strong) AVAudioPlayerNode *nodePlayer;
/** Recorded (vocal) audio file. */
@property (nonatomic, strong) AVAudioFile *audioFile;
/** Processing format of the recorded file. */
@property (nonatomic, strong) AVAudioFormat *audioFormat;
/** Background-music audio file. */
@property (nonatomic, strong) AVAudioFile *bgAudioFile;
/** Processing format of the background-music file. */
@property (nonatomic, strong) AVAudioFormat *bgAudioFormat;
/** Total duration of the record track, in milliseconds (see getTotalTime). */
@property (nonatomic, assign) NSTimeInterval totalDuration;
/** Record-track delay, in milliseconds; sign selects shift direction in the mix. */
@property (nonatomic, assign) NSInteger offsetTime;
/** Background-track frame the current mix pass started from. */
@property (nonatomic, assign) AVAudioFramePosition startPosition;
/** Serial queue serializing file loads, mixing and volume changes. */
@property (nonatomic, strong) dispatch_queue_t sourceQueue;
/** Stereo output buffer the node plays from. */
@property (nonatomic, strong) AVAudioPCMBuffer *mixBuffer;
/** Background track loaded fully into memory. */
@property (nonatomic, strong) AVAudioPCMBuffer *bgBuffer;
/** Record track loaded fully into memory. */
@property (nonatomic, strong) AVAudioPCMBuffer *recordBuffer;
/** Most recently computed playback frame position. */
@property (nonatomic, assign) AVAudioFramePosition currentFrame;
/** Sample rate used to convert frames to time. */
@property (nonatomic, assign) double sampleRate;
/** Set to abort an in-flight mix loop. */
@property (nonatomic, assign) BOOL stopMix;
/** Serializes mix passes (created with initial count 1). */
@property (nonatomic, strong) dispatch_semaphore_t mixChangeSemaphore;
/** Set to abort an in-flight volume-change loop. */
@property (nonatomic, assign) BOOL stopChangeVolume;
/** Serializes volume-change passes (created with initial count 1). */
@property (nonatomic, strong) dispatch_semaphore_t volumeChangeSemaphore;
@end
  32. @implementation KSMergeEnginePlayer
  33. - (instancetype)init {
  34. self = [super init];
  35. if (self) {
  36. [self configDefault];
  37. }
  38. return self;
  39. }
  40. - (void)configDefault {
  41. self.recordVolume = 1.0f;
  42. self.bgVolume = 1.0f;
  43. self.mixChangeSemaphore = dispatch_semaphore_create(1); // 初始化信号量
  44. self.volumeChangeSemaphore = dispatch_semaphore_create(1); // 初始化信号量,初始值为1
  45. }
  46. - (void)configEngine {
  47. [self setupAudioSession];
  48. self.audioEngine = [[AVAudioEngine alloc] init];
  49. self.nodePlayer = [[AVAudioPlayerNode alloc] init];
  50. // attach node
  51. [self.audioEngine attachNode:self.nodePlayer];
  52. }
  53. - (void)setupAudioSession {
  54. NSError *err = nil;
  55. AVAudioSession *audioSession = [AVAudioSession sharedInstance];
  56. @try {
  57. [audioSession setActive:YES error:&err];
  58. } @catch (NSException *exception) {
  59. } @finally {
  60. }
  61. [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleInterruption:) name:AVAudioSessionInterruptionNotification object:audioSession];
  62. [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleRouteChange:) name:AVAudioSessionRouteChangeNotification object:audioSession];
  63. }
  64. - (void)startEngine {
  65. // 启动engine
  66. NSError *error = nil;
  67. @try {
  68. [self.audioEngine startAndReturnError:&error];
  69. } @catch (NSException *exception) {
  70. NSLog(@"--------Exception: %@", exception);
  71. } @finally {
  72. if (error) {
  73. self.audioEngine = nil;
  74. // 错误回调
  75. if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerDidError:error:)]) {
  76. [self.delegate enginePlayerDidError:self error:error];
  77. }
  78. }
  79. }
  80. }
/// Loads the recorded vocal track and the backing track, wires up the engine
/// graph, and — once the engine is actually running — kicks off buffer
/// preparation on the serial source queue.
/// @param recordAudioUrl URL of the recorded (vocal) audio file.
/// @param bgMusicUrl URL of the background-music file.
- (void)prepareNativeSongWithUrl:(NSURL *)recordAudioUrl bgMusic:(NSURL *)bgMusicUrl {
    // Both loads run synchronously on sourceQueue, so the files/formats are
    // populated before the engine is configured below.
    [self loadAuidoFile:recordAudioUrl isBgm:NO];
    [self loadAuidoFile:bgMusicUrl isBgm:YES];
    self.sampleRate = self.audioFile.fileFormat.sampleRate;
    [self configEngine];
    AVAudioFormat *outputFormat = [self.audioEngine.mainMixerNode outputFormatForBus:0];
    [self.audioEngine connect:self.nodePlayer to:self.audioEngine.mainMixerNode format:outputFormat];
    [self startEngine];
    // startEngine nils audioEngine on failure, so this guard also covers that.
    if (self.audioEngine && self.audioEngine.isRunning) {
        dispatch_async(self.sourceQueue, ^{
            [self prepareBufferFrame];
        });
    }
}
  95. - (void)loadAuidoFile:(NSURL *)audioFileUrl isBgm:(BOOL)isBgm {
  96. dispatch_sync(self.sourceQueue, ^{
  97. NSError *error = nil;
  98. AVAudioFile *audioFile = nil;
  99. AVAudioFormat *audioFormat = nil;
  100. @try {
  101. audioFile = [[AVAudioFile alloc] initForReading:audioFileUrl error:&error];
  102. audioFormat = audioFile.processingFormat;
  103. } @catch (NSException *exception) {
  104. audioFile = nil;
  105. audioFormat = nil;
  106. } @finally {
  107. if (error) {
  108. // 错误回调
  109. }
  110. else { // 加载成功
  111. if (isBgm) {
  112. self.bgAudioFile = audioFile;
  113. self.bgAudioFormat = audioFormat;
  114. }
  115. else {
  116. self.audioFile = audioFile;
  117. self.audioFormat = audioFormat;
  118. }
  119. }
  120. }
  121. });
  122. }
  123. - (void)prepareBufferFrame {
  124. AVAudioFrameCount minFrameCount = (AVAudioFrameCount)MIN(self.bgAudioFile.length, self.audioFile.length);
  125. // mixBuffer
  126. AVAudioFormat *outputFormat = [[AVAudioFormat alloc] initStandardFormatWithSampleRate:self.bgAudioFormat.sampleRate channels:2];
  127. self.mixBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:outputFormat frameCapacity:minFrameCount];
  128. self.mixBuffer.frameLength = minFrameCount;
  129. self.bgBuffer = [self loadAudioSegment:self.bgAudioFile startFrame:0 frameCount:minFrameCount];
  130. self.recordBuffer = [self loadAudioSegment:self.audioFile startFrame:0 frameCount:minFrameCount];
  131. if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerIsReadyPlay:)]) {
  132. self.isReady = YES;
  133. [self.delegate enginePlayerIsReadyPlay:self];
  134. }
  135. }
  136. - (AVAudioPCMBuffer *)loadAudioSegment:(AVAudioFile *)audioFile startFrame:(AVAudioFramePosition)startFrame frameCount:(AVAudioFrameCount)frameCount {
  137. AVAudioFormat *audioFromat = audioFile.processingFormat;
  138. AVAudioFrameCount frameToRead = (AVAudioFrameCount)MIN(frameCount, (AVAudioFrameCount)audioFile.length - startFrame);
  139. if (startFrame > audioFile.length) {
  140. return nil;
  141. }
  142. AVAudioPCMBuffer *buffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:audioFromat frameCapacity:frameToRead];
  143. buffer.frameLength = frameToRead;
  144. audioFile.framePosition = startFrame;
  145. if (frameToRead > 0) {
  146. @try {
  147. [audioFile readIntoBuffer:buffer frameCount:frameToRead error:nil];
  148. } @catch (NSException *exception) {
  149. } @finally {
  150. }
  151. }
  152. return buffer;
  153. }
  154. - (void)mixBuffers:(AVAudioPCMBuffer *)bgBuffer bgBufferVolume:(float)bgBufferVolume withRecordBuffer:(AVAudioPCMBuffer *)recordBuffer recordVolume:(float)recordVolume offset:(NSInteger)offsetTime startPosition:(AVAudioFrameCount)startPosition {
  155. if (!bgBuffer && !recordBuffer) {
  156. return;
  157. }
  158. NSLog(@"------- start");
  159. AVAudioFrameCount minFrameCount = MIN(bgBuffer.frameLength, recordBuffer.frameLength);
  160. AVAudioFrameCount offsetFrame = labs(offsetTime)/1000.0 * recordBuffer.format.sampleRate;
  161. float *bgLeftChannel = bgBuffer.floatChannelData[0];
  162. float *bgRightChannel = bgBuffer.floatChannelData[1];
  163. // 录音文件未单声道
  164. float *recordLeftChannel = recordBuffer.floatChannelData[0];
  165. float *mixLeftChannel = self.mixBuffer.floatChannelData[0];
  166. float *mixRightChannel = self.mixBuffer.floatChannelData[1];
  167. for (int frame = 0; frame < minFrameCount; frame++) {
  168. if (self.stopMix) {
  169. NSLog(@"------- stop mix");
  170. dispatch_semaphore_signal(self.mixChangeSemaphore); // 释放信号量
  171. return;
  172. }
  173. int bgFrame = frame+startPosition;
  174. float leftChannel = (bgFrame < bgBuffer.frameLength) ? bgLeftChannel[bgFrame] : 0;
  175. float rightChannel = (bgFrame < bgBuffer.frameLength) ? bgRightChannel[bgFrame] : 0;
  176. int recordFrame = (offsetTime < 0) ? (bgFrame - offsetFrame) : (bgFrame + offsetFrame);
  177. float recordData = (recordFrame >= 0 && recordFrame < recordBuffer.frameLength) ? recordLeftChannel[recordFrame] : 0;
  178. float mixLeftData = [self mixChannelData:leftChannel bgVolume:bgBufferVolume recordData:recordData recordVolume:recordVolume];
  179. float mixRightData = [self mixChannelData:rightChannel bgVolume:bgBufferVolume recordData:recordData recordVolume:recordVolume];
  180. mixLeftChannel[frame] = MAX(-1.0, MIN(1.0, mixLeftData));
  181. mixRightChannel[frame] = MAX(-1.0, MIN(1.0, mixRightData));
  182. }
  183. NSLog(@"---------finish");
  184. }
  185. - (float)mixChannelData:(float)bgData bgVolume:(float)bgVolume recordData:(float)recordData recordVolume:(float)recordVolume {
  186. return (bgData * bgVolume + recordData * recordVolume) / 2;
  187. }
/// Applies new background/record gains by re-rendering the mix buffer from
/// the current play position onward on the serial source queue.
/// @param bgVolume New gain for the backing track.
/// @param recordVolume New gain for the recorded vocal track.
- (void)changeVolume:(float)bgVolume recordVolume:(float)recordVolume {
    NSLog(@"bg volume ---- %f, record volume ---- %f", bgVolume, recordVolume);
    self.bgVolume = bgVolume;
    self.recordVolume = recordVolume;
    if (self.bgBuffer && self.recordBuffer) {
        // Ask any in-flight volume pass to abort before queuing a new one.
        self.stopChangeVolume = YES;
        dispatch_async(self.sourceQueue, ^{
            // Block until the previous volume pass releases the semaphore.
            dispatch_semaphore_wait(self.volumeChangeSemaphore, DISPATCH_TIME_FOREVER);
            self.stopChangeVolume = NO;
            // Re-render starting at the frame currently being played.
            AVAudioFramePosition startFrame = self.currentFrame;
            NSLog(@"----- current frame -----%lld", startFrame);
            [self modifyMixBuffer:self.bgBuffer bgBufferVolume:bgVolume withRecordBuffer:self.recordBuffer recordVolume:recordVolume offset:self.offsetTime startPosition:startFrame tagIndex:0];
            // Release the semaphore: this volume pass is finished.
            dispatch_semaphore_signal(self.volumeChangeSemaphore);
        });
    }
}
  208. - (void)modifyMixBuffer:(AVAudioPCMBuffer *)bgBuffer bgBufferVolume:(float)bgBufferVolume withRecordBuffer:(AVAudioPCMBuffer *)recordBuffer recordVolume:(float)recordVolume offset:(NSInteger)offsetTime startPosition:(AVAudioFramePosition)startFrame tagIndex:(NSInteger)tagIndex {
  209. AVAudioFrameCount minFrameCount = MIN(bgBuffer.frameLength, recordBuffer.frameLength);
  210. AVAudioFrameCount offsetFrame = labs(offsetTime)/1000.0 * recordBuffer.format.sampleRate;
  211. float *bgLeftChannel = bgBuffer.floatChannelData[0];
  212. float *bgRightChannel = bgBuffer.floatChannelData[1];
  213. // 录音文件未单声道
  214. float *recordLeftChannel = recordBuffer.floatChannelData[0];
  215. float *mixLeftChannel = self.mixBuffer.floatChannelData[0];
  216. float *mixRightChannel = self.mixBuffer.floatChannelData[1];
  217. // 先处理后续播放的buffer
  218. NSLog(@"------- volume change start");
  219. for (int frame = (int)startFrame; frame < minFrameCount; frame++) {
  220. if (self.stopChangeVolume) {
  221. NSLog(@"------- stop volume change");
  222. dispatch_semaphore_signal(self.volumeChangeSemaphore); // 释放信号量
  223. return;
  224. }
  225. float leftChannel = bgLeftChannel[frame];
  226. float rightChannel = bgRightChannel[frame];
  227. int recordFrame = (offsetTime < 0) ? (frame - offsetFrame) : (frame + offsetFrame);
  228. float recordData = (recordFrame >= 0 && recordFrame < recordBuffer.frameLength) ? recordLeftChannel[recordFrame] : 0;
  229. float mixLeftData = [self mixChannelData:leftChannel bgVolume:bgBufferVolume recordData:recordData recordVolume:recordVolume];
  230. float mixRightData = [self mixChannelData:rightChannel bgVolume:bgBufferVolume recordData:recordData recordVolume:recordVolume];
  231. mixLeftChannel[frame-self.startPosition] = MAX(-1.0, MIN(1.0, mixLeftData));
  232. mixRightChannel[frame-self.startPosition] = MAX(-1.0, MIN(1.0, mixRightData));
  233. }
  234. NSLog(@"------- volume change end");
  235. }
/// Re-mixes both tracks from `startPosition` into the shared mix buffer and
/// schedules it on the player node, interrupting whatever is already queued.
/// @param startPosition Background-track frame to start mixing from.
- (void)scheduleBufferFromPosition:(AVAudioFramePosition)startPosition {
    // Ask any in-flight mix loop to abort; the queued block below waits for it.
    self.stopMix = YES;
    self.startPosition = startPosition;
    dispatch_async(self.sourceQueue, ^{
        // Block until the previous mix pass releases the semaphore.
        dispatch_semaphore_wait(self.mixChangeSemaphore, DISPATCH_TIME_FOREVER);
        self.stopMix = NO;
        [self mixBuffers:self.bgBuffer bgBufferVolume:self.bgVolume withRecordBuffer:self.recordBuffer recordVolume:self.recordVolume offset:self.offsetTime startPosition:(AVAudioFrameCount)startPosition];
        // Release the semaphore: this mix pass is finished.
        dispatch_semaphore_signal(self.mixChangeSemaphore);
        // Hand the freshly mixed buffer to the node, replacing queued audio.
        [self.nodePlayer scheduleBuffer:self.mixBuffer atTime:nil options:AVAudioPlayerNodeBufferInterruptsAtLoop completionHandler:^{
        }];
    });
}
  251. // 打断处理
  252. - (void)handleInterruption:(NSNotification *)notification {
  253. NSDictionary *info = notification.userInfo;
  254. AVAudioSessionInterruptionType type = [info[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
  255. if (type == AVAudioSessionInterruptionTypeBegan) {
  256. //Handle InterruptionBegan
  257. if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerDidError:error:)]) {
  258. NSError *error = [[NSError alloc] initWithDomain:NSCocoaErrorDomain code:99999 userInfo:@{@"errorDesc" : @"播放被打断"}];
  259. [self.delegate enginePlayerDidError:self error:error];
  260. }
  261. }
  262. else if (type == AVAudioSessionInterruptionTypeEnded) {
  263. AVAudioSessionInterruptionOptions options = [info[AVAudioSessionInterruptionOptionKey] unsignedIntegerValue];
  264. if (options == AVAudioSessionInterruptionOptionShouldResume) {
  265. //Handle Resume
  266. [[AVAudioSession sharedInstance] setActive:YES error:nil];
  267. }
  268. }
  269. }
  270. - (void)handleRouteChange:(NSNotification *)notification {
  271. NSDictionary *info = notification.userInfo;
  272. if ([notification.name isEqualToString:AVAudioSessionRouteChangeNotification]) {
  273. AVAudioSessionRouteChangeReason routeChangeReason = [[info valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue];
  274. if (routeChangeReason == AVAudioSessionRouteChangeReasonCategoryChange) {
  275. return;
  276. }
  277. if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerDidError:error:)]) {
  278. NSError *error = [[NSError alloc] initWithDomain:NSCocoaErrorDomain code:99999 userInfo:@{@"errorDesc" : @"播放被打断"}];
  279. [self.delegate enginePlayerDidError:self error:error];
  280. }
  281. }
  282. }
  283. #pragma mark ------ play action
  284. - (void)changeRecordDelay:(NSInteger)delayMs {
  285. self.offsetTime = delayMs;
  286. }
  287. - (void)seekToTimePlay:(NSInteger)time {
  288. if (self.audioEngine.isRunning == NO) {
  289. [self startEngine];
  290. }
  291. if (self.audioEngine.isRunning) {
  292. [self seekAudioWithStartTime:time needPlay:YES];
  293. }
  294. }
  295. - (void)stopPlay {
  296. self.stopMix = YES;
  297. self.stopChangeVolume = YES;
  298. if (self.nodePlayer.isPlaying) {
  299. [self.nodePlayer stop];
  300. }
  301. [self stopTimer];
  302. }
  303. - (void)seekToTime:(NSInteger)time {
  304. if (self.audioEngine.isRunning == NO) {
  305. [self startEngine];
  306. }
  307. if (self.audioEngine.isRunning) {
  308. [self seekAudioWithStartTime:time needPlay:NO];
  309. }
  310. }
  311. - (void)seekAudioWithStartTime:(NSTimeInterval)startTime needPlay:(BOOL)needPlay {
  312. if (self.audioEngine.isRunning == NO) {
  313. [self startEngine];
  314. }
  315. if (self.audioEngine.isRunning) {
  316. if (self.nodePlayer.isPlaying) {
  317. [self.nodePlayer stop];
  318. }
  319. }
  320. // 停止修改音量循环
  321. self.stopChangeVolume = YES;
  322. AVAudioFramePosition startFrame = startTime / 1000.0 * self.audioFormat.sampleRate;
  323. // 跳转进度
  324. self.currentFrame = startFrame;
  325. [self scheduleBufferFromPosition:startFrame];
  326. if (needPlay) {
  327. [self.nodePlayer play];
  328. [self startTimer];
  329. }
  330. }
  331. // 调整偏移
  332. - (void)seekOffsetTime:(NSInteger)offsetTime {
  333. self.offsetTime = offsetTime;
  334. NSTimeInterval currentTime = [self getCurrentPlayTime];
  335. [self seekToTimePlay:currentTime];
  336. }
  337. - (void)freePlayer {
  338. if (self.nodePlayer.isPlaying) {
  339. [self stopPlay];
  340. }
  341. [self.audioEngine stop];
  342. }
  343. - (void)startTimer {
  344. [self.timer setFireDate:[NSDate distantPast]];
  345. }
  346. - (void)stopTimer {
  347. [self.timer setFireDate:[NSDate distantFuture]];//暂停计时器
  348. }
  349. #pragma mark ---- lazying
  350. - (dispatch_queue_t)sourceQueue {
  351. if (!_sourceQueue) {
  352. _sourceQueue = dispatch_queue_create("ks_MutilSourceQueue", DISPATCH_QUEUE_SERIAL);
  353. }
  354. return _sourceQueue;
  355. }
  356. - (NSTimer *)timer {
  357. if (!_timer) {
  358. __weak typeof(self)weakSelf = self;
  359. _timer = [NSTimer scheduledTimerWithTimeInterval:0.1 repeats:YES block:^(NSTimer * _Nonnull timer) {
  360. [weakSelf timeFunction];
  361. }];
  362. [[NSRunLoop currentRunLoop] addTimer:_timer forMode:NSRunLoopCommonModes];
  363. [_timer setFireDate:[NSDate distantFuture]];
  364. }
  365. return _timer;
  366. }
  367. - (void)timeFunction {
  368. self.totalDuration = [self getTotalTime];
  369. NSTimeInterval currentTime = [self getCurrentPlayTime];
  370. float progress = currentTime/self.totalDuration;
  371. NSDate *date = [NSDate date];
  372. NSTimeInterval inteveral = [date timeIntervalSince1970];
  373. if (currentTime > self.totalDuration) {
  374. if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayFinished:)]) {
  375. [self.delegate enginePlayFinished:self];
  376. }
  377. }
  378. else {
  379. if (self.delegate && [self.delegate respondsToSelector:@selector(updatePlayProgress:andTotalTime:andProgress:currentInterval:inPlayer:)]) {
  380. [self.delegate updatePlayProgress:currentTime andTotalTime:self.totalDuration andProgress:progress currentInterval:inteveral*1000 inPlayer:self];
  381. }
  382. }
  383. }
  384. - (NSTimeInterval)getCurrentPlayTime {
  385. AVAudioTime *nodeTime = [self.nodePlayer lastRenderTime];
  386. if (nodeTime && self.bgAudioFile) {
  387. AVAudioTime *playerTime = [self.nodePlayer playerTimeForNodeTime:nodeTime];
  388. AVAudioFramePosition currentFrame = self.currentFrame;
  389. if (playerTime) {
  390. self.sampleRate = [playerTime sampleRate];
  391. AVAudioFramePosition currentFrame = [playerTime sampleTime];
  392. if (currentFrame <= 0) {
  393. currentFrame = 0;
  394. }
  395. currentFrame += self.startPosition;
  396. self.currentFrame = currentFrame;
  397. }
  398. else {
  399. NSLog(@"播放已停止");
  400. }
  401. double elapsedSamples = (double)currentFrame;
  402. NSTimeInterval currentTime = elapsedSamples / self.sampleRate;
  403. // NSLog(@"当前时间----- %f",currentTime*1000);
  404. return currentTime*1000;
  405. }
  406. else {
  407. return 0;
  408. }
  409. }
  410. - (NSTimeInterval)getTotalTime {
  411. NSTimeInterval recordTotalDuration = (AVAudioFramePosition)self.audioFile.length * 1000.0 / self.audioFormat.sampleRate;
  412. return recordTotalDuration;
  413. }
  414. - (BOOL)isPlaying {
  415. if (self.nodePlayer) {
  416. return self.nodePlayer.isPlaying;
  417. }
  418. return NO;
  419. }
  420. @end