KSMergeEnginePlayer.m 21 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602
  1. //
  2. // KSMergeEnginePlayer.m
  3. // MutiPlayDemo
  4. //
  5. // Created by 王智 on 2024/6/17.
  6. //
  7. #import "KSMergeEnginePlayer.h"
  8. #import <AVFoundation/AVFoundation.h>
  9. #import <Accelerate/Accelerate.h>
  10. #define READ_FILE_LENGTH (8192)
  11. #define BUFFER_SIZE (2048)
@interface KSMergeEnginePlayer ()
/** Progress timer; fires every 10 ms and is paused/resumed via its fire date. */
@property (nonatomic, strong) NSTimer *timer;
/** Engine plus the single player node used to render the pre-mixed buffer. */
@property (nonatomic, strong) AVAudioEngine *audioEngine;
@property (nonatomic, strong) AVAudioPlayerNode *nodePlayer;
/** Recorded (vocal) track and its processing format. */
@property (nonatomic, strong) AVAudioFile *audioFile;
@property (nonatomic, strong) AVAudioFormat *audioFormat;
/** Background-music track and its processing format. */
@property (nonatomic, strong) AVAudioFile *bgAudioFile;
@property (nonatomic, strong) AVAudioFormat *bgAudioFormat;
/** Total play time in milliseconds (derived from the record file). */
@property (nonatomic, assign) NSTimeInterval totalDuration;
@property (nonatomic, assign) NSInteger offsetTime; // record-vs-bgm delay, in ms
@property (nonatomic, assign) AVAudioFramePosition startPosition; // frame playback started from
/** Serial queue serializing file loading and chunked buffer mixing. */
@property (nonatomic, strong) dispatch_queue_t sourceQueue;
/** Destination buffer the mixed (bgm + record) samples are written into. */
@property (nonatomic, strong) AVAudioPCMBuffer *mixBuffer;
@property (nonatomic, assign) AVAudioFramePosition currentFrame; // last known playback frame
@property (nonatomic, assign) double sampleRate; // sample rate used for time conversion
@property (nonatomic, assign) NSInteger timeCount; // timer ticks since play started
@property (nonatomic, assign) BOOL isCanceled; // set once the player is being torn down
@end
  31. @implementation KSMergeEnginePlayer
  32. - (instancetype)init {
  33. self = [super init];
  34. if (self) {
  35. [self configDefault];
  36. }
  37. return self;
  38. }
  39. - (void)configDefault {
  40. self.recordVolume = 1.0f;
  41. self.bgVolume = 1.0f;
  42. }
  43. - (void)configEngine {
  44. [self setupAudioSession];
  45. self.audioEngine = [[AVAudioEngine alloc] init];
  46. self.nodePlayer = [[AVAudioPlayerNode alloc] init];
  47. // attach node
  48. [self.audioEngine attachNode:self.nodePlayer];
  49. }
  50. - (void)setupAudioSession {
  51. NSError *err = nil;
  52. AVAudioSession *audioSession = [AVAudioSession sharedInstance];
  53. @try {
  54. [audioSession setActive:YES error:&err];
  55. } @catch (NSException *exception) {
  56. } @finally {
  57. }
  58. [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleInterruption:) name:AVAudioSessionInterruptionNotification object:audioSession];
  59. [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleRouteChange:) name:AVAudioSessionRouteChangeNotification object:audioSession];
  60. }
  61. - (void)retryInitEngine {
  62. // 如果audio engine不存在
  63. self.audioEngine = [[AVAudioEngine alloc] init];
  64. [self.audioEngine attachNode:self.nodePlayer];
  65. AVAudioFormat *outputFormat = [self.audioEngine.mainMixerNode outputFormatForBus:0];
  66. [self.audioEngine connect:self.nodePlayer to:self.audioEngine.mainMixerNode format:outputFormat];
  67. }
  68. - (void)startEngine {
  69. if (!self.audioEngine) { // 如果audio engine 被释放
  70. [self retryInitEngine];
  71. }
  72. // 启动engine
  73. NSError *error = nil;
  74. @try {
  75. [self.audioEngine startAndReturnError:&error];
  76. } @catch (NSException *exception) {
  77. NSLog(@"--------Exception: %@", exception);
  78. } @finally {
  79. if (error) {
  80. self.audioEngine = nil;
  81. // 错误回调
  82. if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerDidError:error:)]) {
  83. [self.delegate enginePlayerDidError:self error:error];
  84. }
  85. }
  86. }
  87. }
  88. - (void)prepareNativeSongWithUrl:(NSURL *)recordAudioUrl bgMusic:(NSURL *)bgMusicUrl {
  89. @weakObj(self);
  90. dispatch_async(self.sourceQueue, ^{
  91. @strongObj(self);
  92. if (!self || self.isCanceled) {
  93. return;
  94. }
  95. [self loadAuidoFile:recordAudioUrl isBgm:NO];
  96. [self loadAuidoFile:bgMusicUrl isBgm:YES];
  97. self.sampleRate = self.audioFile.fileFormat.sampleRate;
  98. [self configEngine];
  99. AVAudioFormat *outputFormat = [self.audioEngine.mainMixerNode outputFormatForBus:0];
  100. [self.audioEngine connect:self.nodePlayer to:self.audioEngine.mainMixerNode format:outputFormat];
  101. [self startEngine];
  102. if (self.audioEngine && self.audioEngine.isRunning) {
  103. [self prepareBufferFrame];
  104. }
  105. });
  106. }
  107. - (void)addTapBus {
  108. BOOL delegateRespondsToDidGenerateSpectrum = [self.delegate respondsToSelector:@selector(player:didGenerateSpectrum:)];
  109. self.analyzer = [[KSRealtimeAnalyzer alloc] initWithFFTSize:BUFFER_SIZE];
  110. AVAudioFormat *outputFormat = [self.audioEngine.mainMixerNode outputFormatForBus:0];
  111. @weakObj(self);
  112. [self.audioEngine.mainMixerNode removeTapOnBus:0];
  113. [self.audioEngine.mainMixerNode installTapOnBus:0 bufferSize:BUFFER_SIZE format:outputFormat block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
  114. @strongObj(self);
  115. if (!self || !self.nodePlayer.isPlaying) {
  116. return;
  117. }
  118. // 将频谱分析任务提交到后台队列
  119. dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
  120. if (self.analyzer.isAnalise == NO) {
  121. // 分析音频缓冲区
  122. NSArray<NSArray<NSNumber *> *> *spectra = [self.analyzer analyseWithBuffer:buffer];
  123. // 回到主线程更新 UI 或调用委托方法
  124. dispatch_async(dispatch_get_main_queue(), ^{
  125. if (delegateRespondsToDidGenerateSpectrum) {
  126. [self.delegate player:self didGenerateSpectrum:spectra];
  127. }
  128. });
  129. }
  130. });
  131. }];
  132. }
  133. - (void)loadAuidoFile:(NSURL *)audioFileUrl isBgm:(BOOL)isBgm {
  134. NSError *error = nil;
  135. AVAudioFile *audioFile = nil;
  136. AVAudioFormat *audioFormat = nil;
  137. @try {
  138. audioFile = [[AVAudioFile alloc] initForReading:audioFileUrl error:&error];
  139. audioFormat = audioFile.processingFormat;
  140. } @catch (NSException *exception) {
  141. audioFile = nil;
  142. audioFormat = nil;
  143. } @finally {
  144. if (error) {
  145. // 错误回调
  146. }
  147. else { // 加载成功
  148. if (isBgm) {
  149. self.bgAudioFile = audioFile;
  150. self.bgAudioFormat = audioFormat;
  151. }
  152. else {
  153. self.audioFile = audioFile;
  154. self.audioFormat = audioFormat;
  155. }
  156. }
  157. }
  158. }
  159. - (void)resetMixBuffer {
  160. AVAudioFrameCount minFrameCount = (AVAudioFrameCount)MIN(self.bgAudioFile.length, self.audioFile.length);
  161. // mixBuffer
  162. AVAudioFormat *outputFormat = [[AVAudioFormat alloc] initStandardFormatWithSampleRate:self.bgAudioFormat.sampleRate channels:2];
  163. self.mixBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:outputFormat frameCapacity:minFrameCount];
  164. self.mixBuffer.frameLength = minFrameCount;
  165. }
  166. - (void)prepareBufferFrame {
  167. [self resetMixBuffer];
  168. dispatch_main_async_safe(^{
  169. if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerIsReadyPlay:)]) {
  170. self.isReady = YES;
  171. [self.delegate enginePlayerIsReadyPlay:self];
  172. }
  173. });
  174. }
// Preload the next mix chunk.
/// Reads up to READ_FILE_LENGTH frames of background music starting at
/// `startPosition`, reads the recording shifted by `offsetTime` milliseconds
/// (positive → recording sampled ahead of the bgm, negative → behind), mixes
/// the two with the current volumes and writes the clamped result into
/// self.mixBuffer at index `startPosition - mixStartPosition`.
/// Runs asynchronously on sourceQueue; returns early on cancellation,
/// missing files, EOF, or a failed read.
- (void)prepareBuffer:(AVAudioFramePosition)startPosition offset:(NSInteger)offsetTime mixStart:(AVAudioFramePosition)mixStartPosition {
    @weakObj(self);
    dispatch_async(self.sourceQueue, ^{
        @strongObj(self);
        if (!self || self.isCanceled) {
            return;
        }
        if (!self.bgAudioFile || !self.audioFile) {
            return;
        }
        // Mix only the overlapping portion of the two files.
        AVAudioFramePosition minFrameCount = (AVAudioFramePosition)MIN(self.bgAudioFile.length, self.audioFile.length);
        // Delay in milliseconds converted to a frame count (magnitude only;
        // the sign of offsetTime selects the branch below).
        AVAudioFrameCount offsetFrame = labs(offsetTime)/1000.0 * self.audioFile.processingFormat.sampleRate;
        if (minFrameCount <= startPosition) {
            return; // already past the end of the overlap
        }
        // Chunk size: READ_FILE_LENGTH frames, clamped at EOF.
        AVAudioFrameCount frameToRead = minFrameCount - startPosition > READ_FILE_LENGTH ? READ_FILE_LENGTH : (AVAudioFrameCount)(minFrameCount - startPosition);
        // Background track: read straight from startPosition.
        self.bgAudioFile.framePosition = startPosition;
        AVAudioPCMBuffer *bgBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.bgAudioFile.processingFormat frameCapacity:frameToRead];
        bgBuffer.frameLength = frameToRead;
        BOOL readSuccess = [self.bgAudioFile readIntoBuffer:bgBuffer frameCount:frameToRead error:nil];
        if (!readSuccess) {
            return;
        }
        AVAudioPCMBuffer *recordBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.audioFile.processingFormat frameCapacity:frameToRead];
        recordBuffer.frameLength = frameToRead;
        if (offsetTime >= 0) { // recording must play earlier: skip ahead by offsetFrame
            self.audioFile.framePosition = startPosition + offsetFrame;
            AVAudioFrameCount audioReadFrame = frameToRead;
            if (startPosition + offsetFrame + frameToRead > minFrameCount) { // clamp at EOF
                audioReadFrame = (AVAudioFrameCount)(minFrameCount - startPosition - offsetFrame);
            }
            if (audioReadFrame <= frameToRead) {
                BOOL isSuccess = [self.audioFile readIntoBuffer:recordBuffer frameCount:audioReadFrame error:nil];
                if (!isSuccess) {
                    return;
                }
            }
        }
        else { // recording must play later: shift the read position back
            AVAudioFramePosition audioFramePosition = startPosition - offsetFrame;
            if (audioFramePosition > 0) {
                self.audioFile.framePosition = audioFramePosition;
                AVAudioFrameCount audioReadFrame = frameToRead;
                if (audioFramePosition + frameToRead > minFrameCount) { // clamp at EOF
                    audioReadFrame = (AVAudioFrameCount)(minFrameCount - audioFramePosition);
                }
                // AVAudioFrameCount is unsigned (uint32_t)
                if (audioReadFrame <= frameToRead) {
                    BOOL isSuccess = [self.audioFile readIntoBuffer:recordBuffer frameCount:audioReadFrame error:nil];
                    if (!isSuccess) {
                        return;
                    }
                }
            }
            else {
                self.audioFile.framePosition = 0;
                // Only the tail of this chunk overlaps the recording: read
                // that part and place it at the end of recordBuffer so the
                // audio lines up. The head stays zero-filled (silence).
                if (offsetFrame - startPosition < frameToRead) {
                    AVAudioFrameCount readCount = (AVAudioFrameCount)(offsetFrame - startPosition);
                    AVAudioPCMBuffer *tempBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.audioFile.processingFormat frameCapacity:readCount];
                    tempBuffer.frameLength = readCount;
                    BOOL isSuccess = [self.audioFile readIntoBuffer:tempBuffer error:nil];
                    if (!isSuccess) {
                        return;
                    }
                    float *tempData = tempBuffer.floatChannelData[0];
                    float *recordData = recordBuffer.floatChannelData[0];
                    // Copy into the tail of recordBuffer.
                    AVAudioFrameCount startFrame = frameToRead - readCount;
                    for (AVAudioFrameCount i = 0; i < readCount; i++) {
                        recordData[startFrame + i] = tempData[i];
                    }
                }
            }
        }
        float *bgLeftChannel = bgBuffer.floatChannelData[0];
        float *bgRightChannel = bgBuffer.floatChannelData[1];
        if (bgBuffer.format.channelCount == 1) {
            // Mono background: duplicate the single channel on both sides.
            bgRightChannel = bgBuffer.floatChannelData[0];
        }
        // The recording is treated as mono — only channel 0 is read.
        // NOTE(review): assumes the record file is single-channel; confirm.
        float *recordLeftChannel = recordBuffer.floatChannelData[0];
        float *mixLeftChannel = self.mixBuffer.floatChannelData[0];
        float *mixRightChannel = self.mixBuffer.floatChannelData[1];
        for (int frame = 0; frame < frameToRead; frame++) {
            AVAudioFramePosition mixIndex = frame + startPosition - mixStartPosition;
            // Out-of-range frames mix as silence.
            float leftChannel = (frame < bgBuffer.frameLength) ? bgLeftChannel[frame] : 0;
            float rightChannel = (frame < bgBuffer.frameLength) ? bgRightChannel[frame] : 0;
            float recordData = (frame < recordBuffer.frameLength) ? recordLeftChannel[frame] : 0;
            float mixLeftData = [self mixChannelData:leftChannel bgVolume:self.bgVolume recordData:recordData recordVolume:self.recordVolume];
            float mixRightData = [self mixChannelData:rightChannel bgVolume:self.bgVolume recordData:recordData recordVolume:self.recordVolume];
            // Guard the write range (prevents out-of-bounds buffer writes),
            // then clamp to [-1, 1].
            if (mixIndex >= 0 && mixIndex < self.mixBuffer.frameLength) {
                mixLeftChannel[mixIndex] = fminf(fmaxf(mixLeftData, -1.0f), 1.0f);
                mixRightChannel[mixIndex] = fminf(fmaxf(mixRightData, -1.0f), 1.0f);
            }
        }
    });
}
/// Rebuilds the mix buffer and schedules it on the player node starting at
/// the given frame position.
/// NOTE(review): -prepareBuffer:offset:mixStart: fills self.mixBuffer
/// asynchronously on sourceQueue, while the freshly allocated (zeroed)
/// buffer is scheduled here immediately — the first frames may render as
/// silence until the fill catches up. Confirm this ordering is intentional.
- (void)scheduleBufferFromPosition:(AVAudioFramePosition)startPosition {
    [self resetMixBuffer];
    self.startPosition = startPosition;
    [self prepareBuffer:startPosition offset:self.offsetTime mixStart:startPosition];
    // Schedule the buffer, interrupting anything previously queued at a loop point.
    [self.nodePlayer scheduleBuffer:self.mixBuffer atTime:nil options:AVAudioPlayerNodeBufferInterruptsAtLoop completionHandler:^{
    }];
}
  284. - (float)mixChannelData:(float)bgData bgVolume:(float)bgVolume recordData:(float)recordData recordVolume:(float)recordVolume {
  285. return (bgData * bgVolume + recordData * recordVolume) / 2;
  286. }
  287. - (void)changeVolume:(float)bgVolume recordVolume:(float)recordVolume {
  288. // NSLog(@"bg volume ---- %f, record volume ---- %f", bgVolume, recordVolume);
  289. self.bgVolume = bgVolume;
  290. self.recordVolume = recordVolume;
  291. }
  292. // 打断处理
  293. - (void)handleInterruption:(NSNotification *)notification {
  294. NSDictionary *info = notification.userInfo;
  295. AVAudioSessionInterruptionType type = [info[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
  296. if (type == AVAudioSessionInterruptionTypeBegan) {
  297. NSLog(@"---- 播放打断");
  298. //Handle InterruptionBegan
  299. // 停止播放
  300. if (self.nodePlayer.isPlaying) {
  301. [self.nodePlayer stop];
  302. [self sendInterruptError:nil];
  303. }
  304. }
  305. else if (type == AVAudioSessionInterruptionTypeEnded) {
  306. AVAudioSessionInterruptionOptions options = [info[AVAudioSessionInterruptionOptionKey] unsignedIntegerValue];
  307. if (options == AVAudioSessionInterruptionOptionShouldResume) {
  308. //Handle Resume
  309. [self resumeAudioSession];
  310. NSLog(@"---- 播放恢复");
  311. }
  312. }
  313. }
  314. - (void)resumeAudioSession {
  315. NSError *error = nil;
  316. [[AVAudioSession sharedInstance] setActive:YES error:&error];
  317. if (error) {
  318. NSLog(@"------ error desc %@", error.description);
  319. }
  320. }
  321. - (void)handleRouteChange:(NSNotification *)notification {
  322. NSDictionary *info = notification.userInfo;
  323. AVAudioSessionRouteChangeReason reason = [info[AVAudioSessionRouteChangeReasonKey] unsignedIntegerValue];
  324. if (reason == AVAudioSessionRouteChangeReasonOldDeviceUnavailable) {
  325. // 耳机拔出时暂停音频
  326. if (self.nodePlayer.isPlaying) {
  327. NSError *error = nil;
  328. [self sendInterruptError:error];
  329. }
  330. } else if (reason == AVAudioSessionRouteChangeReasonNewDeviceAvailable) {
  331. // 耳机插入时恢复音频
  332. if (self.nodePlayer.isPlaying) {
  333. NSError *error = nil;
  334. [self sendInterruptError:error];
  335. }
  336. }
  337. }
  338. - (void)sendInterruptError:(NSError *)error {
  339. if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerDidError:error:)]) {
  340. [self.delegate enginePlayerDidError:self error:error];
  341. }
  342. }
  343. #pragma mark ------ play action
  344. - (void)changeRecordDelay:(NSInteger)delayMs {
  345. self.offsetTime = delayMs;
  346. }
  347. - (void)seekToTimePlay:(NSInteger)time {
  348. if (self.audioEngine.isRunning == NO) {
  349. [self startEngine];
  350. }
  351. if (self.audioEngine.isRunning) {
  352. [self seekAudioWithStartTime:time needPlay:YES];
  353. }
  354. else {
  355. [self sendInterruptError:nil];
  356. }
  357. }
  358. - (void)stopPlay {
  359. dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
  360. if (self.nodePlayer.isPlaying) {
  361. [self.nodePlayer stop];
  362. }
  363. [self stopTimer];
  364. if (self.audioEngine.isRunning) {
  365. [self.audioEngine stop];
  366. }
  367. });
  368. }
  369. - (void)seekToTime:(NSInteger)time {
  370. if (self.audioEngine.isRunning == NO) {
  371. [self startEngine];
  372. }
  373. if (self.audioEngine.isRunning) {
  374. [self seekAudioWithStartTime:time needPlay:NO];
  375. }
  376. }
  377. - (void)seekAudioWithStartTime:(NSTimeInterval)startTime needPlay:(BOOL)needPlay {
  378. if (self.audioEngine.isRunning == NO) {
  379. [self startEngine];
  380. }
  381. if (self.audioEngine.isRunning) {
  382. if (self.nodePlayer.isPlaying) {
  383. [self.nodePlayer stop];
  384. }
  385. }
  386. AVAudioFramePosition startFrame = startTime / 1000.0 * self.audioFormat.sampleRate;
  387. // 跳转进度
  388. self.currentFrame = startFrame;
  389. [self scheduleBufferFromPosition:startFrame];
  390. if (needPlay) {
  391. [self.nodePlayer play];
  392. [self startTimer];
  393. }
  394. }
  395. // 调整偏移
  396. - (void)seekOffsetTime:(NSInteger)offsetTime {
  397. self.offsetTime = offsetTime;
  398. NSTimeInterval currentTime = [self getCurrentPlayTime];
  399. [self seekToTimePlay:currentTime];
  400. }
  401. - (void)freePlayer {
  402. self.isCanceled = YES;
  403. [self stopPlay];
  404. }
  405. - (void)startTimer {
  406. self.timeCount = 0;
  407. [self.timer setFireDate:[NSDate distantPast]];
  408. }
  409. - (void)stopTimer {
  410. [self.timer setFireDate:[NSDate distantFuture]];//暂停计时器
  411. }
  412. #pragma mark ---- lazying
  413. - (dispatch_queue_t)sourceQueue {
  414. if (!_sourceQueue) {
  415. _sourceQueue = dispatch_queue_create("ks_MutilSourceQueue", DISPATCH_QUEUE_SERIAL);
  416. }
  417. return _sourceQueue;
  418. }
  419. - (NSTimer *)timer {
  420. if (!_timer) {
  421. __weak typeof(self)weakSelf = self;
  422. _timer = [NSTimer scheduledTimerWithTimeInterval:0.01 repeats:YES block:^(NSTimer * _Nonnull timer) {
  423. [weakSelf timeFunction];
  424. }];
  425. [[NSRunLoop currentRunLoop] addTimer:_timer forMode:NSRunLoopCommonModes];
  426. [_timer setFireDate:[NSDate distantFuture]];
  427. }
  428. return _timer;
  429. }
  430. - (void)timeFunction {
  431. if (self.isCanceled) {
  432. return;
  433. }
  434. self.totalDuration = [self getTotalTime];
  435. NSTimeInterval currentTime = [self getCurrentPlayTime];
  436. float progress = currentTime/self.totalDuration;
  437. NSDate *date = [NSDate date];
  438. NSTimeInterval inteveral = [date timeIntervalSince1970];
  439. if (currentTime >= self.totalDuration) {
  440. if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayFinished:)]) {
  441. [self.delegate enginePlayFinished:self];
  442. }
  443. }
  444. else {
  445. // 定时器10ms出触发一次 buffer每100ms执行一次
  446. if (self.timeCount % 10 == 0) {
  447. [self scheduleMixBuffer];
  448. }
  449. self.timeCount++;
  450. if (self.delegate && [self.delegate respondsToSelector:@selector(updatePlayProgress:andTotalTime:andProgress:currentInterval:inPlayer:)]) {
  451. [self.delegate updatePlayProgress:currentTime andTotalTime:self.totalDuration andProgress:progress currentInterval:inteveral*1000 inPlayer:self];
  452. }
  453. }
  454. }
  455. - (void)scheduleMixBuffer {
  456. if (self.nodePlayer.isPlaying) {
  457. [self prepareBuffer:self.currentFrame offset:self.offsetTime mixStart:self.startPosition];
  458. }
  459. }
  460. - (NSTimeInterval)getCurrentPlayTime {
  461. AVAudioTime *nodeTime = [self.nodePlayer lastRenderTime];
  462. if (nodeTime && self.bgAudioFile) {
  463. AVAudioTime *playerTime = [self.nodePlayer playerTimeForNodeTime:nodeTime];
  464. AVAudioFramePosition currentFrame = self.currentFrame;
  465. if (playerTime) {
  466. self.sampleRate = [playerTime sampleRate];
  467. AVAudioFramePosition currentFrame = [playerTime sampleTime];
  468. if (currentFrame <= 0) {
  469. currentFrame = 0;
  470. }
  471. currentFrame += self.startPosition;
  472. self.currentFrame = currentFrame;
  473. }
  474. else {
  475. NSLog(@"播放已停止");
  476. }
  477. double elapsedSamples = (double)currentFrame;
  478. NSTimeInterval currentTime = elapsedSamples / self.sampleRate;
  479. // NSLog(@"当前时间----- %f",currentTime*1000);
  480. return currentTime*1000;
  481. }
  482. else {
  483. return 0;
  484. }
  485. }
  486. - (NSTimeInterval)getTotalTime {
  487. NSTimeInterval recordTotalDuration = (AVAudioFramePosition)self.audioFile.length * 1000.0 / self.audioFormat.sampleRate;
  488. return recordTotalDuration;
  489. }
  490. - (BOOL)isPlaying {
  491. if (self.nodePlayer) {
  492. return self.nodePlayer.isPlaying;
  493. }
  494. return NO;
  495. }
  496. - (void)dealloc {
  497. NSLog(@"---- KSMergeEnginePlayer dealloc");
  498. if (_audioEngine) {
  499. [_audioEngine disconnectNodeInput:self.nodePlayer];
  500. [_audioEngine detachNode:self.nodePlayer];
  501. }
  502. // 停止并清理定时器
  503. if (_timer) {
  504. [_timer invalidate];
  505. _timer = nil;
  506. }
  507. [[NSNotificationCenter defaultCenter] removeObserver:self];
  508. }
  509. @end