
1. Optimizations to the permission-request flow (a usage sketch of the new callback-based checks follows below).
2. Optimizations related to the work-merge (composition) player.
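
As context for item 1, the diffs below replace RecordCheckManager's synchronous checks (checkMicPermissionAvaiable, checkCameraPremissionAvaiable, checkPhotoLibraryPremissionAvaiable) with callback-based variants. A minimal usage sketch, with the import path and callback signature inferred from the framework layout and the call sites in this commit rather than taken from RecordCheckManager.h itself:

    #import <KSToolLibrary/RecordCheckManager.h>  // assumed header location, see file 35 in the list below

    // Request microphone access before starting the evaluation/recording flow.
    // The call sites in this commit update UI directly inside the callback, so the
    // result is presumably delivered on the main queue.
    [RecordCheckManager checkMicPermissionAvaiableCallback:^(PREMISSIONTYPE type) {
        if (type == PREMISSIONTYPE_YES) {
            // granted: continue with the evaluating / recording flow
        }
        else {
            // denied: surface the permission alert, e.g.
            // [self showAlertWithMessage:@"请开启麦克风访问权限" type:CHECKDEVICETYPE_MIC];
        }
    }];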

Steven committed 7 months ago
commit 9b2a0f02f2
36 changed files with 1189 additions and 553 deletions
  1. + 29 - 9  KulexiuForTeacher/KulexiuForTeacher.xcodeproj/project.pbxproj
  2. + 39 - 16  KulexiuForTeacher/KulexiuForTeacher/Common/Base/AccompanyWebView/AudioEnginePlayer.m
  3. + 93 - 64  KulexiuForTeacher/KulexiuForTeacher/Common/Base/AccompanyWebView/KSAccompanyWebViewController.m
  4. + 41 - 21  KulexiuForTeacher/KulexiuForTeacher/Common/Base/AccompanyWebView/KSCloudWebManager.m
  5. + 62 - 46  KulexiuForTeacher/KulexiuForTeacher/Common/Base/WebView/KSBaseWKWebViewController.m
  6. + 18 - 26  KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/KSMediaMergeView.m
  7. + 69 - 52  KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/KSMergeAudioControlView.m
  8. + 8 - 10  KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/KSMergeAudioControlView.xib
  9. + 8 - 0  KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/KSPlayerSliderView.m
  10. + 27 - 0  KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergeAudioAnimation/KSRealtimeAnalyzer.h
  11. + 256 - 0  KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergeAudioAnimation/KSRealtimeAnalyzer.m
  12. + 23 - 0  KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergeAudioAnimation/KSSpectrumView.h
  13. + 109 - 0  KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergeAudioAnimation/KSSpectrumView.m
  14. + 7 - 1  KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergePlayer/KSMergeEnginePlayer.h
  15. + 218 - 201  KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergePlayer/KSMergeEnginePlayer.m
  16. + 8 - 2  KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/VideoPlayerView/KSVideoPlayerView.m
  17. + 33 - 17  KulexiuForTeacher/KulexiuForTeacher/Module/Chat/Controller/TXCustom/KSTXBaseChatViewController.m
  18. + 2 - 2  KulexiuForTeacher/KulexiuForTeacher/Module/Chat/Group/View/ChatComplainBodyView.m
  19. + 29 - 16  KulexiuForTeacher/KulexiuForTeacher/Module/Course/AccompanyCourse/Controller/AccompanyDetailViewController.m
  20. + 2 - 1  KulexiuForTeacher/KulexiuForTeacher/Module/Course/AccompanyCourse/View/AccompanyAlertView.m
  21. + 28 - 16  KulexiuForTeacher/KulexiuForTeacher/Module/Course/MusicRoom/Controller/MusicRoomViewController.m
  22. + 7 - 8  KulexiuForTeacher/KulexiuForTeacher/Module/Home/Controller/HomeViewController.m
  23. + 14 - 17  KulexiuForTeacher/KulexiuForTeacher/Module/Home/Music/Controller/MyMusicViewController.m
  24. + 6 - 8  KulexiuForTeacher/KulexiuForTeacher/Module/Home/ScanView/Controller/KSScanViewController.m
  25. + 7 - 8  KulexiuForTeacher/KulexiuForTeacher/Module/Mine/Controller/MineViewController.m
  26. + 2 - 2  KulexiuForTeacher/KulexiuForTeacher/Module/Mine/Works/View/MusicPublicContentView.m
  27. + 0 - 3  KulexiuForTeacher/KulexiuForTeacher/Module/TXClassRoom/View/CloseCourse/KSCloseCourseView.m
  28. BIN  KulexiuForTeacher/KulexiuForTeacher/ToolKit/CloudAccompanyLibrary.framework/CloudAccompanyLibrary
  29. + 34 - 0  KulexiuForTeacher/KulexiuForTeacher/ToolKit/CloudAccompanyLibrary.framework/Headers/KSCloudWebViewController.h
  30. BIN  KulexiuForTeacher/KulexiuForTeacher/ToolKit/CloudAccompanyLibrary.framework/_CodeSignature/CodeDirectory
  31. BIN  KulexiuForTeacher/KulexiuForTeacher/ToolKit/CloudAccompanyLibrary.framework/_CodeSignature/CodeRequirements-1
  32. + 3 - 3  KulexiuForTeacher/KulexiuForTeacher/ToolKit/CloudAccompanyLibrary.framework/_CodeSignature/CodeResources
  33. BIN  KulexiuForTeacher/KulexiuForTeacher/ToolKit/CloudAccompanyLibrary.framework/_CodeSignature/CodeSignature
  34. + 0 - 1  KulexiuForTeacher/KulexiuForTeacher/ToolKit/KSToolLibrary.framework/Headers/KSAQRecordManager.h
  35. + 7 - 3  KulexiuForTeacher/KulexiuForTeacher/ToolKit/KSToolLibrary.framework/Headers/RecordCheckManager.h
  36. BIN  KulexiuForTeacher/KulexiuForTeacher/ToolKit/KSToolLibrary.framework/KSToolLibrary

+ 29 - 9
KulexiuForTeacher/KulexiuForTeacher.xcodeproj/project.pbxproj

@@ -744,6 +744,8 @@
 		BC84183C2AC2D83700D8F90E /* ForgetPasswordViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = BC8418392AC2D83700D8F90E /* ForgetPasswordViewController.m */; };
 		BC8418452AC2D9FB00D8F90E /* PasswordCheckBodyView.xib in Resources */ = {isa = PBXBuildFile; fileRef = BC8418422AC2D9FB00D8F90E /* PasswordCheckBodyView.xib */; };
 		BC8418462AC2D9FB00D8F90E /* PasswordCheckBodyView.m in Sources */ = {isa = PBXBuildFile; fileRef = BC8418442AC2D9FB00D8F90E /* PasswordCheckBodyView.m */; };
+		BC85A9D32C6B4D4A003C1ABE /* KSRealtimeAnalyzer.m in Sources */ = {isa = PBXBuildFile; fileRef = BC85A9CF2C6B4D4A003C1ABE /* KSRealtimeAnalyzer.m */; };
+		BC85A9D42C6B4D4A003C1ABE /* KSSpectrumView.m in Sources */ = {isa = PBXBuildFile; fileRef = BC85A9D12C6B4D4A003C1ABE /* KSSpectrumView.m */; };
 		BC86CB172AC2E72000450EED /* KSNewConfirmAlertView.m in Sources */ = {isa = PBXBuildFile; fileRef = BC86CB162AC2E72000450EED /* KSNewConfirmAlertView.m */; };
 		BC86CB192AC2E72500450EED /* KSNewConfirmAlertView.xib in Resources */ = {isa = PBXBuildFile; fileRef = BC86CB182AC2E72500450EED /* KSNewConfirmAlertView.xib */; };
 		BC8831002873D26000C702A0 /* LiveVideoModel.m in Sources */ = {isa = PBXBuildFile; fileRef = BC8830FE2873D25F00C702A0 /* LiveVideoModel.m */; };
@@ -870,7 +872,6 @@
 		BCB399B327F94B5A00AFF376 /* LTSCalendarBottomView.xib in Resources */ = {isa = PBXBuildFile; fileRef = BCB399B127F94B5A00AFF376 /* LTSCalendarBottomView.xib */; };
 		BCB399BC27F9831D00AFF376 /* CourseForLiveCell.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB399BA27F9831D00AFF376 /* CourseForLiveCell.m */; };
 		BCB399BD27F9831D00AFF376 /* CourseForLiveCell.xib in Resources */ = {isa = PBXBuildFile; fileRef = BCB399BB27F9831D00AFF376 /* CourseForLiveCell.xib */; };
-		BCB3D89A2C4E68130091B1EB /* KSToolLibrary.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCB3D8992C4E68130091B1EB /* KSToolLibrary.framework */; };
 		BCB633F627F6A18200ACFDCF /* LocalRenderManager.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB633DC27F6A18100ACFDCF /* LocalRenderManager.m */; };
 		BCB633F727F6A18200ACFDCF /* ClassroomTimerManager.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB633DD27F6A18100ACFDCF /* ClassroomTimerManager.m */; };
 		BCB633F827F6A18200ACFDCF /* KSTipsView.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB633E027F6A18100ACFDCF /* KSTipsView.m */; };
@@ -883,6 +884,8 @@
 		BCB908FD2850C9CB00F5FF69 /* MusicChooseSearchView.xib in Resources */ = {isa = PBXBuildFile; fileRef = BCB908FC2850C9CB00F5FF69 /* MusicChooseSearchView.xib */; };
 		BCB909042851E25D00F5FF69 /* KSShareChooseViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB909032851E25D00F5FF69 /* KSShareChooseViewController.m */; };
 		BCB909072851E32C00F5FF69 /* ShareChooseMainView.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB909062851E32C00F5FF69 /* ShareChooseMainView.m */; };
+		BCB950A42C6AF08400C62508 /* KSToolLibrary.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCB950A32C6AF08400C62508 /* KSToolLibrary.framework */; };
+		BCB950A62C6AF0A800C62508 /* CloudAccompanyLibrary.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCB950A52C6AF0A800C62508 /* CloudAccompanyLibrary.framework */; };
 		BCB9FA1C286D537E005D766B /* KSScanViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB9FA1B286D537E005D766B /* KSScanViewController.m */; };
 		BCB9FA1F286D539A005D766B /* ScanNavView.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB9FA1E286D539A005D766B /* ScanNavView.m */; };
 		BCB9FA21286D53A1005D766B /* ScanNavView.xib in Resources */ = {isa = PBXBuildFile; fileRef = BCB9FA20286D53A1005D766B /* ScanNavView.xib */; };
@@ -1091,7 +1094,6 @@
 		BCF61BEC28042D1A0000ACFE /* InstrumentMessageModel.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF61BEB28042D1A0000ACFE /* InstrumentMessageModel.m */; };
 		BCF61BEF28042F9B0000ACFE /* InstrumentChooseBottonView.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF61BEE28042F9B0000ACFE /* InstrumentChooseBottonView.m */; };
 		BCF61BF128042FA90000ACFE /* InstrumentChooseBottonView.xib in Resources */ = {isa = PBXBuildFile; fileRef = BCF61BF028042FA90000ACFE /* InstrumentChooseBottonView.xib */; };
-		BCFC09CC2C47FB5C009A727F /* CloudAccompanyLibrary.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCFC09CB2C47FB5C009A727F /* CloudAccompanyLibrary.framework */; };
 		BCFC09DD2C48E4A0009A727F /* UserVip.m in Sources */ = {isa = PBXBuildFile; fileRef = BCFC09DC2C48E4A0009A727F /* UserVip.m */; };
 		BCFE540928168DFF00AD6786 /* KSButtonStatusView.m in Sources */ = {isa = PBXBuildFile; fileRef = BCFE540828168DFF00AD6786 /* KSButtonStatusView.m */; };
 		BCFE541028178FF600AD6786 /* MyIncomeViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = BCFE540F28178FF600AD6786 /* MyIncomeViewController.m */; };
@@ -2356,6 +2358,10 @@
 		BC8418422AC2D9FB00D8F90E /* PasswordCheckBodyView.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = PasswordCheckBodyView.xib; sourceTree = "<group>"; };
 		BC8418432AC2D9FB00D8F90E /* PasswordCheckBodyView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PasswordCheckBodyView.h; sourceTree = "<group>"; };
 		BC8418442AC2D9FB00D8F90E /* PasswordCheckBodyView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = PasswordCheckBodyView.m; sourceTree = "<group>"; };
+		BC85A9CE2C6B4D4A003C1ABE /* KSRealtimeAnalyzer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = KSRealtimeAnalyzer.h; sourceTree = "<group>"; };
+		BC85A9CF2C6B4D4A003C1ABE /* KSRealtimeAnalyzer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = KSRealtimeAnalyzer.m; sourceTree = "<group>"; };
+		BC85A9D02C6B4D4A003C1ABE /* KSSpectrumView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = KSSpectrumView.h; sourceTree = "<group>"; };
+		BC85A9D12C6B4D4A003C1ABE /* KSSpectrumView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = KSSpectrumView.m; sourceTree = "<group>"; };
 		BC86CB152AC2E72000450EED /* KSNewConfirmAlertView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KSNewConfirmAlertView.h; sourceTree = "<group>"; };
 		BC86CB162AC2E72000450EED /* KSNewConfirmAlertView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KSNewConfirmAlertView.m; sourceTree = "<group>"; };
 		BC86CB182AC2E72500450EED /* KSNewConfirmAlertView.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = KSNewConfirmAlertView.xib; sourceTree = "<group>"; };
@@ -2553,7 +2559,6 @@
 		BCB399B927F9831D00AFF376 /* CourseForLiveCell.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CourseForLiveCell.h; sourceTree = "<group>"; };
 		BCB399BA27F9831D00AFF376 /* CourseForLiveCell.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CourseForLiveCell.m; sourceTree = "<group>"; };
 		BCB399BB27F9831D00AFF376 /* CourseForLiveCell.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = CourseForLiveCell.xib; sourceTree = "<group>"; };
-		BCB3D8992C4E68130091B1EB /* KSToolLibrary.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = KSToolLibrary.framework; sourceTree = "<group>"; };
 		BCB633DA27F6A18100ACFDCF /* LocalRenderManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = LocalRenderManager.h; sourceTree = "<group>"; };
 		BCB633DB27F6A18100ACFDCF /* ClassroomTimerManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ClassroomTimerManager.h; sourceTree = "<group>"; };
 		BCB633DC27F6A18100ACFDCF /* LocalRenderManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = LocalRenderManager.m; sourceTree = "<group>"; };
@@ -2576,6 +2581,8 @@
 		BCB909032851E25D00F5FF69 /* KSShareChooseViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KSShareChooseViewController.m; sourceTree = "<group>"; };
 		BCB909052851E32C00F5FF69 /* ShareChooseMainView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ShareChooseMainView.h; sourceTree = "<group>"; };
 		BCB909062851E32C00F5FF69 /* ShareChooseMainView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ShareChooseMainView.m; sourceTree = "<group>"; };
+		BCB950A32C6AF08400C62508 /* KSToolLibrary.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = KSToolLibrary.framework; sourceTree = "<group>"; };
+		BCB950A52C6AF0A800C62508 /* CloudAccompanyLibrary.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = CloudAccompanyLibrary.framework; sourceTree = "<group>"; };
 		BCB9FA1A286D537E005D766B /* KSScanViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KSScanViewController.h; sourceTree = "<group>"; };
 		BCB9FA1B286D537E005D766B /* KSScanViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = KSScanViewController.m; sourceTree = "<group>"; };
 		BCB9FA1D286D539A005D766B /* ScanNavView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ScanNavView.h; sourceTree = "<group>"; };
@@ -2913,7 +2920,6 @@
 		BCF880EC2B91C7200007B8F0 /* Config-test.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = "Config-test.xcconfig"; sourceTree = "<group>"; };
 		BCF880EE2B91C7310007B8F0 /* Config-debug.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = "Config-debug.xcconfig"; sourceTree = "<group>"; };
 		BCF880F12B91C7580007B8F0 /* Config-release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = "Config-release.xcconfig"; sourceTree = "<group>"; };
-		BCFC09CB2C47FB5C009A727F /* CloudAccompanyLibrary.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = CloudAccompanyLibrary.framework; sourceTree = "<group>"; };
 		BCFC09DB2C48E49F009A727F /* UserVip.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = UserVip.h; sourceTree = "<group>"; };
 		BCFC09DC2C48E4A0009A727F /* UserVip.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = UserVip.m; sourceTree = "<group>"; };
 		BCFE540728168DFF00AD6786 /* KSButtonStatusView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = KSButtonStatusView.h; sourceTree = "<group>"; };
@@ -2940,15 +2946,15 @@
 				2779334727E3148E0010E277 /* GLKit.framework in Frameworks */,
 				2779334527E314870010E277 /* VideoToolbox.framework in Frameworks */,
 				2779334327E3147C0010E277 /* OpenGLES.framework in Frameworks */,
-				BCFC09CC2C47FB5C009A727F /* CloudAccompanyLibrary.framework in Frameworks */,
 				BC8B6E552856ED0600866917 /* UMCommon.framework in Frameworks */,
 				BC8B6E622856ED0600866917 /* libSocialQQ.a in Frameworks */,
 				BCFEE1902AD15BF5000E888F /* SoundpipeAudioKit in Frameworks */,
 				BC8B6E612856ED0600866917 /* libWeiboSDK.a in Frameworks */,
 				BC8B6E532856ED0600866917 /* UMAPM.framework in Frameworks */,
-				BCB3D89A2C4E68130091B1EB /* KSToolLibrary.framework in Frameworks */,
+				BCB950A42C6AF08400C62508 /* KSToolLibrary.framework in Frameworks */,
 				2779333F27E314640010E277 /* CoreVideo.framework in Frameworks */,
 				BC8B6E572856ED0600866917 /* UMDevice.framework in Frameworks */,
+				BCB950A62C6AF0A800C62508 /* CloudAccompanyLibrary.framework in Frameworks */,
 				BC3A55662BAA799B002E1616 /* KSTunerLibrary.framework in Frameworks */,
 				2779334127E3146B0010E277 /* CoreMedia.framework in Frameworks */,
 				2779333D27E314550010E277 /* AVFoundation.framework in Frameworks */,
@@ -5031,6 +5037,7 @@
 		BC38C4002AF900E100ABFCC2 /* AudioMerge */ = {
 			isa = PBXGroup;
 			children = (
+				BC85A9D22C6B4D4A003C1ABE /* MergeAudioAnimation */,
 				BCED0AE52C463E8D00369AED /* MergePlayer */,
 				BC38C4102AF900E100ABFCC2 /* AudioPlayAnimationView */,
 				BC38C4062AF900E100ABFCC2 /* KSAudioAnimationView.h */,
@@ -5235,11 +5242,11 @@
 		BC3BF61A2B9EAE7A00831494 /* ToolKit */ = {
 			isa = PBXGroup;
 			children = (
+				BC3A55612BAA798A002E1616 /* KSTunerLibrary.framework */,
+				BCB950A32C6AF08400C62508 /* KSToolLibrary.framework */,
+				BCB950A52C6AF0A800C62508 /* CloudAccompanyLibrary.framework */,
 				BC00A65B2BB58F1700231B74 /* LLPhotoBrowse.bundle */,
 				BC00A6592BB58F0000231B74 /* WMPlayer.bundle */,
-				BCB3D8992C4E68130091B1EB /* KSToolLibrary.framework */,
-				BC3A55612BAA798A002E1616 /* KSTunerLibrary.framework */,
-				BCFC09CB2C47FB5C009A727F /* CloudAccompanyLibrary.framework */,
 				BC24570E286C437D00D1F7C0 /* SoundFontFile */,
 				BC3BF6242B9EAF1700831494 /* client.p12 */,
 			);
@@ -6154,6 +6161,17 @@
 			path = PasswordModify;
 			sourceTree = "<group>";
 		};
+		BC85A9D22C6B4D4A003C1ABE /* MergeAudioAnimation */ = {
+			isa = PBXGroup;
+			children = (
+				BC85A9CE2C6B4D4A003C1ABE /* KSRealtimeAnalyzer.h */,
+				BC85A9CF2C6B4D4A003C1ABE /* KSRealtimeAnalyzer.m */,
+				BC85A9D02C6B4D4A003C1ABE /* KSSpectrumView.h */,
+				BC85A9D12C6B4D4A003C1ABE /* KSSpectrumView.m */,
+			);
+			path = MergeAudioAnimation;
+			sourceTree = "<group>";
+		};
 		BC8B641828F3E8D800A08D16 /* AwardAlert */ = {
 			isa = PBXGroup;
 			children = (
@@ -7821,6 +7839,7 @@
 				2779326327E30FD80010E277 /* FSCalendarCollectionView.m in Sources */,
 				BCA1135728A2439D007FAFB9 /* HomeBannerCell.m in Sources */,
 				BC2888522A80DB990064B773 /* AppDelegate+AppService.m in Sources */,
+				BC85A9D32C6B4D4A003C1ABE /* KSRealtimeAnalyzer.m in Sources */,
 				BC106B7A2A8F4586000759A9 /* TXLiveMessageLikeCount.m in Sources */,
 				BCA9CE3927FD93EB00D558C6 /* AccompanyStudentEvaCell.m in Sources */,
 				2723B68C27F1685600E0B90B /* HomeNavView.m in Sources */,
@@ -8020,6 +8039,7 @@
 				BCD457BB2865652C0010B493 /* LiveAnimationView.m in Sources */,
 				2779361527E32C0A0010E277 /* ChatViewController.m in Sources */,
 				BC7CFFD5281801A800CAEB21 /* CardBandBodyView.m in Sources */,
+				BC85A9D42C6B4D4A003C1ABE /* KSSpectrumView.m in Sources */,
 				BCDE3591289A7E4900A9A560 /* KSGroupTagImageView.m in Sources */,
 				27D83F5127F4036E00062476 /* KSNormalAlertView.m in Sources */,
 				BCED5CB4285083AC009A42DE /* ShareMusicViewController.m in Sources */,
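
The MergeAudioAnimation group registered above pulls in KSRealtimeAnalyzer and KSSpectrumView (files 10 to 13 in the list at the top); their sources are part of the commit but not shown on this page. As a rough illustration of the kind of realtime level analysis such a class typically performs (an assumption, not the actual KSRealtimeAnalyzer implementation), a mixer tap computing an RMS level with Accelerate:

    #import <AVFoundation/AVFoundation.h>
    #import <Accelerate/Accelerate.h>

    // Tap the engine's mixer and hand a per-buffer RMS level to the UI
    // (e.g. a spectrum/level view). Names here are illustrative only.
    static void KSInstallLevelTap(AVAudioEngine *engine, void (^levelHandler)(float rms)) {
        AVAudioMixerNode *mixer = engine.mainMixerNode;
        AVAudioFormat *format = [mixer outputFormatForBus:0];
        [mixer installTapOnBus:0 bufferSize:1024 format:format
                         block:^(AVAudioPCMBuffer *buffer, AVAudioTime *when) {
            if (buffer.floatChannelData == NULL) {
                return; // only float PCM is handled in this sketch
            }
            float rms = 0;
            vDSP_rmsqv(buffer.floatChannelData[0], 1, &rms, buffer.frameLength);
            dispatch_async(dispatch_get_main_queue(), ^{
                levelHandler(rms); // feed the meter on the main thread
            });
        }];
    }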

+ 39 - 16
KulexiuForTeacher/KulexiuForTeacher/Common/Base/AccompanyWebView/AudioEnginePlayer.m

@@ -22,10 +22,14 @@
 
 @property (nonatomic, strong) AVAudioFormat *audioFormat;
 
+@property (nonatomic, strong) dispatch_queue_t sourceQueue;
+
 @property (nonatomic, assign) NSTimeInterval totalDuration;
 
 @property (nonatomic, assign) AVAudioFramePosition startPosition; // 开始位置
 
+@property (nonatomic, assign) BOOL isPlayEnd;  // 是否播放完成
+
 @property (nonatomic, assign) BOOL isInterrupt; // 是否被打断
 
 @end
@@ -148,24 +152,26 @@
 }
 
 - (void)prepareNativeSongWithUrl:(NSURL *)nativeMusicUrl {
-    [self loadAudioFile:nativeMusicUrl];
-    
-    if (self.audioFile && self.audioFormat) {
-        [self configEngine];
-        // connect node
-        [self.audioEngine connect:self.bgPlayer to:self.timePitchUnit format:self.audioFormat];
-        [self.audioEngine connect:self.timePitchUnit to:self.audioEngine.mainMixerNode format:self.audioFormat];
-        
-        [self.audioEngine prepare];
-        
-        [self startEngine];
-        if (self.audioEngine && self.audioEngine.isRunning) {
-            if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerIsReadyPlay:)]) {
-                [self.delegate enginePlayerIsReadyPlay:self];
+    dispatch_async(self.sourceQueue, ^{
+        [self loadAudioFile:nativeMusicUrl];
+        if (self.audioFile && self.audioFormat) {
+            [self configEngine];
+            // connect node
+            [self.audioEngine connect:self.bgPlayer to:self.timePitchUnit format:self.audioFormat];
+            [self.audioEngine connect:self.timePitchUnit to:self.audioEngine.mainMixerNode format:self.audioFormat];
+            
+            [self.audioEngine prepare];
+            
+            [self startEngine];
+            if (self.audioEngine && self.audioEngine.isRunning) {
+                dispatch_main_async_safe(^{
+                    if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerIsReadyPlay:)]) {
+                        [self.delegate enginePlayerIsReadyPlay:self];
+                    }
+                });
             }
         }
-    }
-        
+    });
 }
 
 - (void)startEngine {
@@ -259,6 +265,8 @@
 
 
 - (void)startTimer {
+    NSLog(@"----- start timer ");
+    self.isPlayEnd = NO;
     [self.timer setFireDate:[NSDate distantPast]];
 }
 
@@ -290,6 +298,15 @@
     if (self.delegate && [self.delegate respondsToSelector:@selector(updatePlayProgress:andTotalTime:andProgress:currentInterval:inPlayer:)]) {
         [self.delegate updatePlayProgress:currentTime andTotalTime:self.totalDuration andProgress:progress currentInterval:inteveral*1000 inPlayer:self];
     }
+//    NSLog(@"------- current progress %f", progress);
+    if (progress >= 1 && self.isPlayEnd == NO) {
+        self.isPlayEnd = YES;
+        NSLog(@"0---isPlayEnd--------");
+        // 播放完成
+        if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayFinished:)]) {
+            [self.delegate enginePlayFinished:self];
+        }
+    }
 }
 
 - (void)setRate:(float)rate {
@@ -350,4 +367,10 @@
     return NO;
 }
 
+- (dispatch_queue_t)sourceQueue {
+    if (!_sourceQueue) {
+        _sourceQueue = dispatch_queue_create("KSAudioEnginePlayer_queue", DISPATCH_QUEUE_SERIAL);
+    }
+    return _sourceQueue;
+}
 @end
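
The reworked prepareNativeSongWithUrl: above loads the file on a private serial queue and then uses dispatch_main_async_safe to notify the delegate on the main thread. That macro is not defined in this diff; it is presumably equivalent to the common pattern below (an assumption; the project's own definition may differ):

    // Run the block immediately if we are already on the main queue,
    // otherwise dispatch it asynchronously onto the main queue.
    #ifndef dispatch_main_async_safe
    #define dispatch_main_async_safe(block)                                   \
        if (dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL) ==         \
            dispatch_queue_get_label(dispatch_get_main_queue())) {            \
            block();                                                          \
        } else {                                                              \
            dispatch_async(dispatch_get_main_queue(), block);                 \
        }
    #endif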

+ 93 - 64
KulexiuForTeacher/KulexiuForTeacher/Common/Base/AccompanyWebView/KSAccompanyWebViewController.m

@@ -14,7 +14,6 @@
 #import <CloudAccompanyLibrary/KSVideoRecordManager.h>  // 视频录制
 #import <CloudAccompanyLibrary/KSCloudBeatView.h>     // 节拍器
 #import <KSToolLibrary/MidiPlayerEngine.h>    // midi 播放
-//#import <CloudAccompanyLibrary/kSNewPlayer.h>         // mp3 播放器
 
 #import "AccompanyLoadingView.h"
 
@@ -459,7 +458,6 @@
     if (isBack) { // 页面销毁才删除
         if (_AQManager) {
             [_AQManager freeAudioQueue];
-            _AQManager = nil;
         }
         // 如果退出评测页面 清除 playerEngine
         if (self.playerEngine) {
@@ -507,27 +505,27 @@
             else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"startEvaluating"]) { // 开始评测
                 [self configRecordManager];
                 self.hasSendStartMessage = NO;
-                PREMISSIONTYPE isOk = [RecordCheckManager checkMicPermissionAvaiable];
-                if (isOk == PREMISSIONTYPE_YES) {
-                    self.evaluatParm = [NSMutableDictionary dictionaryWithDictionary:parm];
-                    // 如果socket 连上了
-                    if (self.socketManager.socketReadyState == SR_OPEN) {
-                        NSDictionary *content = [parm ks_dictionaryValueForKey:@"content"];
-                        NSString *sendData = [self configDataCommond:@"musicXml" body:content type:@"SOUND_COMPARE"];
-                        [self sendDataToSocketService:sendData];
-                        [self postMessage:parm];
-                        self.hasSendStartMessage = YES;
+                
+                [RecordCheckManager checkMicPermissionAvaiableCallback:^(PREMISSIONTYPE type) {
+                    if (type == PREMISSIONTYPE_YES) {
+                        self.evaluatParm = [NSMutableDictionary dictionaryWithDictionary:parm];
+                        // 如果socket 连上了
+                        if (self.socketManager.socketReadyState == SR_OPEN) {
+                            NSDictionary *content = [parm ks_dictionaryValueForKey:@"content"];
+                            NSString *sendData = [self configDataCommond:@"musicXml" body:content type:@"SOUND_COMPARE"];
+                            [self sendDataToSocketService:sendData];
+                            [self postMessage:parm];
+                            self.hasSendStartMessage = YES;
+                        }
+                        else {
+                            [self connectSocketService];
+                        }
                     }
                     else {
-                        [self connectSocketService];
+                        [self responseMessage:@"storageUnable" desc:@"没有麦克风访问权限" parm:parm];
+                        [self showAlertWithMessage:@"请开启麦克风访问权限" type:CHECKDEVICETYPE_MIC];
                     }
-                }
-                else {
-                    NSDictionary *postParm = @{@"api" : @"cancelEvaluating",
-                                               @"content" : @{@"reson":@"没有麦克风权限"}
-                    };
-                    [self postMessage:postParm];
-                }
+                }];
             }
             else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"endEvaluating"]) {// 停止评测
                 dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.2 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
@@ -596,19 +594,10 @@
                 [self sendDataToSocketService:sendData];
             }
             else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"openCamera"]) { // 开启摄像头
-                PREMISSIONTYPE canOpenCamera = [RecordCheckManager checkCameraPremissionAvaiable];
-                PREMISSIONTYPE albumEnable =  [RecordCheckManager checkPhotoLibraryPremissionAvaiable];
-
-                if (canOpenCamera == PREMISSIONTYPE_YES) {
-                    self.isCameraOpen = YES;
-                    [self.videoRecordManager setIgnoreAudio:YES];
-                    [self.videoRecordManager configSessiondisplayInView:self.viewContainer];
-                    [self postMessage:parm];
-                }
-                if (albumEnable == PREMISSIONTYPE_NO) {
-                    [self showAlertWithMessage:@"请开启相册访问权限" type:CHECKDEVICETYPE_CAMREA];
-                }
                 
+                [RecordCheckManager checkCameraPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+                    [self afterCheckCameraCheckAlbum:type parm:parm];
+                }];
             }
             else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"closeCamera"]) { // 关闭摄像头
                 self.isCameraOpen = NO;
@@ -618,14 +607,18 @@
                 [self postMessage:parm];
             }
             else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"startCapture"]) { // 开始录制
-                PREMISSIONTYPE isOk = [RecordCheckManager checkPhotoLibraryPremissionAvaiable];
-                if (isOk == PREMISSIONTYPE_YES) {
-                    
-                    [self.videoRecordManager setIgnoreAudio:YES];
-                    self.videoRecordManager.audioUrl = self.AQManager.audioUrl;
-                    [self.videoRecordManager startRecord];
-                    [self postMessage:parm];
-                }
+                [RecordCheckManager checkPhotoLibraryPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+                    if (type == PREMISSIONTYPE_YES) {
+                        [self.videoRecordManager setIgnoreAudio:YES];
+                        self.videoRecordManager.audioUrl = self.AQManager.audioUrl;
+                        [self.videoRecordManager startRecord];
+                        [self postMessage:parm];
+                    }
+                    else {
+//                        [self responseMessage:@"storageUnable" desc:@"没有相册存储权限" parm:parm];
+//                        [self showAlertWithMessage:@"开启相册存储" type:CHECKDEVICETYPE_CAMREA];
+                    }
+                }];
             }
             else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"endCapture"]) { // 结束录制
                 if (self->_videoRecordManager) {
@@ -919,16 +912,25 @@
                 }
             }
             // 跟音
-            else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"cloudToggleFollow"]) {
-                NSDictionary *content = [parm ks_dictionaryValueForKey:@"content"];
-                NSString *status = [content ks_stringValueForKey:@"state"];
-                if ([status isEqualToString:@"start"]) { // 开始
-                    [self configAudioSession];
-                    [self startTuner];
-                }
-                else if ([status isEqualToString:@"end"]) { // 结束
-                    [self stopTuner];
-                }
+            else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"cloudToggleFollow"]) { // 跟音
+                [RecordCheckManager checkMicPermissionAvaiableCallback:^(PREMISSIONTYPE type) {
+                    if (type == PREMISSIONTYPE_YES) {
+                        NSDictionary *content = [parm ks_dictionaryValueForKey:@"content"];
+                        NSString *status = [content ks_stringValueForKey:@"state"];
+                        if ([status isEqualToString:@"start"]) { // 开始
+                            [self configAudioSession];
+                            [self startTuner];
+                        }
+                        else if ([status isEqualToString:@"end"]) { // 结束
+                            [self stopTuner];
+                        }
+                        [self postMessage:parm];
+                    }
+                    else {
+                        [self responseMessage:@"storageUnable" desc:@"没有麦克风权限" parm:parm];
+                        [self showAlertWithMessage:@"请开启麦克风访问权限" type:CHECKDEVICETYPE_MIC];
+                    }
+                }];
             }
             else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"cloudAccompanyMessage"]) { // 获取伴奏 废弃⚠️
                 
@@ -1003,6 +1005,47 @@
     }
 }
 
+- (void)afterCheckCameraCheckAlbum:(PREMISSIONTYPE)cameraType parm:(NSDictionary *)sourceParm {
+    
+    [RecordCheckManager checkPhotoLibraryPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        if (type == PREMISSIONTYPE_YES && cameraType == PREMISSIONTYPE_YES) {
+            self.isCameraOpen = YES;
+            [self.videoRecordManager setIgnoreAudio:YES];
+            [self.videoRecordManager configSessiondisplayInView:self.viewContainer];
+            [self postMessage:sourceParm];
+        }
+        else { //
+            
+            NSString *content = @"";
+            NSString *des = @"";
+            if (cameraType == PREMISSIONTYPE_NO && type == PREMISSIONTYPE_NO) {
+                des = @"没有相机和相册访问权限";
+                content = @"请开启相机和相册访问权限";
+            }
+            else if (cameraType == PREMISSIONTYPE_NO && type == PREMISSIONTYPE_YES) {
+                des = @"没有相机访问权限";
+                content = @"请开启相机访问权限";
+            }
+            else if (cameraType == PREMISSIONTYPE_YES && type == PREMISSIONTYPE_NO) {
+                des = @"没有相册访问权限";
+                content = @"请开启相册访问权限";
+            }
+            [self responseMessage:@"storageUnable" desc:des parm:sourceParm];
+            [self showAlertWithMessage:content type:CHECKDEVICETYPE_CAMREA];
+
+        }
+    }];
+}
+
+- (void)responseMessage:(NSString *)reson desc:(NSString *)desc parm:(NSDictionary *)parm {
+    NSMutableDictionary *sendContent = [NSMutableDictionary dictionaryWithDictionary:[parm ks_dictionaryValueForKey:@"content"]];
+    [sendContent setValue:reson forKey:@"reson"];
+    [sendContent setValue:desc forKey:@"des"];
+    NSMutableDictionary *sendParm = [NSMutableDictionary dictionaryWithDictionary:parm];
+    [sendParm setValue:sendContent forKey:@"content"];
+    [self postMessage:sendParm];
+}
+
 - (void)musicPublishCallBack:(NSDictionary *)content {
     NSMutableDictionary *parm = [NSMutableDictionary dictionary];
     [parm setValue:@"hideComplexButton" forKey:@"api"];
@@ -1017,20 +1060,6 @@
     } failure:^(NSString * _Nonnull desc) {
         
     }];
-    
-//    [self.videoRecordManager uploadRecordVideoSuccess:^(NSString * _Nonnull videoUrl) {
-//        [contentParm setValue:@"success" forKey:@"type"];
-//        [contentParm setValue:videoUrl forKey:@"filePath"];
-//        [contentParm setValue:@"上传成功" forKey:@"message"];
-//        [sendParm setValue:contentParm forKey:@"content"];
-//        
-//        [weakSelf postMessage:sendParm];
-//    } failure:^(NSString * _Nonnull desc) {
-//        [contentParm setValue:@"error" forKey:@"type"];
-//        [contentParm setValue:desc forKey:@"message"];
-//        [sendParm setValue:contentParm forKey:@"content"];
-//        [weakSelf postMessage:sendParm];
-//    }];
 }
 - (void)showAlertWithMessage:(NSString *)message type:(CHECKDEVICETYPE)deviceType {
     [KSPremissionAlert shareInstanceDisplayImage:deviceType message:message showInView:self.view cancel:^{

+ 41 - 21
KulexiuForTeacher/KulexiuForTeacher/Common/Base/AccompanyWebView/KSCloudWebManager.m

@@ -53,31 +53,51 @@
 }
 
 - (void)showWebView:(NSDictionary *)parm fromController:(CustomNavViewController *)navCtrl {
-    PREMISSIONTYPE micEnable = [RecordCheckManager checkMicPermissionAvaiable];
-    PREMISSIONTYPE cameraEnable = [RecordCheckManager checkCameraPremissionAvaiable];
-    if (micEnable == PREMISSIONTYPE_YES && cameraEnable == PREMISSIONTYPE_YES) { //
-        KSCloudWebViewController *ctrl = [[KSCloudWebViewController alloc] init];
-        ctrl.webViewDelegate = self;
-        ctrl.url = [parm ks_stringValueForKey:@"url"];
-        ctrl.parmDic = parm;
-        NSInteger orientation = [parm ks_integerValueForKey:@"orientation"];
-        BOOL isLandScape = orientation == 0 ? YES : NO;
-        ctrl.ks_landScape = isLandScape;
-        [navCtrl pushViewController:ctrl animated:YES];
-    }
-    else {
-        if (micEnable == PREMISSIONTYPE_NO && cameraEnable == PREMISSIONTYPE_NO) { // 如果麦克风权限和摄像头权限都没有
-            [self showAlertWithMessage:@"请开启相机和麦克风访问权限" type:CHECKDEVICETYPE_BOTH];
-        }
-        else if (micEnable == PREMISSIONTYPE_NO) { // 如果没有麦克风权限
-            [self showAlertWithMessage:@"请开启麦克风访问权限" type:CHECKDEVICETYPE_MIC];
+    // 先检测相机 再检测麦克风
+    [RecordCheckManager checkCameraPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        
+        [self afterCheckCameraCheckMic:type parm:parm fromController:navCtrl];
+    }];
+}
+
+- (void)afterCheckCameraCheckMic:(PREMISSIONTYPE)cameraType parm:(NSDictionary *)sourceParm fromController:(CustomNavViewController *)navCtrl {
+    
+    [RecordCheckManager checkMicPermissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        if (type == PREMISSIONTYPE_YES && cameraType == PREMISSIONTYPE_YES) {
+            NSDictionary *valueDic = [sourceParm ks_dictionaryValueForKey:@"content"];
+            
+            KSCloudWebViewController *ctrl = [[KSCloudWebViewController alloc] init];
+            ctrl.webViewDelegate = self;
+            ctrl.url = [valueDic ks_stringValueForKey:@"url"];
+            ctrl.parmDic = valueDic;
+            NSInteger orientation = [valueDic ks_integerValueForKey:@"orientation"];
+            BOOL isLandScape = orientation == 0 ? YES : NO;
+            ctrl.ks_landScape = isLandScape;
+            [navCtrl pushViewController:ctrl animated:YES];
         }
-        else if (cameraEnable == PREMISSIONTYPE_NO) { // 如果没有摄像头权限
-            [self showAlertWithMessage:@"请开启相机访问权限" type:CHECKDEVICETYPE_CAMREA];
+        else { //
+            
+            NSString *content = @"";
+            CHECKDEVICETYPE checkType = CHECKDEVICETYPE_BOTH;
+            if (cameraType == PREMISSIONTYPE_NO && type == PREMISSIONTYPE_NO) {
+                content = @"请开启相机和麦克风访问权限";
+                checkType = CHECKDEVICETYPE_BOTH;
+            }
+            else if (cameraType == PREMISSIONTYPE_NO && type == PREMISSIONTYPE_YES) {
+                content = @"请开启相机访问权限";
+                checkType = CHECKDEVICETYPE_CAMREA;
+            }
+            else if (cameraType == PREMISSIONTYPE_YES && type == PREMISSIONTYPE_NO) {
+                content = @"请开启麦克风访问权限";
+                checkType = CHECKDEVICETYPE_MIC;
+            }
+            [self showAlertWithMessage:content type:checkType];
+
         }
-    }
+    }];
 }
 
+
 - (void)showAlertWithMessage:(NSString *)message type:(CHECKDEVICETYPE)deviceType {
     [KSPremissionAlert shareInstanceDisplayImage:deviceType message:message showInView:[NSObject getKeyWindow] cancel:^{
         

+ 62 - 46
KulexiuForTeacher/KulexiuForTeacher/Common/Base/WebView/KSBaseWKWebViewController.m

@@ -390,37 +390,19 @@ typedef NS_ENUM(NSInteger, CHOOSETYPE) {
     }
     
     else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"openAccompanyWebView"]) { // 打开伴奏
-        PREMISSIONTYPE micEnable = [RecordCheckManager checkMicPermissionAvaiable];
-        PREMISSIONTYPE cameraEnable = [RecordCheckManager checkCameraPremissionAvaiable];
-        if (micEnable == PREMISSIONTYPE_YES && cameraEnable == PREMISSIONTYPE_YES) { // 如果麦克风和摄像头权限都有
-            NSDictionary *valueDic = [parm ks_dictionaryValueForKey:@"content"];
-            KSAccompanyWebViewController *detailCtrl = [[KSAccompanyWebViewController alloc] init];
-            detailCtrl.url = [valueDic ks_stringValueForKey:@"url"];
-            detailCtrl.parmDic = valueDic;
-            NSInteger orientation = [valueDic ks_integerValueForKey:@"orientation"];
-            BOOL isLandScape = orientation == 0 ? YES : NO;
-            detailCtrl.ks_landScape = isLandScape;
-            [self postMessage:parm];
-            [self.navigationController pushViewController:detailCtrl animated:YES];
-        }
-        else {
-            if (micEnable == PREMISSIONTYPE_NO && cameraEnable == PREMISSIONTYPE_NO) { // 如果麦克风权限和摄像头权限都没有
-                [self showAlertWithMessage:@"请开启相机和麦克风访问权限" type:CHECKDEVICETYPE_BOTH];
-            }
-            else if (micEnable == PREMISSIONTYPE_NO) { // 如果没有麦克风权限
-                [self showAlertWithMessage:@"请开启麦克风访问权限" type:CHECKDEVICETYPE_MIC];
-            }
-            else if (cameraEnable == PREMISSIONTYPE_NO) { // 如果没有摄像头权限
-                [self showAlertWithMessage:@"请开启相机访问权限" type:CHECKDEVICETYPE_CAMREA];
-            }
-        }
+        [RecordCheckManager checkCameraPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+            [self afterCheckCameraCheckMic:type parm:parm];
+        }];
     }
     else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"checkAlbum"]) { // 判断权限
-        [self postMessage:parm];
-        PREMISSIONTYPE albumEnable = [RecordCheckManager checkPhotoLibraryPremissionAvaiable];
-        if (albumEnable == PREMISSIONTYPE_NO) {
-            [self showAlertWithMessage:@"请开启相册访问权限" type:CHECKDEVICETYPE_CAMREA];
-        }
+        [RecordCheckManager checkPhotoLibraryPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+            if (type == PREMISSIONTYPE_YES) {
+                [self postMessage:parm];
+            }
+            else {
+                [self showAlertWithMessage:@"请开启相册访问权限" type:CHECKDEVICETYPE_CAMREA];
+            }
+        }];
     }
     else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"shareAchievements"]) { // 分享
         //
@@ -493,25 +475,24 @@ typedef NS_ENUM(NSInteger, CHOOSETYPE) {
     }
     else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"savePicture"]) { // 保存图片到相册
         // 判断相册权限
-        PREMISSIONTYPE albumEnable = [RecordCheckManager checkPhotoLibraryPremissionAvaiable];
-        if (albumEnable == PREMISSIONTYPE_YES) { // 如果有权限
-            NSDictionary *valueDic = [parm ks_dictionaryValueForKey:@"content"];
-            NSString *base64String = [valueDic ks_stringValueForKey:@"base64"];
-            UIImage *saveImage = [self imageWithBase64String:base64String];
-            [[TZImageManager manager] savePhotoWithImage:saveImage completion:^(PHAsset *asset, NSError *error) {
-                if (!error) {
-                    [self savePicCallback:[valueDic ks_stringValueForKey:@"uuid"] isSuccess:YES];
-                }
-                else {
-                    [self savePicCallback:[valueDic ks_stringValueForKey:@"uuid"] isSuccess:NO];
-                }
-            }];
-        }
-        else {
-            if (albumEnable == PREMISSIONTYPE_NO) {
+        [RecordCheckManager checkPhotoLibraryPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+            if (type == PREMISSIONTYPE_YES) {
+                NSDictionary *valueDic = [parm ks_dictionaryValueForKey:@"content"];
+                NSString *base64String = [valueDic ks_stringValueForKey:@"base64"];
+                UIImage *saveImage = [self imageWithBase64String:base64String];
+                [[TZImageManager manager] savePhotoWithImage:saveImage completion:^(PHAsset *asset, NSError *error) {
+                    if (!error) {
+                        [self savePicCallback:[valueDic ks_stringValueForKey:@"uuid"] isSuccess:YES];
+                    }
+                    else {
+                        [self savePicCallback:[valueDic ks_stringValueForKey:@"uuid"] isSuccess:NO];
+                    }
+                }];
+            }
+            else {
                 [self showAlertWithMessage:@"请开启相册访问权限" type:CHECKDEVICETYPE_CAMREA];
             }
-        }
+        }];
     }
     else if ([[parm ks_stringValueForKey:@"api"] isEqualToString:@"getVersion"]) {
         NSDictionary *valueDic = [parm ks_dictionaryValueForKey:@"content"];
@@ -734,6 +715,41 @@ typedef NS_ENUM(NSInteger, CHOOSETYPE) {
     }
 }
 
+- (void)afterCheckCameraCheckMic:(PREMISSIONTYPE)cameraType parm:(NSDictionary *)sourceParm {
+    
+    [RecordCheckManager checkMicPermissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        if (type == PREMISSIONTYPE_YES && cameraType == PREMISSIONTYPE_YES) {
+            NSDictionary *valueDic = [sourceParm ks_dictionaryValueForKey:@"content"];
+            KSAccompanyWebViewController *detailCtrl = [[KSAccompanyWebViewController alloc] init];
+            detailCtrl.url = [valueDic ks_stringValueForKey:@"url"];
+            detailCtrl.parmDic = valueDic;
+            NSInteger orientation = [valueDic ks_integerValueForKey:@"orientation"];
+            BOOL isLandScape = orientation == 0 ? YES : NO;
+            detailCtrl.ks_landScape = isLandScape;
+            [self postMessage:sourceParm];
+            [self.navigationController pushViewController:detailCtrl animated:YES];
+        }
+        else { //
+            
+            NSString *content = @"";
+            CHECKDEVICETYPE checkType = CHECKDEVICETYPE_BOTH;
+            if (cameraType == PREMISSIONTYPE_NO && type == PREMISSIONTYPE_NO) {
+                content = @"请开启相机和麦克风访问权限";
+                checkType = CHECKDEVICETYPE_BOTH;
+            }
+            else if (cameraType == PREMISSIONTYPE_NO && type == PREMISSIONTYPE_YES) {
+                content =  @"请开启相机访问权限";
+                checkType = CHECKDEVICETYPE_CAMREA;
+            }
+            else if (cameraType == PREMISSIONTYPE_YES && type == PREMISSIONTYPE_NO) {
+                content = @"请开启麦克风访问权限";
+                checkType = CHECKDEVICETYPE_MIC;
+            }
+            [self showAlertWithMessage:content type:checkType];
+        }
+    }];
+}
+
 - (void)downloadWithParm:(NSDictionary *)parm {
     MJWeakSelf;
     [COURSEWARE_MANAGER downloadCourseWithParm:parm callback:^(NSDictionary * _Nonnull sendParm) {

+ 18 - 26
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/KSMediaMergeView.m

@@ -108,8 +108,6 @@
 
 @property (nonatomic, assign) NSInteger defaultDelay;
 
-@property (nonatomic, assign) CGFloat bgPlayerRate;
-
 @property (nonatomic, assign) NSInteger evaluateDelay;
 
 @property (nonatomic, assign) BOOL fromDraftPage; // 是否从草稿页面进入
@@ -124,7 +122,6 @@
     self = [super init];
     if (self) {
         self.hasModify = NO;
-        self.bgPlayerRate = 1.0;
         [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(appEnterBackground) name:@"appEnterBackground" object:nil];
         [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(otherLogin) name:@"otherLogin" object:nil];
         [self configAudioSession];
@@ -415,23 +412,26 @@
 }
 
 - (void)backAction {
-    if (self.hasModify) {
-        MJWeakSelf;
-        [self.alertView configTitle:@"提示" descMessage:@"是否将本次录制的作品保存为草稿?" leftButtonTitle:@"取消" rightButtonTitle:@"确认" leftButtonAction:^{
-            [weakSelf removeViewTips:NO];
-        } rightButtonAction:^{
-            [weakSelf saveCurrentDraft:YES];
-        }];
-        [self.alertView showAlert];
-    }
-    else {
-        if (self.mergeCallback) {
-            [self removeViewTips:NO];
+    [self stopPlay];
+    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.1 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
+        if (self.hasModify) {
+            MJWeakSelf;
+            [self.alertView configTitle:@"提示" descMessage:@"是否将本次录制的作品保存为草稿?" leftButtonTitle:@"取消" rightButtonTitle:@"确认" leftButtonAction:^{
+                [weakSelf removeViewTips:NO];
+            } rightButtonAction:^{
+                [weakSelf saveCurrentDraft:YES];
+            }];
+            [self.alertView showAlert];
         }
         else {
-            [self removeView];
+            if (self.mergeCallback) {
+                [self removeViewTips:NO];
+            }
+            else {
+                [self removeView];
+            }
         }
-    }
+    });
 }
 
 #pragma mark ----- lazying
@@ -583,8 +583,8 @@
             break;
         case MERGEACTION_UPLOAD:  // 上传
         {
+            [self stopPlay];
             [self showPublishAlert];
-            
         }
             break;
         case MERGEACTION_HIDEVIEW:
@@ -612,8 +612,6 @@
     switch (type) {
         case PUBLISH_ACTION_PUBLISH:
         {
-            // 暂停播放
-            [self stopPlay];
             self.desc = [NSString isEmptyString:self.publishAlert.publishContainView.textView.text] ? @"我发布了一首演奏作品,快来听听吧~" :self.publishAlert.publishContainView.textView.text;
             if (self.settingImage || self.videoCoverImage) { // 上传图片
                 [self updateWithCoverImage];
@@ -625,8 +623,6 @@
             break;
         case PUBLISH_ACTION_CHOOSEIMG:
         {
-            // 暂停播放
-            [self stopPlay];
             self.isChooseVideoCover = NO;
 
             // 调用相册
@@ -655,8 +651,6 @@
             break;
         case PUBLISH_ACTION_VIDEOCOVER: // 视频封面
         {
-            // 暂停播放
-            [self stopPlay];
             self.isChooseVideoCover = YES;
             // 调用相册
             self.mediaManager = [[KSMediaManager alloc] init];
@@ -684,8 +678,6 @@
             break;
         case PUBLISH_ACTION_VIDEOCROP:
         {
-            // 暂停播放
-            [self stopPlay];
             KSVideoCropViewController *ctrl = [[KSVideoCropViewController alloc] init];
             [ctrl configWithVideoPath:self.videoUrl];
             UIViewController *baseCtrl = [self findViewController];

+ 69 - 52
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/KSMergeAudioControlView.m

@@ -32,10 +32,6 @@
 
 @property (weak, nonatomic) IBOutlet UILabel *offsetTipsLabel;
 
-@property (nonatomic, strong) UITapGestureRecognizer *recordProgressTap;
-
-@property (nonatomic,strong) UITapGestureRecognizer *bgProgressTap;
-
 @property (weak, nonatomic) IBOutlet UIView *recordBubble;
 @property (weak, nonatomic) IBOutlet UILabel *recordBubbleLabel;
 @property (weak, nonatomic) IBOutlet UIView *recordGestureView;
@@ -122,15 +118,6 @@
 }
 
 #pragma mark ----- 音量控制
-- (IBAction)changeRecordVolume:(UISlider *)sender {
-//    [self configRecordVolumeValue:sender.value];
-    
-}
-
-- (IBAction)changeBgVolume:(UISlider *)sender {
-//    [self configBgVolumeValue:sender.value];
-    
-}
 
 - (void)configRecordVolumeValue:(NSInteger)volume {
     self.recordVolume = volume;
@@ -145,9 +132,6 @@
             self.recordBubble.hidden = NO;
         }
     }];
-    if (self.callback) {
-        self.callback(MERGEACTION_MODIFY, self.recordVolume, self.bgVolume, self.offsetTime);
-    }
 }
 
 - (void)configBgVolumeValue:(NSInteger)volume {
@@ -161,9 +145,6 @@
             self.bgBubble.hidden =NO;
         }
     }];
-    if (self.callback) {
-        self.callback(MERGEACTION_MODIFY, self.recordVolume, self.bgVolume, self.offsetTime);
-    }
 }
 
 - (IBAction)cancleMerge:(id)sender {
@@ -195,9 +176,6 @@
 }
 
 - (void)touchesMoved:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
-    if (![[(UITouch *)touches.anyObject view] isEqual:self.sliderView] && ![[(UITouch *)touches.anyObject view] isEqual:self.recordGestureView] && ![[(UITouch *)touches.anyObject view] isEqual:self.bgGestureView] && ![[(UITouch *)touches.anyObject view] isEqual:self]) {
-        return;
-    }
     [super touchesMoved:touches withEvent:event];
     if ([[(UITouch *)touches.anyObject view] isEqual:self.sliderView]) {
         CGPoint tempPoint = [touches.anyObject locationInView:self.sliderView];
@@ -227,7 +205,6 @@
             xPosition = width;
         }
         NSInteger value =  (xPosition / width) * 100;
-         NSLog(@"---- record volume ---- %zd", value);
         [self.recordSlider setValue:value animated:YES];
         [self configRecordVolumeValue:value];
     }
@@ -242,36 +219,91 @@
             xPosition = width;
         }
         NSInteger value =  (xPosition / width) * 100;
-         NSLog(@"---- bg volume ---- %zd", value);
         [self.bgSlider setValue:value animated:YES];
         [self configBgVolumeValue:value];
     }
 }
 
 - (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
+    [super touchesEnded:touches withEvent:event];
     if ([[(UITouch *)touches.anyObject view] isEqual:self.recordGestureView]) {
-        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(500 * NSEC_PER_MSEC)), dispatch_get_main_queue(), ^{
-            self.recordBubble.hidden = YES;
-        });
-        return;
+        CGPoint tempPoint = [touches.anyObject locationInView:self.recordGestureView];
+        CGFloat xPosition = tempPoint.x;
+        CGFloat width = self.recordGestureView.frame.size.width;
+        if (xPosition < 0) {
+            xPosition = 0;
+        }
+        else if (xPosition > width) {
+            xPosition = width;
+        }
+        NSInteger value =  (xPosition / width) * 100;
+        [self refreshRecordValue:value];
     }
     else if ([[(UITouch *)touches.anyObject view] isEqual:self.bgGestureView]) {
-        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(500 * NSEC_PER_MSEC)), dispatch_get_main_queue(), ^{
-            self.bgBubble.hidden = YES;
-        });
-        return;
+        
+        CGPoint tempPoint = [touches.anyObject locationInView:self.bgGestureView];
+        CGFloat xPosition = tempPoint.x;
+        CGFloat width = self.bgGestureView.frame.size.width;
+        if (xPosition < 0) {
+            xPosition = 0;
+        }
+        else if (xPosition > width) {
+            xPosition = width;
+        }
+        NSInteger value =  (xPosition / width) * 100;
+        [self refreshBgValue:value];
+        
     }
-    if (![[(UITouch *)touches.anyObject view] isEqual:self.sliderView] && ![[(UITouch *)touches.anyObject view] isEqual:self]) {
-        return;
+    else if ([[(UITouch *)touches.anyObject view] isEqual:self.sliderView]) {
+        CGPoint tempPoint = [touches.anyObject locationInView:self.sliderView];
+        if (tempPoint.x <= 90.5) {
+            NSInteger offset = (NSInteger)(tempPoint.x - 90.5) / 3 * 10 * 2;
+            if (offset < -MAX_OFFSET) {
+                offset = -MAX_OFFSET;
+            }
+            self.offsetTime = offset;
+        }
+        else if (tempPoint.x > 90.5) {
+            NSInteger offset = (NSInteger)(tempPoint.x - 90.5) / 3 * 10 * 2;
+            if (offset > MAX_OFFSET) {
+                offset = MAX_OFFSET;
+            }
+            self.offsetTime = offset;
+        }
+        NSLog(@"---- offset - %zd", self.offsetTime);
+        if (self.callback) {
+            self.callback(MERGEACTION_DELAY, self.recordVolume, self.bgVolume, self.offsetTime);
+        }
     }
-    [super touchesEnded:touches withEvent:event];
+}
 
+- (void)refreshRecordValue:(NSInteger)value {
+    NSLog(@"---- record volume ---- %zd", value);
+    [self.recordSlider setValue:value animated:NO];
+    [self configRecordVolumeValue:value];
     if (self.callback) {
-        self.callback(MERGEACTION_DELAY, self.recordVolume, self.bgVolume, self.offsetTime);
+        self.callback(MERGEACTION_MODIFY, self.recordVolume, self.bgVolume, self.offsetTime);
     }
-    
+    @weakObj(self);
+    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(500 * NSEC_PER_MSEC)), dispatch_get_main_queue(), ^{
+        @strongObj(self);
+        self.recordBubble.hidden = YES;
+    });
 }
 
+- (void)refreshBgValue:(NSInteger)value {
+    NSLog(@"---- bg volume ---- %zd", value);
+    [self.bgSlider setValue:value animated:NO];
+    [self configBgVolumeValue:value];
+    if (self.callback) {
+        self.callback(MERGEACTION_MODIFY, self.recordVolume, self.bgVolume, self.offsetTime);
+    }
+    @weakObj(self);
+    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(500 * NSEC_PER_MSEC)), dispatch_get_main_queue(), ^{
+        @strongObj(self);
+        self.bgBubble.hidden = YES;
+    });
+}
 - (IBAction)leftAction:(id)sender {
     if (self.offsetTime <= -MAX_OFFSET) {
         self.offsetTime = -MAX_OFFSET;
@@ -300,21 +332,6 @@
     }
 }
 
-//进度条的点击事件
-- (void)actionTapGesture:(UITapGestureRecognizer *)gesture {
-    if (gesture.view == self.recordGestureView) {
-        CGPoint touchLocation = [gesture locationInView:self.recordGestureView];
-        CGFloat value = (self.recordSlider.maximumValue - self.recordSlider.minimumValue) * (touchLocation.x/self.recordSlider.frame.size.width);
-        [self.recordSlider setValue:value animated:YES];
-        [self configRecordVolumeValue:value];
-    }
-    else {
-        CGPoint touchLocation = [gesture locationInView:self.bgGestureView];
-        CGFloat value = (self.bgSlider.maximumValue - self.bgSlider.minimumValue) * (touchLocation.x/self.bgSlider.frame.size.width);
-        [self.bgSlider setValue:value animated:YES];
-        [self configBgVolumeValue:value];
-    }
-}
 
 #pragma mark ------ lazying
 - (void)setOffsetTime:(NSInteger)offsetTime {
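
The new refreshRecordValue: and refreshBgValue: above use @weakObj(self) / @strongObj(self), which are not defined in this diff either. They are presumably the usual weak/strong reference macros; a typical definition under that assumption (the project's actual macros may name the temporary differently):

    // The leading @ at the call site pairs with autoreleasepool{}, so the macros
    // can be written as @weakObj(self); ... @strongObj(self); inside a block.
    #define weakObj(o)   autoreleasepool {} __weak typeof(o) o##Weak = o;
    #define strongObj(o) autoreleasepool {} __strong typeof(o) o = o##Weak;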

+ 8 - 10
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/KSMergeAudioControlView.xib

@@ -44,9 +44,6 @@
                             <color key="minimumTrackTintColor" red="0.1764705882" green="0.78039215689999997" blue="0.66666666669999997" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
                             <color key="maximumTrackTintColor" red="0.90980392156862744" green="0.92549019607843142" blue="0.94509803921568625" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
                             <color key="thumbTintColor" red="0.1764705882" green="0.78039215689999997" blue="0.66666666669999997" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
-                            <connections>
-                                <action selector="changeRecordVolume:" destination="iN0-l3-epB" eventType="valueChanged" id="WcD-V3-Jqt"/>
-                            </connections>
                         </slider>
                         <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="伴奏音量" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="if2-R5-Tul">
                             <rect key="frame" x="26" y="145" width="58" height="17"/>
@@ -59,9 +56,6 @@
                             <color key="minimumTrackTintColor" red="0.1764705882" green="0.78039215689999997" blue="0.66666666669999997" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
                             <color key="maximumTrackTintColor" red="0.90980392156862744" green="0.92549019607843142" blue="0.94509803921568625" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
                             <color key="thumbTintColor" red="0.1764705882" green="0.78039215689999997" blue="0.66666666669999997" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
-                            <connections>
-                                <action selector="changeBgVolume:" destination="iN0-l3-epB" eventType="valueChanged" id="vwy-ge-2aE"/>
-                            </connections>
                         </slider>
                         <view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="DAF-iV-Mgw">
                             <rect key="frame" x="26" y="173" width="247" height="40"/>
@@ -238,8 +232,8 @@
                                 <imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="slider_bubble" translatesAutoresizingMaskIntoConstraints="NO" id="ahK-NK-SMi">
                                     <rect key="frame" x="0.0" y="0.0" width="34" height="34"/>
                                 </imageView>
-                                <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="100" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="oYa-cu-i4y">
-                                    <rect key="frame" x="4" y="4" width="26" height="17"/>
+                                <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="100" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="oYa-cu-i4y">
+                                    <rect key="frame" x="0.0" y="4" width="34" height="17"/>
                                     <fontDescription key="fontDescription" type="system" weight="semibold" pointSize="14"/>
                                     <color key="textColor" red="0.074509803921568626" green="0.078431372549019607" blue="0.082352941176470587" alpha="1" colorSpace="calibratedRGB"/>
                                     <nil key="highlightedColor"/>
@@ -249,10 +243,12 @@
                             <constraints>
                                 <constraint firstItem="ahK-NK-SMi" firstAttribute="leading" secondItem="nyI-H7-gwi" secondAttribute="leading" id="EMl-Ys-Vv1"/>
                                 <constraint firstAttribute="trailing" secondItem="ahK-NK-SMi" secondAttribute="trailing" id="H4j-le-wjD"/>
+                                <constraint firstItem="oYa-cu-i4y" firstAttribute="leading" secondItem="nyI-H7-gwi" secondAttribute="leading" id="MPY-GZ-0K3"/>
                                 <constraint firstAttribute="bottom" secondItem="ahK-NK-SMi" secondAttribute="bottom" id="Xgf-qc-1hG"/>
                                 <constraint firstItem="oYa-cu-i4y" firstAttribute="centerX" secondItem="nyI-H7-gwi" secondAttribute="centerX" id="ejz-pz-k8f"/>
                                 <constraint firstItem="ahK-NK-SMi" firstAttribute="top" secondItem="nyI-H7-gwi" secondAttribute="top" id="eku-h5-9mJ"/>
                                 <constraint firstItem="oYa-cu-i4y" firstAttribute="top" secondItem="nyI-H7-gwi" secondAttribute="top" constant="4" id="lH8-xm-u7O"/>
+                                <constraint firstAttribute="trailing" secondItem="oYa-cu-i4y" secondAttribute="trailing" id="xZP-kT-yES"/>
                             </constraints>
                         </view>
                         <view hidden="YES" contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Z4q-fo-efi">
@@ -261,8 +257,8 @@
                                 <imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="slider_bubble" translatesAutoresizingMaskIntoConstraints="NO" id="qaQ-ih-J2u">
                                     <rect key="frame" x="0.0" y="0.0" width="34" height="34"/>
                                 </imageView>
-                                <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="100" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="TSI-LH-Hac">
-                                    <rect key="frame" x="4" y="4" width="26" height="17"/>
+                                <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="100" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="TSI-LH-Hac">
+                                    <rect key="frame" x="0.0" y="4" width="34" height="17"/>
                                     <fontDescription key="fontDescription" type="system" weight="semibold" pointSize="14"/>
                                     <color key="textColor" red="0.074509803920000006" green="0.078431372550000003" blue="0.08235294118" alpha="1" colorSpace="calibratedRGB"/>
                                     <nil key="highlightedColor"/>
@@ -273,9 +269,11 @@
                                 <constraint firstAttribute="bottom" secondItem="qaQ-ih-J2u" secondAttribute="bottom" id="LPG-Hp-6Nw"/>
                                 <constraint firstItem="qaQ-ih-J2u" firstAttribute="top" secondItem="Z4q-fo-efi" secondAttribute="top" id="STk-ay-vnl"/>
                                 <constraint firstItem="qaQ-ih-J2u" firstAttribute="leading" secondItem="Z4q-fo-efi" secondAttribute="leading" id="V5S-da-cyV"/>
+                                <constraint firstAttribute="trailing" secondItem="TSI-LH-Hac" secondAttribute="trailing" id="ZNT-5e-22G"/>
                                 <constraint firstItem="TSI-LH-Hac" firstAttribute="top" secondItem="Z4q-fo-efi" secondAttribute="top" constant="4" id="jM5-5i-DJN"/>
                                 <constraint firstItem="TSI-LH-Hac" firstAttribute="centerX" secondItem="Z4q-fo-efi" secondAttribute="centerX" id="pzq-an-1hO"/>
                                 <constraint firstAttribute="trailing" secondItem="qaQ-ih-J2u" secondAttribute="trailing" id="vFJ-Hx-o68"/>
+                                <constraint firstItem="TSI-LH-Hac" firstAttribute="leading" secondItem="Z4q-fo-efi" secondAttribute="leading" id="xQH-jo-Qaa"/>
                             </constraints>
                         </view>
                         <view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="DzT-go-dxS">

+ 8 - 0
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/KSPlayerSliderView.m

@@ -77,6 +77,13 @@
     self.progressTap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(actionTapGesture:)];
     self.progressTap.delegate = self;
     [self.progressSlider addGestureRecognizer:self.progressTap];
+    // Fetch UISlider's internal pan gesture recognizer so the tap yields to dragging
+    for (UIGestureRecognizer *gestureRecognizer in self.progressSlider.gestureRecognizers) {
+        if ([gestureRecognizer isKindOfClass:[UIPanGestureRecognizer class]]) {
+            [self.progressTap requireGestureRecognizerToFail:gestureRecognizer];
+            break;
+        }
+    }
 }
 
 
@@ -124,6 +131,7 @@
 
 //视频进度条的点击事件
 - (void)actionTapGesture:(UITapGestureRecognizer *)sender {
+    self.isDragingSlider = NO;
     CGPoint touchLocation = [sender locationInView:self.progressSlider];
     CGFloat value = (self.progressSlider.maximumValue - self.progressSlider.minimumValue) * (touchLocation.x/self.progressSlider.frame.size.width);
     [self.progressSlider setValue:value animated:YES];

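The tap-to-seek fix above hinges on requireGestureRecognizerToFail:, which makes the tap wait for UISlider's built-in pan to fail before firing. A minimal, self-contained sketch of the same pattern (slider and selector names are illustrative, not from this repo); it also adds the minimumValue offset, which the in-tree formula assumes to be 0:

- (void)attachTapToSeekToSlider:(UISlider *)slider {
    UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self
                                                                          action:@selector(sliderTapped:)];
    [slider addGestureRecognizer:tap];
    // Let the slider's own pan win while dragging; the tap fires only if the pan fails.
    for (UIGestureRecognizer *recognizer in slider.gestureRecognizers) {
        if ([recognizer isKindOfClass:[UIPanGestureRecognizer class]]) {
            [tap requireGestureRecognizerToFail:recognizer];
            break;
        }
    }
}

- (void)sliderTapped:(UITapGestureRecognizer *)tap {
    UISlider *slider = (UISlider *)tap.view;
    CGPoint location = [tap locationInView:slider];
    float fraction = location.x / CGRectGetWidth(slider.bounds);
    float value = slider.minimumValue + (slider.maximumValue - slider.minimumValue) * fraction;
    [slider setValue:value animated:YES];
    [slider sendActionsForControlEvents:UIControlEventValueChanged];
}
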
+ 27 - 0
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergeAudioAnimation/KSRealtimeAnalyzer.h

@@ -0,0 +1,27 @@
+//
+//  KSRealtimeAnalyzer.h
+//  KulexiuSchoolStudent
+//
+//  Created by 王智 on 2024/7/29.
+//
+
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVFoundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface KSRealtimeAnalyzer : NSObject
+
+@property (nonatomic, assign) BOOL isAnalise; // whether an analysis pass is currently running
+
+@property (nonatomic, assign) NSInteger frequencyBands; // number of frequency bands
+@property (nonatomic, assign) float startFrequency; // start frequency (Hz)
+@property (nonatomic, assign) float endFrequency; // end frequency (Hz)
+@property (nonatomic, assign) float spectrumSmooth; // spectrum smoothing factor (0–1)
+
+- (instancetype)initWithFFTSize:(NSInteger)fftSize;
+- (NSArray<NSArray<NSNumber *> *> *)analyseWithBuffer:(AVAudioPCMBuffer *)buffer;
+
+@end
+
+NS_ASSUME_NONNULL_END

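A rough usage sketch for the new analyzer, assuming an AVAudioEngine (`engine`) you already own whose mainMixerNode is rendering; the 2048-frame tap size mirrors the BUFFER_SIZE constant introduced in KSMergeEnginePlayer.m below:

#import <AVFoundation/AVFoundation.h>
#import "KSRealtimeAnalyzer.h"

static const AVAudioFrameCount kTapBufferSize = 2048;

KSRealtimeAnalyzer *analyzer = [[KSRealtimeAnalyzer alloc] initWithFFTSize:kTapBufferSize];
AVAudioFormat *format = [engine.mainMixerNode outputFormatForBus:0];
[engine.mainMixerNode installTapOnBus:0
                           bufferSize:kTapBufferSize
                               format:format
                                block:^(AVAudioPCMBuffer *buffer, AVAudioTime *when) {
    if (analyzer.isAnalise) { return; } // drop frames while a pass is still running
    NSArray<NSArray<NSNumber *> *> *spectra = [analyzer analyseWithBuffer:buffer];
    dispatch_async(dispatch_get_main_queue(), ^{
        // hand `spectra` to a KSSpectrumView or a delegate here
    });
}];
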
+ 256 - 0
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergeAudioAnimation/KSRealtimeAnalyzer.m

@@ -0,0 +1,256 @@
+//
+//  KSRealtimeAnalyzer.m
+//  KulexiuSchoolStudent
+//
+//  Created by 王智 on 2024/7/29.
+//
+
+#import "KSRealtimeAnalyzer.h"
+#import <Accelerate/Accelerate.h>
+
+@interface KSRealtimeAnalyzer ()
+
+@property (nonatomic, assign) NSInteger fftSize;
+@property (nonatomic, assign) FFTSetup fftSetup;
+@property (nonatomic, strong) NSArray<NSArray<NSNumber *> *> *bands;
+@property (nonatomic, strong) NSMutableArray<NSMutableArray<NSNumber *> *> *spectrumBuffer;
+
+@property (nonatomic, assign) float *realp;
+@property (nonatomic, assign) float *imagp;
+
+@property (nonatomic, strong) NSArray<NSNumber *> *cachedFrequencyWeights;
+
+@end
+
+@implementation KSRealtimeAnalyzer
+
+- (instancetype)initWithFFTSize:(NSInteger)fftSize {
+    self = [super init];
+    if (self) {
+        _fftSize = fftSize;
+        _fftSetup = vDSP_create_fftsetup(log2(_fftSize), FFT_RADIX2);
+        _realp = (float *)calloc(_fftSize / 2, sizeof(float));
+        _imagp = (float *)calloc(_fftSize / 2, sizeof(float));
+        _frequencyBands = 80;
+        _startFrequency = 100.0;
+        _endFrequency = 18000.0;
+        _spectrumSmooth = 0.5;
+        _spectrumBuffer = [NSMutableArray array];
+        [self setupBands];
+        _cachedFrequencyWeights = [self createFrequencyWeights];
+    }
+    return self;
+}
+
+- (void)dealloc {
+    vDSP_destroy_fftsetup(_fftSetup);
+    free(_realp);
+    free(_imagp);
+    NSLog(@"-------KSRealtimeAnalyzer dealloc");
+}
+
+- (void)setupBands {
+    NSMutableArray *bands = [NSMutableArray array];
+    float n = log2(self.endFrequency / self.startFrequency) / self.frequencyBands;
+    float lowerFrequency = self.startFrequency;
+
+    for (NSInteger i = 0; i < self.frequencyBands; i++) {
+        float upperFrequency = (i == self.frequencyBands - 1) ? self.endFrequency : lowerFrequency * powf(2, n);
+        [bands addObject:@{@"lowerFrequency": @(lowerFrequency), @"upperFrequency": @(upperFrequency)}];
+        lowerFrequency = upperFrequency;
+    }
+    
+    self.bands = [bands copy];
+}
+
+- (NSArray<NSArray<NSNumber *> *> *)analyseWithBuffer:(AVAudioPCMBuffer *)buffer {
+    self.isAnalise = YES;
+    NSArray<NSArray<NSNumber *> *> *channelsAmplitudes = [self fftWithBuffer:buffer];
+    NSArray<NSNumber *> *aWeights = self.cachedFrequencyWeights;
+    if (self.spectrumBuffer.count == 0) {
+        for (NSInteger i = 0; i < channelsAmplitudes.count; i++) {
+            [self.spectrumBuffer addObject:[self emptyArrayOfCount:self.bands.count]];
+        }
+    }
+    
+    NSMutableArray<NSArray<NSNumber *> *> *result = [NSMutableArray arrayWithArray:self.spectrumBuffer];
+    // Amplification applied to each band value
+    CGFloat amplificationFactor = 40.0; // tune to the desired visual gain
+    dispatch_apply(channelsAmplitudes.count, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^(size_t index) {
+        NSArray<NSNumber *> *amplitudes = channelsAmplitudes[index];
+        NSMutableArray<NSNumber *> *weightedAmplitudes = [NSMutableArray arrayWithCapacity:amplitudes.count];
+        
+        for (NSInteger i = 0; i < amplitudes.count; i++) {
+            weightedAmplitudes[i] = @(amplitudes[i].floatValue * aWeights[i].floatValue);
+        }
+        
+        NSMutableArray<NSNumber *> *spectrum = [NSMutableArray arrayWithCapacity:self.bands.count];
+        
+        for (NSDictionary *band in self.bands) {
+            float bandWidth = (float)buffer.format.sampleRate / self.fftSize;
+            [spectrum addObject:@([self findMaxAmplitudeForBand:band inAmplitudes:weightedAmplitudes withBandWidth:bandWidth] * amplificationFactor)];
+        }
+        
+        spectrum = [self highlightWaveform:spectrum];
+        
+        NSMutableArray<NSNumber *> *previousSpectrum = [self.spectrumBuffer[index] mutableCopy];
+        NSMutableArray<NSNumber *> *newSpectrum = [NSMutableArray arrayWithCapacity:spectrum.count];
+        
+        for (NSInteger i = 0; i < spectrum.count; i++) {
+            float smoothedValue = ([previousSpectrum[i] floatValue] * self.spectrumSmooth) + (spectrum[i].floatValue * (1 - self.spectrumSmooth));
+            newSpectrum[i] = @(smoothedValue);
+        }
+        
+        result[index] = [newSpectrum copy];
+        self.spectrumBuffer[index] = [newSpectrum copy];
+    });
+    self.isAnalise = NO;
+    return [result copy];
+}
+
+
+- (NSArray<NSArray<NSNumber *> *> *)fftWithBuffer:(AVAudioPCMBuffer *)buffer {
+    NSMutableArray<NSMutableArray<NSNumber *> *> *amplitudes = [NSMutableArray array];
+    float *const _Nonnull *floatChannelData = buffer.floatChannelData;
+    NSInteger channelCount = buffer.format.channelCount;
+    BOOL isInterleaved = buffer.format.isInterleaved;
+
+    float **tempFloatChannelData = NULL;
+    
+    if (isInterleaved) {
+        NSMutableArray<NSMutableData *> *channelsTemp = [NSMutableArray array];
+        float *interleavedData = floatChannelData[0];
+        for (NSInteger i = 0; i < channelCount; i++) {
+            NSMutableData *channelData = [NSMutableData dataWithLength:self.fftSize * sizeof(float)];
+            float *channelDataPtr = (float *)channelData.mutableBytes;
+            for (NSInteger j = i; j < self.fftSize * channelCount; j += channelCount) {
+                channelDataPtr[j / channelCount] = interleavedData[j];
+            }
+            [channelsTemp addObject:channelData];
+        }
+        
+        tempFloatChannelData = (float **)malloc(channelCount * sizeof(float *));
+        for (NSInteger i = 0; i < channelCount; i++) {
+            tempFloatChannelData[i] = (float *)[channelsTemp[i] mutableBytes];
+        }
+        
+        floatChannelData = (float *const _Nonnull *)tempFloatChannelData;
+    }
+    
+    for (NSInteger i = 0; i < channelCount; i++) {
+        float *channel = floatChannelData[i];
+        NSMutableData *windowData = [NSMutableData dataWithLength:self.fftSize * sizeof(float)];
+        float *window = windowData.mutableBytes;
+        vDSP_hann_window(window, self.fftSize, vDSP_HANN_NORM);
+        vDSP_vmul(channel, 1, window, 1, channel, 1, self.fftSize);
+        
+        DSPSplitComplex fftInOut;
+        fftInOut.realp = self.realp;
+        fftInOut.imagp = self.imagp;
+        
+        vDSP_ctoz((DSPComplex *)channel, 2, &fftInOut, 1, self.fftSize / 2);
+        vDSP_fft_zrip(self.fftSetup, &fftInOut, 1, log2(self.fftSize), FFT_FORWARD);
+        
+        fftInOut.imagp[0] = 0;
+        float fftNormFactor = 1.0 / self.fftSize;
+        vDSP_vsmul(fftInOut.realp, 1, &fftNormFactor, fftInOut.realp, 1, self.fftSize / 2);
+        vDSP_vsmul(fftInOut.imagp, 1, &fftNormFactor, fftInOut.imagp, 1, self.fftSize / 2);
+        
+        NSMutableArray<NSNumber *> *channelAmplitudes = [NSMutableArray arrayWithCapacity:self.fftSize / 2];
+        float *amplitudesArray = (float *)calloc(self.fftSize / 2, sizeof(float));
+        vDSP_zvabs(&fftInOut, 1, amplitudesArray, 1, self.fftSize / 2);
+        amplitudesArray[0] /= 2;
+        
+        for (NSInteger j = 0; j < self.fftSize / 2; j++) {
+            channelAmplitudes[j] = @(amplitudesArray[j]);
+        }
+        
+        [amplitudes addObject:[channelAmplitudes copy]];
+//        free(fftInOut.realp);
+//        free(fftInOut.imagp);
+        free(amplitudesArray);
+    }
+    
+    if (isInterleaved) {
+        free(tempFloatChannelData);
+    }
+    
+    return [amplitudes copy];
+}
+
+- (float)findMaxAmplitudeForBand:(NSDictionary *)band inAmplitudes:(NSArray<NSNumber *> *)amplitudes withBandWidth:(float)bandWidth {
+    NSInteger startIndex = round([band[@"lowerFrequency"] floatValue] / bandWidth);
+    NSInteger endIndex = MIN(round([band[@"upperFrequency"] floatValue] / bandWidth), amplitudes.count - 1);
+    float maxAmplitude = -FLT_MAX;
+    for (NSInteger i = startIndex; i <= endIndex; i++) {
+        float amplitude = [amplitudes[i] floatValue];
+        if (amplitude > maxAmplitude) {
+            maxAmplitude = amplitude;
+        }
+    }
+    return maxAmplitude;
+}
+
+- (NSArray<NSNumber *> *)createFrequencyWeights {
+    float sampleRate = 44100.0;
+    NSInteger fftSize = self.fftSize;
+    float deltaFrequency = sampleRate / fftSize;
+    NSInteger bins = fftSize / 2;
+
+    // Precompute the A-weighting filter constants
+    float c1 = powf(12194.217, 2.0);
+    float c2 = powf(20.598997, 2.0);
+    float c3 = powf(107.65265, 2.0);
+    float c4 = powf(737.86223, 2.0);
+
+    NSMutableArray<NSNumber *> *frequencies = [NSMutableArray arrayWithCapacity:bins];
+    NSMutableArray<NSNumber *> *weights = [NSMutableArray arrayWithCapacity:bins];
+
+    for (NSInteger i = 0; i < bins; i++) {
+        float frequency = i * deltaFrequency;
+        [frequencies addObject:@(frequency)];
+    }
+
+    for (NSInteger i = 0; i < bins; i++) {
+        float frequencySquared = [frequencies[i] floatValue] * [frequencies[i] floatValue];
+
+        float numerator = c1 * frequencySquared * frequencySquared;
+        float denominator = (frequencySquared + c2) * sqrtf((frequencySquared + c3) * (frequencySquared + c4)) * (frequencySquared + c1);
+
+        float weight = 1.2589 * numerator / denominator;
+        [weights addObject:@(weight)];
+    }
+
+    return [weights copy];
+}
+
+- (NSMutableArray<NSNumber *> *)highlightWaveform:(NSArray<NSNumber *> *)spectrum {
+    float weights[] = {1, 2, 3, 5, 3, 2, 1};
+    float totalWeights = 17.0; // 1 + 2 + 3 + 5 + 3 + 2 + 1
+    NSInteger weightsCount = sizeof(weights) / sizeof(weights[0]);
+    NSInteger startIndex = weightsCount / 2;
+    // Copy the leading samples that cannot be smoothed unchanged
+    NSMutableArray<NSNumber *> *averagedSpectrum = [NSMutableArray arrayWithCapacity:spectrum.count];
+    [averagedSpectrum addObjectsFromArray:[spectrum subarrayWithRange:NSMakeRange(0, startIndex)]];
+    // Apply a weighted moving average over the interior of the spectrum
+    for (NSInteger i = startIndex; i < spectrum.count - startIndex; i++) {
+        float result = 0;
+        for (NSInteger j = 0; j < weightsCount; j++) {
+            result += [spectrum[i - startIndex + j] floatValue] * weights[j];
+        }
+        [averagedSpectrum addObject:@(result / totalWeights)];
+    }
+    // Copy the trailing samples unchanged
+    [averagedSpectrum addObjectsFromArray:[spectrum subarrayWithRange:NSMakeRange(spectrum.count - startIndex, startIndex)]];
+    return [averagedSpectrum mutableCopy];
+}
+
+- (NSMutableArray<NSNumber *> *)emptyArrayOfCount:(NSUInteger)count {
+    NSMutableArray<NSNumber *> *array = [NSMutableArray arrayWithCapacity:count];
+    for (NSUInteger i = 0; i < count; i++) {
+        [array addObject:@(0.0)];
+    }
+    return array;
+}
+
+@end

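setupBands spaces the bands logarithmically: with n = log2(endFrequency / startFrequency) / frequencyBands, each upper edge is lower × 2^n, so the 80 default bands cover 100 Hz–18 kHz in equal octave fractions. A standalone check of that spacing under the default values (plain C, not part of the commit):

#include <math.h>
#include <stdio.h>

int main(void) {
    const int bands = 80;
    const float start = 100.0f, end = 18000.0f;
    float n = log2f(end / start) / bands;   // octave fraction per band
    float lower = start;
    for (int i = 0; i < bands; i++) {
        float upper = (i == bands - 1) ? end : lower * powf(2.0f, n);
        printf("band %2d: %8.1f Hz - %8.1f Hz\n", i, lower, upper);
        lower = upper;
    }
    return 0;
}
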
+ 23 - 0
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergeAudioAnimation/KSSpectrumView.h

@@ -0,0 +1,23 @@
+//
+//  KSSpectrumView.h
+//  KulexiuSchoolStudent
+//
+//  Created by 王智 on 2024/7/29.
+//
+
+#import <UIKit/UIKit.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface KSSpectrumView : UIView
+
+@property (nonatomic, assign) CGFloat barWidth;
+@property (nonatomic, assign) CGFloat space;
+
+@property (nonatomic, strong) NSArray<NSArray<NSNumber *> *> *spectra;
+
+- (void)resetLayer;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 109 - 0
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergeAudioAnimation/KSSpectrumView.m

@@ -0,0 +1,109 @@
+//
+//  KSSpectrumView.m
+//  KulexiuSchoolStudent
+//
+//  Created by 王智 on 2024/7/29.
+//
+
+#import "KSSpectrumView.h"
+
+@interface KSSpectrumView ()
+
+@property (nonatomic, strong) CAGradientLayer *combinedGradientLayer;
+
+@end
+
+@implementation KSSpectrumView
+
+- (instancetype)initWithFrame:(CGRect)frame {
+    self = [super initWithFrame:frame];
+    if (self) {
+        [self setupView];
+    }
+    return self;
+}
+
+
+- (instancetype)initWithCoder:(NSCoder *)aDecoder {
+    self = [super initWithCoder:aDecoder];
+    if (self) {
+        [self setupView];
+    }
+    return self;
+}
+
+- (void)setupView {
+    self.barWidth = 3.0;
+    self.space = 1.0;
+
+    self.combinedGradientLayer = [CAGradientLayer layer];
+    self.combinedGradientLayer.colors = @[
+        (__bridge id)HexRGBAlpha(0xffffff, 0.32f).CGColor,
+        (__bridge id)HexRGBAlpha(0xffffff, 0.32f).CGColor
+    ];
+    self.combinedGradientLayer.locations = @[@0.6, @1.0];
+    [self.layer addSublayer:self.combinedGradientLayer];
+}
+
+- (void)resetLayer {
+    self.spectra = [NSArray array];
+}
+
+- (void)setSpectra:(NSArray<NSArray<NSNumber *> *> *)spectra {
+    _spectra = spectra;
+    if (spectra) {
+        NSUInteger spectraCount = [spectra[0] count];
+        NSMutableArray<NSNumber *> *combinedSpectrum = [NSMutableArray arrayWithCapacity:spectraCount];
+        // Take the per-band maximum of the two channels
+        for (NSUInteger i = 0; i < spectraCount; i++) {
+            NSNumber *leftAmplitude = spectra[0][i];
+            NSNumber *rightAmplitude = spectra.count > 1 ? spectra[1][i] : @0;
+            CGFloat maxAmplitude = MAX(leftAmplitude.floatValue, rightAmplitude.floatValue);
+            [combinedSpectrum addObject:@(maxAmplitude)];
+        }
+        
+        CGFloat viewHeight = self.bounds.size.height;
+        CGFloat viewWidth = self.bounds.size.width;
+        CGFloat middleY = viewHeight / 2.0;
+        CGFloat barHeight = (viewHeight) / 2.0;
+        CGFloat cornerRadius = viewWidth / 2.0f;
+        CGFloat xIncrement = self.barWidth + self.space;
+        UIBezierPath *combinedPath = [UIBezierPath bezierPath];
+
+        // Build one rounded bar per band from the combined spectrum
+        for (NSUInteger i = 0; i < spectraCount; i++) {
+            CGFloat x = i * xIncrement + self.space;
+            CGFloat amplitudeValue = combinedSpectrum[i].floatValue;
+            CGFloat height = amplitudeValue * barHeight;
+            CGFloat y = middleY - height/2.0; // Centered vertically
+            
+            CGRect rect = CGRectMake(x, y, self.barWidth, height);
+            UIBezierPath *barPath = [UIBezierPath bezierPathWithRoundedRect:rect cornerRadius:cornerRadius];
+            [combinedPath appendPath:barPath];
+        }
+        
+        CAShapeLayer *combinedMaskLayer = [CAShapeLayer layer];
+        combinedMaskLayer.path = combinedPath.CGPath;
+        self.combinedGradientLayer.frame = CGRectMake(0, 0, viewWidth, viewHeight);
+        self.combinedGradientLayer.mask = combinedMaskLayer;
+
+    }
+}
+
+- (CGFloat)translateAmplitudeToYPosition:(float)amplitude {
+    CGFloat barHeight = amplitude * self.bounds.size.height;
+    return self.bounds.size.height - barHeight;
+}
+
+- (void)dealloc {
+    NSLog(@"---- KSSpectrumView dealloc");
+}
+/*
+// Only override drawRect: if you perform custom drawing.
+// An empty implementation adversely affects performance during animation.
+- (void)drawRect:(CGRect)rect {
+    // Drawing code
+}
+*/
+
+@end

+ 7 - 1
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergePlayer/KSMergeEnginePlayer.h

@@ -6,6 +6,7 @@
 //
 
 #import <Foundation/Foundation.h>
+#import "KSRealtimeAnalyzer.h"
 
 @class KSMergeEnginePlayer;
 
@@ -26,7 +27,10 @@
 
 - (void)enginePlayerIsReadyPlay:(KSMergeEnginePlayer *_Nonnull)player;
 
-- (void)enginePlayerDidError:(KSMergeEnginePlayer *_Nonnull)player error:(NSError *_Nullable)error;
+- (void)enginePlayerDidError:(KSMergeEnginePlayer *_Nonnull)player error:(NSError *_Nonnull)error;
+
+// Spectrum animation: delivers a new frame of spectrum data
+- (void)player:(KSMergeEnginePlayer *_Nonnull)player didGenerateSpectrum:(NSArray<NSArray<NSNumber *> *> *_Nonnull)spectra;
 
 @end
 
@@ -34,6 +38,8 @@ NS_ASSUME_NONNULL_BEGIN
 
 @interface KSMergeEnginePlayer : NSObject
 
+@property (nonatomic, strong) KSRealtimeAnalyzer *analyzer;
+
 @property (nonatomic, weak) id <KSMergeEnginePlayerDelegate>delegate;
 
 @property (nonatomic, assign) BOOL isReady;

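A sketch of how a consumer might adopt the new spectrum callback and forward it to a KSSpectrumView (the spectrumView property is illustrative; addTapBus dispatches back to the main queue before invoking the delegate):

#pragma mark - KSMergeEnginePlayerDelegate

- (void)player:(KSMergeEnginePlayer *)player didGenerateSpectrum:(NSArray<NSArray<NSNumber *> *> *)spectra {
    // Delivered on the main queue; push straight into the animation view.
    self.spectrumView.spectra = spectra;
}

- (void)enginePlayerDidError:(KSMergeEnginePlayer *)player error:(NSError *)error {
    NSLog(@"merge player error: %@", error.localizedDescription);
    [self.spectrumView resetLayer];
}
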
+ 218 - 201
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/MergePlayer/KSMergeEnginePlayer.m

@@ -7,8 +7,11 @@
 
 #import "KSMergeEnginePlayer.h"
 #import <AVFoundation/AVFoundation.h>
+#import <Accelerate/Accelerate.h>
 
+#define READ_FILE_LENGTH (8192)
 
+#define BUFFER_SIZE (2048)
 
 @interface KSMergeEnginePlayer ()
 /** 定时器 */
@@ -34,24 +37,14 @@
 
 @property (nonatomic, strong) AVAudioPCMBuffer *mixBuffer;
 
-@property (nonatomic, strong) AVAudioPCMBuffer *bgBuffer;
-
-@property (nonatomic, strong) AVAudioPCMBuffer *recordBuffer;
-
 @property (nonatomic, assign) AVAudioFramePosition currentFrame;
 
 @property (nonatomic, assign) double sampleRate;
 
-@property (nonatomic, assign) BOOL stopMix; // 是否停止mix
-
-@property (nonatomic, strong) dispatch_semaphore_t mixChangeSemaphore; // mix信号量
-
-@property (nonatomic, assign) BOOL stopChangeVolume; // 是否停止音量修改循环
-
-@property (nonatomic, strong) dispatch_semaphore_t volumeChangeSemaphore;
-
 @property (nonatomic, assign) BOOL isInterrupt; // 是否被打断
 
+@property (nonatomic, assign) NSInteger timeCount;
+
 @end
 
 
@@ -68,8 +61,6 @@
 - (void)configDefault {
     self.recordVolume = 1.0f;
     self.bgVolume = 1.0f;
-    self.mixChangeSemaphore = dispatch_semaphore_create(1); // 初始化信号量
-    self.volumeChangeSemaphore = dispatch_semaphore_create(1); // 初始化信号量,初始值为1
 }
 
 - (void)configEngine {
@@ -111,225 +102,235 @@
         if (error) {
             self.audioEngine = nil;
             // 错误回调
-            [self sendInterruptError:error];
+            if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerDidError:error:)]) {
+                [self.delegate enginePlayerDidError:self error:error];
+            }
         }
     }
 }
 
 - (void)prepareNativeSongWithUrl:(NSURL *)recordAudioUrl bgMusic:(NSURL *)bgMusicUrl {
-    [self loadAuidoFile:recordAudioUrl isBgm:NO];
-    [self loadAuidoFile:bgMusicUrl isBgm:YES];
-    self.sampleRate = self.audioFile.fileFormat.sampleRate;
-    [self configEngine];
-    
-    AVAudioFormat *outputFormat = [self.audioEngine.mainMixerNode outputFormatForBus:0];
-    [self.audioEngine connect:self.nodePlayer to:self.audioEngine.mainMixerNode format:outputFormat];
-    [self startEngine];
-    
-    if (self.audioEngine && self.audioEngine.isRunning) {
-        dispatch_async(self.sourceQueue, ^{
+    dispatch_async(self.sourceQueue, ^{
+        
+        [self loadAuidoFile:recordAudioUrl isBgm:NO];
+        [self loadAuidoFile:bgMusicUrl isBgm:YES];
+        self.sampleRate = self.audioFile.fileFormat.sampleRate;
+        [self configEngine];
+        
+        AVAudioFormat *outputFormat = [self.audioEngine.mainMixerNode outputFormatForBus:0];
+        [self.audioEngine connect:self.nodePlayer to:self.audioEngine.mainMixerNode format:outputFormat];
+//        [self addTapBus];
+        [self startEngine];
+        
+        if (self.audioEngine && self.audioEngine.isRunning) {
             [self prepareBufferFrame];
-        });
-    }
+        }
+    });
 }
 
+- (void)addTapBus {
+    BOOL delegateRespondsToDidGenerateSpectrum = [self.delegate respondsToSelector:@selector(player:didGenerateSpectrum:)];
+    self.analyzer = [[KSRealtimeAnalyzer alloc] initWithFFTSize:BUFFER_SIZE];
+    AVAudioFormat *outputFormat = [self.audioEngine.mainMixerNode outputFormatForBus:0];
+    @weakObj(self);
+    [self.audioEngine.mainMixerNode removeTapOnBus:0];
+    [self.audioEngine.mainMixerNode installTapOnBus:0 bufferSize:BUFFER_SIZE format:outputFormat block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
+        @strongObj(self);
+        if (!self || !self.nodePlayer.isPlaying) {
+            return;
+        }
+        // Submit the spectrum analysis to a background queue
+        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
+            if (self.analyzer.isAnalise == NO) {
+                // Analyse the audio buffer
+                NSArray<NSArray<NSNumber *> *> *spectra = [self.analyzer analyseWithBuffer:buffer];
+
+                // Hop back to the main thread to update the UI / notify the delegate
+                dispatch_async(dispatch_get_main_queue(), ^{
+                    if (delegateRespondsToDidGenerateSpectrum) {
+                        [self.delegate player:self didGenerateSpectrum:spectra];
+                    }
+                });
+            }
+        });
+    }];
+}
 
 - (void)loadAuidoFile:(NSURL *)audioFileUrl isBgm:(BOOL)isBgm {
-    dispatch_sync(self.sourceQueue, ^{
-        NSError *error = nil;
-        AVAudioFile *audioFile = nil;
-        AVAudioFormat *audioFormat = nil;
-        @try {
-            audioFile = [[AVAudioFile alloc] initForReading:audioFileUrl error:&error];
-            audioFormat = audioFile.processingFormat;
-            
-        } @catch (NSException *exception) {
-            audioFile = nil;
-            audioFormat = nil;
-        } @finally {
-            if (error) {
-                // 错误回调
+    NSError *error = nil;
+    AVAudioFile *audioFile = nil;
+    AVAudioFormat *audioFormat = nil;
+    @try {
+        audioFile = [[AVAudioFile alloc] initForReading:audioFileUrl error:&error];
+        audioFormat = audioFile.processingFormat;
+        
+    } @catch (NSException *exception) {
+        audioFile = nil;
+        audioFormat = nil;
+    } @finally {
+        if (error) {
+            // 错误回调
+        }
+        else { // 加载成功
+            if (isBgm) {
+                self.bgAudioFile = audioFile;
+                self.bgAudioFormat = audioFormat;
             }
-            else { // 加载成功
-                if (isBgm) {
-                    self.bgAudioFile = audioFile;
-                    self.bgAudioFormat = audioFormat;
-                }
-                else {
-                    self.audioFile = audioFile;
-                    self.audioFormat = audioFormat;
-                }
+            else {
+                self.audioFile = audioFile;
+                self.audioFormat = audioFormat;
             }
         }
-    });
+    }
 }
 
-- (void)prepareBufferFrame {
+- (void)resetMixBuffer {
     AVAudioFrameCount minFrameCount = (AVAudioFrameCount)MIN(self.bgAudioFile.length, self.audioFile.length);
     // mixBuffer
     AVAudioFormat *outputFormat = [[AVAudioFormat alloc] initStandardFormatWithSampleRate:self.bgAudioFormat.sampleRate channels:2];
     self.mixBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:outputFormat frameCapacity:minFrameCount];
     self.mixBuffer.frameLength = minFrameCount;
-    
-    self.bgBuffer = [self loadAudioSegment:self.bgAudioFile startFrame:0 frameCount:minFrameCount];
-    self.recordBuffer = [self loadAudioSegment:self.audioFile startFrame:0 frameCount:minFrameCount];
-    
-    if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerIsReadyPlay:)]) {
-        self.isReady = YES;
-        [self.delegate enginePlayerIsReadyPlay:self];
-    }
 }
 
-- (AVAudioPCMBuffer *)loadAudioSegment:(AVAudioFile *)audioFile startFrame:(AVAudioFramePosition)startFrame frameCount:(AVAudioFrameCount)frameCount {
-    AVAudioFormat *audioFromat = audioFile.processingFormat;
-    AVAudioFrameCount frameToRead = (AVAudioFrameCount)MIN(frameCount, (AVAudioFrameCount)audioFile.length - startFrame);
-    if (startFrame > audioFile.length) {
-        return nil;
-    }
-    AVAudioPCMBuffer *buffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:audioFromat frameCapacity:frameToRead];
-    buffer.frameLength = frameToRead;
-    
-    audioFile.framePosition = startFrame;
-    if (frameToRead > 0) {
-        @try {
-            [audioFile readIntoBuffer:buffer frameCount:frameToRead error:nil];
-        } @catch (NSException *exception) {
-            
-        } @finally {
-            
+- (void)prepareBufferFrame {
+    [self resetMixBuffer];
+    dispatch_main_async_safe(^{
+        if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayerIsReadyPlay:)]) {
+            self.isReady = YES;
+            [self.delegate enginePlayerIsReadyPlay:self];
         }
-    }
-    return buffer;
+    });
 }
 
-
-- (void)mixBuffers:(AVAudioPCMBuffer *)bgBuffer bgBufferVolume:(float)bgBufferVolume withRecordBuffer:(AVAudioPCMBuffer *)recordBuffer recordVolume:(float)recordVolume offset:(NSInteger)offsetTime startPosition:(AVAudioFrameCount)startPosition {
-    if (!bgBuffer && !recordBuffer) {
-        return;
-    }
-    NSLog(@"------- start");
-    
-    AVAudioFrameCount minFrameCount = MIN(bgBuffer.frameLength, recordBuffer.frameLength);
-    AVAudioFrameCount offsetFrame = labs(offsetTime)/1000.0 * recordBuffer.format.sampleRate;
+// Pre-load the next chunk of audio and mix it into the mix buffer
+- (void)prepareBuffer:(AVAudioFramePosition)startPosition offset:(NSInteger)offsetTime mixStart:(AVAudioFramePosition)mixStartPosition {
     
-    float *bgLeftChannel = bgBuffer.floatChannelData[0];
-    float *bgRightChannel = bgBuffer.floatChannelData[1];
-    // 录音文件未单声道
-    float *recordLeftChannel = recordBuffer.floatChannelData[0];
-    
-    float *mixLeftChannel = self.mixBuffer.floatChannelData[0];
-    float *mixRightChannel = self.mixBuffer.floatChannelData[1];
-    
-    for (int frame = 0; frame < minFrameCount; frame++) {
-        if (self.stopMix) {
-            NSLog(@"------- stop mix");
-            dispatch_semaphore_signal(self.mixChangeSemaphore); // 释放信号量
+    dispatch_async(self.sourceQueue, ^{
+        if (!self.bgAudioFile || !self.audioFile) {
             return;
         }
-        int bgFrame = frame+startPosition;
-        float leftChannel = (bgFrame < bgBuffer.frameLength) ? bgLeftChannel[bgFrame] : 0;
-        float rightChannel = (bgFrame < bgBuffer.frameLength) ? bgRightChannel[bgFrame] : 0;
-        
-        int recordFrame = (offsetTime < 0) ? (bgFrame - offsetFrame) : (bgFrame + offsetFrame);
-        
-        float recordData = (recordFrame >= 0 && recordFrame < recordBuffer.frameLength) ? recordLeftChannel[recordFrame] : 0;
-        
-        
-        float mixLeftData = [self mixChannelData:leftChannel bgVolume:bgBufferVolume recordData:recordData recordVolume:recordVolume];
-        float mixRightData = [self mixChannelData:rightChannel bgVolume:bgBufferVolume recordData:recordData recordVolume:recordVolume];
+        AVAudioFramePosition minFrameCount = (AVAudioFramePosition)MIN(self.bgAudioFile.length, self.audioFile.length);
+        AVAudioFrameCount offsetFrame = labs(offsetTime)/1000.0 * self.audioFile.processingFormat.sampleRate;
+        if (minFrameCount <= startPosition) {
+            return;
+        }
+        AVAudioFrameCount frameToRead = minFrameCount - startPosition > READ_FILE_LENGTH ? READ_FILE_LENGTH : (AVAudioFrameCount)(minFrameCount - startPosition);
         
-        mixLeftChannel[frame] = MAX(-1.0, MIN(1.0, mixLeftData));
-        mixRightChannel[frame] = MAX(-1.0, MIN(1.0, mixRightData));
-    }
-    NSLog(@"---------finish");
-    
-}
-
-- (float)mixChannelData:(float)bgData bgVolume:(float)bgVolume recordData:(float)recordData recordVolume:(float)recordVolume {
-    return (bgData * bgVolume + recordData * recordVolume) / 2;
-}
-
-- (void)changeVolume:(float)bgVolume recordVolume:(float)recordVolume {
-    
-    NSLog(@"bg volume ---- %f,  record volume ---- %f", bgVolume, recordVolume);
-    self.bgVolume = bgVolume;
-    self.recordVolume = recordVolume;
-    if (self.bgBuffer && self.recordBuffer) {
-        self.stopChangeVolume = YES;
-        // 停止上一次修改音量
-        dispatch_async(self.sourceQueue, ^{
-            // 等待上一次的操作完成
-            dispatch_semaphore_wait(self.volumeChangeSemaphore, DISPATCH_TIME_FOREVER);
-            self.stopChangeVolume = NO;
-            // 开始新的音量修改操作
-            AVAudioFramePosition startFrame = self.currentFrame;
-            NSLog(@"----- current frame -----%lld", startFrame);
-            [self modifyMixBuffer:self.bgBuffer bgBufferVolume:bgVolume withRecordBuffer:self.recordBuffer recordVolume:recordVolume offset:self.offsetTime startPosition:startFrame tagIndex:0];
-            // 释放信号量,标记音量修改操作完成
-            dispatch_semaphore_signal(self.volumeChangeSemaphore);
-        });
-    }
-}
-
-
-- (void)modifyMixBuffer:(AVAudioPCMBuffer *)bgBuffer bgBufferVolume:(float)bgBufferVolume withRecordBuffer:(AVAudioPCMBuffer *)recordBuffer recordVolume:(float)recordVolume offset:(NSInteger)offsetTime startPosition:(AVAudioFramePosition)startFrame tagIndex:(NSInteger)tagIndex {
-    
-    AVAudioFrameCount minFrameCount = MIN(bgBuffer.frameLength, recordBuffer.frameLength);
-    AVAudioFrameCount offsetFrame = labs(offsetTime)/1000.0 * recordBuffer.format.sampleRate;
-    
-    float *bgLeftChannel = bgBuffer.floatChannelData[0];
-    float *bgRightChannel = bgBuffer.floatChannelData[1];
-    // 录音文件未单声道
-    float *recordLeftChannel = recordBuffer.floatChannelData[0];
-    
-    float *mixLeftChannel = self.mixBuffer.floatChannelData[0];
-    float *mixRightChannel = self.mixBuffer.floatChannelData[1];
-    
-    // 先处理后续播放的buffer
-    NSLog(@"------- volume change start");
-    for (int frame = (int)startFrame; frame < minFrameCount; frame++) {
-        if (self.stopChangeVolume) {
-            NSLog(@"------- stop volume change");
-            dispatch_semaphore_signal(self.volumeChangeSemaphore); // 释放信号量
+        self.bgAudioFile.framePosition = startPosition;
+        AVAudioPCMBuffer *bgBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.bgAudioFile.processingFormat frameCapacity:frameToRead];
+        bgBuffer.frameLength = frameToRead;
+        BOOL readSuccess = [self.bgAudioFile readIntoBuffer:bgBuffer frameCount:frameToRead error:nil];
+        if (!readSuccess) {
             return;
         }
+        AVAudioPCMBuffer *recordBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.audioFile.processingFormat frameCapacity:frameToRead];
+        recordBuffer.frameLength = frameToRead;
         
-        float leftChannel = bgLeftChannel[frame];
-        float rightChannel = bgRightChannel[frame];
+        if (offsetTime >= 0) { // positive offset: the recording is shifted earlier
+            self.audioFile.framePosition = startPosition + offsetFrame;
+            AVAudioFrameCount audioReadFrame = frameToRead;
+            if (startPosition + offsetFrame + frameToRead > minFrameCount) { // clamp if the read would run past the end
+                audioReadFrame = (AVAudioFrameCount)(minFrameCount - startPosition - offsetFrame);
+            }
+            if (audioReadFrame <= frameToRead) {
+                BOOL isSuccess = [self.audioFile readIntoBuffer:recordBuffer frameCount:audioReadFrame error:nil];
+                if (!isSuccess) {
+                    return;
+                }
+            }
+        }
+        else { // negative offset: the recording is shifted later
+            AVAudioFramePosition audioFramePosition = startPosition - offsetFrame;
+            if (audioFramePosition > 0) {
+                self.audioFile.framePosition = audioFramePosition;
+                AVAudioFrameCount audioReadFrame = frameToRead;
+                if (audioFramePosition + frameToRead > minFrameCount) { // clamp if the read would run past the end
+                    audioReadFrame = (AVAudioFrameCount)(minFrameCount - audioFramePosition);
+                }
+                // AVAudioFrameCount is an unsigned 32-bit integer (uint32_t)
+                if (audioReadFrame <= frameToRead) {
+                    BOOL isSuccess = [self.audioFile readIntoBuffer:recordBuffer frameCount:audioReadFrame error:nil];
+                    if (!isSuccess) {
+                        return;
+                    }
+                }
+            }
+            else {
+                self.audioFile.framePosition = 0;
+                // Only part of the recording falls inside this chunk; read just that slice
+                if (offsetFrame - startPosition < frameToRead) {
+                    AVAudioFrameCount readCount = (AVAudioFrameCount)(offsetFrame - startPosition);
+//                    NSLog(@"----need readCount --%u", readCount);
+                    AVAudioPCMBuffer *tempBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:self.audioFile.processingFormat frameCapacity:readCount];
+                    tempBuffer.frameLength = readCount;
+                    BOOL isSuccess = [self.audioFile readIntoBuffer:tempBuffer error:nil];
+                    if (!isSuccess) {
+                        return;
+                    }
+                    float *tempData = tempBuffer.floatChannelData[0];
+                    float *recordData = recordBuffer.floatChannelData[0];
+                    // Copy the samples into the tail of recordBuffer
+                    AVAudioFrameCount startFrame = frameToRead - readCount;
+                    for (AVAudioFrameCount i = 0; i < readCount; i++) {
+                        recordData[startFrame + i] = tempData[i];
+                    }
+                }
+            }
+        }
         
-        int recordFrame = (offsetTime < 0) ? (frame - offsetFrame) : (frame + offsetFrame);
-        float recordData = (recordFrame >= 0 && recordFrame < recordBuffer.frameLength) ? recordLeftChannel[recordFrame] : 0;
+        float *bgLeftChannel = bgBuffer.floatChannelData[0];
+        float *bgRightChannel = bgBuffer.floatChannelData[1];
+        if (bgBuffer.format.channelCount == 1) {
+            bgRightChannel = bgBuffer.floatChannelData[0];
+        }
+        // The recording file is mono
+        float *recordLeftChannel = recordBuffer.floatChannelData[0];
         
-        float mixLeftData = [self mixChannelData:leftChannel bgVolume:bgBufferVolume recordData:recordData recordVolume:recordVolume];
-        float mixRightData = [self mixChannelData:rightChannel bgVolume:bgBufferVolume recordData:recordData recordVolume:recordVolume];
+        float *mixLeftChannel = self.mixBuffer.floatChannelData[0];
+        float *mixRightChannel = self.mixBuffer.floatChannelData[1];
         
-        mixLeftChannel[frame-self.startPosition] = MAX(-1.0, MIN(1.0, mixLeftData));
-        mixRightChannel[frame-self.startPosition] = MAX(-1.0, MIN(1.0, mixRightData));
-    }
-    NSLog(@"------- volume change end");
+        for (int frame = 0; frame < frameToRead; frame++) {
+            
+            AVAudioFramePosition mixIndex = frame + startPosition - mixStartPosition;
+            float leftChannel = (frame < bgBuffer.frameLength) ? bgLeftChannel[frame] : 0;
+            float rightChannel = (frame < bgBuffer.frameLength) ? bgRightChannel[frame] : 0;
+            
+            float recordData = (frame < recordBuffer.frameLength) ? recordLeftChannel[frame] : 0;
+            
+            float mixLeftData = [self mixChannelData:leftChannel bgVolume:self.bgVolume recordData:recordData recordVolume:self.recordVolume];
+            float mixRightData = [self mixChannelData:rightChannel bgVolume:self.bgVolume recordData:recordData recordVolume:self.recordVolume];
+            
+            // Guard against writing past the end of the mix buffer
+            if (mixIndex >= 0 && mixIndex < self.mixBuffer.frameLength) {
+                mixLeftChannel[mixIndex] = fminf(fmaxf(mixLeftData, -1.0f), 1.0f);
+                mixRightChannel[mixIndex] = fminf(fmaxf(mixRightData, -1.0f), 1.0f);
+            }
+        }
+    });
 }
 
 - (void)scheduleBufferFromPosition:(AVAudioFramePosition)startPosition {
-    
-    self.stopMix = YES;
+    [self resetMixBuffer];
     self.startPosition = startPosition;
-    dispatch_async(self.sourceQueue, ^{
-        // 等待上一次的操作完成
-        dispatch_semaphore_wait(self.mixChangeSemaphore, DISPATCH_TIME_FOREVER);
-        self.stopMix = NO;
+    [self prepareBuffer:startPosition offset:self.offsetTime mixStart:startPosition];
+    // Schedule the mixed buffer on the player node
+    [self.nodePlayer scheduleBuffer:self.mixBuffer atTime:nil options:AVAudioPlayerNodeBufferInterruptsAtLoop completionHandler:^{
         
-        [self mixBuffers:self.bgBuffer bgBufferVolume:self.bgVolume withRecordBuffer:self.recordBuffer recordVolume:self.recordVolume offset:self.offsetTime startPosition:(AVAudioFrameCount)startPosition];
-        // 释放信号量,标记修改操作完成
-        dispatch_semaphore_signal(self.mixChangeSemaphore);
-        // 加载缓冲区
-        [self.nodePlayer scheduleBuffer:self.mixBuffer atTime:nil options:AVAudioPlayerNodeBufferInterruptsAtLoop completionHandler:^{
-            
-        }];
-    });
+    }];
 }
 
 
 
+- (float)mixChannelData:(float)bgData bgVolume:(float)bgVolume recordData:(float)recordData recordVolume:(float)recordVolume {
+    return (bgData * bgVolume + recordData * recordVolume) / 2;
+}
+
+- (void)changeVolume:(float)bgVolume recordVolume:(float)recordVolume {
+//    NSLog(@"bg volume ---- %f,  record volume ---- %f", bgVolume, recordVolume);
+    self.bgVolume = bgVolume;
+    self.recordVolume = recordVolume;
+}
+
 // 打断处理
 - (void)handleInterruption:(NSNotification *)notification {
     NSDictionary *info = notification.userInfo;
@@ -386,6 +387,7 @@
         [self.delegate enginePlayerDidError:self error:error];
     }
 }
+
 #pragma mark ------ play action
 
 - (void)changeRecordDelay:(NSInteger)delayMs {
@@ -406,8 +408,7 @@
 }
 
 - (void)stopPlay {
-    self.stopMix = YES;
-    self.stopChangeVolume = YES;
+
     if (self.nodePlayer.isPlaying) {
         [self.nodePlayer stop];
     }
@@ -437,9 +438,6 @@
         }
     }
     
-    // 停止修改音量循环
-    self.stopChangeVolume = YES;
-    
     AVAudioFramePosition startFrame = startTime / 1000.0 * self.audioFormat.sampleRate;
     // 跳转进度
     self.currentFrame = startFrame;
@@ -459,15 +457,17 @@
 }
 
 - (void)freePlayer {
-    
-    if (self.nodePlayer.isPlaying) {
-        [self stopPlay];
-    }
+    [self stopPlay];
     [self.audioEngine stop];
+    // Stop and release the timer
+    if (_timer) {
+        [_timer invalidate];
+        _timer = nil;
+    }
 }
 
 - (void)startTimer {
-    
+    self.timeCount = 0;
     [self.timer setFireDate:[NSDate distantPast]];
 }
 
@@ -488,7 +488,7 @@
     
     if (!_timer) {
         __weak typeof(self)weakSelf = self;
-        _timer = [NSTimer scheduledTimerWithTimeInterval:0.1 repeats:YES block:^(NSTimer * _Nonnull timer) {
+        _timer = [NSTimer scheduledTimerWithTimeInterval:0.01 repeats:YES block:^(NSTimer * _Nonnull timer) {
             [weakSelf timeFunction];
         }];
         [[NSRunLoop currentRunLoop] addTimer:_timer forMode:NSRunLoopCommonModes];
@@ -503,12 +503,18 @@
     float progress = currentTime/self.totalDuration;
     NSDate *date = [NSDate date];
     NSTimeInterval inteveral = [date timeIntervalSince1970];
-    if (currentTime > self.totalDuration) {
+    if (currentTime >= self.totalDuration) {
         if (self.delegate && [self.delegate respondsToSelector:@selector(enginePlayFinished:)]) {
             [self.delegate enginePlayFinished:self];
         }
     }
     else {
+        // The timer fires every 10 ms; schedule a new buffer chunk every 100 ms
+        if (self.timeCount % 10 == 0) {
+            [self scheduleMixBuffer];
+        }
+        self.timeCount++;
+
         if (self.delegate && [self.delegate respondsToSelector:@selector(updatePlayProgress:andTotalTime:andProgress:currentInterval:inPlayer:)]) {
             [self.delegate updatePlayProgress:currentTime andTotalTime:self.totalDuration andProgress:progress currentInterval:inteveral*1000 inPlayer:self];
         }
@@ -516,6 +522,11 @@
     
 }
 
+- (void)scheduleMixBuffer {
+    if (self.nodePlayer.isPlaying) {
+        [self prepareBuffer:self.currentFrame offset:self.offsetTime mixStart:self.startPosition];
+    }
+}
 
 - (NSTimeInterval)getCurrentPlayTime {
     AVAudioTime *nodeTime = [self.nodePlayer lastRenderTime];
@@ -557,4 +568,10 @@
     }
     return NO;
 }
+
+- (void)dealloc {
+    NSLog(@"---- KSMergeEnginePlayer dealloc");
+    [[NSNotificationCenter defaultCenter] removeObserver:self];
+}
+
 @end

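The chunked mixing above runs a per-frame loop over mixChannelData:. The same math — out = clamp((bg·bgVolume + rec·recVolume) / 2, −1, 1) — also vectorizes with Accelerate if that loop ever shows up in profiling; a sketch (not part of this commit) over raw float channel pointers of equal length n:

#import <Accelerate/Accelerate.h>

static void KSMixChannel(const float *bg, const float *rec, float *out,
                         float bgVolume, float recVolume, vDSP_Length n) {
    float halfBg = bgVolume * 0.5f;
    float halfRec = recVolume * 0.5f;
    float lo = -1.0f, hi = 1.0f;
    vDSP_vsmul(bg, 1, &halfBg, out, 1, n);           // out = bg * bgVolume / 2
    vDSP_vsma(rec, 1, &halfRec, out, 1, out, 1, n);  // out += rec * recVolume / 2
    vDSP_vclip(out, 1, &lo, &hi, out, 1, n);         // clamp to [-1, 1]
}
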
+ 8 - 2
KulexiuForTeacher/KulexiuForTeacher/Common/MediaMerge/AudioMerge/VideoPlayerView/KSVideoPlayerView.m

@@ -9,7 +9,6 @@
 #import <AVFoundation/AVFoundation.h>
 #import <CloudAccompanyLibrary/AVPlayer+KSSeekSmoothly.h>
 
-
 @interface KSVideoPlayerView ()
 
 @property (nonatomic, strong) AVPlayer *videoPlayer;
@@ -24,6 +23,8 @@
 
 @property (nonatomic, assign) BOOL hasFreeObserver;
 
+@property (nonatomic, assign) BOOL needResume; // whether playback should resume after the seek
+
 @end
 
 @implementation KSVideoPlayerView
@@ -116,6 +117,7 @@
 }
 
 - (void)puasePlay {
+    self.needResume = NO;
     if (_isPlaying) {
         _isPlaying = NO;
         [_videoPlayer pause];
@@ -157,13 +159,17 @@
 - (void)seekOffsetTime:(NSInteger)offsetTime {
     CMTime newTime = CMTimeMake(offsetTime, 1000);
     CMTime toleranceTime = CMTimeMake(1, 1000);
+    if (self.isPlaying) {
+        self.needResume = YES;
+    }
     [self.videoPlayer pause];
     @weakObj(self);
     [self.videoPlayer ss_seekToTime:newTime toleranceBefore:toleranceTime toleranceAfter:toleranceTime completionHandler:^(BOOL finished) {
         @strongObj(self);
-        if (self.isPlaying) {
+        if (self.needResume) {
             [self.videoPlayer play];
         }
+        self.needResume = NO;
     }];
 }
 

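The new needResume flag records the playback state before a seek so that a pause issued mid-seek is not undone by the completion handler. The same pattern against stock AVPlayer (ss_seekToTime: is the project's category; the player property here is illustrative):

- (void)seekToMilliseconds:(NSInteger)ms {
    CMTime target = CMTimeMake(ms, 1000);
    CMTime tolerance = CMTimeMake(1, 1000);
    BOOL wasPlaying = (self.player.timeControlStatus == AVPlayerTimeControlStatusPlaying);
    [self.player pause];
    __weak typeof(self) weakSelf = self;
    [self.player seekToTime:target
            toleranceBefore:tolerance
             toleranceAfter:tolerance
          completionHandler:^(BOOL finished) {
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (finished && wasPlaying) {
            [strongSelf.player play];
        }
    }];
}
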
+ 33 - 17
KulexiuForTeacher/KulexiuForTeacher/Module/Chat/Controller/TXCustom/KSTXBaseChatViewController.m

@@ -170,25 +170,41 @@ static UIView *gCustomTopView;
 
 
 - (void)showMusic:(NSString *)songId {
-    PREMISSIONTYPE micEnable = [RecordCheckManager checkMicPermissionAvaiable];
-    PREMISSIONTYPE cameraEnable = [RecordCheckManager checkCameraPremissionAvaiable];
-    if (micEnable == PREMISSIONTYPE_YES && cameraEnable == PREMISSIONTYPE_YES) {
-        KSAccompanyWebViewController *detailCtrl = [[KSAccompanyWebViewController alloc] init];
-        detailCtrl.url = [NSString stringWithFormat:@"%@/accompany?id=%@",hostURL, songId];
-        detailCtrl.parmDic = @{@"isOpenLight" : @(YES), @"orientation" : @(0),@"isHideTitle" : @(YES)};
-        [self.navigationController pushViewController:detailCtrl animated:YES];
-    }
-    else {
-        if (micEnable == PREMISSIONTYPE_NO && cameraEnable == PREMISSIONTYPE_NO) { // 如果麦克风权限和摄像头权限都没有
-            [self showAlertWithMessage:@"请开启相机和麦克风访问权限" type:CHECKDEVICETYPE_BOTH];
-        }
-        else if (micEnable == PREMISSIONTYPE_NO) { // 如果没有麦克风权限
-            [self showAlertWithMessage:@"请开启麦克风访问权限" type:CHECKDEVICETYPE_MIC];
+    
+    [RecordCheckManager checkCameraPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        [self afterCheckCameraCheckMic:type songId:songId];
+    }];
+}
+
+
+- (void)afterCheckCameraCheckMic:(PREMISSIONTYPE)cameraType songId:(NSString *)songId {
+    
+    [RecordCheckManager checkMicPermissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        if (type == PREMISSIONTYPE_YES && cameraType == PREMISSIONTYPE_YES) {
+            KSAccompanyWebViewController *detailCtrl = [[KSAccompanyWebViewController alloc] init];
+            detailCtrl.url = [NSString stringWithFormat:@"%@/accompany?id=%@",hostURL, songId];
+            detailCtrl.parmDic = @{@"isOpenLight" : @(YES), @"orientation" : @(0),@"isHideTitle" : @(YES)};
+            [self.navigationController pushViewController:detailCtrl animated:YES];
         }
-        else if (cameraEnable == PREMISSIONTYPE_NO) { // 如果没有摄像头权限
-            [self showAlertWithMessage:@"请开启相机访问权限" type:CHECKDEVICETYPE_CAMREA];
+        else {
+            
+            NSString *content = @"";
+            CHECKDEVICETYPE checkType = CHECKDEVICETYPE_BOTH;
+            if (cameraType == PREMISSIONTYPE_NO && type == PREMISSIONTYPE_NO) {
+                content = @"请开启相机和麦克风访问权限";
+                checkType = CHECKDEVICETYPE_BOTH;
+            }
+            else if (cameraType == PREMISSIONTYPE_NO && type == PREMISSIONTYPE_YES) {
+                content =  @"请开启相机访问权限";
+                checkType = CHECKDEVICETYPE_CAMREA;
+            }
+            else if (cameraType == PREMISSIONTYPE_YES && type == PREMISSIONTYPE_NO) {
+                content = @"请开启麦克风访问权限";
+                checkType = CHECKDEVICETYPE_MIC;
+            }
+            [self showAlertWithMessage:content type:checkType];
         }
-    }
+    }];
 }
 
 - (void)showAlertWithMessage:(NSString *)message type:(CHECKDEVICETYPE)deviceType {

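The synchronous permission checks are replaced by callback-based ones so that a first-launch system prompt resolves before navigation continues. RecordCheckManager is project code, but such a callback typically wraps AVCaptureDevice; a sketch of what the camera variant could look like (PREMISSIONTYPE values assumed to match the project's enum):

+ (void)checkCameraPremissionAvaiableCallback:(void (^)(PREMISSIONTYPE type))callback {
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusNotDetermined) {
        // First request: show the system prompt, then report the result on the main queue.
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                callback(granted ? PREMISSIONTYPE_YES : PREMISSIONTYPE_NO);
            });
        }];
    }
    else {
        callback(status == AVAuthorizationStatusAuthorized ? PREMISSIONTYPE_YES : PREMISSIONTYPE_NO);
    }
}
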
+ 2 - 2
KulexiuForTeacher/KulexiuForTeacher/Module/Chat/Group/View/ChatComplainBodyView.m

@@ -54,8 +54,8 @@
 }
 
 -(BOOL)textView:(UITextView *)textView shouldChangeTextInRange:(NSRange)range replacementText:(NSString *)text {
-
-    if ([text isEqualToString:@""]) {
+    if ([text isEqualToString:@"\n"]) {
+        [self endEditing:YES];
         return YES;
     }
     

+ 29 - 16
KulexiuForTeacher/KulexiuForTeacher/Module/Course/AccompanyCourse/Controller/AccompanyDetailViewController.m

@@ -480,25 +480,38 @@
 
 // 加入房间
 - (void)joinClassRoom {
+    
     // 加入房间前判断摄像头和麦克风逻辑
-    PREMISSIONTYPE micEnable = [RecordCheckManager checkMicPermissionAvaiable];
-    PREMISSIONTYPE cameraEnable = [RecordCheckManager checkCameraPremissionAvaiable];
-    if (micEnable == PREMISSIONTYPE_YES && cameraEnable == PREMISSIONTYPE_YES) {
-        // 进入教室
-        // 判断是否进行课前检测
-        [self.classManager joinRoomWithId:self.courseId subjectName:self.homeworkModel.subjectName classEndTime:self.homeworkModel.endTime inViewController:self];
-    }
-    else {
-        if (micEnable == PREMISSIONTYPE_NO && cameraEnable == PREMISSIONTYPE_NO) { // 如果麦克风权限和摄像头权限都没有
-            [self showAlertWithMessage:@"请开启相机和麦克风访问权限" type:CHECKDEVICETYPE_BOTH];
-        }
-        else if (micEnable == PREMISSIONTYPE_NO) { // 如果没有麦克风权限
-            [self showAlertWithMessage:@"请开启麦克风访问权限" type:CHECKDEVICETYPE_MIC];
+    [RecordCheckManager checkCameraPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        [self afterCheckCameraCheckMic:type];
+    }];
+}
+
+- (void)afterCheckCameraCheckMic:(PREMISSIONTYPE)cameraType {
+    [RecordCheckManager checkMicPermissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        if (type == PREMISSIONTYPE_YES && cameraType == PREMISSIONTYPE_YES) {
+            // 判断是否进行课前检测
+            [self.classManager joinRoomWithId:self.courseId subjectName:self.homeworkModel.subjectName classEndTime:self.homeworkModel.endTime inViewController:self];
         }
-        else if (cameraEnable == PREMISSIONTYPE_NO) { // 如果没有摄像头权限
-            [self showAlertWithMessage:@"请开启相机访问权限" type:CHECKDEVICETYPE_CAMREA];
+        else {
+            NSString *content = @"";
+            CHECKDEVICETYPE checkType = CHECKDEVICETYPE_BOTH;
+            if (cameraType == PREMISSIONTYPE_NO && type == PREMISSIONTYPE_NO) {
+                content = @"请开启相机和麦克风访问权限";
+                checkType = CHECKDEVICETYPE_BOTH;
+            }
+            else if (cameraType == PREMISSIONTYPE_NO && type == PREMISSIONTYPE_YES) {
+                content =  @"请开启相机访问权限";
+                checkType = CHECKDEVICETYPE_CAMREA;
+            }
+            else if (cameraType == PREMISSIONTYPE_YES && type == PREMISSIONTYPE_NO) {
+                content = @"请开启麦克风访问权限";
+                checkType = CHECKDEVICETYPE_MIC;
+            }
+            [self showAlertWithMessage:content type:checkType];
         }
-    }
+    }];
+    
 }
 
 - (void)showAlertWithMessage:(NSString *)message type:(CHECKDEVICETYPE)deviceType {

+ 2 - 1
KulexiuForTeacher/KulexiuForTeacher/Module/Course/AccompanyCourse/View/AccompanyAlertView.m

@@ -81,7 +81,8 @@
 }
 
 - (BOOL)textView:(UITextView *)textView shouldChangeTextInRange:(NSRange)range replacementText:(NSString *)text {
-    if ([text isEqualToString:@""]) {
+    if ([text isEqualToString:@"\n"]) {
+        [self endEditing:YES];
         return YES;
     }
     

+ 28 - 16
KulexiuForTeacher/KulexiuForTeacher/Module/Course/MusicRoom/Controller/MusicRoomViewController.m

@@ -286,24 +286,36 @@
 // 加入房间
 - (void)joinClassRoom {
     // 加入房间前判断摄像头和麦克风逻辑
-    PREMISSIONTYPE micEnable = [RecordCheckManager checkMicPermissionAvaiable];
-    PREMISSIONTYPE cameraEnable = [RecordCheckManager checkCameraPremissionAvaiable];
-    if (micEnable == PREMISSIONTYPE_YES && cameraEnable == PREMISSIONTYPE_YES) {
-        // 进入教室
-        // 判断是否进行课前检测
-        [self.classManager joinRoomWithId:self.courseId subjectName:self.detailModel.subjectName classEndTime:self.detailModel.endTime inViewController:self];
-    }
-    else {
-        if (micEnable == PREMISSIONTYPE_NO && cameraEnable == PREMISSIONTYPE_NO) { // 如果麦克风权限和摄像头权限都没有
-            [self showAlertWithMessage:@"请开启相机和麦克风访问权限" type:CHECKDEVICETYPE_BOTH];
-        }
-        else if (micEnable == PREMISSIONTYPE_NO) { // 如果没有麦克风权限
-            [self showAlertWithMessage:@"请开启麦克风访问权限" type:CHECKDEVICETYPE_MIC];
+    [RecordCheckManager checkCameraPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        [self afterCheckCameraCheckMic:type];
+    }];
+}
+
+- (void)afterCheckCameraCheckMic:(PREMISSIONTYPE)cameraType {
+    [RecordCheckManager checkMicPermissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        if (type == PREMISSIONTYPE_YES && cameraType == PREMISSIONTYPE_YES) {
+            // Decide whether to run the pre-class device check
+            [self.classManager joinRoomWithId:self.courseId subjectName:self.detailModel.subjectName classEndTime:self.detailModel.endTime inViewController:self];
         }
-        else if (cameraEnable == PREMISSIONTYPE_NO) { // camera permission not granted
-            [self showAlertWithMessage:@"请开启相机访问权限" type:CHECKDEVICETYPE_CAMREA];
+        else {
+            NSString *content = @"";
+            CHECKDEVICETYPE checkType = CHECKDEVICETYPE_BOTH;
+            if (cameraType == PREMISSIONTYPE_NO && type == PREMISSIONTYPE_NO) {
+                content = @"请开启相机和麦克风访问权限";
+                checkType = CHECKDEVICETYPE_BOTH;
+            }
+            else if (cameraType == PREMISSIONTYPE_NO && type == PREMISSIONTYPE_YES) {
+                content = @"请开启相机访问权限";
+                checkType = CHECKDEVICETYPE_CAMREA;
+            }
+            else if (cameraType == PREMISSIONTYPE_YES && type == PREMISSIONTYPE_NO) {
+                content = @"请开启麦克风访问权限";
+                checkType = CHECKDEVICETYPE_MIC;
+            }
+            [self showAlertWithMessage:content type:checkType];
         }
-    }
+    }];
+    
 }
 
 - (void)showAlertWithMessage:(NSString *)message type:(CHECKDEVICETYPE)deviceType {

+ 7 - 8
KulexiuForTeacher/KulexiuForTeacher/Module/Home/Controller/HomeViewController.m

@@ -893,16 +893,15 @@
 
 - (void)scanAction {
     // Check whether the camera permission is granted
-    PREMISSIONTYPE cameraEnable = [RecordCheckManager checkCameraPremissionAvaiable];
-    if (cameraEnable == PREMISSIONTYPE_YES) { // permission granted
-        KSScanViewController *ctrl = [[KSScanViewController alloc] init];
-        [self.navigationController pushViewController:ctrl animated:YES];
-    }
-    else {
-        if (cameraEnable == PREMISSIONTYPE_NO) {
+    [RecordCheckManager checkCameraPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        if (type == PREMISSIONTYPE_YES) {
+            KSScanViewController *ctrl = [[KSScanViewController alloc] init];
+            [self.navigationController pushViewController:ctrl animated:YES];
+        }
+        else {
             [self showAlertWithMessage:@"请开启相机访问权限" type:CHECKDEVICETYPE_CAMREA];
         }
-    }
+    }];
 }
 
 - (void)showAlertWithMessage:(NSString *)message type:(CHECKDEVICETYPE)deviceType {

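RecordCheckManager's implementation is not part of this diff, so whether the callback is delivered on the main thread is not visible here (AVFoundation's own requestAccess completion handlers, for example, are not). If no main-thread guarantee exists, UIKit work such as the push above should hop back to the main queue; a defensive variant of the same block:

[RecordCheckManager checkCameraPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
    dispatch_async(dispatch_get_main_queue(), ^{ // UIKit must be touched on the main thread
        if (type == PREMISSIONTYPE_YES) {
            KSScanViewController *ctrl = [[KSScanViewController alloc] init];
            [self.navigationController pushViewController:ctrl animated:YES];
        }
        else {
            [self showAlertWithMessage:@"请开启相机访问权限" type:CHECKDEVICETYPE_CAMREA];
        }
    });
}];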
+ 14 - 17
KulexiuForTeacher/KulexiuForTeacher/Module/Home/Music/Controller/MyMusicViewController.m

@@ -162,26 +162,23 @@
 }
 
 - (void)savePicWithImage:(UIImage *)image {
-
     // Check the photo-library permission
-    PREMISSIONTYPE albumEnable = [RecordCheckManager checkPhotoLibraryPremissionAvaiable];
-    if (albumEnable == PREMISSIONTYPE_YES) { // permission granted
-        
-        [[TZImageManager manager] savePhotoWithImage:image completion:^(PHAsset *asset, NSError *error) {
-            if (!error) {
-                [self .shareView hideView];
-                [LOADING_MANAGER MBShowAUTOHidingInWindow:@"已保存到相册"];
-            }
-            else {
-                [LOADING_MANAGER MBShowAUTOHidingInWindow:@"保存图片失败"];
-            }
-        }];
-    }
-    else {
-        if (albumEnable == PREMISSIONTYPE_NO) {
+    [RecordCheckManager checkPhotoLibraryPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        if (type == PREMISSIONTYPE_YES) {
+            [[TZImageManager manager] savePhotoWithImage:image completion:^(PHAsset *asset, NSError *error) {
+                if (!error) {
+                    [self.shareView hideView];
+                    [LOADING_MANAGER MBShowAUTOHidingInWindow:@"已保存到相册"];
+                }
+                else {
+                    [LOADING_MANAGER MBShowAUTOHidingInWindow:@"保存图片失败"];
+                }
+            }];
+        }
+        else {
             [self showAlertWithMessage:@"请开启相册访问权限" type:CHECKDEVICETYPE_CAMREA];
         }
-    }
+    }];
 }
 
 - (void)showAlertWithMessage:(NSString *)message type:(CHECKDEVICETYPE)deviceType {

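For comparison only: on an iOS 14+ deployment target the same save could go through the Photos framework directly with add-only authorization, which is sufficient for writing and narrower than full library access. This is a sketch of an alternative, not what the project does — the code above keeps TZImageManager and the RecordCheckManager callback, and LOADING_MANAGER plus the toast strings are reused from the hunk above:

#import <Photos/Photos.h>

// Sketch of an alternative save path (not used by the project): add-only Photos access.
- (void)savePicDirectlyWithImage:(UIImage *)image {
    [PHPhotoLibrary requestAuthorizationForAccessLevel:PHAccessLevelAddOnly
                                               handler:^(PHAuthorizationStatus status) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (status != PHAuthorizationStatusAuthorized && status != PHAuthorizationStatusLimited) {
                [self showAlertWithMessage:@"请开启相册访问权限" type:CHECKDEVICETYPE_CAMREA];
                return;
            }
            [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
                [PHAssetChangeRequest creationRequestForAssetFromImage:image];
            } completionHandler:^(BOOL success, NSError * _Nullable error) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    // Same toast strings as the TZImageManager path above.
                    [LOADING_MANAGER MBShowAUTOHidingInWindow:success ? @"已保存到相册" : @"保存图片失败"];
                });
            }];
        });
    }];
}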
+ 6 - 8
KulexiuForTeacher/KulexiuForTeacher/Module/Home/ScanView/Controller/KSScanViewController.m

@@ -149,16 +149,14 @@
     }
     else {  // choose from the photo album
         // Check whether the album permission is granted
-        PREMISSIONTYPE albumEnable = [RecordCheckManager checkPhotoLibraryPremissionAvaiable];
-        if (albumEnable == PREMISSIONTYPE_YES) { // permission granted
-            [self choosePhoneScan];
-        }
-        else {
-            if (albumEnable == PREMISSIONTYPE_NO) {
+        [RecordCheckManager checkPhotoLibraryPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+            if (type == PREMISSIONTYPE_YES) {
+                [self choosePhoneScan];
+            }
+            else {
                 [self showAlertWithMessage:@"请开启相册访问权限" type:CHECKDEVICETYPE_CAMREA];
             }
-        }
-        
+        }];
     }
 }
 - (void)showAlertWithMessage:(NSString *)message type:(CHECKDEVICETYPE)deviceType {

+ 7 - 8
KulexiuForTeacher/KulexiuForTeacher/Module/Mine/Controller/MineViewController.m

@@ -587,16 +587,15 @@
 
 - (void)scanAction {
     // Check whether the camera permission is granted
-    PREMISSIONTYPE cameraEnable = [RecordCheckManager checkCameraPremissionAvaiable];
-    if (cameraEnable == PREMISSIONTYPE_YES) { // permission granted
-        KSScanViewController *ctrl = [[KSScanViewController alloc] init];
-        [self.navigationController pushViewController:ctrl animated:YES];
-    }
-    else {
-        if (cameraEnable == PREMISSIONTYPE_NO) {
+    [RecordCheckManager checkCameraPremissionAvaiableCallback:^(PREMISSIONTYPE type) {
+        if (type == PREMISSIONTYPE_YES) {
+            KSScanViewController *ctrl = [[KSScanViewController alloc] init];
+            [self.navigationController pushViewController:ctrl animated:YES];
+        }
+        else {
             [self showAlertWithMessage:@"请开启相机访问权限" type:CHECKDEVICETYPE_CAMREA];
         }
-    }
+    }];
 }
 
 - (void)showAlertWithMessage:(NSString *)message type:(CHECKDEVICETYPE)deviceType {

+ 2 - 2
KulexiuForTeacher/KulexiuForTeacher/Module/Mine/Works/View/MusicPublicContentView.m

@@ -71,8 +71,8 @@
 }
 
 -(BOOL)textView:(UITextView *)textView shouldChangeTextInRange:(NSRange)range replacementText:(NSString *)text {
-
-    if ([text isEqualToString:@""]) {
+    if ([text isEqualToString:@"\n"]) {
+        [self endEditing:YES];
         return YES;
     }
     

+ 0 - 3
KulexiuForTeacher/KulexiuForTeacher/Module/TXClassRoom/View/CloseCourse/KSCloseCourseView.m

@@ -86,9 +86,6 @@
         [self endEditing:YES];
         return YES;
     }
-    if ([text isEqualToString:@""]) {
-        return YES;
-    }
     // Limit the input length
     NSString *newString = [textView.text stringByReplacingCharactersInRange:range withString:text];
     if (newString.length > 250) {

BIN
KulexiuForTeacher/KulexiuForTeacher/ToolKit/CloudAccompanyLibrary.framework/CloudAccompanyLibrary


+ 34 - 0
KulexiuForTeacher/KulexiuForTeacher/ToolKit/CloudAccompanyLibrary.framework/Headers/KSCloudWebViewController.h

@@ -10,6 +10,13 @@
 #import <KSToolLibrary/WeakWebViewScriptMessageDelegate.h>
 #import <KSToolLibrary/UINavigationController+KSNavigationBar.h>
 
+typedef NS_ENUM(NSInteger, CHECK_DELAY_TYPE) {
+    CHECK_DELAY_TYPE_START,
+    CHECK_DELAY_TYPE_FAILED,  // check failed
+    CHECK_DELAY_TYPE_SUCCESS, // check succeeded
+    CHECK_DELAY_TYPE_CANCEL,  // cancel the latency-check flow
+};
+
 @class KSCloudWebViewController;
 
 @protocol KSCloudWebViewControllerDelegate <NSObject>
@@ -40,6 +47,16 @@
 // Error-report callback; the current app version must also be filled in
 - (void)cloudPageOccourError:(NSMutableDictionary *_Nonnull)uploadParm;
 
+
+#pragma mark ----- Latency check
+// Show the latency-check view
+- (void)showDelayCheckViewDisplay:(UIView *_Nonnull)displayView callBackController:(KSCloudWebViewController *_Nonnull)controller;
+// Update the check status
+- (void)changeDelayCheckStatus:(CHECK_DELAY_TYPE)checkStatus;
+// Report the current headset state
+- (void)displayCurrentHeadsetTypes:(NSDictionary *_Nonnull)parm;
+// Callback fired when the socket connects successfully
+- (void)socketConnectedCallback;
 @end
 
 NS_ASSUME_NONNULL_BEGIN
@@ -81,6 +98,23 @@ NS_ASSUME_NONNULL_BEGIN
 
 // Callback fired after the merge finishes
 - (void)musicPublishCallBack;
+// Replay
+- (void)retryEvaluatingMusic;
+
+- (void)checkAudioType;
+// Whether the socket is connected
+- (BOOL)isSocketConnected;
+
+// Start the in-app latency check
+- (void)startAppDelayCheck;
+// Cancel the latency-check flow
+- (void)cancelAppDelayCheck;
+// Latency check interrupted
+- (void)checkDelayBreak;
+
+- (void)finishDelayCheck;
+
+- (void)connectSocket;
 
 @end
 

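The header only declares the new latency-check hooks; how the host app reacts is left to the delegate. A minimal sketch of a conforming implementation follows — the body of each case is a placeholder, not framework behavior:

#pragma mark - KSCloudWebViewControllerDelegate (latency check)

- (void)showDelayCheckViewDisplay:(UIView *)displayView callBackController:(KSCloudWebViewController *)controller {
    // Present the latency-check UI that the web page requested.
    [self.view addSubview:displayView];
}

- (void)changeDelayCheckStatus:(CHECK_DELAY_TYPE)checkStatus {
    switch (checkStatus) {
        case CHECK_DELAY_TYPE_START:
            // show progress
            break;
        case CHECK_DELAY_TYPE_SUCCESS:
            // dismiss the check UI and continue
            break;
        case CHECK_DELAY_TYPE_FAILED:
            // offer a retry
            break;
        case CHECK_DELAY_TYPE_CANCEL:
            // tear down the check UI
            break;
    }
}

- (void)displayCurrentHeadsetTypes:(NSDictionary *)parm {
    NSLog(@"headset state: %@", parm);
}

- (void)socketConnectedCallback {
    NSLog(@"evaluation socket connected");
}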
BIN
KulexiuForTeacher/KulexiuForTeacher/ToolKit/CloudAccompanyLibrary.framework/_CodeSignature/CodeDirectory


BIN
KulexiuForTeacher/KulexiuForTeacher/ToolKit/CloudAccompanyLibrary.framework/_CodeSignature/CodeRequirements-1


+ 3 - 3
KulexiuForTeacher/KulexiuForTeacher/ToolKit/CloudAccompanyLibrary.framework/_CodeSignature/CodeResources

@@ -58,7 +58,7 @@
 		</data>
 		<key>Headers/KSCloudWebViewController.h</key>
 		<data>
-		pZOk0rtmzLtLAH9cSM9y5V6aZ/k=
+		zdPduXm8/0MzlDLm3KlHZ6du4GI=
 		</data>
 		<key>Headers/KSVideoRecordManager.h</key>
 		<data>
@@ -222,11 +222,11 @@
 		<dict>
 			<key>hash</key>
 			<data>
-			pZOk0rtmzLtLAH9cSM9y5V6aZ/k=
+			zdPduXm8/0MzlDLm3KlHZ6du4GI=
 			</data>
 			<key>hash2</key>
 			<data>
-			RO0ZS6NoI6TZn40r/kX+ncngS2X+PI+xR65M5qT1qUs=
+			UT4btEVKKOf+tZgqaS8Lp6gJWWQh+a6BsV/MWK1hAuQ=
 			</data>
 		</dict>
 		<key>Headers/KSVideoRecordManager.h</key>

BIN
KulexiuForTeacher/KulexiuForTeacher/ToolKit/CloudAccompanyLibrary.framework/_CodeSignature/CodeSignature


+ 0 - 1
KulexiuForTeacher/KulexiuForTeacher/ToolKit/KSToolLibrary.framework/Headers/KSAQRecordManager.h

@@ -9,7 +9,6 @@
 #import <Foundation/Foundation.h>
 #import <KSToolLibrary/KSAudioSessionManager.h>
 
-
 NS_ASSUME_NONNULL_BEGIN
 
 @class KSAQRecordManager;

+ 7 - 3
KulexiuForTeacher/KulexiuForTeacher/ToolKit/KSToolLibrary.framework/Headers/RecordCheckManager.h

@@ -14,13 +14,17 @@ typedef NS_ENUM(NSInteger, PREMISSIONTYPE) {
     PREMISSIONTYPE_YES,
 };
 
+typedef void(^RecordCheckCallback)(PREMISSIONTYPE type);
+
 NS_ASSUME_NONNULL_BEGIN
 
 @interface RecordCheckManager : NSObject
 
-+ (PREMISSIONTYPE)checkMicPermissionAvaiable;
-+ (PREMISSIONTYPE)checkCameraPremissionAvaiable;
-+ (PREMISSIONTYPE)checkPhotoLibraryPremissionAvaiable;
++ (void)checkMicPermissionAvaiableCallback:(RecordCheckCallback)callback;
+
++ (void)checkCameraPremissionAvaiableCallback:(RecordCheckCallback)callback;
+
++ (void)checkPhotoLibraryPremissionAvaiableCallback:(RecordCheckCallback)callback;
 
 @end
 

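The matching implementation file is not included in this diff. As an assumption about what the framework likely does, a callback-based camera check would typically wrap AVCaptureDevice's asynchronous request and report back on the main queue:

#import <AVFoundation/AVFoundation.h>

// Assumed shape of the new API, for illustration only; the real .m is not in this commit.
+ (void)checkCameraPremissionAvaiableCallback:(RecordCheckCallback)callback {
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusNotDetermined) {
        // First request: ask the user, then report the result on the main queue.
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                callback(granted ? PREMISSIONTYPE_YES : PREMISSIONTYPE_NO);
            });
        }];
    }
    else {
        callback(status == AVAuthorizationStatusAuthorized ? PREMISSIONTYPE_YES : PREMISSIONTYPE_NO);
    }
}

The microphone and photo-library variants would follow the same shape around AVAudioSession's requestRecordPermission: and PHPhotoLibrary's authorization APIs, respectively.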
BIN
KulexiuForTeacher/KulexiuForTeacher/ToolKit/KSToolLibrary.framework/KSToolLibrary