runtime.ts 15 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524
  1. import { reactive, ref, Ref } from 'vue'
  2. import * as RongIMLib from '@rongcloud/imlib-next'
  3. import * as RTC from '@rongcloud/plugin-rtc'
  4. import request from '/src/helpers/request'
  5. import { state } from '/src/state'
  6. import event, { LIVE_EVENT_MESSAGE } from './event'
  7. import dayjs from 'dayjs'
  8. // import { SeatsCtrl } from './message-type'
// IM connection lifecycle states.
type imConnectStatus = 'connecting' | 'connected' | 'disconnect'
// Local video pipeline states. ('liveing'/'stream' spellings are kept as-is:
// they are compared literally elsewhere in this module.)
type VideoStatus = 'init' | 'stream' | 'liveing' | 'stopped' | 'error' | 'loading'
// The three kinds of local media sources this module manages.
type TrackType = 'microphone' | 'camera' | 'screen'
// One active local track (or none) per media source.
type ActiveTracks = {
  [key in TrackType]: RTC.RCLocalTrack | null
}
// Acquisition state per media source.
type DeviceStatus = {
  [key in TrackType]: 'init' | 'granted' | 'denied' | 'closed' | 'none'
}
// sessionStorage keys used to persist the live start time / status across reloads.
export const START_LIVE_TIME = 'start-live-time'
export const START_LIVE_STATUS = 'start-live-status'
/**
 * Shared reactive runtime state for the live-streaming room.
 * Holds the IM connection status, RTC client/room handles, media devices,
 * active local tracks and UI-facing flags consumed across the live module.
 */
const runtime = reactive({
  /** Room id (NOTE(review): hard-coded value looks like a debug leftover — confirm it is overwritten before use) */
  roomUid: 'LIVE-2112263-12345',
  /** IM connection status */
  imConnectStatus: 'connecting' as imConnectStatus,
  // Whether screen sharing is currently active
  screenShareStatus: false,
  // Preview <video> element (a ref inside reactive() is auto-unwrapped on access)
  videoRef: ref<HTMLVideoElement | null>(null),
  // RTC client instance (installed after the IM connection is established)
  rtcClient: null as RTC.RCRTCClient | null,
  /** Handle of the joined living room */
  joinedRoom: null as RTC.RCLivingRoom | null,
  // Raw MediaStreamTracks
  mediaStreamTrack: [] as MediaStreamTrack[],
  // Combined media stream
  mediaStreams: null as MediaStream | null,
  // Video pipeline status
  videoStatus: 'init' as VideoStatus,
  // Available microphone devices
  microphones: [] as MediaDeviceInfo[],
  // Available camera devices
  cameras: [] as MediaDeviceInfo[],
  // Currently selected camera
  selectedCamera: null as MediaDeviceInfo | null,
  // Currently selected microphone
  selectedMicrophone: null as MediaDeviceInfo | null,
  // Like (thumbs-up) count
  likeCount: 0,
  // Like count at the last server sync
  lastLikeCount: 0,
  /** Currently active local tracks, one slot per TrackType */
  activeTracks: {} as ActiveTracks,
  /** Whether co-streaming (seats) is allowed */
  allowSeatsCtrl: true,
  /** Whether chatting is allowed */
  allowChatCtrl: true,
  /** Current device acquisition status per TrackType */
  deviceStatus: {
    microphone: 'init',
    camera: 'init',
    screen: 'init'
  } as DeviceStatus
})
export default runtime
// RongCloud application key.
// NOTE(review): despite the name this is the appkey, not a per-user token —
// consider renaming and moving it to configuration.
const RONG_IM_TOKEN = 'c9kqb3rdc451j'
RongIMLib.init({
  appkey: RONG_IM_TOKEN,
})
// Register custom chatroom message types.
// Controls whether co-streaming (seat) requests are allowed.
const MessageSeatsCtrl = RongIMLib.registerMessageType('RC:Chatroom:SeatsCtrl', true, true)
// Controls whether chat messages are allowed.
const MessageChatBan = RongIMLib.registerMessageType('RC:Chatroom:ChatBan', true, true)
// A seat (co-streaming) application.
const MessageSeatApply = RongIMLib.registerMessageType('RC:Chatroom:SeatApply', true, true)
// Response to a seat application.
const MessageSeatResponse = RongIMLib.registerMessageType('RC:Chatroom:SeatResponse', true, true)
// Shape of a single received IM message.
type MessageProps = {
  messageType: 'RC:Chatroom:Welcome' | 'RC:TxtMsg' | 'RC:Chatroom:Barrage' | 'RC:Chatroom:Like' | 'RC:Chatroom:SeatsCtrl' | 'RC:Chatroom:ChatBan' | 'RC:Chatroom:SeatApply',
  content: any,
}
// Payload of the MESSAGES event.
type MessageEvent = {
  messages: MessageProps[],
}
const Events = RongIMLib.Events
/**
 * Listen for incoming message notifications and forward them
 * onto the module-local event bus.
 */
const { MESSAGES, ...RestMessage } = Events
RongIMLib.addEventListener(Events.MESSAGES, (evt: MessageEvent) => {
  console.log(evt, '收到消息')
  const { messages } = evt
  for (const message of messages) {
    // Re-emit only message types mapped in LIVE_EVENT_MESSAGE; the payload is
    // the message content plus the original message under $EventMessage.
    if (LIVE_EVENT_MESSAGE[message.messageType]) {
      event.emit(LIVE_EVENT_MESSAGE[message.messageType], {...message.content, $EventMessage: message})
    }
  }
})
// Forward every remaining IM event (e.g. chatroomDestroyed) to the local
// event bus with a null $EventMessage payload.
for (const Message of Object.values(RestMessage)) {
  RongIMLib.addEventListener(Message, (evt: any) => {
    console.log(Message, evt)
    event.emit(Message, {$EventMessage: null})
  })
}
/**
 * Mirror IM connection status changes onto runtime.imConnectStatus.
 * (The RestMessage loop above also attaches a generic listener to these
 * events; these handlers additionally update the reactive status field.)
 */
RongIMLib.addEventListener(Events.CONNECTING, () => {
  console.log('connecting')
  runtime.imConnectStatus = 'connecting'
})
RongIMLib.addEventListener(Events.CONNECTED, () => {
  console.log('connected')
  runtime.imConnectStatus = 'connected'
})
RongIMLib.addEventListener(Events.DISCONNECT, () => {
  console.log('disconnect')
  runtime.imConnectStatus = 'disconnect'
})
  122. export const connectIM = async (imToken: string) => {
  123. try {
  124. const user = await RongIMLib.connect(imToken)
  125. runtime.rtcClient = RongIMLib.installPlugin(RTC.installer, {})
  126. console.log('connect success', user.data?.userId)
  127. return user
  128. } catch (error) {
  129. throw error
  130. }
  131. }
  132. /**
  133. * 设置声音
  134. * @param video
  135. * @param Value 声音大小
  136. */
  137. export const setVolume = (value: number) => {
  138. if(runtime.videoRef) {
  139. runtime.videoRef.volume = value / 100
  140. }
  141. }
  142. /**
  143. * 设置video视频流
  144. */
  145. export const setVideoSrcObject = (video: HTMLVideoElement | null, mediaStreams: MediaStream | null) => {
  146. if (video && mediaStreams) {
  147. video.srcObject = mediaStreams
  148. video.onloadedmetadata = () => {
  149. video.play()
  150. }
  151. }
  152. }
  153. /**
  154. * 发起屏幕共享
  155. */
  156. export const shareScreenVideo = async () => {
  157. if (runtime.rtcClient && !runtime.screenShareStatus) {
  158. const screenTrack = await getTrack('screen')
  159. const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack
  160. // removeTrack([oldTrack], 'camera')
  161. runtime.joinedRoom?.unpublish([oldTrack])
  162. setTrack([screenTrack as RTC.RCLocalTrack], 'screen')
  163. if (runtime.videoRef) {
  164. screenTrack.play(runtime.videoRef)
  165. runtime.screenShareStatus = true
  166. }
  167. screenTrack?.on(RTC.RCLocalTrack.EVENT_LOCAL_TRACK_END, (track: RTC.RCLocalTrack) => {
  168. runtime.screenShareStatus = false
  169. track.destroy()
  170. // removeTrack([track], 'screen')
  171. if (oldTrack) {
  172. setTrack([oldTrack as RTC.RCLocalTrack], 'camera')
  173. if (runtime.videoRef) {
  174. oldTrack.play(runtime.videoRef)
  175. }
  176. }
  177. // setVideoSrcObject(runtime.videoRef, this.mediaStreams)
  178. })
  179. }
  180. }
  181. /**
  182. *
  183. * 获取所有音频输入设备
  184. * @returns {Promise<void>}
  185. */
  186. export const getMicrophones = async () => {
  187. const microphones = await RTC.device.getMicrophones()
  188. runtime.microphones = microphones
  189. return microphones
  190. }
  191. /**
  192. *
  193. * 获取所有视频输入设备
  194. * @returns {Promise<void>}
  195. */
  196. export const getCameras = async () => {
  197. const cameras = await RTC.device.getCameras()
  198. runtime.cameras = cameras
  199. return cameras
  200. }
  201. /**
  202. *
  203. * 设置当前视频设备
  204. * @param camera MediaDeviceInfo
  205. */
  206. export const setSelectCamera = async (camera: MediaDeviceInfo) => {
  207. runtime.selectedCamera = camera
  208. const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack
  209. if (oldTrack) {
  210. await removeTrack([oldTrack], 'camera', oldTrack.isPublished())
  211. }
  212. const track = await getTrack('camera')
  213. setTrack([track], 'camera', runtime.videoStatus === 'liveing')
  214. }
  215. /**
  216. *
  217. * 设置当前麦克风设备
  218. * @param microphone MediaDeviceInfo
  219. */
  220. export const setSelectMicrophone = async (microphone: MediaDeviceInfo) => {
  221. runtime.selectedMicrophone = microphone
  222. const oldTrack = runtime.activeTracks.microphone as RTC.RCLocalTrack
  223. if (oldTrack) {
  224. await removeTrack([oldTrack], 'microphone', oldTrack.isPublished())
  225. }
  226. const track = await getTrack('microphone')
  227. setTrack([track], 'microphone', runtime.videoStatus === 'liveing')
  228. }
  229. type TrackResult = {
  230. code: RTC.RCRTCCode,
  231. track: RTC.RCMicphoneAudioTrack | RTC.RCCameraVideoTrack | RTC.RCScreenVideoTrack | undefined
  232. }
  233. export const getTrack = async (trackType: TrackType): Promise<RTC.RCLocalTrack> => {
  234. let res: TrackResult | undefined
  235. let Track: RTC.RCLocalTrack | null = null
  236. if (trackType === 'microphone') {
  237. res = await runtime.rtcClient?.createMicrophoneAudioTrack('RongCloudRTC', {
  238. micphoneId: runtime.selectedMicrophone?.deviceId,
  239. }) as TrackResult
  240. } else if (trackType === 'camera') {
  241. res = await runtime.rtcClient?.createCameraVideoTrack('RongCloudRTC', {
  242. cameraId: runtime.selectedCamera?.deviceId,
  243. faceMode: 'user',
  244. frameRate: RTC.RCFrameRate.FPS_24,
  245. resolution: RTC.RCResolution.W1920_H1080,
  246. }) as TrackResult
  247. } else {
  248. res = await runtime?.rtcClient?.createScreenVideoTrack() as TrackResult
  249. }
  250. Track = res?.track as RTC.RCLocalTrack
  251. if (trackType === 'camera' && !runtime.cameras.length) {
  252. runtime.deviceStatus[trackType] = 'none'
  253. } else if (trackType === 'microphone' && !runtime.microphones.length) {
  254. runtime.deviceStatus[trackType] = 'none'
  255. } else if (trackType === 'screen' && !runtime.screenShareStatus) {
  256. runtime.deviceStatus[trackType] = 'none'
  257. }
  258. if (res.code === RTC.RCRTCCode.PERMISSION_DENIED) {
  259. runtime.deviceStatus[trackType] = 'denied'
  260. } else {
  261. runtime.deviceStatus[trackType] = 'granted'
  262. }
  263. // if (res.code !== RTC.RCRTCCode.SUCCESS || !Track) {
  264. // throw new Error('获取数据流失败')
  265. // }
  266. return Track
  267. }
  268. /**
  269. * 添加视频流,会同步修改当先视频与推送的流
  270. * @param track
  271. */
  272. export const setTrack = async (tracks: RTC.RCLocalTrack[], trackType: TrackType, needPublish = true) => {
  273. for (const track of tracks) {
  274. // @ts-ignore
  275. // await runtime.mediaStreams?.addTrack(track._msTrack)
  276. if (trackType === 'microphone') {
  277. console.log('添加麦克风')
  278. track.play()
  279. }
  280. runtime.activeTracks[trackType] = track
  281. }
  282. if (needPublish) {
  283. // console.log('publish', runtime.joinedRoom)
  284. await runtime.joinedRoom?.publish(tracks)
  285. }
  286. }
  287. /**
  288. * 删除视频流,会同步修改当先视频与推送的流
  289. * @param track
  290. */
  291. export const removeTrack = async (tracks: RTC.RCLocalTrack[], trackType: TrackType, needPublish = true) => {
  292. if (needPublish) {
  293. await runtime.joinedRoom?.unpublish(tracks)
  294. }
  295. for (const track of tracks) {
  296. // @ts-ignore
  297. // await runtime.mediaStreams?.removeTrack(track._msTrack)
  298. // runtime.activeTracks[trackType].destroy()
  299. // console.log(runtime.activeTracks[trackType])
  300. track?.destroy()
  301. runtime.activeTracks[trackType] = null
  302. }
  303. }
  304. export const joinIMRoom = async (roomId: string, type: RTC.RCLivingType, listenEvents: RTC.IRoomEventListener | null) => {
  305. await RongIMLib.joinChatRoom(roomId, {count: -1})
  306. const join = await runtime.rtcClient?.joinLivingRoom(roomId, type)
  307. if (join?.code != RTC.RCRTCCode.SUCCESS) throw Error('加入房间失败')
  308. join.room?.registerRoomEventListener(listenEvents)
  309. return join
  310. }
  311. export const joinRoom = async (roomId: string, type: RTC.RCLivingType, listenEvents: RTC.IRoomEventListener | null) => {
  312. try {
  313. await request.get('/api-web/imLiveBroadcastRoom/joinRoom', {
  314. params: {
  315. roomUid: runtime.roomUid,
  316. userId: state.user?.speakerId,
  317. }
  318. })
  319. } catch (error) {}
  320. return await joinIMRoom(roomId, type, listenEvents)
  321. }
  322. /**
  323. * 开始直播
  324. */
  325. export const startLive = async (resetTime = true) => {
  326. if (runtime.videoStatus !== 'stream') throw Error('当前无视频流')
  327. const room = runtime.joinedRoom
  328. if (room) {
  329. // const microphoneAudioTrack = await getTrack('microphone')
  330. // const cameraVideoTrack = await getTrack('camera')
  331. // await setTrack([cameraVideoTrack], 'camera')
  332. // await setTrack([microphoneAudioTrack], 'microphone')
  333. // const builder = await runtime.joinedRoom?.getMCUConfigBuilder()
  334. // // @ts-ignore
  335. // await builder.setOutputVideoRenderMode?.(RTC.MixVideoRenderMode.WHOLE)
  336. // // @ts-ignore
  337. // await builder.flush()
  338. runtime.videoStatus = 'liveing'
  339. }
  340. if (resetTime) {
  341. sessionStorage.setItem(START_LIVE_TIME, dayjs().valueOf().toString())
  342. }
  343. sessionStorage.setItem(START_LIVE_STATUS, 'liveing')
  344. }
  345. /**
  346. * 关闭直播
  347. */
  348. export const closeLive = async () => {
  349. // removeMedia(runtime.mediaStreams, runtime.mediaStreamTrack)
  350. // await request.post('/api-im/user/statusImUser', {
  351. // data: {
  352. // os: 'PC',
  353. // status: 3,
  354. // userId: state.user?.id,
  355. // }
  356. // })
  357. sessionStorage.removeItem(START_LIVE_TIME)
  358. sessionStorage.removeItem(START_LIVE_STATUS)
  359. runtime.videoStatus = 'stream'
  360. for (const key in runtime.activeTracks) {
  361. if (Object.prototype.hasOwnProperty.call(runtime.activeTracks, key)) {
  362. const track = runtime.activeTracks[key as TrackType] as RTC.RCLocalTrack
  363. if (track) {
  364. removeTrack([track], key as TrackType)
  365. }
  366. }
  367. }
  368. }
  369. /**
  370. * 同步点赞数量
  371. */
  372. export const loopSyncLike = async () => {
  373. if ((runtime.likeCount !== runtime.lastLikeCount || runtime.likeCount === 0) && state.user) {
  374. try {
  375. await request.get('/api-web/imLiveBroadcastRoom/syncLike', {
  376. hideLoading: true,
  377. hideMessage: true,
  378. params: {
  379. likeNum: runtime.likeCount,
  380. roomUid: runtime.roomUid,
  381. }
  382. })
  383. runtime.lastLikeCount = runtime.likeCount
  384. } catch (error) {}
  385. }
  386. setTimeout(() => {
  387. loopSyncLike()
  388. }, 1000 * 60)
  389. }
  390. type SendMessageType = 'text' | 'image' | 'audio' | 'video' | 'file' | 'SeatsCtrl' | 'ChatBan' | 'SeatApply' | 'SeatResponse'
  391. export const getSendMessageUser = () => {
  392. return {
  393. id: state.user?.speakerId,
  394. name: state.user?.speakerName,
  395. userId: state.user?.speakerId,
  396. userName: state.user?.speakerName,
  397. }
  398. }
  399. /**
  400. *
  401. * @param msg 消息内容
  402. * @param type 消息类型
  403. * @returns null 或者 发送消息的结果
  404. */
  405. export const sendMessage = async (msg: any, type: SendMessageType = 'text') => {
  406. let message: RongIMLib.BaseMessage<unknown> | null = null
  407. if (!msg) return
  408. const conversation = {
  409. conversationType: RongIMLib.ConversationType.CHATROOM,
  410. targetId: runtime.joinedRoom?._roomId as string,
  411. }
  412. if (type === 'text') {
  413. message = new RongIMLib.TextMessage({
  414. user: getSendMessageUser(),
  415. content: msg
  416. })
  417. } else if (type === 'SeatsCtrl') {
  418. message = new MessageSeatsCtrl(msg)
  419. } else if (type === 'ChatBan') {
  420. message = new MessageChatBan(msg)
  421. } else if (type === 'SeatApply') {
  422. message = new MessageSeatApply(msg)
  423. } else if (type === 'SeatResponse') {
  424. message = new MessageSeatResponse(msg)
  425. }
  426. if (!message) return
  427. console.log(message)
  428. return await RongIMLib.sendMessage(conversation, message)
  429. }
  430. export const openDevice = async (trackType: TrackType, needPublish = true) => {
  431. if (trackType === 'microphone' && runtime.activeTracks[trackType]) {
  432. runtime.activeTracks[trackType]?.unmute()
  433. } else {
  434. const track = await getTrack(trackType)
  435. await setTrack([track], trackType, needPublish)
  436. if (runtime.videoRef) {
  437. track.play(runtime.videoRef)
  438. }
  439. }
  440. }
  441. export const closeDevice = async (trackType: TrackType, needPublish = true) => {
  442. const track = runtime.activeTracks[trackType]
  443. if (trackType !== 'microphone') {
  444. // console.log('closeDevice', track)
  445. // track?.destroy()
  446. await removeTrack([track] as RTC.RCLocalTrack[], trackType, needPublish)
  447. } else {
  448. track?.mute()
  449. }
  450. }
  451. export const toggleDevice = async (trackType: TrackType) => {
  452. const track = runtime.activeTracks[trackType]
  453. const needPublish = runtime.videoStatus === 'liveing'
  454. if (track) {
  455. if (trackType === 'camera') {
  456. runtime.deviceStatus.camera = 'closed'
  457. }
  458. closeDevice(trackType, needPublish)
  459. } else {
  460. if (trackType === 'camera') {
  461. runtime.deviceStatus.camera = 'granted'
  462. }
  463. openDevice(trackType, needPublish)
  464. }
  465. }
  466. export const leaveIMRoom = async () => {
  467. if (runtime.joinedRoom) {
  468. // @ts-ignore
  469. await runtime.rtcClient?.leaveRoom(runtime.joinedRoom)
  470. runtime.joinedRoom = null
  471. }
  472. }