// runtime.ts

import { reactive, ref, Ref } from 'vue'
import * as RongIMLib from '@rongcloud/imlib-next'
import * as RTC from '@rongcloud/plugin-rtc'
import request from '/src/helpers/request'
import { state } from '/src/state'
import event, { LIVE_EVENT_MESSAGE } from './event'
import dayjs from 'dayjs'
// import { SeatsCtrl } from './message-type'

type imConnectStatus = 'connecting' | 'connected' | 'disconnect'
type VideoStatus = 'init' | 'stream' | 'liveing' | 'stopped' | 'error' | 'loading'

export type TrackType = 'microphone' | 'camera' | 'screen'

type ActiveTracks = {
  [key in TrackType]: RTC.RCLocalTrack | null
}

type DeviceStatus = {
  [key in TrackType]: 'init' | 'granted' | 'denied' | 'closed' | 'none'
}

export const START_LIVE_TIME = 'start-live-time'
export const START_LIVE_STATUS = 'start-live-status'
export const VIDEO_DEVICE_ID = 'video-deviceId'
export const AUDIO_DEVICE_ID = 'audio-deviceId'
export const AUDIO_DEVICE_VOLUME = 'audio-device-volume'

const runtime = reactive({
  /** Room UID */
  roomUid: 'LIVE-2112263-12345',
  /** IM connection status */
  imConnectStatus: 'connecting' as imConnectStatus,
  // Whether screen sharing is active
  screenShareStatus: false,
  // <video> element used for local preview
  videoRef: ref<HTMLVideoElement | null>(null),
  // RTC client instance
  rtcClient: null as RTC.RCRTCClient | null,
  /** Joined living-room instance */
  joinedRoom: null as RTC.RCLivingRoom | null,
  // Raw media stream tracks
  mediaStreamTrack: [] as MediaStreamTrack[],
  // Media stream
  mediaStreams: null as MediaStream | null,
  // Video status
  videoStatus: 'init' as VideoStatus,
  // Available microphone devices
  microphones: [] as MediaDeviceInfo[],
  // Available camera devices
  cameras: [] as MediaDeviceInfo[],
  // Currently selected camera
  selectedCamera: null as MediaDeviceInfo | null,
  // Currently selected microphone
  selectedMicrophone: null as MediaDeviceInfo | null,
  // Like count
  likeCount: 0,
  // Like count at the last sync
  lastLikeCount: 0,
  /** Currently active local tracks */
  activeTracks: {} as ActiveTracks,
  /** Whether co-hosting (seat) requests are allowed */
  allowSeatsCtrl: true,
  /** Whether chat messages are allowed */
  allowChatCtrl: true,
  /** Current device permission status */
  deviceStatus: {
    microphone: 'init',
    camera: 'init',
    screen: 'init'
  } as DeviceStatus
})

export default runtime
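
/*
 * Usage sketch: `runtime` is a Vue reactive object, so a component can read
 * its fields directly in a template or watch them for changes, e.g.
 *
 *   import { watch } from 'vue'
 *   watch(() => runtime.imConnectStatus, (status) => {
 *     console.log('IM connection status:', status)
 *   })
 */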

// RongCloud IM app key (passed to init as `appkey`)
const RONG_IM_TOKEN = 'c9kqb3rdc451j'

RongIMLib.init({
  appkey: RONG_IM_TOKEN,
})

// Register custom message types
// Controls whether co-hosting (seat requests) is allowed
const MessageSeatsCtrl = RongIMLib.registerMessageType('RC:Chatroom:SeatsCtrl', true, true)
// Controls whether chat is allowed
const MessageChatBan = RongIMLib.registerMessageType('RC:Chatroom:ChatBan', true, true)
// Co-hosting (seat) request message
const MessageSeatApply = RongIMLib.registerMessageType('RC:Chatroom:SeatApply', true, true)
// Response to a co-hosting request
const MessageSeatResponse = RongIMLib.registerMessageType('RC:Chatroom:SeatResponse', true, true)

type MessageProps = {
  messageType: 'RC:Chatroom:Welcome' | 'RC:TxtMsg' | 'RC:Chatroom:Barrage' | 'RC:Chatroom:Like' | 'RC:Chatroom:SeatsCtrl' | 'RC:Chatroom:ChatBan' | 'RC:Chatroom:SeatApply',
  content: any,
}

type MessageEvent = {
  messages: MessageProps[],
}

const Events = RongIMLib.Events

/**
 * Listen for incoming messages
 */
const { MESSAGES, ...RestMessage } = Events

RongIMLib.addEventListener(Events.MESSAGES, (evt: MessageEvent) => {
  console.log(evt, 'message received')
  const { messages } = evt
  for (const message of messages) {
    // console.log(LIVE_EVENT_MESSAGE[message.messageType], message)
    if (LIVE_EVENT_MESSAGE[message.messageType]) {
      event.emit(LIVE_EVENT_MESSAGE[message.messageType], { ...message.content, $EventMessage: message })
    }
  }
})

// Forward all remaining IM events (e.g. chatroomDestroyed) to the local event bus
for (const Message of Object.values(RestMessage)) {
  RongIMLib.addEventListener(Message, (evt: any) => {
    console.log(Message, evt)
    event.emit(Message, { $EventMessage: null })
  })
}

/**
 * Listen for IM connection status changes
 */
RongIMLib.addEventListener(Events.CONNECTING, () => {
  console.log('connecting')
  runtime.imConnectStatus = 'connecting'
})

RongIMLib.addEventListener(Events.CONNECTED, () => {
  console.log('connected')
  runtime.imConnectStatus = 'connected'
})

RongIMLib.addEventListener(Events.DISCONNECT, () => {
  console.log('disconnect')
  runtime.imConnectStatus = 'disconnect'
})

export const connectIM = async (imToken: string) => {
  const user = await RongIMLib.connect(imToken)
  // Install the RTC plugin once the IM connection is established
  runtime.rtcClient = RongIMLib.installPlugin(RTC.installer, {})
  console.log('connect success', user.data?.userId)
  return user
}
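
/*
 * Usage sketch: connect before joining any room. `imToken` is assumed to be
 * issued by the application backend for the current user.
 *
 *   const user = await connectIM(imToken)
 *   console.log('connected as', user.data?.userId)
 */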

/**
 * Set playback volume
 * @param value Volume (0-100); also persisted to localStorage
 */
export const setVolume = (value: number) => {
  localStorage.setItem(AUDIO_DEVICE_VOLUME, value.toString())
  if (runtime.videoRef) {
    runtime.videoRef.volume = value / 100
  }
  // @ts-ignore
  if (runtime.activeTracks.microphone && runtime.activeTracks.microphone._element) {
    // @ts-ignore
    runtime.activeTracks.microphone._element.volume = value / 100
  }
}
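
/*
 * Usage sketch: restore the persisted volume on page load (the fallback of
 * 100 is an assumption of this example).
 *
 *   setVolume(Number(localStorage.getItem(AUDIO_DEVICE_VOLUME) ?? '100'))
 */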

/**
 * Attach a media stream to a <video> element and start playback
 */
export const setVideoSrcObject = (video: HTMLVideoElement | null, mediaStreams: MediaStream | null) => {
  if (video && mediaStreams) {
    video.srcObject = mediaStreams
    video.onloadedmetadata = () => {
      video.play()
    }
  }
}

/**
 * Start screen sharing: unpublish the camera track, publish the screen track,
 * and restore the camera when sharing ends
 */
export const shareScreenVideo = async () => {
  if (runtime.rtcClient && !runtime.screenShareStatus) {
    const screenTrack = await getTrack('screen')
    const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack
    // removeTrack([oldTrack], 'camera')
    runtime.joinedRoom?.unpublish([oldTrack])
    setTrack([screenTrack as RTC.RCLocalTrack], 'screen')
    if (runtime.videoRef) {
      screenTrack.play(runtime.videoRef)
      runtime.screenShareStatus = true
    }
    // When sharing stops (browser UI or track end), switch back to the camera
    screenTrack?.on(RTC.RCLocalTrack.EVENT_LOCAL_TRACK_END, (track: RTC.RCLocalTrack) => {
      runtime.screenShareStatus = false
      track.destroy()
      // removeTrack([track], 'screen')
      if (oldTrack) {
        setTrack([oldTrack as RTC.RCLocalTrack], 'camera')
        if (runtime.videoRef) {
          oldTrack.play(runtime.videoRef)
        }
      }
      // setVideoSrcObject(runtime.videoRef, this.mediaStreams)
    })
  }
}

/**
 * Get all audio input devices
 * @returns {Promise<MediaDeviceInfo[]>}
 */
export const getMicrophones = async () => {
  const microphones = await RTC.device.getMicrophones()
  runtime.microphones = microphones
  return microphones
}

/**
 * Get all video input devices
 * @returns {Promise<MediaDeviceInfo[]>}
 */
export const getCameras = async () => {
  const cameras = await RTC.device.getCameras()
  runtime.cameras = cameras
  return cameras
}

/**
 * Set the current camera device, replacing the active camera track
 * @param camera MediaDeviceInfo
 */
export const setSelectCamera = async (camera: MediaDeviceInfo) => {
  runtime.selectedCamera = camera
  localStorage.setItem(VIDEO_DEVICE_ID, camera.deviceId)
  const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack
  if (oldTrack) {
    await removeTrack([oldTrack], 'camera', oldTrack.isPublished())
  }
  const track = await getTrack('camera')
  await setTrack([track], 'camera', runtime.videoStatus === 'liveing')
}

/**
 * Set the current microphone device, replacing the active microphone track
 * @param microphone MediaDeviceInfo
 */
export const setSelectMicrophone = async (microphone: MediaDeviceInfo) => {
  runtime.selectedMicrophone = microphone
  localStorage.setItem(AUDIO_DEVICE_ID, microphone.deviceId)
  const oldTrack = runtime.activeTracks.microphone as RTC.RCLocalTrack
  if (oldTrack) {
    await removeTrack([oldTrack], 'microphone', oldTrack.isPublished())
  }
  const track = await getTrack('microphone')
  await setTrack([track], 'microphone', runtime.videoStatus === 'liveing')
}
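
/*
 * Usage sketch: enumerate devices and re-select the camera persisted in
 * localStorage, falling back to the first available device.
 *
 *   const cameras = await getCameras()
 *   const savedId = localStorage.getItem(VIDEO_DEVICE_ID)
 *   const camera = cameras.find((item) => item.deviceId === savedId) ?? cameras[0]
 *   if (camera) await setSelectCamera(camera)
 */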

type TrackResult = {
  code: RTC.RCRTCCode,
  track: RTC.RCMicphoneAudioTrack | RTC.RCCameraVideoTrack | RTC.RCScreenVideoTrack | undefined
}

/**
 * Create a local track of the given type and record the device permission status
 */
export const getTrack = async (trackType: TrackType): Promise<RTC.RCLocalTrack> => {
  let res: TrackResult | undefined
  let Track: RTC.RCLocalTrack | null = null
  if (trackType === 'microphone') {
    res = await runtime.rtcClient?.createMicrophoneAudioTrack('RongCloudRTC', {
      micphoneId: runtime.selectedMicrophone?.deviceId,
    }) as TrackResult
  } else if (trackType === 'camera') {
    res = await runtime.rtcClient?.createCameraVideoTrack('RongCloudRTC', {
      cameraId: runtime.selectedCamera?.deviceId,
      faceMode: 'user',
      frameRate: RTC.RCFrameRate.FPS_24,
      resolution: RTC.RCResolution.W1920_H1080,
    }) as TrackResult
  } else {
    res = await runtime.rtcClient?.createScreenVideoTrack() as TrackResult
  }
  Track = res?.track as RTC.RCLocalTrack
  // Record device availability / permission status
  if (trackType === 'camera' && !runtime.cameras.length) {
    runtime.deviceStatus[trackType] = 'none'
  } else if (trackType === 'microphone' && !runtime.microphones.length) {
    runtime.deviceStatus[trackType] = 'none'
  } else if (trackType === 'screen' && !runtime.screenShareStatus) {
    runtime.deviceStatus[trackType] = 'none'
  }
  if (res?.code === RTC.RCRTCCode.PERMISSION_DENIED) {
    runtime.deviceStatus[trackType] = 'denied'
  } else {
    runtime.deviceStatus[trackType] = 'granted'
  }
  // if (res.code !== RTC.RCRTCCode.SUCCESS || !Track) {
  //   throw new Error('Failed to create the local track')
  // }
  return Track
}

/**
 * Add local tracks: updates the active track map and, when requested,
 * publishes them to the joined room
 * @param tracks
 */
export const setTrack = async (tracks: RTC.RCLocalTrack[], trackType: TrackType, needPublish = true) => {
  for (const track of tracks) {
    // @ts-ignore
    // await runtime.mediaStreams?.addTrack(track._msTrack)
    if (trackType === 'microphone') {
      console.log('add microphone')
      track.play()
    }
    runtime.activeTracks[trackType] = track
  }
  if (needPublish) {
    // console.log('publish', runtime.joinedRoom)
    await runtime.joinedRoom?.publish(tracks)
  }
}

/**
 * Remove local tracks: unpublishes them when requested, destroys them and
 * clears the active track map
 * @param tracks
 */
export const removeTrack = async (tracks: RTC.RCLocalTrack[], trackType: TrackType, needPublish = true) => {
  if (needPublish) {
    await runtime.joinedRoom?.unpublish(tracks)
  }
  for (const track of tracks) {
    // @ts-ignore
    // await runtime.mediaStreams?.removeTrack(track._msTrack)
    // runtime.activeTracks[trackType].destroy()
    // console.log(runtime.activeTracks[trackType])
    track?.destroy()
    runtime.activeTracks[trackType] = null
  }
}
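
/*
 * Usage sketch: local camera preview before going live, creating a track and
 * rendering it without publishing (needPublish = false). Marking the status
 * as 'stream' afterwards is an assumption of this example.
 *
 *   const preview = await getTrack('camera')
 *   await setTrack([preview], 'camera', false)
 *   if (runtime.videoRef) preview.play(runtime.videoRef)
 *   runtime.videoStatus = 'stream'
 */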

/**
 * Join the IM chat room and the RTC living room
 */
export const joinIMRoom = async (roomId: string, type: RTC.RCLivingType, listenEvents: RTC.IRoomEventListener | null) => {
  await RongIMLib.joinChatRoom(roomId, { count: -1 })
  const join = await runtime.rtcClient?.joinLivingRoom(roomId, type)
  if (join?.code !== RTC.RCRTCCode.SUCCESS) throw Error('Failed to join the room')
  join.room?.registerRoomEventListener(listenEvents)
  return join
}

/**
 * Notify the business backend, then join the IM / RTC rooms
 */
export const joinRoom = async (roomId: string, type: RTC.RCLivingType, listenEvents: RTC.IRoomEventListener | null) => {
  try {
    await request.get('/api-web/imLiveBroadcastRoom/joinRoom', {
      params: {
        roomUid: runtime.roomUid,
        userId: state.user?.speakerId,
      }
    })
  } catch (error) {
    // Joining should not be blocked by a failed backend notification
  }
  return await joinIMRoom(roomId, type, listenEvents)
}
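
/*
 * Usage sketch: join the business room and the RTC living room in one call.
 * RCLivingType.AUDIO_VIDEO and the null listener are assumptions of this
 * example; pages normally pass their own IRoomEventListener.
 *
 *   const join = await joinRoom(runtime.roomUid, RTC.RCLivingType.AUDIO_VIDEO, null)
 *   runtime.joinedRoom = join.room as RTC.RCLivingRoom
 */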

/**
 * Start the live broadcast
 */
export const startLive = async (resetTime = true) => {
  if (runtime.videoStatus !== 'stream') throw Error('No video stream available')
  const room = runtime.joinedRoom
  if (room) {
    // const microphoneAudioTrack = await getTrack('microphone')
    // const cameraVideoTrack = await getTrack('camera')
    // await setTrack([cameraVideoTrack], 'camera')
    // await setTrack([microphoneAudioTrack], 'microphone')
    // const builder = await runtime.joinedRoom?.getMCUConfigBuilder()
    // // @ts-ignore
    // await builder.setOutputVideoRenderMode?.(RTC.MixVideoRenderMode.WHOLE)
    // // @ts-ignore
    // await builder.flush()
    runtime.videoStatus = 'liveing'
  }
  if (resetTime) {
    sessionStorage.setItem(START_LIVE_TIME, dayjs().valueOf().toString())
  }
  sessionStorage.setItem(START_LIVE_STATUS, 'liveing')
}
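
/*
 * Usage sketch: a minimal go-live sequence, assuming the room has been joined
 * and a local preview is active (videoStatus === 'stream').
 *
 *   await setTrack([await getTrack('camera')], 'camera')
 *   await setTrack([await getTrack('microphone')], 'microphone')
 *   await startLive()
 */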

/**
 * Stop the live broadcast and tear down all active local tracks
 */
export const closeLive = async () => {
  // removeMedia(runtime.mediaStreams, runtime.mediaStreamTrack)
  // await request.post('/api-im/user/statusImUser', {
  //   data: {
  //     os: 'PC',
  //     status: 3,
  //     userId: state.user?.id,
  //   }
  // })
  sessionStorage.removeItem(START_LIVE_TIME)
  sessionStorage.removeItem(START_LIVE_STATUS)
  runtime.videoStatus = 'stream'
  for (const key in runtime.activeTracks) {
    if (Object.prototype.hasOwnProperty.call(runtime.activeTracks, key)) {
      const track = runtime.activeTracks[key as TrackType] as RTC.RCLocalTrack
      if (track) {
        await removeTrack([track], key as TrackType)
      }
    }
  }
}

/**
 * Periodically sync the like count with the backend (once per minute)
 */
export const loopSyncLike = async () => {
  if ((runtime.likeCount !== runtime.lastLikeCount || runtime.likeCount === 0) && state.user) {
    try {
      await request.get('/api-web/imLiveBroadcastRoom/syncLike', {
        hideLoading: true,
        hideMessage: true,
        params: {
          likeNum: runtime.likeCount,
          roomUid: runtime.roomUid,
        }
      })
      runtime.lastLikeCount = runtime.likeCount
    } catch (error) {
      // Ignore sync failures and retry on the next tick
    }
  }
  setTimeout(() => {
    loopSyncLike()
  }, 1000 * 60)
}

type SendMessageType = 'text' | 'image' | 'audio' | 'video' | 'file' | 'SeatsCtrl' | 'ChatBan' | 'SeatApply' | 'SeatResponse'

/** Build the user payload attached to outgoing messages */
export const getSendMessageUser = () => {
  return {
    id: state.user?.speakerId,
    name: state.user?.speakerName,
    userId: state.user?.speakerId,
    userName: state.user?.speakerName,
  }
}

/**
 * Send a chat-room message
 * @param msg message content
 * @param type message type
 * @returns undefined, or the result of sending the message
 */
export const sendMessage = async (msg: any, type: SendMessageType = 'text') => {
  let message: RongIMLib.BaseMessage<unknown> | null = null
  if (!msg) return
  const conversation = {
    conversationType: RongIMLib.ConversationType.CHATROOM,
    targetId: runtime.joinedRoom?._roomId as string,
  }
  if (type === 'text') {
    message = new RongIMLib.TextMessage({
      user: getSendMessageUser(),
      content: msg
    })
  } else if (type === 'SeatsCtrl') {
    message = new MessageSeatsCtrl(msg)
  } else if (type === 'ChatBan') {
    message = new MessageChatBan(msg)
  } else if (type === 'SeatApply') {
    message = new MessageSeatApply(msg)
  } else if (type === 'SeatResponse') {
    message = new MessageSeatResponse(msg)
  }
  if (!message) return
  console.log(message)
  return await RongIMLib.sendMessage(conversation, message)
}
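
/*
 * Usage sketch: plain chat text versus a custom control message. The payload
 * shape passed for 'SeatsCtrl' is an assumption of this example.
 *
 *   await sendMessage('hello')                         // sent as RC:TxtMsg
 *   await sendMessage({ seatBan: true }, 'SeatsCtrl')  // custom chat-room control message
 */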

/**
 * Open a device: unmute an existing microphone track, otherwise create,
 * register and (optionally) publish a new track
 */
export const openDevice = async (trackType: TrackType, needPublish = true) => {
  if (trackType === 'microphone' && runtime.activeTracks[trackType]) {
    runtime.activeTracks[trackType]?.unmute()
  } else {
    const track = await getTrack(trackType)
    await setTrack([track], trackType, needPublish)
    if (runtime.videoRef) {
      track.play(runtime.videoRef)
    }
  }
}

/**
 * Close a device: mute the microphone, or remove (and optionally unpublish)
 * camera / screen tracks
 */
export const closeDevice = async (trackType: TrackType, needPublish = true) => {
  const track = runtime.activeTracks[trackType]
  if (trackType !== 'microphone') {
    // console.log('closeDevice', track)
    // track?.destroy()
    await removeTrack([track] as RTC.RCLocalTrack[], trackType, needPublish)
  } else {
    track?.mute()
  }
}

/** Toggle a device on or off, publishing changes only while live */
export const toggleDevice = async (trackType: TrackType) => {
  const track = runtime.activeTracks[trackType]
  const needPublish = runtime.videoStatus === 'liveing'
  if (track) {
    if (trackType === 'camera') {
      runtime.deviceStatus.camera = 'closed'
    }
    await closeDevice(trackType, needPublish)
  } else {
    if (trackType === 'camera') {
      runtime.deviceStatus.camera = 'granted'
    }
    await openDevice(trackType, needPublish)
  }
}

/** Leave the RTC room after stopping the live broadcast */
export const leaveIMRoom = async () => {
  await closeLive()
  if (runtime.joinedRoom) {
    // @ts-ignore
    await runtime.rtcClient?.leaveRoom(runtime.joinedRoom)
    runtime.joinedRoom = null
  }
}
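
/*
 * Usage sketch: teardown when the broadcaster exits. Calling
 * RongIMLib.quitChatRoom here is an assumption of this example, as the
 * counterpart of the joinChatRoom call used above.
 *
 *   await leaveIMRoom()
 *   await RongIMLib.quitChatRoom(runtime.roomUid)
 */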