import { reactive, ref } from 'vue'
import * as RongIMLib from '@rongcloud/imlib-next'
import * as RTC from '@rongcloud/plugin-rtc'
import request from '/src/helpers/request'
import { state } from '/src/state'
import event, { LIVE_EVENT_MESSAGE } from './event'
import dayjs from 'dayjs'
// import { SeatsCtrl } from './message-type'

type imConnectStatus = 'connecting' | 'connected' | 'disconnect'
type VideoStatus = 'init' | 'stream' | 'liveing' | 'stopped' | 'error' | 'loading'
export type TrackType = 'microphone' | 'camera' | 'screen'
type ActiveTracks = {
  [key in TrackType]: RTC.RCLocalTrack | null
}
type DeviceStatus = {
  [key in TrackType]: 'init' | 'granted' | 'denied' | 'closed' | 'none'
}

// localStorage / sessionStorage keys
export const START_LIVE_TIME = 'start-live-time'
export const START_LIVE_STATUS = 'start-live-status'
export const VIDEO_DEVICE_ID = 'video-deviceId'
export const AUDIO_DEVICE_ID = 'audio-deviceId'
export const AUDIO_DEVICE_VOLUME = 'audio-device-volume'

const runtime = reactive({
  /** Room id */
  roomUid: 'LIVE-2112263-12345',
  /** IM connection status */
  imConnectStatus: 'connecting' as imConnectStatus,
  // Screen-sharing status
  screenShareStatus: false,
  // Video element used for the local preview
  videoRef: ref<HTMLVideoElement | null>(null),
  // RTC client instance
  rtcClient: null as RTC.RCRTCClient | null,
  /** Joined living-room instance */
  joinedRoom: null as RTC.RCLivingRoom | null,
  // Raw media stream tracks
  mediaStreamTrack: [] as MediaStreamTrack[],
  // Media stream
  mediaStreams: null as MediaStream | null,
  // Video status
  videoStatus: 'init' as VideoStatus,
  // Available microphone devices
  microphones: [] as MediaDeviceInfo[],
  // Available camera devices
  cameras: [] as MediaDeviceInfo[],
  // Currently selected camera
  selectedCamera: null as MediaDeviceInfo | null,
  // Currently selected microphone
  selectedMicrophone: null as MediaDeviceInfo | null,
  // Like count
  likeCount: 0,
  // Like count at the last sync
  lastLikeCount: 0,
  /** Currently active local tracks */
  activeTracks: {} as ActiveTracks,
  /** Whether seat (co-host) requests are allowed */
  allowSeatsCtrl: true,
  /** Whether chat messages are allowed */
  allowChatCtrl: true,
  /** Acquisition status of each device type */
  deviceStatus: {
    microphone: 'init',
    camera: 'init',
    screen: 'init'
  } as DeviceStatus
})

export default runtime

const RONG_IM_APP_KEY = 'c9kqb3rdc451j'
RongIMLib.init({
  appkey: RONG_IM_APP_KEY,
})

// Register custom message types
// Controls whether seat (co-host) requests are allowed
const MessageSeatsCtrl = RongIMLib.registerMessageType('RC:Chatroom:SeatsCtrl', true, true)
// Controls whether chat messages are allowed
const MessageChatBan = RongIMLib.registerMessageType('RC:Chatroom:ChatBan', true, true)
// Seat (co-host) request message
const MessageSeatApply = RongIMLib.registerMessageType('RC:Chatroom:SeatApply', true, true)
// Response to a seat request
const MessageSeatResponse = RongIMLib.registerMessageType('RC:Chatroom:SeatResponse', true, true)

type MessageProps = {
  messageType: 'RC:Chatroom:Welcome' | 'RC:TxtMsg' | 'RC:Chatroom:Barrage' | 'RC:Chatroom:Like'
    | 'RC:Chatroom:SeatsCtrl' | 'RC:Chatroom:ChatBan' | 'RC:Chatroom:SeatApply',
  content: any,
}
type MessageEvent = {
  messages: MessageProps[],
}

const Events = RongIMLib.Events

/**
 * Listen for message notifications
 */
// Split MESSAGES off so the remaining IM events can be re-emitted generically below.
const { MESSAGES, ...RestMessage } = Events
RongIMLib.addEventListener(Events.MESSAGES, (evt: MessageEvent) => {
  console.log(evt, 'message received')
  const { messages } = evt
  for (const message of messages) {
    // console.log(LIVE_EVENT_MESSAGE[message.messageType], message)
    if (LIVE_EVENT_MESSAGE[message.messageType]) {
      event.emit(LIVE_EVENT_MESSAGE[message.messageType], { ...message.content, $EventMessage: message })
    }
  }
})

// Re-emit every other IM event (e.g. chatroomDestroyed) on the local event bus.
for (const Message of Object.values(RestMessage)) {
  RongIMLib.addEventListener(Message, (evt: any) => {
    console.log(Message, evt)
    event.emit(Message, { $EventMessage: null })
  })
}
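
// Usage sketch: consuming the re-emitted IM events from a component.
// Assumption: the './event' bus exposes mitt-style `on`/`off` methods (only
// `emit` is used in this module), and 'RC:Chatroom:Like' is a key of
// LIVE_EVENT_MESSAGE.
//
// const onLike = (payload: any) => {
//   runtime.likeCount += 1
//   console.log('like message', payload.$EventMessage)
// }
// event.on(LIVE_EVENT_MESSAGE['RC:Chatroom:Like'], onLike)
// // Unregister when the component unmounts:
// // event.off(LIVE_EVENT_MESSAGE['RC:Chatroom:Like'], onLike)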
/**
 * Listen for IM connection status changes
 */
RongIMLib.addEventListener(Events.CONNECTING, () => {
  console.log('connecting')
  runtime.imConnectStatus = 'connecting'
})
RongIMLib.addEventListener(Events.CONNECTED, () => {
  console.log('connected')
  runtime.imConnectStatus = 'connected'
})
RongIMLib.addEventListener(Events.DISCONNECT, () => {
  console.log('disconnect')
  runtime.imConnectStatus = 'disconnect'
})

/**
 * Connect to IM and install the RTC plugin
 * @param imToken IM token issued by the application backend
 */
export const connectIM = async (imToken: string) => {
  const user = await RongIMLib.connect(imToken)
  runtime.rtcClient = RongIMLib.installPlugin(RTC.installer, {})
  console.log('connect success', user.data?.userId)
  return user
}

/**
 * Set the playback volume
 * @param value volume, 0-100
 */
export const setVolume = (value: number) => {
  localStorage.setItem(AUDIO_DEVICE_VOLUME, value.toString())
  if (runtime.videoRef) {
    runtime.videoRef.volume = value / 100
  }
  // `_element` is a private field on the SDK track, hence the ts-ignore
  // @ts-ignore
  if (runtime.activeTracks.microphone && runtime.activeTracks.microphone._element) {
    // @ts-ignore
    runtime.activeTracks.microphone._element.volume = value / 100
  }
}

/**
 * Attach a media stream to a video element
 */
export const setVideoSrcObject = (video: HTMLVideoElement | null, mediaStreams: MediaStream | null) => {
  if (video && mediaStreams) {
    video.srcObject = mediaStreams
    video.onloadedmetadata = () => {
      video.play()
    }
  }
}

/**
 * Start screen sharing: unpublish the camera track, publish the screen track,
 * and restore the camera track when sharing ends.
 */
export const shareScreenVideo = async () => {
  if (runtime.rtcClient && !runtime.screenShareStatus) {
    const screenTrack = await getTrack('screen')
    const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack
    // removeTrack([oldTrack], 'camera')
    if (oldTrack) {
      await runtime.joinedRoom?.unpublish([oldTrack])
    }
    await setTrack([screenTrack as RTC.RCLocalTrack], 'screen')
    if (runtime.videoRef) {
      screenTrack.play(runtime.videoRef)
      runtime.screenShareStatus = true
    }
    // When the user stops sharing, destroy the screen track and restore the camera.
    screenTrack?.on(RTC.RCLocalTrack.EVENT_LOCAL_TRACK_END, (track: RTC.RCLocalTrack) => {
      runtime.screenShareStatus = false
      track.destroy()
      // removeTrack([track], 'screen')
      if (oldTrack) {
        setTrack([oldTrack as RTC.RCLocalTrack], 'camera')
        if (runtime.videoRef) {
          oldTrack.play(runtime.videoRef)
        }
      }
      // setVideoSrcObject(runtime.videoRef, this.mediaStreams)
    })
  }
}

/**
 * Get all audio input devices
 * @returns {Promise<MediaDeviceInfo[]>}
 */
export const getMicrophones = async () => {
  const microphones = await RTC.device.getMicrophones()
  runtime.microphones = microphones
  return microphones
}

/**
 * Get all video input devices
 * @returns {Promise<MediaDeviceInfo[]>}
 */
export const getCameras = async () => {
  const cameras = await RTC.device.getCameras()
  runtime.cameras = cameras
  return cameras
}

/**
 * Set the current camera device
 * @param camera MediaDeviceInfo
 */
export const setSelectCamera = async (camera: MediaDeviceInfo) => {
  runtime.selectedCamera = camera
  localStorage.setItem(VIDEO_DEVICE_ID, camera.deviceId)
  const oldTrack = runtime.activeTracks.camera as RTC.RCLocalTrack
  if (oldTrack) {
    await removeTrack([oldTrack], 'camera', oldTrack.isPublished())
  }
  const track = await getTrack('camera')
  await setTrack([track], 'camera', runtime.videoStatus === 'liveing')
}

/**
 * Set the current microphone device
 * @param microphone MediaDeviceInfo
 */
export const setSelectMicrophone = async (microphone: MediaDeviceInfo) => {
  runtime.selectedMicrophone = microphone
  localStorage.setItem(AUDIO_DEVICE_ID, microphone.deviceId)
  const oldTrack = runtime.activeTracks.microphone as RTC.RCLocalTrack
  if (oldTrack) {
    await removeTrack([oldTrack], 'microphone', oldTrack.isPublished())
  }
  const track = await getTrack('microphone')
  await setTrack([track], 'microphone', runtime.videoStatus === 'liveing')
}
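
// Usage sketch: populating device pickers and restoring the previously
// selected devices. Assumes connectIM() has already run so runtime.rtcClient
// is available; device ids are persisted by setSelectCamera /
// setSelectMicrophone under the VIDEO_DEVICE_ID / AUDIO_DEVICE_ID keys.
//
// const initDevices = async () => {
//   const cameras = await getCameras()
//   const microphones = await getMicrophones()
//   const savedCameraId = localStorage.getItem(VIDEO_DEVICE_ID)
//   const savedMicrophoneId = localStorage.getItem(AUDIO_DEVICE_ID)
//   const camera = cameras.find(item => item.deviceId === savedCameraId) || cameras[0]
//   const microphone = microphones.find(item => item.deviceId === savedMicrophoneId) || microphones[0]
//   if (camera) await setSelectCamera(camera)
//   if (microphone) await setSelectMicrophone(microphone)
// }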
type TrackResult = {
  code: RTC.RCRTCCode,
  track: RTC.RCMicphoneAudioTrack | RTC.RCCameraVideoTrack | RTC.RCScreenVideoTrack | undefined
}

/**
 * Create a local track of the given type and record the device status
 */
export const getTrack = async (trackType: TrackType): Promise<RTC.RCLocalTrack> => {
  let res: TrackResult | undefined
  let Track: RTC.RCLocalTrack | null = null
  if (trackType === 'microphone') {
    res = await runtime.rtcClient?.createMicrophoneAudioTrack('RongCloudRTC', {
      micphoneId: runtime.selectedMicrophone?.deviceId,
    }) as TrackResult
  } else if (trackType === 'camera') {
    res = await runtime.rtcClient?.createCameraVideoTrack('RongCloudRTC', {
      cameraId: runtime.selectedCamera?.deviceId,
      faceMode: 'user',
      frameRate: RTC.RCFrameRate.FPS_24,
      resolution: RTC.RCResolution.W1920_H1080,
    }) as TrackResult
  } else {
    res = await runtime.rtcClient?.createScreenVideoTrack() as TrackResult
  }
  Track = res?.track as RTC.RCLocalTrack
  if (trackType === 'camera' && !runtime.cameras.length) {
    runtime.deviceStatus[trackType] = 'none'
  } else if (trackType === 'microphone' && !runtime.microphones.length) {
    runtime.deviceStatus[trackType] = 'none'
  } else if (trackType === 'screen' && !runtime.screenShareStatus) {
    runtime.deviceStatus[trackType] = 'none'
  }
  if (res?.code === RTC.RCRTCCode.PERMISSION_DENIED) {
    runtime.deviceStatus[trackType] = 'denied'
  } else {
    runtime.deviceStatus[trackType] = 'granted'
  }
  // if (res.code !== RTC.RCRTCCode.SUCCESS || !Track) {
  //   throw new Error('Failed to acquire the track')
  // }
  return Track
}

/**
 * Add tracks; keeps the local preview and the published stream in sync
 * @param tracks tracks to add
 * @param trackType track type
 * @param needPublish whether the tracks should also be published to the joined room
 */
export const setTrack = async (tracks: RTC.RCLocalTrack[], trackType: TrackType, needPublish = true) => {
  for (const track of tracks) {
    // @ts-ignore
    // await runtime.mediaStreams?.addTrack(track._msTrack)
    if (trackType === 'microphone') {
      console.log('adding microphone')
      track.play()
    }
    runtime.activeTracks[trackType] = track
  }
  if (needPublish) {
    // console.log('publish', runtime.joinedRoom)
    await runtime.joinedRoom?.publish(tracks)
  }
}

/**
 * Remove tracks; keeps the local preview and the published stream in sync
 * @param tracks tracks to remove
 * @param trackType track type
 * @param needPublish whether the tracks should also be unpublished from the joined room
 */
export const removeTrack = async (tracks: RTC.RCLocalTrack[], trackType: TrackType, needPublish = true) => {
  if (needPublish) {
    await runtime.joinedRoom?.unpublish(tracks)
  }
  for (const track of tracks) {
    // @ts-ignore
    // await runtime.mediaStreams?.removeTrack(track._msTrack)
    // runtime.activeTracks[trackType].destroy()
    // console.log(runtime.activeTracks[trackType])
    track?.destroy()
    runtime.activeTracks[trackType] = null
  }
}

/**
 * Join the IM chatroom and the RTC living room
 */
export const joinIMRoom = async (roomId: string, type: RTC.RCLivingType, listenEvents: RTC.IRoomEventListener | null) => {
  await RongIMLib.joinChatRoom(roomId, { count: -1 })
  const join = await runtime.rtcClient?.joinLivingRoom(roomId, type)
  if (join?.code !== RTC.RCRTCCode.SUCCESS) throw Error('Failed to join the room')
  join.room?.registerRoomEventListener(listenEvents)
  return join
}

/**
 * Register the join on the business backend (best effort), then join the IM/RTC room
 */
export const joinRoom = async (roomId: string, type: RTC.RCLivingType, listenEvents: RTC.IRoomEventListener | null) => {
  try {
    await request.get('/api-web/imLiveBroadcastRoom/joinRoom', {
      params: {
        roomUid: runtime.roomUid,
        userId: state.user?.speakerId,
      }
    })
  } catch (error) {
    // Failures of the business-side join are ignored.
  }
  return await joinIMRoom(roomId, type, listenEvents)
}

/**
 * Start the live broadcast
 */
export const startLive = async (resetTime = true) => {
  if (runtime.videoStatus !== 'stream') throw Error('No active video stream')
  const room = runtime.joinedRoom
  if (room) {
    // const microphoneAudioTrack = await getTrack('microphone')
    // const cameraVideoTrack = await getTrack('camera')
    // await setTrack([cameraVideoTrack], 'camera')
    // await setTrack([microphoneAudioTrack], 'microphone')
    // const builder = await runtime.joinedRoom?.getMCUConfigBuilder()
    // // @ts-ignore
    // await builder.setOutputVideoRenderMode?.(RTC.MixVideoRenderMode.WHOLE)
    // // @ts-ignore
    // await builder.flush()
    runtime.videoStatus = 'liveing'
  }
  if (resetTime) {
    sessionStorage.setItem(START_LIVE_TIME, dayjs().valueOf().toString())
  }
  sessionStorage.setItem(START_LIVE_STATUS, 'liveing')
}
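
// Usage sketch: a host going live. Assumptions: imToken and roomId come from
// the application backend, RCLivingType.VIDEO is the living-room type in use,
// and the caller is responsible for flipping videoStatus to 'stream' once the
// local preview is ready (this module only sets it back in closeLive).
//
// const goLive = async (imToken: string, roomId: string) => {
//   await connectIM(imToken)
//   await joinRoom(roomId, RTC.RCLivingType.VIDEO, null)
//   await setTrack([await getTrack('camera')], 'camera')
//   await setTrack([await getTrack('microphone')], 'microphone')
//   runtime.videoStatus = 'stream'
//   await startLive()
// }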
/**
 * Stop the live broadcast and release all active tracks
 */
export const closeLive = async () => {
  // removeMedia(runtime.mediaStreams, runtime.mediaStreamTrack)
  // await request.post('/api-im/user/statusImUser', {
  //   data: {
  //     os: 'PC',
  //     status: 3,
  //     userId: state.user?.id,
  //   }
  // })
  sessionStorage.removeItem(START_LIVE_TIME)
  sessionStorage.removeItem(START_LIVE_STATUS)
  runtime.videoStatus = 'stream'
  for (const key in runtime.activeTracks) {
    if (Object.prototype.hasOwnProperty.call(runtime.activeTracks, key)) {
      const track = runtime.activeTracks[key as TrackType] as RTC.RCLocalTrack
      if (track) {
        await removeTrack([track], key as TrackType)
      }
    }
  }
}

/**
 * Periodically sync the like count with the backend
 */
export const loopSyncLike = async () => {
  if ((runtime.likeCount !== runtime.lastLikeCount || runtime.likeCount === 0) && state.user) {
    try {
      await request.get('/api-web/imLiveBroadcastRoom/syncLike', {
        hideLoading: true,
        hideMessage: true,
        params: {
          likeNum: runtime.likeCount,
          roomUid: runtime.roomUid,
        }
      })
      runtime.lastLikeCount = runtime.likeCount
    } catch (error) {
      // Sync failures are ignored; the next tick will retry.
    }
  }
  setTimeout(() => {
    loopSyncLike()
  }, 1000 * 60)
}

type SendMessageType = 'text' | 'image' | 'audio' | 'video' | 'file' | 'SeatsCtrl' | 'ChatBan' | 'SeatApply' | 'SeatResponse'

export const getSendMessageUser = () => {
  return {
    id: state.user?.speakerId,
    name: state.user?.speakerName,
    userId: state.user?.speakerId,
    userName: state.user?.speakerName,
  }
}

/**
 * Send a chatroom message
 * @param msg message content
 * @param type message type
 * @returns undefined, or the result of sending the message
 */
export const sendMessage = async (msg: any, type: SendMessageType = 'text') => {
  let message: RongIMLib.BaseMessage | null = null
  if (!msg) return
  const conversation = {
    conversationType: RongIMLib.ConversationType.CHATROOM,
    targetId: runtime.joinedRoom?._roomId as string,
  }
  if (type === 'text') {
    message = new RongIMLib.TextMessage({ user: getSendMessageUser(), content: msg })
  } else if (type === 'SeatsCtrl') {
    message = new MessageSeatsCtrl(msg)
  } else if (type === 'ChatBan') {
    message = new MessageChatBan(msg)
  } else if (type === 'SeatApply') {
    message = new MessageSeatApply(msg)
  } else if (type === 'SeatResponse') {
    message = new MessageSeatResponse(msg)
  }
  if (!message) return
  console.log(message)
  return await RongIMLib.sendMessage(conversation, message)
}

/**
 * Open a device. A muted microphone is simply unmuted; other cases create a new track.
 */
export const openDevice = async (trackType: TrackType, needPublish = true) => {
  if (trackType === 'microphone' && runtime.activeTracks[trackType]) {
    runtime.activeTracks[trackType]?.unmute()
  } else {
    const track = await getTrack(trackType)
    await setTrack([track], trackType, needPublish)
    if (runtime.videoRef) {
      track.play(runtime.videoRef)
    }
  }
}

/**
 * Close a device. The microphone is only muted; other devices have their track removed.
 */
export const closeDevice = async (trackType: TrackType, needPublish = true) => {
  const track = runtime.activeTracks[trackType]
  if (trackType !== 'microphone') {
    // console.log('closeDevice', track)
    // track?.destroy()
    await removeTrack([track] as RTC.RCLocalTrack[], trackType, needPublish)
  } else {
    track?.mute()
  }
}

/**
 * Toggle a device on or off, publishing only while the broadcast is live
 */
export const toggleDevice = async (trackType: TrackType) => {
  const track = runtime.activeTracks[trackType]
  const needPublish = runtime.videoStatus === 'liveing'
  if (track) {
    if (trackType === 'camera') {
      runtime.deviceStatus.camera = 'closed'
    }
    await closeDevice(trackType, needPublish)
  } else {
    if (trackType === 'camera') {
      runtime.deviceStatus.camera = 'granted'
    }
    await openDevice(trackType, needPublish)
  }
}

/**
 * Stop the broadcast and leave the RTC room
 */
export const leaveIMRoom = async () => {
  await closeLive()
  if (runtime.joinedRoom) {
    // @ts-ignore
    await runtime.rtcClient?.leaveRoom(runtime.joinedRoom)
    runtime.joinedRoom = null
  }
}
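
// Usage sketch: chat and teardown from a host component (e.g. inside async
// event handlers). The text message, toggleDevice and leaveIMRoom calls use
// this module's own API; the payload shape passed to the custom SeatsCtrl
// message is an assumption.
//
// // Send a plain text chat message to the current chatroom.
// await sendMessage('hello everyone', 'text')
// // Broadcast a custom control message, e.g. toggling seat (co-host) requests.
// await sendMessage({ seatBan: !runtime.allowSeatsCtrl }, 'SeatsCtrl')  // payload shape is an assumption
// // Toggle the host camera on or off.
// await toggleDevice('camera')
// // On component unmount: release tracks and leave the room.
// await leaveIMRoom()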