diff --git a/changelog.md b/changelog.md index 84f1f379..843bd0dc 100644 --- a/changelog.md +++ b/changelog.md @@ -3,15 +3,29 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -# [Unreleased](https://github.com/MyPureCloud/genesys-cloud-webrtc-sdk/compare/v12.1.0...HEAD) +# [Unreleased](https://github.com/MyPureCloud/genesys-cloud-webrtc-sdk/compare/v13.0.0...HEAD) + +# [v13.0.0](https://github.com/MyPureCloud/genesys-cloud-webrtc-sdk/compare/v12.1.0...HEAD) +### Breaking Changes +* [STREAM-1351](https://inindca.atlassian.net/browse/STREAM-1351) - Removed `v2.conversations.{id}.media` notification subscription. Removed the `activeVideoParticipantsUpdate` session event and `IOnScreenParticipantsUpdate` interface. Speaker and on-screen participant updates are now entirely handled via the data channel. + +### Added +* [STREAM-1056](https://inindca.atlassian.net/browse/STREAM-1056) Added documentation for live screen monitoring functionality + +### Fixed +* [STREAM-905](https://inindca.atlassian.net/browse/STREAM-905) - Fix issue where `sessionStarted` was not emitted for calls that re-use persistent connections after a media recovery has occurred. Now initializing `_emittedSessionStarteds` for reinvites but only emitting if not a reinvite. # [v12.1.0](https://github.com/MyPureCloud/genesys-cloud-webrtc-sdk/compare/v12.0.0...v12.1.0) ### Added * [STREAM-1153](https://inindca.atlassian.net/browse/STREAM-1153) - Add support for monitoring multiple screens during live monitoring sessions * [STREAM-1123](https://inindca.atlassian.net/browse/STREAM-1123) - Only update necessary tracks when changing devices. 
Prevent virtual background from getting replaced +* [STREAM-781](https://inindca.atlassian.net/browse/STREAM-781) - Fix issue where hold caused `Unhandled promise rejection from setConversationHeld` for inactive conversations. ### Changed -* [STREAM-1211](https://inindca.atlassian.net/browse/STREAM-1178) - Update `axios` to `v1.13.5`. +* [STREAM-1211](https://inindca.atlassian.net/browse/STREAM-1211) - Update `axios` to `v1.13.5`. +* [STREAM-1285](https://inindca.atlassian.net/browse/STREAM-1285) - Refactor `handlePropose` in SoftphoneSessionHandler for clarity and future changes. Update associated comments to better reflect implementation. +* [STREAM-1403](https://inindca.atlassian.net/browse/STREAM-1403) - Update `lodash` to `v4.18.1` to address Snyk vulnerability. +* [STREAM-1423](https://inindca.atlassian.net/browse/STREAM-1423) - Update `axios` to `v1.15.0`. # [v12.0.0](https://github.com/MyPureCloud/genesys-cloud-webrtc-sdk/compare/v11.5.1...v12.0.0) ### Added diff --git a/doc/index.md b/doc/index.md index cb69caa0..af59cf75 100644 --- a/doc/index.md +++ b/doc/index.md @@ -18,6 +18,7 @@ - [WebRTC SoftPhone] - [WebRTC Video Conferencing] - [WebRTC Screen Recording] +- [WebRTC Live Screen Monitoring] - [WebRTC Media] (media, devices, and permissions support) - [WebRTC Headset Integration] @@ -131,6 +132,7 @@ interface ISdkConfig { wsHost?: string; autoConnectSessions?: boolean; autoAcceptPendingScreenRecordingRequests?: boolean; + autoAcceptPendingLiveScreenMonitoringRequests?: boolean; jidResource?: string; disableAutoAnswer?: boolean; logLevel?: LogLevels; @@ -220,6 +222,14 @@ If true, incoming proposes for screen recording sessions will be accepted immedi and no `pendingSession` event will be emitted. The consumer will still have to react to `sessionStarted` in order to add screen media and then call `sdk.acceptSession(...)`. 
+#### `autoAcceptPendingLiveScreenMonitoringRequests` + +`autoAcceptPendingLiveScreenMonitoringRequests?: boolean;` Optional: default `false` + +If true, incoming proposes for live screen monitoring sessions will be accepted immediately +and no `pendingSession` event will be emitted. The consumer will still have to react to +`sessionStarted` in order to add screen media and then call `sdk.acceptSession(...)`. + #### `jidResource` `jidResource?: string;` Optional: default `undefined` @@ -1200,7 +1210,7 @@ Params: provided, no media will be requested. - `audioElement?: HTMLAudioElement` Optional: audio element to attach incoming audio to default is sdk `defaults.audioElement` - - `videoElement?: HTMLAudioElement` Optional: video element to attach incoming video to + - `videoElement?: HTMLVideoElement` Optional: video element to attach incoming video to default is sdk `defaults.videoElement`. (only used for video sessions) - `videoDeviceId?: string | boolean | null;` Optional: See [ISdkMediaDeviceIds] for full details - `audioDeviceId?: string | boolean | null;` Optional: See [ISdkMediaDeviceIds] for full details @@ -2042,6 +2052,7 @@ The SDK will add the `type` to give more clarity as to why the error was thrown. [WebRTC SoftPhone]: softphone.md [WebRTC Video Conferencing]: video.md [WebRTC Screen Recording]: screen-recording.md +[WebRTC Live Screen Monitoring]: live-screen-monitoring.md [WebRTC Media]: media.md [ISdkMediaDeviceIds]: media.md#isdkmediadeviceids [SDK Media audioTrackVolume event]: media.md#audiotrackvolume diff --git a/doc/live-screen-monitoring.md b/doc/live-screen-monitoring.md new file mode 100644 index 00000000..530ceb3a --- /dev/null +++ b/doc/live-screen-monitoring.md @@ -0,0 +1,175 @@ +# Genesys Cloud WebRTC SDK Live Screen Monitoring + +This SDK supports live screen monitoring functionality that allows real-time viewing of user screens. 
Live screen monitoring sessions are initiated by the server and provide the ability to observe user activity in real-time for support, training, or compliance purposes. + +When the server determines that live screen monitoring should be initiated, it will send a `pendingSession` similar to how other session types work (ie. softphone and video conversations). The consuming client must accept the `pendingSession`, gather screen media, and add the media to that session. + +> *Note: live screen monitoring does not support guest users.* + +## WebRTC SDK Live Screen Monitoring Index +This documentation expands upon the [GenesysCloudWebrtcSdk] documentation but is specific to +live screen monitoring. + +* [Example usage](#example-usage) +* [Observer vs Target Roles](#observer-vs-target-roles) + +## Prerequisites + +You must have live screen monitoring policies in place and the WebRTC SDK must be configured to allow live screen monitoring sessions. + +## Example Usage + +### Automatic Accept (default) +Signaling is automatically accepted by the SDK. When the live screen monitoring session comes in, you need to add your screen tracks and then call `sdk.acceptSession(session)`. 
+ +``` ts +// set up needed events +sdk.on('sessionStarted', async (session) => { + if (sdk.isLiveScreenMonitoringSession(session)) { + // gather media - the SDK does *not* gather screen media for you + const screenStream = await navigator.mediaDevices.getDisplayMedia(); + + // create metadata for the screen being monitored + const track = screenStream.getTracks()[0]; + const { height, width, deviceId } = track.getSettings(); + const liveScreenMonitoringMetadata = [ + { + trackId: track.id, + screenId: deviceId, + originX: 0, + originY: 0, + resolutionX: width, + resolutionY: height, + primary: true, + } + ]; + + sdk.acceptSession({ + conversationId: session.conversationId, + sessionType: session.sessionType, + mediaStream: screenStream, + liveScreenMonitoringMetadata + }); + } +}); +``` + +### Manual Accept +If you want to manually control the acceptance of live screen monitoring sessions: + +``` ts +const sdk = new GenesysCloudWebrtcSdk({ + // other config stuff ... + autoAcceptPendingLiveScreenMonitoringRequests: false +}); + +sdk.on('pendingSession', (session) => { + if (session.sessionType === SessionTypes.liveScreenMonitoring) { + // manually accept the pending session + sdk.acceptPendingSession({ + conversationId: session.conversationId, + sessionType: session.sessionType + }); + } +}); + +sdk.on('sessionStarted', async (session) => { + if (sdk.isLiveScreenMonitoringSession(session)) { + const screenStream = await navigator.mediaDevices.getDisplayMedia(); + + const track = screenStream.getTracks()[0]; + const { height, width, deviceId } = track.getSettings(); + const liveScreenMonitoringMetadata = [ + { + trackId: track.id, + screenId: deviceId, + originX: 0, + originY: 0, + resolutionX: width, + resolutionY: height, + primary: true, + } + ]; + + sdk.acceptSession({ + conversationId: session.conversationId, + sessionType: session.sessionType, + mediaStream: screenStream, + liveScreenMonitoringMetadata + }); + } +}); +``` + +### Multiple Screens +Live screen monitoring supports 
monitoring up to **four** screens simultaneously. All screen tracks need to be on the same media stream provided in `sdk.acceptSession(...)`. + +``` ts +sdk.on('sessionStarted', async (session) => { + if (sdk.isLiveScreenMonitoringSession(session)) { + // gather multiple screens + const screenStream1 = await navigator.mediaDevices.getDisplayMedia(); + const screenStream2 = await navigator.mediaDevices.getDisplayMedia(); + + // combine streams + screenStream2.getVideoTracks().forEach(track => screenStream1.addTrack(track)); + + // create metadata for all screens + const liveScreenMonitoringMetadata = createMultiScreenMetadata(); + + sdk.acceptSession({ + conversationId: session.conversationId, + sessionType: session.sessionType, + mediaStream: screenStream1, + liveScreenMonitoringMetadata + }); + } +}); +``` + +## Observer vs Target Roles + +Live screen monitoring sessions involve two distinct roles with different behaviors: + +### Target Role +The **target** is the user whose screen is being monitored. When accepting a session as a target: +- Must provide a `mediaStream` containing screen capture tracks +- The session automatically accepts if the `fromUserId` matches the current user +- Cannot end the monitoring session (only observers can end it) +- Sends their screen content to observers + +``` ts +// Target accepts with screen media +sdk.acceptSession({ + conversationId: session.conversationId, + sessionType: session.sessionType, + mediaStream: screenStream, // Required for targets + liveScreenMonitoringMetadata +}); +``` + +### Observer Role +The **observer** is the user monitoring the target's screen. 
When accepting a session as an observer: +- Must provide `videoElements` array or `videoElement` to display incoming video +- Set `liveMonitoringObserver: true` in the accept parameters +- Receives video streams from the target and displays them in provided video elements +- Can end the monitoring session +- Sends empty video tracks to maintain WebRTC connection + +``` ts +// Observer accepts with video elements +sdk.acceptSession({ + conversationId: session.conversationId, + sessionType: session.sessionType, + liveMonitoringObserver: true, // Identifies this as observer role + videoElements: [videoElement1, videoElement2], // Required for observers +}); +``` + +### Role Determination +The SDK automatically determines the role based on: +1. If `liveMonitoringObserver: true` is set in accept parameters → Observer +2. If `fromUserId` matches current user ID → Target (auto-accepted) +3. Otherwise → Target (manual acceptance required unless specified) + +[GenesysCloudWebrtcSdk]: index.md#genesyscloudwebrtcsdk diff --git a/doc/video.md b/doc/video.md index 3a4ba777..6c46542e 100644 --- a/doc/video.md +++ b/doc/video.md @@ -6,8 +6,8 @@ are done using the public API, but are abstracted being api's in this sdk. > *Note: video conferencing does not support guest users.* ## WebRTC SDK Video Index -This documentation expands upon the [GenesysCloudWebrtcSdk] documention but is specific to -video conferencing. See the full list of the [APIs], [methods], and [events]. +This documentation expands upon the [GenesysCloudWebrtcSdk] documention but is specific to +video conferencing. See the full list of the [APIs], [methods], and [events]. 
* See [sdk.startVideoConference()] for usage * [Example usage](#example-usage) @@ -71,55 +71,8 @@ interface IParticipantUpdate { audioMuted: boolean } ``` -Value of event: -* `update: IParticipantsUpdate` – list of updated participants - -#### `activeVideoParticipantsUpdate` - -This event will happen when the server switches who is visible on the screen. - -> *Note: this user may not be providing video or that video could be muted. It is up to the -> implementing party to show something else such as an avatar or profile picture in such instances.* - -Declaration: -``` ts -session.on('activeVideoParticipantsUpdate', (update: IOnScreenParticipantsUpdate) => {}); - -/* interface declaration */ -interface IOnScreenParticipantsUpdate { - participantsOnScreen: [ - { - userId: string, - } - ] -} -``` Value of event: -* `update: IOnScreenParticipantsUpdate` – the new participant on the screen - - -#### `speakersUpdate` - -This event tells who is making noise in the conference. - -> Caveat: currently, we can only emit on this event when the on-screen user changes, -this will often appear to be out of sync. This will be fixed in the future. - -Declaration: -``` ts -session.on('speakersUpdate', (update: ISpeakersUpdate) => {})): - -/* interface declaration */ -interface ISpeakersUpdate { - speakers: [ - { - userId: string; - } - ] -} -``` -Value of event: -* `update: ISpeakersUpdate` – userIds of the participants speaking +* `update: IParticipantsUpdate` – list of updated participants ## Video Session Methods @@ -134,7 +87,7 @@ be cleaned up and replaced with a track presenting your screen. When ending scre a camera track was cleaned up during `startScreenShare` a new one will be created to replace the screen share track. -Declaration: +Declaration: ``` ts session.startScreenShare(): Promise; ``` @@ -145,13 +98,13 @@ Returns: a promise that completes after the screen share started. #### `stopScreenShare()` -Ends the active outgoing screen share. 
If video media was on before the screen share -started, the media we be required and added to the session. +Ends the active outgoing screen share. If video media was on before the screen share +started, the media will be re-acquired and added to the session. -If there was no active screen share for this session, an error is logged -and returns (no error is thrown). +If there was no active screen share for this session, an error is logged +and returns (no error is thrown). -Declaration: +Declaration: ``` ts session.stopScreenShare(): Promise; ``` @@ -164,19 +117,19 @@ Returns: a promise that completes after the screen share end and the new video #### `pinParticipantVideo()` Locks video to the provided video conference participant. If `participantId` is `null` -or `undefined`, any currently pinned participants will be removed and will switch automatically -when speaking. +or `undefined`, any currently pinned participants will be removed and will switch automatically +when speaking. When a participant's video is pinned it will disable the video switching when other participants talk. > Note: participantIds can be found in the [participantsUpdate](#participantsupdate) event. -Declaration: +Declaration: ``` ts session.pinParticipantVideo(participantId?: string): Promise; ``` -Params: +Params: * `participantId: string` – Optional: if provided, it will pin that participant's video. If the id is not provided, it will clear any old pin and reset back to the active user on screen. 
@@ -188,4 +141,4 @@ Returns: a promise that completes after the Public API call finishes pinning the [methods]: index.md#methods [events]: index.md#events -[session level events]: index.md#session-level-events \ No newline at end of file +[session level events]: index.md#session-level-events diff --git a/package-lock.json b/package-lock.json index e9b3e8cf..680bd5c0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,23 +1,23 @@ { "name": "genesys-cloud-webrtc-sdk", - "version": "12.1.0", + "version": "13.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "genesys-cloud-webrtc-sdk", - "version": "12.1.0", + "version": "13.0.0", "license": "MIT", "dependencies": { "@babel/runtime": "^7.24.6", "@babel/runtime-corejs3": "^7.24.6", - "axios": "^1.13.5", + "axios": "^1.15.0", "browserama": "^3.2.2", "core-js": "^3.37.1", "genesys-cloud-client-logger": "^4.2.17", "genesys-cloud-streaming-client": "^19.5.0", "jwt-decode": "^4.0.0", - "lodash": "^4.17.21", + "lodash": "^4.18.1", "process-fast": "^1.0.0", "rxjs": "^7.8.1", "softphone-vendor-headsets": "^2.5.4", @@ -4407,14 +4407,14 @@ } }, "node_modules/axios": { - "version": "1.13.5", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz", - "integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==", + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.15.0.tgz", + "integrity": "sha512-wWyJDlAatxk30ZJer+GeCWS209sA42X+N5jU2jy6oHTp7ufw8uzUTVFBX9+wTfAlhiJXGS0Bq7X6efruWjuK9Q==", "license": "MIT", "dependencies": { "follow-redirects": "^1.15.11", "form-data": "^4.0.5", - "proxy-from-env": "^1.1.0" + "proxy-from-env": "^2.1.0" } }, "node_modules/axios-mock-adapter": { @@ -9621,9 +9621,9 @@ } }, "node_modules/lodash": { - "version": "4.17.23", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", - "integrity": 
"sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "version": "4.18.1", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.18.1.tgz", + "integrity": "sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==", "license": "MIT" }, "node_modules/lodash.debounce": { @@ -10927,10 +10927,13 @@ } }, "node_modules/proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "license": "MIT" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-2.1.0.tgz", + "integrity": "sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA==", + "license": "MIT", + "engines": { + "node": ">=10" + } }, "node_modules/pseudomap": { "version": "1.0.2", diff --git a/package.json b/package.json index d710a468..41b69688 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "genesys-cloud-webrtc-sdk", - "version": "12.1.0", + "version": "13.0.0", "description": "client for the interfacing with Genesys Cloud WebRTC", "repository": "https://github.com/mypurecloud/genesys-cloud-webrtc-sdk", "license": "MIT", @@ -57,13 +57,13 @@ "dependencies": { "@babel/runtime": "^7.24.6", "@babel/runtime-corejs3": "^7.24.6", - "axios": "^1.13.5", + "axios": "^1.15.0", "browserama": "^3.2.2", "core-js": "^3.37.1", "genesys-cloud-client-logger": "^4.2.17", "genesys-cloud-streaming-client": "^19.5.0", "jwt-decode": "^4.0.0", - "lodash": "^4.17.21", + "lodash": "^4.18.1", "process-fast": "^1.0.0", "rxjs": "^7.8.1", "softphone-vendor-headsets": "^2.5.4", diff --git a/src/sessions/base-session-handler.ts b/src/sessions/base-session-handler.ts index 3583daba..693733ef 100644 --- a/src/sessions/base-session-handler.ts +++ 
b/src/sessions/base-session-handler.ts @@ -124,8 +124,8 @@ export default abstract class BaseSessionHandler { session.on('terminated', this.onSessionTerminated.bind(this, session)); + session._emittedSessionStarteds = {[session.conversationId]: true}; if (!session.reinvite) { - session._emittedSessionStarteds = {[session.conversationId]: true}; this.sdk.emit('sessionStarted', session); } } diff --git a/src/sessions/softphone-session-handler.ts b/src/sessions/softphone-session-handler.ts index 09d2e34e..091dbbc6 100644 --- a/src/sessions/softphone-session-handler.ts +++ b/src/sessions/softphone-session-handler.ts @@ -152,32 +152,29 @@ export class SoftphoneSessionHandler extends BaseSessionHandler { const logInfo = { sessionId: pendingSession?.id, conversationId: pendingSession.conversationId }; if (isPrivAnswerAuto) { - this.log('info', 'received a propose with privAnswerMode=true', logInfo); - } - - // if eagerPersistentConnectionEstablishment==='none' then we want to completely swallow the propose - const shouldIgnorePrivAnswerPropose = isPrivAnswerAuto && eagerConnectionEstablishmentMode === 'none'; - if (shouldIgnorePrivAnswerPropose) { - this.log('info', 'eagerPersistentConnectionEstablishment is "none" so propose with privAnswerMode=true will be ignored', logInfo); - return; - } - - const shouldAutoAnswerPrivately = isPrivAnswerAuto && eagerConnectionEstablishmentMode === 'auto'; + this.log('info', 'received a propose with privAnswerMode=Auto', logInfo); - // we want to emit the pendingSession event in all cases except when eagerConnectionEstablishmentMode === auto and this is a privAnswerMode call - if (!shouldAutoAnswerPrivately) { - await super.handlePropose(pendingSession); + if (eagerConnectionEstablishmentMode === 'none') { + this.log('info', 'eagerPersistentConnectionEstablishment is "none" so propose with privAnswerMode=Auto will be ignored', logInfo); + return; + } else if (eagerConnectionEstablishmentMode === 'auto') { + // we don't need to emit a 
pendingSession event when we auto-answer eager persistent connections + return await this.proceedWithSession(pendingSession); + } else { + await super.handlePropose(pendingSession); + } } else { - return await this.proceedWithSession(pendingSession); - } + // we want to emit the pendingSession event in all other cases + await super.handlePropose(pendingSession); - // calls will can be marked as auto-answer or priv-answer-mode: Auto, but never both - if (pendingSession.autoAnswer) { - if (this.sdk._config.disableAutoAnswer) { - // It is possible that the consuming client has its own logic for auto-answering calls (eg. web-dir). - this.log('info', 'received an autoAnswer tagged propose but the SDK was configured to not auto-answer, deferring to the consuming client.', logInfo); - } else { - await this.proceedWithSession(pendingSession); + // calls can be marked as auto-answer or priv-answer-mode: Auto, but never both + if (pendingSession.autoAnswer) { + if (this.sdk._config.disableAutoAnswer) { + // It is possible that the consuming client has its own logic for auto-answering calls (e.g. web-dir). 
+ this.log('info', 'received an autoAnswer tagged propose but the SDK was configured to not auto-answer, deferring to the consuming client.', logInfo); + } else { + await this.proceedWithSession(pendingSession); + } } } } @@ -904,9 +901,11 @@ export class SoftphoneSessionHandler extends BaseSessionHandler { * connection that will attempt to hold the previous ended call */ const otherSessions = sessions.filter(session => { + const convo = this.conversations[session.conversationId]; return session.sessionType === SessionTypes.softphone && - this.conversations[session.conversationId] && + convo && !this.isConversationHeld(session.conversationId) && + !this.isEndedState(convo.mostRecentCallState) && session !== currentSession; }); @@ -914,6 +913,13 @@ export class SoftphoneSessionHandler extends BaseSessionHandler { otherSessions.forEach(session => { this.setConversationHeld(session, { conversationId: session.conversationId, held: true }) + .catch(err => { + this.log('warn', 'Failed to hold other session during holdOtherSessions', { + sessionId: session.id, + conversationId: session.conversationId, + error: err + }); + }); }); } diff --git a/src/sessions/video-session-handler.ts b/src/sessions/video-session-handler.ts index 73787f8b..ff081c6c 100644 --- a/src/sessions/video-session-handler.ts +++ b/src/sessions/video-session-handler.ts @@ -1,4 +1,4 @@ -import { differenceBy, intersection } from 'lodash'; +import { differenceBy } from 'lodash'; import { Constants } from 'stanza'; import { @@ -8,8 +8,6 @@ import { IExtendedMediaSession, IParticipantUpdate, IParticipantsUpdate, - IOnScreenParticipantsUpdate, - ISpeakersUpdate, IConversationParticipant, IMediaRequestOptions, IStartVideoSessionParams, @@ -26,35 +24,6 @@ import { ConversationUpdate } from '../conversations/conversation-update'; import { JsonRpcMessage } from 'genesys-cloud-streaming-client'; import { jwtDecode } from "jwt-decode"; -/** - * speakers is an array of audio track ids sending audio - */ -export 
interface IMediaChangeEvent { - eventBody: { - id: string, - participants: IMediaChangeEventParticipant[], - speakers: string[] - }; - metadata: { - CorrelationId: string - }; -} - -/** - * sinks represents outgoing paths for this track. For example, if we have a track that looks like { id: "1", mediaType: "audio", sinks: ["a", "b"] } - * then we know that the audio of track "1" can be heard on tracks "a" and "b". Another way to say this is whichever participants are receiving - * tracks "a" or "b" are hearing this participant - */ -export interface IMediaChangeEventParticipant { - communicationId: string; - userId: string; - tracks: { - id: string, - mediaType: 'audio' | 'video', - sinks?: string[] - }[]; -} - export class VideoSessionHandler extends BaseSessionHandler { requestedSessions: { [roomJid: string]: boolean } = {}; requestedMeetingSessions: { [meetingId: string]: boolean } = {}; @@ -165,91 +134,6 @@ export class VideoSessionHandler extends BaseSessionHandler { session.emit('participantsUpdate', update); } - updateParticipantsOnScreen (session: VideoMediaSession, mediaUpdateEvent: IMediaChangeEvent) { - const incomingVideoTrackIds = session.pc.getReceivers() - .filter((receiver) => receiver.track && receiver.track.kind === 'video') - .map((receiver) => receiver.track.id); - - /** - * Firefox messes the trackIds up from what is actually in the sdp offer. 
- * Need to pull it from the offer to accurately match the track.sinks - */ - const incomingVideoMsidTrackId = this.getTrackIdFromSdp(session.pc.remoteDescription.sdp, 'video'); - - if (incomingVideoMsidTrackId) { - incomingVideoTrackIds.push(incomingVideoMsidTrackId); - } - - const onScreenParticipants: Array<{ userId: string }> = []; - mediaUpdateEvent.eventBody.participants.forEach((updateParticipant: IMediaChangeEventParticipant) => { - const matchingVideoTracks = updateParticipant.tracks - .filter((track) => track.mediaType === 'video') - .filter((track) => { - const intersectingTracks = intersection(track.sinks, incomingVideoTrackIds); - return intersectingTracks.length; - }); - - if (matchingVideoTracks.length) { - onScreenParticipants.push({ userId: updateParticipant.userId }); - } - }); - - const lastUpdate: IOnScreenParticipantsUpdate = session._lastOnScreenUpdate || { participants: [] } as any; - - // send out an update if the onScreenParticipants count or items changed - if (lastUpdate.participants.length === onScreenParticipants.length) { - const changed = differenceBy(lastUpdate.participants, onScreenParticipants, 'userId').length || - differenceBy(lastUpdate.participants, onScreenParticipants, 'userId').length; - - if (!changed) { - return; - } - } - - const update: IOnScreenParticipantsUpdate = { - participants: onScreenParticipants - }; - - session._lastOnScreenUpdate = update; - session.emit('activeVideoParticipantsUpdate', update); - } - - updateSpeakers (session: IExtendedMediaSession, mediaUpdateEvent: IMediaChangeEvent) { - const incomingAudioTrackIds = session.pc.getReceivers() - .filter((receiver) => receiver.track && receiver.track.kind === 'audio') - .map((receiver) => receiver.track.id); - - /** - * Firefox messes the trackIds up from what is actually in the sdp offer. 
- * Need to pull it from the offer to accurately match the track.sinks - */ - const incomingAudioMsidTrackId = this.getTrackIdFromSdp(session.pc.remoteDescription.sdp, 'audio'); - - if (incomingAudioMsidTrackId) { - incomingAudioTrackIds.push(incomingAudioMsidTrackId); - } - - const speakingParticipants: Array<{ userId: string }> = []; - mediaUpdateEvent.eventBody.participants.forEach((updateParticipant: IMediaChangeEventParticipant) => { - const matchingAudioTracks = updateParticipant.tracks - .filter((track) => track.mediaType === 'audio') - .filter((track) => { - const intersectingTracks = intersection(track.sinks, incomingAudioTrackIds); - return intersectingTracks.length; - }); - - if (matchingAudioTracks.length) { - speakingParticipants.push({ userId: updateParticipant.userId }); - } - }); - - const update: ISpeakersUpdate = { - speakers: speakingParticipants - }; - - session.emit('speakersUpdate', update); - } - // triggers a propose from the backend async startSession(startParams: IStartVideoSessionParams | IStartVideoMeetingSessionParams): Promise<{ conversationId: string }> { if ("jid" in startParams) { @@ -414,10 +298,6 @@ export class VideoSessionHandler extends BaseSessionHandler { } session._outboundStream = stream; - // If using a JWT, we can't subscribe to the media change events. 
- if (!this.sdk._config.jwt) { - await this.sdk._streamingConnection.notifications.subscribe(`v2.conversations.${session.conversationId}.media`, this.handleMediaChangeEvent.bind(this, session)); - } await this.addMediaToSession(session, stream); @@ -699,11 +579,6 @@ export class VideoSessionHandler extends BaseSessionHandler { } } - handleMediaChangeEvent (session: VideoMediaSession, event: IMediaChangeEvent): void { - this.updateParticipantsOnScreen(session, event); - this.updateSpeakers(session, event); - } - async startScreenShare (session: VideoMediaSession): Promise { session._resurrectVideoOnScreenShareEnd = !session.videoMuted; try { diff --git a/src/types/interfaces.ts b/src/types/interfaces.ts index 845992a8..7071d208 100644 --- a/src/types/interfaces.ts +++ b/src/types/interfaces.ts @@ -13,7 +13,6 @@ export { ISessionInfo, IPendingSession }; declare module 'genesys-cloud-streaming-client' { export interface SessionEvents { participantsUpdate: IParticipantsUpdate; - activeVideoParticipantsUpdate: IOnScreenParticipantsUpdate; speakersUpdate: ISpeakersUpdate; incomingMedia: void; pinnedParticipant: { participantId: string | null }; @@ -116,9 +115,7 @@ export interface ISdkFullConfig { /** * If the station is configured for persistent connection and an active connection is required to go on queue, * a "fake" call will be used to establish the persistent connection as part of the process to go on queue. - * This setting is additional configuration for how the webrtc sdk handles this circumstance but - * only comes into play if `disableAutoAnswer` is `true`. If `disableAutoAnswer` is `false`, `eagerPersistentConnectionEstablishment` - * will always be `'auto'`. + * This setting is additional configuration for how the webrtc sdk handles this circumstance. 
* * Options: * ``` ts @@ -930,7 +927,6 @@ export interface VideoMediaSession extends IExtendedMediaSession { pinParticipantVideo?: (participantId: string) => Promise; _resurrectVideoOnScreenShareEnd?: boolean; _lastParticipantsUpdate?: IParticipantsUpdate; - _lastOnScreenUpdate?: IOnScreenParticipantsUpdate; } export interface ScreenRecordingMediaSession extends IExtendedMediaSession { @@ -1004,15 +1000,6 @@ export interface IParticipantUpdate { videoMuted: boolean; audioMuted: boolean; } - -export interface IOnScreenParticipantsUpdate { - participants: Array< - { - userId: string; - } - >; -} - export interface ISpeakersUpdate { speakers: Array< { diff --git a/test/unit/sessions/base-session-handler.test.ts b/test/unit/sessions/base-session-handler.test.ts index a6e8cffa..39f0b82a 100644 --- a/test/unit/sessions/base-session-handler.test.ts +++ b/test/unit/sessions/base-session-handler.test.ts @@ -583,6 +583,24 @@ describe('handleSessionInit', () => { expect(eventSpy).toHaveBeenCalled(); }); + it('should initialize _emittedSessionStarteds but not emit sessionStarted for reinvite sessions', async () => { + const session: any = new MockSession(); + session.conversationId = 'convo123'; + session.reinvite = true; + + const pendingSession = createPendingSession(); + jest.spyOn(mockSessionManager, 'getPendingSession').mockReturnValue(pendingSession); + + const eventSpy = jest.fn(); + mockSdk.on('sessionStarted', eventSpy); + + await handler.handleSessionInit(session); + + expect(mockSdk._streamingConnection.webrtcSessions.rtcSessionAccepted).toHaveBeenCalled(); + expect(session._emittedSessionStarteds).toEqual({ 'convo123': true }); + expect(eventSpy).not.toHaveBeenCalled(); + }); + it('should set up terminated listener', async () => { const session: any = new MockSession(); const pendingSession = createPendingSession(); diff --git a/test/unit/sessions/softphone-session-handler.test.ts b/test/unit/sessions/softphone-session-handler.test.ts index 466f79ba..cadf05b7 
100644 --- a/test/unit/sessions/softphone-session-handler.test.ts +++ b/test/unit/sessions/softphone-session-handler.test.ts @@ -621,7 +621,7 @@ describe('acceptSession()', () => { }); it('should hold other sessions if LA>1', () => { - const setHoldSpy = jest.spyOn(handler, 'setConversationHeld').mockImplementation(); + const setHoldSpy = jest.spyOn(handler, 'setConversationHeld').mockResolvedValue(undefined); const getActiveSessionsSpy = jest.spyOn(mockSessionManager, 'getAllActiveSessions').mockReturnValue(sessionsArray); const mockAudioElement = {} as HTMLAudioElement; jest.spyOn(BaseSessionHandler.prototype, 'acceptSession'); @@ -650,7 +650,7 @@ describe('acceptSession()', () => { }); it('should NOT hold other sessions if LA>1 and we are establishing an eager persistent connection', () => { - const setHoldSpy = jest.spyOn(handler, 'setConversationHeld').mockImplementation(); + const setHoldSpy = jest.spyOn(handler, 'setConversationHeld').mockResolvedValue(undefined); const getActiveSessionsSpy = jest.spyOn(mockSessionManager, 'getAllActiveSessions').mockReturnValue(sessionsArray); const mockAudioElement = {} as HTMLAudioElement; jest.spyOn(BaseSessionHandler.prototype, 'acceptSession'); @@ -2451,6 +2451,105 @@ describe('setConversationHeld()', () => { }); }); +describe('holdOtherSessions()', () => { + let currentSession: IExtendedMediaSession; + let otherSession: IExtendedMediaSession; + + beforeEach(() => { + currentSession = new MockSession(SessionTypes.softphone) as any; + otherSession = new MockSession(SessionTypes.softphone) as any; + otherSession.sessionType = SessionTypes.softphone; + }); + + it('should call setConversationHeld for other active softphone sessions', () => { + jest.spyOn(mockSessionManager, 'getAllActiveSessions').mockReturnValue([currentSession, otherSession]); + const setHoldSpy = jest.spyOn(handler, 'setConversationHeld').mockResolvedValue(undefined); + + handler.conversations = { + [currentSession.conversationId]: { 
mostRecentCallState: { state: CommunicationStates.connected, held: false } } as any, + [otherSession.conversationId]: { mostRecentCallState: { state: CommunicationStates.connected, held: false } } as any, + }; + + handler.holdOtherSessions(currentSession); + + expect(setHoldSpy).toHaveBeenCalledWith(otherSession, { conversationId: otherSession.conversationId, held: true }); + expect(setHoldSpy).not.toHaveBeenCalledWith(currentSession, expect.anything()); + }); + + it('should not attempt to hold sessions with ended conversation states', () => { + const endedSession = new MockSession(SessionTypes.softphone) as any; + endedSession.sessionType = SessionTypes.softphone; + + jest.spyOn(mockSessionManager, 'getAllActiveSessions').mockReturnValue([currentSession, otherSession, endedSession]); + const setHoldSpy = jest.spyOn(handler, 'setConversationHeld').mockResolvedValue(undefined); + + handler.conversations = { + [currentSession.conversationId]: { mostRecentCallState: { state: CommunicationStates.connected, held: false } } as any, + [otherSession.conversationId]: { mostRecentCallState: { state: CommunicationStates.connected, held: false } } as any, + [endedSession.conversationId]: { mostRecentCallState: { state: CommunicationStates.disconnected, held: false } } as any, + }; + + handler.holdOtherSessions(currentSession); + + expect(setHoldSpy).toHaveBeenCalledWith(otherSession, { conversationId: otherSession.conversationId, held: true }); + expect(setHoldSpy).not.toHaveBeenCalledWith(endedSession, expect.anything()); + }); + + it('should not attempt to hold sessions with terminated conversation states', () => { + jest.spyOn(mockSessionManager, 'getAllActiveSessions').mockReturnValue([currentSession, otherSession]); + const setHoldSpy = jest.spyOn(handler, 'setConversationHeld').mockResolvedValue(undefined); + + handler.conversations = { + [currentSession.conversationId]: { mostRecentCallState: { state: CommunicationStates.connected, held: false } } as any, + 
[otherSession.conversationId]: { mostRecentCallState: { state: CommunicationStates.terminated, held: false } } as any, + }; + + handler.holdOtherSessions(currentSession); + + expect(setHoldSpy).not.toHaveBeenCalled(); + }); + + it('should log a warning and not throw if setConversationHeld rejects', async () => { + jest.spyOn(mockSessionManager, 'getAllActiveSessions').mockReturnValue([currentSession, otherSession]); + const error = new Error('Bad Request'); + jest.spyOn(handler, 'setConversationHeld').mockRejectedValue(error); + + handler.conversations = { + [currentSession.conversationId]: { mostRecentCallState: { state: CommunicationStates.connected, held: false } } as any, + [otherSession.conversationId]: { mostRecentCallState: { state: CommunicationStates.connected, held: false } } as any, + }; + + // should not throw + handler.holdOtherSessions(currentSession); + + // flush the microtask queue so the .catch() handler runs + await flushPromises(); + + expect(mockSdk.logger.warn).toHaveBeenCalledWith( + expect.stringContaining('Failed to hold other session during holdOtherSessions'), + expect.objectContaining({ + sessionId: otherSession.id, + conversationId: otherSession.conversationId, + }), + undefined + ); + }); + + it('should not attempt to hold sessions that are already held', () => { + jest.spyOn(mockSessionManager, 'getAllActiveSessions').mockReturnValue([currentSession, otherSession]); + const setHoldSpy = jest.spyOn(handler, 'setConversationHeld').mockResolvedValue(undefined); + + handler.conversations = { + [currentSession.conversationId]: { mostRecentCallState: { state: CommunicationStates.connected, held: false } } as any, + [otherSession.conversationId]: { mostRecentCallState: { state: CommunicationStates.connected, held: true } } as any, + }; + + handler.holdOtherSessions(currentSession); + + expect(setHoldSpy).not.toHaveBeenCalled(); + }); +}); + describe('patchPhoneCall()', () => { it('should patch a phone call', async () => { const 
conversationId = 'talking-buddies'; diff --git a/test/unit/sessions/video-session-handler.test.ts b/test/unit/sessions/video-session-handler.test.ts index ff519554..11d85ed2 100644 --- a/test/unit/sessions/video-session-handler.test.ts +++ b/test/unit/sessions/video-session-handler.test.ts @@ -8,8 +8,8 @@ import { SessionTypes, CommunicationStates, SdkErrorTypes } from '../../../src/t import * as mediaUtils from '../../../src/media/media-utils'; import * as utils from '../../../src/utils'; import { IParticipantsUpdate, IExtendedMediaSession, IConversationParticipant, VideoMediaSession, MemberStatusMessage } from '../../../src/types/interfaces'; -import VideoSessionHandler, { IMediaChangeEvent } from '../../../src/sessions/video-session-handler'; -import { ConversationUpdate, GenesysDataChannelMessageParams, ICallStateFromParticipant, IConversationParticipantFromEvent, IParticipantVideo, IPersonDetails } from '../../../src/'; +import VideoSessionHandler from '../../../src/sessions/video-session-handler'; +import { ConversationUpdate, IConversationParticipantFromEvent, IParticipantVideo, IPersonDetails } from '../../../src/'; import { JsonRpcMessage } from 'genesys-cloud-streaming-client'; import { jwtDecode } from 'jwt-decode'; @@ -314,154 +314,6 @@ describe('handleConversationUpdate', () => { }); }); -describe('mediaUpdateEvent', () => { - let mediaEvent: IMediaChangeEvent; - let session: any; - const incomingVideoId = '8a28ff7f-bfe7-4490-b921-0df0dd44bc23'; - const incomingAudioId = '9c10d6bd-6dd8-423c-b317-b64f831f8d48'; - const sendingUser = '2058ab75-7514-4092-b39e-ad2dcb8079a9'; - const otherUser = 'dc432f16-031f-4ffc-a89a-3c291029647f'; - - beforeEach(() => { - mediaEvent = { - metadata: { CorrelationId: '123' }, - eventBody: { - id: 'c89f515b-69fb-4730-93f8-dcff24997fec', - participants: [ - { - communicationId: 'c1b10be3-51a6-4f3c-b4fc-75834a8115c3', - userId: sendingUser, - tracks: [ - { - id: 'fc61add6-8934-4e97-ac17-9f715ab16449', - mediaType: 'audio', - 
sinks: [incomingAudioId] - }, - { - id: '30f1b98c-b272-4df1-bb90-4a91a8db85ec', - mediaType: 'video', - sinks: [incomingVideoId] - } - ] - }, - { - communicationId: 'ca13afab-ac4a-4346-87ac-bb5fa4179df9', - userId: 'dc432f16-031f-4ffc-a89a-3c291029647f', - tracks: [ - { - id: 'a2b940a0-d58a-420d-a923-6923a7402b62', - mediaType: 'audio', - sinks: [] - }, - { - id: '46b8b64f-4743-4f2c-b91f-00bbe680a37f', - mediaType: 'video', - sinks: ['52b2fd95-6c13-4e4b-aea9-99eaf7f4661b'] - } - ] - } - ], - speakers: ['fc61add6-8934-4e97-ac17-9f715ab16449'] - } - }; - - session = { - pc: { - remoteDescription: { - sdp: 'v=0\notherstuff' - }, - getReceivers: jest.fn().mockReturnValue([ - { track: { kind: 'video', id: incomingVideoId } }, - { track: { kind: 'audio', id: incomingAudioId } } - ]), - }, - emit: jest.fn() - }; - }); - - describe('updateParticipantsOnScreen', () => { - it('should only emit if the onScreenParticipant changes', () => { - handler.updateParticipantsOnScreen(session, mediaEvent); - - const expected = { - participants: [ - { userId: sendingUser } - ] - }; - - expect(session.emit).toHaveBeenCalledWith('activeVideoParticipantsUpdate', expected); - session.emit.mockReset(); - - handler.updateParticipantsOnScreen(session, mediaEvent); - expect(session.emit).not.toHaveBeenCalled(); - }); - - it('should be able to match using the trackId from the sdp (for Firefox)', () => { - session.pc.getReceivers.mockReturnValue([ - { track: { kind: 'video', id: '{some-firefox-track-id}' } } - ]); - jest.spyOn(handler, 'getTrackIdFromSdp').mockReturnValue(incomingVideoId); - - handler.updateParticipantsOnScreen(session, mediaEvent); - - const expected = { - participants: [ - { userId: sendingUser } - ] - }; - - expect(session.emit).toHaveBeenCalledWith('activeVideoParticipantsUpdate', expected); - session.emit.mockReset(); - }); - - it('should emit if participantCount has not changed but on screen has', () => { - handler.updateParticipantsOnScreen(session, mediaEvent); - - const 
expected = { - participants: [ - { userId: sendingUser } - ] - }; - - expect(session.emit).toHaveBeenCalledWith('activeVideoParticipantsUpdate', expected); - session.emit.mockReset(); - - const expected2 = { - participants: [ - { userId: otherUser } - ] - }; - - const videoInfo = mediaEvent.eventBody.participants[0].tracks[1]; - videoInfo.sinks = []; - - const otherVideo = mediaEvent.eventBody.participants[1].tracks[1]; - otherVideo.sinks = [incomingVideoId]; - - handler.updateParticipantsOnScreen(session, mediaEvent); - expect(session.emit).toHaveBeenCalledWith('activeVideoParticipantsUpdate', expected2); - expect(session.emit).toHaveBeenCalled(); - }); - }); - - describe('updateSpeakers', () => { - it('should send out speaker update', () => { - handler.updateSpeakers(session, mediaEvent); - expect(session.emit).toHaveBeenCalledWith('speakersUpdate', { speakers: [{ userId: sendingUser }] }); - }); - - it('should be able to match using the trackId from the sdp (for Firefox) and send the speaker update', () => { - session.pc.getReceivers.mockReturnValue([ - { track: { kind: 'audio', id: '{some-firefox-track-id}' } } - ]); - jest.spyOn(handler, 'getTrackIdFromSdp').mockReturnValue(incomingAudioId); - - handler.updateSpeakers(session, mediaEvent); - expect(session.emit).toHaveBeenCalledWith('speakersUpdate', { speakers: [{ userId: sendingUser }] }); - }); - }); -}); - describe('startSession', () => { it('should post to video conference api', async () => { const roomJid = '123@conference.com'; @@ -907,14 +759,6 @@ describe('acceptSession', () => { expect(startMediaSpy).toHaveBeenCalledWith({ video: true, audio: false, session }); }); - it('should subscribe to media change events', async () => { - const audio = document.createElement('audio'); - const video = document.createElement('video'); - await handler.acceptSession(session, { conversationId: session.conversationId, audioElement: audio, videoElement: video }); - - 
expect(mockSdk._streamingConnection.notifications.subscribe).toHaveBeenCalled(); - }); - it('should attach tracks later if not available', async () => { const audio = document.createElement('audio'); const video = document.createElement('video'); @@ -1446,18 +1290,6 @@ describe('setAudioMute', () => { }); }); -describe('handleMediaChangeEvent', () => { - it('should update on-screen and speakers', () => { - jest.spyOn(handler, 'updateParticipantsOnScreen').mockReturnValue(); - jest.spyOn(handler, 'updateSpeakers').mockReturnValue(); - - handler.handleMediaChangeEvent(new MockSession() as any, {} as any); - - expect(handler.updateParticipantsOnScreen).toHaveBeenCalled(); - expect(handler.updateSpeakers).toHaveBeenCalled(); - }); -}); - describe('getSendersByTrackType', () => { it('should only return senders with a track and matching the track type', () => { const track1 = { track: { kind: 'audio' } };