diff --git a/CHANGELOG.md b/CHANGELOG.md index 1e1bf8425..d7557ecc7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,47 @@ The Twilio Programmable Video SDKs use [Semantic Versioning](http://www.semver.o **Version 1.x reached End of Life on September 8th, 2021.** See the changelog entry [here](https://www.twilio.com/changelog/end-of-life-complete-for-unsupported-versions-of-the-programmable-video-sdk). Support for the 1.x version ended on December 4th, 2020. +2.19.0 (January 31, 2022) +========================= + +New Features +------------ + +- This release introduces a new feature **Adaptive Simulcast**. This opt-in feature can be enabled by setting `preferredVideoCodecs="auto"` in ConnectOptions. When joining a group room with this feature enabled, the SDK will use VP8 simulcast, and will enable/disable simulcast layers dynamically, thus improving bandwidth and CPU usage for the publishing client. It works best when used along with `Client Track Switch Off Control` and `Video Content Preferences`. These two flags allow the SFU to determine which simulcast layers are needed, thus allowing it to disable the layers not needed on publisher side. This feature cannot be used alongside `maxVideoBitrate`. + +If your application is currently using VP8 simulcast we recommend that you switch to this option. + +Example: + +```ts +const { connect } = require('twilio-video'); + +const room = await connect(token, { + preferredVideoCodecs: 'auto', + bandwidthProfile: { + video: { + contentPreferencesMode: 'auto', + clientTrackSwitchOffControl: 'auto' + } + } +}); +``` + +Known Limitations +----------------- + +- Specifying `preferredVideoCodecs="auto"` will revert to unicast in the following cases: + - The publisher is using Firefox. + - The publisher has preferred the H264 codec. + - The Room is configured to support only the H264 codec. 
+ - Peer-to-Peer Rooms +- When the participant is being recorded, the SFU will not disable any simulcast layers of the participant's VideoTrack. + +Bug Fixes +--------- + +- Fixed a bug where `clientTrackSwitchOffControl` and `contentPreferencesMode` sometimes did not work as expected during network glitches. (VIDEO-7654) + 2.18.3 (January 4, 2022) ======================== diff --git a/README.md b/README.md index 5779e0d32..174f716e9 100644 --- a/README.md +++ b/README.md @@ -74,7 +74,7 @@ Releases of twilio-video.js are hosted on a CDN, and you can include these directly in your web app using a <script> tag. ```html - + ``` diff --git a/lib/connect.js b/lib/connect.js index 5ee3dadf3..99b9622ef 100644 --- a/lib/connect.js +++ b/lib/connect.js @@ -221,6 +221,18 @@ function connect(token, options) { // Additionally, the options that are no longer needed will be removed. deprecateOptions(options, log, deprecatedConnectOptionsProps); + const adaptiveSimulcast = options.preferredVideoCodecs === 'auto'; + if (adaptiveSimulcast) { + // NOTE(mpatwardhan): enable adaptiveSimulcast. 
+ options.preferredVideoCodecs = [{ codec: 'VP8', simulcast: true, adaptiveSimulcast: true }]; + } + + if (options.maxVideoBitrate && adaptiveSimulcast) { + log.error('ConnectOptions "maxVideoBitrate" is not compatible with "preferredVideoCodecs=auto"'); + return CancelablePromise.reject(E.ILLEGAL_INVOKE('connect', + 'ConnectOptions "maxVideoBitrate" is not compatible with "preferredVideoCodecs=auto"')); + } + options = Object.assign({ automaticSubscription: true, createLocalTracks, @@ -359,7 +371,7 @@ function connect(token, options) { const encodingParameters = new EncodingParametersImpl({ maxAudioBitrate: options.maxAudioBitrate, maxVideoBitrate: options.maxVideoBitrate - }); + }, adaptiveSimulcast); const preferredCodecs = { audio: options.preferredAudioCodecs.map(normalizeCodecSettings), @@ -455,6 +467,7 @@ function connect(token, options) { * effect for fixed bitrate codecs; Based on our tests, Chrome, Firefox and Safari * all seem to support an average bitrate range of 20000 bps (20 kbps) to * 8000000 bps (8 mbps) for a 720p VideoTrack + * This parameter must not be set when when preferredVideoCodecs is set to `auto`. * @property {?string} [name=null] - Set to connect to a {@link Room} by name * @property {boolean|NetworkQualityConfiguration} [networkQuality=false] - Whether to enable the Network * Quality API or not. This only takes effect in Group Rooms. Pass a {@link NetworkQualityConfiguration} @@ -470,8 +483,11 @@ function connect(token, options) { * for the list of supported signaling regions. * @property {Array} [preferredAudioCodecs=[]] - Preferred audio codecs; * An empty array preserves the current audio codec preference order. 
- * @property {Array} [preferredVideoCodecs=[]] - - * Preferred video codecs; An empty array preserves the current video codec + * @property {Array|VideoEncodingMode} [preferredVideoCodecs=[]] - + * Preferred video codecs; when set to 'VideoEncodingMode.Auto', SDK manages the video codec, + * by preferring VP8 simulcast in group rooms. It also enables adaptive simulcast, which allows SDK + * to turn off simulcast layers that are not needed for efficient bandwidth and CPU usage. + * An empty array preserves the current video codec. * preference order. If you want to set a preferred video codec on a Group Room, * you will need to create the Room using the REST API and set the * VideoCodecs property. @@ -620,6 +636,16 @@ const AudioCodec = { PCMU: 'PCMU' }; +/** + * Names of the supported VideoEncodingMode. + * @enum {string} + */ +// eslint-disable-next-line +const VideoEncodingMode = { + Auto: 'auto', +}; + + /** * Names of the supported video codecs. * @enum {string} diff --git a/lib/encodingparameters.js b/lib/encodingparameters.js index 54a20e4f3..a5e24472a 100644 --- a/lib/encodingparameters.js +++ b/lib/encodingparameters.js @@ -15,8 +15,9 @@ class EncodingParametersImpl extends EventEmitter { /** * Construct an {@link EncodingParametersImpl}. * @param {EncodingParamters} encodingParameters - Initial {@link EncodingParameters} + * @param {Boolean} adaptiveSimulcast - true if adaptive simulcast was enabled by connect options. 
*/ - constructor(encodingParameters) { + constructor(encodingParameters, adaptiveSimulcast) { super(); encodingParameters = Object.assign({ @@ -32,6 +33,9 @@ class EncodingParametersImpl extends EventEmitter { maxVideoBitrate: { value: encodingParameters.maxVideoBitrate, writable: true + }, + adaptiveSimulcast: { + value: adaptiveSimulcast } }); } diff --git a/lib/localparticipant.js b/lib/localparticipant.js index d112d44ed..1298d1e31 100644 --- a/lib/localparticipant.js +++ b/lib/localparticipant.js @@ -534,6 +534,11 @@ class LocalParticipant extends Participant { } if (encodingParameters) { + if (this._signaling.getParameters().adaptiveSimulcast && encodingParameters.maxVideoBitrate) { + // eslint-disable-next-line new-cap + throw E.INVALID_TYPE('encodingParameters', 'encodingParameters.maxVideoBitrate is not compatible with "preferredVideoCodecs=auto"'); + } + ['maxAudioBitrate', 'maxVideoBitrate'].forEach(prop => { if (typeof encodingParameters[prop] !== 'undefined' && typeof encodingParameters[prop] !== 'number' diff --git a/lib/media/track/sender.js b/lib/media/track/sender.js index edfb3f5c6..c19be6663 100644 --- a/lib/media/track/sender.js +++ b/lib/media/track/sender.js @@ -5,6 +5,7 @@ const MediaTrackTransceiver = require('./transceiver'); /** * A {@link MediaTrackSender} represents one or more local RTCRtpSenders. 
* @extends MediaTrackTransceiver + * @emits MediaTrackSender#replaced */ class MediaTrackSender extends MediaTrackTransceiver { /** @@ -20,6 +21,9 @@ class MediaTrackSender extends MediaTrackTransceiver { _senders: { value: new Set() }, + _senderToPublisherHintCallbacks: { + value: new Map() + }, isPublishing: { get() { return !!this._clones.size; @@ -58,7 +62,7 @@ class MediaTrackSender extends MediaTrackTransceiver { return Promise.all(clones.map(clone => { return clone.setMediaStreamTrack(mediaStreamTrack.clone()); }).concat(senders.map(sender => { - return sender.replaceTrack(mediaStreamTrack); + return this._replaceTrack(sender, mediaStreamTrack); }))).finally(() => { this._track = mediaStreamTrack; }); @@ -67,10 +71,14 @@ class MediaTrackSender extends MediaTrackTransceiver { /** * Add an RTCRtpSender. * @param {RTCRtpSender} sender + * @param {?()=>Promise} publisherHintCallback * @returns {this} */ - addSender(sender) { + addSender(sender, publisherHintCallback) { this._senders.add(sender); + if (publisherHintCallback) { + this._senderToPublisherHintCallbacks.set(sender, publisherHintCallback); + } return this; } @@ -81,8 +89,34 @@ class MediaTrackSender extends MediaTrackTransceiver { */ removeSender(sender) { this._senders.delete(sender); + this._senderToPublisherHintCallbacks.delete(sender); return this; } + + /** + * Applies given encodings, or resets encodings if none specified. + * @param {Array<{enabled: boolean, layer_index: number}>|null} encodings + * @returns {Promise} + */ + setPublisherHint(encodings) { + // Note(mpatwardhan): since publisher hint applies only to group rooms we only look at 1st call callback. + const [publisherHintCallback] = Array.from(this._senderToPublisherHintCallbacks.values()); + return publisherHintCallback ? 
publisherHintCallback(encodings) : Promise.resolve('COULD_NOT_APPLY_HINT'); + } + + _replaceTrack(sender, mediaStreamTrack) { + return sender.replaceTrack(mediaStreamTrack).then(replaceTrackResult => { + // clear any publisherHints and apply default encodings. + this.setPublisherHint(null).catch(() => {}); + this.emit('replaced'); + return replaceTrackResult; + }); + } } +/** + * The {@link MediaTrackSender} replaced the underlying mediaStreamTrack + * @event MediaTrackSender#replaced + */ + module.exports = MediaTrackSender; diff --git a/lib/signaling/v2/cancelableroomsignalingpromise.js b/lib/signaling/v2/cancelableroomsignalingpromise.js index 0f5de5fca..171e9f592 100644 --- a/lib/signaling/v2/cancelableroomsignalingpromise.js +++ b/lib/signaling/v2/cancelableroomsignalingpromise.js @@ -19,6 +19,7 @@ function createCancelableRoomSignalingPromise(token, wsServer, localParticipant, Transport: DefaultTransport }, options); + const adaptiveSimulcast = preferredCodecs.video[0] && preferredCodecs.video[0].adaptiveSimulcast === true; const { PeerConnectionManager, RoomV2, Transport, iceServers, log } = options; const peerConnectionManager = new PeerConnectionManager(encodingParameters, preferredCodecs, options); const trackSenders = flatMap(localParticipant.tracks, trackV2 => [trackV2.trackTransceiver]); @@ -79,6 +80,7 @@ function createCancelableRoomSignalingPromise(token, wsServer, localParticipant, (options.clientTrackSwitchOffControl !== 'disabled' || options.contentPreferencesMode !== 'disabled'); const transportOptions = Object.assign({ + adaptiveSimulcast, automaticSubscription, dominantSpeaker, environment, diff --git a/lib/signaling/v2/localparticipant.js b/lib/signaling/v2/localparticipant.js index c7ffa9dc7..6f11150fb 100644 --- a/lib/signaling/v2/localparticipant.js +++ b/lib/signaling/v2/localparticipant.js @@ -2,7 +2,9 @@ const LocalParticipantSignaling = require('../localparticipant'); const LocalTrackPublicationV2 = require('./localtrackpublication'); 
-const { isDeepEqual } = require('../../util'); +const { DEFAULT_LOG_LEVEL } = require('../../util/constants'); +const Log = require('../../util/log'); +const { buildLogLevels, isDeepEqual } = require('../../util'); /** * @extends ParticipantSignaling @@ -20,10 +22,14 @@ class LocalParticipantV2 extends LocalParticipantSignaling { */ constructor(encodingParameters, networkQualityConfiguration, options) { options = Object.assign({ + logLevel: DEFAULT_LOG_LEVEL, LocalTrackPublicationV2 }, options); super(); + + const logLevels = buildLogLevels(options.logLevel); + Object.defineProperties(this, { _bandwidthProfile: { value: null, @@ -42,6 +48,11 @@ class LocalParticipantV2 extends LocalParticipantSignaling { _LocalTrackPublicationV2: { value: options.LocalTrackPublicationV2 }, + _log: { + value: options.log + ? options.log.createLog('default', this) + : new Log('default', this, logLevels, options.loggerName) + }, _publishedRevision: { writable: true, value: 0 @@ -85,6 +96,10 @@ class LocalParticipantV2 extends LocalParticipantSignaling { }); } + toString() { + return `[LocalParticipantSignaling: ${this.sid}]`; + } + /** * Set the signalingRegion. * @param {string} signalingRegion. @@ -110,6 +125,14 @@ class LocalParticipantV2 extends LocalParticipantSignaling { } } + /** + * returns current {@link EncodingParametersImpl}. + * @returns {EncodingParametersImpl} + */ + getParameters() { + return this._encodingParameters; + } + /** * Set the {@link EncodingParameters}. * @param {?EncodingParameters} encodingParameters @@ -249,8 +272,25 @@ class LocalParticipantV2 extends LocalParticipantSignaling { setNetworkQualityConfiguration(networkQualityConfiguration) { this.networkQualityConfiguration.update(networkQualityConfiguration); } + + /** + * updates encodings for simulcast layers. + * @param {Track.SID} trackSid + * @param {Array<{enabled: boolean, layer_index: number}>} encodings + * @returns {Promise} string indicating result of the operation. 
can be one of + * "OK", "INVALID_HINT", "COULD_NOT_APPLY_HINT", "UNKNOWN_TRACK" + */ + setPublisherHint(trackSid, encodings) { + const trackSignaling = Array.from(this.tracks.values()).find(trackPub => trackPub.sid === trackSid); + if (!trackSignaling) { + this._log.warn(`track:${trackSid} not found`); + return Promise.resolve('UNKNOWN_TRACK'); + } + return trackSignaling.trackTransceiver.setPublisherHint(encodings); + } } + /** * @interface Published * @property {number} revision diff --git a/lib/signaling/v2/peerconnection.js b/lib/signaling/v2/peerconnection.js index 5f170ee8f..909658e10 100644 --- a/lib/signaling/v2/peerconnection.js +++ b/lib/signaling/v2/peerconnection.js @@ -10,7 +10,8 @@ const { getStats: getStatistics } = require('@twilio/webrtc'); -const { guessBrowser } = require('@twilio/webrtc/lib/util'); +const util = require('@twilio/webrtc/lib/util'); +const { guessBrowser } = util; const { getSdpFormat } = require('@twilio/webrtc/lib/util/sdp'); const { @@ -26,7 +27,7 @@ const { enableDtxForOpus, getMediaSections, removeSSRCAttributes, - revertSimulcastForNonVP8MediaSections, + revertSimulcast, setBitrateParameters, setCodecPreferences, setSimulcast, @@ -46,7 +47,8 @@ const { buildLogLevels, getPlatform, isChromeScreenShareTrack, - oncePerTick + oncePerTick, + defer } = require('../../util'); const IceBox = require('./icebox'); @@ -124,7 +126,6 @@ class PeerConnectionV2 extends StateMachine { */ constructor(id, encodingParameters, preferredCodecs, options) { super('open', states); - options = Object.assign({ enableDscp: false, dummyAudioMediaStreamTrack: null, @@ -133,7 +134,7 @@ class PeerConnectionV2 extends StateMachine { isRTCRtpSenderParamsSupported, logLevel: DEFAULT_LOG_LEVEL, offerOptions: {}, - revertSimulcastForNonVP8MediaSections, + revertSimulcast, sessionTimeout: DEFAULT_SESSION_TIMEOUT_SEC * 1000, setBitrateParameters, setCodecPreferences, @@ -280,6 +281,9 @@ class PeerConnectionV2 extends StateMachine { _rtpSenders: { value: new 
Map() }, + _rtpNewSenders: { + value: new Set() + }, _iceConnectionMonitor: { value: new options.IceConnectionMonitor(peerConnection) }, @@ -322,10 +326,6 @@ class PeerConnectionV2 extends StateMachine { value: preferredCodecs.audio.every(({ codec }) => codec !== 'opus') || preferredCodecs.audio.some(({ codec, dtx }) => codec === 'opus' && dtx) }, - _shouldApplySimulcast: { - value: (isChrome || isSafari) && preferredCodecs.video.some( - codecSettings => codecSettings.codec.toLowerCase() === 'vp8' && codecSettings.simulcast) - }, _queuedDescription: { writable: true, value: null @@ -367,8 +367,8 @@ class PeerConnectionV2 extends StateMachine { _setSimulcast: { value: options.setSimulcast }, - _revertSimulcastForNonVP8MediaSections: { - value: options.revertSimulcastForNonVP8MediaSections + _revertSimulcast: { + value: options.revertSimulcast }, _RTCIceCandidate: { value: options.RTCIceCandidate @@ -395,6 +395,9 @@ class PeerConnectionV2 extends StateMachine { writable: true, value: null }, + _mediaTrackSenderToPublisherHints: { + value: new Map() + }, id: { enumerable: true, value: id @@ -428,6 +431,29 @@ class PeerConnectionV2 extends StateMachine { return `[PeerConnectionV2 #${this._instanceId}: ${this.id}]`; } + setEffectiveAdaptiveSimulcast(effectiveAdaptiveSimulcast) { + this._log.debug('Setting setEffectiveAdaptiveSimulcast: ', effectiveAdaptiveSimulcast); + // clear adaptive simulcast from codec preferences if it was set. + this._preferredVideoCodecs.forEach(cs => { + if ('adaptiveSimulcast' in cs) { + cs.adaptiveSimulcast = effectiveAdaptiveSimulcast; + } + }); + } + + get _shouldApplySimulcast() { + if (!isChrome && !isSafari) { + return false; + } + + // adaptiveSimulcast is set to false after connected message is received if other party does not support it. 
+ const simulcast = this._preferredVideoCodecs.some(cs => { + return cs.codec.toLowerCase() === 'vp8' && cs.simulcast && cs.adaptiveSimulcast !== false; + }); + + return simulcast; + } + /** * The {@link PeerConnectionV2}'s underlying RTCPeerConnection's RTCPeerConnectionState * if supported by the browser, its RTCIceConnectionState otherwise. @@ -464,33 +490,90 @@ class PeerConnectionV2 extends StateMachine { } /** - * Updates scaleResolutionDownBy for encoding layers. - * @param {number} width - * @param {number} height + * Whether adaptive simulcast is enabled. + * @returns {boolean} + */ + get _isAdaptiveSimulcastEnabled() { + const adaptiveSimulcastEntry = this._preferredVideoCodecs.find(cs => 'adaptiveSimulcast' in cs); + return adaptiveSimulcastEntry && adaptiveSimulcastEntry.adaptiveSimulcast === true; + } + + /** + * @param {MediaStreamTrack} track * @param {Array} encodings + * @param {boolean} trackReplaced + * @returns {boolean} true if encodings were updated. */ - _updateEncodings(width, height, encodings) { - // NOTE(mpatwardhan): All the simulcast encodings in Safari have - // the same resolution. So, here we make sure that the lower layers have - // lower resolution, as seen in Chrome. 
- const pixelsToMaxActiveLayers = [ - { pixels: 960 * 540, maxActiveLayers: 3 }, - { pixels: 480 * 270, maxActiveLayers: 2 }, - { pixels: 0, maxActiveLayers: 1 } - ]; - - const trackPixels = width * height; - const activeLayersInfo = pixelsToMaxActiveLayers.find(layer => trackPixels >= layer.pixels); - const activeLayers = Math.min(encodings.length, activeLayersInfo.maxActiveLayers); - encodings.forEach((encoding, i) => { - encoding.active = i < activeLayers; - if (encoding.active) { - encoding.scaleResolutionDownBy = 1 << (activeLayers - i - 1); - } else { - delete encoding.scaleResolutionDownBy; - } - this._log.debug(`setting up simulcast layer ${i} with active = ${encoding.active}, scaleResolutionDownBy = ${encoding.scaleResolutionDownBy}`); - }); + _maybeUpdateEncodings(track, encodings, trackReplaced = false) { + if (track.kind !== 'video') { + return false; + } + const browser = util.guessBrowser(); + + // Note(mpatwardhan): always configure encodings for safari. + // for chrome only when adaptive simulcast enabled. + if (browser === 'safari' || (browser === 'chrome' && this._isAdaptiveSimulcastEnabled)) { + this._updateEncodings(track, encodings, trackReplaced); + return true; + } + + return false; + } + + /** + * Configures with default encodings depending on track type and resolution. + * Default configuration sets some encodings to disabled, and for others set scaleResolutionDownBy + * values. When trackReplaced is set to true, it will clear 'active' for any encodings that + * needs to be enabled. 
+ * @param {MediaStreamTrack} track + * @param {Array} encodings + * @param {boolean} trackReplaced + */ + _updateEncodings(track, encodings, trackReplaced) { + if (this._isChromeScreenShareTrack(track)) { + const screenShareActiveLayerConfig = [ + { scaleResolutionDownBy: 1 }, + { scaleResolutionDownBy: 1 } + ]; + encodings.forEach((encoding, i) => { + const activeLayerConfig = screenShareActiveLayerConfig[i]; + if (activeLayerConfig) { + encoding.scaleResolutionDownBy = activeLayerConfig.scaleResolutionDownBy; + if (trackReplaced) { + delete encoding.active; + } + } else { + encoding.active = false; + delete encoding.scaleResolutionDownBy; + } + }); + } else { + const { width, height } = track.getSettings(); + // NOTE(mpatwardhan): for non-screen share tracks + // enable layers depending on track resolutions + const pixelsToMaxActiveLayers = [ + { pixels: 960 * 540, maxActiveLayers: 3 }, + { pixels: 480 * 270, maxActiveLayers: 2 }, + { pixels: 0, maxActiveLayers: 1 } + ]; + + const trackPixels = width * height; + const activeLayersInfo = pixelsToMaxActiveLayers.find(layer => trackPixels >= layer.pixels); + const activeLayers = Math.min(encodings.length, activeLayersInfo.maxActiveLayers); + encodings.forEach((encoding, i) => { + const enabled = i < activeLayers; + if (enabled) { + encoding.scaleResolutionDownBy = 1 << (activeLayers - i - 1); + if (trackReplaced) { + delete encoding.active; + } + } else { + encoding.active = false; + delete encoding.scaleResolutionDownBy; + } + }); + } + this._log.debug('_updateEncodings:', encodings.map(({ active, scaleResolutionDownBy }, i) => `[${i}: ${active}, ${scaleResolutionDownBy || 0}]`).join(', ')); } /** @@ -613,7 +696,7 @@ class PeerConnectionV2 extends StateMachine { // NOTE(syerrapragada): VMS does not support H264 simulcast. So, // unset simulcast for sections in local offer where corresponding // sections in answer doesn't have vp8 as preferred codec and reapply offer. 
- updatedSdp = this._revertSimulcastForNonVP8MediaSections(updatedSdp, sdpWithoutSimulcast, offer.sdp); + updatedSdp = this._revertSimulcast(updatedSdp, sdpWithoutSimulcast, offer.sdp); } // NOTE(mmalavalli): Work around Chromium bug 1074421. @@ -1099,7 +1182,7 @@ class PeerConnectionV2 extends StateMachine { }); } return this._peerConnection.setLocalDescription(description).catch(error => { - this._log.warn(`Calling setLocalDescription with an RTCSessionDescription of type "${description.type}" failed with the error "${error.message}".`); + this._log.warn(`Calling setLocalDescription with an RTCSessionDescription of type "${description.type}" failed with the error "${error.message}".`, error); const errorToThrow = new MediaClientLocalDescFailedError(); const publishWarning = { @@ -1188,11 +1271,16 @@ class PeerConnectionV2 extends StateMachine { // NOTE(syerrapragada): VMS does not support H264 simulcast. So, // unset simulcast for sections in local offer where corresponding // sections in answer doesn't have vp8 as preferred codec and reapply offer. - if (description.type === 'answer' && this._shouldApplySimulcast) { - const sdpWithoutSimulcastForNonVP8MediaSections = this._revertSimulcastForNonVP8MediaSections( + if (description.type === 'answer' && this._localDescriptionWithoutSimulcast) { + // NOTE(mpatwardhan):if we were using adaptive simulcast, and if its not supported by server + // revert simulcast even for vp8. 
+ const adaptiveSimulcastEntry = this._preferredVideoCodecs.find(cs => 'adaptiveSimulcast' in cs); + const revertForAll = !!adaptiveSimulcastEntry && adaptiveSimulcastEntry.adaptiveSimulcast === false; + const sdpWithoutSimulcastForNonVP8MediaSections = this._revertSimulcast( this._localDescription.sdp, this._localDescriptionWithoutSimulcast.sdp, - description.sdp); + description.sdp, revertForAll); + this._localDescriptionWithoutSimulcast = null; if (sdpWithoutSimulcastForNonVP8MediaSections !== this._localDescription.sdp) { return this._rollbackAndApplyOffer({ type: this._localDescription.type, @@ -1209,7 +1297,7 @@ class PeerConnectionV2 extends StateMachine { negotiationCompleted(this); } }, error => { - this._log.warn(`Calling setRemoteDescription with an RTCSessionDescription of type "${description.type}" failed with the error "${error.message}".`); + this._log.warn(`Calling setRemoteDescription with an RTCSessionDescription of type "${description.type}" failed with the error "${error.message}".`, error); if (description.sdp) { this._log.warn(`The SDP was ${description.sdp}`); } @@ -1335,6 +1423,79 @@ class PeerConnectionV2 extends StateMachine { } } + _handleQueuedPublisherHints() { + if (this._peerConnection.signalingState === 'stable') { + this._mediaTrackSenderToPublisherHints.forEach(({ deferred, encodings }, mediaTrackSender) => { + this._mediaTrackSenderToPublisherHints.delete(mediaTrackSender); + this._setPublisherHint(mediaTrackSender, encodings) + .then(result => deferred.resolve(result)) + .catch(error => deferred.reject(error)); + }); + } + } + + /** + * updates encodings for simulcast layers of given sender. + * @param {RTCRtpSender} sender + * @param {Array<{enabled: boolean, layer_index: number}>|null} encodings + * @returns {Promise} string indicating result of the operation. 
can be one of + * "OK", "INVALID_HINT", "COULD_NOT_APPLY_HINT", "UNKNOWN_TRACK" + */ + _setPublisherHint(mediaTrackSender, encodings) { + if (isFirefox) { + return Promise.resolve('COULD_NOT_APPLY_HINT'); + } + + if (this._mediaTrackSenderToPublisherHints.has(mediaTrackSender)) { + // skip any stale hint associated with the mediaTrackSender. + const queuedHint = this._mediaTrackSenderToPublisherHints.get(mediaTrackSender); + queuedHint.deferred.resolve('REQUEST_SKIPPED'); + this._mediaTrackSenderToPublisherHints.delete(mediaTrackSender); + } + + const sender = this._rtpSenders.get(mediaTrackSender); + if (!sender) { + this._log.warn('Could not apply publisher hint because RTCRtpSender was not found'); + return Promise.resolve('UNKNOWN_TRACK'); + } + + if (this._peerConnection.signalingState === 'closed') { + this._log.warn('Could not apply publisher hint because signalingState was "closed"'); + return Promise.resolve('COULD_NOT_APPLY_HINT'); + } + + if (this._peerConnection.signalingState !== 'stable') { + // enqueue this hint to be applied when pc becomes stable. + this._log.debug('Queuing up publisher hint because signalingState:', this._peerConnection.signalingState); + const deferred = defer(); + this._mediaTrackSenderToPublisherHints.set(mediaTrackSender, { deferred, encodings }); + return deferred.promise; + } + + const parameters = sender.getParameters(); + if (encodings !== null) { + encodings.forEach(({ enabled, layer_index: layerIndex }) => { + if (parameters.encodings.length > layerIndex) { + this._log.debug(`layer:${layerIndex}, active:${parameters.encodings[layerIndex].active} => ${enabled}`); + parameters.encodings[layerIndex].active = enabled; + } else { + this._log.warn(`invalid layer:${layerIndex}, active:${enabled}`); + } + }); + } + + // Note(mpatwardhan): after publisher hints are applied, overwrite with default encodings + // to disable any encoding that shouldn't have been enabled by publisher_hints. 
+ // When encodings===null (that is we are asked to reset encodings for replaceTrack) + // along with disabling encodings, clear active flag for encodings that should not be disabled + this._maybeUpdateEncodings(sender.track, parameters.encodings, encodings === null /* trackReplaced */); + + return sender.setParameters(parameters).then(() => 'OK').catch(error => { + this._log.error('Failed to apply publisher hints:', error); + return 'COULD_NOT_APPLY_HINT'; + }); + } + /** * Add the {@link MediaTrackSender} to the {@link PeerConnectionV2}. * @param {MediaTrackSender} mediaTrackSender @@ -1352,7 +1513,8 @@ const transceiver = this._addOrUpdateTransceiver(mediaTrackSender.track); sender = transceiver.sender; } - mediaTrackSender.addSender(sender); + mediaTrackSender.addSender(sender, encodings => this._setPublisherHint(mediaTrackSender, encodings)); + this._rtpNewSenders.add(sender); this._rtpSenders.set(mediaTrackSender, sender); } @@ -1457,6 +1619,13 @@ this._localMediaStream.removeTrack(mediaTrackSender.track); } mediaTrackSender.removeSender(sender); + // clean up any pending publisher hints associated with this mediaTrackSender. + if (this._mediaTrackSenderToPublisherHints.has(mediaTrackSender)) { + const queuedHint = this._mediaTrackSenderToPublisherHints.get(mediaTrackSender); + queuedHint.deferred.resolve('UNKNOWN_TRACK'); + this._mediaTrackSenderToPublisherHints.delete(mediaTrackSender); + } + this._rtpNewSenders.delete(sender); this._rtpSenders.delete(mediaTrackSender); } @@ -1709,7 +1878,10 @@ function negotiationCompleted(pcv2) { updateRemoteCodecMaps(pcv2); } if (pcv2._isRTCRtpSenderParamsSupported) { - updateEncodingParameters(pcv2); + updateEncodingParameters(pcv2).then(() => { + // if there are any publisher hints queued, apply them now. 
+ pcv2._handleQueuedPublisherHints(); + }); } } @@ -1726,6 +1898,7 @@ function updateEncodingParameters(pcv2) { ['video', maxVideoBitrate] ]); + const promises = []; pcv2._peerConnection.getSenders().filter(sender => sender.track).forEach(sender => { const maxBitrate = maxBitrates.get(sender.track.kind); const params = sender.getParameters(); @@ -1749,15 +1922,17 @@ function updateEncodingParameters(pcv2) { params.encodings[0].networkPriority = 'high'; } - if (isSafari && sender.track.kind === 'video') { - const { width, height } = sender.track.getSettings(); - pcv2._updateEncodings(width, height, params.encodings); - } + // when a sender is reused, delete any active encodings set by server. + const trackReplaced = pcv2._rtpNewSenders.has(sender); + pcv2._maybeUpdateEncodings(sender.track, params.encodings, trackReplaced); + pcv2._rtpNewSenders.delete(sender); - sender.setParameters(params).catch(error => { + const promise = sender.setParameters(params).catch(error => { pcv2._log.warn(`Error while setting encodings parameters for ${sender.track.kind} Track ${sender.track.id}: ${error.message || error.name}`); }); + promises.push(promise); }); + return Promise.all(promises); } /** diff --git a/lib/signaling/v2/peerconnectionmanager.js b/lib/signaling/v2/peerconnectionmanager.js index c81f78c62..0b5adedde 100644 --- a/lib/signaling/v2/peerconnectionmanager.js +++ b/lib/signaling/v2/peerconnectionmanager.js @@ -111,6 +111,15 @@ class PeerConnectionManager extends QueueingEventEmitter { }); } + setEffectiveAdaptiveSimulcast(effectiveAdaptiveSimulcast) { + this._peerConnections.forEach(pc => pc.setEffectiveAdaptiveSimulcast(effectiveAdaptiveSimulcast)); + this._preferredCodecs.video.forEach(cs => { + if ('adaptiveSimulcast' in cs) { + cs.adaptiveSimulcast = effectiveAdaptiveSimulcast; + } + }); + } + /** * A summarized RTCPeerConnectionState across all the * {@link PeerConnectionManager}'s underlying {@link PeerConnectionV2}s. 
diff --git a/lib/signaling/v2/publisherhintsignaling.js b/lib/signaling/v2/publisherhintsignaling.js new file mode 100644 index 000000000..cb0ad66e7 --- /dev/null +++ b/lib/signaling/v2/publisherhintsignaling.js @@ -0,0 +1,71 @@ +/* eslint callback-return:0 */ +'use strict'; + +const MediaSignaling = require('./mediasignaling'); + +let messageId = 1; +class PublisherHintsSignaling extends MediaSignaling { + /** + * Construct a {@link PublisherHintsSignaling}. + */ + constructor(getReceiver, options) { + super(getReceiver, 'publisher_hints', options); + this.on('ready', transport => { + this._log.debug('publisher_hints transport ready:', transport); + transport.on('message', message => { + this._log.debug('Incoming: ', message); + switch (message.type) { + case 'publisher_hints': + if (message.publisher && message.publisher.hints && message.publisher.id) { + this._processPublisherHints(message.publisher.hints, message.publisher.id); + } + break; + default: + this._log.warn('Unknown message type: ', message.type); + break; + } + }); + }); + } + + sendTrackReplaced({ trackSid }) { + if (!this._transport) { + return; + } + + const payLoad = { + type: 'client_reset', + track: trackSid, + id: messageId++ + }; + this._log.debug('Outgoing: ', payLoad); + this._transport.publish(payLoad); + } + + sendHintResponse({ id, hints }) { + if (!this._transport) { + return; + } + const payLoad = { + type: 'publisher_hints', + id, + hints + }; + this._log.debug('Outgoing: ', payLoad); + this._transport.publish(payLoad); + } + + /** + * @private + */ + _processPublisherHints(hints, id) { + try { + this.emit('updated', hints, id); + } catch (ex) { + this._log.error('error processing hints:', ex); + } + } +} + + +module.exports = PublisherHintsSignaling; diff --git a/lib/signaling/v2/renderhintssignaling.js b/lib/signaling/v2/renderhintssignaling.js index 21f99536b..b8b4bcfc0 100644 --- a/lib/signaling/v2/renderhintssignaling.js +++ b/lib/signaling/v2/renderhintssignaling.js @@ -2,7 +2,9
@@ 'use strict'; const MediaSignaling = require('./mediasignaling'); +const Timeout = require('../../util/timeout'); const { isDeepEqual } = require('../../util'); +const RENDER_HINT_RESPONSE_TIME_MS = 2000; // time to wait for server response (before resending all hints.) let messageId = 1; class RenderHintsSignaling extends MediaSignaling { @@ -15,9 +17,12 @@ class RenderHintsSignaling extends MediaSignaling { _trackSidsToRenderHints: { value: new Map() }, - _isResponsePending: { - value: false, - writable: true, + _responseTimer: { + value: new Timeout(() => { + this._sendAllHints(); + // once timer fires, for next round double the delay. + this._responseTimer.setDelay(this._responseTimer.delay * 2); + }, RENDER_HINT_RESPONSE_TIME_MS, false), } }); @@ -35,23 +40,29 @@ class RenderHintsSignaling extends MediaSignaling { }); // NOTE(mpatwardhan): When transport is set (either 1st time of after vms failover) - // resend all track states. For this simply mark all tracks as dirty. - Array.from(this._trackSidsToRenderHints.keys()).forEach(trackSid => { - const trackState = this._trackSidsToRenderHints.get(trackSid); - if (trackState.renderDimensions) { - trackState.isDimensionDirty = true; - } + // resend all track states. + this._sendAllHints(); + }); + } - if ('enabled' in trackState) { - trackState.isEnabledDirty = true; - } - }); - this._sendHints(); + _sendAllHints() { + // to force sending all hints simply mark all tracks as dirty. 
+ Array.from(this._trackSidsToRenderHints.keys()).forEach(trackSid => { + const trackState = this._trackSidsToRenderHints.get(trackSid); + if (trackState.renderDimensions) { + trackState.isDimensionDirty = true; + } + + if ('enabled' in trackState) { + trackState.isEnabledDirty = true; + } }); + this._sendHints(); } _processHintResults(hintResults) { - this._isResponsePending = false; + this._responseTimer.clear(); + this._responseTimer.setDelay(RENDER_HINT_RESPONSE_TIME_MS); hintResults.forEach(hintResult => { if (hintResult.result !== 'OK') { this._log.debug('Server error processing hint:', hintResult); @@ -61,7 +72,7 @@ class RenderHintsSignaling extends MediaSignaling { } _sendHints() { - if (!this._transport || this._isResponsePending) { + if (!this._transport || this._responseTimer.isSet) { return; } @@ -95,7 +106,7 @@ class RenderHintsSignaling extends MediaSignaling { }; this._log.debug('Outgoing: ', payLoad); this._transport.publish(payLoad); - this._isResponsePending = true; + this._responseTimer.start(); } } diff --git a/lib/signaling/v2/room.js b/lib/signaling/v2/room.js index 3b6531ebb..5ca864b35 100644 --- a/lib/signaling/v2/room.js +++ b/lib/signaling/v2/room.js @@ -1,3 +1,4 @@ +/* eslint-disable no-console */ 'use strict'; const DominantSpeakerSignaling = require('./dominantspeakersignaling'); @@ -10,6 +11,8 @@ const StatsReport = require('../../stats/statsreport'); const TrackPrioritySignaling = require('./trackprioritysignaling'); const TrackSwitchOffSignaling = require('./trackswitchoffsignaling'); const RenderHintsSignaling = require('./renderhintssignaling'); +const PublisherHintsSignaling = require('./publisherhintsignaling.js'); + const { constants: { DEFAULT_SESSION_TIMEOUT_SEC }, @@ -110,6 +113,9 @@ class RoomV2 extends RoomSignaling { _renderHintsSignaling: { value: new RenderHintsSignaling(getTrackReceiver, { log }), }, + _publisherHintsSignaling: { + value: new PublisherHintsSignaling(getTrackReceiver, { log }), + }, 
_trackPrioritySignaling: { value: new options.TrackPrioritySignaling(getTrackReceiver, { log }), }, @@ -134,6 +140,7 @@ class RoomV2 extends RoomSignaling { this._initTrackSwitchOffSignaling(); this._initDominantSpeakerSignaling(); this._initNetworkQualityMonitorSignaling(); + this._initPublisherHintSignaling(); handleLocalParticipantEvents(this, localParticipant); handlePeerConnectionEvents(this, peerConnectionManager); @@ -141,6 +148,10 @@ class RoomV2 extends RoomSignaling { periodicallyPublishStats(this, transport, options.statsPublishIntervalMs); this._update(initialState); + + // NOTE(mpatwardhan) after initial state we know if publisher_hints are enabled or not + // if they are not enabled. we need to undo simulcast that if it was enabled with initial offer. + this._peerConnectionManager.setEffectiveAdaptiveSimulcast(this._publisherHintsSignaling.isSetup); } /** @@ -418,7 +429,8 @@ class RoomV2 extends RoomSignaling { this._networkQualitySignaling, this._trackPrioritySignaling, this._trackSwitchOffSignaling, - this._renderHintsSignaling + this._renderHintsSignaling, + this._publisherHintsSignaling ].forEach(mediaSignaling => { const channel = mediaSignaling.channel; if (!mediaSignaling.isSetup @@ -433,6 +445,30 @@ class RoomV2 extends RoomSignaling { return this; } + _initPublisherHintSignaling() { + this._publisherHintsSignaling.on('updated', (hints, id) => { + Promise.all(hints.map(hint => { + return this.localParticipant.setPublisherHint(hint.track, hint.encodings).then(result => { + return { track: hint.track, result }; + }); + })).then(hintResponses => { + this._publisherHintsSignaling.sendHintResponse({ id, hints: hintResponses }); + }); + }); + + const handleReplaced = track => { + if (track.kind === 'video') { + track.trackTransceiver.on('replaced', () => { + this._publisherHintsSignaling.sendTrackReplaced({ trackSid: track.sid }); + }); + } + }; + + // hook up for any existing and new tracks getting replaced. 
+ Array.from(this.localParticipant.tracks.values()).forEach(track => handleReplaced(track)); + this.localParticipant.on('trackAdded', track => handleReplaced(track)); + } + _initTrackSwitchOffSignaling() { this._trackSwitchOffSignaling.on('updated', (tracksOff, tracksOn) => { try { diff --git a/lib/signaling/v2/twilioconnectiontransport.js b/lib/signaling/v2/twilioconnectiontransport.js index d9ecbad55..0aa2eeee8 100644 --- a/lib/signaling/v2/twilioconnectiontransport.js +++ b/lib/signaling/v2/twilioconnectiontransport.js @@ -110,6 +110,9 @@ class TwilioConnectionTransport extends StateMachine { _dominantSpeaker: { value: options.dominantSpeaker }, + _adaptiveSimulcast: { + value: options.adaptiveSimulcast + }, _eventObserver: { value: options.eventObserver, writable: false @@ -231,6 +234,7 @@ class TwilioConnectionTransport extends StateMachine { this._networkQuality, this._trackPriority, this._trackSwitchOff, + this._adaptiveSimulcast, this._renderHints); message.subscribe = createSubscribePayload( diff --git a/lib/util/index.js b/lib/util/index.js index b2ac29382..e44e2f7c0 100644 --- a/lib/util/index.js +++ b/lib/util/index.js @@ -588,11 +588,23 @@ function createRoomConnectEventPayload(connectOptions) { payload[eventProp] = boolToString(!!connectOptions[prop]); }); - // numbers and string properties. - [['maxVideoBitrate'], ['maxAudioBitrate'], ['iceTransportPolicy'], ['region'], ['name', 'roomName']].forEach(([prop, eventProp]) => { + // numbers properties. + [['maxVideoBitrate'], ['maxAudioBitrate']].forEach(([prop, eventProp]) => { eventProp = eventProp || prop; - if (typeof connectOptions[prop] === 'number' || typeof connectOptions[prop] === 'string') { + if (typeof connectOptions[prop] === 'number') { payload[eventProp] = connectOptions[prop]; + } else if (!isNaN(Number(connectOptions[prop]))) { + payload[eventProp] = Number(connectOptions[prop]); + } + }); + + // string properties. 
+ [['iceTransportPolicy'], ['region'], ['name', 'roomName']].forEach(([prop, eventProp]) => { + eventProp = eventProp || prop; + if (typeof connectOptions[prop] === 'string') { + payload[eventProp] = connectOptions[prop]; + } else if (typeof connectOptions[prop] === 'number' && prop === 'name') { + payload[eventProp] = connectOptions[prop].toString(); } }); @@ -680,7 +692,7 @@ function createBandwidthProfileVideoPayload(bandwidthProfileVideo) { * protocol or not. * @returns {object} */ -function createMediaSignalingPayload(dominantSpeaker, networkQuality, trackPriority, trackSwitchOff, renderHints) { +function createMediaSignalingPayload(dominantSpeaker, networkQuality, trackPriority, trackSwitchOff, adaptiveSimulcast, renderHints) { const transports = { transports: [{ type: 'data-channel' }] }; return Object.assign( dominantSpeaker @@ -695,6 +707,10 @@ function createMediaSignalingPayload(dominantSpeaker, networkQuality, trackPrior // eslint-disable-next-line ? { render_hints: transports } : {}, + adaptiveSimulcast + // eslint-disable-next-line + ? { publisher_hints: transports } + : {}, trackPriority // eslint-disable-next-line ? { track_priority: transports } @@ -796,8 +812,7 @@ function inRange(num, min, max) { function isChromeScreenShareTrack(track) { // NOTE(mpatwardhan): Chrome creates screen share tracks with label like: "screen:69734272*" // we will check for label that starts with "screen:D" where D being a digit. 
- const isChrome = util.guessBrowser() === 'chrome'; - return isChrome && track.kind === 'video' && track.label && (/^screen:[0-9]+/.test(track.label) || /^web-contents-media-stream:[0-9/]+/.test(track.label) || /^window:[0-9]+/.test(track.label)); + return util.guessBrowser() === 'chrome' && track.kind === 'video' && 'displaySurface' in track.getSettings(); } diff --git a/lib/util/sdp/index.js b/lib/util/sdp/index.js index 5c05b6017..c6bf157cc 100644 --- a/lib/util/sdp/index.js +++ b/lib/util/sdp/index.js @@ -408,9 +408,11 @@ function unifiedPlanFilterLocalCodecs(localSdp, remoteSdp) { * @param localSdp - simulcast enabled local sdp * @param localSdpWithoutSimulcast - local sdp before simulcast was set * @param remoteSdp - remote sdp + * @param revertForAll - when true simulcast will be reverted for all codecs. when false it will be reverted + * only for non-vp8 codecs. * @return {string} Updated SDP string */ -function revertSimulcastForNonVP8MediaSections(localSdp, localSdpWithoutSimulcast, remoteSdp) { +function revertSimulcast(localSdp, localSdpWithoutSimulcast, remoteSdp, revertForAll = false) { const remoteMidToMediaSections = createMidToMediaSectionMap(remoteSdp); const localMidToMediaSectionsWithoutSimulcast = createMidToMediaSectionMap(localSdpWithoutSimulcast); const mediaSections = getMediaSections(localSdp); @@ -431,7 +433,8 @@ function revertSimulcastForNonVP8MediaSections(localSdp, localSdpWithoutSimulcas const remotePayloadTypes = getPayloadTypesInMediaSection(remoteSection); const isVP8ThePreferredCodec = remotePayloadTypes.length && remotePtToCodecs.get(remotePayloadTypes[0]) === 'vp8'; - return isVP8ThePreferredCodec ? section : localMidToMediaSectionsWithoutSimulcast.get(mid).replace(/\r\n$/, ''); + const shouldRevertSimulcast = revertForAll || !isVP8ThePreferredCodec; + return shouldRevertSimulcast ? 
localMidToMediaSectionsWithoutSimulcast.get(mid).replace(/\r\n$/, '') : section; })).concat('').join('\r\n'); } @@ -635,7 +638,7 @@ exports.disableRtx = disableRtx; exports.enableDtxForOpus = enableDtxForOpus; exports.getMediaSections = getMediaSections; exports.removeSSRCAttributes = removeSSRCAttributes; -exports.revertSimulcastForNonVP8MediaSections = revertSimulcastForNonVP8MediaSections; +exports.revertSimulcast = revertSimulcast; exports.setBitrateParameters = setBitrateParameters; exports.setCodecPreferences = setCodecPreferences; exports.setSimulcast = setSimulcast; diff --git a/lib/util/sdp/simulcast.js b/lib/util/sdp/simulcast.js index 757528157..c37c02b05 100644 --- a/lib/util/sdp/simulcast.js +++ b/lib/util/sdp/simulcast.js @@ -2,7 +2,6 @@ const difference = require('../').difference; const flatMap = require('../').flatMap; - /** * Create a random {@link SSRC}. * @returns {SSRC} */ diff --git a/package.json b/package.json index 99c47ea03..b8284c1a6 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "twilio-video", "title": "Twilio Video", "description": "Twilio Video JavaScript Library", - "version": "2.18.4-dev", + "version": "2.19.0-dev", "homepage": "https://twilio.com", "author": "Mark Andrus Roberts ", "contributors": [ diff --git a/test/integration/index.js b/test/integration/index.js index eff053d01..d9b15e467 100644 --- a/test/integration/index.js +++ b/test/integration/index.js @@ -1,8 +1,8 @@ 'use strict'; - require('./spec/browserbugs/chromium'); require('./spec/docker/reconnection'); require('./spec/docker/docker'); +require('./spec/bandwidthprofile/publisherhints.js'); require('./spec/bandwidthprofile/regressions'); require('./spec/bandwidthprofile/renderhints'); require('./spec/bandwidthprofile/video'); diff --git a/test/integration/spec/audioonlyrooms.js b/test/integration/spec/audioonlyrooms.js index 67e09d647..d98c45ef3 100644 --- a/test/integration/spec/audioonlyrooms.js +++ b/test/integration/spec/audioonlyrooms.js @@
-17,6 +17,7 @@ const { waitFor } = require('../../lib/util'); +// eslint-disable-next-line no-warning-comments // TODO(mmalavalli): Enable tests for firefox once VIDEO-7341 is fixed by the VMS team. (topology === 'group' && !isFirefox ? describe : describe.skip)('Audio Only Rooms', function() { // eslint-disable-next-line no-invalid-this diff --git a/test/integration/spec/bandwidthprofile/publisherhints.js b/test/integration/spec/bandwidthprofile/publisherhints.js new file mode 100644 index 000000000..73de7343a --- /dev/null +++ b/test/integration/spec/bandwidthprofile/publisherhints.js @@ -0,0 +1,470 @@ +/* eslint-disable no-console */ +/* eslint-disable no-undefined */ +'use strict'; + +const assert = require('assert'); +const { video: createLocalVideoTrack, audio: createLocalAudioTrack } = require('../../../../es5/createlocaltrack'); +const defaults = require('../../../lib/defaults'); +const { Logger } = require('../../../../es5'); +const connect = require('../../../../es5/connect'); +const { createRoom, completeRoom } = require('../../../lib/rest'); +const getToken = require('../../../lib/token'); +const { isFirefox } = require('../../../lib/guessbrowser'); +const SECOND = 1000; + +const { + tracksSubscribed, + trackSwitchedOff, + trackSwitchedOn, + waitFor, + participantsConnected, + setupAliceAndBob, + randomName, + waitForSometime, +} = require('../../../lib/util'); + +// for a given stat reports returns a Map +async function getSimulcastLayerReport(room) { + const statReports = await room.getStats(); + const ssrcToLocalVideoTrackStats = new Map(); + statReports.forEach(statReport => { + statReport.localVideoTrackStats.forEach(trackStat => { + ssrcToLocalVideoTrackStats.set(trackStat.ssrc, trackStat); + }); + }); + return ssrcToLocalVideoTrackStats; +} + +// waits for active layers to match the condition, samples every incrementalWaitTime. +// resolves when active layers matches the condition. 
rejects after totalWaitTimeMS +async function waitForActiveLayers({ room, condition, incrementalWaitTime = 5 * SECOND, totalWaitTimeMS = 30 * SECOND }) { + let waitTime = 0; + while (totalWaitTimeMS > waitTime) { + // eslint-disable-next-line no-await-in-loop + const { activeLayers, inactiveLayers } = await getActiveLayers({ room, initialWaitMS: 0, activeTimeMS: incrementalWaitTime }); + waitTime += incrementalWaitTime; + if (condition({ activeLayers, inactiveLayers })) { + return; + } + } + throw new Error('waitForActiveLayers failed'); +} + +// for a given room, returns array of simulcast layers that are active. +// it checks for active layers by gathering layer stats activeTimeMS apart. +async function getActiveLayers({ room, initialWaitMS = 15 * SECOND, activeTimeMS = 3 * SECOND }) { + await waitForSometime(initialWaitMS); + const layersBefore = await getSimulcastLayerReport(room); + await waitForSometime(activeTimeMS); + const layersAfter = await getSimulcastLayerReport(room); + + const activeLayers = []; + const inactiveLayers = []; + Array.from(layersAfter.keys()).forEach(ssrc => { + const layerStatsAfter = layersAfter.get(ssrc); + const layerStatsBefore = layersBefore.get(ssrc); + const bytesSentAfter = layerStatsAfter.bytesSent; + const bytesSentBefore = layerStatsBefore ? 
layerStatsBefore.bytesSent : 0; + const diffBytes = bytesSentAfter - bytesSentBefore; + + const width = layerStatsAfter?.dimensions?.width || layerStatsBefore?.dimensions?.width || 0; + const height = layerStatsAfter?.dimensions?.height || layerStatsBefore?.dimensions?.height || 0; + if (diffBytes > 0) { + activeLayers.push({ ssrc, width, height, diffBytes }); + } else { + inactiveLayers.push({ ssrc, width, height, diffBytes }); + } + }); + + function layersToString(layers) { + return layers.map(({ ssrc, width, height }) => `${ssrc}: ${width}x${height}`).join(', '); + } + + console.log(`active: [${layersToString(activeLayers)}], inactive: [${layersToString(inactiveLayers)}]`); + return { activeLayers, inactiveLayers }; +} + +describe('preferredVideoCodecs = auto', function() { + // eslint-disable-next-line no-invalid-this + this.timeout(120 * 1000); + // eslint-disable-next-line no-invalid-this + this.retries(2); + if (defaults.topology === 'peer-to-peer') { + describe('reverts to unicast', () => { + let roomSid; + let aliceRemote; + let aliceRoom; + let bobRoom; + let aliceLocalVideo; + let bobLocalVideo; + before(async () => { + aliceLocalVideo = await waitFor(createLocalVideoTrack(), 'alice local video track'); + bobLocalVideo = await waitFor(createLocalVideoTrack(), 'bob local video track'); + const bandwidthProfile = { + video: { + contentPreferencesMode: 'manual', + clientTrackSwitchOffControl: 'manual' + } + }; + + const aliceOptions = { + tracks: [aliceLocalVideo], + preferredVideoCodecs: 'auto', + loggerName: 'AliceLogger', + bandwidthProfile + }; + + const bobOptions = { + preferredVideoCodecs: 'auto', + tracks: [bobLocalVideo], + loggerName: 'BobLogger', + bandwidthProfile + }; + + ({ roomSid, aliceRemote, aliceRoom, bobRoom } = await setupAliceAndBob({ aliceOptions, bobOptions })); + }); + after(() => { + [aliceRoom, bobRoom].forEach(room => room && room.disconnect()); + [aliceLocalVideo, bobLocalVideo].forEach(video => video.stop()); + }); + + 
it('should fall back to unicast', async () => { + console.log({ roomSid, aliceRemote, aliceRoom, bobRoom }); + await waitFor(tracksSubscribed(aliceRemote, 1), `Bob to subscribe to Alice's track: ${roomSid}`); + await waitFor(tracksSubscribed(aliceRemote, 1), `Alice to subscribe to Bob's track: ${roomSid}`); + + await waitForSometime(5000); + + const aliceSimulcastLayers = await getSimulcastLayerReport(aliceRoom); + const bobSimulcastLayers = await getSimulcastLayerReport(bobRoom); + + assert(aliceSimulcastLayers.size === 1); + assert(bobSimulcastLayers.size === 1); + }); + }); + } else { + [ + { + testCase: 'enables simulcast for VP8 rooms', + roomOptions: { VideoCodecs: ['VP8'] }, + expectedCodec: 'VP8', + expectedLayers: 3 + }, + { + testCase: 'does not enable simulcast for H264 rooms', + roomOptions: { VideoCodecs: ['H264'] }, + expectedCodec: 'H264', + }, + ].forEach(({ testCase, roomOptions, expectedCodec, expectedLayers }) => { + // firefox does not support simulcast. + (isFirefox ? 
it.skip : it)(testCase, async () => { + const roomSid = await createRoom(randomName(), defaults.topology, roomOptions); + const aliceLocalVideo = await waitFor(createLocalVideoTrack(), 'alice local video track'); + const room = await connect(getToken('Alice'), { + ...defaults, + tracks: [aliceLocalVideo], + name: roomSid, + loggerName: 'AliceLogger', + preferredVideoCodecs: 'auto', + bandwidthProfile: { + video: { + contentPreferencesMode: 'manual', + clientTrackSwitchOffControl: 'manual' + } + } + }); + + await waitForSometime(2000); + const simulcastLayers = await getSimulcastLayerReport(room); + const layerArray = Array.from(simulcastLayers.values()); + layerArray.forEach(layer => assert.strictEqual((layer.codec || '').toLowerCase(), expectedCodec.toLowerCase(), `unexpected codec: ${layer.codec}, expected: ${expectedCodec} : room: ${roomSid}`)); + if (expectedLayers) { + assert.strictEqual(layerArray.length, expectedLayers, `layers: ${layerArray.length}, expected: ${expectedLayers} : room: ${roomSid}`); + } + completeRoom(roomSid); + aliceLocalVideo.stop(); + }); + }); + } +}); + +if (defaults.topology !== 'peer-to-peer' && !isFirefox) { + describe('adaptive simulcast layers', function() { + // eslint-disable-next-line no-invalid-this + this.timeout(120 * 1000); + + [ + { width: 1280, height: 720, expectedActive: 3 }, + { width: 640, height: 480, expectedActive: 2 }, + { width: 320, height: 180, expectedActive: 1 }, + ].forEach(({ width, height, expectedActive }) => { + it(`are configured correctly for ${width}x${height}`, async () => { + const roomSid = await createRoom(randomName(), defaults.topology); + const bandwidthProfile = { video: { contentPreferencesMode: 'manual', clientTrackSwitchOffControl: 'manual' } }; + const aliceLocalVideo = await waitFor(createLocalVideoTrack({ width, height }), 'alice local video track'); + assert.strictEqual(aliceLocalVideo.mediaStreamTrack.getSettings().height, height); + assert.strictEqual(aliceLocalVideo.mediaStreamTrack.getSettings().width, width); + + const aliceRoom = await connect(getToken('Alice'), { + ...defaults, + tracks: [aliceLocalVideo], +
name: roomSid, + preferredVideoCodecs: 'auto', + bandwidthProfile + }); + console.log('room sid: ', aliceRoom.sid); + + // we may not see all layers active simultaneously, because SFU disables layers as it discovers them + // and HD layers get started late. Verify that we see expected number of see unique active ssrc + const uniqueActiveSSRC = new Set(); + await waitForActiveLayers({ room: aliceRoom, condition: ({ activeLayers, inactiveLayers }) => { + activeLayers.forEach(({ ssrc }) => uniqueActiveSSRC.add(ssrc)); + assert(activeLayers.length + inactiveLayers.length === 3); + return uniqueActiveSSRC.size === expectedActive; + } }); + + aliceRoom.disconnect(); + completeRoom(roomSid); + }); + }); + }); + + describe('adaptive simulcast', function() { + // eslint-disable-next-line no-invalid-this + this.timeout(120 * 1000); + + let roomSid = null; + let aliceVideoTrackPublication = null; + let aliceRemoteVideoForBob = null; + let aliceRoom = null; + const bandwidthProfile = { video: { contentPreferencesMode: 'manual', clientTrackSwitchOffControl: 'manual' } }; + before(async () => { + roomSid = await createRoom(randomName(), defaults.topology); + }); + after(() => { + completeRoom(roomSid); + roomSid = null; + }); + + context('Alice joins the room', () => { + let aliceLocalVideo; + before(async () => { + aliceLocalVideo = await waitFor(createLocalVideoTrack({ width: 1280, height: 720 }), 'alice local video track'); + const { height, width } = aliceLocalVideo.mediaStreamTrack.getSettings(); + assert.strictEqual(height, 720); + assert.strictEqual(width, 1280); + aliceRoom = await connect(getToken('Alice'), { + ...defaults, + tracks: [aliceLocalVideo], + name: roomSid, + loggerName: 'AliceLogger', + preferredVideoCodecs: 'auto', + bandwidthProfile + }); + Logger.getLogger('AliceLogger').setLevel('WARN'); + console.log(`Alice joined the room: ${roomSid}: ${aliceRoom.localParticipant.sid}`); + }); + + describe('While Alice is alone in the room', () => { + it('c1: all 
layers get turned off.', async () => { + // initially SFU might take upto 30 seconds to turn off all layers. + await waitForActiveLayers({ room: aliceRoom, condition: ({ activeLayers }) => activeLayers.length === 0 }); + }); + + it('VIDEO-8391 track layers get reset when track is unpublished and published again', async () => { + const aliceVideoTrackPublication = [...aliceRoom.localParticipant.tracks.values()][0]; + aliceVideoTrackPublication.unpublish(); + + await waitForSometime(2000); + + await waitFor(aliceRoom.localParticipant.publishTrack(aliceLocalVideo), `alice to publish track again in ${roomSid}`); + + await waitForActiveLayers({ room: aliceRoom, incrementalWaitTime: 3 * SECOND, condition: ({ activeLayers }) => activeLayers.length >= 2 }); + }); + }); + + context('Bob joins the room', () => { + let bobRoom = null; + before(async () => { + aliceVideoTrackPublication = [...aliceRoom.localParticipant.tracks.values()][0]; + bobRoom = await connect(getToken('Bob'), { + ...defaults, + tracks: [], + name: roomSid, + loggerName: 'BobLogger', + preferredVideoCodecs: 'auto', + bandwidthProfile + }); + console.log(`Bob joined the room: ${roomSid}: ${bobRoom.localParticipant.sid}`); + Logger.getLogger('BobLogger').setLevel('ERROR'); + + await waitFor(participantsConnected(bobRoom, 1), `wait for Bob to see alice: ${roomSid}`); + const aliceRemote = bobRoom.participants.get(aliceRoom.localParticipant.sid); + await waitFor(tracksSubscribed(aliceRemote, 1), `wait for Bob to see alice's track: ${roomSid}`); + aliceRemoteVideoForBob = aliceRemote.videoTracks.get(aliceVideoTrackPublication.trackSid).track; + }); + + [ + { + testCase: 'c2: layers get turned on', + bob: { }, // no action yet. 
+ expectedActiveLayers: layers => layers >= 2 + }, + { + testCase: 'c3: Bob switches off', + bob: { switchOff: true, switchOn: false }, + expectedActiveLayers: layers => layers === 0 + }, + { + testCase: 'c4 bob switch on and renders @ 1280x720', + bob: { switchOff: false, switchOn: true, renderDimensions: { width: 1280, height: 720 } }, + expectedActiveLayers: layers => layers >= 2 + }, + { + testCase: 'c5: Bob request 640x360', + bob: { renderDimensions: { width: 640, height: 360 } }, + expectedActiveLayers: layers => layers === 2 + }, + { + testCase: 'c6: Bob request 320x180', + bob: { renderDimensions: { width: 320, height: 180 } }, + expectedActiveLayers: layers => layers === 1 + }, + { + testCase: 'c7: Bob switches off', + bob: { switchOff: true }, + expectedActiveLayers: layers => layers === 0 + }, + ].forEach(({ testCase, bob, expectedActiveLayers }) => { + it(testCase, async () => { + console.log(`executing ${testCase}`); + await executeRemoteTrackActions(bob, aliceRemoteVideoForBob, 'Bob'); + await waitForActiveLayers({ room: aliceRoom, condition: ({ activeLayers }) => expectedActiveLayers(activeLayers.length) }); + }); + }); + context('Charlie joins the room', () => { + let charlieRoom = null; + let aliceRemoteVideoForCharlie = null; + before(async () => { + charlieRoom = await connect(getToken('Charlie'), { + ...defaults, + tracks: [], + name: roomSid, + loggerName: 'CharlieLogger', + preferredVideoCodecs: 'auto', + bandwidthProfile + }); + console.log(`Charlie joined the room: ${roomSid}: ${charlieRoom.localParticipant.sid}`); + Logger.getLogger('CharlieLogger').setLevel('ERROR'); + + await waitFor(participantsConnected(charlieRoom, 2), `wait for Bob to see alice and Bob: ${roomSid}`); + const aliceRemote = charlieRoom.participants.get(aliceRoom.localParticipant.sid); + await waitFor(tracksSubscribed(aliceRemote, 1), `wait for Charlie to see alice's track: ${roomSid}`); + aliceRemoteVideoForCharlie = 
aliceRemote.videoTracks.get(aliceVideoTrackPublication.trackSid).track; + }); + [ + { + testCase: 'c8: Charlie joined (no render hints yet)', + expectedActiveLayers: layers => layers >= 2 + }, + { + testCase: 'c9: Charlie requests 320x180', + charlie: { switchOff: false, switchOn: true, renderDimensions: { width: 320, height: 180 } }, + expectedActiveLayers: layers => layers === 1 + }, + { + testCase: 'c10 Charlie Switches off', + charlie: { switchOff: true }, + expectedActiveLayers: layers => layers === 0 + }, + { + testCase: 'c11: Bob requests 1280x720', + bob: { switchOn: true, renderDimensions: { width: 1280, height: 720 } }, + expectedActiveLayers: layers => layers >= 2 + }, + { + testCase: 'c12: Charlie requests 1280x720', + charlie: { switchOn: true, renderDimensions: { width: 1280, height: 720 } }, + expectedActiveLayers: layers => layers >= 2 + }, + { + testCase: 'c13: Charlie switches off, Bob: 640x360', + bob: { renderDimensions: { width: 640, height: 360 } }, + charlie: { switchOff: true }, + expectedActiveLayers: layers => layers === 2 + }, + { + testCase: 'c14: Charlie SwitchOn @ 320x180, Bob switch off', + bob: { switchOff: true }, + charlie: { switchOn: true, renderDimensions: { width: 320, height: 180 } }, + expectedActiveLayers: layers => layers === 1 + }, + { + testCase: 'c15: Charlie switches off', + charlie: { switchOff: true }, + expectedActiveLayers: layers => layers === 0 + }, + ].forEach(({ testCase, bob, charlie, expectedActiveLayers }) => { + it(testCase, async () => { + console.log(`executing ${testCase}`); + await executeRemoteTrackActions(bob, aliceRemoteVideoForBob, 'Bob'); + await executeRemoteTrackActions(charlie, aliceRemoteVideoForCharlie, 'Charlie'); + + await waitForActiveLayers({ room: aliceRoom, condition: ({ activeLayers }) => expectedActiveLayers(activeLayers.length) }); + }); + }); + + it('subsequent negotiations does not cause layers to be enabled', async () => { + await executeRemoteTrackActions({ switchOff: true }, 
aliceRemoteVideoForBob, 'Bob'); + await executeRemoteTrackActions({ switchOff: true }, aliceRemoteVideoForCharlie, 'Charlie'); + await waitForActiveLayers({ room: aliceRoom, condition: ({ activeLayers }) => activeLayers.length === 0 }); + + // assert(activeLayers.length === 0, `unexpected activeLayers.length after switch off: ${activeLayers.length} in ${roomSid}`); + const aliceLocalAudio = await waitFor(createLocalAudioTrack(), 'alice local audio track'); + + // Bob publishes track + await waitFor(aliceRoom.localParticipant.publishTrack(aliceLocalAudio), `Alice to publish audio track: ${roomSid}`); + + await waitForActiveLayers({ room: aliceRoom, condition: ({ activeLayers }) => activeLayers.length === 0 }); + }); + + it('adaptive simulcast continue to work after replace track', async () => { + // have both bob and charlie turn off tracks. + await executeRemoteTrackActions({ switchOff: true }, aliceRemoteVideoForBob, 'Bob'); + await executeRemoteTrackActions({ switchOff: true }, aliceRemoteVideoForCharlie, 'Charlie'); + + await waitForActiveLayers({ room: aliceRoom, condition: ({ activeLayers }) => activeLayers.length === 0 }); + + // now restart the track + console.log('restarting the track'); + await aliceLocalVideo.restart(); + + // have bob switch on and off tracks again to retrigger SFU. 
+ await executeRemoteTrackActions({ switchOn: true }, aliceRemoteVideoForBob, 'Bob'); + await executeRemoteTrackActions({ switchOff: true }, aliceRemoteVideoForBob, 'Bob'); + + await waitForActiveLayers({ room: aliceRoom, condition: ({ activeLayers }) => activeLayers.length === 0 }); + }); + }); + }); + }); + }); +} +async function executeRemoteTrackActions(actions, remoteTrack, actor) { + if (actions) { + if (actions.switchOn) { + console.log(`${actor} switching on`); + remoteTrack.switchOn(); + await waitFor(trackSwitchedOn(remoteTrack), `track to switch on: ${remoteTrack.sid}`); + } + if (actions.switchOff) { + console.log(`${actor} switching off`); + remoteTrack.switchOff(); + await waitFor(trackSwitchedOff(remoteTrack), `track to switch off: ${remoteTrack.sid}`); + } + if (actions.renderDimensions) { + console.log(`${actor} setting renderDimensions`, actions.renderDimensions); + remoteTrack.setContentPreferences({ renderDimensions: actions.renderDimensions }); + } + } +} diff --git a/test/integration/spec/connect.js b/test/integration/spec/connect.js index 64d07e952..0444a1b9c 100644 --- a/test/integration/spec/connect.js +++ b/test/integration/spec/connect.js @@ -126,6 +126,53 @@ describe('connect', function() { }); }); + describe('preferredVideoCodecs = auto', () => { + it('should rejects with a TypeError when maxVideoBitrate is specified at connect', async () => { + const identity = randomName(); + const token = getToken(identity); + const cancelablePromise = connect(token, Object.assign({}, defaults, { + tracks: [], + preferredVideoCodecs: 'auto', + maxVideoBitrate: 10000, + })); + + let errorThrown = null; + try { + await cancelablePromise; + } catch (error) { + errorThrown = error; + } + assert(errorThrown instanceof TypeError); + assert(cancelablePromise instanceof CancelablePromise); + }); + + it('should throw on subsequent setParameters if maxVideoBitrate is specified', async () => { + const identity = randomName(); + const token = getToken(identity); 
+ const room = await connect(token, Object.assign({}, defaults, { + tracks: [], + preferredVideoCodecs: 'auto' + })); + + let errorThrown = null; + try { + room.localParticipant.setParameters({ maxAudioBitrate: 100 }); + } catch (error) { + errorThrown = error; + } + assert(!errorThrown); + + try { + room.localParticipant.setParameters({ maxVideoBitrate: 100 }); + } catch (error) { + errorThrown = error; + } + assert(errorThrown); + assert.equal(errorThrown.message, 'encodingParameters must be an encodingParameters.maxVideoBitrate is not compatible with "preferredVideoCodecs=auto"'); + assert(errorThrown instanceof TypeError); + }); + }); + describe('should return a CancelablePromise that rejects when called with invalid bandwidth Profile options: ', () => { [ { diff --git a/test/lib/post.js b/test/lib/post.js index 00a81704f..9031c46e3 100644 --- a/test/lib/post.js +++ b/test/lib/post.js @@ -30,7 +30,12 @@ function request(config, data) { try { resolve(JSON.parse(data.join(''))); } catch (e) { - resolve({ status: 'ok' }); + resolve({ + e, + errorMessage: e.message, + data: data.join(''), + status: 'not_ok' + }); } }); }); diff --git a/test/unit/index.js b/test/unit/index.js index 1c69fff52..5b2ecce8e 100644 --- a/test/unit/index.js +++ b/test/unit/index.js @@ -45,6 +45,7 @@ require('./spec/signaling/room'); require('./spec/signaling/v2'); require('./spec/signaling/v2/renderhintssignaling'); +require('./spec/signaling/v2/publisherhintsignaling'); require('./spec/signaling/v2/mediasignaling'); require('./spec/signaling/v2/dominantspeakersignaling'); require('./spec/signaling/v2/cancelableroomsignalingpromise'); diff --git a/test/unit/spec/connect.js b/test/unit/spec/connect.js index 90b968213..47e73b29d 100644 --- a/test/unit/spec/connect.js +++ b/test/unit/spec/connect.js @@ -334,6 +334,35 @@ describe('connect', () => { }); }); + describe('called with ConnectOptions#preferredVideoCodecs = auto', () => { + it('throws when ConnectOptions has maxVideoBitrate', async 
() => { + const mockSignaling = new Signaling(); + mockSignaling.connect = () => Promise.resolve(() => new RoomSignaling()); + function signaling() { + return mockSignaling; + } + + let errorThrown = null; + try { + await connect(token, { + signaling, + iceServers: [], + tracks: [], + preferredVideoCodecs: 'auto', + maxVideoBitrate: 100 + }); + + assert(false); + } catch (error) { + errorThrown = error; + } + + assert(errorThrown); + assert(errorThrown instanceof TypeError); + assert(errorThrown.message, 'Illegal call to connect: ConnectOptions "maxVideoBitrate" is not compatible with "preferredVideoCodecs=auto"'); + }); + }); + describe('called without ConnectOptions#tracks', () => { it('automatically acquires LocalTracks', () => { const createLocalTracks = sinon.spy(); diff --git a/test/unit/spec/localparticipant.js b/test/unit/spec/localparticipant.js index bf75eb8e7..5f700b41b 100644 --- a/test/unit/spec/localparticipant.js +++ b/test/unit/spec/localparticipant.js @@ -1247,6 +1247,7 @@ function makeSignaling(options) { signaling.setBandwidthProfile = sinon.spy(() => {}); signaling.setNetworkQualityConfiguration = sinon.spy(() => {}); signaling.setParameters = sinon.spy(() => {}); + signaling.getParameters = sinon.spy(() => { return {}; }); return signaling; } diff --git a/test/unit/spec/media/track/sender.js b/test/unit/spec/media/track/sender.js index 4523e821a..e150674cc 100644 --- a/test/unit/spec/media/track/sender.js +++ b/test/unit/spec/media/track/sender.js @@ -4,6 +4,8 @@ const assert = require('assert'); const MediaTrackSender = require('../../../../../lib/media/track/sender'); const Document = require('../../../../lib/document'); +const sinon = require('sinon'); +const { combinationContext } = require('../../../../lib/util'); describe('MediaTrackSender', () => { before(() => { @@ -120,4 +122,119 @@ describe('MediaTrackSender', () => { }); }); }); + + describe('setPublisherHint', () => { + it('resolves to "COULD_NOT_APPLY_HINT" when publisher hint 
callback is not set', async () => { + const trackSender = new MediaTrackSender(mediaStreamTrack); + const rtpSender = { track: 'foo' }; + trackSender.addSender(rtpSender); + // eslint-disable-next-line camelcase + const result = await trackSender.setPublisherHint({ enabled: false, layer_index: 0 }); + assert.strictEqual(result, 'COULD_NOT_APPLY_HINT'); + }); + + it('forwards to callback if set', async () => { + const trackSender = new MediaTrackSender(mediaStreamTrack); + const rtpSender1 = { track: 'foo' }; + const rtpSender2 = { track: 'bar' }; + // eslint-disable-next-line camelcase + const encodings = [{ enabled: false, layer_index: 0 }]; + const publisherHintCallback = payload => { + assert.deepStrictEqual(encodings, payload); + return Promise.resolve('OK'); + }; + trackSender.addSender(rtpSender2); + trackSender.addSender(rtpSender1, publisherHintCallback); + + // eslint-disable-next-line camelcase + const result = await trackSender.setPublisherHint(encodings); + assert.strictEqual(result, 'OK'); + }); + }); + + describe('setMediaStreamTrack', () => { + combinationContext([ + [ + [true, false], + x => `when replaceTrack ${x ? 'resolves' : 'rejects'}` + ], + [ + [true, false], + x => `when publisher hint callback is ${x ? '' : 'not '} set` + ], + [ + [true, false], + x => `when publisherHitCallBack ${x ? 'resolves' : 'rejects'}` + ], + ], ([replaceTrackSuccess, publisherHintCallbackSet, publisherHintSuccess]) => { + + let msTrackReplaced; + let rtpSender; + let publisherHitCallBack; + let result; + let errorResult; + let trackSender; + beforeEach(async () => { + const msTrackOrig = makeMediaStreamTrack({ id: 'original' }); + trackSender = new MediaTrackSender(msTrackOrig); + msTrackReplaced = makeMediaStreamTrack({ id: 'replaced' }); + rtpSender = { + track: 'foo', + replaceTrack: sinon.spy(() => replaceTrackSuccess ? Promise.resolve('yay') : Promise.reject('boo')) + }; + + publisherHitCallBack = sinon.spy(() => publisherHintSuccess ? 
Promise.resolve('yes') : Promise.reject('no')); + if (publisherHintCallbackSet) { + trackSender.addSender(rtpSender, publisherHitCallBack); + } else { + trackSender.addSender(rtpSender); + } + + try { + result = await trackSender.setMediaStreamTrack(msTrackReplaced); + } catch (error) { + errorResult = error; + } + }); + + it('calls RTCRtpSender.replaceTrack', () => { + sinon.assert.calledWith(rtpSender.replaceTrack, msTrackReplaced); + }); + + if (replaceTrackSuccess) { + it('resolves', () => { + assert(result); + assert(!errorResult); + }); + } else { + it('rejects', () => { + assert(!result); + assert(errorResult); + }); + } + + if (publisherHintCallbackSet && replaceTrackSuccess) { + it('sets default publisher hint', () => { + sinon.assert.calledWith(publisherHitCallBack, null); + }); + } else { + it('does not set default publisher hint', () => { + sinon.assert.notCalled(publisherHitCallBack); + }); + } + + it('always replaces the track', () => { + assert.strictEqual(trackSender.track.id, 'replaced'); + }); + }); + }); }); + +function makeMediaStreamTrack({ id = 'foo', kind = 'baz', readyState = 'zee' }) { + return { + id, kind, readyState, + clone: () => { + return { id: 'cloned_' + id, kind, readyState }; + } + }; +} diff --git a/test/unit/spec/signaling/v2/peerconnection.js b/test/unit/spec/signaling/v2/peerconnection.js index 4a5e125e9..d4325cb0f 100644 --- a/test/unit/spec/signaling/v2/peerconnection.js +++ b/test/unit/spec/signaling/v2/peerconnection.js @@ -5,6 +5,7 @@ const assert = require('assert'); const EventEmitter = require('events'); const sinon = require('sinon'); +const util = require('@twilio/webrtc/lib/util'); const EventTarget = require('../../../../../lib/eventtarget'); const IceConnectionMonitor = require('../../../../../lib/signaling/v2/iceconnectionmonitor.js'); @@ -80,53 +81,161 @@ describe('PeerConnectionV2', () => { }); }); - describe('._updateEncodings', () => { + describe('._maybeUpdateEncodings', () => { + let stub; + beforeEach(() 
=> { + stub = sinon.stub(util, 'guessBrowser'); + }); + + afterEach(() => { + stub.restore(); + }); + [ { - testName: 'resolution >= 960x540', + browser: 'chrome', + testName: 'video, resolution >= 960x540 (defaults)', width: 960, height: 540, encodings: [{}, {}, {}], - expectedEncodings: [{ active: true, scaleResolutionDownBy: 4 }, { active: true, scaleResolutionDownBy: 2 }, { active: true, scaleResolutionDownBy: 1 }] + expectedEncodings: [{ scaleResolutionDownBy: 4 }, { scaleResolutionDownBy: 2 }, { scaleResolutionDownBy: 1 }] }, { - testName: 'resolution >= 960x540 (no simulcast)', + browser: 'chrome', + testName: '960x540 > resolution >= 480x270 (defaults)', + width: 480, + height: 270, + encodings: [{}, {}, {}], + expectedEncodings: [{ scaleResolutionDownBy: 2 }, { scaleResolutionDownBy: 1 }, { active: false }] + }, + { + browser: 'chrome', + testName: 'resolution <= 480x270 (defaults)', + width: 320, + height: 180, + encodings: [{}, {}, {}], + expectedEncodings: [{ scaleResolutionDownBy: 1 }, { active: false }, { active: false }] + }, + { + browser: 'chrome', + testName: '960x540 > resolution >= 480x270 (keeps layers disabled if disabled originally)', + width: 480, + height: 270, + encodings: [{ scaleResolutionDownBy: 2, active: true }, { scaleResolutionDownBy: 1, active: false }, { active: true }], + expectedEncodings: [{ scaleResolutionDownBy: 2, active: true }, { scaleResolutionDownBy: 1, active: false }, { active: false }] + }, + { + browser: 'chrome', + testName: 'resolution <= 480x270 (trackReplaced removes active flag when trackReplaced)', + width: 320, + trackReplaced: true, + height: 180, + encodings: [{ scaleResolutionDownBy: 4, active: false }, { scaleResolutionDownBy: 2, active: true }, { scaleResolutionDownBy: 1, active: true }], + expectedEncodings: [{ scaleResolutionDownBy: 1 }, { active: false }, { active: false }] + }, + { + browser: 'chrome', + testName: '960x540 > resolution >= 480x270 (no simulcast)', + width: 480, + height: 270, + 
encodings: [{}], // input encodings has only one layer + expectedEncodings: [{ scaleResolutionDownBy: 1 }] + }, + { + browser: 'chrome', + testName: 'video, resolution >= 960x540 (no simulcast)', width: 960, height: 540, encodings: [{}], - expectedEncodings: [{ active: true, scaleResolutionDownBy: 1 }] + expectedEncodings: [{ scaleResolutionDownBy: 1 }] }, { - testName: '960x540 > resolution >= 480x270', + browser: 'chrome', + testName: 'screen share track (defaults)', + isScreenShare: true, + width: 960, + height: 540, + encodings: [{}, {}, {}], + expectedEncodings: [{ scaleResolutionDownBy: 1 }, { scaleResolutionDownBy: 1 }, { active: false }], + }, + { + browser: 'chrome', + testName: 'screen share track (keeps layers disabled if disabled originally)', + isScreenShare: true, + width: 960, + height: 540, + encodings: [{ scaleResolutionDownBy: 1, active: true }, { scaleResolutionDownBy: 1, active: false }, { scaleResolutionDownBy: 1, active: true }], + expectedEncodings: [{ scaleResolutionDownBy: 1, active: true }, { scaleResolutionDownBy: 1, active: false }, { active: false }], + }, + { + browser: 'chrome', + testName: 'screen share track (trackReplaced)', + isScreenShare: true, + trackReplaced: true, + width: 960, + height: 540, + encodings: [{ scaleResolutionDownBy: 1, active: true }, { scaleResolutionDownBy: 1, active: false }, { scaleResolutionDownBy: 1, active: true }], + expectedEncodings: [{ scaleResolutionDownBy: 1 }, { scaleResolutionDownBy: 1 }, { active: false }], + }, + { + browser: 'chrome', + testName: 'does not update encodings when not using adaptive simulcast', + width: 960, + height: 540, + encodings: [{}, {}, {}], + preferredCodecs: { audio: [], video: [{ codec: 'vp8', simulcast: true }] } + }, + { + browser: 'safari', + testName: 'updates encoding for safari (irrespective of adaptiveSimulcast) ', width: 480, height: 270, encodings: [{}, {}, {}], - expectedEncodings: [{ active: true, scaleResolutionDownBy: 2 }, { active: true, 
scaleResolutionDownBy: 1 }, { active: false }] + expectedEncodings: [{ scaleResolutionDownBy: 2 }, { scaleResolutionDownBy: 1 }, { active: false }], + preferredCodecs: { audio: [], video: [{ codec: 'vp8', simulcast: true }] } }, { - testName: '960x540 > resolution >= 480x270 (no simulcast)', + browser: 'safari', + testName: 'does not update encoding for audio tracks', + kind: 'audio', width: 480, height: 270, - encodings: [{}], // input encodings has only one layer - expectedEncodings: [{ active: true, scaleResolutionDownBy: 1 }] + encodings: [{}, {}, {}], + preferredCodecs: { audio: [], video: [{ codec: 'vp8', simulcast: true }] } }, { - testName: 'resolution <= 480x270', - width: 320, - height: 180, + browser: 'firefox', + testName: 'does not update encodings', + width: 480, + height: 270, encodings: [{}, {}, {}], - expectedEncodings: [{ active: true, scaleResolutionDownBy: 1 }, { active: false }, { active: false }] } - ].forEach(({ width, height, encodings, expectedEncodings, testName }) => { - it(testName, () => { - const test = makeTest(); - test.pcv2._updateEncodings(width, height, encodings); - assert.deepStrictEqual(encodings, expectedEncodings); + ].forEach(({ width, height, encodings, testName, browser, preferredCodecs, trackReplaced = false, expectedEncodings = null, isScreenShare = false, kind = 'video' }) => { + it(`${browser}:${testName}`, () => { + stub = stub.returns(browser); + const trackSettings = { width, height }; + if (isScreenShare) { + trackSettings.displaySurface = 'monitor'; + } + const mediaStreamTrack = { + kind, + getSettings: () => trackSettings + }; + + preferredCodecs = preferredCodecs || { audio: [], video: [{ codec: 'vp8', simulcast: true, adaptiveSimulcast: true }] }; + const test = makeTest({ preferredCodecs, isChromeScreenShareTrack: () => isScreenShare }); + const updated = test.pcv2._maybeUpdateEncodings(mediaStreamTrack, encodings, trackReplaced); + const shouldUpdate = !!expectedEncodings; + assert(updated === shouldUpdate, 
`_maybeUpdateEncodings returned unexpected: ${updated}`); + if (expectedEncodings) { + assert.deepStrictEqual(encodings, expectedEncodings); + } + stub.resetHistory(); }); + return true; }); }); - describe('.iceConnectionState', () => { it('equals the underlying RTCPeerConnection\'s .iceConnectionState', () => { const test = makeTest(); @@ -397,6 +506,155 @@ describe('PeerConnectionV2', () => { }); }); + function makePublisherHints(layerIndex, enabled) { + // eslint-disable-next-line camelcase + return [{ enabled, layer_index: layerIndex }]; + } + + describe('_setPublisherHint', () => { + let test; + combinationContext([ + [ + ['stable', 'have-local-offer', 'closed'], + x => `in signalingState "${x}"` + ], + [ + [true, false], + x => `When a publisher hint was previously ${x ? '' : 'not '} queued` + ], + [ + [true, false], + x => `When a rtpSender.setParameters ${x ? 'resolves' : 'rejects'}` + ] + ], ([signalingState, hasQueuedHint, setParameterSuccess]) => { + + let trackSender; + let deferred; + beforeEach(async () => { + test = makeTest({ offers: 1 }); + + switch (signalingState) { + case 'closed': + test.pcv2.close(); + break; + case 'stable': + break; + case 'have-local-offer': + await test.pcv2.offer(); + break; + } + + const tracks = [{ id: 1 }]; + trackSender = makeMediaTrackSender(tracks[0]); + test.pcv2.addMediaTrackSender(trackSender); + + if (hasQueuedHint) { + deferred = defer(); + test.pcv2._mediaTrackSenderToPublisherHints.set(trackSender, { + encodings: makePublisherHints(1, true), + deferred + }); + } + }); + + if (deferred) { + it('resolves stale hint promise with "REQUEST_SKIPPED"', async () => { + test.pcv2._setPublisherHint(trackSender, makePublisherHints(0, true)); + const result = await deferred.promise; + assert(result, 'REQUEST_SKIPPED'); + }); + } + if (signalingState === 'closed') { + it('returns a promise that resolves to "COULD_NOT_APPLY_HINT"', async () => { + // eslint-disable-next-line camelcase + const result = await 
test.pcv2._setPublisherHint(trackSender, makePublisherHints(0, true)); + assert(result, 'COULD_NOT_APPLY_HINT'); + }); + } + + it('for an unknown track sender resolves to "UNKNOWN_TRACK"', async () => { + const unknownTrackSender = {}; + const result = await test.pcv2._setPublisherHint(unknownTrackSender, makePublisherHints(0, true)); + assert(result, 'UNKNOWN_TRACK'); + }); + + if (signalingState === 'have-local-offer') { + it('queues the hint for later processing', done => { + const resultPromise = test.pcv2._setPublisherHint(trackSender, makePublisherHints(0, true)); + const queued = test.pcv2._mediaTrackSenderToPublisherHints.get(trackSender); + assert.deepEqual(queued.encodings, makePublisherHints(0, true)); + + resultPromise.then(result => { + assert(result, 'whatever'); + done(); + }); + + queued.deferred.resolve('whatever'); + }); + } + if (signalingState === 'stable') { + it('applies given encodings if provided', () => { + test.pcv2._setPublisherHint(trackSender, makePublisherHints(0, false)); + const rtpSender = test.pcv2._rtpSenders.get(trackSender); + sinon.assert.calledWith(rtpSender.setParameters, sinon.match(parameters => { + return parameters.encodings[0].active === false; + })); + }); + + it('resets hints if none provided', () => { + test.pcv2._setPublisherHint(trackSender, null); + const rtpSender = test.pcv2._rtpSenders.get(trackSender); + sinon.assert.calledWith(rtpSender.setParameters, sinon.match(parameters => { + return !('active' in parameters.encodings[0]); + })); + }); + + let expectedResult = setParameterSuccess ? 'OK' : 'COULD_NOT_APPLY_HINT'; + it(`resolves to ${expectedResult}`, async () => { + test.pc.getSenders().forEach(sender => { + sender.setParameters = sinon.spy(() => + setParameterSuccess ? 
Promise.resolve('good result') : Promise.reject('bad error') + ); + }); + const result = await test.pcv2._setPublisherHint(trackSender, makePublisherHints(0, true)); + assert.strictEqual(result, expectedResult); + }); + } + }); + }); + + describe('_handleQueuedPublisherHints', () => { + let test; + let trackSender; + let deferred; + + beforeEach(() => { + test = makeTest({ offers: 1 }); + const tracks = [{ id: 1 }]; + trackSender = makeMediaTrackSender(tracks[0]); + test.pcv2.addMediaTrackSender(trackSender); + + deferred = defer(); + test.pcv2._mediaTrackSenderToPublisherHints.set(trackSender, { + encodings: makePublisherHints(0, false), + deferred + }); + + test.pcv2._handleQueuedPublisherHints(); + }); + + it('processes queued hints', async () => { + const result = await deferred.promise; + assert(result, 'OK'); + + const rtpSender = test.pcv2._rtpSenders.get(trackSender); + sinon.assert.calledWith(rtpSender.setParameters, sinon.match(parameters => { + return parameters.encodings[0].active === false; + })); + assert.strictEqual(test.pcv2._mediaTrackSenderToPublisherHints.size, 0); + }); + }); + describe('#close', () => { ['closed', 'stable', 'have-local-offer'].forEach(signalingState => { let test; @@ -2363,7 +2621,7 @@ function makePeerConnectionV2(options) { options.sessionTimeout = options.sessionTimeout || 100; options.setBitrateParameters = options.setBitrateParameters || sinon.spy(sdp => sdp); options.setCodecPreferences = options.setCodecPreferences || sinon.spy(sdp => sdp); - options.preferredCodecs = options.preferredcodecs || { audio: [], video: [] }; + options.preferredCodecs = options.preferredCodecs || { audio: [], video: [] }; options.options = { Backoff: options.Backoff, Event: function(type) { return { type: type }; }, diff --git a/test/unit/spec/signaling/v2/peerconnectionmanager.js b/test/unit/spec/signaling/v2/peerconnectionmanager.js index 704902761..ff1b65256 100644 --- a/test/unit/spec/signaling/v2/peerconnectionmanager.js +++ 
b/test/unit/spec/signaling/v2/peerconnectionmanager.js @@ -1046,6 +1046,8 @@ function makePeerConnectionV2Constructor(testOptions) { peerConnectionV2.removeMediaTrackSender = sinon.spy(); + peerConnectionV2.setEffectiveAdaptiveSimulcast = sinon.spy(); + peerConnectionV2.setConfiguration = sinon.spy(configuration => { peerConnectionV2.configuration = configuration; }); diff --git a/test/unit/spec/signaling/v2/publisherhintsignaling.js b/test/unit/spec/signaling/v2/publisherhintsignaling.js new file mode 100644 index 000000000..a377b7214 --- /dev/null +++ b/test/unit/spec/signaling/v2/publisherhintsignaling.js @@ -0,0 +1,127 @@ +'use strict'; + +const assert = require('assert'); +const { EventEmitter } = require('events'); +const sinon = require('sinon'); + +const PublisherHintSignaling = require('../../../../../lib/signaling/v2/publisherhintsignaling.js'); +const log = require('../../../../lib/fakelog'); +const { waitForSometime } = require('../../../../../lib/util'); + +describe('PublisherHintSignaling', () => { + describe('constructor', () => { + it('sets ._transport to null', () => { + const mst = makeTransport(); + const subject = makeTest(mst); + assert.strictEqual(subject._transport, null); + }); + + it('_transport assigned after ready', () => { + const mst = makeTransport(); + const subject = makeTest(mst); + return new Promise(resolve => { + subject.on('ready', () => { + assert(subject._transport !== null); + resolve(); + }); + }); + }); + + it('emits "updated" when a hint request is received', async () => { + const mst = makeTransport(); + const subject = makeTest(mst); + await waitForSometime(10); + + const serverMessage = { + 'type': 'publisher_hints', + 'publisher': { + 'id': 42, + 'hints': [ + { + 'track': 'foo', + 'encodings': [] + }, + { + 'track': 'boo', + 'encodings': [] + } + ] + } + }; + + const updatedPromise = new Promise(resolve => { + subject.on('updated', (hints, id) => { + assert.deepStrictEqual(hints, serverMessage.publisher.hints); + 
assert.strictEqual(id, serverMessage.publisher.id); + resolve(); + }); + }); + + mst.emit('message', serverMessage); + await updatedPromise; + }); + }); + + describe('sendHintResponse', () => { + it('does nothing when transport is not ready', () => { + const mst = makeTransport(); + const subject = makeTest(mst); + subject.sendHintResponse({ id: 100, hints: [{ track: 'foo', result: 'OK' }] }); + }); + + it('sends response provided', async () => { + const mst = makeTransport(); + const subject = makeTest(mst); + await new Promise(resolve => subject.on('ready', resolve)); + subject.sendHintResponse({ id: 100, hints: [{ track: 'foo', result: 'OK' }] }); + sinon.assert.calledWith(mst.publish, { + type: 'publisher_hints', + id: 100, + hints: [{ + track: 'foo', + result: 'OK' + }] + }); + }); + }); + + describe('sendTrackReplaced', () => { + it('does not crash when transport is not ready', () => { + const mst = makeTransport(); + const subject = makeTest(mst); + subject.sendTrackReplaced({ trackSid: 'bar' }); + }); + + it('sends client_reset message', async () => { + const mst = makeTransport(); + const subject = makeTest(mst); + await new Promise(resolve => subject.on('ready', resolve)); + subject.sendTrackReplaced({ trackSid: 'bar' }); + sinon.assert.calledWith(mst.publish, { + type: 'client_reset', + id: sinon.match.number, + track: 'bar' + }); + }); + }); +}); + +function makeTransport(onPublish) { + const transport = new EventEmitter(); + transport.publish = onPublish || sinon.stub(); + return transport; +} + +function makeTest(mst) { + const getReceiver = () => { + return Promise.resolve({ + kind: 'data', + toDataTransport: () => mst, + once: () => {} + }); + }; + + const subject = new PublisherHintSignaling(getReceiver, { log }); + subject.setup('foo'); + return subject; +} diff --git a/test/unit/spec/signaling/v2/renderhintssignaling.js b/test/unit/spec/signaling/v2/renderhintssignaling.js index 71b153b18..e310fe1ba 100644 --- 
a/test/unit/spec/signaling/v2/renderhintssignaling.js +++ b/test/unit/spec/signaling/v2/renderhintssignaling.js @@ -131,7 +131,6 @@ describe('RenderHintsSignaling', () => { subject.setTrackHint('foo', { enabled: false }); assert.strictEqual(subject._trackSidsToRenderHints.get('foo').isDimensionDirty, true); assert.strictEqual(subject._trackSidsToRenderHints.get('foo').isEnabledDirty, true); - }); it('processes subsequent messages only after a reply is received', async () => { @@ -149,7 +148,7 @@ describe('RenderHintsSignaling', () => { // wait for message to get published. await deferred.promise; - assert(publishCalls, 1); + assert.equal(publishCalls, 1); sinon.assert.calledWith(mst.publish, { type: 'render_hints', subscriber: { @@ -167,7 +166,7 @@ describe('RenderHintsSignaling', () => { // send another hint subject.setTrackHint('bar', { enabled: true, renderDimensions: { width: 200, height: 200 } }); await waitForSometime(10); - assert(publishCalls, 1); + assert.equal(publishCalls, 1); const serverMessage = { 'type': 'render_hints', @@ -189,18 +188,120 @@ describe('RenderHintsSignaling', () => { deferred = defer(); mst.emit('message', serverMessage); await deferred.promise; - assert(publishCalls, 2); + assert.equal(publishCalls, 2); + sinon.assert.calledWith(mst.publish, { + type: 'render_hints', + subscriber: { + id: sinon.match.number, + hints: [{ + 'track': 'bar', + 'enabled': true, + 'render_dimensions': { height: 200, width: 200 }, + }] + } + }); + }); + + it('re-sends all hints with exponential backoff until server responds', async function() { + let clock = sinon.useFakeTimers(); + const mst = makeTransport(); + const subject = makeTest(mst); + subject.setTrackHint('foo', { enabled: true, renderDimensions: { width: 100, height: 100 } }); + subject.setTrackHint('boo', { enabled: false }); + + let publishCalls = 0; + let publishTimes = []; + let deferred = defer(); + mst.publish.callsFake(() => { + publishCalls++; + publishTimes.push(Date.now()); + 
deferred.resolve(); + }); + + clock.tick(1000); + await deferred.promise; + assert.equal(publishCalls, 1); + sinon.assert.calledWith(mst.publish, { + type: 'render_hints', + subscriber: { + id: sinon.match.number, + hints: [{ + 'track': 'foo', + 'render_dimensions': { height: 100, width: 100 }, + 'enabled': true, + }, { + 'track': 'boo', + 'enabled': false, + }] + } + }); + + // send another hint + subject.setTrackHint('bar', { enabled: true, renderDimensions: { width: 200, height: 200 } }); + assert.equal(publishCalls, 1); + + clock.tick(2000); // simulate 2 seconds + + // we should expect 1st retry now. + assert.equal(publishCalls, 2); + + // expect publish to be called with all the hints. sinon.assert.calledWith(mst.publish, { type: 'render_hints', subscriber: { id: sinon.match.number, hints: [{ + 'track': 'foo', + 'render_dimensions': { height: 100, width: 100 }, + 'enabled': true, + }, { + 'track': 'boo', + 'enabled': false, + }, + { 'track': 'bar', 'enabled': true, 'render_dimensions': { height: 200, width: 200 }, }] } }); + + clock.tick(40000); // simulate 40 seconds + + // we expect 2nd retry to be made 2 second after 1st, and subsequent retries at exponential intervals. + assert.equal(publishCalls, 6); + assert.equal(publishTimes[2] - publishTimes[1], 2000); + assert.equal(publishTimes[3] - publishTimes[2], 4000); + assert.equal(publishTimes[4] - publishTimes[3], 8000); + assert.equal(publishTimes[5] - publishTimes[4], 16000); + + // simulate a server response. + const serverMessage = { + 'type': 'render_hints', + 'subscriber': { + 'id': 42, + 'hints': [ + { + 'track': 'foo', + 'result': 'OK' + }, + { + 'track': 'boo', + 'result': 'INVALID_RENDER_HINT' + }, + { + 'track': 'bar', + 'result': 'OK' + } + ] + } + }; + mst.emit('message', serverMessage); + + // simulate more time and verify that timer stops retrying. 
+ clock.tick(100000); // simulate 100 seconds + assert.equal(publishCalls, 6); + clock.restore(); }); }); diff --git a/test/unit/spec/signaling/v2/room.js b/test/unit/spec/signaling/v2/room.js index 3497e75d9..670c6cc73 100644 --- a/test/unit/spec/signaling/v2/room.js +++ b/test/unit/spec/signaling/v2/room.js @@ -1798,6 +1798,7 @@ function makeTest(options) { options.RemoteParticipantV2 = options.RemoteParticipantV2 || makeRemoteParticipantV2Constructor(options); options.localTracks = (options.localTracks || []).map(track => { + track.trackTransceiver = new EventEmitter(); const eventEmitter = new EventEmitter(); return Object.assign(eventEmitter, track); }); @@ -1875,6 +1876,7 @@ function makePeerConnectionManager(getRoom) { peerConnectionManager.setTrackSenders = sinon.spy(() => {}); peerConnectionManager.getTrackReceivers = sinon.spy(() => []); peerConnectionManager.setIceReconnectTimeout = sinon.spy(() => {}); + peerConnectionManager.setEffectiveAdaptiveSimulcast = sinon.spy(() => {}); // eslint-disable-next-line require-await peerConnectionManager.getStats = async () => { diff --git a/test/unit/spec/util/browserdetection.js b/test/unit/spec/util/browserdetection.js index 8e09ce0fd..5372b9edd 100644 --- a/test/unit/spec/util/browserdetection.js +++ b/test/unit/spec/util/browserdetection.js @@ -3,6 +3,7 @@ const assert = require('assert'); const { isIOSChrome } = require('@twilio/webrtc/lib/util'); +// eslint-disable-next-line no-warning-comments // TODO(joma): Move the contents of this file to twilio-webrtc.js. 
describe('isIOSChrome', () => { let oldAgent; diff --git a/test/unit/spec/util/index.js b/test/unit/spec/util/index.js index 9fbd0d1d7..f857b06d5 100644 --- a/test/unit/spec/util/index.js +++ b/test/unit/spec/util/index.js @@ -74,25 +74,50 @@ describe('util', () => { expectedPayload: { preferredAudioCodecs: JSON.stringify([{ codec: 'VP8', simulcast: true }]) }, }, { - testCase: 'name specified', + testCase: 'name specified as string', connectOptions: { name: 'room name goes here' }, expectedPayload: { roomName: 'room name goes here' }, }, + { + testCase: 'name specified as number', + connectOptions: { name: 1234 }, + expectedPayload: { roomName: '1234' }, + }, { testCase: 'region specified', connectOptions: { region: 'in1' }, expectedPayload: { region: 'in1' }, }, { - testCase: 'maxVideoBitrate specified', + testCase: 'maxVideoBitrate specified as number', connectOptions: { maxVideoBitrate: 100 }, expectedPayload: { maxVideoBitrate: 100 }, }, { - testCase: 'maxAudioBitrate specified', + testCase: 'maxVideoBitrate specified as string', + connectOptions: { maxVideoBitrate: '100' }, + expectedPayload: { maxVideoBitrate: 100 }, + }, + { + testCase: 'maxVideoBitrate specified as non-number string', + connectOptions: { maxVideoBitrate: 'foo' }, + expectedPayload: {}, + }, + { + testCase: 'maxAudioBitrate specified as number', connectOptions: { maxAudioBitrate: 100 }, expectedPayload: { maxAudioBitrate: 100 }, }, + { + testCase: 'maxAudioBitrate specified as string', + connectOptions: { maxAudioBitrate: '100' }, + expectedPayload: { maxAudioBitrate: 100 }, + }, + { + testCase: 'maxAudioBitrate specified as non-number string', + connectOptions: { maxAudioBitrate: 'foo' }, + expectedPayload: {}, + }, { testCase: 'networkQuality true', connectOptions: { networkQuality: true }, @@ -250,9 +275,7 @@ describe('util', () => { }); describe('chromeScreenShare', () => { - const validLabels = ['web-contents-media-stream://1174:3', 'window:1561:0', 'screen:2077749241:0']; - const 
invalidLabels = ['foo:bar:12356', 'fizz:123456:78901', 'fakelabel://123456']; - const mediaStreamTrack = { + const userMediaTrack = { kind: 'video', id: '1aaadf6e-6a4f-465b-96bf-1a35a2d3ac2b', enabled: true, @@ -261,8 +284,30 @@ describe('util', () => { onunmute: null, readyState: 'live', onended: null, - contentHint: '' + contentHint: '', + getSettings: () => { + return { + }; + } + }; + + const screenShareTrack = { + kind: 'video', + id: '1aaadf6e-6a4f-465b-96bf-1a35a2d3ac2b', + enabled: true, + muted: true, + onmute: null, + onunmute: null, + readyState: 'live', + onended: null, + contentHint: '', + getSettings: () => { + return { + displaySurface: 'monitor' + }; + } }; + let stub; beforeEach(() => { @@ -274,26 +319,20 @@ describe('util', () => { }); [['chrome', true], ['firefox', false], ['safari', false]].forEach(([browser, expectedBool]) => { - it(`valid labels should return ${expectedBool} for ${browser}`, () => { + it(`screen share track should return ${expectedBool} for ${browser}`, () => { stub = stub.returns(browser); - validLabels.forEach(label => { - mediaStreamTrack.label = label; - const screenShare = isChromeScreenShareTrack(mediaStreamTrack); - assert.equal(expectedBool, screenShare); - stub.resetHistory(); - }); + const screenShare = isChromeScreenShareTrack(screenShareTrack); + assert.equal(expectedBool, screenShare); + stub.resetHistory(); }); }); [['chrome', false], ['firefox', false], ['safari', false]].forEach(([browser, expectedBool]) => { - it(`invalid labels should return ${expectedBool} for ${browser}`, () => { + it(`user Media Track track should return ${expectedBool} for ${browser}`, () => { stub = stub.returns(browser); - invalidLabels.forEach(label => { - mediaStreamTrack.label = label; - const screenShare = isChromeScreenShareTrack(mediaStreamTrack); - assert.equal(expectedBool, screenShare); - stub.resetHistory(); - }); + const screenShare = isChromeScreenShareTrack(userMediaTrack); + assert.equal(expectedBool, screenShare); + 
stub.resetHistory(); }); }); }); diff --git a/test/unit/spec/util/sdp/index.js b/test/unit/spec/util/sdp/index.js index 96905ad4b..513c3890c 100644 --- a/test/unit/spec/util/sdp/index.js +++ b/test/unit/spec/util/sdp/index.js @@ -13,7 +13,7 @@ const { setSimulcast, unifiedPlanFilterLocalCodecs, removeSSRCAttributes, - revertSimulcastForNonVP8MediaSections, + revertSimulcast, unifiedPlanAddOrRewriteNewTrackIds, unifiedPlanAddOrRewriteTrackIds } = require('../../../../../lib/util/sdp'); @@ -800,7 +800,7 @@ a=ssrc:1111111111 label:d8b9a935-da54-4d21-a8de-522c87258244\r }); }); -describe('revertSimulcastForNonVP8MediaSections', () => { +describe('revertSimulcast', () => { combinationContext([ [ ['planb', 'unified'], @@ -810,11 +810,15 @@ describe('revertSimulcastForNonVP8MediaSections', () => { [true, false], x => `when the preferred payload type in answer is${x ? '' : ' not'} VP8` ], + [ + [true, false], + x => `when revertForAll is${x ? ' true' : ' false'}` + ], [ [new Set(['01234']), new Set(['01234', '56789'])], x => `when retransmission is${x.size === 2 ? 
'' : ' not'} supported` ] - ], ([sdpType, isVP8PreferredPayloadType, ssrcs]) => { + ], ([sdpType, isVP8PreferredPayloadType, revertForAll, ssrcs]) => { let sdp; let simSdp; let remoteSdp; @@ -835,10 +839,10 @@ describe('revertSimulcastForNonVP8MediaSections', () => { } else { remoteSdp = setCodecPreferences(sdp, [{ codec: 'PCMU' }], [{ codec: 'H264' }]); } - revertedSdp = revertSimulcastForNonVP8MediaSections(simSdp, sdp, remoteSdp); + revertedSdp = revertSimulcast(simSdp, sdp, remoteSdp, revertForAll); }); - if (isVP8PreferredPayloadType) { + if (isVP8PreferredPayloadType && !revertForAll) { it('should not revert simulcast SSRCs', () => { assert.equal(revertedSdp, simSdp); }); diff --git a/tsdef/twilio-video-tests.ts b/tsdef/twilio-video-tests.ts index 02fa2ccda..418bd4548 100644 --- a/tsdef/twilio-video-tests.ts +++ b/tsdef/twilio-video-tests.ts @@ -357,6 +357,12 @@ function useConnectionOptions() { return connectionOptions; } +function useAdaptiveSimulcast() { + const connectionOptions: Video.ConnectOptions = { + preferredVideoCodecs: 'auto', + }; +} + function runPreflight() { const preflight: Video.PreflightTest = Video.runPreflight('token', { region: 'us1' }); preflight.on('completed', (report: Video.PreflightTestReport) => { diff --git a/tsdef/types.d.ts b/tsdef/types.d.ts index 009a31971..c6f9e0b9c 100644 --- a/tsdef/types.d.ts +++ b/tsdef/types.d.ts @@ -76,6 +76,7 @@ export type AudioLevel = number; export type AudioCodec = 'isac' | 'opus' | 'PCMA' | 'PCMU'; export type VideoCodec = 'H264' | 'VP8' | 'VP9'; +export type VideoEncodingMode = 'auto'; /** * @deprecated */ @@ -188,7 +189,7 @@ export interface ConnectOptions { networkQuality?: boolean | NetworkQualityConfiguration; region?: string; preferredAudioCodecs?: Array; - preferredVideoCodecs?: Array; + preferredVideoCodecs?: Array | VideoEncodingMode; /** * @deprecated use Video.Logger.