feat(tests): Add start muted test.

Hristo Terezov
2025-02-04 08:36:54 -06:00
parent 8105127571
commit 3e1adcd9b7
11 changed files with 543 additions and 78 deletions
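The most caller-visible change is that Participant#waitForSendReceiveData now takes an IWaitForSendReceiveDataOptions object and absorbs the old waitForSendData helper. A minimal migration sketch, assuming the usual p1/p2/p3 participant fixtures used throughout these specs:

    // Before: positional arguments, plus a separate send-only helper.
    await p2.waitForSendReceiveData(15_000, `expected to receive/send data in 15s for ${p2.name}`);
    await p3.waitForSendData();

    // After: a single helper driven by the options object.
    await p2.waitForSendReceiveData({ checkSend: false });    // verify incoming data only
    await p3.waitForSendReceiveData({ checkReceive: false }); // verify outgoing data only
    await p1.waitForSendReceiveData({ timeout: 20_000, msg: 'custom timeout message' });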

View File

@@ -1,6 +1,7 @@
/* global APP $ */
import { multiremotebrowser } from '@wdio/globals';
import assert from 'assert';
import { Key } from 'webdriverio';
import { IConfig } from '../../react/features/base/config/configType';
@@ -10,6 +11,7 @@ import ChatPanel from '../pageobjects/ChatPanel';
import Filmstrip from '../pageobjects/Filmstrip';
import IframeAPI from '../pageobjects/IframeAPI';
import InviteDialog from '../pageobjects/InviteDialog';
import LargeVideo from '../pageobjects/LargeVideo';
import LobbyScreen from '../pageobjects/LobbyScreen';
import Notifications from '../pageobjects/Notifications';
import ParticipantsPane from '../pageobjects/ParticipantsPane';
@@ -27,6 +29,13 @@ export const P2_DISPLAY_NAME = 'p2';
export const P3_DISPLAY_NAME = 'p3';
export const P4_DISPLAY_NAME = 'p4';
interface IWaitForSendReceiveDataOptions {
checkReceive?: boolean;
checkSend?: boolean;
msg?: string;
timeout?: number;
}
/**
* Participant.
*/
@@ -91,7 +100,7 @@ export class Participant {
async getEndpointId(): Promise<string> {
if (!this._endpointId) {
this._endpointId = await this.driver.execute(() => { // eslint-disable-line arrow-body-style
return APP.conference.getMyUserId();
return APP?.conference?.getMyUserId();
});
}
@@ -209,7 +218,7 @@ export class Participant {
const parallel = [];
parallel.push(driver.execute((name, sessionId, prefix) => {
APP.UI.dockToolbar(true);
APP?.UI?.dockToolbar(true);
// disable keyframe animations (.fadeIn and .fadeOut classes)
$('<style>.notransition * { '
@@ -274,8 +283,8 @@ export class Participant {
/**
* Checks if the participant is in the meeting.
*/
async isInMuc() {
return await this.driver.execute(() => typeof APP !== 'undefined' && APP.conference?.isJoined());
isInMuc() {
return this.driver.execute(() => typeof APP !== 'undefined' && APP.conference?.isJoined());
}
/**
@@ -326,7 +335,7 @@ export class Participant {
const driver = this.driver;
return driver.waitUntil(() =>
driver.execute(() => APP.conference.getConnectionState() === 'connected'), {
driver.execute(() => APP?.conference?.getConnectionState() === 'connected'), {
timeout: 15_000,
timeoutMsg: `expected ICE to be connected for 15s for ${this.name}`
});
@@ -335,47 +344,35 @@ export class Participant {
/**
* Waits for send and receive data.
*
* @param {Object} options
* @param {boolean} options.checkSend - If true we will check that data is being sent.
* @param {boolean} options.checkReceive - If true we will check that data is being received.
* @param {number} options.timeout - How long to wait for the data, in milliseconds.
* @param {string} options.msg - Optional custom message for the timeout error.
* @returns {Promise<void>}
*/
async waitForSendReceiveData(
timeout = 15_000, msg = `expected to receive/send data in 15s for ${this.name}`): Promise<void> {
const driver = this.driver;
waitForSendReceiveData({
checkSend = true,
checkReceive = true,
timeout = 15_000,
msg
} = {} as IWaitForSendReceiveDataOptions): Promise<void> {
if (!checkSend && !checkReceive) {
return Promise.resolve();
}
return driver.waitUntil(() => driver.execute(() => {
const stats = APP.conference.getStats();
const lMsg = msg ?? `expected to ${
checkSend && checkReceive ? 'receive/send' : checkSend ? 'send' : 'receive'} data in 15s for ${this.name}`;
return this.driver.waitUntil(() => this.driver.execute((pCheckSend: boolean, pCheckReceive: boolean) => {
const stats = APP?.conference?.getStats();
const bitrateMap = stats?.bitrate || {};
const rtpStats = {
uploadBitrate: bitrateMap.upload || 0,
downloadBitrate: bitrateMap.download || 0
};
return rtpStats.uploadBitrate > 0 && rtpStats.downloadBitrate > 0;
}), {
return (rtpStats.uploadBitrate > 0 || !pCheckSend) && (rtpStats.downloadBitrate > 0 || !pCheckReceive);
}, checkSend, checkReceive), {
timeout,
timeoutMsg: msg
});
}
/**
* Waits for send and receive data.
*
* @returns {Promise<void>}
*/
async waitForSendData(
timeout = 15_000, msg = `expected to send data in 15s for ${this.name}`): Promise<void> {
const driver = this.driver;
return driver.waitUntil(() => driver.execute(() => {
const stats = APP.conference.getStats();
const bitrateMap = stats?.bitrate || {};
const rtpStats = {
uploadBitrate: bitrateMap.upload || 0
};
return rtpStats.uploadBitrate > 0;
}), {
timeout,
timeoutMsg: msg
timeoutMsg: lMsg
});
}
@@ -389,7 +386,7 @@ export class Participant {
const driver = this.driver;
return driver.waitUntil(() =>
driver.execute(count => APP.conference.getNumberOfParticipantsWithTracks() >= count, number), {
driver.execute(count => (APP?.conference?.getNumberOfParticipantsWithTracks() ?? -1) >= count, number), {
timeout: 15_000,
timeoutMsg: `expected number of remote streams:${number} in 15s for ${this.name}`
});
@@ -405,10 +402,12 @@ export class Participant {
waitForParticipants(number: number, msg?: string): Promise<void> {
const driver = this.driver;
return driver.waitUntil(() => driver.execute(count => APP.conference.listMembers().length === count, number), {
timeout: 15_000,
timeoutMsg: msg || `not the expected participants ${number} in 15s for ${this.name}`
});
return driver.waitUntil(
() => driver.execute(count => (APP?.conference?.listMembers()?.length ?? -1) === count, number),
{
timeout: 15_000,
timeoutMsg: msg || `not the expected participants ${number} in 15s for ${this.name}`
});
}
/**
@@ -470,6 +469,15 @@ export class Participant {
return new ParticipantsPane(this);
}
/**
* Returns the large video page object.
*
* @returns {LargeVideo}
*/
getLargeVideo(): LargeVideo {
return new LargeVideo(this);
}
/**
* Returns the videoQuality Dialog.
*
@@ -546,7 +554,7 @@ export class Participant {
}
// do a hangup, to make sure unavailable presence is sent
await this.driver.execute(() => typeof APP !== 'undefined' && APP?.conference?.hangup());
await this.driver.execute(() => typeof APP !== 'undefined' && APP.conference?.hangup());
// let's give it some time to leave the muc, we redirect after hangup so we should wait for the
// change of url
@@ -608,29 +616,6 @@ export class Participant {
return await avatar.isExisting() ? await avatar.getAttribute('src') : null;
}
/**
* Gets avatar SRC attribute for the one displayed on large video.
*/
async getLargeVideoAvatar() {
const avatar = this.driver.$('//img[@id="dominantSpeakerAvatar"]');
return await avatar.isExisting() ? await avatar.getAttribute('src') : null;
}
/**
* Returns resource part of the JID of the user who is currently displayed in the large video area.
*/
async getLargeVideoResource() {
return await this.driver.execute(() => APP.UI.getLargeVideoID());
}
/**
* Returns the source of the large video currently shown.
*/
async getLargeVideoId() {
return this.driver.execute('return document.getElementById("largeVideo").srcObject.id');
}
/**
* Makes sure that the avatar is displayed in the local thumbnail and that the video is not displayed.
* There are 3 options for avatar:
@@ -700,6 +685,85 @@ export class Participant {
return this.driver.$('div[data-testid="dialog.leaveReason"]').isDisplayed();
}
/**
* Returns the audio level of a remote participant, as measured by this participant.
*
* @param p - The participant whose audio level is retrieved.
* @returns The audio level formatted with two decimal places, or null if no level is reported.
*/
async getRemoteAudioLevel(p: Participant) {
const jid = await p.getEndpointId();
return await this.driver.execute(id => {
const level = APP?.conference?.getPeerSSRCAudioLevel(id);
return level ? level.toFixed(2) : null;
}, jid);
}
/**
* Waits for the given participant's audio to appear muted/unmuted from this
* observer's perspective. The method fails the test if the audio muted status
* differs from the expected one. We wait up to 3 seconds for the muted status
* (and up to 6 seconds for unmuted) to appear.
*
* @param testee - instance of the participant for whom we're checking the audio muted status.
* @param muted - <tt>true</tt> to wait for audio muted status or <tt>false</tt> to wait for the participant to
* unmute.
*/
async waitForAudioMuted(testee: Participant, muted: boolean): Promise<void> {
// Waits for the correct icon
await this.getFilmstrip().assertAudioMuteIconIsDisplayed(testee, !muted);
// Extended timeout for 'unmuted' to make tests more resilient to
// unexpected glitches.
const timeout = muted ? 3_000 : 6_000;
// Wait for silence or for some audio from the testee,
// depending on the "muted" argument.
try {
await this.driver.waitUntil(async () => {
const audioLevel = await this.getRemoteAudioLevel(testee);
if (muted) {
if (audioLevel !== null && audioLevel > 0.1) {
console.log(`muted exiting on: ${audioLevel}`);
return true;
}
return false;
}
// When testing for unmuted we wait for the first sound
if (audioLevel !== null && audioLevel > 0.1) {
console.log(`unmuted exiting on: ${audioLevel}`);
return true;
}
return false;
},
{ timeout });
// Reaching this point means the waitUntil condition succeeded, i.e. sound
// was detected; that is a failure when we expect the participant to be muted.
if (muted) {
const name = await testee.displayName;
assert.fail(`There was some sound coming from muted: '${name}'`);
} // else we're good for unmuted participant
} catch (_timeoutE) {
if (!muted) {
const name = await testee.displayName;
assert.fail(`There was no sound from unmuted: '${name}'`);
} // else we're good for muted participant
}
}
/**
* Waits for remote video state - receiving and displayed.
* @param endpointId

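The new audio-level assertions above are exercised in both directions by the start-muted spec further down; a minimal usage sketch, assuming the p1/p2 fixtures from ctx:

    // p2 joined with audio muted: p1 should not read any audio level from p2.
    await p1.waitForAudioMuted(p2, true);

    // After p2 unmutes, p1 should start picking up a non-zero audio level.
    await p2.getToolbar().clickAudioUnmuteButton();
    await p1.waitForAudioMuted(p2, false);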
View File

@@ -53,6 +53,38 @@ export async function ensureThreeParticipants(ctx: IContext, options: IJoinOptio
]);
}
/**
* Creates the second participant instance or prepares one for re-joining.
*
* @param {Object} ctx - The context.
* @param {IJoinOptions} options - The options to use when joining the participant.
* @returns {Promise<void>}
*/
export function joinSecondParticipant(ctx: IContext, options: IJoinOptions = {}): Promise<void> {
return _joinParticipant('participant2', ctx.p2, p => {
ctx.p2 = p;
}, {
displayName: P2_DISPLAY_NAME,
...options
});
}
/**
* Creates the third participant instance or prepares one for re-joining.
*
* @param {Object} ctx - The context.
* @param {IJoinOptions} options - The options to use when joining the participant.
* @returns {Promise<void>}
*/
export function joinThirdParticipant(ctx: IContext, options: IJoinOptions = {}): Promise<void> {
return _joinParticipant('participant3', ctx.p3, p => {
ctx.p3 = p;
}, {
displayName: P3_DISPLAY_NAME,
...options
});
}
/**
* Ensure that there are four participants.
*

View File

@@ -36,7 +36,7 @@ export default class Filmstrip extends BasePageObject {
await this.participant.driver.$(mutedIconXPath).waitForDisplayed({
reverse,
timeout: 2000,
timeout: 5_000,
timeoutMsg: `Audio mute icon is${reverse ? '' : ' not'} displayed for ${testee.name}`
});
}

View File

@@ -0,0 +1,81 @@
import BasePageObject from './BasePageObject';
/**
* The large video.
*/
export default class LargeVideo extends BasePageObject {
/**
* Returns the current playback time of the large video element.
*
* @return {number} - The current play time of the video element.
*/
async getPlaytime() {
return this.participant.driver.$('#largeVideo').getProperty('currentTime');
}
/**
* Waits up to 5 seconds for the large video to switch to the passed endpoint id.
*
* @param {string} endpointId - The endpoint.
* @returns {Promise<void>}
*/
waitForSwitchTo(endpointId: string): Promise<void> {
return this.participant.driver.waitUntil(async () => endpointId === await this.getResource(), {
timeout: 5_000,
timeoutMsg: `expected large video to switch to ${endpointId} for 5s`
});
}
/**
* Gets avatar SRC attribute for the one displayed on large video.
*/
async getAvatar() {
const avatar = this.participant.driver.$('//img[@id="dominantSpeakerAvatar"]');
return await avatar.isExisting() ? await avatar.getAttribute('src') : null;
}
/**
* Returns resource part of the JID of the user who is currently displayed in the large video area.
*/
getResource() {
return this.participant.driver.execute(() => APP.UI.getLargeVideoID());
}
/**
* Returns the source of the large video currently shown.
*/
getId() {
return this.participant.driver.execute('return document.getElementById("largeVideo").srcObject.id');
}
/**
* Asserts that the large video is playing by checking that its playback time advances.
*
* @returns {Promise<void>}
*/
assertPlaying() {
let lastTime: number;
return this.participant.driver.waitUntil(async () => {
const currentTime = parseFloat(await this.getPlaytime());
if (typeof lastTime === 'undefined') {
lastTime = currentTime;
}
if (currentTime > lastTime) {
return true;
}
lastTime = currentTime;
return false;
}, {
timeout: 5_500,
interval: 500,
timeoutMsg:
`Expected large video for participant ${this.participant.name} to play but it didn't for more than 5s`
});
}
}
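The large-video helpers that previously lived on Participant (getLargeVideoAvatar, getLargeVideoResource, getLargeVideoId) move onto this page object; a rough migration sketch based on the spec changes below, assuming the p1/p2 fixtures:

    // Before: helpers directly on Participant.
    const avatarSrc = await p2.getLargeVideoAvatar();
    const resource = await p2.getLargeVideoResource();
    const videoId = await p2.getLargeVideoId();

    // After: everything hangs off the LargeVideo page object.
    const largeVideo = p2.getLargeVideo();
    const newAvatarSrc = await largeVideo.getAvatar();
    await largeVideo.waitForSwitchTo(await p1.getEndpointId());
    await largeVideo.assertPlaying();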

View File

@@ -4,6 +4,8 @@ const EMAIL_FIELD = '#setEmail';
const FOLLOW_ME_CHECKBOX = '//input[@name="follow-me"]';
const HIDE_SELF_VIEW_CHECKBOX = '//input[@name="hide-self-view"]';
const SETTINGS_DIALOG_CONTENT = '.settings-pane';
const START_AUDIO_MUTED_CHECKBOX = '//input[@name="start-audio-muted"]';
const START_VIDEO_MUTED_CHECKBOX = '//input[@name="start-video-muted"]';
const X_PATH_MODERATOR_TAB = '//div[contains(@class, "settings-dialog")]//*[text()="Moderator"]';
const X_PATH_MORE_TAB = '//div[contains(@class, "settings-dialog")]//*[text()="General"]';
const X_PATH_PROFILE_TAB = '//div[contains(@class, "settings-dialog")]//*[text()="Profile"]';
@@ -78,6 +80,28 @@ export default class SettingsDialog extends BaseDialog {
return this.clickOkButton();
}
/**
* Sets the start audio muted feature to enabled/disabled.
* @param {boolean} enable - true for enabled and false for disabled.
* @returns {Promise<void>}
*/
async setStartAudioMuted(enable: boolean) {
await this.openModeratorTab();
await this.setCheckbox(START_AUDIO_MUTED_CHECKBOX, enable);
}
/**
* Sets the start video muted feature to enabled/disabled.
* @param {boolean} enable - true for enabled and false for disabled.
* @returns {Promise<void>}
*/
async setStartVideoMuted(enable: boolean) {
await this.openModeratorTab();
await this.setCheckbox(START_VIDEO_MUTED_CHECKBOX, enable);
}
/**
* Sets the state checked/selected of a checkbox in the settings dialog.
*/

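The new moderator-tab checkboxes are driven from the start-muted spec below; a minimal sketch of the flow, assuming the p1 fixture and the toolbar/settings page objects already used in these tests:

    // Open the settings dialog from the toolbar and enable both
    // "start audio muted" and "start video muted" for the conference.
    await p1.getToolbar().clickSettingsButton();

    const settingsDialog = p1.getSettingsDialog();

    await settingsDialog.waitForDisplay();
    await settingsDialog.setStartAudioMuted(true);
    await settingsDialog.setStartVideoMuted(true);
    await settingsDialog.submit();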
View File

@@ -1,4 +1,3 @@
/* global APP */
import type { Participant } from '../../helpers/Participant';
import { ensureThreeParticipants, muteAudioAndCheck } from '../../helpers/participants';
@@ -61,7 +60,7 @@ async function testActiveSpeaker(
const otherParticipant1Driver = otherParticipant1.driver;
await otherParticipant1Driver.waitUntil(
() => otherParticipant1Driver.execute(id => APP.UI.getLargeVideoID() === id, speakerEndpoint),
async () => await otherParticipant1.getLargeVideo().getResource() === speakerEndpoint,
{
timeout: 30_000, // 30 seconds
timeoutMsg: 'Active speaker not displayed on large video.'

View File

@@ -50,7 +50,7 @@ describe('Avatar', () => {
});
// check if the avatar in the large video has changed
expect(await p2.getLargeVideoAvatar()).toContain(HASH);
expect(await p2.getLargeVideo().getAvatar()).toContain(HASH);
// we check whether the default avatar of participant2 is displayed on both sides
await p1.assertDefaultAvatarExist(p2);
@@ -72,12 +72,12 @@ describe('Avatar', () => {
await p1.getParticipantsPane().assertVideoMuteIconIsDisplayed(p1);
await p1.driver.waitUntil(
async () => (await p1.getLargeVideoAvatar())?.includes(HASH), {
async () => (await p1.getLargeVideo().getAvatar())?.includes(HASH), {
timeout: 2000,
timeoutMsg: 'Avatar on large video did not change'
});
const p1LargeSrc = await p1.getLargeVideoAvatar();
const p1LargeSrc = await p1.getLargeVideo().getAvatar();
const p1ThumbSrc = await p1.getLocalVideoAvatar();
// Check if avatar on large video is the same as on local thumbnail
@@ -96,7 +96,7 @@ describe('Avatar', () => {
// Check if p1's avatar is on large video now
await p2.driver.waitUntil(
async () => await p2.getLargeVideoAvatar() === p1LargeSrc, {
async () => await p2.getLargeVideo().getAvatar() === p1LargeSrc, {
timeout: 2000,
timeoutMsg: 'Avatar on large video did not change'
});
@@ -141,7 +141,7 @@ describe('Avatar', () => {
// The avatar should be on large video and display name instead of an avatar, local video displayed
await p3.driver.waitUntil(
async () => await p3.getLargeVideoResource() === p1EndpointId, {
async () => await p3.getLargeVideo().getResource() === p1EndpointId, {
timeout: 2000,
timeoutMsg: `Large video did not switch to ${p1.name}`
});
@@ -158,7 +158,7 @@ describe('Avatar', () => {
// The avatar should be on large video and display name instead of an avatar, local video displayed
await p3.driver.waitUntil(
async () => await p3.getLargeVideoResource() === p2EndpointId, {
async () => await p3.getLargeVideo().getResource() === p2EndpointId, {
timeout: 2000,
timeoutMsg: `Large video did not switch to ${p2.name}`
});

View File

@@ -75,7 +75,7 @@ describe('Follow Me', () => {
const localVideoId = await p2Filmstrip.getLocalVideoId();
await p2.driver.waitUntil(
async () => await localVideoId === await p2.getLargeVideoId(),
async () => await localVideoId === await p2.getLargeVideo().getId(),
{
timeout: 5_000,
timeoutMsg: 'The pinned participant is not displayed on stage for p2'
@@ -84,7 +84,7 @@ describe('Follow Me', () => {
const p2VideoIdOnp3 = await p3.getFilmstrip().getRemoteVideoId(await p2.getEndpointId());
await p3.driver.waitUntil(
async () => p2VideoIdOnp3 === await p3.getLargeVideoId(),
async () => p2VideoIdOnp3 === await p3.getLargeVideo().getId(),
{
timeout: 5_000,
timeoutMsg: 'The pinned participant is not displayed on stage for p3'

View File

@@ -0,0 +1,265 @@
import {
ensureOneParticipant,
ensureTwoParticipants,
joinSecondParticipant,
joinThirdParticipant,
unmuteVideoAndCheck
} from '../../helpers/participants';
describe('StartMuted', () => {
it('checkboxes test', async () => {
const options = { configOverwrite: { p2p: { enabled: true } } };
await ensureOneParticipant(ctx, options);
const { p1 } = ctx;
await p1.getToolbar().clickSettingsButton();
const settingsDialog = p1.getSettingsDialog();
await settingsDialog.waitForDisplay();
await settingsDialog.setStartAudioMuted(true);
await settingsDialog.setStartVideoMuted(true);
await settingsDialog.submit();
await joinSecondParticipant(ctx, {
...options,
skipInMeetingChecks: true
});
const { p2 } = ctx;
await p2.waitForIceConnected();
await p2.waitForSendReceiveData({ checkSend: false });
await p2.getFilmstrip().assertAudioMuteIconIsDisplayed(p2);
await p2.getParticipantsPane().assertVideoMuteIconIsDisplayed(p2);
await p1.waitForAudioMuted(p2, true);
await p2.getFilmstrip().assertAudioMuteIconIsDisplayed(p1, true);
await p2.getParticipantsPane().assertVideoMuteIconIsDisplayed(p1, true);
// Unmute p2's audio and video and check that p2 appears unmuted on p1.
await Promise.all([
p2.getToolbar().clickAudioUnmuteButton(), p2.getToolbar().clickVideoUnmuteButton()
]);
await p2.getFilmstrip().assertAudioMuteIconIsDisplayed(p2, true);
await p2.getParticipantsPane().assertVideoMuteIconIsDisplayed(p2, true);
await p1.waitForAudioMuted(p2, false);
// Add a third participant and check p3 is able to receive audio and video from p2.
await joinThirdParticipant(ctx, {
...options,
skipInMeetingChecks: true
});
const { p3 } = ctx;
await p3.waitForIceConnected();
await p3.waitForSendReceiveData({ checkSend: false });
await p3.getFilmstrip().assertAudioMuteIconIsDisplayed(p2, true);
await p3.getParticipantsPane().assertVideoMuteIconIsDisplayed(p2, true);
});
it('config options test', async () => {
await hangupAllParticipants();
const options = {
configOverwrite: {
testing: {
testMode: true,
debugAudioLevels: true
},
startAudioMuted: 2,
startVideoMuted: 2
}
};
await ensureOneParticipant(ctx, options);
await joinSecondParticipant(ctx, { skipInMeetingChecks: true });
const { p2 } = ctx;
await p2.waitForIceConnected();
await p2.waitForSendReceiveData({ checkSend: false });
await joinThirdParticipant(ctx, { skipInMeetingChecks: true });
const { p3 } = ctx;
await p3.waitForIceConnected();
await p3.waitForSendReceiveData({ checkSend: false });
const { p1 } = ctx;
const p2ID = await p2.getEndpointId();
p1.log(`Start configOptionsTest, second participant: ${p2ID}`);
// Participant 3 should be muted, 1 and 2 unmuted.
await p3.getFilmstrip().assertAudioMuteIconIsDisplayed(p3);
await p3.getParticipantsPane().assertVideoMuteIconIsDisplayed(p3);
await Promise.all([
p1.waitForAudioMuted(p3, true),
p2.waitForAudioMuted(p3, true)
]);
await p3.getFilmstrip().assertAudioMuteIconIsDisplayed(p1, true);
await p3.getFilmstrip().assertAudioMuteIconIsDisplayed(p2, true);
await p3.getParticipantsPane().assertVideoMuteIconIsDisplayed(p1, true);
await p3.getParticipantsPane().assertVideoMuteIconIsDisplayed(p2, true);
// Unmute and see if the audio works
await p3.getToolbar().clickAudioUnmuteButton();
p1.log('configOptionsTest, unmuted third participant');
await p1.waitForAudioMuted(p3, false /* unmuted */);
});
it('startWithVideoMuted=true can unmute', async () => {
// Maybe disable if there is a FF or Safari participant.
await hangupAllParticipants();
// Explicitly enable P2P due to a regression with unmute not updating
// large video while in P2P.
const options = {
configOverwrite: {
p2p: {
enabled: true
},
startWithVideoMuted: true
}
};
await ensureTwoParticipants(ctx, options);
const { p1, p2 } = ctx;
await p1.getParticipantsPane().assertVideoMuteIconIsDisplayed(p2);
await p2.getParticipantsPane().assertVideoMuteIconIsDisplayed(p1);
await Promise.all([
p1.getLargeVideo().waitForSwitchTo(await p2.getEndpointId()),
p2.getLargeVideo().waitForSwitchTo(await p1.getEndpointId())
]);
await unmuteVideoAndCheck(p2, p1);
await p1.getLargeVideo().assertPlaying();
});
it('startWithAudioMuted=true can unmute', async () => {
await hangupAllParticipants();
const options = {
configOverwrite: {
startWithAudioMuted: true
}
};
await ensureTwoParticipants(ctx, options);
const { p1, p2 } = ctx;
await Promise.all([ p1.waitForAudioMuted(p2, true), p2.waitForAudioMuted(p1, true) ]);
await p1.getToolbar().clickAudioUnmuteButton();
await Promise.all([ p1.waitForAudioMuted(p2, true), p2.waitForAudioMuted(p1, false) ]);
});
it('startWithAudioVideoMuted=true can unmute', async () => {
await hangupAllParticipants();
const options = {
configOverwrite: {
startWithAudioMuted: true,
startWithVideoMuted: true,
p2p: {
enabled: true
}
}
};
await Promise.all([
ensureOneParticipant(ctx, options),
joinSecondParticipant(ctx, {
configOverwrite: {
p2p: {
enabled: true
}
},
skipInMeetingChecks: true
})
]);
const { p1, p2 } = ctx;
await p2.waitForIceConnected();
await p2.waitForSendReceiveData({ checkReceive: false });
await p2.waitForAudioMuted(p1, true);
await p2.getParticipantsPane().assertVideoMuteIconIsDisplayed(p1);
// Unmute p1's audio and video and check on p2.
await p1.getToolbar().clickAudioUnmuteButton();
await p2.waitForAudioMuted(p1, false);
await unmuteVideoAndCheck(p1, p2);
await p2.getLargeVideo().assertPlaying();
});
it('test p2p JVB switch and switch back', async () => {
const { p1, p2 } = ctx;
// Mute p2's video just before p3 joins.
await p2.getToolbar().clickVideoMuteButton();
await joinThirdParticipant(ctx, {
configOverwrite: {
p2p: {
enabled: true
}
}
});
const { p3 } = ctx;
// Unmute p2 and check if its video is being received by p1 and p3.
await p2.getToolbar().clickVideoUnmuteButton();
await p1.getParticipantsPane().assertVideoMuteIconIsDisplayed(p2, true);
await p3.getParticipantsPane().assertVideoMuteIconIsDisplayed(p2, true);
// Mute p2's video just before p3 leaves.
await p2.getToolbar().clickVideoMuteButton();
await p3.hangup();
await p1.getParticipantsPane().assertVideoMuteIconIsDisplayed(p2);
await p2.getToolbar().clickVideoUnmuteButton();
// Check if p2's video is playing on p1.
await p1.getParticipantsPane().assertVideoMuteIconIsDisplayed(p2, true);
await p1.getLargeVideo().assertPlaying();
});
});
/**
* Hangs up all participants (p1, p2 and p3)
* @returns {Promise<void>}
*/
function hangupAllParticipants() {
return Promise.all([ ctx.p1?.hangup(), ctx.p2?.hangup(), ctx.p3?.hangup() ]);
}

View File

@@ -251,7 +251,7 @@ describe('Desktop sharing', () => {
await checkForScreensharingTile(p3, p2);
// The desktop sharing participant should be on large
expect(await p1.driver.execute(() => APP.UI.getLargeVideoID())).toBe(`${await p3.getEndpointId()}-v1`);
expect(await p1.getLargeVideo().getResource()).toBe(`${await p3.getEndpointId()}-v1`);
// the video should be playing
await p1.driver.waitUntil(() => p1.driver.execute(() => JitsiMeetJS.app.testing.isLargeVideoReceived()), {

View File

@@ -23,7 +23,7 @@ describe('lastN', () => {
const { p3 } = ctx;
const p3Toolbar = p3.getToolbar();
await p3.waitForSendData();
await p3.waitForSendReceiveData({ checkReceive: false });
await ctx.p1.waitForRemoteVideo(await p3.getEndpointId());