feat(av-moderation) implement screen-sharing moderation

Saúl Ibarra Corretgé
2025-06-30 10:59:28 +02:00
committed by Saúl Ibarra Corretgé
parent 31a30f1118
commit 2305ae85a0
48 changed files with 1272 additions and 456 deletions

View File

@@ -18,8 +18,6 @@ import {
maybeRedirectToWelcomePage,
reloadWithStoredParams
} from './react/features/app/actions';
import { showModeratedNotification } from './react/features/av-moderation/actions';
import { shouldShowModeratedNotification } from './react/features/av-moderation/functions';
import {
_conferenceWillJoin,
authStatusChanged,
@@ -153,7 +151,6 @@ import {
DATA_CHANNEL_CLOSED_NOTIFICATION_ID,
NOTIFICATION_TIMEOUT_TYPE
} from './react/features/notifications/constants';
import { isModerationNotificationDisplayed } from './react/features/notifications/functions';
import { suspendDetected } from './react/features/power-monitor/actions';
import { initPrejoin, isPrejoinPageVisible } from './react/features/prejoin/functions';
import { disableReceiver, stopReceiver } from './react/features/remote-control/actions';
@@ -704,15 +701,6 @@ export default {
return;
}
// check for A/V Moderation when trying to unmute
if (!mute && shouldShowModeratedNotification(MEDIA_TYPE.AUDIO, state)) {
if (!isModerationNotificationDisplayed(MEDIA_TYPE.AUDIO, state)) {
APP.store.dispatch(showModeratedNotification(MEDIA_TYPE.AUDIO));
}
return;
}
await APP.store.dispatch(setAudioMuted(mute, true));
},
@@ -746,12 +734,6 @@ export default {
* dialogs in case of media permissions error.
*/
muteVideo(mute) {
if (this.videoSwitchInProgress) {
logger.warn('muteVideo - unable to perform operations while video switch is in progress');
return;
}
const state = APP.store.getState();
if (!mute
@@ -761,11 +743,6 @@ export default {
return;
}
// check for A/V Moderation when trying to unmute and return early
if (!mute && shouldShowModeratedNotification(MEDIA_TYPE.VIDEO, state)) {
return;
}
APP.store.dispatch(setVideoMuted(mute, VIDEO_MUTISM_AUTHORITY.USER, true));
},
@@ -1019,7 +996,6 @@ export default {
// Restore initial state.
this._localTracksInitialized = false;
this.isSharingScreen = false;
this.roomName = roomName;
const { tryCreateLocalTracks, errors } = this.createInitialLocalTracks(options);
@@ -1198,8 +1174,6 @@ export default {
return Boolean(APP.store.getState()['features/base/audio-only'].enabled);
},
videoSwitchInProgress: false,
/**
* This field stores a handler which will create a Promise which turns off
* the screen sharing and restores the previous video state (was there
@@ -1228,7 +1202,6 @@ export default {
*/
async _turnScreenSharingOff(didHaveVideo, ignoreDidHaveVideo) {
this._untoggleScreenSharing = null;
this.videoSwitchInProgress = true;
APP.store.dispatch(stopReceiver());
@@ -1280,13 +1253,11 @@ export default {
return promise.then(
() => {
this.videoSwitchInProgress = false;
sendAnalytics(createScreenSharingEvent('stopped',
duration === 0 ? null : duration));
logger.info('Screen sharing stopped.');
},
error => {
this.videoSwitchInProgress = false;
logger.error(`_turnScreenSharingOff failed: ${error}`);
throw error;
@@ -1316,14 +1287,13 @@ export default {
this._untoggleScreenSharing
= this._turnScreenSharingOff.bind(this, didHaveVideo);
const desktopVideoStream = desktopStreams.find(stream => stream.getType() === MEDIA_TYPE.VIDEO);
const desktopAudioStream = desktopStreams.find(stream => stream.getType() === MEDIA_TYPE.AUDIO);
if (desktopAudioStream) {
desktopAudioStream.on(
JitsiTrackEvents.LOCAL_TRACK_STOPPED,
() => {
logger.debug(`Local screensharing audio track stopped. ${this.isSharingScreen}`);
logger.debug('Local screensharing audio track stopped.');
// Handle case where screen share was stopped from the browser's 'screen share in progress'
// window. If audio screen sharing is stopped via the normal UX flow this point shouldn't
@@ -1335,21 +1305,6 @@ export default {
);
}
if (desktopVideoStream) {
desktopVideoStream.on(
JitsiTrackEvents.LOCAL_TRACK_STOPPED,
() => {
logger.debug(`Local screensharing track stopped. ${this.isSharingScreen}`);
// If the stream was stopped during screen sharing
// session then we should switch back to video.
this.isSharingScreen
&& this._untoggleScreenSharing
&& this._untoggleScreenSharing();
}
);
}
return desktopStreams;
}, error => {
throw error;
@@ -1497,10 +1452,6 @@ export default {
room.on(JitsiConferenceEvents.TRACK_MUTE_CHANGED, (track, participantThatMutedUs) => {
if (participantThatMutedUs) {
APP.store.dispatch(participantMutedUs(participantThatMutedUs, track));
if (this.isSharingScreen && track.isVideoTrack()) {
logger.debug('TRACK_MUTE_CHANGED while screen sharing');
this._turnScreenSharingOff(false);
}
}
});

View File

@@ -380,6 +380,8 @@
"muteEveryoneDialogModerationOn": "The participants can send a request to speak at any time.",
"muteEveryoneElseDialog": "Once muted, you won't be able to unmute them, but they can unmute themselves at any time.",
"muteEveryoneElseTitle": "Mute everyone except {{whom}}?",
"muteEveryoneElsesDesktopDialog": "Once the share is stopped, you won't be able to restart it, but they can do so at any time.",
"muteEveryoneElsesDesktopTitle": "Stop everyone's screen-share except {{whom}}?",
"muteEveryoneElsesVideoDialog": "Once the camera is disabled, you won't be able to turn it back on, but they can turn it back on at any time.",
"muteEveryoneElsesVideoTitle": "Stop everyone's video except {{whom}}?",
"muteEveryoneSelf": "yourself",
@@ -391,6 +393,12 @@
"muteEveryonesVideoTitle": "Stop everyone's video?",
"muteParticipantBody": "You won't be able to unmute them, but they can unmute themselves at any time.",
"muteParticipantButton": "Mute",
"muteParticipantsDesktopBody": "You won't be able to start their screen-share, but they can do so at any time.",
"muteParticipantsDesktopBodyModerationOn": "You won't be able to start their screen-share and neither will they.",
"muteParticipantsDesktopButton": "Stop screen sharing",
"muteParticipantsDesktopDialog": "Are you sure you want to turn off this participant's screen-share? You won't be able to restart it, but they can do so at any time.",
"muteParticipantsDesktopDialogModerationOn": "Are you sure you want to turn off this participant's screen-share? You won't be able to turn the screen back on and neither will they.",
"muteParticipantsDesktopTitle": "Disable screen-share of this participant?",
"muteParticipantsVideoBody": "You won't be able to turn the camera back on, but they can turn it back on at any time.",
"muteParticipantsVideoBodyModerationOn": "You won't be able to turn the camera back on and neither will they.",
"muteParticipantsVideoButton": "Stop video",
@@ -767,8 +775,9 @@
"me": "me",
"notify": {
"OldElectronAPPTitle": "Security vulnerability!",
"allowAll": "Allow All",
"allowAudio": "Allow Audio",
"allowBoth": "Both",
"allowDesktop": "Allow screen sharing",
"allowVideo": "Allow Video",
"allowedUnmute": "You can unmute your microphone, start your camera or share your screen.",
"audioUnmuteBlockedDescription": "Mic unmute operation has been temporarily blocked because of system limits.",
@@ -782,6 +791,7 @@
"dataChannelClosedDescription": "The bridge channel is down and thus video quality may be limited to its lowest setting.",
"dataChannelClosedDescriptionWithAudio": "The bridge channel is down and thus disruptions to audio and video may occur.",
"dataChannelClosedWithAudio": "Audio and video quality may be impaired",
"desktopMutedRemotelyTitle": "Your screen sharing has been stopped by {{participantDisplayName}}",
"disabledIframe": "Embedding is only meant for demo purposes, so this call will disconnect in {{timeout}} minutes.",
"disabledIframeSecondaryNative": "Embedding {{domain}} is only meant for demo purposes, so this call will disconnect in {{timeout}} minutes.",
"disabledIframeSecondaryWeb": "Embedding {{domain}} is only meant for demo purposes, so this call will disconnect in {{timeout}} minutes. Please use <a href='{{jaasDomain}}' rel='noopener noreferrer' target='_blank'>Jitsi as a Service</a> for production embedding!",
@@ -862,6 +872,7 @@
"suggestRecordingDescription": "Would you like to start a recording?",
"suggestRecordingTitle": "Record this meeting",
"unmute": "Unmute Audio",
"unmuteScreen": "Start screen sharing",
"unmuteVideo": "Unmute Video",
"videoMutedRemotelyDescription": "You can always turn it on again.",
"videoMutedRemotelyTitle": "Your video has been turned off by {{participantDisplayName}}",
@@ -881,11 +892,14 @@
"admit": "Admit",
"admitAll": "Admit all",
"allow": "Allow non-moderators to:",
"allowDesktop": "Allow screen sharing",
"allowVideo": "Allow video",
"askDesktop": "Ask to share screen",
"askUnmute": "Ask to unmute",
"audioModeration": "Unmute themselves",
"blockEveryoneMicCamera": "Block everyone's mic and camera",
"breakoutRooms": "Breakout rooms",
"desktopModeration": "Start screen sharing",
"goLive": "Go live",
"invite": "Invite someone",
"lowerAllHands": "Lower all hands",
@@ -897,6 +911,8 @@
"muteAll": "Mute all",
"muteEveryoneElse": "Mute everyone else",
"reject": "Reject",
"stopDesktop": "Stop screen sharing",
"stopEveryonesDesktop": "Stop everyone's screen-share",
"stopEveryonesVideo": "Stop everyone's video",
"stopVideo": "Stop video",
"unblockEveryoneMicCamera": "Unblock everyone's mic and camera",
@@ -1503,6 +1519,8 @@
"connectionInfo": "Connection Info",
"demote": "Move to viewer",
"domute": "Mute",
"domuteDesktop": "Stop screen-sharing",
"domuteDesktopOfOthers": "Stop screen-sharing for everyone else",
"domuteOthers": "Mute everyone else",
"domuteVideo": "Disable camera",
"domuteVideoOfOthers": "Disable camera of everyone else",

View File

@@ -13,7 +13,7 @@ import {
requestEnableAudioModeration,
requestEnableVideoModeration
} from '../../react/features/av-moderation/actions';
import { isEnabledFromState } from '../../react/features/av-moderation/functions';
import { isEnabledFromState, isForceMuted } from '../../react/features/av-moderation/functions';
import { setAudioOnly } from '../../react/features/base/audio-only/actions';
import {
endConference,
@@ -106,7 +106,7 @@ import {
close as closeParticipantsPane,
open as openParticipantsPane
} from '../../react/features/participants-pane/actions';
import { getParticipantsPaneOpen, isForceMuted } from '../../react/features/participants-pane/functions';
import { getParticipantsPaneOpen } from '../../react/features/participants-pane/functions';
import { startLocalVideoRecording, stopLocalVideoRecording } from '../../react/features/recording/actions.any';
import { grantRecordingConsent, grantRecordingConsentAndUnmute } from '../../react/features/recording/actions.web';
import { RECORDING_METADATA_ID, RECORDING_TYPES } from '../../react/features/recording/constants';

View File

@@ -63,6 +63,11 @@ export const ACTION_SHORTCUT_TRIGGERED = 'triggered';
*/
export const AUDIO_MUTE = 'audio.mute';
/**
* The name of the keyboard shortcut or toolbar button for muting desktop sharing.
*/
export const DESKTOP_MUTE = 'desktop.mute';
/**
* The name of the keyboard shortcut or toolbar button for muting video.
*/

View File

@@ -37,6 +37,15 @@ export const ENABLE_MODERATION = 'ENABLE_MODERATION';
*/
export const REQUEST_DISABLE_AUDIO_MODERATION = 'REQUEST_DISABLE_AUDIO_MODERATION';
/**
* The type of (redux) action which signals that Desktop Moderation disable has been requested.
*
* {
* type: REQUEST_DISABLE_DESKTOP_MODERATION
* }
*/
export const REQUEST_DISABLE_DESKTOP_MODERATION = 'REQUEST_DISABLE_DESKTOP_MODERATION';
/**
* The type of (redux) action which signals that Video Moderation disable has been requested.
*
@@ -55,6 +64,15 @@ export const REQUEST_DISABLE_VIDEO_MODERATION = 'REQUEST_DISABLE_VIDEO_MODERATIO
*/
export const REQUEST_ENABLE_AUDIO_MODERATION = 'REQUEST_ENABLE_AUDIO_MODERATION';
/**
* The type of (redux) action which signals that Desktop Moderation enable has been requested.
*
* {
* type: REQUEST_ENABLE_DESKTOP_MODERATION
* }
*/
export const REQUEST_ENABLE_DESKTOP_MODERATION = 'REQUEST_ENABLE_DESKTOP_MODERATION';
/**
* The type of (redux) action which signals that Video Moderation enable has been requested.
*
@@ -117,7 +135,7 @@ export const PARTICIPANT_REJECTED = 'PARTICIPANT_REJECTED';
/**
* The type of (redux) action which signals that a participant asked to have its audio umuted.
* The type of (redux) action which signals that a participant asked to have its audio unmuted.
*
* {
* type: PARTICIPANT_PENDING_AUDIO

View File

@@ -1,9 +1,9 @@
import { batch } from 'react-redux';
import { IStore } from '../app/types';
import { getConferenceState } from '../base/conference/functions';
import { MEDIA_TYPE, type MediaType } from '../base/media/constants';
import { getParticipantById, isParticipantModerator } from '../base/participants/functions';
import { IParticipant } from '../base/participants/types';
import { isForceMuted } from '../participants-pane/functions';
import {
DISABLE_MODERATION,
@@ -16,11 +16,14 @@ import {
PARTICIPANT_PENDING_AUDIO,
PARTICIPANT_REJECTED,
REQUEST_DISABLE_AUDIO_MODERATION,
REQUEST_DISABLE_DESKTOP_MODERATION,
REQUEST_DISABLE_VIDEO_MODERATION,
REQUEST_ENABLE_AUDIO_MODERATION,
REQUEST_ENABLE_DESKTOP_MODERATION,
REQUEST_ENABLE_VIDEO_MODERATION
} from './actionTypes';
import { isEnabledFromState } from './functions';
import { MEDIA_TYPE, type MediaType } from './constants';
import { isEnabledFromState, isForceMuted } from './functions';
/**
* Action used by moderator to approve audio for a participant.
@@ -42,6 +45,25 @@ export const approveParticipantAudio = (id: string) => (dispatch: IStore['dispat
}
};
/**
* Action used by moderator to approve desktop for a participant.
*
* @param {string} id - The id of the participant to be approved.
* @returns {void}
*/
export const approveParticipantDesktop = (id: string) => (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
const { conference } = getConferenceState(state);
const participant = getParticipantById(state, id);
const isDesktopForceMuted = isForceMuted(participant, MEDIA_TYPE.DESKTOP, state);
const isDesktopModerationOn = isEnabledFromState(MEDIA_TYPE.DESKTOP, state);
if (isDesktopModerationOn && isDesktopForceMuted) {
conference?.avModerationApprove(MEDIA_TYPE.DESKTOP, id);
}
};
/**
* Action used by moderator to approve video for a participant.
*
@@ -68,8 +90,11 @@ export const approveParticipantVideo = (id: string) => (dispatch: IStore['dispat
* @returns {void}
*/
export const approveParticipant = (id: string) => (dispatch: IStore['dispatch']) => {
dispatch(approveParticipantAudio(id));
dispatch(approveParticipantVideo(id));
batch(() => {
dispatch(approveParticipantAudio(id));
dispatch(approveParticipantDesktop(id));
dispatch(approveParticipantVideo(id));
});
};
/**
@@ -92,6 +117,26 @@ export const rejectParticipantAudio = (id: string) => (dispatch: IStore['dispatc
}
};
/**
* Action used by moderator to reject desktop for a participant.
*
* @param {string} id - The id of the participant to be rejected.
* @returns {void}
*/
export const rejectParticipantDesktop = (id: string) => (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
const { conference } = getConferenceState(state);
const desktopModeration = isEnabledFromState(MEDIA_TYPE.DESKTOP, state);
const participant = getParticipantById(state, id);
const isDesktopForceMuted = isForceMuted(participant, MEDIA_TYPE.DESKTOP, state);
const isModerator = isParticipantModerator(participant);
if (desktopModeration && !isDesktopForceMuted && !isModerator) {
conference?.avModerationReject(MEDIA_TYPE.DESKTOP, id);
}
};
/**
* Action used by moderator to reject video for a participant.
*
@@ -185,6 +230,19 @@ export const requestDisableAudioModeration = () => {
};
};
/**
* Requests disable of desktop moderation.
*
* @returns {{
* type: REQUEST_DISABLE_DESKTOP_MODERATION
* }}
*/
export const requestDisableDesktopModeration = () => {
return {
type: REQUEST_DISABLE_DESKTOP_MODERATION
};
};
/**
* Requests disable of video moderation.
*
@@ -211,6 +269,19 @@ export const requestEnableAudioModeration = () => {
};
};
/**
* Requests enable of desktop moderation.
*
* @returns {{
* type: REQUEST_ENABLE_DESKTOP_MODERATION
* }}
*/
export const requestEnableDesktopModeration = () => {
return {
type: REQUEST_ENABLE_DESKTOP_MODERATION
};
};
/**
* Requests enable of video moderation.
*
@@ -313,4 +384,3 @@ export function participantRejected(id: string, mediaType: MediaType) {
mediaType
};
}
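
A minimal usage sketch for the new desktop moderation action creators above; the hook name and wiring are illustrative assumptions, only the imported action creators come from this file:

import { useCallback } from 'react';
import { useDispatch } from 'react-redux';

import { IStore } from '../app/types';

import {
    approveParticipantDesktop,
    rejectParticipantDesktop,
    requestEnableDesktopModeration
} from './actions';

// Hypothetical moderator-side hook (not part of the commit).
export function useDesktopModerationControls(participantId: string) {
    const dispatch = useDispatch<IStore['dispatch']>();

    // Turn screen-sharing moderation on for the whole conference.
    const enableModeration = useCallback(
        () => dispatch(requestEnableDesktopModeration()), [ dispatch ]);

    // Allow a single force-muted participant to start screen sharing.
    const approveDesktop = useCallback(
        () => dispatch(approveParticipantDesktop(participantId)), [ dispatch, participantId ]);

    // Stop / block a participant's screen share while moderation is on.
    const rejectDesktop = useCallback(
        () => dispatch(rejectParticipantDesktop(participantId)), [ dispatch, participantId ]);

    return { enableModeration, approveDesktop, rejectDesktop };
}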

View File

@@ -1,18 +1,35 @@
import { MEDIA_TYPE } from '../base/media/constants';
export type MediaType = 'audio' | 'video' | 'desktop';
/**
* Mapping between a media type and the witelist reducer key.
* The set of media types for AV moderation.
*
* @enum {string}
*/
export const MEDIA_TYPE: {
AUDIO: MediaType;
DESKTOP: MediaType;
VIDEO: MediaType;
} = {
AUDIO: 'audio',
DESKTOP: 'desktop',
VIDEO: 'video'
};
/**
* Mapping between a media type and the whitelist reducer key.
*/
export const MEDIA_TYPE_TO_WHITELIST_STORE_KEY: { [key: string]: string; } = {
[MEDIA_TYPE.AUDIO]: 'audioWhitelist',
[MEDIA_TYPE.DESKTOP]: 'desktopWhitelist',
[MEDIA_TYPE.VIDEO]: 'videoWhitelist'
};
/**
* Mapping between a media type and the pending reducer key.
*/
export const MEDIA_TYPE_TO_PENDING_STORE_KEY: { [key: string]: 'pendingAudio' | 'pendingVideo'; } = {
export const MEDIA_TYPE_TO_PENDING_STORE_KEY: { [key: string]: 'pendingAudio' | 'pendingDesktop' | 'pendingVideo'; } = {
[MEDIA_TYPE.AUDIO]: 'pendingAudio',
[MEDIA_TYPE.DESKTOP]: 'pendingDesktop',
[MEDIA_TYPE.VIDEO]: 'pendingVideo'
};
@@ -20,11 +37,15 @@ export const ASKED_TO_UNMUTE_NOTIFICATION_ID = 'asked-to-unmute';
export const ASKED_TO_UNMUTE_SOUND_ID = 'ASKED_TO_UNMUTE_SOUND';
export const AUDIO_MODERATION_NOTIFICATION_ID = 'audio-moderation';
export const DESKTOP_MODERATION_NOTIFICATION_ID = 'desktop-moderation';
export const VIDEO_MODERATION_NOTIFICATION_ID = 'video-moderation';
export const CS_MODERATION_NOTIFICATION_ID = 'screensharing-moderation';
export const AUDIO_RAISED_HAND_NOTIFICATION_ID = 'raise-hand-audio';
export const DESKTOP_RAISED_HAND_NOTIFICATION_ID = 'raise-hand-desktop';
export const VIDEO_RAISED_HAND_NOTIFICATION_ID = 'raise-hand-video';
export const MODERATION_NOTIFICATIONS = {
[MEDIA_TYPE.AUDIO]: AUDIO_MODERATION_NOTIFICATION_ID,
[MEDIA_TYPE.SCREENSHARE]: CS_MODERATION_NOTIFICATION_ID,
[MEDIA_TYPE.DESKTOP]: DESKTOP_MODERATION_NOTIFICATION_ID,
[MEDIA_TYPE.VIDEO]: VIDEO_MODERATION_NOTIFICATION_ID
};
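
The two mapping tables above let callers resolve the reducer keys generically for any moderated media type; a small illustration (the helper itself is not part of the commit):

import {
    MEDIA_TYPE,
    MEDIA_TYPE_TO_PENDING_STORE_KEY,
    MEDIA_TYPE_TO_WHITELIST_STORE_KEY,
    type MediaType
} from './constants';

// Illustrative only: looks up both reducer keys for a given moderated media type.
export function getModerationStoreKeys(mediaType: MediaType) {
    return {
        pendingKey: MEDIA_TYPE_TO_PENDING_STORE_KEY[mediaType], // e.g. 'pendingDesktop'
        whitelistKey: MEDIA_TYPE_TO_WHITELIST_STORE_KEY[mediaType] // e.g. 'desktopWhitelist'
    };
}

getModerationStoreKeys(MEDIA_TYPE.DESKTOP);
// => { pendingKey: 'pendingDesktop', whitelistKey: 'desktopWhitelist' }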

View File

@@ -1,10 +1,14 @@
import { IReduxState } from '../app/types';
import { MEDIA_TYPE, type MediaType } from '../base/media/constants';
import { isLocalParticipantModerator } from '../base/participants/functions';
import { isLocalParticipantModerator, isParticipantModerator } from '../base/participants/functions';
import { IParticipant } from '../base/participants/types';
import { isInBreakoutRoom } from '../breakout-rooms/functions';
import { MEDIA_TYPE_TO_PENDING_STORE_KEY, MEDIA_TYPE_TO_WHITELIST_STORE_KEY } from './constants';
import {
MEDIA_TYPE,
MEDIA_TYPE_TO_PENDING_STORE_KEY,
MEDIA_TYPE_TO_WHITELIST_STORE_KEY,
MediaType
} from './constants';
/**
* Returns this feature's root state.
@@ -29,10 +33,18 @@ const EMPTY_ARRAY: any[] = [];
* @param {IReduxState} state - Global state.
* @returns {boolean}
*/
export const isEnabledFromState = (mediaType: MediaType, state: IReduxState) =>
(mediaType === MEDIA_TYPE.AUDIO
? getState(state)?.audioModerationEnabled
: getState(state)?.videoModerationEnabled) === true;
export const isEnabledFromState = (mediaType: MediaType, state: IReduxState) => {
switch (mediaType) {
case MEDIA_TYPE.AUDIO:
return getState(state)?.audioModerationEnabled === true;
case MEDIA_TYPE.DESKTOP:
return getState(state)?.desktopModerationEnabled === true;
case MEDIA_TYPE.VIDEO:
return getState(state)?.videoModerationEnabled === true;
default:
throw new Error(`Unknown media type: ${mediaType}`);
}
};
/**
* Returns whether moderation is enabled per media type.
@@ -61,11 +73,20 @@ export const isSupported = () => (state: IReduxState) => {
* @returns {boolean}
*/
export const isLocalParticipantApprovedFromState = (mediaType: MediaType, state: IReduxState) => {
const approved = (mediaType === MEDIA_TYPE.AUDIO
? getState(state).audioUnmuteApproved
: getState(state).videoUnmuteApproved) === true;
if (isLocalParticipantModerator(state)) {
return true;
}
return approved || isLocalParticipantModerator(state);
switch (mediaType) {
case MEDIA_TYPE.AUDIO:
return getState(state).audioUnmuteApproved === true;
case MEDIA_TYPE.DESKTOP:
return getState(state).desktopUnmuteApproved === true;
case MEDIA_TYPE.VIDEO:
return getState(state).videoUnmuteApproved === true;
default:
throw new Error(`Unknown media type: ${mediaType}`);
}
};
/**
@@ -134,3 +155,28 @@ export const getParticipantsAskingToAudioUnmute = (state: IReduxState) => {
export const shouldShowModeratedNotification = (mediaType: MediaType, state: IReduxState) =>
isEnabledFromState(mediaType, state)
&& !isLocalParticipantApprovedFromState(mediaType, state);
/**
* Checks if a participant is force muted.
*
* @param {IParticipant|undefined} participant - The participant.
* @param {MediaType} mediaType - The media type.
* @param {IReduxState} state - The redux state.
* @returns {boolean}
*/
export function isForceMuted(participant: IParticipant | undefined, mediaType: MediaType, state: IReduxState) {
if (isEnabledFromState(mediaType, state)) {
if (participant?.local) {
return !isLocalParticipantApprovedFromState(mediaType, state);
}
// moderators cannot be force muted
if (isParticipantModerator(participant)) {
return false;
}
return !isParticipantApproved(participant?.id ?? '', mediaType)(state);
}
return false;
}
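
A small illustrative selector built on isForceMuted (not part of the commit; import paths assume it sits next to this file), showing how the helpers above compose:

import { IReduxState } from '../app/types';
import { getLocalParticipant } from '../base/participants/functions';

import { MEDIA_TYPE } from './constants';
import { isForceMuted } from './functions';

// Is the local participant currently blocked from screen sharing?
// False when desktop moderation is off; when it is on, moderators are never
// force muted and everyone else needs an approval / whitelist entry.
export const isLocalDesktopForceMuted = (state: IReduxState) =>
    isForceMuted(getLocalParticipant(state), MEDIA_TYPE.DESKTOP, state);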

View File

@@ -3,8 +3,12 @@ import { batch } from 'react-redux';
import { APP_WILL_MOUNT, APP_WILL_UNMOUNT } from '../base/app/actionTypes';
import { getConferenceState } from '../base/conference/functions';
import { JitsiConferenceEvents } from '../base/lib-jitsi-meet';
import { MEDIA_TYPE, MediaType } from '../base/media/constants';
import { isAudioMuted, isVideoMuted } from '../base/media/functions';
import { MEDIA_TYPE as TRACK_MEDIA_TYPE } from '../base/media/constants';
import {
isAudioMuted,
isScreenshareMuted,
isVideoMuted
} from '../base/media/functions';
import { PARTICIPANT_UPDATED } from '../base/participants/actionTypes';
import { raiseHand } from '../base/participants/actions';
import {
@@ -30,8 +34,10 @@ import {
PARTICIPANT_APPROVED,
PARTICIPANT_REJECTED,
REQUEST_DISABLE_AUDIO_MODERATION,
REQUEST_DISABLE_DESKTOP_MODERATION,
REQUEST_DISABLE_VIDEO_MODERATION,
REQUEST_ENABLE_AUDIO_MODERATION,
REQUEST_ENABLE_DESKTOP_MODERATION,
REQUEST_ENABLE_VIDEO_MODERATION
} from './actionTypes';
import {
@@ -49,8 +55,10 @@ import {
ASKED_TO_UNMUTE_NOTIFICATION_ID,
ASKED_TO_UNMUTE_SOUND_ID,
AUDIO_MODERATION_NOTIFICATION_ID,
CS_MODERATION_NOTIFICATION_ID,
VIDEO_MODERATION_NOTIFICATION_ID
DESKTOP_MODERATION_NOTIFICATION_ID,
MEDIA_TYPE,
MediaType,
VIDEO_MODERATION_NOTIFICATION_ID
} from './constants';
import {
isEnabledFromState,
@@ -90,9 +98,9 @@ MiddlewareRegistry.register(({ dispatch, getState }) => next => action => {
uid = VIDEO_MODERATION_NOTIFICATION_ID;
break;
}
case MEDIA_TYPE.SCREENSHARE: {
case MEDIA_TYPE.DESKTOP: {
titleKey = 'notify.moderationInEffectCSTitle';
uid = CS_MODERATION_NOTIFICATION_ID;
uid = DESKTOP_MODERATION_NOTIFICATION_ID;
break;
}
}
@@ -115,6 +123,10 @@ MiddlewareRegistry.register(({ dispatch, getState }) => next => action => {
conference?.disableAVModeration(MEDIA_TYPE.AUDIO);
break;
}
case REQUEST_DISABLE_DESKTOP_MODERATION: {
conference?.disableAVModeration(MEDIA_TYPE.DESKTOP);
break;
}
case REQUEST_DISABLE_VIDEO_MODERATION: {
conference?.disableAVModeration(MEDIA_TYPE.VIDEO);
break;
@@ -123,6 +135,10 @@ MiddlewareRegistry.register(({ dispatch, getState }) => next => action => {
conference?.enableAVModeration(MEDIA_TYPE.AUDIO);
break;
}
case REQUEST_ENABLE_DESKTOP_MODERATION: {
conference?.enableAVModeration(MEDIA_TYPE.DESKTOP);
break;
}
case REQUEST_ENABLE_VIDEO_MODERATION: {
conference?.enableAVModeration(MEDIA_TYPE.VIDEO);
break;
@@ -219,24 +235,37 @@ StateListenerRegistry.register(
const customActionHandler = [];
if ((mediaType === MEDIA_TYPE.AUDIO || getState()['features/av-moderation'].audioUnmuteApproved)
&& isAudioMuted(getState())) {
&& isAudioMuted(getState())) {
customActionNameKey.push('notify.unmute');
customActionHandler.push(() => {
dispatch(muteLocal(false, MEDIA_TYPE.AUDIO));
dispatch(muteLocal(false, TRACK_MEDIA_TYPE.AUDIO));
dispatch(hideNotification(ASKED_TO_UNMUTE_NOTIFICATION_ID));
});
}
if ((mediaType === MEDIA_TYPE.VIDEO || getState()['features/av-moderation'].videoUnmuteApproved)
&& isVideoMuted(getState())) {
customActionNameKey.push('notify.unmuteVideo');
if ((mediaType === MEDIA_TYPE.DESKTOP || getState()['features/av-moderation'].desktopUnmuteApproved)
&& isScreenshareMuted(getState())) {
customActionNameKey.push('notify.unmuteScreen');
customActionHandler.push(() => {
dispatch(muteLocal(false, MEDIA_TYPE.VIDEO));
dispatch(muteLocal(false, TRACK_MEDIA_TYPE.SCREENSHARE));
dispatch(hideNotification(ASKED_TO_UNMUTE_NOTIFICATION_ID));
// lower hand as there will be no audio and change in dominant speaker to clear it
// Since permission is requested by raising the hand, lower it explicitly rather than relying on
// dominant speaker detection to clear it.
dispatch(raiseHand(false));
});
}
if ((mediaType === MEDIA_TYPE.VIDEO || getState()['features/av-moderation'].videoUnmuteApproved)
&& isVideoMuted(getState())) {
customActionNameKey.push('notify.unmuteVideo');
customActionHandler.push(() => {
dispatch(muteLocal(false, TRACK_MEDIA_TYPE.VIDEO));
dispatch(hideNotification(ASKED_TO_UNMUTE_NOTIFICATION_ID));
// Since permission is requested by raising the hand, lower it explicitly rather than relying on
// dominant speaker detection to clear it.
dispatch(raiseHand(false));
});
}
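
Note that two MEDIA_TYPE sets are now in play: the av-moderation one (audio/desktop/video) and the base/media track one (where screen sharing is SCREENSHARE). A condensed, illustrative mapping of the correspondence used above (not part of the commit):

import { MEDIA_TYPE as TRACK_MEDIA_TYPE } from '../base/media/constants';

import { MEDIA_TYPE, MediaType } from './constants';

// Illustrative helper: the moderation media type determines which base/media
// track type the actual mute/unmute has to target.
export function trackTypeFor(mediaType: MediaType) {
    switch (mediaType) {
    case MEDIA_TYPE.DESKTOP:
        return TRACK_MEDIA_TYPE.SCREENSHARE;
    case MEDIA_TYPE.VIDEO:
        return TRACK_MEDIA_TYPE.VIDEO;
    default:
        return TRACK_MEDIA_TYPE.AUDIO;
    }
}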

View File

@@ -1,4 +1,3 @@
import { MEDIA_TYPE, type MediaType } from '../base/media/constants';
import {
PARTICIPANT_LEFT,
PARTICIPANT_UPDATED
@@ -16,14 +15,21 @@ import {
PARTICIPANT_PENDING_AUDIO,
PARTICIPANT_REJECTED
} from './actionTypes';
import { MEDIA_TYPE_TO_PENDING_STORE_KEY } from './constants';
import {
MEDIA_TYPE,
MEDIA_TYPE_TO_PENDING_STORE_KEY,
type MediaType
} from './constants';
const initialState = {
audioModerationEnabled: false,
desktopModerationEnabled: false,
videoModerationEnabled: false,
audioWhitelist: {},
desktopWhitelist: {},
videoWhitelist: {},
pendingAudio: [],
pendingDesktop: [],
pendingVideo: []
};
@@ -31,7 +37,11 @@ export interface IAVModerationState {
audioModerationEnabled: boolean;
audioUnmuteApproved?: boolean | undefined;
audioWhitelist: { [id: string]: boolean; };
desktopModerationEnabled: boolean;
desktopUnmuteApproved?: boolean | undefined;
desktopWhitelist: { [id: string]: boolean; };
pendingAudio: Array<{ id: string; }>;
pendingDesktop: Array<{ id: string; }>;
pendingVideo: Array<{ id: string; }>;
videoModerationEnabled: boolean;
videoUnmuteApproved?: boolean | undefined;
@@ -77,28 +87,61 @@ ReducerRegistry.register<IAVModerationState>('features/av-moderation',
(state = initialState, action): IAVModerationState => {
switch (action.type) {
case DISABLE_MODERATION: {
const newState = action.mediaType === MEDIA_TYPE.AUDIO
? {
let newState = {};
switch (action.mediaType) {
case MEDIA_TYPE.AUDIO:
newState = {
audioModerationEnabled: false,
audioUnmuteApproved: undefined
} : {
};
break;
case MEDIA_TYPE.DESKTOP:
newState = {
desktopModerationEnabled: false,
desktopUnmuteApproved: undefined
};
break;
case MEDIA_TYPE.VIDEO:
newState = {
videoModerationEnabled: false,
videoUnmuteApproved: undefined
};
break;
}
return {
...state,
...newState,
audioWhitelist: {},
desktopWhitelist: {},
videoWhitelist: {},
pendingAudio: [],
pendingDesktop: [],
pendingVideo: []
};
}
case ENABLE_MODERATION: {
const newState = action.mediaType === MEDIA_TYPE.AUDIO
? { audioModerationEnabled: true } : { videoModerationEnabled: true };
let newState = {};
switch (action.mediaType) {
case MEDIA_TYPE.AUDIO:
newState = {
audioModerationEnabled: true,
};
break;
case MEDIA_TYPE.DESKTOP:
newState = {
desktopModerationEnabled: true,
};
break;
case MEDIA_TYPE.VIDEO:
newState = {
videoModerationEnabled: true,
};
break;
}
return {
...state,
@@ -107,8 +150,25 @@ ReducerRegistry.register<IAVModerationState>('features/av-moderation',
}
case LOCAL_PARTICIPANT_APPROVED: {
const newState = action.mediaType === MEDIA_TYPE.AUDIO
? { audioUnmuteApproved: true } : { videoUnmuteApproved: true };
let newState = {};
switch (action.mediaType) {
case MEDIA_TYPE.AUDIO:
newState = {
audioUnmuteApproved: true
};
break;
case MEDIA_TYPE.DESKTOP:
newState = {
desktopUnmuteApproved: true
};
break;
case MEDIA_TYPE.VIDEO:
newState = {
videoUnmuteApproved: true
};
break;
}
return {
...state,
@@ -117,8 +177,25 @@ ReducerRegistry.register<IAVModerationState>('features/av-moderation',
}
case LOCAL_PARTICIPANT_REJECTED: {
const newState = action.mediaType === MEDIA_TYPE.AUDIO
? { audioUnmuteApproved: false } : { videoUnmuteApproved: false };
let newState = {};
switch (action.mediaType) {
case MEDIA_TYPE.AUDIO:
newState = {
audioUnmuteApproved: false
};
break;
case MEDIA_TYPE.DESKTOP:
newState = {
desktopUnmuteApproved: false
};
break;
case MEDIA_TYPE.VIDEO:
newState = {
videoUnmuteApproved: false
};
break;
}
return {
...state,
@@ -146,7 +223,7 @@ ReducerRegistry.register<IAVModerationState>('features/av-moderation',
case PARTICIPANT_UPDATED: {
const participant = action.participant;
const { audioModerationEnabled, videoModerationEnabled } = state;
const { audioModerationEnabled, desktopModerationEnabled, videoModerationEnabled } = state;
let hasStateChanged = false;
// skips changing the reference of pendingAudio or pendingVideo,
@@ -155,6 +232,10 @@ ReducerRegistry.register<IAVModerationState>('features/av-moderation',
hasStateChanged = _updatePendingParticipant(MEDIA_TYPE.AUDIO, participant, state);
}
if (desktopModerationEnabled) {
hasStateChanged = hasStateChanged || _updatePendingParticipant(MEDIA_TYPE.DESKTOP, participant, state);
}
if (videoModerationEnabled) {
hasStateChanged = hasStateChanged || _updatePendingParticipant(MEDIA_TYPE.VIDEO, participant, state);
}
@@ -168,9 +249,10 @@ ReducerRegistry.register<IAVModerationState>('features/av-moderation',
return state;
}
case PARTICIPANT_LEFT: {
const participant = action.participant;
const { audioModerationEnabled, videoModerationEnabled } = state;
const { audioModerationEnabled, desktopModerationEnabled, videoModerationEnabled } = state;
let hasStateChanged = false;
// skips changing the reference of pendingAudio or pendingVideo,
@@ -184,6 +266,15 @@ ReducerRegistry.register<IAVModerationState>('features/av-moderation',
}
}
if (desktopModerationEnabled) {
const newPendingDesktop = state.pendingDesktop.filter(pending => pending.id !== participant.id);
if (state.pendingDesktop.length !== newPendingDesktop.length) {
state.pendingDesktop = newPendingDesktop;
hasStateChanged = true;
}
}
if (videoModerationEnabled) {
const newPendingVideo = state.pendingVideo.filter(pending => pending.id !== participant.id);
@@ -213,6 +304,13 @@ ReducerRegistry.register<IAVModerationState>('features/av-moderation',
};
}
if (mediaType === MEDIA_TYPE.DESKTOP) {
return {
...state,
pendingDesktop: state.pendingDesktop.filter(pending => pending.id !== id)
};
}
if (mediaType === MEDIA_TYPE.VIDEO) {
return {
...state,
@@ -236,6 +334,16 @@ ReducerRegistry.register<IAVModerationState>('features/av-moderation',
};
}
if (mediaType === MEDIA_TYPE.DESKTOP) {
return {
...state,
desktopWhitelist: {
...state.desktopWhitelist,
[id]: true
}
};
}
if (mediaType === MEDIA_TYPE.VIDEO) {
return {
...state,
@@ -262,6 +370,16 @@ ReducerRegistry.register<IAVModerationState>('features/av-moderation',
};
}
if (mediaType === MEDIA_TYPE.DESKTOP) {
return {
...state,
desktopWhitelist: {
...state.desktopWhitelist,
[id]: false
}
};
}
if (mediaType === MEDIA_TYPE.VIDEO) {
return {
...state,

View File

@@ -1,5 +1,6 @@
import { IStore } from '../../app/types';
import { showModeratedNotification } from '../../av-moderation/actions';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../../av-moderation/constants';
import { shouldShowModeratedNotification } from '../../av-moderation/functions';
import { isModerationNotificationDisplayed } from '../../notifications/functions';
@@ -18,7 +19,6 @@ import {
TOGGLE_CAMERA_FACING_MODE
} from './actionTypes';
import {
MEDIA_TYPE,
MediaType,
SCREENSHARE_MUTISM_AUTHORITY,
VIDEO_MUTISM_AUTHORITY
@@ -56,10 +56,23 @@ export function setAudioAvailable(available: boolean) {
* }}
*/
export function setAudioMuted(muted: boolean, ensureTrack = false) {
return {
type: SET_AUDIO_MUTED,
ensureTrack,
muted
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
// check for A/V Moderation when trying to unmute
if (!muted && shouldShowModeratedNotification(AVM_MEDIA_TYPE.AUDIO, state)) {
if (!isModerationNotificationDisplayed(AVM_MEDIA_TYPE.AUDIO, state)) {
ensureTrack && dispatch(showModeratedNotification(AVM_MEDIA_TYPE.AUDIO));
}
return;
}
dispatch({
type: SET_AUDIO_MUTED,
ensureTrack,
muted
});
};
}
@@ -126,9 +139,9 @@ export function setScreenshareMuted(
const state = getState();
// check for A/V Moderation when trying to unmute
if (!muted && shouldShowModeratedNotification(MEDIA_TYPE.SCREENSHARE, state)) {
if (!isModerationNotificationDisplayed(MEDIA_TYPE.SCREENSHARE, state)) {
ensureTrack && dispatch(showModeratedNotification(MEDIA_TYPE.SCREENSHARE));
if (!muted && shouldShowModeratedNotification(AVM_MEDIA_TYPE.DESKTOP, state)) {
if (!isModerationNotificationDisplayed(AVM_MEDIA_TYPE.DESKTOP, state)) {
ensureTrack && dispatch(showModeratedNotification(AVM_MEDIA_TYPE.DESKTOP));
}
return;
@@ -184,9 +197,9 @@ export function setVideoMuted(
const state = getState();
// check for A/V Moderation when trying to unmute
if (!muted && shouldShowModeratedNotification(MEDIA_TYPE.VIDEO, state)) {
if (!isModerationNotificationDisplayed(MEDIA_TYPE.VIDEO, state)) {
ensureTrack && dispatch(showModeratedNotification(MEDIA_TYPE.VIDEO));
if (!muted && shouldShowModeratedNotification(AVM_MEDIA_TYPE.VIDEO, state)) {
if (!isModerationNotificationDisplayed(AVM_MEDIA_TYPE.VIDEO, state)) {
ensureTrack && dispatch(showModeratedNotification(AVM_MEDIA_TYPE.VIDEO));
}
return;
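
On the participant side, every unmute path now funnels through the same moderation gate; an illustrative dispatch (paths assume it sits next to this file, the flow is the one implemented by the thunks above):

import { IStore } from '../../app/types';

import { setScreenshareMuted } from './actions';
import { SCREENSHARE_MUTISM_AUTHORITY } from './constants';

// Illustrative only: try to start screen sharing. If desktop moderation is on
// and the local participant has not been approved, the thunk above shows the
// moderated notification instead of dispatching the unmute, so nothing changes.
export function tryStartScreenShare(dispatch: IStore['dispatch']) {
    dispatch(setScreenshareMuted(
        /* muted */ false,
        SCREENSHARE_MUTISM_AUTHORITY.USER,
        /* ensureTrack */ true));
}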

View File

@@ -88,6 +88,16 @@ export function getStartWithVideoMuted(stateful: IStateful) {
return Boolean(getPropertyValue(stateful, 'startWithVideoMuted', START_WITH_AUDIO_VIDEO_MUTED_SOURCES));
}
/**
* Determines whether screen-share is currently muted.
*
* @param {Function|Object} stateful - The redux store, state, or {@code getState} function.
* @returns {boolean}
*/
export function isScreenshareMuted(stateful: IStateful) {
return Boolean(toState(stateful)['features/base/media'].screenshare.muted);
}
/**
* Determines whether video is currently muted.
*

View File

@@ -8,15 +8,17 @@ import {
} from '../../analytics/AnalyticsEvents';
import { sendAnalytics } from '../../analytics/functions';
import { IStore } from '../../app/types';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../../av-moderation/constants';
import { isForceMuted } from '../../av-moderation/functions';
import { APP_STATE_CHANGED } from '../../mobile/background/actionTypes';
import { showWarningNotification } from '../../notifications/actions';
import { NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import { isForceMuted } from '../../participants-pane/functions';
import { isScreenMediaShared } from '../../screen-share/functions';
import { SET_AUDIO_ONLY } from '../audio-only/actionTypes';
import { setAudioOnly } from '../audio-only/actions';
import { SET_ROOM } from '../conference/actionTypes';
import { isRoomValid } from '../conference/functions';
import { PARTICIPANT_MUTED_US } from '../participants/actionTypes';
import { getLocalParticipant } from '../participants/functions';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import { getPropertyValue } from '../settings/functions.any';
@@ -46,7 +48,8 @@ import {
import {
MEDIA_TYPE,
SCREENSHARE_MUTISM_AUTHORITY,
VIDEO_MUTISM_AUTHORITY
VIDEO_MUTISM_AUTHORITY,
VIDEO_TYPE
} from './constants';
import { getStartWithAudioMuted, getStartWithVideoMuted } from './functions';
import logger from './logger';
@@ -66,6 +69,24 @@ MiddlewareRegistry.register(store => next => action => {
case APP_STATE_CHANGED:
return _appStateChanged(store, next, action);
case PARTICIPANT_MUTED_US: {
const { dispatch } = store;
const { track } = action;
// Sync the media muted state with the track muted state.
if (track.isAudioTrack()) {
dispatch(setAudioMuted(true, /* ensureTrack */ false));
} else if (track.isVideoTrack()) {
if (track.getVideoType() === VIDEO_TYPE.DESKTOP) {
dispatch(setScreenshareMuted(true, SCREENSHARE_MUTISM_AUTHORITY.USER, /* ensureTrack */ false));
} else {
dispatch(setVideoMuted(true, VIDEO_MUTISM_AUTHORITY.USER, /* ensureTrack */ false));
}
}
break;
}
case SET_AUDIO_ONLY:
return _setAudioOnly(store, next, action);
@@ -88,7 +109,7 @@ MiddlewareRegistry.register(store => next => action => {
const state = store.getState();
const participant = getLocalParticipant(state);
if (!action.muted && isForceMuted(participant, MEDIA_TYPE.AUDIO, state)) {
if (!action.muted && isForceMuted(participant, AVM_MEDIA_TYPE.AUDIO, state)) {
return;
}
break;
@@ -113,7 +134,7 @@ MiddlewareRegistry.register(store => next => action => {
const state = store.getState();
const participant = getLocalParticipant(state);
if (!action.muted && isForceMuted(participant, MEDIA_TYPE.SCREENSHARE, state)) {
if (!action.muted && isForceMuted(participant, AVM_MEDIA_TYPE.DESKTOP, state)) {
return;
}
break;
@@ -122,7 +143,7 @@ MiddlewareRegistry.register(store => next => action => {
const state = store.getState();
const participant = getLocalParticipant(state);
if (!action.muted && isForceMuted(participant, MEDIA_TYPE.VIDEO, state)) {
if (!action.muted && isForceMuted(participant, AVM_MEDIA_TYPE.VIDEO, state)) {
return;
}
break;
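
With conference.js no longer handling remote mutes directly, the flow is: TRACK_MUTE_CHANGED → participantMutedUs → this middleware syncs the media state, while the participants middleware shows the notification. A condensed, illustrative restatement of the branch above:

import { IStore } from '../../app/types';

import { setAudioMuted, setScreenshareMuted, setVideoMuted } from './actions';
import { SCREENSHARE_MUTISM_AUTHORITY, VIDEO_MUTISM_AUTHORITY, VIDEO_TYPE } from './constants';

// Illustrative only: the kind of track a moderator muted decides which local
// media flag gets synced. A muted desktop track maps to screen-share state,
// any other video track to camera state.
export function syncRemoteMute(dispatch: IStore['dispatch'], track: any) {
    if (track.isAudioTrack()) {
        dispatch(setAudioMuted(true, /* ensureTrack */ false));
    } else if (track.isVideoTrack()) {
        if (track.getVideoType() === VIDEO_TYPE.DESKTOP) {
            dispatch(setScreenshareMuted(true, SCREENSHARE_MUTISM_AUTHORITY.USER, /* ensureTrack */ false));
        } else {
            dispatch(setVideoMuted(true, VIDEO_MUTISM_AUTHORITY.USER, /* ensureTrack */ false));
        }
    }
}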

View File

@@ -112,6 +112,17 @@ export const PARTICIPANT_KICKED = 'PARTICIPANT_KICKED';
*/
export const PARTICIPANT_LEFT = 'PARTICIPANT_LEFT';
/**
* Action to handle the case when a remote participant mutes the local participant.
*
* {
* type: PARTICIPANT_MUTED_US,
* participant: Participant,
* track: JitsiLocalTrack
* }
*/
export const PARTICIPANT_MUTED_US = 'PARTICIPANT_MUTED_US';
/**
* Action to handle case when the sources attached to a participant are updated.
*

View File

@@ -17,6 +17,7 @@ import {
PARTICIPANT_JOINED,
PARTICIPANT_KICKED,
PARTICIPANT_LEFT,
PARTICIPANT_MUTED_US,
PARTICIPANT_ROLE_CHANGED,
PARTICIPANT_SOURCES_UPDATED,
PARTICIPANT_UPDATED,
@@ -467,19 +468,10 @@ export function participantUpdated(participant: IParticipant = { id: '' }) {
* @returns {Object}
*/
export function participantMutedUs(participant: any, track: any) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
if (!participant) {
return;
}
const isAudio = track.isAudioTrack();
dispatch(showNotification({
titleKey: isAudio ? 'notify.mutedRemotelyTitle' : 'notify.videoMutedRemotelyTitle',
titleArguments: {
participantDisplayName: getParticipantDisplayName(getState, participant.getId())
}
}, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
return {
type: PARTICIPANT_MUTED_US,
participant,
track
};
}

View File

@@ -392,7 +392,7 @@ export function getMutedStateByParticipantAndMediaType(
if (mediaType === MEDIA_TYPE.AUDIO) {
return Array.from(sources.values())[0].muted;
}
const videoType = mediaType === MEDIA_TYPE.VIDEO ? VIDEO_TYPE.CAMERA : VIDEO_TYPE.SCREENSHARE;
const videoType = mediaType === MEDIA_TYPE.VIDEO ? VIDEO_TYPE.CAMERA : VIDEO_TYPE.DESKTOP;
const source = Array.from(sources.values()).find(src => src.videoType === videoType);
return source?.muted ?? true;

View File

@@ -3,7 +3,19 @@ import { batch } from 'react-redux';
import { AnyAction } from 'redux';
import { IStore } from '../../app/types';
import { approveParticipant, approveParticipantAudio, approveParticipantVideo } from '../../av-moderation/actions';
import {
approveParticipant,
approveParticipantAudio,
approveParticipantDesktop,
approveParticipantVideo
} from '../../av-moderation/actions';
import {
AUDIO_RAISED_HAND_NOTIFICATION_ID,
DESKTOP_RAISED_HAND_NOTIFICATION_ID,
MEDIA_TYPE,
VIDEO_RAISED_HAND_NOTIFICATION_ID
} from '../../av-moderation/constants';
import { isForceMuted } from '../../av-moderation/functions';
import { UPDATE_BREAKOUT_ROOMS } from '../../breakout-rooms/actionTypes';
import { getBreakoutRooms } from '../../breakout-rooms/functions';
import { toggleE2EE } from '../../e2ee/actions';
@@ -15,7 +27,6 @@ import {
RAISE_HAND_NOTIFICATION_ID
} from '../../notifications/constants';
import { open as openParticipantsPane } from '../../participants-pane/actions';
import { isForceMuted } from '../../participants-pane/functions';
import { CALLING, INVITED } from '../../presence-status/constants';
import { RAISE_HAND_SOUND_ID } from '../../reactions/constants';
import { RECORDING_OFF_SOUND_ID, RECORDING_ON_SOUND_ID } from '../../recording/constants';
@@ -26,7 +37,7 @@ import { IJitsiConference } from '../conference/reducer';
import { SET_CONFIG } from '../config/actionTypes';
import { getDisableRemoveRaisedHandOnFocus } from '../config/functions.any';
import { JitsiConferenceEvents } from '../lib-jitsi-meet';
import { MEDIA_TYPE } from '../media/constants';
import { VIDEO_TYPE } from '../media/constants';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import StateListenerRegistry from '../redux/StateListenerRegistry';
import { playSound, registerSound, unregisterSound } from '../sounds/actions';
@@ -43,6 +54,7 @@ import {
OVERWRITE_PARTICIPANT_NAME,
PARTICIPANT_JOINED,
PARTICIPANT_LEFT,
PARTICIPANT_MUTED_US,
PARTICIPANT_UPDATED,
RAISE_HAND_UPDATED,
SET_LOCAL_PARTICIPANT_RECORDING_STATUS
@@ -292,6 +304,31 @@ MiddlewareRegistry.register(store => next => action => {
break;
}
case PARTICIPANT_MUTED_US: {
const { dispatch, getState } = store;
const { participant, track } = action;
let titleKey;
if (track.isAudioTrack()) {
titleKey = 'notify.mutedRemotelyTitle';
} else if (track.isVideoTrack()) {
if (track.getVideoType() === VIDEO_TYPE.DESKTOP) {
titleKey = 'notify.desktopMutedRemotelyTitle';
} else {
titleKey = 'notify.videoMutedRemotelyTitle';
}
}
dispatch(showNotification({
titleKey,
titleArguments: {
participantDisplayName: getParticipantDisplayName(getState, participant.getId())
}
}, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
break;
}
case PARTICIPANT_UPDATED:
return _participantJoinedOrUpdated(store, next, action);
@@ -786,77 +823,124 @@ function _raiseHandUpdated({ dispatch, getState }: IStore, conference: IJitsiCon
APP.API.notifyRaiseHandUpdated(participantId, raisedHandTimestamp);
}
if (!raisedHandTimestamp) {
return;
}
// Display notifications about raised hands.
const isModerator = isLocalParticipantModerator(state);
const participant = getParticipantById(state, participantId);
const participantName = getParticipantDisplayName(state, participantId);
let shouldDisplayAllowAudio = false;
let shouldDisplayAllowVideo = false;
let shouldDisplayAllowDesktop = false;
if (isModerator) {
shouldDisplayAllowAudio = isForceMuted(participant, MEDIA_TYPE.AUDIO, state);
shouldDisplayAllowVideo = isForceMuted(participant, MEDIA_TYPE.VIDEO, state);
shouldDisplayAllowDesktop = isForceMuted(participant, MEDIA_TYPE.DESKTOP, state);
}
let action;
if (shouldDisplayAllowAudio || shouldDisplayAllowVideo) {
action = {
customActionNameKey: [] as string[],
customActionHandler: [] as Function[]
if (shouldDisplayAllowAudio || shouldDisplayAllowVideo || shouldDisplayAllowDesktop) {
const action: {
customActionHandler: Array<() => void>;
customActionNameKey: string[];
} = {
customActionHandler: [],
customActionNameKey: [],
};
// Always add a "allow all" at the end of the list.
action.customActionNameKey.push('notify.allowAll');
action.customActionHandler.push(() => {
dispatch(approveParticipant(participantId));
dispatch(hideNotification(AUDIO_RAISED_HAND_NOTIFICATION_ID));
dispatch(hideNotification(DESKTOP_RAISED_HAND_NOTIFICATION_ID));
dispatch(hideNotification(VIDEO_RAISED_HAND_NOTIFICATION_ID));
});
if (shouldDisplayAllowAudio) {
action.customActionNameKey.push('notify.allowAudio');
action.customActionHandler.push(() => {
const customActionNameKey = action.customActionNameKey.slice();
const customActionHandler = action.customActionHandler.slice();
customActionNameKey.unshift('notify.allowAudio');
customActionHandler.unshift(() => {
dispatch(approveParticipantAudio(participantId));
dispatch(hideNotification(RAISE_HAND_NOTIFICATION_ID));
dispatch(hideNotification(AUDIO_RAISED_HAND_NOTIFICATION_ID));
});
dispatch(showNotification({
title: participantName,
descriptionKey: 'notify.raisedHand',
uid: AUDIO_RAISED_HAND_NOTIFICATION_ID,
customActionNameKey,
customActionHandler,
}, NOTIFICATION_TIMEOUT_TYPE.EXTRA_LONG));
}
if (shouldDisplayAllowVideo) {
action.customActionNameKey.push('notify.allowVideo');
action.customActionHandler.push(() => {
const customActionNameKey = action.customActionNameKey.slice();
const customActionHandler = action.customActionHandler.slice();
customActionNameKey.unshift('notify.allowVideo');
customActionHandler.unshift(() => {
dispatch(approveParticipantVideo(participantId));
dispatch(hideNotification(RAISE_HAND_NOTIFICATION_ID));
dispatch(hideNotification(VIDEO_RAISED_HAND_NOTIFICATION_ID));
});
dispatch(showNotification({
title: participantName,
descriptionKey: 'notify.raisedHand',
uid: VIDEO_RAISED_HAND_NOTIFICATION_ID,
customActionNameKey,
customActionHandler,
}, NOTIFICATION_TIMEOUT_TYPE.EXTRA_LONG));
}
if (shouldDisplayAllowAudio && shouldDisplayAllowVideo) {
action.customActionNameKey.push('notify.allowBoth');
action.customActionHandler.push(() => {
dispatch(approveParticipant(participantId));
dispatch(hideNotification(RAISE_HAND_NOTIFICATION_ID));
if (shouldDisplayAllowDesktop) {
const customActionNameKey = action.customActionNameKey.slice();
const customActionHandler = action.customActionHandler.slice();
customActionNameKey.unshift('notify.allowDesktop');
customActionHandler.unshift(() => {
dispatch(approveParticipantDesktop(participantId));
dispatch(hideNotification(DESKTOP_RAISED_HAND_NOTIFICATION_ID));
});
dispatch(showNotification({
title: participantName,
descriptionKey: 'notify.raisedHand',
uid: DESKTOP_RAISED_HAND_NOTIFICATION_ID,
customActionNameKey,
customActionHandler
}, NOTIFICATION_TIMEOUT_TYPE.EXTRA_LONG));
}
} else {
action = {
customActionNameKey: [ 'notify.viewParticipants' ],
customActionHandler: [ () => dispatch(openParticipantsPane()) ]
};
}
if (raisedHandTimestamp) {
let notificationTitle;
const participantName = getParticipantDisplayName(state, participantId);
const { raisedHandsQueue } = state['features/base/participants'];
if (raisedHandsQueue.length > 1) {
const raisedHands = raisedHandsQueue.length - 1;
notificationTitle = i18n.t('notify.raisedHands', {
participantName,
raisedHands
raisedHands: raisedHandsQueue.length - 1
});
} else {
notificationTitle = participantName;
}
dispatch(showNotification({
titleKey: 'notify.somebody',
title: notificationTitle,
descriptionKey: 'notify.raisedHand',
concatText: true,
uid: RAISE_HAND_NOTIFICATION_ID,
...action
}, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
dispatch(playSound(RAISE_HAND_SOUND_ID));
customActionNameKey: [ 'notify.viewParticipants' ],
customActionHandler: [ () => dispatch(openParticipantsPane()) ]
}, NOTIFICATION_TIMEOUT_TYPE.LONG));
}
dispatch(playSound(RAISE_HAND_SOUND_ID));
}
/**

View File

@@ -2,6 +2,7 @@
import { AUDIO_ONLY_SCREEN_SHARE_NO_TRACK } from '../../../../modules/UI/UIErrors';
import { IReduxState, IStore } from '../../app/types';
import { showModeratedNotification } from '../../av-moderation/actions';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../../av-moderation/constants';
import { shouldShowModeratedNotification } from '../../av-moderation/functions';
import { setNoiseSuppressionEnabled } from '../../noise-suppression/actions';
import { showErrorNotification, showNotification } from '../../notifications/actions';
@@ -55,10 +56,10 @@ export function toggleScreensharing(
shareOptions: IShareOptions = {}) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
// check for A/V Moderation when trying to start screen sharing
if ((enabled || enabled === undefined) && shouldShowModeratedNotification(MEDIA_TYPE.VIDEO, getState())) {
dispatch(showModeratedNotification(MEDIA_TYPE.SCREENSHARE));
if ((enabled || enabled === undefined) && shouldShowModeratedNotification(AVM_MEDIA_TYPE.DESKTOP, getState())) {
dispatch(showModeratedNotification(AVM_MEDIA_TYPE.DESKTOP));
return Promise.reject();
return Promise.resolve();
}
return _toggleScreenSharing({

View File

@@ -1,10 +1,12 @@
import { IReduxState, IStore } from '../../app/types';
import { getSsrcRewritingFeatureFlag } from '../config/functions.any';
import { JitsiTrackErrors, browser } from '../lib-jitsi-meet';
import { gumPending } from '../media/actions';
import { CAMERA_FACING_MODE, MEDIA_TYPE, MediaType, VIDEO_TYPE } from '../media/constants';
import { IMediaState } from '../media/reducer';
import { IGUMPendingState } from '../media/types';
import {
getMutedStateByParticipantAndMediaType,
getVirtualScreenshareParticipantOwnerId,
isScreenShareParticipant
} from '../participants/functions';
@@ -35,6 +37,10 @@ export function isParticipantMediaMuted(participant: IParticipant | undefined,
return false;
}
if (getSsrcRewritingFeatureFlag(state)) {
return getMutedStateByParticipantAndMediaType(state, participant, mediaType);
}
const tracks = getTrackState(state);
if (participant?.local) {
@@ -53,10 +59,21 @@ export function isParticipantMediaMuted(participant: IParticipant | undefined,
* @param {IReduxState} state - Global state.
* @returns {boolean} - Is audio muted for the participant.
*/
export function isParticipantAudioMuted(participant: IParticipant, state: IReduxState) {
export function isParticipantAudioMuted(participant: IParticipant | undefined, state: IReduxState) {
return isParticipantMediaMuted(participant, MEDIA_TYPE.AUDIO, state);
}
/**
* Checks if the participant is screen-share muted.
*
* @param {IParticipant} participant - Participant reference.
* @param {IReduxState} state - Global state.
* @returns {boolean} - Is screen-share muted for the participant.
*/
export function isParticipantScreenShareMuted(participant: IParticipant | undefined, state: IReduxState) {
return isParticipantMediaMuted(participant, MEDIA_TYPE.SCREENSHARE, state);
}
/**
* Checks if the participant is video muted.
*
@@ -118,6 +135,10 @@ export function getLocalJitsiDesktopTrack(state: IReduxState) {
* @returns {(Track|undefined)}
*/
export function getLocalTrack(tracks: ITrack[], mediaType: MediaType, includePending = false) {
if (mediaType === MEDIA_TYPE.SCREENSHARE) {
return getLocalDesktopTrack(tracks, includePending);
}
return (
getLocalTracks(tracks, includePending)
.find(t => t.mediaType === mediaType));
@@ -216,6 +237,14 @@ export function getTrackByMediaTypeAndParticipant(
tracks: ITrack[],
mediaType: MediaType,
participantId?: string) {
if (!participantId) {
return;
}
if (mediaType === MEDIA_TYPE.SCREENSHARE) {
return getScreenShareTrack(tracks, participantId);
}
return tracks.find(
t => Boolean(t.jitsiTrack) && t.participantId === participantId && t.mediaType === mediaType
);
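
Illustrative usage of the updated selectors (import paths assume the helpers live alongside this file); with the changes above, a SCREENSHARE request is transparently routed to the desktop-track lookups:

import { IReduxState } from '../../app/types';
import { MEDIA_TYPE } from '../media/constants';

import { getLocalTrack, getTrackByMediaTypeAndParticipant } from './functions';

// Local screen-share track (delegates to getLocalDesktopTrack internally).
export function getLocalScreenShareTrack(state: IReduxState) {
    return getLocalTrack(state['features/base/tracks'], MEDIA_TYPE.SCREENSHARE);
}

// Remote screen-share track; returns undefined when no participant id is given.
export function getRemoteScreenShareTrack(state: IReduxState, participantId?: string) {
    return getTrackByMediaTypeAndParticipant(
        state['features/base/tracks'], MEDIA_TYPE.SCREENSHARE, participantId);
}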

View File

@@ -188,10 +188,6 @@ function _setMuted(store: IStore, { ensureTrack, muted }: {
const localTrack = _getLocalTrack(store, mediaType, /* includePending */ true);
const state = getState();
if (mediaType === MEDIA_TYPE.SCREENSHARE && !muted) {
return;
}
if (localTrack) {
// The `jitsiTrack` property will have a value only for a localTrack for which `getUserMedia` has already
// completed. If there's no `jitsiTrack`, then the `muted` state will be applied once the `jitsiTrack` is
@@ -203,8 +199,11 @@ function _setMuted(store: IStore, { ensureTrack, muted }: {
.catch(() => dispatch(trackMuteUnmuteFailed(localTrack, muted)));
}
} else if (!muted && ensureTrack) {
// TODO(saghul): reconcile these 2 types.
const createMediaType = mediaType === MEDIA_TYPE.SCREENSHARE ? 'desktop' : mediaType;
typeof APP !== 'undefined' && dispatch(gumPending([ mediaType ], IGUMPendingState.PENDING_UNMUTE));
dispatch(createLocalTracksA({ devices: [ mediaType ] })).then(() => {
dispatch(createLocalTracksA({ devices: [ createMediaType ] })).then(() => {
typeof APP !== 'undefined' && dispatch(gumPending([ mediaType ], IGUMPendingState.NONE));
});
}

View File

@@ -1,6 +1,5 @@
import { MODERATION_NOTIFICATIONS } from '../av-moderation/constants';
import { MODERATION_NOTIFICATIONS, MediaType } from '../av-moderation/constants';
import { IStateful } from '../base/app/types';
import { MediaType } from '../base/media/constants';
import { toState } from '../base/redux/functions';
/**

View File

@@ -11,6 +11,7 @@ import {
requestEnableAudioModeration,
requestEnableVideoModeration
} from '../../../av-moderation/actions';
import { MEDIA_TYPE } from '../../../av-moderation/constants';
import {
isEnabled as isAvModerationEnabled,
isSupported as isAvModerationSupported
@@ -20,7 +21,6 @@ import { hideSheet, openDialog } from '../../../base/dialog/actions';
import BottomSheet from '../../../base/dialog/components/native/BottomSheet';
import Icon from '../../../base/icons/components/Icon';
import { IconCheck, IconRaiseHand, IconVideoOff } from '../../../base/icons/svg';
import { MEDIA_TYPE } from '../../../base/media/constants';
import { raiseHand } from '../../../base/participants/actions';
import { getRaiseHandsQueue, isLocalParticipantModerator } from '../../../base/participants/functions';
import { LOWER_HAND_MESSAGE } from '../../../base/tracks/constants';

View File

@@ -2,12 +2,9 @@ import React, { PureComponent } from 'react';
import { connect } from 'react-redux';
import { IReduxState, IStore } from '../../../app/types';
import { getSsrcRewritingFeatureFlag } from '../../../base/config/functions.any';
import { translate } from '../../../base/i18n/functions';
import { MEDIA_TYPE } from '../../../base/media/constants';
import {
getLocalParticipant,
getMutedStateByParticipantAndMediaType,
getParticipantById,
getParticipantDisplayName,
hasRaisedHand,
@@ -169,12 +166,8 @@ function mapStateToProps(state: IReduxState, ownProps: any) {
const { participant } = ownProps;
const { ownerId } = state['features/shared-video'];
const localParticipantId = getLocalParticipant(state)?.id;
const _isAudioMuted = getSsrcRewritingFeatureFlag(state)
? Boolean(participant && getMutedStateByParticipantAndMediaType(state, participant, MEDIA_TYPE.AUDIO))
: Boolean(participant && isParticipantAudioMuted(participant, state));
const _isVideoMuted = getSsrcRewritingFeatureFlag(state)
? Boolean(participant && getMutedStateByParticipantAndMediaType(state, participant, MEDIA_TYPE.VIDEO))
: isParticipantVideoMuted(participant, state);
const _isAudioMuted = isParticipantAudioMuted(participant, state);
const _isVideoMuted = isParticipantVideoMuted(participant, state);
const audioMediaState = getParticipantAudioMediaState(participant, _isAudioMuted, state);
const videoMediaState = getParticipantVideoMediaState(participant, _isVideoMuted, state);
const { disableModeratorIndicator } = state['features/base/config'];

View File

@@ -6,10 +6,13 @@ import { makeStyles } from 'tss-react/mui';
import { IReduxState } from '../../../app/types';
import {
requestDisableAudioModeration,
requestDisableDesktopModeration,
requestDisableVideoModeration,
requestEnableAudioModeration,
requestEnableDesktopModeration,
requestEnableVideoModeration
} from '../../../av-moderation/actions';
import { MEDIA_TYPE } from '../../../av-moderation/constants';
import {
isEnabled as isAvModerationEnabled,
isSupported as isAvModerationSupported
@@ -18,9 +21,9 @@ import { openDialog } from '../../../base/dialog/actions';
import {
IconCheck,
IconDotsHorizontal,
IconScreenshare,
IconVideoOff
} from '../../../base/icons/svg';
import { MEDIA_TYPE } from '../../../base/media/constants';
import { getRaiseHandsQueue } from '../../../base/participants/functions';
import { withPixelLineHeight } from '../../../base/styles/functions.web';
import ContextMenu from '../../../base/ui/components/web/ContextMenu';
@@ -30,6 +33,7 @@ import { openSettingsDialog } from '../../../settings/actions.web';
import { SETTINGS_TABS } from '../../../settings/constants';
import { shouldShowModeratorSettings } from '../../../settings/functions.web';
import LowerHandButton from '../../../video-menu/components/web/LowerHandButton';
import MuteEveryonesDesktopDialog from '../../../video-menu/components/web/MuteEveryonesDesktopDialog';
import MuteEveryonesVideoDialog from '../../../video-menu/components/web/MuteEveryonesVideoDialog';
const useStyles = makeStyles()(theme => {
@@ -86,17 +90,16 @@ export const FooterContextMenu = ({ isOpen, onDrawerClose, onMouseLeave }: IProp
const raisedHandsQueue = useSelector(getRaiseHandsQueue);
const isModeratorSettingsTabEnabled = useSelector(shouldShowModeratorSettings);
const isAudioModerationEnabled = useSelector(isAvModerationEnabled(MEDIA_TYPE.AUDIO));
const isDesktopModerationEnabled = useSelector(isAvModerationEnabled(MEDIA_TYPE.DESKTOP));
const isVideoModerationEnabled = useSelector(isAvModerationEnabled(MEDIA_TYPE.VIDEO));
const isBreakoutRoom = useSelector(isInBreakoutRoom);
const { t } = useTranslation();
const disableAudioModeration = useCallback(() => dispatch(requestDisableAudioModeration()), [ dispatch ]);
const disableDesktopModeration = useCallback(() => dispatch(requestDisableDesktopModeration()), [ dispatch ]);
const disableVideoModeration = useCallback(() => dispatch(requestDisableVideoModeration()), [ dispatch ]);
const enableAudioModeration = useCallback(() => dispatch(requestEnableAudioModeration()), [ dispatch ]);
const enableDesktopModeration = useCallback(() => dispatch(requestEnableDesktopModeration()), [ dispatch ]);
const enableVideoModeration = useCallback(() => dispatch(requestEnableVideoModeration()), [ dispatch ]);
const { classes } = useStyles();
@@ -104,6 +107,9 @@ export const FooterContextMenu = ({ isOpen, onDrawerClose, onMouseLeave }: IProp
const muteAllVideo = useCallback(
() => dispatch(openDialog(MuteEveryonesVideoDialog)), [ dispatch ]);
const muteAllDesktop = useCallback(
() => dispatch(openDialog(MuteEveryonesDesktopDialog)), [ dispatch ]);
const openModeratorSettings = () => dispatch(openSettingsDialog(SETTINGS_TABS.MODERATOR));
const actions = [
@@ -125,6 +131,15 @@ export const FooterContextMenu = ({ isOpen, onDrawerClose, onMouseLeave }: IProp
icon: !isVideoModerationEnabled && IconCheck,
onClick: isVideoModerationEnabled ? disableVideoModeration : enableVideoModeration,
text: t('participantsPane.actions.videoModeration')
}, {
accessibilityLabel: t('participantsPane.actions.desktopModeration'),
className: isDesktopModerationEnabled ? classes.indentedLabel : '',
id: isDesktopModerationEnabled
? 'participants-pane-context-menu-stop-desktop-moderation'
: 'participants-pane-context-menu-start-desktop-moderation',
icon: !isDesktopModerationEnabled && IconCheck,
onClick: isDesktopModerationEnabled ? disableDesktopModeration : enableDesktopModeration,
text: t('participantsPane.actions.desktopModeration')
}
];
@@ -137,13 +152,22 @@ export const FooterContextMenu = ({ isOpen, onDrawerClose, onMouseLeave }: IProp
onDrawerClose = { onDrawerClose }
onMouseLeave = { onMouseLeave }>
<ContextMenuItemGroup
actions = { [ {
accessibilityLabel: t('participantsPane.actions.stopEveryonesVideo'),
id: 'participants-pane-context-menu-stop-video',
icon: IconVideoOff,
onClick: muteAllVideo,
text: t('participantsPane.actions.stopEveryonesVideo')
} ] } />
actions = { [
{
accessibilityLabel: t('participantsPane.actions.stopEveryonesVideo'),
id: 'participants-pane-context-menu-stop-video',
icon: IconVideoOff,
onClick: muteAllVideo,
text: t('participantsPane.actions.stopEveryonesVideo')
},
{
accessibilityLabel: t('participantsPane.actions.stopEveryonesDesktop'),
id: 'participants-pane-context-menu-stop-desktop',
icon: IconScreenshare,
onClick: muteAllDesktop,
text: t('participantsPane.actions.stopEveryonesDesktop')
}
] } />
{raisedHandsQueue.length !== 0 && <LowerHandButton />}
{!isBreakoutRoom && isModerationSupported && (
<ContextMenuItemGroup actions = { actions }>

View File

@@ -2,12 +2,10 @@ import React, { useCallback, useEffect, useState } from 'react';
import { connect } from 'react-redux';
import { IReduxState } from '../../../app/types';
import { getSsrcRewritingFeatureFlag } from '../../../base/config/functions.any';
import { JitsiTrackEvents } from '../../../base/lib-jitsi-meet';
import { MEDIA_TYPE } from '../../../base/media/constants';
import {
getLocalParticipant,
getMutedStateByParticipantAndMediaType,
getParticipantByIdOrUndefined,
getParticipantDisplayName,
hasRaisedHand,
@@ -123,11 +121,6 @@ interface IProps {
*/
isInBreakoutRoom: boolean;
/**
* Callback used to open a confirmation dialog for audio muting.
*/
muteAudio: Function;
/**
* The translated text for the mute participant button.
*/
@@ -153,7 +146,6 @@ interface IProps {
*/
overflowDrawer: boolean;
/**
* The aria-label for the ellipsis action.
*/
@@ -164,11 +156,6 @@ interface IProps {
*/
participantID?: string;
/**
* Callback used to stop a participant's video.
*/
stopVideo: Function;
/**
* The translated "you" text.
*/
@@ -196,13 +183,11 @@ function MeetingParticipantItem({
_videoMediaState,
isHighlighted,
isInBreakoutRoom,
muteAudio,
onContextMenu,
onLeave,
openDrawerForParticipant,
overflowDrawer,
participantActionEllipsisLabel,
stopVideo,
youText
}: IProps) {
@@ -268,10 +253,8 @@ function MeetingParticipantItem({
{!isInBreakoutRoom && (
<ParticipantQuickAction
buttonType = { _quickActionButtonType }
muteAudio = { muteAudio }
participantID = { _participantID }
participantName = { _displayName }
stopVideo = { stopVideo } />
participantName = { _displayName } />
)}
<ParticipantActionEllipsis
accessibilityLabel = { participantActionEllipsisLabel }
@@ -304,15 +287,11 @@ function _mapStateToProps(state: IReduxState, ownProps: any) {
const participant = getParticipantByIdOrUndefined(state, participantID);
const _displayName = getParticipantDisplayName(state, participant?.id ?? '');
const _matchesSearch = participantMatchesSearch(participant, searchString);
const _isAudioMuted = getSsrcRewritingFeatureFlag(state)
? Boolean(participant && getMutedStateByParticipantAndMediaType(state, participant, MEDIA_TYPE.AUDIO))
: Boolean(participant && isParticipantAudioMuted(participant, state));
const _isVideoMuted = getSsrcRewritingFeatureFlag(state)
? Boolean(participant && getMutedStateByParticipantAndMediaType(state, participant, MEDIA_TYPE.VIDEO))
: isParticipantVideoMuted(participant, state);
const _isAudioMuted = isParticipantAudioMuted(participant, state);
const _isVideoMuted = isParticipantVideoMuted(participant, state);
const _audioMediaState = getParticipantAudioMediaState(participant, _isAudioMuted, state);
const _videoMediaState = getParticipantVideoMediaState(participant, _isVideoMuted, state);
const _quickActionButtonType = getQuickActionButtonType(participant, _isAudioMuted, _isVideoMuted, state);
const _quickActionButtonType = getQuickActionButtonType(participant, state);
const tracks = state['features/base/tracks'];
const _audioTrack = participantID === localParticipantId

View File

@@ -19,11 +19,6 @@ interface IProps {
*/
lowerMenu: Function;
/**
* Callback used to open a confirmation dialog for audio muting.
*/
muteAudio: Function;
/**
* The translated text for the mute participant button.
*/
@@ -59,11 +54,6 @@ interface IProps {
*/
searchString?: string;
/**
* Callback used to stop a participant's video.
*/
stopVideo: Function;
/**
* Callback for the activation of this item's context menu.
*/
@@ -84,14 +74,12 @@ function MeetingParticipantItems({
isInBreakoutRoom,
lowerMenu,
toggleMenu,
muteAudio,
participantIds,
openDrawerForParticipant,
overflowDrawer,
raiseContextId,
participantActionEllipsisLabel,
searchString,
stopVideo,
youText
}: IProps) {
const renderParticipant = (id: string) => (
@@ -99,7 +87,6 @@ function MeetingParticipantItems({
isHighlighted = { raiseContextId === id }
isInBreakoutRoom = { isInBreakoutRoom }
key = { id }
muteAudio = { muteAudio }
onContextMenu = { toggleMenu(id) }
onLeave = { lowerMenu }
openDrawerForParticipant = { openDrawerForParticipant }
@@ -107,7 +94,6 @@ function MeetingParticipantItems({
participantActionEllipsisLabel = { participantActionEllipsisLabel }
participantID = { id }
searchString = { searchString }
stopVideo = { stopVideo }
youText = { youText } />
);

View File

@@ -1,12 +1,10 @@
import React, { useCallback } from 'react';
import React from 'react';
import { useTranslation } from 'react-i18next';
import { connect, useDispatch, useSelector } from 'react-redux';
import { connect, useSelector } from 'react-redux';
import { makeStyles } from 'tss-react/mui';
import { IReduxState } from '../../../app/types';
import { rejectParticipantAudio, rejectParticipantVideo } from '../../../av-moderation/actions';
import participantsPaneTheme from '../../../base/components/themes/participantsPaneTheme.json';
import { MEDIA_TYPE } from '../../../base/media/constants';
import { getParticipantById, isScreenShareParticipant } from '../../../base/participants/functions';
import { withPixelLineHeight } from '../../../base/styles/functions.web';
import Input from '../../../base/ui/components/web/Input';
@@ -14,7 +12,6 @@ import useContextMenu from '../../../base/ui/hooks/useContextMenu.web';
import { normalizeAccents } from '../../../base/util/strings.web';
import { getBreakoutRooms, getCurrentRoomId, isInBreakoutRoom } from '../../../breakout-rooms/functions';
import { isButtonEnabled, showOverflowDrawer } from '../../../toolbox/functions.web';
import { muteRemote } from '../../../video-menu/actions.web';
import { iAmVisitor } from '../../../visitors/functions';
import { getSortedParticipantIds, isCurrentRoomRenamable, shouldRenderInviteButton } from '../../functions';
import { useParticipantDrawer } from '../../hooks';
@@ -82,18 +79,9 @@ function MeetingParticipants({
showInviteButton,
sortedParticipantIds = []
}: IProps) {
const dispatch = useDispatch();
const { t } = useTranslation();
const [ lowerMenu, , toggleMenu, menuEnter, menuLeave, raiseContext ] = useContextMenu<string>();
const muteAudio = useCallback(id => () => {
dispatch(muteRemote(id, MEDIA_TYPE.AUDIO));
dispatch(rejectParticipantAudio(id));
}, [ dispatch ]);
const stopVideo = useCallback(id => () => {
dispatch(muteRemote(id, MEDIA_TYPE.VIDEO));
dispatch(rejectParticipantVideo(id));
}, [ dispatch ]);
const [ drawerParticipant, closeDrawer, openDrawerForParticipant ] = useParticipantDrawer();
// FIXME:
@@ -140,21 +128,18 @@ function MeetingParticipants({
<MeetingParticipantItems
isInBreakoutRoom = { isBreakoutRoom }
lowerMenu = { lowerMenu }
muteAudio = { muteAudio }
openDrawerForParticipant = { openDrawerForParticipant }
overflowDrawer = { overflowDrawer }
participantActionEllipsisLabel = { participantActionEllipsisLabel }
participantIds = { sortedParticipantIds }
raiseContextId = { raiseContext.entity }
searchString = { normalizeAccents(searchString) }
stopVideo = { stopVideo }
toggleMenu = { toggleMenu }
youText = { youText } />
</div>
<MeetingParticipantContextMenu
closeDrawer = { closeDrawer }
drawerParticipant = { drawerParticipant }
muteAudio = { muteAudio }
offsetTarget = { raiseContext?.offsetTarget }
onEnter = { menuEnter }
onLeave = { menuLeave }

View File

@@ -3,8 +3,17 @@ import { useTranslation } from 'react-i18next';
import { useDispatch } from 'react-redux';
import { makeStyles } from 'tss-react/mui';
import { approveParticipantAudio, approveParticipantVideo } from '../../../av-moderation/actions';
import {
approveParticipantAudio,
approveParticipantDesktop,
approveParticipantVideo,
rejectParticipantAudio,
rejectParticipantDesktop,
rejectParticipantVideo
} from '../../../av-moderation/actions';
import { MEDIA_TYPE } from '../../../base/media/constants';
import Button from '../../../base/ui/components/web/Button';
import { muteRemote } from '../../../video-menu/actions.web';
import { QUICK_ACTION_BUTTON } from '../../constants';
interface IProps {
@@ -24,11 +33,6 @@ interface IProps {
*/
buttonType: string;
/**
* Callback used to open a confirmation dialog for audio muting.
*/
muteAudio: Function;
/**
* Label for mute participant button.
*/
@@ -44,11 +48,6 @@ interface IProps {
*/
participantName: string;
/**
* Callback used to stop a participant's video.
*/
stopVideo: Function;
}
const useStyles = makeStyles()(theme => {
@@ -61,10 +60,8 @@ const useStyles = makeStyles()(theme => {
const ParticipantQuickAction = ({
buttonType,
muteAudio,
participantID,
participantName,
stopVideo
participantName
}: IProps) => {
const { classes: styles } = useStyles();
const dispatch = useDispatch();
@@ -74,10 +71,29 @@ const ParticipantQuickAction = ({
dispatch(approveParticipantAudio(participantID));
}, [ dispatch, participantID ]);
const allowDesktop = useCallback(() => {
dispatch(approveParticipantDesktop(participantID));
}, [ dispatch, participantID ]);
const allowVideo = useCallback(() => {
dispatch(approveParticipantVideo(participantID));
}, [ dispatch, participantID ]);
const muteAudio = useCallback(() => {
dispatch(muteRemote(participantID, MEDIA_TYPE.AUDIO));
dispatch(rejectParticipantAudio(participantID));
}, [ dispatch, participantID ]);
const stopDesktop = useCallback(() => {
dispatch(muteRemote(participantID, MEDIA_TYPE.SCREENSHARE));
dispatch(rejectParticipantDesktop(participantID));
}, [ dispatch, participantID ]);
const stopVideo = useCallback(() => {
dispatch(muteRemote(participantID, MEDIA_TYPE.VIDEO));
dispatch(rejectParticipantVideo(participantID));
}, [ dispatch, participantID ]);
switch (buttonType) {
case QUICK_ACTION_BUTTON.MUTE: {
return (
@@ -85,7 +101,7 @@ const ParticipantQuickAction = ({
accessibilityLabel = { `${t('participantsPane.actions.mute')} ${participantName}` }
className = { styles.button }
label = { t('participantsPane.actions.mute') }
onClick = { muteAudio(participantID) }
onClick = { muteAudio }
size = 'small'
testId = { `mute-audio-${participantID}` } />
);
@@ -101,6 +117,17 @@ const ParticipantQuickAction = ({
testId = { `unmute-audio-${participantID}` } />
);
}
case QUICK_ACTION_BUTTON.ALLOW_DESKTOP: {
return (
<Button
accessibilityLabel = { `${t('participantsPane.actions.askDesktop')} ${participantName}` }
className = { styles.button }
label = { t('participantsPane.actions.allowDesktop') }
onClick = { allowDesktop }
size = 'small'
testId = { `unmute-desktop-${participantID}` } />
);
}
case QUICK_ACTION_BUTTON.ALLOW_VIDEO: {
return (
<Button
@@ -112,13 +139,24 @@ const ParticipantQuickAction = ({
testId = { `unmute-video-${participantID}` } />
);
}
case QUICK_ACTION_BUTTON.STOP_DESKTOP: {
return (
<Button
accessibilityLabel = { `${t('participantsPane.actions.stopDesktop')} ${participantName}` }
className = { styles.button }
label = { t('participantsPane.actions.stopDesktop') }
onClick = { stopDesktop }
size = 'small'
testId = { `mute-desktop-${participantID}` } />
);
}
case QUICK_ACTION_BUTTON.STOP_VIDEO: {
return (
<Button
accessibilityLabel = { `${t('participantsPane.actions.mute')} ${participantName}` }
className = { styles.button }
label = { t('participantsPane.actions.stopVideo') }
onClick = { stopVideo(participantID) }
onClick = { stopVideo }
size = 'small'
testId = { `mute-video-${participantID}` } />
);
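Note that with the mute/stop handlers now created inside ParticipantQuickAction via useDispatch, callers only pass identity props, as MeetingParticipantItem does earlier in this commit. A minimal render sketch follows; the underscore-prefixed bindings are illustrative stand-ins for whatever the caller derives from state.

// Illustrative JSX usage of the slimmed-down props; not part of the diff.
<ParticipantQuickAction
    buttonType = { _quickActionButtonType }
    participantID = { _participantID }
    participantName = { _displayName } />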

View File

@@ -45,7 +45,6 @@ const useStyles = makeStyles<IStylesProps>()((theme, { isChatOpen }) => {
participantsPane: {
backgroundColor: theme.palette.ui01,
flexShrink: 0,
overflow: 'hidden',
position: 'relative',
transition: 'width .16s ease-in-out',
width: '315px',
@@ -72,7 +71,6 @@ const useStyles = makeStyles<IStylesProps>()((theme, { isChatOpen }) => {
container: {
boxSizing: 'border-box',
flex: 1,
overflowY: 'auto',
position: 'relative',
padding: `0 ${participantsPaneTheme.panePadding}px`,
display: 'flex',

View File

@@ -36,22 +36,27 @@ export const MEDIA_STATE: { [key: string]: MediaState; } = {
NONE: 'None'
};
export type QuickActionButtonType = 'Mute' | 'AskToUnmute' | 'AllowVideo' | 'StopVideo' | 'None';
export type QuickActionButtonType
= 'Mute' | 'AskToUnmute' | 'AllowVideo' | 'StopVideo' | 'AllowDesktop' | 'StopDesktop' | 'None';
/**
* Enum of possible participant mute button states.
*/
export const QUICK_ACTION_BUTTON: {
ALLOW_DESKTOP: QuickActionButtonType;
ALLOW_VIDEO: QuickActionButtonType;
ASK_TO_UNMUTE: QuickActionButtonType;
MUTE: QuickActionButtonType;
NONE: QuickActionButtonType;
STOP_DESKTOP: QuickActionButtonType;
STOP_VIDEO: QuickActionButtonType;
} = {
ALLOW_DESKTOP: 'AllowDesktop',
ALLOW_VIDEO: 'AllowVideo',
MUTE: 'Mute',
ASK_TO_UNMUTE: 'AskToUnmute',
NONE: 'None',
STOP_DESKTOP: 'StopDesktop',
STOP_VIDEO: 'StopVideo'
};

View File

@@ -1,8 +1,7 @@
import { IReduxState } from '../app/types';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../av-moderation/constants';
import {
isEnabledFromState,
isLocalParticipantApprovedFromState,
isParticipantApproved,
isForceMuted,
isSupported
} from '../av-moderation/functions';
import { IStateful } from '../base/app/types';
@@ -10,48 +9,26 @@ import theme from '../base/components/themes/participantsPaneTheme.json';
import { getCurrentConference } from '../base/conference/functions';
import { INVITE_ENABLED, PARTICIPANTS_ENABLED } from '../base/flags/constants';
import { getFeatureFlag } from '../base/flags/functions';
import { MEDIA_TYPE, type MediaType } from '../base/media/constants';
import {
getDominantSpeakerParticipant,
getLocalParticipant,
getRaiseHandsQueue,
getRemoteParticipantsSorted,
isLocalParticipantModerator,
isParticipantModerator
isLocalParticipantModerator
} from '../base/participants/functions';
import { IParticipant } from '../base/participants/types';
import { toState } from '../base/redux/functions';
import {
isParticipantAudioMuted,
isParticipantScreenShareMuted,
isParticipantVideoMuted
} from '../base/tracks/functions.any';
import { normalizeAccents } from '../base/util/strings';
import { BREAKOUT_ROOMS_RENAME_FEATURE } from '../breakout-rooms/constants';
import { isInBreakoutRoom } from '../breakout-rooms/functions';
import { MEDIA_STATE, QUICK_ACTION_BUTTON, REDUCER_KEY } from './constants';
/**
* Checks if a participant is force muted.
*
* @param {IParticipant|undefined} participant - The participant.
* @param {MediaType} mediaType - The media type.
* @param {IReduxState} state - The redux state.
* @returns {MediaState}
*/
export function isForceMuted(participant: IParticipant | undefined, mediaType: MediaType, state: IReduxState) {
if (isEnabledFromState(mediaType, state)) {
if (participant?.local) {
return !isLocalParticipantApprovedFromState(mediaType, state);
}
// moderators cannot be force muted
if (isParticipantModerator(participant)) {
return false;
}
return !isParticipantApproved(participant?.id ?? '', mediaType)(state);
}
return false;
}
/**
* Determines the audio media state (the mic icon) for a participant.
*
@@ -69,7 +46,7 @@ export function getParticipantAudioMediaState(participant: IParticipant | undefi
}
if (muted) {
if (isForceMuted(participant, MEDIA_TYPE.AUDIO, state)) {
if (isForceMuted(participant, AVM_MEDIA_TYPE.AUDIO, state)) {
return MEDIA_STATE.FORCE_MUTED;
}
@@ -94,7 +71,7 @@ export function getParticipantAudioMediaState(participant: IParticipant | undefi
export function getParticipantVideoMediaState(participant: IParticipant | undefined,
muted: Boolean, state: IReduxState) {
if (muted) {
if (isForceMuted(participant, MEDIA_TYPE.VIDEO, state)) {
if (isForceMuted(participant, AVM_MEDIA_TYPE.VIDEO, state)) {
return MEDIA_STATE.FORCE_MUTED;
}
@@ -139,33 +116,44 @@ export const getParticipantsPaneOpen = (state: IReduxState) => Boolean(getState(
* The button is displayed when hovering a participant from the participant list.
*
* @param {IParticipant} participant - The participant.
* @param {boolean} isAudioMuted - If audio is muted for the participant.
* @param {boolean} isVideoMuted - If audio is muted for the participant.
* @param {IReduxState} state - The redux state.
* @returns {string} - The type of the quick action button.
*/
export function getQuickActionButtonType(
participant: IParticipant | undefined,
isAudioMuted: Boolean,
isVideoMuted: Boolean,
state: IReduxState) {
// handled only by moderators
const isVideoForceMuted = isForceMuted(participant, MEDIA_TYPE.VIDEO, state);
const isParticipantSilent = participant?.isSilent || false;
export function getQuickActionButtonType(participant: IParticipant | undefined, state: IReduxState) {
if (!isLocalParticipantModerator(state)) {
return QUICK_ACTION_BUTTON.NONE;
}
if (isLocalParticipantModerator(state)) {
if (!isAudioMuted && !isParticipantSilent) {
return QUICK_ACTION_BUTTON.MUTE;
}
if (!isVideoMuted) {
return QUICK_ACTION_BUTTON.STOP_VIDEO;
}
if (isSupported()(state) && !isParticipantSilent) {
return QUICK_ACTION_BUTTON.ASK_TO_UNMUTE;
}
if (isVideoForceMuted) {
return QUICK_ACTION_BUTTON.ALLOW_VIDEO;
}
// Handled only by moderators.
const isAudioMuted = isParticipantAudioMuted(participant, state);
const isScreenShareMuted = isParticipantScreenShareMuted(participant, state);
const isVideoMuted = isParticipantVideoMuted(participant, state);
const isDesktopForceMuted = isForceMuted(participant, AVM_MEDIA_TYPE.DESKTOP, state);
const isVideoForceMuted = isForceMuted(participant, AVM_MEDIA_TYPE.VIDEO, state);
const isParticipantSilent = participant?.isSilent ?? false;
if (!isAudioMuted && !isParticipantSilent) {
return QUICK_ACTION_BUTTON.MUTE;
}
if (!isVideoMuted) {
return QUICK_ACTION_BUTTON.STOP_VIDEO;
}
if (!isScreenShareMuted) {
return QUICK_ACTION_BUTTON.STOP_DESKTOP;
}
if (isSupported()(state) && !isParticipantSilent) {
return QUICK_ACTION_BUTTON.ASK_TO_UNMUTE;
}
if (isVideoForceMuted) {
return QUICK_ACTION_BUTTON.ALLOW_VIDEO;
}
if (isDesktopForceMuted) {
return QUICK_ACTION_BUTTON.ALLOW_DESKTOP;
}
return QUICK_ACTION_BUTTON.NONE;
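For orientation, the refactored helper now derives the mute states itself and resolves the button in a fixed precedence order. A minimal call sketch follows; the summary comments and the `participant`/`state` bindings are illustrative, not part of the diff.

// Illustrative precedence, per the checks above:
//   unmuted mic (participant not silent)   -> MUTE
//   camera on                              -> STOP_VIDEO
//   screen share on                        -> STOP_DESKTOP
//   all muted, A/V moderation supported    -> ASK_TO_UNMUTE
//   force-muted video / desktop            -> ALLOW_VIDEO / ALLOW_DESKTOP
//   otherwise                              -> NONE
const buttonType = getQuickActionButtonType(participant, state);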

View File

@@ -1,11 +1,11 @@
import { IReduxState } from '../app/types';
import { MEDIA_TYPE } from '../av-moderation/constants';
import { isEnabledFromState } from '../av-moderation/functions';
import { IStateful } from '../base/app/types';
import { isNameReadOnly } from '../base/config/functions.any';
import { SERVER_URL_CHANGE_ENABLED } from '../base/flags/constants';
import { getFeatureFlag } from '../base/flags/functions';
import i18next, { DEFAULT_LANGUAGE, LANGUAGES } from '../base/i18n/i18next';
import { MEDIA_TYPE } from '../base/media/constants';
import { getLocalParticipant } from '../base/participants/functions';
import { toState } from '../base/redux/functions';
import { getHideSelfView } from '../base/settings/functions.any';

View File

@@ -1,15 +1,15 @@
import { MEDIA_TYPE } from '../av-moderation/constants';
import { isForceMuted } from '../av-moderation/functions';
import { APP_WILL_MOUNT, APP_WILL_UNMOUNT } from '../base/app/actionTypes';
import { CONFERENCE_JOINED } from '../base/conference/actionTypes';
import { JitsiConferenceEvents } from '../base/lib-jitsi-meet';
import { setAudioMuted } from '../base/media/actions';
import { MEDIA_TYPE } from '../base/media/constants';
import { raiseHand } from '../base/participants/actions';
import { getLocalParticipant } from '../base/participants/functions';
import MiddlewareRegistry from '../base/redux/MiddlewareRegistry';
import { playSound, registerSound, unregisterSound } from '../base/sounds/actions';
import { hideNotification, showNotification } from '../notifications/actions';
import { NOTIFICATION_TIMEOUT_TYPE } from '../notifications/constants';
import { isForceMuted } from '../participants-pane/functions';
import { isAudioMuteButtonDisabled } from '../toolbox/functions.any';
import { setCurrentNotificationUid } from './actions';

View File

@@ -1,19 +1,26 @@
import {
AUDIO_MUTE,
DESKTOP_MUTE,
VIDEO_MUTE,
createRemoteMuteConfirmedEvent,
createToolbarEvent
} from '../analytics/AnalyticsEvents';
import { sendAnalytics } from '../analytics/functions';
import { IStore } from '../app/types';
import { rejectParticipantAudio, rejectParticipantVideo, showModeratedNotification } from '../av-moderation/actions';
import { shouldShowModeratedNotification } from '../av-moderation/functions';
import { setAudioMuted, setVideoMuted } from '../base/media/actions';
import { MEDIA_TYPE, MediaType, VIDEO_MUTISM_AUTHORITY } from '../base/media/constants';
import {
rejectParticipantAudio,
rejectParticipantDesktop,
rejectParticipantVideo
} from '../av-moderation/actions';
import { setAudioMuted, setScreenshareMuted, setVideoMuted } from '../base/media/actions';
import {
MEDIA_TYPE,
MediaType,
SCREENSHARE_MUTISM_AUTHORITY,
VIDEO_MUTISM_AUTHORITY
} from '../base/media/constants';
import { muteRemoteParticipant } from '../base/participants/actions';
import { getRemoteParticipants } from '../base/participants/functions';
import { toggleScreensharing } from '../base/tracks/actions';
import { isModerationNotificationDisplayed } from '../notifications/functions';
import logger from './logger';
@@ -22,39 +29,31 @@ import logger from './logger';
*
* @param {boolean} enable - Whether to mute or unmute.
* @param {MEDIA_TYPE} mediaType - The type of the media channel to mute.
* @param {boolean} stopScreenSharing - Whether or not to stop the screensharing.
* @returns {Function}
*/
export function muteLocal(enable: boolean, mediaType: MediaType, stopScreenSharing = false) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const isAudio = mediaType === MEDIA_TYPE.AUDIO;
if (!isAudio && mediaType !== MEDIA_TYPE.VIDEO) {
export function muteLocal(enable: boolean, mediaType: MediaType) {
return (dispatch: IStore['dispatch']) => {
switch (mediaType) {
case MEDIA_TYPE.AUDIO: {
sendAnalytics(createToolbarEvent(AUDIO_MUTE, { enable }));
dispatch(setAudioMuted(enable, /* ensureTrack */ true));
break;
}
case MEDIA_TYPE.SCREENSHARE: {
sendAnalytics(createToolbarEvent(DESKTOP_MUTE, { enable }));
dispatch(setScreenshareMuted(enable, SCREENSHARE_MUTISM_AUTHORITY.USER, /* ensureTrack */ true));
break;
}
case MEDIA_TYPE.VIDEO: {
sendAnalytics(createToolbarEvent(VIDEO_MUTE, { enable }));
dispatch(setVideoMuted(enable, VIDEO_MUTISM_AUTHORITY.USER, /* ensureTrack */ true));
break;
}
default: {
logger.error(`Unsupported media type: ${mediaType}`);
return;
}
// check for A/V Moderation when trying to unmute
if (isAudio && !enable && shouldShowModeratedNotification(MEDIA_TYPE.AUDIO, getState())) {
if (!isModerationNotificationDisplayed(MEDIA_TYPE.AUDIO, getState())) {
dispatch(showModeratedNotification(MEDIA_TYPE.AUDIO));
}
return;
}
if (enable && stopScreenSharing) {
dispatch(toggleScreensharing(false, false));
}
sendAnalytics(createToolbarEvent(isAudio ? AUDIO_MUTE : VIDEO_MUTE, { enable }));
dispatch(isAudio ? setAudioMuted(enable, /* ensureTrack */ true)
: setVideoMuted(enable, VIDEO_MUTISM_AUTHORITY.USER, /* ensureTrack */ true));
// FIXME: The old conference logic still relies on this event being emitted.
if (typeof APP !== 'undefined') {
isAudio ? APP.conference.muteAudio(enable) : APP.conference.muteVideo(enable, false);
}
};
}
@@ -68,13 +67,12 @@ export function muteLocal(enable: boolean, mediaType: MediaType, stopScreenShari
*/
export function muteRemote(participantId: string, mediaType: MediaType) {
return (dispatch: IStore['dispatch']) => {
if (mediaType !== MEDIA_TYPE.AUDIO && mediaType !== MEDIA_TYPE.VIDEO) {
logger.error(`Unsupported media type: ${mediaType}`);
return;
}
sendAnalytics(createRemoteMuteConfirmedEvent(participantId, mediaType));
dispatch(muteRemoteParticipant(participantId, mediaType));
// TODO(saghul): reconcile these 2 types.
const muteMediaType = mediaType === MEDIA_TYPE.SCREENSHARE ? 'desktop' : mediaType;
dispatch(muteRemoteParticipant(participantId, muteMediaType));
};
}
@@ -97,8 +95,10 @@ export function muteAllParticipants(exclude: Array<string>, mediaType: MediaType
dispatch(muteRemote(id, mediaType));
if (mediaType === MEDIA_TYPE.AUDIO) {
dispatch(rejectParticipantAudio(id));
} else {
} else if (mediaType === MEDIA_TYPE.VIDEO) {
dispatch(rejectParticipantVideo(id));
} else if (mediaType === MEDIA_TYPE.SCREENSHARE) {
dispatch(rejectParticipantDesktop(id));
}
});
};
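With screenshare handled as a first-class media type in these actions, dispatching is uniform across audio, video and desktop. A brief usage sketch; `participantId` and `localId` are hypothetical values and the surrounding dispatch wiring is illustrative.

// Illustrative dispatches against the refactored actions above.
dispatch(muteLocal(true, MEDIA_TYPE.AUDIO));                         // mute the local microphone
dispatch(muteLocal(true, MEDIA_TYPE.SCREENSHARE));                   // stop the local screen share
dispatch(muteRemote(participantId, MEDIA_TYPE.SCREENSHARE));         // stop one remote participant's share
dispatch(muteAllParticipants([ localId ], MEDIA_TYPE.SCREENSHARE));  // stop everyone else's share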

View File

@@ -1,24 +1,26 @@
import { IReduxState } from '../../app/types';
import { Component } from 'react';
import { WithTranslation } from 'react-i18next';
import { IReduxState, IStore } from '../../app/types';
import { requestDisableAudioModeration, requestEnableAudioModeration } from '../../av-moderation/actions';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../../av-moderation/constants';
import { isEnabledFromState, isSupported } from '../../av-moderation/functions';
import { MEDIA_TYPE } from '../../base/media/constants';
import { getLocalParticipant, getParticipantDisplayName, isEveryoneModerator } from '../../base/participants/functions';
import { muteAllParticipants } from '../actions';
import AbstractMuteRemoteParticipantDialog, {
type IProps as AbstractProps
} from './AbstractMuteRemoteParticipantDialog';
/**
* The type of the React {@code Component} props of
* {@link AbstractMuteEveryoneDialog}.
*/
export interface IProps extends AbstractProps {
export interface IProps extends WithTranslation {
content?: string;
dispatch: IStore['dispatch'];
exclude: Array<string>;
isAudioModerationEnabled?: boolean;
isEveryoneModerator: boolean;
isModerationSupported?: boolean;
participantID: string;
showAdvancedModerationToggle: boolean;
title: string;
}
@@ -33,17 +35,15 @@ interface IState {
* An abstract Component with the contents for a dialog that asks for confirmation
* from the user before muting all remote participants.
*
* @augments AbstractMuteRemoteParticipantDialog
*/
export default class AbstractMuteEveryoneDialog<P extends IProps> extends
AbstractMuteRemoteParticipantDialog<P, IState> {
export default class AbstractMuteEveryoneDialog<P extends IProps> extends Component<P, IState> {
static defaultProps = {
exclude: [],
muteLocal: false
};
/**
* Initializes a new {@code AbstractMuteRemoteParticipantDialog} instance.
* Initializes a new {@code AbstractMuteEveryoneDialog} instance.
*
* @param {Object} props - The read-only properties with which the new
* instance is to be initialized.
@@ -84,7 +84,7 @@ export default class AbstractMuteEveryoneDialog<P extends IProps> extends
*
* @returns {boolean}
*/
override _onSubmit() {
_onSubmit() {
const {
dispatch,
exclude
@@ -124,7 +124,7 @@ export function abstractMapStateToProps(state: IReduxState, ownProps: IProps) {
isEveryoneModerator: isEveryoneModerator(state)
} : {
title: t('dialog.muteEveryoneTitle'),
isAudioModerationEnabled: isEnabledFromState(MEDIA_TYPE.AUDIO, state),
isAudioModerationEnabled: isEnabledFromState(AVM_MEDIA_TYPE.AUDIO, state),
isModerationSupported: isSupported()(state),
isEveryoneModerator: isEveryoneModerator(state)
};

View File

@@ -0,0 +1,130 @@
import { IReduxState } from '../../app/types';
import { requestDisableDesktopModeration, requestEnableDesktopModeration } from '../../av-moderation/actions';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../../av-moderation/constants';
import { isEnabledFromState, isSupported } from '../../av-moderation/functions';
import { MEDIA_TYPE } from '../../base/media/constants';
import { getLocalParticipant, getParticipantDisplayName } from '../../base/participants/functions';
import { muteAllParticipants } from '../actions';
import AbstractMuteRemoteParticipantsDesktopDialog, {
type IProps as AbstractProps
} from './AbstractMuteRemoteParticipantsDesktopDialog';
/**
* The type of the React {@code Component} props of
* {@link AbstractMuteEveryonesDesktopDialog}.
*/
export interface IProps extends AbstractProps {
content?: string;
exclude: Array<string>;
isModerationEnabled?: boolean;
isModerationSupported?: boolean;
showAdvancedModerationToggle: boolean;
title: string;
}
interface IState {
content: string;
moderationEnabled?: boolean;
}
/**
*
* An abstract Component with the contents for a dialog that asks for confirmation
* from the user before stopping all remote participants' screen sharing.
*
* @augments AbstractMuteRemoteParticipantsDesktopDialog
*/
export default class AbstractMuteEveryonesDesktopDialog<P extends IProps>
extends AbstractMuteRemoteParticipantsDesktopDialog<P, IState> {
static defaultProps = {
exclude: [],
muteLocal: false
};
/**
* Initializes a new {@code AbstractMuteEveryonesDesktopDialog} instance.
*
* @param {Object} props - The read-only properties with which the new
* instance is to be initialized.
*/
constructor(props: P) {
super(props);
this.state = {
moderationEnabled: props.isModerationEnabled,
content: props.content || props.t(props.isModerationEnabled
? 'dialog.muteEveryonesDesktopDialogModerationOn' : 'dialog.muteEveryonesDesktopDialog'
)
};
// Bind event handlers so they are only bound once per instance.
this._onSubmit = this._onSubmit.bind(this);
this._onToggleModeration = this._onToggleModeration.bind(this);
}
/**
* Toggles advanced moderation switch.
*
* @returns {void}
*/
_onToggleModeration() {
this.setState(state => {
return {
moderationEnabled: !state.moderationEnabled,
content: this.props.t(state.moderationEnabled
? 'dialog.muteEveryonesDesktopDialog' : 'dialog.muteEveryonesDesktopDialogModerationOn'
)
};
});
}
/**
* Callback to be invoked when the value of this dialog is submitted.
*
* @returns {boolean}
*/
override _onSubmit() {
const {
dispatch,
exclude
} = this.props;
dispatch(muteAllParticipants(exclude, MEDIA_TYPE.SCREENSHARE));
if (this.state.moderationEnabled) {
dispatch(requestEnableDesktopModeration());
} else if (this.state.moderationEnabled !== undefined) {
dispatch(requestDisableDesktopModeration());
}
return true;
}
}
/**
* Maps (parts of) the Redux state to the associated {@code AbstractMuteEveryonesDesktopDialog}'s props.
*
* @param {IReduxState} state - The redux state.
* @param {Object} ownProps - The properties explicitly passed to the component.
* @returns {IProps}
*/
export function abstractMapStateToProps(state: IReduxState, ownProps: IProps) {
const { exclude = [], t } = ownProps;
const isModerationEnabled = isEnabledFromState(AVM_MEDIA_TYPE.DESKTOP, state);
const whom = exclude
// eslint-disable-next-line no-confusing-arrow
.map(id => id === getLocalParticipant(state)?.id
? t('dialog.muteEveryoneSelf')
: getParticipantDisplayName(state, id))
.join(', ');
return whom.length ? {
content: t('dialog.muteEveryoneElsesDesktopDialog'),
title: t('dialog.muteEveryoneElsesDesktopTitle', { whom })
} : {
title: t('dialog.muteEveryonesDesktopTitle'),
isModerationEnabled,
isModerationSupported: isSupported()(state)
};
}

View File

@@ -1,5 +1,6 @@
import { IReduxState } from '../../app/types';
import { requestDisableVideoModeration, requestEnableVideoModeration } from '../../av-moderation/actions';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../../av-moderation/constants';
import { isEnabledFromState, isSupported } from '../../av-moderation/functions';
import { MEDIA_TYPE } from '../../base/media/constants';
import { getLocalParticipant, getParticipantDisplayName } from '../../base/participants/functions';
@@ -109,7 +110,7 @@ export default class AbstractMuteEveryonesVideoDialog<P extends IProps>
*/
export function abstractMapStateToProps(state: IReduxState, ownProps: IProps) {
const { exclude = [], t } = ownProps;
const isVideoModerationEnabled = isEnabledFromState(MEDIA_TYPE.VIDEO, state);
const isVideoModerationEnabled = isEnabledFromState(AVM_MEDIA_TYPE.VIDEO, state);
const whom = exclude
// eslint-disable-next-line no-confusing-arrow

View File

@@ -1,58 +0,0 @@
import { Component } from 'react';
import { WithTranslation } from 'react-i18next';
import { IStore } from '../../app/types';
import { MEDIA_TYPE } from '../../base/media/constants';
import { muteRemote } from '../actions';
/**
* The type of the React {@code Component} props of
* {@link AbstractMuteRemoteParticipantDialog}.
*/
export interface IProps extends WithTranslation {
/**
* The Redux dispatch function.
*/
dispatch: IStore['dispatch'];
/**
* The ID of the remote participant to be muted.
*/
participantID: string;
}
/**
* Abstract dialog to confirm a remote participant mute action.
*
* @augments Component
*/
export default class AbstractMuteRemoteParticipantDialog<P extends IProps = IProps, State=void>
extends Component<P, State> {
/**
* Initializes a new {@code AbstractMuteRemoteParticipantDialog} instance.
*
* @param {Object} props - The read-only properties with which the new
* instance is to be initialized.
*/
constructor(props: P) {
super(props);
// Bind event handlers so they are only bound once per instance.
this._onSubmit = this._onSubmit.bind(this);
}
/**
* Handles the submit button action.
*
* @private
* @returns {boolean} - True (to note that the modal should be closed).
*/
_onSubmit() {
const { dispatch, participantID } = this.props;
dispatch(muteRemote(participantID, MEDIA_TYPE.AUDIO));
return true;
}
}

View File

@@ -0,0 +1,81 @@
import { Component } from 'react';
import { WithTranslation } from 'react-i18next';
import { IReduxState, IStore } from '../../app/types';
import { rejectParticipantDesktop } from '../../av-moderation/actions';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../../av-moderation/constants';
import { isEnabledFromState } from '../../av-moderation/functions';
import { MEDIA_TYPE } from '../../base/media/constants';
import { muteRemote } from '../actions';
/**
* The type of the React {@code Component} props of
* {@link AbstractMuteRemoteParticipantsDesktopDialog}.
*/
export interface IProps extends WithTranslation {
/**
* The Redux dispatch function.
*/
dispatch: IStore['dispatch'];
/**
* Whether or not desktop moderation is on.
*/
isModerationOn: boolean;
/**
* The ID of the remote participant to be muted.
*/
participantID: string;
}
/**
* Abstract dialog to confirm a remote participant desktop mute action.
*
* @augments Component
*/
export default class AbstractMuteRemoteParticipantsDesktopDialog<P extends IProps = IProps, State=any>
extends Component<P, State> {
/**
* Initializes a new {@code AbstractMuteRemoteParticipantsDesktopDialog} instance.
*
* @param {Object} props - The read-only properties with which the new
* instance is to be initialized.
*/
constructor(props: P) {
super(props);
// Bind event handlers so they are only bound once per instance.
this._onSubmit = this._onSubmit.bind(this);
}
/**
* Handles the submit button action.
*
* @private
* @returns {boolean} - True (to note that the modal should be closed).
*/
_onSubmit() {
const { dispatch, participantID } = this.props;
dispatch(muteRemote(participantID, MEDIA_TYPE.SCREENSHARE));
dispatch(rejectParticipantDesktop(participantID));
return true;
}
}
/**
* Maps (parts of) the redux state to the associated
* {@code AbstractDialogContainer}'s props.
*
* @param {IReduxState} state - The redux state.
* @private
* @returns {Object}
*/
export function abstractMapStateToProps(state: IReduxState) {
return {
isModerationOn: isEnabledFromState(AVM_MEDIA_TYPE.DESKTOP, state)
};
}

View File

@@ -3,6 +3,7 @@ import { WithTranslation } from 'react-i18next';
import { IReduxState, IStore } from '../../app/types';
import { rejectParticipantVideo } from '../../av-moderation/actions';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../../av-moderation/constants';
import { isEnabledFromState } from '../../av-moderation/functions';
import { MEDIA_TYPE } from '../../base/media/constants';
import { muteRemote } from '../actions';
@@ -75,6 +76,6 @@ export default class AbstractMuteRemoteParticipantsVideoDialog<P extends IProps
*/
export function abstractMapStateToProps(state: IReduxState) {
return {
isVideoModerationOn: isEnabledFromState(MEDIA_TYPE.VIDEO, state)
isVideoModerationOn: isEnabledFromState(AVM_MEDIA_TYPE.VIDEO, state)
};
}

View File

@@ -2,13 +2,12 @@ import { connect } from 'react-redux';
import { IReduxState } from '../../../app/types';
import { approveParticipant } from '../../../av-moderation/actions';
import { isSupported } from '../../../av-moderation/functions';
import { MEDIA_TYPE } from '../../../av-moderation/constants';
import { isForceMuted, isSupported } from '../../../av-moderation/functions';
import { translate } from '../../../base/i18n/functions';
import { IconMic, IconVideo } from '../../../base/icons/svg';
import { MEDIA_TYPE } from '../../../base/media/constants';
import { getParticipantById, isLocalParticipantModerator } from '../../../base/participants/functions';
import AbstractButton, { IProps as AbstractButtonProps } from '../../../base/toolbox/components/AbstractButton';
import { isForceMuted } from '../../../participants-pane/functions';
export interface IProps extends AbstractButtonProps {

View File

@@ -2,8 +2,8 @@ import React, { useCallback, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import { useDispatch } from 'react-redux';
import { approveParticipantAudio, approveParticipantVideo } from '../../../av-moderation/actions';
import { IconMic, IconVideo } from '../../../base/icons/svg';
import { approveParticipantAudio, approveParticipantDesktop, approveParticipantVideo } from '../../../av-moderation/actions';
import { IconMic, IconScreenshare, IconVideo } from '../../../base/icons/svg';
import { MEDIA_TYPE, MediaType } from '../../../base/media/constants';
import ContextMenuItem from '../../../base/ui/components/web/ContextMenuItem';
import { NOTIFY_CLICK_MODE } from '../../../toolbox/types';
@@ -36,6 +36,8 @@ const AskToUnmuteButton = ({
dispatch(approveParticipantAudio(participantID));
} else if (buttonType === MEDIA_TYPE.VIDEO) {
dispatch(approveParticipantVideo(participantID));
} else if (buttonType === MEDIA_TYPE.SCREENSHARE) {
dispatch(approveParticipantDesktop(participantID));
}
}, [ buttonType, dispatch, notifyClick, notifyMode, participantID ]);
@@ -44,6 +46,8 @@ const AskToUnmuteButton = ({
return t('participantsPane.actions.askUnmute');
} else if (buttonType === MEDIA_TYPE.VIDEO) {
return t('participantsPane.actions.allowVideo');
} else if (buttonType === MEDIA_TYPE.SCREENSHARE) {
return t('participantsPane.actions.allowDesktop');
}
return '';
@@ -54,6 +58,8 @@ const AskToUnmuteButton = ({
return IconMic;
} else if (buttonType === MEDIA_TYPE.VIDEO) {
return IconVideo;
} else if (buttonType === MEDIA_TYPE.SCREENSHARE) {
return IconScreenshare;
}
}, [ buttonType ]);

View File

@@ -0,0 +1,67 @@
import React, { useCallback, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';
import { createRemoteVideoMenuButtonEvent } from '../../../analytics/AnalyticsEvents';
import { sendAnalytics } from '../../../analytics/functions';
import { IReduxState } from '../../../app/types';
import { openDialog } from '../../../base/dialog/actions';
import { IconScreenshare } from '../../../base/icons/svg';
import { MEDIA_TYPE } from '../../../base/media/constants';
import { isRemoteTrackMuted } from '../../../base/tracks/functions.any';
import ContextMenuItem from '../../../base/ui/components/web/ContextMenuItem';
import { NOTIFY_CLICK_MODE } from '../../../toolbox/types';
import { IButtonProps } from '../../types';
import MuteRemoteParticipantsDesktopDialog from './MuteRemoteParticipantsDesktopDialog';
/**
* Implements a React {@link Component} which displays a button for disabling
* the desktop share of a participant in the conference.
*
* @returns {JSX.Element|null}
*/
const MuteDesktopButton = ({
notifyClick,
notifyMode,
participantID
}: IButtonProps): JSX.Element | null => {
const { t } = useTranslation();
const dispatch = useDispatch();
const tracks = useSelector((state: IReduxState) => state['features/base/tracks']);
// TODO: review if we shouldn't be using isParticipantMediaMuted.
const trackMuted = useMemo(
() => isRemoteTrackMuted(tracks, MEDIA_TYPE.SCREENSHARE, participantID),
[ participantID, tracks ]
);
const handleClick = useCallback(() => {
notifyClick?.();
if (notifyMode === NOTIFY_CLICK_MODE.PREVENT_AND_NOTIFY) {
return;
}
sendAnalytics(createRemoteVideoMenuButtonEvent(
'desktop.mute.button',
{
'participant_id': participantID
}));
dispatch(openDialog(MuteRemoteParticipantsDesktopDialog, { participantID }));
}, [ dispatch, notifyClick, notifyMode, participantID ]);
if (trackMuted) {
return null;
}
return (
<ContextMenuItem
accessibilityLabel = { t('participantsPane.actions.stopDesktop') }
className = 'mutedesktoplink'
icon = { IconScreenshare }
onClick = { handleClick }
text = { t('participantsPane.actions.stopDesktop') } />
);
};
export default MuteDesktopButton;

View File

@@ -0,0 +1,48 @@
import React, { useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import { useDispatch } from 'react-redux';
import { createToolbarEvent } from '../../../analytics/AnalyticsEvents';
import { sendAnalytics } from '../../../analytics/functions';
import { openDialog } from '../../../base/dialog/actions';
import { IconScreenshare } from '../../../base/icons/svg';
import ContextMenuItem from '../../../base/ui/components/web/ContextMenuItem';
import { NOTIFY_CLICK_MODE } from '../../../toolbox/types';
import { IButtonProps } from '../../types';
import MuteEveryonesDesktopDialog from './MuteEveryonesDesktopDialog';
/**
* Implements a React {@link Component} which displays a button for stopping the
* screen share of every participant in the conference except the one with the
* given participantID.
*
* @returns {JSX.Element}
*/
const MuteEveryoneElsesDesktopButton = ({
notifyClick,
notifyMode,
participantID
}: IButtonProps): JSX.Element => {
const { t } = useTranslation();
const dispatch = useDispatch();
const handleClick = useCallback(() => {
notifyClick?.();
if (notifyMode === NOTIFY_CLICK_MODE.PREVENT_AND_NOTIFY) {
return;
}
sendAnalytics(createToolbarEvent('mute.everyoneelsesdesktop.pressed'));
dispatch(openDialog(MuteEveryonesDesktopDialog, { exclude: [ participantID ] }));
}, [ dispatch, notifyClick, notifyMode, participantID ]);
return (
<ContextMenuItem
accessibilityLabel = { t('toolbar.accessibilityLabel.muteEveryoneElsesDesktopStream') }
icon = { IconScreenshare }
onClick = { handleClick }
text = { t('videothumbnail.domuteDesktopOfOthers') } />
);
};
export default MuteEveryoneElsesDesktopButton;

View File

@@ -0,0 +1,52 @@
import React from 'react';
import { connect } from 'react-redux';
import { translate } from '../../../base/i18n/functions';
import Dialog from '../../../base/ui/components/web/Dialog';
import Switch from '../../../base/ui/components/web/Switch';
import AbstractMuteEveryonesDesktopDialog, { type IProps, abstractMapStateToProps }
from '../AbstractMuteEveryonesDesktopDialog';
/**
* A React Component with the contents for a dialog that asks for confirmation
* from the user before stopping all remote participants' screen sharing.
*
* @augments AbstractMuteEveryonesDesktopDialog
*/
class MuteEveryonesDesktopDialog extends AbstractMuteEveryonesDesktopDialog<IProps> {
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement}
*/
override render() {
return (
<Dialog
ok = {{ translationKey: 'dialog.muteParticipantsDesktopButton' }}
onSubmit = { this._onSubmit }
title = { this.props.title }>
<div className = 'mute-dialog'>
{this.state.content}
{ this.props.isModerationSupported && this.props.exclude.length === 0 && (
<>
<div className = 'separator-line' />
<div className = 'control-row'>
<label htmlFor = 'moderation-switch'>
{this.props.t('dialog.moderationDesktopLabel')}
</label>
<Switch
checked = { !this.state.moderationEnabled }
id = 'moderation-switch'
onChange = { this._onToggleModeration } />
</div>
</>
)}
</div>
</Dialog>
);
}
}
export default translate(connect(abstractMapStateToProps)(MuteEveryonesDesktopDialog));

View File

@@ -0,0 +1,40 @@
import React from 'react';
import { connect } from 'react-redux';
import { translate } from '../../../base/i18n/functions';
import Dialog from '../../../base/ui/components/web/Dialog';
import AbstractMuteRemoteParticipantsDesktopDialog, {
abstractMapStateToProps
} from '../AbstractMuteRemoteParticipantsDesktopDialog';
/**
* A React Component with the contents for a dialog that asks for confirmation
* from the user before stopping a remote participant's screen share.
*
* @augments Component
*/
class MuteRemoteParticipantsDesktopDialog extends AbstractMuteRemoteParticipantsDesktopDialog {
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement}
*/
override render() {
return (
<Dialog
ok = {{ translationKey: 'dialog.muteParticipantsDesktopButton' }}
onSubmit = { this._onSubmit }
titleKey = 'dialog.muteParticipantsDesktopTitle'>
<div>
{this.props.t(this.props.isModerationOn
? 'dialog.muteParticipantsDesktopBodyModerationOn'
: 'dialog.muteParticipantsDesktopBody'
) }
</div>
</Dialog>
);
}
}
export default translate(connect(abstractMapStateToProps)(MuteRemoteParticipantsDesktopDialog));

View File

@@ -4,14 +4,15 @@ import { useDispatch, useSelector } from 'react-redux';
import { makeStyles } from 'tss-react/mui';
import { IReduxState, IStore } from '../../../app/types';
import { isSupported as isAvModerationSupported } from '../../../av-moderation/functions';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../../../av-moderation/constants';
import { isSupported as isAvModerationSupported, isForceMuted } from '../../../av-moderation/functions';
import Avatar from '../../../base/avatar/components/Avatar';
import { isIosMobileBrowser, isMobileBrowser } from '../../../base/environment/utils';
import { MEDIA_TYPE } from '../../../base/media/constants';
import { PARTICIPANT_ROLE } from '../../../base/participants/constants';
import { getLocalParticipant, hasRaisedHand, isPrivateChatEnabled } from '../../../base/participants/functions';
import { IParticipant } from '../../../base/participants/types';
import { isParticipantAudioMuted, isParticipantVideoMuted } from '../../../base/tracks/functions.any';
import { isParticipantAudioMuted } from '../../../base/tracks/functions.any';
import ContextMenu from '../../../base/ui/components/web/ContextMenu';
import ContextMenuItemGroup from '../../../base/ui/components/web/ContextMenuItemGroup';
import { getBreakoutRooms, getCurrentRoomId, isInBreakoutRoom } from '../../../breakout-rooms/functions';
@@ -20,7 +21,7 @@ import { displayVerification } from '../../../e2ee/functions';
import { setVolume } from '../../../filmstrip/actions.web';
import { isStageFilmstripAvailable } from '../../../filmstrip/functions.web';
import { QUICK_ACTION_BUTTON } from '../../../participants-pane/constants';
import { getQuickActionButtonType, isForceMuted } from '../../../participants-pane/functions';
import { getQuickActionButtonType } from '../../../participants-pane/functions';
import { requestRemoteControl, stopController } from '../../../remote-control/actions';
import { getParticipantMenuButtonsWithNotifyClick, showOverflowDrawer } from '../../../toolbox/functions.web';
import { NOTIFY_CLICK_MODE } from '../../../toolbox/types';
@@ -34,7 +35,9 @@ import GrantModeratorButton from './GrantModeratorButton';
import KickButton from './KickButton';
import LowerHandButton from './LowerHandButton';
import MuteButton from './MuteButton';
import MuteDesktopButton from './MuteDesktopButton';
import MuteEveryoneElseButton from './MuteEveryoneElseButton';
import MuteEveryoneElsesDesktopButton from './MuteEveryoneElsesDesktopButton';
import MuteEveryoneElsesVideoButton from './MuteEveryoneElsesVideoButton';
import MuteVideoButton from './MuteVideoButton';
import PrivateMessageMenuButton from './PrivateMessageMenuButton';
@@ -134,9 +137,10 @@ const ParticipantContextMenu = ({
const localParticipant = useSelector(getLocalParticipant);
const _isModerator = Boolean(localParticipant?.role === PARTICIPANT_ROLE.MODERATOR);
const _isVideoForceMuted = useSelector<IReduxState>(state =>
isForceMuted(participant, MEDIA_TYPE.VIDEO, state));
isForceMuted(participant, AVM_MEDIA_TYPE.VIDEO, state));
const _isDesktopForceMuted = useSelector<IReduxState>(state =>
isForceMuted(participant, AVM_MEDIA_TYPE.DESKTOP, state));
const _isAudioMuted = useSelector((state: IReduxState) => isParticipantAudioMuted(participant, state));
const _isVideoMuted = useSelector((state: IReduxState) => isParticipantVideoMuted(participant, state));
const _overflowDrawer: boolean = useSelector(showOverflowDrawer);
const { remoteVideoMenu = {}, disableRemoteMute, startSilent, customParticipantMenuButtons }
= useSelector((state: IReduxState) => state['features/base/config']);
@@ -190,7 +194,7 @@ const ParticipantContextMenu = ({
() => !_overflowDrawer && !thumbnailMenu,
[ _overflowDrawer, thumbnailMenu ]);
const quickActionButtonType = useSelector((state: IReduxState) =>
getQuickActionButtonType(participant, _isAudioMuted, _isVideoMuted, state));
getQuickActionButtonType(participant, state));
const buttons: JSX.Element[] = [];
const buttons2: JSX.Element[] = [];
@@ -229,6 +233,13 @@ const ParticipantContextMenu = ({
buttonType = { MEDIA_TYPE.VIDEO } />
);
}
if (_isDesktopForceMuted
&& !(isClickedFromParticipantPane && quickActionButtonType === QUICK_ACTION_BUTTON.ALLOW_DESKTOP)) {
buttons.push(<AskToUnmuteButton
{ ...getButtonProps(BUTTONS.ALLOW_DESKTOP) }
buttonType = { MEDIA_TYPE.SCREENSHARE } />
);
}
}
if (!disableRemoteMute && !participant.isSilent) {
@@ -240,6 +251,10 @@ const ParticipantContextMenu = ({
buttons.push(<MuteVideoButton { ...getButtonProps(BUTTONS.MUTE_VIDEO) } />);
}
buttons.push(<MuteEveryoneElsesVideoButton { ...getButtonProps(BUTTONS.MUTE_OTHERS_VIDEO) } />);
if (!(isClickedFromParticipantPane && quickActionButtonType === QUICK_ACTION_BUTTON.STOP_DESKTOP)) {
buttons.push(<MuteDesktopButton { ...getButtonProps(BUTTONS.MUTE_DESKTOP) } />);
}
buttons.push(<MuteEveryoneElsesDesktopButton { ...getButtonProps(BUTTONS.MUTE_OTHERS_DESKTOP) } />);
}
if (raisedHands) {

View File

@@ -17,6 +17,7 @@ export const VOLUME_SLIDER_SCALE = 100;
* Participant context menu button keys.
*/
export const PARTICIPANT_MENU_BUTTONS = {
ALLOW_DESKTOP: 'allow-desktop',
ALLOW_VIDEO: 'allow-video',
ASK_UNMUTE: 'ask-unmute',
CONN_STATUS: 'conn-status',
@@ -27,7 +28,9 @@ export const PARTICIPANT_MENU_BUTTONS = {
KICK: 'kick',
LOWER_PARTICIPANT_HAND: 'lower-participant-hand',
MUTE: 'mute',
MUTE_DESKTOP: 'mute-desktop',
MUTE_OTHERS: 'mute-others',
MUTE_OTHERS_DESKTOP: 'mute-others-desktop',
MUTE_OTHERS_VIDEO: 'mute-others-video',
MUTE_VIDEO: 'mute-video',
PIN_TO_STAGE: 'pinToStage',