feat(unmute/track creation): pending indicator.

This commit is contained in:
Hristo Terezov
2023-05-18 14:16:37 -05:00
parent 06e86a2f3e
commit bb7ae777b0
20 changed files with 412 additions and 43 deletions

View File

@@ -87,6 +87,7 @@ import {
} from './react/features/base/lib-jitsi-meet';
import { isFatalJitsiConnectionError } from './react/features/base/lib-jitsi-meet/functions';
import {
gumPending,
setAudioAvailable,
setAudioMuted,
setAudioUnmutePermissions,
@@ -100,6 +101,7 @@ import {
getStartWithVideoMuted,
isVideoMutedByUser
} from './react/features/base/media/functions';
import { IGUMPendingState } from './react/features/base/media/types';
import {
dominantSpeakerChanged,
localParticipantAudioLevelChanged,
@@ -493,6 +495,21 @@ function disconnect() {
return connection.disconnect().then(onDisconnected, onDisconnected);
}
/**
 * Resets the GUM pending state to NONE for the media types whose tracks were not created.
 *
 * NOTE: Some of the media types we reset here may not have actually failed — they may simply never
 * have been requested. That is harmless, because their current GUM pending state is NONE anyway.
 *
 * @param {Array<JitsiLocalTrack>} tracks - The tracks that have been created.
 * @returns {void}
 */
function setGUMPendingStateOnFailedTracks(tracks) {
    const createdTypes = new Set(tracks.map(track => track.getType()));
    const failedTypes = [ MEDIA_TYPE.AUDIO, MEDIA_TYPE.VIDEO ].filter(type => !createdTypes.has(type));

    APP.store.dispatch(gumPending(failedTypes, IGUMPendingState.NONE));
}
/**
* Handles CONNECTION_FAILED events from lib-jitsi-meet.
*
@@ -601,6 +618,7 @@ export default {
return [];
});
} else if (requestedAudio || requestedVideo) {
APP.store.dispatch(gumPending(initialDevices, IGUMPendingState.PENDING_UNMUTE));
tryCreateLocalTracks = createLocalTracksF({
devices: initialDevices,
timeout,
@@ -863,6 +881,8 @@ export default {
this._initDeviceList(true);
if (isPrejoinPageVisible(state)) {
APP.store.dispatch(gumPending([ MEDIA_TYPE.AUDIO, MEDIA_TYPE.VIDEO ], IGUMPendingState.NONE));
return APP.store.dispatch(initPrejoin(localTracks, errors));
}
@@ -870,14 +890,22 @@ export default {
this._displayErrorsForCreateInitialLocalTracks(errors);
return this._setLocalAudioVideoStreams(handleInitialTracks(initialOptions, localTracks));
const tracks = handleInitialTracks(initialOptions, localTracks);
setGUMPendingStateOnFailedTracks(tracks);
return this._setLocalAudioVideoStreams(tracks);
}
const [ tracks, con ] = await this.createInitialLocalTracksAndConnect(roomName, initialOptions);
this._initDeviceList(true);
return this.startConference(con, handleInitialTracks(initialOptions, tracks));
const filteredTracks = handleInitialTracks(initialOptions, tracks);
setGUMPendingStateOnFailedTracks(filteredTracks);
return this.startConference(con, filteredTracks);
},
/**
@@ -1000,6 +1028,7 @@ export default {
showUI && APP.store.dispatch(notifyMicError(error));
};
APP.store.dispatch(gumPending([ MEDIA_TYPE.AUDIO ], IGUMPendingState.PENDING_UNMUTE));
createLocalTracksF({ devices: [ 'audio' ] })
.then(([ audioTrack ]) => audioTrack)
.catch(error => {
@@ -1011,7 +1040,10 @@ export default {
.then(async audioTrack => {
await this._maybeApplyAudioMixerEffect(audioTrack);
this.useAudioStream(audioTrack);
return this.useAudioStream(audioTrack);
})
.finally(() => {
APP.store.dispatch(gumPending([ MEDIA_TYPE.AUDIO ], IGUMPendingState.NONE));
});
} else {
muteLocalAudio(mute);
@@ -1091,6 +1123,8 @@ export default {
this.isCreatingLocalTrack = true;
APP.store.dispatch(gumPending([ MEDIA_TYPE.VIDEO ], IGUMPendingState.PENDING_UNMUTE));
// Try to create local video if there wasn't any.
// This handles the case when user joined with no video
// (dismissed screen sharing screen or in audio only mode), but
@@ -1115,6 +1149,7 @@ export default {
})
.finally(() => {
this.isCreatingLocalTrack = false;
APP.store.dispatch(gumPending([ MEDIA_TYPE.VIDEO ], IGUMPendingState.NONE));
});
} else {
// FIXME show error dialog if it fails (should be handled by react)
@@ -1427,11 +1462,16 @@ export default {
* @private
*/
_setLocalAudioVideoStreams(tracks = []) {
const { dispatch } = APP.store;
const pendingGUMDevicesToRemove = [];
const promises = tracks.map(track => {
if (track.isAudioTrack()) {
pendingGUMDevicesToRemove.push(MEDIA_TYPE.AUDIO);
return this.useAudioStream(track);
} else if (track.isVideoTrack()) {
logger.debug(`_setLocalAudioVideoStreams is calling useVideoStream with track: ${track}`);
pendingGUMDevicesToRemove.push(MEDIA_TYPE.VIDEO);
return this.useVideoStream(track);
}
@@ -1443,6 +1483,10 @@ export default {
});
return Promise.allSettled(promises).then(() => {
if (pendingGUMDevicesToRemove.length > 0) {
dispatch(gumPending(pendingGUMDevicesToRemove, IGUMPendingState.NONE));
}
this._localTracksInitialized = true;
logger.log(`Initialized with ${tracks.length} local tracks`);
});

View File

@@ -1151,6 +1151,7 @@
"muteEveryoneElse": "Mute everyone else",
"muteEveryoneElsesVideoStream": "Stop everyone else's video",
"muteEveryonesVideoStream": "Stop everyone's video",
"muteGUMPending": "Connecting your microphone",
"noiseSuppression": "Noise suppression",
"openChat": "Open chat",
"participants": "Open participants pane",
@@ -1184,6 +1185,7 @@
"unmute": "Unmute",
"videoblur": "Toggle video blur",
"videomute": "Stop camera",
"videomuteGUMPending": "Connecting your camera",
"videounmute": "Start camera"
},
"addPeople": "Add people to your call",
@@ -1234,6 +1236,7 @@
"mute": "Mute",
"muteEveryone": "Mute everyone",
"muteEveryonesVideo": "Disable everyone's camera",
"muteGUMPending": "Connecting your microphone",
"noAudioSignalDesc": "If you did not purposely mute it from system settings or hardware, consider switching the device.",
"noAudioSignalDescSuggestion": "If you did not purposely mute it from system settings or hardware, consider switching to the suggested device.",
"noAudioSignalDialInDesc": "You can also dial-in using:",
@@ -1279,6 +1282,7 @@
"unmute": "Unmute",
"videoSettings": "Video settings",
"videomute": "Stop camera",
"videomuteGUMPending": "Connecting your camera",
"videounmute": "Start camera"
},
"transcribing": {

View File

@@ -1,4 +1,15 @@
/**
 * The type of (redux) action which stores the getUserMedia (GUM) pending state for the unmute and
 * initial track creation flows.
 *
 * {
 *     type: GUM_PENDING,
 *     mediaTypes: Array<MediaType>,
 *     status: IGUMPendingState
 * }
 */
export const GUM_PENDING = 'GUM_PENDING';
/**
* The type of (redux) action to adjust the availability of the local audio.
*

View File

@@ -4,6 +4,7 @@ import { shouldShowModeratedNotification } from '../../av-moderation/functions';
import { isModerationNotificationDisplayed } from '../../notifications/functions';
import {
GUM_PENDING,
SET_AUDIO_AVAILABLE,
SET_AUDIO_MUTED,
SET_AUDIO_UNMUTE_PERMISSIONS,
@@ -17,9 +18,11 @@ import {
} from './actionTypes';
import {
MEDIA_TYPE,
MediaType,
SCREENSHARE_MUTISM_AUTHORITY,
VIDEO_MUTISM_AUTHORITY
} from './constants';
import { IGUMPendingState } from './types';
/**
* Action to adjust the availability of the local audio.
@@ -237,3 +240,22 @@ export function toggleCameraFacingMode() {
type: TOGGLE_CAMERA_FACING_MODE
};
}
/**
 * Creates a (redux) action which sets the GUM pending status for the unmute and initial track
 * creation operations.
 *
 * @param {Array<MediaType>} mediaTypes - An array with the media types that GUM is called with.
 * @param {IGUMPendingState} status - The GUM status.
 * @returns {{
 *     type: GUM_PENDING,
 *     mediaTypes: Array<MediaType>,
 *     status: IGUMPendingState
 * }}
 */
export function gumPending(mediaTypes: Array<MediaType>, status: IGUMPendingState) {
    return {
        type: GUM_PENDING,
        mediaTypes,
        status
    };
}

View File

@@ -297,7 +297,7 @@ function _setRoom({ dispatch, getState }: IStore, next: Function, action: AnyAct
* @private
* @returns {void}
*/
function _syncTrackMutedState({ getState }: IStore, track: ITrack) {
function _syncTrackMutedState({ getState, dispatch }: IStore, track: ITrack) {
const state = getState()['features/base/media'];
const mediaType = track.mediaType;
const muted = Boolean(state[mediaType].muted);
@@ -312,6 +312,6 @@ function _syncTrackMutedState({ getState }: IStore, track: ITrack) {
logger.log(`Sync ${mediaType} track muted state to ${muted ? 'muted' : 'unmuted'}`);
track.muted = muted;
setTrackMuted(track.jitsiTrack, muted, state);
setTrackMuted(track.jitsiTrack, muted, state, dispatch);
}
}

View File

@@ -5,6 +5,7 @@ import ReducerRegistry from '../redux/ReducerRegistry';
import { TRACK_REMOVED } from '../tracks/actionTypes';
import {
GUM_PENDING,
SET_AUDIO_AVAILABLE,
SET_AUDIO_MUTED,
SET_AUDIO_UNMUTE_PERMISSIONS,
@@ -16,7 +17,8 @@ import {
STORE_VIDEO_TRANSFORM,
TOGGLE_CAMERA_FACING_MODE
} from './actionTypes';
import { CAMERA_FACING_MODE, SCREENSHARE_MUTISM_AUTHORITY } from './constants';
import { CAMERA_FACING_MODE, MEDIA_TYPE, SCREENSHARE_MUTISM_AUTHORITY } from './constants';
import { IGUMPendingState } from './types';
/**
* Media state object for local audio.
@@ -36,6 +38,7 @@ import { CAMERA_FACING_MODE, SCREENSHARE_MUTISM_AUTHORITY } from './constants';
*/
export const _AUDIO_INITIAL_MEDIA_STATE = {
available: true,
gumPending: IGUMPendingState.NONE,
unmuteBlocked: false,
muted: false
};
@@ -57,6 +60,16 @@ function _audio(state: IAudioState = _AUDIO_INITIAL_MEDIA_STATE, action: AnyActi
available: action.available
};
case GUM_PENDING:
if (action.mediaTypes.includes(MEDIA_TYPE.AUDIO)) {
return {
...state,
gumPending: action.status
};
}
return state;
case SET_AUDIO_MUTED:
return {
...state,
@@ -141,6 +154,7 @@ function _screenshare(state: IScreenshareState = _SCREENSHARE_INITIAL_MEDIA_STAT
*/
export const _VIDEO_INITIAL_MEDIA_STATE = {
available: true,
gumPending: IGUMPendingState.NONE,
unmuteBlocked: false,
facingMode: CAMERA_FACING_MODE.USER,
muted: 0,
@@ -167,6 +181,16 @@ function _video(state: IVideoState = _VIDEO_INITIAL_MEDIA_STATE, action: any) {
case CONFERENCE_LEFT:
return _clearAllVideoTransforms(state);
case GUM_PENDING:
if (action.mediaTypes.includes(MEDIA_TYPE.VIDEO)) {
return {
...state,
gumPending: action.status
};
}
return state;
case SET_CAMERA_FACING_MODE:
return {
...state,
@@ -218,6 +242,7 @@ function _video(state: IVideoState = _VIDEO_INITIAL_MEDIA_STATE, action: any) {
interface IAudioState {
available: boolean;
gumPending: IGUMPendingState;
muted: boolean;
unmuteBlocked: boolean;
}
@@ -231,6 +256,7 @@ interface IScreenshareState {
interface IVideoState {
available: boolean;
facingMode: string;
gumPending: IGUMPendingState;
muted: number;
transforms: Object;
unmuteBlocked: boolean;

View File

@@ -0,0 +1,4 @@
/**
 * The possible states of a pending getUserMedia (GUM) request for a media type.
 */
export enum IGUMPendingState {

    /**
     * A GUM request is currently in progress (triggered by an unmute or initial track creation).
     */
    PENDING_UNMUTE = 1,

    /**
     * No GUM request is pending.
     */
    NONE = 2
}

View File

@@ -1,4 +1,4 @@
import React, { Component, ReactNode } from 'react';
import React, { Component, ReactElement, ReactNode } from 'react';
import { WithTranslation } from 'react-i18next';
import { GestureResponderEvent } from 'react-native';
@@ -217,7 +217,7 @@ export default class AbstractButton<P extends IProps, S=any> extends Component<P
* @protected
* @returns {ReactElement|null}
*/
_getElementAfter() {
_getElementAfter(): ReactElement | null {
return null;
}

View File

@@ -139,6 +139,7 @@ export function createLocalTracksA(options: ITrackOptions = {}) {
dispatch,
getState
};
const promises = [];
// The following executes on React Native only at the time of this
// writing. The effort to port Web's createInitialLocalTracksAndConnect
@@ -196,6 +197,8 @@ export function createLocalTracksA(options: ITrackOptions = {}) {
reason,
device)));
promises.push(gumProcess.catch(() => undefined));
/**
* Cancels the {@code getUserMedia} process represented by this
* {@code Promise}.
@@ -217,6 +220,8 @@ export function createLocalTracksA(options: ITrackOptions = {}) {
}
});
}
return Promise.all(promises);
};
}

View File

@@ -1,10 +1,12 @@
import { IReduxState } from '../../app/types';
import { IReduxState, IStore } from '../../app/types';
import {
getMultipleVideoSendingSupportFeatureFlag
} from '../config/functions.any';
import { JitsiTrackErrors, browser } from '../lib-jitsi-meet';
import { gumPending } from '../media/actions';
import { MEDIA_TYPE, MediaType, VIDEO_TYPE } from '../media/constants';
import { IMediaState } from '../media/reducer';
import { IGUMPendingState } from '../media/types';
import {
getVirtualScreenshareParticipantOwnerId,
isScreenShareParticipant
@@ -351,6 +353,26 @@ export function isUserInteractionRequiredForUnmute(state: IReduxState) {
&& !state['features/base/user-interaction'].interacted;
}
/**
 * Sets the GUM pending state for the passed track operation (mute/unmute) and media type.
 * NOTE: We need this only for web.
 *
 * @param {IGUMPendingState} status - The new GUM pending status.
 * @param {MediaType} mediaType - The media type related to the operation (audio or video).
 * @param {boolean} muted - True if the operation is mute and false for unmute.
 * @param {Function} dispatch - The dispatch method.
 * @returns {void}
 */
export function _setGUMPendingState(
        status: IGUMPendingState,
        mediaType: MediaType,
        muted: boolean,
        dispatch?: IStore['dispatch']) {
    // Only unmute triggers GUM, and only web (where APP exists) tracks this state.
    if (muted || !dispatch) {
        return;
    }

    if (typeof APP === 'undefined') {
        return;
    }

    dispatch(gumPending([ mediaType ], status));
}
/**
* Mutes or unmutes a specific {@code JitsiLocalTrack}. If the muted state of the specified {@code track} is already in
* accord with the specified {@code muted} value, then does nothing.
@@ -358,9 +380,11 @@ export function isUserInteractionRequiredForUnmute(state: IReduxState) {
* @param {JitsiLocalTrack} track - The {@code JitsiLocalTrack} to mute or unmute.
* @param {boolean} muted - If the specified {@code track} is to be muted, then {@code true}; otherwise, {@code false}.
* @param {Object} state - The redux state.
* @param {Function} dispatch - The dispatch method.
* @returns {Promise}
*/
export function setTrackMuted(track: any, muted: boolean, state: IReduxState | IMediaState) {
export function setTrackMuted(track: any, muted: boolean, state: IReduxState | IMediaState,
dispatch?: IStore['dispatch']) {
muted = Boolean(muted); // eslint-disable-line no-param-reassign
// Ignore the check for desktop track muted operation. When the screenshare is terminated by clicking on the
@@ -372,8 +396,18 @@ export function setTrackMuted(track: any, muted: boolean, state: IReduxState | I
}
const f = muted ? 'mute' : 'unmute';
const mediaType = track.getType();
_setGUMPendingState(IGUMPendingState.PENDING_UNMUTE, mediaType, muted, dispatch);
return track[f]().then((result: any) => {
_setGUMPendingState(IGUMPendingState.NONE, mediaType, muted, dispatch);
return result;
})
.catch((error: Error) => {
_setGUMPendingState(IGUMPendingState.NONE, mediaType, muted, dispatch);
return track[f]().catch((error: Error) => {
// Track might be already disposed so ignore such an error.
if (error.name !== JitsiTrackErrors.TRACK_IS_DISPOSED) {
logger.error(`set track ${f} failed`, error);

View File

@@ -2,9 +2,10 @@ import { IStore } from '../../app/types';
import { IStateful } from '../app/types';
import { isMobileBrowser } from '../environment/utils';
import JitsiMeetJS, { JitsiTrackErrors, browser } from '../lib-jitsi-meet';
import { setAudioMuted } from '../media/actions';
import { gumPending, setAudioMuted } from '../media/actions';
import { MEDIA_TYPE } from '../media/constants';
import { getStartWithAudioMuted } from '../media/functions';
import { IGUMPendingState } from '../media/types';
import { toState } from '../redux/functions';
import {
getUserSelectedCameraDeviceId,
@@ -105,7 +106,7 @@ export function createLocalTracksF(options: ITrackOptions = {}, store?: IStore)
*/
export function createPrejoinTracks() {
const errors: any = {};
const initialDevices = [ 'audio' ];
const initialDevices = [ MEDIA_TYPE.AUDIO ];
const requestedAudio = true;
let requestedVideo = false;
const { startAudioOnly, startWithVideoMuted } = APP.store.getState()['features/base/settings'];
@@ -125,11 +126,14 @@ export function createPrejoinTracks() {
}
if (!startWithVideoMuted && !startAudioOnly) {
initialDevices.push('video');
initialDevices.push(MEDIA_TYPE.VIDEO);
requestedVideo = true;
}
let tryCreateLocalTracks: any = Promise.resolve([]);
const { dispatch } = APP.store;
dispatch(gumPending(initialDevices, IGUMPendingState.PENDING_UNMUTE));
if (requestedAudio || requestedVideo) {
tryCreateLocalTracks = createLocalTracksF({
@@ -188,6 +192,9 @@ export function createPrejoinTracks() {
}
return tracks;
})
.finally(() => {
dispatch(gumPending(initialDevices, IGUMPendingState.NONE));
});
}

View File

@@ -12,7 +12,7 @@ import {
SET_VIDEO_MUTED,
TOGGLE_CAMERA_FACING_MODE
} from '../media/actionTypes';
import { toggleCameraFacingMode } from '../media/actions';
import { gumPending, toggleCameraFacingMode } from '../media/actions';
import {
CAMERA_FACING_MODE,
MEDIA_TYPE,
@@ -20,6 +20,7 @@ import {
SCREENSHARE_MUTISM_AUTHORITY,
VIDEO_MUTISM_AUTHORITY
} from '../media/constants';
import { IGUMPendingState } from '../media/types';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import StateListenerRegistry from '../redux/StateListenerRegistry';
@@ -210,11 +211,16 @@ async function _setMuted(store: IStore, { ensureTrack, authority, muted }: {
if (jitsiTrack && (
jitsiTrack.videoType !== 'desktop' || isAudioOnly || getMultipleVideoSendingSupportFeatureFlag(state))
) {
setTrackMuted(jitsiTrack, muted, state).catch(() => dispatch(trackMuteUnmuteFailed(localTrack, muted)));
setTrackMuted(jitsiTrack, muted, state, dispatch)
.catch(() => dispatch(trackMuteUnmuteFailed(localTrack, muted)));
}
} else if (!muted && ensureTrack && (typeof APP === 'undefined' || isPrejoinPageVisible(state))) {
typeof APP !== 'undefined' && dispatch(gumPending([ mediaType ], IGUMPendingState.PENDING_UNMUTE));
// FIXME: This only runs on mobile now because web has its own way of
// creating local tracks. Adjust the check once they are unified.
dispatch(createLocalTracksA({ devices: [ mediaType ] }));
dispatch(createLocalTracksA({ devices: [ mediaType ] })).then(() => {
typeof APP !== 'undefined' && dispatch(gumPending([ mediaType ], IGUMPendingState.NONE));
});
}
}

View File

@@ -3,6 +3,7 @@ import { keyframes } from 'tss-react';
import { makeStyles } from 'tss-react/mui';
interface IProps {
color?: string;
size?: 'small' | 'medium' | 'large';
}
@@ -12,7 +13,9 @@ const SIZE = {
large: 48
};
const useStyles = makeStyles()(() => {
const DEFAULT_COLOR = '#E6EDFA';
const useStyles = makeStyles<{ color?: string; }>()((_, { color }) => {
return {
container: {
verticalAlign: 'middle',
@@ -33,7 +36,7 @@ const useStyles = makeStyles()(() => {
circle: {
fill: 'none',
stroke: '#E6EDFA',
stroke: color,
strokeWidth: 1.5,
strokeLinecap: 'round',
strokeDasharray: 60,
@@ -53,8 +56,8 @@ const useStyles = makeStyles()(() => {
};
});
const Spinner = ({ size = 'medium' }: IProps) => {
const { classes } = useStyles();
const Spinner = ({ color = DEFAULT_COLOR, size = 'medium' }: IProps) => {
const { classes } = useStyles({ color });
return (
<svg

View File

@@ -1,3 +1,5 @@
import { ClassNameMap, withStyles } from '@mui/styles';
import React, { ReactElement } from 'react';
import { connect } from 'react-redux';
import { ACTION_SHORTCUT_TRIGGERED, AUDIO_MUTE, createShortcutEvent } from '../../analytics/AnalyticsEvents';
@@ -7,27 +9,51 @@ import { AUDIO_MUTE_BUTTON_ENABLED } from '../../base/flags/constants';
import { getFeatureFlag } from '../../base/flags/functions';
import { translate } from '../../base/i18n/functions';
import { MEDIA_TYPE } from '../../base/media/constants';
import { IGUMPendingState } from '../../base/media/types';
import AbstractAudioMuteButton from '../../base/toolbox/components/AbstractAudioMuteButton';
import AbstractButton, { IProps as AbstractButtonProps } from '../../base/toolbox/components/AbstractButton';
import { isLocalTrackMuted } from '../../base/tracks/functions';
import Spinner from '../../base/ui/components/web/Spinner';
import { registerShortcut, unregisterShortcut } from '../../keyboard-shortcuts/actions';
import { muteLocal } from '../../video-menu/actions';
import { SPINNER_COLOR } from '../constants';
import { isAudioMuteButtonDisabled } from '../functions';
/**
 * Creates the @mui/styles definitions for this button: a container anchoring the pending
 * spinner to the bottom-right corner of the button.
 *
 * @returns {Object} The styles object.
 */
const styles = () => ({
    pendingContainer: {
        position: 'absolute' as const,
        bottom: '3px',
        right: '3px'
    }
});
/**
* The type of the React {@code Component} props of {@link AudioMuteButton}.
*/
interface IProps extends AbstractButtonProps {
/**
* Whether audio is currently muted or not.
*/
_audioMuted: boolean;
/**
* Whether the button is disabled.
*/
_disabled: boolean;
* Whether audio is currently muted or not.
*/
_audioMuted: boolean;
/**
* Whether the button is disabled.
*/
_disabled: boolean;
/**
* The gumPending state from redux.
*/
_gumPending: IGUMPendingState;
/**
* The @mui/styles classes.
*/
classes: ClassNameMap<string>;
}
/**
@@ -53,6 +79,7 @@ class AudioMuteButton extends AbstractAudioMuteButton<IProps> {
// Bind event handlers so they are only bound once per instance.
this._onKeyboardShortcut = this._onKeyboardShortcut.bind(this);
this._getTooltip = this._getLabel;
}
/**
@@ -87,6 +114,43 @@ class AudioMuteButton extends AbstractAudioMuteButton<IProps> {
this.props.dispatch(unregisterShortcut('M'));
}
/**
 * Gets the current accessibility label, taking the toggled and GUM pending state into account.
 * While a GUM request is pending, a dedicated "connecting microphone" label is used.
 *
 * The accessibility label is not visible in the UI, it is meant to be used by assistive
 * technologies, mainly screen readers.
 *
 * @private
 * @returns {string}
 */
_getAccessibilityLabel() {
    if (this.props._gumPending !== IGUMPendingState.NONE) {
        return 'toolbar.accessibilityLabel.muteGUMPending';
    }

    return super._getAccessibilityLabel();
}
/**
 * Gets the current label, taking the toggled and GUM pending state into account. While a GUM
 * request is pending, a dedicated "connecting microphone" label is used.
 *
 * @private
 * @returns {string}
 */
_getLabel() {
    if (this.props._gumPending !== IGUMPendingState.NONE) {
        return 'toolbar.muteGUMPending';
    }

    return super._getLabel();
}
/**
* Indicates if audio is currently muted or not.
*
@@ -95,7 +159,13 @@ class AudioMuteButton extends AbstractAudioMuteButton<IProps> {
* @returns {boolean}
*/
_isAudioMuted() {
return this.props._audioMuted;
const { _audioMuted, _gumPending } = this.props;
if (_gumPending === IGUMPendingState.PENDING_UNMUTE) {
return false;
}
return _audioMuted;
}
/**
@@ -120,6 +190,24 @@ class AudioMuteButton extends AbstractAudioMuteButton<IProps> {
AbstractButton.prototype._onClick.call(this);
}
/**
 * Renders a small spinner next to the button while a GUM request is pending; otherwise renders
 * nothing.
 *
 * @returns {ReactElement | null}
 */
_getElementAfter(): ReactElement | null {
    const { _gumPending, classes } = this.props;

    if (_gumPending === IGUMPendingState.NONE) {
        return null;
    }

    return (
        <div className = { classes.pendingContainer }>
            <Spinner
                color = { SPINNER_COLOR }
                size = 'small' />
        </div>
    );
}
/**
* Changes the muted state.
*
@@ -156,12 +244,14 @@ function _mapStateToProps(state: IReduxState) {
const _audioMuted = isLocalTrackMuted(state['features/base/tracks'], MEDIA_TYPE.AUDIO);
const _disabled = isAudioMuteButtonDisabled(state);
const enabledFlag = getFeatureFlag(state, AUDIO_MUTE_BUTTON_ENABLED, true);
const { gumPending } = state['features/base/media'].audio;
return {
_audioMuted,
_disabled,
_gumPending: gumPending,
visible: enabledFlag
};
}
export default translate(connect(_mapStateToProps)(AudioMuteButton));
export default withStyles(styles)(translate(connect(_mapStateToProps)(AudioMuteButton)));

View File

@@ -1,3 +1,5 @@
import { ClassNameMap, withStyles } from '@mui/styles';
import React, { ReactElement } from 'react';
import { connect } from 'react-redux';
import { ACTION_SHORTCUT_TRIGGERED, VIDEO_MUTE, createShortcutEvent } from '../../analytics/AnalyticsEvents';
@@ -7,18 +9,35 @@ import { VIDEO_MUTE_BUTTON_ENABLED } from '../../base/flags/constants';
import { getFeatureFlag } from '../../base/flags/functions';
import { translate } from '../../base/i18n/functions';
import { MEDIA_TYPE } from '../../base/media/constants';
import { IGUMPendingState } from '../../base/media/types';
import AbstractButton, { IProps as AbstractButtonProps } from '../../base/toolbox/components/AbstractButton';
import AbstractVideoMuteButton from '../../base/toolbox/components/AbstractVideoMuteButton';
import { isLocalTrackMuted } from '../../base/tracks/functions';
import Spinner from '../../base/ui/components/web/Spinner';
import { registerShortcut, unregisterShortcut } from '../../keyboard-shortcuts/actions';
import { handleToggleVideoMuted } from '../actions.any';
import { SPINNER_COLOR } from '../constants';
import { isVideoMuteButtonDisabled } from '../functions';
/**
 * Creates the @mui/styles definitions for this button: a container anchoring the pending
 * spinner to the bottom-right corner of the button.
 *
 * @returns {Object} The styles object.
 */
const styles = () => ({
    pendingContainer: {
        position: 'absolute' as const,
        bottom: '3px',
        right: '3px'
    }
});
/**
* The type of the React {@code Component} props of {@link VideoMuteButton}.
*/
interface IProps extends AbstractButtonProps {
/**
* The gumPending state from redux.
*/
_gumPending: IGUMPendingState;
/**
* Whether video button is disabled or not.
@@ -29,6 +48,11 @@ interface IProps extends AbstractButtonProps {
* Whether video is currently muted or not.
*/
_videoMuted: boolean;
/**
* The @mui/styles classes.
*/
classes: ClassNameMap<string>;
}
/**
@@ -54,6 +78,7 @@ class VideoMuteButton extends AbstractVideoMuteButton<IProps> {
// Bind event handlers so they are only bound once per instance.
this._onKeyboardShortcut = this._onKeyboardShortcut.bind(this);
this._getTooltip = this._getLabel;
}
/**
@@ -88,6 +113,43 @@ class VideoMuteButton extends AbstractVideoMuteButton<IProps> {
this.props.dispatch(unregisterShortcut('V'));
}
/**
 * Gets the current accessibility label, taking the toggled and GUM pending state into account.
 * While a GUM request is pending, a dedicated "connecting camera" label is used.
 *
 * The accessibility label is not visible in the UI, it is meant to be used by assistive
 * technologies, mainly screen readers.
 *
 * @private
 * @returns {string}
 */
_getAccessibilityLabel() {
    if (this.props._gumPending !== IGUMPendingState.NONE) {
        return 'toolbar.accessibilityLabel.videomuteGUMPending';
    }

    return super._getAccessibilityLabel();
}
/**
 * Gets the current label, taking the toggled and GUM pending state into account. While a GUM
 * request is pending, a dedicated "connecting camera" label is used.
 *
 * @private
 * @returns {string}
 */
_getLabel() {
    if (this.props._gumPending !== IGUMPendingState.NONE) {
        return 'toolbar.videomuteGUMPending';
    }

    return super._getLabel();
}
/**
* Indicates if video is currently disabled or not.
*
@@ -96,7 +158,7 @@ class VideoMuteButton extends AbstractVideoMuteButton<IProps> {
* @returns {boolean}
*/
_isDisabled() {
return this.props._videoDisabled;
return this.props._videoDisabled || this.props._gumPending !== IGUMPendingState.NONE;
}
/**
@@ -107,7 +169,31 @@ class VideoMuteButton extends AbstractVideoMuteButton<IProps> {
* @returns {boolean}
*/
_isVideoMuted() {
return this.props._videoMuted;
const { _gumPending, _videoMuted } = this.props;
if (_gumPending === IGUMPendingState.PENDING_UNMUTE) {
return false;
}
return _videoMuted;
}
/**
 * Renders a small spinner next to the button while a GUM request is pending; otherwise renders
 * nothing.
 *
 * @returns {ReactElement | null}
 */
_getElementAfter(): ReactElement | null {
    const { _gumPending, classes } = this.props;

    if (_gumPending === IGUMPendingState.NONE) {
        return null;
    }

    return (
        <div className = { classes.pendingContainer }>
            <Spinner
                color = { SPINNER_COLOR }
                size = 'small' />
        </div>
    );
}
/**
@@ -158,12 +244,14 @@ class VideoMuteButton extends AbstractVideoMuteButton<IProps> {
/**
 * Maps (parts of) the redux state to the associated props of this component.
 *
 * @param {Object} state - The redux state.
 * @private
 * @returns {Object} Props derived from state: the disabled/muted/GUM-pending flags and the
 * feature-flag driven visibility.
 */
function _mapStateToProps(state: IReduxState) {
    const tracks = state['features/base/tracks'];
    const enabledFlag = getFeatureFlag(state, VIDEO_MUTE_BUTTON_ENABLED, true);

    // The video GUM pending state drives the spinner and the temporary labels.
    const { gumPending } = state['features/base/media'].video;

    return {
        _videoDisabled: isVideoMuteButtonDisabled(state),
        _videoMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO),
        _gumPending: gumPending,
        visible: enabledFlag
    };
}
export default translate(connect(_mapStateToProps)(VideoMuteButton));
export default withStyles(styles)(translate(connect(_mapStateToProps)(VideoMuteButton)));

View File

@@ -7,6 +7,7 @@ import { isMobileBrowser } from '../../../base/environment/utils';
import { translate } from '../../../base/i18n/functions';
import { IconArrowUp } from '../../../base/icons/svg';
import JitsiMeetJS from '../../../base/lib-jitsi-meet/_';
import { IGUMPendingState } from '../../../base/media/types';
import ToolboxButtonWithIcon from '../../../base/toolbox/components/web/ToolboxButtonWithIcon';
import { toggleAudioSettings } from '../../../settings/actions';
import AudioSettingsPopup from '../../../settings/components/web/audio/AudioSettingsPopup';
@@ -21,6 +22,11 @@ interface IProps extends WithTranslation {
*/
buttonKey?: string;
/**
* The gumPending state from redux.
*/
gumPending: IGUMPendingState;
/**
* External handler for click action.
*/
@@ -112,7 +118,7 @@ class AudioSettingsButton extends Component<IProps> {
* @inheritdoc
*/
render() {
const { hasPermissions, isDisabled, visible, isOpen, buttonKey, notifyMode, t } = this.props;
const { gumPending, hasPermissions, isDisabled, visible, isOpen, buttonKey, notifyMode, t } = this.props;
const settingsDisabled = !hasPermissions
|| isDisabled
|| !JitsiMeetJS.mediaDevices.isMultipleAudioInputSupported();
@@ -126,7 +132,7 @@ class AudioSettingsButton extends Component<IProps> {
ariaLabel = { t('toolbar.audioSettings') }
buttonKey = { buttonKey }
icon = { IconArrowUp }
iconDisabled = { settingsDisabled }
iconDisabled = { settingsDisabled || gumPending !== IGUMPendingState.NONE }
iconId = 'audio-settings-button'
iconTooltip = { t('toolbar.audioSettings') }
notifyMode = { notifyMode }
@@ -152,8 +158,10 @@ class AudioSettingsButton extends Component<IProps> {
function mapStateToProps(state: IReduxState) {
const { permissions = { audio: false } } = state['features/base/devices'];
const { isNarrowLayout } = state['features/base/responsive-ui'];
const { gumPending } = state['features/base/media'].audio;
return {
gumPending,
hasPermissions: permissions.audio,
isDisabled: Boolean(isAudioSettingsButtonDisabled(state)),
isOpen: Boolean(getAudioSettingsVisibility(state)),

View File

@@ -6,6 +6,7 @@ import { IReduxState } from '../../../app/types';
import { isMobileBrowser } from '../../../base/environment/utils';
import { translate } from '../../../base/i18n/functions';
import { IconArrowUp } from '../../../base/icons/svg';
import { IGUMPendingState } from '../../../base/media/types';
import ToolboxButtonWithIcon from '../../../base/toolbox/components/web/ToolboxButtonWithIcon';
import { getLocalJitsiVideoTrack } from '../../../base/tracks/functions.web';
import { toggleVideoSettings } from '../../../settings/actions';
@@ -22,6 +23,11 @@ interface IProps extends WithTranslation {
*/
buttonKey?: string;
/**
* The gumPending state from redux.
*/
gumPending: IGUMPendingState;
/**
* External handler for click action.
*/
@@ -91,9 +97,9 @@ class VideoSettingsButton extends Component<IProps> {
* @returns {boolean}
*/
_isIconDisabled() {
const { hasPermissions, hasVideoTrack, isDisabled } = this.props;
const { gumPending, hasPermissions, hasVideoTrack, isDisabled } = this.props;
return (!hasPermissions || isDisabled) && !hasVideoTrack;
return ((!hasPermissions || isDisabled) && !hasVideoTrack) || gumPending !== IGUMPendingState.NONE;
}
/**
@@ -131,7 +137,7 @@ class VideoSettingsButton extends Component<IProps> {
* @inheritdoc
*/
render() {
const { t, visible, isOpen, buttonKey, notifyMode } = this.props;
const { gumPending, t, visible, isOpen, buttonKey, notifyMode } = this.props;
return visible ? (
<VideoSettingsPopup>
@@ -142,7 +148,7 @@ class VideoSettingsButton extends Component<IProps> {
ariaLabel = { this.props.t('toolbar.videoSettings') }
buttonKey = { buttonKey }
icon = { IconArrowUp }
iconDisabled = { this._isIconDisabled() }
iconDisabled = { this._isIconDisabled() || gumPending !== IGUMPendingState.NONE }
iconId = 'video-settings-button'
iconTooltip = { t('toolbar.videoSettings') }
notifyMode = { notifyMode }
@@ -168,8 +174,10 @@ class VideoSettingsButton extends Component<IProps> {
function mapStateToProps(state: IReduxState) {
const { permissions = { video: false } } = state['features/base/devices'];
const { isNarrowLayout } = state['features/base/responsive-ui'];
const { gumPending } = state['features/base/media'].video;
return {
gumPending,
hasPermissions: permissions.video,
hasVideoTrack: Boolean(getLocalJitsiVideoTrack(state)),
isDisabled: isVideoSettingsButtonDisabled(state),

View File

@@ -45,3 +45,8 @@ export const NOTIFY_CLICK_MODE = {
// Around 300 to be displayed above components like chat
export const ZINDEX_DIALOG_PORTAL = 302;
/**
* Color for spinner displayed in the toolbar.
*/
export const SPINNER_COLOR = '#929292';

View File

@@ -1,6 +1,7 @@
import { IReduxState } from '../app/types';
import { FEATURES_TO_BUTTONS_MAPPING } from '../base/jwt/constants';
import { isJwtFeatureEnabled } from '../base/jwt/functions';
import { IGUMPendingState } from '../base/media/types';
/**
* Indicates if the audio mute button is disabled or not.
@@ -9,10 +10,10 @@ import { isJwtFeatureEnabled } from '../base/jwt/functions';
* @returns {boolean}
*/
export function isAudioMuteButtonDisabled(state: IReduxState) {
const { available, muted, unmuteBlocked } = state['features/base/media'].audio;
const { available, muted, unmuteBlocked, gumPending } = state['features/base/media'].audio;
const { startSilent } = state['features/base/config'];
return Boolean(!available || startSilent || (muted && unmuteBlocked));
return Boolean(!available || startSilent || (muted && unmuteBlocked) || gumPending !== IGUMPendingState.NONE);
}
/**

View File

@@ -3,6 +3,7 @@ import { getToolbarButtons } from '../base/config/functions.web';
import { hasAvailableDevices } from '../base/devices/functions';
import { MEET_FEATURES } from '../base/jwt/constants';
import { isJwtFeatureEnabled } from '../base/jwt/functions';
import { IGUMPendingState } from '../base/media/types';
import { isScreenMediaShared } from '../screen-share/functions';
import { isWhiteboardVisible } from '../whiteboard/functions';
@@ -108,9 +109,11 @@ export function isVideoSettingsButtonDisabled(state: IReduxState) {
* @returns {boolean}
*/
export function isVideoMuteButtonDisabled(state: IReduxState) {
const { muted, unmuteBlocked } = state['features/base/media'].video;
const { muted, unmuteBlocked, gumPending } = state['features/base/media'].video;
return !hasAvailableDevices(state, 'videoInput') || (unmuteBlocked && Boolean(muted));
return !hasAvailableDevices(state, 'videoInput')
|| (unmuteBlocked && Boolean(muted))
|| gumPending !== IGUMPendingState.NONE;
}
/**