Mirror of https://gitcode.com/GitHub_Trending/ji/jitsi-meet.git (synced 2025-12-30 11:22:31 +00:00)
Compare commits
29 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | cd6e905b95 |  |
|  | ddaf7a3180 |  |
|  | 47aa51a58c |  |
|  | 66f7b9de53 |  |
|  | bab87be9c9 |  |
|  | 6549d47233 |  |
|  | 2063e66b8e |  |
|  | 4dd241712d |  |
|  | a574d5ec79 |  |
|  | 4b2b85bd12 |  |
|  | 77ab1ea8ed |  |
|  | 05e47ade7c |  |
|  | 6c78ec9099 |  |
|  | 4c5afc0b5e |  |
|  | 2e31ab9dca |  |
|  | 02787b1394 |  |
|  | 2476a06237 |  |
|  | 2d8909911e |  |
|  | d06b847319 |  |
|  | b517f614b3 |  |
|  | 10f77f1fbc |  |
|  | 77b89ece4a |  |
|  | 129264c3c9 |  |
|  | 96c5a9abd1 |  |
|  | 93ef2337ae |  |
|  | 854a077684 |  |
|  | f903a7ae6e |  |
|  | 9013881f76 |  |
|  | b6e7e0a19e |  |
17 .github/workflows/ci.yml (vendored)
@@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/setup-node@v4
- uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'
cache: 'npm'
@@ -43,7 +43,7 @@ jobs:
os: [macos-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v6
- uses: actions/setup-node@v4
- uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'
cache: 'npm'
@@ -60,7 +60,7 @@ jobs:
runs-on: macos-15
steps:
- uses: actions/checkout@v6
- uses: actions/setup-node@v4
- uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'
cache: 'npm'
@@ -75,7 +75,7 @@ jobs:
runs-on: macos-15
steps:
- uses: actions/checkout@v6
- uses: actions/setup-node@v4
- uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'
cache: 'npm'
@@ -106,7 +106,7 @@ jobs:
container: reactnativecommunity/react-native-android:v15.0
steps:
- uses: actions/checkout@v6
- uses: actions/setup-node@v4
- uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'
cache: 'npm'
@@ -119,12 +119,15 @@ jobs:
cd android
./gradlew :sdk:clean
./gradlew :sdk:assembleRelease
- run: |
git config --global --add safe.directory /__w/jitsi-meet/jitsi-meet
git clean -dfx
ios-sdk-build:
name: Build mobile SDK (iOS)
runs-on: macos-15
steps:
- uses: actions/checkout@v6
- uses: actions/setup-node@v4
- uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'
cache: 'npm'
@@ -174,7 +177,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/setup-node@v4
- uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'
cache: 'npm'
@@ -520,9 +520,6 @@ var config = {
// // Note: Starting transcriptions from the recording dialog will still work.
// disableClosedCaptions: false,

// // Whether to invite jigasi when backend transcriptions are enabled (asyncTranscription is true in metadata).
// // By default, we invite it.
// inviteJigasiOnBackendTranscribing: true,
// },

// Misc
@@ -927,6 +924,11 @@ var config = {
// [ 'microphone', 'camera' ]
// ],

// Overrides the buttons displayed in the main toolbar for reduced UI.
// When there isn't an override for a certain configuration the default jitsi-meet configuration will be used.
// The order of the buttons in the array is preserved.
// reducedUImainToolbarButtons: [ 'microphone', 'camera' ],

// Toolbar buttons which have their click/tap event exposed through the API on
// `toolbarButtonClicked`. Passing a string for the button key will
// prevent execution of the click/tap routine; passing an object with `key` and
5 custom.d.ts (vendored)
@@ -2,3 +2,8 @@ declare module '*.svg' {
const content: any;
export default content;
}

declare module '*.svg?raw' {
const content: string;
export default content;
}
@@ -10,6 +10,7 @@ no-cli
no-loopback-peers
no-tcp-relay
no-tcp
no-dtls
listening-port=3478
tls-listening-port=5349
no-tlsv1
@@ -1358,7 +1358,7 @@
"muteEveryoneElsesVideoStream": "Stop everyone else's video",
"muteEveryonesVideoStream": "Stop everyone's video",
"muteGUMPending": "Connecting your microphone",
"noiseSuppression": "Extra noise suppression (BETA)",
"noiseSuppression": "Extra noise suppression",
"openChat": "Open chat",
"participants": "Open participants panel. {{participantsCount}} participants",
"pip": "Toggle Picture-in-Picture mode",
@@ -1423,14 +1423,14 @@
"closeParticipantsPane": "Close participants pane",
"closeReactionsMenu": "Close reactions menu",
"closedCaptions": "Closed captions",
"disableNoiseSuppression": "Disable extra noise suppression (BETA)",
"disableNoiseSuppression": "Disable extra noise suppression",
"disableReactionSounds": "You can disable reaction sounds for this meeting",
"documentClose": "Close shared document",
"documentOpen": "Open shared document",
"download": "Download our apps",
"e2ee": "End-to-End Encryption",
"embedMeeting": "Embed meeting",
"enableNoiseSuppression": "Enable extra noise suppression (BETA)",
"enableNoiseSuppression": "Enable extra noise suppression",
"endConference": "End meeting for all",
"enterFullScreen": "View full screen",
"enterTileView": "Enter tile view",
@@ -1466,7 +1466,7 @@
"noAudioSignalDialInDesc": "You can also dial-in using:",
"noAudioSignalDialInLinkDesc": "Dial-in numbers",
"noAudioSignalTitle": "There is no input coming from your mic!",
"noiseSuppression": "Extra noise suppression (BETA)",
"noiseSuppression": "Extra noise suppression",
"noisyAudioInputDesc": "It sounds like your microphone is making noise, please consider muting or changing the device.",
"noisyAudioInputTitle": "Your microphone appears to be noisy!",
"openChat": "Open chat",
@@ -107,9 +107,10 @@ import {
|
||||
open as openParticipantsPane
|
||||
} from '../../react/features/participants-pane/actions';
|
||||
import { getParticipantsPaneOpen } from '../../react/features/participants-pane/functions';
|
||||
import { hidePiP, showPiP } from '../../react/features/pip/actions';
|
||||
import { startLocalVideoRecording, stopLocalVideoRecording } from '../../react/features/recording/actions.any';
|
||||
import { grantRecordingConsent, grantRecordingConsentAndUnmute } from '../../react/features/recording/actions.web';
|
||||
import { RECORDING_METADATA_ID, RECORDING_TYPES } from '../../react/features/recording/constants';
|
||||
import { RECORDING_TYPES } from '../../react/features/recording/constants';
|
||||
import { getActiveSession, supportsLocalRecording } from '../../react/features/recording/functions';
|
||||
import { startAudioScreenShareFlow, startScreenShareFlow } from '../../react/features/screen-share/actions';
|
||||
import { isScreenAudioSupported } from '../../react/features/screen-share/functions';
|
||||
@@ -125,7 +126,7 @@ import { extractYoutubeIdOrURL } from '../../react/features/shared-video/functio
|
||||
import { setRequestingSubtitles, toggleRequestingSubtitles } from '../../react/features/subtitles/actions';
|
||||
import { isAudioMuteButtonDisabled } from '../../react/features/toolbox/functions';
|
||||
import { setTileView, toggleTileView } from '../../react/features/video-layout/actions.any';
|
||||
import { muteAllParticipants } from '../../react/features/video-menu/actions';
|
||||
import { muteAllParticipants, muteRemote } from '../../react/features/video-menu/actions';
|
||||
import { setVideoQuality } from '../../react/features/video-quality/actions';
|
||||
import { toggleBackgroundEffect, toggleBlurredBackgroundEffect } from '../../react/features/virtual-background/actions';
|
||||
import { VIRTUAL_BACKGROUND_TYPE } from '../../react/features/virtual-background/constants';
|
||||
@@ -238,6 +239,17 @@ function initCommands() {
|
||||
|
||||
APP.store.dispatch(muteAllParticipants(exclude, muteMediaType));
|
||||
},
|
||||
'mute-remote-participant': (participantId, mediaType) => {
|
||||
if (!isLocalParticipantModerator(APP.store.getState())) {
|
||||
logger.error('Missing moderator rights to mute remote participant');
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const muteMediaType = mediaType ? mediaType : MEDIA_TYPE.AUDIO;
|
||||
|
||||
APP.store.dispatch(muteRemote(participantId, muteMediaType));
|
||||
},
|
||||
'toggle-lobby': isLobbyEnabled => {
|
||||
APP.store.dispatch(toggleLobbyMode(isLobbyEnabled));
|
||||
},
|
||||
@@ -778,10 +790,7 @@ function initCommands() {
|
||||
}
|
||||
|
||||
if (transcription) {
|
||||
APP.store.dispatch(setRequestingSubtitles(true, false, null, true));
|
||||
conference.getMetadataHandler().setMetadata(RECORDING_METADATA_ID, {
|
||||
isTranscribingEnabled: true
|
||||
});
|
||||
APP.store.dispatch(setRequestingSubtitles(true, false, null));
|
||||
}
|
||||
},
|
||||
|
||||
@@ -804,9 +813,6 @@ function initCommands() {
|
||||
|
||||
if (transcription) {
|
||||
APP.store.dispatch(setRequestingSubtitles(false, false, null));
|
||||
conference.getMetadataHandler().setMetadata(RECORDING_METADATA_ID, {
|
||||
isTranscribingEnabled: false
|
||||
});
|
||||
}
|
||||
|
||||
if (mode === 'local') {
|
||||
@@ -907,6 +913,12 @@ function initCommands() {
|
||||
backgroundType: VIRTUAL_BACKGROUND_TYPE.IMAGE,
|
||||
virtualSource: backgroundImage
|
||||
}, jitsiTrack));
|
||||
},
|
||||
'show-pip': () => {
|
||||
APP.store.dispatch(showPiP());
|
||||
},
|
||||
'hide-pip': () => {
|
||||
APP.store.dispatch(hidePiP());
|
||||
}
|
||||
};
|
||||
transport.on('event', ({ data, name }) => {
|
||||
@@ -1242,6 +1254,20 @@ class API {
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify external application (if API is enabled) that the in-page toolbox
|
||||
* visibility changed.
|
||||
*
|
||||
* @param {boolean} visible - True if the toolbox is visible, false otherwise.
|
||||
* @returns {void}
|
||||
*/
|
||||
notifyToolbarVisibilityChanged(visible) {
|
||||
this._sendEvent({
|
||||
name: 'toolbar-visibility-changed',
|
||||
visible
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Notifies the external application (spot) that the local jitsi-participant
|
||||
* has a status update.
|
||||
@@ -1386,6 +1412,25 @@ class API {
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify the external application that a participant's mute status changed.
|
||||
*
|
||||
* @param {string} participantId - The ID of the participant.
|
||||
* @param {boolean} isMuted - True if muted, false if unmuted.
|
||||
* @param {string} mediaType - Media type that was muted ('audio', 'video', or 'desktop').
|
||||
* @param {boolean} isSelfMuted - True if participant muted themselves, false if muted by moderator.
|
||||
* @returns {void}
|
||||
*/
|
||||
notifyParticipantMuted(participantId, isMuted, mediaType, isSelfMuted = true) {
|
||||
this._sendEvent({
|
||||
name: 'participant-muted',
|
||||
id: participantId,
|
||||
isMuted,
|
||||
mediaType,
|
||||
isSelfMuted
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify the external app that a notification has been triggered.
|
||||
*
|
||||
@@ -2235,6 +2280,40 @@ class API {
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify external application (if API is enabled) that Picture-in-Picture was requested.
|
||||
* Used by Electron to handle PiP requests with proper user gesture context.
|
||||
*
|
||||
* @returns {void}
|
||||
*/
|
||||
notifyPictureInPictureRequested() {
|
||||
this._sendEvent({
|
||||
name: '_pip-requested'
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify external application (if API is enabled) that Picture-in-Picture mode was entered.
|
||||
*
|
||||
* @returns {void}
|
||||
*/
|
||||
notifyPictureInPictureEntered() {
|
||||
this._sendEvent({
|
||||
name: 'pip-entered'
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify external application (if API is enabled) that Picture-in-Picture mode was exited.
|
||||
*
|
||||
* @returns {void}
|
||||
*/
|
||||
notifyPictureInPictureLeft() {
|
||||
this._sendEvent({
|
||||
name: 'pip-left'
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify external application ( if API is enabled) that a participant menu button was clicked.
|
||||
*
|
||||
|
||||
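For orientation, a minimal sketch of how an embedding page could exercise the command and events added above ('mute-remote-participant', 'participant-muted', 'toolbar-visibility-changed') through the JitsiMeetExternalAPI wrapper, whose event name mapping appears in the external_api.js diff below. It assumes external_api.js is already loaded; the domain, container element, room name and participant id are placeholders, not part of this change set.

```js
// Sketch only: 'meet.example.com', '#meet' and the participant id are assumed placeholders.
const api = new JitsiMeetExternalAPI('meet.example.com', {
    roomName: 'api-demo',
    parentNode: document.querySelector('#meet')
});

// 'participant-muted' is forwarded to the embedding page as 'participantMuted',
// carrying the payload built by notifyParticipantMuted() above.
api.addListener('participantMuted', ({ id, isMuted, mediaType, isSelfMuted }) => {
    console.log(`participant ${id} ${isMuted ? 'muted' : 'unmuted'} ${mediaType} (self-muted: ${isSelfMuted})`);
});

// 'toolbar-visibility-changed' is exposed as 'toolbarVisibilityChanged'.
api.addListener('toolbarVisibilityChanged', ({ visible }) => {
    console.log('in-page toolbar visible:', visible);
});

// New command: mute a single remote participant. The local participant must be
// a moderator; mediaType falls back to 'audio' when omitted.
const remoteParticipantId = '<participant-id>';

api.executeCommand('muteRemoteParticipant', remoteParticipantId, 'audio');
```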
119 modules/API/external/external_api.js (vendored)
@@ -2,6 +2,7 @@ import { jitsiLocalStorage } from '@jitsi/js-utils/jitsi-local-storage';
|
||||
import EventEmitter from 'events';
|
||||
|
||||
import { urlObjectToString } from '../../../react/features/base/util/uri';
|
||||
import { isPiPEnabled } from '../../../react/features/pip/external-api.shared';
|
||||
import {
|
||||
PostMessageTransportBackend,
|
||||
Transport
|
||||
@@ -46,6 +47,7 @@ const commands = {
|
||||
localSubject: 'local-subject',
|
||||
kickParticipant: 'kick-participant',
|
||||
muteEveryone: 'mute-everyone',
|
||||
muteRemoteParticipant: 'mute-remote-participant',
|
||||
overwriteConfig: 'overwrite-config',
|
||||
overwriteNames: 'overwrite-names',
|
||||
password: 'password',
|
||||
@@ -94,7 +96,9 @@ const commands = {
|
||||
toggleTileView: 'toggle-tile-view',
|
||||
toggleVirtualBackgroundDialog: 'toggle-virtual-background',
|
||||
toggleVideo: 'toggle-video',
|
||||
toggleWhiteboard: 'toggle-whiteboard'
|
||||
toggleWhiteboard: 'toggle-whiteboard',
|
||||
showPiP: 'show-pip',
|
||||
hidePiP: 'hide-pip'
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -102,6 +106,9 @@ const commands = {
|
||||
* events expected by jitsi-meet.
|
||||
*/
|
||||
const events = {
|
||||
'_pip-requested': '_pipRequested',
|
||||
'pip-entered': 'pipEntered',
|
||||
'pip-left': 'pipLeft',
|
||||
'avatar-changed': 'avatarChanged',
|
||||
'audio-availability-changed': 'audioAvailabilityChanged',
|
||||
'audio-mute-status-changed': 'audioMuteStatusChanged',
|
||||
@@ -144,6 +151,7 @@ const events = {
|
||||
'participant-joined': 'participantJoined',
|
||||
'participant-kicked-out': 'participantKickedOut',
|
||||
'participant-left': 'participantLeft',
|
||||
'participant-muted': 'participantMuted',
|
||||
'participant-role-changed': 'participantRoleChanged',
|
||||
'participants-pane-toggled': 'participantsPaneToggled',
|
||||
'password-required': 'passwordRequired',
|
||||
@@ -167,6 +175,7 @@ const events = {
|
||||
'suspend-detected': 'suspendDetected',
|
||||
'tile-view-changed': 'tileViewChanged',
|
||||
'toolbar-button-clicked': 'toolbarButtonClicked',
|
||||
'toolbar-visibility-changed': 'toolbarVisibilityChanged',
|
||||
'transcribing-status-changed': 'transcribingStatusChanged',
|
||||
'transcription-chunk-received': 'transcriptionChunkReceived',
|
||||
'whiteboard-status-changed': 'whiteboardStatusChanged'
|
||||
@@ -329,6 +338,7 @@ export default class JitsiMeetExternalAPI extends EventEmitter {
|
||||
this._myUserID = undefined;
|
||||
this._onStageParticipant = undefined;
|
||||
this._iAmvisitor = undefined;
|
||||
this._pipConfig = configOverwrite?.pip;
|
||||
this._setupListeners();
|
||||
id++;
|
||||
}
|
||||
@@ -648,6 +658,56 @@ export default class JitsiMeetExternalAPI extends EventEmitter {
|
||||
this.emit(requestName, data, callback);
|
||||
}
|
||||
});
|
||||
|
||||
this._setupIntersectionObserver();
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up IntersectionObserver to monitor iframe visibility.
|
||||
* Calls showPiP/hidePiP based on visibility.
|
||||
*
|
||||
* @private
|
||||
* @returns {void}
|
||||
*/
|
||||
_setupIntersectionObserver() {
|
||||
if (!isPiPEnabled(this._pipConfig)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Don't create duplicate observers.
|
||||
if (this._intersectionObserver) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._isIntersecting = true;
|
||||
|
||||
this._intersectionObserver = new IntersectionObserver(entries => {
|
||||
const entry = entries[entries.length - 1];
|
||||
const wasIntersecting = this._isIntersecting;
|
||||
|
||||
this._isIntersecting = entry.isIntersecting;
|
||||
|
||||
if (!entry.isIntersecting && wasIntersecting) {
|
||||
this.showPiP();
|
||||
} else if (entry.isIntersecting && !wasIntersecting) {
|
||||
this.hidePiP();
|
||||
}
|
||||
});
|
||||
|
||||
this._intersectionObserver.observe(this._frame);
|
||||
}
|
||||
|
||||
/**
|
||||
* Tears down IntersectionObserver.
|
||||
*
|
||||
* @private
|
||||
* @returns {void}
|
||||
*/
|
||||
_teardownIntersectionObserver() {
|
||||
if (this._intersectionObserver) {
|
||||
this._intersectionObserver.disconnect();
|
||||
this._intersectionObserver = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -850,6 +910,8 @@ export default class JitsiMeetExternalAPI extends EventEmitter {
|
||||
this.emit('_willDispose');
|
||||
this._transport.dispose();
|
||||
this.removeAllListeners();
|
||||
this._teardownIntersectionObserver();
|
||||
|
||||
if (this._frame && this._frame.parentNode) {
|
||||
this._frame.parentNode.removeChild(this._frame);
|
||||
}
|
||||
@@ -878,10 +940,47 @@ export default class JitsiMeetExternalAPI extends EventEmitter {
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// Handle pip config changes locally.
|
||||
// We update local state, send command to iframe, then handle PiP show/hide
|
||||
// so the iframe config is updated before we try to show PiP.
|
||||
let pipTransition = null;
|
||||
|
||||
if (name === 'overwriteConfig' && args[0]?.pip !== undefined) {
|
||||
const wasEnabled = isPiPEnabled(this._pipConfig);
|
||||
|
||||
this._pipConfig = {
|
||||
...this._pipConfig,
|
||||
...args[0].pip
|
||||
};
|
||||
|
||||
const isEnabled = isPiPEnabled(this._pipConfig);
|
||||
|
||||
if (!wasEnabled && isEnabled) {
|
||||
this._setupIntersectionObserver();
|
||||
pipTransition = 'enabled';
|
||||
} else if (wasEnabled && !isEnabled) {
|
||||
this._teardownIntersectionObserver();
|
||||
pipTransition = 'disabled';
|
||||
}
|
||||
}
|
||||
|
||||
// Send command to iframe first.
|
||||
this._transport.sendEvent({
|
||||
data: args,
|
||||
name: commands[name]
|
||||
});
|
||||
|
||||
// Handle PiP state after command is sent so iframe config is updated.
|
||||
if (pipTransition === 'enabled') {
|
||||
// Show PiP if iframe is currently not visible.
|
||||
if (!this._isIntersecting) {
|
||||
this.showPiP();
|
||||
}
|
||||
} else if (pipTransition === 'disabled') {
|
||||
// Hide any open PiP window.
|
||||
this.hidePiP();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1495,6 +1594,24 @@ export default class JitsiMeetExternalAPI extends EventEmitter {
|
||||
this.executeCommand('setVirtualBackground', enabled, backgroundImage);
|
||||
}
|
||||
|
||||
/**
|
||||
* Shows Picture-in-Picture window.
|
||||
*
|
||||
* @returns {void}
|
||||
*/
|
||||
showPiP() {
|
||||
this.executeCommand('showPiP');
|
||||
}
|
||||
|
||||
/**
|
||||
* Hides Picture-in-Picture window.
|
||||
*
|
||||
* @returns {void}
|
||||
*/
|
||||
hidePiP() {
|
||||
this.executeCommand('hidePiP');
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the desktop picker. This is invoked by the Electron SDK when gDM is used.
|
||||
*
|
||||
|
||||
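A hedged usage sketch for the Picture-in-Picture surface added to external_api.js above: the showPiP()/hidePiP() methods, the pip block read from configOverwrite, and the pipEntered/pipLeft events. The domain, container element and the values inside configOverwrite.pip are illustrative; the exact enable/disable semantics live in isPiPEnabled(), which is not part of this diff.

```js
// Sketch: the pip config shape ({ disabled?, showOnPrejoin? }) comes from the
// IConfig change in this change set; the values below are just examples.
const api = new JitsiMeetExternalAPI('meet.example.com', {
    roomName: 'pip-demo',
    parentNode: document.querySelector('#meet'),
    configOverwrite: {
        pip: {
            disabled: false,
            showOnPrejoin: true
        }
    }
});

api.addListener('pipEntered', () => console.log('PiP window opened'));
api.addListener('pipLeft', () => console.log('PiP window closed'));

// Manual control. When PiP is enabled, the wrapper also calls these itself from
// an IntersectionObserver as the iframe scrolls out of and back into view.
api.showPiP();
api.hidePiP();
```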
3228 package-lock.json (generated)
File diff suppressed because it is too large
16 package.json
@@ -72,7 +72,7 @@
"js-md5": "0.6.1",
"js-sha512": "0.8.0",
"jwt-decode": "2.2.0",
"lib-jitsi-meet": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v2115.0.0+cc2f34c2/lib-jitsi-meet.tgz",
"lib-jitsi-meet": "https://github.com/jitsi/lib-jitsi-meet/releases/download/v2118.0.0+67fd2c84/lib-jitsi-meet.tgz",
"lodash-es": "4.17.21",
"null-loader": "4.0.1",
"optional-require": "1.0.3",
@@ -164,12 +164,12 @@
"@types/w3c-image-capture": "1.0.6",
"@types/w3c-web-hid": "1.0.3",
"@types/zxcvbn": "4.4.1",
"@wdio/allure-reporter": "9.16.0",
"@wdio/cli": "9.16.0",
"@wdio/globals": "9.16.0",
"@wdio/junit-reporter": "9.16.0",
"@wdio/local-runner": "9.16.0",
"@wdio/mocha-framework": "9.16.0",
"@wdio/allure-reporter": "9.22.0",
"@wdio/cli": "9.22.0",
"@wdio/globals": "9.17.0",
"@wdio/junit-reporter": "9.21.0",
"@wdio/local-runner": "9.22.0",
"@wdio/mocha-framework": "9.22.0",
"babel-loader": "9.1.0",
"babel-plugin-optional-require": "0.3.1",
"circular-dependency-plugin": "5.2.0",
@@ -193,7 +193,7 @@
"ts-loader": "9.4.2",
"typescript": "5.7.2",
"unorm": "1.6.0",
"webdriverio": "9.16.0",
"webdriverio": "9.22.0",
"webpack": "5.95.0",
"webpack-bundle-analyzer": "4.4.2",
"webpack-cli": "5.1.4",
@@ -5,6 +5,7 @@ import JitsiThemeProvider from '../../base/ui/components/JitsiThemeProvider.web'
|
||||
import DialogContainer from '../../base/ui/components/web/DialogContainer';
|
||||
import ChromeExtensionBanner from '../../chrome-extension-banner/components/ChromeExtensionBanner.web';
|
||||
import OverlayContainer from '../../overlay/components/web/OverlayContainer';
|
||||
import PiP from '../../pip/components/PiP';
|
||||
|
||||
import { AbstractApp } from './AbstractApp';
|
||||
|
||||
@@ -47,6 +48,7 @@ export class App extends AbstractApp {
|
||||
<JitsiThemeProvider>
|
||||
<GlobalStyles />
|
||||
<ChromeExtensionBanner />
|
||||
<PiP />
|
||||
{ super._createMainElement(component, props) }
|
||||
</JitsiThemeProvider>
|
||||
);
|
||||
|
||||
@@ -2,7 +2,6 @@ import '../analytics/middleware';
|
||||
import '../authentication/middleware';
|
||||
import '../av-moderation/middleware';
|
||||
import '../base/conference/middleware';
|
||||
import '../base/config/middleware';
|
||||
import '../base/i18n/middleware';
|
||||
import '../base/jwt/middleware';
|
||||
import '../base/known-domains/middleware';
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import '../base/config/middleware';
|
||||
import '../dynamic-branding/middleware';
|
||||
import '../gifs/middleware';
|
||||
import '../mobile/audio-mode/middleware';
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import '../base/app/middleware';
|
||||
import '../base/config/middleware';
|
||||
import '../base/connection/middleware';
|
||||
import '../base/devices/middleware';
|
||||
import '../base/media/middleware';
|
||||
@@ -11,6 +12,7 @@ import '../no-audio-signal/middleware';
|
||||
import '../notifications/middleware';
|
||||
import '../noise-detection/middleware';
|
||||
import '../old-client-notification/middleware';
|
||||
import '../pip/middleware';
|
||||
import '../power-monitor/middleware';
|
||||
import '../prejoin/middleware';
|
||||
import '../remote-control/middleware';
|
||||
|
||||
@@ -8,6 +8,7 @@ import '../keyboard-shortcuts/reducer';
|
||||
import '../no-audio-signal/reducer';
|
||||
import '../noise-detection/reducer';
|
||||
import '../participants-pane/reducer';
|
||||
import '../pip/reducer';
|
||||
import '../power-monitor/reducer';
|
||||
import '../prejoin/reducer';
|
||||
import '../remote-control/reducer';
|
||||
|
||||
@@ -57,6 +57,7 @@ import { INoiseDetectionState } from '../noise-detection/reducer';
|
||||
import { INoiseSuppressionState } from '../noise-suppression/reducer';
|
||||
import { INotificationsState } from '../notifications/reducer';
|
||||
import { IParticipantsPaneState } from '../participants-pane/reducer';
|
||||
import { IPipState } from '../pip/reducer';
|
||||
import { IPollsState } from '../polls/reducer';
|
||||
import { IPollsHistoryState } from '../polls-history/reducer';
|
||||
import { IPowerMonitorState } from '../power-monitor/reducer';
|
||||
@@ -145,6 +146,7 @@ export interface IReduxState {
|
||||
'features/noise-suppression': INoiseSuppressionState;
|
||||
'features/notifications': INotificationsState;
|
||||
'features/participants-pane': IParticipantsPaneState;
|
||||
'features/pip': IPipState;
|
||||
'features/polls': IPollsState;
|
||||
'features/polls-history': IPollsHistoryState;
|
||||
'features/power-monitor': IPowerMonitorState;
|
||||
|
||||
@@ -5,7 +5,18 @@ import Icon from '../../../icons/components/Icon';
|
||||
import { pixelsToRem } from '../../../ui/functions.any';
|
||||
import { isIcon } from '../../functions';
|
||||
import { IAvatarProps } from '../../types';
|
||||
import { PRESENCE_AVAILABLE_COLOR, PRESENCE_AWAY_COLOR, PRESENCE_BUSY_COLOR, PRESENCE_IDLE_COLOR } from '../styles';
|
||||
import {
|
||||
PRESENCE_AVAILABLE_COLOR,
|
||||
PRESENCE_AWAY_COLOR,
|
||||
PRESENCE_BUSY_COLOR,
|
||||
PRESENCE_IDLE_COLOR
|
||||
} from '../styles';
|
||||
|
||||
import {
|
||||
AVATAR_DEFAULT_BACKGROUND_COLOR,
|
||||
getAvatarFont,
|
||||
getAvatarInitialsColor
|
||||
} from './styles';
|
||||
|
||||
interface IProps extends IAvatarProps {
|
||||
|
||||
@@ -48,10 +59,10 @@ interface IProps extends IAvatarProps {
|
||||
const useStyles = makeStyles()(theme => {
|
||||
return {
|
||||
avatar: {
|
||||
backgroundColor: '#AAA',
|
||||
backgroundColor: AVATAR_DEFAULT_BACKGROUND_COLOR,
|
||||
borderRadius: '50%',
|
||||
color: theme.palette?.text01 || '#fff',
|
||||
...(theme.typography?.heading1 ?? {}),
|
||||
color: getAvatarInitialsColor(theme),
|
||||
...getAvatarFont(theme),
|
||||
fontSize: 'inherit',
|
||||
objectFit: 'cover',
|
||||
textAlign: 'center',
|
||||
|
||||
26 react/features/base/avatar/components/web/styles.ts (new file)
@@ -0,0 +1,26 @@
import { Theme } from '@mui/material/styles';

// Default avatar background color
export const AVATAR_DEFAULT_BACKGROUND_COLOR = '#AAA';

/**
* Returns the avatar font style from the theme.
*
* @param {Theme} theme - The MUI theme.
* @returns {Object} The font style object containing fontFamily, fontWeight, etc.
*/
export const getAvatarFont = (theme: Theme) => theme.typography?.heading1 ?? {};

/**
* Default text color for avatar initials.
*/
export const AVATAR_DEFAULT_INITIALS_COLOR = '#FFFFFF';

/**
* Returns the text color for avatar initials from the theme.
*
* @param {Theme} theme - The MUI theme.
* @returns {string} The text color.
*/
export const getAvatarInitialsColor = (theme: Theme): string =>
theme.palette?.text01 || AVATAR_DEFAULT_INITIALS_COLOR;
@@ -517,6 +517,10 @@ export interface IConfig {
|
||||
peopleSearchQueryTypes?: string[];
|
||||
peopleSearchTokenLocation?: string;
|
||||
peopleSearchUrl?: string;
|
||||
pip?: {
|
||||
disabled?: boolean;
|
||||
showOnPrejoin?: boolean;
|
||||
};
|
||||
preferBosh?: boolean;
|
||||
preferVisitor?: boolean;
|
||||
preferredTranscribeLanguage?: string;
|
||||
@@ -555,6 +559,7 @@ export interface IConfig {
|
||||
skipConsentInMeeting?: boolean;
|
||||
suggestRecording?: boolean;
|
||||
};
|
||||
reducedUImainToolbarButtons?: Array<string>;
|
||||
remoteVideoMenu?: {
|
||||
disableDemote?: boolean;
|
||||
disableGrantModerator?: boolean;
|
||||
@@ -632,7 +637,6 @@ export interface IConfig {
|
||||
autoTranscribeOnRecord?: boolean;
|
||||
disableClosedCaptions?: boolean;
|
||||
enabled?: boolean;
|
||||
inviteJigasiOnBackendTranscribing?: boolean;
|
||||
preferredLanguage?: string;
|
||||
translationLanguages?: Array<string>;
|
||||
translationLanguagesHead?: Array<string>;
|
||||
|
||||
@@ -199,6 +199,7 @@ export default [
|
||||
'participantMenuButtonsWithNotifyClick',
|
||||
'participantsPane',
|
||||
'pcStatsInterval',
|
||||
'pip',
|
||||
'preferBosh',
|
||||
'preferVisitor',
|
||||
'prejoinConfig.enabled',
|
||||
@@ -214,6 +215,7 @@ export default [
|
||||
'recordings.showPrejoinWarning',
|
||||
'recordings.showRecordingLink',
|
||||
'recordings.suggestRecording',
|
||||
'reducedUImainToolbarButtons',
|
||||
'replaceParticipant',
|
||||
'resolution',
|
||||
'screenshotCapture',
|
||||
|
||||
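Since 'pip' and 'reducedUImainToolbarButtons' are added to the config whitelist above, both options can now be supplied from an embedding page rather than only from the deployment's config.js. A small sketch, reusing the placeholder domain and container element from the earlier examples; the button keys mirror the commented example in config.js, and the pip values are illustrative.

```js
const api = new JitsiMeetExternalAPI('meet.example.com', {
    roomName: 'reduced-ui-demo',
    parentNode: document.querySelector('#meet'),
    configOverwrite: {
        // Main-toolbar buttons used when the reduced UI is active; order is preserved.
        reducedUImainToolbarButtons: [ 'microphone', 'camera' ],
        // PiP options, shaped as defined in IConfig above.
        pip: { showOnPrejoin: true }
    }
});
```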
@@ -2,7 +2,6 @@ import { AnyAction } from 'redux';
|
||||
|
||||
import { IStore } from '../../app/types';
|
||||
import { SET_DYNAMIC_BRANDING_DATA } from '../../dynamic-branding/actionTypes';
|
||||
import { setUserFilmstripWidth } from '../../filmstrip/actions.web';
|
||||
import { getFeatureFlag } from '../flags/functions';
|
||||
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
|
||||
import { updateSettings } from '../settings/actions';
|
||||
@@ -80,29 +79,8 @@ function _setConfig({ dispatch, getState }: IStore, next: Function, action: AnyA
|
||||
}));
|
||||
}
|
||||
|
||||
const { initialWidth, stageFilmstripParticipants } = action.config.filmstrip || {};
|
||||
|
||||
if (stageFilmstripParticipants !== undefined) {
|
||||
dispatch(updateSettings({
|
||||
maxStageParticipants: stageFilmstripParticipants
|
||||
}));
|
||||
}
|
||||
|
||||
if (initialWidth) {
|
||||
dispatch(setUserFilmstripWidth(initialWidth));
|
||||
}
|
||||
|
||||
dispatch(updateConfig(config));
|
||||
|
||||
// FIXME On Web we rely on the global 'config' variable which gets altered
|
||||
// multiple times, before it makes it to the reducer. At some point it may
|
||||
// not be the global variable which is being modified anymore due to
|
||||
// different merge methods being used along the way. The global variable
|
||||
// must be synchronized with the final state resolved by the reducer.
|
||||
if (typeof window.config !== 'undefined') {
|
||||
window.config = state['features/base/config'];
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
1 react/features/base/config/middleware.native.ts (new file)
@@ -0,0 +1 @@
import './middleware.any';
48 react/features/base/config/middleware.web.ts (new file)
@@ -0,0 +1,48 @@
|
||||
import { setUserFilmstripWidth } from '../../filmstrip/actions.web';
|
||||
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
|
||||
import { updateSettings } from '../settings/actions';
|
||||
|
||||
import { SET_CONFIG } from './actionTypes';
|
||||
import './middleware.any';
|
||||
|
||||
/**
|
||||
* The middleware of the feature {@code base/config}.
|
||||
*
|
||||
* @param {Store} store - The redux store.
|
||||
* @private
|
||||
* @returns {Function}
|
||||
*/
|
||||
MiddlewareRegistry.register(store => next => action => {
|
||||
const result = next(action);
|
||||
|
||||
switch (action.type) {
|
||||
case SET_CONFIG: {
|
||||
const { initialWidth, stageFilmstripParticipants } = action.config.filmstrip || {};
|
||||
const { dispatch, getState } = store;
|
||||
const state = getState();
|
||||
|
||||
if (stageFilmstripParticipants !== undefined) {
|
||||
dispatch(updateSettings({
|
||||
maxStageParticipants: stageFilmstripParticipants
|
||||
}));
|
||||
}
|
||||
|
||||
if (initialWidth) {
|
||||
dispatch(setUserFilmstripWidth(initialWidth));
|
||||
}
|
||||
|
||||
// FIXME On Web we rely on the global 'config' variable which gets altered
|
||||
// multiple times, before it makes it to the reducer. At some point it may
|
||||
// not be the global variable which is being modified anymore due to
|
||||
// different merge methods being used along the way. The global variable
|
||||
// must be synchronized with the final state resolved by the reducer.
|
||||
if (typeof window.config !== 'undefined') {
|
||||
window.config = state['features/base/config'];
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
});
|
||||
@@ -1,5 +1,6 @@
|
||||
import React, { PureComponent, ReactNode } from 'react';
|
||||
import { SafeAreaView, ScrollView, View, ViewStyle } from 'react-native';
|
||||
import { ScrollView, View, ViewStyle } from 'react-native';
|
||||
import { SafeAreaView } from 'react-native-safe-area-context';
|
||||
import { connect } from 'react-redux';
|
||||
|
||||
import { IStore } from '../../../../app/types';
|
||||
@@ -122,6 +123,7 @@ class BottomSheet extends PureComponent<Props> {
|
||||
style = { styles.sheetAreaCover } />
|
||||
{ renderHeader?.() }
|
||||
<SafeAreaView
|
||||
edges = { [ 'left', 'right' ] }
|
||||
style = { [
|
||||
styles.sheetItemContainer,
|
||||
renderHeader
|
||||
|
||||
@@ -80,3 +80,8 @@ export const LOWER_HAND_AUDIO_LEVEL = 0.2;
|
||||
* Icon URL for the whiteboard participant.
|
||||
*/
|
||||
export const WHITEBOARD_PARTICIPANT_ICON = IconWhiteboard;
|
||||
|
||||
/**
|
||||
* The ID used for non-participant (system) messages coming from a transcriber.
|
||||
*/
|
||||
export const TRANSCRIBER_ID = 'transcriber';
|
||||
|
||||
@@ -22,17 +22,35 @@ export function preloadImage(
|
||||
return new Promise((resolve, reject) => {
|
||||
const image = document.createElement('img');
|
||||
|
||||
// Cleanup function to release resources and prevent memory leaks
|
||||
const cleanup = () => {
|
||||
// Clear event handlers to break circular references
|
||||
image.onload = null;
|
||||
image.onerror = null;
|
||||
|
||||
// Clear src to stop any pending load and allow GC
|
||||
image.src = '';
|
||||
};
|
||||
|
||||
if (useCORS) {
|
||||
image.setAttribute('crossOrigin', '');
|
||||
}
|
||||
image.onload = () => resolve({
|
||||
src,
|
||||
isUsingCORS: useCORS
|
||||
});
|
||||
|
||||
image.onload = () => {
|
||||
cleanup();
|
||||
resolve({
|
||||
src,
|
||||
isUsingCORS: useCORS
|
||||
});
|
||||
};
|
||||
|
||||
image.onerror = error => {
|
||||
cleanup();
|
||||
|
||||
if (tryOnce) {
|
||||
reject(error);
|
||||
} else {
|
||||
// Retry with different CORS mode
|
||||
preloadImage(src, !useCORS, true)
|
||||
.then(resolve)
|
||||
.catch(reject);
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import React, { Component } from 'react';
|
||||
import {
|
||||
SectionList as ReactNativeSectionList,
|
||||
SafeAreaView,
|
||||
SectionListRenderItemInfo,
|
||||
ViewStyle
|
||||
} from 'react-native';
|
||||
import { SafeAreaView } from 'react-native-safe-area-context';
|
||||
|
||||
import { Item, Section } from '../../types';
|
||||
|
||||
@@ -76,6 +76,7 @@ export default class SectionList extends Component<IProps> {
|
||||
override render() {
|
||||
return (
|
||||
<SafeAreaView
|
||||
edges = { [ 'left', 'right' ] }
|
||||
style = { styles.container as ViewStyle } >
|
||||
<ReactNativeSectionList
|
||||
ListEmptyComponent = { this.props.ListEmptyComponent }
|
||||
|
||||
@@ -24,6 +24,7 @@ import { ASPECT_RATIO_NARROW, ASPECT_RATIO_WIDE } from './constants';
|
||||
* determine whether and how to render it.
|
||||
*/
|
||||
const REDUCED_UI_THRESHOLD = 300;
|
||||
const WEB_REDUCED_UI_THRESHOLD = 320;
|
||||
|
||||
/**
|
||||
* Indicates a resize of the window.
|
||||
@@ -49,6 +50,8 @@ export function clientResized(clientWidth: number, clientHeight: number) {
|
||||
}
|
||||
|
||||
availableWidth -= getParticipantsPaneWidth(state);
|
||||
|
||||
dispatch(setReducedUI(availableWidth, clientHeight));
|
||||
}
|
||||
|
||||
batch(() => {
|
||||
@@ -106,7 +109,10 @@ export function setAspectRatio(width: number, height: number) {
|
||||
*/
|
||||
export function setReducedUI(width: number, height: number) {
|
||||
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
|
||||
const reducedUI = Math.min(width, height) < REDUCED_UI_THRESHOLD;
|
||||
const threshold = navigator.product === 'ReactNative'
|
||||
? REDUCED_UI_THRESHOLD
|
||||
: WEB_REDUCED_UI_THRESHOLD;
|
||||
const reducedUI = Math.min(width, height) < threshold;
|
||||
|
||||
if (reducedUI !== getState()['features/base/responsive-ui'].reducedUI) {
|
||||
return dispatch({
|
||||
|
||||
@@ -33,7 +33,6 @@ import {
|
||||
isUserInteractionRequiredForUnmute,
|
||||
setTrackMuted
|
||||
} from './functions';
|
||||
import './subscriber';
|
||||
|
||||
/**
|
||||
* Middleware that captures LIB_DID_DISPOSE and LIB_DID_INIT actions and,
|
||||
|
||||
@@ -38,6 +38,7 @@ import {
|
||||
import { ITrack, ITrackOptions } from './types';
|
||||
|
||||
import './middleware.any';
|
||||
import './subscriber.web';
|
||||
|
||||
/**
|
||||
* Middleware that captures LIB_DID_DISPOSE and LIB_DID_INIT actions and,
|
||||
@@ -142,6 +143,13 @@ MiddlewareRegistry.register(store => next => action => {
|
||||
|
||||
if (typeof action.track?.muted !== 'undefined' && participantID && !local) {
|
||||
logTracksForParticipant(store.getState()['features/base/tracks'], participantID, 'Track updated');
|
||||
|
||||
// Notify external API when remote participant mutes/unmutes themselves
|
||||
const mediaType = isVideoTrack
|
||||
? (jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP ? 'desktop' : 'video')
|
||||
: 'audio';
|
||||
|
||||
APP.API.notifyParticipantMuted(participantID, action.track.muted, mediaType, true);
|
||||
}
|
||||
|
||||
return result;
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
import { isEqual, sortBy } from 'lodash-es';
|
||||
|
||||
import { MEDIA_TYPE } from '../media/constants';
|
||||
import { getScreenshareParticipantIds } from '../participants/functions';
|
||||
import StateListenerRegistry from '../redux/StateListenerRegistry';
|
||||
|
||||
import { isLocalTrackMuted } from './functions';
|
||||
|
||||
/**
|
||||
* Notifies when the list of currently sharing participants changes.
|
||||
*/
|
||||
StateListenerRegistry.register(
|
||||
/* selector */ state => getScreenshareParticipantIds(state),
|
||||
/* listener */ (participantIDs, store, previousParticipantIDs) => {
|
||||
if (typeof APP !== 'object') {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isEqual(sortBy(participantIDs), sortBy(previousParticipantIDs))) {
|
||||
APP.API.notifySharingParticipantsChanged(participantIDs);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
/**
|
||||
* Notifies when the local video mute state changes.
|
||||
*/
|
||||
StateListenerRegistry.register(
|
||||
/* selector */ state => isLocalTrackMuted(state['features/base/tracks'], MEDIA_TYPE.VIDEO),
|
||||
/* listener */ (muted, store, previousMuted) => {
|
||||
if (typeof APP !== 'object') {
|
||||
return;
|
||||
}
|
||||
|
||||
if (muted !== previousMuted) {
|
||||
APP.API.notifyVideoMutedStatusChanged(muted);
|
||||
}
|
||||
}
|
||||
);
|
||||
52 react/features/base/tracks/subscriber.web.ts (new file)
@@ -0,0 +1,52 @@
|
||||
import { isEqual, sortBy } from 'lodash-es';
|
||||
|
||||
// @ts-expect-error
|
||||
import VideoLayout from '../../../../modules/UI/videolayout/VideoLayout';
|
||||
import { getAutoPinSetting } from '../../video-layout/functions.any';
|
||||
import { MEDIA_TYPE } from '../media/constants';
|
||||
import { getScreenshareParticipantIds } from '../participants/functions';
|
||||
import StateListenerRegistry from '../redux/StateListenerRegistry';
|
||||
|
||||
import { isLocalTrackMuted } from './functions';
|
||||
|
||||
/**
|
||||
* Notifies when the list of currently sharing participants changes.
|
||||
*/
|
||||
StateListenerRegistry.register(
|
||||
/* selector */ state => getScreenshareParticipantIds(state),
|
||||
/* listener */ (participantIDs, store, previousParticipantIDs) => {
|
||||
if (getAutoPinSetting() && participantIDs !== previousParticipantIDs) {
|
||||
const { participantId } = store.getState()['features/large-video'];
|
||||
|
||||
// Check if any new screenshare participants were added
|
||||
const newParticipants = participantIDs.filter((id: string) => !previousParticipantIDs.includes(id));
|
||||
|
||||
// If the current large video participant is a new screensharer, update the display. This is needed when
|
||||
// the track is created much later after the action for auto-pinning is dispatched. This usually happens in
|
||||
// very large meetings if the screenshare was already ongoing when the participant joined. The track is
|
||||
// signaled only after the receiver constraints with SS source id is processed by the bridge but the
|
||||
// auto-pinning action is dispatched when the participant tile is created as soon as the presence is
|
||||
// received.
|
||||
if (participantId && newParticipants.includes(participantId)) {
|
||||
VideoLayout.updateLargeVideo(participantId, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (!isEqual(sortBy(participantIDs), sortBy(previousParticipantIDs))) {
|
||||
APP.API.notifySharingParticipantsChanged(participantIDs);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
/**
|
||||
* Notifies when the local video mute state changes.
|
||||
*/
|
||||
StateListenerRegistry.register(
|
||||
/* selector */ state => isLocalTrackMuted(state['features/base/tracks'], MEDIA_TYPE.VIDEO),
|
||||
/* listener */ (muted, store, previousMuted) => {
|
||||
if (muted !== previousMuted) {
|
||||
APP.API.notifyVideoMutedStatusChanged(muted);
|
||||
}
|
||||
}
|
||||
);
|
||||
@@ -22,7 +22,6 @@ export function assignIfDefined(target: Object, source: Object) {
|
||||
return to;
|
||||
}
|
||||
|
||||
|
||||
const MATCH_OPERATOR_REGEXP = /[|\\{}()[\]^$+*?.-]/g;
|
||||
|
||||
/**
|
||||
@@ -79,6 +78,21 @@ export function getJitsiMeetGlobalNS() {
|
||||
return window.JitsiMeetJS.app;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the Electron-specific global namespace.
|
||||
*
|
||||
* @returns {Object} The Electron namespace.
|
||||
*/
|
||||
export function getElectronGlobalNS() {
|
||||
const globalNS = getJitsiMeetGlobalNS();
|
||||
|
||||
if (!globalNS.electron) {
|
||||
globalNS.electron = {};
|
||||
}
|
||||
|
||||
return globalNS.electron;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the object that stores the connection times.
|
||||
*
|
||||
|
||||
@@ -78,6 +78,11 @@ interface IProps extends AbstractProps {
|
||||
*/
|
||||
_isResizing: boolean;
|
||||
|
||||
/**
|
||||
* The indicator which determines whether the UI is reduced.
|
||||
*/
|
||||
_reducedUI: boolean;
|
||||
|
||||
/**
|
||||
* Whether or not to block chat access with a nickname input form.
|
||||
*/
|
||||
@@ -227,6 +232,7 @@ const Chat = ({
|
||||
_focusedTab,
|
||||
_isResizing,
|
||||
_messages,
|
||||
_reducedUI,
|
||||
_unreadMessagesCount,
|
||||
_unreadPollsCount,
|
||||
_unreadFilesCount,
|
||||
@@ -567,6 +573,10 @@ const Chat = ({
|
||||
);
|
||||
}
|
||||
|
||||
if (_reducedUI) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
_isOpen ? <div
|
||||
className = { classes.container }
|
||||
@@ -623,6 +633,7 @@ function _mapStateToProps(state: IReduxState, _ownProps: any) {
|
||||
const { isOpen, messages, unreadMessagesCount, unreadFilesCount, width, isResizing } = state['features/chat'];
|
||||
const { unreadPollsCount } = state['features/polls'];
|
||||
const _localParticipant = getLocalParticipant(state);
|
||||
const { reducedUI } = state['features/base/responsive-ui'];
|
||||
|
||||
return {
|
||||
_isModal: window.innerWidth <= SMALL_WIDTH_THRESHOLD,
|
||||
@@ -633,6 +644,7 @@ function _mapStateToProps(state: IReduxState, _ownProps: any) {
|
||||
_isFileSharingTabEnabled: isFileSharingEnabled(state),
|
||||
_focusedTab: getFocusedTab(state),
|
||||
_messages: messages,
|
||||
_reducedUI: reducedUI,
|
||||
_unreadMessagesCount: unreadMessagesCount,
|
||||
_unreadPollsCount: unreadPollsCount,
|
||||
_unreadFilesCount: unreadFilesCount,
|
||||
|
||||
@@ -4,10 +4,12 @@ import { useDispatch, useSelector } from 'react-redux';
|
||||
import { makeStyles } from 'tss-react/mui';
|
||||
|
||||
import { IReduxState } from '../../../app/types';
|
||||
import { openDialog } from '../../../base/dialog/actions';
|
||||
import Icon from '../../../base/icons/components/Icon';
|
||||
import { IconSubtitles } from '../../../base/icons/svg';
|
||||
import Button from '../../../base/ui/components/web/Button';
|
||||
import { groupMessagesBySender } from '../../../base/util/messageGrouping';
|
||||
import { StartRecordingDialog } from '../../../recording/components/Recording';
|
||||
import { setRequestingSubtitles } from '../../../subtitles/actions.any';
|
||||
import LanguageSelector from '../../../subtitles/components/web/LanguageSelector';
|
||||
import { canStartSubtitles } from '../../../subtitles/functions.any';
|
||||
@@ -88,6 +90,8 @@ export default function ClosedCaptionsTab() {
|
||||
const _canStartSubtitles = useSelector(canStartSubtitles);
|
||||
const [ isButtonPressed, setButtonPressed ] = useState(false);
|
||||
const subtitlesError = useSelector((state: IReduxState) => state['features/subtitles']._hasError);
|
||||
const isAsyncTranscriptionEnabled = useSelector((state: IReduxState) =>
|
||||
state['features/base/conference'].conference?.getMetadataHandler()?.getMetadata()?.asyncTranscription);
|
||||
|
||||
const filteredSubtitles = useMemo(() => {
|
||||
// First, create a map of transcription messages by message ID
|
||||
@@ -121,14 +125,21 @@ export default function ClosedCaptionsTab() {
|
||||
groupMessagesBySender(filteredSubtitles), [ filteredSubtitles ]);
|
||||
|
||||
const startClosedCaptions = useCallback(() => {
|
||||
if (isButtonPressed) {
|
||||
return;
|
||||
if (isAsyncTranscriptionEnabled) {
|
||||
dispatch(openDialog('StartRecordingDialog', StartRecordingDialog, {
|
||||
recordAudioAndVideo: false
|
||||
}));
|
||||
} else {
|
||||
if (isButtonPressed) {
|
||||
return;
|
||||
}
|
||||
dispatch(setRequestingSubtitles(true, false, null));
|
||||
setButtonPressed(true);
|
||||
}
|
||||
dispatch(setRequestingSubtitles(true, false, null));
|
||||
setButtonPressed(true);
|
||||
}, [ dispatch, isButtonPressed, setButtonPressed ]);
|
||||
|
||||
if (subtitlesError && isButtonPressed) {
|
||||
}, [ isAsyncTranscriptionEnabled, dispatch, isButtonPressed, openDialog, setButtonPressed ]);
|
||||
|
||||
if (subtitlesError && isButtonPressed && !isAsyncTranscriptionEnabled) {
|
||||
setButtonPressed(false);
|
||||
}
|
||||
|
||||
@@ -148,7 +159,7 @@ export default function ClosedCaptionsTab() {
|
||||
);
|
||||
}
|
||||
|
||||
if (isButtonPressed) {
|
||||
if (isButtonPressed && !isAsyncTranscriptionEnabled) {
|
||||
setButtonPressed(false);
|
||||
}
|
||||
|
||||
@@ -165,7 +176,7 @@ export default function ClosedCaptionsTab() {
|
||||
);
|
||||
}
|
||||
|
||||
if (isButtonPressed) {
|
||||
if (isButtonPressed && !isAsyncTranscriptionEnabled) {
|
||||
setButtonPressed(false);
|
||||
}
|
||||
|
||||
|
||||
@@ -3,11 +3,10 @@ import React, { useCallback } from 'react';
|
||||
import {
|
||||
BackHandler,
|
||||
NativeModules,
|
||||
SafeAreaView,
|
||||
View,
|
||||
ViewStyle
|
||||
} from 'react-native';
|
||||
import { EdgeInsets, withSafeAreaInsets } from 'react-native-safe-area-context';
|
||||
import { Edge, EdgeInsets, SafeAreaView, withSafeAreaInsets } from 'react-native-safe-area-context';
|
||||
import { connect, useDispatch } from 'react-redux';
|
||||
|
||||
import { appNavigate } from '../../../app/actions.native';
|
||||
@@ -436,6 +435,7 @@ class Conference extends AbstractConference<IProps, State> {
|
||||
</View>
|
||||
|
||||
<SafeAreaView
|
||||
edges = { [ 'left', 'right', 'top' ] }
|
||||
pointerEvents = 'box-none'
|
||||
style = {
|
||||
(_toolboxVisible
|
||||
@@ -444,6 +444,7 @@ class Conference extends AbstractConference<IProps, State> {
|
||||
<TitleBar _createOnPress = { this._createOnPress } />
|
||||
</SafeAreaView>
|
||||
<SafeAreaView
|
||||
edges = { [ 'bottom', 'left', 'right', !_toolboxVisible && 'top' ].filter(Boolean) as Edge[] }
|
||||
pointerEvents = 'box-none'
|
||||
style = {
|
||||
(_toolboxVisible
|
||||
|
||||
@@ -90,6 +90,11 @@ interface IProps extends AbstractProps, WithTranslation {
|
||||
*/
|
||||
_overflowDrawer: boolean;
|
||||
|
||||
/**
|
||||
* The indicator which determines whether the UI is reduced.
|
||||
*/
|
||||
_reducedUI: boolean;
|
||||
|
||||
/**
|
||||
* Name for this conference room.
|
||||
*/
|
||||
@@ -226,12 +231,45 @@ class Conference extends AbstractConference<IProps, any> {
|
||||
_layoutClassName,
|
||||
_notificationsVisible,
|
||||
_overflowDrawer,
|
||||
_reducedUI,
|
||||
_showLobby,
|
||||
_showPrejoin,
|
||||
_showVisitorsQueue,
|
||||
t
|
||||
} = this.props;
|
||||
|
||||
if (_reducedUI) {
|
||||
return (
|
||||
<div
|
||||
id = 'layout_wrapper'
|
||||
onMouseEnter = { this._onMouseEnter }
|
||||
onMouseLeave = { this._onMouseLeave }
|
||||
onMouseMove = { this._onMouseMove }
|
||||
ref = { this._setBackground }>
|
||||
<Chat />
|
||||
<div
|
||||
className = { _layoutClassName }
|
||||
id = 'videoconference_page'
|
||||
onMouseMove = { isMobileBrowser() ? undefined : this._onShowToolbar }>
|
||||
<ConferenceInfo />
|
||||
<Notice />
|
||||
<div
|
||||
id = 'videospace'
|
||||
onTouchStart = { this._onVideospaceTouchStart }>
|
||||
<LargeVideo />
|
||||
</div>
|
||||
<span
|
||||
aria-level = { 1 }
|
||||
className = 'sr-only'
|
||||
role = 'heading'>
|
||||
{ t('toolbar.accessibilityLabel.heading') }
|
||||
</span>
|
||||
<Toolbox />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
id = 'layout_wrapper'
|
||||
@@ -418,6 +456,7 @@ class Conference extends AbstractConference<IProps, any> {
|
||||
function _mapStateToProps(state: IReduxState) {
|
||||
const { backgroundAlpha, mouseMoveCallbackInterval } = state['features/base/config'];
|
||||
const { overflowDrawer } = state['features/toolbox'];
|
||||
const { reducedUI } = state['features/base/responsive-ui'];
|
||||
|
||||
return {
|
||||
...abstractMapStateToProps(state),
|
||||
@@ -426,6 +465,7 @@ function _mapStateToProps(state: IReduxState) {
|
||||
_layoutClassName: LAYOUT_CLASSNAMES[getCurrentLayout(state) ?? ''],
|
||||
_mouseMoveCallbackInterval: mouseMoveCallbackInterval,
|
||||
_overflowDrawer: overflowDrawer,
|
||||
_reducedUI: reducedUI,
|
||||
_roomName: getConferenceNameForTitle(state),
|
||||
_showLobby: getIsLobbyVisible(state),
|
||||
_showPrejoin: isPrejoinPageVisible(state),
|
||||
|
||||
@@ -34,6 +34,11 @@ interface IProps {
|
||||
autoHide?: string[];
|
||||
};
|
||||
|
||||
/**
|
||||
* The indicator which determines whether the UI is reduced.
|
||||
*/
|
||||
_reducedUI: boolean;
|
||||
|
||||
/**
|
||||
* Indicates whether the component should be visible or not.
|
||||
*/
|
||||
@@ -194,6 +199,12 @@ class ConferenceInfo extends Component<IProps> {
|
||||
* @returns {ReactElement}
|
||||
*/
|
||||
override render() {
|
||||
const { _reducedUI } = this.props;
|
||||
|
||||
if (_reducedUI) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
className = 'details-container'
|
||||
@@ -217,9 +228,12 @@ class ConferenceInfo extends Component<IProps> {
|
||||
* }}
|
||||
*/
|
||||
function _mapStateToProps(state: IReduxState) {
|
||||
const { reducedUI } = state['features/base/responsive-ui'];
|
||||
|
||||
return {
|
||||
_conferenceInfo: getConferenceInfo(state),
|
||||
_reducedUI: reducedUI,
|
||||
_visible: isToolboxVisible(state),
|
||||
_conferenceInfo: getConferenceInfo(state)
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -25,9 +25,10 @@ const useStyles = makeStyles()(theme => {
|
||||
|
||||
const Notice = () => {
|
||||
const message = useSelector((state: IReduxState) => state['features/base/config'].noticeMessage);
|
||||
const { reducedUI } = useSelector((state: IReduxState) => state['features/base/responsive-ui']);
|
||||
const { classes } = useStyles();
|
||||
|
||||
if (!message) {
|
||||
if (!message || reducedUI) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
@@ -13,6 +13,8 @@ import Tooltip from '../../../base/tooltip/components/Tooltip';
|
||||
import { getIndicatorsTooltipPosition } from '../../../filmstrip/functions.web';
|
||||
import { appendSuffix } from '../../functions';
|
||||
|
||||
import { getDisplayNameColor } from './styles';
|
||||
|
||||
/**
|
||||
* The type of the React {@code Component} props of {@link DisplayName}.
|
||||
*/
|
||||
@@ -49,7 +51,7 @@ const useStyles = makeStyles()(theme => {
|
||||
return {
|
||||
displayName: {
|
||||
...theme.typography.labelBold,
|
||||
color: theme.palette.text01,
|
||||
color: getDisplayNameColor(theme),
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
whiteSpace: 'nowrap'
|
||||
@@ -62,7 +64,7 @@ const useStyles = makeStyles()(theme => {
|
||||
boxShadow: 'none',
|
||||
padding: 0,
|
||||
...theme.typography.labelBold,
|
||||
color: theme.palette.text01
|
||||
color: getDisplayNameColor(theme)
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
@@ -121,3 +121,19 @@ export function scaleFontProperty(
|
||||
|
||||
return parseFloat(calculatedRemValue.toFixed(3));
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Default text color for display name.
|
||||
*/
|
||||
export const DISPLAY_NAME_DEFAULT_COLOR = '#FFFFFF';
|
||||
|
||||
/**
|
||||
* Returns the text color for display name from the theme.
|
||||
*
|
||||
* @param {Theme} theme - The MUI theme.
|
||||
* @returns {string} The text color.
|
||||
*/
|
||||
export const getDisplayNameColor = (theme: Theme): string =>
|
||||
theme.palette?.text01 || DISPLAY_NAME_DEFAULT_COLOR;
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React, { PureComponent } from 'react';
|
||||
import { FlatList, ViewStyle, ViewToken } from 'react-native';
|
||||
import { SafeAreaView, withSafeAreaInsets } from 'react-native-safe-area-context';
|
||||
import { Edge, SafeAreaView, withSafeAreaInsets } from 'react-native-safe-area-context';
|
||||
import { connect } from 'react-redux';
|
||||
|
||||
import { IReduxState, IStore } from '../../../app/types';
|
||||
@@ -272,8 +272,8 @@ class Filmstrip extends PureComponent<IProps> {
|
||||
}
|
||||
|
||||
return (
|
||||
<SafeAreaView // @ts-ignore
|
||||
edges = { [ bottomEdge && 'bottom', 'left', 'right' ].filter(Boolean) }
|
||||
<SafeAreaView
|
||||
edges = { [ bottomEdge && 'bottom', 'left', 'right' ].filter(Boolean) as Edge[] }
|
||||
style = { filmstripStyle as ViewStyle }>
|
||||
{
|
||||
this._separateLocalThumbnail
|
||||
|
||||
@@ -2,18 +2,17 @@ import React, { PureComponent } from 'react';
|
||||
import {
|
||||
FlatList,
|
||||
GestureResponderEvent,
|
||||
SafeAreaView,
|
||||
TouchableWithoutFeedback,
|
||||
ViewToken
|
||||
} from 'react-native';
|
||||
import { EdgeInsets, withSafeAreaInsets } from 'react-native-safe-area-context';
|
||||
import { EdgeInsets, SafeAreaView, withSafeAreaInsets } from 'react-native-safe-area-context';
|
||||
import { connect } from 'react-redux';
|
||||
|
||||
import { IReduxState, IStore } from '../../../app/types';
|
||||
import { getLocalParticipant, getParticipantCountWithFake } from '../../../base/participants/functions';
|
||||
import { ILocalParticipant } from '../../../base/participants/types';
|
||||
import { getHideSelfView } from '../../../base/settings/functions.any';
|
||||
import { setVisibleRemoteParticipants } from '../../actions.web';
|
||||
import { setVisibleRemoteParticipants } from '../../actions.native';
|
||||
|
||||
import Thumbnail from './Thumbnail';
|
||||
import styles from './styles';
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import { Theme } from '@mui/material/styles';
|
||||
|
||||
import { IReduxState } from '../app/types';
|
||||
import { IStateful } from '../base/app/types';
|
||||
import { isMobileBrowser } from '../base/environment/utils';
|
||||
@@ -830,3 +832,13 @@ export function isTopPanelEnabled(state: IReduxState) {
|
||||
return !filmstrip?.disableTopPanel && participantsCount >= (filmstrip?.minParticipantCountForTopPanel ?? 50);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the thumbnail background color from the theme.
|
||||
*
|
||||
* @param {Theme} theme - The MUI theme.
|
||||
* @returns {string} The background color.
|
||||
*/
|
||||
export function getThumbnailBackgroundColor(theme: Theme): string {
|
||||
return theme.palette.uiBackground;
|
||||
}
|
||||
|
||||
@@ -58,7 +58,7 @@ import { isEnabled as isDropboxEnabled } from '../../dropbox/functions.native';
|
||||
import { hideNotification, showNotification } from '../../notifications/actions';
|
||||
import { NOTIFICATION_TIMEOUT_TYPE, NOTIFICATION_TYPE } from '../../notifications/constants';
|
||||
import { RECORDING_SESSION_UPDATED } from '../../recording/actionTypes';
|
||||
import { RECORDING_METADATA_ID, RECORDING_TYPES } from '../../recording/constants';
|
||||
import { RECORDING_TYPES } from '../../recording/constants';
|
||||
import { getActiveSession } from '../../recording/functions';
|
||||
import { setRequestingSubtitles } from '../../subtitles/actions.any';
|
||||
import { CUSTOM_BUTTON_PRESSED } from '../../toolbox/actionTypes';
|
||||
@@ -588,10 +588,7 @@ function _registerForNativeEvents(store: IStore) {
|
||||
}
|
||||
|
||||
if (transcription) {
|
||||
store.dispatch(setRequestingSubtitles(true, false, null, true));
|
||||
conference.getMetadataHandler().setMetadata(RECORDING_METADATA_ID, {
|
||||
isTranscribingEnabled: true
|
||||
});
|
||||
store.dispatch(setRequestingSubtitles(true, false, null));
|
||||
}
|
||||
});
|
||||
|
||||
@@ -607,9 +604,6 @@ function _registerForNativeEvents(store: IStore) {
|
||||
|
||||
if (transcription) {
|
||||
store.dispatch(setRequestingSubtitles(false, false, null));
|
||||
conference.getMetadataHandler().setMetadata(RECORDING_METADATA_ID, {
|
||||
isTranscribingEnabled: false
|
||||
});
|
||||
}
|
||||
|
||||
if (![ JitsiRecordingConstants.mode.FILE, JitsiRecordingConstants.mode.STREAM ].includes(mode)) {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import React from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { SafeAreaView, Text, View, ViewStyle } from 'react-native';
|
||||
import { Text, View, ViewStyle } from 'react-native';
|
||||
import { SafeAreaView } from 'react-native-safe-area-context';
|
||||
|
||||
import JitsiScreen from '../../../base/modal/components/JitsiScreen';
|
||||
import LoadingIndicator from '../../../base/react/components/native/LoadingIndicator';
|
||||
|
||||
4
react/features/pip/actionTypes.ts
Normal file
@@ -0,0 +1,4 @@
/**
 * Action type to set Picture-in-Picture active state.
 */
export const SET_PIP_ACTIVE = 'SET_PIP_ACTIVE';
193
react/features/pip/actions.ts
Normal file
@@ -0,0 +1,193 @@
|
||||
import { IStore } from '../app/types';
|
||||
import { MEDIA_TYPE } from '../base/media/constants';
|
||||
import { isLocalTrackMuted } from '../base/tracks/functions.any';
|
||||
import { handleToggleVideoMuted } from '../toolbox/actions.any';
|
||||
import { muteLocal } from '../video-menu/actions.any';
|
||||
|
||||
import { SET_PIP_ACTIVE } from './actionTypes';
|
||||
import {
|
||||
cleanupMediaSessionHandlers,
|
||||
enterPiP,
|
||||
setupMediaSessionHandlers,
|
||||
shouldShowPiP
|
||||
} from './functions';
|
||||
import logger from './logger';
|
||||
|
||||
/**
|
||||
* Action to set Picture-in-Picture active state.
|
||||
*
|
||||
* @param {boolean} isPiPActive - Whether PiP is active.
|
||||
* @returns {{
|
||||
* type: SET_PIP_ACTIVE,
|
||||
* isPiPActive: boolean
|
||||
* }}
|
||||
*/
|
||||
export function setPiPActive(isPiPActive: boolean) {
|
||||
return {
|
||||
type: SET_PIP_ACTIVE,
|
||||
isPiPActive
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Toggles audio mute from PiP MediaSession controls.
|
||||
* Uses the exact same logic as the toolbar audio button, including the GUM pending state.
|
||||
*
|
||||
* @returns {Function}
|
||||
*/
|
||||
export function toggleAudioFromPiP() {
|
||||
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
|
||||
const state = getState();
|
||||
const audioMuted = isLocalTrackMuted(state['features/base/tracks'], MEDIA_TYPE.AUDIO);
|
||||
|
||||
// Use the exact same action as toolbar button.
|
||||
dispatch(muteLocal(!audioMuted, MEDIA_TYPE.AUDIO));
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Toggles video mute from PiP MediaSession controls.
|
||||
* Uses the exact same logic as the toolbar video button, including the GUM pending state.
|
||||
*
|
||||
* @returns {Function}
|
||||
*/
|
||||
export function toggleVideoFromPiP() {
|
||||
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
|
||||
const state = getState();
|
||||
const videoMuted = isLocalTrackMuted(state['features/base/tracks'], MEDIA_TYPE.VIDEO);
|
||||
|
||||
// Use the exact same action as toolbar button (showUI=true, ensureTrack=true).
|
||||
dispatch(handleToggleVideoMuted(!videoMuted, true, true));
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Action to exit Picture-in-Picture mode.
|
||||
*
|
||||
* @returns {Function}
|
||||
*/
|
||||
export function exitPiP() {
|
||||
return (dispatch: IStore['dispatch']) => {
|
||||
if (document.pictureInPictureElement) {
|
||||
document.exitPictureInPicture()
|
||||
.then(() => {
|
||||
logger.debug('Exited Picture-in-Picture mode');
|
||||
})
|
||||
.catch((err: Error) => {
|
||||
logger.error(`Error while exiting PiP: ${err.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
dispatch(setPiPActive(false));
|
||||
cleanupMediaSessionHandlers();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Action to handle window blur or tab switch.
|
||||
* Enters PiP mode if not already active.
|
||||
*
|
||||
* @param {HTMLVideoElement} videoElement - The video element we will use for PiP.
|
||||
* @returns {Function}
|
||||
*/
|
||||
export function handleWindowBlur(videoElement: HTMLVideoElement) {
|
||||
return (_dispatch: IStore['dispatch'], getState: IStore['getState']) => {
|
||||
const state = getState();
|
||||
const isPiPActive = state['features/pip']?.isPiPActive;
|
||||
|
||||
if (!isPiPActive) {
|
||||
enterPiP(videoElement);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Action to handle window focus.
|
||||
* Exits PiP mode if currently active (matches old AOT behavior).
|
||||
*
|
||||
* @returns {Function}
|
||||
*/
|
||||
export function handleWindowFocus() {
|
||||
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
|
||||
const state = getState();
|
||||
const isPiPActive = state['features/pip']?.isPiPActive;
|
||||
|
||||
if (isPiPActive) {
|
||||
dispatch(exitPiP());
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Action to handle the browser's leavepictureinpicture event.
|
||||
* Updates state and cleans up MediaSession handlers.
|
||||
*
|
||||
* @returns {Function}
|
||||
*/
|
||||
export function handlePiPLeaveEvent() {
|
||||
return (dispatch: IStore['dispatch']) => {
|
||||
logger.log('Left Picture-in-Picture mode');
|
||||
|
||||
dispatch(setPiPActive(false));
|
||||
cleanupMediaSessionHandlers();
|
||||
APP.API.notifyPictureInPictureLeft();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Action to handle the browser's enterpictureinpicture event.
|
||||
* Updates state and sets up MediaSession handlers.
|
||||
*
|
||||
* @returns {Function}
|
||||
*/
|
||||
export function handlePipEnterEvent() {
|
||||
return (dispatch: IStore['dispatch']) => {
|
||||
logger.log('Entered Picture-in-Picture mode');
|
||||
|
||||
dispatch(setPiPActive(true));
|
||||
setupMediaSessionHandlers(dispatch);
|
||||
APP.API.notifyPictureInPictureEntered();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Shows Picture-in-Picture window.
|
||||
* Called from the external API when the iframe is no longer visible (IntersectionObserver).
|
||||
*
|
||||
* @returns {Function}
|
||||
*/
|
||||
export function showPiP() {
|
||||
return (_dispatch: IStore['dispatch'], getState: IStore['getState']) => {
|
||||
const state = getState();
|
||||
const isPiPActive = state['features/pip']?.isPiPActive;
|
||||
|
||||
if (!shouldShowPiP(state)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isPiPActive) {
|
||||
const videoElement = document.getElementById('pipVideo') as HTMLVideoElement;
|
||||
|
||||
if (videoElement) {
|
||||
enterPiP(videoElement);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Hides Picture-in-Picture window.
|
||||
* Called from external API when iframe becomes visible.
|
||||
*
|
||||
* @returns {Function}
|
||||
*/
|
||||
export function hidePiP() {
|
||||
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
|
||||
const state = getState();
|
||||
const isPiPActive = state['features/pip']?.isPiPActive;
|
||||
|
||||
if (isPiPActive) {
|
||||
dispatch(exitPiP());
|
||||
}
|
||||
};
|
||||
}
|
||||
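showPiP and hidePiP above are meant to be driven from the external API when the embedding iframe leaves or re-enters the viewport. A rough, hedged sketch of what the embedder-side plumbing could look like; the message shape and function name are illustrative, not part of the real iframe API:

```typescript
// Hypothetical embedder-side helper: watch the Jitsi iframe with an
// IntersectionObserver and ask the conference page to show or hide PiP.
function observeIframeVisibility(iframe: HTMLIFrameElement) {
    const observer = new IntersectionObserver(entries => {
        for (const entry of entries) {
            // The conference page would map this to showPiP() / hidePiP().
            iframe.contentWindow?.postMessage(
                { type: 'toggle-pip', show: !entry.isIntersecting }, '*');
        }
    }, { threshold: 0 });

    observer.observe(iframe);

    return () => observer.disconnect();
}
```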
24
react/features/pip/components/PiP.tsx
Normal file
@@ -0,0 +1,24 @@
import React from 'react';
import { useSelector } from 'react-redux';

import { shouldShowPiP } from '../functions';

import PiPVideoElement from './PiPVideoElement';

/**
 * Wrapper component that conditionally renders PiPVideoElement.
 * Prevents mounting when PiP is disabled or on prejoin without showOnPrejoin flag.
 *
 * @returns {React.ReactElement | null}
 */
function PiP() {
    const showPiP = useSelector(shouldShowPiP);

    if (!showPiP) {
        return null;
    }

    return <PiPVideoElement />;
}

export default PiP;
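Because the wrapper bails out via shouldShowPiP, it can be mounted unconditionally in the web tree. A hedged sketch only; the diff does not show the actual mount point, so the parent component here is an assumption:

```typescript
import React from 'react';

import PiP from './components/PiP';

// Illustrative parent component; the real mount point is not part of this diff.
const ConferenceShell: React.FC = () => (
    <>
        { /* ...rest of the conference UI... */ }
        <PiP />
    </>
);

export default ConferenceShell;
```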
233
react/features/pip/components/PiPVideoElement.tsx
Normal file
@@ -0,0 +1,233 @@
|
||||
import React, { useEffect, useRef } from 'react';
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { makeStyles } from 'tss-react/mui';
|
||||
|
||||
import { IReduxState, IStore } from '../../app/types';
|
||||
import { getAvatarFont, getAvatarInitialsColor } from '../../base/avatar/components/web/styles';
|
||||
import { getLocalParticipant, getParticipantDisplayName } from '../../base/participants/functions';
|
||||
import { isTrackStreamingStatusActive } from '../../connection-indicator/functions';
|
||||
import { getDisplayNameColor } from '../../display-name/components/web/styles';
|
||||
import { getThumbnailBackgroundColor } from '../../filmstrip/functions.web';
|
||||
import { getLargeVideoParticipant } from '../../large-video/functions';
|
||||
import { isPrejoinPageVisible } from '../../prejoin/functions.any';
|
||||
import { handlePiPLeaveEvent, handlePipEnterEvent, handleWindowBlur, handleWindowFocus } from '../actions';
|
||||
import { getPiPVideoTrack } from '../functions';
|
||||
import { useCanvasAvatar } from '../hooks';
|
||||
import logger from '../logger';
|
||||
|
||||
const useStyles = makeStyles()(() => {
|
||||
return {
|
||||
hiddenVideo: {
|
||||
position: 'absolute' as const,
|
||||
width: '1px',
|
||||
height: '1px',
|
||||
opacity: 0,
|
||||
pointerEvents: 'none' as const,
|
||||
left: '-9999px',
|
||||
top: '-9999px'
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
/**
|
||||
* Component that renders a hidden video element for Picture-in-Picture.
|
||||
* Automatically switches between real video track and canvas-based avatar
|
||||
* depending on video availability.
|
||||
*
|
||||
* @returns {JSX.Element} The hidden video element.
|
||||
*/
|
||||
const PiPVideoElement: React.FC = () => {
|
||||
const { classes, theme } = useStyles();
|
||||
const videoRef = useRef<HTMLVideoElement>(null);
|
||||
const previousTrackRef = useRef<any>(null);
|
||||
|
||||
// Redux selectors.
|
||||
const isOnPrejoin = useSelector(isPrejoinPageVisible);
|
||||
const localParticipant = useSelector(getLocalParticipant);
|
||||
const largeVideoParticipant = useSelector(getLargeVideoParticipant);
|
||||
|
||||
// Use local participant during prejoin, otherwise large video participant.
|
||||
const participant = isOnPrejoin ? localParticipant : largeVideoParticipant;
|
||||
|
||||
// Get appropriate video track based on prejoin state.
|
||||
const videoTrack = useSelector((state: IReduxState) =>
|
||||
getPiPVideoTrack(state, participant)
|
||||
);
|
||||
const displayName = useSelector((state: IReduxState) =>
|
||||
participant?.id
|
||||
? getParticipantDisplayName(state, participant.id)
|
||||
: ''
|
||||
);
|
||||
const customAvatarBackgrounds = useSelector((state: IReduxState) =>
|
||||
state['features/dynamic-branding']?.avatarBackgrounds || []
|
||||
);
|
||||
|
||||
const dispatch: IStore['dispatch'] = useDispatch();
|
||||
const avatarFont = getAvatarFont(theme);
|
||||
const fontFamily = (avatarFont as any).fontFamily ?? 'Inter, sans-serif';
|
||||
const initialsColor = getAvatarInitialsColor(theme);
|
||||
const displayNameColor = getDisplayNameColor(theme);
|
||||
const { canvasStreamRef } = useCanvasAvatar({
|
||||
participant,
|
||||
displayName,
|
||||
customAvatarBackgrounds,
|
||||
backgroundColor: getThumbnailBackgroundColor(theme),
|
||||
fontFamily,
|
||||
initialsColor,
|
||||
displayNameColor
|
||||
});
|
||||
|
||||
// Determine if we should show avatar instead of video.
|
||||
const shouldShowAvatar = !videoTrack
|
||||
|| videoTrack.muted
|
||||
|| (!videoTrack.local && !isTrackStreamingStatusActive(videoTrack));
|
||||
|
||||
/**
|
||||
* Effect: Handle switching between real video track and canvas avatar stream.
|
||||
*/
|
||||
useEffect(() => {
|
||||
const videoElement = videoRef.current;
|
||||
|
||||
if (!videoElement) {
|
||||
return;
|
||||
}
|
||||
|
||||
const previousTrack = previousTrackRef.current;
|
||||
|
||||
// Detach previous track.
|
||||
if (previousTrack?.jitsiTrack) {
|
||||
try {
|
||||
previousTrack.jitsiTrack.detach(videoElement);
|
||||
} catch (error) {
|
||||
logger.error('Error detaching previous track:', error);
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldShowAvatar) {
|
||||
// Use canvas stream for avatar.
|
||||
// Access ref inside effect - stream is created in useCanvasAvatar's effect.
|
||||
const canvasStream = canvasStreamRef.current;
|
||||
|
||||
// Only set srcObject if it's different to avoid interrupting playback.
|
||||
if (canvasStream && videoElement.srcObject !== canvasStream) {
|
||||
videoElement.srcObject = canvasStream;
|
||||
}
|
||||
} else if (videoTrack?.jitsiTrack) {
|
||||
// Attach real video track.
|
||||
videoTrack.jitsiTrack.attach(videoElement)
|
||||
.catch((error: Error) => {
|
||||
logger.error('Error attaching video track:', error);
|
||||
});
|
||||
}
|
||||
|
||||
previousTrackRef.current = videoTrack;
|
||||
|
||||
// Cleanup on unmount or track change.
|
||||
return () => {
|
||||
if (videoTrack?.jitsiTrack && videoElement) {
|
||||
try {
|
||||
videoTrack.jitsiTrack.detach(videoElement);
|
||||
} catch (error) {
|
||||
logger.error('Error during cleanup:', error);
|
||||
}
|
||||
}
|
||||
};
|
||||
}, [ videoTrack, shouldShowAvatar ]);
|
||||
|
||||
/**
|
||||
* Effect: Window blur/focus and visibility change listeners.
|
||||
* Enters PiP on blur, exits on focus (matches old AOT behavior).
|
||||
*/
|
||||
useEffect(() => {
|
||||
const videoElement = videoRef.current;
|
||||
|
||||
if (!videoElement) {
|
||||
return;
|
||||
}
|
||||
|
||||
const onWindowBlur = () => dispatch(handleWindowBlur(videoElement));
|
||||
const onWindowFocus = () => {
|
||||
|
||||
// In the use case where the PiP is closed by the 'X' or 'back to main window' buttons, this handler is
|
||||
// called before the leavepictureinpicture handler. From there we call document.exitPictureInPicture()
|
||||
// which seems to put Chrome into a weird state - document.exitPictureInPicture() never resolves, the
|
||||
// leavepictureinpicture event is never fired, and it is not possible to display PiP again.
// This is probably a browser bug. To work around it we add the 100ms timeout here. This way this event
|
||||
// is triggered after the leavepictureinpicture event and everything seems to work well.
|
||||
setTimeout(() => {
|
||||
dispatch(handleWindowFocus());
|
||||
}, 100);
|
||||
};
|
||||
const onVisibilityChange = () => {
|
||||
if (document.hidden) {
|
||||
onWindowBlur();
|
||||
}
|
||||
};
|
||||
|
||||
window.addEventListener('blur', onWindowBlur);
|
||||
window.addEventListener('focus', onWindowFocus);
|
||||
document.addEventListener('visibilitychange', onVisibilityChange);
|
||||
|
||||
// Check if window is already blurred on mount (handles PiP enable while app is in background).
|
||||
// Wait for video to be ready before attempting PiP (canvas stream may not be attached yet).
|
||||
const checkFocusAndEnterPiP = () => {
|
||||
if (!document.hasFocus()) {
|
||||
onWindowBlur();
|
||||
}
|
||||
};
|
||||
|
||||
if (videoElement.readyState >= 1) {
|
||||
// Video already has metadata loaded (e.g., real video track was already attached).
|
||||
checkFocusAndEnterPiP();
|
||||
} else {
|
||||
// Wait for video source to be ready (e.g., canvas stream being created).
|
||||
videoElement.addEventListener('loadedmetadata', checkFocusAndEnterPiP, { once: true });
|
||||
}
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('blur', onWindowBlur);
|
||||
window.removeEventListener('focus', onWindowFocus);
|
||||
document.removeEventListener('visibilitychange', onVisibilityChange);
|
||||
videoElement.removeEventListener('loadedmetadata', checkFocusAndEnterPiP);
|
||||
};
|
||||
}, [ dispatch ]);
|
||||
|
||||
/**
|
||||
* Effect: PiP enter/leave event listeners.
|
||||
* Updates Redux state when browser PiP events occur.
|
||||
*/
|
||||
useEffect(() => {
|
||||
const videoElement = videoRef.current;
|
||||
|
||||
if (!videoElement) {
|
||||
return;
|
||||
}
|
||||
|
||||
const onEnterPiP = () => {
|
||||
dispatch(handlePipEnterEvent());
|
||||
};
|
||||
const onLeavePiP = () => {
|
||||
dispatch(handlePiPLeaveEvent());
|
||||
};
|
||||
|
||||
videoElement.addEventListener('enterpictureinpicture', onEnterPiP);
|
||||
videoElement.addEventListener('leavepictureinpicture', onLeavePiP);
|
||||
|
||||
return () => {
|
||||
videoElement.removeEventListener('enterpictureinpicture', onEnterPiP);
|
||||
videoElement.removeEventListener('leavepictureinpicture', onLeavePiP);
|
||||
};
|
||||
}, [ dispatch ]);
|
||||
|
||||
return (
|
||||
<video
|
||||
autoPlay = { true }
|
||||
className = { classes.hiddenVideo }
|
||||
id = 'pipVideo'
|
||||
muted = { true }
|
||||
playsInline = { true }
|
||||
ref = { videoRef } />
|
||||
);
|
||||
};
|
||||
|
||||
export default PiPVideoElement;
|
||||
31
react/features/pip/external-api.shared.ts
Normal file
@@ -0,0 +1,31 @@
/**
 * Shared utilities for PiP feature used by external_api.js.
 *
 * IMPORTANT: Keep this file minimal with no heavy dependencies.
 * It's bundled into external_api.min.js and we want to keep that bundle slim.
 * Only import lightweight modules here.
 */

/**
 * Checks if current environment is Electron.
 * Inline check to avoid importing BrowserDetection and its ua-parser dependency.
 *
 * @returns {boolean} - True if running in Electron.
 */
function isElectron(): boolean {
    return navigator.userAgent.includes('Electron');
}

/**
 * Checks if PiP is enabled based on config and environment.
 *
 * @param {Object} pipConfig - The pip config object.
 * @returns {boolean} - True if PiP is enabled.
 */
export function isPiPEnabled(pipConfig?: { disabled?: boolean; }): boolean {
    if (pipConfig?.disabled) {
        return false;
    }

    return isElectron();
}
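isPiPEnabled is kept dependency-free on purpose so external_api.js can import it without dragging browser detection into the bundle. A small sketch of how a caller in the iframe API might gate its PiP wiring on it; everything except the isPiPEnabled call itself is an assumption:

```typescript
import { isPiPEnabled } from './external-api.shared';

// Illustrative gate inside external_api.js: only wire up PiP-related plumbing
// when we are in Electron and config.pip has not disabled the feature.
function maybeSetUpPiP(pipConfig: { disabled?: boolean; } | undefined, setUp: () => void) {
    if (!isPiPEnabled(pipConfig)) {
        return;
    }

    setUp();
}
```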
461
react/features/pip/functions.ts
Normal file
@@ -0,0 +1,461 @@
|
||||
import { IReduxState, IStore } from '../app/types';
|
||||
import { AVATAR_DEFAULT_BACKGROUND_COLOR } from '../base/avatar/components/web/styles';
|
||||
import { getAvatarColor, getInitials } from '../base/avatar/functions';
|
||||
import { leaveConference } from '../base/conference/actions';
|
||||
import { browser } from '../base/lib-jitsi-meet';
|
||||
import { IParticipant } from '../base/participants/types';
|
||||
import { getLocalVideoTrack } from '../base/tracks/functions.any';
|
||||
import { getVideoTrackByParticipant } from '../base/tracks/functions.web';
|
||||
import { isPrejoinPageVisible } from '../prejoin/functions.any';
|
||||
|
||||
import { toggleAudioFromPiP, toggleVideoFromPiP } from './actions';
|
||||
import { isPiPEnabled } from './external-api.shared';
|
||||
import logger from './logger';
|
||||
import { IMediaSessionState } from './types';
|
||||
|
||||
/**
|
||||
* Gets the appropriate video track for PiP based on prejoin state.
|
||||
* During prejoin, returns local video track. In conference, returns large video participant's track.
|
||||
*
|
||||
* @param {IReduxState} state - Redux state.
|
||||
* @param {IParticipant | undefined} participant - Participant to get track for.
|
||||
* @returns {ITrack | undefined} The video track or undefined.
|
||||
*/
|
||||
export function getPiPVideoTrack(state: IReduxState, participant: IParticipant | undefined) {
|
||||
const isOnPrejoin = isPrejoinPageVisible(state);
|
||||
|
||||
return isOnPrejoin
|
||||
? getLocalVideoTrack(state['features/base/tracks'])
|
||||
: getVideoTrackByParticipant(state, participant);
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if PiP should be shown based on config and current app state.
|
||||
* Checks if PiP is enabled and handles prejoin page visibility.
|
||||
*
|
||||
* @param {IReduxState} state - Redux state.
|
||||
* @returns {boolean} Whether PiP should be shown.
|
||||
*/
|
||||
export function shouldShowPiP(state: IReduxState): boolean {
|
||||
const pipConfig = state['features/base/config'].pip;
|
||||
|
||||
// Check if PiP is enabled at all.
|
||||
if (!isPiPEnabled(pipConfig)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check prejoin state.
|
||||
const isOnPrejoin = isPrejoinPageVisible(state);
|
||||
const showOnPrejoin = pipConfig?.showOnPrejoin ?? false;
|
||||
|
||||
// Don't show PiP on prejoin unless explicitly enabled.
|
||||
if (isOnPrejoin && !showOnPrejoin) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Draws an image-based avatar as a circular clipped image on canvas.
|
||||
*
|
||||
* @param {CanvasRenderingContext2D} ctx - Canvas 2D context.
|
||||
* @param {string} imageUrl - URL of the avatar image.
|
||||
* @param {boolean | undefined} useCORS - Whether to use CORS for image loading.
|
||||
* @param {number} centerX - X coordinate of avatar center.
|
||||
* @param {number} centerY - Y coordinate of avatar center.
|
||||
* @param {number} radius - Radius of the avatar circle.
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
export async function drawImageAvatar(
|
||||
ctx: CanvasRenderingContext2D,
|
||||
imageUrl: string,
|
||||
useCORS: boolean | undefined,
|
||||
centerX: number,
|
||||
centerY: number,
|
||||
radius: number
|
||||
): Promise<void> {
|
||||
const img = new Image();
|
||||
|
||||
if (useCORS) {
|
||||
img.crossOrigin = 'anonymous';
|
||||
}
|
||||
img.src = imageUrl;
|
||||
|
||||
try {
|
||||
await img.decode();
|
||||
ctx.save();
|
||||
ctx.beginPath();
|
||||
ctx.arc(centerX, centerY, radius, 0, Math.PI * 2);
|
||||
ctx.clip();
|
||||
const size = radius * 2;
|
||||
|
||||
ctx.drawImage(img, centerX - radius, centerY - radius, size, size);
|
||||
ctx.restore();
|
||||
} catch (error) {
|
||||
logger.error('Failed to draw image avatar', error);
|
||||
throw new Error('Image load failed');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Draws an initials-based avatar with a colored background on canvas.
|
||||
*
|
||||
* @param {CanvasRenderingContext2D} ctx - Canvas 2D context.
|
||||
* @param {string} name - Participant's display name.
|
||||
* @param {Array<string>} customAvatarBackgrounds - Custom avatar background colors.
|
||||
* @param {number} centerX - X coordinate of avatar center.
|
||||
* @param {number} centerY - Y coordinate of avatar center.
|
||||
* @param {number} radius - Radius of the avatar circle.
|
||||
* @param {string} fontFamily - Font family to use for initials.
|
||||
* @param {string} textColor - Color for the initials text.
|
||||
* @returns {void}
|
||||
*/
|
||||
export function drawInitialsAvatar(
|
||||
ctx: CanvasRenderingContext2D,
|
||||
name: string,
|
||||
customAvatarBackgrounds: Array<string>,
|
||||
centerX: number,
|
||||
centerY: number,
|
||||
radius: number,
|
||||
fontFamily: string,
|
||||
textColor: string
|
||||
) {
|
||||
const initials = getInitials(name);
|
||||
const color = getAvatarColor(name, customAvatarBackgrounds);
|
||||
|
||||
ctx.fillStyle = color;
|
||||
ctx.beginPath();
|
||||
ctx.arc(centerX, centerY, radius, 0, Math.PI * 2);
|
||||
ctx.fill();
|
||||
|
||||
ctx.fillStyle = textColor;
|
||||
ctx.font = `bold 80px ${fontFamily}`;
|
||||
ctx.textAlign = 'center';
|
||||
ctx.textBaseline = 'middle';
|
||||
ctx.fillText(initials, centerX, centerY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Draws the default user icon when no avatar is available.
|
||||
*
|
||||
* @param {CanvasRenderingContext2D} ctx - Canvas 2D context.
|
||||
* @param {HTMLImageElement | null} defaultIcon - Preloaded default icon image.
|
||||
* @param {number} centerX - X coordinate of icon center.
|
||||
* @param {number} centerY - Y coordinate of icon center.
|
||||
* @param {number} radius - Radius of the icon circle.
|
||||
* @returns {void}
|
||||
*/
|
||||
export function drawDefaultIcon(
|
||||
ctx: CanvasRenderingContext2D,
|
||||
defaultIcon: HTMLImageElement | null,
|
||||
centerX: number,
|
||||
centerY: number,
|
||||
radius: number
|
||||
) {
|
||||
ctx.fillStyle = AVATAR_DEFAULT_BACKGROUND_COLOR;
|
||||
ctx.beginPath();
|
||||
ctx.arc(centerX, centerY, radius, 0, Math.PI * 2);
|
||||
ctx.fill();
|
||||
|
||||
if (defaultIcon) {
|
||||
const iconSize = radius;
|
||||
const x = centerX - iconSize / 2;
|
||||
const y = centerY - iconSize / 2;
|
||||
|
||||
ctx.drawImage(defaultIcon, x, y, iconSize, iconSize);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Maximum character limit for display name before truncation.
|
||||
*/
|
||||
const DISPLAY_NAME_MAX_CHARS = 25;
|
||||
|
||||
/**
|
||||
* Draws the participant's display name below the avatar.
|
||||
* Truncates long names with ellipsis using a simple character limit.
|
||||
*
|
||||
* @param {CanvasRenderingContext2D} ctx - Canvas 2D context.
|
||||
* @param {string} displayName - Participant's display name.
|
||||
* @param {number} centerX - X coordinate of text center.
|
||||
* @param {number} y - Y coordinate of text top.
|
||||
* @param {string} fontFamily - Font family to use for display name.
|
||||
* @param {string} textColor - Color for the display name text.
|
||||
* @returns {void}
|
||||
*/
|
||||
export function drawDisplayName(
|
||||
ctx: CanvasRenderingContext2D,
|
||||
displayName: string,
|
||||
centerX: number,
|
||||
y: number,
|
||||
fontFamily: string,
|
||||
textColor: string
|
||||
) {
|
||||
const truncated = displayName.length > DISPLAY_NAME_MAX_CHARS
|
||||
? `${displayName.slice(0, DISPLAY_NAME_MAX_CHARS)}...`
|
||||
: displayName;
|
||||
|
||||
ctx.fillStyle = textColor;
|
||||
ctx.font = `24px ${fontFamily}`;
|
||||
ctx.textAlign = 'center';
|
||||
ctx.textBaseline = 'top';
|
||||
ctx.fillText(truncated, centerX, y);
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders a complete avatar (image, initials, or default icon) with display name on canvas.
|
||||
*
|
||||
* @param {HTMLCanvasElement} canvas - The canvas element.
|
||||
* @param {CanvasRenderingContext2D} ctx - Canvas 2D context.
|
||||
* @param {IParticipant | undefined} participant - The participant to render.
|
||||
* @param {string} displayName - The display name to show.
|
||||
* @param {Array<string>} customAvatarBackgrounds - Custom avatar background colors.
|
||||
* @param {HTMLImageElement | null} defaultIcon - Preloaded default icon image.
|
||||
* @param {string} backgroundColor - Background color for the canvas.
|
||||
* @param {string} fontFamily - Font family to use for text rendering.
|
||||
* @param {string} initialsColor - Color for avatar initials text.
|
||||
* @param {string} displayNameColor - Color for display name text.
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
export async function renderAvatarOnCanvas(
|
||||
canvas: HTMLCanvasElement,
|
||||
ctx: CanvasRenderingContext2D,
|
||||
participant: IParticipant | undefined,
|
||||
displayName: string,
|
||||
customAvatarBackgrounds: Array<string>,
|
||||
defaultIcon: HTMLImageElement | null,
|
||||
backgroundColor: string,
|
||||
fontFamily: string,
|
||||
initialsColor: string,
|
||||
displayNameColor: string
|
||||
): Promise<void> {
|
||||
const { width, height } = canvas;
|
||||
const centerX = width / 2;
|
||||
const centerY = height / 2;
|
||||
const avatarRadius = 100;
|
||||
const spacing = 20;
|
||||
const textY = centerY + avatarRadius + spacing;
|
||||
|
||||
// Clear and fill background.
|
||||
ctx.fillStyle = backgroundColor;
|
||||
ctx.fillRect(0, 0, width, height);
|
||||
|
||||
let avatarRendered = false;
|
||||
|
||||
if (participant?.loadableAvatarUrl) {
|
||||
try {
|
||||
await drawImageAvatar(
|
||||
ctx,
|
||||
participant.loadableAvatarUrl,
|
||||
participant.loadableAvatarUrlUseCORS,
|
||||
centerX,
|
||||
centerY,
|
||||
avatarRadius
|
||||
);
|
||||
avatarRendered = true;
|
||||
} catch (error) {
|
||||
logger.warn('Failed to load image avatar, falling back.', error);
|
||||
}
|
||||
}
|
||||
|
||||
if (!avatarRendered) {
|
||||
if (participant?.name) {
|
||||
drawInitialsAvatar(
|
||||
ctx, participant.name, customAvatarBackgrounds, centerX, centerY, avatarRadius, fontFamily, initialsColor
|
||||
);
|
||||
} else {
|
||||
drawDefaultIcon(ctx, defaultIcon, centerX, centerY, avatarRadius);
|
||||
}
|
||||
}
|
||||
|
||||
drawDisplayName(ctx, displayName, centerX, textY, fontFamily, displayNameColor);
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests picture-in-picture mode for the pip video element.
|
||||
*
|
||||
* NOTE: Called by Electron main process with userGesture: true.
|
||||
*
|
||||
* @returns {void}
|
||||
*/
|
||||
export function requestPictureInPicture() {
|
||||
const video = document.getElementById('pipVideo') as HTMLVideoElement;
|
||||
|
||||
if (!video) {
|
||||
logger.error('PiP video element (#pipVideo) not found');
|
||||
|
||||
return;
|
||||
}
|
||||
if (document.pictureInPictureElement) {
|
||||
logger.warn('Already in PiP mode');
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if video metadata is loaded.
|
||||
// readyState >= 1 (HAVE_METADATA) means video dimensions are available.
|
||||
if (video.readyState < 1) {
|
||||
logger.warn('Video metadata not loaded yet, waiting...');
|
||||
|
||||
// Wait for metadata to load before requesting PiP.
|
||||
video.addEventListener('loadedmetadata', () => {
|
||||
// @ts-ignore - requestPictureInPicture is not yet in all TypeScript definitions.
|
||||
video.requestPictureInPicture().catch((err: Error) => {
|
||||
logger.error(`Error while requesting PiP after metadata loaded: ${err.message}`);
|
||||
});
|
||||
}, { once: true });
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// @ts-ignore - requestPictureInPicture is not yet in all TypeScript definitions.
|
||||
video.requestPictureInPicture().catch((err: Error) => {
|
||||
logger.error(`Error while requesting PiP: ${err.message}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Action to enter Picture-in-Picture mode.
|
||||
* Handles both browser and Electron environments.
|
||||
*
|
||||
* @param {HTMLVideoElement} videoElement - The video element to call requestPictureInPicture on.
|
||||
* @returns {void}
|
||||
*/
|
||||
export function enterPiP(videoElement: HTMLVideoElement | undefined | null) {
|
||||
if (!videoElement) {
|
||||
logger.error('PiP video element not found');
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if PiP is supported.
|
||||
if (!('pictureInPictureEnabled' in document)) {
|
||||
logger.error('Picture-in-Picture is not supported in this browser');
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (document.pictureInPictureEnabled === false) {
|
||||
logger.error('Picture-in-Picture is disabled');
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// In Electron, use postMessage to request PiP from main process.
|
||||
// This bypasses the transient activation requirement by executing
|
||||
// requestPictureInPicture with userGesture: true in the main process.
|
||||
if (browser.isElectron()) {
|
||||
logger.log('Electron detected, sending postMessage to request PiP');
|
||||
|
||||
APP.API.notifyPictureInPictureRequested();
|
||||
|
||||
// State will be updated by enterpictureinpicture event.
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO: Enable PiP for browsers:
|
||||
// In browsers, we should directly call requestPictureInPicture.
|
||||
// @ts-ignore - requestPictureInPicture is not yet in all TypeScript definitions.
|
||||
// requestPictureInPicture();
|
||||
} catch (error) {
|
||||
logger.error('Error entering Picture-in-Picture:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up MediaSession API action handlers for controlling the conference.
|
||||
* Handlers dispatch actions that query fresh Redux state, avoiding stale closures.
|
||||
*
|
||||
* @param {Function} dispatch - Redux dispatch function.
|
||||
* @returns {void}
|
||||
*/
|
||||
export function setupMediaSessionHandlers(dispatch: IStore['dispatch']) {
|
||||
// @ts-ignore - MediaSession API is not fully typed in all environments.
|
||||
if ('mediaSession' in navigator && navigator.mediaSession?.setActionHandler) {
|
||||
try {
|
||||
// Set up audio mute toggle handler.
|
||||
// Dispatch action that will query current state and toggle.
|
||||
// @ts-ignore - togglemicrophone is a newer MediaSession action.
|
||||
navigator.mediaSession.setActionHandler('togglemicrophone', () => {
|
||||
dispatch(toggleAudioFromPiP());
|
||||
});
|
||||
|
||||
// Set up video mute toggle handler.
|
||||
// Dispatch action that will query current state and toggle.
|
||||
// @ts-ignore - togglecamera is a newer MediaSession action.
|
||||
navigator.mediaSession.setActionHandler('togglecamera', () => {
|
||||
dispatch(toggleVideoFromPiP());
|
||||
});
|
||||
|
||||
// Set up hangup handler.
|
||||
// @ts-ignore - hangup is a newer MediaSession action.
|
||||
navigator.mediaSession.setActionHandler('hangup', () => {
|
||||
dispatch(leaveConference());
|
||||
});
|
||||
|
||||
logger.log('MediaSession API handlers registered for PiP controls');
|
||||
} catch (error) {
|
||||
logger.warn('Some MediaSession actions not supported:', error);
|
||||
}
|
||||
} else {
|
||||
logger.warn('MediaSession API not supported in this browser');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the MediaSession API microphone and camera active state.
|
||||
* This ensures the PiP controls show the correct mute/unmute state.
|
||||
*
|
||||
* @param {IMediaSessionState} state - The current media session state.
|
||||
* @returns {void}
|
||||
*/
|
||||
export function updateMediaSessionState(state: IMediaSessionState) {
|
||||
if ('mediaSession' in navigator) {
|
||||
try {
|
||||
// @ts-ignore - setMicrophoneActive is a newer MediaSession method.
|
||||
if (navigator.mediaSession.setMicrophoneActive) {
|
||||
// @ts-ignore
|
||||
navigator.mediaSession.setMicrophoneActive(state.microphoneActive);
|
||||
}
|
||||
|
||||
// @ts-ignore - setCameraActive is a newer MediaSession method.
|
||||
if (navigator.mediaSession.setCameraActive) {
|
||||
// @ts-ignore
|
||||
navigator.mediaSession.setCameraActive(state.cameraActive);
|
||||
}
|
||||
|
||||
logger.log('MediaSession state updated:', state);
|
||||
} catch (error) {
|
||||
logger.warn('Error updating MediaSession state:', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans up MediaSession API action handlers.
|
||||
*
|
||||
* @returns {void}
|
||||
*/
|
||||
export function cleanupMediaSessionHandlers() {
|
||||
if ('mediaSession' in navigator) {
|
||||
try {
|
||||
// Reset the handlers to null so the browser stops showing the PiP controls;
// they are registered again the next time PiP is entered.
|
||||
// @ts-ignore - togglemicrophone is a newer MediaSession action.
|
||||
navigator.mediaSession.setActionHandler('togglemicrophone', null);
|
||||
// @ts-ignore - togglecamera is a newer MediaSession action.
|
||||
navigator.mediaSession.setActionHandler('togglecamera', null);
|
||||
// @ts-ignore - hangup is a newer MediaSession action.
|
||||
navigator.mediaSession.setActionHandler('hangup', null);
|
||||
logger.log('MediaSession API handlers cleaned up');
|
||||
} catch (error) {
|
||||
logger.error('Error cleaning up MediaSession handlers:', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Re-export from shared file for external use.
|
||||
export { isPiPEnabled };
|
||||
|
||||
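The @ts-ignore comments in setupMediaSessionHandlers and updateMediaSessionState exist because 'togglemicrophone', 'togglecamera', 'hangup', setMicrophoneActive and setCameraActive are newer MediaSession additions that the bundled DOM typings may not know about yet. A hedged alternative, should the ignores ever become noisy, is a single local widening of the type (names below are illustrative):

```typescript
// Illustrative local widening of MediaSession instead of per-line @ts-ignore.
type ExtendedMediaSession = MediaSession & {
    setActionHandler(
            action: 'togglemicrophone' | 'togglecamera' | 'hangup',
            handler: (() => void) | null): void;
    setCameraActive?(active: boolean): void;
    setMicrophoneActive?(active: boolean): void;
};

function getExtendedMediaSession(): ExtendedMediaSession | undefined {
    return 'mediaSession' in navigator
        ? navigator.mediaSession as ExtendedMediaSession
        : undefined;
}
```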
183
react/features/pip/hooks.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
import React, { useEffect, useRef } from 'react';
|
||||
|
||||
import IconUserSVG from '../base/icons/svg/user.svg?raw';
|
||||
import { IParticipant } from '../base/participants/types';
|
||||
import { TILE_ASPECT_RATIO } from '../filmstrip/constants';
|
||||
|
||||
import { renderAvatarOnCanvas } from './functions';
|
||||
import logger from './logger';
|
||||
|
||||
/**
|
||||
* Canvas dimensions for PiP avatar rendering.
|
||||
*/
|
||||
const CANVAS_WIDTH = 640;
|
||||
const CANVAS_HEIGHT = Math.floor(CANVAS_WIDTH / TILE_ASPECT_RATIO);
|
||||
|
||||
/**
|
||||
* Frame rate 0 means capture on-demand when canvas changes.
|
||||
* We manually request frames after drawing to ensure capture.
|
||||
*/
|
||||
const CANVAS_FRAME_RATE = 0;
|
||||
|
||||
/**
|
||||
* Options for the useCanvasAvatar hook.
|
||||
*/
|
||||
interface IUseCanvasAvatarOptions {
|
||||
backgroundColor: string;
|
||||
customAvatarBackgrounds: string[];
|
||||
displayName: string;
|
||||
displayNameColor: string;
|
||||
fontFamily: string;
|
||||
initialsColor: string;
|
||||
participant: IParticipant | undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Result returned by the useCanvasAvatar hook.
|
||||
* Returns a ref object so consumers can access .current inside effects
|
||||
* (the stream is created in an effect and won't be available at render time).
|
||||
*/
|
||||
interface IUseCanvasAvatarResult {
|
||||
canvasStreamRef: React.MutableRefObject<MediaStream | null>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Internal refs managed by the hook.
|
||||
*/
|
||||
interface ICanvasRefs {
|
||||
canvas: HTMLCanvasElement | null;
|
||||
defaultIcon: HTMLImageElement | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads and prepares the default user icon SVG as an Image element.
|
||||
*
|
||||
* @returns {HTMLImageElement} The prepared image element.
|
||||
*/
|
||||
function createDefaultIconImage(): HTMLImageElement {
|
||||
let svgText = IconUserSVG;
|
||||
|
||||
if (!svgText.includes('fill=')) {
|
||||
svgText = svgText.replace('<svg', '<svg fill="#FFFFFF"');
|
||||
}
|
||||
|
||||
const dataUrl = `data:image/svg+xml,${encodeURIComponent(svgText)
|
||||
.replace(/'/g, '%27')
|
||||
.replace(/"/g, '%22')}`;
|
||||
|
||||
const img = new Image();
|
||||
|
||||
img.src = dataUrl;
|
||||
|
||||
return img;
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom hook that manages canvas-based avatar rendering for Picture-in-Picture.
|
||||
* Creates and maintains a canvas element with a MediaStream that can be used
|
||||
* as a video source when the participant's video is unavailable.
|
||||
*
|
||||
* @param {IUseCanvasAvatarOptions} options - The hook options.
|
||||
* @returns {IUseCanvasAvatarResult} The canvas stream for use as video source.
|
||||
*/
|
||||
export function useCanvasAvatar(options: IUseCanvasAvatarOptions): IUseCanvasAvatarResult {
|
||||
const {
|
||||
participant,
|
||||
displayName,
|
||||
customAvatarBackgrounds,
|
||||
backgroundColor,
|
||||
fontFamily,
|
||||
initialsColor,
|
||||
displayNameColor
|
||||
} = options;
|
||||
|
||||
const refs = useRef<ICanvasRefs>({
|
||||
canvas: null,
|
||||
defaultIcon: null
|
||||
});
|
||||
|
||||
// Separate ref for the stream to return to consumers.
|
||||
// This allows consumers to access .current inside their effects.
|
||||
//
|
||||
// NOTE: If we ever need to recreate the stream (e.g., different canvas size),
|
||||
// consumers' effects won't automatically re-run since refs don't trigger re-renders.
|
||||
// To fix this, we could return an additional state flag like `streamReady` that
|
||||
// changes when the stream is set, and consumers would add it to their effect deps.
|
||||
const streamRef = useRef<MediaStream | null>(null);
|
||||
|
||||
/**
|
||||
* Initialize canvas, stream, and default icon on mount.
|
||||
*/
|
||||
useEffect(() => {
|
||||
// Create canvas.
|
||||
const canvas = document.createElement('canvas');
|
||||
|
||||
canvas.width = CANVAS_WIDTH;
|
||||
canvas.height = CANVAS_HEIGHT;
|
||||
refs.current.canvas = canvas;
|
||||
|
||||
// Create stream from canvas.
|
||||
streamRef.current = canvas.captureStream(CANVAS_FRAME_RATE);
|
||||
|
||||
// Load default icon.
|
||||
refs.current.defaultIcon = createDefaultIconImage();
|
||||
|
||||
logger.log('Canvas avatar initialized');
|
||||
|
||||
// Cleanup on unmount.
|
||||
return () => {
|
||||
if (streamRef.current) {
|
||||
streamRef.current.getTracks().forEach(track => track.stop());
|
||||
streamRef.current = null;
|
||||
}
|
||||
refs.current.canvas = null;
|
||||
refs.current.defaultIcon = null;
|
||||
logger.log('Canvas avatar cleaned up');
|
||||
};
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Re-render avatar when participant or display name changes.
|
||||
*/
|
||||
useEffect(() => {
|
||||
const { canvas, defaultIcon } = refs.current;
|
||||
|
||||
if (!canvas) {
|
||||
return;
|
||||
}
|
||||
|
||||
const ctx = canvas.getContext('2d');
|
||||
|
||||
if (!ctx) {
|
||||
logger.error('Failed to get canvas 2D context');
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
renderAvatarOnCanvas(
|
||||
canvas,
|
||||
ctx,
|
||||
participant,
|
||||
displayName,
|
||||
customAvatarBackgrounds,
|
||||
defaultIcon,
|
||||
backgroundColor,
|
||||
fontFamily,
|
||||
initialsColor,
|
||||
displayNameColor
|
||||
).then(() => {
|
||||
// Request a frame capture after drawing.
|
||||
// For captureStream(0), we need to manually trigger frame capture.
|
||||
const track = streamRef.current?.getVideoTracks()[0] as MediaStreamTrack & { requestFrame?: () => void; };
|
||||
|
||||
if (track?.requestFrame) {
|
||||
track.requestFrame();
|
||||
logger.log('Canvas frame requested after render');
|
||||
}
|
||||
}).catch((error: Error) => logger.error('Error rendering avatar on canvas:', error));
|
||||
}, [ participant?.loadableAvatarUrl, participant?.name, displayName, customAvatarBackgrounds, backgroundColor, fontFamily, initialsColor, displayNameColor ]);
|
||||
|
||||
return {
|
||||
canvasStreamRef: streamRef
|
||||
};
|
||||
}
|
||||
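useCanvasAvatar captures the canvas with a frame rate of 0, so nothing is pushed into the stream automatically; a frame only appears when requestFrame() is called after drawing. A minimal standalone sketch of that capture pattern, separate from the avatar rendering above (all names are illustrative):

```typescript
// Sketch of the captureStream(0) + requestFrame() pattern used by useCanvasAvatar.
function createOnDemandCanvasStream(width: number, height: number) {
    const canvas = document.createElement('canvas');

    canvas.width = width;
    canvas.height = height;

    // 0 fps: no automatic capture, frames must be requested explicitly.
    const stream = canvas.captureStream(0);
    const track = stream.getVideoTracks()[0] as MediaStreamTrack & { requestFrame?: () => void; };

    const draw = (render: (ctx: CanvasRenderingContext2D) => void) => {
        const ctx = canvas.getContext('2d');

        if (!ctx) {
            return;
        }

        render(ctx);

        // Push exactly one new frame into the stream after drawing.
        track.requestFrame?.();
    };

    return { canvas, stream, draw };
}
```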
3
react/features/pip/logger.ts
Normal file
@@ -0,0 +1,3 @@
import { getLogger } from '../base/logging/functions';

export default getLogger('app:pip');
1
react/features/pip/middleware.ts
Normal file
@@ -0,0 +1 @@
import './subscriber';
30
react/features/pip/reducer.ts
Normal file
@@ -0,0 +1,30 @@
import ReducerRegistry from '../base/redux/ReducerRegistry';

import { SET_PIP_ACTIVE } from './actionTypes';

/**
 * The default state for the pip feature.
 */
const DEFAULT_STATE = {
    isPiPActive: false
};

export interface IPipState {
    isPiPActive: boolean;
}

/**
 * Reduces the Redux actions of the pip feature.
 */
ReducerRegistry.register<IPipState>('features/pip', (state = DEFAULT_STATE, action): IPipState => {
    switch (action.type) {
    case SET_PIP_ACTIVE:
        return {
            ...state,
            isPiPActive: action.isPiPActive
        };

    default:
        return state;
    }
});
61
react/features/pip/subscriber.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import { IReduxState } from '../app/types';
|
||||
import { MEDIA_TYPE } from '../base/media/constants';
|
||||
import StateListenerRegistry from '../base/redux/StateListenerRegistry';
|
||||
import { isLocalTrackMuted } from '../base/tracks/functions.any';
|
||||
import { getElectronGlobalNS } from '../base/util/helpers';
|
||||
|
||||
import { requestPictureInPicture, shouldShowPiP, updateMediaSessionState } from './functions';
|
||||
|
||||
/**
|
||||
* Listens to audio and video mute state changes when PiP is active
|
||||
* and updates the MediaSession API to reflect the current state in PiP controls.
|
||||
*/
|
||||
StateListenerRegistry.register(
|
||||
/* selector */ (state: IReduxState) => {
|
||||
// Skip if PiP is disabled or shouldn't be shown (e.g., on prejoin without showOnPrejoin).
|
||||
if (!shouldShowPiP(state)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const isPiPActive = state['features/pip']?.isPiPActive;
|
||||
|
||||
if (!isPiPActive) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
audioMuted: isLocalTrackMuted(state['features/base/tracks'], MEDIA_TYPE.AUDIO),
|
||||
videoMuted: isLocalTrackMuted(state['features/base/tracks'], MEDIA_TYPE.VIDEO)
|
||||
};
|
||||
},
|
||||
/* listener */ (muteState: { audioMuted: boolean; videoMuted: boolean; } | null) => {
|
||||
if (muteState === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
updateMediaSessionState({
|
||||
cameraActive: !muteState.videoMuted,
|
||||
microphoneActive: !muteState.audioMuted
|
||||
});
|
||||
},
|
||||
{
|
||||
deepEquals: true
|
||||
}
|
||||
);
|
||||
|
||||
StateListenerRegistry.register(
|
||||
/* selector */ shouldShowPiP,
|
||||
/* listener */ (_shouldShowPiP: boolean) => {
|
||||
const electronNS = getElectronGlobalNS();
|
||||
|
||||
if (_shouldShowPiP) {
|
||||
// Expose requestPictureInPicture for Electron main process.
|
||||
if (!electronNS.requestPictureInPicture) {
|
||||
electronNS.requestPictureInPicture = requestPictureInPicture;
|
||||
}
|
||||
} else if (typeof electronNS.requestPictureInPicture === 'function') {
|
||||
delete electronNS.requestPictureInPicture;
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
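The second listener above exposes requestPictureInPicture on the Electron global namespace so the main process can invoke it with a user gesture, bypassing the transient-activation requirement. A hedged sketch of the main-process side; the IPC channel name and the JitsiMeetElectron global are assumptions, only webContents.executeJavaScript(code, userGesture) is a documented Electron API:

```typescript
import { BrowserWindow, ipcMain } from 'electron';

// Illustrative main-process handler: when the renderer signals that PiP should be
// shown, execute the exposed function inside the page with userGesture = true.
ipcMain.on('jitsi-request-pip', event => {
    const win = BrowserWindow.fromWebContents(event.sender);

    win?.webContents.executeJavaScript(
        'window.JitsiMeetElectron && window.JitsiMeetElectron.requestPictureInPicture();',
        /* userGesture */ true)
        .catch(err => console.error('Failed to request PiP:', err));
});
```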
14
react/features/pip/types.ts
Normal file
@@ -0,0 +1,14 @@
/**
 * MediaSession state for microphone and camera.
 */
export interface IMediaSessionState {
    /**
     * Whether the camera is active (unmuted).
     */
    cameraActive: boolean;

    /**
     * Whether the microphone is active (unmuted).
     */
    microphoneActive: boolean;
}
@@ -1,5 +1,6 @@
|
||||
import React, { ComponentType, PureComponent } from 'react';
|
||||
import { SafeAreaView, TouchableWithoutFeedback, View } from 'react-native';
|
||||
import { TouchableWithoutFeedback } from 'react-native';
|
||||
import { Edge, SafeAreaView } from 'react-native-safe-area-context';
|
||||
import { connect } from 'react-redux';
|
||||
|
||||
import { IReduxState, IStore } from '../../../app/types';
|
||||
@@ -8,6 +9,7 @@ import { hideDialog } from '../../../base/dialog/actions';
|
||||
import { isDialogOpen } from '../../../base/dialog/functions';
|
||||
import { getParticipantCount } from '../../../base/participants/functions';
|
||||
import { StyleType } from '../../../base/styles/functions.native';
|
||||
import { isToolboxVisible } from '../../../toolbox/functions.native';
|
||||
|
||||
import ReactionMenu from './ReactionMenu';
|
||||
|
||||
@@ -36,6 +38,11 @@ interface IProps {
|
||||
*/
|
||||
_styles: StyleType;
|
||||
|
||||
/**
|
||||
* The indicator which determines whether the Toolbox is visible.
|
||||
*/
|
||||
_toolboxVisible: boolean;
|
||||
|
||||
/**
|
||||
* The width of the screen.
|
||||
*/
|
||||
@@ -80,25 +87,24 @@ class ReactionMenuDialog extends PureComponent<IProps> {
|
||||
* @returns {ReactElement}
|
||||
*/
|
||||
override render() {
|
||||
const { _styles, _width, _height, _participantCount } = this.props;
|
||||
const { _height, _participantCount, _styles, _toolboxVisible, _width } = this.props;
|
||||
|
||||
return (
|
||||
<SafeAreaView style = { _styles }>
|
||||
<TouchableWithoutFeedback
|
||||
onPress = { this._onCancel }>
|
||||
<View style = { _styles }>
|
||||
<View
|
||||
style = {{
|
||||
left: (_width - 360) / 2,
|
||||
top: _height - (_participantCount > 1 ? 144 : 80) - 80
|
||||
}}>
|
||||
<ReactionMenu
|
||||
onCancel = { this._onCancel }
|
||||
overflowMenu = { false } />
|
||||
</View>
|
||||
</View>
|
||||
</TouchableWithoutFeedback>
|
||||
</SafeAreaView>
|
||||
<TouchableWithoutFeedback
|
||||
onPress = { this._onCancel }>
|
||||
<SafeAreaView
|
||||
edges = { [ 'bottom', 'left', 'right', !_toolboxVisible && 'top' ].filter(Boolean) as Edge[] }
|
||||
style = { [
|
||||
_styles,
|
||||
{
|
||||
left: (_width - 360) / 2,
|
||||
top: _height - (_participantCount > 1 ? 144 : 80) - 80
|
||||
} ] }>
|
||||
<ReactionMenu
|
||||
onCancel = { this._onCancel }
|
||||
overflowMenu = { false } />
|
||||
</SafeAreaView>
|
||||
</TouchableWithoutFeedback>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -132,7 +138,8 @@ function _mapStateToProps(state: IReduxState) {
|
||||
_styles: ColorSchemeRegistry.get(state, 'Toolbox').reactionDialog,
|
||||
_width: state['features/base/responsive-ui'].clientWidth,
|
||||
_height: state['features/base/responsive-ui'].clientHeight,
|
||||
_participantCount: getParticipantCount(state)
|
||||
_participantCount: getParticipantCount(state),
|
||||
_toolboxVisible: isToolboxVisible(state)
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ import { openSheet } from '../../base/dialog/actions';
|
||||
import { translate } from '../../base/i18n/functions';
|
||||
import NavigateSectionList from '../../base/react/components/native/NavigateSectionList';
|
||||
import { Item, Section } from '../../base/react/types';
|
||||
import styles from '../../welcome/components/styles';
|
||||
import styles from '../../welcome/components/styles.native';
|
||||
import { isRecentListEnabled, toDisplayableList } from '../functions.native';
|
||||
|
||||
import AbstractRecentList from './AbstractRecentList';
|
||||
|
||||
@@ -30,10 +30,7 @@ import {
|
||||
START_LOCAL_RECORDING,
|
||||
STOP_LOCAL_RECORDING
|
||||
} from './actionTypes';
|
||||
import {
|
||||
RECORDING_METADATA_ID,
|
||||
START_RECORDING_NOTIFICATION_ID
|
||||
} from './constants';
|
||||
import { START_RECORDING_NOTIFICATION_ID } from './constants';
|
||||
import {
|
||||
getRecordButtonProps,
|
||||
getRecordingLink,
|
||||
@@ -462,10 +459,7 @@ export function showStartRecordingNotificationWithCallback(openRecordingDialog:
|
||||
});
|
||||
|
||||
if (autoTranscribeOnRecord) {
|
||||
conference?.getMetadataHandler().setMetadata(RECORDING_METADATA_ID, {
|
||||
isTranscribingEnabled: true
|
||||
});
|
||||
dispatch(setRequestingSubtitles(true, false, null, true));
|
||||
dispatch(setRequestingSubtitles(true, false, null));
|
||||
}
|
||||
} else {
|
||||
openRecordingDialog();
|
||||
|
||||
@@ -63,11 +63,6 @@ export interface IProps extends WithTranslation {
|
||||
*/
|
||||
_rToken: string;
|
||||
|
||||
/**
|
||||
* Whether the record audio / video option is enabled by default.
|
||||
*/
|
||||
_recordAudioAndVideo: boolean;
|
||||
|
||||
/**
|
||||
* Whether or not the local participant is screensharing.
|
||||
*/
|
||||
@@ -99,6 +94,11 @@ export interface IProps extends WithTranslation {
|
||||
dispatch: IStore['dispatch'];
|
||||
|
||||
navigation: any;
|
||||
|
||||
/**
|
||||
* Whether the record audio / video option is enabled by default.
|
||||
*/
|
||||
recordAudioAndVideo: boolean;
|
||||
}
|
||||
|
||||
interface IState {
|
||||
@@ -191,7 +191,7 @@ class AbstractStartRecordingDialog extends Component<IProps, IState> {
|
||||
isValidating: false,
|
||||
userName: undefined,
|
||||
sharingEnabled: true,
|
||||
shouldRecordAudioAndVideo: this.props._recordAudioAndVideo,
|
||||
shouldRecordAudioAndVideo: this.props.recordAudioAndVideo,
|
||||
shouldRecordTranscription: this.props._autoTranscribeOnRecord,
|
||||
spaceLeft: undefined,
|
||||
selectedRecordingService,
|
||||
@@ -415,13 +415,13 @@ class AbstractStartRecordingDialog extends Component<IProps, IState> {
|
||||
|
||||
if (this.state.selectedRecordingService === RECORDING_TYPES.JITSI_REC_SERVICE
|
||||
&& this.state.shouldRecordTranscription) {
|
||||
dispatch(setRequestingSubtitles(true, _displaySubtitles, _subtitlesLanguage, true));
|
||||
dispatch(setRequestingSubtitles(true, _displaySubtitles, _subtitlesLanguage));
|
||||
} else {
|
||||
_conference?.getMetadataHandler().setMetadata(RECORDING_METADATA_ID, {
|
||||
isTranscribingEnabled: this.state.shouldRecordTranscription
|
||||
});
|
||||
}
|
||||
|
||||
_conference?.getMetadataHandler().setMetadata(RECORDING_METADATA_ID, {
|
||||
isTranscribingEnabled: this.state.shouldRecordTranscription
|
||||
});
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -474,7 +474,7 @@ export function mapStateToProps(state: IReduxState, _ownProps: any) {
|
||||
_isDropboxEnabled: isDropboxEnabled(state),
|
||||
_localRecordingEnabled: !localRecording?.disable,
|
||||
_rToken: state['features/dropbox'].rToken ?? '',
|
||||
_recordAudioAndVideo: recordings?.recordAudioAndVideo ?? true,
|
||||
recordAudioAndVideo: _ownProps.recordAudioAndVideo ?? recordings?.recordAudioAndVideo ?? true,
|
||||
_subtitlesLanguage,
|
||||
_tokenExpireDate: state['features/dropbox'].expireDate,
|
||||
_token: state['features/dropbox'].token ?? ''
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import React, { useCallback } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Platform, SafeAreaView, ScrollView, Text, View, ViewStyle } from 'react-native';
|
||||
import { Platform, ScrollView, Text, View, ViewStyle } from 'react-native';
|
||||
import { SafeAreaView } from 'react-native-safe-area-context';
|
||||
import { useSelector } from 'react-redux';
|
||||
|
||||
import { IReduxState } from '../../../app/types';
|
||||
|
||||
@@ -2,6 +2,7 @@ import { useNavigation } from '@react-navigation/native';
|
||||
import React, { useCallback, useLayoutEffect } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { ScrollView, Text, TouchableHighlight, View, ViewStyle } from 'react-native';
|
||||
import { Edge } from 'react-native-safe-area-context';
|
||||
import { useSelector } from 'react-redux';
|
||||
|
||||
import { IReduxState } from '../../../app/types';
|
||||
@@ -45,9 +46,7 @@ const LanguageSelectView = ({ isInWelcomePage }: { isInWelcomePage?: boolean; })
|
||||
return (
|
||||
<JitsiScreen
|
||||
disableForcedKeyboardDismiss = { true }
|
||||
|
||||
// @ts-ignore
|
||||
safeAreaInsets = { [ !isInWelcomePage && 'bottom', 'left', 'right' ].filter(Boolean) }
|
||||
safeAreaInsets = { [ !isInWelcomePage && 'bottom', 'left', 'right' ].filter(Boolean) as Edge[] }
|
||||
style = { styles.settingsViewContainer }>
|
||||
<ScrollView
|
||||
bounces = { isInWelcomePage }
|
||||
|
||||
@@ -115,8 +115,6 @@ const ProfileView = ({ isInWelcomePage }: {
|
||||
<JitsiScreen
|
||||
disableForcedKeyboardDismiss = { true }
|
||||
hasBottomTextInput = { true }
|
||||
|
||||
// @ts-ignore
|
||||
safeAreaInsets = { [ !isInWelcomePage && 'bottom', 'left', 'right' ].filter(Boolean) as Edge[] }
|
||||
style = { styles.settingsViewContainer }>
|
||||
<ScrollView
|
||||
|
||||
@@ -95,11 +95,9 @@ export function toggleRequestingSubtitles() {
|
||||
export function setRequestingSubtitles(
|
||||
enabled: boolean,
|
||||
displaySubtitles = true,
|
||||
language: string | null = `translation-languages:${DEFAULT_LANGUAGE}`,
|
||||
backendRecordingOn = false) {
|
||||
language: string | null = `translation-languages:${DEFAULT_LANGUAGE}`) {
|
||||
return {
|
||||
type: SET_REQUESTING_SUBTITLES,
|
||||
backendRecordingOn,
|
||||
displaySubtitles,
|
||||
enabled,
|
||||
language
|
||||
|
||||
@@ -3,10 +3,11 @@ import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';

import { IReduxState, IStore } from '../../app/types';
import { openDialog } from '../../base/dialog/actions';
import { StartRecordingDialog } from '../../recording/components/Recording/index';
import { setRequestingSubtitles } from '../actions.any';
import { getAvailableSubtitlesLanguages } from '../functions.any';


export interface IAbstractLanguageSelectorDialogProps {
dispatch: IStore['dispatch'];
language: string | null;
@@ -44,14 +45,21 @@ const AbstractLanguageSelectorDialog = (Component: ComponentType<IAbstractLangua
selected: lang === selected
};
});
const { conference } = useSelector((state: IReduxState) => state['features/base/conference']);

const onLanguageSelected = useCallback((value: string) => {
const _selectedLanguage = value === noLanguageLabel ? null : value;
const enabled = Boolean(_selectedLanguage);
const displaySubtitles = enabled;

dispatch(setRequestingSubtitles(enabled, displaySubtitles, _selectedLanguage));
}, [ language ]);
if (conference?.getMetadataHandler()?.getMetadata()?.asyncTranscription) {
dispatch(openDialog('StartRecordingDialog', StartRecordingDialog, {
recordAudioAndVideo: false
}));
} else {
dispatch(setRequestingSubtitles(enabled, displaySubtitles, _selectedLanguage));
}
}, [ conference, language ]);

return (
<Component

@@ -5,8 +5,10 @@ import { ENDPOINT_MESSAGE_RECEIVED, NON_PARTICIPANT_MESSAGE_RECEIVED } from '../
import { MEET_FEATURES } from '../base/jwt/constants';
import { isJwtFeatureEnabled } from '../base/jwt/functions';
import JitsiMeetJS from '../base/lib-jitsi-meet';
import { TRANSCRIBER_ID } from '../base/participants/constants';
import MiddlewareRegistry from '../base/redux/MiddlewareRegistry';
import { showErrorNotification } from '../notifications/actions';
import { RECORDING_METADATA_ID } from '../recording/constants';
import { TRANSCRIBER_JOINED } from '../transcribing/actionTypes';

import {
@@ -96,7 +98,7 @@ MiddlewareRegistry.register(store => next => action => {
break;
}
case SET_REQUESTING_SUBTITLES:
_requestingSubtitlesChange(store, action.enabled, action.language, action.backendRecordingOn);
_requestingSubtitlesChange(store, action.enabled, action.language);
break;
}

@@ -118,7 +120,18 @@ MiddlewareRegistry.register(store => next => action => {
* @returns {Object} The value returned by {@code next(action)}.
*/
function _endpointMessageReceived(store: IStore, next: Function, action: AnyAction) {
const { data: json } = action;
let json: any = {};

if (action.type === ENDPOINT_MESSAGE_RECEIVED) {
if (!action.participant.isHidden()) {
return next(action);
}
json = action.data;
} else if (action.type === NON_PARTICIPANT_MESSAGE_RECEIVED && action.id === TRANSCRIBER_ID) {
json = action.json;
} else {
return next(action);
}

if (![ JSON_TYPE_TRANSCRIPTION_RESULT, JSON_TYPE_TRANSLATION_RESULT ].includes(json?.type)) {
return next(action);
@@ -331,31 +344,25 @@ function _getPrimaryLanguageCode(language: string) {
* @param {Store} store - The redux store.
* @param {boolean} enabled - Whether subtitles should be enabled or not.
* @param {string} language - The language to use for translation.
* @param {boolean} backendRecordingOn - Whether backend recording is on or not.
* @private
* @returns {void}
*/
function _requestingSubtitlesChange(
{ dispatch, getState }: IStore,
enabled: boolean,
language?: string | null,
backendRecordingOn = false) {
language?: string | null) {
const state = getState();
const { conference } = state['features/base/conference'];
const { transcription } = state['features/base/config'];
const backendRecordingOn = conference?.getMetadataHandler()?.getMetadata()?.asyncTranscription;

conference?.setLocalParticipantProperty(
P_NAME_REQUESTING_TRANSCRIPTION,
enabled);

if (enabled && conference?.getTranscriptionStatus() === JitsiMeetJS.constants.transcriptionStatus.OFF) {
const featureAllowed = isJwtFeatureEnabled(getState(), MEET_FEATURES.TRANSCRIPTION, false);
if (enabled && conference?.getTranscriptionStatus() === JitsiMeetJS.constants.transcriptionStatus.OFF
&& isJwtFeatureEnabled(getState(), MEET_FEATURES.TRANSCRIPTION, false)) {

// the default value for inviteJigasiOnBackendTranscribing is true (when undefined)
const inviteJigasi = conference?.getMetadataHandler()?.getMetadata()?.asyncTranscription
? (transcription?.inviteJigasiOnBackendTranscribing ?? true) : true;

if (featureAllowed && (!backendRecordingOn || inviteJigasi)) {
if (!backendRecordingOn) {
conference?.dial(TRANSCRIBER_DIAL_NUMBER)
.catch((e: any) => {
logger.error('Error dialing', e);
@@ -368,6 +375,10 @@ function _requestingSubtitlesChange(
}));
dispatch(setSubtitlesError(true));
});
} else {
conference?.getMetadataHandler()?.setMetadata(RECORDING_METADATA_ID, {
isTranscribingEnabled: true
});
}
}

@@ -376,6 +387,13 @@ function _requestingSubtitlesChange(
P_NAME_TRANSLATION_LANGUAGE,
language.replace('translation-languages:', ''));
}

if (!enabled && backendRecordingOn
&& conference?.getMetadataHandler()?.getMetadata()[RECORDING_METADATA_ID]?.isTranscribingEnabled) {
conference?.getMetadataHandler()?.setMetadata(RECORDING_METADATA_ID, {
isTranscribingEnabled: false
});
}
}

/**

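To condense the branching above into one place, here is a small standalone sketch (plain booleans instead of the real conference object; the actual middleware additionally checks the JWT transcription feature and the current transcription status before acting):

    type TranscriberAction = 'dial-jigasi' | 'set-recording-metadata' | 'none';

    // backendRecordingOn mirrors the asyncTranscription flag in the conference metadata.
    function transcriberActionFor(enabled: boolean, backendRecordingOn: boolean): TranscriberAction {
        if (!enabled) {
            return 'none';
        }

        // With backend (async) transcription the jigasi transcriber is not dialed in;
        // instead the isTranscribingEnabled flag is toggled on the recording metadata.
        return backendRecordingOn ? 'set-recording-metadata' : 'dial-jigasi';
    }
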
@@ -50,6 +50,13 @@ export function setToolboxVisible(visible: boolean) {
type: SET_TOOLBOX_VISIBLE,
visible
});
// Notify external API consumers about the change in toolbox visibility
// if the old legacy APP.API bridge is available.
/* eslint-disable no-undef */
if (typeof APP !== 'undefined' && APP.API && typeof APP.API.notifyToolbarVisibilityChanged === 'function') {
APP.API.notifyToolbarVisibilityChanged(visible);
}
/* eslint-enable no-undef */
};
}

@@ -72,6 +79,18 @@ export function toggleToolboxVisible() {
dispatch({
type: TOGGLE_TOOLBOX_VISIBLE
});

// After toggling, read the updated state and notify external API
// about the current visibility. This mirrors the behavior of
// setToolboxVisible and ensures consumers are informed when the
// visibility changes via toggle.
/* eslint-disable no-undef */
if (typeof APP !== 'undefined' && APP.API && typeof APP.API.notifyToolbarVisibilityChanged === 'function') {
const { visible: newVisible } = getState()['features/toolbox'];

APP.API.notifyToolbarVisibilityChanged(newVisible);
}
/* eslint-enable no-undef */
};
}


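The same typeof APP guard now appears in several thunks; a hypothetical helper (not part of these commits) could factor the check out, for example:

    declare const APP: any; // global bridge provided by the jitsi-meet web app

    // Call a method on the legacy APP.API bridge only when it is available,
    // e.g. notifyLegacyApi('notifyToolbarVisibilityChanged', visible).
    function notifyLegacyApi(method: string, ...args: unknown[]) {
        if (typeof APP !== 'undefined' && APP.API && typeof APP.API[method] === 'function') {
            APP.API[method](...args);
        }
    }
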
@@ -1,6 +1,6 @@
import React from 'react';
import { View, ViewStyle } from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import { Edge, SafeAreaView } from 'react-native-safe-area-context';
import { connect, useSelector } from 'react-redux';

import { IReduxState, IStore } from '../../../app/types';
@@ -117,9 +117,7 @@ function Toolbox(props: IProps) {
style = { styles.toolboxContainer as ViewStyle }>
<SafeAreaView
accessibilityRole = 'toolbar'

// @ts-ignore
edges = { [ bottomEdge && 'bottom' ].filter(Boolean) }
edges = { [ bottomEdge && 'bottom' ].filter(Boolean) as Edge[] }
pointerEvents = 'box-none'
style = { style as ViewStyle }>
{ renderToolboxButtons() }

@@ -19,6 +19,7 @@ import {
import {
getJwtDisabledButtons,
getVisibleButtons,
getVisibleButtonsForReducedUI,
isButtonEnabled,
isToolboxVisible
} from '../../functions.web';
@@ -82,8 +83,7 @@ export default function Toolbox({
const isNarrowLayout = useSelector((state: IReduxState) => state['features/base/responsive-ui'].isNarrowLayout);
const videoSpaceWidth = useSelector((state: IReduxState) => state['features/base/responsive-ui'].videoSpaceWidth);
const isModerator = useSelector(isLocalParticipantModerator);
const customToolbarButtons = useSelector(
(state: IReduxState) => state['features/base/config'].customToolbarButtons);
const customToolbarButtons = useSelector((state: IReduxState) => state['features/base/config'].customToolbarButtons);
const iAmRecorder = useSelector((state: IReduxState) => state['features/base/config'].iAmRecorder);
const iAmSipGateway = useSelector((state: IReduxState) => state['features/base/config'].iAmSipGateway);
const overflowDrawer = useSelector((state: IReduxState) => state['features/toolbox'].overflowDrawer);
@@ -110,6 +110,8 @@ export default function Toolbox({
const toolbarVisible = useSelector(isToolboxVisible);
const mainToolbarButtonsThresholds
= useSelector((state: IReduxState) => state['features/toolbox'].mainToolbarButtonsThresholds);
const { reducedUImainToolbarButtons } = useSelector((state: IReduxState) => state['features/base/config']);
const reducedUI = useSelector((state: IReduxState) => state['features/base/responsive-ui'].reducedUI);
const allButtons = useToolboxButtons(customToolbarButtons);
const isMobile = isMobileBrowser();
const endConferenceSupported = Boolean(conference?.isEndConferenceSupported() && isModerator);
@@ -233,7 +235,7 @@ export default function Toolbox({
const toolbarAccLabel = 'toolbar.accessibilityLabel.moreActionsMenu';
const containerClassName = `toolbox-content${isMobile || isNarrowLayout ? ' toolbox-content-mobile' : ''}`;

const { mainMenuButtons, overflowMenuButtons } = getVisibleButtons({
const normalUIButtons = getVisibleButtons({
allButtons,
buttonsWithNotifyClick,
toolbarButtons: toolbarButtonsToUse,
@@ -241,6 +243,20 @@ export default function Toolbox({
jwtDisabledButtons,
mainToolbarButtonsThresholds
});

const reducedUIButtons = getVisibleButtonsForReducedUI({
allButtons,
buttonsWithNotifyClick,
jwtDisabledButtons,
reducedUImainToolbarButtons,
});

const mainMenuButtons = reducedUI
? reducedUIButtons.mainMenuButtons
: normalUIButtons.mainMenuButtons;
const overflowMenuButtons = reducedUI
? []
: normalUIButtons.overflowMenuButtons;
const raiseHandInOverflowMenu = overflowMenuButtons.some(({ key }) => key === 'raisehand');
const showReactionsInOverflowMenu = _shouldDisplayReactionsButtons
&& (

@@ -12,6 +12,8 @@ export const DUMMY_9_BUTTONS_THRESHOLD_VALUE = Symbol('9_BUTTONS_THRESHOLD_VALUE
*/
export const DUMMY_10_BUTTONS_THRESHOLD_VALUE = Symbol('10_BUTTONS_THRESHOLD_VALUE');

export const DEFAULT_REDUCED_UI_MAIN_TOOLBAR_BUTTONS = [ 'microphone', 'camera' ];

/**
* Thresholds for displaying toolbox buttons.
*/

@@ -6,9 +6,9 @@ import { IGUMPendingState } from '../base/media/types';
import { isScreenMediaShared } from '../screen-share/functions';
import { isWhiteboardVisible } from '../whiteboard/functions';

import { MAIN_TOOLBAR_BUTTONS_PRIORITY, TOOLBAR_TIMEOUT } from './constants';
import { DEFAULT_REDUCED_UI_MAIN_TOOLBAR_BUTTONS, MAIN_TOOLBAR_BUTTONS_PRIORITY, TOOLBAR_TIMEOUT } from './constants';
import { isButtonEnabled } from './functions.any';
import { IGetVisibleButtonsParams, IToolboxButton, NOTIFY_CLICK_MODE } from './types';
import { IGetVisibleButtonsForReducedUIParams, IGetVisibleButtonsParams, IToolboxButton, NOTIFY_CLICK_MODE } from './types';

export * from './functions.any';

@@ -201,6 +201,41 @@ export function getVisibleButtons({
};
}

/**
* Returns buttons that need to be rendered for reduced UI mode.
*
* @param {IGetVisibleButtonsForReducedUIParams} params - The parameters needed to extract the visible buttons.
* @returns {Object} - The visible buttons for reduced ui.
*/
export function getVisibleButtonsForReducedUI({
allButtons,
buttonsWithNotifyClick,
jwtDisabledButtons,
reducedUImainToolbarButtons
}: IGetVisibleButtonsForReducedUIParams) {
setButtonsNotifyClickMode(allButtons, buttonsWithNotifyClick);

if (!Array.isArray(reducedUImainToolbarButtons) || reducedUImainToolbarButtons.length === 0) {
const defaultButtons = DEFAULT_REDUCED_UI_MAIN_TOOLBAR_BUTTONS.map(key => allButtons[key]);

return {
mainMenuButtons: defaultButtons
};
}

const filteredButtons = reducedUImainToolbarButtons.filter(key =>
typeof key !== 'undefined'
&& !jwtDisabledButtons.includes(key)
&& isButtonEnabled(key, reducedUImainToolbarButtons)
&& allButtons[key]);

const mainMenuButtons = filteredButtons.map(key => allButtons[key]);

return {
mainMenuButtons
};
}

/**
* Returns the list of participant menu buttons that have that notify the api when clicked.
*

@@ -107,3 +107,10 @@ export interface IGetVisibleButtonsParams {
mainToolbarButtonsThresholds: IMainToolbarButtonThresholds;
toolbarButtons: string[];
}

export interface IGetVisibleButtonsForReducedUIParams {
allButtons: { [key: string]: IToolboxButton; };
buttonsWithNotifyClick: Map<string, NOTIFY_CLICK_MODE>;
jwtDisabledButtons: string[];
reducedUImainToolbarButtons?: string[];
}

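As a standalone illustration (simplified types, and omitting the notify-click wiring and the isButtonEnabled check done by the real function), the selection rule above boils down to: fall back to the default microphone/camera pair when no override is configured, otherwise keep only configured keys that are known and not disabled by the JWT.

    const REDUCED_UI_DEFAULTS = [ 'microphone', 'camera' ];

    function pickReducedUIButtons(
            allButtons: Record<string, { key: string; }>,
            jwtDisabledButtons: string[],
            reducedUImainToolbarButtons?: string[]) {
        // No (or empty) override configured: use the built-in defaults.
        if (!Array.isArray(reducedUImainToolbarButtons) || reducedUImainToolbarButtons.length === 0) {
            return REDUCED_UI_DEFAULTS.map(key => allButtons[key]);
        }

        // Keep only keys that exist and are not disabled through the JWT.
        return reducedUImainToolbarButtons
            .filter(key => !jwtDisabledButtons.includes(key) && Boolean(allButtons[key]))
            .map(key => allButtons[key]);
    }

    // pickReducedUIButtons({ microphone: { key: 'microphone' } }, [], [ 'microphone', 'chat' ])
    // keeps only the microphone entry, since 'chat' is not a known button here.
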
@@ -73,6 +73,11 @@ export function muteRemote(participantId: string, mediaType: MediaType) {
const muteMediaType = mediaType === MEDIA_TYPE.SCREENSHARE ? 'desktop' : mediaType;

dispatch(muteRemoteParticipant(participantId, muteMediaType));

// Notify external API that participant was muted by moderator
if (typeof APP !== 'undefined') {
APP.API.notifyParticipantMuted(participantId, true, muteMediaType, false);
}
};
}


@@ -2,7 +2,6 @@ import React from 'react';
import {
Animated,
NativeSyntheticEvent,
SafeAreaView,
StyleProp,
TextInputFocusEventData,
TextStyle,
@@ -10,6 +9,7 @@ import {
View,
ViewStyle
} from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import { connect } from 'react-redux';

import { getName } from '../../app/functions.native';
@@ -333,30 +333,30 @@ class WelcomePage extends AbstractWelcomePage<IProps> {
isSettingsScreenFocused && styles.roomNameInputContainer,
{ opacity: this.state.roomNameInputAnimation }
] as StyleProp<ViewStyle> }>
<SafeAreaView style = { styles.roomContainer as StyleProp<ViewStyle> }>
<View style = { styles.joinControls } >
<Text style = { styles.enterRoomText as StyleProp<TextStyle> }>
{ t('welcomepage.roomname') }
</Text>
<Input
accessibilityLabel = { t(roomnameAccLabel) }
autoCapitalize = { 'none' }
autoFocus = { false }
customStyles = {{ input: styles.customInput }}
onBlur = { this._onFieldBlur }
onChange = { this._onRoomChange }
onFocus = { this._onFieldFocus }
onSubmitEditing = { this._onJoin }
placeholder = { this.state.roomPlaceholder }
returnKeyType = { 'go' }
value = { this.state.room } />
{
this._renderInsecureRoomNameWarning()
}
{
this._renderHintBox()
}
</View>
<SafeAreaView
edges = { [ 'left', 'right' ] }
style = { styles.roomContainer as StyleProp<ViewStyle> }>
<Text style = { styles.enterRoomText as StyleProp<TextStyle> }>
{ t('welcomepage.roomname') }
</Text>
<Input
accessibilityLabel = { t(roomnameAccLabel) }
autoCapitalize = { 'none' }
autoFocus = { false }
customStyles = {{ input: styles.customInput }}
onBlur = { this._onFieldBlur }
onChange = { this._onRoomChange }
onFocus = { this._onFieldFocus }
onSubmitEditing = { this._onJoin }
placeholder = { this.state.roomPlaceholder }
returnKeyType = { 'go' }
value = { this.state.room } />
{
this._renderInsecureRoomNameWarning()
}
{
this._renderHintBox()
}
</SafeAreaView>
</Animated.View>
);

@@ -101,13 +101,6 @@ export default {
paddingTop: 10
},

/**
* A view that contains the field and hint box.
*/
joinControls: {
padding: BoxModel.padding
},

messageContainer: {
backgroundColor: BaseTheme.palette.ui03,
borderRadius: BaseTheme.shape.borderRadius,
@@ -149,7 +142,7 @@ export default {
roomContainer: {
alignSelf: 'stretch',
flexDirection: 'column',
marginHorizontal: BaseTheme.spacing[2]
padding: BaseTheme.spacing[3]
},

/**

@@ -6,6 +6,7 @@
- breakout_rooms - A table containing breakout rooms created in the main room. The keys are the JIDs of the breakout rooms, and the values are their subjects.
- breakout_rooms_active - Whether there was a breakout room created in the main room.
- breakout_rooms_counter - A counter for breakout rooms created in the main room.
- lobby_disabled - Whether lobby was disabled for the room by the backend.
- flip_participant_nick - Used in mod_muc_flip, when flipping a participant we store the nick of the second device/participant. Same processing as kicked_participant_nick.
- hideDisplayNameForGuests - When set to true, the display name of participants is hidden for guests.
- jicofo_lock - A boolean value, when set to true the room is locked waiting for Jicofo to join. All attempts to join will be queued until Jicofo joins.

@@ -19,7 +19,7 @@ local internal_room_jid_match_rewrite = util.internal_room_jid_match_rewrite;

-- We must filter stanzas in order to hook in to all incoming and outgoing messaging which skips the stanza routers
function filter_stanza(stanza, session)
if stanza.skipMapping then
if stanza.skipMapping or session.type == 's2sout' then
return stanza;
end


@@ -48,6 +48,7 @@ local NOTIFY_LOBBY_ACCESS_DENIED = 'LOBBY-ACCESS-DENIED';
local util = module:require "util";
local ends_with = util.ends_with;
local get_room_by_name_and_subdomain = util.get_room_by_name_and_subdomain;
local internal_room_jid_match_rewrite = util.internal_room_jid_match_rewrite;
local get_room_from_jid = util.get_room_from_jid;
local is_healthcheck_room = util.is_healthcheck_room;
local presence_check_status = util.presence_check_status;
@@ -412,11 +413,16 @@ function process_lobby_muc_loaded(lobby_muc, host_module)
host_module:hook('host-disco-info-node', function (event)
local session, reply, node = event.origin, event.reply, event.node;
if node == LOBBY_IDENTITY_TYPE
and session.jitsi_web_query_room
and check_display_name_required then
and session.jitsi_web_query_room then
local room = get_room_by_name_and_subdomain(session.jitsi_web_query_room, session.jitsi_web_query_prefix);

if room and room._data.lobbyroom then
if room and room._data.lobby_disabled then
-- we cannot remove the child from the stanza so let's just change the type
local lobby_identity = reply:get_child_with_attr('identity', nil, 'type', LOBBY_IDENTITY_TYPE);
lobby_identity.attr.type = 'DISABLED_'..LOBBY_IDENTITY_TYPE;
end

if check_display_name_required and room and room._data.lobbyroom then
reply:tag('feature', { var = DISPLAY_NAME_REQUIRED_FEATURE }):up();
end
end
@@ -488,6 +494,11 @@ process_host_module(main_muc_component_config, function(host_module, host)
end
local members_only = event.fields['muc#roomconfig_membersonly'] and true or nil;
if members_only then
-- if lobby disabled just ignore and return
if room._data.lobby_disabled then
module:log('warn', 'Lobby is disabled for room %s, cannot enable members only', room.jid);
return;
end
local lobby_created = attach_lobby_room(room, actor);
if lobby_created then
module:fire_event('jitsi-lobby-enabled', { room = room; });
@@ -502,7 +513,7 @@ process_host_module(main_muc_component_config, function(host_module, host)
host_module:fire_event('room-metadata-changed', { room = room; });
end
elseif room._data.lobbyroom then
destroy_lobby_room(room, room.jid, nil);
destroy_lobby_room(room, internal_room_jid_match_rewrite(room.jid), nil); --
module:fire_event('jitsi-lobby-disabled', { room = room; });
notify_lobby_enabled(room, actor, false);
end

@@ -182,7 +182,10 @@ local function filterTranscriptionResult(event)
local occupant_jid = stanza.attr.from;
local occupant = room:get_occupant_by_real_jid(occupant_jid);
if not occupant then
module:log("error", "Occupant sending msg %s was not found in room %s", occupant_jid, room.jid)
-- skip logs for messages coming from s2s
if event.origin.type == 'c2s' then
module:log("error", "Occupant sending msg %s was not found in room %s", occupant_jid, room.jid);
end
return;
end


@@ -12,6 +12,7 @@ local is_admin = util.is_admin;
local is_healthcheck_room = util.is_healthcheck_room;
local is_moderated = util.is_moderated;
local process_host_module = util.process_host_module;
local internal_room_jid_match_rewrite = util.internal_room_jid_match_rewrite;

local disable_auto_owners = module:get_option_boolean('wait_for_host_disable_auto_owners', false);

@@ -78,7 +79,7 @@ module:hook('muc-occupant-pre-join', function (event)
module:fire_event('room_host_arrived', room.jid, session);
lobby_host:fire_event('destroy-lobby-room', {
room = room,
newjid = room.jid,
newjid = internal_room_jid_match_rewrite(room.jid),
message = 'Host arrived.',
});
elseif not room:get_members_only() then

@@ -33,15 +33,6 @@ local main_domain = module:get_option_string('main_domain');
-- only the visitor prosody has main_domain setting
local is_visitor_prosody = main_domain ~= nil;

-- Logs a warning and returns true if a room does not
-- have poll data associated with it.
local function check_polls(room)
if room.polls == nil then
module:log("warn", "no polls data in room");
return true;
end
return false;
end

local function validate_polls(data)
if type(data) ~= 'table' then
@@ -53,7 +44,7 @@ local function validate_polls(data)
if data.command ~= 'new-poll' and data.command ~= 'answer-poll' then
return false;
end
if type(data.answers) ~= 'table' then
if type(data.answers) ~= 'table' or #data.answers == 0 then
return false;
end

@@ -222,8 +213,6 @@ end
return true;
end

if check_polls(room) then return end

local poll_creator = occupant_details;

if room.polls.count >= POLLS_LIMIT then
@@ -286,8 +275,6 @@ end

module:context(jid.host(room.jid)):fire_event('poll-created', pollData);
elseif data.command == "answer-poll" then
if check_polls(room) then return end

local poll = room.polls.by_id[data.pollId];
if poll == nil then
module:log("warn", "answering inexistent poll %s", data.pollId);

@@ -26,6 +26,7 @@
"react/features/face-landmarks",
"react/features/keyboard-shortcuts",
"react/features/no-audio-signal",
"react/features/pip",
"react/features/noise-suppression",
"react/features/old-client-notification",
"react/features/remote-control",

@@ -160,7 +160,14 @@ function getConfig(options = {}) {
'css-loader'
]
}, {
// Import SVG as raw text when using ?raw query parameter.
test: /\.svg$/,
resourceQuery: /raw/,
type: 'asset/source'
}, {
// Import SVG as React component (default).
test: /\.svg$/,
resourceQuery: { not: [ /raw/ ] },
use: [ {
loader: '@svgr/webpack',
options: {
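On the consuming side, the two rules enable the following import styles (the file name and component are only an example): the default import goes through @svgr/webpack and yields a React component, while appending ?raw goes through the asset/source rule and yields the SVG markup as a string.

    import React from 'react';

    // Default: SVG as a React component (handled by @svgr/webpack).
    import LogoIcon from '../images/logo.svg';

    // With ?raw: the SVG source as a plain string (handled by asset/source).
    import logoMarkup from '../images/logo.svg?raw';

    export const Logo = () => <LogoIcon />;
    export const logoMarkupLength = logoMarkup.length;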