feat(pip): Add Picture-in-Picture support for Electron

Implements Picture-in-Picture functionality for the Electron wrapper to maintain video engagement when users are not actively focused on the conference window. This feature addresses the need to keep users visually connected to the conference even when multitasking.

Key features:
- Automatic PiP mode activation and deactivation based on user interaction
- Displays the large-video participant's stream, or renders their avatar on a canvas when video is unavailable
- Provides audio/video mute controls via MediaSession API directly in PiP window
- Adds API events (_pip-requested) for Electron wrapper integration

Implementation includes new pip feature module with Redux architecture, canvas-based avatar rendering with custom backgrounds support, and integration with existing mute/unmute logic. Depends on jitsi-meet-electron-sdk#479 for proper user gesture handling in Electron.
This commit is contained in:
Hristo Terezov
2025-11-24 23:02:28 -06:00
parent b517f614b3
commit d06b847319
31 changed files with 1492 additions and 10 deletions

5
custom.d.ts vendored
View File

@@ -2,3 +2,8 @@ declare module '*.svg' {
const content: any;
export default content;
}
// Allows importing SVG files as raw strings via the `?raw` query suffix
// (e.g. `import icon from './icon.svg?raw'`), as handled by the bundler.
declare module '*.svg?raw' {
    const content: string;

    export default content;
}

View File

@@ -107,6 +107,7 @@ import {
open as openParticipantsPane
} from '../../react/features/participants-pane/actions';
import { getParticipantsPaneOpen } from '../../react/features/participants-pane/functions';
import { hidePiP, showPiP } from '../../react/features/pip/actions';
import { startLocalVideoRecording, stopLocalVideoRecording } from '../../react/features/recording/actions.any';
import { grantRecordingConsent, grantRecordingConsentAndUnmute } from '../../react/features/recording/actions.web';
import { RECORDING_METADATA_ID, RECORDING_TYPES } from '../../react/features/recording/constants';
@@ -907,6 +908,12 @@ function initCommands() {
backgroundType: VIRTUAL_BACKGROUND_TYPE.IMAGE,
virtualSource: backgroundImage
}, jitsiTrack));
},
'show-pip': () => {
APP.store.dispatch(showPiP());
},
'hide-pip': () => {
APP.store.dispatch(hidePiP());
}
};
transport.on('event', ({ data, name }) => {
@@ -2249,6 +2256,40 @@ class API {
});
}
/**
* Notify external application (if API is enabled) that Picture-in-Picture was requested.
* Used by Electron to handle PiP requests with proper user gesture context.
*
* @returns {void}
*/
notifyPictureInPictureRequested() {
this._sendEvent({
name: '_pip-requested'
});
}
/**
* Notify external application (if API is enabled) that Picture-in-Picture mode was entered.
*
* @returns {void}
*/
notifyPictureInPictureEntered() {
this._sendEvent({
name: 'pip-entered'
});
}
/**
* Notify external application (if API is enabled) that Picture-in-Picture mode was exited.
*
* @returns {void}
*/
notifyPictureInPictureLeft() {
this._sendEvent({
name: 'pip-left'
});
}
/**
* Notify external application ( if API is enabled) that a participant menu button was clicked.
*

View File

@@ -2,6 +2,7 @@ import { jitsiLocalStorage } from '@jitsi/js-utils/jitsi-local-storage';
import EventEmitter from 'events';
import { urlObjectToString } from '../../../react/features/base/util/uri';
import { isPiPEnabled } from '../../../react/features/pip/external-api.shared';
import {
PostMessageTransportBackend,
Transport
@@ -94,7 +95,9 @@ const commands = {
toggleTileView: 'toggle-tile-view',
toggleVirtualBackgroundDialog: 'toggle-virtual-background',
toggleVideo: 'toggle-video',
toggleWhiteboard: 'toggle-whiteboard'
toggleWhiteboard: 'toggle-whiteboard',
showPiP: 'show-pip',
hidePiP: 'hide-pip'
};
/**
@@ -102,6 +105,9 @@ const commands = {
* events expected by jitsi-meet.
*/
const events = {
'_pip-requested': '_pipRequested',
'pip-entered': 'pipEntered',
'pip-left': 'pipLeft',
'avatar-changed': 'avatarChanged',
'audio-availability-changed': 'audioAvailabilityChanged',
'audio-mute-status-changed': 'audioMuteStatusChanged',
@@ -330,6 +336,7 @@ export default class JitsiMeetExternalAPI extends EventEmitter {
this._myUserID = undefined;
this._onStageParticipant = undefined;
this._iAmvisitor = undefined;
this._pipConfig = configOverwrite?.pip;
this._setupListeners();
id++;
}
@@ -625,6 +632,26 @@ export default class JitsiMeetExternalAPI extends EventEmitter {
// Since this is internal event we don't need to emit it to the consumer of the API.
return true;
case 'config-overwrite': {
// Update stored pip config when overwriteConfig is called.
if (data.pip !== undefined) {
const wasEnabled = isPiPEnabled(this._pipConfig);
this._pipConfig = data.pip;
const isEnabled = isPiPEnabled(this._pipConfig);
// Handle enable/disable transitions.
if (!wasEnabled && isEnabled) {
this._setupIntersectionObserver();
} else if (wasEnabled && !isEnabled) {
this._teardownIntersectionObserver();
}
}
// Since this is internal event we don't need to emit it to the consumer of the API.
return true;
}
}
const eventName = events[name];
@@ -649,6 +676,56 @@ export default class JitsiMeetExternalAPI extends EventEmitter {
this.emit(requestName, data, callback);
}
});
this._setupIntersectionObserver();
}
/**
* Sets up IntersectionObserver to monitor iframe visibility.
* Calls showPiP/hidePiP based on visibility.
*
* @private
* @returns {void}
*/
_setupIntersectionObserver() {
if (!isPiPEnabled(this._pipConfig)) {
return;
}
// Don't create duplicate observers.
if (this._intersectionObserver) {
return;
}
this._isIntersecting = true;
this._intersectionObserver = new IntersectionObserver(entries => {
const entry = entries[entries.length - 1];
const wasIntersecting = this._isIntersecting;
this._isIntersecting = entry.isIntersecting;
if (!entry.isIntersecting && wasIntersecting) {
this.showPiP();
} else if (entry.isIntersecting && !wasIntersecting) {
this.hidePiP();
}
});
this._intersectionObserver.observe(this._frame);
}
/**
* Tears down IntersectionObserver.
*
* @private
* @returns {void}
*/
_teardownIntersectionObserver() {
if (this._intersectionObserver) {
this._intersectionObserver.disconnect();
this._intersectionObserver = null;
}
}
/**
@@ -851,6 +928,8 @@ export default class JitsiMeetExternalAPI extends EventEmitter {
this.emit('_willDispose');
this._transport.dispose();
this.removeAllListeners();
this._teardownIntersectionObserver();
if (this._frame && this._frame.parentNode) {
this._frame.parentNode.removeChild(this._frame);
}
@@ -1496,6 +1575,24 @@ export default class JitsiMeetExternalAPI extends EventEmitter {
this.executeCommand('setVirtualBackground', enabled, backgroundImage);
}
/**
* Shows Picture-in-Picture window.
*
* @returns {void}
*/
showPiP() {
this.executeCommand('showPiP');
}
/**
* Hides Picture-in-Picture window.
*
* @returns {void}
*/
hidePiP() {
this.executeCommand('hidePiP');
}
/**
* Opens the desktop picker. This is invoked by the Electron SDK when gDM is used.
*

32
package-lock.json generated
View File

@@ -18279,6 +18279,17 @@
"webrtc-adapter": "8.1.1"
}
},
"node_modules/lib-jitsi-meet/node_modules/@jitsi/js-utils": {
"version": "2.8.2",
"resolved": "https://registry.npmjs.org/@jitsi/js-utils/-/js-utils-2.8.2.tgz",
"integrity": "sha512-tMFuci2lPmbQIFF/f3b5QdkL1vzY6sii9nj0e+K0EEJcFUJiX/QVkv5GbI6pMZ74BYAQXGFZqeASo8hKItniUA==",
"license": "Apache-2.0",
"dependencies": {
"@hapi/bourne": "3.0.0",
"js-md5": "0.7.3",
"ua-parser-js": "1.0.35"
}
},
"node_modules/lib-jitsi-meet/node_modules/@jitsi/logger": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@jitsi/logger/-/logger-2.0.2.tgz",
@@ -18301,6 +18312,12 @@
"integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==",
"license": "MIT"
},
"node_modules/lib-jitsi-meet/node_modules/js-md5": {
"version": "0.7.3",
"resolved": "https://registry.npmjs.org/js-md5/-/js-md5-0.7.3.tgz",
"integrity": "sha512-ZC41vPSTLKGwIRjqDh8DfXoCrdQIyBgspJVPXHBGu4nZlAEvG3nf+jO9avM9RmLiGakg7vz974ms99nEV0tmTQ==",
"license": "MIT"
},
"node_modules/lie": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz",
@@ -39694,6 +39711,16 @@
"webrtc-adapter": "8.1.1"
},
"dependencies": {
"@jitsi/js-utils": {
"version": "2.8.2",
"resolved": "https://registry.npmjs.org/@jitsi/js-utils/-/js-utils-2.8.2.tgz",
"integrity": "sha512-tMFuci2lPmbQIFF/f3b5QdkL1vzY6sii9nj0e+K0EEJcFUJiX/QVkv5GbI6pMZ74BYAQXGFZqeASo8hKItniUA==",
"requires": {
"@hapi/bourne": "3.0.0",
"js-md5": "0.7.3",
"ua-parser-js": "1.0.35"
}
},
"@jitsi/logger": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@jitsi/logger/-/logger-2.0.2.tgz",
@@ -39714,6 +39741,11 @@
"version": "10.4.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz",
"integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="
},
"js-md5": {
"version": "0.7.3",
"resolved": "https://registry.npmjs.org/js-md5/-/js-md5-0.7.3.tgz",
"integrity": "sha512-ZC41vPSTLKGwIRjqDh8DfXoCrdQIyBgspJVPXHBGu4nZlAEvG3nf+jO9avM9RmLiGakg7vz974ms99nEV0tmTQ=="
}
}
},

View File

@@ -5,6 +5,7 @@ import JitsiThemeProvider from '../../base/ui/components/JitsiThemeProvider.web'
import DialogContainer from '../../base/ui/components/web/DialogContainer';
import ChromeExtensionBanner from '../../chrome-extension-banner/components/ChromeExtensionBanner.web';
import OverlayContainer from '../../overlay/components/web/OverlayContainer';
import PiP from '../../pip/components/PiP';
import { AbstractApp } from './AbstractApp';
@@ -47,6 +48,7 @@ export class App extends AbstractApp {
<JitsiThemeProvider>
<GlobalStyles />
<ChromeExtensionBanner />
<PiP />
{ super._createMainElement(component, props) }
</JitsiThemeProvider>
);

View File

@@ -12,6 +12,7 @@ import '../no-audio-signal/middleware';
import '../notifications/middleware';
import '../noise-detection/middleware';
import '../old-client-notification/middleware';
import '../pip/middleware';
import '../power-monitor/middleware';
import '../prejoin/middleware';
import '../remote-control/middleware';

View File

@@ -8,6 +8,7 @@ import '../keyboard-shortcuts/reducer';
import '../no-audio-signal/reducer';
import '../noise-detection/reducer';
import '../participants-pane/reducer';
import '../pip/reducer';
import '../power-monitor/reducer';
import '../prejoin/reducer';
import '../remote-control/reducer';

View File

@@ -57,6 +57,7 @@ import { INoiseDetectionState } from '../noise-detection/reducer';
import { INoiseSuppressionState } from '../noise-suppression/reducer';
import { INotificationsState } from '../notifications/reducer';
import { IParticipantsPaneState } from '../participants-pane/reducer';
import { IPipState } from '../pip/reducer';
import { IPollsState } from '../polls/reducer';
import { IPollsHistoryState } from '../polls-history/reducer';
import { IPowerMonitorState } from '../power-monitor/reducer';
@@ -145,6 +146,7 @@ export interface IReduxState {
'features/noise-suppression': INoiseSuppressionState;
'features/notifications': INotificationsState;
'features/participants-pane': IParticipantsPaneState;
'features/pip': IPipState;
'features/polls': IPollsState;
'features/polls-history': IPollsHistoryState;
'features/power-monitor': IPowerMonitorState;

View File

@@ -5,7 +5,18 @@ import Icon from '../../../icons/components/Icon';
import { pixelsToRem } from '../../../ui/functions.any';
import { isIcon } from '../../functions';
import { IAvatarProps } from '../../types';
import { PRESENCE_AVAILABLE_COLOR, PRESENCE_AWAY_COLOR, PRESENCE_BUSY_COLOR, PRESENCE_IDLE_COLOR } from '../styles';
import {
PRESENCE_AVAILABLE_COLOR,
PRESENCE_AWAY_COLOR,
PRESENCE_BUSY_COLOR,
PRESENCE_IDLE_COLOR
} from '../styles';
import {
AVATAR_DEFAULT_BACKGROUND_COLOR,
getAvatarFont,
getAvatarInitialsColor
} from './styles';
interface IProps extends IAvatarProps {
@@ -48,10 +59,10 @@ interface IProps extends IAvatarProps {
const useStyles = makeStyles()(theme => {
return {
avatar: {
backgroundColor: '#AAA',
backgroundColor: AVATAR_DEFAULT_BACKGROUND_COLOR,
borderRadius: '50%',
color: theme.palette?.text01 || '#fff',
...(theme.typography?.heading1 ?? {}),
color: getAvatarInitialsColor(theme),
...getAvatarFont(theme),
fontSize: 'inherit',
objectFit: 'cover',
textAlign: 'center',

View File

@@ -0,0 +1,26 @@
import { Theme } from '@mui/material/styles';
/**
 * Default avatar background color, used when no theme or custom color applies.
 */
export const AVATAR_DEFAULT_BACKGROUND_COLOR = '#AAA';
/**
 * Extracts the avatar font style (fontFamily, fontWeight, etc.) from the
 * theme's `heading1` typography, falling back to an empty object when the
 * theme defines no typography.
 *
 * @param {Theme} theme - The MUI theme.
 * @returns {Object} The font style object.
 */
export const getAvatarFont = (theme: Theme) => {
    return theme.typography?.heading1 ?? {};
};
/**
 * Default text color for avatar initials when the theme defines none.
 */
export const AVATAR_DEFAULT_INITIALS_COLOR = '#FFFFFF';

/**
 * Resolves the text color used for avatar initials from the theme,
 * defaulting to {@link AVATAR_DEFAULT_INITIALS_COLOR}.
 *
 * @param {Theme} theme - The MUI theme.
 * @returns {string} The text color.
 */
export const getAvatarInitialsColor = (theme: Theme): string => {
    const themeColor = theme.palette?.text01;

    return themeColor || AVATAR_DEFAULT_INITIALS_COLOR;
};

View File

@@ -517,6 +517,10 @@ export interface IConfig {
peopleSearchQueryTypes?: string[];
peopleSearchTokenLocation?: string;
peopleSearchUrl?: string;
pip?: {
disabled?: boolean;
showOnPrejoin?: boolean;
};
preferBosh?: boolean;
preferVisitor?: boolean;
preferredTranscribeLanguage?: string;

View File

@@ -199,6 +199,7 @@ export default [
'participantMenuButtonsWithNotifyClick',
'participantsPane',
'pcStatsInterval',
'pip',
'preferBosh',
'preferVisitor',
'prejoinConfig.enabled',

View File

@@ -13,11 +13,12 @@ import './middleware.any';
* @returns {Function}
*/
MiddlewareRegistry.register(store => next => action => {
const result = next(action);
switch (action.type) {
case SET_CONFIG: {
const { initialWidth, stageFilmstripParticipants } = action.config.filmstrip || {};
const { dispatch, getState } = store;
const result = next(action);
const state = getState();
if (stageFilmstripParticipants !== undefined) {
@@ -39,7 +40,9 @@ MiddlewareRegistry.register(store => next => action => {
window.config = state['features/base/config'];
}
return result;
break;
}
}
return result;
});

View File

@@ -22,7 +22,6 @@ export function assignIfDefined(target: Object, source: Object) {
return to;
}
const MATCH_OPERATOR_REGEXP = /[|\\{}()[\]^$+*?.-]/g;
/**
@@ -79,6 +78,21 @@ export function getJitsiMeetGlobalNS() {
return window.JitsiMeetJS.app;
}
/**
 * Gets the Electron-specific global namespace, lazily creating it on the
 * shared Jitsi Meet global namespace on first access.
 *
 * @returns {Object} The Electron namespace object.
 */
export function getElectronGlobalNS() {
    const globalNS = getJitsiMeetGlobalNS();

    globalNS.electron = globalNS.electron || {};

    return globalNS.electron;
}
/**
* Returns the object that stores the connection times.
*

View File

@@ -13,6 +13,8 @@ import Tooltip from '../../../base/tooltip/components/Tooltip';
import { getIndicatorsTooltipPosition } from '../../../filmstrip/functions.web';
import { appendSuffix } from '../../functions';
import { getDisplayNameColor } from './styles';
/**
* The type of the React {@code Component} props of {@link DisplayName}.
*/
@@ -49,7 +51,7 @@ const useStyles = makeStyles()(theme => {
return {
displayName: {
...theme.typography.labelBold,
color: theme.palette.text01,
color: getDisplayNameColor(theme),
overflow: 'hidden',
textOverflow: 'ellipsis',
whiteSpace: 'nowrap'
@@ -62,7 +64,7 @@ const useStyles = makeStyles()(theme => {
boxShadow: 'none',
padding: 0,
...theme.typography.labelBold,
color: theme.palette.text01
color: getDisplayNameColor(theme)
}
};
});

View File

@@ -121,3 +121,19 @@ export function scaleFontProperty(
return parseFloat(calculatedRemValue.toFixed(3));
}
/**
 * Default text color for the display name when the theme defines none.
 */
export const DISPLAY_NAME_DEFAULT_COLOR = '#FFFFFF';

/**
 * Resolves the display-name text color from the theme, defaulting to
 * {@link DISPLAY_NAME_DEFAULT_COLOR}.
 *
 * @param {Theme} theme - The MUI theme.
 * @returns {string} The text color.
 */
export const getDisplayNameColor = (theme: Theme): string => {
    const themeColor = theme.palette?.text01;

    return themeColor || DISPLAY_NAME_DEFAULT_COLOR;
};

View File

@@ -1,3 +1,5 @@
import { Theme } from '@mui/material/styles';
import { IReduxState } from '../app/types';
import { IStateful } from '../base/app/types';
import { isMobileBrowser } from '../base/environment/utils';
@@ -830,3 +832,13 @@ export function isTopPanelEnabled(state: IReduxState) {
return !filmstrip?.disableTopPanel && participantsCount >= (filmstrip?.minParticipantCountForTopPanel ?? 50);
}
/**
 * Returns the thumbnail background color from the theme.
 *
 * NOTE(review): unlike similar theme helpers this does not guard against a
 * missing `palette` — presumably the theme always defines one; confirm.
 *
 * @param {Theme} theme - The MUI theme.
 * @returns {string} The background color.
 */
export function getThumbnailBackgroundColor(theme: Theme): string {
    const { palette } = theme;

    return palette.uiBackground;
}

View File

@@ -0,0 +1,4 @@
/**
 * Redux action type dispatched to record whether Picture-in-Picture is
 * currently active.
 *
 * {
 *     type: SET_PIP_ACTIVE,
 *     isPiPActive: boolean
 * }
 */
export const SET_PIP_ACTIVE = 'SET_PIP_ACTIVE';

View File

@@ -0,0 +1,193 @@
import { IStore } from '../app/types';
import { MEDIA_TYPE } from '../base/media/constants';
import { isLocalTrackMuted } from '../base/tracks/functions.any';
import { handleToggleVideoMuted } from '../toolbox/actions.any';
import { muteLocal } from '../video-menu/actions.any';
import { SET_PIP_ACTIVE } from './actionTypes';
import {
cleanupMediaSessionHandlers,
enterPiP,
setupMediaSessionHandlers,
shouldShowPiP
} from './functions';
import logger from './logger';
/**
 * Creates a Redux action that records whether Picture-in-Picture is active.
 *
 * @param {boolean} isPiPActive - Whether PiP is active.
 * @returns {{
 *     type: SET_PIP_ACTIVE,
 *     isPiPActive: boolean
 * }}
 */
export function setPiPActive(isPiPActive: boolean) {
    return {
        isPiPActive,
        type: SET_PIP_ACTIVE
    };
}
/**
 * Toggles the local audio mute state from the PiP MediaSession controls by
 * dispatching the very same {@code muteLocal} action the toolbar audio
 * button uses (including its GUM-pending handling).
 *
 * @returns {Function}
 */
export function toggleAudioFromPiP() {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const tracks = getState()['features/base/tracks'];
        const muted = isLocalTrackMuted(tracks, MEDIA_TYPE.AUDIO);

        dispatch(muteLocal(!muted, MEDIA_TYPE.AUDIO));
    };
}
/**
 * Toggles the local video mute state from the PiP MediaSession controls by
 * dispatching the very same {@code handleToggleVideoMuted} action the
 * toolbar video button uses (showUI = true, ensureTrack = true).
 *
 * @returns {Function}
 */
export function toggleVideoFromPiP() {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const tracks = getState()['features/base/tracks'];
        const muted = isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO);

        dispatch(handleToggleVideoMuted(!muted, true, true));
    };
}
/**
 * Exits Picture-in-Picture mode: asks the browser to leave PiP when a PiP
 * element exists, marks PiP inactive in Redux and removes the MediaSession
 * handlers.
 *
 * @returns {Function}
 */
export function exitPiP() {
    return (dispatch: IStore['dispatch']) => {
        if (document.pictureInPictureElement) {
            document.exitPictureInPicture()
                .then(() => logger.debug('Exited Picture-in-Picture mode'))
                .catch((err: Error) => logger.error(`Error while exiting PiP: ${err.message}`));
        }

        dispatch(setPiPActive(false));
        cleanupMediaSessionHandlers();
    };
}
/**
 * Handles window blur / tab switch by entering PiP mode when it is not
 * already active.
 *
 * @param {HTMLVideoElement} videoElement - The video element to use for PiP.
 * @returns {Function}
 */
export function handleWindowBlur(videoElement: HTMLVideoElement) {
    return (_dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const { isPiPActive } = getState()['features/pip'] ?? {};

        if (!isPiPActive) {
            enterPiP(videoElement);
        }
    };
}
/**
 * Handles window focus by exiting PiP mode when it is currently active
 * (matches the old always-on-top behavior).
 *
 * @returns {Function}
 */
export function handleWindowFocus() {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const { isPiPActive } = getState()['features/pip'] ?? {};

        if (isPiPActive) {
            dispatch(exitPiP());
        }
    };
}
/**
 * Handles the browser's {@code leavepictureinpicture} event: updates Redux
 * state, removes the MediaSession handlers and notifies the external API.
 *
 * @returns {Function}
 */
export function handlePiPLeaveEvent() {
    return (dispatch: IStore['dispatch']) => {
        logger.log('Left Picture-in-Picture mode');

        dispatch(setPiPActive(false));
        cleanupMediaSessionHandlers();

        APP.API.notifyPictureInPictureLeft();
    };
}
/**
 * Handles the browser's {@code enterpictureinpicture} event: updates Redux
 * state, installs the MediaSession handlers and notifies the external API.
 *
 * NOTE(review): the name uses 'Pip' casing while its counterpart is
 * {@code handlePiPLeaveEvent}; kept as-is for caller compatibility.
 *
 * @returns {Function}
 */
export function handlePipEnterEvent() {
    return (dispatch: IStore['dispatch']) => {
        logger.log('Entered Picture-in-Picture mode');

        dispatch(setPiPActive(true));
        setupMediaSessionHandlers(dispatch);

        APP.API.notifyPictureInPictureEntered();
    };
}
/**
 * Shows the Picture-in-Picture window. Invoked from the external API when
 * the iframe stops being visible (IntersectionObserver).
 *
 * @returns {Function}
 */
export function showPiP() {
    return (_dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const state = getState();

        if (!shouldShowPiP(state) || state['features/pip']?.isPiPActive) {
            return;
        }

        const videoElement = document.getElementById('pipVideo') as HTMLVideoElement;

        if (videoElement) {
            enterPiP(videoElement);
        }
    };
}
/**
 * Hides the Picture-in-Picture window. Invoked from the external API when
 * the iframe becomes visible again.
 *
 * @returns {Function}
 */
export function hidePiP() {
    return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
        const { isPiPActive } = getState()['features/pip'] ?? {};

        if (isPiPActive) {
            dispatch(exitPiP());
        }
    };
}

View File

@@ -0,0 +1,24 @@
import React from 'react';
import { useSelector } from 'react-redux';
import { shouldShowPiP } from '../functions';
import PiPVideoElement from './PiPVideoElement';
/**
* Wrapper component that conditionally renders PiPVideoElement.
* Prevents mounting when PiP is disabled or on prejoin without showOnPrejoin flag.
*
* @returns {React.ReactElement | null}
*/
function PiP() {
const showPiP = useSelector(shouldShowPiP);
if (!showPiP) {
return null;
}
return <PiPVideoElement />;
}
export default PiP;

View File

@@ -0,0 +1,208 @@
import React, { useEffect, useRef } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { makeStyles } from 'tss-react/mui';
import { IReduxState, IStore } from '../../app/types';
import { getAvatarFont, getAvatarInitialsColor } from '../../base/avatar/components/web/styles';
import { getLocalParticipant, getParticipantDisplayName } from '../../base/participants/functions';
import { isTrackStreamingStatusActive } from '../../connection-indicator/functions';
import { getDisplayNameColor } from '../../display-name/components/web/styles';
import { getThumbnailBackgroundColor } from '../../filmstrip/functions.web';
import { getLargeVideoParticipant } from '../../large-video/functions';
import { isPrejoinPageVisible } from '../../prejoin/functions.any';
import { handlePiPLeaveEvent, handlePipEnterEvent, handleWindowBlur, handleWindowFocus } from '../actions';
import { getPiPVideoTrack } from '../functions';
import { useCanvasAvatar } from '../hooks';
import logger from '../logger';
const useStyles = makeStyles()(() => {
    return {
        // Keep the <video> element in the DOM (so it can serve as the PiP
        // source) while making it effectively invisible and non-interactive.
        hiddenVideo: {
            height: '1px',
            left: '-9999px',
            opacity: 0,
            pointerEvents: 'none' as const,
            position: 'absolute' as const,
            top: '-9999px',
            width: '1px'
        }
    };
});
/**
 * Component that renders a hidden video element for Picture-in-Picture.
 * Automatically switches between the real video track and a canvas-based
 * avatar stream depending on video availability.
 *
 * @returns {JSX.Element} The hidden video element.
 */
const PiPVideoElement: React.FC = () => {
    const { classes, theme } = useStyles();
    const videoRef = useRef<HTMLVideoElement>(null);

    // Tracks the previously attached track so it can be detached on change.
    const previousTrackRef = useRef<any>(null);

    // Redux selectors.
    const isOnPrejoin = useSelector(isPrejoinPageVisible);
    const localParticipant = useSelector(getLocalParticipant);
    const largeVideoParticipant = useSelector(getLargeVideoParticipant);

    // Use the local participant during prejoin, otherwise the large video participant.
    const participant = isOnPrejoin ? localParticipant : largeVideoParticipant;

    // Get the appropriate video track based on prejoin state.
    const videoTrack = useSelector((state: IReduxState) =>
        getPiPVideoTrack(state, participant)
    );
    const displayName = useSelector((state: IReduxState) =>
        participant?.id
            ? getParticipantDisplayName(state, participant.id)
            : ''
    );
    const customAvatarBackgrounds = useSelector((state: IReduxState) =>
        state['features/dynamic-branding']?.avatarBackgrounds || []
    );
    const dispatch: IStore['dispatch'] = useDispatch();

    // NOTE(review): reading the ref during render means the effects below see
    // null on the first render and rely on a subsequent re-render to attach
    // their listeners — confirm this is intentional.
    const videoElement = videoRef.current;

    // Theme-derived styling passed to the canvas avatar renderer.
    const avatarFont = getAvatarFont(theme);
    const fontFamily = (avatarFont as any).fontFamily ?? 'Inter, sans-serif';
    const initialsColor = getAvatarInitialsColor(theme);
    const displayNameColor = getDisplayNameColor(theme);

    const { canvasStream } = useCanvasAvatar({
        participant,
        displayName,
        customAvatarBackgrounds,
        backgroundColor: getThumbnailBackgroundColor(theme),
        fontFamily,
        initialsColor,
        displayNameColor
    });

    // Show the avatar when there is no usable video: no track, a muted track,
    // or a remote track whose streaming status is not active.
    const shouldShowAvatar = !videoTrack
        || videoTrack.muted
        || (!videoTrack.local && !isTrackStreamingStatusActive(videoTrack));

    /**
     * Effect: Handle switching between the real video track and the canvas
     * avatar stream.
     */
    useEffect(() => {
        if (!videoElement) {
            return;
        }

        const previousTrack = previousTrackRef.current;

        // Detach the previously attached track, if any.
        if (previousTrack?.jitsiTrack) {
            try {
                previousTrack.jitsiTrack.detach(videoElement);
            } catch (error) {
                logger.error('Error detaching previous track:', error);
            }
        }

        if (shouldShowAvatar) {
            // Use the canvas stream for the avatar.
            // Only set srcObject if it's different to avoid interrupting playback.
            if (canvasStream && videoElement.srcObject !== canvasStream) {
                videoElement.srcObject = canvasStream;
            }
        } else if (videoTrack?.jitsiTrack) {
            // Attach the real video track.
            videoTrack.jitsiTrack.attach(videoElement)
                .catch((error: Error) => {
                    logger.error('Error attaching video track:', error);
                });
        }

        previousTrackRef.current = videoTrack;

        // Cleanup on unmount or track change.
        return () => {
            if (videoTrack?.jitsiTrack && videoElement) {
                try {
                    videoTrack.jitsiTrack.detach(videoElement);
                } catch (error) {
                    logger.error('Error during cleanup:', error);
                }
            }
        };
    }, [ videoTrack, shouldShowAvatar, canvasStream, videoElement ]);

    /**
     * Effect: Window blur/focus and visibility change listeners.
     * Enters PiP on blur, exits on focus (matches old AOT behavior).
     */
    useEffect(() => {
        if (!videoElement) {
            return;
        }

        const onWindowBlur = () => dispatch(handleWindowBlur(videoElement));
        const onWindowFocus = () => {
            // In the use case where the PiP is closed by the 'X' or 'back to main window' buttons, this handler is
            // called before the leavepictureinpicture handler. From there we call document.exitPictureInPicture()
            // which seems to put Chrome into a weird state - document.exitPictureInPicture() never resolves, the
            // leavepictureinpicture is never triggered and it is not possible to display PiP again.
            // This is probably a browser bug. To work around it we have the 100ms timeout here. This way this event
            // is triggered after the leavepictureinpicture event and everything seems to work well.
            setTimeout(() => {
                dispatch(handleWindowFocus());
            }, 100);
        };
        const onVisibilityChange = () => {
            // Treat a hidden document (tab switch) the same as window blur.
            if (document.hidden) {
                onWindowBlur();
            }
        };

        window.addEventListener('blur', onWindowBlur);
        window.addEventListener('focus', onWindowFocus);
        document.addEventListener('visibilitychange', onVisibilityChange);

        return () => {
            window.removeEventListener('blur', onWindowBlur);
            window.removeEventListener('focus', onWindowFocus);
            document.removeEventListener('visibilitychange', onVisibilityChange);
        };
    }, [ dispatch, videoElement ]);

    /**
     * Effect: PiP enter/leave event listeners.
     * Mirrors browser PiP events into Redux state.
     */
    useEffect(() => {
        if (!videoElement) {
            return;
        }

        const onEnterPiP = () => {
            dispatch(handlePipEnterEvent());
        };
        const onLeavePiP = () => {
            dispatch(handlePiPLeaveEvent());
        };

        videoElement.addEventListener('enterpictureinpicture', onEnterPiP);
        videoElement.addEventListener('leavepictureinpicture', onLeavePiP);

        return () => {
            videoElement.removeEventListener('enterpictureinpicture', onEnterPiP);
            videoElement.removeEventListener('leavepictureinpicture', onLeavePiP);
        };
    }, [ videoElement, dispatch ]);

    return (
        <video
            autoPlay = { true }
            className = { classes.hiddenVideo }
            id = 'pipVideo'
            muted = { true }
            playsInline = { true }
            ref = { videoRef } />
    );
};

export default PiPVideoElement;

View File

@@ -0,0 +1,31 @@
/**
 * Shared utilities for the PiP feature used by external_api.js.
 *
 * IMPORTANT: Keep this file minimal with no heavy dependencies — it is
 * bundled into external_api.min.js and that bundle must stay slim. Only
 * import lightweight modules here.
 */

/**
 * Checks whether the current environment is Electron. Implemented as an
 * inline user-agent check to avoid pulling in BrowserDetection (and its
 * ua-parser dependency).
 *
 * @returns {boolean} - True if running in Electron.
 */
function isElectron(): boolean {
    const { userAgent } = navigator;

    return userAgent.includes('Electron');
}

/**
 * Checks whether PiP is enabled based on the config and environment:
 * enabled unless explicitly disabled, and only when running in Electron.
 *
 * @param {Object} pipConfig - The pip config object.
 * @returns {boolean} - True if PiP is enabled.
 */
export function isPiPEnabled(pipConfig?: { disabled?: boolean; }): boolean {
    if (pipConfig?.disabled) {
        return false;
    }

    return isElectron();
}

View File

@@ -0,0 +1,461 @@
import { IReduxState, IStore } from '../app/types';
import { AVATAR_DEFAULT_BACKGROUND_COLOR } from '../base/avatar/components/web/styles';
import { getAvatarColor, getInitials } from '../base/avatar/functions';
import { leaveConference } from '../base/conference/actions';
import { browser } from '../base/lib-jitsi-meet';
import { IParticipant } from '../base/participants/types';
import { getLocalVideoTrack } from '../base/tracks/functions.any';
import { getVideoTrackByParticipant } from '../base/tracks/functions.web';
import { isPrejoinPageVisible } from '../prejoin/functions.any';
import { toggleAudioFromPiP, toggleVideoFromPiP } from './actions';
import { isPiPEnabled } from './external-api.shared';
import logger from './logger';
import { IMediaSessionState } from './types';
/**
 * Gets the appropriate video track for PiP based on prejoin state: the
 * local video track during prejoin, otherwise the given (large video)
 * participant's track.
 *
 * @param {IReduxState} state - Redux state.
 * @param {IParticipant | undefined} participant - Participant to get the track for.
 * @returns {ITrack | undefined} The video track or undefined.
 */
export function getPiPVideoTrack(state: IReduxState, participant: IParticipant | undefined) {
    if (isPrejoinPageVisible(state)) {
        return getLocalVideoTrack(state['features/base/tracks']);
    }

    return getVideoTrackByParticipant(state, participant);
}
/**
 * Determines whether PiP should be shown for the current app state: PiP
 * must be enabled, and on the prejoin page it is only shown when the
 * {@code showOnPrejoin} config flag is set.
 *
 * @param {IReduxState} state - Redux state.
 * @returns {boolean} Whether PiP should be shown.
 */
export function shouldShowPiP(state: IReduxState): boolean {
    const pipConfig = state['features/base/config'].pip;

    if (!isPiPEnabled(pipConfig)) {
        return false;
    }

    // Don't show PiP on prejoin unless explicitly enabled via config.
    if (isPrejoinPageVisible(state) && !(pipConfig?.showOnPrejoin ?? false)) {
        return false;
    }

    return true;
}
/**
 * Draws an image-based avatar as a circle-clipped image on the canvas.
 *
 * @param {CanvasRenderingContext2D} ctx - Canvas 2D context.
 * @param {string} imageUrl - URL of the avatar image.
 * @param {boolean | undefined} useCORS - Whether to load the image with CORS.
 * @param {number} centerX - X coordinate of the avatar center.
 * @param {number} centerY - Y coordinate of the avatar center.
 * @param {number} radius - Radius of the avatar circle.
 * @returns {Promise<void>}
 * @throws {Error} When the image cannot be loaded or decoded.
 */
export async function drawImageAvatar(
        ctx: CanvasRenderingContext2D,
        imageUrl: string,
        useCORS: boolean | undefined,
        centerX: number,
        centerY: number,
        radius: number
): Promise<void> {
    const img = new Image();

    if (useCORS) {
        // Must be set before src for the CORS mode to take effect.
        img.crossOrigin = 'anonymous';
    }
    img.src = imageUrl;

    try {
        await img.decode();

        ctx.save();
        ctx.beginPath();
        ctx.arc(centerX, centerY, radius, 0, Math.PI * 2);
        ctx.clip();

        const diameter = radius * 2;

        ctx.drawImage(img, centerX - radius, centerY - radius, diameter, diameter);
        ctx.restore();
    } catch (error) {
        logger.error('Failed to draw image avatar', error);
        throw new Error('Image load failed');
    }
}
/**
* Draws an initials-based avatar with a colored background on canvas.
*
* @param {CanvasRenderingContext2D} ctx - Canvas 2D context.
* @param {string} name - Participant's display name.
* @param {Array<string>} customAvatarBackgrounds - Custom avatar background colors.
* @param {number} centerX - X coordinate of avatar center.
* @param {number} centerY - Y coordinate of avatar center.
* @param {number} radius - Radius of the avatar circle.
* @param {string} fontFamily - Font family to use for initials.
* @param {string} textColor - Color for the initials text.
* @returns {void}
*/
export function drawInitialsAvatar(
ctx: CanvasRenderingContext2D,
name: string,
customAvatarBackgrounds: Array<string>,
centerX: number,
centerY: number,
radius: number,
fontFamily: string,
textColor: string
) {
const initials = getInitials(name);
const color = getAvatarColor(name, customAvatarBackgrounds);
ctx.fillStyle = color;
ctx.beginPath();
ctx.arc(centerX, centerY, radius, 0, Math.PI * 2);
ctx.fill();
ctx.fillStyle = textColor;
ctx.font = `bold 80px ${fontFamily}`;
ctx.textAlign = 'center';
ctx.textBaseline = 'middle';
ctx.fillText(initials, centerX, centerY);
}
/**
 * Draws the default user icon when no avatar is available.
 *
 * @param {CanvasRenderingContext2D} ctx - Canvas 2D context.
 * @param {HTMLImageElement | null} defaultIcon - Preloaded default icon image.
 * @param {number} centerX - X coordinate of icon center.
 * @param {number} centerY - Y coordinate of icon center.
 * @param {number} radius - Radius of the icon circle.
 * @returns {void}
 */
export function drawDefaultIcon(
    ctx: CanvasRenderingContext2D,
    defaultIcon: HTMLImageElement | null,
    centerX: number,
    centerY: number,
    radius: number
) {
    // Plain colored circle as a backdrop.
    ctx.fillStyle = AVATAR_DEFAULT_BACKGROUND_COLOR;
    ctx.beginPath();
    ctx.arc(centerX, centerY, radius, 0, 2 * Math.PI);
    ctx.fill();

    if (!defaultIcon) {
        return;
    }

    // Icon drawn at half the circle's diameter, centered in the circle.
    const iconSize = radius;

    ctx.drawImage(defaultIcon, centerX - iconSize / 2, centerY - iconSize / 2, iconSize, iconSize);
}
/**
 * Maximum number of characters of the display name shown before truncation.
 */
const DISPLAY_NAME_MAX_CHARS = 25;

/**
 * Draws the participant's display name below the avatar.
 * Truncates long names with ellipsis using a simple character limit.
 *
 * @param {CanvasRenderingContext2D} ctx - Canvas 2D context.
 * @param {string} displayName - Participant's display name.
 * @param {number} centerX - X coordinate of text center.
 * @param {number} y - Y coordinate of text top.
 * @param {string} fontFamily - Font family to use for display name.
 * @param {string} textColor - Color for the display name text.
 * @returns {void}
 */
export function drawDisplayName(
    ctx: CanvasRenderingContext2D,
    displayName: string,
    centerX: number,
    y: number,
    fontFamily: string,
    textColor: string
) {
    let label = displayName;

    if (label.length > DISPLAY_NAME_MAX_CHARS) {
        label = `${label.slice(0, DISPLAY_NAME_MAX_CHARS)}...`;
    }

    ctx.fillStyle = textColor;
    ctx.font = `24px ${fontFamily}`;
    ctx.textAlign = 'center';
    ctx.textBaseline = 'top';
    ctx.fillText(label, centerX, y);
}
/**
 * Renders a complete avatar (image, initials, or default icon) with display name on canvas.
 *
 * @param {HTMLCanvasElement} canvas - The canvas element.
 * @param {CanvasRenderingContext2D} ctx - Canvas 2D context.
 * @param {IParticipant | undefined} participant - The participant to render.
 * @param {string} displayName - The display name to show.
 * @param {Array<string>} customAvatarBackgrounds - Custom avatar background colors.
 * @param {HTMLImageElement | null} defaultIcon - Preloaded default icon image.
 * @param {string} backgroundColor - Background color for the canvas.
 * @param {string} fontFamily - Font family to use for text rendering.
 * @param {string} initialsColor - Color for avatar initials text.
 * @param {string} displayNameColor - Color for display name text.
 * @returns {Promise<void>}
 */
export async function renderAvatarOnCanvas(
    canvas: HTMLCanvasElement,
    ctx: CanvasRenderingContext2D,
    participant: IParticipant | undefined,
    displayName: string,
    customAvatarBackgrounds: Array<string>,
    defaultIcon: HTMLImageElement | null,
    backgroundColor: string,
    fontFamily: string,
    initialsColor: string,
    displayNameColor: string
): Promise<void> {
    const { width, height } = canvas;
    const centerX = width / 2;
    const centerY = height / 2;
    const avatarRadius = 100;

    // Display name is drawn just below the avatar circle.
    const textY = centerY + avatarRadius + 20;

    // Paint the background over the whole canvas first.
    ctx.fillStyle = backgroundColor;
    ctx.fillRect(0, 0, width, height);

    let drewImage = false;
    const avatarUrl = participant?.loadableAvatarUrl;

    if (avatarUrl) {
        try {
            await drawImageAvatar(
                ctx,
                avatarUrl,
                participant?.loadableAvatarUrlUseCORS,
                centerX,
                centerY,
                avatarRadius
            );
            drewImage = true;
        } catch (error) {
            // Fall through to the initials/default-icon rendering below.
            logger.warn('Failed to load image avatar, falling back.', error);
        }
    }

    if (!drewImage) {
        if (participant?.name) {
            drawInitialsAvatar(
                ctx, participant.name, customAvatarBackgrounds, centerX, centerY, avatarRadius, fontFamily, initialsColor
            );
        } else {
            drawDefaultIcon(ctx, defaultIcon, centerX, centerY, avatarRadius);
        }
    }

    drawDisplayName(ctx, displayName, centerX, textY, fontFamily, displayNameColor);
}
/**
 * Requests picture-in-picture mode for the pip video element.
 *
 * NOTE: Called by Electron main process with userGesture: true.
 *
 * @returns {void}
 */
export function requestPictureInPicture() {
    // getElementById may return null; keep the type honest instead of
    // asserting a non-null HTMLVideoElement.
    const video = document.getElementById('pipVideo') as HTMLVideoElement | null;

    if (!video) {
        logger.error('PiP video element (#pipVideo) not found');

        return;
    }

    if (document.pictureInPictureElement) {
        logger.warn('Already in PiP mode');

        return;
    }

    /**
     * Issues the actual PiP request and logs any rejection.
     *
     * @param {string} context - Extra text appended to the error log.
     * @returns {void}
     */
    const request = (context: string) => {
        // @ts-ignore - requestPictureInPicture is not yet in all TypeScript definitions.
        video.requestPictureInPicture().catch((err: Error) => {
            logger.error(`Error while requesting PiP${context}: ${err.message}`);
        });
    };

    // readyState >= 1 (HAVE_METADATA) means video dimensions are available,
    // which the browser requires before granting a PiP request.
    if (video.readyState < 1) {
        logger.warn('Video metadata not loaded yet, waiting...');

        // Wait for metadata to load before requesting PiP.
        video.addEventListener('loadedmetadata', () => request(' after metadata loaded'), { once: true });

        return;
    }

    request('');
}
/**
 * Action to enter Picture-in-Picture mode.
 * Handles both browser and Electron environments.
 *
 * @param {HTMLVideoElement} videoElement - The video element to call requestPictureInPicture on.
 * @returns {void}
 */
export function enterPiP(videoElement: HTMLVideoElement | undefined | null) {
    if (!videoElement) {
        logger.error('PiP video element not found');
        return;
    }
    // Check if PiP is supported.
    if (!('pictureInPictureEnabled' in document)) {
        logger.error('Picture-in-Picture is not supported in this browser');
        return;
    }
    // PiP can be supported but disabled (e.g. by permissions policy).
    if (document.pictureInPictureEnabled === false) {
        logger.error('Picture-in-Picture is disabled');
        return;
    }
    try {
        // In Electron, use postMessage to request PiP from main process.
        // This bypasses the transient activation requirement by executing
        // requestPictureInPicture with userGesture: true in the main process.
        if (browser.isElectron()) {
            logger.log('Electron detected, sending postMessage to request PiP');
            APP.API.notifyPictureInPictureRequested();
            // State will be updated by enterpictureinpicture event.
            return;
        }
        // TODO: Enable PiP for browsers:
        // In browsers, we should directly call requestPictureInPicture.
        // @ts-ignore - requestPictureInPicture is not yet in all TypeScript definitions.
        // requestPictureInPicture();
    } catch (error) {
        logger.error('Error entering Picture-in-Picture:', error);
    }
}
/**
 * Sets up MediaSession API action handlers for controlling the conference.
 * Handlers dispatch actions that query fresh Redux state, avoiding stale closures.
 *
 * @param {Function} dispatch - Redux dispatch function.
 * @returns {void}
 */
export function setupMediaSessionHandlers(dispatch: IStore['dispatch']) {
    // @ts-ignore - MediaSession API is not fully typed in all environments.
    if (!('mediaSession' in navigator) || !navigator.mediaSession?.setActionHandler) {
        logger.warn('MediaSession API not supported in this browser');

        return;
    }

    try {
        // Audio mute toggle: dispatches a thunk that reads the current state.
        // @ts-ignore - togglemicrophone is a newer MediaSession action.
        navigator.mediaSession.setActionHandler('togglemicrophone', () => dispatch(toggleAudioFromPiP()));

        // Video mute toggle: dispatches a thunk that reads the current state.
        // @ts-ignore - togglecamera is a newer MediaSession action.
        navigator.mediaSession.setActionHandler('togglecamera', () => dispatch(toggleVideoFromPiP()));

        // Hangup leaves the conference entirely.
        // @ts-ignore - hangup is a newer MediaSession action.
        navigator.mediaSession.setActionHandler('hangup', () => dispatch(leaveConference()));

        logger.log('MediaSession API handlers registered for PiP controls');
    } catch (error) {
        logger.warn('Some MediaSession actions not supported:', error);
    }
}
/**
 * Updates the MediaSession API microphone and camera active state.
 * This ensures the PiP controls show the correct mute/unmute state.
 *
 * @param {IMediaSessionState} state - The current media session state.
 * @returns {void}
 */
export function updateMediaSessionState(state: IMediaSessionState) {
    if (!('mediaSession' in navigator)) {
        return;
    }

    try {
        // setMicrophoneActive/setCameraActive are newer MediaSession methods
        // not present in all TypeScript lib definitions yet.
        const session = navigator.mediaSession as MediaSession & {
            setCameraActive?: (active: boolean) => void;
            setMicrophoneActive?: (active: boolean) => void;
        };

        if (session.setMicrophoneActive) {
            session.setMicrophoneActive(state.microphoneActive);
        }

        if (session.setCameraActive) {
            session.setCameraActive(state.cameraActive);
        }

        logger.log('MediaSession state updated:', state);
    } catch (error) {
        logger.warn('Error updating MediaSession state:', error);
    }
}
/**
 * Cleans up MediaSession API action handlers.
 *
 * @returns {void}
 */
export function cleanupMediaSessionHandlers() {
    if ('mediaSession' in navigator) {
        try {
            // Reset the PiP action handlers to null so the browser falls back to
            // its default behavior. They are re-registered by
            // setupMediaSessionHandlers the next time PiP is entered.
            // @ts-ignore - togglemicrophone is a newer MediaSession action.
            navigator.mediaSession.setActionHandler('togglemicrophone', null);
            // @ts-ignore - togglecamera is a newer MediaSession action.
            navigator.mediaSession.setActionHandler('togglecamera', null);
            // @ts-ignore - hangup is a newer MediaSession action.
            navigator.mediaSession.setActionHandler('hangup', null);
            logger.log('MediaSession API handlers cleaned up');
        } catch (error) {
            logger.error('Error cleaning up MediaSession handlers:', error);
        }
    }
}
// Re-export from shared file for external use.
export { isPiPEnabled };

174
react/features/pip/hooks.ts Normal file
View File

@@ -0,0 +1,174 @@
import { useEffect, useRef } from 'react';
import IconUserSVG from '../base/icons/svg/user.svg?raw';
import { IParticipant } from '../base/participants/types';
import { TILE_ASPECT_RATIO } from '../filmstrip/constants';
import { renderAvatarOnCanvas } from './functions';
import logger from './logger';
/**
 * Canvas dimensions for PiP avatar rendering. Height is derived from the
 * filmstrip tile aspect ratio so the avatar canvas matches tile proportions.
 */
const CANVAS_WIDTH = 640;
const CANVAS_HEIGHT = Math.floor(CANVAS_WIDTH / TILE_ASPECT_RATIO);
/**
 * Frame rate 0 means capture on-demand when canvas changes.
 * We manually request frames after drawing to ensure capture.
 */
const CANVAS_FRAME_RATE = 0;
/**
 * Options for the useCanvasAvatar hook.
 */
interface IUseCanvasAvatarOptions {
    // Canvas background fill color.
    backgroundColor: string;

    // Custom avatar background colors (passed through to avatar rendering).
    customAvatarBackgrounds: string[];

    // Display name drawn under the avatar.
    displayName: string;

    // Color of the display name text.
    displayNameColor: string;

    // Font family used for initials and the display name.
    fontFamily: string;

    // Color of the avatar initials text.
    initialsColor: string;

    // Participant whose avatar is rendered; undefined renders the default icon.
    participant: IParticipant | undefined;
}
/**
 * Result returned by the useCanvasAvatar hook.
 */
interface IUseCanvasAvatarResult {
    // MediaStream captured from the avatar canvas, or null before init.
    canvasStream: MediaStream | null;
}
/**
 * Internal refs managed by the hook.
 */
interface ICanvasRefs {
    // Offscreen canvas the avatar is drawn onto.
    canvas: HTMLCanvasElement | null;

    // Preloaded default user icon image.
    defaultIcon: HTMLImageElement | null;

    // Stream captured from the canvas via captureStream().
    stream: MediaStream | null;
}
/**
 * Loads and prepares the default user icon SVG as an Image element.
 *
 * @returns {HTMLImageElement} The prepared image element.
 */
function createDefaultIconImage(): HTMLImageElement {
    // Force a white fill when the SVG does not specify one, so the glyph is
    // visible on the colored circle it is drawn over.
    const svgText = IconUserSVG.includes('fill=')
        ? IconUserSVG
        : IconUserSVG.replace('<svg', '<svg fill="#FFFFFF"');

    // Quotes must be percent-encoded for a valid data: URL.
    const encoded = encodeURIComponent(svgText)
        .replace(/'/g, '%27')
        .replace(/"/g, '%22');

    const img = new Image();

    img.src = `data:image/svg+xml,${encoded}`;

    return img;
}
/**
 * Custom hook that manages canvas-based avatar rendering for Picture-in-Picture.
 * Creates and maintains a canvas element with a MediaStream that can be used
 * as a video source when the participant's video is unavailable.
 *
 * NOTE(review): the returned canvasStream reads refs.current.stream, which is
 * only assigned in an effect — on the very first render it is null and no
 * re-render is triggered by the assignment; consumers presumably tolerate
 * that. Verify against callers.
 *
 * @param {IUseCanvasAvatarOptions} options - The hook options.
 * @returns {IUseCanvasAvatarResult} The canvas stream for use as video source.
 */
export function useCanvasAvatar(options: IUseCanvasAvatarOptions): IUseCanvasAvatarResult {
    const {
        participant,
        displayName,
        customAvatarBackgrounds,
        backgroundColor,
        fontFamily,
        initialsColor,
        displayNameColor
    } = options;
    // Single mutable holder for the canvas, its captured stream and the
    // preloaded default icon; kept in a ref so effects share one identity.
    const refs = useRef<ICanvasRefs>({
        canvas: null,
        stream: null,
        defaultIcon: null
    });
    /**
     * Initialize canvas, stream, and default icon on mount.
     */
    useEffect(() => {
        // Create canvas.
        const canvas = document.createElement('canvas');
        canvas.width = CANVAS_WIDTH;
        canvas.height = CANVAS_HEIGHT;
        refs.current.canvas = canvas;
        // Create stream from canvas. Frame rate 0 means frames are captured
        // on demand via track.requestFrame() after each draw.
        refs.current.stream = canvas.captureStream(CANVAS_FRAME_RATE);
        // Load default icon.
        refs.current.defaultIcon = createDefaultIconImage();
        logger.log('Canvas avatar initialized');
        // Cleanup on unmount: stop the capture tracks and drop references.
        return () => {
            if (refs.current.stream) {
                refs.current.stream.getTracks().forEach(track => track.stop());
                refs.current.stream = null;
            }
            refs.current.canvas = null;
            refs.current.defaultIcon = null;
            logger.log('Canvas avatar cleaned up');
        };
    }, []);
    /**
     * Re-render avatar when participant or display name changes.
     */
    useEffect(() => {
        const { canvas, defaultIcon, stream } = refs.current;
        if (!canvas) {
            return;
        }
        const ctx = canvas.getContext('2d');
        if (!ctx) {
            logger.error('Failed to get canvas 2D context');
            return;
        }
        renderAvatarOnCanvas(
            canvas,
            ctx,
            participant,
            displayName,
            customAvatarBackgrounds,
            defaultIcon,
            backgroundColor,
            fontFamily,
            initialsColor,
            displayNameColor
        ).then(() => {
            // Request a frame capture after drawing.
            // For captureStream(0), we need to manually trigger frame capture.
            const track = stream?.getVideoTracks()[0] as MediaStreamTrack & { requestFrame?: () => void; };
            if (track?.requestFrame) {
                track.requestFrame();
                logger.log('Canvas frame requested after render');
            }
        }).catch((error: Error) => logger.error('Error rendering avatar on canvas:', error));
    }, [ participant?.loadableAvatarUrl, participant?.name, displayName, customAvatarBackgrounds, backgroundColor, fontFamily, initialsColor, displayNameColor ]);
    return {
        canvasStream: refs.current.stream
    };
}

View File

@@ -0,0 +1,3 @@
import { getLogger } from '../base/logging/functions';
export default getLogger('app:pip');

View File

@@ -0,0 +1 @@
import './subscriber';

View File

@@ -0,0 +1,30 @@
import ReducerRegistry from '../base/redux/ReducerRegistry';
import { SET_PIP_ACTIVE } from './actionTypes';
/**
 * Initial Redux state of the pip feature: PiP starts inactive.
 */
const DEFAULT_STATE = {
    isPiPActive: false
};

export interface IPipState {
    isPiPActive: boolean;
}

/**
 * Reduces the Redux actions of the pip feature.
 */
ReducerRegistry.register<IPipState>('features/pip', (state = DEFAULT_STATE, action): IPipState => {
    if (action.type === SET_PIP_ACTIVE) {
        return {
            ...state,
            isPiPActive: action.isPiPActive
        };
    }

    return state;
});

View File

@@ -0,0 +1,61 @@
import { IReduxState } from '../app/types';
import { MEDIA_TYPE } from '../base/media/constants';
import StateListenerRegistry from '../base/redux/StateListenerRegistry';
import { isLocalTrackMuted } from '../base/tracks/functions.any';
import { getElectronGlobalNS } from '../base/util/helpers';
import { requestPictureInPicture, shouldShowPiP, updateMediaSessionState } from './functions';
/**
 * Listens to audio and video mute state changes when PiP is active
 * and updates the MediaSession API to reflect the current state in PiP controls.
 */
StateListenerRegistry.register(
    /* selector */ (state: IReduxState) => {
        // No-op when PiP should not be shown (disabled, or prejoin without
        // showOnPrejoin) or is simply not active right now.
        if (!shouldShowPiP(state) || !state['features/pip']?.isPiPActive) {
            return null;
        }

        const tracks = state['features/base/tracks'];

        return {
            audioMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.AUDIO),
            videoMuted: isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO)
        };
    },
    /* listener */ (muteState: { audioMuted: boolean; videoMuted: boolean; } | null) => {
        if (muteState === null) {
            return;
        }

        // MediaSession tracks "active" state, i.e. the inverse of muted.
        updateMediaSessionState({
            cameraActive: !muteState.videoMuted,
            microphoneActive: !muteState.audioMuted
        });
    },
    {
        deepEquals: true
    }
);
/**
 * Exposes requestPictureInPicture on the Electron global namespace while PiP
 * may be shown, and removes it again when it may not.
 */
StateListenerRegistry.register(
    /* selector */ shouldShowPiP,
    /* listener */ (_shouldShowPiP: boolean) => {
        const electronNS = getElectronGlobalNS();

        if (!_shouldShowPiP) {
            if (typeof electronNS.requestPictureInPicture === 'function') {
                delete electronNS.requestPictureInPicture;
            }

            return;
        }

        // Expose requestPictureInPicture for the Electron main process.
        if (!electronNS.requestPictureInPicture) {
            electronNS.requestPictureInPicture = requestPictureInPicture;
        }
    }
);

View File

@@ -0,0 +1,14 @@
/**
 * MediaSession state for microphone and camera, as consumed by
 * updateMediaSessionState to drive the PiP window's mute indicators.
 */
export interface IMediaSessionState {
    /**
     * Whether the camera is active (unmuted).
     */
    cameraActive: boolean;
    /**
     * Whether the microphone is active (unmuted).
     */
    microphoneActive: boolean;
}

View File

@@ -26,6 +26,7 @@
"react/features/face-landmarks",
"react/features/keyboard-shortcuts",
"react/features/no-audio-signal",
"react/features/noise-suppression",
"react/features/old-client-notification",
"react/features/pip",
"react/features/remote-control",

View File

@@ -160,7 +160,14 @@ function getConfig(options = {}) {
'css-loader'
]
}, {
// Import SVG as raw text when using ?raw query parameter.
test: /\.svg$/,
resourceQuery: /raw/,
type: 'asset/source'
}, {
// Import SVG as React component (default).
test: /\.svg$/,
resourceQuery: { not: [ /raw/ ] },
use: [ {
loader: '@svgr/webpack',
options: {