jitsi-meet/react/features/pip/hooks.ts
Hristo Terezov d06b847319 feat(pip): Add Picture-in-Picture support for Electron
Implements Picture-in-Picture functionality for the Electron wrapper so that users stay visually connected to the conference while the conference window is not focused, for example when multitasking.

Key features:
- Automatic PiP mode activation and deactivation based on user interaction
- Displays the large-video participant's stream, or renders their avatar on a canvas when video is unavailable
- Provides audio/video mute controls in the PiP window via the MediaSession API (see the sketch after this list)
- Adds API events (_pip-requested) for Electron wrapper integration
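
A minimal sketch of how such controls can be registered through the MediaSession API; the function and the toggleAudioMuted/toggleVideoMuted callbacks are illustrative stand-ins for the conference's real mute actions, not the actual code in this commit:

function registerPipMediaSessionHandlers(
        toggleAudioMuted: () => void,
        toggleVideoMuted: () => void): void {
    if (!('mediaSession' in navigator)) {
        return;
    }

    // Chromium (and therefore Electron) understands the 'togglemicrophone' and
    // 'togglecamera' actions even where older DOM typings do not list them,
    // hence the loose cast.
    const setActionHandler
        = navigator.mediaSession.setActionHandler.bind(navigator.mediaSession) as unknown as
            (action: string, handler: () => void) => void;

    setActionHandler('togglemicrophone', toggleAudioMuted);
    setActionHandler('togglecamera', toggleVideoMuted);
}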

The implementation adds a new pip feature module with Redux architecture, canvas-based avatar rendering with support for custom backgrounds, and integration with the existing mute/unmute logic. It depends on jitsi-meet-electron-sdk#479 for proper user gesture handling in Electron.
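
To illustrate how the pieces fit together (the function below is hypothetical, not part of the module's actual API): a detached video element fed by the avatar canvas stream can be handed to the browser's Picture-in-Picture API once a user gesture is available.

async function enterPipWithStream(stream: MediaStream): Promise<void> {
    const video = document.createElement('video');

    video.muted = true;
    video.srcObject = stream;

    await video.play();

    // requestPictureInPicture() must be triggered by a user gesture; the
    // _pip-requested API event and the SDK change mentioned above presumably
    // exist to forward such a gesture from the Electron wrapper.
    await video.requestPictureInPicture();
}
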
2025-12-04 16:04:10 -06:00

175 lines
4.8 KiB
TypeScript

import { useEffect, useRef } from 'react';

import IconUserSVG from '../base/icons/svg/user.svg?raw';
import { IParticipant } from '../base/participants/types';
import { TILE_ASPECT_RATIO } from '../filmstrip/constants';

import { renderAvatarOnCanvas } from './functions';
import logger from './logger';

/**
 * Canvas dimensions for PiP avatar rendering.
 */
const CANVAS_WIDTH = 640;
const CANVAS_HEIGHT = Math.floor(CANVAS_WIDTH / TILE_ASPECT_RATIO);

/**
 * Frame rate 0 means capture on-demand when canvas changes.
 * We manually request frames after drawing to ensure capture.
 */
const CANVAS_FRAME_RATE = 0;

/**
 * Options for the useCanvasAvatar hook.
 */
interface IUseCanvasAvatarOptions {
    backgroundColor: string;
    customAvatarBackgrounds: string[];
    displayName: string;
    displayNameColor: string;
    fontFamily: string;
    initialsColor: string;
    participant: IParticipant | undefined;
}

/**
 * Result returned by the useCanvasAvatar hook.
 */
interface IUseCanvasAvatarResult {
    canvasStream: MediaStream | null;
}

/**
 * Internal refs managed by the hook.
 */
interface ICanvasRefs {
    canvas: HTMLCanvasElement | null;
    defaultIcon: HTMLImageElement | null;
    stream: MediaStream | null;
}

/**
 * Loads and prepares the default user icon SVG as an Image element.
 *
 * @returns {HTMLImageElement} The prepared image element.
 */
function createDefaultIconImage(): HTMLImageElement {
    let svgText = IconUserSVG;

    if (!svgText.includes('fill=')) {
        svgText = svgText.replace('<svg', '<svg fill="#FFFFFF"');
    }

    const dataUrl = `data:image/svg+xml,${encodeURIComponent(svgText)
        .replace(/'/g, '%27')
        .replace(/"/g, '%22')}`;
    const img = new Image();

    img.src = dataUrl;

    return img;
}

/**
 * Custom hook that manages canvas-based avatar rendering for Picture-in-Picture.
 * Creates and maintains a canvas element with a MediaStream that can be used
 * as a video source when the participant's video is unavailable.
 *
 * @param {IUseCanvasAvatarOptions} options - The hook options.
 * @returns {IUseCanvasAvatarResult} The canvas stream for use as video source.
 */
export function useCanvasAvatar(options: IUseCanvasAvatarOptions): IUseCanvasAvatarResult {
    const {
        participant,
        displayName,
        customAvatarBackgrounds,
        backgroundColor,
        fontFamily,
        initialsColor,
        displayNameColor
    } = options;
    const refs = useRef<ICanvasRefs>({
        canvas: null,
        stream: null,
        defaultIcon: null
    });

    /**
     * Initialize canvas, stream, and default icon on mount.
     */
    useEffect(() => {
        // Create canvas.
        const canvas = document.createElement('canvas');

        canvas.width = CANVAS_WIDTH;
        canvas.height = CANVAS_HEIGHT;
        refs.current.canvas = canvas;

        // Create stream from canvas.
        refs.current.stream = canvas.captureStream(CANVAS_FRAME_RATE);

        // Load default icon.
        refs.current.defaultIcon = createDefaultIconImage();

        logger.log('Canvas avatar initialized');

        // Cleanup on unmount.
        return () => {
            if (refs.current.stream) {
                refs.current.stream.getTracks().forEach(track => track.stop());
                refs.current.stream = null;
            }
            refs.current.canvas = null;
            refs.current.defaultIcon = null;
            logger.log('Canvas avatar cleaned up');
        };
    }, []);

    /**
     * Re-render avatar when participant or display name changes.
     */
    useEffect(() => {
        const { canvas, defaultIcon, stream } = refs.current;

        if (!canvas) {
            return;
        }

        const ctx = canvas.getContext('2d');

        if (!ctx) {
            logger.error('Failed to get canvas 2D context');

            return;
        }

        renderAvatarOnCanvas(
            canvas,
            ctx,
            participant,
            displayName,
            customAvatarBackgrounds,
            defaultIcon,
            backgroundColor,
            fontFamily,
            initialsColor,
            displayNameColor
        ).then(() => {
            // Request a frame capture after drawing.
            // For captureStream(0), we need to manually trigger frame capture.
            const track = stream?.getVideoTracks()[0] as MediaStreamTrack & { requestFrame?: () => void; };

            if (track?.requestFrame) {
                track.requestFrame();
                logger.log('Canvas frame requested after render');
            }
        }).catch((error: Error) => logger.error('Error rendering avatar on canvas:', error));
    }, [
        participant?.loadableAvatarUrl,
        participant?.name,
        displayName,
        customAvatarBackgrounds,
        backgroundColor,
        fontFamily,
        initialsColor,
        displayNameColor
    ]);

    return {
        canvasStream: refs.current.stream
    };
}
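
For reference, a hypothetical consumer of the hook (a .tsx component whose name, props, and styling values are illustrative only and are not part of this file):

import React, { useEffect, useRef } from 'react';

import { IParticipant } from '../base/participants/types';

import { useCanvasAvatar } from './hooks';

interface IProps {
    displayName: string;
    participant?: IParticipant;
}

function PipAvatarVideo({ displayName, participant }: IProps) {
    const videoRef = useRef<HTMLVideoElement>(null);
    const { canvasStream } = useCanvasAvatar({
        backgroundColor: '#040404',
        customAvatarBackgrounds: [],
        displayName,
        displayNameColor: '#FFFFFF',
        fontFamily: 'sans-serif',
        initialsColor: '#FFFFFF',
        participant
    });

    useEffect(() => {
        // canvasStream is null on the first render (the hook creates it in a
        // mount effect), so the stream is attached on a later re-render,
        // e.g. when the participant props change.
        if (videoRef.current && canvasStream) {
            videoRef.current.srcObject = canvasStream;
        }
    }, [ canvasStream ]);

    return (
        <video
            autoPlay = { true }
            muted = { true }
            ref = { videoRef } />
    );
}

export default PipAvatarVideo;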