diff --git a/conference.js b/conference.js index 9b273c5c6d..e006eaf7ef 100644 --- a/conference.js +++ b/conference.js @@ -120,6 +120,7 @@ import { import { mediaPermissionPromptVisibilityChanged } from './react/features/overlay'; import { suspendDetected } from './react/features/power-monitor'; import { setSharedVideoStatus } from './react/features/shared-video'; +import { AudioMixerEffect } from './react/features/stream-effects/audio-mixer/AudioMixerEffect'; import { createPresenterEffect } from './react/features/stream-effects/presenter'; import { endpointMessageReceived } from './react/features/subtitles'; import { createRnnoiseProcessorPromise } from './react/features/rnnoise'; @@ -659,10 +660,10 @@ export default { startAudioOnly: config.startAudioOnly, startScreenSharing: config.startScreenSharing, startWithAudioMuted: config.startWithAudioMuted - || config.startSilent - || isUserInteractionRequiredForUnmute(APP.store.getState()), + || config.startSilent + || isUserInteractionRequiredForUnmute(APP.store.getState()), startWithVideoMuted: config.startWithVideoMuted - || isUserInteractionRequiredForUnmute(APP.store.getState()) + || isUserInteractionRequiredForUnmute(APP.store.getState()) })) .then(([ tracks, con ]) => { tracks.forEach(track => { @@ -1417,7 +1418,7 @@ export default { * in case it fails. * @private */ - _turnScreenSharingOff(didHaveVideo) { + async _turnScreenSharingOff(didHaveVideo) { this._untoggleScreenSharing = null; this.videoSwitchInProgress = true; const { receiver } = APP.remoteControl; @@ -1446,6 +1447,20 @@ export default { } }); + // If system audio was also shared stop the AudioMixerEffect and dispose of the desktop audio track. 
+ if (this._mixerEffect) { + await this.localAudio.setEffect(undefined); + await this._desktopAudioStream.dispose(); + this._mixerEffect = undefined; + this._desktopAudioStream = undefined; + + // In case there was no local audio when screen sharing was started the fact that we set the audio stream to + // null will take care of the desktop audio stream cleanup. + } else if (this._desktopAudioStream) { + await this.useAudioStream(null); + this._desktopAudioStream = undefined; + } + if (didHaveVideo) { promise = promise.then(() => createLocalTracksF({ devices: [ 'video' ] })) .then(([ stream ]) => this.useVideoStream(stream)) @@ -1585,26 +1600,31 @@ export default { } }); - return getDesktopStreamPromise.then(([ desktopStream ]) => { + return getDesktopStreamPromise.then(desktopStreams => { // Stores the "untoggle" handler which remembers whether was // there any video before and whether was it muted. this._untoggleScreenSharing = this._turnScreenSharingOff.bind(this, didHaveVideo); - desktopStream.on( - JitsiTrackEvents.LOCAL_TRACK_STOPPED, - () => { - // If the stream was stopped during screen sharing - // session then we should switch back to video. - this.isSharingScreen - && this._untoggleScreenSharing - && this._untoggleScreenSharing(); - } - ); + + const desktopVideoStream = desktopStreams.find(stream => stream.getType() === MEDIA_TYPE.VIDEO); + + if (desktopVideoStream) { + desktopVideoStream.on( + JitsiTrackEvents.LOCAL_TRACK_STOPPED, + () => { + // If the stream was stopped during screen sharing + // session then we should switch back to video. + this.isSharingScreen + && this._untoggleScreenSharing + && this._untoggleScreenSharing(); + } + ); + } // close external installation dialog on success. 
externalInstallation && $.prompt.close(); - return desktopStream; + return desktopStreams; }, error => { DSExternalInstallationInProgress = false; @@ -1755,7 +1775,29 @@ export default { this.videoSwitchInProgress = true; return this._createDesktopTrack(options) - .then(stream => this.useVideoStream(stream)) + .then(async streams => { + const desktopVideoStream = streams.find(stream => stream.getType() === MEDIA_TYPE.VIDEO); + + if (desktopVideoStream) { + this.useVideoStream(desktopVideoStream); + } + + this._desktopAudioStream = streams.find(stream => stream.getType() === MEDIA_TYPE.AUDIO); + + if (this._desktopAudioStream) { + // If there is a localAudio stream, mix in the desktop audio stream captured by the screen sharing + // api. + if (this.localAudio) { + this._mixerEffect = new AudioMixerEffect(this._desktopAudioStream); + + await this.localAudio.setEffect(this._mixerEffect); + } else { + // If no local stream is present ( i.e. no input audio devices) we use the screen share audio + // stream as we would use a regular stream. + await this.useAudioStream(this._desktopAudioStream); + } + } + }) .then(() => { this.videoSwitchInProgress = false; if (config.enableScreenshotCapture) { @@ -2288,7 +2330,17 @@ export default { return stream; }) - .then(stream => this.useAudioStream(stream)) + .then(async stream => { + // In case screen sharing audio is also shared we mix it with new input stream. The old _mixerEffect + // will be cleaned up when the existing track is replaced. 
+ if (this._mixerEffect) { + this._mixerEffect = new AudioMixerEffect(this._desktopAudioStream); + + await stream.setEffect(this._mixerEffect); + } + + return this.useAudioStream(stream); + }) .then(() => { logger.log(`switched local audio device: ${this.localAudio?.getDeviceId()}`); diff --git a/package-lock.json b/package-lock.json index eb687e04d2..b5105ac4f6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10653,8 +10653,8 @@ "integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls=" }, "js-utils": { - "version": "github:jitsi/js-utils#7a2be83d17dc4a3d0fac4a742ab999478f326f2e", - "from": "github:jitsi/js-utils#7a2be83d17dc4a3d0fac4a742ab999478f326f2e", + "version": "github:jitsi/js-utils#91c5e53ca5fa42907c88d56bc78254e6e56e058d", + "from": "github:jitsi/js-utils#91c5e53ca5fa42907c88d56bc78254e6e56e058d", "requires": { "bowser": "2.7.0", "js-md5": "0.7.3", @@ -10883,8 +10883,8 @@ } }, "lib-jitsi-meet": { - "version": "github:jitsi/lib-jitsi-meet#5466c9d08a2c262ebb5889e3bb0cbbe6f08dc0c3", - "from": "github:jitsi/lib-jitsi-meet#5466c9d08a2c262ebb5889e3bb0cbbe6f08dc0c3", + "version": "github:jitsi/lib-jitsi-meet#a7950f8ebb489225c2e8bf41fe65f330b3de0874", + "from": "github:jitsi/lib-jitsi-meet#a7950f8ebb489225c2e8bf41fe65f330b3de0874", "requires": { "@jitsi/sdp-interop": "0.1.14", "@jitsi/sdp-simulcast": "0.2.2", diff --git a/package.json b/package.json index 2773267605..291b82bb90 100644 --- a/package.json +++ b/package.json @@ -56,7 +56,7 @@ "js-utils": "github:jitsi/js-utils#91c5e53ca5fa42907c88d56bc78254e6e56e058d", "jsrsasign": "8.0.12", "jwt-decode": "2.2.0", - "lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#5466c9d08a2c262ebb5889e3bb0cbbe6f08dc0c3", + "lib-jitsi-meet": "github:jitsi/lib-jitsi-meet#a7950f8ebb489225c2e8bf41fe65f330b3de0874", "libflacjs": "github:mmig/libflac.js#93d37e7f811f01cf7d8b6a603e38bd3c3810907d", "lodash": "4.17.13", "moment": "2.19.4", diff --git a/react/features/stream-effects/audio-mixer/AudioMixerEffect.js 
b/react/features/stream-effects/audio-mixer/AudioMixerEffect.js new file mode 100644 index 0000000000..5e82d46010 --- /dev/null +++ b/react/features/stream-effects/audio-mixer/AudioMixerEffect.js @@ -0,0 +1,70 @@ +// @flow + +import JitsiMeetJS from '../../base/lib-jitsi-meet'; +import { MEDIA_TYPE } from '../../base/media'; + +/** + * Class implementing the effect interface expected by a JitsiLocalTrack. + * The AudioMixerEffect, as the name implies, mixes two JitsiLocalTracks containing an audio track. First track is + * provided at the moment of creation, second is provided through the effect interface. + */ +export class AudioMixerEffect { + /** + * JitsiLocalTrack that is going to be mixed into the track that uses this effect. + */ + _mixAudio: Object; + + /** + * lib-jitsi-meet AudioMixer. + */ + _audioMixer: Object; + + /** + * Creates AudioMixerEffect. + * + * @param {JitsiLocalTrack} mixAudio - JitsiLocalTrack which will be mixed with the original track. + */ + constructor(mixAudio: Object) { + if (mixAudio.getType() !== MEDIA_TYPE.AUDIO) { + throw new Error('AudioMixerEffect only supports audio JitsiLocalTracks; effect will not work!'); + } + + this._mixAudio = mixAudio; + } + + /** + * Checks if the JitsiLocalTrack supports this effect. + * + * @param {JitsiLocalTrack} sourceLocalTrack - Track to which the effect will be applied. + * @returns {boolean} - Returns true if this effect can run on the specified track, false otherwise. + */ + isEnabled(sourceLocalTrack: Object) { + // Both JitsiLocalTracks need to be audio i.e. contain an audio MediaStreamTrack + return sourceLocalTrack.isAudioTrack() && this._mixAudio.isAudioTrack(); + } + + /** + * Effect interface called by source JitsiLocalTrack. At this point a WebAudio ChannelMergerNode is created + * and the two associated MediaStreams are connected to it; the resulting mixed MediaStream is returned. + * + * @param {MediaStream} audioStream - Audio stream which will be mixed with _mixAudio.
+ * @returns {MediaStream} - MediaStream containing both audio tracks mixed together. + */ + startEffect(audioStream: MediaStream) { + this._audioMixer = JitsiMeetJS.createAudioMixer(); + this._audioMixer.addMediaStream(this._mixAudio.getOriginalStream()); + this._audioMixer.addMediaStream(audioStream); + + return this._audioMixer.start(); + } + + /** + * Reset the AudioMixer, stopping it in the process. + * + * @returns {void} + */ + stopEffect() { + this._audioMixer.reset(); + } + +}