init

2025-09-02 14:49:16 +08:00
commit 38ba663466
2885 changed files with 391107 additions and 0 deletions


@@ -0,0 +1,118 @@
/**
* The type of Redux action which sets the noSrcDataNotificationUid state representing the UID of the previous
* no data from source notification. Used to check if such a notification was previously displayed.
*
* {
* type: SET_NO_SRC_DATA_NOTIFICATION_UID,
* uid: ?number
* }
*/
export const SET_NO_SRC_DATA_NOTIFICATION_UID = 'SET_NO_SRC_DATA_NOTIFICATION_UID';
/**
* The type of redux action dispatched when a track has been (locally or
* remotely) added to the conference.
*
* {
* type: TRACK_ADDED,
* track: Track
* }
*/
export const TRACK_ADDED = 'TRACK_ADDED';
/**
* The type of redux action dispatched when a canceled {@code getUserMedia}
* process completes either successfully or with an error (the error is ignored
* and the track is immediately disposed if it has been created).
*
* {
* type: TRACK_CREATE_CANCELED,
* trackType: MEDIA_TYPE
* }
*/
export const TRACK_CREATE_CANCELED = 'TRACK_CREATE_CANCELED';
/**
* The type of redux action dispatched when {@code getUserMedia} fails with an
* error (such as permission denied).
*
* {
* type: TRACK_CREATE_ERROR,
* permissionDenied: Boolean,
* trackType: MEDIA_TYPE
* }
*/
export const TRACK_CREATE_ERROR = 'TRACK_CREATE_ERROR';
/**
* The type of redux action dispatched when the track mute/unmute operation fails at the conference level. This could
* happen because of {@code getUserMedia} errors during unmute or replace track errors at the peerconnection level.
*
* {
* type: TRACK_MUTE_UNMUTE_FAILED,
* track: Track,
* wasMuting: Boolean
* }
*/
export const TRACK_MUTE_UNMUTE_FAILED = 'TRACK_MUTE_UNMUTE_FAILED';
/**
* The type of redux action dispatched when a track has triggered a no data from source event.
*
* {
* type: TRACK_NO_DATA_FROM_SOURCE,
* track: Track
* }
*/
export const TRACK_NO_DATA_FROM_SOURCE = 'TRACK_NO_DATA_FROM_SOURCE';
/**
* The type of redux action dispatched when a track has been (locally or
* remotely) removed from the conference.
*
* {
* type: TRACK_REMOVED,
* track: Track
* }
*/
export const TRACK_REMOVED = 'TRACK_REMOVED';
/**
* The type of redux action dispatched when a track has stopped.
*
* {
* type: TRACK_STOPPED,
* track: Track
* }
*/
export const TRACK_STOPPED = 'TRACK_STOPPED';
/**
* The type of redux action dispatched when a track's properties were updated.
*
* {
* type: TRACK_UPDATED,
* track: Track
* }
*/
export const TRACK_UPDATED = 'TRACK_UPDATED';
/**
* The type of redux action dispatched when a local track starts being created
* via a WebRTC {@code getUserMedia} call. The action's payload includes an
* extra {@code gumProcess} property which is a {@code Promise} with an extra
* {@code cancel} method which can be used to cancel the process. Canceling will
* result in disposing any {@code JitsiLocalTrack} returned by the
* {@code getUserMedia} callback. There will be a {@code TRACK_CREATE_CANCELED}
* action instead of a {@code TRACK_ADDED} or {@code TRACK_CREATE_ERROR} action.
*
* {
* type: TRACK_WILL_CREATE,
* track: {
* gumProcess: Promise with a `cancel` method to cancel the process,
* local: true,
* mediaType: MEDIA_TYPE
* }
* }
*/
export const TRACK_WILL_CREATE = 'TRACK_WILL_CREATE';


@@ -0,0 +1,827 @@
import { createTrackMutedEvent } from '../../analytics/AnalyticsEvents';
import { sendAnalytics } from '../../analytics/functions';
import { IStore } from '../../app/types';
import { showErrorNotification, showNotification } from '../../notifications/actions';
import { NOTIFICATION_TIMEOUT, NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import { getCurrentConference } from '../conference/functions';
import { IJitsiConference } from '../conference/reducer';
import { JitsiTrackErrors, JitsiTrackEvents } from '../lib-jitsi-meet';
import { setAudioMuted, setScreenshareMuted, setVideoMuted } from '../media/actions';
import {
CAMERA_FACING_MODE,
MEDIA_TYPE,
MediaType,
VIDEO_MUTISM_AUTHORITY,
VIDEO_TYPE,
VideoType
} from '../media/constants';
import { getLocalParticipant } from '../participants/functions';
import {
SET_NO_SRC_DATA_NOTIFICATION_UID,
TRACK_ADDED,
TRACK_CREATE_CANCELED,
TRACK_CREATE_ERROR,
TRACK_MUTE_UNMUTE_FAILED,
TRACK_NO_DATA_FROM_SOURCE,
TRACK_REMOVED,
TRACK_STOPPED,
TRACK_UPDATED,
TRACK_WILL_CREATE
} from './actionTypes';
import {
createLocalTracksF,
getCameraFacingMode,
getLocalTrack,
getLocalTracks,
getLocalVideoTrack,
getTrackByJitsiTrack
} from './functions';
import logger from './logger';
import { ITrack, ITrackOptions } from './types';
/**
* Add a given local track to the conference.
*
* @param {JitsiLocalTrack} newTrack - The local track to be added to the conference.
* @returns {Function}
*/
export function addLocalTrack(newTrack: any) {
return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const conference = getCurrentConference(getState());
if (conference) {
await conference.addTrack(newTrack);
}
const setMuted = newTrack.isVideoTrack()
? newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
? setScreenshareMuted
: setVideoMuted
: setAudioMuted;
const isMuted = newTrack.isMuted();
logger.log(`Adding ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);
dispatch(setMuted(isMuted));
return dispatch(_addTracks([ newTrack ]));
};
}
/**
* Requests the creation of the desired media type tracks. The desired types
* are expressed by base/media unless the function caller specifies them
* explicitly, thus overriding base/media. Dispatches a
* {@code createLocalTracksA} action for the desired media types for which there
* are no existing tracks yet.
*
* @returns {Function}
*/
export function createDesiredLocalTracks(...desiredTypes: any) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
dispatch(destroyLocalDesktopTrackIfExists());
if (desiredTypes.length === 0) {
const { startSilent } = state['features/base/config'];
const { video } = state['features/base/media'];
if (!startSilent) {
// Always create the audio track early, even if it will be muted.
// This fixes a timing issue when adding the track to the conference which
// manifests primarily on iOS 15.
// Unless we are silent, of course.
desiredTypes.push(MEDIA_TYPE.AUDIO);
}
// XXX When the app is coming into the foreground from the
// background in order to handle a URL, it may realize the new
// background state soon after it has tried to create the local
// tracks requested by the URL. Ignore
// VIDEO_MUTISM_AUTHORITY.BACKGROUND and create the local video
// track if no other VIDEO_MUTISM_AUTHORITY has muted it. The local
// video track will be muted until the app realizes the new
// background state.
// eslint-disable-next-line no-bitwise
(video.muted & ~VIDEO_MUTISM_AUTHORITY.BACKGROUND)
|| desiredTypes.push(MEDIA_TYPE.VIDEO);
}
const availableTypes
= getLocalTracks(
state['features/base/tracks'],
/* includePending */ true)
.map(t => t.mediaType);
// We need to create the desired tracks which are not already available.
const createTypes
= desiredTypes.filter((type: MediaType) => availableTypes.indexOf(type) === -1);
createTypes.length
&& dispatch(createLocalTracksA({ devices: createTypes }));
};
}
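/**
 * Illustration, not part of the original file: the bitwise check above treats
 * video.muted as a bit field of mutism authorities. A minimal sketch of that
 * idea with a hypothetical bit value; the real constants live in
 * ../media/constants and may differ.
 */
const HYPOTHETICAL_BACKGROUND_AUTHORITY = 0x1; // assumption, not the real value
/**
 * Returns true when some authority other than BACKGROUND has muted the video.
 *
 * @param {number} muted - Bit field in which each set bit is one muting authority.
 * @returns {boolean}
 */
function _isMutedByOtherThanBackground(muted: number): boolean {
// eslint-disable-next-line no-bitwise
return (muted & ~HYPOTHETICAL_BACKGROUND_AUTHORITY) !== 0;
}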
/**
* Request to start capturing local audio and/or video. By default, the
* user-facing camera will be selected.
*
* @param {Object} [options] - For info @see JitsiMeetJS.createLocalTracks.
* @returns {Function}
*/
export function createLocalTracksA(options: ITrackOptions = {}) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const devices
= options.devices || [ MEDIA_TYPE.AUDIO, MEDIA_TYPE.VIDEO ];
const store = {
dispatch,
getState
};
const promises = [];
const state = getState();
// The following executes on React Native only at the time of this
// writing. The effort to port Web's createInitialLocalTracks is
// significant and that's where the function createLocalTracksF was
// born. I started with the idea of porting it so that we could inherit
// the ability to call getUserMedia for audio only or video only if
// getUserMedia for audio and video fails. Eventually though, I realized
// that on mobile we do not have combined permission prompts implemented
// anyway (either because there are no such prompts or it does not make
// sense to implement them) and the right thing to do is to ask for each
// device separately.
for (const device of devices) {
if (getLocalTrack(
state['features/base/tracks'],
device as MediaType,
/* includePending */ true)) {
throw new Error(`Local track for ${device} already exists`);
}
const gumProcess: any
= createLocalTracksF(
{
cameraDeviceId: options.cameraDeviceId,
devices: [ device ],
facingMode:
options.facingMode || getCameraFacingMode(state),
micDeviceId: options.micDeviceId
},
store)
.then( // @ts-ignore
(localTracks: any[]) => {
// Because GUM is called for 1 device (which is actually
// a media type 'audio', 'video', 'screen', etc.) we
// should not get more than one JitsiTrack.
if (localTracks.length !== 1) {
throw new Error(
`Expected exactly 1 track, but was given ${
localTracks.length} tracks for device: ${
device}.`);
}
if (gumProcess.canceled) {
return _disposeTracks(localTracks)
.then(() =>
dispatch(_trackCreateCanceled(device as MediaType)));
}
return dispatch(trackAdded(localTracks[0]));
},
(reason: Error) =>
dispatch(
gumProcess.canceled
? _trackCreateCanceled(device as MediaType)
: _onCreateLocalTracksRejected(
reason,
device)));
promises.push(gumProcess.catch(() => undefined));
/**
* Cancels the {@code getUserMedia} process represented by this
* {@code Promise}.
*
* @returns {Promise} This {@code Promise} i.e. {@code gumProcess}.
*/
gumProcess.cancel = () => {
gumProcess.canceled = true;
return gumProcess;
};
dispatch({
type: TRACK_WILL_CREATE,
track: {
gumProcess,
local: true,
mediaType: device
}
});
}
return Promise.all(promises);
};
}
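/**
 * Illustration, not part of the original file: the cancelable-promise shape
 * given to gumProcess above, in isolation. Note that cancel() only raises a
 * flag; the getUserMedia resolution handler checks it and disposes any track
 * that was created.
 */
type _CancelablePromise<T> = Promise<T> & { canceled?: boolean; cancel?: () => Promise<T>; };
function _makeCancelable<T>(promise: Promise<T>): _CancelablePromise<T> {
const gumProcess: _CancelablePromise<T> = promise;
gumProcess.cancel = () => {
gumProcess.canceled = true;
return gumProcess;
};
return gumProcess;
}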
/**
* Calls JitsiLocalTrack#dispose() on the given track, or on all local tracks if none is passed, ignoring errors if a
* track is already disposed. After that, it signals the tracks to be removed.
*
* @param {JitsiLocalTrack|null} [track] - The local track that needs to be destroyed.
* @returns {Function}
*/
export function destroyLocalTracks(track: any = null) {
if (track) {
return (dispatch: IStore['dispatch']) => dispatch(_disposeAndRemoveTracks([ track ]));
}
return (dispatch: IStore['dispatch'], getState: IStore['getState']) =>
// First wait until any getUserMedia in progress is settled and then get
// rid of all local tracks.
_cancelGUMProcesses(getState)
.then(() =>
dispatch(
_disposeAndRemoveTracks(
getState()['features/base/tracks']
.filter(t => t.local)
.map(t => t.jitsiTrack))));
}
/**
* Signals that the passed JitsiLocalTrack has triggered a no data from source event.
*
* @param {JitsiLocalTrack} track - The track.
* @returns {{
* type: TRACK_NO_DATA_FROM_SOURCE,
* track: Track
* }}
*/
export function noDataFromSource(track: any) {
return {
type: TRACK_NO_DATA_FROM_SOURCE,
track
};
}
/**
* Displays a no data from source video error if needed.
*
* @param {JitsiLocalTrack} jitsiTrack - The track.
* @returns {Function}
*/
export function showNoDataFromSourceVideoError(jitsiTrack: any) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
let notificationInfo;
const track = getTrackByJitsiTrack(getState()['features/base/tracks'], jitsiTrack);
if (!track) {
return;
}
if (track.isReceivingData) {
notificationInfo = undefined;
} else {
const notificationAction = dispatch(showErrorNotification({
descriptionKey: 'dialog.cameraNotSendingData',
titleKey: 'dialog.cameraNotSendingDataTitle'
}));
notificationInfo = {
uid: notificationAction?.uid
};
}
dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, notificationInfo));
};
}
/**
* Replaces one track with another for one renegotiation instead of invoking
* two renegotiations with a separate removeTrack and addTrack. Disposes the
* removed track as well.
*
* @param {JitsiLocalTrack|null} oldTrack - The track to dispose.
* @param {JitsiLocalTrack|null} newTrack - The track to use instead.
* @param {JitsiConference} [conference] - The conference from which to remove
* and add the tracks. If one is not provided, the conference in the redux store
* will be used.
* @returns {Function}
*/
export function replaceLocalTrack(oldTrack: any, newTrack: any, conference?: IJitsiConference) {
return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
conference
// eslint-disable-next-line no-param-reassign
|| (conference = getState()['features/base/conference'].conference);
if (conference) {
await conference.replaceTrack(oldTrack, newTrack);
}
return dispatch(replaceStoredTracks(oldTrack, newTrack));
};
}
/**
* Replaces a stored track with another.
*
* @param {JitsiLocalTrack|null} oldTrack - The track to dispose.
* @param {JitsiLocalTrack|null} newTrack - The track to use instead.
* @returns {Function}
*/
function replaceStoredTracks(oldTrack: any, newTrack: any) {
return async (dispatch: IStore['dispatch']) => {
// We call dispose after doing the replace because dispose will
// try and do a new o/a after the track removes itself. Doing it
// after means the JitsiLocalTrack.conference is already
// cleared, so it won't try and do the o/a.
if (oldTrack) {
await dispatch(_disposeAndRemoveTracks([ oldTrack ]));
}
if (newTrack) {
// The mute state of the new track should be reflected in the app's mute state. For example, if the
// app is currently muted and changing to a new track that is not muted, the app's mute state
// should be falsey. As such, emit a mute event here to set up the app to reflect the track's mute
// state. If this is not done, the current mute state of the app will be reflected on the track,
// not vice-versa.
const setMuted = newTrack.isVideoTrack()
? newTrack.getVideoType() === VIDEO_TYPE.DESKTOP
? setScreenshareMuted
: setVideoMuted
: setAudioMuted;
const isMuted = newTrack.isMuted();
sendAnalytics(createTrackMutedEvent(newTrack.getType(), 'track.replaced', isMuted));
logger.log(`Replace ${newTrack.getType()} track - ${isMuted ? 'muted' : 'unmuted'}`);
dispatch(setMuted(isMuted));
await dispatch(_addTracks([ newTrack ]));
}
};
}
/**
* Create an action for when a new track has been signaled to be added to the
* conference.
*
* @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
* @returns {Function}
*/
export function trackAdded(track: any) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
track.on(
JitsiTrackEvents.TRACK_MUTE_CHANGED,
() => dispatch(trackMutedChanged(track)));
track.on(
JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED,
(type: VideoType) => dispatch(trackVideoTypeChanged(track, type)));
const local = track.isLocal();
const mediaType = track.getVideoType() === VIDEO_TYPE.DESKTOP
? MEDIA_TYPE.SCREENSHARE
: track.getType();
let isReceivingData, noDataFromSourceNotificationInfo, participantId;
if (local) {
// Reset the no data from src notification state when we change the track, as its context is set
// on a per-device basis.
dispatch(setNoSrcDataNotificationUid());
const participant = getLocalParticipant(getState);
if (participant) {
participantId = participant.id;
}
isReceivingData = track.isReceivingData();
track.on(JitsiTrackEvents.NO_DATA_FROM_SOURCE, () => dispatch(noDataFromSource({ jitsiTrack: track })));
if (!isReceivingData) {
if (mediaType === MEDIA_TYPE.AUDIO) {
const notificationAction = dispatch(showNotification({
descriptionKey: 'dialog.micNotSendingData',
titleKey: 'dialog.micNotSendingDataTitle'
}, NOTIFICATION_TIMEOUT_TYPE.LONG));
// Set the notification ID so that other parts of the application know that this was
// displayed in the context of the current device.
// I.e. the no-audio-signal notification shouldn't be displayed if this one was already shown.
dispatch(setNoSrcDataNotificationUid(notificationAction?.uid));
noDataFromSourceNotificationInfo = { uid: notificationAction?.uid };
} else {
const timeout = setTimeout(() => dispatch(
showNoDataFromSourceVideoError(track)),
NOTIFICATION_TIMEOUT.MEDIUM);
noDataFromSourceNotificationInfo = { timeout };
}
}
track.on(JitsiTrackEvents.LOCAL_TRACK_STOPPED,
() => {
logger.debug(`Local track stopped: ${track}, removing it from the conference`);
dispatch({
type: TRACK_STOPPED,
track: {
jitsiTrack: track
} });
});
} else {
participantId = track.getParticipantId();
isReceivingData = true;
}
return dispatch({
type: TRACK_ADDED,
track: {
jitsiTrack: track,
isReceivingData,
local,
mediaType,
mirror: _shouldMirror(track),
muted: track.isMuted(),
noDataFromSourceNotificationInfo,
participantId,
videoStarted: false,
videoType: track.videoType
}
});
};
}
/**
* Create an action for when a track's codec has been signaled to have been changed.
*
* @param {JitsiLocalTrack} track - JitsiLocalTrack instance.
* @param {string} codec - The video codec.
* @returns {{
* type: TRACK_UPDATED,
* track: Track
* }}
*/
export function trackCodecChanged(track: ITrack, codec: string): {
track: {
codec: string;
jitsiTrack: any;
};
type: 'TRACK_UPDATED';
} {
return {
type: TRACK_UPDATED,
track: {
codec,
jitsiTrack: track
}
};
}
/**
* Create an action for when a track's muted state has been signaled to be
* changed.
*
* @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
* @returns {{
* type: TRACK_UPDATED,
* track: Track
* }}
*/
export function trackMutedChanged(track: any): {
track: {
jitsiTrack: any;
muted: boolean;
};
type: 'TRACK_UPDATED';
} {
return {
type: TRACK_UPDATED,
track: {
jitsiTrack: track,
muted: track.isMuted()
}
};
}
/**
* Create an action for when a track's muted state change action has failed. This could happen because of
* {@code getUserMedia} errors during unmute or replace track errors at the peerconnection level.
*
* @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
* @param {boolean} wasMuting - If the operation that failed was a mute operation or an unmute operation.
* @returns {{
* type: TRACK_MUTE_UNMUTE_FAILED,
* track: Track
* }}
*/
export function trackMuteUnmuteFailed(track: any, wasMuting: boolean): {
track: any;
type: 'TRACK_MUTE_UNMUTE_FAILED';
wasMuting: boolean;
} {
return {
type: TRACK_MUTE_UNMUTE_FAILED,
track,
wasMuting
};
}
/**
* Create an action for when a track's no data from source notification information changes.
*
* @param {JitsiLocalTrack} track - JitsiTrack instance.
* @param {Object} noDataFromSourceNotificationInfo - Information about no data from source notification.
* @returns {{
* type: TRACK_UPDATED,
* track: Track
* }}
*/
export function trackNoDataFromSourceNotificationInfoChanged(track: any, noDataFromSourceNotificationInfo?: Object) {
return {
type: TRACK_UPDATED,
track: {
jitsiTrack: track,
noDataFromSourceNotificationInfo
}
};
}
/**
* Create an action for when a track has been signaled for removal from the
* conference.
*
* @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
* @returns {{
* type: TRACK_REMOVED,
* track: Track
* }}
*/
export function trackRemoved(track: any): {
track: {
jitsiTrack: any;
};
type: 'TRACK_REMOVED';
} {
track.removeAllListeners(JitsiTrackEvents.TRACK_MUTE_CHANGED);
track.removeAllListeners(JitsiTrackEvents.TRACK_VIDEOTYPE_CHANGED);
track.removeAllListeners(JitsiTrackEvents.NO_DATA_FROM_SOURCE);
return {
type: TRACK_REMOVED,
track: {
jitsiTrack: track
}
};
}
/**
* Signal that track's video started to play.
*
* @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
* @returns {{
* type: TRACK_UPDATED,
* track: Track
* }}
*/
export function trackVideoStarted(track: any): {
track: {
jitsiTrack: any;
videoStarted: true;
};
type: 'TRACK_UPDATED';
} {
return {
type: TRACK_UPDATED,
track: {
jitsiTrack: track,
videoStarted: true
}
};
}
/**
* Create an action for when participant video type changes.
*
* @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
* @param {VIDEO_TYPE|undefined} videoType - Video type.
* @returns {{
* type: TRACK_UPDATED,
* track: Track
* }}
*/
export function trackVideoTypeChanged(track: any, videoType: VideoType) {
const mediaType = videoType === VIDEO_TYPE.CAMERA ? MEDIA_TYPE.VIDEO : MEDIA_TYPE.SCREENSHARE;
return {
type: TRACK_UPDATED,
track: {
jitsiTrack: track,
videoType,
mediaType
}
};
}
/**
* Create an action for when track streaming status changes.
*
* @param {(JitsiRemoteTrack)} track - JitsiTrack instance.
* @param {string} streamingStatus - The new streaming status of the track.
* @returns {{
* type: TRACK_UPDATED,
* track: Track
* }}
*/
export function trackStreamingStatusChanged(track: any, streamingStatus: string): {
track: {
jitsiTrack: any;
streamingStatus: string;
};
type: 'TRACK_UPDATED';
} {
return {
type: TRACK_UPDATED,
track: {
jitsiTrack: track,
streamingStatus
}
};
}
/**
* Signals passed tracks to be added.
*
* @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
* @private
* @returns {Function}
*/
function _addTracks(tracks: any[]) {
return (dispatch: IStore['dispatch']) => Promise.all(tracks.map(t => dispatch(trackAdded(t))));
}
/**
* Cancels and waits for any {@code getUserMedia} processes currently in
* progress to complete/settle.
*
* @param {Function} getState - The redux store {@code getState} function used
* to obtain the state.
* @private
* @returns {Promise} - A {@code Promise} resolved once all
* {@code gumProcess.cancel()} {@code Promise}s are settled because all we care
* about here is to be sure that the {@code getUserMedia} callbacks have
* completed (i.e. returned from the native side).
*/
function _cancelGUMProcesses(getState: IStore['getState']): Promise<any> {
const logError
= (error: Error) =>
logger.error('gumProcess.cancel failed', JSON.stringify(error));
return Promise.all(
getState()['features/base/tracks']
.filter(t => t.local)
.map(({ gumProcess }: any) =>
gumProcess?.cancel().catch(logError)));
}
/**
* Disposes passed tracks and signals them to be removed.
*
* @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
* @protected
* @returns {Function}
*/
export function _disposeAndRemoveTracks(tracks: any[]) {
return (dispatch: IStore['dispatch']) =>
_disposeTracks(tracks)
.then(() =>
Promise.all(tracks.map(t => dispatch(trackRemoved(t)))));
}
/**
* Disposes passed tracks.
*
* @param {(JitsiLocalTrack|JitsiRemoteTrack)[]} tracks - List of tracks.
* @private
* @returns {Promise} - A Promise resolved once {@link JitsiTrack.dispose()} is
* done for every track from the list.
*/
function _disposeTracks(tracks: any[]): Promise<any> {
return Promise.all(
tracks.map(t =>
t.dispose()
.catch((err: Error) => {
// Track might be already disposed so ignore such an error.
// Of course, re-throw any other error(s).
if (err.name !== JitsiTrackErrors.TRACK_IS_DISPOSED) {
throw err;
}
})));
}
/**
* Implements the {@code Promise} rejection handler of
* {@code createLocalTracksA} and {@code createLocalTracksF}.
*
* @param {Object} error - The {@code Promise} rejection reason.
* @param {string} device - The device/{@code MEDIA_TYPE} associated with the
* rejection.
* @private
* @returns {Function}
*/
function _onCreateLocalTracksRejected(error?: Error, device?: string) {
return (dispatch: IStore['dispatch']) => {
// If permissions are not allowed, alert the user.
dispatch({
type: TRACK_CREATE_ERROR,
permissionDenied: error?.name === 'SecurityError',
trackType: device
});
};
}
/**
* Returns true if the provided {@code JitsiTrack} should be rendered as a
* mirror.
*
* We only want to show a video in mirrored mode when:
* 1) The video source is local, and not remote.
* 2) The video source is a camera, not a desktop (capture).
* 3) The camera is capturing the user, not the environment.
*
* TODO Similar functionality is part of lib-jitsi-meet. This function should be
* removed after https://github.com/jitsi/lib-jitsi-meet/pull/187 is merged.
*
* @param {(JitsiLocalTrack|JitsiRemoteTrack)} track - JitsiTrack instance.
* @private
* @returns {boolean}
*/
function _shouldMirror(track: any): boolean {
return (
track?.isLocal()
&& track?.isVideoTrack()
// XXX The type of the return value of JitsiLocalTrack's
// getCameraFacingMode happens to be named CAMERA_FACING_MODE as
// well, it's defined by lib-jitsi-meet. Note though that the type
// of the value on the right side of the equality check is defined
// by jitsi-meet. The type definitions are surely compatible today
// but that may not be the case tomorrow.
&& track?.getCameraFacingMode() === CAMERA_FACING_MODE.USER);
}
/**
* Signals that the track create operation for the given media type has been canceled.
* Will clean up the local track stub from the redux state which holds the
* {@code gumProcess} reference.
*
* @param {MEDIA_TYPE} mediaType - The type of the media for which the track was
* being created.
* @private
* @returns {{
* type: TRACK_CREATE_CANCELED,
* trackType: MEDIA_TYPE
* }}
*/
function _trackCreateCanceled(mediaType: MediaType): {
trackType: MediaType;
type: 'TRACK_CREATE_CANCELED';
} {
return {
type: TRACK_CREATE_CANCELED,
trackType: mediaType
};
}
/**
* If the local track is of type Desktop, it calls _disposeAndRemoveTracks() on it.
*
* @returns {Function}
*/
export function destroyLocalDesktopTrackIfExists() {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const videoTrack = getLocalVideoTrack(getState()['features/base/tracks']);
const isDesktopTrack = videoTrack && videoTrack.videoType === VIDEO_TYPE.DESKTOP;
if (isDesktopTrack) {
dispatch(_disposeAndRemoveTracks([ videoTrack.jitsiTrack ]));
}
};
}
/**
* Sets UID of the displayed no data from source notification. Used to track
* if the notification was previously displayed in this context.
*
* @param {string} [uid] - Notification UID.
* @returns {{
* type: SET_NO_SRC_DATA_NOTIFICATION_UID,
* uid: string
* }}
*/
export function setNoSrcDataNotificationUid(uid?: string) {
return {
type: SET_NO_SRC_DATA_NOTIFICATION_UID,
uid
};
}


@@ -0,0 +1,68 @@
import { IReduxState, IStore } from '../../app/types';
import { showNotification } from '../../notifications/actions';
import { NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import JitsiMeetJS from '../lib-jitsi-meet';
import { setScreenshareMuted } from '../media/actions';
import { addLocalTrack, replaceLocalTrack } from './actions.any';
import { getLocalDesktopTrack, getTrackState } from './functions.native';
export * from './actions.any';
/**
* Signals that the local participant is ending screensharing or beginning the screensharing flow.
*
* @param {boolean} enabled - The state to toggle screen sharing to.
* @param {boolean} _ignore1 - Ignored.
* @param {any} _ignore2 - Ignored.
* @returns {Function}
*/
export function toggleScreensharing(enabled: boolean, _ignore1?: boolean, _ignore2?: any) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
if (enabled) {
_startScreenSharing(dispatch, state);
} else {
dispatch(setScreenshareMuted(true));
}
};
}
/**
* Creates desktop track and replaces the local one.
*
* @private
* @param {Dispatch} dispatch - The redux {@code dispatch} function.
* @param {Object} state - The redux state.
* @returns {Promise<void>}
*/
async function _startScreenSharing(dispatch: IStore['dispatch'], state: IReduxState) {
try {
const tracks: any[] = await JitsiMeetJS.createLocalTracks({ devices: [ 'desktop' ] });
const track = tracks[0];
const currentLocalDesktopTrack = getLocalDesktopTrack(getTrackState(state));
const currentJitsiTrack = currentLocalDesktopTrack?.jitsiTrack;
// The first time the user shares the screen we add the track and create the transceiver.
// Afterwards, we just replace the old track, so the transceiver will be reused.
if (currentJitsiTrack) {
dispatch(replaceLocalTrack(currentJitsiTrack, track));
} else {
dispatch(addLocalTrack(track));
}
const { enabled: audioOnly } = state['features/base/audio-only'];
if (audioOnly) {
dispatch(showNotification({
titleKey: 'notify.screenSharingAudioOnlyTitle',
descriptionKey: 'notify.screenSharingAudioOnlyDescription',
maxLines: 3
}, NOTIFICATION_TIMEOUT_TYPE.LONG));
}
} catch (error: any) {
console.error('ERROR creating screen-sharing stream ', error);
}
}


@@ -0,0 +1,543 @@
// @ts-expect-error
import { AUDIO_ONLY_SCREEN_SHARE_NO_TRACK } from '../../../../modules/UI/UIErrors';
import { IReduxState, IStore } from '../../app/types';
import { showModeratedNotification } from '../../av-moderation/actions';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../../av-moderation/constants';
import { shouldShowModeratedNotification } from '../../av-moderation/functions';
import { setNoiseSuppressionEnabled } from '../../noise-suppression/actions';
import { showErrorNotification, showNotification } from '../../notifications/actions';
import { NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import { stopReceiver } from '../../remote-control/actions';
import { setScreenAudioShareState, setScreenshareAudioTrack } from '../../screen-share/actions';
import { isAudioOnlySharing, isScreenVideoShared } from '../../screen-share/functions';
import { toggleScreenshotCaptureSummary } from '../../screenshot-capture/actions';
import { isScreenshotCaptureEnabled } from '../../screenshot-capture/functions';
import { AudioMixerEffect } from '../../stream-effects/audio-mixer/AudioMixerEffect';
import { getCurrentConference } from '../conference/functions';
import { notifyCameraError, notifyMicError } from '../devices/actions.web';
import { openDialog } from '../dialog/actions';
import { JitsiTrackErrors, JitsiTrackEvents, browser } from '../lib-jitsi-meet';
import { createLocalTrack } from '../lib-jitsi-meet/functions.any';
import { gumPending, setScreenshareMuted } from '../media/actions';
import {
CAMERA_FACING_MODE,
MEDIA_TYPE,
MediaType,
VIDEO_TYPE,
} from '../media/constants';
import { IGUMPendingState } from '../media/types';
import { updateSettings } from '../settings/actions';
import { addLocalTrack, replaceLocalTrack } from './actions.any';
import AllowToggleCameraDialog from './components/web/AllowToggleCameraDialog';
import {
createLocalTracksF,
getLocalDesktopTrack,
getLocalJitsiAudioTrack,
getLocalVideoTrack,
isToggleCameraEnabled
} from './functions';
import logger from './logger';
import { ICreateInitialTracksOptions, IInitialTracksErrors, IShareOptions, IToggleScreenSharingOptions } from './types';
export * from './actions.any';
/**
* Signals that the local participant is ending screensharing or beginning the screensharing flow.
*
* @param {boolean} enabled - The state to toggle screen sharing to.
* @param {boolean} audioOnly - Only share system audio.
* @param {Object} shareOptions - The options to be passed for capturing screenshare.
* @returns {Function}
*/
export function toggleScreensharing(
enabled?: boolean,
audioOnly = false,
shareOptions: IShareOptions = {}) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
// check for A/V Moderation when trying to start screen sharing
if ((enabled || enabled === undefined) && shouldShowModeratedNotification(AVM_MEDIA_TYPE.DESKTOP, getState())) {
dispatch(showModeratedNotification(AVM_MEDIA_TYPE.DESKTOP));
return Promise.resolve();
}
return _toggleScreenSharing({
enabled,
audioOnly,
shareOptions
}, {
dispatch,
getState
});
};
}
/**
* Applies the AudioMixer effect on the local audio track if applicable. If there is no local audio track, the desktop
* audio track is added to the conference.
*
* @private
* @param {JitsiLocalTrack} desktopAudioTrack - The audio track to be added to the conference.
* @param {IReduxState} state - The redux state.
* @returns {void}
*/
async function _maybeApplyAudioMixerEffect(desktopAudioTrack: any, state: IReduxState): Promise<void> {
const localAudio = getLocalJitsiAudioTrack(state);
const conference = getCurrentConference(state);
if (localAudio) {
// If there is a localAudio stream, mix in the desktop audio stream captured by the screen sharing API.
const mixerEffect = new AudioMixerEffect(desktopAudioTrack);
await localAudio.setEffect(mixerEffect);
} else {
// If no local stream is present (i.e. no input audio devices) we use the screen share audio
// stream as we would use a regular stream.
await conference?.replaceTrack(null, desktopAudioTrack);
}
}
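/**
 * Conceptual sketch, not the AudioMixerEffect implementation: mixing two audio
 * streams into one with the Web Audio API, which is the underlying idea of the
 * effect applied above. The wiring here is an illustrative assumption.
 *
 * @param {MediaStream} micStream - The microphone stream.
 * @param {MediaStream} desktopStream - The desktop-audio stream.
 * @returns {MediaStream} - A stream carrying a single mixed audio track.
 */
function _sketchMixStreams(micStream: MediaStream, desktopStream: MediaStream): MediaStream {
const ctx = new AudioContext();
const destination = ctx.createMediaStreamDestination();
// Route both sources into one destination node; its stream exposes the mix.
ctx.createMediaStreamSource(micStream).connect(destination);
ctx.createMediaStreamSource(desktopStream).connect(destination);
return destination.stream;
}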
/**
* Toggles screen sharing.
*
* @private
* @param {boolean} enabled - The state to toggle screen sharing to.
* @param {Store} store - The redux store.
* @returns {Promise<void>}
*/
async function _toggleScreenSharing(
{
enabled,
audioOnly = false,
shareOptions = {}
}: IToggleScreenSharingOptions,
store: IStore
): Promise<void> {
const { dispatch, getState } = store;
const state = getState();
const audioOnlySharing = isAudioOnlySharing(state);
const screenSharing = isScreenVideoShared(state);
const conference = getCurrentConference(state);
const localAudio = getLocalJitsiAudioTrack(state);
const localScreenshare = getLocalDesktopTrack(state['features/base/tracks']);
// Toggle screenshare or audio-only share if the new state is not passed. Happens in the following two cases.
// 1. ShareAudioDialog passes undefined when the user hits continue in the share audio demo modal.
// 2. Toggle screenshare called from the external API.
const enable = audioOnly
? enabled ?? !audioOnlySharing
: enabled ?? !screenSharing;
const screensharingDetails: { sourceType?: string; } = {};
if (enable) {
let tracks;
// Spot proxy stream.
if (shareOptions.desktopStream) {
tracks = [ shareOptions.desktopStream ];
} else {
const { _desktopSharingSourceDevice } = state['features/base/config'];
if (!shareOptions.desktopSharingSources && _desktopSharingSourceDevice) {
shareOptions.desktopSharingSourceDevice = _desktopSharingSourceDevice;
}
const options = {
devices: [ VIDEO_TYPE.DESKTOP ],
...shareOptions
};
try {
tracks = await createLocalTracksF(options) as any[];
} catch (error) {
dispatch(handleScreenSharingError(error));
return;
}
}
const desktopAudioTrack = tracks.find(track => track.getType() === MEDIA_TYPE.AUDIO);
const desktopVideoTrack = tracks.find(track => track.getType() === MEDIA_TYPE.VIDEO);
if (audioOnly) {
// Dispose the desktop track for audio-only screensharing.
desktopVideoTrack.dispose();
if (!desktopAudioTrack) {
dispatch(handleScreenSharingError(AUDIO_ONLY_SCREEN_SHARE_NO_TRACK));
return;
}
} else if (desktopVideoTrack) {
if (localScreenshare) {
await dispatch(replaceLocalTrack(localScreenshare.jitsiTrack, desktopVideoTrack, conference));
} else {
await dispatch(addLocalTrack(desktopVideoTrack));
}
if (isScreenshotCaptureEnabled(state, false, true)) {
dispatch(toggleScreenshotCaptureSummary(true));
}
screensharingDetails.sourceType = desktopVideoTrack.sourceType;
}
// Apply the AudioMixer effect if there is a local audio track; otherwise, add the desktop track to the
// conference without unmuting the microphone.
if (desktopAudioTrack) {
// Noise suppression doesn't work with desktop audio because we can't chain track effects yet, so disable it
// first. We need to wait for the effect to clear first or it might interfere with the audio mixer.
await dispatch(setNoiseSuppressionEnabled(false));
_maybeApplyAudioMixerEffect(desktopAudioTrack, state);
dispatch(setScreenshareAudioTrack(desktopAudioTrack));
// Handle the case where screen share was stopped from the browser's 'screen share in progress' window.
if (audioOnly) {
desktopAudioTrack?.on(
JitsiTrackEvents.LOCAL_TRACK_STOPPED,
() => dispatch(toggleScreensharing(undefined, true)));
}
}
// Show notification about more bandwidth usage in audio-only mode if the user starts screensharing. This
// doesn't apply to audio-only screensharing.
const { enabled: bestPerformanceMode } = state['features/base/audio-only'];
if (bestPerformanceMode && !audioOnly) {
dispatch(showNotification({
titleKey: 'notify.screenSharingAudioOnlyTitle',
descriptionKey: 'notify.screenSharingAudioOnlyDescription'
}, NOTIFICATION_TIMEOUT_TYPE.LONG));
}
} else {
const { desktopAudioTrack } = state['features/screen-share'];
dispatch(stopReceiver());
dispatch(toggleScreenshotCaptureSummary(false));
// Mute the desktop track instead of removing it from the conference since we don't want the client to signal
// a source-remove to the remote peer for the screenshare track. Later when screenshare is enabled again, the
// same sender will be re-used without the need for signaling a new ssrc through source-add.
dispatch(setScreenshareMuted(true));
if (desktopAudioTrack) {
if (localAudio) {
localAudio.setEffect(undefined);
} else {
await conference?.replaceTrack(desktopAudioTrack, null);
}
desktopAudioTrack.dispose();
dispatch(setScreenshareAudioTrack(null));
}
}
if (audioOnly) {
dispatch(setScreenAudioShareState(enable));
} else {
// Notify the external API.
APP.API.notifyScreenSharingStatusChanged(enable, screensharingDetails);
}
}
/**
* Sets the camera facing mode (environment/user). If no facing mode is provided, it toggles the camera.
*
* @param {string | undefined} facingMode - The selected facing mode.
* @returns {void}
*/
export function setCameraFacingMode(facingMode: string | undefined) {
return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
if (!isToggleCameraEnabled(state)) {
return;
}
if (!facingMode) {
dispatch(toggleCamera());
return;
}
const tracks = state['features/base/tracks'];
const localVideoTrack = getLocalVideoTrack(tracks)?.jitsiTrack;
if (!tracks || !localVideoTrack) {
return;
}
const currentFacingMode = localVideoTrack.getCameraFacingMode();
if (currentFacingMode !== facingMode) {
dispatch(toggleCamera());
}
};
}
/**
* Signals to open the permission dialog for toggling camera remotely.
*
* @param {Function} onAllow - Callback to be executed if permission to toggle camera was granted.
* @param {string} initiatorId - The participant id of the requester.
* @returns {Object} - The open dialog action.
*/
export function openAllowToggleCameraDialog(onAllow: Function, initiatorId: string) {
return openDialog(AllowToggleCameraDialog, {
onAllow,
initiatorId
});
}
/**
* Sets the GUM pending state for the tracks that have failed.
*
* NOTE: Some of the tracks that we will be setting to GUM pending state NONE may not have failed; they may simply
* never have been requested. This won't be a problem because their current GUM pending state will be NONE anyway.
*
* @param {Array<JitsiLocalTrack>} tracks - The tracks that have been created.
* @param {Function} dispatch - The redux dispatch function.
* @returns {void}
*/
export function setGUMPendingStateOnFailedTracks(tracks: Array<any>, dispatch: IStore['dispatch']) {
const tracksTypes = tracks.map(track => {
if (track.getVideoType() === VIDEO_TYPE.DESKTOP) {
return MEDIA_TYPE.SCREENSHARE;
}
return track.getType();
});
const nonPendingTracks = [ MEDIA_TYPE.AUDIO, MEDIA_TYPE.VIDEO ].filter(type => !tracksTypes.includes(type));
dispatch(gumPending(nonPendingTracks, IGUMPendingState.NONE));
}
/**
* Creates and adds to the conference the initial audio/video tracks.
*
* @param {Array<MediaType>} devices - Array with devices (audio/video) that will be used.
* @returns {Function}
*/
export function createAndAddInitialAVTracks(devices: Array<MediaType>) {
return async (dispatch: IStore['dispatch']) => {
dispatch(gumPending(devices, IGUMPendingState.PENDING_UNMUTE));
const { tracks, errors } = await dispatch(createInitialAVTracks({ devices }));
setGUMPendingStateOnFailedTracks(tracks, dispatch);
dispatch(displayErrorsForCreateInitialLocalTracks(errors));
await Promise.allSettled(tracks.map((track: any) => {
const legacyConferenceObject = APP.conference;
if (track.isAudioTrack()) {
return legacyConferenceObject.useAudioStream(track);
}
if (track.isVideoTrack()) {
return legacyConferenceObject.useVideoStream(track);
}
return Promise.resolve();
}));
dispatch(gumPending(devices, IGUMPendingState.NONE));
};
}
/**
* Creates the initial audio/video tracks.
*
* @param {ICreateInitialTracksOptions} options - Options for creating the audio/video tracks.
* @param {boolean} recordTimeMetrics - If true, time metrics will be recorded.
* @returns {Function}
*/
export function createInitialAVTracks(options: ICreateInitialTracksOptions, recordTimeMetrics = false) {
return (dispatch: IStore['dispatch'], _getState: IStore['getState']) => {
const {
devices,
timeout
} = options;
dispatch(gumPending(devices, IGUMPendingState.PENDING_UNMUTE));
return createLocalTracksF(options, undefined, recordTimeMetrics).then(tracks => {
return {
errors: {} as IInitialTracksErrors,
tracks
};
})
.catch(async error => {
const errors = {} as IInitialTracksErrors;
if (error.name === JitsiTrackErrors.TIMEOUT && !browser.isElectron()) {
if (devices.includes(MEDIA_TYPE.AUDIO)) {
errors.audioOnlyError = error;
}
if (devices.includes(MEDIA_TYPE.VIDEO)) {
errors.videoOnlyError = error;
}
if (errors.audioOnlyError && errors.videoOnlyError) {
errors.audioAndVideoError = error;
}
return {
errors,
tracks: []
};
}
// Retry with separate gUM calls.
const gUMPromises = [];
const tracks: any[] = [];
if (devices.includes(MEDIA_TYPE.AUDIO)) {
gUMPromises.push(createLocalTracksF({
devices: [ MEDIA_TYPE.AUDIO ],
timeout
}));
}
if (devices.includes(MEDIA_TYPE.VIDEO)) {
gUMPromises.push(createLocalTracksF({
devices: [ MEDIA_TYPE.VIDEO ],
timeout
}));
}
const results = await Promise.allSettled(gUMPromises);
let errorMsg;
results.forEach((result, idx) => {
if (result.status === 'fulfilled') {
tracks.push(result.value[0]);
} else {
errorMsg = result.reason;
const isAudio = idx === 0;
logger.error(`${isAudio ? 'Audio' : 'Video'} track creation failed with error ${errorMsg}`);
if (isAudio) {
errors.audioOnlyError = errorMsg;
} else {
errors.videoOnlyError = errorMsg;
}
}
});
if (errors.audioOnlyError && errors.videoOnlyError) {
errors.audioAndVideoError = errorMsg;
}
return {
tracks,
errors
};
});
};
}
/**
* Displays error notifications according to the state carried by the passed {@code errors} object.
*
* @param {InitialTracksErrors} errors - The errors (if any).
* @returns {Function}
* @private
*/
export function displayErrorsForCreateInitialLocalTracks(errors: IInitialTracksErrors) {
return (dispatch: IStore['dispatch']) => {
const {
audioOnlyError,
screenSharingError,
videoOnlyError
} = errors;
if (screenSharingError) {
dispatch(handleScreenSharingError(screenSharingError));
}
if (audioOnlyError || videoOnlyError) {
if (audioOnlyError) {
dispatch(notifyMicError(audioOnlyError));
}
if (videoOnlyError) {
dispatch(notifyCameraError(videoOnlyError));
}
}
};
}
/**
* Displays a UI notification for screensharing failure based on the error passed.
*
* @private
* @param {Error | AUDIO_ONLY_SCREEN_SHARE_NO_TRACK} error - The error.
* @returns {Function}
*/
export function handleScreenSharingError(
error: Error | AUDIO_ONLY_SCREEN_SHARE_NO_TRACK) {
return (dispatch: IStore['dispatch']) => {
logger.error('failed to share local desktop', error);
let descriptionKey;
let titleKey;
if (error.name === JitsiTrackErrors.PERMISSION_DENIED) {
descriptionKey = 'dialog.screenSharingPermissionDeniedError';
titleKey = 'dialog.screenSharingFailedTitle';
} else if (error.name === JitsiTrackErrors.CONSTRAINT_FAILED) {
descriptionKey = 'dialog.cameraConstraintFailedError';
titleKey = 'deviceError.cameraError';
} else if (error.name === JitsiTrackErrors.SCREENSHARING_GENERIC_ERROR) {
descriptionKey = 'dialog.screenSharingFailed';
titleKey = 'dialog.screenSharingFailedTitle';
} else if (error === AUDIO_ONLY_SCREEN_SHARE_NO_TRACK) {
descriptionKey = 'notify.screenShareNoAudio';
titleKey = 'notify.screenShareNoAudioTitle';
} else {
// Safeguard against showing a notification with empty text. This also covers
// error.name === JitsiTrackErrors.SCREENSHARING_USER_CANCELED.
return;
}
dispatch(showErrorNotification({
descriptionKey,
titleKey
}));
};
}
/**
* Toggles the facingMode constraint on the video stream.
*
* @returns {Function}
*/
export function toggleCamera() {
return async (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
const tracks = state['features/base/tracks'];
const localVideoTrack = getLocalVideoTrack(tracks)?.jitsiTrack;
const currentFacingMode = localVideoTrack.getCameraFacingMode();
const { localFlipX } = state['features/base/settings'];
const targetFacingMode = currentFacingMode === CAMERA_FACING_MODE.USER
? CAMERA_FACING_MODE.ENVIRONMENT
: CAMERA_FACING_MODE.USER;
// Update the flipX value so the environment facing camera is not flipped, before the new track is created.
dispatch(updateSettings({ localFlipX: targetFacingMode === CAMERA_FACING_MODE.USER ? localFlipX : false }));
// On mobile only one camera can be open at a time, so first stop the current camera track.
await dispatch(replaceLocalTrack(localVideoTrack, null));
const newVideoTrack = await createLocalTrack('video', null, null, { facingMode: targetFacingMode });
await dispatch(replaceLocalTrack(null, newVideoTrack));
};
}
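/**
 * Usage sketch (hypothetical call site, assuming a thunk-enabled dispatch):
 *
 * dispatch(setCameraFacingMode(CAMERA_FACING_MODE.ENVIRONMENT)); // switch to the back camera
 * dispatch(setCameraFacingMode(undefined)); // no mode given, so just toggle
 */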


@@ -0,0 +1,44 @@
import React from 'react';
import { WithTranslation } from 'react-i18next';
import { useSelector } from 'react-redux';
import { IReduxState } from '../../../../app/types';
import { translate } from '../../../i18n/functions';
import { getParticipantDisplayName } from '../../../participants/functions';
import Dialog from '../../../ui/components/web/Dialog';
interface IProps extends WithTranslation {
/**
* The participant id of the toggle camera requester.
*/
initiatorId: string;
/**
* Function to be invoked after permission to toggle camera granted.
*/
onAllow: () => void;
}
/**
* Dialog to allow toggling camera remotely.
*
* @returns {JSX.Element} - The allow toggle camera dialog.
*/
const AllowToggleCameraDialog = ({ onAllow, t, initiatorId }: IProps): JSX.Element => {
const initiatorName = useSelector((state: IReduxState) => getParticipantDisplayName(state, initiatorId));
return (
<Dialog
ok = {{ translationKey: 'dialog.allow' }}
onSubmit = { onAllow }
titleKey = 'dialog.allowToggleCameraTitle'>
<div>
{ t('dialog.allowToggleCameraDialog', { initiatorName }) }
</div>
</Dialog>
);
};
export default translate(AllowToggleCameraDialog);
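/**
 * Usage sketch (hypothetical call site): a remote camera-toggle request from
 * initiatorId could be surfaced via the openAllowToggleCameraDialog action
 * defined alongside toggleCamera in the tracks actions, e.g.:
 *
 * dispatch(openAllowToggleCameraDialog(() => dispatch(toggleCamera()), initiatorId));
 */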


@@ -0,0 +1,5 @@
/**
* The payload name for remotely setting the camera facing mode message.
*/
export const CAMERA_FACING_MODE_MESSAGE = 'camera-facing-mode-message';
export const LOWER_HAND_MESSAGE = 'lower-hand-message';


@@ -0,0 +1,484 @@
import { IReduxState, IStore } from '../../app/types';
import { getSsrcRewritingFeatureFlag } from '../config/functions.any';
import { JitsiTrackErrors, browser } from '../lib-jitsi-meet';
import { gumPending } from '../media/actions';
import { CAMERA_FACING_MODE, MEDIA_TYPE, MediaType, VIDEO_TYPE } from '../media/constants';
import { IMediaState } from '../media/reducer';
import { IGUMPendingState } from '../media/types';
import {
getMutedStateByParticipantAndMediaType,
getVirtualScreenshareParticipantOwnerId,
isScreenShareParticipant
} from '../participants/functions';
import { IParticipant } from '../participants/types';
import logger from './logger';
import { ITrack } from './types';
/**
* Returns root tracks state.
*
* @param {IReduxState} state - Global state.
* @returns {Object} Tracks state.
*/
export const getTrackState = (state: IReduxState) => state['features/base/tracks'];
/**
* Checks if the passed media type is muted for the participant.
*
* @param {IParticipant} participant - Participant reference.
* @param {MediaType} mediaType - Media type.
* @param {IReduxState} state - Global state.
* @returns {boolean} - Is the media type muted for the participant.
*/
export function isParticipantMediaMuted(participant: IParticipant | undefined,
mediaType: MediaType, state: IReduxState) {
if (!participant) {
return false;
}
if (getSsrcRewritingFeatureFlag(state)) {
return getMutedStateByParticipantAndMediaType(state, participant, mediaType);
}
const tracks = getTrackState(state);
if (participant?.local) {
return isLocalTrackMuted(tracks, mediaType);
} else if (!participant?.fakeParticipant) {
return isRemoteTrackMuted(tracks, mediaType, participant.id);
}
return true;
}
/**
* Checks if the participant is audio muted.
*
* @param {IParticipant} participant - Participant reference.
* @param {IReduxState} state - Global state.
* @returns {boolean} - Is audio muted for the participant.
*/
export function isParticipantAudioMuted(participant: IParticipant | undefined, state: IReduxState) {
return isParticipantMediaMuted(participant, MEDIA_TYPE.AUDIO, state);
}
/**
* Checks if the participant is screen-share muted.
*
* @param {IParticipant} participant - Participant reference.
* @param {IReduxState} state - Global state.
* @returns {boolean} - Is screen-share muted for the participant.
*/
export function isParticipantScreenShareMuted(participant: IParticipant | undefined, state: IReduxState) {
return isParticipantMediaMuted(participant, MEDIA_TYPE.SCREENSHARE, state);
}
/**
* Checks if the participant is video muted.
*
* @param {IParticipant} participant - Participant reference.
* @param {IReduxState} state - Global state.
* @returns {boolean} - Is video muted for the participant.
*/
export function isParticipantVideoMuted(participant: IParticipant | undefined, state: IReduxState) {
return isParticipantMediaMuted(participant, MEDIA_TYPE.VIDEO, state);
}
/**
* Returns local audio track.
*
* @param {ITrack[]} tracks - List of all tracks.
* @returns {(Track|undefined)}
*/
export function getLocalAudioTrack(tracks: ITrack[]) {
return getLocalTrack(tracks, MEDIA_TYPE.AUDIO);
}
/**
* Returns the local desktop track.
*
* @param {Track[]} tracks - List of all tracks.
* @param {boolean} [includePending] - Indicates whether a local track is to be returned if it is still pending.
* A local track is pending if {@code getUserMedia} is still executing to create it and, consequently, its
* {@code jitsiTrack} property is {@code undefined}. By default a pending local track is not returned.
* @returns {(Track|undefined)}
*/
export function getLocalDesktopTrack(tracks: ITrack[], includePending = false) {
return (
getLocalTracks(tracks, includePending)
.find(t => t.mediaType === MEDIA_TYPE.SCREENSHARE || t.videoType === VIDEO_TYPE.DESKTOP));
}
/**
* Returns the stored local desktop jitsiLocalTrack.
*
* @param {IReduxState} state - The redux state.
* @returns {JitsiLocalTrack|undefined}
*/
export function getLocalJitsiDesktopTrack(state: IReduxState) {
const track = getLocalDesktopTrack(getTrackState(state));
return track?.jitsiTrack;
}
/**
* Returns local track by media type.
*
* @param {ITrack[]} tracks - List of all tracks.
* @param {MediaType} mediaType - Media type.
* @param {boolean} [includePending] - Indicates whether a local track is to be
* returned if it is still pending. A local track is pending if
* {@code getUserMedia} is still executing to create it and, consequently, its
* {@code jitsiTrack} property is {@code undefined}. By default a pending local
* track is not returned.
* @returns {(Track|undefined)}
*/
export function getLocalTrack(tracks: ITrack[], mediaType: MediaType, includePending = false) {
if (mediaType === MEDIA_TYPE.SCREENSHARE) {
return getLocalDesktopTrack(tracks, includePending);
}
return (
getLocalTracks(tracks, includePending)
.find(t => t.mediaType === mediaType));
}
/**
* Returns an array containing the local tracks with or without a (valid)
* {@code JitsiTrack}.
*
* @param {ITrack[]} tracks - An array containing all local tracks.
* @param {boolean} [includePending] - Indicates whether a local track is to be
* returned if it is still pending. A local track is pending if
* {@code getUserMedia} is still executing to create it and, consequently, its
* {@code jitsiTrack} property is {@code undefined}. By default a pending local
* track is not returned.
* @returns {Track[]}
*/
export function getLocalTracks(tracks: ITrack[], includePending = false) {
// XXX A local track is considered ready only once it has its `jitsiTrack`
// property set by the `TRACK_ADDED` action. Until then there is a stub
// added just before the `getUserMedia` call with a cancellable
// `gumProcess` property which can then be used to destroy the track that
// has not yet been added to the redux store. Once GUM is cancelled, it will
// never make it to the store nor will there be any
// `TRACK_ADDED`/`TRACK_REMOVED` actions dispatched for it.
return tracks.filter(t => t.local && (t.jitsiTrack || includePending));
}
/**
* Returns local video track.
*
* @param {ITrack[]} tracks - List of all tracks.
* @returns {(Track|undefined)}
*/
export function getLocalVideoTrack(tracks: ITrack[]) {
return getLocalTrack(tracks, MEDIA_TYPE.VIDEO);
}
/**
* Returns the stored local video track.
*
* @param {IReduxState} state - The redux state.
* @returns {Object}
*/
export function getLocalJitsiVideoTrack(state: IReduxState) {
const track = getLocalVideoTrack(getTrackState(state));
return track?.jitsiTrack;
}
/**
* Returns the stored local audio track.
*
* @param {IReduxState} state - The redux state.
* @returns {Object}
*/
export function getLocalJitsiAudioTrack(state: IReduxState) {
const track = getLocalAudioTrack(getTrackState(state));
return track?.jitsiTrack;
}
/**
* Returns track of specified media type for specified participant.
*
* @param {IReduxState} state - The redux state.
* @param {IParticipant} participant - Participant Object.
* @returns {(Track|undefined)}
*/
export function getVideoTrackByParticipant(
state: IReduxState,
participant?: IParticipant) {
if (!participant) {
return;
}
const tracks = state['features/base/tracks'];
if (isScreenShareParticipant(participant)) {
return getVirtualScreenshareParticipantTrack(tracks, participant.id);
}
return getTrackByMediaTypeAndParticipant(tracks, MEDIA_TYPE.VIDEO, participant.id);
}
/**
* Returns track of specified media type for specified participant id.
*
* @param {ITrack[]} tracks - List of all tracks.
* @param {MediaType} mediaType - Media type.
* @param {string} participantId - Participant ID.
* @returns {(Track|undefined)}
*/
export function getTrackByMediaTypeAndParticipant(
tracks: ITrack[],
mediaType: MediaType,
participantId?: string) {
if (!participantId) {
return;
}
if (mediaType === MEDIA_TYPE.SCREENSHARE) {
return getScreenShareTrack(tracks, participantId);
}
return tracks.find(
t => Boolean(t.jitsiTrack) && t.participantId === participantId && t.mediaType === mediaType
);
}
/**
* Returns track for specified participant id.
*
* @param {ITrack[]} tracks - List of all tracks.
* @param {string} participantId - Participant ID.
* @returns {(Track[]|undefined)}
*/
export function getTrackByParticipantId(tracks: ITrack[], participantId: string) {
return tracks.filter(t => t.participantId === participantId);
}
/**
* Returns screenshare track of given virtualScreenshareParticipantId.
*
* @param {ITrack[]} tracks - List of all tracks.
* @param {string} virtualScreenshareParticipantId - Virtual Screenshare Participant ID.
* @returns {(Track|undefined)}
*/
export function getVirtualScreenshareParticipantTrack(tracks: ITrack[], virtualScreenshareParticipantId: string) {
const ownerId = getVirtualScreenshareParticipantOwnerId(virtualScreenshareParticipantId);
return getScreenShareTrack(tracks, ownerId);
}
/**
* Returns screenshare track of given owner ID.
*
* @param {Track[]} tracks - List of all tracks.
* @param {string} ownerId - Screenshare track owner ID.
* @returns {(Track|undefined)}
*/
export function getScreenShareTrack(tracks: ITrack[], ownerId: string) {
return tracks.find(
t => Boolean(t.jitsiTrack)
&& t.participantId === ownerId
&& (t.mediaType === MEDIA_TYPE.SCREENSHARE || t.videoType === VIDEO_TYPE.DESKTOP)
);
}
/**
* Returns track source name of specified media type for specified participant id.
*
* @param {ITrack[]} tracks - List of all tracks.
* @param {MediaType} mediaType - Media type.
* @param {string} participantId - Participant ID.
* @returns {(string|undefined)}
*/
export function getTrackSourceNameByMediaTypeAndParticipant(
tracks: ITrack[],
mediaType: MediaType,
participantId: string) {
const track = getTrackByMediaTypeAndParticipant(
tracks,
mediaType,
participantId);
return track?.jitsiTrack?.getSourceName();
}
/**
* Returns the track if any which corresponds to a specific instance
* of JitsiLocalTrack or JitsiRemoteTrack.
*
* @param {ITrack[]} tracks - List of all tracks.
* @param {(JitsiLocalTrack|JitsiRemoteTrack)} jitsiTrack - JitsiTrack instance.
* @returns {(Track|undefined)}
*/
export function getTrackByJitsiTrack(tracks: ITrack[], jitsiTrack: any) {
return tracks.find(t => t.jitsiTrack === jitsiTrack);
}
/**
* Returns tracks of specified media type.
*
* @param {ITrack[]} tracks - List of all tracks.
* @param {MediaType} mediaType - Media type.
* @returns {Track[]}
*/
export function getTracksByMediaType(tracks: ITrack[], mediaType: MediaType) {
return tracks.filter(t => t.mediaType === mediaType);
}
/**
* Checks if the first local track in the given tracks set is muted.
*
* @param {ITrack[]} tracks - List of all tracks.
* @param {MediaType} mediaType - The media type of tracks to be checked.
* @returns {boolean} True if local track is muted or false if the track is
* unmuted or if there are no local tracks of the given media type in the given
* set of tracks.
*/
export function isLocalTrackMuted(tracks: ITrack[], mediaType: MediaType) {
const track = getLocalTrack(tracks, mediaType);
return !track || track.muted;
}
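// Illustrative sketch (not part of this module): deriving the local mute
// state for toolbar buttons. `state` is an assumed binding.
//
//     const tracks = state['features/base/tracks'];
//     const audioMuted = isLocalTrackMuted(tracks, MEDIA_TYPE.AUDIO);
//     const videoMuted = isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO);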
/**
* Checks if the local video track is of type DESKTOP.
*
* @param {IReduxState} state - The redux state.
* @returns {boolean}
*/
export function isLocalVideoTrackDesktop(state: IReduxState) {
const desktopTrack = getLocalDesktopTrack(getTrackState(state));
return desktopTrack !== undefined && !desktopTrack.muted;
}
/**
* Returns true if the remote track of the given media type and the given
* participant is muted, false otherwise.
*
* @param {ITrack[]} tracks - List of all tracks.
* @param {MediaType} mediaType - The media type of tracks to be checked.
* @param {string} participantId - Participant ID.
* @returns {boolean}
*/
export function isRemoteTrackMuted(tracks: ITrack[], mediaType: MediaType, participantId: string) {
const track = getTrackByMediaTypeAndParticipant(tracks, mediaType, participantId);
return !track || track.muted;
}
/**
* Returns whether or not the current environment needs a user interaction with
* the page before any unmute can occur.
*
* @param {IReduxState} state - The redux state.
* @returns {boolean}
*/
export function isUserInteractionRequiredForUnmute(state: IReduxState) {
return browser.isUserInteractionRequiredForUnmute()
&& window
&& window.self !== window.top
&& !state['features/base/user-interaction'].interacted;
}
/**
* Sets the GUM pending state for the passed track operation (mute/unmute) and media type.
* NOTE: We need this only for web.
*
* @param {IGUMPendingState} status - The new GUM pending status.
* @param {MediaType} mediaType - The media type related to the operation (audio or video).
* @param {boolean} muted - True if the operation is mute and false for unmute.
* @param {Function} dispatch - The dispatch method.
* @returns {void}
*/
export function _setGUMPendingState(
status: IGUMPendingState,
mediaType: MediaType,
muted: boolean,
dispatch?: IStore['dispatch']) {
if (!muted && dispatch && typeof APP !== 'undefined') {
dispatch(gumPending([ mediaType ], status));
}
}
/**
* Mutes or unmutes a specific {@code JitsiLocalTrack}. If the muted state of the specified {@code track} is already in
* accord with the specified {@code muted} value, then does nothing.
*
* @param {JitsiLocalTrack} track - The {@code JitsiLocalTrack} to mute or unmute.
* @param {boolean} muted - If the specified {@code track} is to be muted, then {@code true}; otherwise, {@code false}.
* @param {Object} state - The redux state.
* @param {Function} dispatch - The dispatch method.
* @returns {Promise}
*/
export function setTrackMuted(track: any, muted: boolean, state: IReduxState | IMediaState,
dispatch?: IStore['dispatch']) {
muted = Boolean(muted); // eslint-disable-line no-param-reassign
// Ignore the check for desktop track muted operation. When the screenshare is terminated by clicking on the
// browser's 'Stop sharing' button, the local stream is stopped before the inactive stream handler is fired.
// We still need to proceed here and remove the track from the peerconnection.
if (track.isMuted() === muted && track.getVideoType() !== VIDEO_TYPE.DESKTOP) {
return Promise.resolve();
}
const f = muted ? 'mute' : 'unmute';
const mediaType = track.getType();
_setGUMPendingState(IGUMPendingState.PENDING_UNMUTE, mediaType, muted, dispatch);
return track[f]().then((result: any) => {
_setGUMPendingState(IGUMPendingState.NONE, mediaType, muted, dispatch);
return result;
})
.catch((error: Error) => {
_setGUMPendingState(IGUMPendingState.NONE, mediaType, muted, dispatch);
// Track might be already disposed so ignore such an error.
if (error.name !== JitsiTrackErrors.TRACK_IS_DISPOSED) {
logger.error(`set track ${f} failed`, error);
return Promise.reject(error);
}
});
}
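// Illustrative sketch (not part of this module), mirroring how the tracks
// middleware calls this helper; `localTrack`, `state`, `dispatch` and
// `trackMuteUnmuteFailed` are assumed bindings.
//
//     setTrackMuted(localTrack.jitsiTrack, true, state, dispatch)
//         .catch(() => dispatch(trackMuteUnmuteFailed(localTrack, true)));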
/**
* Logs the current track state for a participant.
*
* @param {ITrack[]} tracksState - The tracks from redux.
* @param {string} participantId - The ID of the participant.
* @param {string} reason - The reason for the track change.
* @returns {void}
*/
export function logTracksForParticipant(tracksState: ITrack[], participantId: string, reason?: string) {
if (!participantId) {
return;
}
const tracks = getTrackByParticipantId(tracksState, participantId);
const logStringPrefix = `Track state for participant ${participantId} changed`;
const trackStateStrings = tracks.map(t => `{type: ${t.mediaType}, videoType: ${t.videoType}, muted: ${
t.muted}, isReceivingData: ${t.isReceivingData}, jitsiTrack: ${t.jitsiTrack?.toString()}}`);
const tracksLogMsg = trackStateStrings.length > 0 ? `\n${trackStateStrings.join('\n')}` : ' No tracks available!';
logger.debug(`${logStringPrefix}${reason ? `(reason: ${reason})` : ''}:${tracksLogMsg}`);
}
/**
* Gets the default camera facing mode.
*
* @param {Object} state - The redux state.
* @returns {string} - The camera facing mode.
*/
export function getCameraFacingMode(state: IReduxState) {
return state['features/base/config'].cameraFacingMode ?? CAMERA_FACING_MODE.USER;
}


@@ -0,0 +1,44 @@
import { IStore } from '../../app/types';
import JitsiMeetJS from '../lib-jitsi-meet';
import { getCameraFacingMode } from './functions.any';
import { ITrackOptions } from './types';
export * from './functions.any';
/**
* Create local tracks of specific types.
*
* @param {Object} options - The options with which the local tracks are to be
* created.
* @param {string|null} [options.cameraDeviceId] - Camera device id or
* {@code undefined} to use app's settings.
* @param {string[]} options.devices - Required track types such as 'audio'
* and/or 'video'.
* @param {string|null} [options.micDeviceId] - Microphone device id or
* {@code undefined} to use app's settings.
* @param {number|undefined} [options.timeout] - A timeout for JitsiMeetJS.createLocalTracks used to create the tracks.
* @param {IStore} store - The redux store in the context of which the function
* is to execute and from which state such as {@code config} is to be retrieved.
* @returns {Promise<JitsiLocalTrack[]>}
*/
export function createLocalTracksF(options: ITrackOptions = {}, store: IStore) {
const { cameraDeviceId, micDeviceId } = options;
const state = store.getState();
const {
resolution
} = state['features/base/config'];
const constraints = options.constraints ?? state['features/base/config'].constraints;
return JitsiMeetJS.createLocalTracks(
{
cameraDeviceId,
constraints,
// Copy array to avoid mutations inside library.
devices: options.devices?.slice(0),
facingMode: options.facingMode || getCameraFacingMode(state),
micDeviceId,
resolution
});
}
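// Illustrative usage sketch (not part of this module): requesting an audio
// and a video track; `store` is assumed to be the app's redux store.
//
//     createLocalTracksF({ devices: [ 'audio', 'video' ] }, store)
//         .then(tracks => console.log(tracks.map(t => t.getType())));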


@@ -0,0 +1,216 @@
import { IStore } from '../../app/types';
import { IStateful } from '../app/types';
import { isMobileBrowser } from '../environment/utils';
import JitsiMeetJS, { JitsiTrackErrors, browser } from '../lib-jitsi-meet';
import { gumPending, setAudioMuted } from '../media/actions';
import { MEDIA_TYPE } from '../media/constants';
import { getStartWithAudioMuted } from '../media/functions';
import { IGUMPendingState } from '../media/types';
import { toState } from '../redux/functions';
import {
getUserSelectedCameraDeviceId,
getUserSelectedMicDeviceId
} from '../settings/functions.web';
import { getJitsiMeetGlobalNSConnectionTimes } from '../util/helpers';
import { getCameraFacingMode } from './functions.any';
import loadEffects from './loadEffects';
import logger from './logger';
import { ITrackOptions } from './types';
export * from './functions.any';
/**
* Create local tracks of specific types.
*
* @param {Object} options - The options with which the local tracks are to be
* created.
* @param {string|null} [options.cameraDeviceId] - Camera device id or
* {@code undefined} to use app's settings.
* @param {string[]} options.devices - Required track types such as 'audio'
* and/or 'video'.
* @param {string|null} [options.micDeviceId] - Microphone device id or
* {@code undefined} to use app's settings.
* @param {number|undefined} [options.timeout] - A timeout for JitsiMeetJS.createLocalTracks used to create the tracks.
* @param {IStore} store - The redux store in the context of which the function
* is to execute and from which state such as {@code config} is to be retrieved.
* @param {boolean} recordTimeMetrics - If true time metrics will be recorded.
* @returns {Promise<JitsiLocalTrack[]>}
*/
export function createLocalTracksF(options: ITrackOptions = {}, store?: IStore, recordTimeMetrics = false) {
let { cameraDeviceId, micDeviceId } = options;
const {
desktopSharingSourceDevice,
desktopSharingSources,
timeout
} = options;
// TODO The app's settings should go in the redux store and then the
// reliance on the global variable APP will go away.
store = store || APP.store; // eslint-disable-line no-param-reassign
const state = store.getState();
if (typeof cameraDeviceId === 'undefined' || cameraDeviceId === null) {
cameraDeviceId = getUserSelectedCameraDeviceId(state);
}
if (typeof micDeviceId === 'undefined' || micDeviceId === null) {
micDeviceId = getUserSelectedMicDeviceId(state);
}
const {
desktopSharingFrameRate,
resolution
} = state['features/base/config'];
const constraints = options.constraints ?? state['features/base/config'].constraints;
return (
loadEffects(store).then((effectsArray: Object[]) => {
if (recordTimeMetrics) {
getJitsiMeetGlobalNSConnectionTimes()['trackEffects.loaded'] = window.performance.now();
}
// Filter any undefined values returned by Promise.resolve().
const effects = effectsArray.filter(effect => Boolean(effect));
return JitsiMeetJS.createLocalTracks(
{
cameraDeviceId,
constraints,
desktopSharingFrameRate,
desktopSharingSourceDevice,
desktopSharingSources,
// Copy array to avoid mutations inside library.
devices: options.devices?.slice(0),
effects,
facingMode: options.facingMode || getCameraFacingMode(state),
micDeviceId,
resolution,
timeout
})
.catch((err: Error) => {
logger.error('Failed to create local tracks', options.devices, err);
return Promise.reject(err);
});
}));
}
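// Illustrative usage sketch (not part of this module): creating a video
// track for an explicit camera with a 60s timeout; effects (virtual
// background, noise suppression) are loaded and applied internally.
// `cameraDeviceId` is an assumed binding.
//
//     createLocalTracksF({
//         cameraDeviceId,
//         devices: [ 'video' ],
//         timeout: 60000
//     })
//     .then(([ videoTrack ]) => videoTrack)
//     .catch(error => logger.error('gUM failed', error));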
/**
* Returns an object containing a promise which resolves with the created tracks and the errors resulting from that
* process.
*
* @returns {{ tryCreateLocalTracks: Promise<JitsiLocalTrack[]>, errors: Object }}
*
* @todo Refactor to not use APP.
*/
export function createPrejoinTracks() {
const errors: any = {};
const initialDevices = [ MEDIA_TYPE.AUDIO ];
const requestedAudio = true;
let requestedVideo = false;
const { startAudioOnly, startWithVideoMuted } = APP.store.getState()['features/base/settings'];
const startWithAudioMuted = getStartWithAudioMuted(APP.store.getState());
// On Electron there is no permission prompt for granting permissions. That's why we don't need to
// spend much time displaying the overlay screen. If GUM is not resolved within 15 seconds it will
// probably never resolve.
const timeout = browser.isElectron() ? 15000 : 60000;
// Always get a handle on the audio input device so that we have statistics even if the user joins the
// conference muted. The previous implementation would only acquire the handle when the user first unmuted,
// which would result in statistics (such as "No audio input" or "Are you trying to speak?") being available
// only after that point.
if (startWithAudioMuted) {
APP.store.dispatch(setAudioMuted(true));
}
if (!startWithVideoMuted && !startAudioOnly) {
initialDevices.push(MEDIA_TYPE.VIDEO);
requestedVideo = true;
}
let tryCreateLocalTracks: any = Promise.resolve([]);
const { dispatch } = APP.store;
dispatch(gumPending(initialDevices, IGUMPendingState.PENDING_UNMUTE));
if (requestedAudio || requestedVideo) {
tryCreateLocalTracks = createLocalTracksF({
devices: initialDevices,
timeout
}, APP.store)
.catch(async (err: Error) => {
if (err.name === JitsiTrackErrors.TIMEOUT && !browser.isElectron()) {
errors.audioAndVideoError = err;
return [];
}
// Retry with separate gUM calls.
const gUMPromises: any = [];
const tracks: any = [];
if (requestedAudio) {
gUMPromises.push(createLocalTracksF({
devices: [ MEDIA_TYPE.AUDIO ],
timeout
}));
}
if (requestedVideo) {
gUMPromises.push(createLocalTracksF({
devices: [ MEDIA_TYPE.VIDEO ],
timeout
}));
}
const results = await Promise.allSettled(gUMPromises);
let errorMsg;
results.forEach((result, idx) => {
if (result.status === 'fulfilled') {
tracks.push(result.value[0]);
} else {
errorMsg = result.reason;
const isAudio = idx === 0;
logger.error(`${isAudio ? 'Audio' : 'Video'} track creation failed with error ${errorMsg}`);
if (isAudio) {
errors.audioOnlyError = errorMsg;
} else {
errors.videoOnlyError = errorMsg;
}
}
});
if (errors.audioOnlyError && errors.videoOnlyError) {
errors.audioAndVideoError = errorMsg;
}
return tracks;
})
.finally(() => {
dispatch(gumPending(initialDevices, IGUMPendingState.NONE));
});
}
return {
tryCreateLocalTracks,
errors
};
}
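// Illustrative sketch (not part of this module): consuming the result on the
// prejoin screen. `errors` is populated while the promise settles, so it
// should only be inspected after `tryCreateLocalTracks` resolves.
//
//     const { tryCreateLocalTracks, errors } = createPrejoinTracks();
//
//     tryCreateLocalTracks.then(tracks => {
//         if (errors.audioAndVideoError) {
//             // Neither device could be acquired; show a device-trouble UI.
//         }
//         // Otherwise render previews for `tracks`.
//     });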
/**
* Determines whether toggle camera should be enabled or not.
*
* @param {Function|Object} stateful - The redux store or {@code getState} function.
* @returns {boolean} - Whether toggle camera should be enabled.
*/
export function isToggleCameraEnabled(stateful: IStateful) {
const state = toState(stateful);
const { videoInput } = state['features/base/devices'].availableDevices;
return isMobileBrowser() && Number(videoInput?.length) > 1;
}


@@ -0,0 +1,9 @@
/**
* Loads the enabled stream effects.
*
* @param {Object} _store - The Redux store.
* @returns {Promise} - A Promise which resolves with an array of the loaded effects.
*/
export default function loadEffects(_store: Object): Promise<Array<any>> {
return Promise.resolve([]);
}


@@ -0,0 +1,41 @@
import { IStore } from '../../app/types';
import { NoiseSuppressionEffect } from '../../stream-effects/noise-suppression/NoiseSuppressionEffect';
import { createVirtualBackgroundEffect } from '../../stream-effects/virtual-background';
import logger from './logger';
/**
* Loads the enabled stream effects.
*
* @param {Object} store - The Redux store.
* @returns {Promise} - A Promise which resolves with an array of the created effects.
*/
export default function loadEffects(store: IStore): Promise<any> {
const start = window.performance.now();
const state = store.getState();
const virtualBackground = state['features/virtual-background'];
const noiseSuppression = state['features/noise-suppression'];
const { noiseSuppression: nsOptions } = state['features/base/config'];
const backgroundPromise = virtualBackground.backgroundEffectEnabled
? createVirtualBackgroundEffect(virtualBackground)
.catch((error: Error) => {
logger.error('Failed to obtain the background effect instance with error: ', error);
return Promise.resolve();
})
: Promise.resolve();
const noiseSuppressionPromise = noiseSuppression?.enabled
? Promise.resolve(new NoiseSuppressionEffect(nsOptions))
: Promise.resolve();
return Promise.all([ backgroundPromise, noiseSuppressionPromise ]).then(effectsArray => {
const end = window.performance.now();
logger.debug(`(TIME) loadEffects() start=${start}, end=${end}, time=${end - start}`);
return effectsArray;
});
}
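// Illustrative sketch (not part of this module), mirroring how
// createLocalTracksF() consumes this helper: disabled or failed effects
// resolve to `undefined` and must be filtered out. `store` is an assumed
// binding.
//
//     loadEffects(store).then(effectsArray => {
//         const effects = effectsArray.filter(Boolean);
//         // Pass `effects` on to JitsiMeetJS.createLocalTracks().
//     });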


@@ -0,0 +1,3 @@
import { getLogger } from '../logging/functions';
export default getLogger('features/base/tracks');


@@ -0,0 +1,210 @@
import { batch } from 'react-redux';
import { IStore } from '../../app/types';
import { _RESET_BREAKOUT_ROOMS } from '../../breakout-rooms/actionTypes';
import { getCurrentConference } from '../conference/functions';
import {
SET_AUDIO_MUTED,
SET_CAMERA_FACING_MODE,
SET_SCREENSHARE_MUTED,
SET_VIDEO_MUTED,
TOGGLE_CAMERA_FACING_MODE
} from '../media/actionTypes';
import { gumPending, toggleCameraFacingMode } from '../media/actions';
import {
CAMERA_FACING_MODE,
MEDIA_TYPE,
MediaType
} from '../media/constants';
import { IGUMPendingState } from '../media/types';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import StateListenerRegistry from '../redux/StateListenerRegistry';
import {
TRACK_UPDATED
} from './actionTypes';
import {
createLocalTracksA,
destroyLocalTracks,
trackMuteUnmuteFailed,
trackRemoved
} from './actions';
import {
getLocalTrack,
isUserInteractionRequiredForUnmute,
setTrackMuted
} from './functions';
import './subscriber';
/**
* Middleware that listens to media-related actions (audio, video and
* screenshare mute/unmute, camera facing mode changes) and performs the
* corresponding operations on the local tracks.
*
* @param {Store} store - The redux store.
* @returns {Function}
*/
MiddlewareRegistry.register(store => next => action => {
switch (action.type) {
case SET_AUDIO_MUTED:
if (!action.muted
&& isUserInteractionRequiredForUnmute(store.getState())) {
return;
}
_setMuted(store, action, MEDIA_TYPE.AUDIO);
break;
case SET_CAMERA_FACING_MODE: {
// XXX The camera facing mode of a MediaStreamTrack can be specified
// only at initialization time and then it can only be toggled. So in
// order to set the camera facing mode, one may destroy the track and
// then initialize a new instance with the new camera facing mode. But
// that is inefficient on mobile at least so the following relies on the
// fact that there are 2 camera facing modes and merely toggles between
// them to (hopefully) get the camera in the specified state.
const localTrack = _getLocalTrack(store, MEDIA_TYPE.VIDEO);
let jitsiTrack;
if (localTrack
&& (jitsiTrack = localTrack.jitsiTrack)
&& jitsiTrack.getCameraFacingMode()
!== action.cameraFacingMode) {
store.dispatch(toggleCameraFacingMode());
}
break;
}
case SET_SCREENSHARE_MUTED:
_setMuted(store, action, MEDIA_TYPE.SCREENSHARE);
break;
case SET_VIDEO_MUTED:
if (!action.muted
&& isUserInteractionRequiredForUnmute(store.getState())) {
return;
}
_setMuted(store, action, MEDIA_TYPE.VIDEO);
break;
case TOGGLE_CAMERA_FACING_MODE: {
const localTrack = _getLocalTrack(store, MEDIA_TYPE.VIDEO);
let jitsiTrack;
if (localTrack && (jitsiTrack = localTrack.jitsiTrack)) {
// XXX MediaStreamTrack._switchCamera is a custom function
// implemented in react-native-webrtc for video which switches
// between the cameras via a native WebRTC library implementation
// without making any changes to the track.
jitsiTrack._switchCamera();
// Don't mirror the video of the back/environment-facing camera.
const mirror
= jitsiTrack.getCameraFacingMode() === CAMERA_FACING_MODE.USER;
store.dispatch({
type: TRACK_UPDATED,
track: {
jitsiTrack,
mirror
}
});
}
break;
}
}
return next(action);
});
/**
* Set up a state change listener to perform maintenance tasks when the
* conference is left or has failed; all tracks are then removed from the store.
*/
StateListenerRegistry.register(
state => getCurrentConference(state),
(conference, { dispatch, getState }, prevConference) => {
const { authRequired, error } = getState()['features/base/conference'];
// The conference keeps flipping while we are authenticating; skip clearing the tracks during that process.
if (prevConference && !conference && !authRequired && !error) {
// Clear all tracks.
const remoteTracks = getState()['features/base/tracks'].filter(t => !t.local);
batch(() => {
dispatch(destroyLocalTracks());
for (const track of remoteTracks) {
dispatch(trackRemoved(track.jitsiTrack));
}
dispatch({ type: _RESET_BREAKOUT_ROOMS });
});
}
});
/**
* Gets the local track associated with a specific {@code MEDIA_TYPE} in a
* specific redux store.
*
* @param {Store} store - The redux store from which the local track associated
* with the specified {@code mediaType} is to be retrieved.
* @param {MEDIA_TYPE} mediaType - The {@code MEDIA_TYPE} of the local track to
* be retrieved from the specified {@code store}.
* @param {boolean} [includePending] - Indicates whether a local track is to be
* returned if it is still pending. A local track is pending if
* {@code getUserMedia} is still executing to create it and, consequently, its
* {@code jitsiTrack} property is {@code undefined}. By default a pending local
* track is not returned.
* @private
* @returns {Track} The local {@code Track} associated with the specified
* {@code mediaType} in the specified {@code store}.
*/
function _getLocalTrack(
{ getState }: { getState: IStore['getState']; },
mediaType: MediaType,
includePending = false) {
return (
getLocalTrack(
getState()['features/base/tracks'],
mediaType,
includePending));
}
/**
* Mutes or unmutes a local track with a specific media type.
*
* @param {Store} store - The redux store in which the specified action is
* dispatched.
* @param {Action} action - The redux action dispatched in the specified store.
* @param {MEDIA_TYPE} mediaType - The {@link MEDIA_TYPE} of the local track
* which is being muted or unmuted.
* @private
* @returns {void}
*/
function _setMuted(store: IStore, { ensureTrack, muted }: {
ensureTrack: boolean; muted: boolean; }, mediaType: MediaType) {
const { dispatch, getState } = store;
const localTrack = _getLocalTrack(store, mediaType, /* includePending */ true);
const state = getState();
if (localTrack) {
// The `jitsiTrack` property will have a value only for a localTrack for which `getUserMedia` has already
// completed. If there's no `jitsiTrack`, then the `muted` state will be applied once the `jitsiTrack` is
// created.
const { jitsiTrack } = localTrack;
if (jitsiTrack) {
setTrackMuted(jitsiTrack, muted, state, dispatch)
.catch(() => dispatch(trackMuteUnmuteFailed(localTrack, muted)));
}
} else if (!muted && ensureTrack) {
// TODO(saghul): reconcile these 2 types.
const createMediaType = mediaType === MEDIA_TYPE.SCREENSHARE ? 'desktop' : mediaType;
typeof APP !== 'undefined' && dispatch(gumPending([ mediaType ], IGUMPendingState.PENDING_UNMUTE));
dispatch(createLocalTracksA({ devices: [ createMediaType ] })).then(() => {
typeof APP !== 'undefined' && dispatch(gumPending([ mediaType ], IGUMPendingState.NONE));
});
}
}


@@ -0,0 +1,38 @@
import {
MEDIA_TYPE,
VIDEO_TYPE
} from '../media/constants';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import {
TRACK_UPDATED
} from './actionTypes';
import {
toggleScreensharing
} from './actions.native';
import './middleware.any';
/**
* Middleware that, on mobile, listens for updates to the local desktop video
* track and stops screen sharing when that track gets muted.
*
* @param {Store} store - The redux store.
* @returns {Function}
*/
MiddlewareRegistry.register(store => next => action => {
switch (action.type) {
case TRACK_UPDATED: {
const { jitsiTrack, local } = action.track;
if (local && jitsiTrack.isMuted()
&& jitsiTrack.type === MEDIA_TYPE.VIDEO && jitsiTrack.videoType === VIDEO_TYPE.DESKTOP) {
store.dispatch(toggleScreensharing(false));
}
break;
}
}
return next(action);
});


@@ -0,0 +1,209 @@
import { AnyAction } from 'redux';
import { IStore } from '../../app/types';
import { hideNotification } from '../../notifications/actions';
import { isPrejoinPageVisible } from '../../prejoin/functions';
import { getAvailableDevices } from '../devices/actions.web';
import { setScreenshareMuted } from '../media/actions';
import {
MEDIA_TYPE,
VIDEO_TYPE
} from '../media/constants';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import {
TRACK_ADDED,
TRACK_MUTE_UNMUTE_FAILED,
TRACK_NO_DATA_FROM_SOURCE,
TRACK_REMOVED,
TRACK_STOPPED,
TRACK_UPDATED
} from './actionTypes';
import {
showNoDataFromSourceVideoError,
toggleScreensharing,
trackNoDataFromSourceNotificationInfoChanged
} from './actions.web';
import {
getTrackByJitsiTrack, logTracksForParticipant
} from './functions.web';
import { ITrack } from './types';
import './middleware.any';
/**
* Web-specific middleware for track-related actions: refreshes the device
* list when a local track is added, manages the no-data-from-source
* notifications and keeps the legacy APP.UI/APP.conference state in sync
* with track changes.
*
* @param {Store} store - The redux store.
* @returns {Function}
*/
MiddlewareRegistry.register(store => next => action => {
switch (action.type) {
case TRACK_ADDED: {
const { local } = action.track;
// The devices list needs to be refreshed when no initial video permissions
// were granted and a local video track is added by unmuting the video.
if (local) {
store.dispatch(getAvailableDevices());
break;
}
const result = next(action);
const participantId = action.track?.participantId;
if (participantId) {
logTracksForParticipant(store.getState()['features/base/tracks'], participantId, 'Track added');
}
return result;
}
case TRACK_NO_DATA_FROM_SOURCE: {
const result = next(action);
_handleNoDataFromSourceErrors(store, action);
return result;
}
case TRACK_REMOVED: {
_removeNoDataFromSourceNotification(store, action.track);
const result = next(action);
const participantId = action.track?.jitsiTrack?.getParticipantId();
if (participantId && !action.track?.jitsiTrack?.isLocal()) {
logTracksForParticipant(store.getState()['features/base/tracks'], participantId, 'Track removed');
}
return result;
}
case TRACK_MUTE_UNMUTE_FAILED: {
const { jitsiTrack } = action.track;
const muted = action.wasMuted;
const isVideoTrack = jitsiTrack.getType() !== MEDIA_TYPE.AUDIO;
if (isVideoTrack && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
store.dispatch(setScreenshareMuted(!muted));
} else if (isVideoTrack) {
APP.conference.setVideoMuteStatus();
} else {
APP.conference.updateAudioIconEnabled();
}
break;
}
case TRACK_STOPPED: {
const { jitsiTrack } = action.track;
if (jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
store.dispatch(toggleScreensharing(false));
}
break;
}
case TRACK_UPDATED: {
// TODO Remove the following calls to APP.UI once components interested
// in track mute changes are moved into React and/or redux.
const result = next(action);
const state = store.getState();
if (isPrejoinPageVisible(state)) {
return result;
}
const { jitsiTrack } = action.track;
const participantID = jitsiTrack.getParticipantId();
const isVideoTrack = jitsiTrack.type !== MEDIA_TYPE.AUDIO;
const local = jitsiTrack.isLocal();
if (isVideoTrack) {
if (local && !(jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP)) {
APP.conference.setVideoMuteStatus();
} else if (!local) {
APP.UI.setVideoMuted(participantID);
}
} else if (local) {
APP.conference.updateAudioIconEnabled();
}
if (typeof action.track?.muted !== 'undefined' && participantID && !local) {
logTracksForParticipant(store.getState()['features/base/tracks'], participantID, 'Track updated');
}
return result;
}
}
return next(action);
});
/**
* Handles no data from source errors.
*
* @param {Store} store - The redux store in which the specified action is
* dispatched.
* @param {Action} action - The redux action dispatched in the specified store.
* @private
* @returns {void}
*/
function _handleNoDataFromSourceErrors(store: IStore, action: AnyAction) {
const { getState, dispatch } = store;
const track = getTrackByJitsiTrack(getState()['features/base/tracks'], action.track.jitsiTrack);
if (!track?.local) {
return;
}
const { jitsiTrack } = track;
if (track.mediaType === MEDIA_TYPE.AUDIO && track.isReceivingData) {
_removeNoDataFromSourceNotification(store, action.track);
}
if (track.mediaType === MEDIA_TYPE.VIDEO) {
const { noDataFromSourceNotificationInfo = {} } = track;
if (track.isReceivingData) {
if (noDataFromSourceNotificationInfo.timeout) {
clearTimeout(noDataFromSourceNotificationInfo.timeout);
dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, undefined));
}
// try to remove the notification if there is one.
_removeNoDataFromSourceNotification(store, action.track);
} else {
if (noDataFromSourceNotificationInfo.timeout) {
return;
}
const timeout = setTimeout(() => dispatch(showNoDataFromSourceVideoError(jitsiTrack)), 5000);
dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, { timeout }));
}
}
}
/**
* Removes the no data from source notification associated with the JitsiTrack if displayed.
*
* @param {Store} store - The redux store.
* @param {Track} track - The track whose no-data-from-source notification is to be removed.
* @returns {void}
*/
function _removeNoDataFromSourceNotification({ getState, dispatch }: IStore, track: ITrack) {
const t = getTrackByJitsiTrack(getState()['features/base/tracks'], track.jitsiTrack);
const { jitsiTrack, noDataFromSourceNotificationInfo = {} } = t || {};
if (noDataFromSourceNotificationInfo?.uid) {
dispatch(hideNotification(noDataFromSourceNotificationInfo.uid));
dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, undefined));
}
}


@@ -0,0 +1,140 @@
import { AnyAction } from 'redux';
import { PARTICIPANT_ID_CHANGED } from '../participants/actionTypes';
import ReducerRegistry from '../redux/ReducerRegistry';
import { set } from '../redux/functions';
import {
SET_NO_SRC_DATA_NOTIFICATION_UID,
TRACK_ADDED,
TRACK_CREATE_CANCELED,
TRACK_CREATE_ERROR,
TRACK_NO_DATA_FROM_SOURCE,
TRACK_REMOVED,
TRACK_UPDATED,
TRACK_WILL_CREATE
} from './actionTypes';
import { ITrack } from './types';
/**
* Reducer function for a single track.
*
* @param {Track|undefined} state - Track to be modified.
* @param {Object} action - Action object.
* @param {string} action.type - Type of action.
* @param {string} action.name - Name of last media event.
* @param {string} action.newValue - New participant ID value (in this
* particular case).
* @param {string} action.oldValue - Old participant ID value (in this
* particular case).
* @param {Track} action.track - Information about track to be changed.
* @param {Participant} action.participant - Information about participant.
* @returns {Track|undefined}
*/
function track(state: ITrack, action: AnyAction) {
switch (action.type) {
case PARTICIPANT_ID_CHANGED:
if (state.participantId === action.oldValue) {
return {
...state,
participantId: action.newValue
};
}
break;
case TRACK_UPDATED: {
const t = action.track;
if (state.jitsiTrack === t.jitsiTrack) {
// Make sure that there's an actual update in order to reduce the
// risk of unnecessary React Component renders.
for (const p in t) {
// @ts-ignore
if (state[p] !== t[p]) {
// There's an actual update.
return {
...state,
...t
};
}
}
}
break;
}
case TRACK_NO_DATA_FROM_SOURCE: {
const t = action.track;
if (state.jitsiTrack === t.jitsiTrack) {
const isReceivingData = t.jitsiTrack.isReceivingData();
if (state.isReceivingData !== isReceivingData) {
return {
...state,
isReceivingData
};
}
}
break;
}
}
return state;
}
export type ITracksState = ITrack[];
/**
* Listen for actions that mutate (e.g. add, remove) local and remote tracks.
*/
ReducerRegistry.register<ITracksState>('features/base/tracks', (state = [], action): ITracksState => {
switch (action.type) {
case PARTICIPANT_ID_CHANGED:
case TRACK_NO_DATA_FROM_SOURCE:
case TRACK_UPDATED:
return state.map((t: ITrack) => track(t, action));
case TRACK_ADDED: {
let withoutTrackStub = state;
if (action.track.local) {
withoutTrackStub
= state.filter(
(t: ITrack) => !t.local || t.mediaType !== action.track.mediaType);
}
return [ ...withoutTrackStub, action.track ];
}
case TRACK_CREATE_CANCELED:
case TRACK_CREATE_ERROR: {
return state.filter((t: ITrack) => !t.local || t.mediaType !== action.trackType);
}
case TRACK_REMOVED:
return state.filter((t: ITrack) => t.jitsiTrack !== action.track.jitsiTrack);
case TRACK_WILL_CREATE:
return [ ...state, action.track ];
default:
return state;
}
});
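// Illustrative sketch (hypothetical action shapes): the TRACK_ADDED branch
// replaces any local stub of the same media type, so a pending entry created
// by TRACK_WILL_CREATE is swapped out once getUserMedia completes.
//
//     state:  [ { local: true, mediaType: 'video', gumProcess } ]
//     action: { type: TRACK_ADDED, track: { local: true, mediaType: 'video', jitsiTrack } }
//     result: [ { local: true, mediaType: 'video', jitsiTrack } ]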
export interface INoSrcDataState {
noSrcDataNotificationUid?: string | number;
}
/**
* Listen for actions that mutate the no-src-data state, like the current notification id.
*/
ReducerRegistry.register<INoSrcDataState>('features/base/no-src-data', (state = {}, action): INoSrcDataState => {
switch (action.type) {
case SET_NO_SRC_DATA_NOTIFICATION_UID:
return set(state, 'noSrcDataNotificationUid', action.uid);
default:
return state;
}
});


@@ -0,0 +1,40 @@
import { isEqual, sortBy } from 'lodash-es';
import { MEDIA_TYPE } from '../media/constants';
import { getScreenshareParticipantIds } from '../participants/functions';
import StateListenerRegistry from '../redux/StateListenerRegistry';
import { isLocalTrackMuted } from './functions';
/**
* Notifies when the list of currently sharing participants changes.
*/
StateListenerRegistry.register(
/* selector */ state => getScreenshareParticipantIds(state),
/* listener */ (participantIDs, store, previousParticipantIDs) => {
if (typeof APP !== 'object') {
return;
}
if (!isEqual(sortBy(participantIDs), sortBy(previousParticipantIDs))) {
APP.API.notifySharingParticipantsChanged(participantIDs);
}
}
);
/**
* Notifies when the local video mute state changes.
*/
StateListenerRegistry.register(
/* selector */ state => isLocalTrackMuted(state['features/base/tracks'], MEDIA_TYPE.VIDEO),
/* listener */ (muted, store, previousMuted) => {
if (typeof APP !== 'object') {
return;
}
if (muted !== previousMuted) {
APP.API.notifyVideoMutedStatusChanged(muted);
}
}
);


@@ -0,0 +1,88 @@
import { MediaType } from '../media/constants';
export interface ITrackOptions {
cameraDeviceId?: string | null;
constraints?: {
video?: {
height?: {
ideal?: number;
max?: number;
min?: number;
};
};
};
desktopSharingSourceDevice?: string;
desktopSharingSources?: Array<DesktopSharingSourceType>;
devices?: string[];
facingMode?: string;
micDeviceId?: string | null;
timeout?: number;
}
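// Illustrative sketch (hypothetical values): a typical options object for
// createLocalTracksF() requesting a capped-resolution camera track.
//
//     const options: ITrackOptions = {
//         cameraDeviceId: 'default',
//         constraints: { video: { height: { ideal: 720, max: 720, min: 240 } } },
//         devices: [ 'video' ],
//         timeout: 60000
//     };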
/**
* Track type.
*
* @typedef {object} Track
* @property {JitsiLocalTrack|JitsiRemoteTrack} jitsiTrack - The associated
* {@code JitsiTrack} instance. Optional for local tracks if those are still
* being created (ie {@code getUserMedia} is still in progress).
* @property {Promise} [gumProcess] - If a local track is still being created,
* it will have no {@code JitsiTrack}, but a {@code gumProcess} set to a
* {@code Promise} with an extra {@code cancel()}.
* @property {boolean} local=false - If the track is local.
* @property {MEDIA_TYPE} mediaType - The media type of the track.
* @property {boolean} mirror=false - The indicator which determines whether the
* display/rendering of the track should be mirrored. It only makes sense in the
* context of video (at least at the time of this writing).
* @property {boolean} muted=false - If the track is muted.
* @property {(string|undefined)} participantId - The ID of the participant whom
* the track belongs to.
* @property {boolean} videoStarted=false - If the video track has already
* started to play.
* @property {(VIDEO_TYPE|undefined)} videoType - The type of video track if
* any.
*/
export interface ITrack {
codec: string;
getOriginalStream: Function;
isReceivingData: boolean;
jitsiTrack: any;
local: boolean;
mediaType: MediaType;
mirror: boolean;
muted: boolean;
noDataFromSourceNotificationInfo?: {
timeout?: number;
uid?: string;
};
participantId: string;
streamingStatus?: string;
videoStarted: boolean;
videoType?: string | null;
}
export interface IToggleScreenSharingOptions {
audioOnly: boolean;
enabled?: boolean;
shareOptions: IShareOptions;
}
export type DesktopSharingSourceType = 'screen' | 'window';
export interface IShareOptions {
desktopSharingSourceDevice?: string;
desktopSharingSources?: Array<DesktopSharingSourceType>;
desktopStream?: any;
}
export interface ICreateInitialTracksOptions {
devices: Array<MediaType>;
timeout?: number;
}
export interface IInitialTracksErrors {
audioAndVideoError?: Error;
audioOnlyError: Error;
screenSharingError: Error;
videoOnlyError: Error;
}