init
2025-09-02 14:49:16 +08:00
commit 38ba663466
2885 changed files with 391107 additions and 0 deletions

View File

@@ -0,0 +1,126 @@
/**
* The type of (redux) action to store the GUM (getUserMedia) pending state for unmute and initial track creation.
*
* {
* type: GUM_PENDING,
* mediaTypes: Array<MediaType>,
* status: IGUMPendingState
* }
*/
export const GUM_PENDING = 'GUM_PENDING';
/**
* The type of (redux) action to adjust the availability of the local audio.
*
* {
* type: SET_AUDIO_AVAILABLE,
available: boolean
* }
*/
export const SET_AUDIO_AVAILABLE = 'SET_AUDIO_AVAILABLE';
/**
* The type of (redux) action to set the muted state of the local audio.
*
* {
* type: SET_AUDIO_MUTED,
* muted: boolean
* }
*/
export const SET_AUDIO_MUTED = 'SET_AUDIO_MUTED';
/**
* The type of (redux) action to enable/disable the audio mute icon.
*
* {
* type: SET_AUDIO_UNMUTE_PERMISSIONS,
* blocked: boolean
* }
*/
export const SET_AUDIO_UNMUTE_PERMISSIONS = 'SET_AUDIO_UNMUTE_PERMISSIONS';
/**
* The type of (redux) action to set the facing mode of the local video camera
* to a specific value.
*
* {
* type: SET_CAMERA_FACING_MODE,
* cameraFacingMode: CAMERA_FACING_MODE
* }
*/
export const SET_CAMERA_FACING_MODE = 'SET_CAMERA_FACING_MODE';
/**
* The type of (redux) action to set the initial GUM promise.
*
* {
* type: SET_INITIAL_GUM_PROMISE,
* promise: Promise
* }
*/
export const SET_INITIAL_GUM_PROMISE = 'SET_INITIAL_GUM_PROMISE';
/**
* The type of (redux) action to set the muted state of the local screenshare.
*
* {
* type: SET_SCREENSHARE_MUTED,
* muted: boolean
* }
*/
export const SET_SCREENSHARE_MUTED = 'SET_SCREENSHARE_MUTED';
/**
* The type of (redux) action to adjust the availability of the local video.
*
* {
* type: SET_VIDEO_AVAILABLE,
* available: boolean
* }
*/
export const SET_VIDEO_AVAILABLE = 'SET_VIDEO_AVAILABLE';
/**
* The type of (redux) action to set the muted state of the local video.
*
* {
* type: SET_VIDEO_MUTED,
* muted: boolean
* }
*/
export const SET_VIDEO_MUTED = 'SET_VIDEO_MUTED';
/**
* The type of (redux) action to store the last video {@link Transform} applied
* to a stream.
*
* {
* type: STORE_VIDEO_TRANSFORM,
* streamId: string,
* transform: Transform
* }
*/
export const STORE_VIDEO_TRANSFORM = 'STORE_VIDEO_TRANSFORM';
/**
* The type of (redux) action to enable/disable the video mute icon.
*
* {
* type: SET_VIDEO_UNMUTE_PERMISSIONS,
* blocked: boolean
* }
*/
export const SET_VIDEO_UNMUTE_PERMISSIONS = 'SET_VIDEO_UNMUTE_PERMISSIONS';
/**
* The type of (redux) action to toggle the local video camera facing mode. In
* contrast to SET_CAMERA_FACING_MODE, allows the toggling to be optimally
* and/or natively implemented without the overhead of separate reads and writes
* of the current/effective camera facing mode.
*
* {
* type: TOGGLE_CAMERA_FACING_MODE
* }
*/
export const TOGGLE_CAMERA_FACING_MODE = 'TOGGLE_CAMERA_FACING_MODE';
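For orientation, a minimal reducer sketch (hypothetical; the feature's actual reducer lives elsewhere and its real state shape differs) showing how these action types are typically consumed:

import { SET_AUDIO_MUTED, SET_VIDEO_MUTED } from './actionTypes';

interface IMediaStateSketch {
    audio: { muted: boolean; };
    video: { muted: number; };
}

const INITIAL_STATE: IMediaStateSketch = {
    audio: { muted: false },

    // video.muted is a bitmask of VIDEO_MUTISM_AUTHORITY flags, not a boolean.
    video: { muted: 0 }
};

function mediaReducerSketch(state = INITIAL_STATE, action: any): IMediaStateSketch {
    switch (action.type) {
    case SET_AUDIO_MUTED:
        return { ...state, audio: { ...state.audio, muted: action.muted } };
    case SET_VIDEO_MUTED:
        return { ...state, video: { ...state.video, muted: action.muted } };
    default:
        return state;
    }
}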

View File

@@ -0,0 +1,291 @@
import { IStore } from '../../app/types';
import { showModeratedNotification } from '../../av-moderation/actions';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../../av-moderation/constants';
import { shouldShowModeratedNotification } from '../../av-moderation/functions';
import { isModerationNotificationDisplayed } from '../../notifications/functions';
import {
GUM_PENDING,
SET_AUDIO_AVAILABLE,
SET_AUDIO_MUTED,
SET_AUDIO_UNMUTE_PERMISSIONS,
SET_CAMERA_FACING_MODE,
SET_INITIAL_GUM_PROMISE,
SET_SCREENSHARE_MUTED,
SET_VIDEO_AVAILABLE,
SET_VIDEO_MUTED,
SET_VIDEO_UNMUTE_PERMISSIONS,
STORE_VIDEO_TRANSFORM,
TOGGLE_CAMERA_FACING_MODE
} from './actionTypes';
import {
MediaType,
SCREENSHARE_MUTISM_AUTHORITY,
VIDEO_MUTISM_AUTHORITY
} from './constants';
import { IGUMPendingState } from './types';
/**
* Action to adjust the availability of the local audio.
*
* @param {boolean} available - True if the local audio is to be marked as
* available or false if the local audio is not available.
* @returns {{
* type: SET_AUDIO_AVAILABLE,
* available: boolean
* }}
*/
export function setAudioAvailable(available: boolean) {
return {
type: SET_AUDIO_AVAILABLE,
available
};
}
/**
* Action to set the muted state of the local audio.
*
* @param {boolean} muted - True if the local audio is to be muted or false if
* the local audio is to be unmuted.
* @param {boolean} ensureTrack - True if we want to ensure that a new track is
* created if missing.
* @returns {{
* type: SET_AUDIO_MUTED,
* ensureTrack: boolean,
* muted: boolean
* }}
*/
export function setAudioMuted(muted: boolean, ensureTrack = false) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
// check for A/V Moderation when trying to unmute
if (!muted && shouldShowModeratedNotification(AVM_MEDIA_TYPE.AUDIO, state)) {
if (!isModerationNotificationDisplayed(AVM_MEDIA_TYPE.AUDIO, state)) {
ensureTrack && dispatch(showModeratedNotification(AVM_MEDIA_TYPE.AUDIO));
}
return;
}
dispatch({
type: SET_AUDIO_MUTED,
ensureTrack,
muted
});
};
}
/**
* Action to disable/enable the audio mute icon.
*
* @param {boolean} blocked - True if the audio mute icon needs to be disabled.
* @param {boolean|undefined} skipNotification - True if we want to skip showing the notification.
* @returns {Function}
*/
export function setAudioUnmutePermissions(blocked: boolean, skipNotification = false) {
return {
type: SET_AUDIO_UNMUTE_PERMISSIONS,
blocked,
skipNotification
};
}
/**
* Action to set the facing mode of the local camera.
*
* @param {CAMERA_FACING_MODE} cameraFacingMode - The camera facing mode to set.
* @returns {{
* type: SET_CAMERA_FACING_MODE,
* cameraFacingMode: CAMERA_FACING_MODE
* }}
*/
export function setCameraFacingMode(cameraFacingMode: string) {
return {
type: SET_CAMERA_FACING_MODE,
cameraFacingMode
};
}
/**
* Sets the initial GUM promise.
*
* @param {Promise<{ errors: any; tracks: Array<any>; }> | null} promise - The promise.
* @returns {{
* type: SET_INITIAL_GUM_PROMISE,
* promise: Promise
* }}
*/
export function setInitialGUMPromise(promise: Promise<{ errors: any; tracks: Array<any>; }> | null = null) {
return {
type: SET_INITIAL_GUM_PROMISE,
promise
};
}
/**
* Action to set the muted state of the local screenshare.
*
* @param {boolean} muted - True if the local screenshare is to be muted or false if it is to be unmuted.
* @param {number} authority - The {@link SCREENSHARE_MUTISM_AUTHORITY} which is muting/unmuting the local screenshare.
* @param {boolean} ensureTrack - True if we want to ensure that a new track is created if missing.
* @returns {Function}
*/
export function setScreenshareMuted(
muted: boolean,
authority: number = SCREENSHARE_MUTISM_AUTHORITY.USER,
ensureTrack = false) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
// check for A/V Moderation when trying to unmute
if (!muted && shouldShowModeratedNotification(AVM_MEDIA_TYPE.DESKTOP, state)) {
if (!isModerationNotificationDisplayed(AVM_MEDIA_TYPE.DESKTOP, state)) {
ensureTrack && dispatch(showModeratedNotification(AVM_MEDIA_TYPE.DESKTOP));
}
return;
}
const oldValue = state['features/base/media'].screenshare.muted;
// eslint-disable-next-line no-bitwise
const newValue = muted ? oldValue | authority : oldValue & ~authority;
dispatch({
type: SET_SCREENSHARE_MUTED,
authority,
ensureTrack,
muted: newValue
});
};
}
/**
* Action to adjust the availability of the local video.
*
* @param {boolean} available - True if the local video is to be marked as
* available or false if the local video is not available.
* @returns {{
* type: SET_VIDEO_AVAILABLE,
* available: boolean
* }}
*/
export function setVideoAvailable(available: boolean) {
return {
type: SET_VIDEO_AVAILABLE,
available
};
}
/**
* Action to set the muted state of the local video.
*
* @param {boolean} muted - True if the local video is to be muted or false if
* the local video is to be unmuted.
* @param {number} authority - The {@link VIDEO_MUTISM_AUTHORITY} which is
* muting/unmuting the local video.
* @param {boolean} ensureTrack - True if we want to ensure that a new track is
* created if missing.
* @returns {Function}
*/
export function setVideoMuted(
muted: boolean | number,
authority: number = VIDEO_MUTISM_AUTHORITY.USER,
ensureTrack = false) {
return (dispatch: IStore['dispatch'], getState: IStore['getState']) => {
const state = getState();
// check for A/V Moderation when trying to unmute
if (!muted && shouldShowModeratedNotification(AVM_MEDIA_TYPE.VIDEO, state)) {
if (!isModerationNotificationDisplayed(AVM_MEDIA_TYPE.VIDEO, state)) {
ensureTrack && dispatch(showModeratedNotification(AVM_MEDIA_TYPE.VIDEO));
}
return;
}
const oldValue = state['features/base/media'].video.muted;
// eslint-disable-next-line no-bitwise
const newValue = muted ? oldValue | authority : oldValue & ~authority;
dispatch({
type: SET_VIDEO_MUTED,
authority,
ensureTrack,
muted: newValue
});
};
}
/**
* Action to disable/enable the video mute icon.
*
* @param {boolean} blocked - True if the video mute icon needs to be disabled.
* @param {boolean|undefined} skipNotification - True if we want to skip showing the notification.
* @returns {Function}
*/
export function setVideoUnmutePermissions(blocked: boolean, skipNotification = false) {
return {
type: SET_VIDEO_UNMUTE_PERMISSIONS,
blocked,
skipNotification
};
}
/**
* Creates an action to store the last video {@link Transform} applied to a
* stream.
*
* @param {string} streamId - The ID of the stream.
* @param {Object} transform - The {@code Transform} to store.
* @returns {{
* type: STORE_VIDEO_TRANSFORM,
* streamId: string,
* transform: Object
* }}
*/
export function storeVideoTransform(streamId: string, transform: Object) {
return {
type: STORE_VIDEO_TRANSFORM,
streamId,
transform
};
}
/**
* Toggles the camera facing mode. Most commonly, for example, mobile devices
* such as phones have a front/user-facing and a back/environment-facing
* cameras. In contrast to setCameraFacingMode, allows the toggling to be
* optimally and/or natively implemented without the overhead of separate reads
* and writes of the current/effective camera facing mode.
*
* @returns {{
* type: TOGGLE_CAMERA_FACING_MODE
* }}
*/
export function toggleCameraFacingMode() {
return {
type: TOGGLE_CAMERA_FACING_MODE
};
}
/**
* Sets the GUM pending status for unmute and initial track creation operations.
*
* @param {Array<MediaType>} mediaTypes - An array with the media types that GUM is called with.
* @param {IGUMPendingState} status - The GUM status.
* @returns {{
* type: GUM_PENDING,
* mediaTypes: Array<MediaType>,
* status: IGUMPendingState
* }}
*/
export function gumPending(mediaTypes: Array<MediaType>, status: IGUMPendingState) {
return {
type: GUM_PENDING,
mediaTypes,
status
};
}
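The muted value dispatched by setVideoMuted and setScreenshareMuted is a bitmask of authorities rather than a boolean, so several independent muters (user, background, etc.) can each hold their own bit. A worked sketch of that arithmetic; the flag values below are assumptions for illustration, the real ones live in ./constants:

/* eslint-disable no-bitwise */
const USER = 1 << 0; // assumed flag value
const BACKGROUND = 1 << 1; // assumed flag value

let muted = 0; // unmuted by everyone

muted |= USER; // the user mutes -> 0b01
muted |= BACKGROUND; // the app is backgrounded -> 0b11
muted &= ~USER; // the user unmutes -> 0b10

// Still truthy: the video stays muted until BACKGROUND clears its bit too.
console.log(Boolean(muted)); // true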

View File

@@ -0,0 +1,126 @@
import { Component } from 'react';
import logger from '../logger';
/**
* Describes audio element interface used in the base/media feature for audio
* playback.
*/
export type AudioElement = {
currentTime: number;
pause: () => void;
play: () => void;
setSinkId?: (id: string) => Promise<any>;
stop: () => void;
};
/**
* {@code AbstractAudio} Component's property types.
*/
export interface IProps {
loop?: boolean;
/**
* A callback which will be called with {@code AbstractAudio} instance once
* the audio element is loaded.
*/
setRef?: (ref?: any) => void;
/**
* The URL of a media resource to use in the element.
*
* NOTE: On react-native, sound files are imported through 'require' and then
* passed as the 'src' parameter, which means their type will be 'any'.
*
* @type {Object | string}
*/
src: any | string;
stream?: Object;
}
/**
* The React {@link Component} which is similar to Web's
* {@code HTMLAudioElement}.
*/
export default class AbstractAudio extends Component<IProps> {
/**
* The {@link AudioElement} instance which implements the audio playback
* functionality.
*/
_audioElementImpl?: AudioElement | null;
/**
* Initializes a new {@code AbstractAudio} instance.
*
* @param {IProps} props - The read-only properties with which the new
* instance is to be initialized.
*/
constructor(props: IProps) {
super(props);
// Bind event handlers so they are only bound once per instance.
this.setAudioElementImpl = this.setAudioElementImpl.bind(this);
}
/**
* Attempts to pause the playback of the media.
*
* @public
* @returns {void}
*/
pause() {
this._audioElementImpl?.pause();
}
/**
* Attempts to begin the playback of the media.
*
* @public
* @returns {void}
*/
play() {
this._audioElementImpl?.play();
}
/**
* Set the (reference to the) {@link AudioElement} object which implements
* the audio playback functionality.
*
* @param {AudioElement} element - The {@link AudioElement} instance
* which implements the audio playback functionality.
* @protected
* @returns {void}
*/
setAudioElementImpl(element?: AudioElement | null | any) {
this._audioElementImpl = element;
const { setRef } = this.props;
typeof setRef === 'function' && setRef(element ? this : null);
}
/**
* Sets the sink ID (output device ID) on the underlying audio element.
* NOTE: Currently implemented only on Web.
*
* @param {string} sinkId - The sink ID (output device ID).
* @returns {void}
*/
setSinkId(sinkId: string) {
this._audioElementImpl
&& typeof this._audioElementImpl.setSinkId === 'function'
&& this._audioElementImpl.setSinkId(sinkId)
.catch(error => logger.error('Error setting sink', error));
}
/**
* Attempts to stop the playback of the media.
*
* @public
* @returns {void}
*/
stop() {
this._audioElementImpl?.stop();
}
}
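To make the contract concrete, here is a hypothetical minimal subclass: a platform implementation obtains some AudioElement and hands it to setAudioElementImpl(), after which the inherited pause()/play()/stop() calls are forwarded to it.

import AbstractAudio, { AudioElement } from './AbstractAudio';

class StubAudio extends AbstractAudio {
    override componentDidMount() {
        // A stub element; a real platform implementation would wrap an
        // actual audio backend here.
        const element: AudioElement = {
            currentTime: 0,
            pause: () => { /* platform-specific pause */ },
            play: () => { /* platform-specific play */ },
            stop: () => { /* platform-specific stop */ }
        };

        this.setAudioElementImpl(element);
    }

    override componentWillUnmount() {
        this.setAudioElementImpl(null);
    }

    override render() {
        return null;
    }
}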

View File

@@ -0,0 +1,150 @@
import React, { Component } from 'react';
import { IStore } from '../../../app/types';
import { trackVideoStarted } from '../../tracks/actions';
import { shouldRenderVideoTrack } from '../functions';
import { Video } from './index';
/**
* The type of the React {@code Component} props of {@link AbstractVideoTrack}.
*/
export interface IProps {
/**
* The Redux dispatch function.
*/
dispatch: IStore['dispatch'];
/**
* Callback to invoke when the {@link Video} of {@code AbstractVideoTrack}
* is clicked/pressed.
*/
onPress?: Function;
/**
* The Redux representation of the participant's video track.
*/
videoTrack?: any;
/**
* Whether or not video should be rendered after knowing video playback has
* started.
*/
waitForVideoStarted?: boolean;
/**
* The z-order of the Video of AbstractVideoTrack in the stacking space of
* all Videos. For more details, refer to the zOrder property of the Video
* class for React Native.
*/
zOrder?: number;
/**
* Indicates whether zooming (pinch to zoom and/or drag) is enabled.
*/
zoomEnabled?: boolean;
}
/**
* Implements a React {@link Component} that renders a video element for a
* specific video track.
*
* @abstract
*/
export default class AbstractVideoTrack<P extends IProps> extends Component<P> {
/**
* Initializes a new AbstractVideoTrack instance.
*
* @param {Object} props - The read-only properties with which the new
* instance is to be initialized.
*/
constructor(props: P) {
super(props);
// Bind event handlers so they are only bound once for every instance.
this._onVideoPlaying = this._onVideoPlaying.bind(this);
}
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement}
*/
override render() {
const videoTrack = _falsy2null(this.props.videoTrack);
let render;
if (this.props.waitForVideoStarted && videoTrack) {
// That's the complex case: we have to wait for onPlaying before we
// render videoTrack. The complexity comes from the fact that
// onPlaying will come after we render videoTrack.
if (shouldRenderVideoTrack(videoTrack, true)) {
// It appears that onPlaying has come for videoTrack already.
// Most probably, another render has already passed through the
// else clause below already.
render = true;
} else if (shouldRenderVideoTrack(videoTrack, false)
&& !videoTrack.videoStarted) {
// XXX Unfortunately, onPlaying has not come for videoTrack yet.
// We have to render in order to give onPlaying a chance to
// come.
render = true;
}
} else {
// That's the simple case: we don't have to wait for onPlaying
// before we render videoTrack
render = shouldRenderVideoTrack(videoTrack, false);
}
const stream = render && videoTrack
? videoTrack.jitsiTrack.getOriginalStream() : null;
// Actual zoom is currently only enabled if the stream is a desktop
// stream.
const zoomEnabled
= this.props.zoomEnabled
&& stream
&& videoTrack
&& videoTrack.videoType === 'desktop';
return (
<Video
mirror = { videoTrack?.mirror }
onPlaying = { this._onVideoPlaying }
// @ts-ignore
onPress = { this.props.onPress }
stream = { stream }
zOrder = { this.props.zOrder }
zoomEnabled = { zoomEnabled } />
);
}
/**
* Handler for case when video starts to play.
*
* @private
* @returns {void}
*/
_onVideoPlaying() {
const { videoTrack } = this.props;
if (videoTrack && !videoTrack.videoStarted) {
this.props.dispatch(trackVideoStarted(videoTrack.jitsiTrack));
}
}
}
/**
* Returns null if a specific value is falsy; otherwise, returns the specified
* value.
*
* @param {*} value - The value to return if it is not falsy.
* @returns {*} If the specified value is falsy, null; otherwise, the specified
* value.
*/
function _falsy2null(value: any) {
return value || null;
}

View File

@@ -0,0 +1,2 @@
export { default as Audio } from './native/Audio';
export { default as Video } from './native/Video';

View File

@@ -0,0 +1,2 @@
export { default as Audio } from './web/Audio';
export { default as Video } from './web/Video';
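These two parallel index files rely on the bundler's platform-specific module resolution (Metro picking the .native variant on mobile while the web bundler resolves the web one is an assumption about this project's tooling), so feature code can import platform-neutral names:

// Resolves to native/Audio + native/Video on mobile and to web/Audio +
// web/Video on web; the relative path is illustrative.
import { Audio, Video } from '../media/components';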

View File

@@ -0,0 +1,114 @@
import Sound from 'react-native-sound';
import logger from '../../logger';
import AbstractAudio, { IProps } from '../AbstractAudio';
/**
* The React Native/mobile {@link Component} which is similar to Web's
* {@code HTMLAudioElement} and wraps around react-native-sound's
* {@link Sound}.
*/
export default class Audio extends AbstractAudio {
/**
* Reference to the 'react-native-sound' {@link Sound} instance.
*/
_sound: Sound | undefined | null;
/**
* A callback passed to the 'react-native-sound' {@link Sound} instance,
* called when loading the sound has finished.
*
* @param {Object} error - The error object passed by
* the 'react-native-sound' library.
* @returns {void}
* @private
*/
_soundLoadedCallback(error: Error) {
if (error) {
logger.error('Failed to load sound', error);
} else {
this.setAudioElementImpl(this._sound);
}
}
/**
* Implements React's {@link Component#componentDidUpdate()}.
*
* @inheritdoc
*/
override async componentDidUpdate(prevProps: IProps): Promise<void> {
// The source has changed: release the old sound and load the new one.
if (prevProps.src !== this.props.src) {
await this.componentWillUnmount();
await this.componentDidMount();
}
}
/**
* Will load the sound, after the component did mount.
*
* @returns {void}
*/
override async componentDidMount() {
this._sound
= this.props.src
? new Sound(
this.props.src, undefined,
this._soundLoadedCallback.bind(this))
: null;
}
/**
* Will dispose sound resources (if any) when component is about to unmount.
*
* @returns {void}
*/
override async componentWillUnmount() {
if (this._sound) {
this._sound.release();
this._sound = null;
this.setAudioElementImpl(null);
}
}
/**
* Attempts to begin the playback of the media.
*
* @inheritdoc
* @override
*/
play() {
if (this._sound) {
this._sound.setNumberOfLoops(this.props.loop ? -1 : 0);
this._sound.play(success => {
if (!success) {
logger.warn(`Failed to play ${this.props.src}`);
}
});
}
}
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {null}
*/
override render() {
// TODO react-native-webrtc's RTCView doesn't do anything with the audio
// MediaStream specified to it so it's easier at the time of this
// writing to not render anything.
return null;
}
/**
* Stops the sound if it's currently playing.
*
* @returns {void}
*/
stop() {
if (this._sound) {
this._sound.stop();
}
}
}
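A usage sketch for this component, with an invented asset path: setRef receives the component instance once the underlying Sound has loaded, at which point play() works.

import React from 'react';

import Audio from './native/Audio';

// Plays a bundled ring tone in a loop as soon as it has loaded. The
// require() path is hypothetical.
const RingTone = () => (
    <Audio
        loop = { true }
        setRef = { audio => audio && audio.play() }
        src = { require('../../sounds/ring.mp3') } />
);

export default RingTone;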

View File

@@ -0,0 +1,134 @@
import React, { Component } from 'react';
import { GestureResponderEvent } from 'react-native';
import { MediaStream, RTCView } from 'react-native-webrtc';
import Pressable from '../../../react/components/native/Pressable';
import VideoTransform from './VideoTransform';
import styles from './styles';
/**
* The type of the React {@code Component} props of {@link Video}.
*/
interface IProps {
mirror: boolean;
onPlaying: Function;
/**
* Callback to invoke when the {@code Video} is clicked/pressed.
*/
onPress?: (event: GestureResponderEvent) => void;
stream: MediaStream;
/**
* Similarly to the CSS property z-index, specifies the z-order of this
* Video in the stacking space of all Videos. When Videos overlap,
* zOrder determines which one covers the other. A Video with a larger
* zOrder generally covers a Video with a lower one.
*
* Non-overlapping Videos may safely share a z-order (because one does
* not have to cover the other).
*
* The support for zOrder is platform-dependent and/or
* implementation-specific. Thus, specifying a value for zOrder is to be
* thought of as giving a hint rather than as imposing a requirement.
* For example, video renderers such as Video are commonly implemented
* using OpenGL and OpenGL views may have different numbers of layers in
* their stacking space. Android has three: a layer below the window
* (aka default), a layer below the window again but above the previous
* layer (aka media overlay), and above the window. Consequently, it is
* advisable to limit the number of utilized layers in the stacking
* space to the minimum sufficient for the desired display. For example,
* a video call application usually needs a maximum of two zOrder
* values: 0 for the remote video(s) which appear in the background, and
* 1 for the local video(s) which appear above the remote video(s).
*/
zOrder?: number;
/**
* Indicates whether zooming (pinch to zoom and/or drag) is enabled.
*/
zoomEnabled: boolean;
}
/**
* The React Native {@link Component} which is similar to Web's
* {@code HTMLVideoElement} and wraps around react-native-webrtc's
* {@link RTCView}.
*/
export default class Video extends Component<IProps> {
/**
* React Component method that executes once component is mounted.
*
* @inheritdoc
*/
override componentDidMount() {
// RTCView currently does not support media events, so just fire
// onPlaying callback when <RTCView> is rendered.
const { onPlaying } = this.props;
onPlaying?.();
}
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement|null}
*/
override render() {
const { onPress, stream, zoomEnabled } = this.props;
if (stream) {
// RTCView
const style = styles.video;
const objectFit
= zoomEnabled
? 'contain'
: 'cover';
const rtcView
= (
<RTCView
mirror = { this.props.mirror }
objectFit = { objectFit }
streamURL = { stream.toURL() }
style = { style }
zOrder = { this.props.zOrder } />
);
// VideoTransform implements "pinch to zoom". As part of "pinch to
// zoom", it implements onPress, of course.
if (zoomEnabled) {
return (
<VideoTransform
enabled = { zoomEnabled }
onPress = { onPress }
streamId = { stream.id }
style = { style }>
{ rtcView }
</VideoTransform>
);
}
// XXX Unfortunately, VideoTransform implements a custom press
// detection which has been observed to be very picky about the
// precision of the press unlike the builtin/default/standard press
// detection which is forgiving to imperceptible movements while
// pressing. It's not acceptable to be so picky, especially when
// "pinch to zoom" is not enabled.
return (
<Pressable onPress = { onPress }>
{ rtcView }
</Pressable>
);
}
// RTCView has peculiarities which may or may not be platform specific.
// For example, it doesn't accept an empty streamURL. If the execution
// reached here, it means that we explicitly chose to not initialize an
// RTCView as a way of dealing with its idiosyncrasies.
return null;
}
}
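A sketch of the two-layer call layout that the zOrder documentation above recommends: remote video at zOrder 0 in the background and the local thumbnail at zOrder 1 above it. The component wiring and stream sources are assumed.

import React from 'react';
import { MediaStream } from 'react-native-webrtc';

import Video from './Video';

interface ICallLayoutProps {
    localStream: MediaStream;
    remoteStream: MediaStream;
}

const CallLayout = ({ localStream, remoteStream }: ICallLayoutProps) => (
    <>
        <Video
            mirror = { false }
            onPlaying = { () => { /* remote video started */ } }
            stream = { remoteStream }
            zOrder = { 0 }
            zoomEnabled = { true } />
        <Video
            mirror = { true }
            onPlaying = { () => { /* local preview started */ } }
            stream = { localStream }
            zOrder = { 1 }
            zoomEnabled = { false } />
    </>
);

export default CallLayout;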

View File

@@ -0,0 +1,30 @@
import React from 'react';
import { View } from 'react-native';
import { connect } from 'react-redux';
import AbstractVideoTrack, { IProps } from '../AbstractVideoTrack';
import styles from './styles';
/**
* Component that renders a video element for a specified video track.
*
* @augments AbstractVideoTrack
*/
class VideoTrack extends AbstractVideoTrack<IProps> {
/**
* Renders the video element for the associated video track.
*
* @override
* @returns {ReactElement}
*/
override render() {
return (
<View style = { styles.video } >
{ super.render() }
</View>
);
}
}
export default connect()(VideoTrack);

View File

@@ -0,0 +1,730 @@
import React, { Component } from 'react';
import { PanResponder, PixelRatio, View } from 'react-native';
import { SafeAreaView } from 'react-native-safe-area-context';
import { connect } from 'react-redux';
import { IReduxState, IStore } from '../../../../app/types';
import { ASPECT_RATIO_WIDE } from '../../../responsive-ui/constants';
import { storeVideoTransform } from '../../actions';
import styles from './styles';
/**
* The default/initial transform (= no transform).
*/
const DEFAULT_TRANSFORM = {
scale: 1,
translateX: 0,
translateY: 0
};
/**
* The minimum scale (magnification) multiplier. 1 is equal to objectFit
* = 'contain'.
*/
const MIN_SCALE = 1;
/**
* The max distance from the edge of the screen where we let the user move the
* view to. This is large enough now to let the user drag the view to a position
* where no other displayed components cover it (such as filmstrip). If a
* ViewPort (hint) support is added to the LargeVideo component then this
* constant will not be necessary anymore.
*/
const MAX_OFFSET = 100;
/**
* The max allowed scale (magnification) multiplier.
*/
const MAX_SCALE = 5;
/**
* The threshold to allow the fingers move before we consider a gesture a
* move instead of a touch.
*/
const MOVE_THRESHOLD_DISMISSES_TOUCH = 5;
/**
* A tap timeout after which we consider a gesture a long tap and will not
* trigger onPress (unless long tap gesture support is added in the future).
*/
const TAP_TIMEOUT_MS = 400;
/**
* Type of a transform object this component is capable of handling.
*/
type Transform = {
scale: number;
translateX: number;
translateY: number;
};
interface IProps {
/**
* The current aspect ratio of the screen.
*/
_aspectRatio: Symbol;
/**
* Action to dispatch when the component is unmounted.
*/
_onUnmount: Function;
/**
* The stored transforms retrieved from Redux to be initially applied
* to different streams.
*/
_transforms: Object;
/**
* The children components of this view.
*/
children: Object;
/**
* Transformation is only enabled when this flag is true.
*/
enabled: boolean;
/**
* Function to invoke when a press event is detected.
*/
onPress?: Function;
/**
* The id of the current stream that is displayed.
*/
streamId: string;
/**
* Style of the top level transformable view.
*/
style: Object;
}
interface IState {
/**
* The current (non-transformed) layout of the View.
*/
layout: any;
/**
* The current transform that is applied.
*/
transform: Transform;
}
/**
* A container that captures gestures such as pinch & zoom, touch, or move.
*/
class VideoTransform extends Component<IProps, IState> {
/**
* The gesture handler object.
*/
gestureHandlers: any;
/**
* The initial distance of the fingers on pinch start.
*/
initialDistance?: number;
/**
* The initial position of the finger on touch start.
*/
initialPosition: {
x: number;
y: number;
};
/**
* The actual move threshold that is calculated for this device/screen.
*/
moveThreshold: number;
/**
* Time of the last tap.
*/
lastTap: number;
/**
* Constructor of the component.
*
* @inheritdoc
*/
constructor(props: IProps) {
super(props);
this.state = {
layout: null,
transform:
this._getSavedTransform(props.streamId) || DEFAULT_TRANSFORM
};
this._didMove = this._didMove.bind(this);
this._getTransformStyle = this._getTransformStyle.bind(this);
this._onGesture = this._onGesture.bind(this);
this._onLayout = this._onLayout.bind(this);
this._onMoveShouldSetPanResponder
= this._onMoveShouldSetPanResponder.bind(this);
this._onPanResponderGrant = this._onPanResponderGrant.bind(this);
this._onPanResponderMove = this._onPanResponderMove.bind(this);
this._onPanResponderRelease = this._onPanResponderRelease.bind(this);
this._onStartShouldSetPanResponder
= this._onStartShouldSetPanResponder.bind(this);
// The move threshold should be adaptive to the pixel ratio of the
// screen to avoid making it too sensitive or difficult to handle on
// different pixel ratio screens.
this.moveThreshold
= PixelRatio.get() * MOVE_THRESHOLD_DISMISSES_TOUCH;
this.gestureHandlers = PanResponder.create({
onPanResponderGrant: this._onPanResponderGrant,
onPanResponderMove: this._onPanResponderMove,
onPanResponderRelease: this._onPanResponderRelease,
onPanResponderTerminationRequest: () => true,
onMoveShouldSetPanResponder: this._onMoveShouldSetPanResponder,
onShouldBlockNativeResponder: () => false,
onStartShouldSetPanResponder: this._onStartShouldSetPanResponder
});
}
/**
* Implements React Component's componentDidUpdate.
*
* @inheritdoc
*/
override componentDidUpdate(prevProps: IProps, prevState: IState) {
if (prevProps.streamId !== this.props.streamId) {
this._storeTransform(prevProps.streamId, prevState.transform);
this._restoreTransform(this.props.streamId);
}
}
/**
* Implements React Component's componentWillUnmount.
*
* @inheritdoc
*/
override componentWillUnmount() {
this._storeTransform(this.props.streamId, this.state.transform);
}
/**
* Renders the empty component that captures the gestures.
*
* @inheritdoc
*/
override render() {
const { _aspectRatio, children, style } = this.props;
const isAspectRatioWide = _aspectRatio === ASPECT_RATIO_WIDE;
const videoTransformedViewContainerStyles
= isAspectRatioWide ? styles.videoTransformedViewContainerWide : styles.videoTransformedViewContainer;
return (
<View
onLayout = { this._onLayout }
pointerEvents = 'box-only'
style = { [
videoTransformedViewContainerStyles,
style
] }
{ ...this.gestureHandlers.panHandlers }>
<SafeAreaView
edges = { [ 'bottom', 'left' ] }
style = { [
styles.videoTranformedView,
this._getTransformStyle()
] }>
{ children }
</SafeAreaView>
</View>
);
}
/**
* Calculates the new transformation to be applied by merging the current
* transform values with the newly received incremental values.
*
* @param {Transform} transform - The new transform object.
* @private
* @returns {Transform}
*/
_calculateTransformIncrement(transform: Transform) {
let {
scale,
translateX,
translateY
} = this.state.transform;
const {
scale: newScale,
translateX: newTranslateX,
translateY: newTranslateY
} = transform;
// Note: We don't limit MIN_SCALE here yet, as we need to detect a scale
// down gesture even if the scale is already at MIN_SCALE to let the
// user return the screen to center with that gesture. Scale is limited
// to MIN_SCALE right before it gets applied.
scale = Math.min(scale * (newScale || 1), MAX_SCALE);
translateX = translateX + ((newTranslateX || 0) / scale);
translateY = translateY + ((newTranslateY || 0) / scale);
return {
scale,
translateX,
translateY
};
}
/**
* Determines if there was large enough movement to be handled.
*
* @param {Object} gestureState - The gesture state.
* @returns {boolean}
*/
_didMove({ dx, dy }: any) {
return Math.abs(dx) > this.moveThreshold
|| Math.abs(dy) > this.moveThreshold;
}
/**
* Returns the stored transform a stream should display with initially.
*
* @param {string} streamId - The id of the stream to match with a stored
* transform.
* @private
* @returns {Object | null}
*/
_getSavedTransform(streamId: string) {
const { enabled, _transforms } = this.props;
// @ts-ignore
return (enabled && _transforms[streamId]) || null;
}
/**
* Calculates the touch distance on a pinch event.
*
* @param {Object} evt - The touch event.
* @private
* @returns {number}
*/
_getTouchDistance({ nativeEvent: { touches } }: any) {
const dx = Math.abs(touches[0].pageX - touches[1].pageX);
const dy = Math.abs(touches[0].pageY - touches[1].pageY);
return Math.sqrt(Math.pow(dx, 2) + Math.pow(dy, 2));
}
/**
* Calculates the position of the touch event.
*
* @param {Object} evt - The touch event.
* @private
* @returns {Object}
*/
_getTouchPosition({ nativeEvent: { touches } }: any) {
return {
x: touches[0].pageX,
y: touches[0].pageY
};
}
/**
* Generates a transform style object to be used on the component.
*
* @returns {{string: Array<{string: number}>}}
*/
_getTransformStyle() {
const { enabled } = this.props;
if (!enabled) {
return null;
}
const {
scale,
translateX,
translateY
} = this.state.transform;
return {
transform: [
{ scale },
{ translateX },
{ translateY }
]
};
}
/**
* Limits the move matrix and then applies the transformation to the
* component (updates state).
*
* Note: Points A (top-left) and D (bottom-right) are opposite points of
* the View rectangle.
*
* @param {Transform} transform - The transformation object.
* @private
* @returns {void}
*/
_limitAndApplyTransformation(transform: Transform) {
const { _aspectRatio } = this.props;
const { layout } = this.state;
const isAspectRatioWide = _aspectRatio === ASPECT_RATIO_WIDE;
if (layout) {
const { scale } = this.state.transform;
const { scale: newScaleUnlimited } = transform;
let {
translateX: newTranslateX,
translateY: newTranslateY
} = transform;
// Scale is only limited to MIN_SCALE here to detect downscale
// gesture later.
const newScale = Math.max(newScaleUnlimited, MIN_SCALE);
// The A and D points of the original View (before transform).
const originalLayout = {
a: {
x: layout.x,
y: layout.y
},
d: {
x: layout.x + layout.width,
y: layout.y + layout.height
}
};
// The center point (midpoint) of the transformed View.
const transformedCenterPoint = {
x: ((layout.x + layout.width) / 2) + (newTranslateX * newScale),
y: ((layout.y + layout.height) / 2) + (newTranslateY * newScale)
};
// The size of the transformed View.
const transformedSize = {
height: layout.height * newScale,
width: layout.width * newScale
};
// The A and D points of the transformed View.
const transformedLayout = {
a: {
x: transformedCenterPoint.x - (transformedSize.width / 2),
y: transformedCenterPoint.y - (transformedSize.height / 2)
},
d: {
x: transformedCenterPoint.x + (transformedSize.width / 2),
y: transformedCenterPoint.y + (transformedSize.height / 2)
}
};
let _MAX_OFFSET = isAspectRatioWide ? 0 : MAX_OFFSET;
if (newScaleUnlimited < scale) {
// This is a negative scale event so we dynamically reduce the
// MAX_OFFSET to get the screen back to the center on
// downscaling.
_MAX_OFFSET = Math.min(MAX_OFFSET, MAX_OFFSET * (newScale - 1));
}
// Correct move matrix if it goes out of the view
// too much (_MAX_OFFSET).
newTranslateX
-= Math.max(
transformedLayout.a.x - originalLayout.a.x - _MAX_OFFSET,
0);
newTranslateX
+= Math.max(
originalLayout.d.x - transformedLayout.d.x - _MAX_OFFSET,
0);
newTranslateY
-= Math.max(
transformedLayout.a.y - originalLayout.a.y - _MAX_OFFSET,
0);
newTranslateY
+= Math.max(
originalLayout.d.y - transformedLayout.d.y - _MAX_OFFSET,
0);
this.setState({
transform: {
scale: newScale,
translateX: Math.round(newTranslateX),
translateY: Math.round(newTranslateY)
}
});
}
}
/**
* Handles gestures and converts them to transforms.
*
* Currently supported gestures:
* - scale (pinch & zoom-type scale),
* - move,
* - press.
*
* Note: This component supports onPress solely to overcome the problem of
* not being able to register gestures via the PanResponder due to the fact
* that the entire Conference component was a single touch responder
* component in the past (see base/react/.../Container with an onPress
* event) - and stock touch responder components seem to have exclusive
* priority in handling touches in React.
*
* @param {string} type - The type of the gesture.
* @param {?Object | number} value - The value of the gesture, if any.
* @returns {void}
*/
_onGesture(type: string, value?: any) {
let transform;
switch (type) {
case 'move':
transform = {
...DEFAULT_TRANSFORM,
translateX: value.x,
translateY: value.y
};
break;
case 'scale':
transform = {
...DEFAULT_TRANSFORM,
scale: value
};
break;
case 'press': {
const { onPress } = this.props;
typeof onPress === 'function' && onPress();
break;
}
}
if (transform) {
this._limitAndApplyTransformation(
this._calculateTransformIncrement(transform));
}
this.lastTap = 0;
}
/**
* Callback for the onLayout of the component.
*
* @param {Object} event - The native props of the onLayout event.
* @private
* @returns {void}
*/
_onLayout({ nativeEvent: { layout: { x, y, width, height } } }: any) {
this.setState({
layout: {
x,
y,
width,
height
}
});
}
/**
* Function to decide whether the responder should respond to a move event.
*
* @param {Object} evt - The event.
* @param {Object} gestureState - Gesture state.
* @private
* @returns {boolean}
*/
_onMoveShouldSetPanResponder(evt: Object, gestureState: any) {
return this.props.enabled
&& (this._didMove(gestureState)
|| gestureState.numberActiveTouches === 2);
}
/**
* Calculates the initial touch distance.
*
* @param {Object} evt - Touch event.
* @param {Object} gestureState - Gesture state.
* @private
* @returns {void}
*/
_onPanResponderGrant(evt: Object, { numberActiveTouches }: any) {
if (numberActiveTouches === 1) {
this.initialPosition = this._getTouchPosition(evt);
this.lastTap = Date.now();
} else if (numberActiveTouches === 2) {
this.initialDistance = this._getTouchDistance(evt);
}
}
/**
* Handles the PanResponder move (touch move) event.
*
* @param {Object} evt - Touch event.
* @param {Object} gestureState - Gesture state.
* @private
* @returns {void}
*/
_onPanResponderMove(evt: Object, gestureState: any) {
if (gestureState.numberActiveTouches === 2) {
// this is a zoom event
if (
this.initialDistance === undefined
|| isNaN(this.initialDistance)
) {
// there is no initial distance because the user started
// with only one finger. We calculate it now.
this.initialDistance = this._getTouchDistance(evt);
} else {
const distance = this._getTouchDistance(evt);
const scale = distance / (this.initialDistance || 1);
this.initialDistance = distance;
this._onGesture('scale', scale);
}
} else if (gestureState.numberActiveTouches === 1
&& (this.initialDistance === undefined
|| isNaN(this.initialDistance))
&& this._didMove(gestureState)) {
// this is a move event
const position = this._getTouchPosition(evt);
const move = {
x: position.x - this.initialPosition.x,
y: position.y - this.initialPosition.y
};
this.initialPosition = position;
this._onGesture('move', move);
}
}
/**
* Handles the PanResponder gesture end event.
*
* @private
* @returns {void}
*/
_onPanResponderRelease() {
if (this.lastTap && Date.now() - this.lastTap < TAP_TIMEOUT_MS) {
this._onGesture('press');
}
delete this.initialDistance;
this.initialPosition = {
x: 0,
y: 0
};
}
/**
* Function to decide whether the responder should respond to a start
* (touch) event.
*
* @private
* @returns {boolean}
*/
_onStartShouldSetPanResponder() {
return typeof this.props.onPress === 'function';
}
/**
* Restores the last applied transform when the component is mounted, or
* a new stream is about to be rendered.
*
* @param {string} streamId - The stream id to restore transform for.
* @private
* @returns {void}
*/
_restoreTransform(streamId: string) {
const savedTransform = this._getSavedTransform(streamId);
if (savedTransform) {
this.setState({
transform: savedTransform
});
}
}
/**
* Stores/saves a transform when the component is destroyed, or a
* new stream is about to be rendered.
*
* @param {string} streamId - The stream id associated with the transform.
* @param {Object} transform - The {@link Transform} to save.
* @private
* @returns {void}
*/
_storeTransform(streamId: string, transform: Transform) {
const { _onUnmount, enabled } = this.props;
if (enabled) {
_onUnmount(streamId, transform);
}
}
}
/**
* Maps dispatching of some action to React component props.
*
* @param {Function} dispatch - Redux action dispatcher.
* @private
* @returns {{
* _onUnmount: Function
* }}
*/
function _mapDispatchToProps(dispatch: IStore['dispatch']) {
return {
/**
* Dispatches actions to store the last applied transform to a video.
*
* @param {string} streamId - The ID of the stream.
* @param {Transform} transform - The last applied transform.
* @private
* @returns {void}
*/
_onUnmount(streamId: string, transform: Transform) {
dispatch(storeVideoTransform(streamId, transform));
}
};
}
/**
* Maps (parts of) the redux state to the component's props.
*
* @param {Object} state - The redux state.
* @private
* @returns {{
* _transforms: Object
* }}
*/
function _mapStateToProps(state: IReduxState) {
return {
_aspectRatio: state['features/base/responsive-ui'].aspectRatio,
/**
* The stored transforms retrieved from Redux to be initially applied to
* different streams.
*
* @private
* @type {Object}
*/
_transforms: state['features/base/media'].video.transforms
};
}
export default connect(_mapStateToProps, _mapDispatchToProps)(VideoTransform);
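To make the incremental math in _calculateTransformIncrement concrete, here is a standalone re-derivation with invented numbers. The translation delta is divided by the new scale because the translate is applied in the already-scaled coordinate space, so the division keeps an N-pixel finger movement roughly an N-pixel on-screen movement.

type TransformSketch = {
    scale: number;
    translateX: number;
    translateY: number;
};

const MAX_SCALE_SKETCH = 5;

function incrementTransform(current: TransformSketch, delta: Partial<TransformSketch>): TransformSketch {
    const scale = Math.min(current.scale * (delta.scale || 1), MAX_SCALE_SKETCH);

    return {
        scale,
        translateX: current.translateX + ((delta.translateX || 0) / scale),
        translateY: current.translateY + ((delta.translateY || 0) / scale)
    };
}

// A pinch where the finger distance grows from 100px to 120px arrives as a
// 'scale' gesture of 120 / 100 = 1.2, so scale 2 becomes min(2 * 1.2, 5) = 2.4.
console.log(incrementTransform(
    { scale: 2, translateX: 10, translateY: 0 },
    { scale: 120 / 100 }));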

View File

@@ -0,0 +1,35 @@
import { StyleSheet } from 'react-native';
/**
* The styles of the feature base/media.
*/
export default StyleSheet.create({
/**
* Base style of the transformed video view.
*/
videoTranformedView: {
flex: 1
},
/**
* A basic style to avoid rendering a transformed view off the component,
* that can be visible on special occasions, such as during device rotate
* animation, or PiP mode.
*/
videoTransformedViewContainer: {
overflow: 'hidden'
},
videoTransformedViewContainerWide: {
overflow: 'hidden',
paddingRight: '16%'
},
/**
* Make {@code Video} fill its container.
*/
video: {
flex: 1
}
});

View File

@@ -0,0 +1,111 @@
import React from 'react';
import AbstractAudio, { IProps } from '../AbstractAudio';
/**
* The React/Web {@link Component} which is similar to and wraps around
* {@code HTMLAudioElement} in order to facilitate cross-platform source code.
*/
export default class Audio extends AbstractAudio {
/**
* Set to <code>true</code> when the whole file is loaded.
*/
_audioFileLoaded: boolean;
/**
* Reference to the HTML audio element, stored until the file is ready.
*/
_ref?: HTMLAudioElement | null;
/**
* Creates new <code>Audio</code> element instance with given props.
*
* @param {Object} props - The read-only properties with which the new
* instance is to be initialized.
*/
constructor(props: IProps) {
super(props);
// Bind event handlers so they are only bound once for every instance.
this._onCanPlayThrough = this._onCanPlayThrough.bind(this);
this._setRef = this._setRef.bind(this);
}
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement}
*/
override render() {
return (
<audio
loop = { Boolean(this.props.loop) }
onCanPlayThrough = { this._onCanPlayThrough }
preload = 'auto'
ref = { this._setRef }
src = { this.props.src } />
);
}
/**
* Stops the audio HTML element.
*
* @returns {void}
*/
override stop() {
if (this._ref) {
this._ref.pause();
this._ref.currentTime = 0;
}
}
/**
* If audio element reference has been set and the file has been
* loaded then {@link setAudioElementImpl} will be called to eventually add
* the audio to the Redux store.
*
* @private
* @returns {void}
*/
_maybeSetAudioElementImpl() {
if (this._ref && this._audioFileLoaded) {
this.setAudioElementImpl(this._ref);
}
}
/**
* Called when 'canplaythrough' event is triggered on the audio element,
* which means that the whole file has been loaded.
*
* @private
* @returns {void}
*/
_onCanPlayThrough() {
this._audioFileLoaded = true;
this._maybeSetAudioElementImpl();
}
/**
* Sets the reference to the HTML audio element.
*
* @param {HTMLAudioElement} audioElement - The HTML audio element instance.
* @private
* @returns {void}
*/
_setRef(audioElement?: HTMLAudioElement | null) {
this._ref = audioElement;
if (audioElement) {
this._maybeSetAudioElementImpl();
} else {
// AbstractAudioElement is supposed to trigger "removeAudio" only if
// it was previously added, so it's safe to just call it.
this.setAudioElementImpl(null);
// Reset the loaded flag, as the audio element is being removed from
// the DOM tree.
this._audioFileLoaded = false;
}
}
}

View File

@@ -0,0 +1,314 @@
import React, { Component } from 'react';
import { connect } from 'react-redux';
import { createAudioPlayErrorEvent, createAudioPlaySuccessEvent } from '../../../../analytics/AnalyticsEvents';
import { sendAnalytics } from '../../../../analytics/functions';
import { IReduxState } from '../../../../app/types';
import { ITrack } from '../../../tracks/types';
import logger from '../../logger';
/**
* The type of the React {@code Component} props of {@link AudioTrack}.
*/
interface IProps {
/**
* Represents muted property of the underlying audio element.
*/
_muted?: boolean;
/**
* Represents volume property of the underlying audio element.
*/
_volume?: number | boolean;
/**
* The audio track.
*/
audioTrack?: ITrack;
/**
* Used to determine the value of the autoplay attribute of the underlying
* audio element.
*/
autoPlay: boolean;
/**
* The value of the id attribute of the audio element.
*/
id: string;
/**
* The ID of the participant associated with the audio element.
*/
participantId: string;
}
/**
* The React/Web {@link Component} which is similar to and wraps around {@code HTMLAudioElement}.
*/
class AudioTrack extends Component<IProps> {
/**
* Reference to the HTML audio element, stored until the file is ready.
*/
_ref: React.RefObject<HTMLAudioElement>;
/**
* The current timeout ID for play() retries.
*/
_playTimeout: number | undefined;
/**
* Default values for {@code AudioTrack} component's properties.
*
* @static
*/
static defaultProps = {
autoPlay: true,
id: ''
};
/**
* Creates a new <code>AudioTrack</code> instance with the given props.
*
* @param {Object} props - The read-only properties with which the new
* instance is to be initialized.
*/
constructor(props: IProps) {
super(props);
// Bind event handlers so they are only bound once for every instance.
this._errorHandler = this._errorHandler.bind(this);
this._ref = React.createRef();
this._play = this._play.bind(this);
}
/**
* Attaches the audio track to the audio element and plays it.
*
* @inheritdoc
* @returns {void}
*/
override componentDidMount() {
this._attachTrack(this.props.audioTrack);
if (this._ref?.current) {
const audio = this._ref?.current;
const { _muted, _volume } = this.props;
if (typeof _volume === 'number') {
audio.volume = _volume;
}
if (typeof _muted === 'boolean') {
audio.muted = _muted;
}
// @ts-ignore
audio.addEventListener('error', this._errorHandler);
} else { // This should never happen
logger.error(`The react reference is null for AudioTrack ${this.props?.id}`);
}
}
/**
* Remove any existing associations between the current audio track and the
* component's audio element.
*
* @inheritdoc
* @returns {void}
*/
override componentWillUnmount() {
this._detachTrack(this.props.audioTrack);
// @ts-ignore
this._ref?.current?.removeEventListener('error', this._errorHandler);
}
/**
* This component's updating is blackboxed from React to prevent re-rendering of the audio
* element, as we set all the properties manually.
*
* @inheritdoc
* @returns {boolean} - False is always returned to blackbox this component
* from React.
*/
override shouldComponentUpdate(nextProps: IProps) {
const currentJitsiTrack = this.props.audioTrack?.jitsiTrack;
const nextJitsiTrack = nextProps.audioTrack?.jitsiTrack;
if (currentJitsiTrack !== nextJitsiTrack) {
this._detachTrack(this.props.audioTrack);
this._attachTrack(nextProps.audioTrack);
}
if (this._ref?.current) {
const audio = this._ref?.current;
const currentVolume = audio.volume;
const nextVolume = nextProps._volume;
if (typeof nextVolume === 'number' && !isNaN(nextVolume) && currentVolume !== nextVolume) {
if (nextVolume === 0) {
logger.debug(`Setting audio element ${nextProps?.id} volume to 0`);
}
audio.volume = nextVolume;
}
const currentMuted = audio.muted;
const nextMuted = nextProps._muted;
if (typeof nextMuted === 'boolean' && currentMuted !== nextMuted) {
logger.debug(`Setting audio element ${nextProps?.id} muted to ${nextMuted}`);
audio.muted = nextMuted;
}
}
return false;
}
/**
* Implements React's {@link Component#render()}.
*
* @inheritdoc
* @returns {ReactElement}
*/
override render() {
const { autoPlay, id } = this.props;
return (
<audio
autoPlay = { autoPlay }
id = { id }
ref = { this._ref } />
);
}
/**
* Calls into the passed in track to associate the track with the component's audio element.
*
* @param {Object} track - The redux representation of the {@code JitsiLocalTrack}.
* @private
* @returns {void}
*/
_attachTrack(track?: ITrack) {
const { id } = this.props;
if (!track?.jitsiTrack) {
logger.warn(`Attach is called on audio element ${id} without tracks passed!`);
return;
}
if (!this._ref?.current) {
logger.warn(`Attempting to attach track ${track?.jitsiTrack} on AudioTrack ${id} without reference!`);
return;
}
track.jitsiTrack.attach(this._ref.current)
.catch((error: Error) => {
logger.error(
`Attaching the remote track ${track.jitsiTrack} to video with id ${id} has failed with `,
error);
})
.finally(() => {
this._play();
});
}
/**
* Removes the association to the component's audio element from the passed
* in redux representation of jitsi audio track.
*
* @param {Object} track - The redux representation of the {@code JitsiLocalTrack}.
* @private
* @returns {void}
*/
_detachTrack(track?: ITrack) {
if (this._ref?.current && track?.jitsiTrack) {
clearTimeout(this._playTimeout);
this._playTimeout = undefined;
track.jitsiTrack.detach(this._ref.current);
}
}
/**
* Reattaches the audio track to the underlying HTMLAudioElement when an 'error' event is fired.
*
* @param {Error} error - The error event fired on the HTMLAudioElement.
* @returns {void}
*/
_errorHandler(error: Error) {
logger.error(`Error ${error?.message} called on audio track ${this.props.audioTrack?.jitsiTrack}. `
+ 'Attempting to reattach the audio track to the element and execute play on it');
this._detachTrack(this.props.audioTrack);
this._attachTrack(this.props.audioTrack);
}
/**
* Plays the underlying HTMLAudioElement.
*
* @param {number} retries - The number of previously failed retries.
* @returns {void}
*/
_play(retries = 0) {
const { autoPlay, id } = this.props;
if (!this._ref?.current) {
// nothing to play.
logger.warn(`Attempting to call play on AudioTrack ${id} without reference!`);
return;
}
if (autoPlay) {
// Ensure the audio gets play() called on it. This may be necessary in the
// case where the local video container was moved and re-attached, in which
// case the audio may not autoplay.
this._ref.current.play()
.then(() => {
if (retries !== 0) {
// success after some failures
this._playTimeout = undefined;
sendAnalytics(createAudioPlaySuccessEvent(id));
logger.info(`Successfully played audio track! retries: ${retries}`);
}
}, e => {
logger.error(`Failed to play audio track on audio element ${id}! retry: ${retries} ; Error:`, e);
if (retries < 3) {
this._playTimeout = window.setTimeout(() => this._play(retries + 1), 1000);
if (retries === 0) {
// send only 1 error event.
sendAnalytics(createAudioPlayErrorEvent(id));
}
} else {
this._playTimeout = undefined;
}
});
}
}
}
/**
* Maps (parts of) the Redux state to the associated {@code AudioTrack}'s props.
*
* @param {Object} state - The Redux state.
* @param {Object} ownProps - The props passed to the component.
* @private
* @returns {IProps}
*/
function _mapStateToProps(state: IReduxState, ownProps: any) {
const { participantsVolume } = state['features/filmstrip'];
return {
_muted: state['features/base/config'].startSilent,
_volume: participantsVolume[ownProps.participantId]
};
}
export default connect(_mapStateToProps)(AudioTrack);
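A hypothetical mounting sketch: one AudioTrack per remote participant, keyed by participant ID, with _volume/_muted supplied by the connect() mapping above. The track list shape and id format are invented.

import React from 'react';

import AudioTrack from './AudioTrack';

const RemoteAudioBridge = ({ tracks }: { tracks: any[]; }) => (
    <>
        { tracks.map(track => (
            <AudioTrack
                audioTrack = { track }
                id = { `remoteAudio_${track.participantId}` }
                key = { track.participantId }
                participantId = { track.participantId } />
        )) }
    </>
);

export default RemoteAudioBridge;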

View File

@@ -0,0 +1,391 @@
import React, { Component, ReactEventHandler } from 'react';
import { ITrack } from '../../../tracks/types';
import logger from '../../logger';
/**
* The type of the React {@code Component} props of {@link Video}.
*/
interface IProps {
/**
* Used to determine the value of the autoplay attribute of the underlying
* video element.
*/
autoPlay: boolean;
/**
* CSS classes to add to the video element.
*/
className: string;
/**
* A map of the event handlers for the video HTML element.
*/
eventHandlers?: {
/**
* OnAbort event handler.
*/
onAbort?: ReactEventHandler<HTMLVideoElement>;
/**
* OnCanPlay event handler.
*/
onCanPlay?: ReactEventHandler<HTMLVideoElement>;
/**
* OnCanPlayThrough event handler.
*/
onCanPlayThrough?: ReactEventHandler<HTMLVideoElement>;
/**
* OnEmptied event handler.
*/
onEmptied?: ReactEventHandler<HTMLVideoElement>;
/**
* OnEnded event handler.
*/
onEnded?: ReactEventHandler<HTMLVideoElement>;
/**
* OnError event handler.
*/
onError?: ReactEventHandler<HTMLVideoElement>;
/**
* OnLoadStart event handler.
*/
onLoadStart?: ReactEventHandler<HTMLVideoElement>;
/**
* OnLoadedData event handler.
*/
onLoadedData?: ReactEventHandler<HTMLVideoElement>;
/**
* OnLoadedMetadata event handler.
*/
onLoadedMetadata?: ReactEventHandler<HTMLVideoElement>;
/**
* OnPause event handler.
*/
onPause?: ReactEventHandler<HTMLVideoElement>;
/**
* OnPlay event handler.
*/
onPlay?: ReactEventHandler<HTMLVideoElement>;
/**
* OnPlaying event handler.
*/
onPlaying?: ReactEventHandler<HTMLVideoElement>;
/**
* OnRateChange event handler.
*/
onRateChange?: ReactEventHandler<HTMLVideoElement>;
/**
* OnStalled event handler.
*/
onStalled?: ReactEventHandler<HTMLVideoElement>;
/**
* OnSuspend event handler.
*/
onSuspend?: ReactEventHandler<HTMLVideoElement>;
/**
* OnWaiting event handler.
*/
onWaiting?: ReactEventHandler<HTMLVideoElement>;
};
/**
* The value of the id attribute of the video. Used by the torture tests to
* locate video elements.
*/
id: string;
/**
* Used on native.
*/
mirror?: boolean;
/**
* The value of the muted attribute for the underlying video element.
*/
muted?: boolean;
/**
* Used on native.
*/
onPlaying?: Function;
/**
* Used on native.
*/
onPress?: Function;
/**
* Optional callback to invoke once the video starts playing.
*/
onVideoPlaying?: Function;
/**
* Used to determine the value of the playsinline attribute of the underlying
* video element.
*/
playsinline: boolean;
/**
* Used on native.
*/
stream?: any;
/**
* Styles that will be applied to the video element.
*/
style?: Object;
/**
* The JitsiLocalTrack to display.
*/
videoTrack?: Partial<ITrack>;
/**
* Used on native.
*/
zOrder?: number;
/**
* Used on native.
*/
zoomEnabled?: boolean;
}
/**
* Component that renders a video element for a passed in video track.
*
* @augments Component
*/
class Video extends Component<IProps> {
_videoElement?: HTMLVideoElement | null;
_mounted: boolean;
/**
* Default values for {@code Video} component's properties.
*
* @static
*/
static defaultProps = {
className: '',
autoPlay: true,
id: '',
playsinline: true
};
/**
* Initializes a new {@code Video} instance.
*
* @param {Object} props - The read-only properties with which the new
* instance is to be initialized.
*/
constructor(props: IProps) {
super(props);
/**
* The internal reference to the DOM/HTML element intended for
* displaying a video.
*
* @private
* @type {HTMLVideoElement}
*/
this._videoElement = null;
// Bind event handlers so they are only bound once for every instance.
this._onVideoPlaying = this._onVideoPlaying.bind(this);
this._setVideoElement = this._setVideoElement.bind(this);
}
/**
* Invokes the library for rendering the video on initial display. Sets the
* volume level to zero to ensure no sound plays.
*
* @inheritdoc
* @returns {void}
*/
override componentDidMount() {
this._mounted = true;
if (this._videoElement) {
this._videoElement.volume = 0;
this._videoElement.onplaying = this._onVideoPlaying;
}
this._attachTrack(this.props.videoTrack).finally(() => {
if (this._videoElement && this.props.autoPlay) {
// Ensure the video gets play() called on it. This may be necessary in the
// case where the local video container was moved and re-attached, in which
// case video does not autoplay.
this._videoElement.play()
.catch(error => {
// Prevent uncaught "DOMException: The play() request was interrupted by a new load request"
// when video playback takes long to start and it starts after the component was unmounted.
if (this._mounted) {
logger.error(`Error while trying to play video with id ${
this.props.id} and video track ${this.props.videoTrack?.jitsiTrack}: ${error}`);
}
});
}
});
}
/**
* Remove any existing associations between the current video track and the
* component's video element.
*
* @inheritdoc
* @returns {void}
*/
override componentWillUnmount() {
this._mounted = false;
this._detachTrack(this.props.videoTrack);
}
/**
* Updates the video display only if a new track is added. This component's
* updating is blackboxed from React to prevent re-rendering of video
* element, as the lib uses {@code track.attach(videoElement)} instead.
*
* @inheritdoc
* @returns {boolean} - False is always returned to blackbox this component
* from React.
*/
override shouldComponentUpdate(nextProps: IProps) {
const currentJitsiTrack = this.props.videoTrack?.jitsiTrack;
const nextJitsiTrack = nextProps.videoTrack?.jitsiTrack;
if (currentJitsiTrack !== nextJitsiTrack) {
this._detachTrack(this.props.videoTrack);
this._attachTrack(nextProps.videoTrack).catch((_error: Error) => {
// Ignore the error. We are already logging it.
});
// NOTE: We may want to consider calling .play() explicitly in this case if any issues arise in the future.
// For now it seems we are good with the autoplay attribute of the video element.
}
if (this.props.style !== nextProps.style || this.props.className !== nextProps.className) {
return true;
}
return false;
}
/**
* Renders the video element.
*
* @override
* @returns {ReactElement}
*/
override render() {
const {
autoPlay,
className,
id,
muted,
playsinline,
style,
eventHandlers
} = this.props;
return (
<video
autoPlay = { autoPlay }
className = { className }
id = { id }
muted = { muted }
playsInline = { playsinline }
ref = { this._setVideoElement }
style = { style }
{ ...eventHandlers } />
);
}
/**
* Calls into the passed in track to associate the track with the
* component's video element and render video.
*
* @param {Object} videoTrack - The redux representation of the
* {@code JitsiLocalTrack}.
* @private
* @returns {void}
*/
_attachTrack(videoTrack?: Partial<ITrack>) {
const { id } = this.props;
if (!videoTrack?.jitsiTrack) {
logger.warn(`Attach is called on video element ${id} without a track passed!`);
// Returning Promise.resolve just keeps the previous logic.
// TODO: Check if it makes sense to call play on this element or whether we can just return Promise.reject().
return Promise.resolve();
}
return videoTrack.jitsiTrack.attach(this._videoElement)
.catch((error: Error) => {
logger.error(
`Attaching the track ${videoTrack.jitsiTrack} to video with id ${id} has failed with `,
error);
});
}
/**
* Removes the association between the component's video element and the
* passed in redux representation of the jitsi video track, to stop the track
* from rendering.
*
* @param {Object} videoTrack - The redux representation of the
* {@code JitsiLocalTrack}.
* @private
* @returns {void}
*/
_detachTrack(videoTrack?: Partial<ITrack>) {
if (this._videoElement && videoTrack?.jitsiTrack) {
videoTrack.jitsiTrack.detach(this._videoElement);
}
}
/**
* Invokes the onVideoPlaying callback if defined.
*
* @private
* @returns {void}
*/
_onVideoPlaying() {
if (this.props.onVideoPlaying) {
this.props.onVideoPlaying();
}
}
/**
* Sets an instance variable for the component's video element so it can be
* referenced later for attaching and detaching a JitsiLocalTrack.
*
* @param {Object} element - DOM element for the component's video display.
* @private
* @returns {void}
*/
_setVideoElement(element: HTMLVideoElement | null) {
this._videoElement = element;
}
}
export default Video;
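
For reference, a minimal usage sketch of this component outside the app, assuming a stub that mimics the attach/detach surface of a JitsiLocalTrack (the stub, the id and the handler below are illustrative, not part of the codebase):

import React from 'react';
import Video from './Video';

// Hypothetical stand-in for a JitsiLocalTrack: attach() resolves once the
// element is wired to the underlying MediaStream, detach() unwires it.
const stubJitsiTrack = {
    attach: (element: HTMLVideoElement) => Promise.resolve(element),
    detach: (_element: HTMLVideoElement) => undefined
};

const LocalPreview = () => (
    <Video
        id = 'localVideoPreview'
        muted = { true }
        onVideoPlaying = { () => console.log('video started playing') }
        videoTrack = {{ jitsiTrack: stubJitsiTrack }} />
);

export default LocalPreview;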

View File

@@ -0,0 +1,197 @@
import React, { ReactEventHandler } from 'react';
import { connect } from 'react-redux';
import { IReduxState } from '../../../../app/types';
import AbstractVideoTrack, { IProps as AbstractVideoTrackProps } from '../AbstractVideoTrack';
import Video from './Video';
/**
* The type of the React {@code Component} props of {@link VideoTrack}.
*/
interface IProps extends AbstractVideoTrackProps {
/**
* Used to determine the value of the autoplay attribute of the underlying
* video element.
*/
_noAutoPlayVideo: boolean;
/**
* CSS classes to add to the video element.
*/
className: string;
/**
* A map of the event handlers for the video HTML element.
*/
eventHandlers?: {
/**
* OnAbort event handler.
*/
onAbort?: ReactEventHandler<HTMLVideoElement>;
/**
* OnCanPlay event handler.
*/
onCanPlay?: ReactEventHandler<HTMLVideoElement>;
/**
* OnCanPlayThrough event handler.
*/
onCanPlayThrough?: ReactEventHandler<HTMLVideoElement>;
/**
* OnEmptied event handler.
*/
onEmptied?: ReactEventHandler<HTMLVideoElement>;
/**
* OnEnded event handler.
*/
onEnded?: ReactEventHandler<HTMLVideoElement>;
/**
* OnError event handler.
*/
onError?: ReactEventHandler<HTMLVideoElement>;
/**
* OnLoadStart event handler.
*/
onLoadStart?: ReactEventHandler<HTMLVideoElement>;
/**
* OnLoadedData event handler.
*/
onLoadedData?: ReactEventHandler<HTMLVideoElement>;
/**
* OnLoadedMetadata event handler.
*/
onLoadedMetadata?: ReactEventHandler<HTMLVideoElement>;
/**
* OnPause event handler.
*/
onPause?: ReactEventHandler<HTMLVideoElement>;
/**
* OnPlay event handler.
*/
onPlay?: ReactEventHandler<HTMLVideoElement>;
/**
* OnPlaying event handler.
*/
onPlaying?: ReactEventHandler<HTMLVideoElement>;
/**
* OnRateChange event handler.
*/
onRateChange?: ReactEventHandler<HTMLVideoElement>;
/**
* OnStalled event handler.
*/
onStalled?: ReactEventHandler<HTMLVideoElement>;
/**
* OnSuspend event handler.
*/
onSuspend?: ReactEventHandler<HTMLVideoElement>;
/**
* OnWaiting event handler.
*/
onWaiting?: ReactEventHandler<HTMLVideoElement>;
};
/**
* The value of the id attribute of the video. Used by the torture tests
* to locate video elements.
*/
id: string;
/**
* The value of the muted attribute for the underlying video element.
*/
muted?: boolean;
/**
* Styles that will be applied to the video element.
*/
style: Object;
}
/**
* Component that renders a video element for a passed in video track and
* notifies the store when the video has started playing.
*
* @augments AbstractVideoTrack
*/
class VideoTrack extends AbstractVideoTrack<IProps> {
/**
* Default values for {@code VideoTrack} component's properties.
*
* @static
*/
static defaultProps = {
className: '',
id: ''
};
/**
* Renders the video element.
*
* @override
* @returns {ReactElement}
*/
override render() {
const {
_noAutoPlayVideo,
className,
id,
muted,
videoTrack,
style,
eventHandlers
} = this.props;
return (
<Video
autoPlay = { !_noAutoPlayVideo }
className = { className }
eventHandlers = { eventHandlers }
id = { id }
muted = { muted }
onVideoPlaying = { this._onVideoPlaying }
style = { style }
videoTrack = { videoTrack } />
);
}
}
/**
* Maps (parts of) the Redux state to the associated VideoTrack's props.
*
* @param {Object} state - The Redux state.
* @private
* @returns {{
* _noAutoPlayVideo: boolean
* }}
*/
function _mapStateToProps(state: IReduxState) {
const testingConfig = state['features/base/config'].testing;
return {
_noAutoPlayVideo: Boolean(testingConfig?.noAutoPlayVideo)
};
}
export default connect(_mapStateToProps)(VideoTrack);
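
A brief sketch of wiring the eventHandlers map, assuming the component is rendered under a react-redux Provider and that videoTrack is the redux track representation for a participant (the id and handler bodies are illustrative):

import React from 'react';
import VideoTrack from './VideoTrack';

const ThumbnailVideo = ({ videoTrack }: { videoTrack: any; }) => (
    <VideoTrack
        eventHandlers = {{
            onError: () => console.warn('video element reported an error'),
            onStalled: () => console.warn('video element stalled')
        }}
        id = 'remoteVideo_participant1'
        muted = { true }
        style = {{}}
        videoTrack = { videoTrack } />
);

export default ThumbnailVideo;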

View File

@@ -0,0 +1,74 @@
/**
* The set of facing modes for camera.
*
* @enum {string}
*/
export const CAMERA_FACING_MODE: Record<string, string> = {
ENVIRONMENT: 'environment',
USER: 'user'
};
export type MediaType = 'audio' | 'video' | 'screenshare';
/**
* The set of media types.
*
* @enum {string}
*/
export const MEDIA_TYPE: {
AUDIO: MediaType;
SCREENSHARE: MediaType;
VIDEO: MediaType;
} = {
AUDIO: 'audio',
SCREENSHARE: 'screenshare',
VIDEO: 'video'
};
/* eslint-disable no-bitwise */
/**
* The types of authorities which may mute/unmute the local screenshare.
*
* @enum {number}
*/
export const SCREENSHARE_MUTISM_AUTHORITY = {
AUDIO_ONLY: 1 << 0,
USER: 1 << 2
};
/**
* The languages supported for audio files.
*/
export enum AudioSupportedLanguage {
en = 'en',
fr = 'fr',
frCA = 'frCA'
}
/**
* The types of authorities which may mute/unmute the local video.
*
* @enum {number}
*/
export const VIDEO_MUTISM_AUTHORITY = {
AUDIO_ONLY: 1 << 0,
BACKGROUND: 1 << 1,
USER: 1 << 2,
CAR_MODE: 1 << 3
};
/* eslint-enable no-bitwise */
/**
* The types of video tracks.
*
* @enum {string}
*/
export const VIDEO_TYPE: { [key: string]: VideoType; } = {
CAMERA: 'camera',
DESKTOP: 'desktop'
};
export type VideoType = 'camera' | 'desktop';
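
The mutism authorities above are bit flags, so a single numeric muted value can record several independent reasons for a track being muted at once. A small sketch of how such a bitfield is combined and tested, mirroring the bitwise check in functions.ts:

import { VIDEO_MUTISM_AUTHORITY } from './constants';

/* eslint-disable no-bitwise */
let muted = 0;

// Muted both by audio-only mode and by the user.
muted |= VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY; // 0b0001
muted |= VIDEO_MUTISM_AUTHORITY.USER; // 0b0100

console.log(Boolean(muted & VIDEO_MUTISM_AUTHORITY.USER)); // true
console.log(Boolean(muted & VIDEO_MUTISM_AUTHORITY.BACKGROUND)); // false

// Clearing one authority leaves the other reasons intact.
muted &= ~VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY;
console.log(muted === VIDEO_MUTISM_AUTHORITY.USER); // true
/* eslint-enable no-bitwise */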

View File

@@ -0,0 +1,156 @@
import { IStateful } from '../app/types';
import { toState } from '../redux/functions';
import { getPropertyValue } from '../settings/functions';
import { AudioSupportedLanguage, VIDEO_MUTISM_AUTHORITY } from './constants';
// XXX The configurations/preferences/settings startWithAudioMuted and startWithVideoMuted were introduced for
// conferences/meetings. So it makes sense for these to not be considered outside of conferences/meetings
// (e.g. WelcomePage). Later on, though, we introduced a "Video <-> Voice" toggle on the WelcomePage which utilizes
// startAudioOnly outside of conferences/meetings so that particular configuration/preference/setting employs slightly
// exclusive logic.
const START_WITH_AUDIO_VIDEO_MUTED_SOURCES = {
// We have startWithAudioMuted and startWithVideoMuted here:
config: true,
settings: true,
// XXX We've already overwritten base/config with urlParams. However,
// settings are more important than the server-side config.
// Consequently, we need to read from urlParams anyway:
urlParams: true,
// We don't have startWithAudioMuted and startWithVideoMuted here:
jwt: false
};
/**
* Determines whether audio is currently muted.
*
* @param {Function|Object} stateful - The redux store, state, or
* {@code getState} function.
* @returns {boolean}
*/
export function isAudioMuted(stateful: IStateful) {
return Boolean(toState(stateful)['features/base/media'].audio.muted);
}
/**
* Determines whether video is currently muted by the audio-only authority.
*
* @param {Function|Object} stateful - The redux store, state, or
* {@code getState} function.
* @returns {boolean}
*/
export function isVideoMutedByAudioOnly(stateful: IStateful) {
return (
_isVideoMutedByAuthority(stateful, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY));
}
/**
* Determines whether video is currently muted by a specific
* {@code VIDEO_MUTISM_AUTHORITY}.
*
* @param {Function|Object} stateful - The redux store, state, or
* {@code getState} function.
* @param {number} videoMutismAuthority - The {@code VIDEO_MUTISM_AUTHORITY}
* which is to be checked whether it has muted video.
* @returns {boolean} If video is currently muted by the specified
* {@code videoMutismAuthority}, then {@code true}; otherwise, {@code false}.
*/
function _isVideoMutedByAuthority(
stateful: IStateful,
videoMutismAuthority: number) {
const { muted } = toState(stateful)['features/base/media'].video;
// eslint-disable-next-line no-bitwise
return Boolean(muted & videoMutismAuthority);
}
/**
* Computes the effective startWithAudioMuted value from config, URL params and settings; startSilent also implies starting with audio muted.
*
* @param {Object|Function} stateful - The redux state object or {@code getState} function.
* @returns {boolean} - The computed startWithAudioMuted value that will be used.
*/
export function getStartWithAudioMuted(stateful: IStateful) {
return Boolean(getPropertyValue(stateful, 'startWithAudioMuted', START_WITH_AUDIO_VIDEO_MUTED_SOURCES))
|| Boolean(getPropertyValue(stateful, 'startSilent', START_WITH_AUDIO_VIDEO_MUTED_SOURCES));
}
/**
* Computes the effective startWithVideoMuted value from config, URL params and settings.
*
* @param {Object|Function} stateful - The redux state object or {@code getState} function.
* @returns {boolean} - The computed startWithVideoMuted value that will be used.
*/
export function getStartWithVideoMuted(stateful: IStateful) {
return Boolean(getPropertyValue(stateful, 'startWithVideoMuted', START_WITH_AUDIO_VIDEO_MUTED_SOURCES));
}
/**
* Determines whether screen-share is currently muted.
*
* @param {Function|Object} stateful - The redux store, state, or {@code getState} function.
* @returns {boolean}
*/
export function isScreenshareMuted(stateful: IStateful) {
return Boolean(toState(stateful)['features/base/media'].screenshare.muted);
}
/**
* Determines whether video is currently muted.
*
* @param {Function|Object} stateful - The redux store, state, or {@code getState} function.
* @returns {boolean}
*/
export function isVideoMuted(stateful: IStateful) {
return Boolean(toState(stateful)['features/base/media'].video.muted);
}
/**
* Determines whether video is currently muted by the user authority.
*
* @param {Function|Object} stateful - The redux store, state, or
* {@code getState} function.
* @returns {boolean}
*/
export function isVideoMutedByUser(stateful: IStateful) {
return _isVideoMutedByAuthority(stateful, VIDEO_MUTISM_AUTHORITY.USER);
}
/**
* Determines whether a specific videoTrack should be rendered.
*
* @param {Track} videoTrack - The video track which is to be rendered.
* @param {boolean} waitForVideoStarted - True if the specified videoTrack
* should be rendered only after its associated video has started;
* otherwise, false.
* @returns {boolean} True if the specified videoTrack should be rendered;
* otherwise, false.
*/
export function shouldRenderVideoTrack(
videoTrack: { muted: boolean; videoStarted: boolean; } | undefined,
waitForVideoStarted: boolean) {
return (
videoTrack
&& !videoTrack.muted
&& (!waitForVideoStarted || videoTrack.videoStarted));
}
/**
* Computes the localized sound file source.
*
* @param {string} file - The default file source.
* @param {string} language - The language to use for localization.
* @returns {string}
*/
export const getSoundFileSrc = (file: string, language: string): string => {
if (!AudioSupportedLanguage[language as keyof typeof AudioSupportedLanguage]
|| language === AudioSupportedLanguage.en) {
return file;
}
const fileTokens = file.split('.');
return `${fileTokens[0]}_${language}.${fileTokens[1]}`;
};
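
For illustration, the file names this naming convention produces (the sound file is hypothetical):

getSoundFileSrc('incomingMessage.mp3', 'en'); // 'incomingMessage.mp3' (English is the default)
getSoundFileSrc('incomingMessage.mp3', 'fr'); // 'incomingMessage_fr.mp3'
getSoundFileSrc('incomingMessage.mp3', 'frCA'); // 'incomingMessage_frCA.mp3'
getSoundFileSrc('incomingMessage.mp3', 'de'); // 'incomingMessage.mp3' (unsupported language falls back)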

View File

@@ -0,0 +1,3 @@
import { getLogger } from '../logging/functions';
export default getLogger('features/base/media');

View File

@@ -0,0 +1,339 @@
import { AnyAction } from 'redux';
import {
createStartAudioOnlyEvent,
createStartMutedConfigurationEvent,
createSyncTrackStateEvent,
createTrackMutedEvent
} from '../../analytics/AnalyticsEvents';
import { sendAnalytics } from '../../analytics/functions';
import { IStore } from '../../app/types';
import { MEDIA_TYPE as AVM_MEDIA_TYPE } from '../../av-moderation/constants';
import { isForceMuted } from '../../av-moderation/functions';
import { APP_STATE_CHANGED } from '../../mobile/background/actionTypes';
import { showWarningNotification } from '../../notifications/actions';
import { NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import { isScreenMediaShared } from '../../screen-share/functions';
import { SET_AUDIO_ONLY } from '../audio-only/actionTypes';
import { setAudioOnly } from '../audio-only/actions';
import { SET_ROOM } from '../conference/actionTypes';
import { isRoomValid } from '../conference/functions';
import { PARTICIPANT_MUTED_US } from '../participants/actionTypes';
import { getLocalParticipant } from '../participants/functions';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import { getPropertyValue } from '../settings/functions.any';
import { TRACK_ADDED } from '../tracks/actionTypes';
import { destroyLocalTracks } from '../tracks/actions.any';
import {
getCameraFacingMode,
isLocalTrackMuted,
isLocalVideoTrackDesktop,
setTrackMuted
} from '../tracks/functions.any';
import { ITrack } from '../tracks/types';
import {
SET_AUDIO_MUTED,
SET_AUDIO_UNMUTE_PERMISSIONS,
SET_SCREENSHARE_MUTED,
SET_VIDEO_MUTED,
SET_VIDEO_UNMUTE_PERMISSIONS
} from './actionTypes';
import {
setAudioMuted,
setCameraFacingMode,
setScreenshareMuted,
setVideoMuted
} from './actions';
import {
MEDIA_TYPE,
SCREENSHARE_MUTISM_AUTHORITY,
VIDEO_MUTISM_AUTHORITY,
VIDEO_TYPE
} from './constants';
import { getStartWithAudioMuted, getStartWithVideoMuted } from './functions';
import logger from './logger';
import {
_AUDIO_INITIAL_MEDIA_STATE,
_VIDEO_INITIAL_MEDIA_STATE
} from './reducer';
/**
* Implements the entry point of the middleware of the feature base/media.
*
* @param {Store} store - The redux store.
* @returns {Function}
*/
MiddlewareRegistry.register(store => next => action => {
switch (action.type) {
case APP_STATE_CHANGED:
return _appStateChanged(store, next, action);
case PARTICIPANT_MUTED_US: {
const { dispatch } = store;
const { track } = action;
// Sync the media muted state with the track muted state.
if (track.isAudioTrack()) {
dispatch(setAudioMuted(true, /* ensureTrack */ false));
} else if (track.isVideoTrack()) {
if (track.getVideoType() === VIDEO_TYPE.DESKTOP) {
dispatch(setScreenshareMuted(true, SCREENSHARE_MUTISM_AUTHORITY.USER, /* ensureTrack */ false));
} else {
dispatch(setVideoMuted(true, VIDEO_MUTISM_AUTHORITY.USER, /* ensureTrack */ false));
}
}
break;
}
case SET_AUDIO_ONLY:
return _setAudioOnly(store, next, action);
case SET_ROOM:
return _setRoom(store, next, action);
case TRACK_ADDED: {
const result = next(action);
const { track } = action;
// Don't sync track mute state with the redux store for screenshare
// since video mute state represents local camera mute state only.
track.local && track.videoType !== 'desktop'
&& _syncTrackMutedState(store, track);
return result;
}
case SET_AUDIO_MUTED: {
const state = store.getState();
const participant = getLocalParticipant(state);
if (!action.muted && isForceMuted(participant, AVM_MEDIA_TYPE.AUDIO, state)) {
return;
}
break;
}
case SET_AUDIO_UNMUTE_PERMISSIONS: {
const { blocked, skipNotification } = action;
const state = store.getState();
const tracks = state['features/base/tracks'];
const isAudioMuted = isLocalTrackMuted(tracks, MEDIA_TYPE.AUDIO);
if (blocked && isAudioMuted && !skipNotification) {
store.dispatch(showWarningNotification({
descriptionKey: 'notify.audioUnmuteBlockedDescription',
titleKey: 'notify.audioUnmuteBlockedTitle'
}, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
}
break;
}
case SET_SCREENSHARE_MUTED: {
const state = store.getState();
const participant = getLocalParticipant(state);
if (!action.muted && isForceMuted(participant, AVM_MEDIA_TYPE.DESKTOP, state)) {
return;
}
break;
}
case SET_VIDEO_MUTED: {
const state = store.getState();
const participant = getLocalParticipant(state);
if (!action.muted && isForceMuted(participant, AVM_MEDIA_TYPE.VIDEO, state)) {
return;
}
break;
}
case SET_VIDEO_UNMUTE_PERMISSIONS: {
const { blocked, skipNotification } = action;
const state = store.getState();
const tracks = state['features/base/tracks'];
const isVideoMuted = isLocalTrackMuted(tracks, MEDIA_TYPE.VIDEO);
const isMediaShared = isScreenMediaShared(state);
if (blocked && isVideoMuted && !isMediaShared && !skipNotification) {
store.dispatch(showWarningNotification({
descriptionKey: 'notify.videoUnmuteBlockedDescription',
titleKey: 'notify.videoUnmuteBlockedTitle'
}, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
}
break;
}
}
return next(action);
});
/**
* Adjusts the video muted state based on the app state.
*
* @param {Store} store - The redux store in which the specified {@code action}
* is being dispatched.
* @param {Dispatch} next - The redux {@code dispatch} function to dispatch the
* specified {@code action} to the specified {@code store}.
* @param {Action} action - The redux action {@code APP_STATE_CHANGED} which is
* being dispatched in the specified {@code store}.
* @private
* @returns {Object} The value returned by {@code next(action)}.
*/
function _appStateChanged({ dispatch, getState }: IStore, next: Function, action: AnyAction) {
if (navigator.product === 'ReactNative') {
const { appState } = action;
const mute = appState !== 'active' && !isLocalVideoTrackDesktop(getState());
sendAnalytics(createTrackMutedEvent('video', 'background mode', mute));
dispatch(setVideoMuted(mute, VIDEO_MUTISM_AUTHORITY.BACKGROUND));
}
return next(action);
}
/**
* Adjusts the video muted state based on the audio-only state.
*
* @param {Store} store - The redux store in which the specified {@code action}
* is being dispatched.
* @param {Dispatch} next - The redux {@code dispatch} function to dispatch the
* specified {@code action} to the specified {@code store}.
* @param {Action} action - The redux action {@code SET_AUDIO_ONLY} which is
* being dispatched in the specified {@code store}.
* @private
* @returns {Object} The value returned by {@code next(action)}.
*/
function _setAudioOnly({ dispatch }: IStore, next: Function, action: AnyAction) {
const { audioOnly } = action;
sendAnalytics(createTrackMutedEvent('video', 'audio-only mode', audioOnly));
// Make sure we mute both the desktop and video tracks.
dispatch(setVideoMuted(audioOnly, VIDEO_MUTISM_AUTHORITY.AUDIO_ONLY));
dispatch(setScreenshareMuted(audioOnly, SCREENSHARE_MUTISM_AUTHORITY.AUDIO_ONLY));
return next(action);
}
/**
* Notifies the feature base/media that the action {@link SET_ROOM} is being
* dispatched within a specific redux {@code store}.
*
* @param {Store} store - The redux store in which the specified {@code action}
* is being dispatched.
* @param {Dispatch} next - The redux {@code dispatch} function to dispatch the
* specified {@code action} to the specified {@code store}.
* @param {Action} action - The redux action, {@code SET_ROOM}, which is being
* dispatched in the specified {@code store}.
* @private
* @returns {Object} The new state that is the result of the reduction of the
* specified {@code action}.
*/
function _setRoom({ dispatch, getState }: IStore, next: Function, action: AnyAction) {
// Figure out the desires/intents i.e. the state of base/media. There are
// multiple desires/intents ordered by precedence such as server-side
// config, config overrides in the user-supplied URL, user's own app
// settings, etc.
const state = getState();
const { room } = action;
const roomIsValid = isRoomValid(room);
// When going to the welcome page on web (room is not valid), we want to skip resetting the startWithA/V values.
if (roomIsValid || navigator.product === 'ReactNative') {
const audioMuted = roomIsValid ? getStartWithAudioMuted(state) : _AUDIO_INITIAL_MEDIA_STATE.muted;
const videoMuted = roomIsValid ? getStartWithVideoMuted(state) : _VIDEO_INITIAL_MEDIA_STATE.muted;
sendAnalytics(createStartMutedConfigurationEvent('local', audioMuted, Boolean(videoMuted)));
logger.log(`Start muted: ${audioMuted ? 'audio, ' : ''}${videoMuted ? 'video' : ''}`);
// Unconditionally express the desires/expectations/intents of the app and
// the user i.e. the state of base/media. Eventually, practice/reality i.e.
// the state of base/tracks will or will not agree with the desires.
dispatch(setAudioMuted(audioMuted));
dispatch(setCameraFacingMode(getCameraFacingMode(state)));
dispatch(setVideoMuted(videoMuted));
}
// startAudioOnly
//
// FIXME Technically, the audio-only feature is owned by base/conference,
// not base/media so the following should be in base/conference.
// Practically, I presume it was easier to write the source code here
// because it looks like startWithAudioMuted and startWithVideoMuted.
//
// XXX After the introduction of the "Video <-> Voice" toggle on the
// WelcomePage, startAudioOnly is utilized even outside of
// conferences/meetings.
const audioOnly
= Boolean(
getPropertyValue(
state,
'startAudioOnly',
/* sources */ {
// FIXME Practically, base/config is (really) correct
// only if roomIsValid. At the time of this writing,
// base/config is overwritten by URL params which leaves
// base/config incorrect on the WelcomePage after
// leaving a conference which explicitly overwrites
// base/config with URL params.
config: roomIsValid,
// XXX We've already overwritten base/config with
// urlParams if roomIsValid. However, settings are more
// important than the server-side config. Consequently,
// we need to read from urlParams anyway. We also
// probably want to read from urlParams when
// !roomIsValid.
urlParams: true,
// The following don't have complications around whether
// they are defined or not:
jwt: false,
// We need to look for 'startAudioOnly' in settings only for react native clients. Otherwise, the
// default value from ISettingsState (false) will override the value set in config for web clients.
settings: typeof APP === 'undefined'
}));
sendAnalytics(createStartAudioOnlyEvent(audioOnly));
logger.log(`Start audio only set to ${audioOnly.toString()}`);
dispatch(setAudioOnly(audioOnly));
if (!roomIsValid) {
dispatch(destroyLocalTracks());
}
return next(action);
}
/**
* Syncs the muted state of a local media track with the muted state in the base/media state.
*
* @param {Store} store - The redux store.
* @param {Track} track - The local media track.
* @private
* @returns {void}
*/
function _syncTrackMutedState({ getState, dispatch }: IStore, track: ITrack) {
const state = getState()['features/base/media'];
const mediaType = track.mediaType;
const muted = Boolean(state[mediaType].muted);
// XXX If the muted state of the track when it was added differs from our
// media muted state, we need to mute the track and explicitly modify the
// 'muted' property on it. This is because, though the TRACK_ADDED action was
// dispatched, the track is not yet in the redux state and
// JitsiTrackEvents.TRACK_MUTE_CHANGED may fire before the track gets to the state.
if (track.muted !== muted) {
sendAnalytics(createSyncTrackStateEvent(mediaType, muted));
logger.log(`Sync ${mediaType} track muted state to ${muted ? 'muted' : 'unmuted'}`);
track.muted = muted;
setTrackMuted(track.jitsiTrack, muted, state, dispatch);
}
}

View File

@@ -0,0 +1 @@
import './middleware.any';

View File

@@ -0,0 +1,44 @@
import './middleware.any';
import { AnyAction } from 'redux';
import { IStore } from '../../app/types';
import { showNotification } from '../../notifications/actions';
import { NOTIFICATION_TIMEOUT_TYPE } from '../../notifications/constants';
import LocalRecordingManager from '../../recording/components/Recording/LocalRecordingManager.web';
import StopRecordingDialog from '../../recording/components/Recording/web/StopRecordingDialog';
import { openDialog } from '../dialog/actions';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';
import { SET_VIDEO_MUTED } from './actionTypes';
import './subscriber';
/**
* Implements the entry point of the middleware of the feature base/media.
*
* @param {IStore} store - The redux store.
* @returns {Function}
*/
MiddlewareRegistry.register((store: IStore) => (next: Function) => (action: AnyAction) => {
const { dispatch } = store;
switch (action.type) {
case SET_VIDEO_MUTED: {
if (LocalRecordingManager.isRecordingLocally() && LocalRecordingManager.selfRecording.on) {
if (action.muted && LocalRecordingManager.selfRecording.withVideo) {
dispatch(openDialog(StopRecordingDialog, { localRecordingVideoStop: true }));
return;
} else if (!action.muted && !LocalRecordingManager.selfRecording.withVideo) {
dispatch(showNotification({
titleKey: 'recording.localRecordingNoVideo',
descriptionKey: 'recording.localRecordingVideoWarning',
uid: 'recording.localRecordingNoVideo'
}, NOTIFICATION_TIMEOUT_TYPE.MEDIUM));
}
}
}
}
return next(action);
});

View File

@@ -0,0 +1,379 @@
import { AnyAction, combineReducers } from 'redux';
import { CONFERENCE_FAILED, CONFERENCE_LEFT } from '../conference/actionTypes';
import ReducerRegistry from '../redux/ReducerRegistry';
import { TRACK_REMOVED } from '../tracks/actionTypes';
import {
GUM_PENDING,
SET_AUDIO_AVAILABLE,
SET_AUDIO_MUTED,
SET_AUDIO_UNMUTE_PERMISSIONS,
SET_CAMERA_FACING_MODE,
SET_INITIAL_GUM_PROMISE,
SET_SCREENSHARE_MUTED,
SET_VIDEO_AVAILABLE,
SET_VIDEO_MUTED,
SET_VIDEO_UNMUTE_PERMISSIONS,
STORE_VIDEO_TRANSFORM,
TOGGLE_CAMERA_FACING_MODE
} from './actionTypes';
import { CAMERA_FACING_MODE, MEDIA_TYPE, SCREENSHARE_MUTISM_AUTHORITY } from './constants';
import { IGUMPendingState } from './types';
/**
* Media state object for local audio.
*
* @typedef {Object} AudioMediaState
* @property {boolean} muted=false - Audio muted state.
*/
// FIXME Technically, _AUDIO_INITIAL_MEDIA_STATE is a constant internal to the
// feature base/media and used in multiple files so it should be in
// constants.js. Practically though, AudioMediaState would then be used in
// multiple files as well so I don't know where and how to move it.
/**
* Initial state for local audio.
*
* @type {AudioMediaState}
*/
export const _AUDIO_INITIAL_MEDIA_STATE = {
available: true,
gumPending: IGUMPendingState.NONE,
unmuteBlocked: false,
muted: false
};
/**
* Reducer for audio media state.
*
* @param {AudioMediaState} state - Media state of local audio.
* @param {Object} action - Action object.
* @param {string} action.type - Type of action.
* @private
* @returns {AudioMediaState}
*/
function _audio(state: IAudioState = _AUDIO_INITIAL_MEDIA_STATE, action: AnyAction) {
switch (action.type) {
case SET_AUDIO_AVAILABLE:
return {
...state,
available: action.available
};
case GUM_PENDING:
if (action.mediaTypes.includes(MEDIA_TYPE.AUDIO)) {
return {
...state,
gumPending: action.status
};
}
return state;
case SET_AUDIO_MUTED:
return {
...state,
muted: action.muted
};
case SET_AUDIO_UNMUTE_PERMISSIONS:
return {
...state,
unmuteBlocked: action.blocked
};
default:
return state;
}
}
// Using a deferred promise here to make sure that, once the connection is established, we wait for conference.init
// and the initial track creation to finish before starting to join the room, even if they haven't been started yet.
// NOTE: The previous implementation was using the GUM promise from conference.init. But it turned out that connect
// may finish even before conference.init is executed.
const DEFAULT_INITIAL_PROMISE_STATE = Promise.withResolvers<IInitialGUMPromiseResult>();
/**
* Reducer for the initial GUM promise stored in the media state.
*
* @param {PromiseWithResolvers|null} state - The current initial GUM promise.
* @param {Object} action - Action object.
* @param {string} action.type - Type of action.
* @returns {PromiseWithResolvers|null}
*/
function _initialGUMPromise(
state: PromiseWithResolvers<IInitialGUMPromiseResult> | null = DEFAULT_INITIAL_PROMISE_STATE,
action: AnyAction) {
if (action.type === SET_INITIAL_GUM_PROMISE) {
return action.promise ?? null;
}
return state;
}
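
Promise.withResolvers (ES2024) returns a "deferred": the promise together with its resolve and reject functions, so the join flow can await the promise while the track-creation code settles it later. A minimal sketch of the pattern (the payload below is illustrative):

const deferred = Promise.withResolvers<{ tracks: Array<any>; }>();

// One part of the code waits for the initial GUM to finish...
deferred.promise.then(({ tracks }) => console.log(`initial GUM produced ${tracks.length} track(s)`));

// ...while another part settles it once the tracks have been created.
deferred.resolve({ tracks: [] });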
/**
* Media state object for local screenshare.
*
* @typedef {Object} ScreenshareMediaState
* @property {boolean} available=true - Screenshare available state.
* @property {number} muted - Screenshare muted state, a bitfield of SCREENSHARE_MUTISM_AUTHORITY values.
* @property {boolean} unmuteBlocked=false - Screenshare unmute blocked state.
*/
/**
* Initial state for screenshare.
*
* @type {ScreenshareMediaState}
*/
export const _SCREENSHARE_INITIAL_MEDIA_STATE = {
available: true,
muted: SCREENSHARE_MUTISM_AUTHORITY.USER,
unmuteBlocked: false
};
/**
* Reducer for screenshare media state.
*
* @param {ScreenshareMediaState} state - Media state of local screenshare.
* @param {Object} action - Action object.
* @param {string} action.type - Type of action.
* @private
* @returns {ScreenshareMediaState}
*/
function _screenshare(state: IScreenshareState = _SCREENSHARE_INITIAL_MEDIA_STATE, action: AnyAction) {
switch (action.type) {
case SET_SCREENSHARE_MUTED:
return {
...state,
muted: action.muted
};
case SET_VIDEO_UNMUTE_PERMISSIONS:
return {
...state,
unmuteBlocked: action.blocked
};
default:
return state;
}
}
/**
* Media state object for local video.
*
* @typedef {Object} VideoMediaState
* @property {CAMERA_FACING_MODE} facingMode='user' - Camera facing mode.
* @property {number} muted=0 - Video muted state, a bitfield of VIDEO_MUTISM_AUTHORITY values.
*/
// FIXME Technically, _VIDEO_INITIAL_MEDIA_STATE is a constant internal to the
// feature base/media and used in multiple files so it should be in
// constants.js. Practically though, VideoMediaState would then be used in
// multiple files as well so I don't know where and how to move it.
/**
* Initial state for video.
*
* @type {VideoMediaState}
*/
export const _VIDEO_INITIAL_MEDIA_STATE = {
available: true,
gumPending: IGUMPendingState.NONE,
unmuteBlocked: false,
facingMode: CAMERA_FACING_MODE.USER,
muted: 0,
/**
* The video {@link Transform}s applied to {@code MediaStream}s by
* {@code id} i.e. "pinch to zoom".
*/
transforms: {}
};
/**
* Reducer for camera media state.
*
* @param {VideoMediaState} state - Media state of local video.
* @param {Object} action - Action object.
* @param {string} action.type - Type of action.
* @private
* @returns {VideoMediaState}
*/
function _video(state: IVideoState = _VIDEO_INITIAL_MEDIA_STATE, action: any) {
switch (action.type) {
case CONFERENCE_FAILED:
case CONFERENCE_LEFT:
return _clearAllVideoTransforms(state);
case GUM_PENDING:
if (action.mediaTypes.includes(MEDIA_TYPE.VIDEO)) {
return {
...state,
gumPending: action.status
};
}
return state;
case SET_CAMERA_FACING_MODE:
return {
...state,
facingMode: action.cameraFacingMode
};
case SET_VIDEO_AVAILABLE:
return {
...state,
available: action.available
};
case SET_VIDEO_MUTED:
return {
...state,
muted: action.muted
};
case SET_VIDEO_UNMUTE_PERMISSIONS:
return {
...state,
unmuteBlocked: action.blocked
};
case STORE_VIDEO_TRANSFORM:
return _storeVideoTransform(state, action);
case TOGGLE_CAMERA_FACING_MODE: {
let cameraFacingMode = state.facingMode;
cameraFacingMode
= cameraFacingMode === CAMERA_FACING_MODE.USER
? CAMERA_FACING_MODE.ENVIRONMENT
: CAMERA_FACING_MODE.USER;
return {
...state,
facingMode: cameraFacingMode
};
}
case TRACK_REMOVED:
return _trackRemoved(state, action);
default:
return state;
}
}
interface IAudioState {
available: boolean;
gumPending: IGUMPendingState;
muted: boolean;
unmuteBlocked: boolean;
}
interface IInitialGUMPromiseResult {
errors?: any;
tracks: Array<any>;
}
interface IScreenshareState {
available: boolean;
muted: number;
unmuteBlocked: boolean;
}
interface IVideoState {
available: boolean;
facingMode: string;
gumPending: IGUMPendingState;
muted: number;
transforms: Object;
unmuteBlocked: boolean;
}
export interface IMediaState {
audio: IAudioState;
initialGUMPromise: PromiseWithResolvers<IInitialGUMPromiseResult> | null;
screenshare: IScreenshareState;
video: IVideoState;
}
/**
* Registers the combined reducer for the feature base/media.
*
* @param {Object} state - State of the base/media feature.
* @param {Object} action - Action object.
* @param {string} action.type - Type of action.
* @returns {Object}
*/
ReducerRegistry.register<IMediaState>('features/base/media', combineReducers({
audio: _audio,
initialGUMPromise: _initialGUMPromise,
screenshare: _screenshare,
video: _video
}));
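
After registration, the state slice at state['features/base/media'] has the following initial shape (a sketch; numeric values are spelled out for readability, and initialGUMPromise holds a live PromiseWithResolvers rather than a plain object):

const initialMediaState = {
    audio: { available: true, gumPending: 2 /* IGUMPendingState.NONE */, muted: false, unmuteBlocked: false },
    initialGUMPromise: DEFAULT_INITIAL_PROMISE_STATE,
    screenshare: { available: true, muted: 4 /* SCREENSHARE_MUTISM_AUTHORITY.USER */, unmuteBlocked: false },
    video: { available: true, facingMode: 'user', gumPending: 2, muted: 0, transforms: {} }
};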
/**
* Removes all stored video {@link Transform}s.
*
* @param {Object} state - The {@code video} state of the feature base/media.
* @private
* @returns {Object}
*/
function _clearAllVideoTransforms(state: IVideoState) {
return {
...state,
transforms: _VIDEO_INITIAL_MEDIA_STATE.transforms
};
}
/**
* Stores the last applied transform to a stream.
*
* @param {Object} state - The {@code video} state of the feature base/media.
* @param {Object} action - The redux action {@link STORE_VIDEO_TRANSFORM}.
* @private
* @returns {Object}
*/
function _storeVideoTransform(state: IVideoState, { streamId, transform }: { streamId: string; transform: string; }) {
return {
...state,
transforms: {
...state.transforms,
[streamId]: transform
}
};
}
/**
* Removes the stored video {@link Transform} associated with a
* {@code MediaStream} when its respective track is removed.
*
* @param {Object} state - The {@code video} state of the feature base/media.
* @param {Object} action - The redux action {@link TRACK_REMOVED}.
* @private
* @returns {Object}
*/
function _trackRemoved(state: IVideoState, { track: { jitsiTrack } }: { track: { jitsiTrack: any; }; }) {
if (jitsiTrack) {
const streamId = jitsiTrack.getStreamId();
if (streamId && streamId in state.transforms) {
const nextTransforms: any = {
...state.transforms
};
delete nextTransforms[streamId];
return {
...state,
transforms: nextTransforms
};
}
}
return state;
}

View File

@@ -0,0 +1,18 @@
import { IReduxState, IStore } from '../../app/types';
import StateListenerRegistry from '../redux/StateListenerRegistry';
/**
* Notifies when the local audio mute state changes.
*/
StateListenerRegistry.register(
/* selector */ (state: IReduxState) => state['features/base/media'].audio.muted,
/* listener */ (muted: boolean, store: IStore, previousMuted: boolean) => {
if (typeof APP !== 'object') {
return;
}
if (muted !== previousMuted) {
APP.API.notifyAudioMutedStatusChanged(muted);
}
}
);

View File

@@ -0,0 +1,4 @@
export enum IGUMPendingState {
PENDING_UNMUTE = 1,
NONE = 2
}