mirror of https://github.com/jitsi/jitsi-meet
parent 49bcf5c179 · commit 7cd39b7983
@@ -1,7 +1,7 @@
import { IStore } from '../../app/types';
import JitsiMeetJS from '../lib-jitsi-meet';
import { updateSettings } from '../settings/actions';
import { getUserSelectedOutputDeviceId } from '../settings/functions.any';
import { getUserSelectedOutputDeviceId } from '../settings/functions.web';

import {
    ADD_PENDING_DEVICE_REQUEST,
@@ -0,0 +1,19 @@
import { IReduxState } from '../../app/types';

/**
 * Returns true if there are devices of a specific type or on native platform.
 *
 * @param {Object} state - The state of the application.
 * @param {string} type - The type of device: videoInput | audioOutput | audioInput.
 *
 * @returns {boolean}
 */
export function hasAvailableDevices(state: IReduxState, type: string) {
    if (state['features/base/devices'] === undefined) {
        return true;
    }

    const availableDevices = state['features/base/devices'].availableDevices;

    return Number(availableDevices[type as keyof typeof availableDevices]?.length) > 0;
}
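A minimal usage sketch for the selector above (not part of this commit; the react-redux hook, the import paths and the 'audioInput' argument are illustrative assumptions):

import { useSelector } from 'react-redux';

import { IReduxState } from '../../app/types';
import { hasAvailableDevices } from '../devices/functions.any';

// Returns true when at least one microphone is known to the devices state.
function useHasMicrophone() {
    return useSelector((state: IReduxState) => hasAvailableDevices(state, 'audioInput'));
}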
@@ -0,0 +1 @@
export * from './functions.any';
@@ -1,3 +0,0 @@
export * from './actions';
export * from './actionTypes';
export * from './functions';
@@ -0,0 +1,17 @@
/* eslint-disable lines-around-comment */

export interface IDevicesState {
    availableDevices: {
        // @ts-ignore
        audioInput?: MediaDeviceInfo[];
        // @ts-ignore
        audioOutput?: MediaDeviceInfo[];
        // @ts-ignore
        videoInput?: MediaDeviceInfo[];
    };
    pendingRequests: any[];
    permissions: {
        audio: boolean;
        video: boolean;
    };
}
@@ -0,0 +1,10 @@
/**
 * Checks whether the chrome extensions defined in the config file are installed or not.
 *
 * @param {Object} _config - Object containing info about the configured extensions.
 *
 * @returns {Promise[]}
 */
export default function checkChromeExtensionsInstalled(_config: any = {}) {
    return Promise.resolve([]);
}
@@ -0,0 +1,26 @@
/**
 * Checks whether the chrome extensions defined in the config file are installed or not.
 *
 * @param {Object} config - Object containing info about the configured extensions.
 *
 * @returns {Promise[]}
 */
export default function checkChromeExtensionsInstalled(config: any = {}) {
    const isExtensionInstalled = (info: any) => new Promise(resolve => {
        const img = new Image();

        img.src = `chrome-extension://${info.id}/${info.path}`;
        img.setAttribute('aria-hidden', 'true');
        img.onload = function() {
            resolve(true);
        };
        img.onerror = function() {
            resolve(false);
        };
    });
    const extensionInstalledFunction = (info: any) => isExtensionInstalled(info);

    return Promise.all(
        (config.chromeExtensionsInfo || []).map((info: any) => extensionInstalledFunction(info))
    );
}
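A hedged consumption sketch (not part of the diff): the config shape below mirrors the chromeExtensionsInfo entries the function reads, and the logging consumer is hypothetical.

const exampleConfig = {
    chromeExtensionsInfo: [
        { id: 'extension-id-goes-here', path: 'icon.png' }
    ]
};

checkChromeExtensionsInstalled(exampleConfig).then(results => {
    // results holds one boolean per configured extension, in order.
    console.log('extension install status:', results);
});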
@@ -0,0 +1,45 @@
import { IStore } from '../../app/types';
import JitsiMeetJS from '../lib-jitsi-meet';

import { ITrackOptions } from './types';

export * from './functions.any';

/**
 * Create local tracks of specific types.
 *
 * @param {Object} options - The options with which the local tracks are to be
 * created.
 * @param {string|null} [options.cameraDeviceId] - Camera device id or
 * {@code undefined} to use app's settings.
 * @param {string[]} options.devices - Required track types such as 'audio'
 * and/or 'video'.
 * @param {string|null} [options.micDeviceId] - Microphone device id or
 * {@code undefined} to use app's settings.
 * @param {number|undefined} [options.timeout] - A timeout for JitsiMeetJS.createLocalTracks used to create the tracks.
 * @param {boolean} [options.firePermissionPromptIsShownEvent] - Whether lib-jitsi-meet
 * should check for a {@code getUserMedia} permission prompt and fire a
 * corresponding event.
 * @param {IStore} store - The redux store in the context of which the function
 * is to execute and from which state such as {@code config} is to be retrieved.
 * @returns {Promise<JitsiLocalTrack[]>}
 */
export function createLocalTracksF(options: ITrackOptions = {}, store: IStore) {
    const { cameraDeviceId, micDeviceId } = options;
    const state = store.getState();
    const {
        resolution
    } = state['features/base/config'];
    const constraints = options.constraints ?? state['features/base/config'].constraints;

    return JitsiMeetJS.createLocalTracks(
        {
            cameraDeviceId,
            constraints,

            // Copy array to avoid mutations inside library.
            devices: options.devices?.slice(0),
            micDeviceId,
            resolution
        });
}
@@ -0,0 +1,242 @@
import { IStore } from '../../app/types';
import { IStateful } from '../app/types';
import { isMobileBrowser } from '../environment/utils';
import JitsiMeetJS from '../lib-jitsi-meet';
import { setAudioMuted } from '../media/actions';
import { MEDIA_TYPE } from '../media/constants';
import { toState } from '../redux/functions';
import {
    getUserSelectedCameraDeviceId,
    getUserSelectedMicDeviceId
} from '../settings/functions.web';

// @ts-ignore
import loadEffects from './loadEffects';
import logger from './logger';
import { ITrackOptions } from './types';

export * from './functions.any';

/**
 * Create local tracks of specific types.
 *
 * @param {Object} options - The options with which the local tracks are to be
 * created.
 * @param {string|null} [options.cameraDeviceId] - Camera device id or
 * {@code undefined} to use app's settings.
 * @param {string[]} options.devices - Required track types such as 'audio'
 * and/or 'video'.
 * @param {string|null} [options.micDeviceId] - Microphone device id or
 * {@code undefined} to use app's settings.
 * @param {number|undefined} [options.timeout] - A timeout for JitsiMeetJS.createLocalTracks used to create the tracks.
 * @param {boolean} [options.firePermissionPromptIsShownEvent] - Whether lib-jitsi-meet
 * should check for a {@code getUserMedia} permission prompt and fire a
 * corresponding event.
 * @param {IStore} store - The redux store in the context of which the function
 * is to execute and from which state such as {@code config} is to be retrieved.
 * @returns {Promise<JitsiLocalTrack[]>}
 */
export function createLocalTracksF(options: ITrackOptions = {}, store?: IStore) {
    let { cameraDeviceId, micDeviceId } = options;
    const {
        desktopSharingSourceDevice,
        desktopSharingSources,
        firePermissionPromptIsShownEvent,
        timeout
    } = options;

    // TODO The app's settings should go in the redux store and then the
    // reliance on the global variable APP will go away.
    store = store || APP.store; // eslint-disable-line no-param-reassign

    const state = store.getState();

    if (typeof cameraDeviceId === 'undefined' || cameraDeviceId === null) {
        cameraDeviceId = getUserSelectedCameraDeviceId(state);
    }
    if (typeof micDeviceId === 'undefined' || micDeviceId === null) {
        micDeviceId = getUserSelectedMicDeviceId(state);
    }

    const {
        desktopSharingFrameRate,
        firefox_fake_device, // eslint-disable-line camelcase
        resolution
    } = state['features/base/config'];
    const constraints = options.constraints ?? state['features/base/config'].constraints;

    return (
        loadEffects(store).then((effectsArray: Object[]) => {
            // Filter any undefined values returned by Promise.resolve().
            const effects = effectsArray.filter(effect => Boolean(effect));

            return JitsiMeetJS.createLocalTracks(
                {
                    cameraDeviceId,
                    constraints,
                    desktopSharingFrameRate,
                    desktopSharingSourceDevice,
                    desktopSharingSources,

                    // Copy array to avoid mutations inside library.
                    devices: options.devices?.slice(0),
                    effects,
                    firefox_fake_device, // eslint-disable-line camelcase
                    firePermissionPromptIsShownEvent,
                    micDeviceId,
                    resolution,
                    timeout
                })
            .catch((err: Error) => {
                logger.error('Failed to create local tracks', options.devices, err);

                return Promise.reject(err);
            });
        }));
}

/**
 * Creates a local video track for presenter. The constraints are computed based
 * on the height of the desktop that is being shared.
 *
 * @param {Object} options - The options with which the local presenter track
 * is to be created.
 * @param {string|null} [options.cameraDeviceId] - Camera device id or
 * {@code undefined} to use app's settings.
 * @param {number} desktopHeight - The height of the desktop that is being
 * shared.
 * @returns {Promise<JitsiLocalTrack>}
 */
export async function createLocalPresenterTrack(options: ITrackOptions, desktopHeight: number) {
    const { cameraDeviceId } = options;

    // compute the constraints of the camera track based on the resolution
    // of the desktop screen that is being shared.
    const cameraHeights = [ 180, 270, 360, 540, 720 ];
    const proportion = 5;
    const result = cameraHeights.find(
        height => (desktopHeight / proportion) < height);
    const constraints = {
        video: {
            aspectRatio: 4 / 3,
            height: {
                ideal: result
            }
        }
    };
    const [ videoTrack ] = await JitsiMeetJS.createLocalTracks(
        {
            cameraDeviceId,
            constraints,
            devices: [ 'video' ]
        });

    videoTrack.type = MEDIA_TYPE.PRESENTER;

    return videoTrack;
}

/**
 * Returns an object containing a promise which resolves with the created tracks &
 * the errors resulting from that process.
 *
 * @returns {Promise<JitsiLocalTrack>}
 *
 * @todo Refactor to not use APP.
 */
export function createPrejoinTracks() {
    const errors: any = {};
    const initialDevices = [ 'audio' ];
    const requestedAudio = true;
    let requestedVideo = false;
    const { startAudioOnly, startWithAudioMuted, startWithVideoMuted } = APP.store.getState()['features/base/settings'];

    // Always get a handle on the audio input device so that we have statistics even if the user joins the
    // conference muted. Previous implementation would only acquire the handle when the user first unmuted,
    // which would result in statistics (such as "No audio input" or "Are you trying to speak?") being available
    // only after that point.
    if (startWithAudioMuted) {
        APP.store.dispatch(setAudioMuted(true));
    }

    if (!startWithVideoMuted && !startAudioOnly) {
        initialDevices.push('video');
        requestedVideo = true;
    }

    let tryCreateLocalTracks;

    if (!requestedAudio && !requestedVideo) {
        // Resolve with no tracks
        tryCreateLocalTracks = Promise.resolve([]);
    } else {
        tryCreateLocalTracks = createLocalTracksF({
            devices: initialDevices,
            firePermissionPromptIsShownEvent: true
        }, APP.store)
        .catch((err: Error) => {
            if (requestedAudio && requestedVideo) {

                // Try audio only...
                errors.audioAndVideoError = err;

                return (
                    createLocalTracksF({
                        devices: [ 'audio' ],
                        firePermissionPromptIsShownEvent: true
                    }));
            } else if (requestedAudio && !requestedVideo) {
                errors.audioOnlyError = err;

                return [];
            } else if (requestedVideo && !requestedAudio) {
                errors.videoOnlyError = err;

                return [];
            }
            logger.error('Should never happen');
        })
        .catch((err: Error) => {
            // Log this just in case...
            if (!requestedAudio) {
                logger.error('The impossible just happened', err);
            }
            errors.audioOnlyError = err;

            // Try video only...
            return requestedVideo
                ? createLocalTracksF({
                    devices: [ 'video' ],
                    firePermissionPromptIsShownEvent: true
                })
                : [];
        })
        .catch((err: Error) => {
            // Log this just in case...
            if (!requestedVideo) {
                logger.error('The impossible just happened', err);
            }
            errors.videoOnlyError = err;

            return [];
        });
    }

    return {
        tryCreateLocalTracks,
        errors
    };
}

/**
 * Determines whether toggle camera should be enabled or not.
 *
 * @param {Function|Object} stateful - The redux store or {@code getState} function.
 * @returns {boolean} - Whether toggle camera should be enabled.
 */
export function isToggleCameraEnabled(stateful: IStateful) {
    const state = toState(stateful);
    const { videoInput } = state['features/base/devices'].availableDevices;

    return isMobileBrowser() && Number(videoInput?.length) > 1;
}
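A sketch of how the prejoin flow might consume createPrejoinTracks (illustration only, not part of the commit; the function name initPrejoin is hypothetical):

async function initPrejoin() {
    const { tryCreateLocalTracks, errors } = createPrejoinTracks();
    const tracks = await tryCreateLocalTracks;

    if (errors.audioAndVideoError || errors.audioOnlyError) {
        // A consumer could surface a permissions warning here.
    }

    return tracks;
}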
@@ -0,0 +1,38 @@
import {
    MEDIA_TYPE,
    VIDEO_TYPE
} from '../media/constants';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';

import {
    TRACK_UPDATED
} from './actionTypes';
import {
    toggleScreensharing
} from './actions.native';

import './middleware.any';

/**
 * Middleware that captures LIB_DID_DISPOSE and LIB_DID_INIT actions and,
 * respectively, creates/destroys local media tracks. Also listens to
 * media-related actions and performs corresponding operations with tracks.
 *
 * @param {Store} store - The redux store.
 * @returns {Function}
 */
MiddlewareRegistry.register(store => next => action => {
    switch (action.type) {
    case TRACK_UPDATED: {
        const { jitsiTrack, local } = action.track;

        if (local && jitsiTrack.isMuted()
            && jitsiTrack.type === MEDIA_TYPE.VIDEO && jitsiTrack.videoType === VIDEO_TYPE.DESKTOP) {
            store.dispatch(toggleScreensharing(false));
        }
        break;
    }
    }

    return next(action);
});
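For illustration only (not part of the diff), this is the shape of a TRACK_UPDATED action that would make the middleware above stop screensharing on native:

const exampleAction = {
    type: TRACK_UPDATED,
    track: {
        local: true,
        jitsiTrack: {
            isMuted: () => true,
            type: MEDIA_TYPE.VIDEO,
            videoType: VIDEO_TYPE.DESKTOP
        }
    }
};

// Dispatching an action of this shape would result in
// store.dispatch(toggleScreensharing(false)) being called by the branch above.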
@@ -0,0 +1,198 @@
import { IStore } from '../../app/types';
import { hideNotification } from '../../notifications/actions';
import { isPrejoinPageVisible } from '../../prejoin/functions';
import { getMultipleVideoSendingSupportFeatureFlag } from '../config/functions.any';
import { getAvailableDevices } from '../devices/actions.web';
import { setScreenshareMuted } from '../media/actions';
import {
    MEDIA_TYPE,
    VIDEO_TYPE
} from '../media/constants';
import MiddlewareRegistry from '../redux/MiddlewareRegistry';

import {
    TRACK_ADDED,
    TRACK_MUTE_UNMUTE_FAILED,
    TRACK_NO_DATA_FROM_SOURCE,
    TRACK_REMOVED,
    TRACK_STOPPED,
    TRACK_UPDATED
} from './actionTypes';
import {
    showNoDataFromSourceVideoError,
    toggleScreensharing,
    trackNoDataFromSourceNotificationInfoChanged
} from './actions.web';
import {
    getTrackByJitsiTrack
} from './functions.web';
import { ITrack } from './types';

import './middleware.any';

/**
 * Middleware that captures LIB_DID_DISPOSE and LIB_DID_INIT actions and,
 * respectively, creates/destroys local media tracks. Also listens to
 * media-related actions and performs corresponding operations with tracks.
 *
 * @param {Store} store - The redux store.
 * @returns {Function}
 */
MiddlewareRegistry.register(store => next => action => {
    switch (action.type) {
    case TRACK_ADDED: {
        const { local } = action.track;

        // The devices list needs to be refreshed when no initial video permissions
        // were granted and a local video track is added by unmuting the video.
        if (local) {
            store.dispatch(getAvailableDevices());
        }

        break;
    }
    case TRACK_NO_DATA_FROM_SOURCE: {
        const result = next(action);

        _handleNoDataFromSourceErrors(store, action);

        return result;
    }

    case TRACK_REMOVED: {
        _removeNoDataFromSourceNotification(store, action.track);
        break;
    }

    case TRACK_MUTE_UNMUTE_FAILED: {
        const { jitsiTrack } = action.track;
        const muted = action.wasMuted;
        const isVideoTrack = jitsiTrack.getType() !== MEDIA_TYPE.AUDIO;

        if (isVideoTrack && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP
            && getMultipleVideoSendingSupportFeatureFlag(store.getState())) {
            store.dispatch(setScreenshareMuted(!muted));
        } else if (isVideoTrack) {
            APP.conference.setVideoMuteStatus();
        } else {
            APP.conference.setAudioMuteStatus(!muted);
        }

        break;
    }

    case TRACK_STOPPED: {
        const { jitsiTrack } = action.track;

        if (getMultipleVideoSendingSupportFeatureFlag(store.getState())
            && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
            store.dispatch(toggleScreensharing(false));
        }
        break;
    }

    case TRACK_UPDATED: {
        // TODO Remove the following calls to APP.UI once components interested
        // in track mute changes are moved into React and/or redux.

        const result = next(action);
        const state = store.getState();

        if (isPrejoinPageVisible(state)) {
            return result;
        }

        const { jitsiTrack } = action.track;
        const muted = jitsiTrack.isMuted();
        const participantID = jitsiTrack.getParticipantId();
        const isVideoTrack = jitsiTrack.type !== MEDIA_TYPE.AUDIO;

        if (isVideoTrack) {
            // Do not change the video mute state for local presenter tracks.
            if (jitsiTrack.type === MEDIA_TYPE.PRESENTER) {
                APP.conference.mutePresenter(muted);
            } else if (jitsiTrack.isLocal() && !(jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP)) {
                APP.conference.setVideoMuteStatus();
            } else if (jitsiTrack.isLocal() && muted && jitsiTrack.getVideoType() === VIDEO_TYPE.DESKTOP) {
                !getMultipleVideoSendingSupportFeatureFlag(state)
                    && store.dispatch(toggleScreensharing(false, false, true));
            } else {
                APP.UI.setVideoMuted(participantID);
            }
        } else if (jitsiTrack.isLocal()) {
            APP.conference.setAudioMuteStatus(muted);
        } else {
            APP.UI.setAudioMuted(participantID, muted);
        }

        return result;
    }

    }

    return next(action);
});

/**
 * Handles no data from source errors.
 *
 * @param {Store} store - The redux store in which the specified action is
 * dispatched.
 * @param {Action} action - The redux action dispatched in the specified store.
 * @private
 * @returns {void}
 */
function _handleNoDataFromSourceErrors(store: IStore, action: any) {
    const { getState, dispatch } = store;

    const track = getTrackByJitsiTrack(getState()['features/base/tracks'], action.track.jitsiTrack);

    if (!track || !track.local) {
        return;
    }

    const { jitsiTrack } = track;

    if (track.mediaType === MEDIA_TYPE.AUDIO && track.isReceivingData) {
        _removeNoDataFromSourceNotification(store, action.track);
    }

    if (track.mediaType === MEDIA_TYPE.VIDEO) {
        const { noDataFromSourceNotificationInfo = {} } = track;

        if (track.isReceivingData) {
            if (noDataFromSourceNotificationInfo.timeout) {
                clearTimeout(noDataFromSourceNotificationInfo.timeout);
                dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, undefined));
            }

            // try to remove the notification if there is one.
            _removeNoDataFromSourceNotification(store, action.track);
        } else {
            if (noDataFromSourceNotificationInfo.timeout) {
                return;
            }

            const timeout = setTimeout(() => dispatch(showNoDataFromSourceVideoError(jitsiTrack)), 5000);

            dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, { timeout }));
        }
    }
}

/**
 * Removes the no data from source notification associated with the JitsiTrack if displayed.
 *
 * @param {Store} store - The redux store.
 * @param {Track} track - The track whose notification is to be removed.
 * @returns {void}
 */
function _removeNoDataFromSourceNotification({ getState, dispatch }: IStore, track: ITrack) {
    const t = getTrackByJitsiTrack(getState()['features/base/tracks'], track.jitsiTrack);
    const { jitsiTrack, noDataFromSourceNotificationInfo = {} } = t || {};

    if (noDataFromSourceNotificationInfo?.uid) {
        dispatch(hideNotification(noDataFromSourceNotificationInfo.uid));
        dispatch(trackNoDataFromSourceNotificationInfoChanged(jitsiTrack, undefined));
    }
}
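The timeout handling in _handleNoDataFromSourceErrors follows a simple schedule-and-cancel pattern; a standalone sketch (names are illustrative, not from the diff):

function scheduleNoDataError(showError: () => void, delayMs = 5000) {
    // Schedule the error notification; call the returned function once data
    // starts flowing again to cancel it.
    const timeout = setTimeout(showError, delayMs);

    return () => clearTimeout(timeout);
}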
@@ -1 +0,0 @@
export * from './native';
@@ -1 +0,0 @@
export * from './web';
@@ -1 +0,0 @@
export { default as Button } from './Button';
@@ -0,0 +1 @@
export * from './functions.any';
@@ -0,0 +1,64 @@
import { createLocalTrack } from '../base/lib-jitsi-meet/functions';

export * from './functions.any';

/**
 * Returns a promise which resolves with a list of objects containing
 * all the video jitsiTracks and appropriate errors for the given device ids.
 *
 * @param {string[]} ids - The list of the camera ids for which to create tracks.
 * @param {number} [timeout] - A timeout for the createLocalTrack function call.
 *
 * @returns {Promise<Object[]>}
 */
export function createLocalVideoTracks(ids: string[], timeout?: number) {
    return Promise.all(ids.map(deviceId => createLocalTrack('video', deviceId, timeout)
        .then((jitsiTrack: any) => {
            return {
                jitsiTrack,
                deviceId
            };
        })
        .catch(() => {
            return {
                jitsiTrack: null,
                deviceId,
                error: 'deviceSelection.previewUnavailable'
            };
        })));
}


/**
 * Returns a promise which resolves with a list of objects containing
 * the audio track and the corresponding audio device information.
 *
 * @param {Object[]} devices - A list of microphone devices.
 * @param {number} [timeout] - A timeout for the createLocalTrack function call.
 * @returns {Promise<{
 *   deviceId: string,
 *   hasError: boolean,
 *   jitsiTrack: Object,
 *   label: string
 * }[]>}
 */
export function createLocalAudioTracks(devices: MediaDeviceInfo[], timeout?: number) {
    return Promise.all(
        devices.map(async ({ deviceId, label }) => {
            let jitsiTrack = null;
            let hasError = false;

            try {
                jitsiTrack = await createLocalTrack('audio', deviceId, timeout);
            } catch (err) {
                hasError = true;
            }

            return {
                deviceId,
                hasError,
                jitsiTrack,
                label
            };
        }));
}
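A hypothetical usage of createLocalAudioTracks (not part of the commit) that pairs it with the standard enumerateDevices browser API; the 5000 ms timeout is an arbitrary illustrative value:

async function previewMicrophones() {
    const devices = await navigator.mediaDevices.enumerateDevices();
    const microphones = devices.filter(device => device.kind === 'audioinput');

    return createLocalAudioTracks(microphones, 5000);
}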