Muting support

fix Promise in setMuted
pull/3223/head
Radium Zheng 6 years ago
parent 5daa91ec1b
commit 65c76dcde5
1. react/features/local-recording/controller/RecordingController.js (40 changed lines)
2. react/features/local-recording/middleware.js (10 changed lines)
3. react/features/local-recording/recording/OggAdapter.js (29 changed lines)
4. react/features/local-recording/recording/RecordingAdapter.js (17 changed lines)
5. react/features/local-recording/recording/Utils.js (3 changed lines)
6. react/features/local-recording/recording/WavAdapter.js (62 changed lines)
7. react/features/local-recording/recording/flac/FlacAdapter.js (32 changed lines)

@@ -40,6 +40,11 @@ const COMMAND_PONG = 'localRecPong';
  */
 const PROPERTY_STATS = 'localRecStats';
 
+/**
+ * Supported recording formats.
+ */
+const RECORDING_FORMATS = new Set([ 'flac', 'wav', 'ogg' ]);
+
 /**
  * Default recording format.
  */
@@ -135,6 +140,13 @@ class RecordingController {
      */
     _state = ControllerState.IDLE;
 
+    /**
+     * Whether or not the audio is muted in the UI. This is stored as internal
+     * state of {@code RecordingController} because we might have recording
+     * sessions that start muted.
+     */
+    _isMuted = false;
+
     /**
      * Current recording format. This will be in effect from the next
      * recording session, i.e., if this value is changed during an on-going
@@ -299,6 +311,21 @@ class RecordingController {
         }
     }
 
+    /**
+     * Mute or unmute audio. When muted, the ongoing local recording should
+     * produce silence.
+     *
+     * @param {boolean} muted - If the audio should be muted.
+     * @returns {void}
+     */
+    setMuted(muted: boolean) {
+        this._isMuted = Boolean(muted);
+
+        if (this._state === ControllerState.RECORDING) {
+            this._adapters[this._currentSessionToken].setMuted(muted);
+        }
+    }
+
     /**
      * Switches the recording format.
      *
@@ -306,6 +333,11 @@ class RecordingController {
      * @returns {void}
      */
     switchFormat(newFormat: string) {
+        if (!RECORDING_FORMATS.has(newFormat)) {
+            logger.log(`Unknown format ${newFormat}. Ignoring...`);
+
+            return;
+        }
         this._format = newFormat;
         logger.log(`Recording format switched to ${newFormat}`);
@@ -465,13 +497,13 @@ class RecordingController {
     }
 
     /**
-     * Generates a token that can be used to distinguish each
-     * recording session.
+     * Generates a token that can be used to distinguish each local recording
+     * session.
      *
      * @returns {number}
      */
     _getRandomToken() {
-        return Math.floor(Math.random() * 10000) + 1;
+        return Math.floor(Math.random() * 100000000) + 1;
     }
 
     _doStartRecording: () => void;
@@ -497,6 +529,8 @@ class RecordingController {
                 if (this._onStateChanged) {
                     this._onStateChanged(true);
                 }
+
+                delegate.setMuted(this._isMuted);
                 this._updateStats();
             })
             .catch(err => {

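Note: the controller caches the mute state instead of only forwarding it, so a session that starts while the conference is muted begins silent (see delegate.setMuted(this._isMuted) above). A minimal usage sketch; the UI handler and the import path are hypothetical, not part of this commit:

import { recordingController } from './controller';

// Forward every UI toggle; the controller applies it to the active adapter
// only while a session is RECORDING, and replays the cached _isMuted when
// the next session starts.
function onAudioMuteToggled(muted) {
    recordingController.setMuted(muted);
}
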
@@ -5,6 +5,7 @@ import { APP_WILL_MOUNT, APP_WILL_UNMOUNT } from '../base/app';
 import { CONFERENCE_JOINED } from '../base/conference';
 import { toggleDialog } from '../base/dialog';
 import { i18next } from '../base/i18n';
+import { SET_AUDIO_MUTED } from '../base/media';
 import { MiddlewareRegistry } from '../base/redux';
 import { showNotification } from '../notifications';
@@ -25,11 +26,17 @@ isFeatureEnabled
     switch (action.type) {
     case CONFERENCE_JOINED: {
         const { conference } = getState()['features/base/conference'];
+        const { localRecording } = getState()['features/base/config'];
+
+        if (localRecording && localRecording.format) {
+            recordingController.switchFormat(localRecording.format);
+        }
+
         recordingController.registerEvents(conference);
         break;
     }
 
     case APP_WILL_MOUNT:
         // realize the delegates on recordingController, allowing the UI to
         // react to state changes in recordingController.
         recordingController.onStateChanged = isEngaged => {
@@ -66,6 +73,9 @@ isFeatureEnabled
         recordingController.onNotify = null;
         recordingController.onWarning = null;
         break;
+    case SET_AUDIO_MUTED:
+        recordingController.setMuted(action.muted);
+        break;
     }
 
     // @todo: detect change in features/base/settings micDeviceID

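Note: the SET_AUDIO_MUTED case is an instance of a common pattern, bridging Redux actions into a non-Redux singleton. A reduced sketch of that pattern, with made-up names:

// Let every action through, then mirror mute changes into an external
// controller object that lives outside the Redux store.
const muteBridge = controller => store => next => action => {
    const result = next(action);

    if (action.type === 'SET_AUDIO_MUTED') {
        controller.setMuted(action.muted);
    }

    return result;
};
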
@@ -64,6 +64,34 @@ export class OggAdapter extends RecordingAdapter {
         }
     }
 
+    /**
+     * Implements {@link RecordingAdapter#setMuted()}.
+     *
+     * @inheritdoc
+     */
+    setMuted(muted) {
+        const shouldEnable = !muted;
+
+        if (!this._stream) {
+            return Promise.resolve();
+        }
+
+        const track = this._stream.getAudioTracks()[0];
+
+        if (!track) {
+            logger.error('Cannot mute/unmute. Track not found!');
+
+            return Promise.resolve();
+        }
+
+        if (track.enabled !== shouldEnable) {
+            track.enabled = shouldEnable;
+            logger.log(muted ? 'Mute' : 'Unmute');
+        }
+
+        return Promise.resolve();
+    }
+
     /**
      * Initialize the adapter.
      *
@@ -78,6 +106,7 @@ export class OggAdapter extends RecordingAdapter {
         return new Promise((resolve, error) => {
             this._getAudioStream(0)
                 .then(stream => {
+                    this._stream = stream;
                     this._mediaRecorder = new MediaRecorder(stream);
                     this._mediaRecorder.ondataavailable
                         = e => this._saveMediaData(e.data);

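Note: the adapter mutes by flipping MediaStreamTrack.enabled rather than stopping the track; a disabled track keeps emitting frames of silence, so the MediaRecorder timeline stays continuous. The technique in isolation (a sketch; stream is assumed to come from getUserMedia or similar):

function setStreamMuted(stream, muted) {
    // A disabled audio track outputs silence without ending; re-enabling it
    // resumes live audio with no renegotiation.
    for (const track of stream.getAudioTracks()) {
        track.enabled = !muted;
    }
}
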
@@ -33,8 +33,19 @@ export class RecordingAdapter {
     }
 
     /**
-     * Helper method for getting an audio MediaStream. Use this instead of
-     * calling browser APIs directly.
+     * Mutes or unmutes the current recording.
+     *
+     * @param {boolean} muted - Whether to mute or to unmute.
+     * @returns {Promise}
+     */
+    setMuted(/* eslint-disable no-unused-vars */
+        muted/* eslint-enable no-unused-vars */) {
+        throw new Error('Not implemented');
+    }
+
+    /**
+     * Helper method for getting an audio {@code MediaStream}. Use this instead
+     * of calling browser APIs directly.
      *
      * @protected
      * @param {number} micDeviceId - The ID of the current audio device.
@@ -52,7 +63,7 @@ export class RecordingAdapter {
             const mediaStream = result[0].stream;
 
             if (mediaStream === undefined) {
-                throw new Error('Failed to get MediaStream.');
+                throw new Error('Failed to create local track.');
             }
 
             return mediaStream;

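Note: per the commit message ("fix Promise in setMuted"), the point of the base-class contract is that every setMuted implementation hands back a Promise, so callers can chain on completion. A sketch of a conforming subclass (the class name and body are illustrative):

import { RecordingAdapter } from './RecordingAdapter';

class NullAdapter extends RecordingAdapter {
    setMuted() {
        // Resolve even when there is nothing to mute, so callers can always
        // chain .then() safely.
        return Promise.resolve();
    }
}
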
@@ -17,8 +17,7 @@ export function downloadBlob(blob, fileName = 'recording.ogg') {
 }
 
 /**
- * Obtains a timestamp of now.
- * Used in filenames.
+ * Obtains a timestamp of now. Used in filenames.
*
* @returns {string}
*/

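Note: one possible shape for such a helper, shown only as an illustration (not the actual Utils.js implementation): make the ISO timestamp filename-safe by replacing characters that are invalid on common filesystems.

function timestampString() {
    // '2018-07-03T10:15:30.123Z' -> '2018-07-03T10-15-30-123Z'
    return new Date().toISOString().replace(/[:.]/g, '-');
}
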
@@ -11,6 +11,11 @@ const WAV_SAMPLE_RATE = 44100;
  */
 export class WavAdapter extends RecordingAdapter {
 
+    /**
+     * The current {@code MediaStream} instance.
+     */
+    _stream = null;
+
     /**
      * {@code AudioContext} instance.
      */
@@ -65,17 +70,15 @@ export class WavAdapter extends RecordingAdapter {
             this._initPromise = this._initialize();
         }
 
-        return new Promise(
-            (resolve, /* eslint-disable */_reject/* eslint-enable */) => {
-                this._wavBuffers = [];
-                this._wavLength = 0;
-                this._wavBuffers.push(this._createWavHeader());
-
-                this._audioSource.connect(this._audioProcessingNode);
-                this._audioProcessingNode
-                    .connect(this._audioContext.destination);
-                resolve();
-            });
+        return this._initPromise.then(() => {
+            this._wavBuffers = [];
+            this._wavLength = 0;
+            this._wavBuffers.push(this._createWavHeader());
+
+            this._audioSource.connect(this._audioProcessingNode);
+            this._audioProcessingNode
+                .connect(this._audioContext.destination);
+        });
     }
 
     /**
@@ -108,6 +111,34 @@ export class WavAdapter extends RecordingAdapter {
         }
     }
 
+    /**
+     * Implements {@link RecordingAdapter#setMuted()}.
+     *
+     * @inheritdoc
+     */
+    setMuted(muted) {
+        const shouldEnable = !muted;
+
+        if (!this._stream) {
+            return Promise.resolve();
+        }
+
+        const track = this._stream.getAudioTracks()[0];
+
+        if (!track) {
+            logger.error('Cannot mute/unmute. Track not found!');
+
+            return Promise.resolve();
+        }
+
+        if (track.enabled !== shouldEnable) {
+            track.enabled = shouldEnable;
+            logger.log(muted ? 'Mute' : 'Unmute');
+        }
+
+        return Promise.resolve();
+    }
+
     /**
      * Creates a WAVE file header.
      *
@@ -176,6 +207,7 @@ export class WavAdapter extends RecordingAdapter {
         const p = new Promise((resolve, reject) => {
             this._getAudioStream(0)
                 .then(stream => {
+                    this._stream = stream;
                     this._audioContext = new AudioContext();
                     this._audioSource
                         = this._audioContext.createMediaStreamSource(stream);
@@ -209,12 +241,10 @@ export class WavAdapter extends RecordingAdapter {
      * @returns {void}
      */
     _saveWavPCM(data) {
-        // need to copy the Float32Array,
-        // unlike passing to WebWorker,
-        // this data is passed by reference,
-        // so we need to copy it, otherwise the
-        // audio file will be just repeating the last
-        // segment.
+        // Need to copy the Float32Array:
+        // unlike passing to WebWorker, this data is passed by reference,
+        // so we need to copy it, otherwise the resulting audio file will be
+        // just repeating the last segment.
         this._wavBuffers.push(new Float32Array(data));
         this._wavLength += data.length;
     }

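Note: the rewritten comment describes real ScriptProcessorNode behaviour: getChannelData() returns a view into a buffer that is reused on subsequent audioprocess callbacks, so storing the reference would leave every saved chunk aliasing the most recent audio. A reduced illustration (node and buffers are assumed to exist):

node.onaudioprocess = e => {
    const data = e.inputBuffer.getChannelData(0);

    buffers.push(new Float32Array(data)); // copies the samples: safe
    // buffers.push(data);                // stores a reference: every chunk
    //                                    // would replay the last segment
};
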
@@ -20,6 +20,7 @@ export class FlacAdapter extends RecordingAdapter {
     _audioContext = null;
     _audioProcessingNode = null;
     _audioSource = null;
+    _stream = null;
 
     /**
      * Resolve function of the promise returned by {@code stop()}.
@@ -85,6 +86,34 @@ export class FlacAdapter extends RecordingAdapter {
         }
     }
 
+    /**
+     * Implements {@link RecordingAdapter#setMuted()}.
+     *
+     * @inheritdoc
+     */
+    setMuted(muted) {
+        const shouldEnable = !muted;
+
+        if (!this._stream) {
+            return Promise.resolve();
+        }
+
+        const track = this._stream.getAudioTracks()[0];
+
+        if (!track) {
+            logger.error('Cannot mute/unmute. Track not found!');
+
+            return Promise.resolve();
+        }
+
+        if (track.enabled !== shouldEnable) {
+            track.enabled = shouldEnable;
+            logger.log(muted ? 'Mute' : 'Unmute');
+        }
+
+        return Promise.resolve();
+    }
+
     /**
      * Initialize the adapter.
      *
@@ -138,6 +167,7 @@ export class FlacAdapter extends RecordingAdapter {
         const callbackInitAudioContext = (resolve, reject) => {
             this._getAudioStream(0)
                 .then(stream => {
+                    this._stream = stream;
                     this._audioContext = new AudioContext();
                     this._audioSource
                         = this._audioContext.createMediaStreamSource(stream);
@@ -161,7 +191,7 @@ export class FlacAdapter extends RecordingAdapter {
             });
         };
 
-        // FIXME: because Promise constructor immediately executes the executor
+        // Because Promise constructor immediately executes the executor
         // function. This is undesirable, we want callbackInitAudioContext to be
         // executed only **after** promiseInitWorker is resolved.
         return promiseInitWorker
             .then(() => new Promise(callbackInitAudioContext));

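Note: the comment fixed above points at a real footgun: a Promise executor runs synchronously, at construction time. To sequence two initialization steps, the second Promise must be constructed inside a .then() callback, which is what the code does with callbackInitAudioContext. The behaviour in miniature:

const first = new Promise(resolve => {
    console.log('runs immediately, at construction time');
    resolve();
});

first.then(() => new Promise(resolve => {
    console.log('runs only after "first" has resolved');
    resolve();
}));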