mirror of https://github.com/jitsi/jitsi-meet
parent
996b1791d5
commit
5b34a66cb6
@ -1,180 +0,0 @@ |
||||
/* global connection, Strophe, updateLargeVideo, focusedVideoSrc*/ |
||||
|
||||
// cache datachannels to avoid garbage collection
|
||||
// https://code.google.com/p/chromium/issues/detail?id=405545
|
||||
// Cache opened data channels so they are not garbage collected.
// https://code.google.com/p/chromium/issues/detail?id=405545
var _dataChannels = [];

/**
 * Callback triggered by PeerConnection when a new data channel is opened
 * on the bridge.
 * @param event the event info object.
 */
function onDataChannel(event)
{
    var dataChannel = event.channel;

    dataChannel.onopen = function ()
    {
        console.info("Data channel opened by the Videobridge!", dataChannel);

        // Code sample for sending string and/or binary data
        // Sends String message to the bridge
        //dataChannel.send("Hello bridge!");
        // Sends 12 bytes binary message to the bridge
        //dataChannel.send(new ArrayBuffer(12));

        // when the data channel becomes available, tell the bridge about video
        // selections so that it can do adaptive simulcast,
        var userJid = VideoLayout.getLargeVideoState().userJid;
        // we want the notification to trigger even if userJid is undefined,
        // or null.
        onSelectedEndpointChanged(userJid);
    };

    dataChannel.onerror = function (error)
    {
        console.error("Data Channel Error:", error, dataChannel);
    };

    dataChannel.onmessage = function (event)
    {
        var data = event.data;
        // The bridge sends JSON-encoded colibri messages.
        var obj;

        try
        {
            obj = JSON.parse(data);
        }
        catch (e)
        {
            console.error(
                "Failed to parse data channel message as JSON: ",
                data,
                dataChannel);
        }
        if (('undefined' !== typeof(obj)) && (null !== obj))
        {
            var colibriClass = obj.colibriClass;

            if ("DominantSpeakerEndpointChangeEvent" === colibriClass)
            {
                // Endpoint ID from the Videobridge.
                var dominantSpeakerEndpoint = obj.dominantSpeakerEndpoint;

                console.info(
                    "Data channel new dominant speaker event: ",
                    dominantSpeakerEndpoint);
                $(document).trigger(
                    'dominantspeakerchanged',
                    [dominantSpeakerEndpoint]);
            }
            else if ("InLastNChangeEvent" === colibriClass)
            {
                var oldValue = obj.oldValue;
                var newValue = obj.newValue;
                // Make sure that oldValue and newValue are of type boolean.
                var type;

                if ((type = typeof oldValue) !== 'boolean') {
                    if (type === 'string') {
                        oldValue = (oldValue == "true");
                    } else {
                        oldValue = new Boolean(oldValue).valueOf();
                    }
                }
                if ((type = typeof newValue) !== 'boolean') {
                    if (type === 'string') {
                        newValue = (newValue == "true");
                    } else {
                        newValue = new Boolean(newValue).valueOf();
                    }
                }
                $(document).trigger('inlastnchanged', [oldValue, newValue]);
            }
            else if ("LastNEndpointsChangeEvent" === colibriClass)
            {
                // The new/latest list of last-n endpoint IDs.
                var lastNEndpoints = obj.lastNEndpoints;
                // The list of endpoint IDs which are entering the list of
                // last-n at this time i.e. were not in the old list of last-n
                // endpoint IDs.
                var endpointsEnteringLastN = obj.endpointsEnteringLastN;
                var stream = obj.stream;

                console.log(
                    "Data channel new last-n event: ",
                    lastNEndpoints, endpointsEnteringLastN, obj);
                $(document).trigger(
                    'lastnchanged',
                    [lastNEndpoints, endpointsEnteringLastN, stream]);
            }
            else if ("SimulcastLayersChangedEvent" === colibriClass)
            {
                $(document).trigger(
                    'simulcastlayerschanged',
                    [obj.endpointSimulcastLayers]);
            }
            else if ("SimulcastLayersChangingEvent" === colibriClass)
            {
                $(document).trigger(
                    'simulcastlayerschanging',
                    [obj.endpointSimulcastLayers]);
            }
            else if ("StartSimulcastLayerEvent" === colibriClass)
            {
                $(document).trigger('startsimulcastlayer', obj.simulcastLayer);
            }
            else if ("StopSimulcastLayerEvent" === colibriClass)
            {
                $(document).trigger('stopsimulcastlayer', obj.simulcastLayer);
            }
            else
            {
                console.debug("Data channel JSON-formatted message: ", obj);
            }
        }
    };

    dataChannel.onclose = function ()
    {
        console.info("The Data Channel closed", dataChannel);
        var idx = _dataChannels.indexOf(dataChannel);
        if (idx > -1)
            // FIX: splice() mutates the array in place and returns the
            // REMOVED elements. The original assigned that return value back
            // to _dataChannels, replacing the cache with an array holding
            // only the closed channel.
            _dataChannels.splice(idx, 1);
    };
    _dataChannels.push(dataChannel);
}
||||
|
||||
/** |
||||
* Binds "ondatachannel" event listener to given PeerConnection instance. |
||||
* @param peerConnection WebRTC peer connection instance. |
||||
*/ |
||||
/**
 * Binds the "ondatachannel" event listener to the given PeerConnection
 * instance, so channels opened by the Videobridge get handled.
 * @param peerConnection WebRTC peer connection instance.
 */
function bindDataChannelListener(peerConnection)
{
    peerConnection.ondatachannel = onDataChannel;

    // Sample code for opening new data channel from Jitsi Meet to the bridge.
    // Although it's not a requirement to open separate channels from both bridge
    // and peer as single channel can be used for sending and receiving data.
    // So either channel opened by the bridge or the one opened here is enough
    // for communication with the bridge.
    /*
    var dataChannelOptions = { reliable: true };
    var dataChannel
        = peerConnection.createDataChannel("myChannel", dataChannelOptions);

    // Can be used only when is in open state
    dataChannel.onopen = function ()
    {
        dataChannel.send("My channel !!!");
    };
    dataChannel.onmessage = function (event)
    {
        var msgData = event.data;
        console.info("Got My Data Channel Message:", msgData, dataChannel);
    };
    */
}
||||
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -0,0 +1,235 @@ |
||||
/* global connection, Strophe, updateLargeVideo, focusedVideoSrc*/ |
||||
|
||||
// cache datachannels to avoid garbage collection
|
||||
// https://code.google.com/p/chromium/issues/detail?id=405545
|
||||
var _dataChannels = []; |
||||
|
||||
|
||||
|
||||
var DataChannels =
{
    /**
     * Callback triggered by PeerConnection when new data channel is opened
     * on the bridge.
     * @param event the event info object.
     */
    onDataChannel: function (event)
    {
        var dataChannel = event.channel;

        dataChannel.onopen = function () {
            console.info("Data channel opened by the Videobridge!",
                dataChannel);

            // Code sample for sending string and/or binary data
            // Sends String message to the bridge
            //dataChannel.send("Hello bridge!");
            // Sends 12 bytes binary message to the bridge
            //dataChannel.send(new ArrayBuffer(12));

            // when the data channel becomes available, tell the bridge about
            // video selections so that it can do adaptive simulcast,
            var userJid = VideoLayout.getLargeVideoState().userJid;
            // we want the notification to trigger even if userJid is
            // undefined, or null.
            onSelectedEndpointChanged(userJid);
        };

        dataChannel.onerror = function (error) {
            console.error("Data Channel Error:", error, dataChannel);
        };

        dataChannel.onmessage = function (event) {
            var data = event.data;
            // The bridge sends JSON-encoded colibri messages.
            var obj;

            try {
                obj = JSON.parse(data);
            }
            catch (e) {
                console.error(
                    "Failed to parse data channel message as JSON: ",
                    data,
                    dataChannel);
            }
            if (('undefined' !== typeof(obj)) && (null !== obj)) {
                var colibriClass = obj.colibriClass;

                if ("DominantSpeakerEndpointChangeEvent" === colibriClass) {
                    // Endpoint ID from the Videobridge.
                    var dominantSpeakerEndpoint = obj.dominantSpeakerEndpoint;

                    console.info(
                        "Data channel new dominant speaker event: ",
                        dominantSpeakerEndpoint);
                    $(document).trigger(
                        'dominantspeakerchanged',
                        [dominantSpeakerEndpoint]);
                }
                else if ("InLastNChangeEvent" === colibriClass)
                {
                    var oldValue = obj.oldValue;
                    var newValue = obj.newValue;
                    // Make sure that oldValue and newValue are of type
                    // boolean.
                    var type;

                    if ((type = typeof oldValue) !== 'boolean') {
                        if (type === 'string') {
                            oldValue = (oldValue == "true");
                        } else {
                            oldValue = new Boolean(oldValue).valueOf();
                        }
                    }
                    if ((type = typeof newValue) !== 'boolean') {
                        if (type === 'string') {
                            newValue = (newValue == "true");
                        } else {
                            newValue = new Boolean(newValue).valueOf();
                        }
                    }
                    $(document).trigger('inlastnchanged',
                        [oldValue, newValue]);
                }
                else if ("LastNEndpointsChangeEvent" === colibriClass)
                {
                    // The new/latest list of last-n endpoint IDs.
                    var lastNEndpoints = obj.lastNEndpoints;
                    // The list of endpoint IDs which are entering the list of
                    // last-n at this time i.e. were not in the old list of
                    // last-n endpoint IDs.
                    var endpointsEnteringLastN = obj.endpointsEnteringLastN;
                    var stream = obj.stream;

                    console.log(
                        "Data channel new last-n event: ",
                        lastNEndpoints, endpointsEnteringLastN, obj);
                    $(document).trigger(
                        'lastnchanged',
                        [lastNEndpoints, endpointsEnteringLastN, stream]);
                }
                else if ("SimulcastLayersChangedEvent" === colibriClass)
                {
                    $(document).trigger(
                        'simulcastlayerschanged',
                        [obj.endpointSimulcastLayers]);
                }
                else if ("SimulcastLayersChangingEvent" === colibriClass)
                {
                    $(document).trigger(
                        'simulcastlayerschanging',
                        [obj.endpointSimulcastLayers]);
                }
                else if ("StartSimulcastLayerEvent" === colibriClass)
                {
                    $(document).trigger('startsimulcastlayer',
                        obj.simulcastLayer);
                }
                else if ("StopSimulcastLayerEvent" === colibriClass)
                {
                    $(document).trigger('stopsimulcastlayer',
                        obj.simulcastLayer);
                }
                else
                {
                    console.debug("Data channel JSON-formatted message: ",
                        obj);
                }
            }
        };

        dataChannel.onclose = function ()
        {
            console.info("The Data Channel closed", dataChannel);
            var idx = _dataChannels.indexOf(dataChannel);
            if (idx > -1)
                // FIX: do not assign the return value of splice() (the
                // removed elements) back to the cache; splice already
                // mutates the array in place.
                _dataChannels.splice(idx, 1);
        };
        _dataChannels.push(dataChannel);
    },

    /**
     * Binds "ondatachannel" event listener to given PeerConnection instance.
     * Does nothing when SCTP data channels are disabled in the config.
     * @param peerConnection WebRTC peer connection instance.
     */
    bindDataChannelListener: function (peerConnection) {
        // FIX: the original read "retrun;", which threw a ReferenceError
        // instead of returning early when SCTP is disabled.
        if (!config.openSctp)
            return;

        peerConnection.ondatachannel = this.onDataChannel;

        // Sample code for opening new data channel from Jitsi Meet to the bridge.
        // Although it's not a requirement to open separate channels from both bridge
        // and peer as single channel can be used for sending and receiving data.
        // So either channel opened by the bridge or the one opened here is enough
        // for communication with the bridge.
        /*var dataChannelOptions =
        {
            reliable: true
        };
        var dataChannel
            = peerConnection.createDataChannel("myChannel", dataChannelOptions);

        // Can be used only when is in open state
        dataChannel.onopen = function ()
        {
            dataChannel.send("My channel !!!");
        };
        dataChannel.onmessage = function (event)
        {
            var msgData = event.data;
            console.info("Got My Data Channel Message:", msgData, dataChannel);
        };*/
    }

};
||||
|
||||
/**
 * Tells the bridge (over the first open data channel) which endpoint is
 * currently selected for large video, so it can do adaptive simulcast.
 * @param userJid full JID of the selected endpoint; may be null/undefined
 * to signal "no selection".
 */
function onSelectedEndpointChanged(userJid)
{
    console.log('selected endpoint changed: ', userJid);
    if (!_dataChannels || _dataChannels.length == 0)
        return;

    // Use the first channel that is ready; stop iterating once sent.
    _dataChannels.some(function (channel) {
        if (channel.readyState != 'open')
            return false;

        channel.send(JSON.stringify({
            'colibriClass': 'SelectedEndpointChangedEvent',
            'selectedEndpoint': userJid ? userJid : null
        }));
        return true;
    });
}
||||
|
||||
// Forward UI "selectedendpointchanged" events to the bridge notifier above.
$(document).bind(
    "selectedendpointchanged",
    function (event, userJid) {
        onSelectedEndpointChanged(userJid);
    });
||||
|
||||
/**
 * Tells the bridge (over the first open data channel) which endpoint the
 * user has pinned, identified by its JID resource.
 * @param userJid full JID of the pinned endpoint; may be null/undefined
 * to signal "nothing pinned".
 */
function onPinnedEndpointChanged(userJid)
{
    console.log('pinned endpoint changed: ', userJid);
    if (!_dataChannels || _dataChannels.length == 0)
        return;

    // Use the first channel that is ready; stop iterating once sent.
    _dataChannels.some(function (channel) {
        if (channel.readyState != 'open')
            return false;

        channel.send(JSON.stringify({
            'colibriClass': 'PinnedEndpointChangedEvent',
            'pinnedEndpoint':
                userJid ? Strophe.getResourceFromJid(userJid) : null
        }));
        return true;
    });
}
||||
|
||||
// Forward UI "pinnedendpointchanged" events to the bridge notifier above.
$(document).bind(
    "pinnedendpointchanged",
    function (event, userJid) {
        onPinnedEndpointChanged(userJid);
    });
||||
|
||||
module.exports = DataChannels; |
||||
|
@ -0,0 +1,66 @@ |
||||
//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
|
||||
|
||||
/**
 * Wraps a local MediaStream (camera/microphone capture).
 *
 * @param stream the underlying MediaStream
 * @param type "audio" or "video"
 * @param eventEmitter emitter used to broadcast stream lifecycle events
 * @constructor
 */
function LocalStream(stream, type, eventEmitter)
{
    this.stream = stream;
    this.eventEmitter = eventEmitter;
    this.type = type;

    var self = this;
    this.stream.onended = function ()
    {
        self.streamEnded();
    };
}

/**
 * Notifies listeners that this local stream has ended.
 */
LocalStream.prototype.streamEnded = function () {
    this.eventEmitter.emit(StreamEventTypes.EVENT_TYPE_LOCAL_ENDED, this);
};

/**
 * @returns the underlying MediaStream.
 */
LocalStream.prototype.getOriginalStream = function()
{
    return this.stream;
};

/**
 * @returns true when the wrapped stream has at least one audio track.
 */
LocalStream.prototype.isAudioStream = function () {
    return (this.stream.getAudioTracks()
        && this.stream.getAudioTracks().length > 0);
};

/**
 * Toggles the enabled state of this stream's tracks.
 * @returns whether the stream was muted BEFORE the toggle.
 */
LocalStream.prototype.mute = function()
{
    var ismuted = false;
    var tracks = [];
    // FIX: the original used an assignment (this.type = "audio") instead of
    // a comparison, so video streams toggled their (nonexistent) AUDIO
    // tracks and were silently re-labelled as audio streams.
    if (this.type === "audio")
    {
        tracks = this.stream.getAudioTracks();
    }
    else
    {
        tracks = this.stream.getVideoTracks();
    }

    for (var idx = 0; idx < tracks.length; idx++) {
        ismuted = !tracks[idx].enabled;
        tracks[idx].enabled = !tracks[idx].enabled;
    }
    return ismuted;
};

/**
 * @returns true when every relevant track is disabled (muted).
 */
LocalStream.prototype.isMuted = function () {
    var tracks = [];
    // FIX: comparison, not assignment (same defect as in mute()).
    if (this.type === "audio")
    {
        tracks = this.stream.getAudioTracks();
    }
    else
    {
        tracks = this.stream.getVideoTracks();
    }
    for (var idx = 0; idx < tracks.length; idx++) {
        if (tracks[idx].enabled)
            return false;
    }
    return true;
};
||||
|
||||
module.exports = LocalStream; |
@ -0,0 +1,49 @@ |
||||
var RTC = require("./RTC.js"); |
||||
////These lines should be uncommented when require works in app.js
|
||||
//var RTCBrowserType = require("../../service/RTC/RTCBrowserType.js");
|
||||
//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
|
||||
//var MediaStreamType = require("../../service/RTC/MediaStreamTypes");
|
||||
|
||||
/** |
||||
* Creates a MediaStream object for the given data, session id and ssrc. |
||||
* It is a wrapper class for the MediaStream. |
||||
* |
||||
* @param data the data object from which we obtain the stream, |
||||
* the peerjid, etc. |
||||
* @param sid the session id |
||||
* @param ssrc the ssrc corresponding to this MediaStream |
||||
* |
||||
* @constructor |
||||
*/ |
||||
/**
 * Creates a MediaStream object for the given data, session id and ssrc.
 * It is a wrapper class for the MediaStream.
 *
 * @param data the data object from which we obtain the stream,
 * the peerjid, etc.
 * @param sid the session id
 * @param ssrc the ssrc corresponding to this MediaStream
 * @param eventEmmiter emitter used to announce the new remote stream
 *
 * @constructor
 */
function MediaStream(data, sid, ssrc, eventEmmiter) {
    this.sid = sid;
    this.stream = data.stream;
    this.peerjid = data.peerjid;
    this.ssrc = ssrc;
    this.muted = false;
    // Classify by presence of video tracks.
    this.type = this.stream.getVideoTracks().length > 0
        ? MediaStreamType.VIDEO_TYPE
        : MediaStreamType.AUDIO_TYPE;
    eventEmmiter.emit(StreamEventTypes.EVENT_TYPE_REMOTE_CREATED, this);
}
||||
|
||||
// On Firefox, make sure the track accessors exist so downstream code can
// call them unconditionally.
// NOTE(review): these stubs land on the wrapper's prototype, not on the
// native MediaStream — presumably intentional here; confirm against callers.
if (RTC.getBrowserType() == RTCBrowserType.RTC_BROWSER_FIREFOX)
{
    if (!MediaStream.prototype.getVideoTracks)
        MediaStream.prototype.getVideoTracks = function () { return []; };
    if (!MediaStream.prototype.getAudioTracks)
        MediaStream.prototype.getAudioTracks = function () { return []; };
}

/**
 * @returns the underlying MediaStream.
 */
MediaStream.prototype.getOriginalStream = function ()
{
    return this.stream;
};

/**
 * Marks both the wrapped stream and this wrapper as muted/unmuted.
 * @param value boolean mute state
 */
MediaStream.prototype.setMute = function (value)
{
    this.stream.muted = value;
    this.muted = value;
};
||||
|
||||
|
||||
module.exports = MediaStream; |
@ -0,0 +1,122 @@ |
||||
var EventEmitter = require("events"); |
||||
var RTCUtils = require("./RTCUtils.js"); |
||||
//These lines should be uncommented when require works in app.js
|
||||
//var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
|
||||
//var XMPPEvents = require("../service/xmpp/XMPPEvents");
|
||||
|
||||
var eventEmitter = new EventEmitter(); |
||||
|
||||
/**
 * Facade over WebRTC functionality: tracks local/remote stream wrappers and
 * delegates browser specifics to an RTCUtils instance (created by start()).
 */
var RTC = {
    rtcUtils: null,
    // LocalStream wrappers for the local tracks.
    localStreams: [],
    // Map: peer JID -> { <MediaStreamType>: MediaStream wrapper }.
    remoteStreams: {},
    localAudio: null,
    localVideo: null,
    /**
     * Registers a listener for the given stream event type.
     */
    addStreamListener: function (listener, eventType) {
        eventEmitter.on(eventType, listener);
    },
    /**
     * Unregisters a listener for the given stream event type.
     * @throws "Illegal argument" when eventType is not an event-name string.
     */
    removeStreamListener: function (listener, eventType) {
        // FIX: the original checked `eventType instanceof StreamEventTypes`,
        // but StreamEventTypes is a plain object of string constants, so the
        // instanceof expression itself threw for every call. Event types are
        // plain strings.
        if (typeof eventType !== 'string')
            throw "Illegal argument";

        eventEmitter.removeListener(eventType, listener);
    },
    /**
     * Wraps the given raw stream in a LocalStream, remembers it and emits
     * EVENT_TYPE_LOCAL_CREATED.
     * @param stream the raw MediaStream
     * @param type "audio" or "video"
     * @returns the new LocalStream wrapper
     */
    createLocalStream: function (stream, type) {
        var LocalStream = require("./LocalStream.js");
        var localStream = new LocalStream(stream, type, eventEmitter);
        this.localStreams.push(localStream);
        if (type == "audio")
        {
            this.localAudio = localStream;
        }
        else
        {
            this.localVideo = localStream;
        }
        eventEmitter.emit(StreamEventTypes.EVENT_TYPE_LOCAL_CREATED,
            localStream);
        return localStream;
    },
    /**
     * Removes the wrapper for the given raw MediaStream, if present.
     */
    removeLocalStream: function (stream) {
        for (var i = 0; i < this.localStreams.length; i++)
        {
            if (this.localStreams[i].getOriginalStream() === stream) {
                // FIX: `delete` only left an undefined hole in the array
                // (length unchanged); splice actually removes the entry.
                this.localStreams.splice(i, 1);
                return;
            }
        }
    },
    /**
     * Wraps remote stream data in a MediaStream wrapper and indexes it by
     * peer JID and media type.
     */
    createRemoteStream: function (data, sid, thessrc) {
        var MediaStream = require("./MediaStream.js");
        var remoteStream = new MediaStream(data, sid, thessrc, eventEmitter);
        var jid = data.peerjid || connection.emuc.myroomjid;
        if (!this.remoteStreams[jid]) {
            this.remoteStreams[jid] = {};
        }
        this.remoteStreams[jid][remoteStream.type] = remoteStream;
        return remoteStream;
    },
    getBrowserType: function () {
        return this.rtcUtils.browser;
    },
    getPCConstraints: function () {
        return this.rtcUtils.pc_constraints;
    },
    // Delegates to RTCUtils; see RTCUtils.getUserMediaWithConstraints.
    getUserMediaWithConstraints: function (um, success_callback,
                                           failure_callback, resolution,
                                           bandwidth, fps, desktopStream)
    {
        return this.rtcUtils.getUserMediaWithConstraints(um, success_callback,
            failure_callback, resolution, bandwidth, fps, desktopStream);
    },
    attachMediaStream: function (element, stream) {
        this.rtcUtils.attachMediaStream(element, stream);
    },
    getStreamID: function (stream) {
        return this.rtcUtils.getStreamID(stream);
    },
    getVideoSrc: function (element) {
        return this.rtcUtils.getVideoSrc(element);
    },
    setVideoSrc: function (element, src) {
        this.rtcUtils.setVideoSrc(element, src);
    },
    dispose: function () {
        if (this.rtcUtils) {
            this.rtcUtils = null;
        }
    },
    stop: function () {
        this.dispose();
    },
    /**
     * Creates the RTCUtils helper and kicks off the getUserMedia
     * permission prompt.
     */
    start: function () {
        this.rtcUtils = new RTCUtils(this);
        this.rtcUtils.obtainAudioAndVideoPermissions();
    },
    onConferenceCreated: function (event) {
        var DataChannels = require("./datachannels");
        DataChannels.bindDataChannelListener(event.peerconnection);
    },
    /**
     * Applies the "muted" flag (a "true"/"false" string from presence) to
     * the remote video stream of the given peer.
     * @returns true when the mute state actually changed.
     */
    muteRemoteVideoStream: function (jid, value) {
        var stream;

        if (this.remoteStreams[jid] &&
            this.remoteStreams[jid][MediaStreamType.VIDEO_TYPE])
        {
            stream = this.remoteStreams[jid][MediaStreamType.VIDEO_TYPE];
        }

        if (!stream)
            return false;

        var isMuted = (value === "true");
        if (isMuted != stream.muted) {
            stream.setMute(isMuted);
            return true;
        }
        return false;
    }

};
||||
|
||||
module.exports = RTC; |
@ -0,0 +1,338 @@ |
||||
//This should be uncommented when app.js supports require
|
||||
//var RTCBrowserType = require("../../service/RTC/RTCBrowserType.js");
|
||||
|
||||
// Module-level getUserMedia constraints, (re)built by setConstraints().
var constraints = {audio: false, video: false};

/**
 * Fills constraints.video.mandatory with min/max width/height (and, on
 * Android, a frame-rate cap) for the requested resolution preset.
 * @param resolution preset name or height, e.g. '720', 'hd', '360', 'vga'
 * @param isAndroid whether we are running on an Android device
 */
function setResolutionConstraints(resolution, isAndroid)
{
    if (resolution && !constraints.video || isAndroid) {
        constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
    }
    // see https://code.google.com/p/chromium/issues/detail?id=143631#c9 for list of supported resolutions
    switch (resolution) {
    // 16:9 first
    case '1080':
    case 'fullhd':
        constraints.video.mandatory.minWidth = 1920;
        constraints.video.mandatory.minHeight = 1080;
        break;
    case '720':
    case 'hd':
        constraints.video.mandatory.minWidth = 1280;
        constraints.video.mandatory.minHeight = 720;
        break;
    case '360':
        constraints.video.mandatory.minWidth = 640;
        constraints.video.mandatory.minHeight = 360;
        break;
    case '180':
        constraints.video.mandatory.minWidth = 320;
        constraints.video.mandatory.minHeight = 180;
        break;
    // 4:3
    case '960':
        constraints.video.mandatory.minWidth = 960;
        constraints.video.mandatory.minHeight = 720;
        break;
    case '640':
    case 'vga':
        constraints.video.mandatory.minWidth = 640;
        constraints.video.mandatory.minHeight = 480;
        break;
    case '320':
        constraints.video.mandatory.minWidth = 320;
        constraints.video.mandatory.minHeight = 240;
        break;
    default:
        if (isAndroid) {
            constraints.video.mandatory.minWidth = 320;
            constraints.video.mandatory.minHeight = 240;
            constraints.video.mandatory.maxFrameRate = 15;
        }
        break;
    }
    // FIX: guard on constraints.video — when no resolution was requested and
    // no video constraints exist (e.g. an audio-only request), the original
    // dereferenced `.mandatory` on `false` and threw a TypeError.
    if (constraints.video && constraints.video.mandatory.minWidth)
        constraints.video.mandatory.maxWidth =
            constraints.video.mandatory.minWidth;
    if (constraints.video && constraints.video.mandatory.minHeight)
        constraints.video.mandatory.maxHeight =
            constraints.video.mandatory.minHeight;
}

/**
 * Rebuilds the module-level `constraints` object for a getUserMedia call.
 * @param um array of requested media kinds out of:
 * 'audio', 'video', 'screen', 'desktop'
 * @param resolution optional resolution preset (see setResolutionConstraints)
 * @param bandwidth optional bandwidth cap (doesn't work currently,
 * see webrtc issue 1846)
 * @param fps optional minimum frame rate
 * @param desktopStream chromeMediaSourceId for 'desktop' capture
 * @param isAndroid whether we are running on an Android device
 */
function setConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid)
{
    if (um.indexOf('video') >= 0) {
        constraints.video = { mandatory: {}, optional: [] };// same behaviour as true
    }
    if (um.indexOf('audio') >= 0) {
        constraints.audio = { mandatory: {}, optional: []};// same behaviour as true
    }
    if (um.indexOf('screen') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "screen",
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }
    if (um.indexOf('desktop') >= 0) {
        constraints.video = {
            mandatory: {
                chromeMediaSource: "desktop",
                chromeMediaSourceId: desktopStream,
                googLeakyBucket: true,
                maxWidth: window.screen.width,
                maxHeight: window.screen.height,
                maxFrameRate: 3
            },
            optional: []
        };
    }

    if (constraints.audio) {
        // if it is good enough for hangouts...
        constraints.audio.optional.push(
            {googEchoCancellation: true},
            {googAutoGainControl: true},
            {googNoiseSupression: true},
            {googHighpassFilter: true},
            {googNoisesuppression2: true},
            {googEchoCancellation2: true},
            {googAutoGainControl2: true}
        );
    }
    if (constraints.video) {
        constraints.video.optional.push(
            {googNoiseReduction: false} // chrome 37 workaround for issue 3807, reenable in M38
        );
        if (um.indexOf('video') >= 0) {
            constraints.video.optional.push(
                {googLeakyBucket: true}
            );
        }
    }

    setResolutionConstraints(resolution, isAndroid);

    if (bandwidth) { // doesn't work currently, see webrtc issue 1846
        if (!constraints.video) constraints.video = {mandatory: {}, optional: []};//same behaviour as true
        constraints.video.optional.push({bandwidth: bandwidth});
    }
    if (fps) { // for some cameras it might be necessary to request 30fps
        // so they choose 30fps mjpg over 10fps yuy2
        if (!constraints.video) constraints.video = {mandatory: {}, optional: []};// same behaviour as true;
        constraints.video.mandatory.minFrameRate = fps;
    }
}
||||
|
||||
|
||||
/**
 * Browser abstraction layer: detects the running browser and installs the
 * matching getUserMedia / PeerConnection / video-element helpers on this
 * instance. Redirects to an error page when WebRTC is unsupported.
 * @param RTCService the owning RTC facade
 * @constructor
 */
function RTCUtils(RTCService)
{
    this.service = RTCService;
    if (navigator.mozGetUserMedia) {
        console.log('This appears to be Firefox');
        var version = parseInt(
            navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);
        if (version >= 22) {
            this.peerconnection = mozRTCPeerConnection;
            this.browser = RTCBrowserType.RTC_BROWSER_FIREFOX;
            this.getUserMedia = navigator.mozGetUserMedia.bind(navigator);
            this.pc_constraints = {};
            this.attachMediaStream = function (element, stream) {
                element[0].mozSrcObject = stream;
                element[0].play();
            };
            this.getStreamID = function (stream) {
                var tracks = stream.getVideoTracks();
                if (!tracks || tracks.length == 0)
                {
                    tracks = stream.getAudioTracks();
                }
                return tracks[0].id.replace(/[\{,\}]/g, "");
            };
            this.getVideoSrc = function (element) {
                return element.mozSrcObject;
            };
            this.setVideoSrc = function (element, src) {
                element.mozSrcObject = src;
            };
            RTCSessionDescription = mozRTCSessionDescription;
            RTCIceCandidate = mozRTCIceCandidate;
        }
    } else if (navigator.webkitGetUserMedia) {
        console.log('This appears to be Chrome');
        this.peerconnection = webkitRTCPeerConnection;
        this.browser = RTCBrowserType.RTC_BROWSER_CHROME;
        this.getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
        this.attachMediaStream = function (element, stream) {
            element.attr('src', webkitURL.createObjectURL(stream));
        };
        this.getStreamID = function (stream) {
            // streams from FF endpoints have the characters '{' and '}'
            // that make jQuery choke.
            return stream.id.replace(/[\{,\}]/g, "");
        };
        this.getVideoSrc = function (element) {
            return element.getAttribute("src");
        };
        this.setVideoSrc = function (element, src) {
            element.setAttribute("src", src);
        };
        // DTLS should now be enabled by default but..
        this.pc_constraints = {'optional': [{'DtlsSrtpKeyAgreement': 'true'}]};
        if (navigator.userAgent.indexOf('Android') != -1) {
            this.pc_constraints = {}; // disable DTLS on Android
        }
        if (!webkitMediaStream.prototype.getVideoTracks) {
            webkitMediaStream.prototype.getVideoTracks = function () {
                return this.videoTracks;
            };
        }
        if (!webkitMediaStream.prototype.getAudioTracks) {
            webkitMediaStream.prototype.getAudioTracks = function () {
                return this.audioTracks;
            };
        }
    }
    else
    {
        try { console.log('Browser does not appear to be WebRTC-capable'); }
        catch (e) { }

        window.location.href = 'webrtcrequired.html';
        return;
    }

    if (this.browser !== RTCBrowserType.RTC_BROWSER_CHROME &&
        config.enableFirefoxSupport !== true) {
        window.location.href = 'chromeonly.html';
        return;
    }

}

/**
 * Builds getUserMedia constraints for the requested media kinds and invokes
 * getUserMedia (or simulcast's wrapper when enabled).
 * @param um array of media kinds: 'audio', 'video', 'screen', 'desktop'
 * @param success_callback called with the obtained MediaStream
 * @param failure_callback called with the error on failure (optional)
 * @param resolution optional resolution preset
 * @param bandwidth optional bandwidth cap
 * @param fps optional minimum frame rate
 * @param desktopStream chromeMediaSourceId for desktop capture
 */
RTCUtils.prototype.getUserMediaWithConstraints = function(
    um, success_callback, failure_callback, resolution, bandwidth, fps,
    desktopStream)
{
    // Check if we are running on Android device
    var isAndroid = navigator.userAgent.indexOf('Android') != -1;

    setConstraints(um, resolution, bandwidth, fps, desktopStream, isAndroid);

    var isFF = navigator.userAgent.toLowerCase().indexOf('firefox') > -1;

    try {
        if (config.enableSimulcast
            && constraints.video
            && constraints.video.chromeMediaSource !== 'screen'
            && constraints.video.chromeMediaSource !== 'desktop'
            && !isAndroid

            // We currently do not support FF, as it doesn't have multistream support.
            && !isFF) {
            simulcast.getUserMedia(constraints, function (stream) {
                console.log('onUserMediaSuccess');
                success_callback(stream);
            },
            function (error) {
                console.warn('Failed to get access to local media. Error ', error);
                if (failure_callback) {
                    failure_callback(error);
                }
            });
        } else {
            // FIX: the original called RTCUtils.getUserMedia — a static that
            // is never defined; getUserMedia is installed on the INSTANCE by
            // the constructor, so every non-simulcast call failed.
            this.getUserMedia(constraints,
                function (stream) {
                    console.log('onUserMediaSuccess');
                    success_callback(stream);
                },
                function (error) {
                    console.warn('Failed to get access to local media. Error ', error);
                    if (failure_callback) {
                        failure_callback(error);
                    }
                });
        }
    } catch (e) {
        console.error('GUM failed: ', e);
        if (failure_callback) {
            failure_callback(e);
        }
    }
};

/**
 * We ask for audio and video combined stream in order to get permissions and
 * not to ask twice. Falls back to audio-only, then reports the error.
 */
RTCUtils.prototype.obtainAudioAndVideoPermissions = function () {
    var self = this;
    // Get AV
    var cb = function (stream) {
        console.log('got', stream,
            stream.getAudioTracks().length, stream.getVideoTracks().length);
        self.handleLocalStream(stream);
        trackUsage('localMedia', {
            audio: stream.getAudioTracks().length,
            video: stream.getVideoTracks().length
        });
    };
    // (removed a duplicate `var self = this;` redeclaration)
    this.getUserMediaWithConstraints(
        ['audio', 'video'],
        cb,
        function (error) {
            console.error('failed to obtain audio/video stream - trying audio only', error);
            self.getUserMediaWithConstraints(
                ['audio'],
                cb,
                function (error) {
                    console.error('failed to obtain audio/video stream - stop', error);
                    trackUsage('localMediaError', {
                        media: error.media || 'video',
                        name : error.name
                    });
                    messageHandler.showError("Error",
                        "Failed to obtain permissions to use the local microphone" +
                        "and/or camera.");
                }
            );
        },
        config.resolution || '360');
};

/**
 * Splits a combined audio+video capture into per-kind streams (webkit) or
 * registers the stream as-is (Firefox) with the owning RTC service.
 */
RTCUtils.prototype.handleLocalStream = function (stream)
{
    if (window.webkitMediaStream)
    {
        var audioStream = new webkitMediaStream();
        var videoStream = new webkitMediaStream();
        var audioTracks = stream.getAudioTracks();
        var videoTracks = stream.getVideoTracks();
        for (var i = 0; i < audioTracks.length; i++) {
            audioStream.addTrack(audioTracks[i]);
        }

        this.service.createLocalStream(audioStream, "audio");

        for (i = 0; i < videoTracks.length; i++) {
            videoStream.addTrack(videoTracks[i]);
        }

        this.service.createLocalStream(videoStream, "video");
    }
    else
    { // firefox
        this.service.createLocalStream(stream, "stream");
    }

};
||||
|
||||
|
||||
|
||||
module.exports = RTCUtils; |
@ -0,0 +1,7 @@ |
||||
// Kinds of wrapped media streams.
var MediaStreamType = {
    VIDEO_TYPE: "Video",
    AUDIO_TYPE: "Audio"
};
||||
////These lines should be uncommented when require works in app.js
|
||||
//module.exports = MediaStreamType;
|
@ -0,0 +1,7 @@ |
||||
// Identifiers for the supported WebRTC browser families.
var RTCBrowserType = {
    RTC_BROWSER_CHROME: "rtc_browser.chrome",
    RTC_BROWSER_FIREFOX: "rtc_browser.firefox"
};
||||
|
||||
//module.exports = RTCBrowserType;
|
@ -0,0 +1,12 @@ |
||||
// Event names emitted for stream lifecycle changes.
var StreamEventTypes = {
    EVENT_TYPE_LOCAL_CREATED: "stream.local_created",
    EVENT_TYPE_LOCAL_ENDED: "stream.local_ended",
    EVENT_TYPE_REMOTE_CREATED: "stream.remote_created",
    EVENT_TYPE_REMOTE_ENDED: "stream.remote_ended"
};
||||
|
||||
//These lines should be uncommented when require works in app.js
|
||||
//module.exports = StreamEventTypes;
|
Loading…
Reference in new issue