Commit 6b9794e0 authored by lazorfuzz's avatar lazorfuzz
Browse files

partial mesh network

parent 6638a09a
# LioWebRTC
An Electron-compatible, event-based WebRTC library that makes it easy to embed peer to peer communication into React components.
An Electron-compatible WebRTC library that makes it easy to embed scalable peer to peer communication into React components.
LioWebRTC was built on SimpleWebRTC, and modified to be compatible with React, JSX, and Electron.
LioWebRTC was built on SimpleWebRTC, and modified to be compatible with React, JSX, and Electron. It can also be configured for scalability using partial mesh networks, making it possible to emit data via data channels to thousands of peers in a room, while only needing to be connected to at least one other peer in the room.
[Click here](https://chatdemo.razorfart.com/) to see a chatroom demo built with React and LioWebRTC.
......@@ -38,7 +38,7 @@ const webrtc = new LioWebRTC({
```
### Data channels only
Disable webcam/mic streaming, and only allow data channels.
Disable video/audio streaming, and only allow data channels.
```js
const webrtc = new LioWebRTC({
dataOnly: true
......@@ -60,7 +60,7 @@ const webrtc = new LioWebRTC({
### Join a room once it's ready
```js
webrtc.on('readyToCall', () => {
webrtc.on('ready', () => {
// Joins a room if it exists, creates it if it doesn't
webrtc.joinRoom('your room name');
});
......@@ -153,7 +153,7 @@ class Party extends Component {
this.webrtc.on('videoAdded', this.addVideo);
this.webrtc.on('videoRemoved', this.removeVideo);
this.webrtc.on('readyToCall', this.readyToCall);
this.webrtc.on('ready', this.readyToJoin);
this.webrtc.on('iceFailed', this.handleConnectionError);
this.webrtc.on('connectivityError', this.handleConnectionError);
}
......@@ -176,7 +176,7 @@ class Party extends Component {
console.log('had remote relay candidate', pc.hadRemoteRelayCandidate);
}
readyToCall = () => {
readyToJoin = () => {
// Starts the process of joining a room.
this.webrtc.joinRoom(this.state.roomID, (err, desc) => {
});
......@@ -197,9 +197,7 @@ class Party extends Component {
));
disconnect = () => {
this.webrtc.stopLocalVideo();
this.webrtc.leaveRoom();
this.webrtc.disconnect();
this.webrtc.quit();
}
componentWillUnmount() {
......@@ -294,17 +292,19 @@ this.webrtc.on('receivedPeerData', (type, payload, peer) => {
`'createdPeer', peer` - this will be emitted when:
- joining a room with existing peers, once for each peer
- a new peer joins your room
- sharing screen, once for each peer
`'leftRoom', roomName` - emitted after successfully leaving the current room,
ending all peers, and stopping the local screen stream
`'localScreenAdded', el` - emitted after triggering the start of screen sharing
- `el` the element that contains the local screen stream
ending all peers, and stopping local stream
`'mute', data` - emitted when a peer mutes their video or audio
- `data` an object that contains an `id` property that returns the id of the peer, and a `name` property that indicates which stream was muted, `video` or `audio`
`'removedPeer', peer` - emitted when a peer loses connection or exits the room
- `peer` - the peer associated with the stream that was removed
`'ready', sessionId` - emitted when liowebrtc is ready to join a room
- `sessionId` - the socket.io connection session ID
`'receivedPeerData', type, payload, peer` - emitted when a peer sends data via `shout` or `whisper`
- `type` a label, usually a string, that describes the payload
- `payload` any kind of data sent by the peer, usually an object
......@@ -334,7 +334,7 @@ ending all peers, and stopping the local screen stream
`attachStream(stream, el, opts)` - attaches a media stream to a video or audio element
- `MediaStream stream` - an object representing a local or peer media stream
- `HTMLElement el` - the element (or ref if you're using React) to attach the media stream to, usually a video or audio element
- `object opts` - an object representing optional configuration for attachStream
- `object opts` - *optional* optional configuration for attachStream
- `bool autoplay` - autoplay the video once attached. Defaults to `true`
- `bool muted` - mute the video once attached. Defaults to `false`
- `bool mirror` - mirror the video once attached. Defaults to `true`
......@@ -342,7 +342,7 @@ ending all peers, and stopping the local screen stream
`broadcast(messageType, payload)` - broadcasts a message to all peers in the
room via the signaling server (similar to `shout`, but not p2p). Listen for peers' broadcasts on the `receivedSignalData` event.
- `string messageType` a label, usually a string, that describes the payload
- `string messageType` an arbitrary label, usually a string, that describes the payload
- `object payload` - an arbitrary value or object to send to peers
`createRoom(name, callback)` - emits the `create` event and optionally invokes `callback` on response
......@@ -352,31 +352,36 @@ room via the signaling server (similar to `shout`, but not p2p). Listen for peer
`emit(eventLabel, ...args)` - emits an arbitrary event locally (to send data to other peers, use `shout`)
`getContainerId(peer)` - get the DOM id associated with a peer's media element. In JSX, you will need to set the id of the container element to this value
- `Peer peer` - the object representing the peer and its peer connection
`getId(peer)` - get the DOM id associated with a peer's media stream. In JSX, you will need to set the id of the peer's media element to this value.
`getLocalScreen()` - returns the local screen stream
- `Peer peer` - the object representing the peer and its peer connection
`getPeerById(id)` - returns a peer with a given `id`
- `string id` - the id of the peer
`getPeerByNick(nick)` - returns a peer with a given `nick`
- `string nick` - the peer's nickname
`getPeers(sessionId, type)` - returns all peers by `sessionId` and/or `type`
`getPeers(sessionId)` - returns all peers by `sessionId`
- `string sessionId` - the sid of the current room. Will return all peers if no sessionId is provided.
`joinRoom(name, callback)` - joins the room `name`. Callback is
invoked with `callback(err, roomDescription)` where `roomDescription` is yielded
by the connection on the `join` event. See [signalmaster](https://github.com/andyet/signalmaster) for details about rooms.
`leaveRoom()` - leaves the currently joined room and stops local screen share
`leaveRoom()` - leaves the currently joined room and stops local streams
`mute()` - mutes the local audio stream to your peers (stops sending audio in the WebRTC audio channel)
`on(ev, fn)` - creates an event listener for event `ev`
`on(ev, fn)` - creates an event listener for event `ev` handled by `fn`
`pause()` - pauses both video and audio streams to your peers
`pauseVideo()` - pauses the video stream to your peers (stops sending video in the WebRTC video channel)
`quit()` - stops the local video, leaves the currently joined room, and disconnects from the signaling server
`resume()` - resumes sending video and audio to your peers
`resumeVideo()` - resumes the video stream to your peers (resumes sending video in the WebRTC video channel)
......@@ -387,11 +392,9 @@ to all peers in the room via a data channel (same as `shout`, except you can spe
`setVolumeForAll(volume)` - set the volume level for all peers
`shareScreen(callback)` - initiates screen capture request to browser, then streams the video to peers in the room
`shout(messageType, payload)` - sends a message
to all peers in the room via the default p2p data channel. Listen for peers' shouts on the `receivedPeerData` event.
- `string messageType` a label, usually a string, that describes the payload
- `string messageType` - an arbitrary label, usually a string, that describes the payload
- `object payload` - an arbitrary value or object to send to peers
`startLocalVideo()` - starts the local video or audio streams with the `media` options provided
......@@ -399,28 +402,26 @@ in the config. Use this if `autoRequestMedia` is set to false
`stopLocalVideo()` - stops all local media streams
`stopScreenShare()` - stops the screen share stream and removes it from the room
`transmit(peer, messageType, payload)` - sends a message to a single peer in the
room via the signaling server (similar to `whisper`, but not p2p). Listen for peers' transmissions on the `receivedSignalData` event.
- `Peer peer` the object representing the peer and its peer connection
- `string messageType` a label, usually a string, that describes the payload
- `object payload` any kind of data sent by the peer, usually an object
- `Peer peer` - the object representing the peer and its peer connection
- `string messageType` - an arbitrary label, usually a string, that describes the payload
- `object payload` - any kind of data sent by the peer, usually an object
`unmute()` - unmutes the audio stream to your peers (resumes sending audio in the WebRTC audio channel)
- `float volume` - the volume level, between 0 and 1
`whisper(peer, messageType, payload)` - sends a message to a single peer in the room via the default p2p data channel. Listen for peers' whispers on the `receivedPeerData` event.
- `Peer peer` the object representing the peer and its peer connection
- `string messageType` a label, usually a string, that describes the payload
- `object payload` any kind of data sent by the peer, usually an object
- `Peer peer` - the object representing the peer and its peer connection
- `string messageType` - an arbitrary label, usually a string, that describes the payload
- `object payload` - any kind of data sent by the peer, usually an object
## Signaling
WebRTC needs to be facilitated with signaling; a service that acts as a matchmaker for peers before they establish direct video/audio/data channels. Signaling can be done in any way, e.g. via good old fashioned carrier pigeons. Signaling services only need to fulfill the absolute minimal role of matchmaking peers.
[Signalmaster](https://github.com/andyet/signalmaster) is a [socket.io](http://socket.io/) server signaling solution, and is very easy to set up. socket.io enables real-time, bidirectional communication between a client and server via web sockets. It also allows us to easily segment peers into "rooms."
[Signalmaster](https://github.com/andyet/signalmaster) is a [socket.io](http://socket.io/) server signaling solution, and is very easy to set up. socket.io enables real-time, bidirectional communication between a client and server via web sockets. It also allows us to easily segment peers into rooms.
For emitting data to peers, LioWebRTC provides a unified, event-based API that enables peers to seamlessly switch between `shout`ing (p2p data channels) or `broadcast`ing (socket.io) to all the peers in a room. It's up to you to decide which protocol to use, but socket.io should ideally only be used for transmitting things like metadata, one-off events, etc. Both protocols are real-time, bidirectional, and event-based.
......
......@@ -37,20 +37,26 @@ class LioWebRTC extends WildEmitter {
muted: true,
audio: false,
},
network: {
maxPeers: 4,
minPeers: 2
},
};
this.peerDataCache = {};
this.roomCount = 0;
this.roomName = '';
let connection;
// Set up logger
this.logger = ((() => {
// we assume that if you're in debug mode and you didn't
// pass in a logger, you actually want to log as much as
// possible.
if (opts.debug) {
return opts.logger || console;
}
return opts.logger || mockconsole;
})());
// set our config from options
// Set our config from options
Object.keys(options).forEach((o) => {
this.config[o] = options[o];
});
......@@ -84,6 +90,7 @@ class LioWebRTC extends WildEmitter {
connection.on('message', (message) => {
const peers = self.webrtc.getPeers(message.from, message.roomType);
const totalPeers = self.webrtc.getPeers().length;
let peer;
if (message.type === 'offer') {
......@@ -93,16 +100,21 @@ class LioWebRTC extends WildEmitter {
});
// if (!peer) peer = peers[0]; // fallback for old protocol versions
}
if (this.config.network.maxPeers > 0 && totalPeers >= this.config.network.maxPeers) {
return;
}
if (!peer) {
peer = self.webrtc.createPeer({
id: message.from,
sid: message.sid,
type: message.roomType,
enableDataChannels: self.config.enableDataChannels && message.roomType !== 'screen',
enableDataChannels: self.config.enableDataChannels,
sharemyscreen: message.roomType === 'screen' && !message.broadcaster,
broadcaster: message.roomType === 'screen' && !message.broadcaster ? self.connection.getSessionid() : null,
});
self.emit('createdPeer', peer);
} else {
return;
}
peer.handleMessage(message);
} else if (peers.length) {
......@@ -146,8 +158,8 @@ class LioWebRTC extends WildEmitter {
});
// proxy events from WebRTC
this.webrtc.on('*', function () {
self.emit(...arguments);
this.webrtc.on('*', function () { // eslint-disable-line
self.emit(...arguments); // eslint-disable-line
});
// log all events in debug mode
......@@ -165,7 +177,7 @@ class LioWebRTC extends WildEmitter {
});
this.webrtc.on('peerStreamAdded', this.handlePeerStreamAdded.bind(this));
this.webrtc.on('peerStreamRemoved', this.handlePeerStreamRemoved.bind(this));
this.webrtc.on('removedPeer', this.handlePeerStreamRemoved.bind(this));
// echo cancellation attempts
if (this.config.adjustPeerVolume) {
......@@ -183,14 +195,14 @@ class LioWebRTC extends WildEmitter {
self.webrtc.config.peerConnectionConfig.iceServers = self.webrtc.config.peerConnectionConfig.iceServers.concat(args);
self.emit('turnservers', args);
});
/*
this.webrtc.on('iceFailed', (peer) => {
// local ice failure
});
this.webrtc.on('connectivityError', (peer) => {
// remote ice failure
});
*/
// sending mute/unmute to all peers
this.webrtc.on('audioOn', () => {
......@@ -206,66 +218,83 @@ class LioWebRTC extends WildEmitter {
self.webrtc.sendToAll('mute', { name: 'video' });
});
// screensharing events
this.webrtc.on('localScreen', (stream) => {
let item;
const el = document.createElement('video');
const container = self.getRemoteVideoContainer();
el.oncontextmenu = () => false;
el.id = 'localScreen';
attachMediaStream(stream, el);
if (container) {
container.appendChild(el);
}
self.emit('localScreenAdded', el);
self.connection.emit('shareScreen');
self.webrtc.peers.forEach((existingPeer) => {
let peer;
if (existingPeer.type === 'video') {
peer = self.webrtc.createPeer({
id: existingPeer.id,
type: 'screen',
sharemyscreen: true,
enableDataChannels: false,
receiveMedia: {
offerToReceiveAudio: 0,
offerToReceiveVideo: 0,
},
broadcaster: self.connection.getSessionid(),
});
self.emit('createdPeer', peer);
peer.start();
}
});
});
this.webrtc.on('localScreenStopped', (stream) => {
if (self.getLocalScreen()) {
self.stopScreenShare();
this.webrtc.on('channelMessage', (peer, label, data) => {
if (data.payload._id && this.peerDataCache[data.payload._id]) {
return;
}
/*
self.connection.emit('unshareScreen');
self.webrtc.peers.forEach(function (peer) {
if (peer.sharemyscreen) {
peer.end();
}
switch (data.type) {
case 'volume':
self.emit('remoteVolumeChange', data.payload, peer);
break;
case 'propagate':
if (this.seenPeerEvent(data.payload._id)) {
return;
}
// Re-propagate message
this.propagateMessage(data.payload);
this.cachePeerEvent(data.payload._id, data.payload.senderId);
// Emit the propagated data as if it were received directly
self.emit('receivedPeerData', data.payload.type, data.payload.payload, {
id: data.payload.senderId,
nick: data.payload.senderNick,
isForwarded: true,
});
*/
});
this.webrtc.on('channelMessage', (peer, label, data) => {
if (data.type === 'volume') {
self.emit('remoteVolumeChange', data.payload, peer);
} else {
self.emit('receivedPeerData', data.type, data.payload, peer);
break;
default:
if (this.seenPeerEvent(data._id)) {
return;
}
this.cachePeerEvent(data._id, peer.id);
self.emit('receivedPeerData', data.type, data.payload, peer);
if (this.config.network.maxPeers > 0 && data.shout) {
data.senderId = peer.id;
const fwdData = Object.assign({}, { senderId: peer.id, senderNick: peer.nick }, data);
this.propagateMessage(fwdData);
}
break;
}
});
if (this.config.autoRequestMedia) this.startLocalVideo();
}
cachePeerEvent(eventId, peerId) {
if (!this.peerDataCache[eventId]) {
this.peerDataCache[eventId] = {
recipients: {
[peerId]: true
},
timestamp: Date.now(),
};
return;
}
if (!this.peerDataCache[eventId].recipients[peerId]) {
this.peerDataCache[eventId].recipients[peerId] = true;
}
if (Object.keys(this.peerDataCache).length > 500) {
// Object.keys(this.peerDataCache).re
}
}
seenPeerEvent(eventId) {
if (this.peerDataCache[eventId]) {
return true;
}
return false;
}
propagateMessage(data, channel = 'liowebrtc') {
this.getPeers()
.forEach((peer) => {
if (!this.peerDataCache[data._id]) {
this.cachePeerEvent(data._id, data.senderId);
}
if (!this.peerDataCache[data._id].recipients[peer.id]) {
peer.sendDirectly('propagate', data, channel, true);
}
});
}
leaveRoom() {
if (this.roomName) {
this.connection.emit('leave');
......@@ -305,10 +334,10 @@ class LioWebRTC extends WildEmitter {
}
// Notifies the app that a peer's media stream went away so it can tear
// down the corresponding element. NOTE(review): the commented-out variant
// below suggests gating on `config.media.video` was considered — confirm
// which behavior is intended for data-only rooms.
handlePeerStreamRemoved(peer) {
this.emit('videoRemoved', peer);
// if (this.config.media.video) this.emit('videoRemoved', peer);
}
getId(peer) {
getId(peer) { // eslint-disable-line
return [peer.id, peer.type, peer.broadcaster ? 'broadcasting' : 'incoming'].join('_');
}
......@@ -335,21 +364,28 @@ class LioWebRTC extends WildEmitter {
let type;
let peer;
for (id in roomDescription.clients) {
this.roomCount = Object.keys(roomDescription.clients).length;
for (id of Object.keys(roomDescription.clients).reverse()) {
client = roomDescription.clients[id];
for (type in client) {
if (client[type]) {
const peerCount = this.webrtc.getPeers().length;
if (this.config.network.maxPeers > 0 && (peerCount >= this.config.network.minPeers || peerCount >= this.config.network.maxPeers)) {
break;
}
peer = self.webrtc.createPeer({
id,
type,
enableDataChannels: self.config.enableDataChannels && type !== 'screen',
receiveMedia: {
offerToReceiveAudio: type !== 'screen' && !self.config.dataOnly && self.config.receiveMedia.offerToReceiveAudio ? 1 : 0,
offerToReceiveVideo: !self.config.dataOnly && self.config.receiveMedia.offerToReceiveVideo,
offerToReceiveAudio: type !== 'screen' && !this.config.dataOnly && this.config.receiveMedia.offerToReceiveAudio ? 1 : 0,
offerToReceiveVideo: !this.config.dataOnly && self.config.receiveMedia.offerToReceiveVideo ? 1 : 0,
},
});
self.emit('createdPeer', peer);
peer.start();
if (this.config.debug) console.log('CREATED PEER');
}
}
}
......@@ -372,59 +408,36 @@ class LioWebRTC extends WildEmitter {
}
attachStream(stream, el, opts) { // eslint-disable-line
let options = {
const options = {
autoplay: true,
muted: false,
mirror: true,
audio: false,
};
if (opts) options = opts;
attachMediaStream(stream, el, options);
}
stopLocalVideo() {
this.webrtc.stop();
attachMediaStream(stream, el, opts || options);
}
shareScreen(cb) {
this.webrtc.startScreenShare(cb);
setLocalVideo(element) {
this.config.localVideoEl = element;
}
getLocalScreen() {
return this.webrtc.localScreens && this.webrtc.localScreens[0];
stopLocalVideo() {
this.webrtc.stop();
}
stopScreenShare() {
this.connection.emit('unshareScreen');
const videoEl = document.getElementById('localScreen');
const container = this.getRemoteVideoContainer();
if (this.config.autoRemoveVideos && container && videoEl) {
container.removeChild(videoEl);
}
// a hack to emit the event the removes the video
// element that we want
if (videoEl) {
this.emit('videoRemoved', videoEl);
}
if (this.getLocalScreen()) {
this.webrtc.stopScreenShare();
}
this.webrtc.peers.forEach((peer) => {
if (peer.broadcaster) {
peer.end();
}
});
// Convenience teardown: stop local media, leave the current room, and
// disconnect from the signaling server, in that order.
quit() {
this.stopLocalVideo();
this.leaveRoom();
this.disconnect();
}
testReadiness() {
const self = this;
if (this.sessionReady) {
if (this.config.dataOnly || (!this.config.media.video && !this.config.media.audio)) {
self.emit('readyToCall', self.connection.getSessionid());
self.emit('ready', self.connection.getSessionid());
} else if (this.webrtc.localStreams.length > 0) {
self.emit('readyToCall', self.connection.getSessionid());
self.emit('ready', self.connection.getSessionid());
}
}
}
......
......@@ -3,10 +3,6 @@ import WildEmitter from 'wildemitter';
import FileTransfer from 'filetransfer';
import webrtcSupport from './webrtcsupport';
// the inband-v1 protocol is sending metadata inband in a serialized JSON object
// followed by the actual data. Receiver closes the datachannel upon completion
const INBAND_FILETRANSFER_V1 = 'https://simplewebrtc.com/protocol/filetransfer#inband-v1';
function isAllTracksEnded(stream) {
let isAllTracksEnded = true;
stream.getTracks().forEach((t) => {
......@@ -63,6 +59,8 @@ class Peer extends WildEmitter {
self.send('connectivityError');
}
break;
default:
break;
}
});
this.pc.on('signalingStateChange', this.emit.bind(this, 'signalingStateChange'));
......@@ -82,17 +80,6 @@ class Peer extends WildEmitter {
}
this.on('channelOpen', (channel) => {
if (channel.protocol === INBAND_FILETRANSFER_V1) {
channel.onmessage = (event) => {
const metadata = JSON.parse(event.data);
const receiver = new FileTransfer.Receiver();
receiver.receive(metadata, channel);
self.emit('fileTransfer', metadata, receiver);
receiver.on('receivedFile', (file, metadata) => {
receiver.channel.close();
});
};
}
});
// proxy events to parent
......@@ -107,8 +94,11 @@ class Peer extends WildEmitter {
if (message.prefix) this.browserPrefix = message.prefix;
if (message.type === 'offer') {
if (!this.nick) this.nick = message.payload.nick;
delete message.payload.nick;
if (!this.nick) {
const n = message.payload.nick;
this.nick = n;
}
// delete message.payload.nick;
this.pc.handleOffer(message.payload, (err) => {
if (err) {
return;
......@@ -116,6 +106,7 @@ class Peer extends WildEmitter {
// auto-accept
self.pc.answer((err, sessionDescription) => {
// self.send('answer', sessionDescription);
// console.log('answering', sessionDescription);
});
});
} else if (message.type === 'answer') {
......@@ -154,6 +145,7 @@ class Peer extends WildEmitter {
type: messageType,
payload,
prefix: webrtcSupport.prefix,
timestamp: Date.now()
};
this.logger.log('sending', messageType, message);
this.parent.emit('message', message);
......@@ -161,10 +153,12 @@ class Peer extends WildEmitter {
// send via data channel
// returns true when message was sent and false if channel is not open
sendDirectly(messageType, payload, channel = 'liowebrtc') {
sendDirectly(messageType, payload, channel = 'liowebrtc', shout = false, messageId = `${Date.now()}_${Math.random() * 1000000}`) {
const message = {
type: messageType,
payload,
_id: messageId,
shout,
};
this.logger.log('sending via datachannel', channel, messageType, message);
const dc = this.getDataChannel(channel);
......@@ -215,7 +209,7 @@ class Peer extends WildEmitter {
const self = this;
// well, the webrtc api requires that we either
// a) create a datachannel a prioris
// a) create a datachannel a priori
// b) do a renegotiation later to add the SCTP m-line
// Let's do (a) first...
if (this.enableDataChannels) {
......@@ -263,7 +257,7 @@ class Peer extends WildEmitter {
if (peerIndex > -1) {
this.parent.peers.splice(peerIndex, 1);