// Muaz Khan - www.MuazKhan.com
// MIT License - www.webrtc-experiment.com/licence
// Documentation - github.com/streamproc/MediaStreamRecorder
// ______________________
// MediaStreamRecorder.js
function MediaStreamRecorder(mediaStream) {
if (!mediaStream) throw 'MediaStream is mandatory.';
// void start(optional long timeSlice)
// timeSlice: interval (in milliseconds) at which to fire "ondataavailable"
this.start = function(timeSlice) {
// the MediaStream Recording API has not been implemented in Chrome yet;
// that's why the Web Audio API is used to record stereo audio in WAV format
var Recorder = IsChrome ? window.StereoRecorder : window.MediaRecorderWrapper;
// video recorder (in WebM format)
if (this.mimeType.indexOf('video') != -1) {
Recorder = IsChrome ? window.WhammyRecorder : window.MediaRecorderWrapper;
}
// video recorder (in GIF format)
if (this.mimeType === 'image/gif') Recorder = window.GifRecorder;
mediaRecorder = new Recorder(mediaStream);
mediaRecorder.ondataavailable = this.ondataavailable;
mediaRecorder.onstop = this.onstop;
mediaRecorder.onStartedDrawingNonBlankFrames = this.onStartedDrawingNonBlankFrames;
// Merge all data-types except "function"
mediaRecorder = mergeProps(mediaRecorder, this);
mediaRecorder.start(timeSlice);
};
this.onStartedDrawingNonBlankFrames = function() {};
this.clearOldRecordedFrames = function() {
// only the Whammy-based recorder exposes clearOldRecordedFrames
if (!mediaRecorder || !mediaRecorder.clearOldRecordedFrames) return;
mediaRecorder.clearOldRecordedFrames();
};
this.stop = function() {
if (mediaRecorder) mediaRecorder.stop();
};
this.ondataavailable = function(blob) {
console.log('ondataavailable..', blob);
};
this.onstop = function(error) {
console.warn('stopped..', error);
};
// Reference to the underlying recorder object (MediaRecorderWrapper, StereoRecorder, WhammyRecorder, or GifRecorder)
var mediaRecorder;
}
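// Usage sketch (illustration only; "stream" is assumed to come from
// navigator.getUserMedia or a similar source). Note that "mimeType" must be
// set before start(), because start() inspects it to pick a recorder:
//
//   var recorder = new MediaStreamRecorder(stream);
//   recorder.mimeType = 'audio/ogg'; // or 'video/webm', or 'image/gif'
//   recorder.ondataavailable = function(blob) {
//       // one blob per timeSlice; upload or buffer it here
//   };
//   recorder.start(5000); // fire "ondataavailable" every 5 seconds
//   // later: recorder.stop();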
// the loadScript() helper below is used to auto-load required files.
function loadScript(src, onload) {
var root = window.MediaStreamRecorderScriptsDir;
var script = document.createElement('script');
script.src = root + src;
script.onload = onload || function() {};
document.documentElement.appendChild(script);
}
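// Usage sketch (illustration only; the paths are hypothetical): the loader
// reads the global "MediaStreamRecorderScriptsDir" as the base directory,
// so it must be set (with a trailing slash) before calling loadScript:
//
//   window.MediaStreamRecorderScriptsDir = '/scripts/msr/';
//   loadScript('gif-recorder.js', function() {
//       // dependency is now available
//   });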
// Muaz Khan - www.MuazKhan.com
// MIT License - www.webrtc-experiment.com/licence
// Documentation - github.com/streamproc/MediaStreamRecorder
// _____________________________
// Cross-Browser-Declarations.js
// animation-frame used in WebM recording
if (!window.requestAnimationFrame) {
window.requestAnimationFrame = window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame;
}
if (!window.cancelAnimationFrame) {
window.cancelAnimationFrame = window.webkitCancelAnimationFrame || window.mozCancelAnimationFrame;
}
// WebAudio API representer
if (!window.AudioContext) {
window.AudioContext = window.webkitAudioContext || window.mozAudioContext;
}
window.URL = window.URL || window.webkitURL;
if (window.webkitMediaStream) window.MediaStream = window.webkitMediaStream;
// treat WebKit/Blink browsers (and legacy Edge) as "Chrome" when selecting recorders
var IsChrome = !!navigator.webkitGetUserMedia || (navigator.mediaDevices && navigator.userAgent.indexOf('Edge') !== -1);
// Merge all other data-types except "function"
function mergeProps(mergein, mergeto) {
mergeto = reformatProps(mergeto);
for (var t in mergeto) {
if (typeof mergeto[t] !== 'function') {
mergein[t] = mergeto[t];
}
}
return mergein;
}
function reformatProps(obj) {
var output = {};
for (var o in obj) {
if (o.indexOf('-') != -1) {
var splitted = o.split('-');
var name = splitted[0] + splitted[1].split('')[0].toUpperCase() + splitted[1].substr(1);
output[name] = obj[o];
} else output[o] = obj[o];
}
return output;
}
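// Behavior sketch (illustration only): reformatProps() converts hyphenated
// keys to camelCase, then mergeProps() copies every non-function property:
//
//   mergeProps({}, { 'sample-rate': 96000, start: function() {} });
//   // => { sampleRate: 96000 }  (the "start" function is skipped)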
// ______________ (used to handle issues like http://goo.gl/xmE5eg; see issue #129)
// ObjectStore.js
var ObjectStore = {
AudioContext: window.AudioContext || window.webkitAudioContext
};
// ================
// MediaRecorder.js
/**
 * Implementation of https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html
 * The MediaRecorder accepts a mediaStream as input source passed from the UA. When the recorder starts,
 * a MediaEncoder is created and accepts the mediaStream as its input source.
 * The encoder gets the raw data from track data changes, encodes it using the selected MIME type, and stores the encoded data in an EncodedBufferCache object.
 * The encoded data is extracted on every timeslice passed to the start() call, or on demand via the requestData() function.
 * Thread model:
 * When the recorder starts, it creates a "Media Encoder" thread that reads data from the MediaEncoder object and stores the buffer in the EncodedBufferCache object.
 * It also extracts the encoded data and creates blobs on every timeslice passed to start(), or whenever requestData() is called by the UA.
 */
function MediaRecorderWrapper(mediaStream) {
// if the user chose the audio-only option but passed a MediaStream
// that has both audio and video tracks,
// use a dirty workaround to generate an audio-only stream so that we can get audio/ogg output.
if (this.type == 'audio' && mediaStream.getVideoTracks && mediaStream.getVideoTracks().length && IsChrome) {
var context = new AudioContext();
var mediaStreamSource = context.createMediaStreamSource(mediaStream);
var destination = context.createMediaStreamDestination();
mediaStreamSource.connect(destination);
mediaStream = destination.stream;
}
// void start(optional long timeSlice)
// timeSlice: interval (in milliseconds) at which to fire "ondataavailable"
// starting a recording session initiates the "Reading Thread",
// which is used to prevent main-thread blocking scenarios
this.start = function(mTimeSlice) {
mTimeSlice = mTimeSlice || 1000;
isStopRecording = false;
function startRecording() {
if (isStopRecording) return;
mediaRecorder = new MediaRecorder(mediaStream);
mediaRecorder.ondataavailable = function(e) {
console.log('ondataavailable', e.data.type, e.data.size, e.data);
// mediaRecorder.state == 'recording' means that media recorder is associated with "session"
// mediaRecorder.state == 'stopped' means that media recorder is detached from the "session" ... in this case; "session" will also be deleted.
if (!e.data.size) {
console.warn('Recording of', e.data.type, 'failed.');
return;
}
// at this stage, the Firefox MediaRecorder API doesn't allow choosing the output mimeType format!
var blob = new window.Blob([e.data], {
type: e.data.type || self.mimeType || 'audio/ogg' // It specifies the container format as well as the audio and video capture formats.
});
// Dispatching OnDataAvailable Handler
self.ondataavailable(blob);
};
mediaRecorder.onstop = function(error) {
// for video recording on Firefox, this event fires too early,
// because work on VideoFrameContainer is still in progress
// https://wiki.mozilla.org/Gecko:MediaRecorder
// self.onstop(error);
};
// http://www.w3.org/TR/2012/WD-dom-20121206/#error-names-table
// showBrowserSpecificIndicator: got neither video nor audio access
// "VideoFrameContainer" can't be accessed directly; no wrapper using it could be found.
// that's why there is no video recording support on Firefox:
// video recording fails because no encoder is available there
// http://dxr.mozilla.org/mozilla-central/source/content/media/MediaRecorder.cpp#317
// maybe the "Read Thread" doesn't fire the video-track read notification;
// that's why the shutdown notification is received and the "Read Thread" is stopped.
// https://dvcs.w3.org/hg/dap/raw-file/default/media-stream-capture/MediaRecorder.html#error-handling
mediaRecorder.onerror = function(error) {
console.error(error);
self.start(mTimeSlice);
};
mediaRecorder.onwarning = function(warning) {
console.warn(warning);
};
// void start(optional long mTimeSlice)
// The interval for passing encoded data from EncodedBufferCache to the onDataAvailable
// handler. "mTimeSlice < 0" means the Session object does not push encoded data to
// onDataAvailable; instead, it passively waits for the client side to pull encoded data
// by calling the requestData API.
mediaRecorder.start(0);
// Start recording. If timeSlice has been provided, mediaRecorder will
// raise a dataavailable event containing the Blob of collected data every timeSlice milliseconds.
// If timeSlice isn't provided, the UA should call requestData() to obtain the Blob data; mTimeSlice is then treated as zero.
setTimeout(function() {
mediaRecorder.stop();
startRecording();
}, mTimeSlice);
}
// dirty workaround for broken 2nd-and-later timeslice intervals on Firefox: restart the recorder for every chunk
startRecording();
};
var isStopRecording = false;
this.stop = function() {
isStopRecording = true;
if (self.onstop) {
self.onstop({});
}
};
this.ondataavailable = this.onstop = function() {};
// Reference to itself
var self = this;
if (!self.mimeType && !!mediaStream.getAudioTracks) {
self.mimeType = mediaStream.getAudioTracks().length && mediaStream.getVideoTracks().length ? 'video/webm' : 'audio/ogg';
}
// Reference to "MediaRecorderWrapper" object
var mediaRecorder;
}
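// Usage sketch (illustration only; "stream" from getUserMedia): the wrapper
// restarts a fresh native MediaRecorder every mTimeSlice, so each blob handed
// to "ondataavailable" is a self-contained, independently playable recording:
//
//   var recorder = new MediaRecorderWrapper(stream);
//   recorder.mimeType = 'audio/ogg';
//   recorder.ondataavailable = function(blob) {
//       // one playable blob per second
//   };
//   recorder.start(1000);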
// =================
// StereoRecorder.js
function StereoRecorder(mediaStream) {
// void start(optional long timeSlice)
// timeSlice: interval (in milliseconds) at which to fire "ondataavailable"
this.start = function(timeSlice) {
timeSlice = timeSlice || 1000;
mediaRecorder = new StereoAudioRecorder(mediaStream, this);
mediaRecorder.record();
timeout = setInterval(function() {
mediaRecorder.requestData();
}, timeSlice);
};
this.stop = function() {
if (mediaRecorder) {
mediaRecorder.stop();
clearInterval(timeout);
}
};
this.ondataavailable = function() {};
// Reference to "StereoAudioRecorder" object
var mediaRecorder;
var timeout;
}
// ======================
// StereoAudioRecorder.js
// source code from: http://typedarray.org/wp-content/projects/WebAudioRecorder/script.js
function StereoAudioRecorder(mediaStream, root) {
// variables
var leftchannel = [];
var rightchannel = [];
var scriptprocessornode;
var recording = false;
var recordingLength = 0;
var sampleRate = root.sampleRate || 44100; // range: 22050 to 96000
var numChannels = root.audioChannels || 2;
this.record = function() {
recording = true;
// reset the buffers for the new recording
leftchannel.length = rightchannel.length = 0;
recordingLength = 0;
};
this.requestData = function() {
if (recordingLength == 0) {
requestDataInvoked = false;
return;
}
requestDataInvoked = true;
// clone stuff
var internal_leftchannel = leftchannel.slice(0);
var internal_rightchannel = rightchannel.slice(0);
var internal_recordingLength = recordingLength;
// reset the buffers for the new recording
leftchannel.length = rightchannel.length = 0;
recordingLength = 0;
requestDataInvoked = false;
// we flatten the left and right channels down
var leftBuffer = mergeBuffers(internal_leftchannel, internal_recordingLength);
var rightBuffer = mergeBuffers(internal_rightchannel, internal_recordingLength);
// we interleave both channels together
var interleaved;
if (numChannels === 2) {
interleaved = interleave(leftBuffer, rightBuffer);
} else {
interleaved = leftBuffer;
}
// we create our wav file
var buffer = new ArrayBuffer(44 + interleaved.length * 2);
var view = new DataView(buffer);
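// For reference, the canonical 44-byte WAVE header written below
// (all multi-byte fields are little-endian):
//
//   offset  0   "RIFF"                    offset 22   channel count
//   offset  4   chunk size (file - 8)     offset 24   sample rate
//   offset  8   "WAVE"                    offset 28   byte rate (rate * channels * 2)
//   offset 12   "fmt "                    offset 32   block align (channels * 2)
//   offset 16   16 (fmt chunk size)       offset 34   bits per sample (16)
//   offset 20   1 (PCM)                   offset 36   "data" + data size, then PCM samples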
// RIFF chunk descriptor
writeUTFBytes(view, 0, 'RIFF');
view.setUint32(4, 36 + interleaved.length * 2, true); // RIFF chunk size = total file size - 8 bytes
writeUTFBytes(view, 8, 'WAVE');
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
view.setUint32(16, 16, true);
view.setUint16(20, 1, true);
// channel count (1 = mono, 2 = stereo)
view.setUint16(22, numChannels, true);
view.setUint32(24, sampleRate, true);
// byte rate = sampleRate * numChannels * bytesPerSample (2 bytes for 16-bit PCM)
view.setUint32(28, sampleRate * numChannels * 2, true);
// block align = numChannels * bytesPerSample
view.setUint16(32, numChannels * 2, true);
view.setUint16(34, 16, true);
// data sub-chunk
writeUTFBytes(view, 36, 'data');
view.setUint32(40, interleaved.length * 2, true);
// write the PCM samples
var lng = interleaved.length;
var index = 44;
// scale Float32 samples in [-1, 1] to signed 16-bit PCM
var sampleVolume = 1;
for (var i = 0; i < lng; i++) {
view.setInt16(index, interleaved[i] * (0x7FFF * sampleVolume), true);
index += 2;
}
// our final binary blob
var blob = new Blob([view], {
type: 'audio/wav'
});
console.debug('audio recorded blob size:', bytesToSize(blob.size));
root.ondataavailable(blob);
};
this.stop = function() {
// we stop recording
recording = false;
this.requestData();
};
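// interleave() flattens two mono channels into a single stereo stream:
// [L0, L1, L2, ...] + [R0, R1, R2, ...] => [L0, R0, L1, R1, L2, R2, ...]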
function interleave(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float32Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
function mergeBuffers(channelBuffer, recordingLength) {
var result = new Float32Array(recordingLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
// creates the audio context
var audioContext = ObjectStore.AudioContext;
if (!ObjectStore.AudioContextConstructor)
ObjectStore.AudioContextConstructor = new audioContext();
var context = ObjectStore.AudioContextConstructor;
// creates a gain node
if (!ObjectStore.VolumeGainNode)
ObjectStore.VolumeGainNode = context.createGain();
var volume = ObjectStore.VolumeGainNode;
// creates an audio node from the microphone incoming stream
if (!ObjectStore.AudioInput)
ObjectStore.AudioInput = context.createMediaStreamSource(mediaStream);
var audioInput = ObjectStore.AudioInput;
// connect the stream to the gain node
audioInput.connect(volume);
/* From the spec: This value controls how frequently the audioprocess event is
dispatched and how many sample-frames need to be processed each call.
Lower values for buffer size will result in a lower (better) latency.
Higher values will be necessary to avoid audio breakup and glitches
Legal values are 256, 512, 1024, 2048, 4096, 8192, and 16384.*/
var bufferSize = root.bufferSize || 2048;
if (root.bufferSize == 0) bufferSize = 0; // zero lets the browser pick an appropriate buffer size
if (context.createJavaScriptNode) {
scriptprocessornode = context.createJavaScriptNode(bufferSize, numChannels, numChannels);
} else if (context.createScriptProcessor) {
scriptprocessornode = context.createScriptProcessor(bufferSize, numChannels, numChannels);
} else {
throw 'WebAudio API is not supported by this browser.';
}
bufferSize = scriptprocessornode.bufferSize;
console.debug('using audio buffer-size:', bufferSize);
var requestDataInvoked = false;
// sometimes "scriptprocessornode" disconnects from he destination-node
// and there is no exception thrown in this case.
// and obviously no further "ondataavailable" events will be emitted.
// below global-scope variable is added to debug such unexpected but "rare" cases.
window.scriptprocessornode = scriptprocessornode;
if (numChannels == 1) {
console.debug('Mono recording: the right channel is skipped.');
}
// http://webaudio.github.io/web-audio-api/#the-scriptprocessornode-interface
scriptprocessornode.onaudioprocess = function(e) {
if (!recording || requestDataInvoked) return;
var left = e.inputBuffer.getChannelData(0);
leftchannel.push(new Float32Array(left));
if (numChannels == 2) {
var right = e.inputBuffer.getChannelData(1);
rightchannel.push(new Float32Array(right));
}
recordingLength += bufferSize;
};
volume.connect(scriptprocessornode);
scriptprocessornode.connect(context.destination);
}
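// Resulting Web Audio graph (for reference):
//   MediaStreamSource -> GainNode -> ScriptProcessorNode -> AudioContext.destination
// The "onaudioprocess" callback above is where raw Float32 PCM frames are
// captured into leftchannel/rightchannel until requestData() packages them as a WAV blob.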
// =======================
// WhammyRecorderHelper.js
function WhammyRecorderHelper(mediaStream, root) {
this.record = function(timeSlice) {
// prefer the actual dimensions of a supplied <video> element, then fall back to 320x240
if (this.video && this.video instanceof HTMLVideoElement) {
if (!this.width) this.width = this.video.videoWidth || this.video.clientWidth;
if (!this.height) this.height = this.video.videoHeight || this.video.clientHeight;
}
if (!this.width) this.width = 320;
if (!this.height) this.height = 240;
if (!this.video) {
this.video = {
width: this.width,
height: this.height
};
}
if (!this.canvas || !this.canvas.width || !this.canvas.height) {
this.canvas = {
width: this.width,
height: this.height
};
}
canvas.width = this.canvas.width;
canvas.height = this.canvas.height;
// setting defaults
if (this.video && this.video instanceof HTMLVideoElement) {
video = this.video.cloneNode();
} else {
video = document.createElement('video');
video.src = URL.createObjectURL(mediaStream);
video.width = this.video.width;
video.height = this.video.height;
}
video.muted = true;
video.play();
lastTime = new Date().getTime();
whammy = new Whammy.Video();
console.log('canvas resolutions', canvas.width, '*', canvas.height);
console.log('video width/height', video.width || canvas.width, '*', video.height || canvas.height);
drawFrames();
};
this.clearOldRecordedFrames = function() {
frames = [];
};
var requestDataInvoked = false;
this.requestData = function() {
if (!frames.length) {
requestDataInvoked = false;
return;
}
requestDataInvoked = true;
// clone stuff
var internal_frames = frames.slice(0);
// reset the frames for the new recording
frames = [];
whammy.frames = dropBlackFrames(internal_frames, -1);
var WebM_Blob = whammy.compile();
root.ondataavailable(WebM_Blob);
console.debug('video recorded blob size:', bytesToSize(WebM_Blob.size));
requestDataInvoked = false;
};
var frames = [];
var isOnStartedDrawingNonBlankFramesInvoked = false;
function drawFrames() {
if (isStopDrawing) return;
if (requestDataInvoked) return setTimeout(drawFrames, 100);
var duration = new Date().getTime() - lastTime;
if (!duration) return drawFrames();
// via webrtc-experiment#206, by Jack i.e. @Seymourr
lastTime = new Date().getTime();
context.drawImage(video, 0, 0, canvas.width, canvas.height);
!isStopDrawing && frames.push({
duration: duration,
image: canvas.toDataURL('image/webp')
});
if (!isOnStartedDrawingNonBlankFramesInvoked && !isBlankFrame(frames[frames.length - 1])) {
isOnStartedDrawingNonBlankFramesInvoked = true;
root.onStartedDrawingNonBlankFrames();
}
setTimeout(drawFrames, 10);
}
var isStopDrawing = false;
this.stop = function() {
isStopDrawing = true;
this.requestData();
};
var canvas = document.createElement('canvas');
var context = canvas.getContext('2d');
var video;
var lastTime;
var whammy;
var self = this;
function isBlankFrame(frame, _pixTolerance, _frameTolerance) {
var localCanvas = document.createElement('canvas');
localCanvas.width = canvas.width;
localCanvas.height = canvas.height;
var context2d = localCanvas.getContext('2d');
var sampleColor = {
r: 0,
g: 0,
b: 0
};
var maxColorDifference = Math.sqrt(
Math.pow(255, 2) +
Math.pow(255, 2) +
Math.pow(255, 2)
);
var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
var matchPixCount, endPixCheck, maxPixCount;
var image = new Image();
image.src = frame.image;
context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
matchPixCount = 0;
endPixCheck = imageData.data.length;
maxPixCount = imageData.data.length / 4;
for (var pix = 0; pix < endPixCheck; pix += 4) {
var currentColor = {
r: imageData.data[pix],
g: imageData.data[pix + 1],
b: imageData.data[pix + 2]
};
var colorDifference = Math.sqrt(
Math.pow(currentColor.r - sampleColor.r, 2) +
Math.pow(currentColor.g - sampleColor.g, 2) +
Math.pow(currentColor.b - sampleColor.b, 2)
);
// the color difference is the Euclidean distance between the color vectors (r1,g1,b1) and (r2,g2,b2)
if (colorDifference <= maxColorDifference * pixTolerance) {
matchPixCount++;
}
}
// if nearly every pixel matches the sample (black) color, the frame is blank
if (maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
return true;
} else {
return false;
}
}
function dropBlackFrames(_frames, _framesToCheck, _pixTolerance, _frameTolerance) {
var localCanvas = document.createElement('canvas');
localCanvas.width = canvas.width;
localCanvas.height = canvas.height;
var context2d = localCanvas.getContext('2d');
var resultFrames = [];
var checkUntilNotBlack = _framesToCheck === -1;
var endCheckFrame = (_framesToCheck && _framesToCheck > 0 && _framesToCheck <= _frames.length) ?
_framesToCheck : _frames.length;
var sampleColor = {
r: 0,
g: 0,
b: 0
};
var maxColorDifference = Math.sqrt(
Math.pow(255, 2) +
Math.pow(255, 2) +
Math.pow(255, 2)
);
var pixTolerance = _pixTolerance && _pixTolerance >= 0 && _pixTolerance <= 1 ? _pixTolerance : 0;
var frameTolerance = _frameTolerance && _frameTolerance >= 0 && _frameTolerance <= 1 ? _frameTolerance : 0;
var doNotCheckNext = false;
for (var f = 0; f < endCheckFrame; f++) {
var matchPixCount, endPixCheck, maxPixCount;
if (!doNotCheckNext) {
var image = new Image();
image.src = _frames[f].image;
context2d.drawImage(image, 0, 0, canvas.width, canvas.height);
var imageData = context2d.getImageData(0, 0, canvas.width, canvas.height);
matchPixCount = 0;
endPixCheck = imageData.data.length;
maxPixCount = imageData.data.length / 4;
for (var pix = 0; pix < endPixCheck; pix += 4) {
var currentColor = {
r: imageData.data[pix],
g: imageData.data[pix + 1],
b: imageData.data[pix + 2]
};
var colorDifference = Math.sqrt(
Math.pow(currentColor.r - sampleColor.r, 2) +
Math.pow(currentColor.g - sampleColor.g, 2) +
Math.pow(currentColor.b - sampleColor.b, 2)
);
// the color difference is the Euclidean distance between the color vectors (r1,g1,b1) and (r2,g2,b2)
if (colorDifference <= maxColorDifference * pixTolerance) {
matchPixCount++;
}
}
}
if (!doNotCheckNext && maxPixCount - matchPixCount <= maxPixCount * frameTolerance) {
// console.log('removed black frame : ' + f + ' ; frame duration ' + _frames[f].duration);
} else {
// console.log('frame is passed : ' + f);
if (checkUntilNotBlack) {
doNotCheckNext = true;
}
resultFrames.push(_frames[f]);
}
}
resultFrames = resultFrames.concat(_frames.slice(endCheckFrame));
if (resultFrames.length <= 0) {
// at least one last frame should be available for further manipulation
// (if the total duration of all frames is < 1000ms, ffmpeg doesn't work well)
resultFrames.push(_frames[_frames.length - 1]);
}
return resultFrames;
}
}
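// Capture pipeline (for reference): a hidden <video> element plays the
// MediaStream, drawFrames() paints it onto a canvas roughly every 10ms,
// each frame is serialized with canvas.toDataURL('image/webp'), and
// requestData() hands the collected frames to Whammy to compile a WebM blob.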
// =================
// WhammyRecorder.js
function WhammyRecorder(mediaStream) {
// void start(optional long timeSlice)
// timeSlice: interval (in milliseconds) at which to fire "ondataavailable"
this.start = function(timeSlice) {
timeSlice = timeSlice || 1000;
mediaRecorder = new WhammyRecorderHelper(mediaStream, this);
for (var prop in this) {
if (typeof this[prop] !== 'function') {
mediaRecorder[prop] = this[prop];
}
}
mediaRecorder.record();
timeout = setInterval(function() {
mediaRecorder.requestData();
}, timeSlice);
};
this.stop = function() {
if (mediaRecorder) {
mediaRecorder.stop();
clearInterval(timeout);
}
};
this.clearOldRecordedFrames = function() {
if (mediaRecorder) {
mediaRecorder.clearOldRecordedFrames();
}
};
this.ondataavailable = function() {};
// Reference to "WhammyRecorder" object
var mediaRecorder;
var timeout;
}
// Muaz Khan - https://github.com/muaz-khan
// neizerth - https://github.com/neizerth
// MIT License - https://www.webrtc-experiment.com/licence/
// Documentation - https://github.com/streamproc/MediaStreamRecorder
// Note:
// ==========================================================
// whammy.js is an "external library"
// and has its own copyrights. Taken from "Whammy" project.
// https://github.com/antimatter15/whammy/blob/master/LICENSE
// =========
// Whammy.js
// todo: Firefox now supports WebP for WebM containers!
// their MediaRecorder implementation works well!
// should we provide an option to record via Whammy.js, or is the MediaRecorder API a better solution?
var Whammy = (function() {
function toWebM(frames) {
var info = checkFrames(frames);
var CLUSTER_MAX_DURATION = 30000;
var EBML = [{
"id": 0x1a45dfa3, // EBML
"data": [{
"data": 1,
"id": 0x4286 // EBMLVersion
}, {
"data": 1,
"id": 0x42f7 // EBMLReadVersion
}, {
"data": 4,
"id": 0x42f2 // EBMLMaxIDLength
}, {
"data": 8,
"id": 0x42f3 // EBMLMaxSizeLength
}, {
"data": "webm",
"id": 0x4282 // DocType
}, {
"data": 2,
"id": 0x4287 // DocTypeVersion
}, {
"data": 2,
"id": 0x4285 // DocTypeReadVersion
}]
}, {
"id": 0x18538067, // Segment
"data": [{
"id": 0x1549a966, // Info
"data": [{
"data": 1e6, //do things in millisecs (num of nanosecs for duration scale)
"id": 0x2ad7b1 // TimecodeScale
}, {
"data": "whammy",
"id": 0x4d80 // MuxingApp
}, {
"data": "whammy",
"id": 0x5741 // WritingApp
}, {
"data": doubleToString(info.duration),
"id": 0x4489 // Duration
}]
}, {
"id": 0x1654ae6b, // Tracks
"data": [{
"id": 0xae, // TrackEntry
"data": [{
"data": 1,
"id": 0xd7 // TrackNumber
}, {
"data": 1,
"id": 0x63c5 // TrackUID
}, {
"data": 0,
"id": 0x9c // FlagLacing
}, {
"data": "und",
"id": 0x22b59c // Language
}, {
"data": "V_VP8",
"id": 0x86 // CodecID
}, {
"data": "VP8",
"id": 0x258688 // CodecName
}, {
"data": 1,
"id": 0x83 // TrackType
}, {
"id": 0xe0, // Video
"data": [{
"data": info.width,
"id": 0xb0 // PixelWidth
}, {
"data": info.height,
"id": 0xba // PixelHeight
}]
}]
}]
}]
}];
//Generate clusters (max duration)
var frameNumber = 0;
var clusterTimecode = 0;
while (frameNumber < frames.length) {
var clusterFrames = [];
var clusterDuration = 0;
do {
clusterFrames.push(frames[frameNumber]);
clusterDuration += frames[frameNumber].duration;
frameNumber++;
} while (frameNumber < frames.length && clusterDuration < CLUSTER_MAX_DURATION);
var clusterCounter = 0;
var cluster = {
"id": 0x1f43b675, // Cluster
"data": [{
"data": clusterTimecode,
"id": 0xe7 // Timecode
}].concat(clusterFrames.map(function(webp) {
var block = makeSimpleBlock({
discardable: 0,
frame: webp.data.slice(4),
invisible: 0,
keyframe: 1,
lacing: 0,
trackNum: 1,
timecode: Math.round(clusterCounter)
});
clusterCounter += webp.duration;
return {
data: block,
id: 0xa3
};
}))
}; //Add cluster to segment
EBML[1].data.push(cluster);
clusterTimecode += clusterDuration;
}
return generateEBML(EBML);
}
// sums the durations of all frames; width/height are taken from the first frame
function checkFrames(frames) {
if (!frames[0]) {
console.warn('Something went wrong. Maybe WebP format is not supported in the current browser.');
return;
}
var width = frames[0].width,
height = frames[0].height,
duration = frames[0].duration;
for (var i = 1; i < frames.length; i++) {
duration += frames[i].duration;
}
return {
duration: duration,
width: width,
height: height
};
}
function numToBuffer(num) {
var parts = [];
while (num > 0) {
parts.push(num & 0xff);
num = num >> 8;
}
return new Uint8Array(parts.reverse());
}
function strToBuffer(str) {
return new Uint8Array(str.split('').map(function(e) {
return e.charCodeAt(0);
}));
}
function bitsToBuffer(bits) {
var data = [];
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data.push(parseInt(bits.substr(i, 8), 2));
}
return new Uint8Array(data);
}
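// EBML note (for reference): element sizes below are encoded as variable-length
// integers, where the position of the first 1-bit tells the parser how many
// bytes the size field occupies. With this implementation, a payload length of
// 5 becomes the two-byte sequence 0x40 0x05 (binary 01000000 00000101).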
function generateEBML(json) {
var ebml = [];
for (var i = 0; i < json.length; i++) {
var data = json[i].data;
if (typeof data == 'object') data = generateEBML(data);
if (typeof data == 'number') data = bitsToBuffer(data.toString(2));
if (typeof data == 'string') data = strToBuffer(data);
var len = data.size || data.byteLength || data.length;
var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
var size_str = len.toString(2);
var padded = (new Array((zeroes * 7 + 7 + 1) - size_str.length)).join('0') + size_str;
var size = (new Array(zeroes)).join('0') + '1' + padded;
ebml.push(numToBuffer(json[i].id));
ebml.push(bitsToBuffer(size));
ebml.push(data);
}
return new Blob(ebml, {
type: "video/webm"
});
}
function toBinStr_old(bits) {
var data = '';
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));
}
return data;
}
function generateEBML_old(json) {
var ebml = '';
for (var i = 0; i < json.length; i++) {
var data = json[i].data;
if (typeof data == 'object') data = generateEBML_old(data);
if (typeof data == 'number') data = toBinStr_old(data.toString(2));
var len = data.length;
var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
var size_str = len.toString(2);
var padded = (new Array((zeroes * 7 + 7 + 1) - size_str.length)).join('0') + size_str;
var size = (new Array(zeroes)).join('0') + '1' + padded;
ebml += toBinStr_old(json[i].id.toString(2)) + toBinStr_old(size) + data;
}
return ebml;
}
function makeSimpleBlock(data) {
var flags = 0;
if (data.keyframe) flags |= 128;
if (data.invisible) flags |= 8;
if (data.lacing) flags |= (data.lacing << 1);
if (data.discardable) flags |= 1;
if (data.trackNum > 127) {
throw "TrackNumber > 127 not supported";
}
var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
return String.fromCharCode(e);
}).join('') + data.frame;
return out;
}
function parseWebP(riff) {
var VP8 = riff.RIFF[0].WEBP[0];
var frame_start = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
for (var i = 0, c = []; i < 4; i++) c[i] = VP8.charCodeAt(frame_start + 3 + i);
var width, height, tmp;
//the code below is literally copied verbatim from the bitstream spec
tmp = (c[1] << 8) | c[0];
width = tmp & 0x3FFF;
tmp = (c[3] << 8) | c[2];
height = tmp & 0x3FFF;
return {
width: width,
height: height,
data: VP8,
riff: riff
};
}
function parseRIFF(string) {
var offset = 0;
var chunks = {};
while (offset < string.length) {
var id = string.substr(offset, 4);
var len = parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
var unpadded = i.charCodeAt(0).toString(2);
return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
}).join(''), 2);
var data = string.substr(offset + 4 + 4, len);
offset += 4 + 4 + len;
chunks[id] = chunks[id] || [];
if (id == 'RIFF' || id == 'LIST') {
chunks[id].push(parseRIFF(data));
} else {
chunks[id].push(data);
}
}
return chunks;
}
function doubleToString(num) {
return [].slice.call(
new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) {
return String.fromCharCode(e);
}).reverse().join('');
}
// a more abstract-ish API
function WhammyVideo(duration) {
this.frames = [];
this.duration = duration || 1;
this.quality = 1; // canvas.toDataURL expects a quality in the [0, 1] range; out-of-range values are ignored
}
WhammyVideo.prototype.add = function(frame, duration) {
if ('canvas' in frame) { //CanvasRenderingContext2D
frame = frame.canvas;
}
if ('toDataURL' in frame) {
frame = frame.toDataURL('image/webp', this.quality);
}
if (!(/^data:image\/webp;base64,/ig).test(frame)) {
throw "Input must be formatted properly as a base64 encoded DataURI of type image/webp";
}
this.frames.push({
image: frame,
duration: duration || this.duration
});
};
WhammyVideo.prototype.compile = function() {
return toWebM(this.frames.map(function(frame) {
var webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));
webp.duration = frame.duration;
return webp;
}));
};
return {
Video: WhammyVideo,
toWebM: toWebM
};
})();
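// Usage sketch (illustration only; assumes the browser can export canvas
// frames as image/webp, e.g. Chrome, and that "canvas" is an existing
// <canvas> element painted elsewhere):
//
//   var encoder = new Whammy.Video();
//   encoder.add(canvas, 100); // show this canvas frame for 100ms
//   encoder.add(canvas, 100);
//   var webmBlob = encoder.compile(); // Blob of type "video/webm"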
// Muaz Khan - https://github.com/muaz-khan
// neizerth - https://github.com/neizerth
// MIT License - https://www.webrtc-experiment.com/licence/
// Documentation - https://github.com/streamproc/MediaStreamRecorder
// ==========================================================
// GifRecorder.js
function GifRecorder(mediaStream) {
if (!window.GIFEncoder) {
throw 'Please link: https://cdn.webrtc-experiment.com/gif-recorder.js';
}
// void start(optional long timeSlice)
// timeSlice: interval (in milliseconds) at which to fire "ondataavailable"
this.start = function(timeSlice) {
timeSlice = timeSlice || 1000;
var imageWidth = this.videoWidth || 320;
var imageHeight = this.videoHeight || 240;
canvas.width = video.width = imageWidth;
canvas.height = video.height = imageHeight;
// external library to record as GIF images
gifEncoder = new GIFEncoder();
// void setRepeat(int iter)
// Sets the number of times the set of GIF frames should be played.
// Default is 1; 0 means play indefinitely.
gifEncoder.setRepeat(0);
// void setFrameRate(Number fps)
// Sets frame rate in frames per second.
// Equivalent to setDelay(1000/fps).
// Using "setDelay" instead of "setFrameRate"
gifEncoder.setDelay(this.frameRate || 200);
// void setQuality(int quality)
// Sets quality of color quantization (conversion of images to the
// maximum 256 colors allowed by the GIF specification).
// Lower values (minimum = 1) produce better colors,
// but slow processing significantly. 10 is the default,
// and produces good color mapping at reasonable speeds.
// Values greater than 20 do not yield significant improvements in speed.
gifEncoder.setQuality(this.quality || 1);
// Boolean start()
// This writes the GIF Header and returns false if it fails.
gifEncoder.start();
startTime = Date.now();
function drawVideoFrame(time) {
lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
if (typeof lastFrameTime === 'undefined') {
lastFrameTime = time;
}
// ~10 fps
if (time - lastFrameTime < 90) return;
context.drawImage(video, 0, 0, imageWidth, imageHeight);
gifEncoder.addFrame(context);
// console.log('Recording...' + Math.round((Date.now() - startTime) / 1000) + 's');
// console.log("fps: ", 1000 / (time - lastFrameTime));
lastFrameTime = time;
}
lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
timeout = setTimeout(doneRecording, timeSlice);
};
function doneRecording() {
endTime = Date.now();
var gifBlob = new Blob([new Uint8Array(gifEncoder.stream().bin)], {
type: 'image/gif'
});
self.ondataavailable(gifBlob);
// todo: find a way to clear old recorded blobs
gifEncoder.stream().bin = [];
}
this.stop = function() {
if (lastAnimationFrame) {
cancelAnimationFrame(lastAnimationFrame);
clearTimeout(timeout);
doneRecording();
}
};
this.ondataavailable = function() {};
this.onstop = function() {};
// Reference to itself
var self = this;
var canvas = document.createElement('canvas');
var context = canvas.getContext('2d');
var video = document.createElement('video');
video.muted = true;
video.autoplay = true;
video.src = URL.createObjectURL(mediaStream);
video.play();
var lastAnimationFrame = null;
var startTime, endTime, lastFrameTime;
var gifEncoder;
var timeout;
}
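// Usage sketch (illustration only; requires the external gif-recorder.js
// linked above so that window.GIFEncoder exists; "stream" from getUserMedia):
//
//   var recorder = new GifRecorder(stream);
//   recorder.videoWidth = 320;
//   recorder.videoHeight = 240;
//   recorder.ondataavailable = function(gifBlob) {
//       // one image/gif blob covering the whole timeSlice
//   };
//   recorder.start(3000); // a single GIF covering ~3 seconds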
// ______________________
// MultiStreamRecorder.js
function MultiStreamRecorder(mediaStream) {
if (!mediaStream) throw 'MediaStream is mandatory.';
var self = this;
var isFirefox = !!navigator.mozGetUserMedia;
this.stream = mediaStream;
// void start(optional long timeSlice)
// timeSlice: interval (in milliseconds) at which to fire "ondataavailable"
this.start = function(timeSlice) {
audioRecorder = new MediaStreamRecorder(mediaStream);
videoRecorder = new MediaStreamRecorder(mediaStream);
audioRecorder.mimeType = 'audio/ogg';
videoRecorder.mimeType = 'video/webm';
for (var prop in this) {
if (typeof this[prop] !== 'function') {
audioRecorder[prop] = videoRecorder[prop] = this[prop];
}
}
audioRecorder.ondataavailable = function(blob) {
if (!audioVideoBlobs[recordingInterval]) {
audioVideoBlobs[recordingInterval] = {};
}
audioVideoBlobs[recordingInterval].audio = blob;
if (audioVideoBlobs[recordingInterval].video && !audioVideoBlobs[recordingInterval].onDataAvailableEventFired) {
audioVideoBlobs[recordingInterval].onDataAvailableEventFired = true;
fireOnDataAvailableEvent(audioVideoBlobs[recordingInterval]);
}
};
videoRecorder.ondataavailable = function(blob) {
if (isFirefox) {
return self.ondataavailable({
video: blob,
audio: blob
});
}
if (!audioVideoBlobs[recordingInterval]) {
audioVideoBlobs[recordingInterval] = {};
}
audioVideoBlobs[recordingInterval].video = blob;
if (audioVideoBlobs[recordingInterval].audio && !audioVideoBlobs[recordingInterval].onDataAvailableEventFired) {
audioVideoBlobs[recordingInterval].onDataAvailableEventFired = true;
fireOnDataAvailableEvent(audioVideoBlobs[recordingInterval]);
}
};
function fireOnDataAvailableEvent(blobs) {
recordingInterval++;
self.ondataavailable(blobs);
}
videoRecorder.onstop = audioRecorder.onstop = function(error) {
self.onstop(error);
};
if (!isFirefox) {
// to make sure both audio/video are synced.
videoRecorder.onStartedDrawingNonBlankFrames = function() {
videoRecorder.clearOldRecordedFrames();
audioRecorder.start(timeSlice);
};
videoRecorder.start(timeSlice);
} else {
videoRecorder.start(timeSlice);
}
};
this.stop = function() {
if (audioRecorder) audioRecorder.stop();
if (videoRecorder) videoRecorder.stop();
};
this.ondataavailable = function(blob) {
console.log('ondataavailable..', blob);
};
this.onstop = function(error) {
console.warn('stopped..', error);
};
var audioRecorder;
var videoRecorder;
var audioVideoBlobs = {};
var recordingInterval = 0;
}
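// Usage sketch (illustration only; "stream" from getUserMedia): records audio
// and video in parallel and delivers them as paired blobs:
//
//   var recorder = new MultiStreamRecorder(stream);
//   recorder.ondataavailable = function(blobs) {
//       // blobs.audio: audio/ogg, blobs.video: video/webm
//       // (on Firefox, both properties reference one audio+video blob)
//   };
//   recorder.start(5000);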
function bytesToSize(bytes) {
var k = 1000;
var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
if (bytes === 0) {
return '0 Bytes';
}
var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10);
return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
}
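// Example (illustration only): bytesToSize(1536000) returns "1.54 MB"
// (note: powers of 1000, not 1024).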