browser(firefox): implement screencast (#6452)

parent 262824deb7
commit 78ec057117

@@ -1,2 +1,2 @@
-1254
-Changed: lushnikov@chromium.org Thu 06 May 2021 11:02:00 AM PDT
+1255
+Changed: pavel.feldman@gmail.com Thu 06 May 2021 06:33:59 PM PDT
@@ -159,8 +159,8 @@ class TargetRegistry {
 target.updateColorSchemeOverride();
 if (!hasExplicitSize)
 target.updateViewportSize();
-if (browserContext.screencastOptions)
-target._startVideoRecording(browserContext.screencastOptions);
+if (browserContext.videoRecordingOptions)
+target._startVideoRecording(browserContext.videoRecordingOptions);
 };
 
 const onTabCloseListener = event => {

@@ -333,7 +333,8 @@ class PageTarget {
 this._url = 'about:blank';
 this._openerId = opener ? opener.id() : undefined;
 this._channel = SimpleChannel.createForMessageManager(`browser::page[${this._targetId}]`, this._linkedBrowser.messageManager);
-this._screencastInfo = undefined;
+this._videoRecordingInfo = undefined;
+this._screencastRecordingInfo = undefined;
 this._dialogs = new Map();
 
 const navigationListener = {

@@ -352,6 +353,7 @@ class PageTarget {
 this._registry._browserBrowsingContextToTarget.set(this._linkedBrowser.browsingContext, this);
 
 this._registry.emit(TargetRegistry.Events.TargetCreated, this);
+this._screencast = Cc['@mozilla.org/juggler/screencast;1'].getService(Ci.nsIScreencastService);
 }
 
 dialog(dialogId) {
@@ -497,43 +499,69 @@ class PageTarget {
 if (width < 10 || width > 10000 || height < 10 || height > 10000)
 throw new Error("Invalid size");
 
-const screencast = Cc['@mozilla.org/juggler/screencast;1'].getService(Ci.nsIScreencastService);
 const docShell = this._gBrowser.ownerGlobal.docShell;
 // Exclude address bar and navigation control from the video.
 const rect = this.linkedBrowser().getBoundingClientRect();
 const devicePixelRatio = this._window.devicePixelRatio;
-const videoSessionId = screencast.startVideoRecording(docShell, file, width, height, devicePixelRatio * rect.top);
-this._screencastInfo = { videoSessionId, file };
+const sessionId = this._screencast.startVideoRecording(docShell, true, file, width, height, 0, devicePixelRatio * rect.top);
+this._videoRecordingInfo = { sessionId, file };
 this.emit(PageTarget.Events.ScreencastStarted);
 }
 
-async _stopVideoRecording() {
-if (!this._screencastInfo)
+_stopVideoRecording() {
+if (!this._videoRecordingInfo)
 throw new Error('No video recording in progress');
-const screencastInfo = this._screencastInfo;
-this._screencastInfo = undefined;
-const screencast = Cc['@mozilla.org/juggler/screencast;1'].getService(Ci.nsIScreencastService);
-const result = new Promise(resolve =>
-Services.obs.addObserver(function onStopped(subject, topic, data) {
-if (screencastInfo.videoSessionId != data)
-return;
-
-Services.obs.removeObserver(onStopped, 'juggler-screencast-stopped');
-resolve();
-}, 'juggler-screencast-stopped')
-);
-screencast.stopVideoRecording(screencastInfo.videoSessionId);
-return result;
+const videoRecordingInfo = this._videoRecordingInfo;
+this._videoRecordingInfo = undefined;
+this._screencast.stopVideoRecording(videoRecordingInfo.sessionId);
 }
 
-screencastInfo() {
-return this._screencastInfo;
+videoRecordingInfo() {
+return this._videoRecordingInfo;
 }
 
+async startScreencast({ width, height, quality }) {
+// On Mac the window may not yet be visible when TargetCreated and its
+// NSWindow.windowNumber may be -1, so we wait until the window is known
+// to be initialized and visible.
+await this.windowReady();
+if (width < 10 || width > 10000 || height < 10 || height > 10000)
+throw new Error("Invalid size");
+
+const docShell = this._gBrowser.ownerGlobal.docShell;
+// Exclude address bar and navigation control from the video.
+const rect = this.linkedBrowser().getBoundingClientRect();
+const devicePixelRatio = this._window.devicePixelRatio;
+const screencastId = this._screencast.startVideoRecording(docShell, false, '', width, height, quality || 90, devicePixelRatio * rect.top);
+const onFrame = (subject, topic, data) => {
+this.emit(PageTarget.Events.ScreencastFrame, data);
+};
+Services.obs.addObserver(onFrame, 'juggler-screencast-frame');
+this._screencastRecordingInfo = { screencastId, onFrame };
+return { screencastId };
+}
+
+screencastFrameAck({ screencastId }) {
+if (!this._screencastRecordingInfo || this._screencastRecordingInfo.screencastId !== screencastId)
+return;
+this._screencast.screencastFrameAck(screencastId);
+}
+
+stopScreencast() {
+if (!this._screencastRecordingInfo)
+throw new Error('No screencast in progress');
+const screencastInfo = this._screencastRecordingInfo;
+Services.obs.removeObserver(screencastInfo.onFrame, 'juggler-screencast-frame');
+this._screencastRecordingInfo = undefined;
+this._screencast.stopVideoRecording(screencastInfo.screencastId);
+}
+
 dispose() {
 this._disposed = true;
-if (this._screencastInfo)
-this._stopVideoRecording().catch(e => dump(`stopVideoRecording failed:\n${e}\n`));
+if (this._videoRecordingInfo)
+this._stopVideoRecording();
+if (this._screencastRecordingInfo)
+this.stopScreencast();
 this._browserContext.pages.delete(this);
 this._registry._browserToTarget.delete(this._linkedBrowser);
 this._registry._browserBrowsingContextToTarget.delete(this._linkedBrowser.browsingContext);
@@ -551,6 +579,7 @@ class PageTarget {
 
 PageTarget.Events = {
 ScreencastStarted: Symbol('PageTarget.ScreencastStarted'),
+ScreencastFrame: Symbol('PageTarget.ScreencastFrame'),
 Crashed: Symbol('PageTarget.Crashed'),
 DialogOpened: Symbol('PageTarget.DialogOpened'),
 DialogClosed: Symbol('PageTarget.DialogClosed'),

@@ -591,7 +620,7 @@ class BrowserContext {
 this.defaultUserAgent = null;
 this.touchOverride = false;
 this.colorScheme = 'none';
-this.screencastOptions = undefined;
+this.videoRecordingOptions = undefined;
 this.scriptsToEvaluateOnNewDocument = [];
 this.bindings = [];
 this.settings = {};

@@ -788,8 +817,8 @@ class BrowserContext {
 return result;
 }
 
-async setScreencastOptions(options) {
-this.screencastOptions = options;
+async setVideoRecordingOptions(options) {
+this.videoRecordingOptions = options;
 if (!options)
 return;
 const promises = [];
@@ -40,7 +40,7 @@ class BrowserHandler {
 ];
 
 const onScreencastStopped = (subject, topic, data) => {
-this._session.emitEvent('Browser.screencastFinished', {screencastId: '' + data});
+this._session.emitEvent('Browser.videoRecordingFinished', {screencastId: '' + data});
 };
 Services.obs.addObserver(onScreencastStopped, 'juggler-screencast-stopped');
 this._eventListeners.push(() => Services.obs.removeObserver(onScreencastStopped, 'juggler-screencast-stopped'));

@@ -204,8 +204,8 @@ class BrowserHandler {
 await this._targetRegistry.browserContextForId(browserContextId).setColorScheme(nullToUndefined(colorScheme));
 }
 
-async ['Browser.setScreencastOptions']({browserContextId, dir, width, height, scale}) {
-await this._targetRegistry.browserContextForId(browserContextId).setScreencastOptions({dir, width, height, scale});
+async ['Browser.setVideoRecordingOptions']({browserContextId, dir, width, height, scale}) {
+await this._targetRegistry.browserContextForId(browserContextId).setVideoRecordingOptions({dir, width, height, scale});
 }
 
 async ['Browser.setUserAgentOverride']({browserContextId, userAgent}) {
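For context on how the renamed Browser-level calls above fit together: a protocol client first configures per-context recording with Browser.setVideoRecordingOptions, and is later notified through Browser.videoRecordingFinished once a recording's file has been written. A minimal client-side sketch (not part of the commit; `connection` is a hypothetical juggler-protocol wrapper exposing send(method, params) and on(event, handler), and the width/height values are illustrative):

// Hypothetical juggler client; connection.send/connection.on are assumed helpers.
async function enableContextVideoRecording(connection, browserContextId, dir) {
  // Every page created in this context will start recording into `dir`.
  await connection.send('Browser.setVideoRecordingOptions', {
    browserContextId,
    dir,
    width: 1280,  // illustrative size
    height: 720,
  });
  // Emitted off the 'juggler-screencast-stopped' observer notification,
  // i.e. once the video file for a recording has been fully saved.
  connection.on('Browser.videoRecordingFinished', ({ screencastId }) => {
    console.log(`recording ${screencastId} finished`);
  });
}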
@@ -88,8 +88,8 @@ class PageHandler {
 // to be ignored by the protocol clients.
 this._isPageReady = false;
 
-if (this._pageTarget.screencastInfo())
-this._onScreencastStarted();
+if (this._pageTarget.videoRecordingInfo())
+this._onVideoRecordingStarted();
 
 this._eventListeners = [
 helper.on(this._pageTarget, PageTarget.Events.DialogOpened, this._onDialogOpened.bind(this)),

@@ -97,7 +97,8 @@ class PageHandler {
 helper.on(this._pageTarget, PageTarget.Events.Crashed, () => {
 this._session.emitEvent('Page.crashed', {});
 }),
-helper.on(this._pageTarget, PageTarget.Events.ScreencastStarted, this._onScreencastStarted.bind(this)),
+helper.on(this._pageTarget, PageTarget.Events.ScreencastStarted, this._onVideoRecordingStarted.bind(this)),
+helper.on(this._pageTarget, PageTarget.Events.ScreencastFrame, this._onScreencastFrame.bind(this)),
 helper.on(this._pageNetwork, PageNetwork.Events.Request, this._handleNetworkEvent.bind(this, 'Network.requestWillBeSent')),
 helper.on(this._pageNetwork, PageNetwork.Events.Response, this._handleNetworkEvent.bind(this, 'Network.responseReceived')),
 helper.on(this._pageNetwork, PageNetwork.Events.RequestFinished, this._handleNetworkEvent.bind(this, 'Network.requestFinished')),

@@ -146,9 +147,13 @@ class PageHandler {
 helper.removeListeners(this._eventListeners);
 }
 
-_onScreencastStarted() {
-const info = this._pageTarget.screencastInfo();
-this._session.emitEvent('Page.screencastStarted', { screencastId: info.videoSessionId, file: info.file });
+_onVideoRecordingStarted() {
+const info = this._pageTarget.videoRecordingInfo();
+this._session.emitEvent('Page.videoRecordingStarted', { screencastId: info.sessionId, file: info.file });
 }
 
+_onScreencastFrame(data) {
+this._session.emitEvent('Page.screencastFrame', { data, deviceWidth: 0, deviceHeight: 0 });
+}
+
 _onPageReady(event) {

@@ -373,6 +378,18 @@ class PageHandler {
 return await this._contentPage.send('setInterceptFileChooserDialog', options);
 }
 
+async ['Page.startScreencast'](options) {
+return await this._pageTarget.startScreencast(options);
+}
+
+async ['Page.screencastFrameAck'](options) {
+await this._pageTarget.screencastFrameAck(options);
+}
+
+async ['Page.stopScreencast'](options) {
+await this._pageTarget.stopScreencast(options);
+}
+
 async ['Page.sendMessageToWorker']({workerId, message}) {
 const worker = this._workers.get(workerId);
 if (!worker)
@@ -226,7 +226,7 @@ const Browser = {
 canceled: t.Optional(t.Boolean),
 error: t.Optional(t.String),
 },
-'screencastFinished': {
+'videoRecordingFinished': {
 screencastId: t.String,
 },
 },

@@ -420,7 +420,7 @@ const Browser = {
 colorScheme: t.Nullable(t.Enum(['dark', 'light', 'no-preference'])),
 },
 },
-'setScreencastOptions': {
+'setVideoRecordingOptions': {
 params: {
 browserContextId: t.Optional(t.String),
 dir: t.String,

@@ -664,7 +664,7 @@ const Page = {
 workerId: t.String,
 message: t.String,
 },
-'screencastStarted': {
+'videoRecordingStarted': {
 screencastId: t.String,
 file: t.String,
 },

@@ -696,6 +696,11 @@ const Page = {
 opcode: t.Number,
 data: t.String,
 },
+'screencastFrame': {
+data: t.String,
+deviceWidth: t.Number,
+deviceHeight: t.Number,
+},
 },
 
 methods: {

@@ -895,6 +900,23 @@ const Page = {
 message: t.String,
 },
 },
+'startScreencast': {
+params: {
+width: t.Number,
+height: t.Number,
+quality: t.Number,
+},
+returns: {
+screencastId: t.String,
+},
+},
+'screencastFrameAck': {
+params: {
+screencastId: t.String,
+},
+},
+'stopScreencast': {
+},
 },
 };
 
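The new Page-domain methods declared above are meant to be driven as a simple frame loop: start a screencast, consume base64 JPEG frames from Page.screencastFrame, acknowledge each frame so the browser sends the next one, and stop when done. A hedged sketch of such a client loop (not part of the commit; `session` is a hypothetical protocol wrapper with send/on, and the size/quality values are illustrative):

// Hypothetical protocol session wrapper.
async function runScreencast(session, durationMs) {
  let screencastId;
  session.on('Page.screencastFrame', ({ data }) => {
    // `data` is a base64-encoded JPEG frame; deviceWidth/deviceHeight are 0 in this patch.
    console.log(`frame received, ${data.length} base64 chars`);
    // Only one frame is kept in flight, so ack promptly to keep frames coming.
    session.send('Page.screencastFrameAck', { screencastId });
  });
  ({ screencastId } = await session.send('Page.startScreencast', { width: 1280, height: 720, quality: 80 }));
  await new Promise(resolve => setTimeout(resolve, durationMs));
  await session.send('Page.stopScreencast', {});
}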
@@ -17,7 +17,7 @@ using namespace webrtc;
 
 namespace mozilla {
 
-rtc::scoped_refptr<webrtc::VideoCaptureModule> HeadlessWindowCapturer::Create(HeadlessWidget* headlessWindow) {
+rtc::scoped_refptr<webrtc::VideoCaptureModuleEx> HeadlessWindowCapturer::Create(HeadlessWidget* headlessWindow) {
 return new rtc::RefCountedObject<HeadlessWindowCapturer>(headlessWindow);
 }
 

@@ -41,6 +41,19 @@ void HeadlessWindowCapturer::DeRegisterCaptureDataCallback(rtc::VideoSinkInterfa
 }
 }
 
+void HeadlessWindowCapturer::RegisterRawFrameCallback(webrtc::RawFrameCallback* rawFrameCallback) {
+rtc::CritScope lock2(&_callBackCs);
+_rawFrameCallbacks.insert(rawFrameCallback);
+}
+
+void HeadlessWindowCapturer::DeRegisterRawFrameCallback(webrtc::RawFrameCallback* rawFrameCallback) {
+rtc::CritScope lock2(&_callBackCs);
+auto it = _rawFrameCallbacks.find(rawFrameCallback);
+if (it != _rawFrameCallbacks.end()) {
+_rawFrameCallbacks.erase(it);
+}
+}
+
 void HeadlessWindowCapturer::NotifyFrameCaptured(const webrtc::VideoFrame& frame) {
 rtc::CritScope lock2(&_callBackCs);
 for (auto dataCallBack : _dataCallBacks)

@@ -63,10 +76,23 @@ int32_t HeadlessWindowCapturer::StartCapture(const VideoCaptureCapability& capab
 }
 
 if (dataSurface->GetFormat() != gfx::SurfaceFormat::B8G8R8A8) {
-fprintf(stderr, "Uexpected snapshot surface format: %hhd\n", dataSurface->GetFormat());
+fprintf(stderr, "Unexpected snapshot surface format: %hhd\n", dataSurface->GetFormat());
 return;
 }
 
+webrtc::VideoCaptureCapability frameInfo;
+frameInfo.width = dataSurface->GetSize().width;
+frameInfo.height = dataSurface->GetSize().height;
+#if MOZ_LITTLE_ENDIAN()
+frameInfo.videoType = VideoType::kARGB;
+#else
+frameInfo.videoType = VideoType::kBGRA;
+#endif
+
+for (auto rawFrameCallback : _rawFrameCallbacks) {
+rawFrameCallback->OnRawFrame(dataSurface->GetData(), dataSurface->Stride(), frameInfo);
+}
+
 int width = dataSurface->GetSize().width;
 int height = dataSurface->GetSize().height;
 rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(width, height);

@@ -87,7 +113,6 @@ int32_t HeadlessWindowCapturer::StartCapture(const VideoCaptureCapability& capab
 buffer->MutableDataU(), buffer->StrideU(),
 buffer->MutableDataV(), buffer->StrideV(),
 width, height);
-
 if (conversionResult != 0) {
 fprintf(stderr, "Failed to convert capture frame to I420: %d\n", conversionResult);
 return;
@@ -10,6 +10,7 @@
 #include "media/base/videosinkinterface.h"
 #include "modules/video_capture/video_capture.h"
 #include "rtc_base/criticalsection.h"
+#include "video_engine/desktop_capture_impl.h"
 
 class nsIWidget;
 

@@ -19,9 +20,9 @@ namespace widget {
 class HeadlessWidget;
 }
 
-class HeadlessWindowCapturer : public webrtc::VideoCaptureModule {
+class HeadlessWindowCapturer : public webrtc::VideoCaptureModuleEx {
 public:
-static rtc::scoped_refptr<webrtc::VideoCaptureModule> Create(mozilla::widget::HeadlessWidget*);
+static rtc::scoped_refptr<webrtc::VideoCaptureModuleEx> Create(mozilla::widget::HeadlessWidget*);
 
 void RegisterCaptureDataCallback(
 rtc::VideoSinkInterface<webrtc::VideoFrame>* dataCallback) override;

@@ -29,6 +30,9 @@ class HeadlessWindowCapturer : public webrtc::VideoCaptureModule {
 rtc::VideoSinkInterface<webrtc::VideoFrame>* dataCallback) override;
 int32_t StopCaptureIfAllClientsClose() override;
 
+void RegisterRawFrameCallback(webrtc::RawFrameCallback* rawFrameCallback) override;
+void DeRegisterRawFrameCallback(webrtc::RawFrameCallback* rawFrameCallback) override;
+
 int32_t SetCaptureRotation(webrtc::VideoRotation) override { return -1; }
 bool SetApplyRotation(bool) override { return false; }
 bool GetApplyRotation() override { return true; }

@@ -54,6 +58,7 @@ class HeadlessWindowCapturer : public webrtc::VideoCaptureModule {
 mozilla::widget::HeadlessWidget* mWindow = nullptr;
 rtc::CriticalSection _callBackCs;
 std::set<rtc::VideoSinkInterface<webrtc::VideoFrame>*> _dataCallBacks;
+std::set<webrtc::RawFrameCallback*> _rawFrameCallbacks;
 };
 
 } // namespace mozilla
@@ -12,10 +12,12 @@ interface nsIDocShell;
 [scriptable, uuid(d8c4d9e0-9462-445e-9e43-68d3872ad1de)]
 interface nsIScreencastService : nsISupports
 {
-AString startVideoRecording(in nsIDocShell docShell, in ACString fileName, in uint32_t width, in uint32_t height, in int32_t offset_top);
+AString startVideoRecording(in nsIDocShell docShell, in boolean isVideo, in ACString fileName, in uint32_t width, in uint32_t height, in uint32_t quality, in int32_t offset_top);
 
 /**
 * Will emit 'juggler-screencast-stopped' when the video file is saved.
 */
 void stopVideoRecording(in AString sessionId);
+
+void screencastFrameAck(in AString sessionId);
 };
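The updated nsIScreencastService interface is what PageTarget calls from chrome JavaScript: the same startVideoRecording entry point now serves both modes, selected by the new isVideo flag and quality argument. A rough chrome-JS usage sketch (not part of the commit; docShell, offsetTop and the file path are placeholders):

const screencast = Cc['@mozilla.org/juggler/screencast;1'].getService(Ci.nsIScreencastService);

// Video mode: isVideo = true, frames are encoded into the given file, quality is unused (0).
const videoSessionId = screencast.startVideoRecording(docShell, true, '/tmp/recording.webm', 1280, 720, 0, offsetTop);
screencast.stopVideoRecording(videoSessionId);  // 'juggler-screencast-stopped' fires once the file is saved

// Screencast mode: isVideo = false, no file; JPEG frames arrive via the
// 'juggler-screencast-frame' observer topic and each must be acknowledged.
const screencastSessionId = screencast.startVideoRecording(docShell, false, '', 1280, 720, 90, offsetTop);
const onFrame = (subject, topic, data) => {
  // `data` is the base64-encoded JPEG frame.
  screencast.screencastFrameAck(screencastSessionId);
};
Services.obs.addObserver(onFrame, 'juggler-screencast-frame');
// ...later:
Services.obs.removeObserver(onFrame, 'juggler-screencast-frame');
screencast.stopVideoRecording(screencastSessionId);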
@@ -7,6 +7,7 @@
 #include "ScreencastEncoder.h"
 #include "HeadlessWidget.h"
 #include "HeadlessWindowCapturer.h"
+#include "mozilla/Base64.h"
 #include "mozilla/ClearOnShutdown.h"
 #include "mozilla/PresShell.h"
 #include "mozilla/StaticPtr.h"
@@ -15,6 +16,7 @@
 #include "nsIRandomGenerator.h"
 #include "nsISupportsPrimitives.h"
 #include "nsThreadManager.h"
+#include "nsReadableUtils.h"
 #include "nsView.h"
 #include "nsViewManager.h"
 #include "webrtc/modules/desktop_capture/desktop_capturer.h"
@@ -24,6 +26,9 @@
 #include "webrtc/modules/video_capture/video_capture.h"
 #include "mozilla/widget/PlatformWidgetTypes.h"
 #include "video_engine/desktop_capture_impl.h"
+extern "C" {
+#include "jpeglib.h"
+}
 
 using namespace mozilla::widget;
 

@@ -33,9 +38,11 @@ NS_IMPL_ISUPPORTS(nsScreencastService, nsIScreencastService)
 
 namespace {
 
+const int kMaxFramesInFlight = 1;
+
 StaticRefPtr<nsScreencastService> gScreencastService;
 
-rtc::scoped_refptr<webrtc::VideoCaptureModule> CreateWindowCapturer(nsIWidget* widget) {
+rtc::scoped_refptr<webrtc::VideoCaptureModuleEx> CreateWindowCapturer(nsIWidget* widget) {
 if (gfxPlatform::IsHeadless()) {
 HeadlessWidget* headlessWidget = static_cast<HeadlessWidget*>(widget);
 return HeadlessWindowCapturer::Create(headlessWidget);
@@ -62,6 +69,18 @@ void NotifyScreencastStopped(const nsString& sessionId) {
 observerService->NotifyObservers(nullptr, "juggler-screencast-stopped", sessionId.get());
 }
 
+void NotifyScreencastFrame(const nsCString& frameData) {
+nsString wideString;
+CopyASCIItoUTF16(frameData, wideString);
+nsCOMPtr<nsIObserverService> observerService = mozilla::services::GetObserverService();
+if (!observerService) {
+fprintf(stderr, "NotifyScreencastFrame error: no observer service\n");
+return;
+}
+
+observerService->NotifyObservers(nullptr, "juggler-screencast-frame", wideString.get());
+}
+
 nsresult generateUid(nsString& uid) {
 nsresult rv = NS_OK;
 nsCOMPtr<nsIRandomGenerator> rg = do_GetService("@mozilla.org/security/random-generator;1", &rv);
@@ -80,11 +99,14 @@ nsresult generateUid(nsString& uid) {
 }
 }
 
-class nsScreencastService::Session : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+class nsScreencastService::Session : public rtc::VideoSinkInterface<webrtc::VideoFrame>,
+public webrtc::RawFrameCallback {
 public:
-Session(rtc::scoped_refptr<webrtc::VideoCaptureModule>&& capturer, RefPtr<ScreencastEncoder>&& encoder)
+Session(rtc::scoped_refptr<webrtc::VideoCaptureModuleEx>&& capturer, RefPtr<ScreencastEncoder>&& encoder, gfx::IntMargin margin, uint32_t jpegQuality)
 : mCaptureModule(std::move(capturer))
-, mEncoder(std::move(encoder)) {
+, mEncoder(std::move(encoder))
+, mJpegQuality(jpegQuality)
+, mMargin(margin) {
 }
 
 bool Start() {
@@ -100,27 +122,113 @@ class nsScreencastService::Session : public rtc::VideoSinkInterface<webrtc::Vide
 return false;
 }
 
-mCaptureModule->RegisterCaptureDataCallback(this);
+if (mEncoder)
+mCaptureModule->RegisterCaptureDataCallback(this);
+else
+mCaptureModule->RegisterRawFrameCallback(this);
 return true;
 }
 
 void Stop(std::function<void()>&& callback) {
-mCaptureModule->DeRegisterCaptureDataCallback(this);
+if (mEncoder)
+mCaptureModule->DeRegisterCaptureDataCallback(this);
+else
+mCaptureModule->RegisterRawFrameCallback(this);
 int error = mCaptureModule->StopCapture();
 if (error) {
 fprintf(stderr, "StopCapture error %d\n", error);
 }
-mEncoder->finish(std::move(callback));
+if (mEncoder)
+mEncoder->finish(std::move(callback));
+else
+callback();
 }
 
+void ScreencastFrameAck() {
+rtc::CritScope lock(&mFramesInFlightCs);
+--mFramesInFlight;
+}
+
 // These callbacks end up running on the VideoCapture thread.
 void OnFrame(const webrtc::VideoFrame& videoFrame) override {
+if (!mEncoder)
+return;
 mEncoder->encodeFrame(videoFrame);
 }
 
+// These callbacks end up running on the VideoCapture thread.
+void OnRawFrame(uint8_t* videoFrame, size_t videoFrameStride, const webrtc::VideoCaptureCapability& frameInfo) override {
+if (!mJpegQuality)
+return;
+{
+rtc::CritScope lock(&mFramesInFlightCs);
+if (mFramesInFlight >= kMaxFramesInFlight)
+return;
+++mFramesInFlight;
+}
+
+jpeg_compress_struct info;
+jpeg_error_mgr error;
+info.err = jpeg_std_error(&error);
+jpeg_create_compress(&info);
+
+unsigned char* bufferPtr = nullptr;
+unsigned long bufferSize;
+jpeg_mem_dest(&info, &bufferPtr, &bufferSize);
+
+info.image_width = frameInfo.width - mMargin.LeftRight();
+info.image_height = frameInfo.height - mMargin.TopBottom();
+
+#if MOZ_LITTLE_ENDIAN()
+if (frameInfo.videoType == webrtc::VideoType::kARGB)
+info.in_color_space = JCS_EXT_BGRA;
+if (frameInfo.videoType == webrtc::VideoType::kBGRA)
+info.in_color_space = JCS_EXT_ARGB;
+#else
+if (frameInfo.videoType == webrtc::VideoType::kARGB)
+info.in_color_space = JCS_EXT_ARGB;
+if (frameInfo.videoType == webrtc::VideoType::kBGRA)
+info.in_color_space = JCS_EXT_BGRA;
+#endif
+
+// # of color components in input image
+info.input_components = 4;
+
+jpeg_set_defaults(&info);
+jpeg_set_quality(&info, mJpegQuality, true);
+
+jpeg_start_compress(&info, true);
+while (info.next_scanline < info.image_height) {
+JSAMPROW row = videoFrame + (mMargin.top + info.next_scanline) * videoFrameStride + 4 * mMargin.left;
+if (jpeg_write_scanlines(&info, &row, 1) != 1) {
+fprintf(stderr, "JPEG library failed to encode line\n");
+break;
+}
+}
+
+jpeg_finish_compress(&info);
+jpeg_destroy_compress(&info);
+
+nsCString base64;
+nsresult rv = mozilla::Base64Encode(reinterpret_cast<char *>(bufferPtr), bufferSize, base64);
+if (NS_WARN_IF(NS_FAILED(rv)))
+return;
+
+NS_DispatchToMainThread(NS_NewRunnableFunction(
+"NotifyScreencastFrame", [base64]() -> void {
+NotifyScreencastFrame(base64);
+}));
+
+free(bufferPtr);
+}
+
 private:
-rtc::scoped_refptr<webrtc::VideoCaptureModule> mCaptureModule;
+rtc::scoped_refptr<webrtc::VideoCaptureModuleEx> mCaptureModule;
 RefPtr<ScreencastEncoder> mEncoder;
+uint32_t mJpegQuality;
+rtc::CriticalSection mFramesInFlightCs;
+uint32_t mFramesInFlight = 0;
+gfx::IntMargin mMargin;
 };
 
 
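The Session above applies simple backpressure on the JPEG path: kMaxFramesInFlight is 1, OnRawFrame drops captures while a frame is outstanding, and ScreencastFrameAck (driven by Page.screencastFrameAck) releases the slot. The same accounting, restated as a small JavaScript sketch for illustration only (not part of the commit):

// Illustrative mirror of the frames-in-flight gate used by nsScreencastService::Session.
class FrameGate {
  constructor(maxFramesInFlight = 1) {
    this._max = maxFramesInFlight;
    this._inFlight = 0;
  }
  // Called per captured frame; returns false when the frame should be dropped.
  tryAcquire() {
    if (this._inFlight >= this._max)
      return false; // previous frame not acknowledged yet
    this._inFlight++;
    return true;
  }
  // Called when the client acknowledges a frame.
  ack() {
    if (this._inFlight > 0)
      this._inFlight--;
  }
}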
@@ -140,7 +248,7 @@ nsScreencastService::nsScreencastService() = default;
 nsScreencastService::~nsScreencastService() {
 }
 
-nsresult nsScreencastService::StartVideoRecording(nsIDocShell* aDocShell, const nsACString& aFileName, uint32_t width, uint32_t height, int32_t offsetTop, nsAString& sessionId) {
+nsresult nsScreencastService::StartVideoRecording(nsIDocShell* aDocShell, bool isVideo, const nsACString& aVideoFileName, uint32_t width, uint32_t height, uint32_t quality, int32_t offsetTop, nsAString& sessionId) {
 MOZ_RELEASE_ASSERT(NS_IsMainThread(), "Screencast service must be started on the Main thread.");
 
 PresShell* presShell = aDocShell->GetPresShell();

@@ -154,7 +262,7 @@ nsresult nsScreencastService::StartVideoRecording(nsIDocShell* aDocShell, const
 return NS_ERROR_UNEXPECTED;
 nsIWidget* widget = view->GetWidget();
 
-rtc::scoped_refptr<webrtc::VideoCaptureModule> capturer = CreateWindowCapturer(widget);
+rtc::scoped_refptr<webrtc::VideoCaptureModuleEx> capturer = CreateWindowCapturer(widget);
 if (!capturer)
 return NS_ERROR_FAILURE;
 
@@ -167,22 +275,24 @@ nsresult nsScreencastService::StartVideoRecording(nsIDocShell* aDocShell, const
 margin.top += offsetTop;
 
 nsCString error;
-RefPtr<ScreencastEncoder> encoder = ScreencastEncoder::create(error, PromiseFlatCString(aFileName), width, height, margin);
-if (!encoder) {
-fprintf(stderr, "Failed to create ScreencastEncoder: %s\n", error.get());
-return NS_ERROR_FAILURE;
+RefPtr<ScreencastEncoder> encoder;
+if (isVideo) {
+encoder = ScreencastEncoder::create(error, PromiseFlatCString(aVideoFileName), width, height, margin);
+if (!encoder) {
+fprintf(stderr, "Failed to create ScreencastEncoder: %s\n", error.get());
+return NS_ERROR_FAILURE;
+}
 }
 
-auto session = std::make_unique<Session>(std::move(capturer), std::move(encoder));
-if (!session->Start())
-return NS_ERROR_FAILURE;
-
 nsString uid;
 nsresult rv = generateUid(uid);
 NS_ENSURE_SUCCESS(rv, rv);
 
 sessionId = uid;
-mIdToSession.emplace(uid, std::move(session));
+
+auto session = std::make_unique<Session>(std::move(capturer), std::move(encoder), margin, isVideo ? 0 : quality);
+if (!session->Start())
+return NS_ERROR_FAILURE;
+mIdToSession.emplace(sessionId, std::move(session));
 return NS_OK;
 }
-
@@ -201,4 +311,13 @@ nsresult nsScreencastService::StopVideoRecording(const nsAString& aSessionId) {
 return NS_OK;
 }
 
+nsresult nsScreencastService::ScreencastFrameAck(const nsAString& aSessionId) {
+nsString sessionId(aSessionId);
+auto it = mIdToSession.find(sessionId);
+if (it == mIdToSession.end())
+return NS_ERROR_INVALID_ARG;
+it->second->ScreencastFrameAck();
+return NS_OK;
+}
+
 } // namespace mozilla
@@ -1143,12 +1143,15 @@ index 2459a4c3a464becb5b668af6571940a275239848..f7c1f7c0f6da0749a502cdc40d2b6eda
 /** Synthesize a touch event. The event types supported are:
 * touchstart, touchend, touchmove, and touchcancel
 diff --git a/dom/media/systemservices/video_engine/desktop_capture_impl.cc b/dom/media/systemservices/video_engine/desktop_capture_impl.cc
-index 7bc92fe4408c2878c9d7c8bdb97a7c257258ee31..8885feebedf53c0748cef19d80ce5aa23adc900b 100644
+index 7bc92fe4408c2878c9d7c8bdb97a7c257258ee31..b20480c3c0ca96097e61d37f44e127d45ab4648a 100644
 --- a/dom/media/systemservices/video_engine/desktop_capture_impl.cc
 +++ b/dom/media/systemservices/video_engine/desktop_capture_impl.cc
-@@ -125,8 +125,9 @@ int32_t ScreenDeviceInfoImpl::GetOrientation(const char* deviceUniqueIdUTF8,
+@@ -123,10 +123,11 @@ int32_t ScreenDeviceInfoImpl::GetOrientation(const char* deviceUniqueIdUTF8,
 return 0;
 }
 
-VideoCaptureModule* DesktopCaptureImpl::Create(const int32_t id,
+-VideoCaptureModule* DesktopCaptureImpl::Create(const int32_t id,
++VideoCaptureModuleEx* DesktopCaptureImpl::Create(const int32_t id,
 const char* uniqueId,
+- const CaptureDeviceType type) {
+- return new rtc::RefCountedObject<DesktopCaptureImpl>(id, uniqueId, type);
@@ -1199,21 +1202,93 @@ index 7bc92fe4408c2878c9d7c8bdb97a7c257258ee31..8885feebedf53c0748cef19d80ce5aa2
 time_event_(EventWrapper::Create()),
 #if defined(_WIN32)
 capturer_thread_(
+@@ -437,6 +444,19 @@ void DesktopCaptureImpl::DeRegisterCaptureDataCallback(
+}
+}
+
++void DesktopCaptureImpl::RegisterRawFrameCallback(RawFrameCallback* rawFrameCallback) {
++ rtc::CritScope lock(&_apiCs);
++ _rawFrameCallbacks.insert(rawFrameCallback);
++}
++
++void DesktopCaptureImpl::DeRegisterRawFrameCallback(RawFrameCallback* rawFrameCallback) {
++ rtc::CritScope lock(&_apiCs);
++ auto it = _rawFrameCallbacks.find(rawFrameCallback);
++ if (it != _rawFrameCallbacks.end()) {
++ _rawFrameCallbacks.erase(it);
++ }
++}
++
+int32_t DesktopCaptureImpl::StopCaptureIfAllClientsClose() {
+if (_dataCallBacks.empty()) {
+return StopCapture();
+@@ -644,6 +664,12 @@ void DesktopCaptureImpl::OnCaptureResult(DesktopCapturer::Result result,
+frameInfo.height = frame->size().height();
+frameInfo.videoType = VideoType::kARGB;
+
++ size_t videoFrameStride =
++ frameInfo.width * DesktopFrame::kBytesPerPixel;
++ for (auto rawFrameCallback : _rawFrameCallbacks) {
++ rawFrameCallback->OnRawFrame(videoFrame, videoFrameStride, frameInfo);
++ }
++
+size_t videoFrameLength =
+frameInfo.width * frameInfo.height * DesktopFrame::kBytesPerPixel;
+IncomingFrame(videoFrame, videoFrameLength, frameInfo);
 diff --git a/dom/media/systemservices/video_engine/desktop_capture_impl.h b/dom/media/systemservices/video_engine/desktop_capture_impl.h
-index 137281596e66793b8525ca2a5d156b4bd171da65..28978483e54e6424b6144d3a5509583df5885fdb 100644
+index 137281596e66793b8525ca2a5d156b4bd171da65..0e64c0b810b306e4a2509e4ad0d9596034d338fa 100644
 --- a/dom/media/systemservices/video_engine/desktop_capture_impl.h
 +++ b/dom/media/systemservices/video_engine/desktop_capture_impl.h
-@@ -160,7 +160,8 @@ class DesktopCaptureImpl : public DesktopCapturer::Callback,
+@@ -42,6 +42,21 @@ namespace webrtc {
+
+class VideoCaptureEncodeInterface;
+
++class RawFrameCallback {
++ public:
++ virtual ~RawFrameCallback() {}
++
++ virtual void OnRawFrame(uint8_t* videoFrame, size_t videoFrameLength, const VideoCaptureCapability& frameInfo) = 0;
++};
++
++class VideoCaptureModuleEx : public VideoCaptureModule {
++ public:
++ virtual ~VideoCaptureModuleEx() {}
++
++ virtual void RegisterRawFrameCallback(RawFrameCallback* rawFrameCallback) = 0;
++ virtual void DeRegisterRawFrameCallback(RawFrameCallback* rawFrameCallback) = 0;
++};
++
+// simulate deviceInfo interface for video engine, bridge screen/application and
+// real screen/application device info
+
+@@ -154,13 +169,14 @@ class BrowserDeviceInfoImpl : public VideoCaptureModule::DeviceInfo {
+// As with video, DesktopCaptureImpl is a proxy for screen sharing
+// and follows the video pipeline design
+class DesktopCaptureImpl : public DesktopCapturer::Callback,
+- public VideoCaptureModule,
++ public VideoCaptureModuleEx,
+public VideoCaptureExternal {
 public:
 /* Create a screen capture modules object
 */
-static VideoCaptureModule* Create(const int32_t id, const char* uniqueId,
+- static VideoCaptureModule* Create(const int32_t id, const char* uniqueId,
 - const CaptureDeviceType type);
++ static VideoCaptureModuleEx* Create(const int32_t id, const char* uniqueId,
 + const CaptureDeviceType type,
 + bool captureCursor = true);
 static VideoCaptureModule::DeviceInfo* CreateDeviceInfo(
 const int32_t id, const CaptureDeviceType type);
 
-@@ -192,7 +193,7 @@ class DesktopCaptureImpl : public DesktopCapturer::Callback,
+@@ -170,6 +186,8 @@ class DesktopCaptureImpl : public DesktopCapturer::Callback,
+void DeRegisterCaptureDataCallback(
+rtc::VideoSinkInterface<VideoFrame>* dataCallback) override;
+int32_t StopCaptureIfAllClientsClose() override;
++ void RegisterRawFrameCallback(RawFrameCallback* rawFrameCallback) override;
++ void DeRegisterRawFrameCallback(RawFrameCallback* rawFrameCallback) override;
+
+int32_t SetCaptureRotation(VideoRotation rotation) override;
+bool SetApplyRotation(bool enable) override;
+@@ -192,7 +210,7 @@ class DesktopCaptureImpl : public DesktopCapturer::Callback,
+
 protected:
 DesktopCaptureImpl(const int32_t id, const char* uniqueId,
@@ -1222,7 +1297,15 @@ index 137281596e66793b8525ca2a5d156b4bd171da65..28978483e54e6424b6144d3a5509583d
 virtual ~DesktopCaptureImpl();
 int32_t DeliverCapturedFrame(webrtc::VideoFrame& captureFrame,
 int64_t capture_time);
-@@ -240,6 +241,7 @@ class DesktopCaptureImpl : public DesktopCapturer::Callback,
+@@ -215,6 +233,7 @@ class DesktopCaptureImpl : public DesktopCapturer::Callback,
+rtc::CriticalSection _apiCs;
+
+std::set<rtc::VideoSinkInterface<VideoFrame>*> _dataCallBacks;
++ std::set<RawFrameCallback*> _rawFrameCallbacks;
+
+int64_t _incomingFrameTimesNanos
+[kFrameRateCountHistorySize]; // timestamp for local captured frames
+@@ -240,6 +259,7 @@ class DesktopCaptureImpl : public DesktopCapturer::Callback,
 void process();
 
 private:
@@ -1584,6 +1667,18 @@ index 77b4c4ea3581e3b66b0b40dae33c807b2d5aefd8..84af4461b9e946122527ac974dc30da5
 void updateTimeZone();
 
 void internalResyncICUDefaultTimeZone();
+diff --git a/media/libjpeg/jconfig.h b/media/libjpeg/jconfig.h
+index f2723e654098ff27542e1eb16a536c11ad0af617..b0b480551ff7d895dfdeb5a9800874858929c8ba 100644
+--- a/media/libjpeg/jconfig.h
++++ b/media/libjpeg/jconfig.h
+@@ -17,6 +17,7 @@
+/* #undef D_ARITH_CODING_SUPPORTED */
+
+/* Support in-memory source/destination managers */
++#define MEM_SRCDST_SUPPORTED 1
+/* #undef MEM_SRCDST_SUPPORTED */
+
+/* Use accelerated SIMD routines. */
 diff --git a/netwerk/base/nsINetworkInterceptController.idl b/netwerk/base/nsINetworkInterceptController.idl
 index 64a4a71b03b28872f376aac8eee12805bebd1bd8..f6fa7d731f3b0c7c4fcb26babad3fc2cdb29aec1 100644
 --- a/netwerk/base/nsINetworkInterceptController.idl