browser(ff): migrate screencast to client interfaces

Pavel Feldman 2021-05-07 18:05:26 -07:00
parent 918ae429e4
commit edd2cc807c
7 changed files with 94 additions and 78 deletions
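In short: the screencast service no longer broadcasts through the observer service; each recording session now passes an nsIScreencastServiceClient and receives callbacks directly. A rough before/after sketch of the wiring, distilled from the hunks below (onScreencastFrame, onScreencastStopped, screencastClient, docShell, file and the size/quality arguments are placeholders taken from the surrounding juggler code, not new API):

// Before: frames and completion were announced on global observer topics.
Services.obs.addObserver(onScreencastFrame, 'juggler-screencast-frame');
Services.obs.addObserver(onScreencastStopped, 'juggler-screencast-stopped');

// After: the callback object is the first argument to startVideoRecording and
// the observer topics are gone.
const sessionId = screencastService.startVideoRecording(
    screencastClient, docShell, isVideo, file, width, height, quality, offsetTop);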

View File

@@ -1,2 +1,2 @@
-1255
-Changed: pavel.feldman@gmail.com Thu 06 May 2021 06:33:59 PM PDT
+1256
+Changed: pavel.feldman@gmail.com Fri 07 May 2021 09:00:05 PM PDT

View File

@@ -88,6 +88,8 @@ class DownloadInterceptor {
   }
 }
 
+const screencastService = Cc['@mozilla.org/juggler/screencast;1'].getService(Ci.nsIScreencastService);
+
 class TargetRegistry {
   constructor() {
     EventEmitter.decorate(this);
@@ -353,7 +355,6 @@ class PageTarget {
     this._registry._browserBrowsingContextToTarget.set(this._linkedBrowser.browsingContext, this);
     this._registry.emit(TargetRegistry.Events.TargetCreated, this);
-    this._screencast = Cc['@mozilla.org/juggler/screencast;1'].getService(Ci.nsIScreencastService);
   }
 
   dialog(dialogId) {
@@ -503,7 +504,17 @@ class PageTarget {
     // Exclude address bar and navigation control from the video.
     const rect = this.linkedBrowser().getBoundingClientRect();
     const devicePixelRatio = this._window.devicePixelRatio;
-    const sessionId = this._screencast.startVideoRecording(docShell, true, file, width, height, 0, devicePixelRatio * rect.top);
+    let sessionId;
+    const registry = this._registry;
+    const screencastClient = {
+      QueryInterface: ChromeUtils.generateQI([Ci.nsIScreencastServiceClient]),
+      screencastFrame(data, deviceWidth, deviceHeight) {
+      },
+      screencastStopped() {
+        registry.emit(TargetRegistry.Events.ScreencastStopped, sessionId);
+      },
+    };
+    sessionId = screencastService.startVideoRecording(screencastClient, docShell, true, file, width, height, 0, devicePixelRatio * rect.top);
     this._videoRecordingInfo = { sessionId, file };
     this.emit(PageTarget.Events.ScreencastStarted);
   }
@@ -513,7 +524,7 @@ class PageTarget {
       throw new Error('No video recording in progress');
     const videoRecordingInfo = this._videoRecordingInfo;
     this._videoRecordingInfo = undefined;
-    this._screencast.stopVideoRecording(videoRecordingInfo.sessionId);
+    screencastService.stopVideoRecording(videoRecordingInfo.sessionId);
   }
 
   videoRecordingInfo() {
@@ -532,28 +543,34 @@ class PageTarget {
     // Exclude address bar and navigation control from the video.
     const rect = this.linkedBrowser().getBoundingClientRect();
     const devicePixelRatio = this._window.devicePixelRatio;
-    const screencastId = this._screencast.startVideoRecording(docShell, false, '', width, height, quality || 90, devicePixelRatio * rect.top);
-    const onFrame = (subject, topic, data) => {
-      this.emit(PageTarget.Events.ScreencastFrame, data);
+    const self = this;
+    const screencastClient = {
+      QueryInterface: ChromeUtils.generateQI([Ci.nsIScreencastServiceClient]),
+      screencastFrame(data, deviceWidth, deviceHeight) {
+        if (self._screencastRecordingInfo)
+          self.emit(PageTarget.Events.ScreencastFrame, { data, deviceWidth, deviceHeight });
+      },
+      screencastStopped() {
+      },
     };
-    Services.obs.addObserver(onFrame, 'juggler-screencast-frame');
-    this._screencastRecordingInfo = { screencastId, onFrame };
+    const screencastId = screencastService.startVideoRecording(screencastClient, docShell, false, '', width, height, quality || 90, devicePixelRatio * rect.top);
+    this._screencastRecordingInfo = { screencastId };
     return { screencastId };
   }
 
   screencastFrameAck({ screencastId }) {
     if (!this._screencastRecordingInfo || this._screencastRecordingInfo.screencastId !== screencastId)
       return;
-    this._screencast.screencastFrameAck(screencastId);
+    screencastService.screencastFrameAck(screencastId);
   }
 
   stopScreencast() {
     if (!this._screencastRecordingInfo)
       throw new Error('No screencast in progress');
-    const screencastInfo = this._screencastRecordingInfo;
-    Services.obs.removeObserver(screencastInfo.onFrame, 'juggler-screencast-frame');
+    const { screencastId } = this._screencastRecordingInfo;
     this._screencastRecordingInfo = undefined;
-    this._screencast.stopVideoRecording(screencastInfo.screencastId);
+    screencastService.stopVideoRecording(screencastId);
   }
 
   dispose() {
@@ -934,6 +951,7 @@ TargetRegistry.Events = {
   TargetDestroyed: Symbol('TargetRegistry.Events.TargetDestroyed'),
   DownloadCreated: Symbol('TargetRegistry.Events.DownloadCreated'),
   DownloadFinished: Symbol('TargetRegistry.Events.DownloadFinished'),
+  ScreencastStopped: Symbol('TargetRegistry.ScreencastStopped'),
 };
 
 var EXPORTED_SYMBOLS = ['TargetRegistry', 'PageTarget'];

View File

@@ -37,14 +37,11 @@ class BrowserHandler {
       helper.on(this._targetRegistry, TargetRegistry.Events.TargetDestroyed, this._onTargetDestroyed.bind(this)),
       helper.on(this._targetRegistry, TargetRegistry.Events.DownloadCreated, this._onDownloadCreated.bind(this)),
      helper.on(this._targetRegistry, TargetRegistry.Events.DownloadFinished, this._onDownloadFinished.bind(this)),
+      helper.on(this._targetRegistry, TargetRegistry.Events.ScreencastStopped, sessionId => {
+        this._session.emitEvent('Browser.videoRecordingFinished', {screencastId: '' + sessionId});
+      })
     ];
-    const onScreencastStopped = (subject, topic, data) => {
-      this._session.emitEvent('Browser.videoRecordingFinished', {screencastId: '' + data});
-    };
-    Services.obs.addObserver(onScreencastStopped, 'juggler-screencast-stopped');
-    this._eventListeners.push(() => Services.obs.removeObserver(onScreencastStopped, 'juggler-screencast-stopped'));
 
     for (const target of this._targetRegistry.targets())
       this._onTargetCreated(target);

View File

@@ -152,8 +152,8 @@ class PageHandler {
     this._session.emitEvent('Page.videoRecordingStarted', { screencastId: info.sessionId, file: info.file });
   }
 
-  _onScreencastFrame(data) {
-    this._session.emitEvent('Page.screencastFrame', { data, deviceWidth: 0, deviceHeight: 0 });
+  _onScreencastFrame(params) {
+    this._session.emitEvent('Page.screencastFrame', params);
   }
 
   _onPageReady(event) {
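Page.screencastFrame now carries the deviceWidth/deviceHeight reported by the capture pipeline instead of hard-coded zeros. Consumers still need to acknowledge each frame, since the capturer caps frames in flight (kMaxFramesInFlight in nsScreencastService.cpp below). A hypothetical consumer sketch, assuming a juggler protocol session object and that Page.startScreencast / Page.screencastFrameAck forward to the PageTarget methods shown above:

// Hypothetical protocol consumer; 'session' stands for a juggler connection.
const { screencastId } = await session.send('Page.startScreencast', { width, height, quality });
session.on('Page.screencastFrame', ({ data, deviceWidth, deviceHeight }) => {
  // 'data' is a base64-encoded JPEG sized deviceWidth x deviceHeight.
  session.send('Page.screencastFrameAck', { screencastId });  // frees one in-flight slot
});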

View File

@@ -89,8 +89,13 @@ int32_t HeadlessWindowCapturer::StartCapture(const VideoCaptureCapability& capab
   frameInfo.videoType = VideoType::kBGRA;
 #endif
-  for (auto rawFrameCallback : _rawFrameCallbacks) {
-    rawFrameCallback->OnRawFrame(dataSurface->GetData(), dataSurface->Stride(), frameInfo);
+  {
+    rtc::CritScope lock2(&_callBackCs);
+    for (auto rawFrameCallback : _rawFrameCallbacks) {
+      rawFrameCallback->OnRawFrame(dataSurface->GetData(), dataSurface->Stride(), frameInfo);
+    }
+    if (!_dataCallBacks.size())
+      return;
   }
 
   int width = dataSurface->GetSize().width;

View File

@@ -6,13 +6,21 @@
 interface nsIDocShell;
 
+[scriptable, uuid(0b5d32c4-aeeb-11eb-8529-0242ac130003)]
+interface nsIScreencastServiceClient : nsISupports
+{
+  void screencastFrame(in AString frame, in uint32_t deviceWidth, in uint32_t deviceHeight);
+  void screencastStopped();
+};
+
 /**
  * Service for recording window video.
  */
 [scriptable, uuid(d8c4d9e0-9462-445e-9e43-68d3872ad1de)]
 interface nsIScreencastService : nsISupports
 {
-  AString startVideoRecording(in nsIDocShell docShell, in boolean isVideo, in ACString fileName, in uint32_t width, in uint32_t height, in uint32_t quality, in int32_t offset_top);
+  AString startVideoRecording(in nsIScreencastServiceClient client, in nsIDocShell docShell, in boolean isVideo, in ACString fileName, in uint32_t width, in uint32_t height, in uint32_t quality, in int32_t offset_top);
 
   /**
    * Will emit 'juggler-screencast-stopped' when the video file is saved.
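A minimal chrome-JavaScript implementation of the new client interface, as a sketch of what the juggler side passes in (the frame argument is the base64-encoded JPEG that nsScreencastService.cpp converts to UTF-16 before the call; deviceWidth/deviceHeight are the captured frame dimensions):

const screencastClient = {
  QueryInterface: ChromeUtils.generateQI([Ci.nsIScreencastServiceClient]),
  screencastFrame(frame, deviceWidth, deviceHeight) {
    // frame: base64 JPEG; delivered in screencast (non-video) mode.
  },
  screencastStopped() {
    // Recording fully stopped; in video mode this fires after the file is flushed.
  },
};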

View File

@@ -16,7 +16,6 @@
 #include "nsIRandomGenerator.h"
 #include "nsISupportsPrimitives.h"
 #include "nsThreadManager.h"
-#include "nsReadableUtils.h"
 #include "nsView.h"
 #include "nsViewManager.h"
 #include "webrtc/modules/desktop_capture/desktop_capturer.h"
@@ -59,28 +58,6 @@ rtc::scoped_refptr<webrtc::VideoCaptureModuleEx> CreateWindowCapturer(nsIWidget*
   return webrtc::DesktopCaptureImpl::Create(++moduleId, windowId.get(), webrtc::CaptureDeviceType::Window, captureCursor);
 }
 
-void NotifyScreencastStopped(const nsString& sessionId) {
-  nsCOMPtr<nsIObserverService> observerService = mozilla::services::GetObserverService();
-  if (!observerService) {
-    fprintf(stderr, "NotifyScreencastStopped error: no observer service\n");
-    return;
-  }
-  observerService->NotifyObservers(nullptr, "juggler-screencast-stopped", sessionId.get());
-}
-
-void NotifyScreencastFrame(const nsCString& frameData) {
-  nsString wideString;
-  CopyASCIItoUTF16(frameData, wideString);
-  nsCOMPtr<nsIObserverService> observerService = mozilla::services::GetObserverService();
-  if (!observerService) {
-    fprintf(stderr, "NotifyScreencastFrame error: no observer service\n");
-    return;
-  }
-  observerService->NotifyObservers(nullptr, "juggler-screencast-frame", wideString.get());
-}
-
 nsresult generateUid(nsString& uid) {
   nsresult rv = NS_OK;
   nsCOMPtr<nsIRandomGenerator> rg = do_GetService("@mozilla.org/security/random-generator;1", &rv);
@@ -102,8 +79,9 @@ nsresult generateUid(nsString& uid) {
 class nsScreencastService::Session : public rtc::VideoSinkInterface<webrtc::VideoFrame>,
                                      public webrtc::RawFrameCallback {
  public:
-  Session(rtc::scoped_refptr<webrtc::VideoCaptureModuleEx>&& capturer, RefPtr<ScreencastEncoder>&& encoder, gfx::IntMargin margin, uint32_t jpegQuality)
-      : mCaptureModule(std::move(capturer))
+  Session(nsIScreencastServiceClient* client, rtc::scoped_refptr<webrtc::VideoCaptureModuleEx>&& capturer, RefPtr<ScreencastEncoder>&& encoder, gfx::IntMargin margin, uint32_t jpegQuality)
+      : mClient(client)
+      , mCaptureModule(std::move(capturer))
       , mEncoder(std::move(encoder))
       , mJpegQuality(jpegQuality)
       , mMargin(margin) {
@@ -129,23 +107,32 @@ class nsScreencastService::Session : public rtc::VideoSinkInterface<webrtc::Vide
     return true;
   }
 
-  void Stop(std::function<void()>&& callback) {
+  void Stop() {
     if (mEncoder)
       mCaptureModule->DeRegisterCaptureDataCallback(this);
     else
-      mCaptureModule->RegisterRawFrameCallback(this);
+      mCaptureModule->DeRegisterRawFrameCallback(this);
     int error = mCaptureModule->StopCapture();
     if (error) {
       fprintf(stderr, "StopCapture error %d\n", error);
     }
-    if (mEncoder)
-      mEncoder->finish(std::move(callback));
-    else
-      callback();
+    if (mEncoder) {
+      rtc::CritScope lock(&mCaptureCallbackCs);
+      mEncoder->finish([client = std::move(mClient)] {
+        NS_DispatchToMainThread(NS_NewRunnableFunction(
+            "NotifyScreencastStopped", [client = std::move(client)]() -> void {
+              client->ScreencastStopped();
+            }));
+      });
+    } else {
+      rtc::CritScope lock(&mCaptureCallbackCs);
+      mClient->ScreencastStopped();
+      mClient = nullptr;
+    }
   }
 
   void ScreencastFrameAck() {
-    rtc::CritScope lock(&mFramesInFlightCs);
+    rtc::CritScope lock(&mCaptureCallbackCs);
     --mFramesInFlight;
   }
@@ -158,13 +145,14 @@ class nsScreencastService::Session : public rtc::VideoSinkInterface<webrtc::Vide
   // These callbacks end up running on the VideoCapture thread.
   void OnRawFrame(uint8_t* videoFrame, size_t videoFrameStride, const webrtc::VideoCaptureCapability& frameInfo) override {
-    if (!mJpegQuality)
-      return;
     {
-      rtc::CritScope lock(&mFramesInFlightCs);
-      if (mFramesInFlight >= kMaxFramesInFlight)
+      rtc::CritScope lock(&mCaptureCallbackCs);
+      if (mFramesInFlight >= kMaxFramesInFlight) {
         return;
+      }
       ++mFramesInFlight;
+      if (!mClient)
+        return;
     }
 
     jpeg_compress_struct info;
@@ -211,22 +199,27 @@ class nsScreencastService::Session : public rtc::VideoSinkInterface<webrtc::Vide
     nsCString base64;
     nsresult rv = mozilla::Base64Encode(reinterpret_cast<char *>(bufferPtr), bufferSize, base64);
-    if (NS_WARN_IF(NS_FAILED(rv)))
-      return;
-
-    NS_DispatchToMainThread(NS_NewRunnableFunction(
-        "NotifyScreencastFrame", [base64]() -> void {
-          NotifyScreencastFrame(base64);
-        }));
     free(bufferPtr);
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return;
+    }
+
+    uint32_t deviceWidth = info.image_width;
+    uint32_t deviceHeight = info.image_height;
+    nsIScreencastServiceClient* client = mClient.get();
+    NS_DispatchToMainThread(NS_NewRunnableFunction(
+        "NotifyScreencastFrame", [client, base64, deviceWidth, deviceHeight]() -> void {
+          NS_ConvertUTF8toUTF16 utf16(base64);
+          client->ScreencastFrame(utf16, deviceWidth, deviceHeight);
+        }));
   }
 
 private:
+  RefPtr<nsIScreencastServiceClient> mClient;
   rtc::scoped_refptr<webrtc::VideoCaptureModuleEx> mCaptureModule;
   RefPtr<ScreencastEncoder> mEncoder;
   uint32_t mJpegQuality;
-  rtc::CriticalSection mFramesInFlightCs;
+  rtc::CriticalSection mCaptureCallbackCs;
   uint32_t mFramesInFlight = 0;
   gfx::IntMargin mMargin;
 };
@@ -248,7 +241,7 @@ nsScreencastService::nsScreencastService() = default;
 nsScreencastService::~nsScreencastService() {
 }
 
-nsresult nsScreencastService::StartVideoRecording(nsIDocShell* aDocShell, bool isVideo, const nsACString& aVideoFileName, uint32_t width, uint32_t height, uint32_t quality, int32_t offsetTop, nsAString& sessionId) {
+nsresult nsScreencastService::StartVideoRecording(nsIScreencastServiceClient* aClient, nsIDocShell* aDocShell, bool isVideo, const nsACString& aVideoFileName, uint32_t width, uint32_t height, uint32_t quality, int32_t offsetTop, nsAString& sessionId) {
   MOZ_RELEASE_ASSERT(NS_IsMainThread(), "Screencast service must be started on the Main thread.");
 
   PresShell* presShell = aDocShell->GetPresShell();
@@ -289,7 +282,7 @@ nsresult nsScreencastService::StartVideoRecording(nsIDocShell* aDocShell, bool i
   NS_ENSURE_SUCCESS(rv, rv);
   sessionId = uid;
 
-  auto session = std::make_unique<Session>(std::move(capturer), std::move(encoder), margin, isVideo ? 0 : quality);
+  auto session = std::make_unique<Session>(aClient, std::move(capturer), std::move(encoder), margin, isVideo ? 0 : quality);
   if (!session->Start())
     return NS_ERROR_FAILURE;
   mIdToSession.emplace(sessionId, std::move(session));
@@ -301,12 +294,7 @@ nsresult nsScreencastService::StopVideoRecording(const nsAString& aSessionId) {
   auto it = mIdToSession.find(sessionId);
   if (it == mIdToSession.end())
     return NS_ERROR_INVALID_ARG;
-  it->second->Stop([sessionId] {
-    NS_DispatchToMainThread(NS_NewRunnableFunction(
-        "NotifyScreencastStopped", [sessionId]() -> void {
-          NotifyScreencastStopped(sessionId);
-        }));
-  });
+  it->second->Stop();
   mIdToSession.erase(it);
   return NS_OK;
 }