/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "RemoteVideoDecoder.h"

#include "mozilla/layers/ImageDataSerializer.h"

#ifdef MOZ_AV1
#  include "AOMDecoder.h"
#  include "DAV1DDecoder.h"
#endif
#ifdef XP_WIN
#  include "WMFDecoderModule.h"
#endif
#include "ImageContainer.h"  // for PlanarYCbCrData and BufferRecycleBin
#include "mozilla/layers/VideoBridgeChild.h"
#include "mozilla/layers/ImageClient.h"
#include "PDMFactory.h"
#include "RemoteDecoderManagerChild.h"
#include "RemoteDecoderManagerParent.h"
#include "GPUVideoImage.h"
#include "MediaInfo.h"
#include "mozilla/Telemetry.h"
#include "mozilla/StaticPrefs_media.h"
#include "mozilla/layers/TextureClient.h"

namespace mozilla {

using namespace layers;  // for PlanarYCbCrData and BufferRecycleBin
using namespace ipc;
using namespace gfx;

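// A minimal KnowsCompositor implementation whose texture forwarder is the
// VideoBridgeChild singleton connected to either the GPU process or the
// parent process, depending on which process the TextureFactoryIdentifier
// describes.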
class KnowsCompositorVideo : public layers::KnowsCompositor {
 public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(KnowsCompositorVideo, override)

  layers::TextureForwarder* GetTextureForwarder() override {
    return mTextureFactoryIdentifier.mParentProcessType == GeckoProcessType_GPU
               ? VideoBridgeChild::GetSingletonToGPUProcess()
               : VideoBridgeChild::GetSingletonToParentProcess();
  }
  layers::LayersIPCActor* GetLayersIPCActor() override {
    return GetTextureForwarder();
  }

  static already_AddRefed<KnowsCompositorVideo> TryCreateForIdentifier(
      const layers::TextureFactoryIdentifier& aIdentifier) {
    VideoBridgeChild* child =
        (aIdentifier.mParentProcessType == GeckoProcessType_GPU)
            ? VideoBridgeChild::GetSingletonToGPUProcess()
            : VideoBridgeChild::GetSingletonToParentProcess();
    if (!child) {
      return nullptr;
    }

    // The RDD process will never use hardware decoding since it's
    // sandboxed, so don't bother trying to create a sync object.
    TextureFactoryIdentifier ident = aIdentifier;
    if (XRE_IsRDDProcess()) {
      ident.mSyncHandle = 0;
    }

    RefPtr<KnowsCompositorVideo> knowsCompositor = new KnowsCompositorVideo();
    knowsCompositor->IdentifyTextureHost(ident);
    return knowsCompositor.forget();
  }

 private:
  KnowsCompositorVideo() = default;
  virtual ~KnowsCompositorVideo() = default;
};

RemoteVideoDecoderChild::RemoteVideoDecoderChild(bool aRecreatedOnCrash)
    : RemoteDecoderChild(aRecreatedOnCrash),
      mBufferRecycleBin(new BufferRecycleBin) {}

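// Rebuilds a software video frame from the YCbCr buffer that the remote
// decoder serialized into aSdBuffer: the planes are copied into a
// RecyclingPlanarYCbCrImage and the backing memory or Shmem is then released.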
RefPtr<mozilla::layers::Image> RemoteVideoDecoderChild::DeserializeImage(
    const SurfaceDescriptorBuffer& aSdBuffer, const IntSize& aPicSize) {
  MOZ_ASSERT(aSdBuffer.desc().type() == BufferDescriptor::TYCbCrDescriptor);
  if (aSdBuffer.desc().type() != BufferDescriptor::TYCbCrDescriptor) {
    return nullptr;
  }
  const YCbCrDescriptor& descriptor = aSdBuffer.desc().get_YCbCrDescriptor();

  uint8_t* buffer = nullptr;
  const MemoryOrShmem& memOrShmem = aSdBuffer.data();
  switch (memOrShmem.type()) {
    case MemoryOrShmem::Tuintptr_t:
      buffer = reinterpret_cast<uint8_t*>(memOrShmem.get_uintptr_t());
      break;
    case MemoryOrShmem::TShmem:
      buffer = memOrShmem.get_Shmem().get<uint8_t>();
      break;
    default:
      MOZ_ASSERT(false, "Unknown MemoryOrShmem type");
  }
  if (!buffer) {
    return nullptr;
  }

  PlanarYCbCrData pData;
  pData.mYSize = descriptor.ySize();
  pData.mYStride = descriptor.yStride();
  pData.mCbCrSize = descriptor.cbCrSize();
  pData.mCbCrStride = descriptor.cbCrStride();
  // default mYSkip, mCbSkip, mCrSkip because not held in YCbCrDescriptor
  pData.mYSkip = pData.mCbSkip = pData.mCrSkip = 0;
  // default mPicX, mPicY because not held in YCbCrDescriptor
  pData.mPicX = pData.mPicY = 0;
  pData.mPicSize = aPicSize;
  pData.mStereoMode = descriptor.stereoMode();
  pData.mColorDepth = descriptor.colorDepth();
  pData.mYUVColorSpace = descriptor.yUVColorSpace();
  pData.mYChannel = ImageDataSerializer::GetYChannel(buffer, descriptor);
  pData.mCbChannel = ImageDataSerializer::GetCbChannel(buffer, descriptor);
  pData.mCrChannel = ImageDataSerializer::GetCrChannel(buffer, descriptor);

  // images coming from AOMDecoder are RecyclingPlanarYCbCrImages.
  RefPtr<RecyclingPlanarYCbCrImage> image =
      new RecyclingPlanarYCbCrImage(mBufferRecycleBin);
  bool setData = image->CopyData(pData);
  MOZ_ASSERT(setData);

  switch (memOrShmem.type()) {
    case MemoryOrShmem::Tuintptr_t:
      delete[] reinterpret_cast<uint8_t*>(memOrShmem.get_uintptr_t());
      break;
    case MemoryOrShmem::TShmem:
      DeallocShmem(memOrShmem.get_Shmem());
      break;
    default:
      MOZ_ASSERT(false, "Unknown MemoryOrShmem type");
  }

  if (!setData) {
    return nullptr;
  }

  return image;
}

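// Runs on the manager thread for every decoded frame sent by the parent.
// Buffer-backed descriptors are deserialized into a local image; any other
// surface descriptor is wrapped in a GPUVideoImage that references the
// remote texture.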
mozilla::ipc::IPCResult RemoteVideoDecoderChild::RecvOutput(
    const DecodedOutputIPDL& aDecodedData) {
  AssertOnManagerThread();
  MOZ_ASSERT(aDecodedData.type() == DecodedOutputIPDL::TRemoteVideoDataIPDL);

  const RemoteVideoDataIPDL& aData = aDecodedData.get_RemoteVideoDataIPDL();

  if (aData.sd().type() == SurfaceDescriptor::TSurfaceDescriptorBuffer) {
    RefPtr<Image> image = DeserializeImage(
        aData.sd().get_SurfaceDescriptorBuffer(), aData.frameSize());

    RefPtr<VideoData> video = VideoData::CreateFromImage(
        aData.display(), aData.base().offset(), aData.base().time(),
        aData.base().duration(), image, aData.base().keyframe(),
        aData.base().timecode());

    mDecodedData.AppendElement(std::move(video));
  } else {
    // The Image here creates a TextureData object that takes ownership
    // of the SurfaceDescriptor, and is responsible for making sure that
    // it gets deallocated.
    RefPtr<Image> image =
        new GPUVideoImage(GetManager(), aData.sd(), aData.frameSize());

    RefPtr<VideoData> video = VideoData::CreateFromImage(
        aData.display(), aData.base().offset(), aData.base().time(),
        aData.base().duration(), image, aData.base().keyframe(),
        aData.base().timecode());

    mDecodedData.AppendElement(std::move(video));
  }
  return IPC_OK();
}

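// Opens the PRemoteDecoder channel to the RDD process; the constructor
// message returns a success flag and an error description to this child.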
MediaResult RemoteVideoDecoderChild::InitIPDL(
    const VideoInfo& aVideoInfo, float aFramerate,
    const CreateDecoderParams::OptionSet& aOptions,
    const layers::TextureFactoryIdentifier* aIdentifier) {
  RefPtr<RemoteDecoderManagerChild> manager =
      RemoteDecoderManagerChild::GetRDDProcessSingleton();

  // The manager isn't available because RemoteDecoderManagerChild has been
  // initialized with null endpoints and we don't want to decode video in the
  // RDD process anymore. Return an error here so that we can fall back to
  // other PDMs.
  if (!manager) {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("RemoteDecoderManager is not available."));
  }

  if (!manager->CanSend()) {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("RemoteDecoderManager unable to send."));
  }

  mIPDLSelfRef = this;
  bool success = false;
  nsCString errorDescription;
  VideoDecoderInfoIPDL decoderInfo(aVideoInfo, aFramerate);
  if (manager->SendPRemoteDecoderConstructor(this, decoderInfo, aOptions,
                                             ToMaybe(aIdentifier), &success,
                                             &errorDescription)) {
    mCanSend = true;
  }

  return success ? MediaResult(NS_OK)
                 : MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, errorDescription);
}

GpuRemoteVideoDecoderChild::GpuRemoteVideoDecoderChild()
    : RemoteVideoDecoderChild(true) {}

MediaResult GpuRemoteVideoDecoderChild::InitIPDL(
    const VideoInfo& aVideoInfo, float aFramerate,
    const CreateDecoderParams::OptionSet& aOptions,
    const layers::TextureFactoryIdentifier& aIdentifier) {
  RefPtr<RemoteDecoderManagerChild> manager =
      RemoteDecoderManagerChild::GetGPUProcessSingleton();

  // The manager isn't available because RemoteDecoderManagerChild has been
  // initialized with null endpoints and we don't want to decode video in the
  // GPU process anymore. Return an error here so that we can fall back to
  // other PDMs.
  if (!manager) {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("RemoteDecoderManager is not available."));
  }

  // The manager doesn't support sending messages because we've just crashed
  // and are working on reinitialization. Don't initialize mIPDLSelfRef and
  // leave us in an error state. We'll then immediately reject the promise when
  // Init() is called and the caller can try again. Hopefully by then the new
  // manager is ready, or we've notified the caller that it's no longer
  // available. If not, then the cycle repeats until we're ready.
  if (!manager->CanSend()) {
    return NS_OK;
  }

  mIPDLSelfRef = this;
  bool success = false;
  nsCString errorDescription;
  VideoDecoderInfoIPDL decoderInfo(aVideoInfo, aFramerate);
  if (manager->SendPRemoteDecoderConstructor(this, decoderInfo, aOptions,
                                             Some(aIdentifier), &success,
                                             &errorDescription)) {
    mCanSend = true;
  }

  return success ? MediaResult(NS_OK)
                 : MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, errorDescription);
}

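// Creates the wrapped platform decoder for the remote process: WMF when
// running in the GPU process on Windows, dav1d or libaom for AV1 content.
// Whether a decoder was created is reported through aSuccess and
// aErrorDescription.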
RemoteVideoDecoderParent::RemoteVideoDecoderParent(
    RemoteDecoderManagerParent* aParent, const VideoInfo& aVideoInfo,
    float aFramerate, const CreateDecoderParams::OptionSet& aOptions,
    const Maybe<layers::TextureFactoryIdentifier>& aIdentifier,
    TaskQueue* aManagerTaskQueue, TaskQueue* aDecodeTaskQueue, bool* aSuccess,
    nsCString* aErrorDescription)
    : RemoteDecoderParent(aParent, aManagerTaskQueue, aDecodeTaskQueue),
      mVideoInfo(aVideoInfo) {
  if (aIdentifier) {
    // Check to see if we have a direct PVideoBridge connection to the
    // destination process specified in aIdentifier, and create a
    // KnowsCompositor representing that connection if so. If this fails, then
    // we fall back to returning the decoded frames directly via Output().
    mKnowsCompositor =
        KnowsCompositorVideo::TryCreateForIdentifier(*aIdentifier);
  }

  CreateDecoderParams params(mVideoInfo);
  params.mTaskQueue = mDecodeTaskQueue;
  params.mKnowsCompositor = mKnowsCompositor;
  params.mImageContainer = new layers::ImageContainer();
  params.mRate = CreateDecoderParams::VideoFrameRate(aFramerate);
  params.mOptions = aOptions;
  MediaResult error(NS_OK);
  params.mError = &error;

  if (XRE_IsGPUProcess()) {
#ifdef XP_WIN
    // Ensure everything is properly initialized on the right thread.
    PDMFactory::EnsureInit();

    // TODO: Ideally we wouldn't hardcode the WMF PDM, and we'd use the normal
    // PDM factory logic for picking a decoder.
    RefPtr<WMFDecoderModule> pdm(new WMFDecoderModule());
    pdm->Startup();
    mDecoder = pdm->CreateVideoDecoder(params);
#else
    MOZ_ASSERT(false,
               "Can't use RemoteVideoDecoder in the GPU process on non-Windows "
               "platforms yet");
#endif
  }

#ifdef MOZ_AV1
  if (AOMDecoder::IsAV1(params.mConfig.mMimeType)) {
    if (StaticPrefs::media_av1_use_dav1d()) {
      mDecoder = new DAV1DDecoder(params);
    } else {
      mDecoder = new AOMDecoder(params);
    }
  }
#endif

  if (NS_FAILED(error)) {
    MOZ_ASSERT(aErrorDescription);
    *aErrorDescription = error.Description();
  }

  *aSuccess = !!mDecoder;
}

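// Forwards each decoded frame back over PRemoteDecoder. When a video bridge
// connection is available the frame is wrapped in a TextureClient and sent as
// the descriptor returned by StoreImage(); otherwise the YCbCr planes are
// copied into a Shmem-backed SurfaceDescriptorBuffer.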
MediaResult RemoteVideoDecoderParent::ProcessDecodedData(
    const MediaDataDecoder::DecodedData& aData) {
  MOZ_ASSERT(OnManagerThread());

  // If the video decoder bridge has shut down, stop.
  if (mKnowsCompositor && !mKnowsCompositor->GetTextureForwarder()) {
    return NS_OK;
  }

  for (const auto& data : aData) {
    MOZ_ASSERT(data->mType == MediaData::Type::VIDEO_DATA,
               "Can only decode videos using RemoteDecoderParent!");
    VideoData* video = static_cast<VideoData*>(data.get());

    MOZ_ASSERT(video->mImage,
               "Decoded video must output a layers::Image to "
               "be used with RemoteDecoderParent");

    SurfaceDescriptor sd;
    IntSize size;

    if (mKnowsCompositor) {
      RefPtr<TextureClient> texture =
          video->mImage->GetTextureClient(mKnowsCompositor);

      if (!texture) {
        texture = ImageClient::CreateTextureClientForImage(video->mImage,
                                                           mKnowsCompositor);
      }

      if (texture && !texture->IsAddedToCompositableClient()) {
        texture->InitIPDLActor(mKnowsCompositor);
        texture->SetAddedToCompositableClient();
      }
      if (texture) {
        sd = mParent->StoreImage(video->mImage, texture);
        size = texture->GetSize();
      }
    } else {
      PlanarYCbCrImage* image =
          static_cast<PlanarYCbCrImage*>(video->mImage.get());

      SurfaceDescriptorBuffer sdBuffer;
      Shmem buffer;
      if (!AllocShmem(image->GetDataSize(), Shmem::SharedMemory::TYPE_BASIC,
                      &buffer)) {
        return MediaResult(NS_ERROR_OUT_OF_MEMORY,
                           "AllocShmem failed in "
                           "RemoteVideoDecoderParent::ProcessDecodedData");
      }
      if (image->GetDataSize() > buffer.Size<uint8_t>()) {
        return MediaResult(NS_ERROR_OUT_OF_MEMORY,
                           "AllocShmem returned less than requested in "
                           "RemoteVideoDecoderParent::ProcessDecodedData");
      }

      sdBuffer.data() = std::move(buffer);
      image->BuildSurfaceDescriptorBuffer(sdBuffer);

      sd = sdBuffer;
      size = image->GetSize();
    }

    RemoteVideoDataIPDL output(
        MediaDataIPDL(data->mOffset, data->mTime, data->mTimecode,
                      data->mDuration, data->mKeyframe),
        video->mDisplay, size, sd, video->mFrameID);
    Unused << SendOutput(output);
  }

  return NS_OK;
}

}  // namespace mozilla