/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "RemoteVideoDecoder.h"

#include "mozilla/layers/ImageDataSerializer.h"

#ifdef MOZ_AV1
#  include "AOMDecoder.h"
#  include "DAV1DDecoder.h"
#endif
#ifdef XP_WIN
#  include "WMFDecoderModule.h"
#endif
#include "GPUVideoImage.h"
#include "ImageContainer.h"  // for PlanarYCbCrData and BufferRecycleBin
#include "MediaInfo.h"
#include "PDMFactory.h"
#include "RemoteDecoderManagerChild.h"
#include "RemoteDecoderManagerParent.h"
#include "mozilla/StaticPrefs_media.h"
#include "mozilla/Telemetry.h"
#include "mozilla/layers/ImageClient.h"
#include "mozilla/layers/TextureClient.h"
#include "mozilla/layers/VideoBridgeChild.h"

namespace mozilla {

using namespace layers;  // for PlanarYCbCrData and BufferRecycleBin
using namespace ipc;
using namespace gfx;

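// A KnowsCompositor implementation backed by the process-global
// VideoBridgeChild. When it can be created, decoded frames are turned into
// TextureClients and forwarded over PVideoBridge rather than copied back via
// shmem buffers.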
class KnowsCompositorVideo : public layers::KnowsCompositor {
 public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(KnowsCompositorVideo, override)

  layers::TextureForwarder* GetTextureForwarder() override {
    auto* vbc = VideoBridgeChild::GetSingleton();
    return (vbc && vbc->CanSend()) ? vbc : nullptr;
  }
  layers::LayersIPCActor* GetLayersIPCActor() override {
    return GetTextureForwarder();
  }

  static already_AddRefed<KnowsCompositorVideo> TryCreateForIdentifier(
      const layers::TextureFactoryIdentifier& aIdentifier) {
    VideoBridgeChild* child = VideoBridgeChild::GetSingleton();
    if (!child) {
      return nullptr;
    }

    // The RDD process will never use hardware decoding since it's
    // sandboxed, so don't bother trying to create a sync object.
    TextureFactoryIdentifier ident = aIdentifier;
    if (XRE_IsRDDProcess()) {
      ident.mSyncHandle = 0;
    }

    RefPtr<KnowsCompositorVideo> knowsCompositor = new KnowsCompositorVideo();
    knowsCompositor->IdentifyTextureHost(ident);
    return knowsCompositor.forget();
  }

 private:
  KnowsCompositorVideo() = default;
  virtual ~KnowsCompositorVideo() = default;
};

RemoteVideoDecoderChild::RemoteVideoDecoderChild(bool aRecreatedOnCrash)
    : RemoteDecoderChild(aRecreatedOnCrash),
      mBufferRecycleBin(new BufferRecycleBin) {}

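// Rebuilds a planar YCbCr image from the SurfaceDescriptorBuffer sent over
// IPDL. The pixel data arrives either as a raw pointer or as a Shmem and is
// copied into a RecyclingPlanarYCbCrImage backed by mBufferRecycleBin.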
RefPtr<mozilla::layers::Image> RemoteVideoDecoderChild::DeserializeImage(
    const SurfaceDescriptorBuffer& aSdBuffer, const IntSize& aPicSize) {
  MOZ_ASSERT(aSdBuffer.desc().type() == BufferDescriptor::TYCbCrDescriptor);
  if (aSdBuffer.desc().type() != BufferDescriptor::TYCbCrDescriptor) {
    return nullptr;
  }
  const YCbCrDescriptor& descriptor = aSdBuffer.desc().get_YCbCrDescriptor();

  uint8_t* buffer = nullptr;
  const MemoryOrShmem& memOrShmem = aSdBuffer.data();
  switch (memOrShmem.type()) {
    case MemoryOrShmem::Tuintptr_t:
      buffer = reinterpret_cast<uint8_t*>(memOrShmem.get_uintptr_t());
      break;
    case MemoryOrShmem::TShmem:
      buffer = memOrShmem.get_Shmem().get<uint8_t>();
      break;
    default:
      MOZ_ASSERT(false, "Unknown MemoryOrShmem type");
  }
  if (!buffer) {
    return nullptr;
  }

  PlanarYCbCrData pData;
  pData.mYSize = descriptor.ySize();
  pData.mYStride = descriptor.yStride();
  pData.mCbCrSize = descriptor.cbCrSize();
  pData.mCbCrStride = descriptor.cbCrStride();
  // default mYSkip, mCbSkip, mCrSkip because not held in YCbCrDescriptor
  pData.mYSkip = pData.mCbSkip = pData.mCrSkip = 0;
  // default mPicX, mPicY because not held in YCbCrDescriptor
  pData.mPicX = pData.mPicY = 0;
  pData.mPicSize = aPicSize;
  pData.mStereoMode = descriptor.stereoMode();
  pData.mColorDepth = descriptor.colorDepth();
  pData.mYUVColorSpace = descriptor.yUVColorSpace();
  pData.mYChannel = ImageDataSerializer::GetYChannel(buffer, descriptor);
  pData.mCbChannel = ImageDataSerializer::GetCbChannel(buffer, descriptor);
  pData.mCrChannel = ImageDataSerializer::GetCrChannel(buffer, descriptor);

  // images coming from AOMDecoder are RecyclingPlanarYCbCrImages.
  RefPtr<RecyclingPlanarYCbCrImage> image =
      new RecyclingPlanarYCbCrImage(mBufferRecycleBin);
  bool setData = image->CopyData(pData);
  MOZ_ASSERT(setData);

  switch (memOrShmem.type()) {
    case MemoryOrShmem::Tuintptr_t:
      delete[] reinterpret_cast<uint8_t*>(memOrShmem.get_uintptr_t());
      break;
    case MemoryOrShmem::TShmem:
      // Memory buffer will be recycled by the parent automatically.
      break;
    default:
      MOZ_ASSERT(false, "Unknown MemoryOrShmem type");
  }

  if (!setData) {
    return nullptr;
  }

  return image;
}

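// Converts the RemoteVideoDataIPDL array received from the parent into
// VideoData objects: buffer-backed descriptors are deserialized into planar
// YCbCr images, anything else is wrapped in a GPUVideoImage that references
// the remote texture.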
MediaResult RemoteVideoDecoderChild::ProcessOutput(
    const DecodedOutputIPDL& aDecodedData) {
  AssertOnManagerThread();
  MOZ_ASSERT(aDecodedData.type() ==
             DecodedOutputIPDL::TArrayOfRemoteVideoDataIPDL);

  const nsTArray<RemoteVideoDataIPDL>& arrayData =
      aDecodedData.get_ArrayOfRemoteVideoDataIPDL();

  for (auto&& data : arrayData) {
    RefPtr<Image> image;
    if (data.sd().type() == SurfaceDescriptor::TSurfaceDescriptorBuffer) {
      image = DeserializeImage(data.sd().get_SurfaceDescriptorBuffer(),
                               data.frameSize());
    } else {
      // The Image here creates a TextureData object that takes ownership
      // of the SurfaceDescriptor, and is responsible for making sure that
      // it gets deallocated.
      SurfaceDescriptorRemoteDecoder remoteSD =
          static_cast<const SurfaceDescriptorGPUVideo&>(data.sd());
      remoteSD.source() = Some(GetManager()->GetSource());
      image = new GPUVideoImage(GetManager(), remoteSD, data.frameSize());
    }

    RefPtr<VideoData> video = VideoData::CreateFromImage(
        data.display(), data.base().offset(), data.base().time(),
        data.base().duration(), image, data.base().keyframe(),
        data.base().timecode());

    if (!video) {
      // OOM
      return MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
    }
    mDecodedData.AppendElement(std::move(video));
  }
  return NS_OK;
}

MediaResult RemoteVideoDecoderChild::InitIPDL(
    const VideoInfo& aVideoInfo, float aFramerate,
    const CreateDecoderParams::OptionSet& aOptions,
    const layers::TextureFactoryIdentifier* aIdentifier) {
  RefPtr<RemoteDecoderManagerChild> manager =
      RemoteDecoderManagerChild::GetRDDProcessSingleton();

  // The manager isn't available because RemoteDecoderManagerChild has been
  // initialized with null endpoints and we don't want to decode video in the
  // RDD process anymore. Return an error here so that we can fall back to
  // other PDMs.
  if (!manager) {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("RemoteDecoderManager is not available."));
  }

  if (!manager->CanSend()) {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("RemoteDecoderManager unable to send."));
  }

  mIPDLSelfRef = this;
  bool success = false;
  nsCString errorDescription;
  VideoDecoderInfoIPDL decoderInfo(aVideoInfo, aFramerate);
  Unused << manager->SendPRemoteDecoderConstructor(this, decoderInfo, aOptions,
                                                   ToMaybe(aIdentifier),
                                                   &success, &errorDescription);

  return success ? MediaResult(NS_OK)
                 : MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, errorDescription);
}

GpuRemoteVideoDecoderChild::GpuRemoteVideoDecoderChild()
    : RemoteVideoDecoderChild(true) {}

MediaResult GpuRemoteVideoDecoderChild::InitIPDL(
    const VideoInfo& aVideoInfo, float aFramerate,
    const CreateDecoderParams::OptionSet& aOptions,
    const layers::TextureFactoryIdentifier& aIdentifier) {
  RefPtr<RemoteDecoderManagerChild> manager =
      RemoteDecoderManagerChild::GetGPUProcessSingleton();

  // The manager isn't available because RemoteDecoderManagerChild has been
  // initialized with null endpoints and we don't want to decode video in the
  // GPU process anymore. Return an error here so that we can fall back to
  // other PDMs.
  if (!manager) {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("RemoteDecoderManager is not available."));
  }

  // The manager doesn't support sending messages because we've just crashed
  // and are working on reinitialization. Don't initialize mIPDLSelfRef and
  // leave us in an error state. We'll then immediately reject the promise when
  // Init() is called and the caller can try again. Hopefully by then the new
  // manager is ready, or we've notified the caller of it being no longer
  // available. If not, then the cycle repeats until we're ready.
  if (!manager->CanSend()) {
    return NS_OK;
  }

  mIPDLSelfRef = this;
  bool success = false;
  nsCString errorDescription;
  VideoDecoderInfoIPDL decoderInfo(aVideoInfo, aFramerate);
  Unused << manager->SendPRemoteDecoderConstructor(this, decoderInfo, aOptions,
                                                   Some(aIdentifier), &success,
                                                   &errorDescription);

  return success ? MediaResult(NS_OK)
                 : MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, errorDescription);
}

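// Constructed in the remote (GPU or RDD) process. Selects the platform
// decoder for this stream: the WMF PDM when running in the GPU process on
// Windows, and dav1d or libaom for AV1 depending on
// StaticPrefs::media_av1_use_dav1d().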
RemoteVideoDecoderParent::RemoteVideoDecoderParent(
    RemoteDecoderManagerParent* aParent, const VideoInfo& aVideoInfo,
    float aFramerate, const CreateDecoderParams::OptionSet& aOptions,
    const Maybe<layers::TextureFactoryIdentifier>& aIdentifier,
    TaskQueue* aManagerTaskQueue, TaskQueue* aDecodeTaskQueue, bool* aSuccess,
    nsCString* aErrorDescription)
    : RemoteDecoderParent(aParent, aManagerTaskQueue, aDecodeTaskQueue),
      mVideoInfo(aVideoInfo) {
  if (aIdentifier) {
    // Check to see if we have a direct PVideoBridge connection to the
    // destination process specified in aIdentifier, and create a
    // KnowsCompositor representing that connection if so. If this fails, then
    // we fall back to returning the decoded frames directly via Output().
    mKnowsCompositor =
        KnowsCompositorVideo::TryCreateForIdentifier(*aIdentifier);
  }

  CreateDecoderParams params(mVideoInfo);
  params.mTaskQueue = mDecodeTaskQueue;
  params.mKnowsCompositor = mKnowsCompositor;
  params.mImageContainer = new layers::ImageContainer();
  params.mRate = CreateDecoderParams::VideoFrameRate(aFramerate);
  params.mOptions = aOptions;
  MediaResult error(NS_OK);
  params.mError = &error;

  if (XRE_IsGPUProcess()) {
#ifdef XP_WIN
    // Ensure everything is properly initialized on the right thread.
    PDMFactory::EnsureInit();

    // TODO: Ideally we wouldn't hardcode the WMF PDM, and we'd use the normal
    // PDM factory logic for picking a decoder.
    RefPtr<WMFDecoderModule> pdm(new WMFDecoderModule());
    pdm->Startup();
    mDecoder = pdm->CreateVideoDecoder(params);
#else
    MOZ_ASSERT(false,
               "Can't use RemoteVideoDecoder in the GPU process on non-Windows "
               "platforms yet");
#endif
  }

#ifdef MOZ_AV1
  if (AOMDecoder::IsAV1(params.mConfig.mMimeType)) {
    if (StaticPrefs::media_av1_use_dav1d()) {
      mDecoder = new DAV1DDecoder(params);
    } else {
      mDecoder = new AOMDecoder(params);
    }
  }
#endif

  if (NS_FAILED(error)) {
    MOZ_ASSERT(aErrorDescription);
    *aErrorDescription = error.Description();
  }

  *aSuccess = !!mDecoder;
}

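// Serializes decoded frames for the return trip to RemoteVideoDecoderChild.
// With a working PVideoBridge connection the frame's texture is stored with
// the manager and referenced by descriptor; otherwise the YCbCr data is
// copied into a shmem-backed SurfaceDescriptorBuffer.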
MediaResult RemoteVideoDecoderParent::ProcessDecodedData(
    const MediaDataDecoder::DecodedData& aData,
    DecodedOutputIPDL& aDecodedData) {
  MOZ_ASSERT(OnManagerThread());

  nsTArray<RemoteVideoDataIPDL> array;

  // If the video decoder bridge has shut down, stop.
  if (mKnowsCompositor && !mKnowsCompositor->GetTextureForwarder()) {
    aDecodedData = std::move(array);
    return NS_OK;
  }

  for (const auto& data : aData) {
    MOZ_ASSERT(data->mType == MediaData::Type::VIDEO_DATA,
               "Can only decode videos using RemoteDecoderParent!");
    VideoData* video = static_cast<VideoData*>(data.get());

    MOZ_ASSERT(video->mImage,
               "Decoded video must output a layers::Image to "
               "be used with RemoteDecoderParent");

    SurfaceDescriptor sd;
    IntSize size;

    if (mKnowsCompositor) {
      RefPtr<TextureClient> texture =
          video->mImage->GetTextureClient(mKnowsCompositor);

      if (!texture) {
        texture = ImageClient::CreateTextureClientForImage(video->mImage,
                                                           mKnowsCompositor);
      }

      if (texture && !texture->IsAddedToCompositableClient()) {
        texture->InitIPDLActor(mKnowsCompositor);
        texture->SetAddedToCompositableClient();
      }
      if (texture) {
        sd = mParent->StoreImage(video->mImage, texture);
        size = texture->GetSize();
      }
    } else {
      PlanarYCbCrImage* image =
          static_cast<PlanarYCbCrImage*>(video->mImage.get());

      SurfaceDescriptorBuffer sdBuffer;
      ShmemBuffer buffer = AllocateBuffer(image->GetDataSize());
      if (!buffer.Valid()) {
        return MediaResult(NS_ERROR_OUT_OF_MEMORY,
                           "AllocShmem failed in "
                           "RemoteVideoDecoderParent::ProcessDecodedData");
      }

      sdBuffer.data() = std::move(buffer.Get());
      image->BuildSurfaceDescriptorBuffer(sdBuffer);

      sd = sdBuffer;
      size = image->GetSize();
    }

    RemoteVideoDataIPDL output(
        MediaDataIPDL(data->mOffset, data->mTime, data->mTimecode,
                      data->mDuration, data->mKeyframe),
        video->mDisplay, size, sd, video->mFrameID);

    array.AppendElement(output);
  }

  aDecodedData = std::move(array);

  return NS_OK;
}

}  // namespace mozilla