Source code

Revision control

Other Tools

1
/* This Source Code Form is subject to the terms of the Mozilla Public
2
* License, v. 2.0. If a copy of the MPL was not distributed with this
3
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
4
5
#include "RemoteDataDecoder.h"
6
7
#include "AndroidBridge.h"
8
#include "AndroidDecoderModule.h"
9
#include "EMEDecoderModule.h"
10
#include "GLImages.h"
11
#include "JavaCallbacksSupport.h"
12
#include "MediaData.h"
13
#include "MediaInfo.h"
14
#include "SimpleMap.h"
15
#include "VideoUtils.h"
16
#include "VPXDecoder.h"
17
18
#include "nsIGfxInfo.h"
19
#include "nsPromiseFlatString.h"
20
#include "nsThreadUtils.h"
21
#include "prlog.h"
22
23
#include <jni.h>
24
25
#undef LOG
26
#define LOG(arg, ...) \
27
MOZ_LOG(sAndroidDecoderModuleLog, mozilla::LogLevel::Debug, \
28
("RemoteDataDecoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
29
30
using namespace mozilla;
31
using namespace mozilla::gl;
32
using namespace mozilla::java;
33
using namespace mozilla::java::sdk;
34
using media::TimeUnit;
35
36
namespace mozilla {
37
38
// Hold a reference to the output buffer until we're ready to release it back to
39
// the Java codec (for rendering or not).
40
// RAII helper that keeps the Java codec and one of its output samples alive
// until the sample has been returned to the codec exactly once.  The
// destructor releases the sample without rendering; subclasses may call
// ReleaseOutput(true) earlier to have the codec render it to the surface.
class RenderOrReleaseOutput {
 public:
  RenderOrReleaseOutput(CodecProxy::Param aCodec, Sample::Param aSample)
      : mCodec(aCodec), mSample(aSample) {}

  // If the sample was never rendered, give it back to the codec unrendered.
  virtual ~RenderOrReleaseOutput() { ReleaseOutput(false); }

 protected:
  // Return the sample to the Java codec.  aToRender selects whether the codec
  // renders the frame to its output surface.  Idempotent: both refs are
  // cleared on the first call, so a later destructor call is a no-op.
  void ReleaseOutput(bool aToRender) {
    if (mCodec && mSample) {
      mCodec->ReleaseOutput(mSample, aToRender);
      mCodec = nullptr;
      mSample = nullptr;
    }
  }

 private:
  CodecProxy::GlobalRef mCodec;
  Sample::GlobalRef mSample;
};
60
61
// Video flavor of RemoteDataDecoder.  Frames are decoded by a Java
// MediaCodec into an Android surface; output samples are bufferless and are
// rendered (or dropped) when the compositor consumes them.
class RemoteVideoDecoder : public RemoteDataDecoder {
 public:
  // Render the output to the surface when the frame is sent
  // to compositor, or release it if not presented.
  class CompositeListener : private RenderOrReleaseOutput,
                            public VideoData::Listener {
   public:
    CompositeListener(CodecProxy::Param aCodec, Sample::Param aSample)
        : RenderOrReleaseOutput(aCodec, aSample) {}

    void OnSentToCompositor() override { ReleaseOutput(true); }
  };

  // Bookkeeping captured at Decode() time (duration and geometry), looked up
  // again by presentation timestamp when the corresponding output arrives.
  class InputInfo {
   public:
    // NOTE(review): mDurationUs is left uninitialized by this default
    // constructor; it appears to exist only so the type is default-
    // constructible for SimpleMap, and values are only read from entries
    // inserted via the parameterized constructor — confirm.
    InputInfo() {}

    InputInfo(const int64_t aDurationUs, const gfx::IntSize& aImageSize,
              const gfx::IntSize& aDisplaySize)
        : mDurationUs(aDurationUs),
          mImageSize(aImageSize),
          mDisplaySize(aDisplaySize) {}

    int64_t mDurationUs;
    gfx::IntSize mImageSize;
    gfx::IntSize mDisplaySize;
  };

  // Receives callbacks from the Java codec and forwards them to the owning
  // decoder.  HandleOutput/HandleInput may arrive on a non-task-queue thread;
  // the decoder re-dispatches internally.
  class CallbacksSupport final : public JavaCallbacksSupport {
   public:
    explicit CallbacksSupport(RemoteVideoDecoder* aDecoder)
        : mDecoder(aDecoder) {}

    void HandleInput(int64_t aTimestamp, bool aProcessed) override {
      mDecoder->UpdateInputStatus(aTimestamp, aProcessed);
    }

    void HandleOutput(Sample::Param aSample,
                      java::SampleBuffer::Param aBuffer) override {
      // Video frames go to the surface, so no CPU-side buffer is expected.
      MOZ_ASSERT(!aBuffer, "Video sample should be bufferless");
      // aSample will be implicitly converted into a GlobalRef.
      mDecoder->ProcessOutput(std::move(aSample));
    }

    void HandleError(const MediaResult& aError) override {
      mDecoder->Error(aError);
    }

    friend class RemoteDataDecoder;

   private:
    RemoteVideoDecoder* mDecoder;
  };

  RemoteVideoDecoder(const VideoInfo& aConfig, MediaFormat::Param aFormat,
                     const nsString& aDrmStubId, TaskQueue* aTaskQueue)
      : RemoteDataDecoder(MediaData::Type::VIDEO_DATA, aConfig.mMimeType,
                          aFormat, aDrmStubId, aTaskQueue),
        mConfig(aConfig) {}

  ~RemoteVideoDecoder() {
    // Return the output surface to the allocator.
    if (mSurface) {
      SurfaceAllocator::DisposeSurface(mSurface);
    }
  }

  // Acquire a surface sized to the coded image, attach native callbacks and
  // create the Java codec proxy.  Rejects with OOM or fatal media error.
  RefPtr<InitPromise> Init() override {
    BufferInfo::LocalRef bufferInfo;
    if (NS_FAILED(BufferInfo::New(&bufferInfo)) || !bufferInfo) {
      return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
    }
    mInputBufferInfo = bufferInfo;

    mSurface = GeckoSurface::LocalRef(SurfaceAllocator::AcquireSurface(
        mConfig.mImage.width, mConfig.mImage.height, false));
    if (!mSurface) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }

    mSurfaceHandle = mSurface->GetHandle();

    // Register native methods.
    JavaCallbacksSupport::Init();

    mJavaCallbacks = CodecProxy::NativeCallbacks::New();
    if (!mJavaCallbacks) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }
    JavaCallbacksSupport::AttachNative(
        mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));

    mJavaDecoder = CodecProxy::Create(
        false,  // false indicates to create a decoder and true denotes encoder
        mFormat, mSurface, mJavaCallbacks, mDrmStubId);
    if (mJavaDecoder == nullptr) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }
    mIsCodecSupportAdaptivePlayback =
        mJavaDecoder->IsAdaptivePlaybackSupported();
    mIsHardwareAccelerated = mJavaDecoder->IsHardwareAccelerated();
    return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
  }

  // Clear video-specific per-flush state before the shared flush logic runs.
  RefPtr<MediaDataDecoder::FlushPromise> Flush() override {
    RefPtr<RemoteVideoDecoder> self = this;
    return InvokeAsync(mTaskQueue, __func__, [self, this]() {
      mInputInfos.Clear();
      mSeekTarget.reset();
      mLatestOutputTime.reset();
      return RemoteDataDecoder::ProcessFlush();
    });
  }

  // Record the input's duration/geometry keyed by timestamp, then hand the
  // sample to the shared decode path.
  RefPtr<MediaDataDecoder::DecodePromise> Decode(
      MediaRawData* aSample) override {
    RefPtr<RemoteVideoDecoder> self = this;
    RefPtr<MediaRawData> sample = aSample;
    return InvokeAsync(mTaskQueue, __func__, [self, sample]() {
      // Per-sample track info (e.g. after a stream switch) takes precedence
      // over the config this decoder was created with.
      const VideoInfo* config = sample->mTrackInfo
                                    ? sample->mTrackInfo->GetAsVideoInfo()
                                    : &self->mConfig;
      MOZ_ASSERT(config);

      InputInfo info(sample->mDuration.ToMicroseconds(), config->mImage,
                     config->mDisplay);
      self->mInputInfos.Insert(sample->mTime.ToMicroseconds(), info);
      return self->RemoteDataDecoder::ProcessDecode(sample);
    });
  }

  bool SupportDecoderRecycling() const override {
    return mIsCodecSupportAdaptivePlayback;
  }

  // Record (on the task queue) the seek target so pre-target frames can be
  // dropped in IsUsefulData().  An invalid time clears the target.
  void SetSeekThreshold(const TimeUnit& aTime) override {
    RefPtr<RemoteVideoDecoder> self = this;
    nsCOMPtr<nsIRunnable> runnable = NS_NewRunnableFunction(
        "RemoteVideoDecoder::SetSeekThreshold", [self, aTime]() {
          if (aTime.IsValid()) {
            self->mSeekTarget = Some(aTime);
          } else {
            self->mSeekTarget.reset();
          }
        });
    nsresult rv = mTaskQueue->Dispatch(runnable.forget());
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
  }

  // Drop frames that are out of order or entirely before the seek target;
  // otherwise advance the latest-output watermark.
  bool IsUsefulData(const RefPtr<MediaData>& aSample) override {
    AssertOnTaskQueue();

    if (mLatestOutputTime && aSample->mTime < mLatestOutputTime.value()) {
      return false;
    }

    const TimeUnit endTime = aSample->GetEndTime();
    if (mSeekTarget && endTime <= mSeekTarget.value()) {
      return false;
    }

    mSeekTarget.reset();
    mLatestOutputTime = Some(endTime);
    return true;
  }

  bool IsHardwareAccelerated(nsACString& aFailureReason) const override {
    return mIsHardwareAccelerated;
  }

  ConversionRequired NeedsConversion() const override {
    return ConversionRequired::kNeedAnnexB;
  }

 private:
  // Param and LocalRef are only valid for the duration of a JNI method call.
  // Use GlobalRef as the parameter type to keep the Java object referenced
  // until running.
  void ProcessOutput(Sample::GlobalRef&& aSample) {
    // May be called from the Java callback thread; bounce to the task queue.
    if (!mTaskQueue->IsCurrentThreadIn()) {
      nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<Sample::GlobalRef&&>(
          "RemoteVideoDecoder::ProcessOutput", this,
          &RemoteVideoDecoder::ProcessOutput, std::move(aSample)));
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      Unused << rv;
      return;
    }

    AssertOnTaskQueue();
    if (GetState() == State::SHUTDOWN) {
      aSample->Dispose();
      return;
    }

    // The listener renders-or-releases the sample once the compositor is
    // done with the frame (or when the listener is destroyed).
    UniquePtr<VideoData::Listener> releaseSample(
        new CompositeListener(mJavaDecoder, aSample));

    BufferInfo::LocalRef info = aSample->Info();
    MOZ_ASSERT(info);

    int32_t flags;
    bool ok = NS_SUCCEEDED(info->Flags(&flags));

    int32_t offset;
    ok &= NS_SUCCEEDED(info->Offset(&offset));

    int32_t size;
    ok &= NS_SUCCEEDED(info->Size(&size));

    int64_t presentationTimeUs;
    ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));

    if (!ok) {
      Error(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                        RESULT_DETAIL("VideoCallBack::HandleOutput")));
      return;
    }

    InputInfo inputInfo;
    ok = mInputInfos.Find(presentationTimeUs, inputInfo);
    bool isEOS = !!(flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM);
    if (!ok && !isEOS) {
      // Ignore output with no corresponding input.
      return;
    }

    if (ok && (size > 0 || presentationTimeUs >= 0)) {
      // Wrap the surface texture in an Image/VideoData pair carrying the
      // geometry recorded at Decode() time.
      RefPtr<layers::Image> img = new layers::SurfaceTextureImage(
          mSurfaceHandle, inputInfo.mImageSize, false /* NOT continuous */,
          gl::OriginPos::BottomLeft, mConfig.HasAlpha());

      RefPtr<VideoData> v = VideoData::CreateFromImage(
          inputInfo.mDisplaySize, offset,
          TimeUnit::FromMicroseconds(presentationTimeUs),
          TimeUnit::FromMicroseconds(inputInfo.mDurationUs), img,
          !!(flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME),
          TimeUnit::FromMicroseconds(presentationTimeUs));

      v->SetListener(std::move(releaseSample));
      RemoteDataDecoder::UpdateOutputStatus(std::move(v));
    }

    if (isEOS) {
      DrainComplete();
    }
  }

  const VideoInfo mConfig;
  GeckoSurface::GlobalRef mSurface;
  AndroidSurfaceTextureHandle mSurfaceHandle;
  // Only accessed on reader's task queue.
  bool mIsCodecSupportAdaptivePlayback = false;
  // Can be accessed on any thread, but only written on during init.
  bool mIsHardwareAccelerated = false;
  // Accessed on mTaskQueue and reader's TaskQueue. SimpleMap however is
  // thread-safe, so it's okay to do so.
  SimpleMap<InputInfo> mInputInfos;
  // Only accessed on the TaskQueue.
  Maybe<TimeUnit> mSeekTarget;
  Maybe<TimeUnit> mLatestOutputTime;
};
325
326
// Audio flavor of RemoteDataDecoder.  Unlike video, decoded PCM comes back
// through a SampleBuffer and is copied into gecko-side AudioData.
class RemoteAudioDecoder : public RemoteDataDecoder {
 public:
  RemoteAudioDecoder(const AudioInfo& aConfig, MediaFormat::Param aFormat,
                     const nsString& aDrmStubId, TaskQueue* aTaskQueue)
      : RemoteDataDecoder(MediaData::Type::AUDIO_DATA, aConfig.mMimeType,
                          aFormat, aDrmStubId, aTaskQueue) {
    JNIEnv* const env = jni::GetEnvForThread();

    // If the format doesn't already carry codec-specific data ("csd-0"),
    // copy it in from the gecko-side config so MediaCodec can configure.
    bool formatHasCSD = false;
    NS_ENSURE_SUCCESS_VOID(
        aFormat->ContainsKey(NS_LITERAL_STRING("csd-0"), &formatHasCSD));

    if (!formatHasCSD && aConfig.mCodecSpecificConfig->Length() >= 2) {
      jni::ByteBuffer::LocalRef buffer(env);
      buffer = jni::ByteBuffer::New(aConfig.mCodecSpecificConfig->Elements(),
                                    aConfig.mCodecSpecificConfig->Length());
      NS_ENSURE_SUCCESS_VOID(
          aFormat->SetByteBuffer(NS_LITERAL_STRING("csd-0"), buffer));
    }
  }

  // Attach native callbacks and create the Java codec proxy (no surface for
  // audio).  Rejects with OOM or fatal media error.
  RefPtr<InitPromise> Init() override {
    BufferInfo::LocalRef bufferInfo;
    if (NS_FAILED(BufferInfo::New(&bufferInfo)) || !bufferInfo) {
      return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
    }
    mInputBufferInfo = bufferInfo;

    // Register native methods.
    JavaCallbacksSupport::Init();

    mJavaCallbacks = CodecProxy::NativeCallbacks::New();
    if (!mJavaCallbacks) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }
    JavaCallbacksSupport::AttachNative(
        mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));

    mJavaDecoder =
        CodecProxy::Create(false, mFormat, nullptr, mJavaCallbacks, mDrmStubId);
    if (mJavaDecoder == nullptr) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }

    return InitPromise::CreateAndResolve(TrackInfo::kAudioTrack, __func__);
  }

  // Reset the first-sample-time marker (also used by ShouldDiscardSample() to
  // detect a flush) before the shared flush logic runs.
  RefPtr<FlushPromise> Flush() override {
    RefPtr<RemoteAudioDecoder> self = this;
    return InvokeAsync(mTaskQueue, __func__, [self]() {
      self->mFirstDemuxedSampleTime.reset();
      return self->RemoteDataDecoder::ProcessFlush();
    });
  }

  // Remember the first demuxed sample time, then hand the sample to the
  // shared decode path.
  RefPtr<DecodePromise> Decode(MediaRawData* aSample) override {
    RefPtr<RemoteAudioDecoder> self = this;
    RefPtr<MediaRawData> sample = aSample;
    return InvokeAsync(mTaskQueue, __func__, [self, sample]() {
      if (!self->mFirstDemuxedSampleTime) {
        MOZ_ASSERT(sample->mTime.IsValid());
        self->mFirstDemuxedSampleTime.emplace(sample->mTime);
      }
      return self->RemoteDataDecoder::ProcessDecode(sample);
    });
  }

 private:
  // Receives callbacks from the Java codec and forwards them to the owning
  // decoder; callbacks may arrive on a non-task-queue thread.
  class CallbacksSupport final : public JavaCallbacksSupport {
   public:
    explicit CallbacksSupport(RemoteAudioDecoder* aDecoder)
        : mDecoder(aDecoder) {}

    void HandleInput(int64_t aTimestamp, bool aProcessed) override {
      mDecoder->UpdateInputStatus(aTimestamp, aProcessed);
    }

    void HandleOutput(Sample::Param aSample,
                      java::SampleBuffer::Param aBuffer) override {
      MOZ_ASSERT(aBuffer, "Audio sample should have buffer");
      // aSample will be implicitly converted into a GlobalRef.
      mDecoder->ProcessOutput(std::move(aSample), std::move(aBuffer));
    }

    // The codec reported a new output format; validate the channel layout
    // and forward channels/rate to the decoder.
    void HandleOutputFormatChanged(MediaFormat::Param aFormat) override {
      int32_t outputChannels = 0;
      aFormat->GetInteger(NS_LITERAL_STRING("channel-count"), &outputChannels);
      AudioConfig::ChannelLayout layout(outputChannels);
      if (!layout.IsValid()) {
        mDecoder->Error(MediaResult(
            NS_ERROR_DOM_MEDIA_FATAL_ERR,
            RESULT_DETAIL("Invalid channel layout:%d", outputChannels)));
        return;
      }

      int32_t sampleRate = 0;
      aFormat->GetInteger(NS_LITERAL_STRING("sample-rate"), &sampleRate);
      LOG("Audio output format changed: channels:%d sample rate:%d",
          outputChannels, sampleRate);

      mDecoder->ProcessOutputFormatChange(outputChannels, sampleRate);
    }

    void HandleError(const MediaResult& aError) override {
      mDecoder->Error(aError);
    }

   private:
    RemoteAudioDecoder* mDecoder;
  };

  // Precondition: mFirstDemuxedSampleTime is engaged (checked by callers via
  // ShouldDiscardSample()).
  bool IsSampleTimeSmallerThanFirstDemuxedSampleTime(int64_t aTime) const {
    return mFirstDemuxedSampleTime->ToMicroseconds() > aTime;
  }

  bool ShouldDiscardSample(int64_t aSession) const {
    AssertOnTaskQueue();
    // HandleOutput() runs on Android binder thread pool and could be preempted
    // by RemoteDateDecoder task queue. That means ProcessOutput() could be
    // scheduled after ProcessShutdown() or ProcessFlush(). We won't need the
    // sample which is returned after calling Shutdown() and Flush(). We can
    // check mFirstDemuxedSampleTime to know whether the Flush() has been
    // called, because it would be reset in Flush().
    return GetState() == State::SHUTDOWN || !mFirstDemuxedSampleTime ||
           mSession != aSession;
  }

  // Param and LocalRef are only valid for the duration of a JNI method call.
  // Use GlobalRef as the parameter type to keep the Java object referenced
  // until running.
  void ProcessOutput(Sample::GlobalRef&& aSample,
                     SampleBuffer::GlobalRef&& aBuffer) {
    // May be called from the Java callback thread; bounce to the task queue.
    if (!mTaskQueue->IsCurrentThreadIn()) {
      nsresult rv = mTaskQueue->Dispatch(
          NewRunnableMethod<Sample::GlobalRef&&, SampleBuffer::GlobalRef&&>(
              "RemoteAudioDecoder::ProcessOutput", this,
              &RemoteAudioDecoder::ProcessOutput, std::move(aSample),
              std::move(aBuffer)));
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      Unused << rv;
      return;
    }

    AssertOnTaskQueue();

    if (ShouldDiscardSample(aSample->Session()) || !aBuffer->IsValid()) {
      aSample->Dispose();
      return;
    }

    // Ensure the sample is returned to the codec on every exit path.
    RenderOrReleaseOutput autoRelease(mJavaDecoder, aSample);

    BufferInfo::LocalRef info = aSample->Info();
    MOZ_ASSERT(info);

    int32_t flags = 0;
    bool ok = NS_SUCCEEDED(info->Flags(&flags));
    bool isEOS = !!(flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM);

    int32_t offset;
    ok &= NS_SUCCEEDED(info->Offset(&offset));

    int64_t presentationTimeUs;
    ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));

    int32_t size;
    ok &= NS_SUCCEEDED(info->Size(&size));

    // A timestamp earlier than the first demuxed sample (other than EOS) is
    // treated as a fatal inconsistency.
    if (!ok ||
        (IsSampleTimeSmallerThanFirstDemuxedSampleTime(presentationTimeUs) &&
         !isEOS)) {
      Error(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__));
      return;
    }

    if (size > 0) {
#ifdef MOZ_SAMPLE_TYPE_S16
      // 2 bytes per 16-bit PCM sample.
      const int32_t numSamples = size / 2;
#else
#  error We only support 16-bit integer PCM
#endif

      AlignedAudioBuffer audio(numSamples);
      if (!audio) {
        Error(MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__));
        return;
      }

      // Copy the PCM out of the Java-side buffer into our aligned buffer.
      jni::ByteBuffer::LocalRef dest = jni::ByteBuffer::New(audio.get(), size);
      aBuffer->WriteToByteBuffer(dest, offset, size);

      RefPtr<AudioData> data =
          new AudioData(0, TimeUnit::FromMicroseconds(presentationTimeUs),
                        std::move(audio), mOutputChannels, mOutputSampleRate);

      UpdateOutputStatus(std::move(data));
    }

    if (isEOS) {
      DrainComplete();
    }
  }

  // Record a format change (channels/sample rate) on the task queue so later
  // outputs are wrapped with the right parameters.
  void ProcessOutputFormatChange(int32_t aChannels, int32_t aSampleRate) {
    if (!mTaskQueue->IsCurrentThreadIn()) {
      nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<int32_t, int32_t>(
          "RemoteAudioDecoder::ProcessOutputFormatChange", this,
          &RemoteAudioDecoder::ProcessOutputFormatChange, aChannels,
          aSampleRate));
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      Unused << rv;
      return;
    }

    AssertOnTaskQueue();

    mOutputChannels = aChannels;
    mOutputSampleRate = aSampleRate;
  }

  // NOTE(review): these are uninitialized until the first
  // HandleOutputFormatChanged(); presumably the codec always reports a format
  // before the first output — confirm.
  int32_t mOutputChannels;
  int32_t mOutputSampleRate;
  Maybe<TimeUnit> mFirstDemuxedSampleTime;
};
552
553
// Create an audio RemoteDataDecoder (optionally wrapped in an EME proxy when
// a CDMProxy is supplied).  Returns nullptr if the Java MediaFormat cannot
// be created.
already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateAudioDecoder(
    const CreateDecoderParams& aParams, const nsString& aDrmStubId,
    CDMProxy* aProxy) {
  const AudioInfo& info = aParams.AudioConfig();

  // Build the Java-side MediaFormat describing the compressed audio stream.
  MediaFormat::LocalRef format;
  nsresult rv = MediaFormat::CreateAudioFormat(info.mMimeType, info.mRate,
                                               info.mChannels, &format);
  NS_ENSURE_SUCCESS(rv, nullptr);

  RefPtr<MediaDataDecoder> decoder =
      new RemoteAudioDecoder(info, format, aDrmStubId, aParams.mTaskQueue);

  // With EME, interpose a proxy that handles decryption sessions.
  if (aProxy) {
    decoder = new EMEMediaDataDecoderProxy(aParams, decoder.forget(), aProxy);
  }

  return decoder.forget();
}
570
571
// Create a video RemoteDataDecoder (optionally wrapped in an EME proxy when
// a CDMProxy is supplied).  Returns nullptr if the Java MediaFormat cannot
// be created.
already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateVideoDecoder(
    const CreateDecoderParams& aParams, const nsString& aDrmStubId,
    CDMProxy* aProxy) {
  const VideoInfo& info = aParams.VideoConfig();

  // Build the Java-side MediaFormat sized to the coded image.
  MediaFormat::LocalRef format;
  nsresult rv = MediaFormat::CreateVideoFormat(TranslateMimeType(info.mMimeType),
                                               info.mImage.width,
                                               info.mImage.height, &format);
  NS_ENSURE_SUCCESS(rv, nullptr);

  RefPtr<MediaDataDecoder> decoder =
      new RemoteVideoDecoder(info, format, aDrmStubId, aParams.mTaskQueue);

  // With EME, interpose a proxy that handles decryption sessions.
  if (aProxy) {
    decoder = new EMEMediaDataDecoderProxy(aParams, decoder.forget(), aProxy);
  }

  return decoder.forget();
}
588
589
// Shared base constructor: stores the stream type/format/DRM id and the task
// queue all decoder state is confined to.  Session/pending-input counters
// start at zero.
RemoteDataDecoder::RemoteDataDecoder(MediaData::Type aType,
                                     const nsACString& aMimeType,
                                     MediaFormat::Param aFormat,
                                     const nsString& aDrmStubId,
                                     TaskQueue* aTaskQueue)
    : mType(aType),
      mMimeType(aMimeType),
      mFormat(aFormat),
      mDrmStubId(aDrmStubId),
      mTaskQueue(aTaskQueue),
      mSession(0),
      mNumPendingInputs(0) {}
601
602
// Flush asynchronously on the task queue.  InvokeAsync with |this| keeps the
// decoder alive until ProcessFlush() has run, so the previously declared
// local RefPtr |self| was dead code (pure addref/release churn) — removed.
RefPtr<MediaDataDecoder::FlushPromise> RemoteDataDecoder::Flush() {
  return InvokeAsync(mTaskQueue, this, __func__,
                     &RemoteDataDecoder::ProcessFlush);
}
607
608
// Task-queue half of Flush(): drop any already-decoded data, cancel pending
// decode/drain promises, mark the decoder drained and flush the Java codec.
// NOTE(review): mJavaDecoder is dereferenced unconditionally here; presumably
// Flush() is never called unless Init() succeeded — confirm with callers.
RefPtr<MediaDataDecoder::FlushPromise> RemoteDataDecoder::ProcessFlush() {
  AssertOnTaskQueue();

  mDecodedData = DecodedData();
  UpdatePendingInputStatus(PendingOp::CLEAR);
  mDecodePromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
  mDrainPromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
  SetState(State::DRAINED);
  mJavaDecoder->Flush();
  return FlushPromise::CreateAndResolve(true, __func__);
}
619
620
// Request draining: deliver all remaining output, then resolve the drain
// promise.  Behavior depends on the current state: SHUTDOWN rejects,
// DRAINED returns whatever is already decoded, DRAINING waits for the
// in-flight EOS, otherwise an EOS buffer is queued to the Java codec.
RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::Drain() {
  RefPtr<RemoteDataDecoder> self = this;
  return InvokeAsync(mTaskQueue, __func__, [self, this]() {
    if (GetState() == State::SHUTDOWN) {
      return DecodePromise::CreateAndReject(NS_ERROR_DOM_MEDIA_CANCELED,
                                            __func__);
    }
    RefPtr<DecodePromise> p = mDrainPromise.Ensure(__func__);
    if (GetState() == State::DRAINED) {
      // There's no operation to perform other than returning any already
      // decoded data.
      ReturnDecodedData();
      return p;
    }

    if (GetState() == State::DRAINING) {
      // Draining operation already pending, let it complete its course.
      return p;
    }

    // Send an empty input buffer flagged end-of-stream; DrainComplete() fires
    // when the matching EOS output arrives.
    SetState(State::DRAINING);
    self->mInputBufferInfo->Set(0, 0, -1,
                                MediaCodec::BUFFER_FLAG_END_OF_STREAM);
    mSession = mJavaDecoder->Input(nullptr, self->mInputBufferInfo, nullptr);
    return p;
  });
}
647
648
// Shut down asynchronously on the task queue.  InvokeAsync with |this| keeps
// the decoder alive until ProcessShutdown() has run, so the previously
// declared local RefPtr |self| was dead code (pure addref/release churn) —
// removed.
RefPtr<ShutdownPromise> RemoteDataDecoder::Shutdown() {
  LOG("");
  return InvokeAsync(mTaskQueue, this, __func__,
                     &RemoteDataDecoder::ProcessShutdown);
}
654
655
// Task-queue half of Shutdown(): mark SHUTDOWN first (so in-flight callbacks
// bail out), then tear down the Java codec, its native callbacks and the
// format, in that order.
RefPtr<ShutdownPromise> RemoteDataDecoder::ProcessShutdown() {
  AssertOnTaskQueue();
  SetState(State::SHUTDOWN);
  if (mJavaDecoder) {
    mJavaDecoder->Release();
    mJavaDecoder = nullptr;
  }

  if (mJavaCallbacks) {
    // Cancel stops further callbacks before the native peer is disposed.
    JavaCallbacksSupport::GetNative(mJavaCallbacks)->Cancel();
    JavaCallbacksSupport::DisposeNative(mJavaCallbacks);
    mJavaCallbacks = nullptr;
  }

  mFormat = nullptr;

  return ShutdownPromise::CreateAndResolve(true, __func__);
}
673
674
// Build a Java CryptoInfo describing aSample's encryption subsamples, or
// nullptr if the sample is clear (or its crypto metadata is unusable).
//
// Fixes over the previous version:
//  - plainSizes[0] was indexed without checking mPlainSizes is non-empty
//    (out-of-bounds on malformed metadata);
//  - aSample->Size() - totalSubSamplesSize could underflow if the subsample
//    sizes exceed the sample size;
//  - kExpectedIVLength was a signed int compared against size_t in the IV
//    padding loop (signed/unsigned mismatch);
//  - &array[0] on nsTArray is UB when the array is empty; use Elements().
static CryptoInfo::LocalRef GetCryptoInfoFromSample(
    const MediaRawData* aSample) {
  auto& cryptoObj = aSample->mCrypto;

  if (!cryptoObj.IsEncrypted()) {
    return nullptr;
  }

  CryptoInfo::LocalRef cryptoInfo;
  nsresult rv = CryptoInfo::New(&cryptoInfo);
  NS_ENSURE_SUCCESS(rv, nullptr);

  uint32_t numSubSamples = std::min<uint32_t>(
      cryptoObj.mPlainSizes.Length(), cryptoObj.mEncryptedSizes.Length());

  uint32_t totalSubSamplesSize = 0;
  for (auto& size : cryptoObj.mEncryptedSizes) {
    totalSubSamplesSize += size;
  }

  // mPlainSizes is uint16_t, need to transform to uint32_t first.
  nsTArray<uint32_t> plainSizes;
  for (auto& size : cryptoObj.mPlainSizes) {
    totalSubSamplesSize += size;
    plainSizes.AppendElement(size);
  }

  // Guard against malformed crypto metadata: no plain-size entry to fold the
  // CSD into, or subsample sizes exceeding the sample size (underflow).
  if (plainSizes.IsEmpty() || totalSubSamplesSize > aSample->Size()) {
    return nullptr;
  }

  uint32_t codecSpecificDataSize = aSample->Size() - totalSubSamplesSize;
  // Size of codec specific data("CSD") for Android MediaCodec usage should be
  // included in the 1st plain size.
  plainSizes[0] += codecSpecificDataSize;

  // MediaCodec expects a 16-byte IV; zero-pad a shorter one.
  static const size_t kExpectedIVLength = 16;
  auto tempIV(cryptoObj.mIV);
  auto tempIVLength = tempIV.Length();
  MOZ_ASSERT(tempIVLength <= kExpectedIVLength);
  for (size_t i = tempIVLength; i < kExpectedIVLength; i++) {
    // Padding with 0
    tempIV.AppendElement(0);
  }

  auto numBytesOfPlainData = mozilla::jni::IntArray::New(
      reinterpret_cast<int32_t*>(plainSizes.Elements()), plainSizes.Length());

  auto numBytesOfEncryptedData = mozilla::jni::IntArray::New(
      reinterpret_cast<const int32_t*>(cryptoObj.mEncryptedSizes.Elements()),
      cryptoObj.mEncryptedSizes.Length());
  auto iv = mozilla::jni::ByteArray::New(
      reinterpret_cast<int8_t*>(tempIV.Elements()), tempIV.Length());
  auto keyId = mozilla::jni::ByteArray::New(
      reinterpret_cast<const int8_t*>(cryptoObj.mKeyId.Elements()),
      cryptoObj.mKeyId.Length());
  cryptoInfo->Set(numSubSamples, numBytesOfPlainData, numBytesOfEncryptedData,
                  keyId, iv, MediaCodec::CRYPTO_MODE_AES_CTR);

  return cryptoInfo;
}
731
732
// Public decode entry: capture strong refs to |this| and the sample, then
// run ProcessDecode() on the task queue.
RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::Decode(
    MediaRawData* aSample) {
  RefPtr<RemoteDataDecoder> self = this;
  RefPtr<MediaRawData> sample = aSample;
  return InvokeAsync(mTaskQueue, __func__,
                     [self, sample]() { return self->ProcessDecode(sample); });
}
739
740
// Task-queue half of Decode(): wrap the sample bytes in a Java ByteBuffer
// and queue them (with crypto info when encrypted) to the Java codec.
// The returned promise resolves later from ReturnDecodedData().
RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::ProcessDecode(
    MediaRawData* aSample) {
  AssertOnTaskQueue();
  MOZ_ASSERT(aSample != nullptr);
  jni::ByteBuffer::LocalRef bytes = jni::ByteBuffer::New(
      const_cast<uint8_t*>(aSample->Data()), aSample->Size());

  SetState(State::DRAINABLE);
  mInputBufferInfo->Set(0, aSample->Size(), aSample->mTime.ToMicroseconds(), 0);
  int64_t session = mJavaDecoder->Input(bytes, mInputBufferInfo,
                                        GetCryptoInfoFromSample(aSample));
  if (session == java::CodecProxy::INVALID_SESSION) {
    return DecodePromise::CreateAndReject(
        MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__), __func__);
  }
  // Remember the codec session so stale outputs from before a flush can be
  // recognized and discarded.
  mSession = session;
  return mDecodePromise.Ensure(__func__);
}
758
759
// Adjust the count of inputs that have been queued to the Java codec but not
// yet processed.  Task-queue only.
void RemoteDataDecoder::UpdatePendingInputStatus(PendingOp aOp) {
  AssertOnTaskQueue();
  if (aOp == PendingOp::INCREASE) {
    mNumPendingInputs++;
  } else if (aOp == PendingOp::DECREASE) {
    mNumPendingInputs--;
  } else {
    // PendingOp::CLEAR — e.g. on flush.
    mNumPendingInputs = 0;
  }
}
773
774
// Input-status callback from the Java codec (may arrive on a foreign thread;
// re-dispatched to the task queue).  Tracks how many inputs are still queued
// and resolves the decode promise when it is time to request the next input.
void RemoteDataDecoder::UpdateInputStatus(int64_t aTimestamp, bool aProcessed) {
  if (!mTaskQueue->IsCurrentThreadIn()) {
    nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<int64_t, bool>(
        "RemoteDataDecoder::UpdateInputStatus", this,
        &RemoteDataDecoder::UpdateInputStatus, aTimestamp, aProcessed));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
    return;
  }
  AssertOnTaskQueue();
  if (GetState() == State::SHUTDOWN) {
    return;
  }

  if (!aProcessed) {
    UpdatePendingInputStatus(PendingOp::INCREASE);
  } else if (HasPendingInputs()) {
    UpdatePendingInputStatus(PendingOp::DECREASE);
  }

  if (!HasPendingInputs() ||  // Input has been processed, request the next one.
      !mDecodedData.IsEmpty()) {  // Previous output arrived before Decode().
    ReturnDecodedData();
  }
}
799
800
// Accept a decoded sample (task queue only): keep it if the subclass deems
// it useful (e.g. not before a seek target), then try to resolve the pending
// decode/drain promise.
void RemoteDataDecoder::UpdateOutputStatus(RefPtr<MediaData>&& aSample) {
  AssertOnTaskQueue();
  if (GetState() == State::SHUTDOWN) {
    return;
  }
  if (IsUsefulData(aSample)) {
    mDecodedData.AppendElement(std::move(aSample));
  }
  ReturnDecodedData();
}
810
811
// Hand accumulated output to whichever promise is outstanding.  A decode
// promise is resolved unconditionally (possibly with no data, to request
// more input); a drain promise only when there is data or draining finished.
void RemoteDataDecoder::ReturnDecodedData() {
  AssertOnTaskQueue();
  MOZ_ASSERT(GetState() != State::SHUTDOWN);

  // We only want to clear mDecodedData when we have resolved the promises.
  if (!mDecodePromise.IsEmpty()) {
    mDecodePromise.Resolve(std::move(mDecodedData), __func__);
    mDecodedData = DecodedData();
  } else if (!mDrainPromise.IsEmpty() &&
             (!mDecodedData.IsEmpty() || GetState() == State::DRAINED)) {
    mDrainPromise.Resolve(std::move(mDecodedData), __func__);
    mDecodedData = DecodedData();
  }
}
825
826
// Called when the EOS output has been observed (may arrive on a foreign
// thread; re-dispatched).  Marks the decoder drained, delivers remaining
// output, and flushes the Java codec so it accepts input again.
void RemoteDataDecoder::DrainComplete() {
  if (!mTaskQueue->IsCurrentThreadIn()) {
    nsresult rv = mTaskQueue->Dispatch(
        NewRunnableMethod("RemoteDataDecoder::DrainComplete", this,
                          &RemoteDataDecoder::DrainComplete));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
    return;
  }
  AssertOnTaskQueue();
  if (GetState() == State::SHUTDOWN) {
    return;
  }
  SetState(State::DRAINED);
  ReturnDecodedData();
  // Make decoder accept input again.
  mJavaDecoder->Flush();
}
844
845
// Report a decoder error (may arrive on a foreign thread; re-dispatched).
// Rejects any outstanding decode/drain promise with aError; ignored after
// shutdown.
void RemoteDataDecoder::Error(const MediaResult& aError) {
  if (!mTaskQueue->IsCurrentThreadIn()) {
    nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<MediaResult>(
        "RemoteDataDecoder::Error", this, &RemoteDataDecoder::Error, aError));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
    return;
  }
  AssertOnTaskQueue();
  if (GetState() == State::SHUTDOWN) {
    return;
  }
  mDecodePromise.RejectIfExists(aError, __func__);
  mDrainPromise.RejectIfExists(aError, __func__);
}
860
861
} // namespace mozilla
862
#undef LOG