/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "AndroidBridge.h"
#include "AndroidDecoderModule.h"
#include "JavaCallbacksSupport.h"
#include "SimpleMap.h"
#include "GLImages.h"
#include "MediaData.h"
#include "MediaInfo.h"
#include "VideoUtils.h"
#include "VPXDecoder.h"

#include "nsIGfxInfo.h"
#include "nsPromiseFlatString.h"
#include "nsThreadUtils.h"
#include "prlog.h"

#include <jni.h>

#undef LOG
#define LOG(arg, ...)                                         \
  MOZ_LOG(sAndroidDecoderModuleLog, mozilla::LogLevel::Debug, \
          ("RemoteDataDecoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))

using namespace mozilla;
using namespace mozilla::gl;
using namespace mozilla::java;
using namespace mozilla::java::sdk;
using media::TimeUnit;

namespace mozilla {

// Hold a reference to the output buffer until we're ready to release it back
// to the Java codec (for rendering or not).
class RenderOrReleaseOutput {
 public:
  RenderOrReleaseOutput(CodecProxy::Param aCodec, Sample::Param aSample)
      : mCodec(aCodec), mSample(aSample) {}

  virtual ~RenderOrReleaseOutput() { ReleaseOutput(false); }

 protected:
  void ReleaseOutput(bool aToRender) {
    if (mCodec && mSample) {
      mCodec->ReleaseOutput(mSample, aToRender);
      mCodec = nullptr;
      mSample = nullptr;
    }
  }

 private:
  CodecProxy::GlobalRef mCodec;
  Sample::GlobalRef mSample;
};

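// Rough data-flow sketch (derived from the code below, not a spec): input
// samples are queued to the Java CodecProxy from the decoder's task queue;
// the codec reports progress through JavaCallbacksSupport (HandleInput /
// HandleOutput / HandleError), and those callbacks may arrive on other
// threads, so they are re-dispatched to mTaskQueue before touching state.
// Output buffers are handed back to the codec through RenderOrReleaseOutput,
// either rendered (video frames sent to the compositor) or simply released.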
class RemoteVideoDecoder : public RemoteDataDecoder {
 public:
  // Render the output to the surface when the frame is sent
  // to compositor, or release it if not presented.
  class CompositeListener : private RenderOrReleaseOutput,
                            public VideoData::Listener {
   public:
    CompositeListener(CodecProxy::Param aCodec, Sample::Param aSample)
        : RenderOrReleaseOutput(aCodec, aSample) {}

    void OnSentToCompositor() override { ReleaseOutput(true); }
  };

  class InputInfo {
   public:
    InputInfo() {}

    InputInfo(const int64_t aDurationUs, const gfx::IntSize& aImageSize,
              const gfx::IntSize& aDisplaySize)
        : mDurationUs(aDurationUs),
          mImageSize(aImageSize),
          mDisplaySize(aDisplaySize) {}

    int64_t mDurationUs;
    gfx::IntSize mImageSize;
    gfx::IntSize mDisplaySize;
  };

  class CallbacksSupport final : public JavaCallbacksSupport {
   public:
    explicit CallbacksSupport(RemoteVideoDecoder* aDecoder)
        : mDecoder(aDecoder) {}

    void HandleInput(int64_t aTimestamp, bool aProcessed) override {
      mDecoder->UpdateInputStatus(aTimestamp, aProcessed);
    }

    void HandleOutput(Sample::Param aSample,
                      java::SampleBuffer::Param aBuffer) override {
      MOZ_ASSERT(!aBuffer, "Video sample should be bufferless");
      // aSample will be implicitly converted into a GlobalRef.
      mDecoder->ProcessOutput(std::move(aSample));
    }

    void HandleError(const MediaResult& aError) override {
      mDecoder->Error(aError);
    }

    friend class RemoteDataDecoder;

   private:
    RemoteVideoDecoder* mDecoder;
  };

  RemoteVideoDecoder(const VideoInfo& aConfig, MediaFormat::Param aFormat,
                     const nsString& aDrmStubId, TaskQueue* aTaskQueue)
      : RemoteDataDecoder(MediaData::Type::VIDEO_DATA, aConfig.mMimeType,
                          aFormat, aDrmStubId, aTaskQueue),
        mConfig(aConfig) {}

  ~RemoteVideoDecoder() {
    if (mSurface) {
      SurfaceAllocator::DisposeSurface(mSurface);
    }
  }

  RefPtr<InitPromise> Init() override {
    BufferInfo::LocalRef bufferInfo;
    if (NS_FAILED(BufferInfo::New(&bufferInfo)) || !bufferInfo) {
      return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
    }
    mInputBufferInfo = bufferInfo;

    mSurface = GeckoSurface::LocalRef(SurfaceAllocator::AcquireSurface(
        mConfig.mImage.width, mConfig.mImage.height, false));
    if (!mSurface) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }

    mSurfaceHandle = mSurface->GetHandle();

    // Register native methods.
    JavaCallbacksSupport::Init();

    mJavaCallbacks = CodecProxy::NativeCallbacks::New();
    if (!mJavaCallbacks) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }
    JavaCallbacksSupport::AttachNative(
        mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));

    mJavaDecoder = CodecProxy::Create(
        false,  // false: create a decoder; true: create an encoder.
        mFormat, mSurface, mJavaCallbacks, mDrmStubId);
    if (mJavaDecoder == nullptr) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }
    mIsCodecSupportAdaptivePlayback =
        mJavaDecoder->IsAdaptivePlaybackSupported();
    mIsHardwareAccelerated = mJavaDecoder->IsHardwareAccelerated();
    return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
  }

  RefPtr<MediaDataDecoder::FlushPromise> Flush() override {
    RefPtr<RemoteVideoDecoder> self = this;
    return InvokeAsync(mTaskQueue, __func__, [self, this]() {
      mInputInfos.Clear();
      mSeekTarget.reset();
      mLatestOutputTime.reset();
      return RemoteDataDecoder::ProcessFlush();
    });
  }

  RefPtr<MediaDataDecoder::DecodePromise> Decode(
      MediaRawData* aSample) override {
    RefPtr<RemoteVideoDecoder> self = this;
    RefPtr<MediaRawData> sample = aSample;
    return InvokeAsync(mTaskQueue, __func__, [self, sample]() {
      const VideoInfo* config = sample->mTrackInfo
                                    ? sample->mTrackInfo->GetAsVideoInfo()
                                    : &self->mConfig;
      MOZ_ASSERT(config);

      InputInfo info(sample->mDuration.ToMicroseconds(), config->mImage,
                     config->mDisplay);
      self->mInputInfos.Insert(sample->mTime.ToMicroseconds(), info);
      return self->RemoteDataDecoder::ProcessDecode(sample);
    });
  }

  bool SupportDecoderRecycling() const override {
    return mIsCodecSupportAdaptivePlayback;
  }

  void SetSeekThreshold(const TimeUnit& aTime) override {
    RefPtr<RemoteVideoDecoder> self = this;
    nsCOMPtr<nsIRunnable> runnable = NS_NewRunnableFunction(
        "RemoteVideoDecoder::SetSeekThreshold", [self, aTime]() {
          if (aTime.IsValid()) {
            self->mSeekTarget = Some(aTime);
          } else {
            self->mSeekTarget.reset();
          }
        });
    nsresult rv = mTaskQueue->Dispatch(runnable.forget());
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
  }

  bool IsUsefulData(const RefPtr<MediaData>& aSample) override {
    AssertOnTaskQueue();

    if (mLatestOutputTime && aSample->mTime < mLatestOutputTime.value()) {
      return false;
    }

    const TimeUnit endTime = aSample->GetEndTime();
    if (mSeekTarget && endTime <= mSeekTarget.value()) {
      return false;
    }

    mSeekTarget.reset();
    mLatestOutputTime = Some(endTime);
    return true;
  }

  bool IsHardwareAccelerated(nsACString& aFailureReason) const override {
    return mIsHardwareAccelerated;
  }

  ConversionRequired NeedsConversion() const override {
    return ConversionRequired::kNeedAnnexB;
  }

 private:
  // Param and LocalRef are only valid for the duration of a JNI method call.
  // Use GlobalRef as the parameter type to keep the Java object referenced
  // until running.
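  //
  // Output handling sketch: the codec was created with mSurface (see Init()),
  // so decoded frames land in that SurfaceTexture and the VideoData built
  // here wraps a SurfaceTextureImage referencing mSurfaceHandle rather than
  // copied pixel data. The codec buffer is released back (and rendered) only
  // when the compositor consumes the frame, via
  // CompositeListener::OnSentToCompositor().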
  void ProcessOutput(Sample::GlobalRef&& aSample) {
    if (!mTaskQueue->IsCurrentThreadIn()) {
      nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<Sample::GlobalRef&&>(
          "RemoteVideoDecoder::ProcessOutput", this,
          &RemoteVideoDecoder::ProcessOutput, std::move(aSample)));
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      Unused << rv;
      return;
    }

    AssertOnTaskQueue();
    if (GetState() == State::SHUTDOWN) {
      aSample->Dispose();
      return;
    }

    UniquePtr<VideoData::Listener> releaseSample(
        new CompositeListener(mJavaDecoder, aSample));

    BufferInfo::LocalRef info = aSample->Info();
    MOZ_ASSERT(info);

    int32_t flags;
    bool ok = NS_SUCCEEDED(info->Flags(&flags));

    int32_t offset;
    ok &= NS_SUCCEEDED(info->Offset(&offset));

    int32_t size;
    ok &= NS_SUCCEEDED(info->Size(&size));

    int64_t presentationTimeUs;
    ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));

    if (!ok) {
      Error(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                        RESULT_DETAIL("VideoCallBack::HandleOutput")));
      return;
    }

    InputInfo inputInfo;
    ok = mInputInfos.Find(presentationTimeUs, inputInfo);
    bool isEOS = !!(flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM);
    if (!ok && !isEOS) {
      // Ignore output with no corresponding input.
      return;
    }

    if (ok && (size > 0 || presentationTimeUs >= 0)) {
      RefPtr<layers::Image> img = new SurfaceTextureImage(
          mSurfaceHandle, inputInfo.mImageSize, false /* NOT continuous */,
          gl::OriginPos::BottomLeft, mConfig.HasAlpha());

      RefPtr<VideoData> v = VideoData::CreateFromImage(
          inputInfo.mDisplaySize, offset,
          TimeUnit::FromMicroseconds(presentationTimeUs),
          TimeUnit::FromMicroseconds(inputInfo.mDurationUs), img,
          !!(flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME),
          TimeUnit::FromMicroseconds(presentationTimeUs));

      v->SetListener(std::move(releaseSample));
      RemoteDataDecoder::UpdateOutputStatus(std::move(v));
    }

    if (isEOS) {
      DrainComplete();
    }
  }

  const VideoInfo mConfig;
  GeckoSurface::GlobalRef mSurface;
  AndroidSurfaceTextureHandle mSurfaceHandle;
  // Only accessed on reader's task queue.
  bool mIsCodecSupportAdaptivePlayback = false;
  // Can be accessed on any thread, but only written during init.
  bool mIsHardwareAccelerated = false;
  // Accessed on mTaskQueue and the reader's TaskQueue. SimpleMap is
  // thread-safe, so this is fine.
  SimpleMap<InputInfo> mInputInfos;
  // Only accessed on the TaskQueue.
  Maybe<TimeUnit> mSeekTarget;
  Maybe<TimeUnit> mLatestOutputTime;
};

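// Audio counterpart of RemoteVideoDecoder: decoded PCM is returned in a
// SampleBuffer next to each Sample and copied into an AlignedAudioBuffer of
// 16-bit samples (see ProcessOutput()). The constructor also attaches the
// codec-specific config as "csd-0" when the MediaFormat does not already
// carry one; for AAC this is typically the AudioSpecificConfig, though this
// code only forwards the bytes as-is.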
class RemoteAudioDecoder : public RemoteDataDecoder {
 public:
  RemoteAudioDecoder(const AudioInfo& aConfig, MediaFormat::Param aFormat,
                     const nsString& aDrmStubId, TaskQueue* aTaskQueue)
      : RemoteDataDecoder(MediaData::Type::AUDIO_DATA, aConfig.mMimeType,
                          aFormat, aDrmStubId, aTaskQueue) {
    JNIEnv* const env = jni::GetEnvForThread();

    bool formatHasCSD = false;
    NS_ENSURE_SUCCESS_VOID(
        aFormat->ContainsKey(NS_LITERAL_STRING("csd-0"), &formatHasCSD));

    if (!formatHasCSD && aConfig.mCodecSpecificConfig->Length() >= 2) {
      jni::ByteBuffer::LocalRef buffer(env);
      buffer = jni::ByteBuffer::New(aConfig.mCodecSpecificConfig->Elements(),
                                    aConfig.mCodecSpecificConfig->Length());
      NS_ENSURE_SUCCESS_VOID(
          aFormat->SetByteBuffer(NS_LITERAL_STRING("csd-0"), buffer));
    }
  }

  RefPtr<InitPromise> Init() override {
    BufferInfo::LocalRef bufferInfo;
    if (NS_FAILED(BufferInfo::New(&bufferInfo)) || !bufferInfo) {
      return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
    }
    mInputBufferInfo = bufferInfo;

    // Register native methods.
    JavaCallbacksSupport::Init();

    mJavaCallbacks = CodecProxy::NativeCallbacks::New();
    if (!mJavaCallbacks) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }
    JavaCallbacksSupport::AttachNative(
        mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));

    mJavaDecoder =
        CodecProxy::Create(false, mFormat, nullptr, mJavaCallbacks, mDrmStubId);
    if (mJavaDecoder == nullptr) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }

    return InitPromise::CreateAndResolve(TrackInfo::kAudioTrack, __func__);
  }

  RefPtr<FlushPromise> Flush() override {
    RefPtr<RemoteAudioDecoder> self = this;
    return InvokeAsync(mTaskQueue, __func__, [self]() {
      self->mFirstDemuxedSampleTime.reset();
      return self->RemoteDataDecoder::ProcessFlush();
    });
  }

  RefPtr<DecodePromise> Decode(MediaRawData* aSample) override {
    RefPtr<RemoteAudioDecoder> self = this;
    RefPtr<MediaRawData> sample = aSample;
    return InvokeAsync(mTaskQueue, __func__, [self, sample]() {
      if (!self->mFirstDemuxedSampleTime) {
        MOZ_ASSERT(sample->mTime.IsValid());
        self->mFirstDemuxedSampleTime.emplace(sample->mTime);
      }
      return self->RemoteDataDecoder::ProcessDecode(sample);
    });
  }

 private:
  class CallbacksSupport final : public JavaCallbacksSupport {
   public:
    explicit CallbacksSupport(RemoteAudioDecoder* aDecoder)
        : mDecoder(aDecoder) {}

    void HandleInput(int64_t aTimestamp, bool aProcessed) override {
      mDecoder->UpdateInputStatus(aTimestamp, aProcessed);
    }

    void HandleOutput(Sample::Param aSample,
                      java::SampleBuffer::Param aBuffer) override {
      MOZ_ASSERT(aBuffer, "Audio sample should have buffer");
      // aSample will be implicitly converted into a GlobalRef.
      mDecoder->ProcessOutput(std::move(aSample), std::move(aBuffer));
    }

    void HandleOutputFormatChanged(MediaFormat::Param aFormat) override {
      int32_t outputChannels = 0;
      aFormat->GetInteger(NS_LITERAL_STRING("channel-count"), &outputChannels);
      AudioConfig::ChannelLayout layout(outputChannels);
      if (!layout.IsValid()) {
        mDecoder->Error(MediaResult(
            NS_ERROR_DOM_MEDIA_FATAL_ERR,
            RESULT_DETAIL("Invalid channel layout:%d", outputChannels)));
        return;
      }

      int32_t sampleRate = 0;
      aFormat->GetInteger(NS_LITERAL_STRING("sample-rate"), &sampleRate);
      LOG("Audio output format changed: channels:%d sample rate:%d",
          outputChannels, sampleRate);

      mDecoder->ProcessOutputFormatChange(outputChannels, sampleRate);
    }

    void HandleError(const MediaResult& aError) override {
      mDecoder->Error(aError);
    }

   private:
    RemoteAudioDecoder* mDecoder;
  };

  bool IsSampleTimeSmallerThanFirstDemuxedSampleTime(int64_t aTime) const {
    return mFirstDemuxedSampleTime->ToMicroseconds() > aTime;
  }

  bool ShouldDiscardSample() const {
    AssertOnTaskQueue();
    // HandleOutput() runs on the Android binder thread pool and could be
    // preempted by the RemoteDataDecoder task queue. That means
    // ProcessOutput() could be scheduled after ProcessShutdown() or
    // ProcessFlush(). We don't need samples that are returned after calling
    // Shutdown() or Flush(). We can check mFirstDemuxedSampleTime to know
    // whether Flush() has been called, because it is reset in Flush().
    return GetState() == State::SHUTDOWN || !mFirstDemuxedSampleTime;
  }

  // Param and LocalRef are only valid for the duration of a JNI method call.
  // Use GlobalRef as the parameter type to keep the Java object referenced
  // until running.
  void ProcessOutput(Sample::GlobalRef&& aSample,
                     SampleBuffer::GlobalRef&& aBuffer) {
    if (!mTaskQueue->IsCurrentThreadIn()) {
      nsresult rv = mTaskQueue->Dispatch(
          NewRunnableMethod<Sample::GlobalRef&&, SampleBuffer::GlobalRef&&>(
              "RemoteAudioDecoder::ProcessOutput", this,
              &RemoteAudioDecoder::ProcessOutput, std::move(aSample),
              std::move(aBuffer)));
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      Unused << rv;
      return;
    }

    AssertOnTaskQueue();

    if (ShouldDiscardSample() || !aBuffer->IsValid()) {
      aSample->Dispose();
      return;
    }

    RenderOrReleaseOutput autoRelease(mJavaDecoder, aSample);

    BufferInfo::LocalRef info = aSample->Info();
    MOZ_ASSERT(info);

    int32_t flags;
    bool ok = NS_SUCCEEDED(info->Flags(&flags));

    int32_t offset;
    ok &= NS_SUCCEEDED(info->Offset(&offset));

    int64_t presentationTimeUs;
    ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));

    int32_t size;
    ok &= NS_SUCCEEDED(info->Size(&size));

    if (!ok ||
        IsSampleTimeSmallerThanFirstDemuxedSampleTime(presentationTimeUs)) {
      Error(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__));
      return;
    }

    if (size > 0) {
#ifdef MOZ_SAMPLE_TYPE_S16
      const int32_t numSamples = size / 2;
#else
#  error We only support 16-bit integer PCM
#endif

      AlignedAudioBuffer audio(numSamples);
      if (!audio) {
        Error(MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__));
        return;
      }

      jni::ByteBuffer::LocalRef dest = jni::ByteBuffer::New(audio.get(), size);
      aBuffer->WriteToByteBuffer(dest, offset, size);

      RefPtr<AudioData> data =
          new AudioData(0, TimeUnit::FromMicroseconds(presentationTimeUs),
                        std::move(audio), mOutputChannels, mOutputSampleRate);

      UpdateOutputStatus(std::move(data));
    }

    if ((flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM) != 0) {
      DrainComplete();
    }
  }

  void ProcessOutputFormatChange(int32_t aChannels, int32_t aSampleRate) {
    if (!mTaskQueue->IsCurrentThreadIn()) {
      nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<int32_t, int32_t>(
          "RemoteAudioDecoder::ProcessOutputFormatChange", this,
          &RemoteAudioDecoder::ProcessOutputFormatChange, aChannels,
          aSampleRate));
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      Unused << rv;
      return;
    }

    AssertOnTaskQueue();

    mOutputChannels = aChannels;
    mOutputSampleRate = aSampleRate;
  }

  int32_t mOutputChannels;
  int32_t mOutputSampleRate;
  Maybe<TimeUnit> mFirstDemuxedSampleTime;
};

already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateAudioDecoder(
    const CreateDecoderParams& aParams, const nsString& aDrmStubId,
    CDMProxy* aProxy) {
  const AudioInfo& config = aParams.AudioConfig();
  MediaFormat::LocalRef format;
  NS_ENSURE_SUCCESS(
      MediaFormat::CreateAudioFormat(config.mMimeType, config.mRate,
                                     config.mChannels, &format),
      nullptr);

  RefPtr<MediaDataDecoder> decoder =
      new RemoteAudioDecoder(config, format, aDrmStubId, aParams.mTaskQueue);
  if (aProxy) {
    decoder = new EMEMediaDataDecoderProxy(aParams, decoder.forget(), aProxy);
  }
  return decoder.forget();
}

already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateVideoDecoder(
    const CreateDecoderParams& aParams, const nsString& aDrmStubId,
    CDMProxy* aProxy) {
  const VideoInfo& config = aParams.VideoConfig();
  MediaFormat::LocalRef format;
  NS_ENSURE_SUCCESS(MediaFormat::CreateVideoFormat(
                        TranslateMimeType(config.mMimeType),
                        config.mImage.width, config.mImage.height, &format),
                    nullptr);

  RefPtr<MediaDataDecoder> decoder =
      new RemoteVideoDecoder(config, format, aDrmStubId, aParams.mTaskQueue);
  if (aProxy) {
    decoder = new EMEMediaDataDecoderProxy(aParams, decoder.forget(), aProxy);
  }
  return decoder.forget();
}

RemoteDataDecoder::RemoteDataDecoder(MediaData::Type aType,
                                     const nsACString& aMimeType,
                                     MediaFormat::Param aFormat,
                                     const nsString& aDrmStubId,
                                     TaskQueue* aTaskQueue)
    : mType(aType),
      mMimeType(aMimeType),
      mFormat(aFormat),
      mDrmStubId(aDrmStubId),
      mTaskQueue(aTaskQueue),
      mNumPendingInputs(0) {}

RefPtr<MediaDataDecoder::FlushPromise> RemoteDataDecoder::Flush() {
  RefPtr<RemoteDataDecoder> self = this;
  return InvokeAsync(mTaskQueue, this, __func__,
                     &RemoteDataDecoder::ProcessFlush);
}

RefPtr<MediaDataDecoder::FlushPromise> RemoteDataDecoder::ProcessFlush() {
  AssertOnTaskQueue();

  mDecodedData = DecodedData();
  UpdatePendingInputStatus(PendingOp::CLEAR);
  mDecodePromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
  mDrainPromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
  SetState(State::DRAINED);
  mJavaDecoder->Flush();
  return FlushPromise::CreateAndResolve(true, __func__);
}

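// Draining follows the MediaCodec end-of-stream convention: Drain() queues an
// empty input buffer flagged BUFFER_FLAG_END_OF_STREAM, the codec echoes that
// flag on its final output, and the subclasses' ProcessOutput() translate it
// into DrainComplete().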
RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::Drain() {
  RefPtr<RemoteDataDecoder> self = this;
  return InvokeAsync(mTaskQueue, __func__, [self, this]() {
    if (GetState() == State::SHUTDOWN) {
      return DecodePromise::CreateAndReject(NS_ERROR_DOM_MEDIA_CANCELED,
                                            __func__);
    }
    RefPtr<DecodePromise> p = mDrainPromise.Ensure(__func__);
    if (GetState() == State::DRAINED) {
      // There's no operation to perform other than returning any already
      // decoded data.
      ReturnDecodedData();
      return p;
    }

    if (GetState() == State::DRAINING) {
      // Draining operation already pending, let it complete its course.
      return p;
    }

    SetState(State::DRAINING);
    self->mInputBufferInfo->Set(0, 0, -1,
                                MediaCodec::BUFFER_FLAG_END_OF_STREAM);
    mJavaDecoder->Input(nullptr, self->mInputBufferInfo, nullptr);
    return p;
  });
}

RefPtr<ShutdownPromise> RemoteDataDecoder::Shutdown() {
  LOG("");
  RefPtr<RemoteDataDecoder> self = this;
  return InvokeAsync(mTaskQueue, this, __func__,
                     &RemoteDataDecoder::ProcessShutdown);
}

RefPtr<ShutdownPromise> RemoteDataDecoder::ProcessShutdown() {
  AssertOnTaskQueue();
  SetState(State::SHUTDOWN);
  if (mJavaDecoder) {
    mJavaDecoder->Release();
    mJavaDecoder = nullptr;
  }

  if (mJavaCallbacks) {
    JavaCallbacksSupport::GetNative(mJavaCallbacks)->Cancel();
    JavaCallbacksSupport::DisposeNative(mJavaCallbacks);
    mJavaCallbacks = nullptr;
  }

  mFormat = nullptr;

  return ShutdownPromise::CreateAndResolve(true, __func__);
}

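// Builds the MediaCodec CryptoInfo for an encrypted sample: widens the
// uint16_t plain sizes to 32 bits, folds any leading clear data (e.g. codec
// specific data not covered by the subsample entries) into the first plain
// size, and zero-pads the IV to the 16 bytes expected for AES-CTR.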
static CryptoInfo::LocalRef GetCryptoInfoFromSample(
    const MediaRawData* aSample) {
  auto& cryptoObj = aSample->mCrypto;

  if (!cryptoObj.IsEncrypted()) {
    return nullptr;
  }

  CryptoInfo::LocalRef cryptoInfo;
  nsresult rv = CryptoInfo::New(&cryptoInfo);
  NS_ENSURE_SUCCESS(rv, nullptr);

  uint32_t numSubSamples = std::min<uint32_t>(
      cryptoObj.mPlainSizes.Length(), cryptoObj.mEncryptedSizes.Length());

  uint32_t totalSubSamplesSize = 0;
  for (auto& size : cryptoObj.mEncryptedSizes) {
    totalSubSamplesSize += size;
  }

  // mPlainSizes is uint16_t, need to transform to uint32_t first.
  nsTArray<uint32_t> plainSizes;
  for (auto& size : cryptoObj.mPlainSizes) {
    totalSubSamplesSize += size;
    plainSizes.AppendElement(size);
  }

  uint32_t codecSpecificDataSize = aSample->Size() - totalSubSamplesSize;
  // Size of codec specific data ("CSD") for Android MediaCodec usage should be
  // included in the 1st plain size.
  plainSizes[0] += codecSpecificDataSize;

  static const int kExpectedIVLength = 16;
  auto tempIV(cryptoObj.mIV);
  auto tempIVLength = tempIV.Length();
  MOZ_ASSERT(tempIVLength <= kExpectedIVLength);
  for (size_t i = tempIVLength; i < kExpectedIVLength; i++) {
    // Padding with 0
    tempIV.AppendElement(0);
  }

  auto numBytesOfPlainData = mozilla::jni::IntArray::New(
      reinterpret_cast<int32_t*>(&plainSizes[0]), plainSizes.Length());

  auto numBytesOfEncryptedData = mozilla::jni::IntArray::New(
      reinterpret_cast<const int32_t*>(&cryptoObj.mEncryptedSizes[0]),
      cryptoObj.mEncryptedSizes.Length());
  auto iv = mozilla::jni::ByteArray::New(reinterpret_cast<int8_t*>(&tempIV[0]),
                                         tempIV.Length());
  auto keyId = mozilla::jni::ByteArray::New(
      reinterpret_cast<const int8_t*>(&cryptoObj.mKeyId[0]),
      cryptoObj.mKeyId.Length());
  cryptoInfo->Set(numSubSamples, numBytesOfPlainData, numBytesOfEncryptedData,
                  keyId, iv, MediaCodec::CRYPTO_MODE_AES_CTR);

  return cryptoInfo;
}

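// Decoding is asynchronous: Input() only queues the sample with the codec,
// and the promise returned here is resolved later from ReturnDecodedData()
// once progress is reported back through UpdateInputStatus() /
// UpdateOutputStatus().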
RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::Decode(
    MediaRawData* aSample) {
  RefPtr<RemoteDataDecoder> self = this;
  RefPtr<MediaRawData> sample = aSample;
  return InvokeAsync(mTaskQueue, __func__,
                     [self, sample]() { return self->ProcessDecode(sample); });
}

RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::ProcessDecode(
    MediaRawData* aSample) {
  AssertOnTaskQueue();
  MOZ_ASSERT(aSample != nullptr);
  jni::ByteBuffer::LocalRef bytes = jni::ByteBuffer::New(
      const_cast<uint8_t*>(aSample->Data()), aSample->Size());

  SetState(State::DRAINABLE);
  mInputBufferInfo->Set(0, aSample->Size(), aSample->mTime.ToMicroseconds(), 0);
  return mJavaDecoder->Input(bytes, mInputBufferInfo,
                             GetCryptoInfoFromSample(aSample))
             ? mDecodePromise.Ensure(__func__)
             : DecodePromise::CreateAndReject(
                   MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__), __func__);
}

void RemoteDataDecoder::UpdatePendingInputStatus(PendingOp aOp) {
  AssertOnTaskQueue();
  switch (aOp) {
    case PendingOp::INCREASE:
      mNumPendingInputs++;
      break;
    case PendingOp::DECREASE:
      mNumPendingInputs--;
      break;
    case PendingOp::CLEAR:
      mNumPendingInputs = 0;
      break;
  }
}

void RemoteDataDecoder::UpdateInputStatus(int64_t aTimestamp, bool aProcessed) {
  if (!mTaskQueue->IsCurrentThreadIn()) {
    nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<int64_t, bool>(
        "RemoteDataDecoder::UpdateInputStatus", this,
        &RemoteDataDecoder::UpdateInputStatus, aTimestamp, aProcessed));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
    return;
  }
  AssertOnTaskQueue();
  if (GetState() == State::SHUTDOWN) {
    return;
  }

  if (!aProcessed) {
    UpdatePendingInputStatus(PendingOp::INCREASE);
  } else if (HasPendingInputs()) {
    UpdatePendingInputStatus(PendingOp::DECREASE);
  }

  if (!HasPendingInputs() ||    // Input has been processed, request the next.
      !mDecodedData.IsEmpty()) {  // Previous output arrived before Decode().
    ReturnDecodedData();
  }
}

void RemoteDataDecoder::UpdateOutputStatus(RefPtr<MediaData>&& aSample) {
  AssertOnTaskQueue();
  if (GetState() == State::SHUTDOWN) {
    return;
  }
  if (IsUsefulData(aSample)) {
    mDecodedData.AppendElement(std::move(aSample));
  }
  ReturnDecodedData();
}

void RemoteDataDecoder::ReturnDecodedData() {
  AssertOnTaskQueue();
  MOZ_ASSERT(GetState() != State::SHUTDOWN);

  // We only want to clear mDecodedData when we have resolved the promises.
  if (!mDecodePromise.IsEmpty()) {
    mDecodePromise.Resolve(std::move(mDecodedData), __func__);
    mDecodedData = DecodedData();
  } else if (!mDrainPromise.IsEmpty() &&
             (!mDecodedData.IsEmpty() || GetState() == State::DRAINED)) {
    mDrainPromise.Resolve(std::move(mDecodedData), __func__);
    mDecodedData = DecodedData();
  }
}

void RemoteDataDecoder::DrainComplete() {
  if (!mTaskQueue->IsCurrentThreadIn()) {
    nsresult rv = mTaskQueue->Dispatch(
        NewRunnableMethod("RemoteDataDecoder::DrainComplete", this,
                          &RemoteDataDecoder::DrainComplete));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
    return;
  }
  AssertOnTaskQueue();
  if (GetState() == State::SHUTDOWN) {
    return;
  }
  SetState(State::DRAINED);
  ReturnDecodedData();
  // Make decoder accept input again.
  mJavaDecoder->Flush();
}

void RemoteDataDecoder::Error(const MediaResult& aError) {
  if (!mTaskQueue->IsCurrentThreadIn()) {
    nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<MediaResult>(
        "RemoteDataDecoder::Error", this, &RemoteDataDecoder::Error, aError));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
    return;
  }
  AssertOnTaskQueue();
  if (GetState() == State::SHUTDOWN) {
    return;
  }
  mDecodePromise.RejectIfExists(aError, __func__);
  mDrainPromise.RejectIfExists(aError, __func__);
}

}  // namespace mozilla