1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2 /* This Source Code Form is subject to the terms of the Mozilla Public
3 * License, v. 2.0. If a copy of the MPL was not distributed with this
4 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
5
6 #include "gtest/gtest.h"
7
8 #include "AnnexB.h"
9 #include "ImageContainer.h"
10 #include "mozilla/AbstractThread.h"
11 #include "mozilla/SpinEventLoopUntil.h"
12 #include "mozilla/media/MediaUtils.h" // For media::Await
13 #include "nsMimeTypes.h"
14 #include "PEMFactory.h"
15 #include "TimeUnits.h"
16 #include "VideoUtils.h"
17 #include <algorithm>
18
19 #include <fstream>
20
// Skip (return early from) the current test when no platform encoder module
// supports the given MIME type. Must appear at the top of a test body.
#define SKIP_IF_NOT_SUPPORTED(mimeType)                     \
  do {                                                      \
    RefPtr<PEMFactory> f(new PEMFactory());                 \
    if (!f->SupportsMimeType(nsLiteralCString(mimeType))) { \
      return;                                               \
    }                                                       \
  } while (0)

// Geometry and timing of the synthetic test clip.
#define BLOCK_SIZE 64  // Side length (pixels) of one chessboard square.
#define WIDTH 640
#define HEIGHT 480
#define NUM_FRAMES 150UL
#define FRAME_RATE 30
#define FRAME_DURATION (1000000 / FRAME_RATE)  // Microseconds per frame.
#define BIT_RATE (1000 * 1000)  // 1Mbps
#define KEYFRAME_INTERVAL FRAME_RATE  // 1 keyframe per second

using namespace mozilla;

// Dimensions shared by every generated frame.
static gfx::IntSize kImageSize(WIDTH, HEIGHT);
41
42 class MediaDataEncoderTest : public testing::Test {
43 protected:
SetUp()44 void SetUp() override { mData.Init(kImageSize); }
45
TearDown()46 void TearDown() override { mData.Deinit(); }
47
48 public:
49 struct FrameSource final {
50 layers::PlanarYCbCrData mYUV;
51 UniquePtr<uint8_t[]> mBuffer;
52 RefPtr<layers::BufferRecycleBin> mRecycleBin;
53 int16_t mColorStep = 4;
54
InitMediaDataEncoderTest::FrameSource55 void Init(const gfx::IntSize& aSize) {
56 mYUV.mPicSize = aSize;
57 mYUV.mYStride = aSize.width;
58 mYUV.mYSize = aSize;
59 mYUV.mCbCrStride = aSize.width / 2;
60 mYUV.mCbCrSize = gfx::IntSize(aSize.width / 2, aSize.height / 2);
61 size_t bufferSize = mYUV.mYStride * mYUV.mYSize.height +
62 mYUV.mCbCrStride * mYUV.mCbCrSize.height +
63 mYUV.mCbCrStride * mYUV.mCbCrSize.height;
64 mBuffer = MakeUnique<uint8_t[]>(bufferSize);
65 std::fill_n(mBuffer.get(), bufferSize, 0x7F);
66 mYUV.mYChannel = mBuffer.get();
67 mYUV.mCbChannel = mYUV.mYChannel + mYUV.mYStride * mYUV.mYSize.height;
68 mYUV.mCrChannel =
69 mYUV.mCbChannel + mYUV.mCbCrStride * mYUV.mCbCrSize.height;
70 mRecycleBin = new layers::BufferRecycleBin();
71 }
72
DeinitMediaDataEncoderTest::FrameSource73 void Deinit() {
74 mBuffer.reset();
75 mRecycleBin = nullptr;
76 }
77
GetFrameMediaDataEncoderTest::FrameSource78 already_AddRefed<MediaData> GetFrame(const size_t aIndex) {
79 Draw(aIndex);
80 RefPtr<layers::PlanarYCbCrImage> img =
81 new layers::RecyclingPlanarYCbCrImage(mRecycleBin);
82 img->CopyData(mYUV);
83 RefPtr<MediaData> frame = VideoData::CreateFromImage(
84 kImageSize, 0,
85 media::TimeUnit::FromMicroseconds(aIndex * FRAME_DURATION),
86 media::TimeUnit::FromMicroseconds(FRAME_DURATION), img,
87 (aIndex & 0xF) == 0,
88 media::TimeUnit::FromMicroseconds(aIndex * FRAME_DURATION));
89 return frame.forget();
90 }
91
DrawChessboardMediaDataEncoderTest::FrameSource92 void DrawChessboard(uint8_t* aAddr, const size_t aWidth,
93 const size_t aHeight, const size_t aOffset) {
94 uint8_t pixels[2][BLOCK_SIZE];
95 size_t x = aOffset % BLOCK_SIZE;
96 if ((aOffset / BLOCK_SIZE) & 1) {
97 x = BLOCK_SIZE - x;
98 }
99 for (size_t i = 0; i < x; i++) {
100 pixels[0][i] = 0x00;
101 pixels[1][i] = 0xFF;
102 }
103 for (size_t i = x; i < BLOCK_SIZE; i++) {
104 pixels[0][i] = 0xFF;
105 pixels[1][i] = 0x00;
106 }
107
108 uint8_t* p = aAddr;
109 for (size_t row = 0; row < aHeight; row++) {
110 for (size_t col = 0; col < aWidth; col += BLOCK_SIZE) {
111 memcpy(p, pixels[((row / BLOCK_SIZE) + (col / BLOCK_SIZE)) % 2],
112 BLOCK_SIZE);
113 p += BLOCK_SIZE;
114 }
115 }
116 }
117
DrawMediaDataEncoderTest::FrameSource118 void Draw(const size_t aIndex) {
119 DrawChessboard(mYUV.mYChannel, mYUV.mYSize.width, mYUV.mYSize.height,
120 aIndex << 1);
121 int16_t color = mYUV.mCbChannel[0] + mColorStep;
122 if (color > 255 || color < 0) {
123 mColorStep = -mColorStep;
124 color = mYUV.mCbChannel[0] + mColorStep;
125 }
126
127 size_t size = (mYUV.mCrChannel - mYUV.mCbChannel);
128
129 std::fill_n(mYUV.mCbChannel, size, static_cast<uint8_t>(color));
130 std::fill_n(mYUV.mCrChannel, size, 0xFF - static_cast<uint8_t>(color));
131 }
132 };
133
134 public:
135 FrameSource mData;
136 };
137
CreateH264Encoder(MediaDataEncoder::Usage aUsage=MediaDataEncoder::Usage::Realtime,MediaDataEncoder::PixelFormat aPixelFormat=MediaDataEncoder::PixelFormat::YUV420P,int32_t aWidth=WIDTH,int32_t aHeight=HEIGHT,const Maybe<MediaDataEncoder::H264Specific> & aSpecific=Some (MediaDataEncoder::H264Specific (KEYFRAME_INTERVAL,MediaDataEncoder::H264Specific::ProfileLevel::BaselineAutoLevel)))138 static already_AddRefed<MediaDataEncoder> CreateH264Encoder(
139 MediaDataEncoder::Usage aUsage = MediaDataEncoder::Usage::Realtime,
140 MediaDataEncoder::PixelFormat aPixelFormat =
141 MediaDataEncoder::PixelFormat::YUV420P,
142 int32_t aWidth = WIDTH, int32_t aHeight = HEIGHT,
143 const Maybe<MediaDataEncoder::H264Specific>& aSpecific =
144 Some(MediaDataEncoder::H264Specific(
145 KEYFRAME_INTERVAL,
146 MediaDataEncoder::H264Specific::ProfileLevel::BaselineAutoLevel))) {
147 RefPtr<PEMFactory> f(new PEMFactory());
148
149 if (!f->SupportsMimeType(nsLiteralCString(VIDEO_MP4))) {
150 return nullptr;
151 }
152
153 VideoInfo videoInfo(aWidth, aHeight);
154 videoInfo.mMimeType = nsLiteralCString(VIDEO_MP4);
155 const RefPtr<TaskQueue> taskQueue(
156 new TaskQueue(GetMediaThreadPool(MediaThreadType::PLATFORM_ENCODER)));
157
158 RefPtr<MediaDataEncoder> e;
159 if (aSpecific) {
160 e = f->CreateEncoder(CreateEncoderParams(
161 videoInfo /* track info */, aUsage, taskQueue, aPixelFormat,
162 FRAME_RATE /* FPS */, BIT_RATE /* bitrate */, aSpecific.value()));
163 } else {
164 e = f->CreateEncoder(CreateEncoderParams(
165 videoInfo /* track info */, aUsage, taskQueue, aPixelFormat,
166 FRAME_RATE /* FPS */, BIT_RATE /* bitrate */));
167 }
168
169 return e.forget();
170 }
171
WaitForShutdown(RefPtr<MediaDataEncoder> aEncoder)172 void WaitForShutdown(RefPtr<MediaDataEncoder> aEncoder) {
173 MOZ_ASSERT(aEncoder);
174
175 Maybe<bool> result;
176 // media::Await() supports exclusive promises only, but ShutdownPromise is
177 // not.
178 aEncoder->Shutdown()->Then(
179 AbstractThread::MainThread(), __func__,
180 [&result](bool rv) {
181 EXPECT_TRUE(rv);
182 result = Some(true);
183 },
184 [&result]() {
185 FAIL() << "Shutdown should never be rejected";
186 result = Some(false);
187 });
188 SpinEventLoopUntil([&result]() { return result; });
189 }
190
// An H.264 encoder can be created (and shut down) when the platform
// supports VIDEO_MP4.
TEST_F(MediaDataEncoderTest, H264Create) {
  SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

  RefPtr<MediaDataEncoder> e = CreateH264Encoder();

  EXPECT_TRUE(e);

  WaitForShutdown(e);
}
200
EnsureInit(RefPtr<MediaDataEncoder> aEncoder)201 static bool EnsureInit(RefPtr<MediaDataEncoder> aEncoder) {
202 if (!aEncoder) {
203 return false;
204 }
205
206 bool succeeded;
207 media::Await(
208 GetMediaThreadPool(MediaThreadType::SUPERVISOR), aEncoder->Init(),
209 [&succeeded](TrackInfo::TrackType t) {
210 EXPECT_EQ(TrackInfo::TrackType::kVideoTrack, t);
211 succeeded = true;
212 },
213 [&succeeded](MediaResult r) { succeeded = false; });
214 return succeeded;
215 }
216
// Init without H264Specific settings: expected to succeed everywhere except
// Android, whose encoder requires an I-frame interval.
TEST_F(MediaDataEncoderTest, H264InitWithoutSpecific) {
  SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

  RefPtr<MediaDataEncoder> e = CreateH264Encoder(
      MediaDataEncoder::Usage::Realtime, MediaDataEncoder::PixelFormat::YUV420P,
      WIDTH, HEIGHT, Nothing());

#if defined(MOZ_WIDGET_ANDROID)  // Android encoder requires I-frame interval
  EXPECT_FALSE(EnsureInit(e));
#else
  EXPECT_TRUE(EnsureInit(e));
#endif

  WaitForShutdown(e);
}
232
// An H.264 encoder created with the default (fully specified) parameters
// initializes successfully.
TEST_F(MediaDataEncoderTest, H264Init) {
  SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

  RefPtr<MediaDataEncoder> e = CreateH264Encoder();

  EXPECT_TRUE(EnsureInit(e));

  WaitForShutdown(e);
}
242
Encode(const RefPtr<MediaDataEncoder> aEncoder,const size_t aNumFrames,MediaDataEncoderTest::FrameSource & aSource)243 static MediaDataEncoder::EncodedData Encode(
244 const RefPtr<MediaDataEncoder> aEncoder, const size_t aNumFrames,
245 MediaDataEncoderTest::FrameSource& aSource) {
246 MediaDataEncoder::EncodedData output;
247 bool succeeded;
248 for (size_t i = 0; i < aNumFrames; i++) {
249 RefPtr<MediaData> frame = aSource.GetFrame(i);
250 media::Await(
251 GetMediaThreadPool(MediaThreadType::SUPERVISOR),
252 aEncoder->Encode(frame),
253 [&output, &succeeded](MediaDataEncoder::EncodedData encoded) {
254 output.AppendElements(std::move(encoded));
255 succeeded = true;
256 },
257 [&succeeded](MediaResult r) { succeeded = false; });
258 EXPECT_TRUE(succeeded);
259 if (!succeeded) {
260 return output;
261 }
262 }
263
264 size_t pending = 0;
265 do {
266 media::Await(
267 GetMediaThreadPool(MediaThreadType::SUPERVISOR), aEncoder->Drain(),
268 [&pending, &output, &succeeded](MediaDataEncoder::EncodedData encoded) {
269 pending = encoded.Length();
270 output.AppendElements(std::move(encoded));
271 succeeded = true;
272 },
273 [&succeeded](MediaResult r) { succeeded = false; });
274 EXPECT_TRUE(succeeded);
275 if (!succeeded) {
276 return output;
277 }
278 } while (pending > 0);
279
280 return output;
281 }
282
// A Realtime-usage encoder emits one AnnexB sample for one input frame.
TEST_F(MediaDataEncoderTest, H264EncodeOneFrameAsAnnexB) {
  SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

  RefPtr<MediaDataEncoder> e = CreateH264Encoder();
  // Assert Init success instead of silently ignoring the result.
  EXPECT_TRUE(EnsureInit(e));

  MediaDataEncoder::EncodedData output = Encode(e, 1UL, mData);
  EXPECT_EQ(output.Length(), 1UL);
  EXPECT_TRUE(AnnexB::IsAnnexB(output[0]));

  WaitForShutdown(e);
}
295
// A Realtime-usage encoder emits one AnnexB sample per input frame across a
// multi-keyframe sequence.
TEST_F(MediaDataEncoderTest, EncodeMultipleFramesAsAnnexB) {
  SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

  RefPtr<MediaDataEncoder> e = CreateH264Encoder();
  // Assert Init success instead of silently ignoring the result.
  EXPECT_TRUE(EnsureInit(e));

  MediaDataEncoder::EncodedData output = Encode(e, NUM_FRAMES, mData);
  EXPECT_EQ(output.Length(), NUM_FRAMES);
  // const ref avoids a RefPtr refcount copy per sample.
  for (const auto& frame : output) {
    EXPECT_TRUE(AnnexB::IsAnnexB(frame));
  }

  WaitForShutdown(e);
}
310
// A Record-usage encoder emits AVCC (not AnnexB) samples.
TEST_F(MediaDataEncoderTest, EncodeMultipleFramesAsAVCC) {
  SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

  RefPtr<MediaDataEncoder> e =
      CreateH264Encoder(MediaDataEncoder::Usage::Record);
  // Assert Init success instead of silently ignoring the result.
  EXPECT_TRUE(EnsureInit(e));

  MediaDataEncoder::EncodedData output = Encode(e, NUM_FRAMES, mData);
  EXPECT_EQ(output.Length(), NUM_FRAMES);
  // Only 1st frame has extra data. The original discarded this result;
  // assert it so a non-AVCC first sample actually fails the test.
  EXPECT_TRUE(AnnexB::IsAVCC(output[0]));
  // const ref avoids a RefPtr refcount copy per sample.
  for (const auto& frame : output) {
    EXPECT_FALSE(AnnexB::IsAnnexB(frame));
  }

  WaitForShutdown(e);
}
327
328 #ifndef DEBUG // Zero width or height will assert/crash in debug builds.
// Zero-sized dimensions produce an encoder object whose Init must fail.
// (Compiled out in debug builds, where zero width/height asserts instead.)
TEST_F(MediaDataEncoderTest, InvalidSize) {
  SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

  RefPtr<MediaDataEncoder> e0x0 =
      CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
                        MediaDataEncoder::PixelFormat::YUV420P, 0, 0);
  EXPECT_NE(e0x0, nullptr);
  EXPECT_FALSE(EnsureInit(e0x0));

  RefPtr<MediaDataEncoder> e0x1 =
      CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
                        MediaDataEncoder::PixelFormat::YUV420P, 0, 1);
  EXPECT_NE(e0x1, nullptr);
  EXPECT_FALSE(EnsureInit(e0x1));

  RefPtr<MediaDataEncoder> e1x0 =
      CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
                        MediaDataEncoder::PixelFormat::YUV420P, 1, 0);
  EXPECT_NE(e1x0, nullptr);
  EXPECT_FALSE(EnsureInit(e1x0));
}
350 #endif
351
352 #ifdef MOZ_WIDGET_ANDROID
// Android-only: a 1x1 frame is below the platform encoder's minimum
// supported size, so Init must fail even though creation succeeds.
TEST_F(MediaDataEncoderTest, AndroidNotSupportedSize) {
  SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

  RefPtr<MediaDataEncoder> e =
      CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
                        MediaDataEncoder::PixelFormat::YUV420P, 1, 1);
  EXPECT_NE(e, nullptr);
  EXPECT_FALSE(EnsureInit(e));
}
362 #endif
363