/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "gtest/gtest.h"
#include "VideoFrameConverter.h"
#include "YUVBufferGenerator.h"

using namespace mozilla;

class VideoFrameConverterTest;

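// Listener registered with the VideoFrameConverter under test. It simply
// forwards each converted frame to the test fixture.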
class FrameListener : public VideoConverterListener {
 public:
  explicit FrameListener(VideoFrameConverterTest* aTest);
  void OnVideoFrameConverted(const webrtc::VideoFrame& aVideoFrame) override;

 private:
  VideoFrameConverterTest* mTest;
};

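// Test fixture that owns a VideoFrameConverter and records every converted
// frame together with its wall-clock arrival time, under a monitor, so tests
// can block until a given number of frames have been produced.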
class VideoFrameConverterTest : public ::testing::Test {
 protected:
  using FrameType = std::pair<webrtc::VideoFrame, TimeStamp>;
  Monitor mMonitor;
  RefPtr<VideoFrameConverter> mConverter;
  RefPtr<FrameListener> mListener;
  std::vector<FrameType> mConvertedFrames;

  VideoFrameConverterTest()
      : mMonitor("PacingFixture::mMonitor"),
        mConverter(MakeAndAddRef<VideoFrameConverter>()),
        mListener(MakeAndAddRef<FrameListener>(this)) {
    mConverter->AddListener(mListener);
  }

  void TearDown() override { mConverter->Shutdown(); }

  size_t NumConvertedFrames() {
    MonitorAutoLock lock(mMonitor);
    return mConvertedFrames.size();
  }

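  // Blocks until at least aN frames have been converted, then returns the
  // first aN of them in conversion order.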
  std::vector<FrameType> WaitForNConverted(size_t aN) {
    MonitorAutoLock l(mMonitor);
    while (mConvertedFrames.size() < aN) {
      l.Wait();
    }
    std::vector<FrameType> v(mConvertedFrames.begin(),
                             mConvertedFrames.begin() + aN);
    return v;
  }

 public:
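  // Called via FrameListener whenever the converter produces a frame. Records
  // the frame and its arrival time, then wakes any waiting test code.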
  void OnVideoFrameConverted(const webrtc::VideoFrame& aVideoFrame) {
    MonitorAutoLock lock(mMonitor);
    EXPECT_NE(aVideoFrame.timestamp_us(), 0);
    mConvertedFrames.push_back(std::make_pair(aVideoFrame, TimeStamp::Now()));
    mMonitor.Notify();
  }
};

FrameListener::FrameListener(VideoFrameConverterTest* aTest) : mTest(aTest) {}
void FrameListener::OnVideoFrameConverted(
    const webrtc::VideoFrame& aVideoFrame) {
  mTest->OnVideoFrameConverted(aVideoFrame);
}

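// Creates a VideoChunk holding a generated I420 image of the given size,
// stamped with the given time and a zero duration.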
VideoChunk GenerateChunk(int32_t aWidth, int32_t aHeight, TimeStamp aTime) {
  YUVBufferGenerator generator;
  generator.Init(gfx::IntSize(aWidth, aHeight));
  VideoFrame f(generator.GenerateI420Image(), gfx::IntSize(aWidth, aHeight));
  VideoChunk c;
  c.mFrame.TakeFrom(&f);
  c.mTimeStamp = aTime;
  c.mDuration = 0;
  return c;
}

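// A chunk stamped with the current time is converted promptly and keeps its
// dimensions.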
TEST_F(VideoFrameConverterTest, BasicConversion) {
  TimeStamp now = TimeStamp::Now();
  VideoChunk chunk = GenerateChunk(640, 480, now);
  mConverter->SetActive(true);
  mConverter->QueueVideoChunk(chunk, false);
  auto frames = WaitForNConverted(1);
  ASSERT_EQ(frames.size(), 1U);
  EXPECT_EQ(frames[0].first.width(), 640);
  EXPECT_EQ(frames[0].first.height(), 480);
  EXPECT_GT(frames[0].second - now, TimeDuration::FromMilliseconds(0));
}

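// A chunk stamped 100ms in the future is paced: it is not delivered before
// its timestamp has passed.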
TEST_F(VideoFrameConverterTest, BasicPacing) {
  TimeStamp now = TimeStamp::Now();
  TimeStamp future = now + TimeDuration::FromMilliseconds(100);
  VideoChunk chunk = GenerateChunk(640, 480, future);
  mConverter->SetActive(true);
  mConverter->QueueVideoChunk(chunk, false);
  auto frames = WaitForNConverted(1);
  EXPECT_GT(TimeStamp::Now(), future);
  ASSERT_EQ(frames.size(), 1U);
  EXPECT_EQ(frames[0].first.width(), 640);
  EXPECT_EQ(frames[0].first.height(), 480);
  EXPECT_GT(frames[0].second - now, future - now);
}

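// Two chunks with different future timestamps are each paced to their own
// timestamp and delivered in order.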
TEST_F(VideoFrameConverterTest, MultiPacing) {
  TimeStamp now = TimeStamp::Now();
  TimeStamp future1 = now + TimeDuration::FromMilliseconds(100);
  TimeStamp future2 = now + TimeDuration::FromMilliseconds(200);
  VideoChunk chunk = GenerateChunk(640, 480, future1);
  mConverter->SetActive(true);
  mConverter->QueueVideoChunk(chunk, false);
  chunk = GenerateChunk(640, 480, future2);
  mConverter->QueueVideoChunk(chunk, false);
  auto frames = WaitForNConverted(2);
  EXPECT_GT(TimeStamp::Now(), future2);
  ASSERT_EQ(frames.size(), 2U);
  EXPECT_EQ(frames[0].first.width(), 640);
  EXPECT_EQ(frames[0].first.height(), 480);
  EXPECT_GT(frames[0].second - now, future1 - now);
  EXPECT_EQ(frames[1].first.width(), 640);
  EXPECT_EQ(frames[1].first.height(), 480);
  EXPECT_GT(frames[1].second, future2);
  EXPECT_GT(frames[1].second - now, frames[0].second - now);
}

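// With only one chunk queued, the same-frame timer re-sends the last frame;
// the duplicate arrives between 1s and 2s after the first delivery.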
TEST_F(VideoFrameConverterTest, Duplication) {
  TimeStamp now = TimeStamp::Now();
  TimeStamp future1 = now + TimeDuration::FromMilliseconds(100);
  VideoChunk chunk = GenerateChunk(640, 480, future1);
  mConverter->SetActive(true);
  mConverter->QueueVideoChunk(chunk, false);
  auto frames = WaitForNConverted(2);
  EXPECT_GT(TimeStamp::Now() - now, TimeDuration::FromMilliseconds(1100));
  ASSERT_EQ(frames.size(), 2U);
  EXPECT_EQ(frames[0].first.width(), 640);
  EXPECT_EQ(frames[0].first.height(), 480);
  EXPECT_GT(frames[0].second, future1);
  EXPECT_EQ(frames[1].first.width(), 640);
  EXPECT_EQ(frames[1].first.height(), 480);
  EXPECT_GT(frames[1].second - now, TimeDuration::FromMilliseconds(1100));
  // Check that the second frame comes between 1s and 2s after the first.
  EXPECT_NEAR(frames[1].first.timestamp_us(),
              frames[0].first.timestamp_us() + ((PR_USEC_PER_SEC * 3) / 2),
              PR_USEC_PER_SEC / 2);
}

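// A pending chunk is dropped when a newer one is queued; the frame delivered
// is the 640x480 chunk queued last, not the earlier-queued 800x600 chunk
// scheduled further in the future.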
TEST_F(VideoFrameConverterTest, DropsOld) {
  TimeStamp now = TimeStamp::Now();
  TimeStamp future1 = now + TimeDuration::FromMilliseconds(1000);
  TimeStamp future2 = now + TimeDuration::FromMilliseconds(100);
  mConverter->SetActive(true);
  mConverter->QueueVideoChunk(GenerateChunk(800, 600, future1), false);
  mConverter->QueueVideoChunk(GenerateChunk(640, 480, future2), false);
  auto frames = WaitForNConverted(1);
  EXPECT_GT(TimeStamp::Now(), future2);
  ASSERT_EQ(frames.size(), 1U);
  EXPECT_EQ(frames[0].first.width(), 640);
  EXPECT_EQ(frames[0].first.height(), 480);
  EXPECT_GT(frames[0].second - now, future2 - now);
}

// We check that the disabling code is exercised by sending multiple different
// frames to the converter within one second. While disabled (black), the
// converter shall treat all frames as identical and issue one black frame per
// second.
TEST_F(VideoFrameConverterTest, BlackOnDisable) {
  TimeStamp now = TimeStamp::Now();
  TimeStamp future1 = now + TimeDuration::FromMilliseconds(100);
  TimeStamp future2 = now + TimeDuration::FromMilliseconds(200);
  TimeStamp future3 = now + TimeDuration::FromMilliseconds(400);
  mConverter->SetActive(true);
  mConverter->SetTrackEnabled(false);
  mConverter->QueueVideoChunk(GenerateChunk(640, 480, future1), false);
  mConverter->QueueVideoChunk(GenerateChunk(640, 480, future2), false);
  mConverter->QueueVideoChunk(GenerateChunk(640, 480, future3), false);
  auto frames = WaitForNConverted(2);
  EXPECT_GT(TimeStamp::Now() - now, TimeDuration::FromMilliseconds(1100));
  ASSERT_EQ(frames.size(), 2U);
  EXPECT_EQ(frames[0].first.width(), 640);
  EXPECT_EQ(frames[0].first.height(), 480);
  EXPECT_GT(frames[0].second - now, future1 - now);
  EXPECT_EQ(frames[1].first.width(), 640);
  EXPECT_EQ(frames[1].first.height(), 480);
  EXPECT_GT(frames[1].second - now,
            future1 - now + TimeDuration::FromSeconds(1));
  // Check that the second frame comes between 1s and 2s after the first.
  EXPECT_NEAR(frames[1].first.timestamp_us(),
              frames[0].first.timestamp_us() + ((PR_USEC_PER_SEC * 3) / 2),
              PR_USEC_PER_SEC / 2);
}

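// Queueing a chunk whose timestamp jumps back in time signals a reset and
// clears frames that were queued for the future.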
TEST_F(VideoFrameConverterTest, ClearFutureFramesOnJumpingBack) {
  TimeStamp start = TimeStamp::Now();
  TimeStamp future1 = start + TimeDuration::FromMilliseconds(100);

  mConverter->SetActive(true);
  mConverter->QueueVideoChunk(GenerateChunk(640, 480, future1), false);
  WaitForNConverted(1);

  // We are now at t=100ms+. Queue a future frame and jump back in time to
  // signal a reset.

  TimeStamp step1 = TimeStamp::Now();
  ASSERT_GT(step1 - start, future1 - start);
  TimeStamp future2 = step1 + TimeDuration::FromMilliseconds(200);
  TimeStamp future3 = step1 + TimeDuration::FromMilliseconds(100);
  ASSERT_LT(future2 - start, future1 + TimeDuration::FromSeconds(1) - start);
  mConverter->QueueVideoChunk(GenerateChunk(800, 600, future2), false);
  VideoChunk nullChunk;
  nullChunk.mFrame = VideoFrame(nullptr, gfx::IntSize(800, 600));
  nullChunk.mTimeStamp = step1;
  mConverter->QueueVideoChunk(nullChunk, false);

  // We queue one more chunk after the reset so we don't have to wait a full
  // second for the same-frame timer. It has a different time and resolution
  // so we can differentiate them.
  mConverter->QueueVideoChunk(GenerateChunk(320, 240, future3), false);

  auto frames = WaitForNConverted(2);
  TimeStamp step2 = TimeStamp::Now();
  EXPECT_GT(step2 - start, future3 - start);
  ASSERT_EQ(frames.size(), 2U);
  EXPECT_EQ(frames[0].first.width(), 640);
  EXPECT_EQ(frames[0].first.height(), 480);
  EXPECT_GT(frames[0].second - start, future1 - start);
  EXPECT_EQ(frames[1].first.width(), 320);
  EXPECT_EQ(frames[1].first.height(), 240);
  EXPECT_GT(frames[1].second - start, future3 - start);
}

// We check that no frames are converted while the converter is inactive, and
// that upon activation the most recently queued frame gets converted.
TEST_F(VideoFrameConverterTest, NoConversionsWhileInactive) {
  TimeStamp now = TimeStamp::Now();
  TimeStamp future1 = now - TimeDuration::FromMilliseconds(1);
  TimeStamp future2 = now;
  mConverter->QueueVideoChunk(GenerateChunk(640, 480, future1), false);
  mConverter->QueueVideoChunk(GenerateChunk(800, 600, future2), false);

  // SetActive needs to follow the same async path as the frames to be in sync.
  auto q =
      MakeRefPtr<TaskQueue>(GetMediaThreadPool(MediaThreadType::WEBRTC_DECODER),
                            "VideoFrameConverterTest");
  auto timer = MakeRefPtr<MediaTimer>(false);
  timer->WaitFor(TimeDuration::FromMilliseconds(100), __func__)
      ->Then(q, __func__,
             [converter = mConverter] { converter->SetActive(true); });

  auto frames = WaitForNConverted(1);
  ASSERT_EQ(frames.size(), 1U);
  EXPECT_EQ(frames[0].first.width(), 800);
  EXPECT_EQ(frames[0].first.height(), 600);
}