/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaData.h"

#include "ImageContainer.h"
#include "MediaInfo.h"
#include "VideoUtils.h"
#include "YCbCrUtils.h"
#include "mozilla/layers/ImageBridgeChild.h"
#include "mozilla/layers/KnowsCompositor.h"
#include "mozilla/layers/SharedRGBImage.h"

#include <stdint.h>

#ifdef XP_WIN
#include "mozilla/WindowsVersion.h"
#include "mozilla/layers/D3D11YCbCrImage.h"
#endif

namespace mozilla {

using namespace mozilla::gfx;
using layers::ImageContainer;
using layers::PlanarYCbCrData;
using layers::PlanarYCbCrImage;
using media::TimeUnit;

const char* AudioData::sTypeName = "audio";
const char* VideoData::sTypeName = "video";

bool IsDataLoudnessHearable(const AudioDataValue aData) {
  // We can convert the digital sample value to dBFS via the following formula.
  // According to the American SMPTE standard, 0 dBu equals -20 dBFS. In theory
  // 0 dBu is still hearable, so we choose a smaller value as our threshold. If
  // the loudness is under this threshold, it might not be hearable.
  return 20.0f * std::log10(AudioSampleToFloat(aData)) > -100;
}

void AudioData::EnsureAudioBuffer() {
  if (mAudioBuffer) return;
  mAudioBuffer =
      SharedBuffer::Create(mFrames * mChannels * sizeof(AudioDataValue));

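  // Deinterleave: mAudioData holds interleaved samples (frame-major), while
  // mAudioBuffer is filled channel-major (planar), one contiguous run of
  // mFrames samples per channel.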
  AudioDataValue* data = static_cast<AudioDataValue*>(mAudioBuffer->Data());
  for (uint32_t i = 0; i < mFrames; ++i) {
    for (uint32_t j = 0; j < mChannels; ++j) {
      data[j * mFrames + i] = mAudioData[i * mChannels + j];
    }
  }
}

size_t AudioData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t size =
      aMallocSizeOf(this) + mAudioData.SizeOfExcludingThis(aMallocSizeOf);
  if (mAudioBuffer) {
    size += mAudioBuffer->SizeOfIncludingThis(aMallocSizeOf);
  }
  return size;
}

bool AudioData::IsAudible() const {
  if (!mAudioData) {
    return false;
  }

  for (uint32_t frame = 0; frame < mFrames; ++frame) {
    for (uint32_t channel = 0; channel < mChannels; ++channel) {
      if (IsDataLoudnessHearable(mAudioData[frame * mChannels + channel])) {
        return true;
      }
    }
  }
  return false;
}

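// Moves the sample data out of aOther into a new AudioData with the given
// timestamp and duration; aOther's audio buffer is left empty afterwards.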
/* static */
already_AddRefed<AudioData> AudioData::TransferAndUpdateTimestampAndDuration(
    AudioData* aOther, const TimeUnit& aTimestamp, const TimeUnit& aDuration) {
  NS_ENSURE_TRUE(aOther, nullptr);
  RefPtr<AudioData> v =
      new AudioData(aOther->mOffset, aTimestamp, aDuration, aOther->mFrames,
                    Move(aOther->mAudioData), aOther->mChannels, aOther->mRate);
  return v.forget();
}

static bool ValidatePlane(const VideoData::YCbCrBuffer::Plane& aPlane) {
  return aPlane.mWidth <= PlanarYCbCrImage::MAX_DIMENSION &&
         aPlane.mHeight <= PlanarYCbCrImage::MAX_DIMENSION &&
         aPlane.mWidth * aPlane.mHeight < MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
         aPlane.mStride > 0 && aPlane.mWidth <= aPlane.mStride;
}

static bool ValidateBufferAndPicture(const VideoData::YCbCrBuffer& aBuffer,
                                     const IntRect& aPicture) {
  // The following situation should never happen unless there is a bug
  // in the decoder.
  if (aBuffer.mPlanes[1].mWidth != aBuffer.mPlanes[2].mWidth ||
      aBuffer.mPlanes[1].mHeight != aBuffer.mPlanes[2].mHeight) {
    NS_ERROR("C planes with different sizes");
    return false;
  }

  // The following situations could be triggered by invalid input.
  if (aPicture.width <= 0 || aPicture.height <= 0) {
    // In debug builds, make the error more noticeable.
    MOZ_ASSERT(false, "Empty picture rect");
    return false;
  }
  if (!ValidatePlane(aBuffer.mPlanes[0]) ||
      !ValidatePlane(aBuffer.mPlanes[1]) ||
      !ValidatePlane(aBuffer.mPlanes[2])) {
    NS_WARNING("Invalid plane size");
    return false;
  }

  // Ensure the picture size specified in the headers can be extracted out of
  // the frame we've been supplied without indexing out of bounds.
  CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
  CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
  if (!xLimit.isValid() || xLimit.value() > aBuffer.mPlanes[0].mStride ||
      !yLimit.isValid() || yLimit.value() > aBuffer.mPlanes[0].mHeight) {
    // The specified picture dimensions can't be contained inside the video
    // frame; we'd stomp memory if we tried to copy it. Fail.
    NS_WARNING("Overflowing picture rect");
    return false;
  }
  return true;
}

VideoData::VideoData(int64_t aOffset, const TimeUnit& aTime,
                     const TimeUnit& aDuration, bool aKeyframe,
                     const TimeUnit& aTimecode, IntSize aDisplay,
                     layers::ImageContainer::FrameID aFrameID)
    : MediaData(VIDEO_DATA, aOffset, aTime, aDuration, 1),
      mDisplay(aDisplay),
      mFrameID(aFrameID),
      mSentToCompositor(false),
      mNextKeyFrameTime(TimeUnit::Invalid()) {
  MOZ_ASSERT(!mDuration.IsNegative(), "Frame must have non-negative duration.");
  mKeyframe = aKeyframe;
  mTimecode = aTimecode;
}

VideoData::~VideoData() {}

void VideoData::SetListener(UniquePtr<Listener> aListener) {
  MOZ_ASSERT(!mSentToCompositor,
             "Listener should be registered before sending data");

  mListener = Move(aListener);
}

void VideoData::MarkSentToCompositor() {
  if (mSentToCompositor) {
    return;
  }

  mSentToCompositor = true;
  if (mListener != nullptr) {
    mListener->OnSentToCompositor();
    mListener = nullptr;
  }
}

size_t VideoData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t size = aMallocSizeOf(this);

  // Currently only PLANAR_YCBCR has a well-defined function for determining
  // its size, so reporting is limited to that type.
  if (mImage && mImage->GetFormat() == ImageFormat::PLANAR_YCBCR) {
    const mozilla::layers::PlanarYCbCrImage* img =
        static_cast<const mozilla::layers::PlanarYCbCrImage*>(mImage.get());
    size += img->SizeOfIncludingThis(aMallocSizeOf);
  }

  return size;
}

void VideoData::UpdateDuration(const TimeUnit& aDuration) {
  MOZ_ASSERT(!aDuration.IsNegative());
  mDuration = aDuration;
}

void VideoData::UpdateTimestamp(const TimeUnit& aTimestamp) {
  MOZ_ASSERT(!aTimestamp.IsNegative());

  auto updatedDuration = GetEndTime() - aTimestamp;
  MOZ_ASSERT(!updatedDuration.IsNegative());

  mTime = aTimestamp;
  mDuration = updatedDuration;
}

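// Describe aBuffer's planes in the layers::PlanarYCbCrData layout expected by
// the compositor. No pixel data is copied; the returned descriptor simply
// points into the planes owned by aBuffer.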
PlanarYCbCrData ConstructPlanarYCbCrData(const VideoInfo& aInfo,
                                         const VideoData::YCbCrBuffer& aBuffer,
                                         const IntRect& aPicture) {
  const VideoData::YCbCrBuffer::Plane& Y = aBuffer.mPlanes[0];
  const VideoData::YCbCrBuffer::Plane& Cb = aBuffer.mPlanes[1];
  const VideoData::YCbCrBuffer::Plane& Cr = aBuffer.mPlanes[2];

  PlanarYCbCrData data;
  data.mYChannel = Y.mData + Y.mOffset;
  data.mYSize = IntSize(Y.mWidth, Y.mHeight);
  data.mYStride = Y.mStride;
  data.mYSkip = Y.mSkip;
  data.mCbChannel = Cb.mData + Cb.mOffset;
  data.mCrChannel = Cr.mData + Cr.mOffset;
  data.mCbCrSize = IntSize(Cb.mWidth, Cb.mHeight);
  data.mCbCrStride = Cb.mStride;
  data.mCbSkip = Cb.mSkip;
  data.mCrSkip = Cr.mSkip;
  data.mPicX = aPicture.x;
  data.mPicY = aPicture.y;
  data.mPicSize = aPicture.Size();
  data.mStereoMode = aInfo.mStereoMode;
  data.mYUVColorSpace = aBuffer.mYUVColorSpace;
  data.mBitDepth = aBuffer.mBitDepth;
  return data;
}

/* static */ bool VideoData::SetVideoDataToImage(PlanarYCbCrImage* aVideoImage,
                                                 const VideoInfo& aInfo,
                                                 const YCbCrBuffer& aBuffer,
                                                 const IntRect& aPicture,
                                                 bool aCopyData) {
  if (!aVideoImage) {
    return false;
  }

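  // CopyData copies the pixels into the image's own buffer; AdoptData only
  // stores the plane pointers, so when aCopyData is false the caller must
  // keep aBuffer alive for the lifetime of aVideoImage.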
  PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);

  aVideoImage->SetDelayedConversion(true);
  if (aCopyData) {
    return aVideoImage->CopyData(data);
  } else {
    return aVideoImage->AdoptData(data);
  }
}

/* static */
already_AddRefed<VideoData> VideoData::CreateAndCopyData(
    const VideoInfo& aInfo, ImageContainer* aContainer, int64_t aOffset,
    const TimeUnit& aTime, const TimeUnit& aDuration,
    const YCbCrBuffer& aBuffer, bool aKeyframe, const TimeUnit& aTimecode,
    const IntRect& aPicture, layers::KnowsCompositor* aAllocator) {
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                      aTimecode, aInfo.mDisplay, 0));
    return v.forget();
  }

  if (!ValidateBufferAndPicture(aBuffer, aPicture)) {
    return nullptr;
  }

  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aInfo.mDisplay, 0));

  // Currently our decoder only knows how to output to ImageFormat::PLANAR_YCBCR
  // format.
#ifdef XP_WIN
  // We disable this code path on Windows versions earlier than Windows 8 due
  // to intermittent crashes with old drivers. See bug 1405110.
  if (IsWin8OrLater() && !XRE_IsParentProcess() && aAllocator &&
      aAllocator->SupportsD3D11()) {
    RefPtr<layers::D3D11YCbCrImage> d3d11Image = new layers::D3D11YCbCrImage();
    PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);
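    // Allocate through the ImageBridge when it is available; otherwise fall
    // back to the supplied KnowsCompositor.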
    if (d3d11Image->SetData(layers::ImageBridgeChild::GetSingleton()
                                ? layers::ImageBridgeChild::GetSingleton().get()
                                : aAllocator,
                            aContainer, data)) {
      v->mImage = d3d11Image;
      return v.forget();
    }
  }
#endif
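  // Otherwise fall back to a software PlanarYCbCrImage allocated by the
  // container (this is also the only path on non-Windows builds).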
  if (!v->mImage) {
    v->mImage = aContainer->CreatePlanarYCbCrImage();
  }

  if (!v->mImage) {
    return nullptr;
  }
  NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::PLANAR_YCBCR,
               "Wrong format?");
  PlanarYCbCrImage* videoImage = v->mImage->AsPlanarYCbCrImage();
  MOZ_ASSERT(videoImage);

  if (!VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
                                      true /* aCopyData */)) {
    return nullptr;
  }

  return v.forget();
}

/* static */
already_AddRefed<VideoData> VideoData::CreateAndCopyData(
    const VideoInfo& aInfo, ImageContainer* aContainer, int64_t aOffset,
    const TimeUnit& aTime, const TimeUnit& aDuration,
    const YCbCrBuffer& aBuffer, const YCbCrBuffer::Plane& aAlphaPlane,
    bool aKeyframe, const TimeUnit& aTimecode, const IntRect& aPicture) {
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                      aTimecode, aInfo.mDisplay, 0));
    return v.forget();
  }

  if (!ValidateBufferAndPicture(aBuffer, aPicture)) {
    return nullptr;
  }

  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aInfo.mDisplay, 0));

  // Convert from YUVA to BGRA format on the software side.
  RefPtr<layers::SharedRGBImage> videoImage =
      aContainer->CreateSharedRGBImage();
  v->mImage = videoImage;

  if (!v->mImage) {
    return nullptr;
  }
  if (!videoImage->Allocate(
          IntSize(aBuffer.mPlanes[0].mWidth, aBuffer.mPlanes[0].mHeight),
          SurfaceFormat::B8G8R8A8)) {
    return nullptr;
  }
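  // The destination stride below is size.width * 4 bytes (4 bytes per pixel).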
  uint8_t* argb_buffer = videoImage->GetBuffer();
  IntSize size = videoImage->GetSize();

  // The naming convention for libyuv and associated utils is word-order.
  // The naming convention in the gfx stack is byte-order.
  ConvertYCbCrAToARGB(aBuffer.mPlanes[0].mData, aBuffer.mPlanes[1].mData,
                      aBuffer.mPlanes[2].mData, aAlphaPlane.mData,
                      aBuffer.mPlanes[0].mStride, aBuffer.mPlanes[1].mStride,
                      argb_buffer, size.width * 4, size.width, size.height);

  return v.forget();
}

/* static */
already_AddRefed<VideoData> VideoData::CreateFromImage(
    const IntSize& aDisplay, int64_t aOffset, const TimeUnit& aTime,
    const TimeUnit& aDuration, const RefPtr<Image>& aImage, bool aKeyframe,
    const TimeUnit& aTimecode) {
  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aDisplay, 0));
  v->mImage = aImage;
  return v.forget();
}

MediaRawData::MediaRawData()
    : MediaData(RAW_DATA, 0), mCrypto(mCryptoInternal) {}

MediaRawData::MediaRawData(const uint8_t* aData, size_t aSize)
    : MediaData(RAW_DATA, 0), mCrypto(mCryptoInternal), mBuffer(aData, aSize) {}

MediaRawData::MediaRawData(const uint8_t* aData, size_t aSize,
                           const uint8_t* aAlphaData, size_t aAlphaSize)
    : MediaData(RAW_DATA, 0),
      mCrypto(mCryptoInternal),
      mBuffer(aData, aSize),
      mAlphaBuffer(aAlphaData, aAlphaSize) {}

already_AddRefed<MediaRawData> MediaRawData::Clone() const {
  RefPtr<MediaRawData> s = new MediaRawData;
  s->mTimecode = mTimecode;
  s->mTime = mTime;
  s->mDuration = mDuration;
  s->mOffset = mOffset;
  s->mKeyframe = mKeyframe;
  s->mExtraData = mExtraData;
  s->mCryptoInternal = mCryptoInternal;
  s->mTrackInfo = mTrackInfo;
  s->mEOS = mEOS;
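  // Deep-copy the sample payload (and the alpha payload, if any); the fields
  // above are copied by value or shared by reference.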
  if (!s->mBuffer.Append(mBuffer.Data(), mBuffer.Length())) {
    return nullptr;
  }
  if (!s->mAlphaBuffer.Append(mAlphaBuffer.Data(), mAlphaBuffer.Length())) {
    return nullptr;
  }
  return s.forget();
}

MediaRawData::~MediaRawData() {}

size_t MediaRawData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t size = aMallocSizeOf(this);
  size += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
  return size;
}

MediaRawDataWriter* MediaRawData::CreateWriter() {
  return new MediaRawDataWriter(this);
}

MediaRawDataWriter::MediaRawDataWriter(MediaRawData* aMediaRawData)
    : mCrypto(aMediaRawData->mCryptoInternal), mTarget(aMediaRawData) {}

bool MediaRawDataWriter::SetSize(size_t aSize) {
  return mTarget->mBuffer.SetLength(aSize);
}

bool MediaRawDataWriter::Prepend(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Prepend(aData, aSize);
}

bool MediaRawDataWriter::Replace(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Replace(aData, aSize);
}

void MediaRawDataWriter::Clear() { mTarget->mBuffer.Clear(); }

uint8_t* MediaRawDataWriter::Data() { return mTarget->mBuffer.Data(); }

size_t MediaRawDataWriter::Size() { return mTarget->Size(); }

void MediaRawDataWriter::PopFront(size_t aSize) {
  mTarget->mBuffer.PopFront(aSize);
}

}  // namespace mozilla