/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef __FFmpegVideoDecoder_h__
#define __FFmpegVideoDecoder_h__

#include "FFmpegLibWrapper.h"
#include "FFmpegDataDecoder.h"
#include "SimpleMap.h"
#ifdef MOZ_WAYLAND_USE_VAAPI
#  include "mozilla/widget/WaylandDMABufSurface.h"
#  include <list>
#endif

namespace mozilla {

#ifdef MOZ_WAYLAND_USE_VAAPI
// When VA-API decoding is running, ffmpeg allocates an AVHWFramesContext - a
// pool of "hardware" frames. Every "hardware" frame (VASurface) is backed by
// an actual piece of GPU memory which holds the decoded image data.
//
// The VASurface is wrapped by WaylandDMABufSurface and transferred to the
// rendering queue by WaylandDMABUFSurfaceImage, where a TextureClient is
// created and the VASurface is used as a texture there.
//
// As there's a limited number of VASurfaces, ffmpeg reuses them to decode
// the next frames ASAP, even if they are still attached to a
// WaylandDMABufSurface and used as a texture in our rendering engine.
//
// Unfortunately there isn't any obvious way to mark a particular VASurface
// as used. The best we can do is to hold a reference to the particular
// AVBuffer from the decoded AVFrame and to the AVHWFramesContext which owns
// the AVBuffer.
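//
// A minimal sketch of that approach (assuming FFmpegLibWrapper exposes the
// av_buffer_ref()/av_buffer_unref() pair used by the holder below):
//
//   // Constructor: keep the frame pool and the frame's hardware buffer alive.
//   mAVHWFramesContext = mLib->av_buffer_ref(aAVCodecContext->hw_frames_ctx);
//   mHWAVBuffer = mLib->av_buffer_ref(aAVFrame->buf[0]);
//
//   // Destructor: drop the references once the surface is no longer used.
//   mLib->av_buffer_unref(&mHWAVBuffer);
//   mLib->av_buffer_unref(&mAVHWFramesContext);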

class VAAPIFrameHolder final {
 public:
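  // Keeps a reference to the AVHWFramesContext of aAVCodecContext and to the
  // hardware buffer of aAVFrame (see the class comment above) so that ffmpeg
  // does not recycle the underlying VASurface while aSurface is still in use.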
  VAAPIFrameHolder(FFmpegLibWrapper* aLib, WaylandDMABufSurface* aSurface,
                   AVCodecContext* aAVCodecContext, AVFrame* aAVFrame);
  ~VAAPIFrameHolder();

  // Check whether the WaylandDMABufSurface is used by any gecko rendering
  // process (WebRender or GL compositor) or by WaylandDMABUFSurfaceImage/
  // VideoData.
  bool IsUsed() const { return mSurface->IsGlobalRefSet(); }

 private:
  const FFmpegLibWrapper* mLib;
  const RefPtr<WaylandDMABufSurface> mSurface;
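  // References that keep the decoded frame's hardware buffer and the
  // AVHWFramesContext which owns it alive (see the class comment above).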
  AVBufferRef* mAVHWFramesContext;
  AVBufferRef* mHWAVBuffer;
};
#endif

template <int V>
class FFmpegVideoDecoder : public FFmpegDataDecoder<V> {};

template <>
class FFmpegVideoDecoder<LIBAV_VER>;
DDLoggedTypeNameAndBase(FFmpegVideoDecoder<LIBAV_VER>,
                        FFmpegDataDecoder<LIBAV_VER>);

template <>
class FFmpegVideoDecoder<LIBAV_VER>
    : public FFmpegDataDecoder<LIBAV_VER>,
      public DecoderDoctorLifeLogger<FFmpegVideoDecoder<LIBAV_VER>> {
  typedef mozilla::layers::Image Image;
  typedef mozilla::layers::ImageContainer ImageContainer;
  typedef mozilla::layers::KnowsCompositor KnowsCompositor;
  typedef SimpleMap<int64_t> DurationMap;

 public:
  FFmpegVideoDecoder(FFmpegLibWrapper* aLib, TaskQueue* aTaskQueue,
                     const VideoInfo& aConfig, KnowsCompositor* aAllocator,
                     ImageContainer* aImageContainer, bool aLowLatency,
                     bool aDisableHardwareDecoding);

  RefPtr<InitPromise> Init() override;
  void InitCodecContext() override;
  nsCString GetDescriptionName() const override {
#ifdef USING_MOZFFVPX
    return NS_LITERAL_CSTRING("ffvpx video decoder");
#else
    return NS_LITERAL_CSTRING("ffmpeg video decoder");
#endif
  }
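  // H.264 samples are expected in AVCC form (kNeedAVCC), so Annex B input is
  // converted before it reaches the decoder.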
  ConversionRequired NeedsConversion() const override {
    return ConversionRequired::kNeedAVCC;
  }

  static AVCodecID GetCodecId(const nsACString& aMimeType);

 private:
  RefPtr<FlushPromise> ProcessFlush() override;
  void ProcessShutdown() override;
  MediaResult DoDecode(MediaRawData* aSample, uint8_t* aData, int aSize,
                       bool* aGotFrame, DecodedData& aResults) override;
  void OutputDelayedFrames();
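  // A separate AVCodecParser is only needed for VP8/VP9 on older libavcodec
  // versions; from version 58 on the decoder consumes raw packets directly.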
  bool NeedParser() const override {
    return
#if LIBAVCODEC_VERSION_MAJOR >= 58
        false;
#else
#  if LIBAVCODEC_VERSION_MAJOR >= 55
        mCodecID == AV_CODEC_ID_VP9 ||
#  endif
        mCodecID == AV_CODEC_ID_VP8;
#endif
  }
  gfx::YUVColorSpace GetFrameColorSpace();

  MediaResult CreateImage(int64_t aOffset, int64_t aPts, int64_t aDuration,
                          MediaDataDecoder::DecodedData& aResults);

#ifdef MOZ_WAYLAND_USE_VAAPI
  MediaResult InitVAAPIDecoder();
  bool CreateVAAPIDeviceContext();
  void InitVAAPICodecContext();
  AVCodec* FindVAAPICodec();
  bool IsHardwareAccelerated(nsACString& aFailureReason) const override;

  MediaResult CreateImageVAAPI(int64_t aOffset, int64_t aPts, int64_t aDuration,
                               MediaDataDecoder::DecodedData& aResults);
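  // Release the frame holders whose surfaces are no longer used by the
  // rendering pipeline (see VAAPIFrameHolder::IsUsed()), or all of them
  // at once on flush/shutdown.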
  void ReleaseUnusedVAAPIFrames();
  void ReleaseAllVAAPIFrames();
#endif

  /**
   * This method allocates a buffer for FFmpeg's decoder, wrapped in an Image.
   * Currently it only supports Planar YUV420, which appears to be the only
   * non-hardware accelerated image format that FFmpeg's H264 decoder is
   * capable of outputting.
   */
  int AllocateYUV420PVideoBuffer(AVCodecContext* aCodecContext,
                                 AVFrame* aFrame);

#ifdef MOZ_WAYLAND_USE_VAAPI
  AVBufferRef* mVAAPIDeviceContext;
  const bool mDisableHardwareDecoding;
  VADisplay mDisplay;
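  // Holders of hardware frames that may still be used by the rendering
  // pipeline; pruned by ReleaseUnusedVAAPIFrames() once their surfaces are
  // no longer referenced.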
  std::list<UniquePtr<VAAPIFrameHolder>> mFrameHolders;
#endif
  RefPtr<KnowsCompositor> mImageAllocator;
  RefPtr<ImageContainer> mImageContainer;
  VideoInfo mInfo;

  class PtsCorrectionContext {
   public:
    PtsCorrectionContext();
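    // Returns the best guess of a presentation timestamp for the given
    // pts/dts pair, based on how often each of them has been seen to be
    // faulty (apparently modeled on ffmpeg's guess_correct_pts() heuristic).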
    int64_t GuessCorrectPts(int64_t aPts, int64_t aDts);
    void Reset();
    int64_t LastDts() const { return mLastDts; }

   private:
    int64_t mNumFaultyPts;  /// Number of incorrect PTS values so far
    int64_t mNumFaultyDts;  /// Number of incorrect DTS values so far
    int64_t mLastPts;       /// PTS of the last frame
    int64_t mLastDts;       /// DTS of the last frame
  };

  PtsCorrectionContext mPtsContext;

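  // Maps input sample timestamps to durations so a decoded frame can be given
  // the duration of the sample it was produced from.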
  DurationMap mDurationMap;
  const bool mLowLatency;
};

}  // namespace mozilla

#endif  // __FFmpegVideoDecoder_h__