/*
Copyright (c) 2012-2020 Maarten Baert <maarten-baert@hotmail.com>

This file is part of SimpleScreenRecorder.

SimpleScreenRecorder is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

SimpleScreenRecorder is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with SimpleScreenRecorder.  If not, see <http://www.gnu.org/licenses/>.
*/

#include "VideoEncoder.h"

#include "Logger.h"
#include "AVWrapper.h"
#include "Muxer.h"
#include "X264Presets.h"

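// Pixel formats that the encoder can accept from the rest of the pipeline, in order of preference.
// PrepareStream() picks the first entry in this list that the selected codec supports, unless the user
// forces a specific format with the 'pixelformat' option. The third field marks YUV formats, which get
// full BT.709 color metadata; the RGB-like formats only get AVCOL_SPC_RGB.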
const std::vector<VideoEncoder::PixelFormatData> VideoEncoder::SUPPORTED_PIXEL_FORMATS = {
	{"nv12", AV_PIX_FMT_NV12, true},
	{"yuv420", AV_PIX_FMT_YUV420P, true},
	{"yuv422", AV_PIX_FMT_YUV422P, true},
	{"yuv444", AV_PIX_FMT_YUV444P, true},
	{"bgra", AV_PIX_FMT_BGRA, false},
	{"bgr", AV_PIX_FMT_BGR24, false},
	{"rgb", AV_PIX_FMT_RGB24, false},
};

VideoEncoder::VideoEncoder(Muxer* muxer, AVStream* stream, AVCodecContext* codec_context, AVCodec* codec, AVDictionary** options)
	: BaseEncoder(muxer, stream, codec_context, codec, options) {

#if !SSR_USE_AVCODEC_ENCODE_VIDEO2
	// allocate a temporary buffer
	// Apparently libav/ffmpeg completely ignores the size of the buffer, and if it's too small it just crashes.
	// Originally it was 256k, which is large enough for about 99.9% of the packets, but it still occasionally crashes.
	// So now I'm using a buffer that's always at least large enough to hold a 256k header and *two* completely uncompressed frames.
	// (one YUV frame takes w * h * 1.5 bytes)
	// Newer versions of libav/ffmpeg have deprecated avcodec_encode_video and added a new function which does the allocation
	// automatically, just like avcodec_encode_audio2, but that function isn't available in Ubuntu 12.04/12.10 yet.
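	// For example, at 1920x1080 this comes down to 256 * 1024 + 1920 * 1080 * 3 = 6482944 bytes (about 6.2 MiB).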
	m_temp_buffer.resize(std::max<unsigned int>(FF_MIN_BUFFER_SIZE, 256 * 1024 + GetCodecContext()->width * GetCodecContext()->height * 3));
#endif

	StartThread();
}

VideoEncoder::~VideoEncoder() {
	StopThread();
}

AVPixelFormat VideoEncoder::GetPixelFormat() {
	return GetCodecContext()->pix_fmt;
}

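// Translates the colorspace of the codec context (AVCOL_SPC_*) into the corresponding swscale constant (SWS_CS_*).
// Unknown or unset colorspaces fall back to SWS_CS_DEFAULT.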
int VideoEncoder::GetColorSpace() {
	switch(GetCodecContext()->colorspace) {
		case AVCOL_SPC_BT709:
			return SWS_CS_ITU709;
		case AVCOL_SPC_FCC:
			return SWS_CS_FCC;
		case AVCOL_SPC_BT470BG:
			return SWS_CS_ITU601;
		case AVCOL_SPC_SMPTE170M:
			return SWS_CS_SMPTE170M;
		case AVCOL_SPC_SMPTE240M:
			return SWS_CS_SMPTE240M;
#ifdef SWS_CS_BT2020
		case AVCOL_SPC_BT2020_NCL:
		case AVCOL_SPC_BT2020_CL:
			return SWS_CS_BT2020;
#endif
		default:
			return SWS_CS_DEFAULT;
	}
}

unsigned int VideoEncoder::GetWidth() {
	return GetCodecContext()->width;
}

unsigned int VideoEncoder::GetHeight() {
	return GetCodecContext()->height;
}

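// PrepareStream() always sets the codec time base to 1/frame_rate, so the denominator of the time base is the frame rate in frames per second.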
unsigned int VideoEncoder::GetFrameRate() {
	assert(GetCodecContext()->time_base.num == 1);
	return GetCodecContext()->time_base.den;
}

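// A codec is considered usable only if it is a video encoder and accepts at least one of the pixel formats
// listed in SUPPORTED_PIXEL_FORMATS, since those are the only formats the recorder can deliver.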
bool VideoEncoder::AVCodecIsSupported(const QString& codec_name) {
	AVCodec *codec = avcodec_find_encoder_by_name(codec_name.toUtf8().constData());
	if(codec == NULL)
		return false;
	if(!av_codec_is_encoder(codec))
		return false;
	if(codec->type != AVMEDIA_TYPE_VIDEO)
		return false;
	for(unsigned int i = 0; i < SUPPORTED_PIXEL_FORMATS.size(); ++i) {
		if(AVCodecSupportsPixelFormat(codec, SUPPORTED_PIXEL_FORMATS[i].m_format)) {
			//qDebug() << codec_name << "supported by" << SUPPORTED_PIXEL_FORMATS[i].m_name;
			return true;
		}
	}
	return false;
}

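// Fills in the codec context and stream before the encoder is opened: validates the video size and frame rate,
// sets the bit rate, time base and aspect ratio, applies the user's codec options, and chooses a pixel format.
// Options that are not handled explicitly are forwarded to the encoder through the AVDictionary.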
void VideoEncoder::PrepareStream(AVStream* stream, AVCodecContext* codec_context, AVCodec* codec, AVDictionary** options, const std::vector<std::pair<QString, QString> >& codec_options,
								 unsigned int bit_rate, unsigned int width, unsigned int height, unsigned int frame_rate) {

	if(width == 0 || height == 0) {
		Logger::LogError("[VideoEncoder::PrepareStream] " + Logger::tr("Error: Width or height is zero!"));
		throw LibavException();
	}
	if(width > SSR_MAX_IMAGE_SIZE || height > SSR_MAX_IMAGE_SIZE) {
		Logger::LogError("[VideoEncoder::PrepareStream] " + Logger::tr("Error: Width or height is too large, the maximum width and height is %1!").arg(SSR_MAX_IMAGE_SIZE));
		throw LibavException();
	}
	if(width % 2 != 0 || height % 2 != 0) {
		Logger::LogError("[VideoEncoder::PrepareStream] " + Logger::tr("Error: Width or height is not an even number!"));
		throw LibavException();
	}
	if(frame_rate == 0) {
		Logger::LogError("[VideoEncoder::PrepareStream] " + Logger::tr("Error: Frame rate is zero!"));
		throw LibavException();
	}

	// initialize codec context
	codec_context->bit_rate = bit_rate;
	codec_context->width = width;
	codec_context->height = height;
	codec_context->time_base.num = 1;
	codec_context->time_base.den = frame_rate;
#if SSR_USE_AVSTREAM_TIME_BASE
	stream->time_base = codec_context->time_base;
#endif
	codec_context->sample_aspect_ratio.num = 1;
	codec_context->sample_aspect_ratio.den = 1;
	stream->sample_aspect_ratio = codec_context->sample_aspect_ratio;
	codec_context->thread_count = std::max(1, (int) std::thread::hardware_concurrency());

	// parse options
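	// The options are plain (key, value) string pairs, for example {"preset", "veryfast"} or {"crf", "23"};
	// anything that is not recognized below is passed on to the encoder itself via av_dict_set.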
	QString pixel_format_name;
	for(unsigned int i = 0; i < codec_options.size(); ++i) {
		const QString &key = codec_options[i].first, &value = codec_options[i].second;
		if(key == "threads") {
			codec_context->thread_count = ParseCodecOptionInt(key, value, 1, 100);
		} else if(key == "qscale") {
			codec_context->flags |= AV_CODEC_FLAG_QSCALE;
			codec_context->global_quality = lrint(ParseCodecOptionDouble(key, value, -1.0e6, 1.0e6, FF_QP2LAMBDA));
		} else if(key == "minrate") {
			codec_context->rc_min_rate = ParseCodecOptionInt(key, value, 1, 1000000, 1000); // kbit/s
		} else if(key == "maxrate") {
			codec_context->rc_max_rate = ParseCodecOptionInt(key, value, 1, 1000000, 1000); // kbit/s
		} else if(key == "bufsize") {
			codec_context->rc_buffer_size = ParseCodecOptionInt(key, value, 1, 1000000, 1000); // kbit
		} else if(key == "keyint") {
			codec_context->gop_size = ParseCodecOptionInt(key, value, 1, 1000000);
		} else if(key == "pixelformat") {
			pixel_format_name = value;
#if !SSR_USE_AVCODEC_PRIVATE_CRF
		} else if(key == "crf") {
			codec_context->crf = ParseCodecOptionInt(key, value, 0, 51);
#endif
#if !SSR_USE_AVCODEC_PRIVATE_PRESET
		} else if(key == "preset") {
			X264Preset(codec_context, value.toUtf8().constData());
#endif
		} else {
			av_dict_set(options, key.toUtf8().constData(), value.toUtf8().constData(), 0);
		}
	}

	// choose the pixel format
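	// Either the format requested with the 'pixelformat' option is used (if the codec supports it),
	// or the first entry of SUPPORTED_PIXEL_FORMATS that the codec supports.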
	codec_context->pix_fmt = AV_PIX_FMT_NONE;
	for(unsigned int i = 0; i < SUPPORTED_PIXEL_FORMATS.size(); ++i) {
		if(!pixel_format_name.isEmpty() && pixel_format_name != SUPPORTED_PIXEL_FORMATS[i].m_name)
			continue;
		if(!AVCodecSupportsPixelFormat(codec, SUPPORTED_PIXEL_FORMATS[i].m_format))
			continue;
		Logger::LogInfo("[VideoEncoder::PrepareStream] " + Logger::tr("Using pixel format %1.").arg(SUPPORTED_PIXEL_FORMATS[i].m_name));
		codec_context->pix_fmt = SUPPORTED_PIXEL_FORMATS[i].m_format;
		if(SUPPORTED_PIXEL_FORMATS[i].m_is_yuv) {
			codec_context->color_primaries = AVCOL_PRI_BT709;
			codec_context->color_trc = AVCOL_TRC_BT709;
			codec_context->colorspace = AVCOL_SPC_BT709;
			codec_context->color_range = AVCOL_RANGE_MPEG;
			codec_context->chroma_sample_location = AVCHROMA_LOC_CENTER;
		} else {
			codec_context->colorspace = AVCOL_SPC_RGB;
		}
		break;
	}
	if(codec_context->pix_fmt == AV_PIX_FMT_NONE) {
		Logger::LogError("[VideoEncoder::PrepareStream] " + Logger::tr("Error: The pixel format is not supported by the codec!"));
		throw LibavException();
	}

}

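// Encodes one frame and hands any resulting packets to the muxer. Passing NULL flushes the encoder.
// Returns false when no packet was produced (with the send/receive API, only once the encoder has been fully drained).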
bool VideoEncoder::EncodeFrame(AVFrameWrapper* frame) {

	if(frame != NULL) {
#if SSR_USE_AVFRAME_WIDTH_HEIGHT
		assert(frame->GetFrame()->width == GetCodecContext()->width);
		assert(frame->GetFrame()->height == GetCodecContext()->height);
#endif
#if SSR_USE_AVFRAME_FORMAT
		assert(frame->GetFrame()->format == GetCodecContext()->pix_fmt);
#endif
#if SSR_USE_AVFRAME_SAR
		assert(frame->GetFrame()->sample_aspect_ratio.num == GetCodecContext()->sample_aspect_ratio.num);
		assert(frame->GetFrame()->sample_aspect_ratio.den == GetCodecContext()->sample_aspect_ratio.den);
#endif
	}

#if SSR_USE_AVCODEC_SEND_RECEIVE

	// send a frame
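	// note: sending NULL to avcodec_send_frame() puts the encoder into draining mode so that the remaining packets can be collected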
	AVFrame *avframe = (frame == NULL)? NULL : frame->Release();
	try {
		if(avcodec_send_frame(GetCodecContext(), avframe) < 0) {
			Logger::LogError("[VideoEncoder::EncodeFrame] " + Logger::tr("Error: Sending of video frame failed!"));
			throw LibavException();
		}
	} catch(...) {
		av_frame_free(&avframe);
		throw;
	}
	av_frame_free(&avframe);

	// try to receive a packet
	for( ; ; ) {
		std::unique_ptr<AVPacketWrapper> packet(new AVPacketWrapper());
		int res = avcodec_receive_packet(GetCodecContext(), packet->GetPacket());
		if(res == 0) { // we have a packet, send the packet to the muxer
			GetMuxer()->AddPacket(GetStream()->index, std::move(packet));
			IncrementPacketCounter();
		} else if(res == AVERROR(EAGAIN)) { // we have no packet
			return true;
		} else if(res == AVERROR_EOF) { // this is the end of the stream
			return false;
		} else {
			Logger::LogError("[VideoEncoder::EncodeFrame] " + Logger::tr("Error: Receiving of video packet failed!"));
			throw LibavException();
		}
	}

#elif SSR_USE_AVCODEC_ENCODE_VIDEO2

	// allocate a packet
	std::unique_ptr<AVPacketWrapper> packet(new AVPacketWrapper());

	// encode the frame
	int got_packet;
	if(avcodec_encode_video2(GetCodecContext(), packet->GetPacket(), (frame == NULL)? NULL : frame->GetFrame(), &got_packet) < 0) {
		Logger::LogError("[VideoEncoder::EncodeFrame] " + Logger::tr("Error: Encoding of video frame failed!"));
		throw LibavException();
	}

	// do we have a packet?
	if(got_packet) {

		// send the packet to the muxer
		GetMuxer()->AddPacket(GetStream()->index, std::move(packet));
		IncrementPacketCounter();
		return true;

	} else {
		return false;
	}

#else

	// encode the frame
	int bytes_encoded = avcodec_encode_video(GetCodecContext(), m_temp_buffer.data(), m_temp_buffer.size(), (frame == NULL)? NULL : frame->GetFrame());
	if(bytes_encoded < 0) {
		Logger::LogError("[VideoEncoder::EncodeFrame] " + Logger::tr("Error: Encoding of video frame failed!"));
		throw LibavException();
	}

	// do we have a packet?
	if(bytes_encoded > 0) {

		// allocate a packet
		std::unique_ptr<AVPacketWrapper> packet(new AVPacketWrapper(bytes_encoded));

		// copy the data
		memcpy(packet->GetPacket()->data, m_temp_buffer.data(), bytes_encoded);

		// set the timestamp
		// note: pts will be rescaled and stream_index will be set by Muxer
		if(GetCodecContext()->coded_frame != NULL && GetCodecContext()->coded_frame->pts != (int64_t) AV_NOPTS_VALUE)
			packet->GetPacket()->pts = GetCodecContext()->coded_frame->pts;

		// set the keyframe flag
		if(GetCodecContext()->coded_frame != NULL && GetCodecContext()->coded_frame->key_frame)
			packet->GetPacket()->flags |= AV_PKT_FLAG_KEY;

		// send the packet to the muxer
		GetMuxer()->AddPacket(GetStream()->index, std::move(packet));
		IncrementPacketCounter();
		return true;

	} else {
		return false;
	}

#endif

}