/***

    Olive - Non-Linear Video Editor
    Copyright (C) 2019  Olive Team

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.

***/

#ifndef RENDERFUNCTIONS_H
#define RENDERFUNCTIONS_H

#include <QOpenGLContext>
#include <QVector>
#include <QOpenGLShaderProgram>

#include "timeline/sequence.h"
#include "effects/effect.h"
#include "panels/viewer.h"

/**
 * @brief The ComposeSequenceParams struct
 *
 * Struct sent to the compose_sequence() function.
 */
struct ComposeSequenceParams {

    /**
     * @brief Reference to the Viewer class that's calling compose_sequence()
     *
     * Primarily used for calling Viewer::play_wake() when appropriate.
     */
    Viewer* viewer;

    /**
     * @brief The OpenGL context to use while rendering.
     *
     * For video rendering, this must be a valid OpenGL context. For audio, this variable is never accessed.
     *
     * \see ComposeSequenceParams::video
     */
    QOpenGLContext* ctx;

    /**
     * @brief The sequence to compose
     *
     * In addition to clips, sequences also contain the playhead position so compose_sequence() knows which frame
     * to render.
     */
    Sequence* seq;

    /**
     * @brief Array to store the nested sequence hierarchy
     *
     * Should be left empty. This array gets passed around compose_sequence() as it calls itself recursively to
     * handle nested sequences.
     */
    QVector<Clip*> nests;

    /**
     * @brief Set compose mode to video or audio
     *
     * **TRUE** if this function should render video, **FALSE** if this function should render audio.
     */
    bool video;

    /**
     * @brief Set to the Effect whose gizmos were chosen to be drawn on screen
     *
     * The currently active Effect that compose_sequence() will update the gizmos of.
     */
    Effect* gizmos;

    /**
     * @brief A variable that compose_sequence() will set to **TRUE** if any of the clips couldn't be shown.
     *
     * A footage item or shader may not be ready at the time this frame is drawn. If any of the clips in the scene
     * couldn't be drawn, this variable is set to **TRUE**, indicating that the rendered image is a "best effort"
     * rather than the actual image.
     *
     * This variable should be checked after compose_sequence() returns, and a repaint should be triggered if it's
     * **TRUE**.
     *
     * \note This variable is probably bad design and is a relic of an earlier rendering backend. There may be a better
     * way to communicate this information.
     *
     * Additionally, since compose_sequence() for video will now always run in a separate thread anyway, there's no
     * real issue with stalling it to wait for footage to finish opening or for whatever else may be lagging behind.
     * A possible side effect of this, though, is that the preview may become less responsive if it's stuck trying to
     * render one frame. With the current system, the preview may show incomplete frames occasionally, but at least it
     * will show something, which may be preferable. See ComposeSequenceParams::wait_for_mutexes for a similar
     * mechanism that could be removed.
     */
    bool texture_failed;

    /**
     * @brief Run all cachers in the same thread that compose_sequence() is in
     *
     * Standard behavior is that each clip caches frames in its own thread, and signals are sent between
     * compose_sequence() and the clip's cacher thread regarding which frames to display and cache, without stalling
     * the compose_sequence() thread. Setting this to **TRUE** runs all cachers in the same thread, creating a
     * technically more "perfect" connection between them, but also stalling the compose_sequence() thread. Used
     * when exporting, as real-time timing isn't as important as producing output frames as quickly as possible.
     *
     * \note Exporting should probably be rewritten without this. While running all the cachers in one thread makes
     * it easier to synchronize everything, export performance could probably benefit from keeping them in separate
     * threads and syncing up with them. See ComposeSequenceParams::texture_failed for a similar mechanism that could
     * be removed.
     */
    bool wait_for_mutexes;

    /**
     * @brief Set the current playback speed (adjusted with Shuttle Left/Right)
     *
     * Only used for audio rendering to determine how many samples to skip in order to play audio at the correct speed.
     *
     * \see ComposeSequenceParams::video
     */
    int playback_speed;

    /**
     * @brief Blending mode shader
     *
     * Used only for video rendering. Never accessed with audio rendering.
     *
     * A program containing the currently active blending mode shader that can be bound during rendering. Must be
     * compiled and linked beforehand. See RenderThread::blend_mode_program for how this is properly set up.
     *
     * \see ComposeSequenceParams::video
     */
    QOpenGLShaderProgram* blend_mode_program;

    /**
     * @brief Premultiply alpha shader
     *
     * Used only for video rendering. Never accessed with audio rendering.
     *
     * compose_sequence()'s internal composition expects premultiplied alpha, but it will pre-emptively multiply any
     * footage that is not set as already premultiplied (see Footage::alpha_is_premultiplied) using this shader. Must
     * be compiled and linked beforehand. See RenderThread::premultiply_program for how this is properly set up.
     */
    QOpenGLShaderProgram* premultiply_program;

    /**
     * @brief The OpenGL framebuffer object that the final texture to be shown is rendered to.
     *
     * Used only for video rendering. Never accessed with audio rendering.
     *
     * When compose_sequence() is rendering the final image, this framebuffer will be bound.
     */
    GLuint main_buffer;

    /**
     * @brief The attachment to the framebuffer in main_buffer
     *
     * Used only for video rendering. Never accessed with audio rendering.
     *
     * The OpenGL texture attached to the framebuffer referenced by main_buffer.
     */
    GLuint main_attachment;

    /**
     * @brief Backend OpenGL framebuffer 1 used for further processing before rendering to main_buffer
     *
     * In some situations, compose_sequence() will do some processing through shaders that requires "ping-ponging"
     * between framebuffers. backend_buffer1 and backend_buffer2 are used for this purpose.
     */
    GLuint backend_buffer1;

    /**
     * @brief Backend OpenGL framebuffer 1's texture attachment
     *
     * The texture that ComposeSequenceParams::backend_buffer1 renders to. Bound and drawn to
     * ComposeSequenceParams::backend_buffer2 to "ping-pong" between them and various shaders.
     */
    GLuint backend_attachment1;

    /**
     * @brief Backend OpenGL framebuffer 2 used for further processing before rendering to main_buffer
     *
     * In some situations, compose_sequence() will do some processing through shaders that requires "ping-ponging"
     * between framebuffers. backend_buffer1 and backend_buffer2 are used for this purpose.
     */
    GLuint backend_buffer2;

    /**
     * @brief Backend OpenGL framebuffer 2's texture attachment
     *
     * The texture that ComposeSequenceParams::backend_buffer2 renders to. Bound and drawn to
     * ComposeSequenceParams::backend_buffer1 to "ping-pong" between them and various shaders.
     */
    GLuint backend_attachment2;

    /**
     * @brief OpenGL shader containing OpenColorIO shader information
     */
    QOpenGLShaderProgram* ocio_shader;

    /**
     * @brief OpenGL texture containing the LUT obtained from OpenColorIO
     */
    GLuint ocio_lut_texture;
};
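
/*
 * Illustrative population sketch for a video render (not part of the original header). The framebuffers
 * and shader programs are assumed to have been created, compiled and linked beforehand by the render
 * backend (see RenderThread::blend_mode_program and RenderThread::premultiply_program); the variable
 * names below are hypothetical.
 *
 *     ComposeSequenceParams params;
 *     params.viewer = viewer;                    // the Viewer requesting the frame
 *     params.ctx = context;                      // valid OpenGL context (video only)
 *     params.seq = sequence;                     // composed at Sequence::playhead
 *     params.video = true;                       // render video rather than audio
 *     params.texture_failed = false;             // output flag; check after the call
 *     params.wait_for_mutexes = false;           // keep cachers in their own threads
 *     params.blend_mode_program = blend_prog;    // pre-linked blending mode shader
 *     params.premultiply_program = premult_prog; // pre-linked premultiply shader
 *     params.main_buffer = fbo;                  // destination framebuffer
 *     params.main_attachment = fbo_texture;      // its color attachment
 *     // backend_buffer1/2 and backend_attachment1/2 for shader ping-ponging,
 *     // plus ocio_shader and ocio_lut_texture for color management, as documented above.
 */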

namespace olive {
namespace rendering {
/**
 * @brief Compose a frame of a given sequence
 *
 * For any given Sequence, this function will render the current frame indicated by Sequence::playhead. It will
 * automatically open and close clips (memory allocation and file handles) as necessary, communicate with the
 * Clip::cacher objects to retrieve upcoming frames and store them in memory, run Effect processing functions, and
 * finally composite all the currently active clips together into a final texture.
 *
 * Will sometimes render a frame incompletely or inaccurately, e.g. if a video file hadn't finished opening by the
 * time of the render or a clip's cacher didn't have the requested frame available. If so, the `texture_failed`
 * variable of `params` will be set to **TRUE**. Check this after calling compose_sequence(), and if it is **TRUE**,
 * compose_sequence() should be called again later to attempt another render (unless the Sequence is being played,
 * in which case just play the next frame rather than redrawing an old frame).
 *
 * @param params
 *
 * A struct of parameters to use while rendering.
 *
 * @return The OpenGL texture handle resulting from the render. Will usually be equal to
 * ComposeSequenceParams::main_attachment unless a nested sequence is being rendered, in which case it will be one of
 * the textures referenced by Clip::fbo. Can be used directly to draw the rendered frame.
 */
GLuint compose_sequence(ComposeSequenceParams &params);
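
/*
 * Calling sketch (illustrative only; assumes a `params` struct populated as in the sketch after
 * ComposeSequenceParams above):
 *
 *     params.texture_failed = false;
 *     GLuint tex = olive::rendering::compose_sequence(params);
 *     // draw `tex` to the screen...
 *     if (params.texture_failed) {
 *         // "best effort" frame; if the sequence isn't playing, schedule another render
 *         // of the same frame later rather than keeping the incomplete one
 *     }
 */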

/**
 * @brief Convenience wrapper function for compose_sequence() to render audio
 *
 * Much of the functionality provided (and parameters required) by compose_sequence() is only useful/necessary for
 * video rendering. For audio rendering, this function is easier to use and will correctly set up
 * compose_sequence() to render audio without the cumbersome effort of setting up a ComposeSequenceParams object.
 *
 * @param viewer
 *
 * The Viewer object calling this function
 *
 * @param seq
 *
 * The Sequence whose audio to render.
 *
 * @param playback_speed
 *
 * The current playback speed (controlled by Shuttle Left/Right)
 *
 * @param wait_for_mutexes
 *
 * Whether to wait for media to open or simply fail if the media is not yet open. This should usually be **FALSE**.
 */
void compose_audio(Viewer* viewer, Sequence *seq, int playback_speed, bool wait_for_mutexes);
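
/*
 * Typical call (illustrative; a playback speed of 1 is assumed to mean normal speed, and
 * wait_for_mutexes should usually be false per the parameter notes above):
 *
 *     olive::rendering::compose_audio(viewer, sequence, 1, false);
 */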
}  // namespace rendering
}  // namespace olive

/**
 * @brief Rescale a frame number between two frame rates
 *
 * Converts a frame number from one frame rate to its equivalent in another frame rate
 *
 * @param framenumber
 *
 * The frame number to convert
 *
 * @param source_frame_rate
 *
 * Frame rate that the frame number is currently in
 *
 * @param target_frame_rate
 *
 * Frame rate to convert to
 *
 * @return
 *
 * Rescaled frame number
 */
long rescale_frame_number(long framenumber, double source_frame_rate, double target_frame_rate);
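
/*
 * Worked example (illustrative; exact rounding is an implementation detail of the function):
 * frame 48 at 24 fps is 2.0 seconds into the media, which corresponds to frame 60 at 30 fps.
 *
 *     long f = rescale_frame_number(48, 24.0, 30.0); // expected: 60
 */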

/**
 * @brief Get timecode
 *
 * Get the current clip/media time from the Timeline playhead in seconds. For instance, if the playhead is at the
 * start of a clip (whose in point wasn't trimmed), this would be 0.0 as it's the start of the clip/media.
 *
 * @param c
 *
 * Clip to get the timecode of
 *
 * @param playhead
 *
 * Sequence playhead to convert to a clip/media timecode
 *
 * @return
 *
 * Timecode in seconds
 */
double get_timecode(Clip *c, long playhead);
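
/*
 * Example (illustrative; `clip_start` is a hypothetical variable holding the sequence frame where the
 * clip begins): for a clip whose in point wasn't trimmed, the timecode at its first frame is 0.0, and
 * one second of playback later it is roughly 1.0 (ignoring any clip speed adjustments).
 *
 *     double t = get_timecode(c, clip_start); // 0.0
 */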

/**
 * @brief Convert playhead frame number to a clip frame number
 *
 * Converts a Timeline playhead to the current clip's frame. Equivalent to
 * `PLAYHEAD - CLIP_TIMELINE_IN + CLIP_MEDIA_IN`. All keyframes are in clip frames.
 *
 * @param c
 *
 * The clip to get the current frame number of
 *
 * @param playhead
 *
 * The current Timeline frame number
 *
 * @return
 *
 * The current frame number of the clip at `playhead`
 */
long playhead_to_clip_frame(Clip* c, long playhead);
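
/*
 * Worked example of the documented formula `PLAYHEAD - CLIP_TIMELINE_IN + CLIP_MEDIA_IN`
 * (the frame numbers are made up for illustration):
 *
 *     // playhead = 120, clip placed at timeline frame 100, clip media in point = 10:
 *     // 120 - 100 + 10 = 30
 *     long frame = playhead_to_clip_frame(c, 120); // expected: 30
 */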

/**
 * @brief Converts the playhead to clip seconds
 *
 * Get the current timecode at the playhead in terms of clip seconds.
 *
 * FIXME: Possible duplicate of get_timecode()? Will need to research this more.
 *
 * @param c
 *
 * Clip to return clip seconds of.
 *
 * @param playhead
 *
 * Current Timeline playhead to convert to clip seconds
 *
 * @return
 *
 * Clip time in seconds
 */
double playhead_to_clip_seconds(Clip *c, long playhead);

/**
 * @brief Convert seconds to FFmpeg timestamp
 *
 * Used for interaction with FFmpeg; converts a time in seconds (as a floating-point value) to a timestamp in
 * AVStream->time_base units.
 *
 * @param c
 *
 * Clip to get timestamp of
 *
 * @param seconds
 *
 * Clip time in seconds
 *
 * @return
 *
 * An FFmpeg-compatible timestamp in AVStream->time_base units.
 */
int64_t seconds_to_timestamp(Clip* c, double seconds);
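
/*
 * Worked example (illustrative): a stream with a time_base of 1/90000 (a common value for video
 * streams) counts 90000 units per second, so 2.5 seconds becomes 225000 units.
 *
 *     int64_t ts = seconds_to_timestamp(c, 2.5); // expected: 225000 with a 1/90000 time_base
 */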

/**
 * @brief Convert Timeline playhead to FFmpeg timestamp
 *
 * Used for interaction with FFmpeg; converts the Timeline playhead to a timestamp in AVStream->time_base
 * units.
 *
 * @param c
 *
 * Clip to get timestamp of
 *
 * @param playhead
 *
 * Timeline playhead to convert to a timestamp
 *
 * @return
 *
 * An FFmpeg-compatible timestamp in AVStream->time_base units.
 */
int64_t playhead_to_timestamp(Clip *c, long playhead);

/**
 * @brief Close all open clips in a Sequence
 *
 * Closes any currently open clips on a Sequence and waits for them to close before returning. As a result, this may
 * be slow on large Sequence objects. If a Clip is a nested Sequence, this function calls itself recursively on that
 * Sequence too.
 *
 * @param s
 *
 * The Sequence to close all clips on.
 */
void close_active_clips(Sequence* s);

#endif // RENDERFUNCTIONS_H