/**
 * @file
 * @brief Header file for Timeline class
 * @author Jonathan Thomas <jonathan@openshot.org>
 *
 * @ref License
 */

/* LICENSE
 *
 * Copyright (c) 2008-2019 OpenShot Studios, LLC
 * <http://www.openshotstudios.com/>. This file is part of
 * OpenShot Library (libopenshot), an open-source project dedicated to
 * delivering high quality video editing and animation solutions to the
 * world. For more information visit <http://www.openshot.org/>.
 *
 * OpenShot Library (libopenshot) is free software: you can redistribute it
 * and/or modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * OpenShot Library (libopenshot) is distributed in the hope that it will be
 * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with OpenShot Library. If not, see <http://www.gnu.org/licenses/>.
 */

#ifndef OPENSHOT_TIMELINE_H
#define OPENSHOT_TIMELINE_H

#include <list>
#include <memory>
#include <mutex>
#include <set>
#include <QtGui/QImage>
#include <QtGui/QPainter>
#include <QtCore/QRegularExpression>

#include "TimelineBase.h"
#include "ReaderBase.h"

#include "Color.h"
#include "Clip.h"
#include "EffectBase.h"
#include "Fraction.h"
#include "Frame.h"
#include "KeyFrame.h"
#ifdef USE_OPENCV
#include "TrackedObjectBBox.h"
#endif
#include "TrackedObjectBase.h"


namespace openshot {

	// Forward decls
	class FrameMapper;
	class CacheBase;

	/// Comparison method for sorting clip pointers (by Layer and then Position). Clips are sorted
	/// from lowest layer to top layer (since that is the sequence in which they need to be combined), and then
	/// by position (left to right).
	struct CompareClips{
		bool operator()( openshot::Clip* lhs, openshot::Clip* rhs){
			if( lhs->Layer() < rhs->Layer() ) return true;
			if( lhs->Layer() == rhs->Layer() && lhs->Position() <= rhs->Position() ) return true;
			return false;
	}};

	/// Comparison method for sorting effect pointers (by Position, Layer, and Order). Effects are sorted
	/// from lowest layer to top layer (since that is the sequence in which clips are combined), and then by
	/// position, and then by effect order.
	struct CompareEffects{
		bool operator()( openshot::EffectBase* lhs, openshot::EffectBase* rhs){
			if( lhs->Layer() < rhs->Layer() ) return true;
			if( lhs->Layer() == rhs->Layer() && lhs->Position() < rhs->Position() ) return true;
			if( lhs->Layer() == rhs->Layer() && lhs->Position() == rhs->Position() && lhs->Order() > rhs->Order() ) return true;
			return false;
	}};

	/// Comparison method for finding the far end of the timeline, by locating
	/// the Clip with the highest end-frame number using std::max_element
	struct CompareClipEndFrames {
		bool operator()(const openshot::Clip* lhs, const openshot::Clip* rhs) {
			return (lhs->Position() + lhs->Duration())
			       <= (rhs->Position() + rhs->Duration());
	}};

	/// Like CompareClipEndFrames, but for effects
	struct CompareEffectEndFrames {
		bool operator()(const openshot::EffectBase* lhs, const openshot::EffectBase* rhs) {
			return (lhs->Position() + lhs->Duration())
				<= (rhs->Position() + rhs->Duration());
	}};

	/**
	 * @brief This class represents a timeline
	 *
	 * The timeline is one of the <b>most important</b> features of a video editor, and controls all
	 * aspects of how video, image, and audio clips are combined together, and how the final
	 * video output will be rendered. It has a collection of layers and clips that arrange,
	 * sequence, and generate the final video output.
	 *
	 * The <b>following graphic</b> displays a timeline, and how clips can be arranged, scaled, and layered together. It
	 * also demonstrates how the viewport can be scaled smaller than the canvas, which can be used to zoom and pan around the
	 * canvas (i.e. pan & scan).
	 * \image html /doc/images/Timeline_Layers.png
	 *
	 * The <b>following graphic</b> displays how the playhead determines which frames to combine and layer.
	 * \image html /doc/images/Playhead.png
	 *
	 * Let's take a look at what the code looks like:
	 * @code
	 * // Create a Timeline
	 * Timeline t(1280, // width
	 *            720, // height
	 *            Fraction(25,1), // framerate
	 *            44100, // sample rate
	 *            2, // channels
	 *            ChannelLayout::LAYOUT_STEREO // channel layout
	 *            );
	 *
	 * // Create some clips
	 * Clip c1(new ImageReader("MyAwesomeLogo.jpeg"));
	 * Clip c2(new FFmpegReader("BackgroundVideo.webm"));
	 *
	 * // CLIP 1 (logo) - Set some clip properties (with Keyframes)
	 * c1.Position(0.0); // Set the position or location (in seconds) on the timeline
	 * c1.gravity = GRAVITY_LEFT; // Set the alignment / gravity of the clip (position on the screen)
	 * c1.scale = SCALE_CROP; // Set the scale mode (how the image is resized to fill the screen)
	 * c1.Layer(1); // Set the layer of the timeline (higher layers cover up images of lower layers)
	 * c1.Start(0.0); // Set the starting position of the video (trim the left side of the video)
	 * c1.End(16.0); // Set the ending position of the video (trim the right side of the video)
	 * c1.alpha.AddPoint(1, 0.0); // Set the alpha to transparent on frame #1
	 * c1.alpha.AddPoint(500, 0.0); // Keep the alpha transparent until frame #500
	 * c1.alpha.AddPoint(565, 1.0); // Animate the alpha from transparent to visible (between frame #501 and #565)
	 *
	 * // CLIP 2 (background video) - Set some clip properties (with Keyframes)
	 * c2.Position(0.0); // Set the position or location (in seconds) on the timeline
	 * c2.Start(10.0); // Set the starting position of the video (trim the left side of the video)
	 * c2.Layer(0); // Set the layer of the timeline (higher layers cover up images of lower layers)
	 * c2.alpha.AddPoint(1, 1.0); // Set the alpha to visible on frame #1
	 * c2.alpha.AddPoint(150, 0.0); // Animate the alpha to transparent (between frame #2 and frame #150)
	 * c2.alpha.AddPoint(360, 0.0, LINEAR); // Keep the alpha transparent until frame #360
	 * c2.alpha.AddPoint(384, 1.0); // Animate the alpha to visible (between frame #360 and frame #384)
	 *
	 * // Add clips to timeline
	 * t.AddClip(&c1);
	 * t.AddClip(&c2);
	 *
	 * // Open the timeline reader
	 * t.Open();
	 *
	 * // Get frame number 1 from the timeline (This will generate a new frame, made up from the previous clips and settings)
	 * std::shared_ptr<Frame> f = t.GetFrame(1);
	 *
	 * // Now that we have an openshot::Frame object, let's have some fun!
	 * f->Display(); // Display the frame on the screen
	 *
	 * // Close the timeline reader
	 * t.Close();
	 * @endcode
	 */
	class Timeline : public openshot::TimelineBase, public openshot::ReaderBase {
	private:
		bool is_open; ///< Is the timeline open?
		bool auto_map_clips; ///< Auto map framerates and sample rates to all clips
		std::list<openshot::Clip*> clips; ///< List of clips on this timeline
		std::list<openshot::Clip*> closing_clips; ///< List of clips that need to be closed
		std::map<openshot::Clip*, openshot::Clip*> open_clips; ///< Map of 'opened' clips on this timeline
		std::list<openshot::EffectBase*> effects; ///< List of effects on this timeline
		openshot::CacheBase *final_cache; ///< Final cache of timeline frames
		std::set<openshot::FrameMapper*> allocated_frame_mappers; ///< All the frame mappers we allocated and must free
		bool managed_cache; ///< Does this timeline instance manage the cache object
		std::string path; ///< Optional path of loaded UTF-8 OpenShot JSON project file
		std::mutex get_frame_mutex; ///< Mutex to protect GetFrame method from different threads calling it
		int max_concurrent_frames; ///< Max concurrent frames to process at one time

		std::map<std::string, std::shared_ptr<openshot::TrackedObjectBase>> tracked_objects; ///< Map of tracked objects (e.g. TrackedObjectBBox) and their IDs

		/// Process a new layer of video or audio
		void add_layer(std::shared_ptr<openshot::Frame> new_frame, openshot::Clip* source_clip, int64_t clip_frame_number, bool is_top_clip, float max_volume);

		/// Apply a FrameMapper to a clip which matches the settings of this timeline
		void apply_mapper_to_clip(openshot::Clip* clip);

		// Apply JSON Diffs to various objects contained in this timeline
		void apply_json_to_clips(Json::Value change); ///< Apply JSON diff to clips
		void apply_json_to_effects(Json::Value change); ///< Apply JSON diff to effects
		void apply_json_to_effects(Json::Value change, openshot::EffectBase* existing_effect); ///< Apply JSON diff to a specific effect
		void apply_json_to_timeline(Json::Value change); ///< Apply JSON diff to timeline properties

		/// Calculate time of a frame number, based on a framerate
		double calculate_time(int64_t number, openshot::Fraction rate);

		/// Find intersecting (or non-intersecting) openshot::Clip objects
		///
		/// @returns A vector of openshot::Clip pointers
		/// @param requested_frame The frame number that is requested.
		/// @param number_of_frames The number of frames to check
		/// @param include Include or Exclude intersecting clips
		std::vector<openshot::Clip*> find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include);

		/// Get a clip's frame or generate a blank frame
		std::shared_ptr<openshot::Frame> GetOrCreateFrame(std::shared_ptr<Frame> background_frame, openshot::Clip* clip, int64_t number, openshot::TimelineInfoStruct* options);

		/// Compare 2 floating point numbers for equality
		bool isEqual(double a, double b);

		/// Sort clips by position on the timeline
		void sort_clips();

		/// Sort effects by position on the timeline
		void sort_effects();

		/// Update the list of 'opened' clips
		void update_open_clips(openshot::Clip *clip, bool does_clip_intersect);

	public:

		/// @brief Constructor for the timeline (which configures the default frame properties)
		/// @param width The image width of generated openshot::Frame objects
		/// @param height The image height of generated openshot::Frame objects
		/// @param fps The frame rate of the generated video
		/// @param sample_rate The audio sample rate
		/// @param channels The number of audio channels
		/// @param channel_layout The channel layout (e.g. mono, stereo, 3-point surround, etc.)
		Timeline(int width, int height, openshot::Fraction fps, int sample_rate, int channels, openshot::ChannelLayout channel_layout);

		/// @brief Constructor which takes a ReaderInfo struct to configure parameters
		/// @param info The reader parameters to configure the new timeline with
		Timeline(ReaderInfo info);
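		// A minimal usage sketch: copy an existing reader's parameters (size, frame rate,
		// audio settings) into a new timeline. "BackgroundVideo.webm" is a placeholder path.
		//
		//     FFmpegReader r("BackgroundVideo.webm");
		//     r.Open();              // make sure r.info is populated
		//     Timeline t(r.info);    // timeline now matches the reader's settings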

		/// @brief Project-file constructor for the timeline
		///
		/// Loads a JSON structure from a file path, and
		/// initializes the timeline described within.
		///
		/// @param projectPath The path of the UTF-8 *.osp project file (JSON contents). Contents will be loaded automatically.
		/// @param convert_absolute_paths Should all paths be converted to absolute paths (relative to the location of projectPath)
		Timeline(const std::string& projectPath, bool convert_absolute_paths);
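		// A minimal sketch of loading a saved project ("MyProject.osp" is a placeholder path):
		//
		//     Timeline t("MyProject.osp", true);  // true: convert relative asset paths to absolute
		//     t.Open();
		//     std::shared_ptr<Frame> f = t.GetFrame(1);
		//     t.Close();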

		virtual ~Timeline();

		/// Add a pointer to a tracked object (TrackedObjectBBox) to the tracked_objects map
		void AddTrackedObject(std::shared_ptr<openshot::TrackedObjectBase> trackedObject);
		/// Return a tracked object pointer by its ID
		std::shared_ptr<openshot::TrackedObjectBase> GetTrackedObject(std::string id) const;
		/// Return the IDs of the tracked objects as a list of strings
		std::list<std::string> GetTrackedObjectsIds() const;
		/// Return the trackedObject's properties as a JSON string
		#ifdef USE_OPENCV
		std::string GetTrackedObjectValues(std::string id, int64_t frame_number) const;
		#endif

		/// @brief Add an openshot::Clip to the timeline
		/// @param clip The openshot::Clip to add. A clip can contain any type of Reader.
		void AddClip(openshot::Clip* clip);

		/// @brief Add an effect to the timeline
		/// @param effect The effect to add. An effect can modify the audio or video of an openshot::Frame.
		void AddEffect(openshot::EffectBase* effect);

		/// Apply global/timeline effects to the source frame (if any)
		std::shared_ptr<openshot::Frame> apply_effects(std::shared_ptr<openshot::Frame> frame, int64_t timeline_frame_number, int layer);

		/// Apply the timeline's framerate and samplerate to all clips
		void ApplyMapperToClips();

		/// Determine if clips are automatically mapped to the timeline's framerate and samplerate
		bool AutoMapClips() { return auto_map_clips; };

		/// @brief Automatically map all clips to the timeline's framerate and samplerate
		void AutoMapClips(bool auto_map) { auto_map_clips = auto_map; };
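		// Example (sketch): defer mapping while adding several clips, then apply the
		// timeline's frame rate / sample rate to all of them in one pass.
		//
		//     t.AutoMapClips(false);
		//     t.AddClip(&c1);
		//     t.AddClip(&c2);
		//     t.AutoMapClips(true);
		//     t.ApplyMapperToClips();  // re-map every clip to the timeline settings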

		/// Clear all cache for this timeline instance, and all clips, mappers, and readers under it
		void ClearAllCache();

		/// Return a list of clips on the timeline
		std::list<openshot::Clip*> Clips() { return clips; };

		/// Look up a single clip by ID
		openshot::Clip* GetClip(const std::string& id);

		/// Look up a clip effect by ID
		openshot::EffectBase* GetClipEffect(const std::string& id);

		/// Look up a timeline effect by ID
		openshot::EffectBase* GetEffect(const std::string& id);

		/// Look up the end time of the latest timeline element
		double GetMaxTime();
		/// Look up the end frame number of the latest element on the timeline
		int64_t GetMaxFrame();

		/// Close the timeline reader (and any resources it was consuming)
		void Close() override;

		/// Return the list of effects on the timeline
		std::list<openshot::EffectBase*> Effects() { return effects; };

		/// Return the list of effects on all clips
		std::list<openshot::EffectBase*> ClipEffects() const;

		/// Get the cache object used by this reader
		openshot::CacheBase* GetCache() override { return final_cache; };

		/// Set the cache object used by this reader. The caller becomes responsible for
		/// the lifecycle of this cache object (Timeline will not delete it).
		void SetCache(openshot::CacheBase* new_cache);
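		// A minimal sketch of handing the timeline a caller-owned cache. CacheMemory
		// (from "CacheMemory.h") and its byte-limit constructor are assumed here; the
		// caller must keep the cache alive and delete it once the timeline is closed.
		//
		//     CacheMemory* cache = new CacheMemory(512 * 1024 * 1024);  // ~512 MB limit
		//     t.SetCache(cache);
		//     // ... use the timeline ...
		//     t.Close();
		//     delete cache;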

		/// Get an openshot::Frame object for a specific frame number of this timeline.
		///
		/// @returns The requested frame (containing the image)
		/// @param requested_frame The frame number that is requested.
		std::shared_ptr<openshot::Frame> GetFrame(int64_t requested_frame) override;

		// Curves for the viewport
		openshot::Keyframe viewport_scale; ///< Curve representing the scale of the viewport (0 to 100)
		openshot::Keyframe viewport_x; ///< Curve representing the x coordinate for the viewport
		openshot::Keyframe viewport_y; ///< Curve representing the y coordinate for the viewport

		// Background color
		openshot::Color color; ///< Background color of timeline canvas

		/// Determine if reader is open or closed
		bool IsOpen() override { return is_open; };

		/// Return the type name of the class
		std::string Name() override { return "Timeline"; };

		// Get and Set JSON methods
		std::string Json() const override; ///< Generate JSON string of this object
		void SetJson(const std::string value) override; ///< Load JSON string into this object
		Json::Value JsonValue() const override; ///< Generate Json::Value for this object
		void SetJsonValue(const Json::Value root) override; ///< Load Json::Value into this object

		/// Set Max Image Size (used for performance optimization). Convenience function for setting
		/// Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT.
		void SetMaxSize(int width, int height);
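		// Example (sketch): limit generated images for faster, lower-resolution previews.
		//
		//     t.SetMaxSize(640, 360);  // generated frames are capped at 640x360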

		/// @brief Apply a specially-formatted JSON object, which represents a change to the timeline (add, update, delete)
		/// This is primarily designed to keep the timeline (and its child objects... such as clips and effects) in sync
		/// with another application... such as OpenShot Video Editor (http://www.openshot.org).
		/// @param value A JSON string containing a key, value, and type of change.
		void ApplyJsonDiff(std::string value);
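		// Illustrative only -- the exact diff schema is defined by the calling application;
		// this sketch just mirrors the key/value/type shape described above.
		//
		//     t.ApplyJsonDiff(R"([{"type": "insert", "key": ["clips"], "value": { /* clip JSON */ }}])");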

		/// Open the reader (and start consuming resources)
		void Open() override;

		/// @brief Remove an openshot::Clip from the timeline
		/// @param clip The openshot::Clip to remove
		void RemoveClip(openshot::Clip* clip);

		/// @brief Remove an effect from the timeline
		/// @param effect The effect to remove
		void RemoveEffect(openshot::EffectBase* effect);
	};

}

#endif // OPENSHOT_TIMELINE_H