/**
 * @file
 * @brief Source file for Example Executable (example app for libopenshot)
 * @author Jonathan Thomas <jonathan@openshot.org>
 *
 * @ref License
 */

/* LICENSE
 *
 * Copyright (c) 2008-2019 OpenShot Studios, LLC
 * <http://www.openshotstudios.com/>. This file is part of
 * OpenShot Library (libopenshot), an open-source project dedicated to
 * delivering high quality video editing and animation solutions to the
 * world. For more information visit <http://www.openshot.org/>.
 *
 * OpenShot Library (libopenshot) is free software: you can redistribute it
 * and/or modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * OpenShot Library (libopenshot) is distributed in the hope that it will be
 * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with OpenShot Library. If not, see <http://www.gnu.org/licenses/>.
 */

#include <fstream>
#include <iostream>
#include <memory>
#include <sstream>              // std::stringstream, used to compose the JSON strings below
#include <opencv2/opencv.hpp>   // cv::Mat, cv::selectROI, cv::imshow, ...
#include "CVTracker.h"
#include "CVStabilization.h"
#include "CVObjectDetection.h"

#include "Clip.h"
#include "EffectBase.h"
#include "EffectInfo.h"
#include "Frame.h"
#include "CrashHandler.h"

using namespace openshot;
using namespace std;

/*
||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
The following functions only build the JSON strings passed to the pre-processing effects
*/

string jsonFormat(string key, string value, string type="string"); // Format a key/value pair in the required JSON form
string trackerJson(cv::Rect2d r, bool onlyProtoPath); // Set variable values for the tracker effect
string stabilizerJson(bool onlyProtoPath); // Set variable values for the stabilizer effect
string objectDetectionJson(bool onlyProtoPath); // Set variable values for the object detector effect
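// For illustration, trackerJson(r, false) composes a string like the following
// (region values are illustrative; they come from the ROI selected at runtime):
//   {"protobuf_data_path": "kcf_tracker.data","tracker-type": "KCF",
//    "region": {"x": 100,"y": 100,"width": 50,"height": 50,"first-frame": 0}}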

/*
||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
*/

// Show the pre-processed clip on the screen
void displayClip(openshot::Clip &r9){

    // OpenCV display window
    cv::namedWindow("Display Image", cv::WINDOW_NORMAL);

    // Get video length
    int videoLength = r9.Reader()->info.video_length;

    // Loop through the clip and show it with the effects, if any
    for (long int frame = 0; frame < videoLength; frame++)
    {
        int frame_number = frame;
        // Get the frame
        std::shared_ptr<openshot::Frame> f = r9.GetFrame(frame_number);
        // Grab the OpenCV cv::Mat image
        cv::Mat cvimage = f->GetImageCV();

        // Display the frame
        cv::imshow("Display Image", cvimage);

        // Press ESC on keyboard to exit
        char c = (char)cv::waitKey(25);
        if(c == 27)
            break;
    }
    // Destroy all remaining windows
    cv::destroyAllWindows();
}

int main(int argc, char* argv[]) {

    // Set pre-processing effects
    bool TRACK_DATA = true;
    bool SMOOTH_VIDEO = false;
    bool OBJECT_DETECTION_DATA = false;
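    // TRACK_DATA:            run the KCF tracker and attach a "Tracker" effect to the clip
    // SMOOTH_VIDEO:          run the stabilizer and attach a "Stabilizer" effect to the clip
    // OBJECT_DETECTION_DATA: run the YOLO object detector and attach an "Object Detector" effect to the clip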

    // Get media path
    std::stringstream path;
    path << TEST_MEDIA_PATH << ((OBJECT_DETECTION_DATA) ? "run.mp4" : "test.avi");
    //  run.mp4 --> Used for object detector
    // test.avi --> Used for tracker and stabilizer

    // Thread controller required by the pre-processing constructors; it won't be used here
    ProcessingController processingController;

    // Open clip
    openshot::Clip r9(path.str());
    r9.Open();

    // Apply tracking effect on the clip
    if(TRACK_DATA){

        // Select the bounding box coordinates of the region to track
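        // cv::selectROI() opens an interactive window: drag a box around the target,
        // then press ENTER or SPACE to confirm the selection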
        cv::Mat roi = r9.GetFrame(0)->GetImageCV();
        cv::Rect2d r = cv::selectROI(roi);
        cv::destroyAllWindows();

        // Create a tracker object by passing a JSON string and a thread controller (the latter won't be used)
        // JSON info: path to save the tracked data, type of tracker and bbox coordinates
        CVTracker tracker(trackerJson(r, false), processingController);

        // Start the tracking
        tracker.trackClip(r9, 0, 0, true);
        // Save the tracked data
        tracker.SaveTrackedData();

        // Create a tracker effect
        EffectBase* e = EffectInfo().CreateEffect("Tracker");

        // Pass a JSON string with the saved tracked data
        // The effect will read the tracking and store it in a <frame, data_struct> map
        e->SetJson(trackerJson(r, true));
        // Add the effect to the clip
        r9.AddEffect(e);
    }

    // Apply stabilizer effect on the clip
    if(SMOOTH_VIDEO){

        // Create a stabilizer object by passing a JSON string and a thread controller (the latter won't be used)
        // JSON info: path to save the stabilized data and smoothing window value
        CVStabilization stabilizer(stabilizerJson(false), processingController);

        // Start the stabilization
        stabilizer.stabilizeClip(r9, 0, 100, true);
        // Save the stabilization data
        stabilizer.SaveStabilizedData();

        // Create a stabilizer effect
        EffectBase* e = EffectInfo().CreateEffect("Stabilizer");

        // Pass a JSON string with the saved stabilized data
        // The effect will read the stabilization and store it in a <frame, data_struct> map
        e->SetJson(stabilizerJson(true));
        // Add the effect to the clip
        r9.AddEffect(e);
    }

    // Apply object detection effect on the clip
    if(OBJECT_DETECTION_DATA){

        // Create an object detection object by passing a JSON string and a thread controller (the latter won't be used)
        // JSON info: path to save the detection data, processing device, model weights, model configuration and class names
        CVObjectDetection objectDetection(objectDetectionJson(false), processingController);

        // Start the object detection
        objectDetection.detectObjectsClip(r9, 0, 100, true);
        // Save the object detection data
        objectDetection.SaveObjDetectedData();

        // Create an object detector effect
        EffectBase* e = EffectInfo().CreateEffect("Object Detector");

        // Pass a JSON string with the saved detections data
        // The effect will read the detections and store them in a <frame, data_struct> map
        e->SetJson(objectDetectionJson(true));
        // Add the effect to the clip
        r9.AddEffect(e);
    }

    // Show the pre-processed clip on the screen
    displayClip(r9);

    // Close the clip
    r9.Close();

    std::cout << "Completed successfully!" << std::endl;

    return 0;
}


/*
||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||

The following functions only build the JSON strings passed to the pre-processing effects

||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
*/


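// Example output (derived from the logic below):
//   jsonFormat("smoothing_window", "30", "int")  returns  "smoothing_window": 30
//   jsonFormat("tracker-type", "KCF")            returns  "tracker-type": "KCF"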
string jsonFormat(string key, string value, string type){
    stringstream jsonFormatMessage;
    jsonFormatMessage << ( "\"" + key + "\": " );

    if(type == "string")
        jsonFormatMessage << ( "\"" + value + "\"" );
    if(type == "rstring")
        jsonFormatMessage << value;
    if(type == "int")
        jsonFormatMessage << stoi(value);
    if(type == "float")
        jsonFormatMessage << stof(value);
    if(type == "double")
        jsonFormatMessage << stod(value);
    if (type == "bool")
        jsonFormatMessage << ((value == "true" || value == "1") ? "true" : "false");

    return jsonFormatMessage.str();
}

// Return JSON string for the tracker effect
string trackerJson(cv::Rect2d r, bool onlyProtoPath){

    // Define path to save tracked data
    string protobufDataPath = "kcf_tracker.data";
    // Set the tracker type
    string tracker = "KCF";

    // Compose the pieces of the JSON string
    string protobuf_data_path = jsonFormat("protobuf_data_path", protobufDataPath);
    string trackerType = jsonFormat("tracker-type", tracker);
    string bboxCoords = jsonFormat(
                                    "region",
                                            "{" + jsonFormat("x", to_string(r.x), "int") +
                                            "," + jsonFormat("y", to_string(r.y), "int") +
                                            "," + jsonFormat("width", to_string(r.width), "int") +
                                            "," + jsonFormat("height", to_string(r.height), "int") +
                                            "," + jsonFormat("first-frame", to_string(0), "int") +
                                            "}",
                                    "rstring");

    // Return only the protobuf path in JSON format
    if(onlyProtoPath)
        return "{" + protobuf_data_path + "}";
    // Return all the parameters for the pre-processing effect
    else
        return "{" + protobuf_data_path + "," + trackerType + "," + bboxCoords + "}";
}

// Return JSON string for the stabilizer effect
string stabilizerJson(bool onlyProtoPath){

    // Define path to save stabilized data
    string protobufDataPath = "example_stabilizer.data";
    // Set smoothing window value
    string smoothingWindow = "30";

    // Compose the pieces of the JSON string
    string protobuf_data_path = jsonFormat("protobuf_data_path", protobufDataPath);
    string smoothing_window = jsonFormat("smoothing_window", smoothingWindow, "int");
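    // Full string returned when onlyProtoPath is false:
    //   {"protobuf_data_path": "example_stabilizer.data","smoothing_window": 30}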

    // Return only the protobuf path in JSON format
    if(onlyProtoPath)
        return "{" + protobuf_data_path + "}";
    // Return all the parameters for the pre-processing effect
    else
        return "{" + protobuf_data_path + "," + smoothing_window + "}";
}

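// Return JSON string for the object detector effect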
string objectDetectionJson(bool onlyProtoPath){

    // Define path to save object detection data
    string protobufDataPath = "example_object_detection.data";
    // Define processing device
    string processingDevice = "GPU";
    // Set path to model configuration file
    string modelConfiguration = "yolov3.cfg";
    // Set path to model weights
    string modelWeights = "yolov3.weights";
    // Set path to class names file
    string classesFile = "obj.names";

    // Compose the pieces of the JSON string
    string protobuf_data_path = jsonFormat("protobuf_data_path", protobufDataPath);
    string processing_device = jsonFormat("processing_device", processingDevice);
    string model_configuration = jsonFormat("model_configuration", modelConfiguration);
    string model_weights = jsonFormat("model_weights", modelWeights);
    string classes_file = jsonFormat("classes_file", classesFile);
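    // Full string returned when onlyProtoPath is false:
    //   {"protobuf_data_path": "example_object_detection.data","processing_device": "GPU",
    //    "model_configuration": "yolov3.cfg","model_weights": "yolov3.weights","classes_file": "obj.names"}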

    // Return only the protobuf path in JSON format
    if(onlyProtoPath)
        return "{" + protobuf_data_path + "}";
    else
        return "{" + protobuf_data_path + "," + processing_device + "," + model_configuration + ","
                + model_weights + "," + classes_file + "}";
}