/*
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * The Original Code is Copyright (C) 2013 Blender Foundation.
 * All rights reserved.
 */

/** \file
 * \ingroup depsgraph
 *
 * Evaluation engine entry-points for Depsgraph Engine.
 */

#include "intern/eval/deg_eval.h"

#include "PIL_time.h"

#include "BLI_compiler_attrs.h"
#include "BLI_gsqueue.h"
#include "BLI_task.h"
#include "BLI_utildefines.h"

#include "BKE_global.h"

#include "DNA_node_types.h"
#include "DNA_object_types.h"
#include "DNA_scene_types.h"

#include "DEG_depsgraph.h"
#include "DEG_depsgraph_query.h"

#include "atomic_ops.h"

#include "intern/depsgraph.h"
#include "intern/depsgraph_relation.h"
#include "intern/eval/deg_eval_copy_on_write.h"
#include "intern/eval/deg_eval_flush.h"
#include "intern/eval/deg_eval_stats.h"
#include "intern/node/deg_node.h"
#include "intern/node/deg_node_component.h"
#include "intern/node/deg_node_id.h"
#include "intern/node/deg_node_operation.h"
#include "intern/node/deg_node_time.h"

namespace blender {
namespace deg {

namespace {

struct DepsgraphEvalState;

void deg_task_run_func(TaskPool *pool, void *taskdata);

template<typename ScheduleFunction, typename... ScheduleFunctionArgs>
void schedule_children(DepsgraphEvalState *state,
                       OperationNode *node,
                       ScheduleFunction *schedule_function,
                       ScheduleFunctionArgs... schedule_function_args);

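/* Scheduling callback for threaded evaluation: push the operation node into the task pool. */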
void schedule_node_to_pool(OperationNode *node, const int UNUSED(thread_id), TaskPool *pool)
{
  BLI_task_pool_push(pool, deg_task_run_func, node, false, NULL);
}

/* Denotes which part of dependency graph is being evaluated. */
enum class EvaluationStage {
  /* Stage 1: Only Copy-on-Write operations are to be evaluated, prior to anything else.
   * This allows other operations to access their dependencies when there is a dependency cycle
   * involved. */
  COPY_ON_WRITE,

  /* Threaded evaluation of all possible operations. */
  THREADED_EVALUATION,

  /* Workaround for areas which cannot be evaluated in threads.
   *
   * For example, meta-balls, which iterate over all bases and request dupli-lists
   * to check whether there are meta-balls inside. */
  SINGLE_THREADED_WORKAROUND,
};

struct DepsgraphEvalState {
  Depsgraph *graph;
  bool do_stats;
  EvaluationStage stage;
  bool need_single_thread_pass;
};

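/* Evaluate a single operation node, timing it when statistics gathering is enabled. */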
void evaluate_node(const DepsgraphEvalState *state, OperationNode *operation_node)
{
  ::Depsgraph *depsgraph = reinterpret_cast<::Depsgraph *>(state->graph);

  /* Sanity checks. */
  BLI_assert(!operation_node->is_noop() && "NOOP nodes should not actually be scheduled");
  /* Perform operation. */
  if (state->do_stats) {
    const double start_time = PIL_check_seconds_timer();
    operation_node->evaluate(depsgraph);
    operation_node->stats.current_time += PIL_check_seconds_timer() - start_time;
  }
  else {
    operation_node->evaluate(depsgraph);
  }
}

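/* Task pool callback: evaluate the node, then schedule its children into the same pool. */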
void deg_task_run_func(TaskPool *pool, void *taskdata)
{
  void *userdata_v = BLI_task_pool_user_data(pool);
  DepsgraphEvalState *state = (DepsgraphEvalState *)userdata_v;

  /* Evaluate node. */
  OperationNode *operation_node = reinterpret_cast<OperationNode *>(taskdata);
  evaluate_node(state, operation_node);

  /* Schedule children. */
  schedule_children(state, operation_node, schedule_node_to_pool, pool);
}

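/* Check whether the operation node affects a directly visible ID and hence needs evaluation. */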
bool check_operation_node_visible(OperationNode *op_node)
{
  const ComponentNode *comp_node = op_node->owner;
  /* Special exception: the copy-on-write component is always evaluated,
   * to keep the copied "database" in a consistent state. */
  if (comp_node->type == NodeType::COPY_ON_WRITE) {
    return true;
  }
  return comp_node->affects_directly_visible;
}

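/* Count how many not-yet-evaluated parent operations this node still has to wait for. */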
void calculate_pending_parents_for_node(OperationNode *node)
{
  /* Update counters, applies for both visible and invisible IDs. */
  node->num_links_pending = 0;
  node->scheduled = false;
  /* Invisible IDs require no pending operations. */
  if (!check_operation_node_visible(node)) {
    return;
  }
  /* No need to bother with anything if the node is not tagged for update. */
  if ((node->flag & DEPSOP_FLAG_NEEDS_UPDATE) == 0) {
    return;
  }
  for (Relation *rel : node->inlinks) {
    if (rel->from->type == NodeType::OPERATION && (rel->flag & RELATION_FLAG_CYCLIC) == 0) {
      OperationNode *from = (OperationNode *)rel->from;
      /* TODO(sergey): This is how the old layer system was checking for the
       * calculation, but how is it possible that a visible object depends
       * on an invisible one? This is something which is prohibited after
       * deg_graph_build_flush_layers(). */
      if (!check_operation_node_visible(from)) {
        continue;
      }
      /* No need to wait for an operation which is up to date. */
      if ((from->flag & DEPSOP_FLAG_NEEDS_UPDATE) == 0) {
        continue;
      }
      ++node->num_links_pending;
    }
  }
}

void calculate_pending_parents(Depsgraph *graph)
{
  for (OperationNode *node : graph->operations) {
    calculate_pending_parents_for_node(node);
  }
}

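/* Prepare per-node state (pending parent counters, statistics) before evaluation starts. */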
void initialize_execution(DepsgraphEvalState *state, Depsgraph *graph)
{
  const bool do_stats = state->do_stats;
  calculate_pending_parents(graph);
  /* Clear tags and other things which need to be cleared. */
  for (OperationNode *node : graph->operations) {
    if (do_stats) {
      node->stats.reset_current();
    }
  }
}

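/* Check whether the operation belongs to a meta-ball object, which needs the single-threaded
 * workaround. */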
bool is_metaball_object_operation(const OperationNode *operation_node)
{
  const ComponentNode *component_node = operation_node->owner;
  const IDNode *id_node = component_node->owner;
  if (GS(id_node->id_cow->name) != ID_OB) {
    return false;
  }
  const Object *object = reinterpret_cast<const Object *>(id_node->id_cow);
  return object->type == OB_MBALL;
}

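/* Check whether the operation is supposed to be evaluated during the current stage. */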
bool need_evaluate_operation_at_stage(DepsgraphEvalState *state,
                                      const OperationNode *operation_node)
{
  const ComponentNode *component_node = operation_node->owner;
  switch (state->stage) {
    case EvaluationStage::COPY_ON_WRITE:
      return (component_node->type == NodeType::COPY_ON_WRITE);

    case EvaluationStage::THREADED_EVALUATION:
      /* Sanity check: a copy-on-write node should have been evaluated already. This is indicated
       * by the scheduled flag (we assume that scheduled operations have actually been handled by
       * the previous stage). */
      BLI_assert(operation_node->scheduled || component_node->type != NodeType::COPY_ON_WRITE);
      if (is_metaball_object_operation(operation_node)) {
        state->need_single_thread_pass = true;
        return false;
      }
      return true;

    case EvaluationStage::SINGLE_THREADED_WORKAROUND:
      return true;
  }
  BLI_assert(!"Unhandled evaluation stage, should never happen.");
  return false;
}

/* Schedule a node if it needs evaluation.
 *   dec_parents: Decrement pending parents count, true when child nodes are
 *                scheduled after a task has been completed.
 */
template<typename ScheduleFunction, typename... ScheduleFunctionArgs>
void schedule_node(DepsgraphEvalState *state,
                   OperationNode *node,
                   bool dec_parents,
                   ScheduleFunction *schedule_function,
                   ScheduleFunctionArgs... schedule_function_args)
{
  /* No need to schedule nodes of an invisible ID. */
  if (!check_operation_node_visible(node)) {
    return;
  }
  /* No need to schedule operations which are not tagged for update, they are
   * considered to be up to date. */
  if ((node->flag & DEPSOP_FLAG_NEEDS_UPDATE) == 0) {
    return;
  }
  /* TODO(sergey): It is not, strictly speaking, safe to read
   * num_links_pending here. */
  if (dec_parents) {
    BLI_assert(node->num_links_pending > 0);
    atomic_sub_and_fetch_uint32(&node->num_links_pending, 1);
  }
  /* Can not schedule the operation while its dependencies are not yet
   * evaluated. */
  if (node->num_links_pending != 0) {
    return;
  }
  /* During the COW stage only schedule COW nodes. */
  if (!need_evaluate_operation_at_stage(state, node)) {
    return;
  }
  /* Actually schedule the node. */
  bool is_scheduled = atomic_fetch_and_or_uint8((uint8_t *)&node->scheduled, (uint8_t) true);
  if (!is_scheduled) {
    if (node->is_noop()) {
      /* Skip NOOP node, schedule children right away. */
      schedule_children(state, node, schedule_function, schedule_function_args...);
    }
    else {
      /* Children are scheduled once this task is completed. */
      schedule_function(node, 0, schedule_function_args...);
    }
  }
}

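/* Schedule all nodes which are ready for evaluation (tagged for update and with no pending
 * parents). */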
template<typename ScheduleFunction, typename... ScheduleFunctionArgs>
void schedule_graph(DepsgraphEvalState *state,
                    ScheduleFunction *schedule_function,
                    ScheduleFunctionArgs... schedule_function_args)
{
  for (OperationNode *node : state->graph->operations) {
    schedule_node(state, node, false, schedule_function, schedule_function_args...);
  }
}

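/* Schedule children of the given node, decrementing their pending-parents counters for
 * non-cyclic relations. */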
template<typename ScheduleFunction, typename... ScheduleFunctionArgs>
void schedule_children(DepsgraphEvalState *state,
                       OperationNode *node,
                       ScheduleFunction *schedule_function,
                       ScheduleFunctionArgs... schedule_function_args)
{
  for (Relation *rel : node->outlinks) {
    OperationNode *child = (OperationNode *)rel->to;
    BLI_assert(child->type == NodeType::OPERATION);
    if (child->scheduled) {
      /* Happens when having cyclic dependencies. */
      continue;
    }
    schedule_node(state,
                  child,
                  (rel->flag & RELATION_FLAG_CYCLIC) == 0,
                  schedule_function,
                  schedule_function_args...);
  }
}

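/* Scheduling callback for the single-threaded pass: push the operation node into the queue. */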
void schedule_node_to_queue(OperationNode *node,
                            const int /*thread_id*/,
                            GSQueue *evaluation_queue)
{
  BLI_gsqueue_push(evaluation_queue, &node);
}

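/* Evaluate the remaining scheduled nodes in a single thread, using a simple FIFO queue. */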
void evaluate_graph_single_threaded(DepsgraphEvalState *state)
{
  GSQueue *evaluation_queue = BLI_gsqueue_new(sizeof(OperationNode *));
  schedule_graph(state, schedule_node_to_queue, evaluation_queue);

  while (!BLI_gsqueue_is_empty(evaluation_queue)) {
    OperationNode *operation_node;
    BLI_gsqueue_pop(evaluation_queue, &operation_node);

    evaluate_node(state, operation_node);
    schedule_children(state, operation_node, schedule_node_to_queue, evaluation_queue);
  }

  BLI_gsqueue_free(evaluation_queue);
}

void depsgraph_ensure_view_layer(Depsgraph *graph)
{
  /* We update the copy-on-write scene in the following cases:
   * - It was not expanded yet.
   * - It was tagged for update of its CoW component.
   * This allows us to have a proper view layer pointer. */
  Scene *scene_cow = graph->scene_cow;
  if (deg_copy_on_write_is_expanded(&scene_cow->id) &&
      (scene_cow->id.recalc & ID_RECALC_COPY_ON_WRITE) == 0) {
    return;
  }

  const IDNode *scene_id_node = graph->find_id_node(&graph->scene->id);
  deg_update_copy_on_write_datablock(graph, scene_id_node);
}

}  // namespace

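/* Create the task pool used for evaluation; falls back to a pool without threads when depsgraph
 * threading is disabled for debugging. */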
static TaskPool *deg_evaluate_task_pool_create(DepsgraphEvalState *state)
{
  if (G.debug & G_DEBUG_DEPSGRAPH_NO_THREADS) {
    return BLI_task_pool_create_no_threads(state);
  }

  return BLI_task_pool_create_suspended(state, TASK_PRIORITY_HIGH);
}

/**
 * Evaluate all nodes tagged for updating.
 * \warning This is usually done as part of main loop, but may also be
 * called from frame-change update.
 *
 * \note Time sources should be all valid!
 */
void deg_evaluate_on_refresh(Depsgraph *graph)
{
  /* Nothing to update, early out. */
  if (graph->entry_tags.is_empty()) {
    return;
  }

  graph->debug.begin_graph_evaluation();

  graph->is_evaluating = true;
  depsgraph_ensure_view_layer(graph);
  /* Set up evaluation state. */
  DepsgraphEvalState state;
  state.graph = graph;
  state.do_stats = graph->debug.do_time_debug();
  state.need_single_thread_pass = false;
  /* Prepare all nodes for evaluation. */
  initialize_execution(&state, graph);

  /* Do actual evaluation now. */
  /* First, process all Copy-On-Write nodes. */
  state.stage = EvaluationStage::COPY_ON_WRITE;
  TaskPool *task_pool = deg_evaluate_task_pool_create(&state);
  schedule_graph(&state, schedule_node_to_pool, task_pool);
  BLI_task_pool_work_and_wait(task_pool);
  BLI_task_pool_free(task_pool);

  /* After that, process all other nodes. */
  state.stage = EvaluationStage::THREADED_EVALUATION;
  task_pool = deg_evaluate_task_pool_create(&state);
  schedule_graph(&state, schedule_node_to_pool, task_pool);
  BLI_task_pool_work_and_wait(task_pool);
  BLI_task_pool_free(task_pool);

  if (state.need_single_thread_pass) {
    state.stage = EvaluationStage::SINGLE_THREADED_WORKAROUND;
    evaluate_graph_single_threaded(&state);
  }

  /* Finalize statistics gathering. This is because we only gather single
   * operation timing here, without aggregating anything to avoid any extra
   * synchronization. */
  if (state.do_stats) {
    deg_eval_stats_aggregate(graph);
  }
  /* Clear any uncleared tags - just in case. */
  deg_graph_clear_tags(graph);
  graph->is_evaluating = false;

  graph->debug.end_graph_evaluation();
}

}  // namespace deg
}  // namespace blender