// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_MEMORY_LOWERING_H_
#define V8_COMPILER_MEMORY_LOWERING_H_

#include <functional>

#include "src/compiler/graph-assembler.h"
#include "src/compiler/graph-reducer.h"

namespace v8 {
namespace internal {
namespace compiler {

// Forward declarations.
class CommonOperatorBuilder;
struct ElementAccess;
class Graph;
class JSGraph;
class MachineOperatorBuilder;
class Node;
class Operator;

// Provides operations to lower all simplified memory access and allocation
// related nodes (i.e. Allocate, LoadField, StoreField and friends) to machine
// operators.
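//
// For example (a hedged sketch; the exact machine-level shape depends on the
// access descriptor and the target), a simplified field load such as
//
//   LoadField[offset](object)
//
// is rewritten into a raw machine load from the untagged address:
//
//   Load[rep](object, IntPtrConstant(offset - kHeapObjectTag))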
class MemoryLowering final : public Reducer {
 public:
  enum class AllocationFolding { kDoAllocationFolding, kDontAllocationFolding };
  class AllocationGroup;

  // An allocation state is propagated on the effect paths through the graph;
  // see the usage sketch below this class.
  class AllocationState final : public ZoneObject {
   public:
    static AllocationState const* Empty(Zone* zone) {
      return new (zone) AllocationState();
    }
    static AllocationState const* Closed(AllocationGroup* group, Node* effect,
                                         Zone* zone) {
      return new (zone) AllocationState(group, effect);
    }
    static AllocationState const* Open(AllocationGroup* group, intptr_t size,
                                       Node* top, Node* effect, Zone* zone) {
      return new (zone) AllocationState(group, size, top, effect);
    }

    bool IsYoungGenerationAllocation() const;

    AllocationGroup* group() const { return group_; }
    Node* top() const { return top_; }
    Node* effect() const { return effect_; }
    intptr_t size() const { return size_; }

   private:
    AllocationState();
    explicit AllocationState(AllocationGroup* group, Node* effect);
    AllocationState(AllocationGroup* group, intptr_t size, Node* top,
                    Node* effect);

    AllocationGroup* const group_;
    // The upper bound of the combined allocated object size on the current
    // path (max int if allocation folding is impossible on this path).
    intptr_t const size_;
    Node* const top_;
    Node* const effect_;

    DISALLOW_COPY_AND_ASSIGN(AllocationState);
  };
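
  // A sketch of how the states above are threaded through a lowering pass
  // (hypothetical values; the actual driver is the MemoryOptimizer):
  //
  //   AllocationState const* state = AllocationState::Empty(zone);
  //   // Lowering an AllocateRaw leaves the group Open, so a later
  //   // allocation on the same effect path may be folded into it:
  //   state = AllocationState::Open(group, /*size=*/16, top, effect, zone);
  //   // A node that may trigger a GC closes the group again:
  //   state = AllocationState::Closed(group, effect, zone);
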
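  // Callback invoked when a store marked kAssertNoWriteBarrier turns out to
  // require a write barrier after all. A hypothetical handler (sketch only;
  // the constructor default below simply hits UNREACHABLE()):
  //
  //   auto on_assert_failed = [](Node* node, Node* object, const char* name,
  //                              Zone* temp_zone) {
  //     FATAL("write barrier assertion failed for node #%d in %s",
  //           node->id(), name);
  //   };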
  using WriteBarrierAssertFailedCallback = std::function<void(
      Node* node, Node* object, const char* name, Zone* temp_zone)>;

  MemoryLowering(
      JSGraph* jsgraph, Zone* zone, JSGraphAssembler* graph_assembler,
      PoisoningMitigationLevel poisoning_level,
      AllocationFolding allocation_folding =
          AllocationFolding::kDontAllocationFolding,
      WriteBarrierAssertFailedCallback callback = [](Node*, Node*, const char*,
                                                     Zone*) { UNREACHABLE(); },
      const char* function_debug_name = nullptr);
  ~MemoryLowering() = default;

  const char* reducer_name() const override { return "MemoryReducer"; }

  // Perform memory lowering reduction on the given Node.
  Reduction Reduce(Node* node) override;

  // Specific reducers for each operator type, so that the MemoryOptimizer
  // can keep track of the AllocationState (see the dispatch sketch below).
  Reduction ReduceAllocateRaw(Node* node, AllocationType allocation_type,
                              AllowLargeObjects allow_large_objects,
                              AllocationState const** state);
  Reduction ReduceLoadFromObject(Node* node);
  Reduction ReduceLoadElement(Node* node);
  Reduction ReduceLoadField(Node* node);
  Reduction ReduceStoreToObject(Node* node,
                                AllocationState const* state = nullptr);
  Reduction ReduceStoreElement(Node* node,
                               AllocationState const* state = nullptr);
  Reduction ReduceStoreField(Node* node,
                             AllocationState const* state = nullptr);
  Reduction ReduceStore(Node* node, AllocationState const* state = nullptr);
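
  // A minimal dispatch sketch (assumed caller shape; the actual walk lives
  // in the MemoryOptimizer, which threads the AllocationState through):
  //
  //   switch (node->opcode()) {
  //     case IrOpcode::kLoadField:
  //       return lowering.ReduceLoadField(node);
  //     case IrOpcode::kStoreField:
  //       return lowering.ReduceStoreField(node, state);
  //     default:
  //       return Reduction();  // no change
  //   }
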
 private:
  Reduction ReduceAllocateRaw(Node* node);
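  // Decides which write barrier is needed when storing `value` into
  // `object`. A simplified sketch of the decision (the real logic also
  // routes kAssertNoWriteBarrier failures to the callback above):
  //
  //   if (state is a young-generation allocation group containing object)
  //     kind = kNoWriteBarrier;
  //   if (the stored value can never live in the young generation)
  //     kind = kNoWriteBarrier;
  //   return kind;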
  WriteBarrierKind ComputeWriteBarrierKind(Node* node, Node* object,
                                           Node* value,
                                           AllocationState const* state,
                                           WriteBarrierKind);
  Node* ComputeIndex(ElementAccess const& access, Node* node);
  bool NeedsPoisoning(LoadSensitivity load_sensitivity) const;

  Graph* graph() const;
  Isolate* isolate() const { return isolate_; }
  Zone* zone() const { return zone_; }
  Zone* graph_zone() const { return graph_zone_; }
  CommonOperatorBuilder* common() const { return common_; }
  MachineOperatorBuilder* machine() const { return machine_; }
  JSGraphAssembler* gasm() const { return graph_assembler_; }

  SetOncePointer<const Operator> allocate_operator_;
  Isolate* isolate_;
  Zone* zone_;
  Zone* graph_zone_;
  CommonOperatorBuilder* common_;
  MachineOperatorBuilder* machine_;
  JSGraphAssembler* graph_assembler_;
  AllocationFolding allocation_folding_;
  PoisoningMitigationLevel poisoning_level_;
  WriteBarrierAssertFailedCallback write_barrier_assert_failed_;
  const char* function_debug_name_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryLowering);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_MEMORY_LOWERING_H_