//
// Copyright (c) 2019, 2021, Oracle and/or its affiliates. All rights reserved.
// DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
//
// This code is free software; you can redistribute it and/or modify it
// under the terms of the GNU General Public License version 2 only, as
// published by the Free Software Foundation.
//
// This code is distributed in the hope that it will be useful, but WITHOUT
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
// version 2 for more details (a copy is included in the LICENSE file that
// accompanied this code).
//
// You should have received a copy of the GNU General Public License version
// 2 along with this work; if not, write to the Free Software Foundation,
// Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
//
// Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
// or visit www.oracle.com if you need additional information or have any
// questions.
//

// Headers required by the ZGC barrier helpers and instruction encodings below.
source_hpp %{

#include "gc/shared/gc_globals.hpp"
#include "gc/z/c2/zBarrierSetC2.hpp"
#include "gc/z/zThreadLocalData.hpp"

%}

source %{

34static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, uint8_t barrier_data) {
35  if (barrier_data == ZLoadBarrierElided) {
36    return;
37  }
38  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, barrier_data);
39  __ ldr(tmp, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
40  __ andr(tmp, tmp, ref);
41  __ cbnz(tmp, *stub->entry());
42  __ bind(*stub->continuation());
43}
44
45static void z_load_barrier_slow_path(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp) {
46  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, ZLoadBarrierStrong);
47  __ b(*stub->entry());
48  __ bind(*stub->continuation());
49}

%}

// Load Pointer
// ZGC variant of a plain (non-volatile) oop load: performs the load, then
// applies the inline load-barrier check on the loaded reference.
instruct zLoadP(iRegPNoSp dst, memory8 mem, rFlagsReg cr)
%{
  match(Set dst (LoadP mem));
  // Only oop loads that actually carry barrier data (barrier_data() != 0);
  // the acquiring/volatile case is handled by zLoadPVolatile below.
  predicate(UseZGC && !needs_acquiring_load(n) && (n->as_Load()->barrier_data() != 0));
  // TEMP dst: dst is an input to the barrier check, so it must not overlap
  // any input register.  cr is treated as clobbered by the barrier sequence.
  effect(TEMP dst, KILL cr);

  ins_cost(4 * INSN_COST);

  format %{ "ldr  $dst, $mem" %}

  ins_encode %{
    const Address ref_addr = mem2address($mem->opcode(), as_Register($mem$$base), $mem$$index, $mem$$scale, $mem$$disp);
    __ ldr($dst$$Register, ref_addr);
    // Test the loaded oop against the bad mask; branch to the stub if bad.
    z_load_barrier(_masm, this, ref_addr, $dst$$Register, rscratch2 /* tmp */, barrier_data());
  %}

  ins_pipe(iload_reg_mem);
%}

// Load Pointer Volatile
// Acquiring (volatile) variant of zLoadP: uses ldar for the load, then
// applies the same inline load-barrier check on the loaded reference.
instruct zLoadPVolatile(iRegPNoSp dst, indirect mem /* sync_memory */, rFlagsReg cr)
%{
  match(Set dst (LoadP mem));
  predicate(UseZGC && needs_acquiring_load(n) && n->as_Load()->barrier_data() != 0);
  // TEMP dst: dst feeds the barrier check, so it must not alias an input.
  effect(TEMP dst, KILL cr);

  ins_cost(VOLATILE_REF_COST);

  format %{ "ldar  $dst, $mem\t" %}

  ins_encode %{
    __ ldar($dst$$Register, $mem$$Register);
    // 'indirect' memory: the address is simply the base register.
    z_load_barrier(_masm, this, Address($mem$$Register), $dst$$Register, rscratch2 /* tmp */, barrier_data());
  %}

  ins_pipe(pipe_serial);
%}

// Compare-and-swap of an oop with ZGC load barrier.  Sets $res to 1 on
// success, 0 on failure.  Matches both the strong and the weak CAS nodes;
// note the weak form is still emitted as a strong cmpxchg here.
instruct zCompareAndSwapP(iRegINoSp res, indirect mem, iRegP oldval, iRegP newval, rFlagsReg cr) %{
  match(Set res (CompareAndSwapP mem (Binary oldval newval)));
  match(Set res (WeakCompareAndSwapP mem (Binary oldval newval)));
  // Non-acquiring variant; only strong load barriers reach this rule.
  predicate(UseZGC && !needs_acquiring_load_exclusive(n) && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
  // TEMP_DEF res: res is written before all inputs are consumed.
  effect(KILL cr, TEMP_DEF res);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "cmpxchg $mem, $oldval, $newval\n\t"
            "cset    $res, EQ" %}

  ins_encode %{
    // 'indirect' operands carry no index or displacement by construction.
    guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
    // cmpxchg leaves the value loaded from memory in rscratch2 (result reg).
    __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::xword,
               false /* acquire */, true /* release */, false /* weak */, rscratch2);
    __ cset($res$$Register, Assembler::EQ);
    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      // Test the loaded value against the per-thread address bad mask.
      __ ldr(rscratch1, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
      __ andr(rscratch1, rscratch1, rscratch2);
      __ cbz(rscratch1, good);
      // Bad reference: run the slow-path barrier on it, then retry the CAS
      // once and recompute the success flag.
      z_load_barrier_slow_path(_masm, this, Address($mem$$Register), rscratch2 /* ref */, rscratch1 /* tmp */);
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::xword,
                 false /* acquire */, true /* release */, false /* weak */, rscratch2);
      __ cset($res$$Register, Assembler::EQ);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_slow);
%}

// Acquiring variant of zCompareAndSwapP: identical sequence except the
// cmpxchg (and its retry) use acquire semantics.
instruct zCompareAndSwapPAcq(iRegINoSp res, indirect mem, iRegP oldval, iRegP newval, rFlagsReg cr) %{
  match(Set res (CompareAndSwapP mem (Binary oldval newval)));
  match(Set res (WeakCompareAndSwapP mem (Binary oldval newval)));
  predicate(UseZGC && needs_acquiring_load_exclusive(n) && (n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong));
  // TEMP_DEF res: res is written before all inputs are consumed.
  effect(KILL cr, TEMP_DEF res);

  ins_cost(2 * VOLATILE_REF_COST);

 format %{ "cmpxchg $mem, $oldval, $newval\n\t"
           "cset    $res, EQ" %}

  ins_encode %{
    // 'indirect' operands carry no index or displacement by construction.
    guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
    // cmpxchg leaves the value loaded from memory in rscratch2 (result reg).
    __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::xword,
               true /* acquire */, true /* release */, false /* weak */, rscratch2);
    __ cset($res$$Register, Assembler::EQ);
    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      // Test the loaded value against the per-thread address bad mask.
      __ ldr(rscratch1, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
      __ andr(rscratch1, rscratch1, rscratch2);
      __ cbz(rscratch1, good);
      // Bad reference: run the slow-path barrier on it, then retry the CAS
      // once and recompute the success flag.
      z_load_barrier_slow_path(_masm, this, Address($mem$$Register), rscratch2 /* ref */, rscratch1 /* tmp */ );
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::xword,
                 true /* acquire */, true /* release */, false /* weak */, rscratch2);
      __ cset($res$$Register, Assembler::EQ);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_slow);
%}

// Compare-and-exchange of an oop with ZGC load barrier.  Unlike the CAS
// rules above, $res receives the value previously held in memory rather
// than a success flag.
instruct zCompareAndExchangeP(iRegPNoSp res, indirect mem, iRegP oldval, iRegP newval, rFlagsReg cr) %{
  match(Set res (CompareAndExchangeP mem (Binary oldval newval)));
  predicate(UseZGC && !needs_acquiring_load_exclusive(n) && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
  // TEMP_DEF res: res doubles as the cmpxchg result register, so it must
  // not alias any input.
  effect(TEMP_DEF res, KILL cr);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "cmpxchg $res = $mem, $oldval, $newval" %}

  ins_encode %{
    // 'indirect' operands carry no index or displacement by construction.
    guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
    // The previous memory value lands directly in $res.
    __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::xword,
               false /* acquire */, true /* release */, false /* weak */, $res$$Register);
    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      // Test the returned value against the per-thread address bad mask.
      __ ldr(rscratch1, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
      __ andr(rscratch1, rscratch1, $res$$Register);
      __ cbz(rscratch1, good);
      // Bad reference: heal it via the slow-path barrier, then retry once.
      z_load_barrier_slow_path(_masm, this, Address($mem$$Register), $res$$Register /* ref */, rscratch1 /* tmp */);
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::xword,
                 false /* acquire */, true /* release */, false /* weak */, $res$$Register);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_slow);
%}

// Acquiring variant of zCompareAndExchangeP: identical sequence except
// the cmpxchg (and its retry) use acquire semantics.
instruct zCompareAndExchangePAcq(iRegPNoSp res, indirect mem, iRegP oldval, iRegP newval, rFlagsReg cr) %{
  match(Set res (CompareAndExchangeP mem (Binary oldval newval)));
  predicate(UseZGC && needs_acquiring_load_exclusive(n) && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
  // TEMP_DEF res: res doubles as the cmpxchg result register, so it must
  // not alias any input.
  effect(TEMP_DEF res, KILL cr);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "cmpxchg $res = $mem, $oldval, $newval" %}

  ins_encode %{
    // 'indirect' operands carry no index or displacement by construction.
    guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
    // The previous memory value lands directly in $res.
    __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::xword,
               true /* acquire */, true /* release */, false /* weak */, $res$$Register);
    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      // Test the returned value against the per-thread address bad mask.
      __ ldr(rscratch1, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
      __ andr(rscratch1, rscratch1, $res$$Register);
      __ cbz(rscratch1, good);
      // Bad reference: heal it via the slow-path barrier, then retry once.
      z_load_barrier_slow_path(_masm, this, Address($mem$$Register), $res$$Register /* ref */, rscratch1 /* tmp */);
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::xword,
                 true /* acquire */, true /* release */, false /* weak */, $res$$Register);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_slow);
%}

// Atomic exchange of an oop with ZGC load barrier applied to the returned
// (previous) value.
instruct zGetAndSetP(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{
  match(Set prev (GetAndSetP mem newv));
  predicate(UseZGC && !needs_acquiring_load_exclusive(n) && n->as_LoadStore()->barrier_data() != 0);
  // TEMP_DEF prev: prev feeds the barrier check, so it must not alias an input.
  effect(TEMP_DEF prev, KILL cr);

  ins_cost(2 * VOLATILE_REF_COST);

  format %{ "atomic_xchg  $prev, $newv, [$mem]" %}

  ins_encode %{
    __ atomic_xchg($prev$$Register, $newv$$Register, $mem$$Register);
    // Address(noreg, 0) is a dummy ref_addr — presumably the stub heals the
    // reference in the register only for GetAndSet; TODO(review) confirm.
    z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, rscratch2 /* tmp */, barrier_data());
  %}

  ins_pipe(pipe_serial);
%}

// Acquiring variant of zGetAndSetP: uses atomic_xchgal (acquire+release)
// instead of atomic_xchg, then applies the same load barrier to the
// returned value.
instruct zGetAndSetPAcq(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{
  match(Set prev (GetAndSetP mem newv));
  predicate(UseZGC && needs_acquiring_load_exclusive(n) && (n->as_LoadStore()->barrier_data() != 0));
  // TEMP_DEF prev: prev feeds the barrier check, so it must not alias an input.
  effect(TEMP_DEF prev, KILL cr);

  // NOTE(review): this is VOLATILE_REF_COST while the non-acquiring
  // zGetAndSetP above uses 2 * VOLATILE_REF_COST — verify the asymmetry
  // is intended.
  ins_cost(VOLATILE_REF_COST);

  format %{ "atomic_xchg_acq  $prev, $newv, [$mem]" %}

  ins_encode %{
    __ atomic_xchgal($prev$$Register, $newv$$Register, $mem$$Register);
    // Address(noreg, 0) is a dummy ref_addr — presumably the stub heals the
    // reference in the register only for GetAndSet; TODO(review) confirm.
    z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, rscratch2 /* tmp */, barrier_data());
  %}
  ins_pipe(pipe_serial);
%}
