//
// Copyright (c) 2015, 2021, Oracle and/or its affiliates. All rights reserved.
// DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
//
// This code is free software; you can redistribute it and/or modify it
// under the terms of the GNU General Public License version 2 only, as
// published by the Free Software Foundation.
//
// This code is distributed in the hope that it will be useful, but WITHOUT
// ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
// FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
// version 2 for more details (a copy is included in the LICENSE file that
// accompanied this code).
//
// You should have received a copy of the GNU General Public License version
// 2 along with this work; if not, write to the Free Software Foundation,
// Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
//
// Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
// or visit www.oracle.com if you need additional information or have any
// questions.
//

// C2 (server compiler) match rules and barrier helpers for ZGC oop accesses
// on x86_64. Loads and atomic pointer operations on oops carry barrier_data
// set by ZBarrierSetC2; the helpers below emit the corresponding load-barrier
// fast path inline and branch to an out-of-line ZLoadBarrierStubC2 slow path.

source_hpp %{

#include "gc/shared/gc_globals.hpp"
#include "gc/z/c2/zBarrierSetC2.hpp"
#include "gc/z/zThreadLocalData.hpp"

%}

source %{

#include "c2_intelJccErratum_x86.hpp"

// Emit the ZGC load-barrier fast path for the oop in 'ref':
// test the pointer against the per-thread address bad mask and, if any bad
// bits are set, jump to the out-of-line slow-path stub. Falls through (and
// returns from the stub) at stub->continuation(). Emits nothing at all when
// the barrier was elided (barrier_data == ZLoadBarrierElided).
//
//   node         - the Mach node the stub is created for
//   ref_addr     - memory location the oop was loaded from (used by the stub)
//   ref          - register holding the loaded oop to be checked
//   tmp          - scratch register for the stub (may be noreg)
//   barrier_data - barrier strength bits from the ideal node
static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, uint8_t barrier_data) {
  if (barrier_data == ZLoadBarrierElided) {
    return; // Elided.
  }
  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, barrier_data);
  {
    // Scoped alignment so the test+jcc pair does not straddle the boundary
    // affected by the Intel JCC erratum (see c2_intelJccErratum_x86.hpp).
    IntelJccErratumAlignment intel_alignment(_masm, 10 /* jcc_size */);
    __ testptr(ref, Address(r15_thread, ZThreadLocalData::address_bad_mask_offset()));
    __ jcc(Assembler::notZero, *stub->entry());
  }
  __ bind(*stub->continuation());
}

// Emit the (always strong) load barrier used on the failure path of a
// compare-and-swap/exchange: if 'ref' has no bad bits set, jump to 'good'
// (the CAS genuinely lost the race); otherwise jump to the slow-path stub so
// the reference can be processed, after which control resumes at
// stub->continuation(). Both branches are JCC-erratum aligned.
static void z_load_barrier_cmpxchg(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, Label& good) {
  ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, ZLoadBarrierStrong);
  {
    IntelJccErratumAlignment intel_alignment(_masm, 10 /* jcc_size */);
    __ testptr(ref, Address(r15_thread, ZThreadLocalData::address_bad_mask_offset()));
    __ jcc(Assembler::zero, good);
  }
  {
    IntelJccErratumAlignment intel_alignment(_masm, 5 /* jcc_size */);
    __ jmp(*stub->entry());
  }
  __ bind(*stub->continuation());
}

%}

// Load Pointer
// Plain oop load followed by the load barrier on the loaded value.
// dst is TEMP (in addition to being the result) because the barrier stub may
// clobber it while the inputs are still live; flags are killed by testptr.
instruct zLoadP(rRegP dst, memory mem, rFlagsReg cr)
%{
  predicate(UseZGC && n->as_Load()->barrier_data() != 0);
  match(Set dst (LoadP mem));
  effect(KILL cr, TEMP dst);

  ins_cost(125);

  format %{ "movq $dst, $mem" %}

  ins_encode %{
    __ movptr($dst$$Register, $mem$$Address);
    z_load_barrier(_masm, this, $mem$$Address, $dst$$Register, noreg /* tmp */, barrier_data());
  %}

  ins_pipe(ialu_reg_mem);
%}

// Compare-and-exchange of an oop field. After a failing cmpxchg, rax (oldval)
// holds the value observed in memory, which may be a pointer with bad bits
// set; the barrier checks it and, if it needed processing, the expected value
// is restored from tmp and the cmpxchg is retried once. oldval must be rax
// per the cmpxchg ISA contract.
instruct zCompareAndExchangeP(memory mem, rax_RegP oldval, rRegP newval, rRegP tmp, rFlagsReg cr) %{
  match(Set oldval (CompareAndExchangeP mem (Binary oldval newval)));
  predicate(UseZGC && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
  effect(KILL cr, TEMP tmp);

  format %{ "lock\n\t"
            "cmpxchgq $newval, $mem" %}

  ins_encode %{
    if (barrier_data() != ZLoadBarrierElided) { // barrier could be elided by ZBarrierSetC2::analyze_dominating_barriers()
      // Preserve the expected value so the cmpxchg can be retried after the
      // barrier slow path has run.
      __ movptr($tmp$$Register, $oldval$$Register);
    }
    __ lock();
    __ cmpxchgptr($newval$$Register, $mem$$Address);

    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      // Check the value cmpxchg left in rax; jumps to 'good' if it needs no
      // processing (i.e. the failure was genuine or the CAS succeeded).
      z_load_barrier_cmpxchg(_masm, this, $mem$$Address, $oldval$$Register, $tmp$$Register, good);
      __ movptr($oldval$$Register, $tmp$$Register);
      __ lock();
      __ cmpxchgptr($newval$$Register, $mem$$Address);
      __ bind(good);
    }
  %}

  ins_pipe(pipe_cmpxchg);
%}

// Compare-and-swap (strong and weak) of an oop field, producing a 0/1 result.
// Same retry structure as zCompareAndExchangeP; the extra cmpptr after the
// retry re-establishes the flags that setb consumes, since the 'good' path
// may arrive with flags from either cmpxchg or from the barrier's testptr.
// NOTE(review): on the no-retry path the flags come straight from cmpxchg.
instruct zCompareAndSwapP(rRegI res, memory mem, rRegP newval, rRegP tmp, rFlagsReg cr, rax_RegP oldval) %{
  match(Set res (CompareAndSwapP mem (Binary oldval newval)));
  match(Set res (WeakCompareAndSwapP mem (Binary oldval newval)));
  predicate(UseZGC && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
  effect(KILL cr, KILL oldval, TEMP tmp);

  format %{ "lock\n\t"
            "cmpxchgq $newval, $mem\n\t"
            "sete $res\n\t"
            "movzbl $res, $res" %}

  ins_encode %{
    if (barrier_data() != ZLoadBarrierElided) { // barrier could be elided by ZBarrierSetC2::analyze_dominating_barriers()
      __ movptr($tmp$$Register, $oldval$$Register);
    }
    __ lock();
    __ cmpxchgptr($newval$$Register, $mem$$Address);

    if (barrier_data() != ZLoadBarrierElided) {
      Label good;
      z_load_barrier_cmpxchg(_masm, this, $mem$$Address, $oldval$$Register, $tmp$$Register, good);
      __ movptr($oldval$$Register, $tmp$$Register);
      __ lock();
      __ cmpxchgptr($newval$$Register, $mem$$Address);
      __ bind(good);
      // Recompute ZF for setb: equal iff the (possibly retried) cmpxchg
      // succeeded, i.e. memory held the expected value saved in tmp.
      __ cmpptr($tmp$$Register, $oldval$$Register);
    }
    __ setb(Assembler::equal, $res$$Register);
    __ movzbl($res$$Register, $res$$Register);
  %}

  ins_pipe(pipe_cmpxchg);
%}

// Atomic exchange of an oop field; the previous value left in newval gets the
// load barrier. A dummy Address(noreg, 0) is passed as ref_addr — presumably
// the stub does not need the field address for GetAndSet since there is no
// retry; TODO(review): confirm against ZLoadBarrierStubC2.
instruct zXChgP(memory mem, rRegP newval, rFlagsReg cr) %{
  match(Set newval (GetAndSetP mem newval));
  predicate(UseZGC && n->as_LoadStore()->barrier_data() != 0);
  effect(KILL cr);

  format %{ "xchgq $newval, $mem" %}

  ins_encode %{
    __ xchgptr($newval$$Register, $mem$$Address);
    z_load_barrier(_masm, this, Address(noreg, 0), $newval$$Register, noreg /* tmp */, barrier_data());
  %}

  ins_pipe(pipe_cmpxchg);
%}