Home
last modified time | relevance | path

Searched refs: AsmOperand (Results 1 – 25 of 234) sorted by relevance

1 2 3 4 5 6 7 8 9 10

/dports/java/openjdk11/jdk11u-jdk-11.0.13-8-1/src/hotspot/cpu/arm/
H A DstubRoutinesCrypto_arm.cpp175 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_encryptBlock()
180 __ eor(R1, R1, AsmOperand(R2, ror, 8)); in generate_aescrypt_encryptBlock()
189 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_encryptBlock()
203 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_encryptBlock()
218 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_encryptBlock()
381 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_decryptBlock()
395 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_decryptBlock()
409 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_decryptBlock()
424 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_decryptBlock()
801 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_cipherBlockChaining_decryptAESCrypt()
[all …]
H A DstubGenerator_arm.cpp456 __ cmp(remainder, AsmOperand(divisor, lsl, i)); in generate_idiv_irem()
1697 __ orr(R5, R5, AsmOperand(R6, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1699 __ orr(R6, R6, AsmOperand(R7, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1707 __ orr(R7, R7, AsmOperand(R8, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1709 __ orr(R8, R8, AsmOperand(R9, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1825 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1857 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1899 __ mov(R3, AsmOperand(R3, lsr, 16), gt); in generate_forward_shifted_copy_loop()
2233 __ mov(R9, AsmOperand(R12, lsr, 16), ne); in generate_backward_shifted_copy_loop()
2236 __ mov(R12, AsmOperand(R12, lsl, 16), ne); in generate_backward_shifted_copy_loop()
[all …]
H A DmacroAssembler_arm.cpp800 if (AsmOperand::is_rotated_imm(c)) { in mov_slow()
802 } else if (AsmOperand::is_rotated_imm(~c)) { in mov_slow()
1018 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in zero_extend()
1019 mov(rd, AsmOperand(rd, lsr, 32 - bits)); in zero_extend()
1033 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in sign_extend()
1034 mov(rd, AsmOperand(rd, asr, 32 - bits)); in sign_extend()
2068 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
2069 mov(tmp_reg, AsmOperand(tmp_reg, lsr, 23)); in biased_locking_enter()
2103 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
2335 eor(dst, dst, AsmOperand(dst, lsl, 3)); in floating_cmp()
[all …]
H A Dassembler_arm_32.cpp70 void AsmOperand::initialize_rotated_imm(unsigned int imm) { in initialize_rotated_imm()
81 bool AsmOperand::is_rotated_imm(unsigned int imm) { in is_rotated_imm()
/dports/java/openjdk13/jdk13u-jdk-13.0.10-1-1/src/hotspot/cpu/arm/
H A DstubRoutinesCrypto_arm.cpp175 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_encryptBlock()
180 __ eor(R1, R1, AsmOperand(R2, ror, 8)); in generate_aescrypt_encryptBlock()
189 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_encryptBlock()
203 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_encryptBlock()
218 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_encryptBlock()
381 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_decryptBlock()
395 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_decryptBlock()
409 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_decryptBlock()
424 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_decryptBlock()
801 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_cipherBlockChaining_decryptAESCrypt()
[all …]
H A DstubGenerator_arm.cpp344 __ cmp(remainder, AsmOperand(divisor, lsl, i)); in generate_idiv_irem()
1316 __ orr(R5, R5, AsmOperand(R6, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1318 __ orr(R6, R6, AsmOperand(R7, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1324 __ orr(R7, R7, AsmOperand(R8, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1326 __ orr(R8, R8, AsmOperand(R9, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1352 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1384 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1426 __ mov(R3, AsmOperand(R3, lsr, 16), gt); in generate_forward_shifted_copy_loop()
1649 __ mov(R9, AsmOperand(R12, lsr, 16), ne); in generate_backward_shifted_copy_loop()
1652 __ mov(R12, AsmOperand(R12, lsl, 16), ne); in generate_backward_shifted_copy_loop()
[all …]
H A DmacroAssembler_arm.cpp608 if (AsmOperand::is_rotated_imm(c)) { in mov_slow()
610 } else if (AsmOperand::is_rotated_imm(~c)) { in mov_slow()
737 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in zero_extend()
738 mov(rd, AsmOperand(rd, lsr, 32 - bits)); in zero_extend()
743 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in sign_extend()
744 mov(rd, AsmOperand(rd, asr, 32 - bits)); in sign_extend()
1465 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
1466 mov(tmp_reg, AsmOperand(tmp_reg, lsr, 23)); in biased_locking_enter()
1495 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
1690 eor(dst, dst, AsmOperand(dst, lsl, 3)); in floating_cmp()
[all …]
/dports/java/openjdk12/openjdk-jdk12u-jdk-12.0.2-10-4/src/hotspot/cpu/arm/
H A DstubRoutinesCrypto_arm.cpp175 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_encryptBlock()
180 __ eor(R1, R1, AsmOperand(R2, ror, 8)); in generate_aescrypt_encryptBlock()
189 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_encryptBlock()
203 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_encryptBlock()
218 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_encryptBlock()
381 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_decryptBlock()
395 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_decryptBlock()
409 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_decryptBlock()
424 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_decryptBlock()
801 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_cipherBlockChaining_decryptAESCrypt()
[all …]
H A DstubGenerator_arm.cpp347 __ cmp(remainder, AsmOperand(divisor, lsl, i)); in generate_idiv_irem()
1312 __ orr(R5, R5, AsmOperand(R6, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1314 __ orr(R6, R6, AsmOperand(R7, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1320 __ orr(R7, R7, AsmOperand(R8, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1322 __ orr(R8, R8, AsmOperand(R9, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1348 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1380 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1422 __ mov(R3, AsmOperand(R3, lsr, 16), gt); in generate_forward_shifted_copy_loop()
1645 __ mov(R9, AsmOperand(R12, lsr, 16), ne); in generate_backward_shifted_copy_loop()
1648 __ mov(R12, AsmOperand(R12, lsl, 16), ne); in generate_backward_shifted_copy_loop()
[all …]
H A DmacroAssembler_arm.cpp608 if (AsmOperand::is_rotated_imm(c)) { in mov_slow()
610 } else if (AsmOperand::is_rotated_imm(~c)) { in mov_slow()
737 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in zero_extend()
738 mov(rd, AsmOperand(rd, lsr, 32 - bits)); in zero_extend()
743 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in sign_extend()
744 mov(rd, AsmOperand(rd, asr, 32 - bits)); in sign_extend()
1465 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
1466 mov(tmp_reg, AsmOperand(tmp_reg, lsr, 23)); in biased_locking_enter()
1495 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
1690 eor(dst, dst, AsmOperand(dst, lsl, 3)); in floating_cmp()
[all …]
/dports/java/openjdk16/jdk16u-jdk-16.0.2-7-1/src/hotspot/cpu/arm/
H A DstubRoutinesCrypto_arm.cpp175 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_encryptBlock()
180 __ eor(R1, R1, AsmOperand(R2, ror, 8)); in generate_aescrypt_encryptBlock()
189 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_encryptBlock()
203 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_encryptBlock()
218 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_encryptBlock()
381 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_decryptBlock()
395 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_decryptBlock()
409 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_decryptBlock()
424 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_decryptBlock()
801 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_cipherBlockChaining_decryptAESCrypt()
[all …]
H A DstubGenerator_arm.cpp345 __ cmp(remainder, AsmOperand(divisor, lsl, i)); in generate_idiv_irem()
1320 __ orr(R5, R5, AsmOperand(R6, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1322 __ orr(R6, R6, AsmOperand(R7, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1328 __ orr(R7, R7, AsmOperand(R8, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1330 __ orr(R8, R8, AsmOperand(R9, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1356 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1388 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1430 __ mov(R3, AsmOperand(R3, lsr, 16), gt); in generate_forward_shifted_copy_loop()
1653 __ mov(R9, AsmOperand(R12, lsr, 16), ne); in generate_backward_shifted_copy_loop()
1656 __ mov(R12, AsmOperand(R12, lsl, 16), ne); in generate_backward_shifted_copy_loop()
[all …]
H A DmacroAssembler_arm.cpp595 if (AsmOperand::is_rotated_imm(c)) { in mov_slow()
597 } else if (AsmOperand::is_rotated_imm(~c)) { in mov_slow()
724 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in zero_extend()
725 mov(rd, AsmOperand(rd, lsr, 32 - bits)); in zero_extend()
730 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in sign_extend()
731 mov(rd, AsmOperand(rd, asr, 32 - bits)); in sign_extend()
1448 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
1449 mov(tmp_reg, AsmOperand(tmp_reg, lsr, 23)); in biased_locking_enter()
1478 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
1620 eor(dst, dst, AsmOperand(dst, lsl, 3)); in floating_cmp()
[all …]
/dports/java/openjdk17/jdk17u-jdk-17.0.1-12-1/src/hotspot/cpu/arm/
H A DstubRoutinesCrypto_arm.cpp175 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_encryptBlock()
180 __ eor(R1, R1, AsmOperand(R2, ror, 8)); in generate_aescrypt_encryptBlock()
189 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_encryptBlock()
203 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_encryptBlock()
218 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_encryptBlock()
381 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_decryptBlock()
395 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_decryptBlock()
409 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_decryptBlock()
424 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_decryptBlock()
801 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_cipherBlockChaining_decryptAESCrypt()
[all …]
H A DstubGenerator_arm.cpp345 __ cmp(remainder, AsmOperand(divisor, lsl, i)); in generate_idiv_irem()
1320 __ orr(R5, R5, AsmOperand(R6, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1322 __ orr(R6, R6, AsmOperand(R7, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1328 __ orr(R7, R7, AsmOperand(R8, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1330 __ orr(R8, R8, AsmOperand(R9, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1356 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1388 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1430 __ mov(R3, AsmOperand(R3, lsr, 16), gt); in generate_forward_shifted_copy_loop()
1653 __ mov(R9, AsmOperand(R12, lsr, 16), ne); in generate_backward_shifted_copy_loop()
1656 __ mov(R12, AsmOperand(R12, lsl, 16), ne); in generate_backward_shifted_copy_loop()
[all …]
H A DmacroAssembler_arm.cpp597 if (AsmOperand::is_rotated_imm(c)) { in mov_slow()
599 } else if (AsmOperand::is_rotated_imm(~c)) { in mov_slow()
726 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in zero_extend()
727 mov(rd, AsmOperand(rd, lsr, 32 - bits)); in zero_extend()
732 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in sign_extend()
733 mov(rd, AsmOperand(rd, asr, 32 - bits)); in sign_extend()
1445 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
1446 mov(tmp_reg, AsmOperand(tmp_reg, lsr, 23)); in biased_locking_enter()
1475 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
1617 eor(dst, dst, AsmOperand(dst, lsl, 3)); in floating_cmp()
[all …]
/dports/java/openjdk11-jre/jdk11u-jdk-11.0.13-8-1/src/hotspot/cpu/arm/
H A DstubRoutinesCrypto_arm.cpp175 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_encryptBlock()
180 __ eor(R1, R1, AsmOperand(R2, ror, 8)); in generate_aescrypt_encryptBlock()
189 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_encryptBlock()
203 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_encryptBlock()
218 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_encryptBlock()
381 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_decryptBlock()
395 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_decryptBlock()
409 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_decryptBlock()
424 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_decryptBlock()
801 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_cipherBlockChaining_decryptAESCrypt()
[all …]
H A DstubGenerator_arm.cpp456 __ cmp(remainder, AsmOperand(divisor, lsl, i)); in generate_idiv_irem()
1697 __ orr(R5, R5, AsmOperand(R6, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1699 __ orr(R6, R6, AsmOperand(R7, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1707 __ orr(R7, R7, AsmOperand(R8, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1709 __ orr(R8, R8, AsmOperand(R9, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1825 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1857 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1899 __ mov(R3, AsmOperand(R3, lsr, 16), gt); in generate_forward_shifted_copy_loop()
2233 __ mov(R9, AsmOperand(R12, lsr, 16), ne); in generate_backward_shifted_copy_loop()
2236 __ mov(R12, AsmOperand(R12, lsl, 16), ne); in generate_backward_shifted_copy_loop()
[all …]
H A DmacroAssembler_arm.cpp800 if (AsmOperand::is_rotated_imm(c)) { in mov_slow()
802 } else if (AsmOperand::is_rotated_imm(~c)) { in mov_slow()
1018 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in zero_extend()
1019 mov(rd, AsmOperand(rd, lsr, 32 - bits)); in zero_extend()
1033 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in sign_extend()
1034 mov(rd, AsmOperand(rd, asr, 32 - bits)); in sign_extend()
2068 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
2069 mov(tmp_reg, AsmOperand(tmp_reg, lsr, 23)); in biased_locking_enter()
2103 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
2335 eor(dst, dst, AsmOperand(dst, lsl, 3)); in floating_cmp()
[all …]
/dports/java/openjdk15/jdk15u-jdk-15.0.6-1-1/src/hotspot/cpu/arm/
H A DstubRoutinesCrypto_arm.cpp175 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_encryptBlock()
180 __ eor(R1, R1, AsmOperand(R2, ror, 8)); in generate_aescrypt_encryptBlock()
189 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_encryptBlock()
203 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_encryptBlock()
218 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_encryptBlock()
381 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_decryptBlock()
395 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_decryptBlock()
409 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_decryptBlock()
424 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_decryptBlock()
801 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_cipherBlockChaining_decryptAESCrypt()
[all …]
H A DstubGenerator_arm.cpp345 __ cmp(remainder, AsmOperand(divisor, lsl, i)); in generate_idiv_irem()
1320 __ orr(R5, R5, AsmOperand(R6, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1322 __ orr(R6, R6, AsmOperand(R7, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1328 __ orr(R7, R7, AsmOperand(R8, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1330 __ orr(R8, R8, AsmOperand(R9, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1356 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1388 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1430 __ mov(R3, AsmOperand(R3, lsr, 16), gt); in generate_forward_shifted_copy_loop()
1653 __ mov(R9, AsmOperand(R12, lsr, 16), ne); in generate_backward_shifted_copy_loop()
1656 __ mov(R12, AsmOperand(R12, lsl, 16), ne); in generate_backward_shifted_copy_loop()
[all …]
H A DmacroAssembler_arm.cpp609 if (AsmOperand::is_rotated_imm(c)) { in mov_slow()
611 } else if (AsmOperand::is_rotated_imm(~c)) { in mov_slow()
738 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in zero_extend()
739 mov(rd, AsmOperand(rd, lsr, 32 - bits)); in zero_extend()
744 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in sign_extend()
745 mov(rd, AsmOperand(rd, asr, 32 - bits)); in sign_extend()
1466 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
1467 mov(tmp_reg, AsmOperand(tmp_reg, lsr, 23)); in biased_locking_enter()
1496 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
1640 eor(dst, dst, AsmOperand(dst, lsl, 3)); in floating_cmp()
[all …]
/dports/java/openjdk14/jdk14u-jdk-14.0.2-12-1/src/hotspot/cpu/arm/
H A DstubRoutinesCrypto_arm.cpp175 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_encryptBlock()
180 __ eor(R1, R1, AsmOperand(R2, ror, 8)); in generate_aescrypt_encryptBlock()
189 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_encryptBlock()
203 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_encryptBlock()
218 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_encryptBlock()
381 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_aescrypt_decryptBlock()
395 __ mov(R12, AsmOperand(R6, lsr, 24)); in generate_aescrypt_decryptBlock()
409 __ mov(R12, AsmOperand(R7, lsr, 24)); in generate_aescrypt_decryptBlock()
424 __ mov(R12, AsmOperand(R8, lsr, 24)); in generate_aescrypt_decryptBlock()
801 __ mov(R12, AsmOperand(R5, lsr, 24)); in generate_cipherBlockChaining_decryptAESCrypt()
[all …]
H A DstubGenerator_arm.cpp344 __ cmp(remainder, AsmOperand(divisor, lsl, i)); in generate_idiv_irem()
1319 __ orr(R5, R5, AsmOperand(R6, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1321 __ orr(R6, R6, AsmOperand(R7, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1327 __ orr(R7, R7, AsmOperand(R8, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1329 __ orr(R8, R8, AsmOperand(R9, lsl, lsl_shift)); in generate_forward_shifted_copy_loop()
1355 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1387 __ mov(R3, AsmOperand(R12, lsr, lsr_shift)); in generate_forward_shifted_copy_loop()
1429 __ mov(R3, AsmOperand(R3, lsr, 16), gt); in generate_forward_shifted_copy_loop()
1652 __ mov(R9, AsmOperand(R12, lsr, 16), ne); in generate_backward_shifted_copy_loop()
1655 __ mov(R12, AsmOperand(R12, lsl, 16), ne); in generate_backward_shifted_copy_loop()
[all …]
H A DmacroAssembler_arm.cpp608 if (AsmOperand::is_rotated_imm(c)) { in mov_slow()
610 } else if (AsmOperand::is_rotated_imm(~c)) { in mov_slow()
737 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in zero_extend()
738 mov(rd, AsmOperand(rd, lsr, 32 - bits)); in zero_extend()
743 mov(rd, AsmOperand(rn, lsl, 32 - bits)); in sign_extend()
744 mov(rd, AsmOperand(rd, asr, 32 - bits)); in sign_extend()
1465 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
1466 mov(tmp_reg, AsmOperand(tmp_reg, lsr, 23)); in biased_locking_enter()
1495 mov(tmp_reg, AsmOperand(tmp_reg, lsl, 23)); in biased_locking_enter()
1690 eor(dst, dst, AsmOperand(dst, lsl, 3)); in floating_cmp()
[all …]

1 2 3 4 5 6 7 8 9 10