1/*- 2 * Copyright (c) 2010 Konstantin Belousov <kib@FreeBSD.org> 3 * All rights reserved. 4 * 5 * Redistribution and use in source and binary forms, with or without 6 * modification, are permitted provided that the following conditions 7 * are met: 8 * 1. Redistributions of source code must retain the above copyright 9 * notice, this list of conditions and the following disclaimer. 10 * 2. Redistributions in binary form must reproduce the above copyright 11 * notice, this list of conditions and the following disclaimer in the 12 * documentation and/or other materials provided with the distribution. 13 * 14 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND 15 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 17 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE 18 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 19 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 20 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 21 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 22 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 23 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 24 * SUCH DAMAGE. 
 *
 * $FreeBSD: src/sys/crypto/aesni/aesencdec_amd64.S,v 1.1 2010/07/23 11:00:46 kib Exp $
 */

#include <machine/asmacros.h>

	.text

/*
 * void aesni_enc(int rounds, const void *key_schedule, const void *from,
 *                void *to, const void *iv)
 *
 * Encrypt a single 16-byte AES block.  System V AMD64 ABI:
 *   %edi = loop count for the middle rounds: the code executes %edi
 *          AESENC rounds plus one AESENCLAST after the initial round-0
 *          XOR, so the caller presumably passes (AES rounds - 1) --
 *          TODO confirm against the C caller.
 *   %rsi = expanded key schedule, consecutive 16-byte round keys
 *   %rdx = source block (may be unaligned; loaded with movdqu)
 *   %rcx = destination block (may be unaligned; stored with movdqu)
 *   %r8  = optional IV to XOR into the plaintext first (CBC chaining);
 *          pass NULL (0) to skip.
 * Clobbers: %edi, %rsi, %xmm0, %xmm1, flags.
 *
 * NOTE: the AES-NI instructions are emitted as raw .byte sequences so
 * this file assembles with toolchains whose assembler predates the
 * AES-NI mnemonics; the intended instruction appears in the // comment
 * directly above each .byte line.
 */
ENTRY(aesni_enc)
	.cfi_startproc
	movdqu	(%rdx),%xmm0		/* xmm0 = plaintext block */
	cmpq	$0,%r8
	je	1f			/* no IV supplied */
	movdqu	(%r8),%xmm1		/* unaligned load into reg */
	pxor	%xmm1,%xmm0		/* pxor otherwise can fault on iv */
1:
	pxor	(%rsi),%xmm0		/* round 0: XOR with first round key */
2:
	addq	$0x10,%rsi		/* advance to next round key */
//	aesenc	(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xdc,0x06
	decl	%edi
	jne	2b
	addq	$0x10,%rsi
//	aesenclast (%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xdd,0x06
	movdqu	%xmm0,(%rcx)		/* store ciphertext */
	retq
	.cfi_endproc
END(aesni_enc)

/*
 * void aesni_dec(int rounds, const void *key_schedule, const void *from,
 *                void *to, const void *iv)
 *
 * Decrypt a single 16-byte AES block.  Register usage mirrors
 * aesni_enc (see its header, including the note on the round count):
 *   %edi = middle-round loop count, %rsi = expanded decryption key
 *   schedule, %rdx = ciphertext block, %rcx = destination block,
 *   %r8  = optional IV XORed into the output AFTER the last round
 *          (CBC-style unchaining); NULL to skip.
 * Clobbers: %edi, %rsi, %xmm0, %xmm1, flags.
 */
ENTRY(aesni_dec)
	.cfi_startproc
	movdqu	(%rdx),%xmm0		/* xmm0 = ciphertext block */
	pxor	(%rsi),%xmm0		/* round 0: XOR with first round key */
1:
	addq	$0x10,%rsi		/* advance to next round key */
//	aesdec	(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x06
	decl	%edi
	jne	1b
	addq	$0x10,%rsi
//	aesdeclast (%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xdf,0x06
	cmpq	$0,%r8
	je	2f			/* no IV supplied */
	movdqu	(%r8),%xmm1		/* unaligned load; then XOR IV into result */
	pxor	%xmm1,%xmm0
2:
	movdqu	%xmm0,(%rcx)		/* store plaintext */
	retq
	.cfi_endproc
END(aesni_dec)

/*
 * void aesni_decrypt_cbc(int rounds, const void *key_schedule, size_t len,
 *                        void *buf, const void *iv)
 *
 * In-place AES-CBC decryption of a whole buffer (len must be a
 * multiple of 16; the low 4 bits are discarded by the shrq):
 *   %edi = full AES round count -- the branches below compare against
 *          12, selecting the 10- (AES-128), 12- (AES-192) or 14-round
 *          (AES-256) ending; note this differs from aesni_enc/aesni_dec,
 *          which loop on a count rather than branching on it.
 *   %rsi = expanded decryption key schedule (up to 15 round keys)
 *   %rdx = buffer length in bytes (converted to a block count)
 *   %rcx = buffer, decrypted in place
 *   %r8  = IV for the first block
 * Clobbers: %rdx, %rcx, %xmm0, %xmm1, %xmm2, flags.
 *
 * The rounds are fully unrolled with explicit key-schedule offsets.
 * The cmpl $12 is done once per block BEFORE the nine unconditional
 * AESDEC steps: the flags survive because AES-NI instructions do not
 * modify EFLAGS, so jge/jg later still see the comparison result.
 * %xmm2 preserves each block's ciphertext to serve as the next
 * block's chaining IV (kept in %xmm1).
 */
ENTRY(aesni_decrypt_cbc)
	.cfi_startproc
	shrq	$4,%rdx			/* rdx = number of 16-byte blocks */
	movdqu	(%r8),%xmm1		/* xmm1 = IV for first block */
1:
	movdqu	(%rcx),%xmm0		/* xmm0 = ciphertext block */
	movdqa	%xmm0,%xmm2		/* save ciphertext: next block's IV */
	pxor	(%rsi),%xmm0		/* round 0: XOR with first round key */
	cmpl	$12,%edi		/* flags live across the AESDECs below */
//	aesdec	0x10(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x46,0x10
//	aesdec	0x20(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x46,0x20
//	aesdec	0x30(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x46,0x30
//	aesdec	0x40(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x46,0x40
//	aesdec	0x50(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x46,0x50
//	aesdec	0x60(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x46,0x60
//	aesdec	0x70(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x46,0x70
//	aesdec	0x80(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x86,0x80,0x00,0x00,0x00
//	aesdec	0x90(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x86,0x90,0x00,0x00,0x00
	jge	2f			/* rounds >= 12: AES-192/256 tail */
//	aesdeclast 0xa0(%rsi),%xmm0	/* rounds == 10 (AES-128): finish */
	.byte	0x66,0x0f,0x38,0xdf,0x86,0xa0,0x00,0x00,0x00
	jmp	4f
2:
//	aesdec	0xa0(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x86,0xa0,0x00,0x00,0x00
//	aesdec	0xb0(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x86,0xb0,0x00,0x00,0x00
	jg	3f			/* rounds > 12: AES-256 tail */
//	aesdeclast 0xc0(%rsi),%xmm0	/* rounds == 12 (AES-192): finish */
	.byte	0x66,0x0f,0x38,0xdf,0x86,0xc0,0x00,0x00,0x00
	jmp	4f
3:
//	aesdec	0xc0(%rsi),%xmm0	/* rounds == 14 (AES-256) */
	.byte	0x66,0x0f,0x38,0xde,0x86,0xc0,0x00,0x00,0x00
//	aesdec	0xd0(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xde,0x86,0xd0,0x00,0x00,0x00
//	aesdeclast 0xe0(%rsi),%xmm0
	.byte	0x66,0x0f,0x38,0xdf,0x86,0xe0,0x00,0x00,0x00
4:
	pxor	%xmm1,%xmm0		/* unchain: XOR previous-block IV */
	movdqu	%xmm0,(%rcx)		/* store plaintext over ciphertext */
	movdqa	%xmm2,%xmm1		// iv
	addq	$0x10,%rcx		/* advance to next block */
	decq	%rdx
	jne	1b
	retq
	.cfi_endproc
END(aesni_decrypt_cbc)