1 /*
2  *  Copyright (C) 2007-2010 Lawrence Livermore National Security, LLC.
3  *  Copyright (C) 2007 The Regents of the University of California.
4  *  Produced at Lawrence Livermore National Laboratory (cf, DISCLAIMER).
5  *  Written by Brian Behlendorf <behlendorf1@llnl.gov>.
6  *  UCRL-CODE-235197
7  *
8  *  This file is part of the SPL, Solaris Porting Layer.
9  *
10  *  The SPL is free software; you can redistribute it and/or modify it
11  *  under the terms of the GNU General Public License as published by the
12  *  Free Software Foundation; either version 2 of the License, or (at your
13  *  option) any later version.
14  *
15  *  The SPL is distributed in the hope that it will be useful, but WITHOUT
16  *  ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
17  *  FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
18  *  for more details.
19  *
20  *  You should have received a copy of the GNU General Public License along
21  *  with the SPL.  If not, see <http://www.gnu.org/licenses/>.
22  */
23 
24 #ifndef _SPL_BYTEORDER_H
25 #define	_SPL_BYTEORDER_H
26 
27 #include <asm/byteorder.h>
28 
29 #if defined(__BIG_ENDIAN) && !defined(_ZFS_BIG_ENDIAN)
30 #define	_ZFS_BIG_ENDIAN
31 #endif
32 
33 #if defined(__LITTLE_ENDIAN) && !defined(_ZFS_LITTLE_ENDIAN)
34 #define	_ZFS_LITTLE_ENDIAN
35 #endif
36 
37 #include <sys/isa_defs.h>
38 
39 #ifdef __COVERITY__
40 /*
41  * Coverity's taint warnings from byteswapping are false positives for us.
42  * Suppress them by hiding byteswapping from Coverity.
43  */
44 
/*
 * Identity-style stand-ins: mask to the type's width but perform no
 * actual byte swap, so Coverity never sees "tainted" swapped data.
 * These are only ever compiled for Coverity analysis runs, never for
 * a real build.
 */
#define	BSWAP_8(x)	((x) & 0xff)
#define	BSWAP_16(x)	((x) & 0xffff)
#define	BSWAP_32(x)	((x) & 0xffffffff)
#define	BSWAP_64(x)	(x)
49 
50 #else /* __COVERITY__ */
51 
/*
 * Byte-swap by recursively exchanging the low and high halves of the
 * value.  NOTE(review): these are function-like macros that evaluate
 * their argument more than once, so do not pass expressions with side
 * effects (e.g. x++).  The argument must be at least as wide as the
 * swap being requested — BSWAP_64 expects a 64-bit operand; a narrower
 * type would make the `<< 32` shift undefined behavior.
 */
#define	BSWAP_8(x)	((x) & 0xff)
#define	BSWAP_16(x)	((BSWAP_8(x) << 8) | BSWAP_8((x) >> 8))
#define	BSWAP_32(x)	((BSWAP_16(x) << 16) | BSWAP_16((x) >> 16))
#define	BSWAP_64(x)	((BSWAP_32(x) << 32) | BSWAP_32((x) >> 32))
56 
57 #endif /* __COVERITY__ */
58 
/*
 * Solaris-style host-to-little-endian / host-to-big-endian conversion
 * names, mapped onto the Linux <asm/byteorder.h> helpers.  Each is a
 * no-op when the host byte order already matches the target order.
 */
#define	LE_16(x)	cpu_to_le16(x)
#define	LE_32(x)	cpu_to_le32(x)
#define	LE_64(x)	cpu_to_le64(x)
#define	BE_16(x)	cpu_to_be16(x)
#define	BE_32(x)	cpu_to_be32(x)
#define	BE_64(x)	cpu_to_be64(x)
65 
/*
 * Read an 8/16/32-bit big-endian integer from memory at address xa,
 * one byte at a time, so the load is safe regardless of the pointer's
 * alignment.  NOTE(review): xa is evaluated more than once in BE_IN16
 * and BE_IN32 — do not pass an expression with side effects.
 */
#define	BE_IN8(xa) \
	*((uint8_t *)(xa))

#define	BE_IN16(xa) \
	(((uint16_t)BE_IN8(xa) << 8) | BE_IN8((uint8_t *)(xa)+1))

#define	BE_IN32(xa) \
	(((uint32_t)BE_IN16(xa) << 16) | BE_IN16((uint8_t *)(xa)+2))
74 
75 #ifdef _ZFS_BIG_ENDIAN
/*
 * Convert a 64-bit value from host to network (big-endian) byte order.
 * On a big-endian host the native order already matches network order,
 * so this is the identity.
 */
static __inline__ uint64_t
htonll(uint64_t n)
{
	return (n);
}
81 
/*
 * Convert a 64-bit value from network (big-endian) to host byte order.
 * On a big-endian host the native order already matches network order,
 * so this is the identity.
 */
static __inline__ uint64_t
ntohll(uint64_t n)
{
	return (n);
}
87 #else
/*
 * Convert a 64-bit value from host (little-endian) to network
 * (big-endian) byte order: byte-swap each 32-bit half with htonl()
 * and exchange the halves.
 */
static __inline__ uint64_t
htonll(uint64_t n)
{
	uint64_t swapped_lo = htonl((uint32_t)n);
	uint64_t swapped_hi = htonl((uint32_t)(n >> 32));

	return ((swapped_lo << 32) + swapped_hi);
}
93 
/*
 * Convert a 64-bit value from network (big-endian) to host
 * (little-endian) byte order: byte-swap each 32-bit half with ntohl()
 * and exchange the halves.
 */
static __inline__ uint64_t
ntohll(uint64_t n)
{
	uint64_t swapped_lo = ntohl((uint32_t)n);
	uint64_t swapped_hi = ntohl((uint32_t)(n >> 32));

	return ((swapped_lo << 32) + swapped_hi);
}
99 #endif
100 
#endif /* _SPL_BYTEORDER_H */
102