// Copyright 2017 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

#ifndef ABSL_BASE_INTERNAL_ENDIAN_H_
#define ABSL_BASE_INTERNAL_ENDIAN_H_

#include <cstdint>
#include <cstdlib>

#include "absl/base/casts.h"
#include "absl/base/config.h"
#include "absl/base/internal/unaligned_access.h"
#include "absl/base/nullability.h"
#include "absl/base/port.h"

namespace absl {
ABSL_NAMESPACE_BEGIN

inline uint64_t gbswap_64(uint64_t host_int) {
#if ABSL_HAVE_BUILTIN(__builtin_bswap64) || defined(__GNUC__)
  return __builtin_bswap64(host_int);
#elif defined(_MSC_VER)
  return _byteswap_uint64(host_int);
#else
  return (((host_int & uint64_t{0xFF}) << 56) |
          ((host_int & uint64_t{0xFF00}) << 40) |
          ((host_int & uint64_t{0xFF0000}) << 24) |
          ((host_int & uint64_t{0xFF000000}) << 8) |
          ((host_int & uint64_t{0xFF00000000}) >> 8) |
          ((host_int & uint64_t{0xFF0000000000}) >> 24) |
          ((host_int & uint64_t{0xFF000000000000}) >> 40) |
          ((host_int & uint64_t{0xFF00000000000000}) >> 56));
#endif
}

inline uint32_t gbswap_32(uint32_t host_int) {
#if ABSL_HAVE_BUILTIN(__builtin_bswap32) || defined(__GNUC__)
  return __builtin_bswap32(host_int);
#elif defined(_MSC_VER)
  return _byteswap_ulong(host_int);
#else
  return (((host_int & uint32_t{0xFF}) << 24) |
          ((host_int & uint32_t{0xFF00}) << 8) |
          ((host_int & uint32_t{0xFF0000}) >> 8) |
          ((host_int & uint32_t{0xFF000000}) >> 24));
#endif
}

inline uint16_t gbswap_16(uint16_t host_int) {
#if ABSL_HAVE_BUILTIN(__builtin_bswap16) || defined(__GNUC__)
  return __builtin_bswap16(host_int);
#elif defined(_MSC_VER)
  return _byteswap_ushort(host_int);
#else
  return (((host_int & uint16_t{0xFF}) << 8) |
          ((host_int & uint16_t{0xFF00}) >> 8));
#endif
}
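
// Example (illustrative): each gbswap_* function reverses the byte order of
// its argument, independent of the host's endianness.
//
//   uint16_t a = gbswap_16(0x1234);              // a == 0x3412
//   uint32_t b = gbswap_32(0x12345678);          // b == 0x78563412
//   uint64_t c = gbswap_64(0x0102030405060708);  // c == 0x0807060504030201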

#ifdef ABSL_IS_LITTLE_ENDIAN

// Portable definitions for htonl (host-to-network) and friends on little-endian
// architectures.
inline uint16_t ghtons(uint16_t x) { return gbswap_16(x); }
inline uint32_t ghtonl(uint32_t x) { return gbswap_32(x); }
inline uint64_t ghtonll(uint64_t x) { return gbswap_64(x); }

#elif defined ABSL_IS_BIG_ENDIAN

// Portable definitions for htonl (host-to-network) etc. on big-endian
// architectures. These definitions are simpler since the host byte order is the
// same as network byte order.
inline uint16_t ghtons(uint16_t x) { return x; }
inline uint32_t ghtonl(uint32_t x) { return x; }
inline uint64_t ghtonll(uint64_t x) { return x; }

#else
#error \
    "Unsupported byte order: Either ABSL_IS_BIG_ENDIAN or " \
       "ABSL_IS_LITTLE_ENDIAN must be defined"
#endif  // byte order

inline uint16_t gntohs(uint16_t x) { return ghtons(x); }
inline uint32_t gntohl(uint32_t x) { return ghtonl(x); }
inline uint64_t gntohll(uint64_t x) { return ghtonll(x); }
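
// Example (illustrative): ghtonl() converts a host-order value to network
// (big-endian) byte order and gntohl() converts it back, so the round trip is
// the identity. On a little-endian host this is a byte swap:
//
//   uint32_t net = ghtonl(0x01020304);  // net == 0x04030201 on little-endian
//   uint32_t host = gntohl(net);        // host == 0x01020304 again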

// Utilities to convert numbers between the current host's native byte
// order and little-endian byte order.
//
// Load/Store methods are alignment safe.
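//
// Example (illustrative): on a little-endian host these conversions are
// no-ops; on a big-endian host they byte-swap. In both cases, storing the
// result of FromHost32() to memory writes the bytes in little-endian order.
//
//   uint32_t wire = little_endian::FromHost32(0x01020304);
//   uint32_t host = little_endian::ToHost32(wire);  // host == 0x01020304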
namespace little_endian {
// Conversion functions.
#ifdef ABSL_IS_LITTLE_ENDIAN

inline uint16_t FromHost16(uint16_t x) { return x; }
inline uint16_t ToHost16(uint16_t x) { return x; }

inline uint32_t FromHost32(uint32_t x) { return x; }
inline uint32_t ToHost32(uint32_t x) { return x; }

inline uint64_t FromHost64(uint64_t x) { return x; }
inline uint64_t ToHost64(uint64_t x) { return x; }

inline constexpr bool IsLittleEndian() { return true; }

#elif defined ABSL_IS_BIG_ENDIAN

inline uint16_t FromHost16(uint16_t x) { return gbswap_16(x); }
inline uint16_t ToHost16(uint16_t x) { return gbswap_16(x); }

inline uint32_t FromHost32(uint32_t x) { return gbswap_32(x); }
inline uint32_t ToHost32(uint32_t x) { return gbswap_32(x); }

inline uint64_t FromHost64(uint64_t x) { return gbswap_64(x); }
inline uint64_t ToHost64(uint64_t x) { return gbswap_64(x); }

inline constexpr bool IsLittleEndian() { return false; }

#endif /* ENDIAN */

inline uint8_t FromHost(uint8_t x) { return x; }
inline uint16_t FromHost(uint16_t x) { return FromHost16(x); }
inline uint32_t FromHost(uint32_t x) { return FromHost32(x); }
inline uint64_t FromHost(uint64_t x) { return FromHost64(x); }
inline uint8_t ToHost(uint8_t x) { return x; }
inline uint16_t ToHost(uint16_t x) { return ToHost16(x); }
inline uint32_t ToHost(uint32_t x) { return ToHost32(x); }
inline uint64_t ToHost(uint64_t x) { return ToHost64(x); }

inline int8_t FromHost(int8_t x) { return x; }
inline int16_t FromHost(int16_t x) {
  return bit_cast<int16_t>(FromHost16(bit_cast<uint16_t>(x)));
}
inline int32_t FromHost(int32_t x) {
  return bit_cast<int32_t>(FromHost32(bit_cast<uint32_t>(x)));
}
inline int64_t FromHost(int64_t x) {
  return bit_cast<int64_t>(FromHost64(bit_cast<uint64_t>(x)));
}
inline int8_t ToHost(int8_t x) { return x; }
inline int16_t ToHost(int16_t x) {
  return bit_cast<int16_t>(ToHost16(bit_cast<uint16_t>(x)));
}
inline int32_t ToHost(int32_t x) {
  return bit_cast<int32_t>(ToHost32(bit_cast<uint32_t>(x)));
}
inline int64_t ToHost(int64_t x) {
  return bit_cast<int64_t>(ToHost64(bit_cast<uint64_t>(x)));
}

// Functions to do unaligned loads and stores in little-endian order.
inline uint16_t Load16(absl::Nonnull<const void *> p) {
  return ToHost16(ABSL_INTERNAL_UNALIGNED_LOAD16(p));
}

inline void Store16(absl::Nonnull<void *> p, uint16_t v) {
  ABSL_INTERNAL_UNALIGNED_STORE16(p, FromHost16(v));
}

inline uint32_t Load32(absl::Nonnull<const void *> p) {
  return ToHost32(ABSL_INTERNAL_UNALIGNED_LOAD32(p));
}

inline void Store32(absl::Nonnull<void *> p, uint32_t v) {
  ABSL_INTERNAL_UNALIGNED_STORE32(p, FromHost32(v));
}

inline uint64_t Load64(absl::Nonnull<const void *> p) {
  return ToHost64(ABSL_INTERNAL_UNALIGNED_LOAD64(p));
}

inline void Store64(absl::Nonnull<void *> p, uint64_t v) {
  ABSL_INTERNAL_UNALIGNED_STORE64(p, FromHost64(v));
}
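
// Example (illustrative): Store32()/Load32() round-trip a value through an
// arbitrarily aligned byte buffer, always using little-endian byte order.
//
//   char buf[4];
//   little_endian::Store32(buf, 0x01020304);
//   // buf now holds {0x04, 0x03, 0x02, 0x01} on any host.
//   uint32_t v = little_endian::Load32(buf);  // v == 0x01020304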

}  // namespace little_endian

// Utilities to convert numbers between the current host's native byte
// order and big-endian byte order (the same as network byte order).
//
// Load/Store methods are alignment safe.
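//
// Example (illustrative): on a big-endian host these conversions are no-ops;
// on a little-endian host they byte-swap. In both cases, storing the result
// of FromHost32() to memory writes the bytes in big-endian (network) order.
//
//   uint32_t wire = big_endian::FromHost32(0x01020304);
//   uint32_t host = big_endian::ToHost32(wire);  // host == 0x01020304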
namespace big_endian {
#ifdef ABSL_IS_LITTLE_ENDIAN

inline uint16_t FromHost16(uint16_t x) { return gbswap_16(x); }
inline uint16_t ToHost16(uint16_t x) { return gbswap_16(x); }

inline uint32_t FromHost32(uint32_t x) { return gbswap_32(x); }
inline uint32_t ToHost32(uint32_t x) { return gbswap_32(x); }

inline uint64_t FromHost64(uint64_t x) { return gbswap_64(x); }
inline uint64_t ToHost64(uint64_t x) { return gbswap_64(x); }

inline constexpr bool IsLittleEndian() { return true; }

#elif defined ABSL_IS_BIG_ENDIAN

inline uint16_t FromHost16(uint16_t x) { return x; }
inline uint16_t ToHost16(uint16_t x) { return x; }

inline uint32_t FromHost32(uint32_t x) { return x; }
inline uint32_t ToHost32(uint32_t x) { return x; }

inline uint64_t FromHost64(uint64_t x) { return x; }
inline uint64_t ToHost64(uint64_t x) { return x; }

inline constexpr bool IsLittleEndian() { return false; }

#endif /* ENDIAN */

inline uint8_t FromHost(uint8_t x) { return x; }
inline uint16_t FromHost(uint16_t x) { return FromHost16(x); }
inline uint32_t FromHost(uint32_t x) { return FromHost32(x); }
inline uint64_t FromHost(uint64_t x) { return FromHost64(x); }
inline uint8_t ToHost(uint8_t x) { return x; }
inline uint16_t ToHost(uint16_t x) { return ToHost16(x); }
inline uint32_t ToHost(uint32_t x) { return ToHost32(x); }
inline uint64_t ToHost(uint64_t x) { return ToHost64(x); }

inline int8_t FromHost(int8_t x) { return x; }
inline int16_t FromHost(int16_t x) {
  return bit_cast<int16_t>(FromHost16(bit_cast<uint16_t>(x)));
}
inline int32_t FromHost(int32_t x) {
  return bit_cast<int32_t>(FromHost32(bit_cast<uint32_t>(x)));
}
inline int64_t FromHost(int64_t x) {
  return bit_cast<int64_t>(FromHost64(bit_cast<uint64_t>(x)));
}
inline int8_t ToHost(int8_t x) { return x; }
inline int16_t ToHost(int16_t x) {
  return bit_cast<int16_t>(ToHost16(bit_cast<uint16_t>(x)));
}
inline int32_t ToHost(int32_t x) {
  return bit_cast<int32_t>(ToHost32(bit_cast<uint32_t>(x)));
}
inline int64_t ToHost(int64_t x) {
  return bit_cast<int64_t>(ToHost64(bit_cast<uint64_t>(x)));
}

// Functions to do unaligned loads and stores in big-endian order.
inline uint16_t Load16(absl::Nonnull<const void *> p) {
  return ToHost16(ABSL_INTERNAL_UNALIGNED_LOAD16(p));
}

inline void Store16(absl::Nonnull<void *> p, uint16_t v) {
  ABSL_INTERNAL_UNALIGNED_STORE16(p, FromHost16(v));
}

inline uint32_t Load32(absl::Nonnull<const void *> p) {
  return ToHost32(ABSL_INTERNAL_UNALIGNED_LOAD32(p));
}

inline void Store32(absl::Nonnull<void *> p, uint32_t v) {
  ABSL_INTERNAL_UNALIGNED_STORE32(p, FromHost32(v));
}

inline uint64_t Load64(absl::Nonnull<const void *> p) {
  return ToHost64(ABSL_INTERNAL_UNALIGNED_LOAD64(p));
}

inline void Store64(absl::Nonnull<void *> p, uint64_t v) {
  ABSL_INTERNAL_UNALIGNED_STORE64(p, FromHost64(v));
}
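
// Example (illustrative): reading a big-endian (network-order) field, such as
// a length prefix, from an arbitrarily aligned byte buffer.
//
//   const unsigned char hdr[4] = {0x00, 0x00, 0x01, 0x00};
//   uint32_t len = big_endian::Load32(hdr);  // len == 256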

}  // namespace big_endian

ABSL_NAMESPACE_END
}  // namespace absl

#endif  // ABSL_BASE_INTERNAL_ENDIAN_H_