/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _GENERIC_UNALIGNED_H
#define _GENERIC_UNALIGNED_H

#include <asm/byteorder.h>

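/*
 * The two helpers below wrap the access in a single-member packed struct,
 * so the compiler knows the pointed-to location may be unaligned and emits
 * an access sequence that is valid at any alignment on the target.
 */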
#define __get_unaligned_t(type, ptr) ({					\
	const struct { type x; } __packed *__pptr = (typeof(__pptr))(ptr); \
	__pptr->x;							\
})

#define __put_unaligned_t(type, val, ptr) do {				\
	struct { type x; } __packed *__pptr = (typeof(__pptr))(ptr);	\
	__pptr->x = (val);						\
} while (0)

#define get_unaligned(ptr) __get_unaligned_t(typeof(*(ptr)), (ptr))
#define put_unaligned(val, ptr) __put_unaligned_t(typeof(*(ptr)), (val), (ptr))

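/*
 * Illustrative sketch (not part of the original header): get_unaligned() and
 * put_unaligned() access a native-endian value at a possibly unaligned
 * address. The buffer and offset below are hypothetical.
 *
 *	u8 buf[16];				// filled in elsewhere
 *	u32 v = get_unaligned((u32 *)(buf + 3));
 *	put_unaligned(v + 1, (u32 *)(buf + 3));
 */
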
static inline u16 get_unaligned_le16(const void *p)
{
	return le16_to_cpu(__get_unaligned_t(__le16, p));
}

static inline u32 get_unaligned_le32(const void *p)
{
	return le32_to_cpu(__get_unaligned_t(__le32, p));
}

static inline u64 get_unaligned_le64(const void *p)
{
	return le64_to_cpu(__get_unaligned_t(__le64, p));
}

static inline void put_unaligned_le16(u16 val, void *p)
{
	__put_unaligned_t(__le16, cpu_to_le16(val), p);
}

static inline void put_unaligned_le32(u32 val, void *p)
{
	__put_unaligned_t(__le32, cpu_to_le32(val), p);
}

static inline void put_unaligned_le64(u64 val, void *p)
{
	__put_unaligned_t(__le64, cpu_to_le64(val), p);
}

static inline u16 get_unaligned_be16(const void *p)
{
	return be16_to_cpu(__get_unaligned_t(__be16, p));
}

static inline u32 get_unaligned_be32(const void *p)
{
	return be32_to_cpu(__get_unaligned_t(__be32, p));
}

static inline u64 get_unaligned_be64(const void *p)
{
	return be64_to_cpu(__get_unaligned_t(__be64, p));
}

static inline void put_unaligned_be16(u16 val, void *p)
{
	__put_unaligned_t(__be16, cpu_to_be16(val), p);
}

static inline void put_unaligned_be32(u32 val, void *p)
{
	__put_unaligned_t(__be32, cpu_to_be32(val), p);
}

static inline void put_unaligned_be64(u64 val, void *p)
{
	__put_unaligned_t(__be64, cpu_to_be64(val), p);
}

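/*
 * Illustrative sketch (not part of the original header): the fixed-endian
 * helpers are typically used to parse or build wire/on-disk formats from a
 * byte buffer. The layout below is hypothetical.
 *
 *	u8 hdr[8] = { 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0 };
 *	u16 type = get_unaligned_be16(hdr);		// 0x1234
 *	u32 len = get_unaligned_le32(hdr + 2);		// 0xbc9a7856
 *	put_unaligned_le16(0xabcd, hdr + 6);		// hdr[6] = 0xcd, hdr[7] = 0xab
 */
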
/* Allow unaligned memory access */
void allow_unaligned(void);

#endif