/*
 * include/linux/unaligned/access_ok.h
 *
 * Unaligned-access helpers for architectures on which plain (possibly
 * unaligned) loads and stores are permitted and efficient.
 */
#ifndef _LINUX_UNALIGNED_ACCESS_OK_H
#define _LINUX_UNALIGNED_ACCESS_OK_H

#include <linux/kernel.h>
#include <asm/byteorder.h>
  5. static __always_inline u16 get_unaligned_le16(const void *p)
  6. {
  7. return le16_to_cpup((__le16 *)p);
  8. }
  9. static __always_inline u32 get_unaligned_le32(const void *p)
  10. {
  11. return le32_to_cpup((__le32 *)p);
  12. }
  13. static __always_inline u64 get_unaligned_le64(const void *p)
  14. {
  15. return le64_to_cpup((__le64 *)p);
  16. }
  17. static __always_inline u16 get_unaligned_be16(const void *p)
  18. {
  19. return be16_to_cpup((__be16 *)p);
  20. }
  21. static __always_inline u32 get_unaligned_be32(const void *p)
  22. {
  23. return be32_to_cpup((__be32 *)p);
  24. }
  25. static __always_inline u64 get_unaligned_be64(const void *p)
  26. {
  27. return be64_to_cpup((__be64 *)p);
  28. }
  29. static __always_inline void put_unaligned_le16(u16 val, void *p)
  30. {
  31. *((__le16 *)p) = cpu_to_le16(val);
  32. }
  33. static __always_inline void put_unaligned_le32(u32 val, void *p)
  34. {
  35. *((__le32 *)p) = cpu_to_le32(val);
  36. }
  37. static __always_inline void put_unaligned_le64(u64 val, void *p)
  38. {
  39. *((__le64 *)p) = cpu_to_le64(val);
  40. }
  41. static __always_inline void put_unaligned_be16(u16 val, void *p)
  42. {
  43. *((__be16 *)p) = cpu_to_be16(val);
  44. }
  45. static __always_inline void put_unaligned_be32(u32 val, void *p)
  46. {
  47. *((__be32 *)p) = cpu_to_be32(val);
  48. }
  49. static __always_inline void put_unaligned_be64(u64 val, void *p)
  50. {
  51. *((__be64 *)p) = cpu_to_be64(val);
  52. }
#endif /* _LINUX_UNALIGNED_ACCESS_OK_H */