commit 4ae4ff2095
git-svn-id: svn://svn.openwrt.org/openwrt/trunk@14156 3c298f89-4303-0410-b956-a3cf2f4a3e73
glibc headers define both __LITTLE_ENDIAN and __BIG_ENDIAN,
which was tripping the check in linux/byteorder.h. Let's
just stay out of userspace's way and use __KERN_{endian}.

The old implementation got away with it as it kept the two
implementations in different headers and conditionally included
the right one. The combined header does checks within each
function body and depends on only one being defined.

Converted the arches in mainline that have already moved to the
new header; as the other arches merge they will need similar
fixups.

Signed-off-by: Harvey Harrison <harvey.harrison@gmail.com>
---
 arch/avr32/include/asm/byteorder.h |  2 +-
 arch/mips/include/asm/byteorder.h  |  4 +-
 arch/sh/include/asm/byteorder.h    |  4 +-
 arch/sparc/include/asm/byteorder.h |  2 +-
 include/linux/byteorder.h          | 84 ++++++++++++++++++------------------
 5 files changed, 48 insertions(+), 48 deletions(-)

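For context, a minimal standalone sketch of the clash described above (not part of the patch; it assumes a glibc host, where <endian.h> gives both byte-order macros numeric values on every target, so only the __KERN_-prefixed name reflects what the arch header actually set):

/* sketch.c - hypothetical illustration, not taken from the patch */
#include <endian.h>   /* glibc: defines __LITTLE_ENDIAN, __BIG_ENDIAN, __BYTE_ORDER */
#include <stdio.h>

/* what an arch's asm/byteorder.h would state after this patch;
 * little-endian is assumed here purely for the example */
#define __KERN_LITTLE_ENDIAN

int main(void)
{
	/* both userspace macros always exist, with numeric values */
	printf("__LITTLE_ENDIAN=%d __BIG_ENDIAN=%d __BYTE_ORDER=%d\n",
	       __LITTLE_ENDIAN, __BIG_ENDIAN, __BYTE_ORDER);

#if defined(__LITTLE_ENDIAN) && defined(__BIG_ENDIAN)
	/* this is the condition that was tripping the old check and the
	 * per-function #ifdef tests in linux/byteorder.h */
	printf("old-style check: both endian macros are defined\n");
#endif

#ifdef __KERN_LITTLE_ENDIAN
	/* the prefixed name exists only because the arch header set it,
	 * so it remains a single, unambiguous answer */
	printf("kernel-side check: __KERN_LITTLE_ENDIAN is set\n");
#endif
	return 0;
}

The hunks below follow the same idea: each arch header sets only __KERN_{LITTLE,BIG}_ENDIAN, and linux/byteorder.h then supplies the numeric __LITTLE_ENDIAN/__BIG_ENDIAN values and the *_BITFIELD macros only when they are not already defined.
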
--- a/arch/avr32/include/asm/byteorder.h
+++ b/arch/avr32/include/asm/byteorder.h
@@ -7,7 +7,7 @@
#include <asm/types.h>
#include <linux/compiler.h>

-#define __BIG_ENDIAN
+#define __KERN_BIG_ENDIAN
#define __SWAB_64_THRU_32__

#ifdef __CHECKER__
--- a/arch/mips/include/asm/byteorder.h
+++ b/arch/mips/include/asm/byteorder.h
@@ -12,9 +12,9 @@
#include <asm/types.h>

#if defined(__MIPSEB__)
-# define __BIG_ENDIAN
+# define __KERN_BIG_ENDIAN
#elif defined(__MIPSEL__)
-# define __LITTLE_ENDIAN
+# define __KERN_LITTLE_ENDIAN
#else
# error "MIPS, but neither __MIPSEB__, nor __MIPSEL__???"
#endif
--- a/arch/sh/include/asm/byteorder.h
+++ b/arch/sh/include/asm/byteorder.h
@@ -9,9 +9,9 @@
#include <linux/types.h>

#ifdef __LITTLE_ENDIAN__
-# define __LITTLE_ENDIAN
+# define __KERN_LITTLE_ENDIAN
#else
-# define __BIG_ENDIAN
+# define __KERN_BIG_ENDIAN
#endif

#define __SWAB_64_THRU_32__
--- a/arch/sparc/include/asm/byteorder.h
+++ b/arch/sparc/include/asm/byteorder.h
@@ -4,7 +4,7 @@
#include <asm/types.h>
#include <asm/asi.h>

-#define __BIG_ENDIAN
+#define __KERN_BIG_ENDIAN

#ifdef CONFIG_SPARC32
#define __SWAB_64_THRU_32__
--- a/include/linux/byteorder.h
+++ b/include/linux/byteorder.h
@@ -4,33 +4,33 @@
#include <linux/types.h>
#include <linux/swab.h>

-#if defined(__LITTLE_ENDIAN) && defined(__BIG_ENDIAN)
+#if defined(__KERN_LITTLE_ENDIAN) && defined(__KERN_BIG_ENDIAN)
# error Fix asm/byteorder.h to define one endianness
#endif

-#if !defined(__LITTLE_ENDIAN) && !defined(__BIG_ENDIAN)
+#if !defined(__KERN_LITTLE_ENDIAN) && !defined(__KERN_BIG_ENDIAN)
# error Fix asm/byteorder.h to define arch endianness
#endif

-#ifdef __LITTLE_ENDIAN
-# undef __LITTLE_ENDIAN
-# define __LITTLE_ENDIAN 1234
+#ifdef __KERN_LITTLE_ENDIAN
+# ifndef __LITTLE_ENDIAN
+# define __LITTLE_ENDIAN 1234
+# endif
+# ifndef __LITTLE_ENDIAN_BITFIELD
+# define __LITTLE_ENDIAN_BITFIELD
+# endif
+#endif
+
+#ifdef __KERN_BIG_ENDIAN
+# ifndef __BIG_ENDIAN
+# define __BIG_ENDIAN 4321
+# endif
+# ifndef __BIG_ENDIAN_BITFIELD
+# define __BIG_ENDIAN_BITFIELD
+# endif
#endif

-#ifdef __BIG_ENDIAN
-# undef __BIG_ENDIAN
-# define __BIG_ENDIAN 4321
-#endif
-
-#if defined(__LITTLE_ENDIAN) && !defined(__LITTLE_ENDIAN_BITFIELD)
-# define __LITTLE_ENDIAN_BITFIELD
-#endif
-
-#if defined(__BIG_ENDIAN) && !defined(__BIG_ENDIAN_BITFIELD)
-# define __BIG_ENDIAN_BITFIELD
-#endif
-
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
# define __le16_to_cpu(x) ((__force __u16)(__le16)(x))
# define __le32_to_cpu(x) ((__force __u32)(__le32)(x))
# define __le64_to_cpu(x) ((__force __u64)(__le64)(x))
@@ -46,7 +46,7 @@
# define __cpu_to_be64(x) ((__force __be64)__swab64(x))
#endif

-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
# define __be16_to_cpu(x) ((__force __u16)(__be16)(x))
# define __be32_to_cpu(x) ((__force __u32)(__be32)(x))
# define __be64_to_cpu(x) ((__force __u64)(__be64)(x))
@@ -87,91 +87,91 @@

static inline void __le16_to_cpus(__u16 *p)
{
-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
__swab16s(p);
#endif
}

static inline void __cpu_to_le16s(__u16 *p)
{
-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
__swab16s(p);
#endif
}

static inline void __le32_to_cpus(__u32 *p)
{
-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
__swab32s(p);
#endif
}

static inline void __cpu_to_le32s(__u32 *p)
{
-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
__swab32s(p);
#endif
}

static inline void __le64_to_cpus(__u64 *p)
{
-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
__swab64s(p);
#endif
}

static inline void __cpu_to_le64s(__u64 *p)
{
-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
__swab64s(p);
#endif
}

static inline void __be16_to_cpus(__u16 *p)
{
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
__swab16s(p);
#endif
}

static inline void __cpu_to_be16s(__u16 *p)
{
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
__swab16s(p);
#endif
}

static inline void __be32_to_cpus(__u32 *p)
{
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
__swab32s(p);
#endif
}

static inline void __cpu_to_be32s(__u32 *p)
{
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
__swab32s(p);
#endif
}

static inline void __be64_to_cpus(__u64 *p)
{
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
__swab64s(p);
#endif
}

static inline void __cpu_to_be64s(__u64 *p)
{
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
__swab64s(p);
#endif
}

static inline __u16 __le16_to_cpup(const __le16 *p)
{
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
return (__force __u16)*p;
#else
return __swab16p((__force __u16 *)p);
@@ -180,7 +180,7 @@ static inline __u16 __le16_to_cpup(const

static inline __u32 __le32_to_cpup(const __le32 *p)
{
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
return (__force __u32)*p;
#else
return __swab32p((__force __u32 *)p);
@@ -189,7 +189,7 @@ static inline __u32 __le32_to_cpup(const

static inline __u64 __le64_to_cpup(const __le64 *p)
{
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
return (__force __u64)*p;
#else
return __swab64p((__force __u64 *)p);
@@ -198,7 +198,7 @@ static inline __u64 __le64_to_cpup(const

static inline __le16 __cpu_to_le16p(const __u16 *p)
{
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
return (__force __le16)*p;
#else
return (__force __le16)__swab16p(p);
@@ -207,7 +207,7 @@ static inline __le16 __cpu_to_le16p(cons

static inline __le32 __cpu_to_le32p(const __u32 *p)
{
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
return (__force __le32)*p;
#else
return (__force __le32)__swab32p(p);
@@ -216,7 +216,7 @@ static inline __le32 __cpu_to_le32p(cons

static inline __le64 __cpu_to_le64p(const __u64 *p)
{
-#ifdef __LITTLE_ENDIAN
+#ifdef __KERN_LITTLE_ENDIAN
return (__force __le64)*p;
#else
return (__force __le64)__swab64p(p);
@@ -225,7 +225,7 @@ static inline __le64 __cpu_to_le64p(cons

static inline __u16 __be16_to_cpup(const __be16 *p)
{
-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
return (__force __u16)*p;
#else
return __swab16p((__force __u16 *)p);
@@ -234,7 +234,7 @@ static inline __u16 __be16_to_cpup(const

static inline __u32 __be32_to_cpup(const __be32 *p)
{
-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
return (__force __u32)*p;
#else
return __swab32p((__force __u32 *)p);
@@ -243,7 +243,7 @@ static inline __u32 __be32_to_cpup(const

static inline __u64 __be64_to_cpup(const __be64 *p)
{
-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
return (__force __u64)*p;
#else
return __swab64p((__force __u64 *)p);
@@ -252,7 +252,7 @@ static inline __u64 __be64_to_cpup(const

static inline __be16 __cpu_to_be16p(const __u16 *p)
{
-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
return (__force __be16)*p;
#else
return (__force __be16)__swab16p(p);
@@ -261,7 +261,7 @@ static inline __be16 __cpu_to_be16p(cons

static inline __be32 __cpu_to_be32p(const __u32 *p)
{
-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
return (__force __be32)*p;
#else
return (__force __be32)__swab32p(p);
@@ -270,7 +270,7 @@ static inline __be32 __cpu_to_be32p(cons

static inline __be64 __cpu_to_be64p(const __u64 *p)
{
-#ifdef __BIG_ENDIAN
+#ifdef __KERN_BIG_ENDIAN
return (__force __be64)*p;
#else
return (__force __be64)__swab64p(p);