1
by Faidon Liambotis
Import upstream version 0.8.0 |
1 |
#include <stdlib.h> |
2 |
#include <assert.h> |
|
3 |
#include <string.h> |
|
4 |
#include <errno.h> |
|
5 |
#include <stdbool.h> |
|
6 |
#include <limits.h> |
|
7 |
#include <sys/uio.h> |
|
8 |
||
1.2.1
by Faidon Liambotis
Import upstream version 0.8.3 |
9 |
#include "endian_compat.h" |
10 |
||
/*
 * Flag ARM cores older than ARMv6T2, where unaligned 64-bit accesses
 * are expensive (per the flag name).  The flag selects the memcpy-based
 * get_unaligned64()/put_unaligned64() accessors defined later in this
 * file.
 */
#if defined(__arm__) && \
	!defined(__ARM_ARCH_4__) && \
	!defined(__ARM_ARCH_4T__) && \
	!defined(__ARM_ARCH_5__) && \
	!defined(__ARM_ARCH_5T__) && \
	!defined(__ARM_ARCH_5TE__) && \
	!defined(__ARM_ARCH_5TEJ__) && \
	!defined(__ARM_ARCH_6__) && \
	!defined(__ARM_ARCH_6J__) && \
	!defined(__ARM_ARCH_6K__) && \
	!defined(__ARM_ARCH_6Z__) && \
	!defined(__ARM_ARCH_6ZK__) && \
	!defined(__ARM_ARCH_6T2__)
#define UNALIGNED64_REALLYS_SLOW 1
#endif

/*
 * Fallback little-endian conversion macros for platforms whose system
 * headers do not already provide htole16()/le32toh().  On little-endian
 * hosts they are identity; otherwise byte-swap via the GNU __bswap_*
 * helpers.
 */
#ifndef htole16
# if __BYTE_ORDER == __LITTLE_ENDIAN
#  define htole16(x) (x)
#  define le32toh(x) (x)
# else
#  define htole16(x) __bswap_16 (x)
#  define le32toh(x) __bswap_32 (x)
# endif
#endif
/* Kernel-style fixed-width integer aliases used throughout this file. */
typedef unsigned char u8;
typedef unsigned short u16;
typedef unsigned u32;
typedef unsigned long long u64;

/* Kernel BUG_ON() maps to a plain assertion in userspace. */
#define BUG_ON(x) assert(!(x))

/*
 * Unaligned access helpers.  NOTE(review): the *_le32/_le16 variants
 * load/store through casted pointers, which assumes the platform
 * tolerates misaligned, type-punned accesses (and technically violates
 * strict aliasing) -- kept byte-for-byte for compatibility with the
 * kernel source this mirrors.
 */
#define get_unaligned(x) (*(x))
#define get_unaligned_le32(x) (le32toh(*(u32 *)(x)))
#define put_unaligned(v,x) (*(x) = (v))
#define put_unaligned_le16(v,x) (*(u16 *)(x) = htole16(v))

/* You may want to define this on various ARM architectures */
#ifdef UNALIGNED64_REALLYS_SLOW
/* memcpy-based 64-bit accessors for cores flagged above. */
static inline u64 get_unaligned64(const void *p)
{
	u64 t;
	memcpy(&t, p, 8);
	return t;
}
static inline u64 put_unaligned64(u64 t, void *p)
{
	memcpy(p, &t, 8);
	return t;
}
#else
/* Direct dereference is fine where unaligned 64-bit access is cheap. */
#define get_unaligned64(x) get_unaligned(x)
#define put_unaligned64(x,p) put_unaligned(x,p)
#endif

/* Kernel allocator names mapped onto the C library heap. */
#define vmalloc(x) malloc(x)
#define vfree(x) free(x)

/* No symbol export tables in userspace. */
#define EXPORT_SYMBOL(x)

/* Element count of a true array -- do not apply to pointers. */
#define ARRAY_SIZE(x) (sizeof(x) / sizeof(*(x)))

/* Branch-prediction hints (GCC/Clang builtins). */
#define likely(x) __builtin_expect((x), 1)
#define unlikely(x) __builtin_expect((x), 0)

/*
 * Kernel min_t/max_t.  NOTE(review): unlike the kernel versions, the
 * type parameter `t` is ignored here, and each argument is evaluated
 * twice -- avoid side effects in the arguments.
 */
#define min_t(t,x,y) ((x) < (y) ? (x) : (y))
#define max_t(t,x,y) ((x) > (y) ? (x) : (y))

#if __BYTE_ORDER == __LITTLE_ENDIAN
#define __LITTLE_ENDIAN__ 1
#endif

/* Width of `long` in bits, from the compiler-provided size macro. */
#define BITS_PER_LONG (__SIZEOF_LONG__ * 8)