/*
 * lz4defs.h -- architecture specific defines
 *
 * Copyright (C) 2013, LG Electronics, Kyungsik Lee <kyungsik.lee@lge.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#ifndef COMMON_LZ4_DEFS_H
#define COMMON_LZ4_DEFS_H

#ifdef __XEN__
#include <asm/byteorder.h>
#include <xen/unaligned.h>
#else

static inline u16 get_unaligned_le16(const void *p)
{
	return le16_to_cpup(p);
}

#endif
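
/*
 * Outside of Xen builds, the embedding environment is expected to provide
 * le16_to_cpup() and the get_unaligned()/put_unaligned() helpers used below.
 */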

/*
 * Detect 64-bit mode
 */
#if (defined(__x86_64__) || defined(__x86_64) || defined(__amd64__) \
	|| defined(__ppc64__) || defined(__LP64__))
#define LZ4_ARCH64 1
#else
#define LZ4_ARCH64 0
#endif

/*
 * Architecture-specific macros
 */
#define BYTE	u8
typedef struct _U16_S { u16 v; } U16_S;
typedef struct _U32_S { u32 v; } U32_S;
typedef struct _U64_S { u64 v; } U64_S;
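/*
 * The *_S wrapper structs give the A16/A32/A64 accessors a typed view of
 * (potentially unaligned) memory; which implementation is selected below
 * depends on whether the platform handles unaligned accesses efficiently.
 */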
#if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)		\
	|| (defined(CONFIG_ARM) && __LINUX_ARM_ARCH__ >= 6	\
	&& defined(ARM_EFFICIENT_UNALIGNED_ACCESS))

#define A16(x) (((U16_S *)(x))->v)
#define A32(x) (((U32_S *)(x))->v)
#define A64(x) (((U64_S *)(x))->v)

#define PUT4(s, d) (A32(d) = A32(s))
#define PUT8(s, d) (A64(d) = A64(s))
#define LZ4_WRITE_LITTLEENDIAN_16(p, v)	\
	do {	\
		A16(p) = v; \
		p += 2; \
	} while (0)
#else /* CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */

#define A64(x) get_unaligned((u64 *)&(((U16_S *)(x))->v))
#define A32(x) get_unaligned((u32 *)&(((U16_S *)(x))->v))
#define A16(x) get_unaligned((u16 *)&(((U16_S *)(x))->v))

#define PUT4(s, d) \
	put_unaligned(get_unaligned((const u32 *) s), (u32 *) d)
#define PUT8(s, d) \
	put_unaligned(get_unaligned((const u64 *) s), (u64 *) d)

#define LZ4_WRITE_LITTLEENDIAN_16(p, v)	\
	do {	\
		put_unaligned(v, (u16 *)(p)); \
		p += 2; \
	} while (0)
#endif

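/*
 * LZ4 format constants.  Each sequence starts with a token packing a
 * RUN_BITS-bit literal length and an ML_BITS-bit match length; MINMATCH is
 * the shortest encodable match and MAX_DISTANCE the largest match offset.
 * LASTLITERALS and MFLIMIT are the end-of-block margins that let the
 * wildcopy macros further down safely overrun.  The remaining values size
 * the compressor hash tables and tune the match search.
 */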
#define COPYLENGTH 8
#define ML_BITS  4
#define ML_MASK  ((1U << ML_BITS) - 1)
#define RUN_BITS (8 - ML_BITS)
#define RUN_MASK ((1U << RUN_BITS) - 1)
#define MEMORY_USAGE	14
#define MINMATCH	4
#define SKIPSTRENGTH	6
#define LASTLITERALS	5
#define MFLIMIT		(COPYLENGTH + MINMATCH)
#define MINLENGTH	(MFLIMIT + 1)
#define MAXD_LOG	16
#define MAXD		(1 << MAXD_LOG)
#define MAXD_MASK	(u32)(MAXD - 1)
#define MAX_DISTANCE	(MAXD - 1)
#define HASH_LOG	(MAXD_LOG - 1)
#define HASHTABLESIZE	(1 << HASH_LOG)
#define MAX_NB_ATTEMPTS	256
#define OPTIMAL_ML	(int)((ML_MASK-1)+MINMATCH)
#define LZ4_64KLIMIT	((1<<16) + (MFLIMIT - 1))
#define HASHLOG64K	((MEMORY_USAGE - 2) + 1)
#define HASH64KTABLESIZE	(1U << HASHLOG64K)
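/*
 * Multiplicative hash of the first MINMATCH bytes at p: 2654435761 is a
 * constant close to 2^32 divided by the golden ratio (Fibonacci hashing),
 * and the right shift keeps only the top bits, sized to match the index
 * width of the hash table used by each compressor variant.
 */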
#define LZ4_HASH_VALUE(p)	(((A32(p)) * 2654435761U) >> \
				((MINMATCH * 8) - (MEMORY_USAGE-2)))
#define LZ4_HASH64K_VALUE(p)	(((A32(p)) * 2654435761U) >> \
				((MINMATCH * 8) - HASHLOG64K))
#define HASH_VALUE(p)		(((A32(p)) * 2654435761U) >> \
				((MINMATCH * 8) - HASH_LOG))

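/*
 * Word-sized primitives: STEPSIZE is the native word width used for
 * copying and match scanning, LZ4_COPYSTEP/LZ4_COPYPACKET move data in
 * word-sized chunks, and LZ4_NBCOMMONBYTES converts the XOR of two words
 * into the number of leading bytes they have in common.
 */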
#if LZ4_ARCH64	/* 64-bit */
#define STEPSIZE 8

#define LZ4_COPYSTEP(s, d)	\
	do {			\
		PUT8(s, d);	\
		d += 8;		\
		s += 8;		\
	} while (0)

#define LZ4_COPYPACKET(s, d)	LZ4_COPYSTEP(s, d)

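/*
 * Bounds-checked wildcopy: copy only if d has not already reached e.
 * The 32-bit build below simply aliases LZ4_WILDCOPY.
 */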
#define LZ4_SECURECOPY(s, d, e)			\
	do {					\
		if (d < e) {			\
			LZ4_WILDCOPY(s, d, e);	\
		}				\
	} while (0)
#define HTYPE u32

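/*
 * LZ4_NBCOMMONBYTES(val): given the XOR of two words, count how many of
 * their lowest-addressed bytes are equal.  On little-endian that means
 * counting trailing zero bits, on big-endian leading zero bits; the 32-bit
 * branch below does the same with 32-bit builtins.
 */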
#ifdef __BIG_ENDIAN
#define LZ4_NBCOMMONBYTES(val) (__builtin_clzll(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctzll(val) >> 3)
#endif

#else	/* 32-bit */
#define STEPSIZE 4

#define LZ4_COPYSTEP(s, d)	\
	do {			\
		PUT4(s, d);	\
		d += 4;		\
		s += 4;		\
	} while (0)

#define LZ4_COPYPACKET(s, d)		\
	do {				\
		LZ4_COPYSTEP(s, d);	\
		LZ4_COPYSTEP(s, d);	\
	} while (0)

#define LZ4_SECURECOPY	LZ4_WILDCOPY
#define HTYPE const u8*

#ifdef __BIG_ENDIAN
#define LZ4_NBCOMMONBYTES(val) (__builtin_clz(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctz(val) >> 3)
#endif

#endif

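/*
 * Read a 16-bit little-endian match offset from p and point d at the match
 * source, i.e. d = s - offset.
 */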
#define LZ4_READ_LITTLEENDIAN_16(d, s, p) \
	(d = s - get_unaligned_le16(p))

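/*
 * Copy from s to d in whole COPYLENGTH-byte packets until d reaches e.
 * Because only whole packets are written, up to COPYLENGTH - 1 bytes beyond
 * e may be overwritten; the MFLIMIT/LASTLITERALS margins above exist to
 * make that safe.
 */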
#define LZ4_WILDCOPY(s, d, e)		\
	do {				\
		LZ4_COPYPACKET(s, d);	\
	} while (d < e)

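/*
 * Copy l bytes from s to d.  The copy proceeds in whole packets (so it may
 * also write a few bytes past d + l), then d is advanced by exactly l.
 */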
#define LZ4_BLINDCOPY(s, d, l)	\
	do {	\
		u8 *e = (d) + l;	\
		LZ4_WILDCOPY(s, d, e);	\
		d = e;	\
	} while (0)

#endif /* COMMON_LZ4_DEFS_H */