/*
 * lz4defs.h -- architecture specific defines
 *
 * Copyright (C) 2013, LG Electronics, Kyungsik Lee <kyungsik.lee@lge.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#ifndef COMMON_LZ4_DEFS_H
#define COMMON_LZ4_DEFS_H

#ifdef __XEN__
#include <xen/unaligned.h>
#else

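/*
 * Fallback for builds outside the hypervisor: fetch a potentially
 * misaligned 16-bit little-endian value.  Going through memcpy() is
 * safe on any architecture; the including environment is expected to
 * provide memcpy() and le16_to_cpu().
 */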
static inline uint16_t get_unaligned_le16(const void *p)
{
	uint16_t v;

	memcpy(&v, p, sizeof(v));

	return le16_to_cpu(v);
}

#endif

/*
 * Detect 64-bit mode
 */
#if (defined(__x86_64__) || defined(__x86_64) || defined(__amd64__) \
	|| defined(__ppc64__) || defined(__LP64__))
#define LZ4_ARCH64 1
#else
#define LZ4_ARCH64 0
#endif

/*
 * Architecture-specific macros
 */
#define BYTE	u8
typedef struct _U16_S { u16 v; } U16_S;
typedef struct _U32_S { u32 v; } U32_S;
typedef struct _U64_S { u64 v; } U64_S;
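
/*
 * The wrapper structs above let a byte pointer be accessed as a 16-,
 * 32- or 64-bit quantity through their 'v' member.  The #if below
 * selects between direct (possibly unaligned) loads and stores on
 * architectures that handle them efficiently, and the generic
 * get_unaligned()/put_unaligned() helpers everywhere else.
 */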
#if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)		\
	|| (defined(CONFIG_ARM) && __LINUX_ARM_ARCH__ >= 6	\
	&& defined(ARM_EFFICIENT_UNALIGNED_ACCESS))

#define A16(x) (((U16_S *)(x))->v)
#define A32(x) (((U32_S *)(x))->v)
#define A64(x) (((U64_S *)(x))->v)

#define PUT4(s, d) (A32(d) = A32(s))
#define PUT8(s, d) (A64(d) = A64(s))
#define LZ4_WRITE_LITTLEENDIAN_16(p, v)	\
	do {	\
		A16(p) = cpu_to_le16(v); \
		p += 2; \
	} while (0)
#else /* !CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */

#define A64(x) get_unaligned(&(((U64_S *)(x))->v))
#define A32(x) get_unaligned(&(((U32_S *)(x))->v))
#define A16(x) get_unaligned(&(((U16_S *)(x))->v))

#define PUT4(s, d) \
	put_unaligned(get_unaligned((const u32 *)(s)), (u32 *)(d))
#define PUT8(s, d) \
	put_unaligned(get_unaligned((const u64 *)(s)), (u64 *)(d))

#define LZ4_WRITE_LITTLEENDIAN_16(p, v)	\
	do {	\
		put_unaligned(cpu_to_le16(v), (u16 *)(p)); \
		p += 2; \
	} while (0)
#endif
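
/*
 * Illustrative sketch (hypothetical caller, not part of this header):
 * both branches above provide the same semantics, e.g.
 *
 *	u8 *op = output;
 *	PUT8(match, op);			8-byte unaligned-safe copy
 *	op += 8;
 *	LZ4_WRITE_LITTLEENDIAN_16(op, ofs);	16-bit LE store, advances op
 */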

#define COPYLENGTH 8
#define ML_BITS  4
#define ML_MASK  ((1U << ML_BITS) - 1)
#define RUN_BITS (8 - ML_BITS)
#define RUN_MASK ((1U << RUN_BITS) - 1)
#define MEMORY_USAGE	14
#define MINMATCH	4
#define SKIPSTRENGTH	6
#define LASTLITERALS	5
#define MFLIMIT		(COPYLENGTH + MINMATCH)
#define MINLENGTH	(MFLIMIT + 1)
#define MAXD_LOG	16
#define MAXD		(1 << MAXD_LOG)
#define MAXD_MASK	(u32)(MAXD - 1)
#define MAX_DISTANCE	(MAXD - 1)
#define HASH_LOG	(MAXD_LOG - 1)
#define HASHTABLESIZE	(1 << HASH_LOG)
#define MAX_NB_ATTEMPTS	256
#define OPTIMAL_ML	(int)((ML_MASK - 1) + MINMATCH)
#define LZ4_64KLIMIT	((1 << 16) + (MFLIMIT - 1))
#define HASHLOG64K	((MEMORY_USAGE - 2) + 1)
#define HASH64KTABLESIZE	(1U << HASHLOG64K)
#define LZ4_HASH_VALUE(p)	(((A32(p)) * 2654435761U) >> \
				((MINMATCH * 8) - (MEMORY_USAGE - 2)))
#define LZ4_HASH64K_VALUE(p)	(((A32(p)) * 2654435761U) >> \
				((MINMATCH * 8) - HASHLOG64K))
#define HASH_VALUE(p)		(((A32(p)) * 2654435761U) >> \
				((MINMATCH * 8) - HASH_LOG))
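
/*
 * The multiplier 2654435761U is Knuth's multiplicative hash constant,
 * close to 2^32 divided by the golden ratio; the right shift keeps only
 * the top bits of the product, so e.g. HASH_VALUE(p) hashes the first
 * MINMATCH bytes at p down to a HASH_LOG-bit table index (and likewise
 * for the MEMORY_USAGE- and HASHLOG64K-sized variants).
 */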

#if LZ4_ARCH64 /* 64-bit */
#define STEPSIZE 8

#define LZ4_COPYSTEP(s, d)	\
	do {			\
		PUT8(s, d);	\
		d += 8;		\
		s += 8;		\
	} while (0)

#define LZ4_COPYPACKET(s, d)	LZ4_COPYSTEP(s, d)

#define LZ4_SECURECOPY(s, d, e)			\
	do {					\
		if (d < e) {			\
			LZ4_WILDCOPY(s, d, e);	\
		}				\
	} while (0)
#define HTYPE u32

#ifdef __BIG_ENDIAN
#define LZ4_NBCOMMONBYTES(val) (__builtin_clzll(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctzll(val) >> 3)
#endif

#else	/* 32-bit */
#define STEPSIZE 4

#define LZ4_COPYSTEP(s, d)	\
	do {			\
		PUT4(s, d);	\
		d += 4;		\
		s += 4;		\
	} while (0)

#define LZ4_COPYPACKET(s, d)		\
	do {				\
		LZ4_COPYSTEP(s, d);	\
		LZ4_COPYSTEP(s, d);	\
	} while (0)

#define LZ4_SECURECOPY	LZ4_WILDCOPY
#define HTYPE const u8 *

#ifdef __BIG_ENDIAN
#define LZ4_NBCOMMONBYTES(val) (__builtin_clz(val) >> 3)
#else
#define LZ4_NBCOMMONBYTES(val) (__builtin_ctz(val) >> 3)
#endif

#endif
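
/*
 * LZ4_NBCOMMONBYTES() expects the XOR of two STEPSIZE-wide words and
 * must not be called with 0 (the builtins are undefined for it).
 * Counting trailing zero bits on little-endian (leading zero bits on
 * big-endian) and dividing by 8 yields the number of lowest-addressed
 * bytes the two words have in common.
 *
 * Illustrative sketch (64-bit case), assuming ip and ref each point at
 * STEPSIZE readable bytes:
 *
 *	u64 diff = A64(ref) ^ A64(ip);
 *	if (diff)
 *		common = LZ4_NBCOMMONBYTES(diff);
 */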

#define LZ4_READ_LITTLEENDIAN_16(d, s, p) \
	(d = s - get_unaligned_le16(p))

#define LZ4_WILDCOPY(s, d, e)		\
	do {				\
		LZ4_COPYPACKET(s, d);	\
	} while (d < e)
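
/*
 * LZ4_WILDCOPY() copies in whole packets and stops only once d >= e, so
 * it may write up to COPYLENGTH - 1 bytes past e; callers must leave at
 * least that much slack in the destination, which is what the
 * LASTLITERALS/MFLIMIT margins above guarantee.
 */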

#define LZ4_BLINDCOPY(s, d, l)	\
	do {	\
		u8 *e = (d) + (l);	\
		LZ4_WILDCOPY(s, d, e);	\
		d = e;	\
	} while (0)
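
/*
 * LZ4_BLINDCOPY() copies l bytes rounded up to whole packets (with the
 * same overrun caveat as above) and leaves d advanced by exactly l.
 */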

#endif /* COMMON_LZ4_DEFS_H */