1 /**
2 * \file
3 * \brief Cache functions
4 *
5 * \date 2007-11
6 * \author Adam Lackorzynski <adam@os.inf.tu-dresden.de>
7 *
8 */
9 /*
10 * (c) 2007-2009 Author(s)
11 * economic rights: Technische Universität Dresden (Germany)
12 *
13 * This file is part of TUD:OS and distributed under the terms of the
14 * GNU General Public License 2.
15 * Please see the COPYING-GPL-2 file for details.
16 *
17 * As a special exception, you may use this file as part of a free software
18 * library without restriction. Specifically, if other files instantiate
19 * templates or use macros or inline functions from this file, or you compile
20 * this file and link it with other files to produce an executable, this
21 * file does not by itself cause the resulting executable to be covered by
22 * the GNU General Public License. This exception does not however
23 * invalidate any other reasons why the executable file might be covered by
24 * the GNU General Public License.
25 */
26 #ifndef __L4SYS__INCLUDE__ARCH_ARM__CACHE_H__
27 #define __L4SYS__INCLUDE__ARCH_ARM__CACHE_H__
28
29 #include <l4/sys/compiler.h>
30 #include <l4/sys/syscall_defs.h>
31
32 #include_next <l4/sys/cache.h>
33
/**
 * \internal
 * Common trampoline for all cache-maintenance operations: enters the
 * kernel's mem_op entry point with the operation code and address range
 * in r0-r2 (see enum L4_mem_cache_ops below for the codes).
 */
L4_INLINE void
l4_cache_op_arm_call(unsigned long op,
                     unsigned long start,
                     unsigned long end);

L4_INLINE void
l4_cache_op_arm_call(unsigned long op,
                     unsigned long start,
                     unsigned long end)
{
  /* Pin the arguments to the registers the syscall ABI expects. */
  register unsigned long _op __asm__ ("r0") = op;
  register unsigned long _start __asm__ ("r1") = start;
  register unsigned long _end __asm__ ("r2") = end;

  /* Classic pre-BLX ARM call sequence: reading pc yields the address of
   * the current instruction + 8, so "mov lr, pc" loads lr with the return
   * address (the instruction after the following jump), then "mov pc"
   * branches to the fixed syscall entry L4_SYSCALL_MEM_OP.
   * NOTE(review): this sequence is ARM-mode only — verify this header is
   * never compiled for Thumb. */
  __asm__ __volatile__
    ("@ l4_cache_op_arm_call(start) \n\t"
     "mov lr, pc \n\t"
     "mov pc, %[sc] \n\t"
     "@ l4_cache_op_arm_call(end) \n\t"
     :
     /* r0-r2 are listed as outputs (tied to the inputs below) because the
      * kernel may clobber them on return. */
     "=r" (_op),
     "=r" (_start),
     "=r" (_end)
     :
     [sc] "i" (L4_SYSCALL_MEM_OP),
     "0" (_op),
     "1" (_start),
     "2" (_end)
     :
     /* lr is overwritten by the call sequence above; "memory" because the
      * kernel operates on the caches behind the compiler's back. */
     "cc", "memory", "lr"
     );
}
69
/**
 * Operation codes passed (in r0) to l4_cache_op_arm_call().
 *
 * NOTE(review): the numeric values are part of the kernel's mem_op syscall
 * interface — presumably they must match the kernel's definitions, so do
 * not renumber.
 */
enum L4_mem_cache_ops
{
  L4_MEM_CACHE_OP_CLEAN_DATA        = 0,  /**< Clean (write back) data cache. */
  L4_MEM_CACHE_OP_FLUSH_DATA        = 1,  /**< Flush (clean + invalidate) data cache. */
  L4_MEM_CACHE_OP_INV_DATA          = 2,  /**< Invalidate data cache. */
  L4_MEM_CACHE_OP_COHERENT          = 3,  /**< Make I/D caches coherent (e.g. after code generation). */
  L4_MEM_CACHE_OP_DMA_COHERENT      = 4,  /**< Make a range coherent for DMA. */
  L4_MEM_CACHE_OP_DMA_COHERENT_FULL = 5,  /**< Make the whole address space coherent for DMA. */
};
79
80 L4_INLINE int
l4_cache_clean_data(unsigned long start,unsigned long end)81 l4_cache_clean_data(unsigned long start,
82 unsigned long end) L4_NOTHROW
83 {
84 l4_cache_op_arm_call(L4_MEM_CACHE_OP_CLEAN_DATA, start, end);
85 return 0;
86 }
87
88 L4_INLINE int
l4_cache_flush_data(unsigned long start,unsigned long end)89 l4_cache_flush_data(unsigned long start,
90 unsigned long end) L4_NOTHROW
91 {
92 l4_cache_op_arm_call(L4_MEM_CACHE_OP_FLUSH_DATA, start, end);
93 return 0;
94 }
95
96 L4_INLINE int
l4_cache_inv_data(unsigned long start,unsigned long end)97 l4_cache_inv_data(unsigned long start,
98 unsigned long end) L4_NOTHROW
99 {
100 l4_cache_op_arm_call(L4_MEM_CACHE_OP_INV_DATA, start, end);
101 return 0;
102 }
103
104 L4_INLINE int
l4_cache_coherent(unsigned long start,unsigned long end)105 l4_cache_coherent(unsigned long start,
106 unsigned long end) L4_NOTHROW
107 {
108 l4_cache_op_arm_call(L4_MEM_CACHE_OP_COHERENT, start, end);
109 return 0;
110 }
111
112 L4_INLINE int
l4_cache_dma_coherent(unsigned long start,unsigned long end)113 l4_cache_dma_coherent(unsigned long start,
114 unsigned long end) L4_NOTHROW
115 {
116 l4_cache_op_arm_call(L4_MEM_CACHE_OP_DMA_COHERENT, start, end);
117 return 0;
118 }
119
120 L4_INLINE int
l4_cache_dma_coherent_full(void)121 l4_cache_dma_coherent_full(void) L4_NOTHROW
122 {
123 l4_cache_op_arm_call(L4_MEM_CACHE_OP_DMA_COHERENT_FULL, 0, 0);
124 return 0;
125 }
126
127 #endif /* ! __L4SYS__INCLUDE__ARCH_ARM__CACHE_H__ */
128