/*
 *	Generic cache management functions. Everything is arch-specific,
 *	but this header exists to make sure the defines/functions can be
 *	used in a generic way.
 *
 *	2000-11-13	Arjan van de Ven	<arjan@fenrus.demon.nl>
 *
 */

#ifndef _LINUX_PREFETCH_H
#define _LINUX_PREFETCH_H

#include <linux/types.h>
#include <asm/processor.h>
#include <asm/cache.h>

/*
	prefetch(x) attempts to pre-emptively get the memory pointed to
	by address "x" into the CPU L1 cache.
	prefetch(x) should not cause any kind of exception, prefetch(0) is
	specifically ok.

	prefetch() should be defined by the architecture, if not, the
	#define below provides a no-op define.

	There are 3 prefetch() macros:

	prefetch(x)		- prefetches the cacheline at "x" for read
	prefetchw(x)		- prefetches the cacheline at "x" for write
	spin_lock_prefetch(x)	- prefetches the spinlock *x for taking

	there is also PREFETCH_STRIDE which is the architecture-preferred
	"lookahead" size for prefetching streamed operations.

*/

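/*
 * A minimal usage sketch; the "struct item" type and sum_items() helper are
 * hypothetical and not part of this header.  While the current node of a
 * linked list is being processed, the next node is prefetched so the pointer
 * chase overlaps with useful work.  Passing a NULL "next" pointer on the
 * final iteration is harmless, since prefetch(0) is explicitly ok as noted
 * above.
 *
 *	struct item {
 *		struct item *next;
 *		int payload;
 *	};
 *
 *	static inline int sum_items(struct item *p)
 *	{
 *		int sum = 0;
 *
 *		while (p) {
 *			prefetch(p->next);
 *			sum += p->payload;
 *			p = p->next;
 *		}
 *		return sum;
 *	}
 */
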
#ifndef ARCH_HAS_PREFETCH
#define prefetch(x) __builtin_prefetch(x)
#endif

#ifndef ARCH_HAS_PREFETCHW
#define prefetchw(x) __builtin_prefetch(x,1)
#endif

#ifndef ARCH_HAS_SPINLOCK_PREFETCH
#define spin_lock_prefetch(x) prefetchw(x)
#endif

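/*
 * A minimal usage sketch; "my_dev", its "lock" member and do_other_setup()
 * are hypothetical.  A caller that knows it will take a spinlock shortly can
 * request the lock's cacheline early (for write, since the default maps to
 * prefetchw), then overlap the cache transfer with unrelated work before
 * actually acquiring the lock.
 *
 *	spin_lock_prefetch(&my_dev->lock);
 *	do_other_setup(my_dev);
 *	spin_lock(&my_dev->lock);
 *	my_dev->count++;
 *	spin_unlock(&my_dev->lock);
 */
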
#ifndef PREFETCH_STRIDE
#define PREFETCH_STRIDE (4*L1_CACHE_BYTES)
#endif

static inline void prefetch_range(void *addr, size_t len)
{
#ifdef ARCH_HAS_PREFETCH
	char *cp;
	char *end = addr + len;

	for (cp = addr; cp < end; cp += PREFETCH_STRIDE)
		prefetch(cp);
#endif
}

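/*
 * A minimal usage sketch; checksum_buffer() and its arguments are
 * hypothetical.  Warming a whole buffer before a sequential pass lets the
 * prefetches overlap with the start of the loop; prefetch_range() walks the
 * region in PREFETCH_STRIDE-sized steps, so a few prefetch() calls cover
 * the entire range.
 *
 *	static inline unsigned int checksum_buffer(unsigned char *buf,
 *						   size_t len)
 *	{
 *		unsigned int sum = 0;
 *		size_t i;
 *
 *		prefetch_range(buf, len);
 *		for (i = 0; i < len; i++)
 *			sum += buf[i];
 *		return sum;
 *	}
 */
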
#endif