fork: move the real prepare_to_copy() users to arch_dup_task_struct()
[GitHub/mt8127/android_kernel_alcatel_ttab.git] / arch / alpha / include / asm / processor.h
1 /*
2 * include/asm-alpha/processor.h
3 *
4 * Copyright (C) 1994 Linus Torvalds
5 */
6
7 #ifndef __ASM_ALPHA_PROCESSOR_H
8 #define __ASM_ALPHA_PROCESSOR_H
9
10 #include <linux/personality.h> /* for ADDR_LIMIT_32BIT */
11
/*
 * Returns current instruction pointer ("program counter").
 *
 * Alpha "br reg,target" deposits the address of the instruction
 * following the branch into reg; branching to ".+4" (the very next
 * instruction) makes that side effect the whole point — __pc ends up
 * holding the current text address.
 */
#define current_text_addr() \
  ({ void *__pc; __asm__ ("br %0,.+4" : "=r"(__pc)); __pc; })
17
/*
 * We have a 42-bit user address space: 4TB user VM...
 */
#define TASK_SIZE (0x40000000000UL)

/*
 * Top of the user stack.  Tasks running with a 32-bit address-limit
 * personality get a 2GB stack top; everyone else gets 0x00120000000
 * (the same value as STACK_TOP_MAX below).
 */
#define STACK_TOP \
  (current->personality & ADDR_LIMIT_32BIT ? 0x80000000 : 0x00120000000UL)

/* Highest possible STACK_TOP, regardless of personality. */
#define STACK_TOP_MAX	0x00120000000UL
27
/* This decides where the kernel will search for a free chunk of vm
 * space during mmap's.  32-bit-limited personalities are kept below
 * 1GB; everyone else starts at the middle of the 4TB address space.
 */
#define TASK_UNMAPPED_BASE \
  ((current->personality & ADDR_LIMIT_32BIT) ? 0x40000000 : TASK_SIZE / 2)
33
/*
 * One-word address-space segment value.  NOTE(review): presumably the
 * cookie carried by the get_fs()/set_fs() interface — confirm against
 * the uaccess definitions; only the single `seg` word is visible here.
 */
typedef struct {
	unsigned long seg;
} mm_segment_t;
37
/* This is dead.  Everything has been moved to thread_info.  The empty
 * struct and initializer remain only because generic code expects the
 * names to exist.
 */
struct thread_struct { };
#define INIT_THREAD  { }
41
/* Return saved PC of a blocked thread. */
struct task_struct;
extern unsigned long thread_saved_pc(struct task_struct *);

/* Do necessary setup to start up a newly executed thread.
 * (regs, new PC, new user stack pointer — defined in arch code.) */
extern void start_thread(struct pt_regs *, unsigned long, unsigned long);

/* Free all resources held by a thread. */
extern void release_thread(struct task_struct *);

/* Create a kernel thread without removing it from tasklists. */
extern long kernel_thread(int (*fn)(void *), void *arg, unsigned long flags);

/* Report the "wait channel" (PC a sleeping task is blocked at) for /proc. */
unsigned long get_wchan(struct task_struct *p);
56
/* User-mode PC of a task, from its saved register frame. */
#define KSTK_EIP(tsk) (task_pt_regs(tsk)->pc)

/* User-mode SP: read the live usp register for the current task,
 * otherwise the copy saved in the task's PCB. */
#define KSTK_ESP(tsk) \
  ((tsk) == current ? rdusp() : task_thread_info(tsk)->pcb.usp)

/* Busy-wait hint: just a compiler barrier on Alpha. */
#define cpu_relax()	barrier()
63
/* Tell the generic code this arch supplies its own prefetch helpers
 * (defined below) instead of the default no-ops. */
#define ARCH_HAS_PREFETCH
#define ARCH_HAS_PREFETCHW
#define ARCH_HAS_SPINLOCK_PREFETCH

#ifndef CONFIG_SMP
/* Nothing to prefetch. */
#define spin_lock_prefetch(lock)	do { } while (0)
#endif
72
/*
 * Hint that *addr will soon be read.
 * __builtin_prefetch(addr, rw=0, locality=3): read access, keep the
 * line in as many cache levels as possible.
 */
extern inline void prefetch(const void *addr)
{
	__builtin_prefetch(addr, 0, 3);
}
77
/*
 * Hint that *addr will soon be written.
 * __builtin_prefetch(addr, rw=1, locality=3): write access, maximum
 * temporal locality.
 */
extern inline void prefetchw(const void *addr)
{
	__builtin_prefetch(addr, 1, 3);
}
82
#ifdef CONFIG_SMP
/*
 * SMP only: pull the lock's cache line in for write before the
 * spin-lock acquire touches it (same hint as prefetchw).
 */
extern inline void spin_lock_prefetch(const void *lock)
{
	__builtin_prefetch(lock, 1, 3);
}
#endif
89
90 #endif /* __ASM_ALPHA_PROCESSOR_H */