Merge branch 'linux-2.6' into for-2.6.24
[GitHub/mt8127/android_kernel_alcatel_ttab.git] / include / asm-ia64 / machvec.h
CommitLineData
1da177e4
LT
/*
 * Machine vector for IA-64.
 *
 * The machine vector is a table of function pointers through which one
 * kernel image dispatches platform-specific operations (IPIs, TLB purges,
 * DMA mapping, legacy PCI I/O, MSI setup, ...) to the implementation for
 * the platform it is running on.
 *
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) Srinivasa Thirumalachar <sprasad@engr.sgi.com>
 * Copyright (C) Vijay Chander <vijay@engr.sgi.com>
 * Copyright (C) 1999-2001, 2003-2004 Hewlett-Packard Co.
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#ifndef _ASM_IA64_MACHVEC_H
#define _ASM_IA64_MACHVEC_H

#include <linux/types.h>

/* Forward declarations: only pointers to these types are used below, so
 * the full struct definitions are not needed here. */
struct device;
struct pt_regs;
struct scatterlist;
struct page;
struct mm_struct;
struct pci_bus;
struct task_struct;
struct pci_dev;
struct msi_desc;

/* Function-pointer signatures for the per-platform operations that a
 * machine vector supplies. */
typedef void ia64_mv_setup_t (char **);
typedef void ia64_mv_cpu_init_t (void);
typedef void ia64_mv_irq_init_t (void);
typedef void ia64_mv_send_ipi_t (int, int, int, int);
typedef void ia64_mv_timer_interrupt_t (int, void *);
typedef void ia64_mv_global_tlb_purge_t (struct mm_struct *, unsigned long, unsigned long, unsigned long);
typedef void ia64_mv_tlb_migrate_finish_t (struct mm_struct *);
typedef u8 ia64_mv_irq_to_vector (int);
typedef unsigned int ia64_mv_local_vector_to_irq (u8);
typedef char *ia64_mv_pci_get_legacy_mem_t (struct pci_bus *);
typedef int ia64_mv_pci_legacy_read_t (struct pci_bus *, u16 port, u32 *val,
				       u8 size);
typedef int ia64_mv_pci_legacy_write_t (struct pci_bus *, u16 port, u32 val,
					u8 size);
typedef void ia64_mv_migrate_t(struct task_struct * task);
typedef void ia64_mv_pci_fixup_bus_t (struct pci_bus *);
typedef void ia64_mv_kernel_launch_event_t(void);

/* DMA-mapping interface: */
typedef void ia64_mv_dma_init (void);
typedef void *ia64_mv_dma_alloc_coherent (struct device *, size_t, dma_addr_t *, gfp_t);
typedef void ia64_mv_dma_free_coherent (struct device *, size_t, void *, dma_addr_t);
typedef dma_addr_t ia64_mv_dma_map_single (struct device *, void *, size_t, int);
typedef void ia64_mv_dma_unmap_single (struct device *, dma_addr_t, size_t, int);
typedef int ia64_mv_dma_map_sg (struct device *, struct scatterlist *, int, int);
typedef void ia64_mv_dma_unmap_sg (struct device *, struct scatterlist *, int, int);
typedef void ia64_mv_dma_sync_single_for_cpu (struct device *, dma_addr_t, size_t, int);
typedef void ia64_mv_dma_sync_sg_for_cpu (struct device *, struct scatterlist *, int, int);
typedef void ia64_mv_dma_sync_single_for_device (struct device *, dma_addr_t, size_t, int);
typedef void ia64_mv_dma_sync_sg_for_device (struct device *, struct scatterlist *, int, int);
typedef int ia64_mv_dma_mapping_error (dma_addr_t dma_addr);
typedef int ia64_mv_dma_supported (struct device *, u64);

/*
 * WARNING: The legacy I/O space is _architected_.  Platforms are
 * expected to follow this architected model (see Section 10.7 in the
 * IA-64 Architecture Software Developer's Manual).  Unfortunately,
 * some broken machines do not follow that model, which is why we have
 * to make the inX/outX operations part of the machine vector.
 * Platform designers should follow the architected model whenever
 * possible.
 */
typedef unsigned int ia64_mv_inb_t (unsigned long);
typedef unsigned int ia64_mv_inw_t (unsigned long);
typedef unsigned int ia64_mv_inl_t (unsigned long);
typedef void ia64_mv_outb_t (unsigned char, unsigned long);
typedef void ia64_mv_outw_t (unsigned short, unsigned long);
typedef void ia64_mv_outl_t (unsigned int, unsigned long);
typedef void ia64_mv_mmiowb_t (void);
typedef unsigned char ia64_mv_readb_t (const volatile void __iomem *);
typedef unsigned short ia64_mv_readw_t (const volatile void __iomem *);
typedef unsigned int ia64_mv_readl_t (const volatile void __iomem *);
typedef unsigned long ia64_mv_readq_t (const volatile void __iomem *);
typedef unsigned char ia64_mv_readb_relaxed_t (const volatile void __iomem *);
typedef unsigned short ia64_mv_readw_relaxed_t (const volatile void __iomem *);
typedef unsigned int ia64_mv_readl_relaxed_t (const volatile void __iomem *);
typedef unsigned long ia64_mv_readq_relaxed_t (const volatile void __iomem *);

/* MSI hooks; the defaults at the bottom of this file are NULL pointers,
 * i.e. a platform without these entries provides no MSI support. */
typedef int ia64_mv_setup_msi_irq_t (struct pci_dev *pdev, struct msi_desc *);
typedef void ia64_mv_teardown_msi_irq_t (unsigned int irq);

/* Default no-op implementation for machine-vector hooks that take no
 * arguments and require no action (e.g. platform_cpu_init). */
static inline void machvec_noop (void)
{
	/* intentionally empty */
}
91
/* Default no-op hook taking an mm_struct argument; used as the fallback
 * for platform_tlb_migrate_finish. */
static inline void machvec_noop_mm (struct mm_struct *mm)
{
	/* intentionally empty */
}
96
/* Default no-op hook taking a task_struct argument; used as the fallback
 * for platform_migrate. */
static inline void machvec_noop_task (struct task_struct *task)
{
	/* intentionally empty */
}
101
/* Default no-op hook taking a pci_bus argument; used as the fallback
 * for platform_pci_fixup_bus. */
static inline void machvec_noop_bus (struct pci_bus *bus)
{
	/* intentionally empty */
}
106
/* Generic machvec helper routines, usable as defaults by any platform
 * (machvec_setup and machvec_timer_interrupt are the #ifndef fallbacks
 * further below). */
extern void machvec_setup (char **);
extern void machvec_timer_interrupt (int, void *);
extern void machvec_dma_sync_single (struct device *, dma_addr_t, size_t, int);
extern void machvec_dma_sync_sg (struct device *, struct scatterlist *, int, int);
extern void machvec_tlb_migrate_finish (struct mm_struct *);

/*
 * Select the machine vector.  Single-platform kernels pull in a platform
 * header that defines the platform_* macros directly; CONFIG_IA64_GENERIC
 * kernels route every platform_* operation through the ia64_mv
 * function-pointer table instead, so the platform is chosen at runtime.
 */
# if defined (CONFIG_IA64_HP_SIM)
#  include <asm/machvec_hpsim.h>
# elif defined (CONFIG_IA64_DIG)
#  include <asm/machvec_dig.h>
# elif defined (CONFIG_IA64_HP_ZX1)
#  include <asm/machvec_hpzx1.h>
# elif defined (CONFIG_IA64_HP_ZX1_SWIOTLB)
#  include <asm/machvec_hpzx1_swiotlb.h>
# elif defined (CONFIG_IA64_SGI_SN2)
#  include <asm/machvec_sn2.h>
# elif defined (CONFIG_IA64_GENERIC)

/* MACHVEC_PLATFORM_HEADER lets a translation unit instantiate the generic
 * table from a specific platform's definitions; everywhere else the
 * platform_* names indirect through ia64_mv. */
# ifdef MACHVEC_PLATFORM_HEADER
#  include MACHVEC_PLATFORM_HEADER
# else
#  define platform_name				ia64_mv.name
#  define platform_setup			ia64_mv.setup
#  define platform_cpu_init			ia64_mv.cpu_init
#  define platform_irq_init			ia64_mv.irq_init
#  define platform_send_ipi			ia64_mv.send_ipi
#  define platform_timer_interrupt		ia64_mv.timer_interrupt
#  define platform_global_tlb_purge		ia64_mv.global_tlb_purge
#  define platform_tlb_migrate_finish		ia64_mv.tlb_migrate_finish
#  define platform_dma_init			ia64_mv.dma_init
#  define platform_dma_alloc_coherent		ia64_mv.dma_alloc_coherent
#  define platform_dma_free_coherent		ia64_mv.dma_free_coherent
#  define platform_dma_map_single		ia64_mv.dma_map_single
#  define platform_dma_unmap_single		ia64_mv.dma_unmap_single
#  define platform_dma_map_sg			ia64_mv.dma_map_sg
#  define platform_dma_unmap_sg			ia64_mv.dma_unmap_sg
#  define platform_dma_sync_single_for_cpu	ia64_mv.dma_sync_single_for_cpu
#  define platform_dma_sync_sg_for_cpu		ia64_mv.dma_sync_sg_for_cpu
#  define platform_dma_sync_single_for_device	ia64_mv.dma_sync_single_for_device
#  define platform_dma_sync_sg_for_device	ia64_mv.dma_sync_sg_for_device
#  define platform_dma_mapping_error		ia64_mv.dma_mapping_error
#  define platform_dma_supported		ia64_mv.dma_supported
#  define platform_irq_to_vector		ia64_mv.irq_to_vector
#  define platform_local_vector_to_irq		ia64_mv.local_vector_to_irq
#  define platform_pci_get_legacy_mem		ia64_mv.pci_get_legacy_mem
#  define platform_pci_legacy_read		ia64_mv.pci_legacy_read
#  define platform_pci_legacy_write		ia64_mv.pci_legacy_write
#  define platform_inb				ia64_mv.inb
#  define platform_inw				ia64_mv.inw
#  define platform_inl				ia64_mv.inl
#  define platform_outb				ia64_mv.outb
#  define platform_outw				ia64_mv.outw
#  define platform_outl				ia64_mv.outl
#  define platform_mmiowb			ia64_mv.mmiowb
#  define platform_readb			ia64_mv.readb
#  define platform_readw			ia64_mv.readw
#  define platform_readl			ia64_mv.readl
#  define platform_readq			ia64_mv.readq
#  define platform_readb_relaxed		ia64_mv.readb_relaxed
#  define platform_readw_relaxed		ia64_mv.readw_relaxed
#  define platform_readl_relaxed		ia64_mv.readl_relaxed
#  define platform_readq_relaxed		ia64_mv.readq_relaxed
#  define platform_migrate			ia64_mv.migrate
#  define platform_setup_msi_irq		ia64_mv.setup_msi_irq
#  define platform_teardown_msi_irq		ia64_mv.teardown_msi_irq
#  define platform_pci_fixup_bus		ia64_mv.pci_fixup_bus
#  define platform_kernel_launch_event		ia64_mv.kernel_launch_event
# endif

/* __attribute__((__aligned__(16))) is required to make size of the
 * structure multiple of 16 bytes.
 * This will fillup the holes created because of section 3.3.1 in
 * Software Conventions guide.
 *
 * NOTE: member order is significant — it must match the positional
 * initializer list produced by MACHVEC_INIT() below.
 */
struct ia64_machine_vector {
	const char *name;
	ia64_mv_setup_t *setup;
	ia64_mv_cpu_init_t *cpu_init;
	ia64_mv_irq_init_t *irq_init;
	ia64_mv_send_ipi_t *send_ipi;
	ia64_mv_timer_interrupt_t *timer_interrupt;
	ia64_mv_global_tlb_purge_t *global_tlb_purge;
	ia64_mv_tlb_migrate_finish_t *tlb_migrate_finish;
	/* DMA-mapping operations: */
	ia64_mv_dma_init *dma_init;
	ia64_mv_dma_alloc_coherent *dma_alloc_coherent;
	ia64_mv_dma_free_coherent *dma_free_coherent;
	ia64_mv_dma_map_single *dma_map_single;
	ia64_mv_dma_unmap_single *dma_unmap_single;
	ia64_mv_dma_map_sg *dma_map_sg;
	ia64_mv_dma_unmap_sg *dma_unmap_sg;
	ia64_mv_dma_sync_single_for_cpu *dma_sync_single_for_cpu;
	ia64_mv_dma_sync_sg_for_cpu *dma_sync_sg_for_cpu;
	ia64_mv_dma_sync_single_for_device *dma_sync_single_for_device;
	ia64_mv_dma_sync_sg_for_device *dma_sync_sg_for_device;
	ia64_mv_dma_mapping_error *dma_mapping_error;
	ia64_mv_dma_supported *dma_supported;
	/* IRQ routing: */
	ia64_mv_irq_to_vector *irq_to_vector;
	ia64_mv_local_vector_to_irq *local_vector_to_irq;
	/* Legacy PCI I/O: */
	ia64_mv_pci_get_legacy_mem_t *pci_get_legacy_mem;
	ia64_mv_pci_legacy_read_t *pci_legacy_read;
	ia64_mv_pci_legacy_write_t *pci_legacy_write;
	/* Port and MMIO accessors: */
	ia64_mv_inb_t *inb;
	ia64_mv_inw_t *inw;
	ia64_mv_inl_t *inl;
	ia64_mv_outb_t *outb;
	ia64_mv_outw_t *outw;
	ia64_mv_outl_t *outl;
	ia64_mv_mmiowb_t *mmiowb;
	ia64_mv_readb_t *readb;
	ia64_mv_readw_t *readw;
	ia64_mv_readl_t *readl;
	ia64_mv_readq_t *readq;
	ia64_mv_readb_relaxed_t *readb_relaxed;
	ia64_mv_readw_relaxed_t *readw_relaxed;
	ia64_mv_readl_relaxed_t *readl_relaxed;
	ia64_mv_readq_relaxed_t *readq_relaxed;
	ia64_mv_migrate_t *migrate;
	/* MSI hooks (may be NULL): */
	ia64_mv_setup_msi_irq_t *setup_msi_irq;
	ia64_mv_teardown_msi_irq_t *teardown_msi_irq;
	ia64_mv_pci_fixup_bus_t *pci_fixup_bus;
	ia64_mv_kernel_launch_event_t *kernel_launch_event;
} __attribute__((__aligned__(16))); /* align attrib? see above comment */
229
/*
 * Build a struct ia64_machine_vector initializer for platform "name".
 * The entries are positional: they must stay in the exact order of the
 * members of struct ia64_machine_vector above.  Each platform_* symbol
 * resolves either to the including platform header's definition or to
 * one of the defaults at the bottom of this file.
 */
#define MACHVEC_INIT(name)			\
{						\
	#name,					\
	platform_setup,				\
	platform_cpu_init,			\
	platform_irq_init,			\
	platform_send_ipi,			\
	platform_timer_interrupt,		\
	platform_global_tlb_purge,		\
	platform_tlb_migrate_finish,		\
	platform_dma_init,			\
	platform_dma_alloc_coherent,		\
	platform_dma_free_coherent,		\
	platform_dma_map_single,		\
	platform_dma_unmap_single,		\
	platform_dma_map_sg,			\
	platform_dma_unmap_sg,			\
	platform_dma_sync_single_for_cpu,	\
	platform_dma_sync_sg_for_cpu,		\
	platform_dma_sync_single_for_device,	\
	platform_dma_sync_sg_for_device,	\
	platform_dma_mapping_error,		\
	platform_dma_supported,			\
	platform_irq_to_vector,			\
	platform_local_vector_to_irq,		\
	platform_pci_get_legacy_mem,		\
	platform_pci_legacy_read,		\
	platform_pci_legacy_write,		\
	platform_inb,				\
	platform_inw,				\
	platform_inl,				\
	platform_outb,				\
	platform_outw,				\
	platform_outl,				\
	platform_mmiowb,			\
	platform_readb,				\
	platform_readw,				\
	platform_readl,				\
	platform_readq,				\
	platform_readb_relaxed,			\
	platform_readw_relaxed,			\
	platform_readl_relaxed,			\
	platform_readq_relaxed,			\
	platform_migrate,			\
	platform_setup_msi_irq,			\
	platform_teardown_msi_irq,		\
	platform_pci_fixup_bus,			\
	platform_kernel_launch_event		\
}

/* The runtime-selected machine vector for CONFIG_IA64_GENERIC kernels,
 * plus its initialization entry points (by name, or from the boot
 * command line). */
extern struct ia64_machine_vector ia64_mv;
extern void machvec_init (const char *name);
extern void machvec_init_from_cmdline(const char *cmdline);

# else
# error Unknown configuration. Update asm-ia64/machvec.h.
# endif /* CONFIG_IA64_GENERIC */

/*
 * Declare default routines which aren't declared anywhere else:
 * (the swiotlb implementations are the DMA-mapping fallbacks below).
 */
extern ia64_mv_dma_init			swiotlb_init;
extern ia64_mv_dma_alloc_coherent	swiotlb_alloc_coherent;
extern ia64_mv_dma_free_coherent	swiotlb_free_coherent;
extern ia64_mv_dma_map_single		swiotlb_map_single;
extern ia64_mv_dma_unmap_single		swiotlb_unmap_single;
extern ia64_mv_dma_map_sg		swiotlb_map_sg;
extern ia64_mv_dma_unmap_sg		swiotlb_unmap_sg;
extern ia64_mv_dma_sync_single_for_cpu	swiotlb_sync_single_for_cpu;
extern ia64_mv_dma_sync_sg_for_cpu	swiotlb_sync_sg_for_cpu;
extern ia64_mv_dma_sync_single_for_device	swiotlb_sync_single_for_device;
extern ia64_mv_dma_sync_sg_for_device	swiotlb_sync_sg_for_device;
extern ia64_mv_dma_mapping_error	swiotlb_dma_mapping_error;
extern ia64_mv_dma_supported		swiotlb_dma_supported;

/*
 * Define default versions so we can extend machvec for new platforms without
 * having to update the machvec files for all existing platforms.
 *
 * A platform header that wants a non-default implementation simply
 * #defines the corresponding platform_* macro before this point.
 */
#ifndef platform_setup
# define platform_setup			machvec_setup
#endif
#ifndef platform_cpu_init
# define platform_cpu_init		machvec_noop
#endif
#ifndef platform_irq_init
# define platform_irq_init		machvec_noop
#endif

#ifndef platform_send_ipi
# define platform_send_ipi		ia64_send_ipi	/* default to architected version */
#endif
#ifndef platform_timer_interrupt
# define platform_timer_interrupt	machvec_timer_interrupt
#endif
#ifndef platform_global_tlb_purge
# define platform_global_tlb_purge	ia64_global_tlb_purge	/* default to architected version */
#endif
#ifndef platform_tlb_migrate_finish
# define platform_tlb_migrate_finish	machvec_noop_mm
#endif
#ifndef platform_kernel_launch_event
# define platform_kernel_launch_event	machvec_noop
#endif
/* DMA-mapping defaults all fall back to the software I/O TLB: */
#ifndef platform_dma_init
# define platform_dma_init		swiotlb_init
#endif
#ifndef platform_dma_alloc_coherent
# define platform_dma_alloc_coherent	swiotlb_alloc_coherent
#endif
#ifndef platform_dma_free_coherent
# define platform_dma_free_coherent	swiotlb_free_coherent
#endif
#ifndef platform_dma_map_single
# define platform_dma_map_single	swiotlb_map_single
#endif
#ifndef platform_dma_unmap_single
# define platform_dma_unmap_single	swiotlb_unmap_single
#endif
#ifndef platform_dma_map_sg
# define platform_dma_map_sg		swiotlb_map_sg
#endif
#ifndef platform_dma_unmap_sg
# define platform_dma_unmap_sg		swiotlb_unmap_sg
#endif
#ifndef platform_dma_sync_single_for_cpu
# define platform_dma_sync_single_for_cpu	swiotlb_sync_single_for_cpu
#endif
#ifndef platform_dma_sync_sg_for_cpu
# define platform_dma_sync_sg_for_cpu	swiotlb_sync_sg_for_cpu
#endif
#ifndef platform_dma_sync_single_for_device
# define platform_dma_sync_single_for_device	swiotlb_sync_single_for_device
#endif
#ifndef platform_dma_sync_sg_for_device
# define platform_dma_sync_sg_for_device	swiotlb_sync_sg_for_device
#endif
#ifndef platform_dma_mapping_error
# define platform_dma_mapping_error	swiotlb_dma_mapping_error
#endif
#ifndef platform_dma_supported
# define platform_dma_supported		swiotlb_dma_supported
#endif
#ifndef platform_irq_to_vector
# define platform_irq_to_vector		__ia64_irq_to_vector
#endif
#ifndef platform_local_vector_to_irq
# define platform_local_vector_to_irq	__ia64_local_vector_to_irq
#endif
#ifndef platform_pci_get_legacy_mem
# define platform_pci_get_legacy_mem	ia64_pci_get_legacy_mem
#endif
#ifndef platform_pci_legacy_read
# define platform_pci_legacy_read	ia64_pci_legacy_read
extern int ia64_pci_legacy_read(struct pci_bus *bus, u16 port, u32 *val, u8 size);
#endif
#ifndef platform_pci_legacy_write
# define platform_pci_legacy_write	ia64_pci_legacy_write
extern int ia64_pci_legacy_write(struct pci_bus *bus, u16 port, u32 val, u8 size);
#endif
/* Port/MMIO accessor defaults use the architected __ia64_* versions: */
#ifndef platform_inb
# define platform_inb		__ia64_inb
#endif
#ifndef platform_inw
# define platform_inw		__ia64_inw
#endif
#ifndef platform_inl
# define platform_inl		__ia64_inl
#endif
#ifndef platform_outb
# define platform_outb		__ia64_outb
#endif
#ifndef platform_outw
# define platform_outw		__ia64_outw
#endif
#ifndef platform_outl
# define platform_outl		__ia64_outl
#endif
#ifndef platform_mmiowb
# define platform_mmiowb	__ia64_mmiowb
#endif
#ifndef platform_readb
# define platform_readb		__ia64_readb
#endif
#ifndef platform_readw
# define platform_readw		__ia64_readw
#endif
#ifndef platform_readl
# define platform_readl		__ia64_readl
#endif
#ifndef platform_readq
# define platform_readq		__ia64_readq
#endif
#ifndef platform_readb_relaxed
# define platform_readb_relaxed	__ia64_readb_relaxed
#endif
#ifndef platform_readw_relaxed
# define platform_readw_relaxed	__ia64_readw_relaxed
#endif
#ifndef platform_readl_relaxed
# define platform_readl_relaxed	__ia64_readl_relaxed
#endif
#ifndef platform_readq_relaxed
# define platform_readq_relaxed	__ia64_readq_relaxed
#endif
#ifndef platform_migrate
# define platform_migrate	machvec_noop_task
#endif
/* NULL default means "platform has no MSI support".  NOTE(review):
 * presumably callers check these pointers for NULL before invoking —
 * verify against the IA-64 MSI code. */
#ifndef platform_setup_msi_irq
# define platform_setup_msi_irq		((ia64_mv_setup_msi_irq_t*)NULL)
#endif
#ifndef platform_teardown_msi_irq
# define platform_teardown_msi_irq	((ia64_mv_teardown_msi_irq_t*)NULL)
#endif
#ifndef platform_pci_fixup_bus
# define platform_pci_fixup_bus	machvec_noop_bus
#endif

#endif /* _ASM_IA64_MACHVEC_H */