/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
29 #ifndef _MACHINE_CPUFUNC_H_
30 #define _MACHINE_CPUFUNC_H_
34 #include <sys/types.h>
35 #include <machine/ia64_cpu.h>
37 #define CRITICAL_FORK (ia64_get_psr() | IA64_PSR_I)
/*
 * breakpoint - trap into the debugger via the ia64 "break" instruction.
 * The immediate 0x80100 matches the value Linux uses so cross-OS debug
 * tooling recognizes it (see the original XXX note).
 */
static __inline void
breakpoint(void)
{
	__asm __volatile("break 0x80100"); /* XXX use linux value */
}
49 extern u_int64_t ia64_port_base;
51 static __inline volatile void *
52 ia64_port_address(u_int port)
54 return (volatile void *)(ia64_port_base
56 | (port & ((1 << 12) - 1)));
/*
 * ia64_memory_address - convert a physical address into a pointer in
 * region 6 (uncacheable virtual mapping) so device memory can be
 * accessed without polluting the cache.
 */
static __inline volatile void *
ia64_memory_address(u_int64_t addr)
{
	return (volatile void *) IA64_PHYS_TO_RR6(addr);
}
65 static __inline u_int8_t
68 volatile u_int8_t *p = ia64_port_address(port);
75 static __inline u_int16_t
78 volatile u_int16_t *p = ia64_port_address(port);
85 static __inline u_int32_t
88 volatile u_int32_t *p = ia64_port_address(port);
96 insb(u_int port, void *addr, size_t count)
104 insw(u_int port, void *addr, size_t count)
112 insl(u_int port, void *addr, size_t count)
120 outb(u_int port, u_int8_t data)
122 volatile u_int8_t *p = ia64_port_address(port);
129 outw(u_int port, u_int16_t data)
131 volatile u_int16_t *p = ia64_port_address(port);
138 outl(u_int port, u_int32_t data)
140 volatile u_int32_t *p = ia64_port_address(port);
147 outsb(u_int port, const void *addr, size_t count)
149 const u_int8_t *p = addr;
155 outsw(u_int port, const void *addr, size_t count)
157 const u_int16_t *p = addr;
163 outsl(u_int port, const void *addr, size_t count)
165 const u_int32_t *p = addr;
170 static __inline u_int8_t
173 volatile u_int8_t *p = ia64_memory_address(addr);
180 static __inline u_int16_t
183 volatile u_int16_t *p = ia64_memory_address(addr);
190 static __inline u_int32_t
193 volatile u_int32_t *p = ia64_memory_address(addr);
201 writeb(u_int addr, u_int8_t data)
203 volatile u_int8_t *p = ia64_memory_address(addr);
210 writew(u_int addr, u_int16_t data)
212 volatile u_int16_t *p = ia64_memory_address(addr);
219 writel(u_int addr, u_int32_t data)
221 volatile u_int32_t *p = ia64_memory_address(addr);
/*
 * memcpy_fromio - byte-wise copy 'count' bytes from device memory at
 * physical offset 'ofs' into the kernel buffer 'addr'.  Byte accesses
 * keep the copy safe for devices with access-size restrictions.
 */
static __inline void
memcpy_fromio(u_int8_t *addr, size_t ofs, size_t count)
{
	volatile u_int8_t *p = ia64_memory_address(ofs);
	while (count--)
		*addr++ = *p++;
}
/*
 * memcpy_io - byte-wise copy 'count' bytes between two device-memory
 * regions ('src' -> 'dst', both physical offsets).
 */
static __inline void
memcpy_io(size_t dst, size_t src, size_t count)
{
	volatile u_int8_t *dp = ia64_memory_address(dst);
	volatile u_int8_t *sp = ia64_memory_address(src);
	while (count--)
		*dp++ = *sp++;
}
/*
 * memcpy_toio - byte-wise copy 'count' bytes from the kernel buffer
 * 'addr' into device memory at physical offset 'ofs'.
 */
static __inline void
memcpy_toio(size_t ofs, u_int8_t *addr, size_t count)
{
	volatile u_int8_t *p = ia64_memory_address(ofs);
	while (count--)
		*p++ = *addr++;
}
/*
 * memset_io - fill 'count' bytes of device memory at physical offset
 * 'ofs' with 'value'.
 */
static __inline void
memset_io(size_t ofs, u_int8_t value, size_t count)
{
	volatile u_int8_t *p = ia64_memory_address(ofs);
	while (count--)
		*p++ = value;
}
/*
 * memsetw - fill 'size' 16-bit words at 'addr' with 'val' (truncated
 * to 16 bits, matching the historical int parameter).
 */
static __inline void
memsetw(u_int16_t *addr, int val, size_t size)
{
	while (size--)
		*addr++ = val;
}
/*
 * memsetw_io - fill 'count' 16-bit words of device memory at physical
 * offset 'ofs' with 'value'.
 */
static __inline void
memsetw_io(size_t ofs, u_int16_t value, size_t count)
{
	volatile u_int16_t *p = ia64_memory_address(ofs);
	while (count--)
		*p++ = value;
}
/*
 * disable_intr - mask external interrupts by clearing psr.i (rsm).
 */
static __inline void
disable_intr(void)
{
	__asm __volatile ("rsm psr.i;;");
}
/*
 * enable_intr - unmask external interrupts by setting psr.i (ssm),
 * with srlz.d to serialize the psr update.
 */
static __inline void
enable_intr(void)
{
	__asm __volatile (";; ssm psr.i;; srlz.d");
}
287 static __inline critical_t
288 cpu_critical_enter(void)
292 __asm __volatile ("mov %0=psr;;" : "=r" (psr));
298 cpu_critical_exit(critical_t psr)
300 __asm __volatile ("mov psr.l=%0;; srlz.d" :: "r" (psr));
305 #endif /* !_MACHINE_CPUFUNC_H_ */