/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 2006 Semihalf, Rafal Jaworowski <raj@semihalf.com>
 * Copyright (c) 1996, 1997, 1998 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe of the Numerical Aerospace Simulation Facility,
 * NASA Ames Research Center.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
34 #include <sys/cdefs.h>
35 __FBSDID("$FreeBSD$");
40 #include <sys/param.h>
41 #include <sys/systm.h>
46 #include <sys/endian.h>
48 #include <machine/bus.h>
49 #include <machine/pio.h>
50 #include <machine/md_var.h>
/*
 * NOTE(review): this extraction has fused the original file's line numbers
 * into the text and dropped many intermediate lines (e.g. the body of the
 * earlyboot_mappings struct, original lines ~55-60, is missing).  All
 * surviving code lines below are kept byte-identical; only comments are
 * added.
 */
/* Panic helper for bus-space entry points that are not implemented. */
52 #define TODO panic("%s: not implemented", __func__)
/* Maximum number of I/O mappings that may be requested before the MMU is up. */
54 #define MAX_EARLYBOOT_MAPPINGS 6
/*
 * Table of mappings established before pmap_bootstrapped; replayed by
 * bs_remap_earlyboot() once the MMU is running.  (Struct body lost to the
 * extraction; fields used below: addr, virt, size, flags.)
 */
61 } earlyboot_mappings[MAX_EARLYBOOT_MAPPINGS];
62 static int earlyboot_map_idx = 0;
/* Called after pmap bootstrap to re-establish the recorded mappings. */
64 void bs_remap_earlyboot(void);
/*
 * Translate a bus-space handle plus byte offset into a CPU-addressable
 * pointer.  The cast in the body shows the handle is itself a directly
 * usable (virtual) address, so this is plain pointer arithmetic.
 * NOTE(review): the inline's braces were dropped by the extraction.
 */
66 static __inline void *
67 __ppc_ba(bus_space_handle_t bsh, bus_size_t ofs)
69 return ((void *)(bsh + ofs));
/*
 * Generic bus-space map: make the physical range [addr, addr + size)
 * accessible and return a handle via *bshp.  Before the MMU/pmap is
 * bootstrapped the request is only recorded in earlyboot_mappings[] and a
 * temporary VA from pmap_early_io_map() is handed out; afterwards the
 * range is mapped with pmap_mapdev_attr() using a memory attribute derived
 * from the BUS_SPACE_MAP_* flags.
 * NOTE(review): return type, braces, the early "return", the
 * "switch (flags)" header and its break statements were dropped by the
 * extraction; surviving lines are byte-identical.
 */
73 bs_gen_map(bus_addr_t addr, bus_size_t size, int flags,
74 bus_space_handle_t *bshp)
79 * Record what we did if we haven't enabled the MMU yet. We
80 * will need to remap it as soon as the MMU comes up.
82 if (!pmap_bootstrapped) {
/* Hard limit: KASSERT fires if more than MAX_EARLYBOOT_MAPPINGS requests. */
83 KASSERT(earlyboot_map_idx < MAX_EARLYBOOT_MAPPINGS,
84 ("%s: too many early boot mapping requests", __func__));
85 earlyboot_mappings[earlyboot_map_idx].addr = addr;
/* pmap_early_io_map() chooses the VA used until bs_remap_earlyboot(). */
86 earlyboot_mappings[earlyboot_map_idx].virt =
87 pmap_early_io_map(addr, size);
88 earlyboot_mappings[earlyboot_map_idx].size = size;
89 earlyboot_mappings[earlyboot_map_idx].flags = flags;
90 *bshp = earlyboot_mappings[earlyboot_map_idx].virt;
/* MMU is up: pick the cache attribute from the mapping flags. */
93 ma = VM_MEMATTR_DEFAULT;
95 case BUS_SPACE_MAP_CACHEABLE:
96 ma = VM_MEMATTR_CACHEABLE;
98 case BUS_SPACE_MAP_PREFETCHABLE:
99 ma = VM_MEMATTR_PREFETCHABLE;
102 *bshp = (bus_space_handle_t)pmap_mapdev_attr(addr, size, ma);
/*
 * Replay every mapping recorded in earlyboot_mappings[] now that the MMU
 * and pmap are running: for each entry, re-enter the range page by page at
 * the recorded VA with the attribute implied by the recorded flags.
 * Entries whose VA already equals PHYS_TO_DMAP(pa) and that the pmap
 * reports as direct-mapped are presumably skipped (the "continue" line was
 * dropped by the extraction — TODO confirm against the original source).
 * NOTE(review): return type, local declarations, the "if (" header, break
 * statements, loop increments and closing braces are missing here.
 */
109 bs_remap_earlyboot(void)
116 for (i = 0; i < earlyboot_map_idx; i++) {
117 spa = earlyboot_mappings[i].addr;
/* Skip work for ranges already covered by the direct map. */
120 PHYS_TO_DMAP(spa) == earlyboot_mappings[i].virt &&
121 pmap_dev_direct_mapped(spa, earlyboot_mappings[i].size) == 0)
/* Select the cache attribute recorded at bs_gen_map() time. */
124 ma = VM_MEMATTR_DEFAULT;
125 switch (earlyboot_mappings[i].flags) {
126 case BUS_SPACE_MAP_CACHEABLE:
127 ma = VM_MEMATTR_CACHEABLE;
129 case BUS_SPACE_MAP_PREFETCHABLE:
130 ma = VM_MEMATTR_PREFETCHABLE;
/* Re-enter the mapping one page at a time. */
134 pa = trunc_page(spa);
135 va = trunc_page(earlyboot_mappings[i].virt);
136 while (pa < spa + earlyboot_mappings[i].size) {
137 pmap_kenter_attr(va, pa, ma);
/*
 * Generic unmap.  The size parameter is __unused, which suggests this is a
 * no-op for statically mapped bus space — body not visible here, confirm
 * against the original source.
 */
145 bs_gen_unmap(bus_size_t size __unused)
/*
 * Create a handle for a subregion of an existing mapping.  Presumably
 * *nbshp = bsh + ofs (size unused); the body was dropped by the extraction.
 */
150 bs_gen_subregion(bus_space_handle_t bsh, bus_size_t ofs,
151 bus_size_t size __unused, bus_space_handle_t *nbshp)
/*
 * Allocate and map bus-space.  Every parameter is __unused, so the body is
 * most likely the TODO panic stub — not visible here, confirm.
 */
158 bs_gen_alloc(bus_addr_t rstart __unused, bus_addr_t rend __unused,
159 bus_size_t size __unused, bus_size_t alignment __unused,
160 bus_size_t boundary __unused, int flags __unused,
161 bus_addr_t *bpap __unused, bus_space_handle_t *bshp __unused)
/* Free bus-space allocated by bs_gen_alloc(); parameters unused (stub?). */
167 bs_gen_free(bus_space_handle_t bsh __unused, bus_size_t size __unused)
/*
 * Memory barrier for bus-space accesses.  All parameters unused; the body
 * (probably a powerpc_iomb()/sync) was dropped by the extraction.
 */
173 bs_gen_barrier(bus_space_handle_t bsh __unused, bus_size_t ofs __unused,
174 bus_size_t size __unused, int flags __unused)
/*
 * Native-endian single-value reads: native_bs_rs_N loads one N-byte value
 * at handle + offset and traces it via KTR_BE_IO.
 * NOTE(review): return types, braces, the "res" declarations, the actual
 * loads ("res = *addr;"), barriers and return statements appear to have
 * been dropped by the extraction; signatures, address computation and the
 * CTR4 trace lines survive and are kept byte-identical.
 */
181 * Native-endian access functions
/* 8-bit single read. */
184 native_bs_rs_1(bus_space_handle_t bsh, bus_size_t ofs)
186 volatile uint8_t *addr;
189 addr = __ppc_ba(bsh, ofs);
192 CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
/* 16-bit single read. */
197 native_bs_rs_2(bus_space_handle_t bsh, bus_size_t ofs)
199 volatile uint16_t *addr;
202 addr = __ppc_ba(bsh, ofs);
205 CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
/* 32-bit single read. */
210 native_bs_rs_4(bus_space_handle_t bsh, bus_size_t ofs)
212 volatile uint32_t *addr;
215 addr = __ppc_ba(bsh, ofs);
218 CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
/* 64-bit single read. */
223 native_bs_rs_8(bus_space_handle_t bsh, bus_size_t ofs)
225 volatile uint64_t *addr;
228 addr = __ppc_ba(bsh, ofs);
/*
 * Native-endian read-multiple: read cnt values from the single bus
 * location bsh+ofs into addr[], using the ins* helpers from
 * <machine/pio.h>.
 */
235 native_bs_rm_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t *addr, size_t cnt)
237 ins8(__ppc_ba(bsh, ofs), addr, cnt);
241 native_bs_rm_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t *addr, size_t cnt)
243 ins16(__ppc_ba(bsh, ofs), addr, cnt);
247 native_bs_rm_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t *addr, size_t cnt)
249 ins32(__ppc_ba(bsh, ofs), addr, cnt);
253 native_bs_rm_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t *addr, size_t cnt)
255 ins64(__ppc_ba(bsh, ofs), addr, cnt);
/*
 * Native-endian read-region: copy cnt consecutive values starting at
 * bsh+ofs into addr[].  The copy loops ("while (cnt--) *addr++ = *s++;")
 * were dropped by the extraction; only signatures and the source-pointer
 * setup survive.
 */
259 native_bs_rr_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t *addr, size_t cnt)
261 volatile uint8_t *s = __ppc_ba(bsh, ofs);
269 native_bs_rr_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t *addr, size_t cnt)
271 volatile uint16_t *s = __ppc_ba(bsh, ofs);
279 native_bs_rr_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t *addr, size_t cnt)
281 volatile uint32_t *s = __ppc_ba(bsh, ofs);
289 native_bs_rr_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t *addr, size_t cnt)
291 volatile uint64_t *s = __ppc_ba(bsh, ofs);
/*
 * Native-endian single-value writes: store one N-byte value at bsh+ofs
 * and trace via KTR_BE_IO.  The actual stores ("*addr = val;") and
 * barriers were dropped by the extraction.
 */
299 native_bs_ws_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val)
301 volatile uint8_t *addr;
303 addr = __ppc_ba(bsh, ofs);
306 CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
310 native_bs_ws_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val)
312 volatile uint16_t *addr;
314 addr = __ppc_ba(bsh, ofs);
317 CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
321 native_bs_ws_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val)
323 volatile uint32_t *addr;
325 addr = __ppc_ba(bsh, ofs);
328 CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
332 native_bs_ws_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val)
334 volatile uint64_t *addr;
336 addr = __ppc_ba(bsh, ofs);
339 CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
/*
 * Native-endian write-multiple: write cnt values from addr[] to the single
 * bus location bsh+ofs, using the outs* helpers from <machine/pio.h>.
 */
343 native_bs_wm_1(bus_space_handle_t bsh, bus_size_t ofs, const uint8_t *addr,
346 outsb(__ppc_ba(bsh, ofs), addr, cnt);
350 native_bs_wm_2(bus_space_handle_t bsh, bus_size_t ofs, const uint16_t *addr,
353 outsw(__ppc_ba(bsh, ofs), addr, cnt);
357 native_bs_wm_4(bus_space_handle_t bsh, bus_size_t ofs, const uint32_t *addr,
360 outsl(__ppc_ba(bsh, ofs), addr, cnt);
364 native_bs_wm_8(bus_space_handle_t bsh, bus_size_t ofs, const uint64_t *addr,
367 outsll(__ppc_ba(bsh, ofs), addr, cnt);
/*
 * Native-endian write-region: copy cnt consecutive values from addr[] to
 * bus space starting at bsh+ofs.  Copy loops dropped by the extraction.
 */
371 native_bs_wr_1(bus_space_handle_t bsh, bus_size_t ofs, const uint8_t *addr,
374 volatile uint8_t *d = __ppc_ba(bsh, ofs);
382 native_bs_wr_2(bus_space_handle_t bsh, bus_size_t ofs, const uint16_t *addr,
385 volatile uint16_t *d = __ppc_ba(bsh, ofs);
393 native_bs_wr_4(bus_space_handle_t bsh, bus_size_t ofs, const uint32_t *addr,
396 volatile uint32_t *d = __ppc_ba(bsh, ofs);
404 native_bs_wr_8(bus_space_handle_t bsh, bus_size_t ofs, const uint64_t *addr,
407 volatile uint64_t *d = __ppc_ba(bsh, ofs);
/*
 * Native-endian set-multiple (sm: write val cnt times to one location) and
 * set-region (sr: fill cnt consecutive locations with val).  The fill
 * loops were dropped by the extraction.
 */
415 native_bs_sm_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val, size_t cnt)
417 volatile uint8_t *d = __ppc_ba(bsh, ofs);
425 native_bs_sm_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val, size_t cnt)
427 volatile uint16_t *d = __ppc_ba(bsh, ofs);
435 native_bs_sm_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val, size_t cnt)
437 volatile uint32_t *d = __ppc_ba(bsh, ofs);
445 native_bs_sm_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val, size_t cnt)
447 volatile uint64_t *d = __ppc_ba(bsh, ofs);
/* Set-region variants follow. */
455 native_bs_sr_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val, size_t cnt)
457 volatile uint8_t *d = __ppc_ba(bsh, ofs);
465 native_bs_sr_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val, size_t cnt)
467 volatile uint16_t *d = __ppc_ba(bsh, ofs);
475 native_bs_sr_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val, size_t cnt)
477 volatile uint32_t *d = __ppc_ba(bsh, ofs);
485 native_bs_sr_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val, size_t cnt)
487 volatile uint64_t *d = __ppc_ba(bsh, ofs);
/*
 * Byte-swapped single-value reads, traced via KTR_LE_IO.  16- and 32-bit
 * variants use the PowerPC byte-reversed load instructions (lhbrx/lwbrx);
 * the 64-bit variant swaps in software with le64toh().  The 8-bit variant
 * needs no swapping.  As elsewhere, return types, braces, "res"
 * declarations, plain loads and returns were dropped by the extraction.
 */
498 swapped_bs_rs_1(bus_space_handle_t bsh, bus_size_t ofs)
500 volatile uint8_t *addr;
503 addr = __ppc_ba(bsh, ofs);
506 CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
/* 16-bit: lhbrx = load halfword byte-reversed indexed. */
511 swapped_bs_rs_2(bus_space_handle_t bsh, bus_size_t ofs)
513 volatile uint16_t *addr;
516 addr = __ppc_ba(bsh, ofs);
517 __asm __volatile("lhbrx %0, 0, %1" : "=r"(res) : "r"(addr));
519 CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
/* 32-bit: lwbrx = load word byte-reversed indexed. */
524 swapped_bs_rs_4(bus_space_handle_t bsh, bus_size_t ofs)
526 volatile uint32_t *addr;
529 addr = __ppc_ba(bsh, ofs);
530 __asm __volatile("lwbrx %0, 0, %1" : "=r"(res) : "r"(addr));
532 CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
/* 64-bit: no 64-bit byte-reversed load used here; swap via le64toh(). */
537 swapped_bs_rs_8(bus_space_handle_t bsh, bus_size_t ofs)
539 volatile uint64_t *addr;
542 addr = __ppc_ba(bsh, ofs);
543 res = le64toh(*addr);
545 CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
/*
 * Byte-swapped read-multiple, via the byte-reversing ins*rb helpers
 * (bytes need no swap, so rm_1 uses plain ins8).  The 64-bit variant's
 * body is missing here — likely the TODO stub, confirm against original.
 */
550 swapped_bs_rm_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t *addr, size_t cnt)
552 ins8(__ppc_ba(bsh, ofs), addr, cnt);
556 swapped_bs_rm_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t *addr, size_t cnt)
558 ins16rb(__ppc_ba(bsh, ofs), addr, cnt);
562 swapped_bs_rm_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t *addr, size_t cnt)
564 ins32rb(__ppc_ba(bsh, ofs), addr, cnt);
568 swapped_bs_rm_8(bus_space_handle_t bshh, bus_size_t ofs, uint64_t *addr, size_t cnt)
/*
 * Byte-swapped read-region: copy cnt consecutive values, swapping 16/32-bit
 * elements with in16rb()/in32rb().  Loop headers ("while (cnt--)") and the
 * rr_1/rr_8 bodies were dropped by the extraction.
 */
574 swapped_bs_rr_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t *addr, size_t cnt)
576 volatile uint8_t *s = __ppc_ba(bsh, ofs);
584 swapped_bs_rr_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t *addr, size_t cnt)
586 volatile uint16_t *s = __ppc_ba(bsh, ofs);
589 *addr++ = in16rb(s++);
594 swapped_bs_rr_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t *addr, size_t cnt)
596 volatile uint32_t *s = __ppc_ba(bsh, ofs);
599 *addr++ = in32rb(s++);
604 swapped_bs_rr_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t *addr, size_t cnt)
/*
 * Byte-swapped single-value writes, traced via KTR_LE_IO.  16/32-bit
 * variants use the byte-reversed store instructions (sthbrx/stwbrx); the
 * 64-bit variant swaps in software with htole64().
 */
610 swapped_bs_ws_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val)
612 volatile uint8_t *addr;
614 addr = __ppc_ba(bsh, ofs);
617 CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
/* 16-bit: sthbrx = store halfword byte-reversed indexed. */
621 swapped_bs_ws_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val)
623 volatile uint16_t *addr;
625 addr = __ppc_ba(bsh, ofs);
626 __asm __volatile("sthbrx %0, 0, %1" :: "r"(val), "r"(addr));
628 CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
/* 32-bit: stwbrx = store word byte-reversed indexed. */
632 swapped_bs_ws_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val)
634 volatile uint32_t *addr;
636 addr = __ppc_ba(bsh, ofs);
637 __asm __volatile("stwbrx %0, 0, %1" :: "r"(val), "r"(addr));
639 CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
/* 64-bit: swap via htole64() before the plain store. */
643 swapped_bs_ws_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val)
645 volatile uint64_t *addr;
647 addr = __ppc_ba(bsh, ofs);
648 *addr = htole64(val);
650 CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
/*
 * Byte-swapped write-multiple via the byte-reversing outs*rb helpers
 * (plain outs8 for bytes).  The wm_8 body is missing — likely TODO stub.
 */
654 swapped_bs_wm_1(bus_space_handle_t bsh, bus_size_t ofs, const uint8_t *addr,
657 outs8(__ppc_ba(bsh, ofs), addr, cnt);
661 swapped_bs_wm_2(bus_space_handle_t bsh, bus_size_t ofs, const uint16_t *addr,
664 outs16rb(__ppc_ba(bsh, ofs), addr, cnt);
668 swapped_bs_wm_4(bus_space_handle_t bsh, bus_size_t ofs, const uint32_t *addr,
671 outs32rb(__ppc_ba(bsh, ofs), addr, cnt);
675 swapped_bs_wm_8(bus_space_handle_t bsh, bus_size_t ofs, const uint64_t *addr,
/*
 * Byte-swapped write-region: copy cnt consecutive values, swapping 16/32-bit
 * elements with out16rb()/out32rb().  Loop headers and the wr_1/wr_8 bodies
 * were dropped by the extraction.
 */
682 swapped_bs_wr_1(bus_space_handle_t bsh, bus_size_t ofs, const uint8_t *addr,
685 volatile uint8_t *d = __ppc_ba(bsh, ofs);
693 swapped_bs_wr_2(bus_space_handle_t bsh, bus_size_t ofs, const uint16_t *addr,
696 volatile uint16_t *d = __ppc_ba(bsh, ofs);
699 out16rb(d++, *addr++);
704 swapped_bs_wr_4(bus_space_handle_t bsh, bus_size_t ofs, const uint32_t *addr,
707 volatile uint32_t *d = __ppc_ba(bsh, ofs);
710 out32rb(d++, *addr++);
715 swapped_bs_wr_8(bus_space_handle_t bsh, bus_size_t ofs, const uint64_t *addr,
/*
 * Byte-swapped set-multiple (sm) and set-region (sr) fills.  Fill loops
 * and the _8 bodies were dropped by the extraction.
 */
722 swapped_bs_sm_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val, size_t cnt)
724 volatile uint8_t *d = __ppc_ba(bsh, ofs);
732 swapped_bs_sm_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val, size_t cnt)
734 volatile uint16_t *d = __ppc_ba(bsh, ofs);
742 swapped_bs_sm_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val, size_t cnt)
744 volatile uint32_t *d = __ppc_ba(bsh, ofs);
752 swapped_bs_sm_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val, size_t cnt)
/* Set-region variants follow. */
758 swapped_bs_sr_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val, size_t cnt)
760 volatile uint8_t *d = __ppc_ba(bsh, ofs);
768 swapped_bs_sr_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val, size_t cnt)
770 volatile uint16_t *d = __ppc_ba(bsh, ofs);
778 swapped_bs_sr_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val, size_t cnt)
780 volatile uint32_t *d = __ppc_ba(bsh, ofs);
788 swapped_bs_sr_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val, size_t cnt)
/*
 * Native-endian bus_space method table.  On LITTLE_ENDIAN hosts it is
 * published as bs_le_tag, otherwise as bs_be_tag (the #else/#endif lines
 * between the two struct headers, and the closing "};", were dropped by
 * the extraction).  All methods point at the native_* implementations
 * above; the "stream" slots reuse the same native functions (no swap),
 * and copy-region slots are explicitly NULL / unimplemented.
 */
793 #if BYTE_ORDER == LITTLE_ENDIAN
794 struct bus_space bs_le_tag = {
796 struct bus_space bs_be_tag = {
798 /* mapping/unmapping */
799 .bs_map = bs_gen_map,
800 .bs_unmap = bs_gen_unmap,
801 .bs_subregion = bs_gen_subregion,
803 /* allocation/deallocation */
804 .bs_alloc = bs_gen_alloc,
805 .bs_free = bs_gen_free,
808 .bs_barrier = bs_gen_barrier,
/* read (single) */
811 .bs_r_1 = native_bs_rs_1,
812 .bs_r_2 = native_bs_rs_2,
813 .bs_r_4 = native_bs_rs_4,
814 .bs_r_8 = native_bs_rs_8,
816 /* read (single) stream */
817 .bs_r_s_2 = native_bs_rs_2,
818 .bs_r_s_4 = native_bs_rs_4,
819 .bs_r_s_8 = native_bs_rs_8,
/* read multiple */
822 .bs_rm_1 = native_bs_rm_1,
823 .bs_rm_2 = native_bs_rm_2,
824 .bs_rm_4 = native_bs_rm_4,
825 .bs_rm_8 = native_bs_rm_8,
827 /* read multiple stream */
828 .bs_rm_s_2 = native_bs_rm_2,
829 .bs_rm_s_4 = native_bs_rm_4,
830 .bs_rm_s_8 = native_bs_rm_8,
/* read region */
833 .bs_rr_1 = native_bs_rr_1,
834 .bs_rr_2 = native_bs_rr_2,
835 .bs_rr_4 = native_bs_rr_4,
836 .bs_rr_8 = native_bs_rr_8,
838 /* read region stream */
839 .bs_rr_s_2 = native_bs_rr_2,
840 .bs_rr_s_4 = native_bs_rr_4,
841 .bs_rr_s_8 = native_bs_rr_8,
/* write (single) */
844 .bs_w_1 = native_bs_ws_1,
845 .bs_w_2 = native_bs_ws_2,
846 .bs_w_4 = native_bs_ws_4,
847 .bs_w_8 = native_bs_ws_8,
849 /* write (single) stream */
850 .bs_w_s_2 = native_bs_ws_2,
851 .bs_w_s_4 = native_bs_ws_4,
852 .bs_w_s_8 = native_bs_ws_8,
/* write multiple */
855 .bs_wm_1 = native_bs_wm_1,
856 .bs_wm_2 = native_bs_wm_2,
857 .bs_wm_4 = native_bs_wm_4,
858 .bs_wm_8 = native_bs_wm_8,
860 /* write multiple stream */
861 .bs_wm_s_2 = native_bs_wm_2,
862 .bs_wm_s_4 = native_bs_wm_4,
863 .bs_wm_s_8 = native_bs_wm_8,
/* write region */
866 .bs_wr_1 = native_bs_wr_1,
867 .bs_wr_2 = native_bs_wr_2,
868 .bs_wr_4 = native_bs_wr_4,
869 .bs_wr_8 = native_bs_wr_8,
871 /* write region stream */
872 .bs_wr_s_2 = native_bs_wr_2,
873 .bs_wr_s_4 = native_bs_wr_4,
874 .bs_wr_s_8 = native_bs_wr_8,
/* set multiple */
877 .bs_sm_1 = native_bs_sm_1,
878 .bs_sm_2 = native_bs_sm_2,
879 .bs_sm_4 = native_bs_sm_4,
880 .bs_sm_8 = native_bs_sm_8,
882 /* set multiple stream */
883 .bs_sm_s_2 = native_bs_sm_2,
884 .bs_sm_s_4 = native_bs_sm_4,
885 .bs_sm_s_8 = native_bs_sm_8,
/* set region */
888 .bs_sr_1 = native_bs_sr_1,
889 .bs_sr_2 = native_bs_sr_2,
890 .bs_sr_4 = native_bs_sr_4,
891 .bs_sr_8 = native_bs_sr_8,
893 /* set region stream */
894 .bs_sr_s_2 = native_bs_sr_2,
895 .bs_sr_s_4 = native_bs_sr_4,
896 .bs_sr_s_8 = native_bs_sr_8,
/* copy region */
899 .bs_cr_1 = NULL, /* UNIMPLEMENTED */
900 .bs_cr_2 = NULL, /* UNIMPLEMENTED */
901 .bs_cr_4 = NULL, /* UNIMPLEMENTED */
902 .bs_cr_8 = NULL, /* UNIMPLEMENTED */
904 /* copy region stream */
905 .bs_cr_s_2 = NULL, /* UNIMPLEMENTED */
906 .bs_cr_s_4 = NULL, /* UNIMPLEMENTED */
907 .bs_cr_s_8 = NULL, /* UNIMPLEMENTED */
/*
 * Byte-swapped bus_space method table — the opposite-endian tag to the
 * native one above: bs_be_tag on LITTLE_ENDIAN hosts, bs_le_tag otherwise
 * (again the #else/#endif and closing "};" were dropped by the
 * extraction).  Normal accessors use the swapped_* implementations;
 * "stream" slots deliberately use the native_* (non-swapping) functions,
 * since stream access bypasses byte-order conversion.  Copy-region slots
 * are NULL / unimplemented.
 */
910 #if BYTE_ORDER == LITTLE_ENDIAN
911 struct bus_space bs_be_tag = {
913 struct bus_space bs_le_tag = {
915 /* mapping/unmapping */
916 .bs_map = bs_gen_map,
917 .bs_unmap = bs_gen_unmap,
918 .bs_subregion = bs_gen_subregion,
920 /* allocation/deallocation */
921 .bs_alloc = bs_gen_alloc,
922 .bs_free = bs_gen_free,
925 .bs_barrier = bs_gen_barrier,
/* read (single), byte-swapped */
928 .bs_r_1 = swapped_bs_rs_1,
929 .bs_r_2 = swapped_bs_rs_2,
930 .bs_r_4 = swapped_bs_rs_4,
931 .bs_r_8 = swapped_bs_rs_8,
933 /* read (single) stream */
934 .bs_r_s_2 = native_bs_rs_2,
935 .bs_r_s_4 = native_bs_rs_4,
936 .bs_r_s_8 = native_bs_rs_8,
/* read multiple, byte-swapped */
939 .bs_rm_1 = swapped_bs_rm_1,
940 .bs_rm_2 = swapped_bs_rm_2,
941 .bs_rm_4 = swapped_bs_rm_4,
942 .bs_rm_8 = swapped_bs_rm_8,
944 /* read multiple stream */
945 .bs_rm_s_2 = native_bs_rm_2,
946 .bs_rm_s_4 = native_bs_rm_4,
947 .bs_rm_s_8 = native_bs_rm_8,
/* read region, byte-swapped */
950 .bs_rr_1 = swapped_bs_rr_1,
951 .bs_rr_2 = swapped_bs_rr_2,
952 .bs_rr_4 = swapped_bs_rr_4,
953 .bs_rr_8 = swapped_bs_rr_8,
955 /* read region stream */
956 .bs_rr_s_2 = native_bs_rr_2,
957 .bs_rr_s_4 = native_bs_rr_4,
958 .bs_rr_s_8 = native_bs_rr_8,
/* write (single), byte-swapped */
961 .bs_w_1 = swapped_bs_ws_1,
962 .bs_w_2 = swapped_bs_ws_2,
963 .bs_w_4 = swapped_bs_ws_4,
964 .bs_w_8 = swapped_bs_ws_8,
966 /* write (single) stream */
967 .bs_w_s_2 = native_bs_ws_2,
968 .bs_w_s_4 = native_bs_ws_4,
969 .bs_w_s_8 = native_bs_ws_8,
/* write multiple, byte-swapped */
972 .bs_wm_1 = swapped_bs_wm_1,
973 .bs_wm_2 = swapped_bs_wm_2,
974 .bs_wm_4 = swapped_bs_wm_4,
975 .bs_wm_8 = swapped_bs_wm_8,
977 /* write multiple stream */
978 .bs_wm_s_2 = native_bs_wm_2,
979 .bs_wm_s_4 = native_bs_wm_4,
980 .bs_wm_s_8 = native_bs_wm_8,
/* write region, byte-swapped */
983 .bs_wr_1 = swapped_bs_wr_1,
984 .bs_wr_2 = swapped_bs_wr_2,
985 .bs_wr_4 = swapped_bs_wr_4,
986 .bs_wr_8 = swapped_bs_wr_8,
988 /* write region stream */
989 .bs_wr_s_2 = native_bs_wr_2,
990 .bs_wr_s_4 = native_bs_wr_4,
991 .bs_wr_s_8 = native_bs_wr_8,
/* set multiple, byte-swapped */
994 .bs_sm_1 = swapped_bs_sm_1,
995 .bs_sm_2 = swapped_bs_sm_2,
996 .bs_sm_4 = swapped_bs_sm_4,
997 .bs_sm_8 = swapped_bs_sm_8,
999 /* set multiple stream */
1000 .bs_sm_s_2 = native_bs_sm_2,
1001 .bs_sm_s_4 = native_bs_sm_4,
1002 .bs_sm_s_8 = native_bs_sm_8,
/* set region, byte-swapped */
1005 .bs_sr_1 = swapped_bs_sr_1,
1006 .bs_sr_2 = swapped_bs_sr_2,
1007 .bs_sr_4 = swapped_bs_sr_4,
1008 .bs_sr_8 = swapped_bs_sr_8,
1010 /* set region stream */
1011 .bs_sr_s_2 = native_bs_sr_2,
1012 .bs_sr_s_4 = native_bs_sr_4,
1013 .bs_sr_s_8 = native_bs_sr_8,
/* copy region */
1016 .bs_cr_1 = NULL, /* UNIMPLEMENTED */
1017 .bs_cr_2 = NULL, /* UNIMPLEMENTED */
1018 .bs_cr_4 = NULL, /* UNIMPLEMENTED */
1019 .bs_cr_8 = NULL, /* UNIMPLEMENTED */
1021 /* copy region stream */
1022 .bs_cr_s_2 = NULL, /* UNIMPLEMENTED */
1023 .bs_cr_s_4 = NULL, /* UNIMPLEMENTED */
1024 .bs_cr_s_8 = NULL, /* UNIMPLEMENTED */