
Searched refs:sva (Results 1 – 25 of 50) sorted by relevance

/netbsd/sys/arch/powerpc/booke/
booke_pmap.c 137 pmap_kvptefill(vaddr_t sva, vaddr_t eva, pt_entry_t pt_entry) in pmap_kvptefill() argument
140 KASSERT(sva == trunc_page(sva)); in pmap_kvptefill()
141 pt_entry_t *ptep = kvtopte(stb, sva); in pmap_kvptefill()
142 for (; sva < eva; sva += NBPG) { in pmap_kvptefill()
143 *ptep++ = pt_entry ? (sva | pt_entry) : 0; in pmap_kvptefill()
145 return sva; in pmap_kvptefill()
322 const vaddr_t sva = (vaddr_t) pa; in pmap_md_map_poolpage() local
324 const vaddr_t eva = sva + size; in pmap_md_map_poolpage()
325 pmap_kvptefill(sva, eva, PTE_M | PTE_xR | PTE_xW); in pmap_md_map_poolpage()
327 return sva; in pmap_md_map_poolpage()
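
These pmap matches share the sva/eva idiom: sva ("start virtual address") and eva ("end virtual address") bound a half-open range that is walked one page per iteration. A minimal, self-contained sketch of the fill-loop shape seen in pmap_kvptefill() above follows; the typedefs, the NBPG value and the function name are assumptions made for the illustration, not the NetBSD definitions.

#include <stdint.h>

typedef uintptr_t vaddr_t;	/* illustrative stand-in types */
typedef uint32_t  pt_entry_t;

#define NBPG 4096u		/* assumed page size for the sketch */

/*
 * Fill (or clear, when flags == 0) one PTE per page over [sva, eva),
 * mirroring the loop in pmap_kvptefill(): the caller passes a
 * page-aligned sva and the PTE slot that maps it.
 */
static vaddr_t
kvptefill_sketch(pt_entry_t *ptep, vaddr_t sva, vaddr_t eva, pt_entry_t flags)
{
	for (; sva < eva; sva += NBPG)
		*ptep++ = flags ? (pt_entry_t)(sva | flags) : 0;
	return sva;
}
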
/netbsd/sys/arch/vax/include/
pmap.h 154 int *pte, sva; in pmap_extract() local
167 sva = PG_PFNUM(va); in pmap_extract()
169 if (sva >= (pmap->pm_p0lr & ~AST_MASK)) in pmap_extract()
173 if (sva < pmap->pm_p1lr) in pmap_extract()
181 if (kvtopte(&pte[sva])->pg_v && (pte[sva] & PG_FRAME)) { in pmap_extract()
183 *pap = (pte[sva] & PG_FRAME) << VAX_PGSHIFT; in pmap_extract()
/netbsd/sys/arch/x86/acpi/
acpi_machdep.c 422 vaddr_t sva, eva; in acpi_md_OsReadable() local
425 sva = trunc_page((vaddr_t) Pointer); in acpi_md_OsReadable()
428 if (sva < VM_MIN_KERNEL_ADDRESS) in acpi_md_OsReadable()
431 for (; sva < eva; sva += PAGE_SIZE) { in acpi_md_OsReadable()
432 pte = kvtopte(sva); in acpi_md_OsReadable()
446 vaddr_t sva, eva; in acpi_md_OsWritable() local
449 sva = trunc_page((vaddr_t) Pointer); in acpi_md_OsWritable()
452 if (sva < VM_MIN_KERNEL_ADDRESS) in acpi_md_OsWritable()
455 for (; sva < eva; sva += PAGE_SIZE) { in acpi_md_OsWritable()
456 pte = kvtopte(sva); in acpi_md_OsWritable()
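
acpi_md_OsReadable() and acpi_md_OsWritable() follow the same shape: truncate the buffer start to a page boundary, reject anything below the kernel VA floor, then test the PTE of every page in the range. A hedged, self-contained sketch of that shape is below; kvtopte_sketch(), the PTE_VALID bit and KVA_FLOOR are stand-ins invented for the illustration, not the x86 interfaces.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef uintptr_t vaddr_t;

#define PAGE_SIZE	4096u
#define PAGE_MASK	(PAGE_SIZE - 1)
#define trunc_page(va)	((va) & ~(vaddr_t)PAGE_MASK)
#define round_page(va)	(((va) + PAGE_MASK) & ~(vaddr_t)PAGE_MASK)

#define KVA_FLOOR	((vaddr_t)0x80000000u)	/* stand-in for VM_MIN_KERNEL_ADDRESS */
#define PTE_VALID	0x1u			/* stand-in "present" bit */

/* Stand-in for kvtopte(): index a small fake linear PTE array. */
static uint32_t fake_ptes[16];

static uint32_t *
kvtopte_sketch(vaddr_t va)
{
	return &fake_ptes[(va - KVA_FLOOR) / PAGE_SIZE % 16];
}

/* Walk [ptr, ptr + len) page by page and require a valid PTE for each page. */
static bool
range_readable_sketch(const void *ptr, size_t len)
{
	vaddr_t sva = trunc_page((vaddr_t)ptr);
	vaddr_t eva = round_page((vaddr_t)ptr + len);

	if (sva < KVA_FLOOR)
		return false;
	for (; sva < eva; sva += PAGE_SIZE)
		if ((*kvtopte_sketch(sva) & PTE_VALID) == 0)
			return false;
	return true;
}
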
/netbsd/sys/arch/sun2/sun2/
pmap.c 706 sva, eva, keep); in pmeg_mon_init()
709 sva &= ~(NBSG - 1); in pmeg_mon_init()
711 while (sva < eva) { in pmeg_mon_init()
712 sme = get_segmap(sva); in pmeg_mon_init()
715 endseg = sva + NBSG; in pmeg_mon_init()
732 sva += NBSG; in pmeg_mon_init()
2996 va = sva; in pmap_protect()
3030 sme = get_segmap(sva); in pmap_protect1()
3045 sme = get_segmap(sva); in pmap_protect1()
3234 va = sva; in pmap_remove()
[all …]
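
The sun2/sun3 matches iterate at segment rather than page granularity: sva is rounded down to an NBSG boundary and get_segmap() is consulted once per segment. A simplified sketch of that walk, with a stubbed segment map and an assumed segment size, is shown here.

#include <stdint.h>
#include <stdio.h>

typedef uintptr_t vaddr_t;

#define NBSG		0x20000u	/* assumed segment size for the sketch */
#define SGMAP_SLOTS	64u

static uint8_t fake_segmap[SGMAP_SLOTS];	/* stub for the hardware segment map */

static uint8_t
get_segmap_sketch(vaddr_t va)
{
	return fake_segmap[(va / NBSG) % SGMAP_SLOTS];
}

/* Visit every segment overlapping [sva, eva), as pmeg_mon_init() does. */
static void
segwalk_sketch(vaddr_t sva, vaddr_t eva)
{
	sva &= ~((vaddr_t)NBSG - 1);		/* round down to a segment boundary */
	while (sva < eva) {
		uint8_t sme = get_segmap_sketch(sva);
		printf("segment %#lx -> pmeg %u\n", (unsigned long)sva, (unsigned)sme);
		sva += NBSG;
	}
}
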
/netbsd/sys/arch/sun3/sun3/
pmap.c 714 sva, eva, keep); in pmeg_mon_init()
717 sva &= ~(NBSG - 1); in pmeg_mon_init()
719 while (sva < eva) { in pmeg_mon_init()
720 sme = get_segmap(sva); in pmeg_mon_init()
723 endseg = sva + NBSG; in pmeg_mon_init()
733 sva, sme, valid); in pmeg_mon_init()
740 sva += NBSG; in pmeg_mon_init()
2974 va = sva; in pmap_protect()
3006 sme = get_segmap(sva); in pmap_protect1()
3020 sme = get_segmap(sva); in pmap_protect1()
[all …]
/netbsd/sys/arch/arm/acpi/
acpi_machdep.c 229 vaddr_t sva, eva; in acpi_md_OsReadable() local
232 sva = trunc_page((vaddr_t)va); in acpi_md_OsReadable()
235 if (sva < VM_MIN_KERNEL_ADDRESS) in acpi_md_OsReadable()
238 for (; sva < eva; sva += PAGE_SIZE) { in acpi_md_OsReadable()
239 pte = kvtopte(sva); in acpi_md_OsReadable()
250 vaddr_t sva, eva; in acpi_md_OsWritable() local
253 sva = trunc_page((vaddr_t)va); in acpi_md_OsWritable()
256 if (sva < VM_MIN_KERNEL_ADDRESS) in acpi_md_OsWritable()
259 for (; sva < eva; sva += PAGE_SIZE) { in acpi_md_OsWritable()
260 pte = kvtopte(sva); in acpi_md_OsWritable()
/netbsd/sys/arch/alpha/alpha/
pmap.c 1745 while (sva < eva) { in pmap_remove_internal()
1753 sva += PAGE_SIZE; in pmap_remove_internal()
1779 for (; sva < eva; sva = l1eva, l1pte++) { in pmap_remove_internal()
1790 for (; sva < l1eva && sva < eva; sva = l2eva, l2pte++) { in pmap_remove_internal()
1808 l3vptva = sva; in pmap_remove_internal()
1810 for (; sva < l2eva && sva < eva; in pmap_remove_internal()
1817 pmap, sva, in pmap_remove_internal()
2025 pmap, sva, eva, prot); in pmap_protect()
2041 for (; sva < eva; sva = l1eva, l1pte++) { in pmap_protect()
2045 for (; sva < l1eva && sva < eva; sva = l2eva, l2pte++) { in pmap_protect()
[all …]
/netbsd/sys/arch/hppa/hppa/
pmap.c 488 vaddr_t va = sva; in pmap_dump_table()
1496 for (batch = 0; sva < eva; sva += PAGE_SIZE) { in pmap_remove()
1497 pdemask = sva & PDE_MASK; in pmap_remove()
1502 batch = pdemask == sva && sva + PDE_SIZE <= eva; in pmap_remove()
1510 pmap_pte_flush(pmap, sva, pte); in pmap_remove()
1517 pmap_pte_set(pde, sva, 0); in pmap_remove()
1535 pmap_pde_release(pmap, sva, in pmap_remove()
1559 sva = trunc_page(sva); in pmap_write_protect()
1564 for (pdemask = 1; sva < eva; sva += PAGE_SIZE) { in pmap_write_protect()
1566 pdemask = sva & PDE_MASK; in pmap_write_protect()
[all …]
/netbsd/sys/uvm/pmap/
pmap.c 388 if (sva < VM_MIN_KERNEL_ADDRESS) in pmap_addr_range_check()
390 func, sva); in pmap_addr_range_check()
1113 for (; sva < eva; sva += NBPG, ptep++) { in pmap_pte_remove()
1135 pmap_tlb_invalidate_addr(pmap, sva); in pmap_pte_remove()
1153 (uintptr_t)pmap, sva, eva, 0); in pmap_remove()
1258 for (; sva < eva; sva += NBPG, ptep++) { in pmap_pte_protect()
1308 (uintptr_t)pmap, sva, eva, prot); in pmap_protect()
1312 pmap_remove(pmap, sva, eva); in pmap_protect()
1649 for (; sva < eva; sva += NBPG, ptep++) { in pmap_pte_kremove()
1664 pmap_tlb_invalidate_addr(pmap, sva); in pmap_pte_kremove()
[all …]
/netbsd/sys/uvm/
uvm_glue.c 129 vaddr_t sva, eva; in uvm_chgkprot() local
133 for (sva = trunc_page((vaddr_t)addr); sva < eva; sva += PAGE_SIZE) { in uvm_chgkprot()
137 if (pmap_extract(pmap_kernel(), sva, &pa) == false) in uvm_chgkprot()
139 pmap_enter(pmap_kernel(), sva, pa, prot, PMAP_WIRED); in uvm_chgkprot()
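
uvm_chgkprot() changes the kernel protection of a buffer by walking its pages, extracting the physical address currently mapped at each page and re-entering the mapping with the new protection. A minimal sketch of that extract-and-re-enter loop follows; pmap_extract_sketch() and pmap_enter_sketch() are stand-ins for the pmap(9) calls, and the handling of an unmapped page is a placeholder since it is not part of the matched lines.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef uintptr_t vaddr_t;
typedef uintptr_t paddr_t;
typedef unsigned int vm_prot_t;

#define PAGE_SIZE	4096u
#define trunc_page(va)	((va) & ~(vaddr_t)(PAGE_SIZE - 1))
#define round_page(va)	(((va) + PAGE_SIZE - 1) & ~(vaddr_t)(PAGE_SIZE - 1))

/* Stand-ins for pmap_extract()/pmap_enter() on the kernel pmap. */
static bool
pmap_extract_sketch(vaddr_t va, paddr_t *pap)
{
	*pap = (paddr_t)va;	/* pretend the range is identity-mapped */
	return true;
}

static void
pmap_enter_sketch(vaddr_t va, paddr_t pa, vm_prot_t prot)
{
	(void)va; (void)pa; (void)prot;	/* a real pmap would rewrite the PTE here */
}

/* Re-enter every page of [addr, addr + len) with the new protection. */
static void
chgkprot_sketch(void *addr, size_t len, vm_prot_t prot)
{
	vaddr_t eva = round_page((vaddr_t)addr + len);

	for (vaddr_t sva = trunc_page((vaddr_t)addr); sva < eva; sva += PAGE_SIZE) {
		paddr_t pa;

		if (!pmap_extract_sketch(sva, &pa))
			continue;	/* unmapped page: placeholder handling */
		pmap_enter_sketch(sva, pa, prot);
	}
}
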
/netbsd/sys/arch/hppa/include/
pmap.h 161 void pmap_remove(struct pmap *pmap, vaddr_t sva, vaddr_t eva);
198 pmap_protect(struct pmap *pmap, vaddr_t sva, vaddr_t eva, vm_prot_t prot) in pmap_protect() argument
202 pmap_write_protect(pmap, sva, eva, prot); in pmap_protect()
204 pmap_remove(pmap, sva, eva); in pmap_protect()
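
Both hppa and x86 (see the x86 pmap.h entry further down) define pmap_protect() as a thin inline that either write-protects the range or removes it. The selecting condition is not part of the matched lines, so the sketch below follows the usual shape of these wrappers, which is an assumption: tightening to read/execute keeps the mappings but write-protects them, a protection granting nothing removes them, and a protection that still includes write is left alone. The callees are stubs for the illustration.

#include <stdint.h>

typedef uintptr_t vaddr_t;
typedef unsigned int vm_prot_t;

#define VM_PROT_READ	0x1u
#define VM_PROT_WRITE	0x2u
#define VM_PROT_EXECUTE	0x4u

struct pmap;	/* opaque for the sketch */

static void
pmap_write_protect_sketch(struct pmap *pmap, vaddr_t sva, vaddr_t eva, vm_prot_t prot)
{
	(void)pmap; (void)sva; (void)eva; (void)prot;	/* stub */
}

static void
pmap_remove_sketch(struct pmap *pmap, vaddr_t sva, vaddr_t eva)
{
	(void)pmap; (void)sva; (void)eva;	/* stub */
}

/* Dispatch between write-protecting and removing [sva, eva). */
static inline void
pmap_protect_sketch(struct pmap *pmap, vaddr_t sva, vaddr_t eva, vm_prot_t prot)
{
	if ((prot & VM_PROT_WRITE) == 0) {
		if (prot & (VM_PROT_READ | VM_PROT_EXECUTE))
			pmap_write_protect_sketch(pmap, sva, eva, prot);
		else
			pmap_remove_sketch(pmap, sva, eva);
	}
	/* nothing to do here when write access is being kept */
}
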
/netbsd/lib/libpuffs/
subr.c 155 puffs_setvattr(struct vattr *vap, const struct vattr *sva) in puffs_setvattr() argument
158 #define SETIFVAL(a, t) if (sva->a != (t)PUFFS_VNOVAL) vap->a = sva->a in puffs_setvattr()
159 if (sva->va_type != VNON) in puffs_setvattr()
160 vap->va_type = sva->va_type; in puffs_setvattr()
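
puffs_setvattr() copies attribute fields from sva into vap only when the source field carries a real value, using the SETIFVAL() macro against the PUFFS_VNOVAL sentinel. A self-contained sketch of the same copy-if-set pattern, with a made-up attribute struct and sentinel, follows.

#include <stdint.h>

#define NOVAL	UINT64_MAX	/* stand-in for PUFFS_VNOVAL */

/* A made-up attribute record for the illustration. */
struct attr_sketch {
	uint64_t size;
	uint64_t mode;
	uint64_t uid;
	uint64_t gid;
};

/* Copy field `a` from src to dst only if src actually set it. */
#define SETIFVAL_SKETCH(a) \
	do { if (src->a != NOVAL) dst->a = src->a; } while (0)

static void
setattr_sketch(struct attr_sketch *dst, const struct attr_sketch *src)
{
	SETIFVAL_SKETCH(size);
	SETIFVAL_SKETCH(mode);
	SETIFVAL_SKETCH(uid);
	SETIFVAL_SKETCH(gid);
}
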
/netbsd/sys/arch/sun3/sun3x/
pmap.c 2982 TBIS(sva); in pmap_remove_kernel()
2983 sva += PAGE_SIZE; in pmap_remove_kernel()
2997 pmap_remove_kernel(sva, eva); in pmap_remove()
3090 nstart = MMU_ROUND_UP_A(sva); in pmap_remove_a()
3095 if (sva < nstart) { in pmap_remove_a()
3103 idx = MMU_TIA(sva); in pmap_remove_a()
3285 nstart = MMU_ROUND_UP_B(sva); in pmap_remove_b()
3290 if (sva < nstart) { in pmap_remove_b()
3291 idx = MMU_TIB(sva); in pmap_remove_b()
3402 idx = MMU_TIC(sva); in pmap_remove_c()
[all …]
/netbsd/sys/arch/hpcmips/hpcmips/
bus_space.c 229 mips_pte_cachechange(struct pmap *pmap, vaddr_t sva, vaddr_t eva, in mips_pte_cachechange() argument
232 mips_dcache_wbinv_range(sva, eva - sva); in mips_pte_cachechange()
234 for (; sva < eva; sva += PAGE_SIZE) { in mips_pte_cachechange()
240 tlb_update_addr(sva, KERNEL_PID, pte, 0); in mips_pte_cachechange()
251 const vaddr_t sva = mips_trunc_page(bpa); in __hpcmips_cacheable() local
253 pmap_pte_process(pmap_kernel(), sva, eva, in __hpcmips_cacheable()
/netbsd/sys/arch/m68k/m68k/
pmap_motorola.c 799 while (sva < eva) { in pmap_remove()
808 pte = pmap_pte(pmap, sva); in pmap_remove()
809 while (sva < nssva) { in pmap_remove()
817 sva = nssva; in pmap_remove()
850 sva += PAGE_SIZE; in pmap_remove()
954 pmap, sva, eva, prot)); in pmap_protect()
966 while (sva < eva) { in pmap_protect()
977 sva = nssva; in pmap_protect()
987 while (sva < nssva) { in pmap_protect()
1025 TBIS(sva); in pmap_protect()
[all …]
/netbsd/sys/arch/amd64/stand/prekern/
mm.c 207 vaddr_t sva, eva; in mm_randva_kregion() local
224 sva = bootspace.segs[i].va; in mm_randva_kregion()
225 eva = sva + bootspace.segs[i].sz; in mm_randva_kregion()
227 if ((sva <= randva) && (randva < eva)) { in mm_randva_kregion()
231 if ((sva < randva + size) && (randva + size <= eva)) { in mm_randva_kregion()
235 if (randva < sva && eva < (randva + size)) { in mm_randva_kregion()
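
mm_randva_kregion() rejects a randomly chosen window [randva, randva + size) when it collides with an existing bootspace segment [sva, eva); the three matched conditions cover the window starting inside a segment, ending inside one, or swallowing one whole. For non-empty half-open intervals the three tests collapse into a single overlap check, sketched below with illustrative names.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef uintptr_t vaddr_t;

/*
 * True when [randva, randva + size) overlaps [sva, eva).  For size > 0
 * and sva < eva this is equivalent to the union of the three tests in
 * mm_randva_kregion(): start inside, end inside, segment fully contained.
 */
static bool
overlaps_sketch(vaddr_t randva, size_t size, vaddr_t sva, vaddr_t eva)
{
	return randva < eva && sva < randva + size;
}
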
/netbsd/sys/arch/x86/include/
pmap.h 182 pmap_protect(struct pmap *pmap, vaddr_t sva, vaddr_t eva, vm_prot_t prot) in pmap_protect() argument
186 pmap_write_protect(pmap, sva, eva, prot); in pmap_protect()
188 pmap_remove(pmap, sva, eva); in pmap_protect()
/netbsd/sys/arch/riscv/riscv/
pmap_machdep.c 417 vaddr_t sva = MEGAPAGE_TRUNC(va); in pmap_kenter_range() local
423 while (sva < eva) { in pmap_kenter_range()
424 const size_t sidx = (sva >> vshift) & pdetab_mask; in pmap_kenter_range()
428 sva += NBSEG; in pmap_kenter_range()
/netbsd/sys/rump/librump/rumpkern/arch/x86/
rump_x86_pmap.c 92 pmap_remove(pmap_t pmap, vaddr_t sva, vaddr_t eva) in pmap_remove() argument
107 pmap_write_protect(pmap_t pmap, vaddr_t sva, vaddr_t eva, vm_prot_t prot) in pmap_write_protect() argument
/netbsd/sys/arch/x86/x86/
pmap.c 1114 eva = sva + len; in pmap_kremove1()
1139 pmap_kremove1(sva, len, false); in pmap_kremove()
1152 pmap_kremove1(sva, len, true); in pmap_kremove_local()
4289 vaddr_t blkendva, va = sva;
4849 sva &= ~PAGE_MASK;
5322 pgnt->pd_gnt_sva = sva;
5885 for (/* null */ ; sva < eva ; sva = blkendva) {
5897 pte = &ptes[pl1_i(sva)];
5898 for (/* null */; sva < blkendva ; sva += PAGE_SIZE, pte++) {
6678 vaddr_t blkendva, va = sva;
[all …]
/netbsd/sys/arch/sh3/sh3/
pmap.c 514 pmap_remove(pmap_t pmap, vaddr_t sva, vaddr_t eva) in pmap_remove() argument
520 KDASSERT((sva & PGOFSET) == 0); in pmap_remove()
522 for (va = sva; va < eva; va += PAGE_SIZE) { in pmap_remove()
655 pmap_protect(pmap_t pmap, vaddr_t sva, vaddr_t eva, vm_prot_t prot) in pmap_protect() argument
661 sva = trunc_page(sva); in pmap_protect()
664 pmap_remove(pmap, sva, eva); in pmap_protect()
682 for (va = sva; va < eva; va += PAGE_SIZE) { in pmap_protect()
/netbsd/sys/arch/aarch64/aarch64/
pmap.c 879 KASSERT_PM_ADDR(pm, sva); in pmap_icache_sync_range()
933 cpu_icache_sync_range(sva, len); in pmap_procwr()
1277 pm, sva, eva, prot); in pmap_protect()
1279 KASSERT_PM_ADDR(pm, sva); in pmap_protect()
1280 KASSERT(!IN_DIRECTMAP_ADDR(sva)); in pmap_protect()
1288 pmap_remove(pm, sva, eva); in pmap_protect()
1293 KDASSERT((sva & PAGE_MASK) == 0); in pmap_protect()
2162 pm, sva, eva, kremove); in _pmap_remove()
2225 KASSERT_PM_ADDR(pm, sva); in pmap_remove()
2226 KASSERT(!IN_DIRECTMAP_ADDR(sva)); in pmap_remove()
[all …]
/netbsd/sys/arch/sparc/sparc/
memreg.c 179 u_int ser, u_int sva, u_int aer, u_int ava, in memerr4_4c() argument
187 issync ? "" : "a", bits, sva); in memerr4_4c()
191 pte = getpte4(sva); in memerr4_4c()
/netbsd/sys/arch/arm/arm32/
pmap.c 3534 while (sva < eva) { in pmap_remove()
3544 sva = next_bucket; in pmap_remove()
3551 for (;sva < next_bucket; in pmap_remove()
4132 while (sva < eva) { in pmap_protect()
4139 sva = next_bucket; in pmap_protect()
4198 sva += PAGE_SIZE; in pmap_protect()
4223 vsize_t page_size = trunc_page(sva) + PAGE_SIZE - sva; in pmap_icache_sync_range()
4231 while (sva < eva) { in pmap_icache_sync_range()
4238 sva = next_bucket; in pmap_icache_sync_range()
4243 sva < next_bucket; in pmap_icache_sync_range()
[all …]
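
The arm32 matches process the range in bucket-sized chunks: the outer loop advances sva to the next L2 bucket boundary, clamped to eva, and an inner loop steps page by page within the chunk. A generic sketch of that chunked walk, with an assumed bucket size and placeholder per-page work, follows.

#include <stdint.h>
#include <stdio.h>

typedef uintptr_t vaddr_t;

#define PAGE_SIZE	4096u
#define BUCKET_SIZE	(256u * PAGE_SIZE)	/* assumed chunk size for the sketch */

/* Next bucket boundary strictly above va. */
#define NEXT_BUCKET(va)	(((va) | (vaddr_t)(BUCKET_SIZE - 1)) + 1)

static void
bucket_walk_sketch(vaddr_t sva, vaddr_t eva)
{
	while (sva < eva) {
		vaddr_t next_bucket = NEXT_BUCKET(sva);

		if (next_bucket > eva)
			next_bucket = eva;	/* clamp the final, partial chunk */

		/* per-chunk setup (e.g. locating the L2 bucket) would go here */
		for (; sva < next_bucket; sva += PAGE_SIZE) {
			/* per-page work (PTE lookup, flush, ...) would go here */
			printf("page %#lx\n", (unsigned long)sva);
		}
	}
}
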
/netbsd/sys/arch/ia64/ia64/
pmap.c 1825 pmap_remove(pmap_t pmap, vaddr_t sva, vaddr_t eva) in pmap_remove() argument
1843 for (va = sva; va < eva; va += PAGE_SIZE) { in pmap_remove()
1875 pmap_protect(pmap_t pmap, vaddr_t sva, vaddr_t eva, vm_prot_t prot) in pmap_protect() argument
1882 pmap, sva, eva, prot); in pmap_protect()
1888 pmap_remove(pmap, sva, eva); in pmap_protect()
1896 if ((sva & PAGE_MASK) || (eva & PAGE_MASK)) in pmap_protect()
1898 sva = trunc_page(sva); in pmap_protect()
1903 for ( ; sva < eva; sva += PAGE_SIZE) { in pmap_protect()
1905 pte = pmap_find_vhpt(sva); in pmap_protect()
1927 ia64_sync_icache(sva, PAGE_SIZE); in pmap_protect()
[all …]
