
Searched refs:vsid (Results 1 – 25 of 37) sorted by relevance


/linux/arch/powerpc/mm/
copro_fault.c
85 u64 vsid, vsidkey; in copro_calculate_slb() local
95 vsid = get_user_vsid(&mm->context, ea, ssize); in copro_calculate_slb()
102 vsid = get_kernel_vsid(ea, mmu_kernel_ssize); in copro_calculate_slb()
109 vsid = get_kernel_vsid(ea, mmu_kernel_ssize); in copro_calculate_slb()
116 vsid = get_kernel_vsid(ea, mmu_kernel_ssize); in copro_calculate_slb()
124 if (!vsid) in copro_calculate_slb()
127 vsid = (vsid << slb_vsid_shift(ssize)) | vsidkey; in copro_calculate_slb()
129 vsid |= mmu_psize_defs[psize].sllp | in copro_calculate_slb()
133 slb->vsid = vsid; in copro_calculate_slb()
/linux/arch/powerpc/include/asm/book3s/64/
mmu-hash.h
159 void (*hugepage_invalidate)(unsigned long vsid, in hpt_hash()
421 unsigned long vsid, int ssize) in hpt_vpn() argument
437 unsigned long hash, vsid; in hpt_hash() local
446 vsid = vpn >> (SID_SHIFT_1T - VPN_SHIFT); in hpt_hash()
447 hash = vsid ^ (vsid << 25) ^ in hpt_hash()
483 unsigned long vsid, pmd_t *pmdp, in __hash_page_thp() argument
515 u64 vsid; member
757 unsigned long vsid; in vsid_scramble() local
762 vsid = protovsid * vsid_multiplier; in vsid_scramble()
763 vsid = (vsid >> vsid_bits) + (vsid & vsid_modulus); in vsid_scramble()
[all …]
tlbflush-hash.h
65 extern void flush_hash_hugepage(unsigned long vsid, unsigned long addr,
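
The mmu-hash.h hits above include the vsid_scramble() arithmetic: a proto-VSID is multiplied by a large multiplier and the product is folded back modulo 2^bits - 1. The following is a minimal standalone sketch of that folding pattern only; the multiplier and bit width are placeholder values chosen for illustration, not quoted from the kernel headers.

/* Sketch of the "multiply, then fold modulo 2^bits - 1" pattern seen in
 * the vsid_scramble() hits above. Constants are illustrative assumptions. */
#include <stdio.h>

#define EX_VSID_BITS       38UL                          /* assumed width */
#define EX_VSID_MODULUS    ((1UL << EX_VSID_BITS) - 1)
#define EX_VSID_MULTIPLIER 12538073UL                    /* assumed prime */

static unsigned long example_vsid_scramble(unsigned long protovsid)
{
	unsigned long vsid = protovsid * EX_VSID_MULTIPLIER;

	/* fold the high bits back in: x mod (2^bits - 1) */
	vsid = (vsid >> EX_VSID_BITS) + (vsid & EX_VSID_MODULUS);
	/* final carry fold so a value of exactly 2^bits - 1 wraps to 0 */
	return (vsid + ((vsid + 1) >> EX_VSID_BITS)) & EX_VSID_MODULUS;
}

int main(void)
{
	printf("scrambled: 0x%lx\n", example_vsid_scramble(0x123456UL));
	return 0;
}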
/linux/arch/powerpc/kvm/
book3s_32_mmu.c
73 u64 *vsid);
83 u64 vsid; in kvmppc_mmu_book3s_32_ea_to_vp() local
156 u64 vsid; in kvmppc_mmu_book3s_32_xlate_bat() local
158 eaddr >> SID_SHIFT, &vsid); in kvmppc_mmu_book3s_32_xlate_bat()
159 vsid <<= 16; in kvmppc_mmu_book3s_32_xlate_bat()
349 u64 *vsid) in kvmppc_mmu_book3s_32_esid_to_vsid() argument
367 *vsid = VSID_REAL | esid; in kvmppc_mmu_book3s_32_esid_to_vsid()
370 *vsid = VSID_REAL_IR | gvsid; in kvmppc_mmu_book3s_32_esid_to_vsid()
377 *vsid = sr_vsid(sr); in kvmppc_mmu_book3s_32_esid_to_vsid()
379 *vsid = VSID_BAT | gvsid; in kvmppc_mmu_book3s_32_esid_to_vsid()
[all …]
book3s_32_mmu_host.c
106 static u32 *kvmppc_mmu_get_pteg(struct kvm_vcpu *vcpu, u32 vsid, u32 eaddr, in kvmppc_mmu_get_pteg() argument
114 hash = ((vsid ^ page) << 6); in kvmppc_mmu_get_pteg()
135 u64 vsid; in kvmppc_mmu_map_page() local
158 vcpu->arch.mmu.esid_to_vsid(vcpu, orig_pte->eaddr >> SID_SHIFT, &vsid); in kvmppc_mmu_map_page()
159 map = find_sid_vsid(vcpu, vsid); in kvmppc_mmu_map_page()
162 map = find_sid_vsid(vcpu, vsid); in kvmppc_mmu_map_page()
166 vsid = map->host_vsid; in kvmppc_mmu_map_page()
167 vpn = (vsid << (SID_SHIFT - VPN_SHIFT)) | in kvmppc_mmu_map_page()
176 pteg = kvmppc_mmu_get_pteg(vcpu, vsid, eaddr, primary); in kvmppc_mmu_map_page()
194 pteg0 = ((eaddr & 0x0fffffff) >> 22) | (vsid << 7) | PTE_V | in kvmppc_mmu_map_page()
book3s_64_mmu_host.c
77 u64 vsid; in kvmppc_mmu_map_page() local
107 vcpu->arch.mmu.esid_to_vsid(vcpu, orig_pte->eaddr >> SID_SHIFT, &vsid); in kvmppc_mmu_map_page()
108 map = find_sid_vsid(vcpu, vsid); in kvmppc_mmu_map_page()
112 map = find_sid_vsid(vcpu, vsid); in kvmppc_mmu_map_page()
116 vsid, orig_pte->eaddr); in kvmppc_mmu_map_page()
144 if (vsid & VSID_64K) in kvmppc_mmu_map_page()
217 u64 vsid; in kvmppc_mmu_unmap_page() local
219 vcpu->arch.mmu.esid_to_vsid(vcpu, pte->eaddr >> SID_SHIFT, &vsid); in kvmppc_mmu_unmap_page()
220 if (vsid & VSID_64K) in kvmppc_mmu_unmap_page()
349 svcpu->slb[slb_index].vsid = slb_vsid; in kvmppc_mmu_map_segment()
book3s_64_mmu.c
51 if (vcpu->arch.slb[i].vsid) in kvmppc_mmu_book3s_64_find_slbe()
57 vcpu->arch.slb[i].vsid); in kvmppc_mmu_book3s_64_find_slbe()
78 ((slb->vsid) << (kvmppc_slb_sid_shift(slb) - VPN_SHIFT)); in kvmppc_slb_calc_vpn()
141 page, vcpu_book3s->sdr1, pteg, slbe->vsid); in kvmppc_mmu_book3s_64_get_pteg()
161 avpn |= slbe->vsid << (kvmppc_slb_sid_shift(slbe) - p); in kvmppc_mmu_book3s_64_get_avpn()
383 slbe->vsid = (rs & ~SLB_VSID_B) >> (kvmppc_slb_sid_shift(slbe) - 16); in kvmppc_mmu_book3s_64_slbmte()
573 u64 *vsid) in kvmppc_mmu_book3s_64_esid_to_vsid() argument
585 gvsid = slb->vsid; in kvmppc_mmu_book3s_64_esid_to_vsid()
631 *vsid = gvsid; in kvmppc_mmu_book3s_64_esid_to_vsid()
639 *vsid = VSID_REAL | esid; in kvmppc_mmu_book3s_64_esid_to_vsid()
book3s_64_mmu_hv.c
1304 unsigned long offset, vsid; in resize_hpt_rehash_hpte() local
1308 vsid = avpn >> 5; in resize_hpt_rehash_hpte()
1311 offset |= ((vsid ^ pteg) & old_hash_mask) << pshift; in resize_hpt_rehash_hpte()
1313 hash = vsid ^ (offset >> pshift); in resize_hpt_rehash_hpte()
1315 unsigned long offset, vsid; in resize_hpt_rehash_hpte() local
1319 vsid = avpn >> 17; in resize_hpt_rehash_hpte()
1321 offset |= ((vsid ^ (vsid << 25) ^ pteg) & old_hash_mask) << pshift; in resize_hpt_rehash_hpte()
1323 hash = vsid ^ (vsid << 25) ^ (offset >> pshift); in resize_hpt_rehash_hpte()
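
The hpt_hash() and resize_hpt_rehash_hpte() hits above share one primary-hash pattern: for 256M segments the VPN's segment number is XORed with its in-segment page index, while 1T segments additionally mix the VSID with vsid << 25. Below is a minimal sketch of that pattern only; the shift constants (28/40/12) and the 39-bit hash mask are assumptions for illustration, not quoted from the kernel headers.

/* Sketch of the primary HPT hash pattern visible in the hits above.
 * Shift constants and the final mask are illustrative assumptions. */
#include <stdio.h>

#define EX_SID_SHIFT     28   /* 256M segment, assumed */
#define EX_SID_SHIFT_1T  40   /* 1T segment, assumed */
#define EX_VPN_SHIFT     12   /* assumed */

static unsigned long example_hpt_hash(unsigned long vpn, unsigned int pshift,
				      int is_1t_segment)
{
	unsigned long hash, vsid, mask;

	if (!is_1t_segment) {
		mask = (1UL << (EX_SID_SHIFT - EX_VPN_SHIFT)) - 1;
		hash = (vpn >> (EX_SID_SHIFT - EX_VPN_SHIFT)) ^
		       ((vpn & mask) >> (pshift - EX_VPN_SHIFT));
	} else {
		mask = (1UL << (EX_SID_SHIFT_1T - EX_VPN_SHIFT)) - 1;
		vsid = vpn >> (EX_SID_SHIFT_1T - EX_VPN_SHIFT);
		hash = vsid ^ (vsid << 25) ^
		       ((vpn & mask) >> (pshift - EX_VPN_SHIFT));
	}
	return hash & 0x7fffffffffUL;	/* keep the low 39 hash bits */
}

int main(void)
{
	/* example: 64K base page (pshift = 16) in a 256M segment */
	printf("hash: 0x%lx\n", example_hpt_hash(0x12345678UL, 16, 0));
	return 0;
}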
/linux/drivers/misc/lkdtm/
powerpc.c
73 unsigned long esid, vsid; in insert_dup_slb_entry_0() local
80 asm volatile("slbmfev %0,%1" : "=r" (vsid) : "r" (i)); in insert_dup_slb_entry_0()
84 : "r" (vsid), in insert_dup_slb_entry_0()
89 asm volatile("slbmfev %0,%1" : "=r" (vsid) : "r" (i)); in insert_dup_slb_entry_0()
93 : "r" (vsid), in insert_dup_slb_entry_0()
/linux/arch/powerpc/mm/book3s64/
hash_native.c
489 unsigned long vsid; in native_hpte_updateboltedpp() local
496 vsid = get_kernel_vsid(ea, ssize); in native_hpte_updateboltedpp()
497 vpn = hpt_vpn(ea, vsid, ssize); in native_hpte_updateboltedpp()
525 unsigned long vsid; in native_hpte_removebolted() local
532 vsid = get_kernel_vsid(ea, ssize); in native_hpte_removebolted()
533 vpn = hpt_vpn(ea, vsid, ssize); in native_hpte_removebolted()
620 vpn = hpt_vpn(addr, vsid, ssize); in native_hugepage_invalidate()
670 unsigned long vsid, seg_off; in hpte_decode() local
699 vsid = avpn >> 5; in hpte_decode()
710 vsid = avpn >> 17; in hpte_decode()
[all …]
hash_utils.c
385 if (!vsid) in htab_bolt_mapping()
1425 unsigned long vsid, unsigned long trap, in hash_failure_debug() argument
1433 trap, vsid, ssize, psize, lpsize, pte); in hash_failure_debug()
1465 unsigned long vsid; in hash_page_mm() local
1489 vsid = get_kernel_vsid(ea, mmu_kernel_ssize); in hash_page_mm()
1496 vsid = get_kernel_vsid(ea, mmu_kernel_ssize); in hash_page_mm()
1512 if (!vsid) { in hash_page_mm()
1765 unsigned long vsid; in hash_preload() local
1787 if (!vsid) in hash_preload()
1994 vpn = hpt_vpn(addr, vsid, ssize); in flush_hash_hugepage()
[all …]
hash_tlb.c
45 unsigned long vsid; in hpte_need_flush() local
90 vsid = get_user_vsid(&mm->context, addr, ssize); in hpte_need_flush()
92 vsid = get_kernel_vsid(addr, mmu_kernel_ssize); in hpte_need_flush()
95 WARN_ON(vsid == 0); in hpte_need_flush()
96 vpn = hpt_vpn(addr, vsid, ssize); in hpte_need_flush()
hash_hugepage.c
21 int __hash_page_thp(unsigned long ea, unsigned long access, unsigned long vsid, in __hash_page_thp() argument
76 vpn = hpt_vpn(ea, vsid, ssize); in __hash_page_thp()
85 flush_hash_hugepage(vsid, ea, pmdp, MMU_PAGE_64K, in __hash_page_thp()
163 hash_failure_debug(ea, access, vsid, trap, ssize, in __hash_page_thp()
hash_64k.c
38 int __hash_page_4K(unsigned long ea, unsigned long access, unsigned long vsid, in __hash_page_4K() argument
90 vpn = hpt_vpn(ea, vsid, ssize); in __hash_page_4K()
213 hash_failure_debug(ea, access, vsid, trap, ssize, in __hash_page_4K()
229 unsigned long vsid, pte_t *ptep, unsigned long trap, in __hash_page_64K() argument
275 vpn = hpt_vpn(ea, vsid, ssize); in __hash_page_64K()
328 hash_failure_debug(ea, access, vsid, trap, ssize, in __hash_page_64K()
slb.c
78 WRITE_ONCE(p->save_area[index].vsid, cpu_to_be64(mk_vsid_data(ea, ssize, flags))); in slb_shadow_update()
117 : "r" (be64_to_cpu(p->save_area[index].vsid)), in __slb_restore_bolted_realmode()
164 ksp_vsid_data = be64_to_cpu(p->save_area[KSTACK_INDEX].vsid); in __slb_flush_and_restore_bolted()
217 slb_ptr->vsid = v; in slb_save_contents()
235 v = slb_ptr->vsid; in slb_dump_contents()
674 unsigned long vsid; in slb_insert_entry() local
678 vsid = get_vsid(context, ea, ssize); in slb_insert_entry()
679 if (!vsid) in slb_insert_entry()
695 vsid_data = __mk_vsid_data(vsid, ssize, flags); in slb_insert_entry()
hash_4k.c
21 int __hash_page_4K(unsigned long ea, unsigned long access, unsigned long vsid, in __hash_page_4K() argument
66 vpn = hpt_vpn(ea, vsid, ssize); in __hash_page_4K()
117 hash_failure_debug(ea, access, vsid, trap, ssize, in __hash_page_4K()
hugetlbpage.c
20 int __hash_page_huge(unsigned long ea, unsigned long access, unsigned long vsid, in __hash_page_huge() argument
33 vpn = hpt_vpn(ea, vsid, ssize); in __hash_page_huge()
112 hash_failure_debug(ea, access, vsid, trap, ssize, in __hash_page_huge()
hash_pgtable.c
317 unsigned long vsid; in hpte_do_hugepage_flush() local
332 vsid = get_user_vsid(&mm->context, addr, ssize); in hpte_do_hugepage_flush()
333 WARN_ON(vsid == 0); in hpte_do_hugepage_flush()
335 vsid = get_kernel_vsid(addr, mmu_kernel_ssize); in hpte_do_hugepage_flush()
342 return flush_hash_hugepage(vsid, addr, pmdp, psize, ssize, flags); in hpte_do_hugepage_flush()
/linux/drivers/misc/cxl/
fault.c
24 return ((sste->vsid_data == cpu_to_be64(slb->vsid)) && in sste_matches()
40 if (slb->vsid & SLB_VSID_B_1T) in find_free_sste()
75 sste - ctx->sstp, slb->vsid, slb->esid); in cxl_load_segment()
76 trace_cxl_ste_write(ctx, sste - ctx->sstp, slb->esid, slb->vsid); in cxl_load_segment()
78 sste->vsid_data = cpu_to_be64(slb->vsid); in cxl_load_segment()
283 static u64 next_segment(u64 ea, u64 vsid) in next_segment() argument
285 if (vsid & SLB_VSID_B_1T) in next_segment()
304 ea = next_segment(ea, slb.vsid)) { in cxl_prefault_vma()
main.c
110 unsigned long vsid; in cxl_alloc_sst() local
125 vsid = get_kernel_vsid((u64)ctx->sstp, mmu_kernel_ssize) << 12; in cxl_alloc_sst()
142 sstp0 |= vsid >> (50-14); /* Top 14 bits of VSID */ in cxl_alloc_sst()
143 sstp1 |= (vsid << (64-(50-14))) & ~ea_mask; in cxl_alloc_sst()
148 (u64)ctx->sstp, (u64)ctx->sstp & ESID_MASK, mmu_kernel_ssize, vsid, sstp0, sstp1); in cxl_alloc_sst()
/linux/arch/microblaze/include/asm/
mmu.h
20 unsigned long vsid:24; /* Virtual segment identifier */ member
48 unsigned long vsid:24; /* Virtual Segment Identifier */ member
/linux/arch/powerpc/mm/ptdump/
hashpagetable.c
210 unsigned long hash, vsid, vpn, hpte_group, want_v, hpte_v; in native_find() local
215 vsid = get_kernel_vsid(ea, ssize); in native_find()
216 vpn = hpt_vpn(ea, vsid, ssize); in native_find()
245 unsigned long vsid, vpn, hash, hpte_group, want_v; in pseries_find() local
251 vsid = get_kernel_vsid(ea, ssize); in pseries_find()
252 vpn = hpt_vpn(ea, vsid, ssize); in pseries_find()
/linux/arch/powerpc/platforms/pseries/
lpar.c
1017 unsigned long lpar_rc, slot, vsid, flags; in pSeries_lpar_hpte_updateboltedpp() local
1019 vsid = get_kernel_vsid(ea, ssize); in pSeries_lpar_hpte_updateboltedpp()
1020 vpn = hpt_vpn(ea, vsid, ssize); in pSeries_lpar_hpte_updateboltedpp()
1271 vpn = hpt_vpn(addr, vsid, ssize); in pSeries_lpar_hugepage_invalidate()
1311 unsigned long slot, vsid; in pSeries_lpar_hpte_removebolted() local
1313 vsid = get_kernel_vsid(ea, ssize); in pSeries_lpar_hpte_removebolted()
1314 vpn = hpt_vpn(ea, vsid, ssize); in pSeries_lpar_hpte_removebolted()
1951 if (vsid >= vsid_modulus) in vsid_unscramble()
1961 max_mod_inv = 0x7fffffffffffffffull / vsid; in vsid_unscramble()
1963 return (vsid * modinv) % vsid_modulus; in vsid_unscramble()
[all …]
/linux/arch/powerpc/include/asm/
copro.h
13 u64 esid, vsid; member
lppaca.h
137 __be64 vsid; member
