
Searched refs: __va (Results 1 – 25 of 239), sorted by relevance


/linux/arch/x86/platform/intel-quark/
imr_selftest.c
72 imr_self_test_result(ret < 0, fmt_over, __va(base), __va(base + size)); in imr_self_test()
77 imr_self_test_result(ret < 0, fmt_over, __va(base), __va(base + size)); in imr_self_test()
82 imr_self_test_result(ret < 0, fmt_over, __va(base), __va(base + size)); in imr_self_test()
/linux/arch/x86/include/asm/
page.h
57 #ifndef __va
58 #define __va(x) ((void *)((unsigned long)(x)+PAGE_OFFSET)) macro
61 #define __boot_va(x) __va(x)
74 return __va(pfn << PAGE_SHIFT); in pfn_to_kaddr()
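
Note on the x86 definition above: __va() simply adds PAGE_OFFSET to a physical address, yielding the kernel's direct-map (lowmem) virtual address, and pfn_to_kaddr() does the same for a page frame number. The following is a minimal user-space sketch of that arithmetic only, with an illustrative PAGE_OFFSET value rather than the real kernel constant; demo_va() and demo_pfn_to_kaddr() are hypothetical names, not kernel APIs.

#include <stdio.h>

/* Illustrative constants only; the real values come from the kernel headers above. */
#define PAGE_SHIFT   12
#define PAGE_OFFSET  0xffff888000000000UL   /* example direct-map base, assumption */

/* Same shape as the __va() macro: physical address -> direct-map virtual address. */
static void *demo_va(unsigned long paddr)
{
        return (void *)(paddr + PAGE_OFFSET);
}

/* Same shape as pfn_to_kaddr(): page frame number -> direct-map virtual address. */
static void *demo_pfn_to_kaddr(unsigned long pfn)
{
        return demo_va(pfn << PAGE_SHIFT);
}

int main(void)
{
        unsigned long paddr = 0x1000;

        printf("phys 0x%lx -> virt %p\n", paddr, demo_va(paddr));
        printf("pfn 1 -> virt %p\n", demo_pfn_to_kaddr(1));
        return 0;
}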
/linux/arch/m68k/include/asm/
page_no.h
20 #define __va(paddr) ((void *)((unsigned long)(paddr))) macro
29 return __va(pfn << PAGE_SHIFT); in pfn_to_virt()
33 #define page_to_virt(page) __va(((((page) - mem_map) << PAGE_SHIFT) + PAGE_OFFSET))
motorola_pgtable.h
106 #define __pte_page(pte) ((unsigned long)__va(pte_val(pte) & PAGE_MASK))
107 #define pmd_page_vaddr(pmd) ((unsigned long)__va(pmd_val(pmd) & _TABLE_MASK))
108 #define pud_pgtable(pud) ((pmd_t *)__va(pud_val(pud) & _TABLE_MASK))
116 #define pte_page(pte) virt_to_page(__va(pte_val(pte)))
138 #define pud_page(pud) (mem_map + ((unsigned long)(__va(pud_val(pud)) - PAGE_OFFSET) >> PAGE_SHIFT))
page_mm.h
81 static inline void *__va(unsigned long paddr) in __va() function
105 static inline void *__va(unsigned long x) in __va() function
130 return __va(pfn << PAGE_SHIFT); in pfn_to_virt()
/linux/arch/powerpc/include/asm/
page.h
197 #define __va(x) ((void *)(unsigned long)((phys_addr_t)(x) + VIRT_PHYS_OFFSET)) macro
209 #define __va(x) \ macro
222 #define __va(x) ((void *)(unsigned long)((phys_addr_t)(x) + PAGE_OFFSET - MEMORY_START)) macro
235 return __va(pfn << PAGE_SHIFT); in pfn_to_kaddr()
sections.h
68 return start < (unsigned long)__va(real_end) && in overlaps_interrupt_vector_text()
69 (unsigned long)__va(real_start) < end; in overlaps_interrupt_vector_text()
/linux/arch/loongarch/include/asm/
page.h
73 #define __va(x) ((void *)((unsigned long)(x) + PAGE_OFFSET - PHYS_OFFSET)) macro
75 #define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT)
89 #define page_to_virt(page) __va(page_to_phys(page))
99 (__kfence_pool == NULL) ? __va(page_to_phys(page)) : page_address(page); \
/linux/arch/x86/mm/
mem_encrypt_amd.c
67 early_snp_set_memory_shared((unsigned long)__va(paddr), paddr, npages); in snp_memcpy()
72 early_snp_set_memory_private((unsigned long)__va(paddr), paddr, npages); in snp_memcpy()
194 __sme_early_map_unmap_mem(__va(cmdline_paddr), COMMAND_LINE_SIZE, false); in sme_unmap_bootdata()
214 __sme_early_map_unmap_mem(__va(cmdline_paddr), COMMAND_LINE_SIZE, true); in sme_map_bootdata()
342 clflush_cache_range(__va(pa), size); in __set_clr_pte_enc()
354 early_snp_set_memory_shared((unsigned long)__va(pa), pa, 1); in __set_clr_pte_enc()
366 early_snp_set_memory_private((unsigned long)__va(pa), pa, 1); in __set_clr_pte_enc()
numa_32.c
44 high_memory = (void *) __va(highstart_pfn * PAGE_SIZE - 1) + 1; in initmem_init()
46 high_memory = (void *) __va(max_low_pfn * PAGE_SIZE - 1) + 1; in initmem_init()
/linux/arch/openrisc/mm/
init.c
82 v = (u32) __va(p); in map_ram()
155 unsigned long *dtlb_vector = __va(0x900); in paging_init()
156 unsigned long *itlb_vector = __va(0xa00); in paging_init()
197 high_memory = (void *)__va(max_low_pfn * PAGE_SIZE); in mem_init()
/linux/arch/loongarch/mm/
kasan_init.c
112 memcpy(__va(pte_phys), kasan_early_shadow_pte, sizeof(kasan_early_shadow_pte)); in kasan_pte_offset()
113 pmd_populate_kernel(NULL, pmdp, (pte_t *)__va(pte_phys)); in kasan_pte_offset()
125 memcpy(__va(pmd_phys), kasan_early_shadow_pmd, sizeof(kasan_early_shadow_pmd)); in kasan_pmd_offset()
126 pud_populate(&init_mm, pudp, (pmd_t *)__va(pmd_phys)); in kasan_pmd_offset()
138 memcpy(__va(pud_phys), kasan_early_shadow_pud, sizeof(kasan_early_shadow_pud)); in kasan_pud_offset()
139 p4d_populate(&init_mm, p4dp, (pud_t *)__va(pud_phys)); in kasan_pud_offset()
/linux/arch/riscv/mm/
init.c
282 high_memory = (void *)(__va(PFN_PHYS(max_low_pfn))); in setup_bootmem()
375 return (pte_t *) __va(pa); in get_pte_virt_late()
457 return (pmd_t *) __va(pa); in get_pmd_virt_late()
520 return (pud_t *)__va(pa); in get_pud_virt_late()
558 return (p4d_t *)__va(pa); in get_p4d_virt_late()
637 pt_ops.alloc_p4d(__va) : (pgtable_l4_enabled ? \
638 pt_ops.alloc_pud(__va) : pt_ops.alloc_pmd(__va)))
656 #define alloc_pgd_next(__va) pt_ops.alloc_pte(__va) argument
1252 va = (uintptr_t)__va(pa); in create_linear_mapping_range()
1288 __kfence_pool = __va(kfence_pool); in create_linear_mapping_page_table()
[all …]
kasan_init.c
45 memset(__va(phys_addr), KASAN_SHADOW_INIT, PAGE_SIZE); in kasan_populate_pte()
71 memset(__va(phys_addr), KASAN_SHADOW_INIT, PMD_SIZE); in kasan_populate_pmd()
102 memset(__va(phys_addr), KASAN_SHADOW_INIT, PUD_SIZE); in kasan_populate_pud()
133 memset(__va(phys_addr), KASAN_SHADOW_INIT, P4D_SIZE); in kasan_populate_p4d()
156 memset(__va(phys_addr), KASAN_SHADOW_INIT, PGDIR_SIZE); in kasan_populate_pgd()
512 void *start = (void *)__va(p_start); in kasan_init()
513 void *end = (void *)__va(p_end); in kasan_init()
/linux/arch/microblaze/include/asm/
page.h
103 # define page_to_virt(page) __va(page_to_pfn(page) << PAGE_SHIFT)
123 # define __va(x) ((void *)__phys_to_virt((unsigned long)(x))) macro
132 return __va(pfn_to_phys((pfn))); in pfn_to_virt()
/linux/arch/arm/mm/
dma-mapping-nommu.c
19 dmac_map_area(__va(paddr), size, dir); in arch_sync_dma_for_device()
32 dmac_unmap_area(__va(paddr), size, dir); in arch_sync_dma_for_cpu()
/linux/tools/include/linux/
mm.h
17 #define __va(x) ((void *)((unsigned long)(x))) macro
25 return __va(address); in phys_to_virt()
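
Note on the tools/include copy above: there __va() is an identity cast, so host-side test programs built against these headers can treat a "physical" address and a virtual address as the same value. A small sketch of that identity behaviour under the same assumption; demo_va() and demo_phys_to_virt() are hypothetical names used only for illustration.

#include <assert.h>
#include <stdio.h>

/* Identity stubs mirroring the tools/include definitions above. */
#define demo_va(x) ((void *)((unsigned long)(x)))

static void *demo_phys_to_virt(unsigned long address)
{
        return demo_va(address);
}

int main(void)
{
        unsigned long buf[4];

        /* In a host build there is no direct map: the "physical" address is the pointer value. */
        assert(demo_phys_to_virt((unsigned long)buf) == (void *)buf);
        printf("identity mapping holds for %p\n", (void *)buf);
        return 0;
}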
/linux/arch/x86/realmode/
init.c
137 __va(real_mode_header->trampoline_header); in setup_real_mode()
160 trampoline_pgd = (u64 *) __va(real_mode_header->trampoline_pgd); in setup_real_mode()
199 (unsigned long) __va(real_mode_header->text_start); in set_real_mode_permissions()
/linux/arch/x86/include/asm/uv/
uv_hub.h
538 return __va(((unsigned long)pnode << m_val) | offset); in uv_pnode_offset_to_vaddr()
544 return __va((unsigned long)offset); in uv_pnode_offset_to_vaddr()
547 return __va(base << UV_GAM_RANGE_SHFT | offset); in uv_pnode_offset_to_vaddr()
565 return __va(UV_GLOBAL_MMR32_BASE | in uv_global_mmr32_address()
585 return __va(UV_GLOBAL_MMR64_BASE | in uv_global_mmr64_address()
615 return __va(UV_LOCAL_MMR_BASE | offset); in uv_local_mmr_address()
/linux/arch/sparc/include/asm/
page_64.h
147 #define __va(x) ((void *)((unsigned long) (x) + PAGE_OFFSET)) macro
149 #define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT)
156 #define phys_to_virt __va
/linux/arch/x86/include/asm/numachip/
numachip_csr.h
41 return __va(NUMACHIP_LCSR_BASE | (1UL << 15) | in lcsr_address()
69 return (void __iomem *)__va(NUMACHIP2_LCSR_BASE | in numachip2_lcsr_address()
/linux/arch/parisc/kernel/
setup.c
54 strscpy(boot_command_line, (char *)__va(boot_args[1]), in setup_cmdline()
74 initrd_start = (unsigned long)__va(boot_args[2]); in setup_cmdline()
75 initrd_end = (unsigned long)__va(boot_args[3]); in setup_cmdline()
/linux/arch/powerpc/platforms/powernv/
opal-fadump.c
193 opal_fdm = __va(fadump_conf->kernel_metadata); in opal_fadump_init_mem_struct()
240 opal_fdm = __va(fadump_conf->kernel_metadata); in opal_fadump_setup_metadata()
382 (u64)__va(be64_to_cpu(opal_cpu_metadata->region[0].dest)); in is_opal_fadump_cpu_data_valid()
431 bufp = __va(fadump_conf->cpu_state_dest_vaddr); in opal_fadump_build_cpu_notes()
529 fdh = __va(fadump_conf->fadumphdr_addr); in opal_fadump_process()
695 opal_fdm_active = __va(addr); in opal_fadump_dt_scan()
712 opal_cpu_metadata = __va(addr); in opal_fadump_dt_scan()
/linux/arch/s390/include/asm/
dma-types.h
45 return __va((__force unsigned long)addr); in dma32_to_virt()
80 return __va((__force unsigned long)addr); in dma64_to_virt()
/linux/arch/nios2/include/asm/
page.h
81 # define __va(x) \ macro
87 # define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT)
