Searched refs:ptes (Results 1 – 25 of 42) sorted by relevance

/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmmnv44.c
38 while (ptes--) { in nv44_vmm_pgt_fill()
84 ptes -= pten; in nv44_vmm_pgt_pte()
94 ptes -= 4; in nv44_vmm_pgt_pte()
97 if (ptes) { in nv44_vmm_pgt_pte()
121 ptes -= pten; in nv44_vmm_pgt_dma()
133 ptes -= 4; in nv44_vmm_pgt_dma()
136 if (ptes) { in nv44_vmm_pgt_dma()
138 map->dma += ptes; in nv44_vmm_pgt_dma()
155 ptes -= pten; in nv44_vmm_pgt_unmap()
163 ptes -= 4; in nv44_vmm_pgt_unmap()
[all …]
vmmgp100.c
41 while (ptes--) { in gp100_vmm_pfn_unmap()
60 while (ptes--) { in gp100_vmm_pfn_clear()
82 for (; ptes; ptes--, map->pfn++) { in gp100_vmm_pgt_pfn()
120 map->type += ptes * map->ctag; in gp100_vmm_pgt_pte()
122 while (ptes--) { in gp100_vmm_pgt_pte()
142 while (ptes--) { in gp100_vmm_pgt_dma()
203 map->type += ptes * map->ctag; in gp100_vmm_pd0_pte()
205 while (ptes--) { in gp100_vmm_pd0_pte()
275 while (ptes--) { in gp100_vmm_pd0_pfn_unmap()
296 while (ptes--) { in gp100_vmm_pd0_pfn_clear()
[all …]
vmmnv41.c
28 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv41_vmm_pgt_pte() argument
31 while (ptes--) { in nv41_vmm_pgt_pte()
39 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv41_vmm_pgt_sgl() argument
41 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_sgl()
46 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv41_vmm_pgt_dma() argument
50 while (ptes--) { in nv41_vmm_pgt_dma()
56 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_dma()
62 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv41_vmm_pgt_unmap() argument
64 VMM_FO032(pt, vmm, ptei * 4, 0, ptes); in nv41_vmm_pgt_unmap()
vmmnv04.c
29 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv04_vmm_pgt_pte() argument
32 while (ptes--) { in nv04_vmm_pgt_pte()
40 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv04_vmm_pgt_sgl() argument
42 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_sgl()
47 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv04_vmm_pgt_dma() argument
51 while (ptes--) in nv04_vmm_pgt_dma()
55 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_dma()
61 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv04_vmm_pgt_unmap() argument
63 VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes); in nv04_vmm_pgt_unmap()
vmmnv50.c
39 map->type += ptes * map->ctag; in nv50_vmm_pgt_pte()
41 while (ptes) { in nv50_vmm_pgt_pte()
44 if (ptes >= pten && IS_ALIGNED(ptei, pten)) in nv50_vmm_pgt_pte()
50 ptes -= pten; in nv50_vmm_pgt_pte()
59 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv50_vmm_pgt_sgl() argument
66 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv50_vmm_pgt_dma() argument
69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in nv50_vmm_pgt_dma()
71 while (ptes--) { in nv50_vmm_pgt_dma()
85 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv50_vmm_pgt_mem() argument
92 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv50_vmm_pgt_unmap() argument
[all …]
vmmgf100.c
33 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in gf100_vmm_pgt_pte() argument
39 while (ptes--) { in gf100_vmm_pgt_pte()
48 map->type += ptes * map->ctag; in gf100_vmm_pgt_pte()
50 while (ptes--) { in gf100_vmm_pgt_pte()
59 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gf100_vmm_pgt_sgl() argument
66 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gf100_vmm_pgt_dma() argument
69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in gf100_vmm_pgt_dma()
71 while (ptes--) { in gf100_vmm_pgt_dma()
85 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gf100_vmm_pgt_mem() argument
92 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gf100_vmm_pgt_unmap() argument
[all …]
vmm.c
212 ptes -= pten; in nvkm_vmm_unref_sptes()
222 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) { in nvkm_vmm_unref_sptes()
236 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) { in nvkm_vmm_unref_sptes()
311 ptes -= pten; in nvkm_vmm_ref_sptes()
321 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) { in nvkm_vmm_ref_sptes()
335 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) { in nvkm_vmm_ref_sptes()
385 while (ptes--) in nvkm_vmm_sparse_ptes()
423 u32 pteb, ptei, ptes; in nvkm_vmm_ref_hwpt() local
449 for (ptes = 1, ptei++; ptei < pten; ptes++, ptei++) { in nvkm_vmm_ref_hwpt()
463 while (ptes--) in nvkm_vmm_ref_hwpt()
[all …]
vmmgk104.c
26 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gk104_vmm_lpt_invalid() argument
29 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(1) /* PRIV. */, ptes); in gk104_vmm_lpt_invalid()
vmmgm200.c
29 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gm200_vmm_pgt_sparse() argument
32 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes); in gm200_vmm_pgt_sparse()
vmm.h
54 struct nvkm_mmu_pt *, u32 ptei, u32 ptes);
58 u32 ptei, u32 ptes, struct nvkm_vmm_map *);
72 bool (*pfn_clear)(struct nvkm_vmm *, struct nvkm_mmu_pt *, u32 ptei, u32 ptes);
/linux/arch/x86/xen/
grant-table.c
27 pte_t **ptes; member
45 set_pte_at(&init_mm, addr, gnttab_shared_vm_area.ptes[i], in arch_gnttab_map_shared()
77 pte_t **ptes; in arch_gnttab_unmap() local
82 ptes = gnttab_status_vm_area.ptes; in arch_gnttab_unmap()
84 ptes = gnttab_shared_vm_area.ptes; in arch_gnttab_unmap()
89 set_pte_at(&init_mm, addr, ptes[i], __pte(0)); in arch_gnttab_unmap()
98 area->ptes[area->idx++] = pte; in gnttab_apply()
104 area->ptes = kmalloc_array(nr_frames, sizeof(*area->ptes), GFP_KERNEL); in arch_gnttab_valloc()
105 if (area->ptes == NULL) in arch_gnttab_valloc()
117 kfree(area->ptes); in arch_gnttab_valloc()
[all …]
/linux/block/partitions/
efi.c
341 if (!ptes) in is_gpt_valid()
445 kfree(*ptes); in is_gpt_valid()
446 *ptes = NULL; in is_gpt_valid()
582 gpt_entry **ptes) in find_valid_gpt() argument
593 if (!ptes) in find_valid_gpt()
643 *ptes = pptes; in find_valid_gpt()
665 *ptes = NULL; in find_valid_gpt()
716 gpt_entry *ptes = NULL; in efi_partition() local
720 if (!find_valid_gpt(state, &gpt, &ptes) || !gpt || !ptes) { in efi_partition()
722 kfree(ptes); in efi_partition()
[all …]
/linux/arch/alpha/kernel/
pci_iommu.c
79 if (!arena->ptes) in iommu_arena_new_node()
108 unsigned long *ptes; in iommu_arena_find_pages() local
118 ptes = arena->ptes; in iommu_arena_find_pages()
130 if (ptes[p+i]) { in iommu_arena_find_pages()
164 unsigned long *ptes; in iommu_arena_alloc() local
170 ptes = arena->ptes; in iommu_arena_alloc()
542 unsigned long *ptes; in sg_fill() local
597 ptes = &arena->ptes[dma_ofs]; in sg_fill()
840 ptes = arena->ptes; in iommu_reserve()
867 ptes = arena->ptes; in iommu_release()
[all …]
core_titan.c
328 port->tba[0].csr = virt_to_phys(hose->sg_isa->ptes); in titan_init_one_pachip_port()
336 port->tba[2].csr = virt_to_phys(hose->sg_pci->ptes); in titan_init_one_pachip_port()
463 unsigned long *ptes; in titan_ioremap() local
518 ptes = hose->sg_pci->ptes; in titan_ioremap()
522 pfn = ptes[baddr >> PAGE_SHIFT]; in titan_ioremap()
711 pte = aper->arena->ptes[baddr >> PAGE_SHIFT]; in titan_agp_translate()
core_marvel.c
295 csrs->POx_TBASE[0].csr = virt_to_phys(hose->sg_isa->ptes); in io7_init_hose()
312 csrs->POx_TBASE[2].csr = virt_to_phys(hose->sg_pci->ptes); in io7_init_hose()
690 unsigned long *ptes; in marvel_ioremap() local
745 ptes = hose->sg_pci->ptes; in marvel_ioremap()
749 pfn = ptes[baddr >> PAGE_SHIFT]; in marvel_ioremap()
1003 pte = aper->arena->ptes[baddr >> PAGE_SHIFT]; in marvel_agp_translate()
core_cia.c
464 arena->ptes[4] = pte0; in verify_tb_operation()
488 arena->ptes[5] = pte0; in verify_tb_operation()
524 arena->ptes[4] = 0; in verify_tb_operation()
525 arena->ptes[5] = 0; in verify_tb_operation()
737 *(vip)CIA_IOC_PCI_T0_BASE = virt_to_phys(hose->sg_isa->ptes) >> 2; in do_init_arch()
pci_impl.h
139 unsigned long *ptes; member
core_tsunami.c
337 pchip->tba[0].csr = virt_to_phys(hose->sg_isa->ptes); in tsunami_init_one_pchip()
341 pchip->tba[1].csr = virt_to_phys(hose->sg_pci->ptes); in tsunami_init_one_pchip()
/linux/arch/powerpc/mm/ptdump/
hashpagetable.c
244 } ptes[4]; in pseries_find() local
262 lpar_rc = plpar_pte_read_4(0, hpte_group, (void *)ptes); in pseries_find()
267 if (HPTE_V_COMPARE(ptes[j].v, want_v) && in pseries_find()
268 (ptes[j].v & HPTE_V_VALID)) { in pseries_find()
270 *v = ptes[j].v; in pseries_find()
271 *r = ptes[j].r; in pseries_find()
/linux/arch/powerpc/include/asm/
plpar_wrappers.h
176 unsigned long *ptes) in plpar_pte_read_4() argument
184 memcpy(ptes, retbuf, 8*sizeof(unsigned long)); in plpar_pte_read_4()
194 unsigned long *ptes) in plpar_pte_read_4_raw() argument
202 memcpy(ptes, retbuf, 8*sizeof(unsigned long)); in plpar_pte_read_4_raw()
606 unsigned long *ptes) in plpar_pte_read_4() argument
/linux/arch/powerpc/platforms/pseries/
lpar.c
854 } ptes[4]; in manual_hpte_clear_all() local
863 lpar_rc = plpar_pte_read_4_raw(0, i, (void *)ptes); in manual_hpte_clear_all()
870 if ((ptes[j].pteh & HPTE_V_VRMA_MASK) == in manual_hpte_clear_all()
873 if (ptes[j].pteh & HPTE_V_VALID) in manual_hpte_clear_all()
875 &(ptes[j].pteh), &(ptes[j].ptel)); in manual_hpte_clear_all()
965 } ptes[4]; in __pSeries_lpar_hpte_find() local
969 lpar_rc = plpar_pte_read_4(0, hpte_group, (void *)ptes); in __pSeries_lpar_hpte_find()
977 if (HPTE_V_COMPARE(ptes[j].pteh, want_v) && in __pSeries_lpar_hpte_find()
978 (ptes[j].pteh & HPTE_V_VALID)) in __pSeries_lpar_hpte_find()
/linux/arch/arm64/kernel/pi/
map_kernel.c
190 static u8 ptes[INIT_IDMAP_FDT_SIZE] __initdata __aligned(PAGE_SIZE); in map_fdt() local
192 u64 ptep = (u64)ptes; in map_fdt()
/linux/arch/x86/kvm/mmu/
paging_tmpl.h
84 pt_element_t ptes[PT_MAX_FULL_LEVELS]; member
213 pte = orig_pte = walker->ptes[level - 1]; in FNAME()
254 walker->ptes[level - 1] = pte; in FNAME()
425 walker->ptes[walker->level - 1] = pte; in FNAME()
581 return r || curr_pte != gw->ptes[level - 1]; in FNAME()
/linux/arch/powerpc/mm/
hugetlbpage.c
258 void *ptes[]; member
270 kmem_cache_free(PGT_CACHE(PTE_T_ORDER), batch->ptes[i]); in hugepd_free_rcu_callback()
293 (*batchp)->ptes[(*batchp)->index++] = hugepte; in hugepd_free()
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/gsp/
r535.c
1673 gsp->shm.ptes.nr = (gsp->shm.cmdq.size + gsp->shm.msgq.size) >> GSP_PAGE_SHIFT; in r535_gsp_shared_init()
1674 gsp->shm.ptes.nr += DIV_ROUND_UP(gsp->shm.ptes.nr * sizeof(u64), GSP_PAGE_SIZE); in r535_gsp_shared_init()
1675 gsp->shm.ptes.size = ALIGN(gsp->shm.ptes.nr * sizeof(u64), GSP_PAGE_SIZE); in r535_gsp_shared_init()
1677 ret = nvkm_gsp_mem_ctor(gsp, gsp->shm.ptes.size + in r535_gsp_shared_init()
1684 gsp->shm.ptes.ptr = gsp->shm.mem.data; in r535_gsp_shared_init()
1685 gsp->shm.cmdq.ptr = (u8 *)gsp->shm.ptes.ptr + gsp->shm.ptes.size; in r535_gsp_shared_init()
1688 for (i = 0; i < gsp->shm.ptes.nr; i++) in r535_gsp_shared_init()
1689 gsp->shm.ptes.ptr[i] = gsp->shm.mem.addr + (i << GSP_PAGE_SHIFT); in r535_gsp_shared_init()
1730 args->messageQueueInitArguments.pageTableEntryCount = gsp->shm.ptes.nr; in r535_gsp_rmargs_init()
1777 static void create_pte_array(u64 *ptes, dma_addr_t addr, size_t size) in create_pte_array() argument
[all …]
