
Searched for refs:PAGE_SIZE (results 1 – 25 of 375, sorted by relevance)


/dragonfly/test/testcases/libnvmm/
h_io_assist.c
47 #define PAGE_SIZE 4096 (macro definition)
179 if (nvmm_hva_map(mach, (uintptr_t)L4, PAGE_SIZE) == -1) in map_pages()
181 if (nvmm_hva_map(mach, (uintptr_t)L3, PAGE_SIZE) == -1) in map_pages()
183 if (nvmm_hva_map(mach, (uintptr_t)L2, PAGE_SIZE) == -1) in map_pages()
205 memset(L4, 0, PAGE_SIZE); in map_pages()
206 memset(L3, 0, PAGE_SIZE); in map_pages()
207 memset(L2, 0, PAGE_SIZE); in map_pages()
208 memset(L1, 0, PAGE_SIZE); in map_pages()
213 L1[0x2000 / PAGE_SIZE] = PTE_P | PTE_W | 0x2000; in map_pages()
214 L1[0x1000 / PAGE_SIZE] = PTE_P | PTE_W | 0x1000; in map_pages()
[all …]
h_mem_assist.c
47 #define PAGE_SIZE 4096 (macro definition)
59 static uint8_t mmiobuf[PAGE_SIZE];
154 memset(mmiobuf, 0, PAGE_SIZE); in run_test()
315 if (nvmm_hva_map(mach, (uintptr_t)L4, PAGE_SIZE) == -1) in map_pages64()
317 if (nvmm_hva_map(mach, (uintptr_t)L3, PAGE_SIZE) == -1) in map_pages64()
341 memset(L4, 0, PAGE_SIZE); in map_pages64()
342 memset(L3, 0, PAGE_SIZE); in map_pages64()
343 memset(L2, 0, PAGE_SIZE); in map_pages64()
344 memset(L1, 0, PAGE_SIZE); in map_pages64()
349 L1[0x2000 / PAGE_SIZE] = PTE_P | PTE_W | 0x2000; in map_pages64()
[all …]
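Both NVMM test harnesses above follow the same recipe: every paging structure (L4 down to L1) occupies exactly one PAGE_SIZE page, each page is registered with the hypervisor via nvmm_hva_map() and zeroed, and leaf entries are written as a physical address OR'd with the present/writable bits, indexed by virtual address divided by PAGE_SIZE. A minimal sketch of the leaf-entry step, assuming the conventional x86-64 bit values for PTE_P and PTE_W:

    #include <stdint.h>

    #define PAGE_SIZE 4096
    #define PTE_P     0x001ULL  /* present */
    #define PTE_W     0x002ULL  /* writable */

    /* Identity-map one low 4 KiB guest page: for addresses below 2 MiB the
     * L1 index is simply va / PAGE_SIZE, exactly as in the tests above. */
    static void
    map_low_page(uint64_t *L1, uint64_t va)
    {
            L1[va / PAGE_SIZE] = va | PTE_P | PTE_W;
    }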
/dragonfly/sys/kern/
kern_xio.c
121 if ((n = kbytes) > PAGE_SIZE) in xio_init_kbuf()
122 n = PAGE_SIZE; in xio_init_kbuf()
123 addr += PAGE_SIZE; in xio_init_kbuf()
257 if ((n = bytes) > PAGE_SIZE) in xio_copy_xtou()
258 n = PAGE_SIZE; in xio_copy_xtou()
304 if ((n = bytes) > PAGE_SIZE) in xio_copy_xtok()
305 n = PAGE_SIZE; in xio_copy_xtok()
353 if ((n = bytes) > PAGE_SIZE) in xio_copy_utox()
354 n = PAGE_SIZE; in xio_copy_utox()
400 if ((n = bytes) > PAGE_SIZE) in xio_copy_ktox()
[all …]
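Each of these copy routines clamps the per-iteration transfer with the same two lines: take the remaining byte count and cap it at PAGE_SIZE, so no single step crosses more than one entry of the XIO's page array. As a standalone loop the idiom looks like this (copy_chunk() is a hypothetical stand-in for the per-page copy):

    /* Copy 'bytes' bytes one page-sized chunk at a time. */
    while (bytes > 0) {
            size_t n = bytes;

            if (n > PAGE_SIZE)
                    n = PAGE_SIZE;
            copy_chunk(dst, src, n);  /* hypothetical per-page copy */
            src += n;
            dst += n;
            bytes -= n;
    }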
imgact_shell.c
75 while (ihp < &image_header[PAGE_SIZE]) { in exec_shell_imgact()
90 } while (ihp < &image_header[PAGE_SIZE] && in exec_shell_imgact()
103 KKASSERT(offset <= PAGE_SIZE); in exec_shell_imgact()
106 if (ihp == &image_header[PAGE_SIZE]) in exec_shell_imgact()
138 while (ihp < &image_header[PAGE_SIZE]) { in exec_shell_imgact()
154 } while (ihp < &image_header[PAGE_SIZE] && in exec_shell_imgact()
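The shebang parser never trusts the header to be terminated: every scan over the interpreter line is bounded by &image_header[PAGE_SIZE], because only the first page of the executable is guaranteed to be mapped. A sketch of the bounded scan under those assumptions:

    /* Scan the "#!" line, refusing to walk past the mapped first page. */
    const char *ihp = image_header + 2;        /* skip "#!" */
    const char *end = &image_header[PAGE_SIZE];

    while (ihp < end && *ihp != '\n')
            ihp++;
    if (ihp == end)
            return (ENOEXEC);                  /* no terminator within the page */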
sys_process.c
96 IDX_TO_OFF(pindex), &kva, PAGE_SIZE,
97 PAGE_SIZE, FALSE,
105 rv = vm_map_kernel_wiring(kernel_map, kva, kva + PAGE_SIZE, 0);
111 vm_map_remove (kernel_map, kva, kva + PAGE_SIZE);
145 if (vm_map_check_protection (map, pageno, pageno + PAGE_SIZE,
154 if ((rv = vm_map_protect (map, pageno, pageno + PAGE_SIZE,
191 IDX_TO_OFF(pindex), &kva, PAGE_SIZE,
192 PAGE_SIZE, FALSE,
199 rv = vm_map_kernel_wiring(kernel_map, kva, kva + PAGE_SIZE, 0);
203 vm_map_remove (kernel_map, kva, kva + PAGE_SIZE);
[all …]
/dragonfly/sys/cpu/x86_64/include/
param.h
78 #define PAGE_SIZE (1<<PAGE_SHIFT) /* bytes/page */ (macro definition)
79 #define PAGE_MASK (PAGE_SIZE-1)
80 #define NPTEPG (PAGE_SIZE/8LU) /* PAGE_SIZE/sizeof(pt_entry_t) */
87 #define NPDEPG (PAGE_SIZE/8LU) /* PAGE_SIZE/sizeof(pd_entry_t) */
94 #define NPDPEPG (PAGE_SIZE/8LU) /* PAGE_SIZE/sizeof(pdp_entry_t) */
101 #define NPML4EPG (PAGE_SIZE/8LU) /* PAGE_SIZE/sizeof(pml4_entry_t) */
119 #define BLKDEV_IOSIZE PAGE_SIZE /* default block device I/O size */
123 #define MAXDUMPPGS (MAXPHYS/PAGE_SIZE)
192 #define pgtok(x) ((x) * (PAGE_SIZE / 1024))
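These definitions are the root of nearly every other hit in this search: PAGE_SIZE is derived from PAGE_SHIFT, PAGE_MASK covers the offset bits, and each page-table level holds PAGE_SIZE / 8 = 512 eight-byte entries. A small self-contained demonstration of the alignment arithmetic these macros enable (trunc_page()/round_page() shown here as the usual derivations, not copied from the header):

    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)  /* 4096 bytes/page */
    #define PAGE_MASK  (PAGE_SIZE - 1)

    #define trunc_page(x) ((x) & ~PAGE_MASK)               /* round down */
    #define round_page(x) (((x) + PAGE_MASK) & ~PAGE_MASK) /* round up   */

    int
    main(void)
    {
            unsigned long a = 0x12345;

            printf("%#lx -> trunc %#lx, round %#lx\n",
                a, trunc_page(a), round_page(a));   /* 0x12000, 0x13000 */
            printf("entries per table: %lu\n", PAGE_SIZE / 8UL);  /* 512 */
            return 0;
    }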
/dragonfly/sys/dev/drm/include/linux/
gfp.h
61 vm_page_free_contig((struct vm_page *)page, PAGE_SIZE); in __free_page()
72 PAGE_SIZE, PAGE_SIZE, PAGE_SIZE, in alloc_page()
90 size_t bytes = PAGE_SIZE << order; in alloc_pages()
105 size_t bytes = PAGE_SIZE << order; in __free_pages()
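The drm compatibility layer keeps Linux's buddy-allocator convention: an allocation "order" n means 2^n contiguous pages, so the byte size is PAGE_SIZE << order. A minimal sketch of that conversion:

    #include <stddef.h>

    #define PAGE_SIZE 4096

    /* order 0 -> 4096 B, order 1 -> 8192 B, order 4 -> 65536 B */
    static size_t
    order_to_bytes(unsigned int order)
    {
            return (size_t)PAGE_SIZE << order;
    }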
/dragonfly/sys/platform/pc64/include/
globaldata.h
104 (MDGLOBALDATA_BASEALLOC_SIZE / PAGE_SIZE)
126 char unused2[PAGE_SIZE];
137 char reserved1[PAGE_SIZE -
149 char dblstack[PAGE_SIZE * 2 -
152 char dbgstack[PAGE_SIZE * 2 -
157 char idlestack[UPAGES * PAGE_SIZE];
/dragonfly/sys/dev/virtual/nvmm/
nvmm_netbsd.c
151 ret = (void *)uvm_km_alloc(kernel_map, roundup(size, PAGE_SIZE), 0, in os_pagemem_zalloc()
154 OS_ASSERT((uintptr_t)ret % PAGE_SIZE == 0); in os_pagemem_zalloc()
162 uvm_km_free(kernel_map, (vaddr_t)ptr, roundup(size, PAGE_SIZE), in os_pagemem_free()
191 ret = uvm_pglistalloc(npages * PAGE_SIZE, 0, ~0UL, PAGE_SIZE, 0, in os_contigpa_zalloc()
196 _va = uvm_km_alloc(kernel_map, npages * PAGE_SIZE, 0, in os_contigpa_zalloc()
202 pmap_kenter_pa(_va + i * PAGE_SIZE, _pa + i * PAGE_SIZE, in os_contigpa_zalloc()
207 memset((void *)_va, 0, npages * PAGE_SIZE); in os_contigpa_zalloc()
215 uvm_pagefree(PHYS_TO_VM_PAGE(_pa + i * PAGE_SIZE)); in os_contigpa_zalloc()
225 pmap_kremove(va, npages * PAGE_SIZE); in os_contigpa_free()
227 uvm_km_free(kernel_map, va, npages * PAGE_SIZE, UVM_KMF_VAONLY); in os_contigpa_free()
[all …]
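A detail worth noting in os_pagemem_zalloc()/os_pagemem_free(): the requested size is normalized with roundup(size, PAGE_SIZE) on both the alloc and the free path, so the two calls always agree on the mapping length even when the caller passes an odd byte count. A sketch of that normalization, using the classic sys/param.h roundup() definition:

    #include <stddef.h>

    #define PAGE_SIZE 4096
    #define roundup(x, y) ((((x) + ((y) - 1)) / (y)) * (y))

    /* Whole pages backing a 'size'-byte request. */
    static size_t
    size_to_npages(size_t size)
    {
            return roundup(size, PAGE_SIZE) / PAGE_SIZE;
    }
    /* size_to_npages(1) == 1, size_to_npages(4096) == 1, size_to_npages(4097) == 2 */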
/dragonfly/sys/platform/pc64/x86_64/
minidump_machdep.c
151 for (i = 0; i < len; i += PAGE_SIZE) { in blk_write()
261 lpdpttl += PAGE_SIZE; in minidumpsys()
268 lpdpttl += PAGE_SIZE * NPTEPG; in minidumpsys()
273 pa += PAGE_SIZE; in minidumpsys()
282 lpdpttl += PAGE_SIZE; in minidumpsys()
290 lpdpttl += PAGE_SIZE; in minidumpsys()
312 dumpsize += PAGE_SIZE; in minidumpsys()
319 dumpsize += PAGE_SIZE; in minidumpsys()
356 error = blk_write(di, (char *)fakept, 0, PAGE_SIZE); in minidumpsys()
399 error = blk_write(di, (char *)fakept, 0, PAGE_SIZE); in minidumpsys()
[all …]
/dragonfly/test/nvmm/demo/smallkern/
main.c
111 static uint8_t idtstore[PAGE_SIZE] __aligned(PAGE_SIZE);
112 static uint8_t faultstack[PAGE_SIZE] __aligned(PAGE_SIZE);
119 memset(&idtstore, 0, PAGE_SIZE); in triple_fault()
220 smallkern_tss.tss_ist[0] = (uintptr_t)(&faultstack[PAGE_SIZE-1]) & ~0xf; in init_tss()
222 smallkern_tss.tss_ist1 = (uintptr_t)(&faultstack[PAGE_SIZE-1]) & ~0xf; in init_tss()
245 setregion(&region, &idtstore, PAGE_SIZE - 1); in init_idt()
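Two page-related tricks appear in this toy kernel: statically allocated, page-aligned buffers (__aligned(PAGE_SIZE)) for structures the CPU cares about, and an interrupt-stack pointer placed just below the top of its page and rounded down to 16 bytes, since the x86-64 ABI expects 16-byte stack alignment. A compilable sketch of both:

    #include <stdint.h>

    #define PAGE_SIZE 4096
    #define __aligned(x) __attribute__((__aligned__(x)))

    static uint8_t faultstack[PAGE_SIZE] __aligned(PAGE_SIZE);

    /* Stacks grow down: start just under the top of the page, 16-byte aligned. */
    static uintptr_t
    fault_stack_top(void)
    {
            return (uintptr_t)&faultstack[PAGE_SIZE - 1] & ~(uintptr_t)0xf;
    }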
locore.S
53 #define PROC0_STK_OFF (PROC0_PML4_OFF + 1 * PAGE_SIZE)
54 #define PROC0_PTP3_OFF (PROC0_STK_OFF + UPAGES * PAGE_SIZE)
55 #define PROC0_PTP2_OFF (PROC0_PTP3_OFF + NKL4_KIMG_ENTRIES * PAGE_SIZE)
56 #define PROC0_PTP1_OFF (PROC0_PTP2_OFF + TABLE_L3_ENTRIES * PAGE_SIZE)
59 * PAGE_SIZE)
75 addl $PAGE_SIZE,%eax ; /* next phys page */ \
90 addl $PAGE_SIZE,%eax ; /* next phys page */ \
310 movl $PAGE_SIZE,%ecx /* size of the LAPIC PAGE */
417 addq $PAGE_SIZE,%rdx
/dragonfly/sys/vm/
vm_kern.c
114 npages = size / PAGE_SIZE; in kmem_alloc_swapbacked()
124 PAGE_SIZE, TRUE, in kmem_alloc_swapbacked()
173 PAGE_SIZE, TRUE, in kmem_alloc_pageable()
228 gstart = PAGE_SIZE; in kmem_alloc3()
285 for (i = gstart; i < size; i += PAGE_SIZE) { in kmem_alloc3()
347 PAGE_SIZE, TRUE, in kmem_suballoc()
384 size, PAGE_SIZE, 0, &addr) == 0) { in kmem_alloc_wait()
428 if (vm_map_findspace(map, vm_map_min(map), size, PAGE_SIZE, in kmem_alloc_attr()
446 for (i = 0; i < size; i += PAGE_SIZE) { in kmem_alloc_attr()
447 m = vm_page_alloc_contig(low, high, PAGE_SIZE, 0, in kmem_alloc_attr()
[all …]
/dragonfly/sys/platform/vkernel64/include/
globaldata.h
89 (MDGLOBALDATA_BASEALLOC_SIZE / PAGE_SIZE)
104 ((sizeof(struct mdglobaldata) + MDGLOBALDATA_PAD + PAGE_SIZE * 4 + \
105 UPAGES * PAGE_SIZE) % SEG_SIZE)) \
113 char idlestack[UPAGES * PAGE_SIZE];
/dragonfly/libexec/rtld-elf/
map_object.c
127 if ((segs[nsegs]->p_align & (PAGE_SIZE - 1)) != 0) { in map_object()
157 if (phdr->p_offset > PAGE_SIZE || in map_object()
158 phdr->p_offset + phdr->p_filesz > PAGE_SIZE) in map_object()
242 mprotect(clear_page, PAGE_SIZE, data_prot|PROT_WRITE)) { in map_object()
255 madvise(clear_page, PAGE_SIZE, MADV_CORE); in map_object()
256 mprotect(clear_page, PAGE_SIZE, data_prot); in map_object()
325 munmap(hdr, PAGE_SIZE); in map_object()
331 munmap(hdr, PAGE_SIZE); in map_object()
341 hdr = mmap(NULL, PAGE_SIZE, PROT_READ, MAP_PRIVATE, fd, 0); in get_elf_header()
382 (size_t)PAGE_SIZE) { in get_elf_header()
[all …]
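get_elf_header() relies on the fact that an ELF file's header (and normally its program headers) fits in the first page, so a single PAGE_SIZE read-only mapping is enough to validate the file before committing to a full map_object(). A minimal userland sketch of the same technique:

    #include <sys/mman.h>
    #include <fcntl.h>
    #include <stdio.h>
    #include <unistd.h>

    #define PAGE_SIZE 4096

    int
    main(int argc, char **argv)
    {
            if (argc < 2)
                    return 1;
            int fd = open(argv[1], O_RDONLY);
            if (fd == -1)
                    return 1;
            /* Map only the first page: enough for the ELF header. */
            void *hdr = mmap(NULL, PAGE_SIZE, PROT_READ, MAP_PRIVATE, fd, 0);
            if (hdr == MAP_FAILED)
                    return 1;
            printf("magic: 0x%02x %.3s\n",
                *(unsigned char *)hdr, (char *)hdr + 1);  /* 0x7f "ELF" */
            munmap(hdr, PAGE_SIZE);
            close(fd);
            return 0;
    }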
/dragonfly/sys/platform/vkernel64/platform/
pmap_inval.c
106 pmap_inval_cpu(pmap, va, PAGE_SIZE); in pmap_inval_pte()
117 pmap_inval_cpu(pmap, va, PAGE_SIZE); in pmap_inval_pte_quick()
217 pmap_inval_cpu(pmap, va, PAGE_SIZE); in pmap_clean_pte()
249 pmap_inval_cpu(pmap, va, PAGE_SIZE); in pmap_inval_loadandclear()
256 madvise(addr, PAGE_SIZE, MADV_INVAL); in cpu_invlpg()
copyio.c
57 if (PAGE_SIZE - ((vm_offset_t)p & PAGE_MASK) < sizeof(uint64_t)) in casu64()
95 if (PAGE_SIZE - ((vm_offset_t)p & PAGE_MASK) < sizeof(u_int)) in casu32()
132 if (PAGE_SIZE - ((vm_offset_t)p & PAGE_MASK) < sizeof(uint64_t)) in swapu64()
164 if (PAGE_SIZE - ((vm_offset_t)p & PAGE_MASK) < sizeof(uint64_t)) in swapu32()
196 if (PAGE_SIZE - ((vm_offset_t)p & PAGE_MASK) < sizeof(uint64_t)) in fuwordadd64()
228 if (PAGE_SIZE - ((vm_offset_t)p & PAGE_MASK) < sizeof(uint64_t)) in fuwordadd32()
282 n = PAGE_SIZE - ((vm_offset_t)uptr & PAGE_MASK); in copyinstr()
327 n = PAGE_SIZE - ((vm_offset_t)udaddr & PAGE_MASK); in copyin()
368 n = PAGE_SIZE - ((vm_offset_t)udaddr & PAGE_MASK); in copyout()
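Every routine in this file leans on one piece of arithmetic: PAGE_SIZE - (addr & PAGE_MASK) is the number of bytes from addr to the end of its page. The atomic helpers use it to refuse operands that would straddle a page boundary (fewer bytes left than sizeof(uint64_t)), and the copy loops use it as the natural chunk size. A runnable sketch:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE 4096
    #define PAGE_MASK (PAGE_SIZE - 1)

    /* Bytes from 'addr' up to its next page boundary. */
    static size_t
    bytes_left_in_page(uintptr_t addr)
    {
            return PAGE_SIZE - (addr & PAGE_MASK);
    }

    int
    main(void)
    {
            printf("%zu\n", bytes_left_in_page(0x1000));  /* 4096: page start */
            printf("%zu\n", bytes_left_in_page(0x1ffc));  /* 4: an 8-byte access
                                                             here would straddle */
            return 0;
    }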
/dragonfly/sys/dev/drm/i915/
intel_guc_ct.c
146 vma = intel_guc_allocate_vma(guc, PAGE_SIZE); in ctch_init()
164 ctch->ctbs[i].desc = blob + PAGE_SIZE/4 * i; in ctch_init()
165 ctch->ctbs[i].cmds = blob + PAGE_SIZE/4 * i + PAGE_SIZE/2; in ctch_init()
212 base + PAGE_SIZE/4 * i + PAGE_SIZE/2, in ctch_open()
213 PAGE_SIZE/4, in ctch_open()
221 base + PAGE_SIZE/4 * CTB_RECV, in ctch_open()
227 base + PAGE_SIZE/4 * CTB_SEND, in ctch_open()
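ctch_init() carves a single page into four PAGE_SIZE/4 quarters: the first two hold the descriptors of the two channels and the last two their command buffers. Condensed from the excerpt above (blob and i are the variables shown there):

    void *desc = blob + PAGE_SIZE/4 * i;                /* 0x000 or 0x400 */
    void *cmds = blob + PAGE_SIZE/4 * i + PAGE_SIZE/2;  /* 0x800 or 0xc00 */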
i915_gem_internal.c
65 PAGE_SIZE) >> PAGE_SHIFT; in i915_gem_object_get_pages_internal()
83 npages = obj->base.size / PAGE_SIZE; in i915_gem_object_get_pages_internal()
108 sg_set_page(sg, page, PAGE_SIZE << order, 0); in i915_gem_object_get_pages_internal()
109 sg_page_sizes |= PAGE_SIZE << order; in i915_gem_object_get_pages_internal()
186 GEM_BUG_ON(!IS_ALIGNED(size, PAGE_SIZE)); in i915_gem_object_create_internal()
i915_sysfs.c
60 return snprintf(buf, PAGE_SIZE, "%u\n", rc6_residency);
68 return snprintf(buf, PAGE_SIZE, "%u\n", rc6p_residency);
76 return snprintf(buf, PAGE_SIZE, "%u\n", rc6pp_residency);
84 return snprintf(buf, PAGE_SIZE, "%u\n", rc6_residency);
268 return snprintf(buf, PAGE_SIZE, "%d\n", ret);
276 return snprintf(buf, PAGE_SIZE, "%d\n",
285 return snprintf(buf, PAGE_SIZE, "%d\n",
320 return snprintf(buf, PAGE_SIZE, "%d\n",
329 return snprintf(buf, PAGE_SIZE, "%d\n",
387 return snprintf(buf, PAGE_SIZE, "%d\n",
[all …]
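The PAGE_SIZE bound on every snprintf() here is the Linux sysfs contract: a show() callback receives a buffer exactly one page long, so formatting is capped at PAGE_SIZE and the return value is the byte count produced. A generic sketch of the pattern:

    #include <stdio.h>
    #include <sys/types.h>

    #define PAGE_SIZE 4096

    /* sysfs show() convention: 'buf' is one full page; output is capped there. */
    static ssize_t
    show_u32(char *buf, unsigned int value)
    {
            return snprintf(buf, PAGE_SIZE, "%u\n", value);
    }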
/dragonfly/lib/libkvm/
kvm_x86_64.c
118 return (PAGE_SIZE - ((size_t)pa & PAGE_MASK)); in _kvm_pa2off()
129 return (PAGE_SIZE - ((size_t)pa & PAGE_MASK)); in _kvm_pa2off()
213 PML4 = _kvm_malloc(kd, PAGE_SIZE); in _kvm_initvtop()
214 if (kvm_read(kd, pa, PML4, PAGE_SIZE) != PAGE_SIZE) { in _kvm_initvtop()
243 offset = va & (PAGE_SIZE - 1); in _kvm_vatop()
256 return (PAGE_SIZE - offset); in _kvm_vatop()
353 return (PAGE_SIZE - offset); in _kvm_vatop()
/dragonfly/test/nvmm/
calc-vm.c
37 #define PAGE_SIZE 4096 (macro definition)
98 hva = (uintptr_t)mmap(NULL, PAGE_SIZE, PROT_READ|PROT_WRITE, in main()
102 if (nvmm_hva_map(&mach, hva, PAGE_SIZE) == -1) in main()
107 if (nvmm_gpa_map(&mach, hva, gpa, PAGE_SIZE, PROT_READ|PROT_EXEC) == -1) in main()
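calc-vm.c is the smallest complete example in the tree of giving a guest memory through libnvmm: mmap one anonymous host page, register its host virtual address with nvmm_hva_map(), then back a guest physical address with it via nvmm_gpa_map(), all at PAGE_SIZE granularity. Condensed from the excerpt (the mmap flags are truncated above, so MAP_ANON|MAP_PRIVATE is my assumption):

    uintptr_t hva = (uintptr_t)mmap(NULL, PAGE_SIZE, PROT_READ|PROT_WRITE,
        MAP_ANON|MAP_PRIVATE, -1, 0);                  /* flags assumed */

    if (nvmm_hva_map(&mach, hva, PAGE_SIZE) == -1)     /* register host VA */
            err(EXIT_FAILURE, "nvmm_hva_map");
    if (nvmm_gpa_map(&mach, hva, gpa, PAGE_SIZE,       /* wire guest PA to it */
        PROT_READ|PROT_EXEC) == -1)
            err(EXIT_FAILURE, "nvmm_gpa_map");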
/dragonfly/sys/dev/drm/radeon/
radeon_gart.c
133 PAGE_SIZE, true, RADEON_GEM_DOMAIN_VRAM, in radeon_gart_table_vram_alloc()
253 p = t / (PAGE_SIZE / RADEON_GPU_PAGE_SIZE); in radeon_gart_unbind()
257 for (j = 0; j < (PAGE_SIZE / RADEON_GPU_PAGE_SIZE); j++, t++) { in radeon_gart_unbind()
300 p = t / (PAGE_SIZE / RADEON_GPU_PAGE_SIZE); in radeon_gart_bind()
305 for (j = 0; j < (PAGE_SIZE / RADEON_GPU_PAGE_SIZE); j++, t++) { in radeon_gart_bind()
337 if (PAGE_SIZE < RADEON_GPU_PAGE_SIZE) { in radeon_gart_init()
345 rdev->gart.num_cpu_pages = rdev->mc.gtt_size / PAGE_SIZE; in radeon_gart_init()
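The GART code juggles two page sizes: the CPU's PAGE_SIZE and the GPU's RADEON_GPU_PAGE_SIZE. The init-time check insists PAGE_SIZE is not smaller than RADEON_GPU_PAGE_SIZE, so each CPU page backs a whole number of GPU pages and a GPU page index t (from the excerpt) converts to its backing CPU page with a plain division:

    /* GPU pages per CPU page; radeon_gart_init() rejects PAGE_SIZE smaller
     * than RADEON_GPU_PAGE_SIZE, so this ratio is always >= 1. */
    unsigned int ratio = PAGE_SIZE / RADEON_GPU_PAGE_SIZE;

    unsigned int p = t / ratio;   /* CPU page backing GPU page index t */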
/dragonfly/test/sysperf/
pipe2.c
9 #define PAGE_SIZE 4096 (macro definition)
10 #define PAGE_MASK (PAGE_SIZE - 1)
49 buf = mmap(NULL, bytes * 2 + PAGE_SIZE, in main()
56 bzero(buf, bytes * 2 + PAGE_SIZE); in main()
socketpair.c
10 #define PAGE_SIZE 4096 (macro definition)
11 #define PAGE_MASK (PAGE_SIZE - 1)
50 buf = mmap(NULL, bytes * 2 + PAGE_SIZE, PROT_READ|PROT_WRITE, MAP_SHARED|MAP_ANON, -1, 0); in main()
56 bzero(buf, bytes * 2 + PAGE_SIZE); in main()
