
Searched refs:ggtt (Results 1 – 25 of 63) sorted by relevance


/netbsd/sys/external/bsd/drm2/dist/drm/i915/gt/
intel_ggtt.c
69 ggtt->vm.cleanup(&ggtt->vm); in ggtt_init_hw()
81 ggtt->vm.cleanup(&ggtt->vm); in ggtt_init_hw()
146 ggtt->vm.clear_range(&ggtt->vm, 0, ggtt->vm.total); in ggtt_suspend_mappings()
148 ggtt->invalidate(ggtt); in ggtt_suspend_mappings()
236 ggtt->invalidate(ggtt); in gen8_ggtt_insert_page()
287 ggtt->invalidate(ggtt); in gen8_ggtt_insert_entries()
309 ggtt->invalidate(ggtt); in gen6_ggtt_insert_page()
364 ggtt->invalidate(ggtt); in gen6_ggtt_insert_entries()
1367 ggtt->invalidate(ggtt); in i915_ggtt_enable_guc()
1381 ggtt->invalidate(ggtt); in i915_ggtt_disable_guc()
[all …]
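
The intel_ggtt.c hits above all follow the same shape: PTEs are written or scrubbed through the vm callbacks, then the hardware TLB is flushed with ggtt->invalidate(). A minimal sketch of that pattern, assuming it is compiled inside the i915 tree with the usual headers in scope; the function names and the dma/slot values are illustrative, not part of the driver:

    /* Sketch only: map one page into the GGTT, then flush the TLB. */
    static void example_ggtt_map_page(struct i915_ggtt *ggtt,
                                      dma_addr_t dma, u64 slot)
    {
            /* Write a single PTE through the address-space callback. */
            ggtt->vm.insert_page(&ggtt->vm, dma, slot, I915_CACHE_NONE, 0);
            /* Make the GPU see the new PTE (gen-specific flush). */
            ggtt->invalidate(ggtt);
    }

    /* Sketch only: scrub the PTE again, mirroring ggtt_suspend_mappings(),
     * which clears the whole range and then invalidates. */
    static void example_ggtt_unmap_page(struct i915_ggtt *ggtt, u64 slot)
    {
            ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE);
            ggtt->invalidate(ggtt);
    }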
gen6_ppgtt.c
210 gen6_ggtt_invalidate(ppgtt->base.vm.gt->ggtt); in gen6_flush_pd()
343 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vma->vm); in pd_vma_bind() local
363 (uint64_t)ggtt->gsmsz, in pd_vma_bind()
364 (uint64_t)(ggtt->gsmsz - ggtt_offset_bytes)); in pd_vma_bind()
365 ret = -bus_space_subregion(ggtt->gsmt, ggtt->gsmh, ggtt_offset_bytes, in pd_vma_bind()
371 ppgtt->pd_bst = ggtt->gsmt; in pd_vma_bind()
414 struct i915_ggtt *ggtt = ppgtt->base.vm.gt->ggtt; in pd_vma_create() local
418 GEM_BUG_ON(size > ggtt->vm.total); in pd_vma_create()
428 vma->vm = i915_vm_get(&ggtt->vm); in pd_vma_create()
499 struct i915_ggtt * const ggtt = gt->ggtt; in gen6_ppgtt_create() local
[all …]
intel_gtt.h
77 #define ggtt_total_entries(ggtt) ((ggtt)->vm.total >> PAGE_SHIFT) argument
374 void (*invalidate)(struct i915_ggtt *ggtt);
556 void i915_ggtt_enable_guc(struct i915_ggtt *ggtt);
557 void i915_ggtt_disable_guc(struct i915_ggtt *ggtt);
561 static inline bool i915_ggtt_has_aperture(const struct i915_ggtt *ggtt) in i915_ggtt_has_aperture() argument
563 return ggtt->mappable_end > 0; in i915_ggtt_has_aperture()
622 void gen6_ggtt_invalidate(struct i915_ggtt *ggtt);
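
intel_gtt.h declares the helpers the other hits rely on: ggtt_total_entries() is just vm.total >> PAGE_SHIFT, and i915_ggtt_has_aperture() tests mappable_end > 0. A small sketch of how they might be consulted, assuming the i915 headers are in scope; the debug print is illustrative:

    /* Sketch: query the intel_gtt.h helpers shown above. */
    static void example_report_ggtt(struct i915_ggtt *ggtt)
    {
            if (!i915_ggtt_has_aperture(ggtt))
                    return; /* no CPU-mappable aperture on this platform */

            DRM_DEBUG_DRIVER("GGTT: %llu PTEs, %llu bytes mappable\n",
                             (u64)ggtt_total_entries(ggtt),
                             (u64)ggtt->mappable_end);
    }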
intel_ring.c
104 static struct i915_vma *create_ring_vma(struct i915_ggtt *ggtt, int size) in create_ring_vma() argument
106 struct i915_address_space *vm = &ggtt->vm; in create_ring_vma()
112 if (i915_ggtt_has_aperture(ggtt)) in create_ring_vma()
166 vma = create_ring_vma(engine->gt->ggtt, size); in intel_engine_create_ring()
/netbsd/sys/external/bsd/drm2/dist/drm/i915/selftests/
i915_gem_evict.c
137 struct i915_ggtt *ggtt = gt->ggtt; in igt_evict_something() local
160 unpin_ggtt(ggtt); in igt_evict_something()
183 struct i915_ggtt *ggtt = gt->ggtt; in igt_overcommit() local
220 struct i915_ggtt *ggtt = gt->ggtt; in igt_evict_for_vma() local
244 unpin_ggtt(ggtt); in igt_evict_for_vma()
271 struct i915_ggtt *ggtt = gt->ggtt; in igt_evict_for_cache_color() local
353 unpin_ggtt(ggtt); in igt_evict_for_cache_color()
362 struct i915_ggtt *ggtt = gt->ggtt; in igt_evict_vm() local
382 unpin_ggtt(ggtt); in igt_evict_vm()
402 struct i915_ggtt *ggtt = gt->ggtt; in igt_evict_contexts() local
[all …]
mock_gtt.c
110 memset(ggtt, 0, sizeof(*ggtt)); in mock_init_ggtt()
112 ggtt->vm.gt = &i915->gt; in mock_init_ggtt()
113 ggtt->vm.i915 = i915; in mock_init_ggtt()
114 ggtt->vm.is_ggtt = true; in mock_init_ggtt()
117 ggtt->mappable_end = resource_size(&ggtt->gmadr); in mock_init_ggtt()
118 ggtt->vm.total = 4096 * PAGE_SIZE; in mock_init_ggtt()
120 ggtt->vm.clear_range = mock_clear_range; in mock_init_ggtt()
121 ggtt->vm.insert_page = mock_insert_page; in mock_init_ggtt()
123 ggtt->vm.cleanup = mock_cleanup; in mock_init_ggtt()
131 i915->gt.ggtt = ggtt; in mock_init_ggtt()
[all …]
i915_gem_gtt.c
1086 struct i915_ggtt *ggtt = &i915->ggtt; in exercise_ggtt() local
1146 struct i915_ggtt *ggtt = &i915->ggtt; in igt_ggtt_page() local
1180 ggtt->vm.insert_page(&ggtt->vm, in igt_ggtt_page()
1221 ggtt->vm.clear_range(&ggtt->vm, tmp.start, tmp.size); in igt_ggtt_page()
1486 0, ggtt->vm.total, in igt_gtt_insert()
1561 0, ggtt->vm.total, in igt_gtt_insert()
1702 struct i915_ggtt *ggtt; in i915_gem_gtt_mock_selftests() local
1709 ggtt = kmalloc(sizeof(*ggtt), GFP_KERNEL); in i915_gem_gtt_mock_selftests()
1710 if (!ggtt) { in i915_gem_gtt_mock_selftests()
1720 mock_fini_ggtt(ggtt); in i915_gem_gtt_mock_selftests()
[all …]
i915_vma.c
155 struct i915_ggtt *ggtt = arg; in igt_vma_create() local
262 struct i915_ggtt *ggtt = arg; in igt_vma_pin1() local
483 struct i915_ggtt *ggtt = arg; in igt_vma_rotate_remap() local
705 struct i915_ggtt *ggtt = arg; in igt_vma_partial() local
828 struct i915_ggtt *ggtt; in i915_vma_mock_selftests() local
835 ggtt = kmalloc(sizeof(*ggtt), GFP_KERNEL); in i915_vma_mock_selftests()
836 if (!ggtt) { in i915_vma_mock_selftests()
840 mock_init_ggtt(i915, ggtt); in i915_vma_mock_selftests()
842 err = i915_subtests(tests, ggtt); in i915_vma_mock_selftests()
846 mock_fini_ggtt(ggtt); in i915_vma_mock_selftests()
[all …]
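
The i915_vma.c and i915_gem_gtt.c selftest hits use the same mock harness: kmalloc a struct i915_ggtt, let mock_init_ggtt() install the stub callbacks from mock_gtt.c, run the subtests with the ggtt as their argument, then tear it down with mock_fini_ggtt(). A condensed sketch, assuming the usual i915 selftest macros are available; the subtest name is only an example:

    /* Sketch of the mock-GGTT selftest harness visible above. */
    static int example_mock_selftests(struct drm_i915_private *i915)
    {
            static const struct i915_subtest tests[] = {
                    SUBTEST(igt_vma_create),   /* illustrative subtest */
            };
            struct i915_ggtt *ggtt;
            int err;

            ggtt = kmalloc(sizeof(*ggtt), GFP_KERNEL);
            if (!ggtt)
                    return -ENOMEM;

            mock_init_ggtt(i915, ggtt);        /* stub clear_range/insert_page */
            err = i915_subtests(tests, ggtt);  /* each subtest gets ggtt as arg */
            mock_fini_ggtt(ggtt);
            kfree(ggtt);

            return err;
    }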
i915_gem.c
48 struct i915_ggtt *ggtt = &i915->ggtt; in trash_stolen() local
49 const u64 slot = ggtt->error_capture.start; in trash_stolen()
55 if (!i915_ggtt_has_aperture(ggtt)) in trash_stolen()
63 ggtt->vm.insert_page(&ggtt->vm, dma, slot, I915_CACHE_NONE, 0); in trash_stolen()
65 s = io_mapping_map_atomic_wc(&ggtt->iomap, slot); in trash_stolen()
73 ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE); in trash_stolen()
133 i915_gem_restore_fences(&i915->ggtt); in pm_resume()
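
The selftest i915_gem.c hits (trash_stolen) show the standard trick for getting a CPU view of an arbitrary page through the GGTT: borrow the reserved error_capture slot, insert the page there, map the slot through the aperture, and scrub the PTE afterwards. A condensed sketch of that sequence; the function name and the dma argument are illustrative:

    /* Sketch of the trash_stolen() access pattern shown above. */
    static void example_peek_page(struct i915_ggtt *ggtt, dma_addr_t dma)
    {
            const u64 slot = ggtt->error_capture.start;
            void __iomem *s;

            if (!i915_ggtt_has_aperture(ggtt))
                    return;  /* no aperture to map the slot through */

            ggtt->vm.insert_page(&ggtt->vm, dma, slot, I915_CACHE_NONE, 0);

            s = io_mapping_map_atomic_wc(&ggtt->iomap, slot);
            /* ... read or scribble over the page through s ... */
            io_mapping_unmap_atomic(s);

            ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE);
    }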
mock_gtt.h
33 void mock_init_ggtt(struct drm_i915_private *i915, struct i915_ggtt *ggtt);
34 void mock_fini_ggtt(struct i915_ggtt *ggtt);
mock_gem_device.c
71 mock_fini_ggtt(&i915->ggtt); in mock_device_release()
182 mock_init_ggtt(i915, &i915->ggtt); in mock_gem_device()
183 i915->gt.vm = i915_vm_get(&i915->ggtt.vm); in mock_gem_device()
/netbsd/sys/external/bsd/drm2/dist/drm/i915/
i915_vgpu.c
178 ggtt->vm.reserved -= node->size; in vgt_deballoon_space()
193 if (!intel_vgpu_active(ggtt->vm.i915)) in intel_vgt_deballoon()
214 ret = i915_gem_gtt_reserve(&ggtt->vm, node, in vgt_balloon_space()
218 ggtt->vm.reserved += size; in vgt_balloon_space()
270 unsigned long ggtt_end = ggtt->vm.total; in intel_vgt_balloon()
276 if (!intel_vgpu_active(ggtt->vm.i915)) in intel_vgt_balloon()
297 if (mappable_end > ggtt->mappable_end || in intel_vgt_balloon()
298 unmappable_base < ggtt->mappable_end || in intel_vgt_balloon()
307 ggtt->mappable_end, unmappable_base); in intel_vgt_balloon()
329 if (mappable_end < ggtt->mappable_end) { in intel_vgt_balloon()
[all …]
i915_gem_fence_reg.c
70 return fence->ggtt->vm.i915; in fence_to_i915()
75 return fence->ggtt->vm.gt->uncore; in fence_to_uncore()
236 struct i915_ggtt *ggtt = fence->ggtt; in fence_update() local
374 fence = fence_find(ggtt); in __i915_vma_pin_fence()
464 fence = fence_find(ggtt); in i915_reserve_fence()
488 struct i915_ggtt *ggtt = fence->ggtt; in i915_unreserve_fence() local
852 INIT_LIST_HEAD(&ggtt->fence_list); in i915_ggtt_init_fences()
856 detect_bit_6_swizzle(ggtt); in i915_ggtt_init_fences()
878 fence->ggtt = ggtt; in i915_ggtt_init_fences()
882 ggtt->num_fences = num_fences; in i915_ggtt_init_fences()
[all …]
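
The i915_gem_fence_reg.c hits show the fence-register pool hanging off the GGTT: i915_ggtt_init_fences() fills ggtt->fence_list and ggtt->num_fences, and i915_reserve_fence() hands out a register from that pool. A hedged sketch of borrowing one; the i915_unreserve_fence(fence) call is assumed from the header and the function context above, its exact signature is not shown in these hits:

    /* Sketch: borrow a fence register from the GGTT pool, then return it. */
    static int example_with_fence(struct i915_ggtt *ggtt)
    {
            struct i915_fence_reg *fence;

            fence = i915_reserve_fence(ggtt);
            if (IS_ERR(fence))
                    return PTR_ERR(fence);

            /* ... use the fenced (detiled) view of the aperture here ... */

            i915_unreserve_fence(fence);   /* assumed counterpart, see above */
            return 0;
    }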
i915_gem.c
84 0, ggtt->mappable_end, in insert_mappable_node()
95 mutex_lock(&ggtt->vm.mutex); in remove_mappable_node()
104 struct i915_ggtt *ggtt = &to_i915(dev)->ggtt; in i915_gem_get_aperture_ioctl() local
112 pinned = ggtt->vm.reserved; in i915_gem_get_aperture_ioctl()
424 struct i915_ggtt *ggtt = &i915->ggtt; in i915_gem_gtt_pread() local
483 ggtt->vm.insert_page(&ggtt->vm, in i915_gem_gtt_pread()
504 ggtt->vm.clear_range(&ggtt->vm, node.start, node.size); in i915_gem_gtt_pread()
611 struct i915_ggtt *ggtt = &i915->ggtt; in i915_gem_gtt_pwrite_fast() local
689 ggtt->vm.insert_page(&ggtt->vm, in i915_gem_gtt_pwrite_fast()
719 ggtt->vm.clear_range(&ggtt->vm, node.start, node.size); in i915_gem_gtt_pwrite_fast()
[all …]
intel_region_lmem.c
20 struct i915_ggtt *ggtt = &i915->ggtt; in init_fake_lmem_bar() local
30 ret = drm_mm_reserve_node(&ggtt->vm.mm, &mem->fake_mappable); in init_fake_lmem_bar()
45 ggtt->vm.insert_page(&ggtt->vm, in init_fake_lmem_bar()
118 GEM_BUG_ON(i915_ggtt_has_aperture(&i915->ggtt)); in intel_setup_fake_lmem()
i915_gem_fence_reg.h
43 struct i915_ggtt *ggtt; member
59 struct i915_fence_reg *i915_reserve_fence(struct i915_ggtt *ggtt);
62 void i915_gem_restore_fences(struct i915_ggtt *ggtt);
69 void i915_ggtt_init_fences(struct i915_ggtt *ggtt);
i915_gem_gtt.c
87 struct i915_ggtt *ggtt = &dev_priv->ggtt; in i915_gem_gtt_finish_pages() local
89 if (unlikely(ggtt->do_idle_maps)) { in i915_gem_gtt_finish_pages()
91 if (intel_gt_retire_requests_timeout(ggtt->vm.gt, in i915_gem_gtt_finish_pages()
138 GEM_BUG_ON(vm == &vm->i915->ggtt.alias->vm); in i915_gem_gtt_reserve()
236 GEM_BUG_ON(vm == &vm->i915->ggtt.alias->vm); in i915_gem_gtt_insert()
i915_vgpu.h
48 int intel_vgt_balloon(struct i915_ggtt *ggtt);
49 void intel_vgt_deballoon(struct i915_ggtt *ggtt);
i915_gpu_error.c
981 struct i915_ggtt *ggtt = gt->ggtt; in i915_vma_coredump_create() local
1020 ggtt->vm.insert_page(&ggtt->vm, dma, slot, in i915_vma_coredump_create()
1091 struct i915_ggtt *ggtt = gt->_gt->ggtt; in gt_record_fences() local
1096 for (i = 0; i < ggtt->num_fences; i++) in gt_record_fences()
1101 for (i = 0; i < ggtt->num_fences; i++) in gt_record_fences()
1535 struct i915_ggtt *ggtt = gt->_gt->ggtt; in gt_capture_prepare() local
1537 mutex_lock(&ggtt->error_mutex); in gt_capture_prepare()
1542 struct i915_ggtt *ggtt = gt->_gt->ggtt; in gt_capture_finish() local
1545 ggtt->vm.clear_range(&ggtt->vm, in gt_capture_finish()
1546 ggtt->error_capture.start, in gt_capture_finish()
[all …]
/netbsd/sys/external/bsd/drm2/dist/drm/i915/gvt/
aperture_gm.c
69 mutex_lock(&dev_priv->ggtt.vm.mutex); in alloc_gm()
76 mutex_unlock(&dev_priv->ggtt.vm.mutex); in alloc_gm()
106 mutex_lock(&dev_priv->ggtt.vm.mutex); in alloc_vgpu_gm()
108 mutex_unlock(&dev_priv->ggtt.vm.mutex); in alloc_vgpu_gm()
116 mutex_lock(&dev_priv->ggtt.vm.mutex); in free_vgpu_gm()
119 mutex_unlock(&dev_priv->ggtt.vm.mutex); in free_vgpu_gm()
180 mutex_lock(&dev_priv->ggtt.vm.mutex); in free_vgpu_fence()
187 mutex_unlock(&dev_priv->ggtt.vm.mutex); in free_vgpu_fence()
203 mutex_lock(&dev_priv->ggtt.vm.mutex); in alloc_vgpu_fence()
215 mutex_unlock(&dev_priv->ggtt.vm.mutex); in alloc_vgpu_fence()
[all …]
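
The GVT aperture_gm.c hits all show the same locking rule: any manipulation of the global GTT's drm_mm allocator for a vGPU (graphics memory or fence assignment) happens under dev_priv->ggtt.vm.mutex. A minimal sketch of that rule; the node being released is illustrative:

    /* Sketch: drm_mm work on the GGTT is serialized by ggtt.vm.mutex. */
    static void example_release_gm_node(struct drm_i915_private *dev_priv,
                                        struct drm_mm_node *node)
    {
            mutex_lock(&dev_priv->ggtt.vm.mutex);
            drm_mm_remove_node(node);   /* stands in for vGPU GM alloc/free */
            mutex_unlock(&dev_priv->ggtt.vm.mutex);
    }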
/netbsd/sys/external/bsd/drm2/dist/drm/i915/gem/
i915_gem_tiling.c
191 struct i915_ggtt *ggtt = &to_i915(obj->base.dev)->ggtt; in i915_gem_object_fence_prepare() local
198 mutex_lock(&ggtt->vm.mutex); in i915_gem_object_fence_prepare()
207 mutex_unlock(&ggtt->vm.mutex); in i915_gem_object_fence_prepare()
332 if (!dev_priv->ggtt.num_fences) in i915_gem_set_tiling_ioctl()
358 args->swizzle_mode = to_i915(dev)->ggtt.bit_6_swizzle_x; in i915_gem_set_tiling_ioctl()
360 args->swizzle_mode = to_i915(dev)->ggtt.bit_6_swizzle_y; in i915_gem_set_tiling_ioctl()
415 if (!dev_priv->ggtt.num_fences) in i915_gem_get_tiling_ioctl()
431 args->swizzle_mode = dev_priv->ggtt.bit_6_swizzle_x; in i915_gem_get_tiling_ioctl()
434 args->swizzle_mode = dev_priv->ggtt.bit_6_swizzle_y; in i915_gem_get_tiling_ioctl()
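
The tiling-ioctl hits read the swizzle state straight out of the GGTT fence bookkeeping: bail if ggtt.num_fences is zero, otherwise report bit_6_swizzle_x or bit_6_swizzle_y depending on the tiling mode. A hedged sketch; the function name and error code are illustrative, not the ioctl's exact behaviour:

    /* Sketch: report the detected bit-6 swizzle for a tiling mode. */
    static int example_get_swizzle(struct drm_i915_private *dev_priv,
                                   unsigned int tiling_mode, u32 *swizzle)
    {
            if (!dev_priv->ggtt.num_fences)
                    return -EOPNOTSUPP;  /* no fence registers, no tiling */

            if (tiling_mode == I915_TILING_X)
                    *swizzle = dev_priv->ggtt.bit_6_swizzle_x;
            else
                    *swizzle = dev_priv->ggtt.bit_6_swizzle_y;

            return 0;
    }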
i915_gem_stolen.c
77 struct i915_ggtt *ggtt = &i915->ggtt; in i915_adjust_stolen() local
78 struct intel_uncore *uncore = ggtt->vm.gt->uncore; in i915_adjust_stolen()
105 ggtt_total_entries(ggtt) * 4); in i915_adjust_stolen()
756 struct i915_ggtt *ggtt = &i915->ggtt; in i915_gem_object_create_stolen_for_preallocated() local
805 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in i915_gem_object_create_stolen_for_preallocated()
816 mutex_lock(&ggtt->vm.mutex); in i915_gem_object_create_stolen_for_preallocated()
817 ret = i915_gem_gtt_reserve(&ggtt->vm, &vma->node, in i915_gem_object_create_stolen_for_preallocated()
822 mutex_unlock(&ggtt->vm.mutex); in i915_gem_object_create_stolen_for_preallocated()
835 list_add_tail(&vma->vm_link, &ggtt->vm.bound_list); in i915_gem_object_create_stolen_for_preallocated()
836 mutex_unlock(&ggtt->vm.mutex); in i915_gem_object_create_stolen_for_preallocated()
i915_gem_mman.c
391 struct i915_ggtt *ggtt = &i915->ggtt; in vm_fault_gtt() local
427 ret = intel_gt_reset_trylock(ggtt->vm.gt, &srcu); in vm_fault_gtt()
487 paddr = ggtt->gmadr.start + vma->node.start in vm_fault_gtt()
502 &ggtt->iomap); in vm_fault_gtt()
510 mutex_lock(&i915->ggtt.vm.mutex); in vm_fault_gtt()
513 mutex_unlock(&i915->ggtt.vm.mutex); in vm_fault_gtt()
519 intel_wakeref_auto(&i915->ggtt.userfault_wakeref, in vm_fault_gtt()
533 intel_gt_reset_unlock(ggtt->vm.gt, srcu); in vm_fault_gtt()
647 mutex_lock(&i915->ggtt.vm.mutex); in i915_gem_object_release_mmap_gtt()
665 mutex_unlock(&i915->ggtt.vm.mutex); in i915_gem_object_release_mmap_gtt()
[all …]
/netbsd/sys/external/bsd/drm2/dist/drm/i915/gt/uc/
intel_uc_fw.c
399 struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt; in uc_fw_ggtt_offset() local
400 struct drm_mm_node *node = &ggtt->uc_fw; in uc_fw_ggtt_offset()
412 struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt; in uc_fw_bind_ggtt() local
417 .vm = &ggtt->vm, in uc_fw_bind_ggtt()
421 GEM_BUG_ON(dummy.node.size > ggtt->uc_fw.size); in uc_fw_bind_ggtt()
426 ggtt->vm.insert_entries(&ggtt->vm, &dummy, I915_CACHE_NONE, 0); in uc_fw_bind_ggtt()
432 struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt; in uc_fw_unbind_ggtt() local
435 ggtt->vm.clear_range(&ggtt->vm, start, obj->base.size); in uc_fw_unbind_ggtt()
/netbsd/sys/external/bsd/drm2/i915drm/
intelfb.c
178 struct i915_ggtt *const ggtt = &i915->ggtt; in intelfb_drmfb_mmapfb() local
184 return bus_space_mmap(dev->bst, ggtt->gmadr.start, in intelfb_drmfb_mmapfb()
