
Searched for refs:ce (results 1 – 25 of 226), sorted by relevance


/openbsd/sys/dev/pci/drm/i915/gt/
intel_context.h
79 return ce; in intel_context_to_parent()
85 return intel_context_is_child(ce) || intel_context_is_parent(ce); in intel_context_is_parallel()
131 return ce->ops->cancel_request(ce, rq); in intel_context_cancel_request()
198 ce->ops->sched_disable(ce); in intel_context_unpin()
214 ce->ops->enter(ce); in intel_context_enter()
222 ++ce->active_count; in intel_context_mark_active()
233 ce->ops->exit(ce); in intel_context_exit()
238 kref_get(&ce->ref); in intel_context_get()
239 return ce; in intel_context_get()
244 kref_put(&ce->ref, ce->ops->destroy); in intel_context_put()
[all …]
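
The intel_context.h hits above revolve around context lifetime: intel_context_get() takes a reference with kref_get(&ce->ref) and intel_context_put() drops it with kref_put(), handing the final release to ce->ops->destroy. A minimal user-space sketch of that get/put-with-release-callback pattern (plain C11 atomics and invented names, not the kernel's kref API) could look like:

```c
/*
 * Minimal user-space model of the kref get/put pattern in the hits above.
 * "ctx", "ctx_get" and "ctx_put" are invented names, not the i915 API.
 */
#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct ctx {
        atomic_int ref;                 /* models ce->ref (a kref)          */
        void (*destroy)(struct ctx *);  /* models ce->ops->destroy          */
};

static void ctx_destroy(struct ctx *c)
{
        printf("last reference dropped, freeing context\n");
        free(c);
}

static struct ctx *ctx_get(struct ctx *c)
{
        atomic_fetch_add(&c->ref, 1);   /* kref_get(&ce->ref)               */
        return c;                       /* returned so calls can be chained */
}

static void ctx_put(struct ctx *c)
{
        /*
         * kref_put(&ce->ref, ce->ops->destroy): the release callback only
         * runs when the count drops to zero.
         */
        if (atomic_fetch_sub(&c->ref, 1) == 1)
                c->destroy(c);
}

int main(void)
{
        struct ctx *c = calloc(1, sizeof(*c));

        if (!c)
                return 1;
        atomic_init(&c->ref, 1);        /* creator owns the first reference */
        c->destroy = ctx_destroy;

        ctx_get(c);                     /* a second user takes a reference  */
        ctx_put(c);                     /* ...and drops it                  */
        ctx_put(c);                     /* creator's put triggers destroy   */
        return 0;
}
```
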
intel_context.c
51 if (!ce) in intel_context_create()
72 err = ce->ops->alloc(ce); in intel_context_alloc_state()
240 err = ce->ops->pre_pin(ce, ww, &vaddr); in __intel_context_do_pin_ww()
264 err = ce->ops->pin(ce, vaddr); in __intel_context_do_pin_ww()
272 ce->ring->head, ce->ring->tail); in __intel_context_do_pin_ww()
289 ce->ops->post_unpin(ce); in __intel_context_do_pin_ww()
327 ce->ops->unpin(ce); in __intel_context_do_unpin()
328 ce->ops->post_unpin(ce); in __intel_context_do_unpin()
603 ce->ops->update_stats(ce); in intel_context_get_total_runtime_ns()
633 ce->ops->revoke(ce, rq, in intel_context_ban()
[all …]
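
The intel_context.c matches trace the pinning path: ce->ops->pre_pin() prepares a mapping, ce->ops->pin() consumes it, and a failed pin (or a later unpin) is unwound through post_unpin(). A hedged, self-contained sketch of that paired setup/teardown with error unwinding follows; the ops struct, hook signatures and stub backend are invented, not the driver's structures.

```c
/*
 * Sketch of the pre_pin -> pin / unpin -> post_unpin pairing traced by the
 * matches above; the ops struct and hook names are illustrative only.
 */
#include <stdio.h>

struct backend_ops {
        int  (*pre_pin)(void **vaddr);  /* prepares a mapping               */
        int  (*pin)(void *vaddr);       /* consumes the mapping             */
        void (*unpin)(void);            /* undoes pin                       */
        void (*post_unpin)(void);       /* undoes pre_pin                   */
};

static int do_pin(const struct backend_ops *ops)
{
        void *vaddr;
        int err;

        err = ops->pre_pin(&vaddr);     /* ce->ops->pre_pin(ce, ww, &vaddr) */
        if (err)
                return err;

        err = ops->pin(vaddr);          /* ce->ops->pin(ce, vaddr)          */
        if (err)
                ops->post_unpin();      /* unwind only the pre_pin step     */
        return err;
}

static void do_unpin(const struct backend_ops *ops)
{
        ops->unpin();                   /* ce->ops->unpin(ce)               */
        ops->post_unpin();              /* ce->ops->post_unpin(ce)          */
}

/* Trivial stub backend so the sketch runs end to end. */
static int  stub_pre_pin(void **vaddr) { *vaddr = (void *)1; return 0; }
static int  stub_pin(void *vaddr)      { (void)vaddr; return 0; }
static void stub_unpin(void)           { puts("unpin"); }
static void stub_post_unpin(void)      { puts("post_unpin"); }

int main(void)
{
        const struct backend_ops ops = {
                .pre_pin = stub_pre_pin, .pin = stub_pin,
                .unpin = stub_unpin, .post_unpin = stub_post_unpin,
        };

        if (!do_pin(&ops))
                do_unpin(&ops);
        return 0;
}
```
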
intel_lrc.c
961 __lrc_init_regs(ce->lrc_reg_state, ce, engine, inhibit); in lrc_init_regs()
1049 return i915_ggtt_offset(ce->state) + context_wa_bb_offset(ce); in lrc_indirect_bb()
1168 ce->ring = ring; in lrc_alloc()
1169 ce->state = vma; in lrc_alloc()
1184 intel_ring_reset(ce->ring, ce->ring->emit); in lrc_reset()
1187 lrc_init_regs(ce, ce->engine, true); in lrc_reset()
1188 ce->lrc.lrca = lrc_update_regs(ce, ce->engine, ce->ring->tail); in lrc_reset()
1219 ce->lrc.lrca = lrc_update_regs(ce, engine, ce->ring->tail); in lrc_pin()
1240 if (!ce->state) in lrc_fini()
1249 struct intel_context *ce = container_of(kref, typeof(*ce), ref); in lrc_destroy() local
[all …]
intel_engine_pm.c
40 if (ce->state) { in dbg_poison_ce()
62 struct intel_context *ce; in __engine_unpark() local
69 ce = engine->kernel_context; in __engine_unpark()
70 if (ce) { in __engine_unpark()
78 dbg_poison_ce(ce); in __engine_unpark()
81 ce->ops->reset(ce); in __engine_unpark()
84 ce->timeline->seqno, in __engine_unpark()
86 ce->ring->emit); in __engine_unpark()
312 struct intel_context *ce; in intel_engine_reset_pinned_contexts() local
320 dbg_poison_ce(ce); in intel_engine_reset_pinned_contexts()
[all …]
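
intel_engine_pm.c pairs two ideas visible above: while an engine is parked its kernel context image is stomped by dbg_poison_ce(), and __engine_unpark() rebuilds it through ce->ops->reset(ce) before first use. A toy sketch of that poison-while-idle, reinitialise-before-reuse discipline (the poison byte, buffer and helpers are invented for illustration):

```c
/*
 * Toy version of the idea behind dbg_poison_ce(): stomp the saved state
 * while the engine is parked, rebuild it before reuse. The poison byte,
 * buffer and helpers are invented for illustration.
 */
#include <assert.h>
#include <string.h>

#define CONTEXT_POISON  0x5a
#define STATE_SIZE      4096

static unsigned char context_state[STATE_SIZE];

static void park_context(void)
{
        /* Any later use of stale contents now shows up as an obviously
         * bogus pattern instead of silently "working". */
        memset(context_state, CONTEXT_POISON, sizeof(context_state));
}

static void unpark_context(void)
{
        /* The driver rebuilds the image via ce->ops->reset(ce); here we
         * simply reinitialise it to a known-good (zeroed) state. */
        memset(context_state, 0, sizeof(context_state));
}

int main(void)
{
        park_context();
        assert(context_state[0] == CONTEXT_POISON);
        unpark_context();
        assert(context_state[STATE_SIZE - 1] == 0);
        return 0;
}
```
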
intel_lrc.h
36 int lrc_alloc(struct intel_context *ce,
38 void lrc_reset(struct intel_context *ce);
39 void lrc_fini(struct intel_context *ce);
43 lrc_pre_pin(struct intel_context *ce,
48 lrc_pin(struct intel_context *ce,
51 void lrc_unpin(struct intel_context *ce);
52 void lrc_post_unpin(struct intel_context *ce);
54 void lrc_init_state(struct intel_context *ce,
67 void lrc_update_offsets(struct intel_context *ce,
120 if (intel_context_is_barrier(ce)) in lrc_runtime_start()
[all …]
intel_breadcrumbs.c
83 struct intel_context *ce) in add_signaling_context() argument
97 if (!list_empty(&ce->signals)) in remove_signaling_context()
110 if (rq->context != ce) in check_signal_order()
172 struct intel_context *ce; in signal_irq_work() local
233 intel_context_put(ce); in signal_irq_work()
365 intel_context_get(ce); in insert_breadcrumb()
367 pos = &ce->signals; in insert_breadcrumb()
424 spin_lock(&ce->signal_lock); in i915_request_enable_breadcrumb()
448 intel_context_put(ce); in i915_request_cancel_breadcrumb()
483 intel_context_put(ce); in intel_context_remove_breadcrumbs()
[all …]
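
The intel_breadcrumbs.c hits show each queued breadcrumb pinning its context: insert_breadcrumb() takes intel_context_get(ce) when a request joins ce->signals, and the cancel/remove paths drop that reference with intel_context_put(ce). A simplified model of that list-entry-holds-a-reference rule (plain counters and a hand-rolled list, names invented; the real code uses kref, struct list_head and a per-context spinlock):

```c
/*
 * Simplified model of "every queued breadcrumb pins its context": inserting
 * a waiter takes a reference, cancelling or signalling it drops one.
 */
#include <stddef.h>
#include <stdio.h>

struct waiter { struct waiter *next; };

struct signal_list {
        int refcount;                   /* stands in for the context's kref */
        struct waiter *signals;         /* stands in for ce->signals        */
};

static void insert_waiter(struct signal_list *c, struct waiter *w)
{
        c->refcount++;                  /* intel_context_get(ce)            */
        w->next = c->signals;
        c->signals = w;
}

static void remove_waiter(struct signal_list *c)
{
        if (!c->signals)
                return;
        c->signals = c->signals->next;
        c->refcount--;                  /* intel_context_put(ce)            */
}

int main(void)
{
        struct signal_list c = { .refcount = 1, .signals = NULL };
        struct waiter a, b;

        insert_waiter(&c, &a);
        insert_waiter(&c, &b);
        remove_waiter(&c);
        remove_waiter(&c);
        printf("context refcount back to %d\n", c.refcount);
        return 0;
}
```
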
intel_context_sseu.c
17 const struct intel_context *ce, in gen8_emit_rpcs_config() argument
27 offset = i915_ggtt_offset(ce->state) + in gen8_emit_rpcs_config()
46 lockdep_assert_held(&ce->pin_mutex); in gen8_modify_rpcs()
54 if (!intel_context_pin_if_active(ce)) in gen8_modify_rpcs()
66 ret = gen8_emit_rpcs_config(rq, ce, sseu); in gen8_modify_rpcs()
70 intel_context_unpin(ce); in gen8_modify_rpcs()
82 ret = intel_context_lock_pinned(ce); in intel_context_reconfigure_sseu()
87 if (!memcmp(&ce->sseu, &sseu, sizeof(sseu))) in intel_context_reconfigure_sseu()
90 ret = gen8_modify_rpcs(ce, sseu); in intel_context_reconfigure_sseu()
92 ce->sseu = sseu; in intel_context_reconfigure_sseu()
[all …]
selftest_mocs.c
26 struct intel_context *ce; in mocs_context_create() local
29 if (IS_ERR(ce)) in mocs_context_create()
30 return ce; in mocs_context_create()
33 ce->ring_size = SZ_16K; in mocs_context_create()
35 return ce; in mocs_context_create()
304 if (IS_ERR(ce)) { in live_mocs_clean()
305 err = PTR_ERR(ce); in live_mocs_clean()
310 intel_context_put(ce); in live_mocs_clean()
415 if (IS_ERR(ce)) { in live_mocs_reset()
416 err = PTR_ERR(ce); in live_mocs_reset()
[all …]
selftest_lrc.c
419 if (IS_ERR(ce)) in __live_lrc_state()
420 return PTR_ERR(ce); in __live_lrc_state()
633 if (IS_ERR(ce)) in __live_lrc_gpr()
634 return PTR_ERR(ce); in __live_lrc_gpr()
890 data.ce[i] = tmp; in live_lrc_timestamp()
908 if (!data.ce[i]) in live_lrc_timestamp()
1603 setup_indirect_ctx_bb(ce, ce->engine, emit_indirect_ctx_bb_canary); in indirect_ctx_bb_setup()
1769 if (IS_ERR(ce)) in __lrc_garbage()
1770 return PTR_ERR(ce); in __lrc_garbage()
1856 if (IS_ERR(ce)) in __live_pphwsp_runtime()
[all …]
intel_context_types.h
41 int (*alloc)(struct intel_context *ce);
46 void (*close)(struct intel_context *ce);
50 void (*unpin)(struct intel_context *ce);
51 void (*post_unpin)(struct intel_context *ce);
53 void (*cancel_request)(struct intel_context *ce,
56 void (*enter)(struct intel_context *ce);
57 void (*exit)(struct intel_context *ce);
61 void (*update_stats)(struct intel_context *ce);
63 void (*reset)(struct intel_context *ce);
93 #define intel_context_inflight(ce) \ argument
[all …]
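
intel_context_types.h is where the ce->ops calls seen throughout these results are declared: a single table of function pointers (alloc, enter, exit, reset, destroy, ...) that each submission backend fills in with its own implementation. A trimmed, self-contained sketch of that vtable shape follows; the hook set and the "execlists-like" backend are illustrative, not the driver's full struct intel_context_ops.

```c
/*
 * Trimmed sketch of a per-backend ops table: one struct of function
 * pointers, with each backend supplying its own implementations.
 */
#include <stdio.h>

struct engine_ctx;

struct engine_ctx_ops {
        int  (*alloc)(struct engine_ctx *);
        void (*enter)(struct engine_ctx *);
        void (*exit)(struct engine_ctx *);
        void (*destroy)(struct engine_ctx *);
};

struct engine_ctx {
        const struct engine_ctx_ops *ops;
        const char *name;
};

/* An "execlists-like" backend filling in the table. */
static int  el_alloc(struct engine_ctx *c)   { printf("%s: alloc\n", c->name); return 0; }
static void el_enter(struct engine_ctx *c)   { printf("%s: enter\n", c->name); }
static void el_exit(struct engine_ctx *c)    { printf("%s: exit\n", c->name); }
static void el_destroy(struct engine_ctx *c) { printf("%s: destroy\n", c->name); }

static const struct engine_ctx_ops execlists_like_ops = {
        .alloc = el_alloc, .enter = el_enter,
        .exit = el_exit, .destroy = el_destroy,
};

int main(void)
{
        struct engine_ctx c = { .ops = &execlists_like_ops, .name = "ctx0" };

        /* Callers only ever dispatch through the table, exactly like the
         * ce->ops->enter(ce) / ce->ops->exit(ce) calls in the hits above. */
        if (!c.ops->alloc(&c)) {
                c.ops->enter(&c);
                c.ops->exit(&c);
        }
        c.ops->destroy(&c);
        return 0;
}
```
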
mock_engine.c
150 struct intel_context *ce = container_of(ref, typeof(*ce), ref); in mock_context_destroy() local
155 mock_ring_free(ce->ring); in mock_context_destroy()
159 intel_context_fini(ce); in mock_context_destroy()
160 intel_context_free(ce); in mock_context_destroy()
167 ce->ring = mock_ring(ce->engine); in mock_context_alloc()
168 if (!ce->ring) in mock_context_alloc()
171 ce->timeline = intel_timeline_create(ce->engine->gt); in mock_context_alloc()
173 kfree(ce->engine); in mock_context_alloc()
180 ce->timeline = NULL; in mock_context_alloc()
395 struct intel_context *ce; in mock_engine_init() local
[all …]
selftest_ring_submission.c
91 struct intel_context *ce; in new_context_sync() local
95 if (IS_ERR(ce)) in new_context_sync()
96 return PTR_ERR(ce); in new_context_sync()
98 err = context_sync(ce); in new_context_sync()
99 intel_context_put(ce); in new_context_sync()
153 if (IS_ERR(ce)) in double_context_sync_00()
154 return PTR_ERR(ce); in double_context_sync_00()
162 intel_context_put(ce); in double_context_sync_00()
180 if (IS_ERR(ce)) in kernel_context_sync_00()
181 return PTR_ERR(ce); in kernel_context_sync_00()
[all …]
selftest_workarounds.c
277 if (IS_ERR(ce)) in switch_to_scratch_context()
278 return PTR_ERR(ce); in switch_to_scratch_context()
281 intel_context_put(ce); in switch_to_scratch_context()
311 if (IS_ERR(ce)) in check_whitelist_across_reset()
312 return PTR_ERR(ce); in check_whitelist_across_reset()
361 intel_context_put(ce); in check_whitelist_across_reset()
362 ce = tmp; in check_whitelist_across_reset()
775 if (IS_ERR(ce)) in live_dirty_whitelist()
1179 if (IS_ERR(ce)) in verify_wa_lists()
1273 if (IS_ERR(ce)) { in live_engine_reset_workarounds()
[all …]
intel_migrate.c
264 return ce; in pinned_context()
274 if (IS_ERR(ce)) in intel_migrate_init()
277 m->context = ce; in intel_migrate_init()
315 if (IS_ERR(ce)) in intel_migrate_create_context()
316 return ce; in intel_migrate_create_context()
318 ce->ring = NULL; in intel_migrate_create_context()
324 return ce; in intel_migrate_create_context()
699 GEM_BUG_ON(ce->vm != ce->engine->gt->migrate.context->vm); in intel_context_migrate_copy()
998 GEM_BUG_ON(ce->vm != ce->engine->gt->migrate.context->vm); in intel_context_migrate_clear()
1097 if (IS_ERR(ce)) in intel_migrate_copy()
[all …]
intel_engine_heartbeat.c
72 intel_context_enter(ce); in heartbeat_create()
73 rq = __i915_request_create(ce, gfp); in heartbeat_create()
74 intel_context_exit(ce); in heartbeat_create()
140 struct intel_context *ce = engine->kernel_context; in heartbeat() local
212 if (!mutex_trylock(&ce->timeline->mutex)) { in heartbeat()
229 mutex_unlock(&ce->timeline->mutex); in heartbeat()
280 lockdep_assert_held(&ce->timeline->mutex); in __intel_engine_pulse()
347 mutex_unlock(&ce->timeline->mutex); in intel_engine_set_heartbeat()
368 mutex_unlock(&ce->timeline->mutex); in intel_engine_pulse()
394 rq = heartbeat_create(ce, GFP_KERNEL); in intel_engine_flush_barriers()
[all …]
selftest_tlb.c
32 pte_tlbinv(struct intel_context *ce, in pte_tlbinv() argument
124 rq = i915_request_create(ce); in pte_tlbinv()
161 ce->vm->insert_entries(ce->vm, &vb_res, pat_index, pte_flags); in pte_tlbinv()
168 ce->engine->name); in pte_tlbinv()
291 struct intel_context *ce; in mem_tlbinv() local
295 if (IS_ERR(ce)) { in mem_tlbinv()
296 err = PTR_ERR(ce); in mem_tlbinv()
300 i915_vm_put(ce->vm); in mem_tlbinv()
317 err = pte_tlbinv(ce, va, va, in mem_tlbinv()
337 intel_context_unpin(ce); in mem_tlbinv()
[all …]
selftest_execlists.c
128 if (IS_ERR(ce)) { in live_sanitycheck()
222 ce[n] = tmp; in live_unlite_restore()
225 intel_ring_reset(ce[1]->ring, ce[1]->ring->size / 2); in live_unlite_restore()
226 lrc_update_regs(ce[1], engine, ce[1]->ring->head); in live_unlite_restore()
379 ce[n] = tmp; in live_unlite_ring()
451 ce[0]->ring->tail, ce[0]->ring->emit, in live_unlite_ring()
452 ce[1]->ring->tail, ce[1]->ring->emit); in live_unlite_ring()
864 if (IS_ERR(ce)) in semaphore_queue()
2830 ce[n] = tmp; in __live_preempt_ring()
2896 ce[0]->ring->tail, ce[0]->ring->emit, in __live_preempt_ring()
[all …]
/openbsd/sys/dev/pci/drm/i915/gt/uc/
intel_guc_submission.c
1792 ce = list_next_entry(ce, parallel.child_link); in __guc_reset_context()
2770 ret = deregister_context(ce, ce->guc_id.id); in try_context_registration()
3230 deregister_context(ce, ce->guc_id.id); in guc_lrc_desc_unpin()
3269 if (ce) in guc_flush_destroyed_contexts()
3291 if (ce) in deregister_destroyed_contexts()
3352 return lrc_alloc(ce, ce->engine); in guc_context_alloc()
4782 ce->guc_id.id, ce->engine->name); in capture_error_state()
4799 __guc_reset_context(ce, ce->engine->mask); in guc_context_replay()
4809 ce->guc_id.id, ce->engine->name, in guc_handle_context_reset()
4818 ce->guc_id.id, ce->engine->name); in guc_handle_context_reset()
[all …]
selftest_guc.c
57 struct intel_context *ce; in intel_guc_scrub_ctbs() local
68 if (IS_ERR(ce)) { in intel_guc_scrub_ctbs()
69 ret = PTR_ERR(ce); in intel_guc_scrub_ctbs()
82 ce->drop_deregister = true; in intel_guc_scrub_ctbs()
87 intel_context_put(ce); in intel_guc_scrub_ctbs()
151 struct intel_context **ce; in intel_guc_steal_guc_ids() local
156 ce = kcalloc(GUC_MAX_CONTEXT_ID, sizeof(*ce), GFP_KERNEL); in intel_guc_steal_guc_ids()
157 if (!ce) { in intel_guc_steal_guc_ids()
172 ce[context_index] = NULL; in intel_guc_steal_guc_ids()
199 ce[context_index--] = NULL; in intel_guc_steal_guc_ids()
[all …]
selftest_guc_multi_lrc.c
58 GEM_BUG_ON(!intel_context_is_parent(ce)); in multi_lrc_context_unpin()
60 for_each_child(ce, child) in multi_lrc_context_unpin()
62 intel_context_unpin(ce); in multi_lrc_context_unpin()
65 static void multi_lrc_context_put(struct intel_context *ce) in multi_lrc_context_put() argument
67 GEM_BUG_ON(!intel_context_is_parent(ce)); in multi_lrc_context_put()
73 intel_context_put(ce); in multi_lrc_context_put()
77 multi_lrc_nop_request(struct intel_context *ce) in multi_lrc_nop_request() argument
83 GEM_BUG_ON(!intel_context_is_parent(ce)); in multi_lrc_nop_request()
85 rq = intel_context_create_request(ce); in multi_lrc_nop_request()
92 for_each_child(ce, child) { in multi_lrc_nop_request()
[all …]
/openbsd/sys/dev/pci/drm/i915/selftests/
i915_request.c
654 if (IS_ERR(ce)) { in __cancel_inactive()
707 if (IS_ERR(ce)) { in __cancel_active()
768 if (IS_ERR(ce)) { in __cancel_completed()
837 if (IS_ERR(ce)) { in __cancel_reset()
2383 ce, ce->engine->kernel_context in measure_context_switch()
2871 ps->ce[idx++] = ce; in perf_series_engines()
2887 struct intel_context *ce = ps->ce[idx]; in perf_series_engines() local
2906 struct intel_context *ce = ps->ce[idx]; in perf_series_engines() local
2976 if (IS_ERR(ce)) { in p_sync0()
3051 if (IS_ERR(ce)) { in p_sync1()
[all …]
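
Nearly every selftest hit above follows the same error idiom: if (IS_ERR(ce)) return PTR_ERR(ce). The kernel packs small negative errno values into the top of the pointer range so one return value can carry either a valid pointer or an error code. A user-space model of that convention (it mirrors the idea, not the kernel's actual headers; create_thing() is a hypothetical constructor):

```c
/*
 * User-space model of the ERR_PTR/IS_ERR/PTR_ERR convention the selftests
 * lean on; this mirrors the idea, not the kernel's headers.
 */
#include <errno.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_ERRNO 4095

static inline void *ERR_PTR(long err)      { return (void *)(intptr_t)err; }
static inline long  PTR_ERR(const void *p) { return (long)(intptr_t)p; }
static inline int   IS_ERR(const void *p)
{
        /* Error "pointers" occupy the last page of the address space,
         * i.e. the values -1 .. -MAX_ERRNO. */
        return (uintptr_t)p >= (uintptr_t)-MAX_ERRNO;
}

/* Hypothetical constructor that returns either an object or an errno. */
static void *create_thing(int fail)
{
        static int thing;

        return fail ? ERR_PTR(-ENOMEM) : &thing;
}

int main(void)
{
        void *p = create_thing(1);

        if (IS_ERR(p))                  /* the ubiquitous if (IS_ERR(ce)) */
                printf("creation failed: %ld\n", PTR_ERR(p));
        return 0;
}
```
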
/openbsd/sys/dev/acpi/
acpidmar.h
307 ce->lo &= ~CTX_FPD; in context_set_fpd()
309 ce->lo |= CTX_FPD; in context_set_fpd()
316 ce->lo |= CTX_P; in context_set_present()
323 ce->lo &= VTD_PAGE_MASK; in context_set_slpte()
324 ce->lo |= (slpte & ~VTD_PAGE_MASK); in context_set_slpte()
331 ce->lo &= ~(CTX_T_MASK << CTX_T_SHIFT); in context_set_translation_type()
353 context_pte(struct context_entry *ce) in context_pte() argument
355 return (ce->lo & ~VTD_PAGE_MASK); in context_pte()
367 context_domain_id(struct context_entry *ce) in context_domain_id() argument
383 return (ce->lo & CTX_P); in context_entry_is_valid()
[all …]
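
The acpidmar.h matches are all mask-and-shift accessors over the 64-bit lo word of a DMAR context entry: set or clear a flag (CTX_P, CTX_FPD), clear a multi-bit field before writing it (CTX_T_MASK/CTX_T_SHIFT), or splice a page-aligned table address around the low flag bits (VTD_PAGE_MASK). A standalone sketch of that style of descriptor manipulation, with illustrative bit positions rather than the real VT-d layout:

```c
/*
 * Standalone sketch of mask-and-shift field accessors over a 64-bit
 * descriptor word. Bit positions are illustrative, not the VT-d layout.
 */
#include <assert.h>
#include <stdint.h>

#define ENTRY_PRESENT   (1ULL << 0)     /* models CTX_P                   */
#define ENTRY_FPD       (1ULL << 1)     /* models CTX_FPD                 */
#define ENTRY_T_SHIFT   2
#define ENTRY_T_MASK    0x3ULL          /* models CTX_T_MASK/CTX_T_SHIFT  */
#define LOW_BITS_MASK   0xfffULL        /* models VTD_PAGE_MASK           */

struct context_entry { uint64_t lo, hi; };

static void entry_set_present(struct context_entry *ce)
{
        ce->lo |= ENTRY_PRESENT;
}

static void entry_set_translation_type(struct context_entry *ce, uint64_t t)
{
        ce->lo &= ~(ENTRY_T_MASK << ENTRY_T_SHIFT);     /* clear the field */
        ce->lo |= (t & ENTRY_T_MASK) << ENTRY_T_SHIFT;  /* then set it     */
}

static void entry_set_slpte(struct context_entry *ce, uint64_t slpte)
{
        ce->lo &= LOW_BITS_MASK;                /* keep the flag bits        */
        ce->lo |= slpte & ~LOW_BITS_MASK;       /* install the table address */
}

static uint64_t entry_pte(const struct context_entry *ce)
{
        return ce->lo & ~LOW_BITS_MASK;
}

int main(void)
{
        struct context_entry ce = { 0, 0 };

        entry_set_slpte(&ce, 0x123456000ULL);
        entry_set_translation_type(&ce, 1);
        entry_set_present(&ce);
        assert(entry_pte(&ce) == 0x123456000ULL);
        assert(ce.lo & ENTRY_PRESENT);
        return 0;
}
```
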
/openbsd/sys/dev/pci/drm/i915/gem/selftests/
i915_gem_context.c
334 data[m++].ce[0] = intel_context_get(ce); in live_parallel_switch()
347 if (!data[m].ce[0]) in live_parallel_switch()
350 ce = intel_context_create(data[m].ce[0]->engine); in live_parallel_switch()
351 if (IS_ERR(ce)) { in live_parallel_switch()
352 err = PTR_ERR(ce); in live_parallel_switch()
362 data[m].ce[n] = ce; in live_parallel_switch()
369 if (!data[n].ce[0]) in live_parallel_switch()
390 if (!data[n].ce[0]) in live_parallel_switch()
415 if (!data[n].ce[m]) in live_parallel_switch()
1292 if (IS_ERR(ce)) { in __igt_ctx_sseu()
[all …]
/openbsd/sbin/unwind/libunbound/validator/
val_nsec3.c
957 ce->ce = nm; in nsec3_find_closest_encloser()
1015 memset(ce, 0, sizeof(*ce)); in nsec3_prove_closest_encloser()
1146 log_assert(ce.ce); in nsec3_do_prove_nameerror()
1147 wc = nsec3_ce_wildcard(ct->region, ce.ce, ce.ce_len, &wclen); in nsec3_do_prove_nameerror()
1173 if(ce.nc_rrset && nsec3_has_optout(ce.nc_rrset, ce.nc_rr)) { in nsec3_do_prove_nameerror()
1295 log_assert(ce.ce); in nsec3_do_prove_nodata()
1296 wc = nsec3_ce_wildcard(ct->region, ce.ce, ce.ce_len, &wclen); in nsec3_do_prove_nodata()
1322 if(ce.nc_rrset && nsec3_has_optout(ce.nc_rrset, ce.nc_rr)) { in nsec3_do_prove_nodata()
1409 memset(&ce, 0, sizeof(ce)); in nsec3_prove_wildcard()
1410 ce.ce = wc; in nsec3_prove_wildcard()
[all …]
/openbsd/usr.sbin/unbound/validator/
val_nsec3.c
957 ce->ce = nm; in nsec3_find_closest_encloser()
1015 memset(ce, 0, sizeof(*ce)); in nsec3_prove_closest_encloser()
1146 log_assert(ce.ce); in nsec3_do_prove_nameerror()
1147 wc = nsec3_ce_wildcard(ct->region, ce.ce, ce.ce_len, &wclen); in nsec3_do_prove_nameerror()
1173 if(ce.nc_rrset && nsec3_has_optout(ce.nc_rrset, ce.nc_rr)) { in nsec3_do_prove_nameerror()
1295 log_assert(ce.ce); in nsec3_do_prove_nodata()
1296 wc = nsec3_ce_wildcard(ct->region, ce.ce, ce.ce_len, &wclen); in nsec3_do_prove_nodata()
1322 if(ce.nc_rrset && nsec3_has_optout(ce.nc_rrset, ce.nc_rr)) { in nsec3_do_prove_nodata()
1409 memset(&ce, 0, sizeof(ce)); in nsec3_prove_wildcard()
1410 ce.ce = wc; in nsec3_prove_wildcard()
[all …]
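
Both validator listings above (the same bundled code under sbin/unwind/libunbound and usr.sbin/unbound) hit the NSEC3 closest-encloser logic: once nsec3_find_closest_encloser() has filled ce.ce, nsec3_ce_wildcard() derives the wildcard name by prepending a single '*' label to the closest encloser in DNS wire format. A simplified sketch of that derivation (no region allocator, and ce_wildcard is an invented helper name):

```c
/*
 * Simplified sketch of deriving "*.<closest encloser>" in DNS wire format:
 * a one-octet '*' label prepended to the encloser's wire-format name.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Prepend the "*" label; returns the new length, or 0 if it would not fit. */
static size_t ce_wildcard(const uint8_t *ce, size_t ce_len,
    uint8_t *out, size_t out_len)
{
        if (ce_len + 2 > out_len || ce_len + 2 > 255)
                return 0;
        out[0] = 1;                     /* label length                     */
        out[1] = '*';                   /* label content                    */
        memcpy(out + 2, ce, ce_len);    /* rest of the owner name           */
        return ce_len + 2;
}

int main(void)
{
        /* "example.com." in wire format. */
        const uint8_t ce[] = { 7,'e','x','a','m','p','l','e',3,'c','o','m',0 };
        uint8_t wc[255];
        size_t wclen = ce_wildcard(ce, sizeof(ce), wc, sizeof(wc));

        printf("wildcard name is %zu octets, first label is \"%c\"\n",
            wclen, wc[1]);
        return 0;
}
```
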
