Searched refs:seqno (Results 1 – 25 of 64) sorted by relevance

/dragonfly/sys/dev/drm/i915/
i915_gem_request.c
210 if (!i915_seqno_passed(seqno, tl->seqno)) { in reset_all_global_seqno()
222 tl->seqno = seqno; in reset_all_global_seqno()
238 if (seqno == 0) in i915_gem_set_global_seqno()
271 u32 seqno = engine->timeline->seqno; in reserve_engine() local
448 return ++tl->seqno; in timeline_get_seqno()
455 u32 seqno; in __i915_gem_request_submit() local
716 GEM_BUG_ON(req->timeline->seqno != req->fence.seqno); in i915_gem_request_alloc()
761 u32 seqno; in i915_gem_request_await_request() local
766 if (!seqno) in i915_gem_request_await_request()
930 GEM_BUG_ON(timeline->seqno != request->fence.seqno); in __i915_add_request()
[all …]
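
Nearly every comparison in this file goes through i915_seqno_passed() rather than a plain >=, so the test stays correct when the 32-bit global seqno wraps. The helper's definition is not among these results; the following is only a minimal sketch of the usual idiom, under assumed names:

    /* Wraparound-safe "has seq1 reached seq2?" check, i915_seqno_passed() style. */
    #include <stdbool.h>
    #include <stdint.h>

    static inline bool seqno_passed(uint32_t seq1, uint32_t seq2)
    {
        /* The signed difference stays correct across u32 wraparound,
         * provided the two values are within 2^31 of each other. */
        return (int32_t)(seq1 - seq2) >= 0;
    }
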
i915_gem_timeline.h
38 u32 seqno; member
100 u64 context, u32 seqno) in __intel_timeline_sync_set() argument
102 return i915_syncmap_set(&tl->sync, context, seqno); in __intel_timeline_sync_set()
108 return __intel_timeline_sync_set(tl, fence->context, fence->seqno); in intel_timeline_sync_set()
112 u64 context, u32 seqno) in __intel_timeline_sync_is_later() argument
114 return i915_syncmap_is_later(&tl->sync, context, seqno); in __intel_timeline_sync_is_later()
120 return __intel_timeline_sync_is_later(tl, fence->context, fence->seqno); in intel_timeline_sync_is_later()
intel_breadcrumbs.c
346 u32 seqno; in __intel_engine_add_wait() local
371 if (i915_seqno_passed(seqno, wait->seqno)) { in __intel_engine_add_wait()
379 if (wait->seqno == to_wait(parent)->seqno) { in __intel_engine_add_wait()
394 if (i915_seqno_passed(seqno, to_wait(parent)->seqno)) in __intel_engine_add_wait()
460 wait->seqno - 1); in intel_engine_add_wait()
513 while (i915_seqno_passed(seqno, to_wait(next)->seqno)) { in __intel_engine_remove_wait()
677 u32 seqno; in intel_engine_enable_signaling() local
691 if (!seqno) in intel_engine_enable_signaling()
696 request->signaling.wait.seqno = seqno; in intel_engine_enable_signaling()
724 if (i915_seqno_passed(seqno, in intel_engine_enable_signaling()
[all …]
i915_syncmap.c
154 bool i915_syncmap_is_later(struct i915_syncmap **root, u64 id, u32 seqno) in i915_syncmap_is_later() argument
195 return seqno_later(__sync_seqno(p)[idx], seqno); in i915_syncmap_is_later()
214 static inline void __sync_set_seqno(struct i915_syncmap *p, u64 id, u32 seqno) in __sync_set_seqno() argument
219 __sync_seqno(p)[idx] = seqno; in __sync_set_seqno()
230 static noinline int __sync_set(struct i915_syncmap **root, u64 id, u32 seqno) in __sync_set() argument
335 __sync_set_seqno(p, id, seqno); in __sync_set()
353 int i915_syncmap_set(struct i915_syncmap **root, u64 id, u32 seqno) in i915_syncmap_set() argument
362 __sync_set_seqno(p, id, seqno); in i915_syncmap_set()
366 return __sync_set(root, id, seqno); in i915_syncmap_set()
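i915_syncmap_set() and i915_syncmap_is_later() record, per u64 context id, the newest seqno already synchronized against, so redundant waits can be skipped. The real map is a compact tree; the toy, fixed-size stand-in below only illustrates the contract the callers above rely on, and all names in it are illustrative:

    #include <stdbool.h>
    #include <stdint.h>

    #define TOY_SLOTS 64

    struct toy_syncmap {
        uint64_t id[TOY_SLOTS];     /* context ids seen so far */
        uint32_t seqno[TOY_SLOTS];  /* newest seqno recorded for each id */
        unsigned used;
    };

    /* Has a seqno at least this new already been recorded for this context? */
    static bool toy_is_later(const struct toy_syncmap *m, uint64_t id, uint32_t seqno)
    {
        for (unsigned i = 0; i < m->used; i++)
            if (m->id[i] == id)
                return (int32_t)(m->seqno[i] - seqno) >= 0;
        return false;
    }

    /* Remember the newest seqno for this context; returns -1 when full. */
    static int toy_set(struct toy_syncmap *m, uint64_t id, uint32_t seqno)
    {
        for (unsigned i = 0; i < m->used; i++)
            if (m->id[i] == id) {
                m->seqno[i] = seqno;
                return 0;
            }
        if (m->used == TOY_SLOTS)
            return -1;
        m->id[m->used] = id;
        m->seqno[m->used] = seqno;
        m->used++;
        return 0;
    }
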
intel_hangcheck.c
74 semaphore_waits_for(struct intel_engine_cs *engine, u32 *seqno) in semaphore_waits_for() argument
137 *seqno = ioread32(vaddr + head + 4) + 1; in semaphore_waits_for()
150 u32 seqno; in semaphore_passed() local
154 signaller = semaphore_waits_for(engine, &seqno); in semaphore_passed()
165 if (i915_seqno_passed(intel_engine_get_seqno(signaller), seqno)) in semaphore_passed()
308 hc->seqno = intel_engine_get_seqno(engine); in hangcheck_load_sample()
315 engine->hangcheck.seqno = hc->seqno; in hangcheck_store_sample()
324 if (engine->hangcheck.seqno != hc->seqno) in hangcheck_get_action()
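Hangcheck samples intel_engine_get_seqno() into hc->seqno (hangcheck_load_sample/hangcheck_store_sample above) and compares successive samples: an engine whose seqno has not advanced between samples while it still has work queued is a hang suspect. A hedged sketch of just that progress test, not the driver's full state machine:

    #include <stdbool.h>
    #include <stdint.h>

    /* Illustrative only: did the engine make progress between two hangcheck
     * samples?  prev and curr stand in for successive hc->seqno snapshots. */
    static bool engine_made_progress(uint32_t prev, uint32_t curr, bool busy)
    {
        if (!busy)
            return true;     /* an idle engine is never considered hung */
        return curr != prev; /* the seqno advanced since the last sample */
    }
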
i915_gem_request.h
43 u32 seqno; member
323 __i915_gem_request_completed(const struct drm_i915_gem_request *req, u32 seqno) in __i915_gem_request_completed() argument
325 GEM_BUG_ON(!seqno); in __i915_gem_request_completed()
326 return i915_seqno_passed(intel_engine_get_seqno(req->engine), seqno) && in __i915_gem_request_completed()
327 seqno == i915_gem_request_global_seqno(req); in __i915_gem_request_completed()
333 u32 seqno; in i915_gem_request_completed() local
335 seqno = i915_gem_request_global_seqno(req); in i915_gem_request_completed()
336 if (!seqno) in i915_gem_request_completed()
339 return __i915_gem_request_completed(req, seqno); in i915_gem_request_completed()
intel_ringbuffer.h
121 u32 seqno; member
726 void intel_engine_init_global_seqno(struct intel_engine_cs *engine, u32 seqno);
755 return READ_ONCE(engine->timeline->seqno); in intel_engine_last_submit()
788 static inline void intel_wait_init_for_seqno(struct intel_wait *wait, u32 seqno) in intel_wait_init_for_seqno() argument
791 wait->seqno = seqno; in intel_wait_init_for_seqno()
796 return wait->seqno; in intel_wait_has_seqno()
800 intel_wait_update_seqno(struct intel_wait *wait, u32 seqno) in intel_wait_update_seqno() argument
802 wait->seqno = seqno; in intel_wait_update_seqno()
814 intel_wait_check_seqno(const struct intel_wait *wait, u32 seqno) in intel_wait_check_seqno() argument
816 return wait->seqno == seqno; in intel_wait_check_seqno()
i915_syncmap.h
34 int i915_syncmap_set(struct i915_syncmap **root, u64 id, u32 seqno);
35 bool i915_syncmap_is_later(struct i915_syncmap **root, u64 id, u32 seqno);
/dragonfly/lib/libthread_xu/thread/
thr_spec.c
63 _thread_keytable[i].seqno++; in _pthread_key_create()
121 if (curthread->specific[key].seqno == in _thread_cleanupspecific()
122 _thread_keytable[key].seqno) { in _thread_cleanupspecific()
208 pthread->specific[key].seqno = in _pthread_setspecific()
209 _thread_keytable[key].seqno; in _pthread_setspecific()
236 (pthread->specific[key].seqno == _thread_keytable[key].seqno)) { in _pthread_getspecific()
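Here the seqno protects against reuse of a key slot: _pthread_key_create() bumps _thread_keytable[i].seqno whenever a key index is handed out again, and getspecific/setspecific only trust a thread's stored value when its recorded seqno still matches the table's. A simplified sketch of that staleness check, with hypothetical types:

    #include <stddef.h>

    struct key_entry   { int allocated; unsigned seqno; };
    struct value_entry { const void *data; unsigned seqno; };

    static const void *
    get_specific(const struct key_entry *table, const struct value_entry *specific, int key)
    {
        /* Return the stored value only if it was set for the current
         * incarnation of this key index; otherwise it is stale. */
        if (table[key].allocated && specific[key].seqno == table[key].seqno)
            return specific[key].data;
        return NULL;
    }
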
/dragonfly/usr.sbin/ppp/
deflate.c
45 u_short seqno; member
61 state->seqno = 0; in DeflateResetOutput()
99 *wp++ = state->seqno >> 8; in DeflateOutput()
100 *wp++ = state->seqno & 0377; in DeflateOutput()
102 state->seqno++; in DeflateOutput()
120 state->seqno--; in DeflateOutput()
193 state->seqno = 0; in DeflateResetInput()
216 if (seq != state->seqno) { in DeflateInput()
223 state->seqno = seq; in DeflateInput()
232 state->seqno++; in DeflateInput()
[all …]
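
The PPP Deflate framing prepends a 16-bit sequence number to each compressed fragment: DeflateOutput() stores it big-endian one byte at a time (the two *wp++ writes above) and DeflateInput() discards the packet and resynchronizes when the received value disagrees with state->seqno. The byte packing on its own, as a small standalone sketch:

    #include <stdint.h>

    /* Pack/unpack the 16-bit Deflate sequence number, big-endian,
     * matching the byte stores shown above (0377 is just octal 0xff). */
    static void put_seqno(uint8_t *wp, uint16_t seqno)
    {
        wp[0] = seqno >> 8;   /* high byte first */
        wp[1] = seqno & 0xff;
    }

    static uint16_t get_seqno(const uint8_t *rp)
    {
        return (uint16_t)((rp[0] << 8) | rp[1]);
    }
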
/dragonfly/contrib/tcpdump/
print-babel.c
447 u_short seqno, interval, unicast; in babel_print_v2_tlvs() local
454 seqno = GET_BE_U_2(message + 4); in babel_print_v2_tlvs()
458 ND_PRINT("seqno %u ", seqno); in babel_print_v2_tlvs()
537 u_short interval, seqno, metric; in babel_print_v2_tlvs() local
553 seqno = GET_BE_U_2(message + 8); in babel_print_v2_tlvs()
599 u_short seqno; in babel_print_v2_tlvs() local
604 seqno = GET_BE_U_2(message + 4); in babel_print_v2_tlvs()
648 u_short interval, seqno, metric; in babel_print_v2_tlvs() local
659 seqno = GET_BE_U_2(message + 8); in babel_print_v2_tlvs()
725 u_short seqno; in babel_print_v2_tlvs() local
[all …]
print-dccp.c
227 uint64_t seqno; in dccp_seqno() local
231 seqno = GET_BE_U_6(dhx->dccph_seq); in dccp_seqno()
233 seqno = GET_BE_U_3(dh->dccph_seq); in dccp_seqno()
236 return seqno; in dccp_seqno()
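dccp_seqno() reads either a 48-bit or a 24-bit big-endian sequence number (GET_BE_U_6 vs GET_BE_U_3 above) depending on whether the packet uses DCCP extended sequence numbers. The same extraction written out by hand over a raw byte buffer, as an illustration:

    #include <stdbool.h>
    #include <stdint.h>

    /* Assemble a DCCP sequence number from big-endian bytes: 48 bits when
     * extended sequence numbers are in use, 24 bits otherwise. */
    static uint64_t dccp_seqno_from_bytes(const uint8_t *p, bool extended)
    {
        int len = extended ? 6 : 3;
        uint64_t seqno = 0;

        for (int i = 0; i < len; i++)
            seqno = (seqno << 8) | p[i];
        return seqno;
    }
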
/dragonfly/crypto/openssh/
mac.c
164 mac_compute(struct sshmac *mac, u_int32_t seqno, in mac_compute() argument
180 put_u32(b, seqno); in mac_compute()
189 POKE_U64(nonce, seqno); in mac_compute()
194 put_u64(nonce, seqno); in mac_compute()
210 mac_check(struct sshmac *mac, u_int32_t seqno, in mac_check() argument
219 if ((r = mac_compute(mac, seqno, data, dlen, in mac_check()
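In the SSH transport the MAC is computed over the 32-bit packet sequence number followed by the packet itself (the put_u32(b, seqno) above), which is what lets mac_check() reject replayed or reordered packets; the UMAC-style paths instead fold the seqno into a 64-bit nonce (POKE_U64/put_u64). A sketch of just the seqno-prefixed buffer layout, independent of any particular MAC:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Illustrative only: lay out the bytes the MAC is computed over, i.e. the
     * big-endian 32-bit sequence number followed by the unencrypted packet. */
    static size_t mac_input(uint8_t *out, uint32_t seqno, const uint8_t *pkt, size_t len)
    {
        out[0] = (uint8_t)(seqno >> 24);
        out[1] = (uint8_t)(seqno >> 16);
        out[2] = (uint8_t)(seqno >> 8);
        out[3] = (uint8_t)seqno;
        memcpy(out + 4, pkt, len);
        return 4 + len;   /* feed this buffer to the negotiated MAC */
    }
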
/dragonfly/sys/vfs/hammer/
hammer_undo.c
305 hammer_format_undo(hammer_mount_t hmp, void *base, uint32_t seqno) in hammer_format_undo() argument
321 head->hdr_seq = seqno++; in hammer_format_undo()
350 uint32_t seqno; in hammer_upgrade_undo_4() local
378 seqno = 0; in hammer_upgrade_undo_4()
391 head->hdr_seq = seqno; in hammer_upgrade_undo_4()
404 ++seqno; in hammer_upgrade_undo_4()
410 hmp->undo_seqno = seqno; in hammer_upgrade_undo_4()
411 hmkprintf(hmp, "version upgrade seqno start %08x\n", seqno); in hammer_upgrade_undo_4()
hammer_recover.c
217 uint32_t seqno; in hammer_recover_stage1() local
258 seqno = 0; in hammer_recover_stage1()
267 seqno = head->head.hdr_seq; in hammer_recover_stage1()
294 ++seqno; in hammer_recover_stage1()
295 hmp->recover_stage2_seqno = seqno; in hammer_recover_stage1()
304 if (seqno != head->head.hdr_seq) { in hammer_recover_stage1()
309 ++seqno; in hammer_recover_stage1()
342 hmp->undo_seqno = seqno; in hammer_recover_stage1()
356 seqno); in hammer_recover_stage1()
529 uint32_t seqno; in hammer_recover_stage2() local
[all …]
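
Both files treat hdr_seq as a strictly increasing stamp on the UNDO/REDO FIFO: hammer_format_undo() stamps every record with seqno++, and stage-1 recovery walks the FIFO, advancing its expected seqno and stopping where a record's hdr_seq no longer matches. That continuity check, reduced to a sketch with hypothetical types:

    #include <stddef.h>
    #include <stdint.h>

    struct undo_rec { uint32_t hdr_seq; };

    /* Count how many records are accepted before the sequence stamps stop
     * being consecutive; the first gap marks the end of the valid FIFO. */
    static size_t count_contiguous(const struct undo_rec *rec, size_t nrec, uint32_t seqno)
    {
        size_t n;

        for (n = 0; n < nrec; n++) {
            if (rec[n].hdr_seq != seqno)
                break;
            ++seqno;
        }
        return n;
    }
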
/dragonfly/sys/dev/drm/include/linux/
dma-fence.h
48 unsigned seqno; member
85 spinlock_t *lock, u64 context, unsigned seqno);
164 return (a->seqno > b->seqno); in dma_fence_is_later()
/dragonfly/test/testcases/crypto/aesxts/
aes_xts.c
140 u_int64_t seqno; member
1818 if (syscrypt(tv->key, tv->key_len, tv->seqno, tv->plaintext, in main()
1831 if (syscrypt(tv->key, tv->key_len, tv->seqno, tv->ciphertext, in main()
/dragonfly/contrib/gcc-8.0/gcc/
sel-sched-ir.c
4035 int seqno; in get_seqno_by_succs() local
4046 seqno = INT_MAX; in get_seqno_by_succs()
4050 seqno = MIN (seqno, INSN_SEQNO (succ)); in get_seqno_by_succs()
4055 return seqno; in get_seqno_by_succs()
4063 int seqno; in get_seqno_for_a_jump() local
4114 if (seqno < 0) in get_seqno_for_a_jump()
4117 if (seqno < 0) in get_seqno_for_a_jump()
4127 return seqno; in get_seqno_for_a_jump()
4152 seqno = MAX (seqno, INSN_SEQNO (preds[i])); in get_seqno_by_preds()
4154 return seqno; in get_seqno_by_preds()
[all …]
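
In the selective scheduler the seqno orders insns relative to the scheduling fences: get_seqno_by_succs() takes the minimum of the successors' INSN_SEQNO and get_seqno_by_preds() the maximum of the predecessors' (the MIN/MAX lines above), with a negative result meaning "no usable seqno" to the callers. A minimal standalone analogue of the successor case, under assumed names:

    #include <limits.h>
    #include <stddef.h>

    /* Smallest seqno among the successors, or -1 when there are none. */
    static int seqno_by_succs(const int *succ_seqno, size_t nsucc)
    {
        int seqno = INT_MAX;

        for (size_t i = 0; i < nsucc; i++)
            if (succ_seqno[i] < seqno)
                seqno = succ_seqno[i];
        return nsucc ? seqno : -1;
    }
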
sel-sched.c
676 if (seqno > 0 && seqno <= orig_max_seqno in extract_new_fences_from()
4729 int seqno; in find_seqno_for_bookkeeping() local
4752 if (seqno < 0) in find_seqno_for_bookkeeping()
4755 seqno = 1; in find_seqno_for_bookkeeping()
4759 gcc_assert (seqno > 0); in find_seqno_for_bookkeeping()
4760 return seqno; in find_seqno_for_bookkeeping()
7332 int seqno = 0; in schedule_on_fences() local
7389 *min_seqno = seqno; in find_min_max_seqno()
7391 *max_seqno = seqno; in find_min_max_seqno()
7415 int seqno; in calculate_new_fences() local
[all …]
/dragonfly/contrib/lvm2/dist/lib/format_text/
archiver.c
129 vg->seqno); in archive()
386 log_verbose("Creating volume group backup \"%s\" (seqno %u).", file, vg->seqno); in backup_to_file()
432 (vg->seqno == vg_backup->seqno) && in check_current_backup()
/dragonfly/sys/dev/drm/
drm_dp_mst_topology.c
194 hdr->seqno = (buf[idx] >> 4) & 0x1; in drm_dp_decode_sideband_msg_hdr()
1454 if (txmsg->seqno == -1) { in set_hdr_from_dst_qlock()
1460 txmsg->seqno = mstb->last_seqno; in set_hdr_from_dst_qlock()
1463 txmsg->seqno = 0; in set_hdr_from_dst_qlock()
1465 txmsg->seqno = 1; in set_hdr_from_dst_qlock()
1480 hdr->seqno = txmsg->seqno; in set_hdr_from_dst_qlock()
1498 txmsg->seqno = -1; in process_single_tx_qlock()
1561 if (txmsg->seqno != -1) in process_single_down_tx_qlock()
2058 txmsg->seqno = seqno; in drm_dp_send_up_ack_reply()
2375 bool seqno; in drm_dp_mst_handle_up_req() local
[all …]
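
For DisplayPort MST sideband messages the seqno is a single bit in the header, decoded above as (buf[idx] >> 4) & 0x1; transmitted messages alternate it between 0 and 1 so replies can be matched to the right request, with -1 used internally for "not yet assigned". A sketch of the matching encode step, grounded only in that decode line:

    #include <stdint.h>

    /* Illustrative encode of the one-bit sideband seqno into the header byte
     * that drm_dp_decode_sideband_msg_hdr() reads it back out of. */
    static void put_sideband_seqno(uint8_t *hdr_byte, int seqno)
    {
        *hdr_byte &= (uint8_t)~(1u << 4);            /* clear the seqno bit */
        *hdr_byte |= (uint8_t)((seqno & 0x1) << 4);  /* store 0 or 1 */
    }
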
linux_fence-array.c
169 u64 context, unsigned seqno, in dma_fence_array_create() argument
183 context, seqno); in dma_fence_array_create()
linux_fence.c
30 spinlock_t *lock, u64 context, unsigned seqno) in dma_fence_init() argument
35 fence->seqno = seqno; in dma_fence_init()
/dragonfly/contrib/gcc-4.7/gcc/
sel-sched-ir.c
3987 int seqno; in get_seqno_by_succs() local
3998 seqno = INT_MAX; in get_seqno_by_succs()
4002 seqno = MIN (seqno, INSN_SEQNO (succ)); in get_seqno_by_succs()
4007 return seqno; in get_seqno_by_succs()
4014 int seqno; in get_seqno_for_a_jump() local
4065 if (seqno < 0) in get_seqno_for_a_jump()
4070 return seqno; in get_seqno_for_a_jump()
4081 int n, i, seqno; in get_seqno_by_preds() local
4092 seqno = MAX (seqno, INSN_SEQNO (preds[i])); in get_seqno_by_preds()
4094 return seqno; in get_seqno_by_preds()
[all …]
sel-sched.c
690 if (0 < seqno && seqno <= orig_max_seqno in extract_new_fences_from()
4801 int seqno; in find_seqno_for_bookkeeping() local
4825 if (seqno < 0) in find_seqno_for_bookkeeping()
4828 seqno = 1; in find_seqno_for_bookkeeping()
4832 gcc_assert (seqno > 0); in find_seqno_for_bookkeeping()
4833 return seqno; in find_seqno_for_bookkeeping()
7388 int seqno = 0; in schedule_on_fences() local
7445 *min_seqno = seqno; in find_min_max_seqno()
7447 *max_seqno = seqno; in find_min_max_seqno()
7470 int seqno; in calculate_new_fences() local
[all …]
