
Searched refs:wb (Results 1 – 25 of 61) sorted by relevance


/dragonfly/sbin/routed/
output.c
249 wb->lim = wb->base + NETS_LEN; in clr_ws_buf()
250 wb->n = wb->base; in clr_ws_buf()
251 memset(wb->n, 0, NETS_LEN*sizeof(*wb->n)); in clr_ws_buf()
263 wb->n++; in clr_ws_buf()
271 wb->n++; in clr_ws_buf()
296 wb->n++; in end_md5_auth()
319 if (output(wb->type, &ws.to, ws.ifp, wb->buf, in supply_write()
320 ((char *)wb->n - (char*)wb->buf)) < 0 in supply_write()
364 wb = &v2buf; in supply_out()
409 wb->n->n_metric = htonl(wb->n->n_metric); in supply_out()
[all …]
/dragonfly/sys/dev/drm/radeon/
r600_dma.c
54 if (rdev->wb.enabled) in r600_dma_get_rptr()
55 rptr = rdev->wb.wb[ring->rptr_offs/4]; in r600_dma_get_rptr()
142 upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF); in r600_dma_resume()
144 ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC)); in r600_dma_resume()
146 if (rdev->wb.enabled) in r600_dma_resume()
242 gpu_addr = rdev->wb.gpu_addr + index; in r600_dma_ring_test()
245 rdev->wb.wb[index/4] = cpu_to_le32(tmp); in r600_dma_ring_test()
259 tmp = le32_to_cpu(rdev->wb.wb[index/4]); in r600_dma_ring_test()
349 gpu_addr = rdev->wb.gpu_addr + index; in r600_dma_ib_test()
380 tmp = le32_to_cpu(rdev->wb.wb[index/4]); in r600_dma_ib_test()
[all …]
radeon_device.c
442 rdev->wb.enabled = false; in radeon_wb_disable()
456 if (rdev->wb.wb_obj) { in radeon_wb_fini()
463 rdev->wb.wb = NULL; in radeon_wb_fini()
464 rdev->wb.wb_obj = NULL; in radeon_wb_fini()
485 &rdev->wb.wb_obj); in radeon_wb_init()
503 wb_ptr = &rdev->wb.wb; in radeon_wb_init()
516 rdev->wb.use_event = false; in radeon_wb_init()
519 rdev->wb.enabled = false; in radeon_wb_init()
523 rdev->wb.enabled = false; in radeon_wb_init()
528 rdev->wb.enabled = true; in radeon_wb_init()
[all …]
cik_sdma.c
66 if (rdev->wb.enabled) { in cik_sdma_get_rptr()
67 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cik_sdma_get_rptr()
137 if (rdev->wb.enabled) { in cik_sdma_ring_ib_execute()
401 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
403 if (rdev->wb.enabled) in cik_sdma_gfx_resume()
657 gpu_addr = rdev->wb.gpu_addr + index; in cik_sdma_ring_test()
660 rdev->wb.wb[index/4] = cpu_to_le32(tmp); in cik_sdma_ring_test()
675 tmp = le32_to_cpu(rdev->wb.wb[index/4]); in cik_sdma_ring_test()
714 gpu_addr = rdev->wb.gpu_addr + index; in cik_sdma_ib_test()
717 rdev->wb.wb[index/4] = cpu_to_le32(tmp); in cik_sdma_ib_test()
[all …]
ni_dma.c
56 if (rdev->wb.enabled) { in cayman_dma_get_rptr()
57 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cayman_dma_get_rptr()
126 if (rdev->wb.enabled) { in cayman_dma_ring_ib_execute()
221 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF); in cayman_dma_resume()
223 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cayman_dma_resume()
225 if (rdev->wb.enabled) in cayman_dma_resume()
radeon_fence.c
64 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_write()
87 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_read()
832 if (rdev->wb.use_event || !radeon_ring_supports_scratch_reg(rdev, &rdev->ring[ring])) { in radeon_fence_driver_start_ring()
836 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; in radeon_fence_driver_start_ring()
837 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + in radeon_fence_driver_start_ring()
856 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; in radeon_fence_driver_start_ring()
857 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index; in radeon_fence_driver_start_ring()
radeon_ring.c
300 else if (rdev->wb.enabled) in radeon_ring_backup()
412 if (rdev->wb.enabled) { in radeon_ring_init()
414 ring->next_rptr_gpu_addr = rdev->wb.gpu_addr + index; in radeon_ring_init()
415 ring->next_rptr_cpu_addr = &rdev->wb.wb[index/4]; in radeon_ring_init()
/dragonfly/crypto/libressl/ssl/
ssl_pkt.c
544 if (wb->buf == NULL) in do_ssl3_write()
552 if (wb->left != 0) in do_ssl3_write()
601 wb->offset = align; in do_ssl3_write()
603 if (!CBB_init_fixed(&cbb, wb->buf + align, wb->len - align)) in do_ssl3_write()
621 wb->left = out_len; in do_ssl3_write()
660 i = BIO_write(s->wbio, (char *)&(wb->buf[wb->offset]), in ssl3_write_pending()
667 wb->left = 0; in ssl3_write_pending()
668 wb->offset += i; in ssl3_write_pending()
680 wb->left = 0; in ssl3_write_pending()
683 wb->offset += i; in ssl3_write_pending()
[all …]
d1_pkt.c
931 SSL3_BUFFER_INTERNAL *wb = &(s->s3->wbuf); in do_dtls1_write() local
942 if (wb->left != 0) { in do_dtls1_write()
957 wb->offset = 0; in do_dtls1_write()
959 if (!CBB_init_fixed(&cbb, wb->buf, wb->len)) in do_dtls1_write()
970 wb->left = out_len; in do_dtls1_write()
/dragonfly/sys/dev/drm/amd/amdgpu/
sdma_v3_0.c
351 return ring->adev->wb.wb[ring->rptr_offs] >> 2; in sdma_v3_0_ring_get_rptr()
368 wptr = ring->adev->wb.wb[ring->wptr_offs] >> 2; in sdma_v3_0_ring_get_wptr()
388 u32 *wb = (u32 *)&adev->wb.wb[ring->wptr_offs]; in sdma_v3_0_ring_set_wptr() local
390 WRITE_ONCE(*wb, (lower_32_bits(ring->wptr) << 2)); in sdma_v3_0_ring_set_wptr()
393 u32 *wb = (u32 *)&adev->wb.wb[ring->wptr_offs]; in sdma_v3_0_ring_set_wptr() local
873 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v3_0_ring_test_ring()
875 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v3_0_ring_test_ring()
893 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v3_0_ring_test_ring()
935 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v3_0_ring_test_ib()
937 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v3_0_ring_test_ib()
[all …]
sdma_v4_0.c
282 rptr = ((u64 *)&ring->adev->wb.wb[ring->rptr_offs]); in sdma_v4_0_ring_get_rptr()
302 wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs])); in sdma_v4_0_ring_get_wptr()
333 u64 *wb = (u64 *)&adev->wb.wb[ring->wptr_offs]; in sdma_v4_0_ring_set_wptr() local
343 WRITE_ONCE(*wb, (ring->wptr << 2)); in sdma_v4_0_ring_set_wptr()
707 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in sdma_v4_0_gfx_resume()
936 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v4_0_ring_test_ring()
938 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v4_0_ring_test_ring()
956 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v4_0_ring_test_ring()
998 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v4_0_ring_test_ib()
1000 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v4_0_ring_test_ib()
[all …]
sdma_v2_4.c
194 return ring->adev->wb.wb[ring->rptr_offs] >> 2; in sdma_v2_4_ring_get_rptr()
450 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v2_4_gfx_resume()
452 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v2_4_gfx_resume()
600 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v2_4_ring_test_ring()
602 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v2_4_ring_test_ring()
620 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v2_4_ring_test_ring()
662 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v2_4_ring_test_ib()
664 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v2_4_ring_test_ib()
696 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v2_4_ring_test_ib()
amdgpu_device.c
574 if (adev->wb.wb_obj) { in amdgpu_device_wb_fini()
577 (void **)&adev->wb.wb); in amdgpu_device_wb_fini()
578 adev->wb.wb_obj = NULL; in amdgpu_device_wb_fini()
599 &adev->wb.wb_obj, (u64 *)&adev->wb.gpu_addr, in amdgpu_device_wb_init()
600 (void **)&adev->wb.wb); in amdgpu_device_wb_init()
607 memset(&adev->wb.used, 0, sizeof(adev->wb.used)); in amdgpu_device_wb_init()
610 memset((char *)adev->wb.wb, 0, AMDGPU_MAX_WB * sizeof(uint32_t) * 8); in amdgpu_device_wb_init()
627 unsigned long offset = find_first_zero_bit(adev->wb.used, adev->wb.num_wb); in amdgpu_device_wb_get()
648 wb >>= 3; in amdgpu_device_wb_free()
649 if (wb < adev->wb.num_wb) in amdgpu_device_wb_free()
[all …]
tonga_ih.c
142 wptr_off = adev->wb.gpu_addr + (adev->irq.ih.wptr_offs * 4); in tonga_ih_irq_init()
203 wptr = le32_to_cpu(adev->wb.wb[adev->irq.ih.wptr_offs]); in tonga_ih_get_wptr()
294 adev->wb.wb[adev->irq.ih.rptr_offs] = adev->irq.ih.rptr; in tonga_ih_set_rptr()
vega10_ih.c
130 wptr_off = adev->wb.gpu_addr + (adev->irq.ih.wptr_offs * 4); in vega10_ih_irq_init()
201 wptr = le32_to_cpu(adev->wb.wb[adev->irq.ih.wptr_offs]); in vega10_ih_get_wptr()
367 adev->wb.wb[adev->irq.ih.rptr_offs] = adev->irq.ih.rptr; in vega10_ih_set_rptr()
cz_ih.c
136 wptr_off = adev->wb.gpu_addr + (adev->irq.ih.wptr_offs * 4); in cz_ih_irq_init()
192 wptr = le32_to_cpu(adev->wb.wb[adev->irq.ih.wptr_offs]); in cz_ih_get_wptr()
iceland_ih.c
136 wptr_off = adev->wb.gpu_addr + (adev->irq.ih.wptr_offs * 4); in iceland_ih_irq_init()
192 wptr = le32_to_cpu(adev->wb.wb[adev->irq.ih.wptr_offs]); in iceland_ih_get_wptr()
/dragonfly/sys/dev/netif/emx/
if_emx.h
207 #define rxd_length wb.upper.length /* 16bits */
208 #define rxd_vlan wb.upper.vlan /* 16bits */
209 #define rxd_staterr wb.upper.status_error /* 32bits */
210 #define rxd_mrq wb.lower.mrq /* 32bits */
211 #define rxd_rss wb.lower.hi_dword.rss /* 32bits */
/dragonfly/contrib/tcsh-6/
tc.who.c
519 char *wb; local
557 wb = wbuf;
564 *wb++ = isupper((unsigned char)*pb) ?
567 *wb = '\0';
577 wb = wbuf;
579 *wb++ = isupper((unsigned char)*pb) ?
581 *wb = '\0';
/dragonfly/contrib/wpa_supplicant/wpa_supplicant/
scan.c
1958 struct wpa_scan_res *wb = *_wb; in wpa_scan_result_compar() local
1984 snr_b_full = wb->snr; in wpa_scan_result_compar()
1985 snr_b = MIN(wb->snr, GREAT_SNR); in wpa_scan_result_compar()
1990 snr_b = snr_b_full = wb->level; in wpa_scan_result_compar()
1996 return (int) wb->est_throughput - in wpa_scan_result_compar()
2000 (wa->qual && wb->qual && abs(wb->qual - wa->qual) < 10)) { in wpa_scan_result_compar()
2009 return wb->qual - wa->qual; in wpa_scan_result_compar()
2023 struct wpa_scan_res *wb = *_wb; in wpa_scan_result_wps_compar() local
2056 if (wb->level == wa->level) in wpa_scan_result_wps_compar()
2057 return wb->qual - wa->qual; in wpa_scan_result_wps_compar()
[all …]
/dragonfly/contrib/gdtoa/
misc.c
273 int k, wa, wb, wc; local
292 wb = b->wds;
293 wc = wa + wb;
302 xbe = xb + wb;
530 int i, wa, wb; local
561 wb = b->wds;
563 xbe = xb + wb;
/dragonfly/usr.sbin/ppp/
route.c
735 int s, nb, wb; in rt_Set() local
805 wb = ID0write(s, &rtmes, nb); in rt_Set()
806 if (wb < 0) { in rt_Set()
821 if ((wb = ID0write(s, &rtmes, nb)) < 0) in rt_Set()
847 wb, cmdstr, ncprange_ntoa(dst), gwstr); in rt_Set()
861 int s, wb; in rt_Update() local
905 wb = ID0write(s, &rtmes, rtmes.m_rtm.rtm_msglen); in rt_Update()
906 if (wb < 0) { in rt_Update()
/dragonfly/sys/vfs/hpfs/
hpfs_alsubr.c
555 u_long wb; in hpfs_addextentr() local
567 wb = 0; in hpfs_addextentr()
601 wb = 1; in hpfs_addextentr()
717 wb = 1; in hpfs_addextentr()
720 if (wb) in hpfs_addextentr()
/dragonfly/contrib/libarchive/libarchive/
archive_write_set_format_7zip.c
681 unsigned char *wb; in copy_out() local
687 wb = zip->wbuff + (sizeof(zip->wbuff) - zip->wbuff_remaining); in copy_out()
688 rs = read(zip->temp_fd, wb, rsize); in copy_out()
715 unsigned char *wb; in _7z_close() local
834 wb = zip->wbuff; in _7z_close()
836 memcpy(&wb[0], "7z\xBC\xAF\x27\x1C", 6); in _7z_close()
837 wb[6] = 0;/* Major version. */ in _7z_close()
838 wb[7] = 3;/* Minor version. */ in _7z_close()
840 archive_le64enc(&wb[20], header_size);/* Next Header Size */ in _7z_close()
841 archive_le32enc(&wb[28], header_crc32);/* Next Header CRC */ in _7z_close()
[all …]
/dragonfly/sys/dev/netif/ig_hal/
e1000_vf.h
113 } wb; /* writeback */ member
130 } wb; member
