
Searched refs:ring_size (Results 1 – 25 of 47) sorted by relevance


/dragonfly/sys/dev/drm/amd/amdgpu/
amdgpu_ih.c
43 r = amdgpu_bo_create_kernel(adev, adev->irq.ih.ring_size, in amdgpu_ih_ring_alloc()
65 int amdgpu_ih_ring_init(struct amdgpu_device *adev, unsigned ring_size, in amdgpu_ih_ring_init() argument
72 rb_bufsz = order_base_2(ring_size / 4); in amdgpu_ih_ring_init()
73 ring_size = (1 << rb_bufsz) * 4; in amdgpu_ih_ring_init()
74 adev->irq.ih.ring_size = ring_size; in amdgpu_ih_ring_init()
75 adev->irq.ih.ptr_mask = adev->irq.ih.ring_size - 1; in amdgpu_ih_ring_init()
85 adev->irq.ih.ring_size + 8, in amdgpu_ih_ring_init()
89 memset((void *)adev->irq.ih.ring, 0, adev->irq.ih.ring_size + 8); in amdgpu_ih_ring_init()
90 adev->irq.ih.wptr_offs = (adev->irq.ih.ring_size / 4) + 0; in amdgpu_ih_ring_init()
91 adev->irq.ih.rptr_offs = (adev->irq.ih.ring_size / 4) + 1; in amdgpu_ih_ring_init()
[all …]
psp_v10_0.c
180 ring->ring_size = 0x1000; in psp_v10_0_ring_init()
181 ret = amdgpu_bo_create_kernel(adev, ring->ring_size, PAGE_SIZE, in psp_v10_0_ring_init()
187 ring->ring_size = 0; in psp_v10_0_ring_init()
209 psp_ring_reg = ring->ring_size; in psp_v10_0_ring_create()
278 ring->ring_size / sizeof(struct psp_gfx_rb_frame) - 1; in psp_v10_0_cmd_submit()
280 uint32_t ring_size_dw = ring->ring_size / 4; in psp_v10_0_cmd_submit()
amdgpu_ih.h
49 unsigned ring_size; member
79 int amdgpu_ih_ring_init(struct amdgpu_device *adev, unsigned ring_size,
psp_v3_1.c
311 ring->ring_size = 0x1000; in psp_v3_1_ring_init()
312 ret = amdgpu_bo_create_kernel(adev, ring->ring_size, PAGE_SIZE, in psp_v3_1_ring_init()
318 ring->ring_size = 0; in psp_v3_1_ring_init()
340 psp_ring_reg = ring->ring_size; in psp_v3_1_ring_create()
409 ring->ring_size / sizeof(struct psp_gfx_rb_frame) - 1; in psp_v3_1_cmd_submit()
411 uint32_t ring_size_dw = ring->ring_size / 4; in psp_v3_1_cmd_submit()
amdgpu_ring.c
301 ring->ring_size = roundup_pow_of_two(max_dw * 4 * sched_hw_submission); in amdgpu_ring_init()
303 ring->buf_mask = (ring->ring_size / 4) - 1; in amdgpu_ring_init()
308 r = amdgpu_bo_create_kernel(adev, ring->ring_size + ring->funcs->extra_dw, PAGE_SIZE, in amdgpu_ring_init()
524 if (*pos >= (ring->ring_size + 12)) in amdgpu_debugfs_ring_read()
564 i_size_write(ent->d_inode, ring->ring_size + 12); in amdgpu_debugfs_ring_init()
amdgpu_test.c
50 n -= adev->rings[i]->ring_size; in amdgpu_do_test_moves()
54 n -= adev->irq.ih.ring_size; in amdgpu_do_test_moves()
amdgpu_ring.h
185 unsigned ring_size; member
234 unsigned ring_size, struct amdgpu_irq_src *irq_src,
vce_v4_0.c
238 ring->ring_size / 4); in vce_v4_0_sriov_start()
341 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE), ring->ring_size / 4); in vce_v4_0_start()
349 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE2), ring->ring_size / 4); in vce_v4_0_start()
357 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_SIZE3), ring->ring_size / 4); in vce_v4_0_start()
amdgpu_psp.h
55 uint32_t ring_size; member
vce_v3_0.c
286 WREG32(mmVCE_RB_SIZE, ring->ring_size / 4); in vce_v3_0_start()
293 WREG32(mmVCE_RB_SIZE2, ring->ring_size / 4); in vce_v3_0_start()
300 WREG32(mmVCE_RB_SIZE3, ring->ring_size / 4); in vce_v3_0_start()
uvd_v7_0.c
896 size = order_base_2(ring->ring_size); in uvd_v7_0_sriov_start()
905 MMSCH_V1_0_INSERT_DIRECT_WT(SOC15_REG_OFFSET(UVD, i, mmUVD_RB_SIZE), ring->ring_size / 4); in uvd_v7_0_sriov_start()
1063 rb_bufsz = order_base_2(ring->ring_size); in uvd_v7_0_start()
1100 WREG32_SOC15(UVD, k, mmUVD_RB_SIZE, ring->ring_size / 4); in uvd_v7_0_start()
1107 WREG32_SOC15(UVD, k, mmUVD_RB_SIZE2, ring->ring_size / 4); in uvd_v7_0_start()
cz_ih.c
127 rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4); in cz_ih_irq_init()
iceland_ih.c
127 rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4); in iceland_ih_irq_init()
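
The amdgpu hits above share one sizing idiom: amdgpu_ih_ring_init() and amdgpu_ring_init() round the requested ring size up to a power of two and derive a mask from it (ptr_mask = ring_size - 1 for the IH ring, buf_mask = ring_size / 4 - 1 in dwords for the GPU rings), so ring pointers wrap with a single AND. The following userspace sketch only illustrates that idiom; struct demo_ring and its helpers are made-up names, not driver code.

/*
 * Minimal sketch (not the driver code) of the power-of-two ring sizing
 * and pointer-masking pattern seen in the amdgpu hits above.
 */
#include <stdint.h>
#include <stdio.h>

struct demo_ring {
    uint32_t ring_size;  /* in bytes, always a power of two */
    uint32_t ptr_mask;   /* ring_size - 1, valid only for powers of two */
    uint32_t wptr;       /* write pointer, in bytes */
};

/* Round v up to the next power of two (v must be > 0). */
static uint32_t roundup_pow_of_two32(uint32_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4;
    v |= v >> 8; v |= v >> 16;
    return v + 1;
}

static void demo_ring_init(struct demo_ring *r, uint32_t requested_bytes)
{
    r->ring_size = roundup_pow_of_two32(requested_bytes);
    r->ptr_mask  = r->ring_size - 1;
    r->wptr      = 0;
}

/* Advance the write pointer by nbytes, wrapping via the mask. */
static void demo_ring_advance(struct demo_ring *r, uint32_t nbytes)
{
    r->wptr = (r->wptr + nbytes) & r->ptr_mask;
}

int main(void)
{
    struct demo_ring r;
    demo_ring_init(&r, 1000);   /* rounds up to 1024 */
    demo_ring_advance(&r, 1020);
    demo_ring_advance(&r, 16);  /* wraps past the end of the buffer */
    printf("size=%u mask=0x%x wptr=%u\n",
           (unsigned)r.ring_size, (unsigned)r.ptr_mask, (unsigned)r.wptr);
    return 0;
}

Keeping the size a power of two is what makes the mask trick valid; a non-power-of-two size would need a modulo or a compare-and-reset instead.
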
/dragonfly/sys/dev/drm/radeon/
radeon_ring.c
83 ring->ring_free_dw = rptr + (ring->ring_size / 4); in radeon_ring_free_size()
88 ring->ring_free_dw = ring->ring_size / 4; in radeon_ring_free_size()
109 if (ndw > (ring->ring_size / 4)) in radeon_ring_alloc()
308 size = ring->wptr + (ring->ring_size / 4); in radeon_ring_backup()
375 int radeon_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size, in radeon_ring_init() argument
380 ring->ring_size = ring_size; in radeon_ring_init()
385 r = radeon_bo_create(rdev, ring->ring_size, PAGE_SIZE, true, in radeon_ring_init()
410 ring->ptr_mask = (ring->ring_size / 4) - 1; in radeon_ring_init()
411 ring->ring_free_dw = ring->ring_size / 4; in radeon_ring_init()
472 count = (ring->ring_size / 4) - ring->ring_free_dw; in radeon_debugfs_ring_info()
ni.c
1703 rb_cntl = order_base_2(ring->ring_size / 8); in cayman_cp_resume()
2056 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in cayman_uvd_start()
2064 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size) in cayman_uvd_resume()
2068 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0)); in cayman_uvd_resume()
2136 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0; in cayman_vce_start()
2137 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0; in cayman_vce_start()
2145 if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size) in cayman_vce_resume()
2149 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0); in cayman_vce_resume()
2155 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0); in cayman_vce_resume()
2263 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, in cayman_startup()
[all …]
vce_v1_0.c
303 WREG32(VCE_RB_SIZE, ring->ring_size / 4); in vce_v1_0_start()
310 WREG32(VCE_RB_SIZE2, ring->ring_size / 4); in vce_v1_0_start()
r600.c
2739 rb_bufsz = order_base_2(ring->ring_size / 8); in r600_cp_resume()
2795 rb_bufsz = order_base_2(ring_size / 8); in r600_ring_init()
2796 ring_size = (1 << (rb_bufsz + 1)) * 4; in r600_ring_init()
2797 ring->ring_size = ring_size; in r600_ring_init()
3093 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in r600_uvd_start()
3485 rb_bufsz = order_base_2(ring_size / 4); in r600_ih_ring_init()
3486 ring_size = (1 << rb_bufsz) * 4; in r600_ih_ring_init()
3487 rdev->ih.ring_size = ring_size; in r600_ih_ring_init()
3488 rdev->ih.ptr_mask = rdev->ih.ring_size - 1; in r600_ih_ring_init()
3498 r = radeon_bo_create(rdev, rdev->ih.ring_size, in r600_ih_ring_alloc()
[all …]
rv770.c
1726 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in rv770_uvd_start()
1734 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size) in rv770_uvd_resume()
1738 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0)); in rv770_uvd_resume()
1810 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, in rv770_startup()
1816 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, in rv770_startup()
si.c
3655 rb_bufsz = order_base_2(ring->ring_size / 8); in si_cp_resume()
3686 rb_bufsz = order_base_2(ring->ring_size / 8); in si_cp_resume()
3710 rb_bufsz = order_base_2(ring->ring_size / 8); in si_cp_resume()
5998 rb_bufsz = order_base_2(rdev->ih.ring_size / 4); in si_irq_init()
6488 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in si_uvd_start()
6496 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size) in si_uvd_resume()
6567 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0; in si_vce_start()
6568 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0; in si_vce_start()
6576 if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size) in si_vce_resume()
6580 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, VCE_CMD_NO_OP); in si_vce_resume()
[all …]
uvd_v1_0.c
377 rb_bufsz = order_base_2(ring->ring_size); in uvd_v1_0_start()
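
Several radeon hits (cayman_cp_resume(), r600_cp_resume(), si_cp_resume(), si_irq_init()) feed order_base_2(ring->ring_size / 8) or order_base_2(ring_size / 4) into the ring-buffer control registers, which suggests the hardware takes a log2-encoded count of fixed-size units rather than a byte count. In the Linux-derived helpers, order_base_2(n) is the log2 of n rounded up to the next power of two; since the ring sizes here are already forced to powers of two, a plain log2 gives the same value. The sketch below is illustrative only: ilog2_u32() and rb_bufsz_encode() are made-up names, and the 8-byte/4-byte unit sizes are simply read off the hits above.

/*
 * Sketch (not driver code) of the order_base_2() encoding used when
 * programming ring-buffer size registers in the radeon hits above.
 */
#include <stdint.h>
#include <stdio.h>

/* Return log2(n) for a power-of-two n > 0. */
static uint32_t ilog2_u32(uint32_t n)
{
    uint32_t l = 0;
    while (n >>= 1)
        l++;
    return l;
}

/* Encode a power-of-two ring size in bytes as log2(size / unit_bytes). */
static uint32_t rb_bufsz_encode(uint32_t ring_size_bytes, uint32_t unit_bytes)
{
    return ilog2_u32(ring_size_bytes / unit_bytes);
}

int main(void)
{
    /* A 1 MiB CP ring counted in 8-byte units encodes as 17 (2^17 * 8 bytes). */
    printf("cp rb_bufsz = %u\n", (unsigned)rb_bufsz_encode(1024 * 1024, 8));
    /* A 64 KiB IH ring counted in 4-byte units encodes as 14 (2^14 * 4 bytes). */
    printf("ih rb_bufsz = %u\n", (unsigned)rb_bufsz_encode(64 * 1024, 4));
    return 0;
}
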
/dragonfly/sys/dev/virtual/amazon/ena/
ena.h
118 #define ENA_TX_RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1)) argument
120 #define ENA_RX_RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1)) argument
273 int ring_size; /* number of tx/rx_buffer_info's entries */ member
ena.c
418 txr->ring_size = adapter->tx_ring_size; in ena_init_io_rings()
431 rxr->ring_size = adapter->rx_ring_size; in ena_init_io_rings()
583 for (i = 0; i < tx_ring->ring_size; i++) in ena_setup_tx_resources()
596 for (i = 0; i < tx_ring->ring_size; i++) { in ena_setup_tx_resources()
702 if (likely(req_id < rx_ring->ring_size)) in validate_rx_req_id()
749 for (i = 0; i < rx_ring->ring_size; i++) in ena_setup_rx_resources()
1034 rx_ring->ring_size); in ena_refill_rx_bufs()
1082 bufs_num = rx_ring->ring_size - 1; in ena_refill_all_rx_bufs()
1344 tx_ring->ring_size); in ena_tx_cleanup()
1652 rx_ring->ring_size); in ena_rx_cleanup()
[all …]
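
The ena.h hits define the ring-index increment as ((idx) + 1) & ((ring_size) - 1), the classic branch-free wrap that only works when ring_size is a power of two. The demo below mirrors that macro under a made-up name purely to show the wrap behaviour; it is not taken from the driver.

/*
 * Sketch of the index-wrap idiom from the ena hits above: with a
 * power-of-two ring_size, masking replaces a modulo when stepping
 * through descriptor slots.
 */
#include <stdio.h>

#define RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1))

int main(void)
{
    unsigned ring_size = 8;   /* must be a power of two for the mask trick */
    unsigned idx = 0;

    /* Walk a little more than one full lap to show the wrap back to 0. */
    for (int step = 0; step < 10; step++) {
        printf("%u ", idx);
        idx = RING_IDX_NEXT(idx, ring_size);
    }
    printf("\n");             /* prints: 0 1 2 3 4 5 6 7 0 1 */
    return 0;
}
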
/dragonfly/sys/dev/drm/i915/
i915_gem_context.h
164 u32 ring_size; member
i915_gem_context.c
311 ctx->ring_size = 4 * PAGE_SIZE; in __create_hw_context()
413 ctx->ring_size = 512 * PAGE_SIZE; /* Max ring buffer size */ in i915_gem_context_create_gvt()
432 ctx->ring_size = PAGE_SIZE; in create_kernel_context()
/dragonfly/sys/dev/netif/oce/
oce_hw.h
735 uint32_t ring_size:4; member
752 uint32_t ring_size:4;
775 uint32_t ring_size:4; member
791 uint32_t ring_size:4;
1412 uint32_t ring_size:4; member
1427 uint32_t ring_size:4;
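
In the oce_hw.h hits, ring_size is a 4-bit bitfield inside hardware context structures, so it can hold only 0–15 and must carry some encoded value rather than a raw entry count. The actual OneConnect encoding is not visible in these hits; the sketch below is hypothetical (demo_hw_ctx, demo_encode_ring_size, and the log2-style code are assumptions) and only demonstrates the bitfield mechanics.

/*
 * Sketch prompted by the oce_hw.h hits: a 4-bit ring_size field with a
 * hypothetical log2-style encoding of the entry count.
 */
#include <stdint.h>
#include <stdio.h>

struct demo_hw_ctx {
    uint32_t ring_size:4;   /* encoded ring size, 0..15 */
    uint32_t rsvd:28;
};

/* Hypothetical encoding: store log2(entries) for a power-of-two entry count. */
static uint32_t demo_encode_ring_size(uint32_t entries)
{
    uint32_t code = 0;
    while (entries >>= 1)
        code++;
    return code & 0xf;
}

int main(void)
{
    struct demo_hw_ctx ctx = { 0 };
    ctx.ring_size = demo_encode_ring_size(1024);   /* 1024 entries -> code 10 */
    printf("ring_size code = %u\n", (unsigned)ctx.ring_size);
    return 0;
}
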
