
Searched refs:length_dw (Results 1 – 25 of 46) sorted by relevance

/dragonfly/sys/dev/drm/radeon/
si_dma.c
81 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pages()
82 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pages()
121 ib->ptr[ib->length_dw++] = pe; in si_dma_vm_write_pages()
133 ib->ptr[ib->length_dw++] = value; in si_dma_vm_write_pages()
134 ib->ptr[ib->length_dw++] = upper_32_bits(value); in si_dma_vm_write_pages()
173 ib->ptr[ib->length_dw++] = pe; /* dst addr */ in si_dma_vm_set_pages()
175 ib->ptr[ib->length_dw++] = flags; /* mask */ in si_dma_vm_set_pages()
176 ib->ptr[ib->length_dw++] = 0; in si_dma_vm_set_pages()
177 ib->ptr[ib->length_dw++] = value; /* value */ in si_dma_vm_set_pages()
178 ib->ptr[ib->length_dw++] = upper_32_bits(value); in si_dma_vm_set_pages()
[all …]
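
The pattern in these DMA hits is the basic IB (indirect buffer) build step: the buffer is a dword array and length_dw is the running count of dwords written so far, so each command word is appended with ib->ptr[ib->length_dw++] = .... A minimal standalone C sketch of that append pattern, using a simplified hypothetical struct rather than the kernel's struct radeon_ib:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Simplified, hypothetical stand-in for the driver's IB structure. */
    struct ib {
        uint32_t *ptr;        /* command buffer, one dword per slot */
        unsigned length_dw;   /* number of dwords written so far */
    };

    /* Append a 64-bit address as two dwords, low word first, mirroring the
     * lower_32_bits()/upper_32_bits() pairs in the hits above. */
    static void ib_emit_addr(struct ib *ib, uint64_t addr)
    {
        ib->ptr[ib->length_dw++] = (uint32_t)(addr & 0xffffffffu);
        ib->ptr[ib->length_dw++] = (uint32_t)(addr >> 32);
    }

    int main(void)
    {
        struct ib ib = { .ptr = calloc(64, sizeof(uint32_t)), .length_dw = 0 };

        ib_emit_addr(&ib, 0x123456789abcULL);   /* e.g. a page-table entry address */
        printf("IB now holds %u dwords\n", ib.length_dw);
        free(ib.ptr);
        return 0;
    }
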
radeon_vce.c
363 ib.length_dw = 0; in radeon_vce_get_create_msg()
366 ib.ptr[ib.length_dw++] = cpu_to_le32(handle); in radeon_vce_get_create_msg()
384 ib.ptr[ib.length_dw++] = cpu_to_le32(dummy); in radeon_vce_get_create_msg()
387 for (i = ib.length_dw; i < ib_size_dw; ++i) in radeon_vce_get_create_msg()
430 ib.length_dw = 0; in radeon_vce_get_destroy_msg()
433 ib.ptr[ib.length_dw++] = cpu_to_le32(handle); in radeon_vce_get_destroy_msg()
438 ib.ptr[ib.length_dw++] = cpu_to_le32(dummy); in radeon_vce_get_destroy_msg()
444 for (i = ib.length_dw; i < ib_size_dw; ++i) in radeon_vce_get_destroy_msg()
482 if (idx >= relocs_chunk->length_dw) { in radeon_vce_cs_reloc()
484 idx, relocs_chunk->length_dw); in radeon_vce_cs_reloc()
[all …]
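
The radeon_vce hits show the fixed-size message variant of the same idea: length_dw is reset to 0, the create/destroy command dwords are appended one by one, and the unused tail up to the allocated ib_size_dw is padded before submission. A hedged sketch of that shape, with a simplified hypothetical struct and a zero pad value:

    #include <stdint.h>

    /* Simplified, hypothetical IB for illustration only. */
    struct ib {
        uint32_t *ptr;
        unsigned length_dw;
    };

    /* Build a message into a buffer of ib_size_dw dwords: reset the count,
     * append the payload, then pad the unused tail so the whole allocation
     * is well defined before it is handed to the engine. */
    void build_fixed_size_msg(struct ib *ib, unsigned ib_size_dw, uint32_t handle)
    {
        unsigned i;

        ib->length_dw = 0;
        ib->ptr[ib->length_dw++] = handle;       /* payload dwords go here */

        for (i = ib->length_dw; i < ib_size_dw; ++i)
            ib->ptr[i] = 0;                      /* pad, as in the loops above */
    }
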
ni_dma.c
327 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cayman_dma_vm_copy_pages()
328 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cayman_dma_vm_copy_pages()
368 ib->ptr[ib->length_dw++] = pe; in cayman_dma_vm_write_pages()
380 ib->ptr[ib->length_dw++] = value; in cayman_dma_vm_write_pages()
420 ib->ptr[ib->length_dw++] = pe; /* dst addr */ in cayman_dma_vm_set_pages()
422 ib->ptr[ib->length_dw++] = flags; /* mask */ in cayman_dma_vm_set_pages()
423 ib->ptr[ib->length_dw++] = 0; in cayman_dma_vm_set_pages()
424 ib->ptr[ib->length_dw++] = value; /* value */ in cayman_dma_vm_set_pages()
425 ib->ptr[ib->length_dw++] = upper_32_bits(value); in cayman_dma_vm_set_pages()
427 ib->ptr[ib->length_dw++] = 0; in cayman_dma_vm_set_pages()
[all …]
radeon_cs.c
89 p->nrelocs = chunk->length_dw / 4; in radeon_cs_parser_relocs()
312 p->chunks[i].length_dw = user_chunk.length_dw; in radeon_cs_parser_init()
319 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
325 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
335 size = p->chunks[i].length_dw; in radeon_cs_parser_init()
356 if (p->chunks[i].length_dw > 1) in radeon_cs_parser_init()
358 if (p->chunks[i].length_dw > 2) in radeon_cs_parser_init()
549 if (parser->const_ib.length_dw) { in radeon_cs_ib_vm_chunk()
626 parser->const_ib.length_dw = ib_chunk->length_dw; in radeon_cs_ib_fill()
647 parser->ib.length_dw = ib_chunk->length_dw; in radeon_cs_ib_fill()
[all …]
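
In the command-submission path, length_dw arrives from userspace per chunk: radeon_cs_parser_init() copies user_chunk.length_dw, rejects required chunks with a zero length, and derives buffer sizes and relocation counts (length_dw / 4) from it. A minimal sketch of that validation and dword-to-byte conversion, with hypothetical names:

    #include <stdint.h>
    #include <stddef.h>
    #include <errno.h>

    /* Hypothetical stand-in for a userspace-supplied chunk descriptor. */
    struct user_chunk {
        uint32_t length_dw;   /* chunk length in dwords, as given by userspace */
    };

    /* Reject empty required chunks and convert the dword count to bytes;
     * the dword count later bounds every copy and parse loop. */
    int chunk_size_bytes(const struct user_chunk *chunk, size_t *out_bytes)
    {
        if (chunk->length_dw == 0)
            return -EINVAL;
        *out_bytes = (size_t)chunk->length_dw * sizeof(uint32_t);
        return 0;
    }
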
cik_sdma.c
155 radeon_ring_write(ring, ib->length_dw); in cik_sdma_ring_ib_execute()
730 ib.length_dw = 5; in cik_sdma_ib_test()
813 ib->ptr[ib->length_dw++] = bytes; in cik_sdma_vm_copy_pages()
817 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cik_sdma_vm_copy_pages()
856 ib->ptr[ib->length_dw++] = pe; in cik_sdma_vm_write_pages()
858 ib->ptr[ib->length_dw++] = ndw; in cik_sdma_vm_write_pages()
869 ib->ptr[ib->length_dw++] = value; in cik_sdma_vm_write_pages()
911 ib->ptr[ib->length_dw++] = flags; /* mask */ in cik_sdma_vm_set_pages()
912 ib->ptr[ib->length_dw++] = 0; in cik_sdma_vm_set_pages()
916 ib->ptr[ib->length_dw++] = 0; in cik_sdma_vm_set_pages()
[all …]
radeon_uvd.c
585 if (idx >= relocs_chunk->length_dw) { in radeon_uvd_cs_reloc()
587 idx, relocs_chunk->length_dw); in radeon_uvd_cs_reloc()
700 if (p->chunk_ib->length_dw % 16) { in radeon_uvd_cs_parse()
702 p->chunk_ib->length_dw); in radeon_uvd_cs_parse()
730 } while (p->idx < p->chunk_ib->length_dw); in radeon_uvd_cs_parse()
761 ib.length_dw = 16; in radeon_uvd_send_msg()
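
radeon_uvd_cs_parse() insists that a UVD command stream be a whole multiple of 16 dwords, and radeon_uvd_send_msg() builds exactly 16. A small sketch of that alignment check (the helper name is hypothetical):

    #include <stdint.h>
    #include <stdio.h>

    /* Reject UVD IBs whose dword count is not a multiple of 16,
     * mirroring the check in radeon_uvd_cs_parse() above. */
    int uvd_check_ib_len(uint32_t length_dw)
    {
        if (length_dw % 16) {
            fprintf(stderr, "UVD IB length (%u) is not 16-dword aligned\n",
                    length_dw);
            return -1;
        }
        return 0;
    }
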
radeon_vm.c
411 ib.length_dw = 0; in radeon_vm_clear_bo()
415 WARN_ON(ib.length_dw > 64); in radeon_vm_clear_bo()
665 ib.length_dw = 0; in radeon_vm_update_page_directory()
702 if (ib.length_dw != 0) { in radeon_vm_update_page_directory()
706 WARN_ON(ib.length_dw > ndw); in radeon_vm_update_page_directory()
1003 ib.length_dw = 0; in radeon_vm_bo_update()
1021 WARN_ON(ib.length_dw > ndw); in radeon_vm_bo_update()
r600_dma.c
361 ib.length_dw = 4; in r600_dma_ib_test()
425 radeon_ring_write(ring, (ib->length_dw << 16) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in r600_dma_ring_ib_execute()
evergreen_dma.c
88 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in evergreen_dma_ring_ib_execute()
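
When the IB is finally launched, its dword count is packed into the ring's indirect-buffer packet together with the high bits of the IB's GPU address; the shift is generation specific (16 for the r600 DMA ring, 12 for evergreen in the hits above). A hedged helper showing only that packing:

    #include <stdint.h>

    /* Pack the size dword of a DMA indirect-buffer packet: the IB length
     * in dwords in the upper field, the high 8 bits of the GPU address in
     * the low byte.  The shift differs per generation (16 on r600 DMA,
     * 12 on evergreen DMA). */
    uint32_t pack_ib_size_dword(uint64_t gpu_addr, uint32_t length_dw, unsigned shift)
    {
        return (length_dw << shift) | ((uint32_t)(gpu_addr >> 32) & 0xFF);
    }
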
radeon_ib.c
128 if (!ib->length_dw || !ring->ready) { in radeon_ib_schedule()
r600_cs.c
2319 } while (p->idx < p->chunk_ib->length_dw); in r600_cs_parse()
2321 for (r = 0; r < p->ib.length_dw; r++) { in r600_cs_parse()
2389 if (p->idx >= ib_chunk->length_dw) { in r600_dma_cs_parse()
2391 p->idx, ib_chunk->length_dw); in r600_dma_cs_parse()
2527 } while (p->idx < p->chunk_ib->length_dw); in r600_dma_cs_parse()
2529 for (r = 0; r < p->ib->length_dw; r++) { in r600_dma_cs_parse()
/dragonfly/sys/dev/drm/amd/amdgpu/
amdgpu_vce.c
450 ib->length_dw = 0; in amdgpu_vce_get_create_msg()
453 ib->ptr[ib->length_dw++] = handle; in amdgpu_vce_get_create_msg()
460 ib->ptr[ib->length_dw++] = 0x00000000; in amdgpu_vce_get_create_msg()
461 ib->ptr[ib->length_dw++] = 0x00000042; in amdgpu_vce_get_create_msg()
462 ib->ptr[ib->length_dw++] = 0x0000000a; in amdgpu_vce_get_create_msg()
480 ib->ptr[ib->length_dw++] = dummy; in amdgpu_vce_get_create_msg()
526 ib->length_dw = 0; in amdgpu_vce_get_destroy_msg()
529 ib->ptr[ib->length_dw++] = handle; in amdgpu_vce_get_destroy_msg()
724 for (idx = 0; idx < ib->length_dw;) { in amdgpu_vce_ring_parse_cs()
790 for (idx = 0; idx < ib->length_dw;) { in amdgpu_vce_ring_parse_cs()
[all …]
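
amdgpu_vce_ring_parse_cs() uses length_dw as the parse bound as well: the loop walks the IB packet by packet until idx reaches length_dw. A hedged sketch of such a bounded walk; the packet layout assumed here (a byte length followed by a command dword) is simplified from the real VCE stream format:

    #include <stdint.h>

    /* Walk an IB packet by packet until the dword count is exhausted,
     * as in the "for (idx = 0; idx < ib->length_dw;)" loops above.
     * The layout (packet length in bytes, then a command dword) is an
     * assumption for illustration only. */
    int walk_ib_packets(const uint32_t *ptr, uint32_t length_dw)
    {
        uint32_t idx = 0;

        while (idx < length_dw) {
            uint32_t len = ptr[idx];                  /* packet size in bytes */

            if (len < 8 || (len & 3) || idx + len / 4 > length_dw)
                return -1;                            /* malformed packet */
            /* ... dispatch on ptr[idx + 1], the command ... */
            idx += len / 4;
        }
        return 0;
    }
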
amdgpu_vcn.c
312 ib->length_dw = 16; in amdgpu_vcn_dec_send_msg()
485 ib->length_dw = 0; in amdgpu_vcn_enc_get_create_msg()
486 ib->ptr[ib->length_dw++] = 0x00000018; in amdgpu_vcn_enc_get_create_msg()
488 ib->ptr[ib->length_dw++] = handle; in amdgpu_vcn_enc_get_create_msg()
490 ib->ptr[ib->length_dw++] = dummy; in amdgpu_vcn_enc_get_create_msg()
491 ib->ptr[ib->length_dw++] = 0x0000000b; in amdgpu_vcn_enc_get_create_msg()
493 ib->ptr[ib->length_dw++] = 0x00000014; in amdgpu_vcn_enc_get_create_msg()
537 ib->length_dw = 0; in amdgpu_vcn_enc_get_destroy_msg()
540 ib->ptr[ib->length_dw++] = handle; in amdgpu_vcn_enc_get_destroy_msg()
542 ib->ptr[ib->length_dw++] = dummy; in amdgpu_vcn_enc_get_destroy_msg()
[all …]
sdma_v2_4.c
259 amdgpu_ring_write(ring, ib->length_dw); in sdma_v2_4_ring_emit_ib()
681 ib.length_dw = 8; in sdma_v2_4_ring_test_ib()
731 ib->ptr[ib->length_dw++] = bytes; in sdma_v2_4_vm_copy_pte()
758 ib->ptr[ib->length_dw++] = pe; in sdma_v2_4_vm_write_pte()
760 ib->ptr[ib->length_dw++] = ndw; in sdma_v2_4_vm_write_pte()
793 ib->ptr[ib->length_dw++] = 0; in sdma_v2_4_vm_set_pte_pde()
812 ib->ptr[ib->length_dw++] = in sdma_v2_4_ring_pad_ib()
816 ib->ptr[ib->length_dw++] = in sdma_v2_4_ring_pad_ib()
1257 ib->ptr[ib->length_dw++] = byte_count; in sdma_v2_4_emit_copy_buffer()
1283 ib->ptr[ib->length_dw++] = src_data; in sdma_v2_4_emit_fill_buffer()
[all …]
sdma_v3_0.c
434 amdgpu_ring_write(ring, ib->length_dw); in sdma_v3_0_ring_emit_ib()
954 ib.length_dw = 8; in sdma_v3_0_ring_test_ib()
1003 ib->ptr[ib->length_dw++] = bytes; in sdma_v3_0_vm_copy_pte()
1032 ib->ptr[ib->length_dw++] = ndw; in sdma_v3_0_vm_write_pte()
1065 ib->ptr[ib->length_dw++] = 0; in sdma_v3_0_vm_set_pte_pde()
1081 pad_count = (8 - (ib->length_dw & 0x7)) % 8; in sdma_v3_0_ring_pad_ib()
1084 ib->ptr[ib->length_dw++] = in sdma_v3_0_ring_pad_ib()
1088 ib->ptr[ib->length_dw++] = in sdma_v3_0_ring_pad_ib()
1697 ib->ptr[ib->length_dw++] = byte_count; in sdma_v3_0_emit_copy_buffer()
1723 ib->ptr[ib->length_dw++] = src_data; in sdma_v3_0_emit_fill_buffer()
[all …]
sdma_v4_0.c
393 amdgpu_ring_write(ring, ib->length_dw); in sdma_v4_0_ring_emit_ib()
1017 ib.length_dw = 8; in sdma_v4_0_ring_test_ib()
1067 ib->ptr[ib->length_dw++] = bytes - 1; in sdma_v4_0_vm_copy_pte()
1098 ib->ptr[ib->length_dw++] = ndw - 1; in sdma_v4_0_vm_write_pte()
1132 ib->ptr[ib->length_dw++] = 0; in sdma_v4_0_vm_set_pte_pde()
1148 pad_count = (8 - (ib->length_dw & 0x7)) % 8; in sdma_v4_0_ring_pad_ib()
1151 ib->ptr[ib->length_dw++] = in sdma_v4_0_ring_pad_ib()
1155 ib->ptr[ib->length_dw++] = in sdma_v4_0_ring_pad_ib()
1699 ib->ptr[ib->length_dw++] = byte_count - 1; in sdma_v4_0_emit_copy_buffer()
1725 ib->ptr[ib->length_dw++] = src_data; in sdma_v4_0_emit_fill_buffer()
[all …]
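
One detail worth noting across the SDMA hits: sdma_v4_0 programs sizes as count-minus-one ("bytes - 1", "ndw - 1", "byte_count - 1"), whereas the sdma_v2_4/sdma_v3_0 code writes the raw count. A tiny hedged helper making the difference explicit (struct and names are hypothetical):

    #include <stdint.h>

    /* Simplified, hypothetical IB. */
    struct ib {
        uint32_t *ptr;
        unsigned length_dw;
    };

    /* Emit a size field into an SDMA packet.  SDMA 4.0 expects the field
     * as "count - 1" (see the "bytes - 1" / "byte_count - 1" hits above);
     * SDMA 2.4/3.0 write the raw count. */
    void sdma_emit_count(struct ib *ib, uint32_t count, int minus_one_encoding)
    {
        ib->ptr[ib->length_dw++] = minus_one_encoding ? count - 1 : count;
    }
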
uvd_v6_0.c
234 ib->length_dw = 0; in uvd_v6_0_enc_get_create_msg()
235 ib->ptr[ib->length_dw++] = 0x00000018; in uvd_v6_0_enc_get_create_msg()
237 ib->ptr[ib->length_dw++] = handle; in uvd_v6_0_enc_get_create_msg()
238 ib->ptr[ib->length_dw++] = 0x00010000; in uvd_v6_0_enc_get_create_msg()
240 ib->ptr[ib->length_dw++] = dummy; in uvd_v6_0_enc_get_create_msg()
242 ib->ptr[ib->length_dw++] = 0x00000014; in uvd_v6_0_enc_get_create_msg()
244 ib->ptr[ib->length_dw++] = 0x0000001c; in uvd_v6_0_enc_get_create_msg()
245 ib->ptr[ib->length_dw++] = 0x00000001; in uvd_v6_0_enc_get_create_msg()
296 ib->length_dw = 0; in uvd_v6_0_enc_get_destroy_msg()
299 ib->ptr[ib->length_dw++] = handle; in uvd_v6_0_enc_get_destroy_msg()
[all …]
uvd_v7_0.c
242 ib->length_dw = 0; in uvd_v7_0_enc_get_create_msg()
243 ib->ptr[ib->length_dw++] = 0x00000018; in uvd_v7_0_enc_get_create_msg()
245 ib->ptr[ib->length_dw++] = handle; in uvd_v7_0_enc_get_create_msg()
246 ib->ptr[ib->length_dw++] = 0x00000000; in uvd_v7_0_enc_get_create_msg()
248 ib->ptr[ib->length_dw++] = dummy; in uvd_v7_0_enc_get_create_msg()
250 ib->ptr[ib->length_dw++] = 0x00000014; in uvd_v7_0_enc_get_create_msg()
252 ib->ptr[ib->length_dw++] = 0x0000001c; in uvd_v7_0_enc_get_create_msg()
253 ib->ptr[ib->length_dw++] = 0x00000000; in uvd_v7_0_enc_get_create_msg()
305 ib->length_dw = 0; in uvd_v7_0_enc_get_destroy_msg()
308 ib->ptr[ib->length_dw++] = handle; in uvd_v7_0_enc_get_destroy_msg()
[all …]
gfx_v8_0.c
907 ib.length_dw = 5; in gfx_v8_0_ring_test_ib()
1669 ib.length_dw = 0; in gfx_v8_0_do_edc_gpr_workarounds()
1687 ib.ptr[ib.length_dw++] = 8; /* x */ in gfx_v8_0_do_edc_gpr_workarounds()
1688 ib.ptr[ib.length_dw++] = 1; /* y */ in gfx_v8_0_do_edc_gpr_workarounds()
1689 ib.ptr[ib.length_dw++] = 1; /* z */ in gfx_v8_0_do_edc_gpr_workarounds()
1690 ib.ptr[ib.length_dw++] = in gfx_v8_0_do_edc_gpr_workarounds()
1713 ib.ptr[ib.length_dw++] = 8; /* x */ in gfx_v8_0_do_edc_gpr_workarounds()
1714 ib.ptr[ib.length_dw++] = 1; /* y */ in gfx_v8_0_do_edc_gpr_workarounds()
1715 ib.ptr[ib.length_dw++] = 1; /* z */ in gfx_v8_0_do_edc_gpr_workarounds()
1716 ib.ptr[ib.length_dw++] = in gfx_v8_0_do_edc_gpr_workarounds()
[all …]
amdgpu_cs.c
162 p->chunks[i].length_dw = user_chunk.length_dw; in amdgpu_cs_parser_init()
164 size = p->chunks[i].length_dw; in amdgpu_cs_parser_init()
186 if (p->chunks[i].length_dw * sizeof(uint32_t) < size) { in amdgpu_cs_parser_init()
200 if (p->chunks[i].length_dw * sizeof(uint32_t) < size) { in amdgpu_cs_parser_init()
1042 ib->length_dw = chunk_ib->ib_bytes / 4; in amdgpu_cs_ib_fill()
1066 num_deps = chunk->length_dw * 4 / in amdgpu_cs_process_fence_dep()
1128 num_deps = chunk->length_dw * 4 / in amdgpu_cs_process_syncobj_in_dep()
1146 num_deps = chunk->length_dw * 4 / in amdgpu_cs_process_syncobj_out_dep()
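
On the amdgpu side the IB length comes in as bytes (chunk_ib->ib_bytes / 4), and dependency/syncobj chunks turn length_dw back into an element count with length_dw * 4 / sizeof(element). A sketch of those two conversions; dep_entry is a hypothetical stand-in for the real chunk element type:

    #include <stdint.h>
    #include <stddef.h>

    /* Hypothetical element type standing in for the real dependency
     * chunk entries (e.g. struct drm_amdgpu_cs_chunk_dep). */
    struct dep_entry {
        uint32_t handle;
        uint32_t flags;
        uint64_t fence_seq;
    };

    /* IB length in dwords from a byte count supplied by userspace. */
    uint32_t ib_length_dw(uint32_t ib_bytes)
    {
        return ib_bytes / 4;
    }

    /* Number of fixed-size entries carried by a dependency chunk. */
    size_t chunk_num_deps(uint32_t length_dw)
    {
        return (size_t)length_dw * 4 / sizeof(struct dep_entry);
    }
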
amdgpu_uvd.c
898 if (ctx->idx >= ib->length_dw) { in amdgpu_uvd_cs_reg()
941 for (ctx->idx = 0 ; ctx->idx < ib->length_dw; ) { in amdgpu_uvd_cs_packets()
986 if (ib->length_dw % 16) { in amdgpu_uvd_ring_parse_cs()
988 ib->length_dw); in amdgpu_uvd_ring_parse_cs()
1071 ib->length_dw = 16; in amdgpu_uvd_send_msg()
amdgpu_ring.c
109 while (ib->length_dw & ring->funcs->align_mask) in amdgpu_ring_generic_pad_ib()
110 ib->ptr[ib->length_dw++] = ring->funcs->nop; in amdgpu_ring_generic_pad_ib()
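
amdgpu_ring_generic_pad_ib() shows the generic padding rule: NOP dwords are appended until length_dw satisfies the ring's alignment mask. A minimal sketch, assuming a simplified IB struct and a power-of-two alignment:

    #include <stdint.h>

    /* Simplified, hypothetical IB. */
    struct ib {
        uint32_t *ptr;
        unsigned length_dw;
    };

    /* Append NOPs until the dword count satisfies the ring's alignment;
     * align_mask is (alignment - 1) for a power-of-two alignment, and nop
     * is the ring's NOP opcode. */
    void pad_ib(struct ib *ib, uint32_t align_mask, uint32_t nop)
    {
        while (ib->length_dw & align_mask)
            ib->ptr[ib->length_dw++] = nop;
    }
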
soc15.c
244 u32 i, length_dw; in soc15_read_bios_from_rom() local
255 length_dw = ALIGN(length_bytes, 4) / 4; in soc15_read_bios_from_rom()
260 for (i = 0; i < length_dw; i++) in soc15_read_bios_from_rom()
cik.c
940 u32 i, length_dw; in cik_read_bios_from_rom() local
951 length_dw = ALIGN(length_bytes, 4) / 4; in cik_read_bios_from_rom()
959 for (i = 0; i < length_dw; i++) in cik_read_bios_from_rom()
vi.c
424 u32 i, length_dw; in vi_read_bios_from_rom() local
435 length_dw = ALIGN(length_bytes, 4) / 4; in vi_read_bios_from_rom()
443 for (i = 0; i < length_dw; i++) in vi_read_bios_from_rom()
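
Finally, the *_read_bios_from_rom() hits use length_dw purely as a loop bound: the byte length is rounded up to whole dwords (ALIGN(length_bytes, 4) / 4) and the ROM is copied one dword per iteration. A hedged sketch; read_rom_dword is a hypothetical accessor standing in for the indexed register reads the drivers actually perform:

    #include <stdint.h>

    /* Round a byte length up to whole dwords, i.e. ALIGN(length_bytes, 4) / 4. */
    static uint32_t bytes_to_dwords(uint32_t length_bytes)
    {
        return (length_bytes + 3) / 4;
    }

    /* Copy a ROM image one dword at a time, looping length_dw times as the
     * *_read_bios_from_rom() functions above do. */
    void read_rom(uint32_t *dst, uint32_t length_bytes,
                  uint32_t (*read_rom_dword)(uint32_t index))
    {
        uint32_t i, length_dw = bytes_to_dwords(length_bytes);

        for (i = 0; i < length_dw; i++)
            dst[i] = read_rom_dword(i);
    }
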
