Searched refs:xcp (Results 1 – 12 of 12) sorted by relevance

/openbsd/sys/dev/pci/drm/amd/amdgpu/
amdgpu_xcp.c
     66  struct amdgpu_xcp *xcp;  in amdgpu_xcp_run_transition() local
     72  xcp = &xcp_mgr->xcp[xcp_id];  in amdgpu_xcp_run_transition()
     74  xcp_ip = &xcp->ip[i];  in amdgpu_xcp_run_transition()
    108  struct amdgpu_xcp *xcp;  in __amdgpu_xcp_add_block() local
    113  xcp = &xcp_mgr->xcp[xcp_id];  in __amdgpu_xcp_add_block()
    117  xcp->valid = true;  in __amdgpu_xcp_add_block()
    148  xcp_mgr->xcp[i].id = i;  in amdgpu_xcp_init()
    296  struct amdgpu_xcp *xcp;  in amdgpu_xcp_get_partition() local
    303  xcp = &xcp_mgr->xcp[i];  in amdgpu_xcp_get_partition()
    304  if ((xcp->valid) && (xcp->ip[ip].valid) &&  in amdgpu_xcp_get_partition()
    [all …]

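The amdgpu_xcp_get_partition() hits (file lines 296-304) show the lookup pattern this file uses: index into the fixed xcp[] array, then gate on the partition's valid flag and the per-IP valid flag. A minimal sketch of that shape, using reduced stand-in structs and an assumed array bound of 8 in place of MAX_XCP (the real definitions live in amdgpu_xcp.h; only the test quoted at line 304 is taken from the results above):

    struct xcp_ip_stub  { int valid; };
    struct xcp_stub     { int valid; int id; struct xcp_ip_stub ip[8]; };
    struct xcp_mgr_stub { struct xcp_stub xcp[8]; }; /* 8 stands in for MAX_XCP */

    /* Return the first partition index whose partition and requested IP
     * block are both marked valid, or -1 if none matches; this mirrors
     * the condition quoted from amdgpu_xcp.c line 304. */
    static int xcp_get_partition_stub(struct xcp_mgr_stub *mgr, int ip)
    {
            int i;

            for (i = 0; i < 8; i++) {
                    struct xcp_stub *xcp = &mgr->xcp[i];

                    if (xcp->valid && xcp->ip[ip].valid)
                            return i;
            }
            return -1;
    }
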
amdgpu_xcp.h
     94  struct amdgpu_xcp xcp[MAX_XCP];  member
    110  struct amdgpu_xcp *xcp, uint8_t *mem_id);
    135  int amdgpu_xcp_get_inst_details(struct amdgpu_xcp *xcp,
    172  if (xcp_mgr->xcp[*from].valid)  in amdgpu_get_next_xcp()
    173  return &xcp_mgr->xcp[*from];  in amdgpu_get_next_xcp()
    180  #define for_each_xcp(xcp_mgr, xcp, i) \  argument
    181  for (i = 0, xcp = amdgpu_get_next_xcp(xcp_mgr, &i); xcp; \
    182  ++i, xcp = amdgpu_get_next_xcp(xcp_mgr, &i))

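The for_each_xcp() macro is quoted in full at lines 180-182: amdgpu_get_next_xcp() returns the first valid partition at or after *from (judging by the valid test at line 172, it skips invalid slots) and NULL once the array is exhausted, which terminates the loop. A hedged usage sketch, assuming a caller that already has an adev pointer and whatever locking the driver requires:

    struct amdgpu_xcp *xcp;
    int i;

    /* Walk only the populated partition slots; the macro restarts the
     * lookup from the incremented index on every iteration. */
    for_each_xcp(adev->xcp_mgr, xcp, i)
            printk("xcp %d: id %d\n", i, xcp->id);
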
aqua_vanjaram.c
    103  if (adev->xcp_mgr->xcp[xcp_id].ip[ip_blk].inst_mask & inst_mask) {  in aqua_vanjaram_set_xcp_id()
    117  num_gpu_sched = &adev->xcp_mgr->xcp[sel_xcp_id]  in aqua_vanjaram_xcp_gpu_sched_update()
    119  adev->xcp_mgr->xcp[sel_xcp_id].gpu_sched[ring->funcs->type][ring->hw_prio]  in aqua_vanjaram_xcp_gpu_sched_update()
    133  atomic_set(&adev->xcp_mgr->xcp[i].ref_cnt, 0);  in aqua_vanjaram_xcp_sched_list_update()
    134  memset(adev->xcp_mgr->xcp[i].gpu_sched, 0, sizeof(adev->xcp_mgr->xcp->gpu_sched));  in aqua_vanjaram_xcp_sched_list_update()
    194  total_ref_cnt = atomic_read(&adev->xcp_mgr->xcp[i].ref_cnt);  in aqua_vanjaram_select_scheds()
    203  if (adev->xcp_mgr->xcp[sel_xcp_id].gpu_sched[hw_ip][hw_prio].num_scheds) {  in aqua_vanjaram_select_scheds()
    205  *scheds = adev->xcp_mgr->xcp[fpriv->xcp_id].gpu_sched[hw_ip][hw_prio].sched;  in aqua_vanjaram_select_scheds()
    206  atomic_inc(&adev->xcp_mgr->xcp[sel_xcp_id].ref_cnt);  in aqua_vanjaram_select_scheds()
    557  struct amdgpu_xcp *xcp, uint8_t *mem_id)  in aqua_vanjaram_get_xcp_mem_id() argument
    [all …]

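The aqua_vanjaram_select_scheds() hits (194-206) outline a load-balancing policy: read each partition's atomic ref_cnt, pick one that actually has schedulers for the requested hw_ip/hw_prio pair, then atomically bump the winner's count. A simplified standalone sketch of that policy under stated assumptions: stub types, C11 atomics standing in for the kernel's atomic_t, and no path for an explicitly requested partition (which the real function also handles):

    #include <limits.h>
    #include <stdatomic.h>

    #define NUM_XCP 4

    struct sched_stub {
            atomic_int ref_cnt;    /* contexts currently bound to this partition */
            int        num_scheds; /* schedulers usable for the requested queue type */
    };

    /* Choose the least-referenced partition that can service the request
     * and take a reference on it; returns -1 when nothing qualifies. */
    static int select_xcp_stub(struct sched_stub xcp[NUM_XCP])
    {
            int i, sel = -1, least = INT_MAX;

            for (i = 0; i < NUM_XCP; i++) {
                    int refs = atomic_load(&xcp[i].ref_cnt);

                    if (xcp[i].num_scheds && refs < least) {
                            least = refs;
                            sel = i;
                    }
            }
            if (sel >= 0)
                    atomic_fetch_add(&xcp[sel].ref_cnt, 1);
            return sel;
    }
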
amdgpu_amdkfd.c
    427  struct amdgpu_xcp *xcp)  in amdgpu_amdkfd_get_local_mem_info() argument
    431  if (xcp) {  in amdgpu_amdkfd_get_local_mem_info()
    434  KFD_XCP_MEMORY_SIZE(adev, xcp->id);  in amdgpu_amdkfd_get_local_mem_info()
    437  KFD_XCP_MEMORY_SIZE(adev, xcp->id);  in amdgpu_amdkfd_get_local_mem_info()

amdgpu_amdkfd.h
    234  struct amdgpu_xcp *xcp);
    344  (adev)->xcp_mgr->xcp[(xcp_id)].mem_id : -1)

/openbsd/bin/ksh/ |
emacs.c
    284  *xcp = 0;  in x_emacs()
    436  memmove(xcp+len, xcp, xep - xcp + 1);  in x_do_ins()
    438  xcp += len;  in x_do_ins()
    527  cp = xcp;  in x_delete()
    533  memmove(xcp, xcp+nc, xep - xcp + 1); /* Copies the null */  in x_delete()
    640  xcp = cp;  in x_goto()
    987  *xcp = 0;  in x_del_line()
   1132  xcp[-1] = xcp[-2];  in x_transpose()
   1142  xcp[-1] = xcp[0];  in x_transpose()
   1201  xmp = xcp;  in x_yank()
    [all …]

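In ksh's line editor, xcp is the cursor inside the NUL-terminated edit buffer and xep points at the terminating NUL; the memmove() calls at 436 and 533 open and close a gap at the cursor, and the xep - xcp + 1 length deliberately includes the NUL (the "Copies the null" comment is the shell's own). A standalone demonstration of the same idiom, with local variables standing in for the shell's globals:

    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
            char buf[64] = "hello world";
            char *xcp = buf + 5;           /* cursor: just after "hello" */
            char *xep = buf + strlen(buf); /* always points at the NUL */
            const char *ins = ",";
            size_t len = strlen(ins);

            /* Insert: shift the tail right, NUL included, then copy in
             * (the x_do_ins() shape from line 436). */
            memmove(xcp + len, xcp, xep - xcp + 1);
            memcpy(xcp, ins, len);
            xcp += len;
            xep += len;

            /* Delete one char at the cursor: shrink xep first, then shift
             * the tail left, NUL included (the x_delete() shape, line 533
             * with nc == 1). */
            xep -= 1;
            memmove(xcp, xcp + 1, xep - xcp + 1);

            printf("%s\n", buf); /* prints "hello,world" */
            return 0;
    }
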
/openbsd/regress/lib/libcrypto/x509/bettertls/certificates/ |
796.crt
     15  3xcp/9JlP1F4YvXqEzRo0OYCC3aksB9JmdbBd/mEL40D/Jcgx7s7RvuYMKN5BQID
         (incidental match: "xcp" falls inside base64-encoded certificate data)

/openbsd/sys/dev/pci/drm/amd/amdkfd/ |
kfd_device.c
    765  node->xcp = amdgpu_get_next_xcp(kfd->adev->xcp_mgr, &xcp_idx);  in kgd2kfd_device_init()
    767  if (node->xcp) {  in kgd2kfd_device_init()
    768  amdgpu_xcp_get_inst_details(node->xcp, AMDGPU_XCP_GFX,  in kgd2kfd_device_init()
    776  if (node->xcp) {  in kgd2kfd_device_init()
    778  node->node_id, node->xcp->mem_id,  in kgd2kfd_device_init()
    808  &node->local_mem_info, node->xcp);  in kgd2kfd_device_init()

kfd_migrate.c
    522  node->xcp ? node->xcp->id : 0);  in svm_migrate_ram_to_vram()
    565  node->xcp ? node->xcp->id : 0);  in svm_migrate_ram_to_vram()

kfd_priv.h
    271  struct amdgpu_xcp *xcp;  member
   1490  if (node->xcp)  in kfd_devcgroup_check_permission()
   1491  ddev = node->xcp->ddev;  in kfd_devcgroup_check_permission()

kfd_topology.c
   1184  dev->gpu->xcp);  in kfd_fill_mem_clk_max_info()
   1931  if (gpu->xcp && !gpu->xcp->ddev) {  in kfd_topology_add_device()
   1997  if (gpu->xcp)  in kfd_topology_add_device()
   1998  dev->node_props.drm_render_minor = gpu->xcp->ddev->render->index;  in kfd_topology_add_device()

kfd_svm.c
    586  if (node->xcp)  in svm_range_vram_node_new()
    587  bp.xcp_id_plus1 = node->xcp->id + 1;  in svm_range_vram_node_new()
   1271  (!bo_node->xcp || !node->xcp || bo_node->xcp->mem_id == node->xcp->mem_id))  in svm_range_get_pte_flags()
   2024  if (adev->kfd.dev->nodes[i]->xcp)  in svm_range_set_max_pages()
   2025  id = adev->kfd.dev->nodes[i]->xcp->id;  in svm_range_set_max_pages()

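The svm_range_vram_node_new() hits (586-587) show a one-based encoding for an optional partition id: bp.xcp_id_plus1 stays 0 when the node has no partition and is set to id + 1 otherwise, so "no partition" remains distinguishable from partition 0. A tiny sketch of the convention; the helper names are hypothetical, not driver API:

    #include <stdio.h>

    /* Encode an optional partition id in one int: 0 = absent, else id + 1. */
    static int xcp_id_plus1_encode(int has_xcp, int id)
    {
            return has_xcp ? id + 1 : 0;
    }

    /* Decode: returns 1 and writes *id when a partition was encoded. */
    static int xcp_id_plus1_decode(int plus1, int *id)
    {
            if (plus1 == 0)
                    return 0;
            *id = plus1 - 1;
            return 1;
    }

    int main(void)
    {
            int id;

            printf("%d\n", xcp_id_plus1_encode(0, 0)); /* 0: no partition */
            printf("%d\n", xcp_id_plus1_encode(1, 0)); /* 1: partition 0  */
            if (xcp_id_plus1_decode(3, &id))
                    printf("decoded id %d\n", id);     /* partition id 2  */
            return 0;
    }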