1 /*
2  * Copyright (C) 2020-2021 Intel Corporation
3  *
4  * SPDX-License-Identifier: MIT
5  *
6  */
7 
8 #include "level_zero/core/source/context/context_imp.h"
9 
10 #include "shared/source/command_container/implicit_scaling.h"
11 #include "shared/source/memory_manager/memory_operations_handler.h"
12 #include "shared/source/memory_manager/unified_memory_manager.h"
13 
14 #include "level_zero/core/source/device/device_imp.h"
15 #include "level_zero/core/source/driver/driver_handle_imp.h"
16 #include "level_zero/core/source/helpers/properties_parser.h"
17 #include "level_zero/core/source/hw_helpers/l0_hw_helper.h"
18 #include "level_zero/core/source/image/image.h"
19 #include "level_zero/core/source/memory/memory_operations_helper.h"
20 
21 namespace L0 {
22 
// Destroys this context. The object deletes itself, so the caller must not
// use this pointer (or the corresponding handle) afterwards.
ze_result_t ContextImp::destroy() {
    delete this;

    return ZE_RESULT_SUCCESS;
}
28 
getStatus()29 ze_result_t ContextImp::getStatus() {
30     DriverHandleImp *driverHandleImp = static_cast<DriverHandleImp *>(this->driverHandle);
31     for (auto device : driverHandleImp->devices) {
32         DeviceImp *deviceImp = static_cast<DeviceImp *>(device);
33         if (deviceImp->resourcesReleased) {
34             return ZE_RESULT_ERROR_DEVICE_LOST;
35         }
36     }
37     return ZE_RESULT_SUCCESS;
38 }
39 
// Returns the driver handle this context was created from.
DriverHandle *ContextImp::getDriverHandle() {
    return this->driverHandle;
}
43 
// Binds the context to its owning driver handle. The handle is stored as the
// implementation type for direct access to driver internals.
ContextImp::ContextImp(DriverHandle *driverHandle) {
    this->driverHandle = static_cast<DriverHandleImp *>(driverHandle);
}
47 
addDeviceAndSubDevices(Device * device)48 void ContextImp::addDeviceAndSubDevices(Device *device) {
49     this->devices.insert(std::make_pair(device->toHandle(), device));
50     DeviceImp *deviceImp = static_cast<DeviceImp *>(device);
51     for (auto subDevice : deviceImp->subDevices) {
52         this->addDeviceAndSubDevices(subDevice);
53     }
54 }
55 
allocHostMem(const ze_host_mem_alloc_desc_t * hostDesc,size_t size,size_t alignment,void ** ptr)56 ze_result_t ContextImp::allocHostMem(const ze_host_mem_alloc_desc_t *hostDesc,
57                                      size_t size,
58                                      size_t alignment,
59                                      void **ptr) {
60 
61     bool relaxedSizeAllowed = NEO::DebugManager.flags.AllowUnrestrictedSize.get();
62     if (hostDesc->pNext) {
63         const ze_base_desc_t *extendedDesc = reinterpret_cast<const ze_base_desc_t *>(hostDesc->pNext);
64         if (extendedDesc->stype == ZE_STRUCTURE_TYPE_RELAXED_ALLOCATION_LIMITS_EXP_DESC) {
65             const ze_relaxed_allocation_limits_exp_desc_t *relaxedLimitsDesc =
66                 reinterpret_cast<const ze_relaxed_allocation_limits_exp_desc_t *>(extendedDesc);
67             if (!(relaxedLimitsDesc->flags & ZE_RELAXED_ALLOCATION_LIMITS_EXP_FLAG_MAX_SIZE)) {
68                 return ZE_RESULT_ERROR_INVALID_ARGUMENT;
69             }
70             relaxedSizeAllowed = true;
71         }
72     }
73 
74     if (relaxedSizeAllowed == false &&
75         (size > this->driverHandle->devices[0]->getNEODevice()->getDeviceInfo().maxMemAllocSize)) {
76         *ptr = nullptr;
77         return ZE_RESULT_ERROR_UNSUPPORTED_SIZE;
78     }
79 
80     NEO::SVMAllocsManager::UnifiedMemoryProperties unifiedMemoryProperties(InternalMemoryType::HOST_UNIFIED_MEMORY,
81                                                                            this->rootDeviceIndices,
82                                                                            this->deviceBitfields);
83 
84     if (hostDesc->flags & ZE_HOST_MEM_ALLOC_FLAG_BIAS_UNCACHED) {
85         unifiedMemoryProperties.allocationFlags.flags.locallyUncachedResource = 1;
86     }
87 
88     auto usmPtr = this->driverHandle->svmAllocsManager->createHostUnifiedMemoryAllocation(size,
89                                                                                           unifiedMemoryProperties);
90     if (usmPtr == nullptr) {
91         return ZE_RESULT_ERROR_OUT_OF_HOST_MEMORY;
92     }
93 
94     *ptr = usmPtr;
95 
96     return ZE_RESULT_SUCCESS;
97 }
98 
isDeviceDefinedForThisContext(Device * inDevice)99 bool ContextImp::isDeviceDefinedForThisContext(Device *inDevice) {
100     return (this->getDevices().find(inDevice->toHandle()) != this->getDevices().end());
101 }
102 
allocDeviceMem(ze_device_handle_t hDevice,const ze_device_mem_alloc_desc_t * deviceDesc,size_t size,size_t alignment,void ** ptr)103 ze_result_t ContextImp::allocDeviceMem(ze_device_handle_t hDevice,
104                                        const ze_device_mem_alloc_desc_t *deviceDesc,
105                                        size_t size,
106                                        size_t alignment, void **ptr) {
107 
108     auto device = Device::fromHandle(hDevice);
109     if (isDeviceDefinedForThisContext(device) == false) {
110         return ZE_RESULT_ERROR_DEVICE_LOST;
111     }
112 
113     StructuresLookupTable lookupTable = {};
114 
115     lookupTable.relaxedSizeAllowed = NEO::DebugManager.flags.AllowUnrestrictedSize.get();
116     auto parseResult = prepareL0StructuresLookupTable(lookupTable, deviceDesc->pNext);
117 
118     if (parseResult != ZE_RESULT_SUCCESS) {
119         return parseResult;
120     }
121 
122     auto neoDevice = device->getNEODevice();
123     auto rootDeviceIndex = neoDevice->getRootDeviceIndex();
124     auto deviceBitfields = this->driverHandle->deviceBitfields;
125 
126     deviceBitfields[rootDeviceIndex] = neoDevice->getDeviceBitfield();
127 
128     if (lookupTable.isSharedHandle) {
129         if (lookupTable.sharedHandleType.isDMABUFHandle) {
130             ze_ipc_memory_flags_t flags = {};
131             *ptr = this->driverHandle->importFdHandle(hDevice, flags, lookupTable.sharedHandleType.fd, nullptr);
132             if (nullptr == *ptr) {
133                 return ZE_RESULT_ERROR_INVALID_ARGUMENT;
134             }
135         } else {
136             UNRECOVERABLE_IF(!lookupTable.sharedHandleType.isNTHandle);
137             *ptr = this->driverHandle->importNTHandle(hDevice, lookupTable.sharedHandleType.ntHnadle);
138             if (*ptr == nullptr) {
139                 return ZE_RESULT_ERROR_INVALID_ARGUMENT;
140             }
141         }
142         return ZE_RESULT_SUCCESS;
143     }
144 
145     if (lookupTable.relaxedSizeAllowed == false &&
146         (size > neoDevice->getDeviceInfo().maxMemAllocSize)) {
147         *ptr = nullptr;
148         return ZE_RESULT_ERROR_UNSUPPORTED_SIZE;
149     }
150 
151     uint64_t globalMemSize = neoDevice->getDeviceInfo().globalMemSize;
152 
153     uint32_t numSubDevices = neoDevice->getNumGenericSubDevices();
154     if ((!device->isImplicitScalingCapable()) && (numSubDevices > 1)) {
155         globalMemSize = globalMemSize / numSubDevices;
156     }
157     if (lookupTable.relaxedSizeAllowed && (size > globalMemSize)) {
158         *ptr = nullptr;
159         return ZE_RESULT_ERROR_UNSUPPORTED_SIZE;
160     }
161 
162     deviceBitfields[rootDeviceIndex] = neoDevice->getDeviceBitfield();
163 
164     NEO::SVMAllocsManager::UnifiedMemoryProperties unifiedMemoryProperties(InternalMemoryType::DEVICE_UNIFIED_MEMORY, this->driverHandle->rootDeviceIndices, deviceBitfields);
165     unifiedMemoryProperties.allocationFlags.flags.shareable = static_cast<uint32_t>(lookupTable.exportMemory);
166     unifiedMemoryProperties.device = neoDevice;
167     unifiedMemoryProperties.allocationFlags.flags.compressedHint = isAllocationSuitableForCompression(lookupTable, *device, size);
168 
169     if (deviceDesc->flags & ZE_DEVICE_MEM_ALLOC_FLAG_BIAS_UNCACHED) {
170         unifiedMemoryProperties.allocationFlags.flags.locallyUncachedResource = 1;
171     }
172 
173     void *usmPtr =
174         this->driverHandle->svmAllocsManager->createUnifiedMemoryAllocation(size, unifiedMemoryProperties);
175     if (usmPtr == nullptr) {
176         return ZE_RESULT_ERROR_OUT_OF_DEVICE_MEMORY;
177     }
178     *ptr = usmPtr;
179 
180     return ZE_RESULT_SUCCESS;
181 }
182 
// Allocates shared USM memory, migratable between host and device.
// hDevice may be nullptr; the allocation is then not bound to a particular
// device and the context's first device is used for the size-limit checks.
// Returns:
//  - ZE_RESULT_ERROR_INVALID_ARGUMENT for a malformed relaxed-limits descriptor
//  - ZE_RESULT_ERROR_UNSUPPORTED_SIZE when size exceeds the allowed limit
//  - ZE_RESULT_ERROR_DEVICE_LOST when hDevice is not part of this context
//  - ZE_RESULT_ERROR_OUT_OF_DEVICE_MEMORY when the allocation fails
ze_result_t ContextImp::allocSharedMem(ze_device_handle_t hDevice,
                                       const ze_device_mem_alloc_desc_t *deviceDesc,
                                       const ze_host_mem_alloc_desc_t *hostDesc,
                                       size_t size,
                                       size_t alignment,
                                       void **ptr) {

    // Default to the first device of the context when no device was passed.
    auto device = this->devices.begin()->second;
    if (hDevice != nullptr) {
        device = Device::fromHandle(hDevice);
    }
    auto neoDevice = device->getNEODevice();

    // Relaxed size limits via debug flag or per-allocation extension struct.
    bool relaxedSizeAllowed = NEO::DebugManager.flags.AllowUnrestrictedSize.get();
    if (deviceDesc->pNext) {
        const ze_base_desc_t *extendedDesc = reinterpret_cast<const ze_base_desc_t *>(deviceDesc->pNext);
        if (extendedDesc->stype == ZE_STRUCTURE_TYPE_RELAXED_ALLOCATION_LIMITS_EXP_DESC) {
            const ze_relaxed_allocation_limits_exp_desc_t *relaxedLimitsDesc =
                reinterpret_cast<const ze_relaxed_allocation_limits_exp_desc_t *>(extendedDesc);
            // Only the MAX_SIZE relaxation is supported.
            if (!(relaxedLimitsDesc->flags & ZE_RELAXED_ALLOCATION_LIMITS_EXP_FLAG_MAX_SIZE)) {
                return ZE_RESULT_ERROR_INVALID_ARGUMENT;
            }
            relaxedSizeAllowed = true;
        }
    }

    // Without relaxed limits the request must fit in a single allocation.
    if (relaxedSizeAllowed == false &&
        (size > neoDevice->getDeviceInfo().maxMemAllocSize)) {
        *ptr = nullptr;
        return ZE_RESULT_ERROR_UNSUPPORTED_SIZE;
    }

    uint64_t globalMemSize = neoDevice->getDeviceInfo().globalMemSize;

    // Without implicit scaling each sub-device only sees its share of memory.
    uint32_t numSubDevices = neoDevice->getNumGenericSubDevices();
    if ((!device->isImplicitScalingCapable()) && (numSubDevices > 1)) {
        globalMemSize = globalMemSize / numSubDevices;
    }
    // Even relaxed allocations may not exceed the total device memory.
    if (relaxedSizeAllowed &&
        (size > globalMemSize)) {
        *ptr = nullptr;
        return ZE_RESULT_ERROR_UNSUPPORTED_SIZE;
    }

    auto deviceBitfields = this->deviceBitfields;
    NEO::Device *unifiedMemoryPropertiesDevice = nullptr;
    if (hDevice) {
        // NOTE(review): the context-membership check happens only here, after
        // the size checks above have already used this device — confirm this
        // ordering is intended.
        device = Device::fromHandle(hDevice);
        if (isDeviceDefinedForThisContext(device) == false) {
            return ZE_RESULT_ERROR_DEVICE_LOST;
        }

        neoDevice = device->getNEODevice();
        auto rootDeviceIndex = neoDevice->getRootDeviceIndex();
        unifiedMemoryPropertiesDevice = neoDevice;
        deviceBitfields[rootDeviceIndex] = neoDevice->getDeviceBitfield();
    }

    NEO::SVMAllocsManager::UnifiedMemoryProperties unifiedMemoryProperties(InternalMemoryType::SHARED_UNIFIED_MEMORY,
                                                                           this->rootDeviceIndices,
                                                                           deviceBitfields);
    // Remains nullptr for a device-agnostic shared allocation.
    unifiedMemoryProperties.device = unifiedMemoryPropertiesDevice;

    if (deviceDesc->flags & ZE_DEVICE_MEM_ALLOC_FLAG_BIAS_UNCACHED) {
        unifiedMemoryProperties.allocationFlags.flags.locallyUncachedResource = 1;
    }

    // Initial placement hints (GPU side from the device desc, CPU side from
    // the host desc).
    if (deviceDesc->flags & ZE_DEVICE_MEM_ALLOC_FLAG_BIAS_INITIAL_PLACEMENT) {
        unifiedMemoryProperties.allocationFlags.allocFlags.usmInitialPlacementGpu = 1;
    }

    if (hostDesc->flags & ZE_HOST_MEM_ALLOC_FLAG_BIAS_INITIAL_PLACEMENT) {
        unifiedMemoryProperties.allocationFlags.allocFlags.usmInitialPlacementCpu = 1;
    }

    auto usmPtr =
        this->driverHandle->svmAllocsManager->createSharedUnifiedMemoryAllocation(size,
                                                                                  unifiedMemoryProperties,
                                                                                  static_cast<void *>(neoDevice->getSpecializedDevice<L0::Device>()));

    if (usmPtr == nullptr) {
        return ZE_RESULT_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    *ptr = usmPtr;

    return ZE_RESULT_SUCCESS;
}
270 
freeMem(const void * ptr)271 ze_result_t ContextImp::freeMem(const void *ptr) {
272     auto allocation = this->driverHandle->svmAllocsManager->getSVMAlloc(ptr);
273     if (allocation == nullptr) {
274         return ZE_RESULT_ERROR_INVALID_ARGUMENT;
275     }
276 
277     for (auto pairDevice : this->devices) {
278         DeviceImp *deviceImp = static_cast<DeviceImp *>(pairDevice.second);
279 
280         std::unique_lock<NEO::SpinLock> lock(deviceImp->peerAllocationsMutex);
281 
282         auto iter = deviceImp->peerAllocations.allocations.find(ptr);
283         if (iter != deviceImp->peerAllocations.allocations.end()) {
284             auto peerAllocData = &iter->second;
285             auto peerAlloc = peerAllocData->gpuAllocations.getDefaultGraphicsAllocation();
286             auto peerPtr = reinterpret_cast<void *>(peerAlloc->getGpuAddress());
287             this->driverHandle->svmAllocsManager->freeSVMAlloc(peerPtr);
288             deviceImp->peerAllocations.allocations.erase(iter);
289         }
290     }
291 
292     this->driverHandle->svmAllocsManager->freeSVMAlloc(const_cast<void *>(ptr));
293     if (this->driverHandle->svmAllocsManager->getSvmMapOperation(ptr)) {
294         this->driverHandle->svmAllocsManager->removeSvmMapOperation(ptr);
295     }
296     return ZE_RESULT_SUCCESS;
297 }
298 
// Makes the allocation backing [ptr, ptr+size) resident on the given device.
// On success, shared USM allocations are additionally recorded in the driver
// handle's sharedMakeResidentAllocations map (under its lock) so that
// evictMemory() can remove the entry later.
ze_result_t ContextImp::makeMemoryResident(ze_device_handle_t hDevice, void *ptr, size_t size) {
    Device *device = L0::Device::fromHandle(hDevice);
    NEO::Device *neoDevice = device->getNEODevice();
    auto allocation = device->getDriverHandle()->getDriverSystemMemoryAllocation(
        ptr,
        size,
        neoDevice->getRootDeviceIndex(),
        nullptr);
    if (allocation == nullptr) {
        return ZE_RESULT_ERROR_INVALID_ARGUMENT;
    }

    NEO::MemoryOperationsHandler *memoryOperationsIface = neoDevice->getRootDeviceEnvironment().memoryOperationsInterface.get();
    auto success = memoryOperationsIface->makeResident(neoDevice, ArrayRef<NEO::GraphicsAllocation *>(&allocation, 1));
    ze_result_t res = changeMemoryOperationStatusToL0ResultType(success);

    if (ZE_RESULT_SUCCESS == res) {
        // Track shared USM allocations so eviction can untrack them.
        auto allocData = device->getDriverHandle()->getSvmAllocsManager()->getSVMAlloc(ptr);
        if (allocData && allocData->memoryType == InternalMemoryType::SHARED_UNIFIED_MEMORY) {
            DriverHandleImp *driverHandleImp = static_cast<DriverHandleImp *>(device->getDriverHandle());
            std::lock_guard<std::mutex> lock(driverHandleImp->sharedMakeResidentAllocationsLock);
            driverHandleImp->sharedMakeResidentAllocations.insert({ptr, allocation});
        }
    }

    return res;
}
326 
evictMemory(ze_device_handle_t hDevice,void * ptr,size_t size)327 ze_result_t ContextImp::evictMemory(ze_device_handle_t hDevice, void *ptr, size_t size) {
328     Device *device = L0::Device::fromHandle(hDevice);
329     NEO::Device *neoDevice = device->getNEODevice();
330     auto allocation = device->getDriverHandle()->getDriverSystemMemoryAllocation(
331         ptr,
332         size,
333         neoDevice->getRootDeviceIndex(),
334         nullptr);
335     if (allocation == nullptr) {
336         return ZE_RESULT_ERROR_INVALID_ARGUMENT;
337     }
338 
339     {
340         DriverHandleImp *driverHandleImp = static_cast<DriverHandleImp *>(device->getDriverHandle());
341         std::lock_guard<std::mutex> lock(driverHandleImp->sharedMakeResidentAllocationsLock);
342         driverHandleImp->sharedMakeResidentAllocations.erase(ptr);
343     }
344 
345     NEO::MemoryOperationsHandler *memoryOperationsIface = neoDevice->getRootDeviceEnvironment().memoryOperationsInterface.get();
346     auto success = memoryOperationsIface->evict(neoDevice, *allocation);
347     return changeMemoryOperationStatusToL0ResultType(success);
348 }
349 
makeImageResident(ze_device_handle_t hDevice,ze_image_handle_t hImage)350 ze_result_t ContextImp::makeImageResident(ze_device_handle_t hDevice, ze_image_handle_t hImage) {
351     auto alloc = Image::fromHandle(hImage)->getAllocation();
352 
353     NEO::Device *neoDevice = L0::Device::fromHandle(hDevice)->getNEODevice();
354     NEO::MemoryOperationsHandler *memoryOperationsIface = neoDevice->getRootDeviceEnvironment().memoryOperationsInterface.get();
355     auto success = memoryOperationsIface->makeResident(neoDevice, ArrayRef<NEO::GraphicsAllocation *>(&alloc, 1));
356     return changeMemoryOperationStatusToL0ResultType(success);
357 }
evictImage(ze_device_handle_t hDevice,ze_image_handle_t hImage)358 ze_result_t ContextImp::evictImage(ze_device_handle_t hDevice, ze_image_handle_t hImage) {
359     auto alloc = Image::fromHandle(hImage)->getAllocation();
360 
361     NEO::Device *neoDevice = L0::Device::fromHandle(hDevice)->getNEODevice();
362     NEO::MemoryOperationsHandler *memoryOperationsIface = neoDevice->getRootDeviceEnvironment().memoryOperationsInterface.get();
363     auto success = memoryOperationsIface->evict(neoDevice, *alloc);
364     return changeMemoryOperationStatusToL0ResultType(success);
365 }
366 
getMemAddressRange(const void * ptr,void ** pBase,size_t * pSize)367 ze_result_t ContextImp::getMemAddressRange(const void *ptr,
368                                            void **pBase,
369                                            size_t *pSize) {
370     NEO::SvmAllocationData *allocData = this->driverHandle->svmAllocsManager->getSVMAlloc(ptr);
371     if (allocData) {
372         NEO::GraphicsAllocation *alloc;
373         alloc = allocData->gpuAllocations.getDefaultGraphicsAllocation();
374         if (pBase) {
375             uint64_t *allocBase = reinterpret_cast<uint64_t *>(pBase);
376             *allocBase = alloc->getGpuAddress();
377         }
378 
379         if (pSize) {
380             *pSize = alloc->getUnderlyingBufferSize();
381         }
382 
383         return ZE_RESULT_SUCCESS;
384     }
385     DEBUG_BREAK_IF(true);
386     return ZE_RESULT_ERROR_UNKNOWN;
387 }
388 
// Closing an imported IPC memory handle simply frees the local mapping.
ze_result_t ContextImp::closeIpcMemHandle(const void *ptr) {
    return this->freeMem(ptr);
}
392 
getIpcMemHandle(const void * ptr,ze_ipc_mem_handle_t * pIpcHandle)393 ze_result_t ContextImp::getIpcMemHandle(const void *ptr,
394                                         ze_ipc_mem_handle_t *pIpcHandle) {
395     NEO::SvmAllocationData *allocData = this->driverHandle->svmAllocsManager->getSVMAlloc(ptr);
396     if (allocData) {
397         uint64_t handle = allocData->gpuAllocations.getDefaultGraphicsAllocation()->peekInternalHandle(this->driverHandle->getMemoryManager());
398         memcpy_s(reinterpret_cast<void *>(pIpcHandle->data),
399                  sizeof(ze_ipc_mem_handle_t),
400                  &handle,
401                  sizeof(handle));
402 
403         return ZE_RESULT_SUCCESS;
404     }
405     return ZE_RESULT_ERROR_INVALID_ARGUMENT;
406 }
407 
openIpcMemHandle(ze_device_handle_t hDevice,ze_ipc_mem_handle_t pIpcHandle,ze_ipc_memory_flags_t flags,void ** ptr)408 ze_result_t ContextImp::openIpcMemHandle(ze_device_handle_t hDevice,
409                                          ze_ipc_mem_handle_t pIpcHandle,
410                                          ze_ipc_memory_flags_t flags,
411                                          void **ptr) {
412     uint64_t handle = 0u;
413     memcpy_s(&handle,
414              sizeof(handle),
415              reinterpret_cast<void *>(pIpcHandle.data),
416              sizeof(handle));
417 
418     *ptr = this->driverHandle->importFdHandle(hDevice, flags, handle, nullptr);
419     if (nullptr == *ptr) {
420         return ZE_RESULT_ERROR_INVALID_ARGUMENT;
421     }
422 
423     return ZE_RESULT_SUCCESS;
424 }
425 
// Closing an imported IPC event pool destroys the local pool object.
ze_result_t EventPoolImp::closeIpcHandle() {
    return this->destroy();
}
429 
getIpcHandle(ze_ipc_event_pool_handle_t * pIpcHandle)430 ze_result_t EventPoolImp::getIpcHandle(ze_ipc_event_pool_handle_t *pIpcHandle) {
431     // L0 uses a vector of ZE_MAX_IPC_HANDLE_SIZE bytes to send the IPC handle, i.e.
432     // char data[ZE_MAX_IPC_HANDLE_SIZE];
433     // First four bytes (which is of size sizeof(int)) of it contain the file descriptor
434     // associated with the dma-buf,
435     // Rest is payload to communicate extra info to the other processes.
436     // For the event pool, this contains:
437     // - the number of events the pool has.
438     // - the id for the device used during pool creation
439 
440     uint64_t handle = this->eventPoolAllocations->getDefaultGraphicsAllocation()->peekInternalHandle(this->context->getDriverHandle()->getMemoryManager());
441 
442     memcpy_s(pIpcHandle->data, sizeof(int), &handle, sizeof(int));
443 
444     memcpy_s(pIpcHandle->data + sizeof(int), sizeof(this->numEvents), &this->numEvents, sizeof(this->numEvents));
445 
446     uint32_t rootDeviceIndex = this->getDevice()->getRootDeviceIndex();
447     memcpy_s(pIpcHandle->data + sizeof(int) + sizeof(this->numEvents),
448              sizeof(rootDeviceIndex), &rootDeviceIndex, sizeof(rootDeviceIndex));
449 
450     return ZE_RESULT_SUCCESS;
451 }
452 
// Recreates an event pool in this process from an IPC handle produced by
// EventPoolImp::getIpcHandle(). The payload layout must mirror the writer:
//   [0, sizeof(int))          shared (dma-buf) handle
//   [+, sizeof(numEvents))    number of events in the pool
//   [+, sizeof(uint32_t))     root device index used at pool creation
// On success *phEventPool holds a pool marked as an imported IPC pool; on
// failure all partially created allocations are released.
ze_result_t ContextImp::openEventPoolIpcHandle(ze_ipc_event_pool_handle_t hIpc,
                                               ze_event_pool_handle_t *phEventPool) {
    uint64_t handle = 0u;
    memcpy_s(&handle, sizeof(int), hIpc.data, sizeof(int));

    size_t numEvents = 0;
    memcpy_s(&numEvents, sizeof(numEvents), hIpc.data + sizeof(int), sizeof(numEvents));

    uint32_t rootDeviceIndex = std::numeric_limits<uint32_t>::max();
    memcpy_s(&rootDeviceIndex, sizeof(rootDeviceIndex),
             hIpc.data + sizeof(int) + sizeof(numEvents), sizeof(rootDeviceIndex));

    // Use the context's first device to size the events and open the handle.
    Device *device = this->devices.begin()->second;
    auto neoDevice = device->getNEODevice();
    NEO::osHandle osHandle = static_cast<NEO::osHandle>(handle);
    auto &hwHelper = device->getHwHelper();
    const uint32_t eventAlignment = static_cast<uint32_t>(hwHelper.getTimestampPacketAllocatorAlignment());
    uint32_t eventSize = static_cast<uint32_t>(alignUp(EventPacketsCount::eventPackets * hwHelper.getSingleTimestampPacketSize(), eventAlignment));
    size_t alignedSize = alignUp<size_t>(numEvents * eventSize, MemoryConstants::pageSize64k);
    NEO::AllocationProperties unifiedMemoryProperties{rootDeviceIndex,
                                                      alignedSize,
                                                      NEO::GraphicsAllocation::AllocationType::BUFFER_HOST_MEMORY,
                                                      systemMemoryBitfield};

    unifiedMemoryProperties.subDevicesBitfield = neoDevice->getDeviceBitfield();
    auto memoryManager = this->getDriverHandle()->getMemoryManager();
    NEO::GraphicsAllocation *alloc = memoryManager->createGraphicsAllocationFromSharedHandle(osHandle,
                                                                                             unifiedMemoryProperties,
                                                                                             false,
                                                                                             true);

    if (alloc == nullptr) {
        return ZE_RESULT_ERROR_INVALID_ARGUMENT;
    }

    // Build the pool around the imported allocation.
    ze_event_pool_desc_t desc = {};
    auto eventPool = new EventPoolImp(&desc);
    eventPool->context = this;
    eventPool->eventPoolAllocations =
        std::make_unique<NEO::MultiGraphicsAllocation>(static_cast<uint32_t>(this->rootDeviceIndices.size()));
    eventPool->eventPoolAllocations->addAllocation(alloc);
    eventPool->eventPoolPtr = reinterpret_cast<void *>(alloc->getUnderlyingBuffer());
    eventPool->devices.push_back(device);
    eventPool->isImportedIpcPool = true;
    eventPool->setEventSize(eventSize);
    eventPool->setEventAlignment(eventAlignment);

    // Mirror the imported storage onto the remaining root devices so the
    // pool is usable context-wide.
    for (auto currDeviceIndex : this->rootDeviceIndices) {
        if (currDeviceIndex == rootDeviceIndex) {
            continue;
        }

        unifiedMemoryProperties.rootDeviceIndex = currDeviceIndex;
        unifiedMemoryProperties.flags.isUSMHostAllocation = true;
        unifiedMemoryProperties.flags.forceSystemMemory = true;
        unifiedMemoryProperties.flags.allocateMemory = false;
        auto graphicsAllocation = memoryManager->createGraphicsAllocationFromExistingStorage(unifiedMemoryProperties,
                                                                                             eventPool->eventPoolPtr,
                                                                                             eventPool->getAllocation());
        if (!graphicsAllocation) {
            // Roll back everything created so far before failing.
            for (auto gpuAllocation : eventPool->getAllocation().getGraphicsAllocations()) {
                memoryManager->freeGraphicsMemory(gpuAllocation);
            }
            memoryManager->freeGraphicsMemory(alloc);

            delete eventPool;

            return ZE_RESULT_ERROR_OUT_OF_HOST_MEMORY;
        }
        eventPool->eventPoolAllocations->addAllocation(graphicsAllocation);
    }

    *phEventPool = eventPool;

    return ZE_RESULT_SUCCESS;
}
529 
getMemAllocProperties(const void * ptr,ze_memory_allocation_properties_t * pMemAllocProperties,ze_device_handle_t * phDevice)530 ze_result_t ContextImp::getMemAllocProperties(const void *ptr,
531                                               ze_memory_allocation_properties_t *pMemAllocProperties,
532                                               ze_device_handle_t *phDevice) {
533     auto alloc = driverHandle->svmAllocsManager->getSVMAlloc(ptr);
534     if (nullptr == alloc) {
535         pMemAllocProperties->type = ZE_MEMORY_TYPE_UNKNOWN;
536         return ZE_RESULT_SUCCESS;
537     }
538 
539     pMemAllocProperties->type = Context::parseUSMType(alloc->memoryType);
540     pMemAllocProperties->id = alloc->getAllocId();
541 
542     if (phDevice != nullptr) {
543         if (alloc->device == nullptr) {
544             *phDevice = nullptr;
545         } else {
546             auto device = static_cast<NEO::Device *>(alloc->device)->getSpecializedDevice<DeviceImp>();
547             DEBUG_BREAK_IF(device == nullptr);
548             *phDevice = device->toHandle();
549         }
550     }
551 
552     if (pMemAllocProperties->pNext) {
553         ze_base_properties_t *extendedProperties =
554             reinterpret_cast<ze_base_properties_t *>(pMemAllocProperties->pNext);
555         if (extendedProperties->stype == ZE_STRUCTURE_TYPE_EXTERNAL_MEMORY_EXPORT_FD) {
556             ze_external_memory_export_fd_t *extendedMemoryExportProperties =
557                 reinterpret_cast<ze_external_memory_export_fd_t *>(extendedProperties);
558             if (extendedMemoryExportProperties->flags & ZE_EXTERNAL_MEMORY_TYPE_FLAG_OPAQUE_FD) {
559                 return ZE_RESULT_ERROR_UNSUPPORTED_ENUMERATION;
560             }
561             if (pMemAllocProperties->type != ZE_MEMORY_TYPE_DEVICE) {
562                 return ZE_RESULT_ERROR_UNSUPPORTED_FEATURE;
563             }
564             uint64_t handle = alloc->gpuAllocations.getDefaultGraphicsAllocation()->peekInternalHandle(this->driverHandle->getMemoryManager());
565             extendedMemoryExportProperties->fd = static_cast<int>(handle);
566         } else if (extendedProperties->stype == ZE_STRUCTURE_TYPE_EXTERNAL_MEMORY_EXPORT_WIN32) {
567             ze_external_memory_export_win32_handle_t *exportStructure = reinterpret_cast<ze_external_memory_export_win32_handle_t *>(extendedProperties);
568             if (exportStructure->flags != ZE_EXTERNAL_MEMORY_TYPE_FLAG_OPAQUE_WIN32) {
569                 return ZE_RESULT_ERROR_UNSUPPORTED_ENUMERATION;
570             }
571             uint64_t handle = alloc->gpuAllocations.getDefaultGraphicsAllocation()->peekInternalHandle(this->driverHandle->getMemoryManager());
572             exportStructure->handle = reinterpret_cast<void *>(reinterpret_cast<uintptr_t *>(handle));
573         }
574     }
575 
576     return ZE_RESULT_SUCCESS;
577 }
578 
// Creates a user-type module on the given device; thin forwarder to Device.
ze_result_t ContextImp::createModule(ze_device_handle_t hDevice,
                                     const ze_module_desc_t *desc,
                                     ze_module_handle_t *phModule,
                                     ze_module_build_log_handle_t *phBuildLog) {
    return L0::Device::fromHandle(hDevice)->createModule(desc, phModule, phBuildLog, ModuleType::User);
}
585 
// Creates a sampler on the given device; thin forwarder to Device.
ze_result_t ContextImp::createSampler(ze_device_handle_t hDevice,
                                      const ze_sampler_desc_t *pDesc,
                                      ze_sampler_handle_t *phSampler) {
    return L0::Device::fromHandle(hDevice)->createSampler(pDesc, phSampler);
}
591 
// Creates a command queue on the given device; thin forwarder to Device.
ze_result_t ContextImp::createCommandQueue(ze_device_handle_t hDevice,
                                           const ze_command_queue_desc_t *desc,
                                           ze_command_queue_handle_t *commandQueue) {
    return L0::Device::fromHandle(hDevice)->createCommandQueue(desc, commandQueue);
}
597 
// Creates a regular command list on the given device; thin forwarder to Device.
ze_result_t ContextImp::createCommandList(ze_device_handle_t hDevice,
                                          const ze_command_list_desc_t *desc,
                                          ze_command_list_handle_t *commandList) {
    return L0::Device::fromHandle(hDevice)->createCommandList(desc, commandList);
}
603 
// Creates an immediate command list on the given device; thin forwarder to Device.
ze_result_t ContextImp::createCommandListImmediate(ze_device_handle_t hDevice,
                                                   const ze_command_queue_desc_t *desc,
                                                   ze_command_list_handle_t *commandList) {
    return L0::Device::fromHandle(hDevice)->createCommandListImmediate(desc, commandList);
}
609 
// Defers activation of the given metric groups to the device implementation.
ze_result_t ContextImp::activateMetricGroups(zet_device_handle_t hDevice,
                                             uint32_t count,
                                             zet_metric_group_handle_t *phMetricGroups) {
    return L0::Device::fromHandle(hDevice)->activateMetricGroupsDeferred(count, phMetricGroups);
}
615 
// Virtual memory reservation is not implemented in this context.
ze_result_t ContextImp::reserveVirtualMem(const void *pStart,
                                          size_t size,
                                          void **pptr) {
    return ZE_RESULT_ERROR_UNSUPPORTED_FEATURE;
}
621 
ze_result_t ContextImp::freeVirtualMem(const void *ptr,
                                       size_t size) {
    // Counterpart of reserveVirtualMem; equally unimplemented — unconditional
    // ZE_RESULT_ERROR_UNSUPPORTED_FEATURE.
    return ZE_RESULT_ERROR_UNSUPPORTED_FEATURE;
}
626 
ze_result_t ContextImp::queryVirtualMemPageSize(ze_device_handle_t hDevice,
                                                size_t size,
                                                size_t *pagesize) {
    // Page-size query for the (unimplemented) virtual-memory API; always
    // returns ZE_RESULT_ERROR_UNSUPPORTED_FEATURE without touching *pagesize.
    return ZE_RESULT_ERROR_UNSUPPORTED_FEATURE;
}
632 
ze_result_t ContextImp::createPhysicalMem(ze_device_handle_t hDevice,
                                          ze_physical_mem_desc_t *desc,
                                          ze_physical_mem_handle_t *phPhysicalMemory) {
    // Physical-memory object creation is not supported yet; unconditional
    // ZE_RESULT_ERROR_UNSUPPORTED_FEATURE.
    return ZE_RESULT_ERROR_UNSUPPORTED_FEATURE;
}
638 
ze_result_t ContextImp::destroyPhysicalMem(ze_physical_mem_handle_t hPhysicalMemory) {
    // Counterpart of createPhysicalMem; not supported — always returns
    // ZE_RESULT_ERROR_UNSUPPORTED_FEATURE.
    return ZE_RESULT_ERROR_UNSUPPORTED_FEATURE;
}
642 
ze_result_t ContextImp::mapVirtualMem(const void *ptr,
                                      size_t size,
                                      ze_physical_mem_handle_t hPhysicalMemory,
                                      size_t offset,
                                      ze_memory_access_attribute_t access) {
    // Mapping physical memory into a reserved virtual range is not supported
    // yet; unconditional ZE_RESULT_ERROR_UNSUPPORTED_FEATURE.
    return ZE_RESULT_ERROR_UNSUPPORTED_FEATURE;
}
650 
ze_result_t ContextImp::unMapVirtualMem(const void *ptr,
                                        size_t size) {
    // Counterpart of mapVirtualMem; not supported — always returns
    // ZE_RESULT_ERROR_UNSUPPORTED_FEATURE.
    return ZE_RESULT_ERROR_UNSUPPORTED_FEATURE;
}
655 
ze_result_t ContextImp::setVirtualMemAccessAttribute(const void *ptr,
                                                     size_t size,
                                                     ze_memory_access_attribute_t access) {
    // Access-attribute updates on virtual ranges are not supported yet;
    // unconditional ZE_RESULT_ERROR_UNSUPPORTED_FEATURE.
    return ZE_RESULT_ERROR_UNSUPPORTED_FEATURE;
}
661 
ze_result_t ContextImp::getVirtualMemAccessAttribute(const void *ptr,
                                                     size_t size,
                                                     ze_memory_access_attribute_t *access,
                                                     size_t *outSize) {
    // Access-attribute queries on virtual ranges are not supported yet; the
    // out-parameters are left untouched and ZE_RESULT_ERROR_UNSUPPORTED_FEATURE
    // is returned unconditionally.
    return ZE_RESULT_ERROR_UNSUPPORTED_FEATURE;
}
668 
createEventPool(const ze_event_pool_desc_t * desc,uint32_t numDevices,ze_device_handle_t * phDevices,ze_event_pool_handle_t * phEventPool)669 ze_result_t ContextImp::createEventPool(const ze_event_pool_desc_t *desc,
670                                         uint32_t numDevices,
671                                         ze_device_handle_t *phDevices,
672                                         ze_event_pool_handle_t *phEventPool) {
673     ze_result_t result;
674     EventPool *eventPool = EventPool::create(this->driverHandle, this, numDevices, phDevices, desc, result);
675 
676     if (eventPool == nullptr) {
677         return result;
678     }
679 
680     *phEventPool = eventPool->toHandle();
681 
682     return ZE_RESULT_SUCCESS;
683 }
684 
createImage(ze_device_handle_t hDevice,const ze_image_desc_t * desc,ze_image_handle_t * phImage)685 ze_result_t ContextImp::createImage(ze_device_handle_t hDevice,
686                                     const ze_image_desc_t *desc,
687                                     ze_image_handle_t *phImage) {
688     return L0::Device::fromHandle(hDevice)->createImage(desc, phImage);
689 }
690 
isAllocationSuitableForCompression(const StructuresLookupTable & structuresLookupTable,Device & device,size_t allocSize)691 bool ContextImp::isAllocationSuitableForCompression(const StructuresLookupTable &structuresLookupTable, Device &device, size_t allocSize) {
692     auto &hwInfo = device.getHwInfo();
693     auto &hwHelper = device.getHwHelper();
694     auto &l0HwHelper = L0HwHelper::get(hwInfo.platform.eRenderCoreFamily);
695 
696     if (!l0HwHelper.usmCompressionSupported(hwInfo) || !hwHelper.isBufferSizeSuitableForCompression(allocSize, hwInfo) || structuresLookupTable.uncompressedHint) {
697         return false;
698     }
699 
700     if (l0HwHelper.forceDefaultUsmCompressionSupport()) {
701         return true;
702     }
703 
704     return structuresLookupTable.compressedHint;
705 }
706 
707 } // namespace L0
708