1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "ui/ozone/platform/drm/gpu/gbm_surface_factory.h"
6
7 #include <gbm.h>
8 #include <xf86drm.h>
9
10 #include <memory>
11 #include <utility>
12
13 #include "base/files/file_path.h"
14 #include "base/strings/stringprintf.h"
15 #include "build/build_config.h"
16 #include "third_party/khronos/EGL/egl.h"
17 #include "ui/gfx/buffer_format_util.h"
18 #include "ui/gfx/extension_set.h"
19 #include "ui/gfx/linux/drm_util_linux.h"
20 #include "ui/gfx/linux/gbm_defines.h"
21 #include "ui/gfx/linux/scoped_gbm_device.h"
22 #include "ui/gfx/native_pixmap.h"
23 #include "ui/gl/gl_surface_egl.h"
24 #include "ui/ozone/common/egl_util.h"
25 #include "ui/ozone/common/gl_ozone_egl.h"
26 #include "ui/ozone/platform/drm/common/drm_util.h"
27 #include "ui/ozone/platform/drm/common/scoped_drm_types.h"
28 #include "ui/ozone/platform/drm/gpu/drm_gpu_util.h"
29 #include "ui/ozone/platform/drm/gpu/drm_thread_proxy.h"
30 #include "ui/ozone/platform/drm/gpu/drm_window_proxy.h"
31 #include "ui/ozone/platform/drm/gpu/gbm_overlay_surface.h"
32 #include "ui/ozone/platform/drm/gpu/gbm_pixmap.h"
33 #include "ui/ozone/platform/drm/gpu/gbm_surfaceless.h"
34 #include "ui/ozone/platform/drm/gpu/proxy_helpers.h"
35 #include "ui/ozone/platform/drm/gpu/screen_manager.h"
36 #include "ui/ozone/public/surface_ozone_canvas.h"
37
#if BUILDFLAG(ENABLE_VULKAN)
#include "gpu/vulkan/vulkan_function_pointers.h"
#include "ui/ozone/platform/drm/gpu/vulkan_implementation_gbm.h"
// Local mirror of the (non-public) Intel dma-buf image import extension.
// There is no official Vulkan header for it, so the structure-type value and
// the struct/function-pointer declarations are reproduced here. They must
// match what the driver exposes via vkGetDeviceProcAddr.
#define VK_STRUCTURE_TYPE_DMA_BUF_IMAGE_CREATE_INFO_INTEL 1024
typedef struct VkDmaBufImageCreateInfo_ {
  // Must be VK_STRUCTURE_TYPE_DMA_BUF_IMAGE_CREATE_INFO_INTEL.
  VkStructureType sType;
  const void* pNext;
  // dma-buf fd to import (a dup()'d fd is release()'d into this field by
  // CreateNativePixmapForVulkan below — presumably the driver takes
  // ownership; confirm against the driver implementation).
  int fd;
  VkFormat format;
  VkExtent3D extent;
  // Single-plane stride; note there is no field for a format modifier.
  uint32_t strideInBytes;
} VkDmaBufImageCreateInfo;

// Signature of vkCreateDmaBufImageINTEL, resolved dynamically at runtime.
typedef VkResult(VKAPI_PTR* PFN_vkCreateDmaBufImageINTEL)(
    VkDevice device,
    const VkDmaBufImageCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDeviceMemory* pMem,
    VkImage* pImage);
#endif
58
59 namespace ui {
60
61 namespace {
62
// GLOzone implementation for the GBM platform. There are no on-screen EGL
// window surfaces here: view surfaces are always surfaceless and presentation
// goes through the DRM thread (GbmSurfaceless / DrmWindowProxy).
class GLOzoneEGLGbm : public GLOzoneEGL {
 public:
  // |surface_factory| and |drm_thread_proxy| are not owned and must outlive
  // this object.
  GLOzoneEGLGbm(GbmSurfaceFactory* surface_factory,
                DrmThreadProxy* drm_thread_proxy)
      : surface_factory_(surface_factory),
        drm_thread_proxy_(drm_thread_proxy) {}
  ~GLOzoneEGLGbm() override {}

  // Onscreen EGL window surfaces are not supported on GBM.
  scoped_refptr<gl::GLSurface> CreateViewGLSurface(
      gfx::AcceleratedWidget window) override {
    return nullptr;
  }

  // Creates the surfaceless surface used for presenting to |window| via
  // overlays scheduled on the DRM thread.
  scoped_refptr<gl::GLSurface> CreateSurfacelessViewGLSurface(
      gfx::AcceleratedWidget window) override {
    return gl::InitializeGLSurface(new GbmSurfaceless(
        surface_factory_, drm_thread_proxy_->CreateDrmWindowProxy(window),
        window));
  }

  // Offscreen surfaces are surfaceless too; only the zero-sized pbuffer-like
  // surface is supported (asserted below).
  scoped_refptr<gl::GLSurface> CreateOffscreenGLSurface(
      const gfx::Size& size) override {
    DCHECK_EQ(size.width(), 0);
    DCHECK_EQ(size.height(), 0);
    return gl::InitializeGLSurface(new gl::SurfacelessEGL(size));
  }

 protected:
  // Picks the native display to initialize EGL with, caching the result.
  // Preference order (each later match overwrites the earlier choice):
  //   1. EGL_DEFAULT_DISPLAY (null platform fallback)
  //   2. EGL_DEFAULT_DISPLAY on EGL_PLATFORM_SURFACELESS_MESA, if available
  //   3. virtio_gpu EGL device
  //   4. amdgpu EGL device
  //   5. i915 EGL device (preferred when present — see comment below)
  gl::EGLDisplayPlatform GetNativeDisplay() override {
    if (native_display_.Valid())
      return native_display_;

    // Default to null platform
    native_display_ = gl::EGLDisplayPlatform(EGL_DEFAULT_DISPLAY);

    // Client extensions can be queried before any display exists; bindings
    // must be initialized first.
    gl::g_driver_egl.InitializeClientExtensionBindings();

    const char* client_extensions_string =
        eglQueryString(EGL_NO_DISPLAY, EGL_EXTENSIONS);

    gfx::ExtensionSet client_extensions =
        client_extensions_string
            ? gfx::MakeExtensionSet(client_extensions_string)
            : gfx::ExtensionSet();

    if (gfx::HasExtension(client_extensions, "EGL_MESA_platform_surfaceless")) {
      native_display_ = gl::EGLDisplayPlatform(EGL_DEFAULT_DISPLAY,
                                               EGL_PLATFORM_SURFACELESS_MESA);
    }

    // Device-based selection needs all three device extensions; otherwise
    // fall back to whatever was chosen above.
    if (!(gfx::HasExtension(client_extensions, "EGL_EXT_device_query") &&
          gfx::HasExtension(client_extensions, "EGL_EXT_platform_device") &&
          gfx::HasExtension(client_extensions, "EGL_EXT_device_enumeration"))) {
      LOG(WARNING) << "Platform device extensions not found.";
      return native_display_;
    }

    std::vector<EGLDeviceEXT> devices(DRM_MAX_MINOR, EGL_NO_DEVICE_EXT);
    EGLDeviceEXT virgl_device = EGL_NO_DEVICE_EXT;
    EGLDeviceEXT amdgpu_device = EGL_NO_DEVICE_EXT;
    EGLDeviceEXT i915_device = EGL_NO_DEVICE_EXT;
    EGLint num_devices = 0;

    eglQueryDevicesEXT(DRM_MAX_MINOR, devices.data(), &num_devices);
    devices.resize(num_devices);
    for (EGLDeviceEXT device : devices) {
      const char* filename =
          eglQueryDeviceStringEXT(device, EGL_DRM_DEVICE_FILE_EXT);
      if (!filename)  // Not a DRM device.
        continue;
      if (IsDriverName(filename, "virtio_gpu"))
        virgl_device = device;
      if (IsDriverName(filename, "amdgpu"))
        amdgpu_device = device;
      if (IsDriverName(filename, "i915"))
        i915_device = device;
    }

    if (virgl_device != EGL_NO_DEVICE_EXT) {
      native_display_ = gl::EGLDisplayPlatform(
          reinterpret_cast<EGLNativeDisplayType>(virgl_device),
          EGL_PLATFORM_DEVICE_EXT);
    }

    if (amdgpu_device != EGL_NO_DEVICE_EXT) {
      native_display_ = gl::EGLDisplayPlatform(
          reinterpret_cast<EGLNativeDisplayType>(amdgpu_device),
          EGL_PLATFORM_DEVICE_EXT);
    }

    // If we also have Intel integrated, use it instead.
    if (i915_device != EGL_NO_DEVICE_EXT) {
      native_display_ = gl::EGLDisplayPlatform(
          reinterpret_cast<EGLNativeDisplayType>(i915_device),
          EGL_PLATFORM_DEVICE_EXT);
    }

    return native_display_;
  }

  bool LoadGLES2Bindings(gl::GLImplementation impl) override {
    return LoadDefaultEGLGLES2Bindings(impl);
  }

 private:
  GbmSurfaceFactory* surface_factory_;  // Not owned.
  DrmThreadProxy* drm_thread_proxy_;    // Not owned.
  // Cached result of GetNativeDisplay(); computed once.
  gl::EGLDisplayPlatform native_display_;

  DISALLOW_COPY_AND_ASSIGN(GLOzoneEGLGbm);
};
174
EnumerateSupportedBufferFormatsForTexturing()175 std::vector<gfx::BufferFormat> EnumerateSupportedBufferFormatsForTexturing() {
176 std::vector<gfx::BufferFormat> supported_buffer_formats;
177 // We cannot use FileEnumerator here because the sandbox is already closed.
178 constexpr char kRenderNodeFilePattern[] = "/dev/dri/renderD%d";
179 for (int i = 128; /* end on first card# that does not exist */; i++) {
180 base::FilePath dev_path(FILE_PATH_LITERAL(
181 base::StringPrintf(kRenderNodeFilePattern, i).c_str()));
182
183 base::ThreadRestrictions::ScopedAllowIO scoped_allow_io;
184 base::File dev_path_file(dev_path,
185 base::File::FLAG_OPEN | base::File::FLAG_READ);
186 if (!dev_path_file.IsValid())
187 break;
188
189 // Skip the virtual graphics memory manager device.
190 ScopedDrmVersionPtr version(drmGetVersion(dev_path_file.GetPlatformFile()));
191 if (!version || base::LowerCaseEqualsASCII(version->name, "vgem")) {
192 continue;
193 }
194
195 ScopedGbmDevice device(gbm_create_device(dev_path_file.GetPlatformFile()));
196 if (!device) {
197 LOG(ERROR) << "Couldn't create Gbm Device at " << dev_path.MaybeAsASCII();
198 return supported_buffer_formats;
199 }
200
201 for (int i = 0; i <= static_cast<int>(gfx::BufferFormat::LAST); ++i) {
202 const gfx::BufferFormat buffer_format = static_cast<gfx::BufferFormat>(i);
203 if (base::Contains(supported_buffer_formats, buffer_format))
204 continue;
205 if (gbm_device_is_format_supported(
206 device.get(), GetFourCCFormatFromBufferFormat(buffer_format),
207 GBM_BO_USE_TEXTURING)) {
208 supported_buffer_formats.push_back(buffer_format);
209 }
210 }
211 }
212 return supported_buffer_formats;
213 }
214
OnNativePixmapCreated(GbmSurfaceFactory::NativePixmapCallback callback,base::WeakPtr<GbmSurfaceFactory> weak_ptr,std::unique_ptr<GbmBuffer> buffer,scoped_refptr<DrmFramebuffer> framebuffer)215 void OnNativePixmapCreated(GbmSurfaceFactory::NativePixmapCallback callback,
216 base::WeakPtr<GbmSurfaceFactory> weak_ptr,
217 std::unique_ptr<GbmBuffer> buffer,
218 scoped_refptr<DrmFramebuffer> framebuffer) {
219 if (!weak_ptr || !buffer) {
220 std::move(callback).Run(nullptr);
221 } else {
222 std::move(callback).Run(base::MakeRefCounted<GbmPixmap>(
223 weak_ptr.get(), std::move(buffer), std::move(framebuffer)));
224 }
225 }
226
227 } // namespace
228
// |drm_thread_proxy| is not owned and must outlive the factory; it is shared
// with the GLOzoneEGLGbm implementation created here.
GbmSurfaceFactory::GbmSurfaceFactory(DrmThreadProxy* drm_thread_proxy)
    : egl_implementation_(
          std::make_unique<GLOzoneEGLGbm>(this, drm_thread_proxy)),
      drm_thread_proxy_(drm_thread_proxy) {}
233
// Must be destroyed on the same thread that created it.
GbmSurfaceFactory::~GbmSurfaceFactory() {
  DCHECK(thread_checker_.CalledOnValidThread());
}
237
RegisterSurface(gfx::AcceleratedWidget widget,GbmSurfaceless * surface)238 void GbmSurfaceFactory::RegisterSurface(gfx::AcceleratedWidget widget,
239 GbmSurfaceless* surface) {
240 DCHECK(thread_checker_.CalledOnValidThread());
241 widget_to_surface_map_.emplace(widget, surface);
242 }
243
UnregisterSurface(gfx::AcceleratedWidget widget)244 void GbmSurfaceFactory::UnregisterSurface(gfx::AcceleratedWidget widget) {
245 DCHECK(thread_checker_.CalledOnValidThread());
246 widget_to_surface_map_.erase(widget);
247 }
248
GetSurface(gfx::AcceleratedWidget widget) const249 GbmSurfaceless* GbmSurfaceFactory::GetSurface(
250 gfx::AcceleratedWidget widget) const {
251 DCHECK(thread_checker_.CalledOnValidThread());
252 auto it = widget_to_surface_map_.find(widget);
253 DCHECK(it != widget_to_surface_map_.end());
254 return it->second;
255 }
256
257 std::vector<gl::GLImplementation>
GetAllowedGLImplementations()258 GbmSurfaceFactory::GetAllowedGLImplementations() {
259 DCHECK(thread_checker_.CalledOnValidThread());
260 return std::vector<gl::GLImplementation>{gl::kGLImplementationEGLGLES2,
261 gl::kGLImplementationEGLANGLE,
262 gl::kGLImplementationSwiftShaderGL};
263 }
264
GetGLOzone(gl::GLImplementation implementation)265 GLOzone* GbmSurfaceFactory::GetGLOzone(gl::GLImplementation implementation) {
266 switch (implementation) {
267 case gl::kGLImplementationEGLGLES2:
268 case gl::kGLImplementationSwiftShaderGL:
269 case gl::kGLImplementationEGLANGLE:
270 return egl_implementation_.get();
271 default:
272 return nullptr;
273 }
274 }
275
276 #if BUILDFLAG(ENABLE_VULKAN)
// Creates the GBM-flavored Vulkan implementation. Note the protected-memory
// arguments are ignored here — VulkanImplementationGbm is constructed without
// them.
std::unique_ptr<gpu::VulkanImplementation>
GbmSurfaceFactory::CreateVulkanImplementation(bool allow_protected_memory,
                                              bool enforce_protected_memory) {
  return std::make_unique<ui::VulkanImplementationGbm>();
}
282
// Allocates a GBM buffer and imports it into Vulkan through the Intel
// vkCreateDmaBufImageINTEL extension, producing |*vk_device_memory| and
// |*vk_image| on success. Returns the wrapping GbmPixmap, or nullptr if
// allocation or import fails. Only single-plane buffers in the primary
// display format are handled (see DCHECKs and TODO below).
scoped_refptr<gfx::NativePixmap> GbmSurfaceFactory::CreateNativePixmapForVulkan(
    gfx::AcceleratedWidget widget,
    gfx::Size size,
    gfx::BufferFormat format,
    gfx::BufferUsage usage,
    VkDevice vk_device,
    VkDeviceMemory* vk_device_memory,
    VkImage* vk_image) {
  std::unique_ptr<GbmBuffer> buffer;
  scoped_refptr<DrmFramebuffer> framebuffer;

  // Modifiers are disabled: VkDmaBufImageCreateInfo carries only a plain
  // stride, with no field for a format modifier.
  drm_thread_proxy_->CreateBuffer(widget, size, /*framebuffer_size=*/size,
                                  format, usage, GbmPixmap::kFlagNoModifiers,
                                  &buffer, &framebuffer);
  if (!buffer)
    return nullptr;

  // vkCreateDmaBufImageINTEL is a driver extension, so it must be resolved
  // dynamically; bail out if the driver does not expose it.
  PFN_vkCreateDmaBufImageINTEL create_dma_buf_image_intel =
      reinterpret_cast<PFN_vkCreateDmaBufImageINTEL>(
          vkGetDeviceProcAddr(vk_device, "vkCreateDmaBufImageINTEL"));
  if (!create_dma_buf_image_intel) {
    LOG(ERROR) << "Scanout buffers can only be imported into vulkan when "
                  "vkCreateDmaBufImageINTEL is available.";
    return nullptr;
  }

  DCHECK(buffer->AreFdsValid());
  DCHECK_EQ(buffer->GetNumPlanes(), 1U);

  // Duplicate the plane fd so |buffer| keeps its own descriptor; the dup is
  // release()'d into the create-info below (not closed here).
  base::ScopedFD vk_image_fd(dup(buffer->GetPlaneFd(0)));
  DCHECK(vk_image_fd.is_valid());

  // TODO(spang): Fix this for formats other than gfx::BufferFormat::BGRA_8888
  DCHECK_EQ(format, display::DisplaySnapshot::PrimaryFormat());
  VkFormat vk_format = VK_FORMAT_B8G8R8A8_SRGB;

  VkDmaBufImageCreateInfo dma_buf_image_create_info = {
      /* .sType = */ static_cast<VkStructureType>(
          VK_STRUCTURE_TYPE_DMA_BUF_IMAGE_CREATE_INFO_INTEL),
      /* .pNext = */ nullptr,
      /* .fd = */ vk_image_fd.release(),
      /* .format = */ vk_format,
      /* .extent = */
      {
          /* .width = */ size.width(),
          /* .height = */ size.height(),
          /* .depth = */ 1,
      },
      /* .strideInBytes = */ buffer->GetPlaneStride(0),
  };

  VkResult result =
      create_dma_buf_image_intel(vk_device, &dma_buf_image_create_info, nullptr,
                                 vk_device_memory, vk_image);
  if (result != VK_SUCCESS) {
    LOG(ERROR) << "Failed to create a Vulkan image from a dmabuf.";
    return nullptr;
  }

  return base::MakeRefCounted<GbmPixmap>(this, std::move(buffer),
                                         std::move(framebuffer));
}
345 #endif
346
CreateOverlaySurface(gfx::AcceleratedWidget window)347 std::unique_ptr<OverlaySurface> GbmSurfaceFactory::CreateOverlaySurface(
348 gfx::AcceleratedWidget window) {
349 return std::make_unique<GbmOverlaySurface>(
350 drm_thread_proxy_->CreateDrmWindowProxy(window));
351 }
352
// Software (canvas) rendering is not available on GBM; always returns
// nullptr after logging.
std::unique_ptr<SurfaceOzoneCanvas> GbmSurfaceFactory::CreateCanvasForWidget(
    gfx::AcceleratedWidget widget) {
  DCHECK(thread_checker_.CalledOnValidThread());
  LOG(ERROR) << "Software rendering mode is not supported with GBM platform";
  return nullptr;
}
359
CreateNativePixmap(gfx::AcceleratedWidget widget,VkDevice vk_device,gfx::Size size,gfx::BufferFormat format,gfx::BufferUsage usage,base::Optional<gfx::Size> framebuffer_size)360 scoped_refptr<gfx::NativePixmap> GbmSurfaceFactory::CreateNativePixmap(
361 gfx::AcceleratedWidget widget,
362 VkDevice vk_device,
363 gfx::Size size,
364 gfx::BufferFormat format,
365 gfx::BufferUsage usage,
366 base::Optional<gfx::Size> framebuffer_size) {
367 if (framebuffer_size &&
368 !gfx::Rect(size).Contains(gfx::Rect(*framebuffer_size))) {
369 return nullptr;
370 }
371 std::unique_ptr<GbmBuffer> buffer;
372 scoped_refptr<DrmFramebuffer> framebuffer;
373 drm_thread_proxy_->CreateBuffer(
374 widget, size, framebuffer_size ? *framebuffer_size : size, format, usage,
375 0 /* flags */, &buffer, &framebuffer);
376 if (!buffer)
377 return nullptr;
378 return base::MakeRefCounted<GbmPixmap>(this, std::move(buffer),
379 std::move(framebuffer));
380 }
381
CreateNativePixmapAsync(gfx::AcceleratedWidget widget,VkDevice vk_device,gfx::Size size,gfx::BufferFormat format,gfx::BufferUsage usage,NativePixmapCallback callback)382 void GbmSurfaceFactory::CreateNativePixmapAsync(gfx::AcceleratedWidget widget,
383 VkDevice vk_device,
384 gfx::Size size,
385 gfx::BufferFormat format,
386 gfx::BufferUsage usage,
387 NativePixmapCallback callback) {
388 drm_thread_proxy_->CreateBufferAsync(
389 widget, size, format, usage, 0 /* flags */,
390 base::BindOnce(OnNativePixmapCreated, std::move(callback),
391 weak_factory_.GetWeakPtr()));
392 }
393
394 scoped_refptr<gfx::NativePixmap>
CreateNativePixmapFromHandleInternal(gfx::AcceleratedWidget widget,gfx::Size size,gfx::BufferFormat format,gfx::NativePixmapHandle handle)395 GbmSurfaceFactory::CreateNativePixmapFromHandleInternal(
396 gfx::AcceleratedWidget widget,
397 gfx::Size size,
398 gfx::BufferFormat format,
399 gfx::NativePixmapHandle handle) {
400 if (handle.planes.size() > GBM_MAX_PLANES) {
401 return nullptr;
402 }
403
404 std::unique_ptr<GbmBuffer> buffer;
405 scoped_refptr<DrmFramebuffer> framebuffer;
406 drm_thread_proxy_->CreateBufferFromHandle(
407 widget, size, format, std::move(handle), &buffer, &framebuffer);
408 if (!buffer)
409 return nullptr;
410 return base::MakeRefCounted<GbmPixmap>(this, std::move(buffer),
411 std::move(framebuffer));
412 }
413
414 scoped_refptr<gfx::NativePixmap>
CreateNativePixmapFromHandle(gfx::AcceleratedWidget widget,gfx::Size size,gfx::BufferFormat format,gfx::NativePixmapHandle handle)415 GbmSurfaceFactory::CreateNativePixmapFromHandle(
416 gfx::AcceleratedWidget widget,
417 gfx::Size size,
418 gfx::BufferFormat format,
419 gfx::NativePixmapHandle handle) {
420 // Query the external service (if available), whether it recognizes this
421 // NativePixmapHandle, and whether it can provide a corresponding NativePixmap
422 // backing it. If so, the handle is consumed. Otherwise, the handle remains
423 // valid and can be further importer by standard means.
424 if (!get_protected_native_pixmap_callback_.is_null()) {
425 auto protected_pixmap = get_protected_native_pixmap_callback_.Run(handle);
426 if (protected_pixmap)
427 return protected_pixmap;
428 }
429
430 return CreateNativePixmapFromHandleInternal(widget, size, format,
431 std::move(handle));
432 }
433
434 scoped_refptr<gfx::NativePixmap>
CreateNativePixmapForProtectedBufferHandle(gfx::AcceleratedWidget widget,gfx::Size size,gfx::BufferFormat format,gfx::NativePixmapHandle handle)435 GbmSurfaceFactory::CreateNativePixmapForProtectedBufferHandle(
436 gfx::AcceleratedWidget widget,
437 gfx::Size size,
438 gfx::BufferFormat format,
439 gfx::NativePixmapHandle handle) {
440 // Create a new NativePixmap without querying the external service for any
441 // existing mappings.
442 return CreateNativePixmapFromHandleInternal(widget, size, format,
443 std::move(handle));
444 }
445
// Installs the callback consulted by CreateNativePixmapFromHandle() to
// resolve protected-buffer handles; replaces any previous delegate.
void GbmSurfaceFactory::SetGetProtectedNativePixmapDelegate(
    const GetProtectedNativePixmapCallback&
        get_protected_native_pixmap_callback) {
  get_protected_native_pixmap_callback_ = get_protected_native_pixmap_callback;
}
451
// Re-enumerates DRM render nodes on every call; not cached because device
// availability can change (see EnumerateSupportedBufferFormatsForTexturing).
std::vector<gfx::BufferFormat>
GbmSurfaceFactory::GetSupportedFormatsForTexturing() const {
  return EnumerateSupportedBufferFormatsForTexturing();
}
456
457 } // namespace ui
458