1 /* Cairo - a vector graphics library with display and print output
2 *
3 * Copyright © 2009 Chris Wilson
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it either under the terms of the GNU Lesser General Public
7 * License version 2.1 as published by the Free Software Foundation
8 * (the "LGPL") or, at your option, under the terms of the Mozilla
9 * Public License Version 1.1 (the "MPL"). If you do not alter this
10 * notice, a recipient may use your version of this file under either
11 * the MPL or the LGPL.
12 *
13 * You should have received a copy of the LGPL along with this library
14 * in the file COPYING-LGPL-2.1; if not, write to the Free Software
15 * Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335, USA
16 * You should have received a copy of the MPL along with this library
17 * in the file COPYING-MPL-1.1
18 *
19 * The contents of this file are subject to the Mozilla Public License
20 * Version 1.1 (the "License"); you may not use this file except in
21 * compliance with the License. You may obtain a copy of the License at
22 * http://www.mozilla.org/MPL/
23 *
24 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY
25 * OF ANY KIND, either express or implied. See the LGPL or the MPL for
26 * the specific language governing rights and limitations.
27 *
28 */
29
30 #ifndef CAIRO_DRM_INTEL_PRIVATE_H
31 #define CAIRO_DRM_INTEL_PRIVATE_H
32
33 #include "cairoint.h"
34 #include "cairo-cache-private.h"
35 #include "cairo-compiler-private.h"
36 #include "cairo-drm-private.h"
37 #include "cairo-freelist-private.h"
38 #include "cairo-list-private.h"
39 #include "cairo-mutex-private.h"
40 #include "cairo-rtree-private.h"
41 #include "cairo-types-private.h"
42 #include "cairo-pattern-private.h"
43
44 #include "cairo-drm-intel-ioctl-private.h"
45
46 #define INTEL_TILING_DEFAULT I915_TILING_Y
47
48 #define INTEL_BO_CACHE_BUCKETS 12 /* cache surfaces up to 16 MiB */
49
50 #define INTEL_GLYPH_CACHE_WIDTH 1024
51 #define INTEL_GLYPH_CACHE_HEIGHT 1024
52 #define INTEL_GLYPH_CACHE_MIN_SIZE 1
53 #define INTEL_GLYPH_CACHE_MAX_SIZE 128
54
/* A GEM buffer object together with the book-keeping this driver needs:
 * list membership for the device caches and the relocation/execution
 * state used while building a batchbuffer. */
typedef struct _intel_bo {
    cairo_drm_bo_t base;

    cairo_list_t link;		/* membership in a device-wide bo list */
    cairo_list_t cache_list;	/* membership in the in-flight/cache list,
				 * see intel_bo_in_flight_add() */

    uint32_t offset;		/* presumably the last-known GPU offset,
				 * used when emitting relocations — TODO confirm */
    uint32_t batch_read_domains;  /* GEM domains read by the current batch */
    uint32_t batch_write_domain;  /* GEM domain written by the current batch */

    uint32_t opaque0;
    uint32_t opaque1;

    uint32_t full_size;		/* allocated size, may exceed the requested size */
    uint16_t stride;
    uint16_t _stride;		/* NOTE(review): shadow copy of stride; pairing
				 * with _tiling suggests "pending vs current"
				 * state — confirm in intel_bo_set_tiling() */
    uint32_t tiling :4;
    uint32_t _tiling :4;
    uint32_t purgeable :1;	/* marked reclaimable by the kernel (madvise) */
    uint32_t busy :1;
    uint32_t cpu :1;

    struct drm_i915_gem_exec_object2 *exec;  /* non-NULL while in the current batch */
    void *virtual;		/* CPU mapping, NULL when unmapped */
} intel_bo_t;
80
81 #define INTEL_BATCH_SIZE (64*1024)
82 #define INTEL_VERTEX_BUFFER_SIZE (512*1024)
83 #define INTEL_MAX_RELOCS 2048
84
85 static inline void
intel_bo_mark_purgeable(intel_bo_t * bo)86 intel_bo_mark_purgeable (intel_bo_t *bo)
87 {
88 if (bo->base.name == 0)
89 bo->purgeable = 1;
90 }
91
92 typedef struct _intel_vertex_buffer intel_vertex_buffer_t;
93
94 typedef void (*intel_vertex_buffer_new_func_t) (intel_vertex_buffer_t *vertex_buffer);
95 typedef void (*intel_vertex_buffer_start_rectangles_func_t) (intel_vertex_buffer_t *vertex_buffer,
96 uint32_t floats_per_vertex);
97 typedef void (*intel_vertex_buffer_flush_func_t) (intel_vertex_buffer_t *vertex_buffer);
98 typedef void (*intel_vertex_buffer_finish_func_t) (intel_vertex_buffer_t *vertex_buffer);
99
/* Accumulates vertex data for rectangle rendering, flushed into a VBO as
 * it fills.  The callbacks let the generation-specific backend hook the
 * buffer lifecycle. */
struct _intel_vertex_buffer {
    uint32_t vbo_batch; /* reloc position in batch, 0 -> not yet allocated */
    uint32_t vbo_offset;
    uint32_t vbo_used;

    uint32_t vertex_index;	/* index of the first vertex of the current primitive */
    uint32_t vertex_count;

    uint32_t floats_per_vertex;
    uint32_t rectangle_size;	/* bytes consumed per rectangle at the
				 * current floats_per_vertex */

    /* The previous VBO, kept so its tail can still be referenced. */
    intel_bo_t *last_vbo;
    uint32_t last_vbo_offset;
    uint32_t last_vbo_space;

    /* Backend hooks for buffer lifecycle events. */
    intel_vertex_buffer_new_func_t new;
    intel_vertex_buffer_start_rectangles_func_t start_rectangles;
    intel_vertex_buffer_flush_func_t flush;
    intel_vertex_buffer_finish_func_t finish;

    /* In-memory staging area for vertex data. */
    uint32_t base[INTEL_VERTEX_BUFFER_SIZE / sizeof (uint32_t)];
};
122
123 typedef struct _intel_batch intel_batch_t;
124
125 typedef void (*intel_batch_commit_func_t) (intel_batch_t *batch);
126 typedef void (*intel_batch_reset_func_t) (intel_batch_t *batch);
127
/* A batchbuffer under construction: the command dwords in @base plus the
 * execution-object and relocation tables handed to the execbuffer2 ioctl. */
struct _intel_batch {
    size_t gtt_size;	    /* aperture consumed by objects in this batch */
    size_t gtt_avail_size;

    /* Backend hooks invoked on submission and reset. */
    intel_batch_commit_func_t commit;
    intel_batch_reset_func_t reset;

    uint16_t exec_count;    /* valid entries in exec[]/target_bo[] */
    uint16_t reloc_count;   /* valid entries in reloc[] */
    uint16_t used;	    /* dwords emitted into base[] — TODO confirm units */
    uint16_t header;

    /* target_bo[i] is the bo referenced by exec[i]. */
    intel_bo_t *target_bo[INTEL_MAX_RELOCS];
    struct drm_i915_gem_exec_object2 exec[INTEL_MAX_RELOCS];
    struct drm_i915_gem_relocation_entry reloc[INTEL_MAX_RELOCS];

    /* The command stream itself. */
    uint32_t base[INTEL_BATCH_SIZE / sizeof (uint32_t)];

    intel_vertex_buffer_t vertex_buffer;
};
148
/* A 2D image living in a bo, described in the terms the sampler needs. */
typedef struct _intel_buffer {
    intel_bo_t *bo;
    uint32_t offset;		/* byte offset of the image within the bo */
    cairo_format_t format;
    uint32_t map0, map1;	/* presumably precomputed hardware surface-state
				 * dwords (cf. MS3_tiling below) — TODO confirm */
    uint32_t width;
    uint32_t height;
    uint32_t stride;
} intel_buffer_t;
158
/* A reference-counted atlas: one large buffer whose sub-rectangles are
 * parcelled out via an r-tree (used for the glyph caches). */
typedef struct _intel_buffer_cache {
    int ref_count;
    intel_buffer_t buffer;
    cairo_rtree_t rtree;	/* allocator for sub-rectangles of buffer */
    cairo_list_t link;
} intel_buffer_cache_t;
165
/* One glyph's slot inside a glyph-cache atlas. */
typedef struct _intel_glyph {
    cairo_rtree_node_t node;	/* its rectangle within cache->rtree */
    intel_buffer_cache_t *cache;
    void **owner;		/* back-pointer used to unlink on eviction —
				 * TODO confirm against intel_scaled_glyph_fini */
    float texcoord[3];		/* packed texture coordinates of the slot */
    int width, height;
} intel_glyph_t;
173
/* One entry of the device's gradient cache: the source pattern paired
 * with the buffer holding its rendered ramp (see intel_gradient_render). */
typedef struct _intel_gradient_cache {
    cairo_pattern_union_t pattern;
    intel_buffer_t buffer;
} intel_gradient_cache_t;
#define GRADIENT_CACHE_SIZE 16
179
/* An Intel DRM surface: the generic DRM surface plus its entry in the
 * device's snapshot cache. */
typedef struct _intel_surface {
    cairo_drm_surface_t drm;

    cairo_cache_entry_t snapshot_cache_entry;
} intel_surface_t;
185
/* Hook invoked when the GPU context must be re-emitted from scratch. */
typedef void (*intel_reset_context_func_t) (void *device);

/* Per-device state shared by the i915/i965 backends. */
typedef struct _intel_device {
    cairo_drm_device_t base;

    size_t gtt_max_size;	/* total aperture size, see intel_info() */
    size_t gtt_avail_size;

    cairo_freepool_t bo_pool;	/* allocator for intel_bo_t wrappers */
    cairo_list_t bo_in_flight;	/* bos referenced by an unretired batch */

    cairo_mutex_t mutex;
    intel_batch_t batch;	/* the batchbuffer under construction */

    intel_buffer_cache_t glyph_cache[2];
    cairo_list_t fonts;

    /* Small LRU-less cache of rendered gradient ramps. */
    struct {
	intel_gradient_cache_t cache[GRADIENT_CACHE_SIZE];
	unsigned int size;
    } gradient_cache;

    cairo_cache_t snapshot_cache;
    size_t snapshot_cache_max_size;

    intel_reset_context_func_t reset_context;

    cairo_status_t (*flush) (struct _intel_device *);
} intel_device_t;
215
216 static inline intel_device_t *
to_intel_device(cairo_device_t * base)217 to_intel_device (cairo_device_t *base)
218 {
219 return (intel_device_t *) base;
220 }
221
222 static inline intel_bo_t *
to_intel_bo(cairo_drm_bo_t * base)223 to_intel_bo (cairo_drm_bo_t *base)
224 {
225 return (intel_bo_t *) base;
226 }
227
228 static inline intel_bo_t *
intel_bo_reference(intel_bo_t * bo)229 intel_bo_reference (intel_bo_t *bo)
230 {
231 return to_intel_bo (cairo_drm_bo_reference (&bo->base));
232 }
233
234 cairo_private cairo_bool_t
235 intel_bo_madvise (intel_device_t *device, intel_bo_t *bo, int madv);
236
237 static cairo_always_inline void
intel_bo_destroy(intel_device_t * device,intel_bo_t * bo)238 intel_bo_destroy (intel_device_t *device, intel_bo_t *bo)
239 {
240 cairo_drm_bo_destroy (&device->base.base, &bo->base);
241 }
242
243 static inline void
intel_bo_in_flight_add(intel_device_t * device,intel_bo_t * bo)244 intel_bo_in_flight_add (intel_device_t *device,
245 intel_bo_t *bo)
246 {
247 if (bo->base.name == 0 && bo->exec != NULL && cairo_list_is_empty (&bo->cache_list))
248 cairo_list_add (&bo->cache_list, &device->bo_in_flight);
249 }
250
251 cairo_private int
252 intel_get (int fd, int param);
253
254 cairo_private cairo_bool_t
255 intel_info (int fd, uint64_t *gtt_size);
256
257 cairo_private cairo_status_t
258 intel_device_init (intel_device_t *device, int fd);
259
260 cairo_private void
261 intel_device_fini (intel_device_t *dev);
262
263 cairo_private intel_bo_t *
264 intel_bo_create (intel_device_t *dev,
265 uint32_t max_size,
266 uint32_t real_size,
267 cairo_bool_t gpu_target,
268 uint32_t tiling,
269 uint32_t stride);
270
271 cairo_private intel_bo_t *
272 intel_bo_create_for_name (intel_device_t *dev, uint32_t name);
273
274 cairo_private void
275 intel_bo_set_tiling (const intel_device_t *dev,
276 intel_bo_t *bo);
277
278 cairo_private cairo_bool_t
279 intel_bo_is_inactive (const intel_device_t *device,
280 intel_bo_t *bo);
281
282 cairo_private cairo_bool_t
283 intel_bo_wait (const intel_device_t *device, const intel_bo_t *bo);
284
285 cairo_private void
286 intel_bo_write (const intel_device_t *dev,
287 intel_bo_t *bo,
288 unsigned long offset,
289 unsigned long size,
290 const void *data);
291
292 cairo_private void
293 intel_bo_read (const intel_device_t *dev,
294 intel_bo_t *bo,
295 unsigned long offset,
296 unsigned long size,
297 void *data);
298
299 cairo_private void *
300 intel_bo_map (const intel_device_t *dev, intel_bo_t *bo);
301
302 cairo_private void
303 intel_bo_unmap (intel_bo_t *bo);
304
305 cairo_private cairo_status_t
306 intel_bo_init (const intel_device_t *dev,
307 intel_bo_t *bo,
308 uint32_t size,
309 uint32_t initial_domain);
310
311 cairo_private cairo_status_t
312 intel_bo_init_for_name (const intel_device_t *dev,
313 intel_bo_t *bo,
314 uint32_t size,
315 uint32_t name);
316
317 cairo_private cairo_status_t
318 intel_bo_put_image (intel_device_t *dev,
319 intel_bo_t *bo,
320 cairo_image_surface_t *src,
321 int src_x, int src_y,
322 int width, int height,
323 int dst_x, int dst_y);
324
325 cairo_private void
326 intel_surface_init (intel_surface_t *surface,
327 const cairo_surface_backend_t *backend,
328 cairo_drm_device_t *device,
329 cairo_format_t format,
330 int width, int height);
331
332 cairo_private cairo_status_t
333 intel_buffer_cache_init (intel_buffer_cache_t *cache,
334 intel_device_t *device,
335 cairo_format_t format,
336 int width, int height);
337
338 cairo_private cairo_status_t
339 intel_gradient_render (intel_device_t *device,
340 const cairo_gradient_pattern_t *pattern,
341 intel_buffer_t *buffer);
342
343 cairo_private cairo_int_status_t
344 intel_get_glyph (intel_device_t *device,
345 cairo_scaled_font_t *scaled_font,
346 cairo_scaled_glyph_t *scaled_glyph);
347
348 cairo_private void
349 intel_scaled_glyph_fini (cairo_scaled_glyph_t *scaled_glyph,
350 cairo_scaled_font_t *scaled_font);
351
352 cairo_private void
353 intel_scaled_font_fini (cairo_scaled_font_t *scaled_font);
354
355 cairo_private void
356 intel_glyph_cache_unpin (intel_device_t *device);
357
358 static inline intel_glyph_t *
intel_glyph_pin(intel_glyph_t * glyph)359 intel_glyph_pin (intel_glyph_t *glyph)
360 {
361 cairo_rtree_node_t *node = &glyph->node;
362 if (unlikely (node->pinned == 0))
363 return _cairo_rtree_pin (&glyph->cache->rtree, node);
364 return glyph;
365 }
366
367 cairo_private cairo_status_t
368 intel_snapshot_cache_insert (intel_device_t *device,
369 intel_surface_t *surface);
370
371 cairo_private void
372 intel_surface_detach_snapshot (cairo_surface_t *abstract_surface);
373
374 cairo_private void
375 intel_snapshot_cache_thaw (intel_device_t *device);
376
377 cairo_private void
378 intel_throttle (intel_device_t *device);
379
380 cairo_private cairo_status_t
381 intel_surface_acquire_source_image (void *abstract_surface,
382 cairo_image_surface_t **image_out,
383 void **image_extra);
384
385 cairo_private void
386 intel_surface_release_source_image (void *abstract_surface,
387 cairo_image_surface_t *image,
388 void *image_extra);
389 cairo_private cairo_surface_t *
390 intel_surface_map_to_image (void *abstract_surface);
391
392 cairo_private cairo_status_t
393 intel_surface_flush (void *abstract_surface,
394 unsigned flags);
395
396 cairo_private cairo_status_t
397 intel_surface_finish (void *abstract_surface);
398
399 cairo_private void
400 intel_dump_batchbuffer (const void *batch,
401 uint32_t length,
402 int devid);
403
404 static inline uint32_t cairo_const
MS3_tiling(uint32_t tiling)405 MS3_tiling (uint32_t tiling)
406 {
407 switch (tiling) {
408 default:
409 case I915_TILING_NONE: return 0;
410 case I915_TILING_X: return MS3_TILED_SURFACE;
411 case I915_TILING_Y: return MS3_TILED_SURFACE | MS3_TILE_WALK;
412 }
413 }
414
415 static inline float cairo_const
texcoord_2d_16(double x,double y)416 texcoord_2d_16 (double x, double y)
417 {
418 union {
419 uint32_t ui;
420 float f;
421 } u;
422 u.ui = (_cairo_half_from_float (y) << 16) | _cairo_half_from_float (x);
423 return u.f;
424 }
425
/* PCI device ids for the supported Intel GPUs, and generation predicates.
 * NOTE: every macro parameter below is parenthesized (CERT C PRE01-C);
 * the previous definitions expanded a bare `devid == X`, which mis-parsed
 * arguments containing lower-precedence operators, e.g.
 * IS_IGDGM(x & 0xFFFF) became x & (0xFFFF == PCI_CHIP_IGD_GM). */
#define PCI_CHIP_I810			0x7121
#define PCI_CHIP_I810_DC100		0x7123
#define PCI_CHIP_I810_E			0x7125
#define PCI_CHIP_I815			0x1132

#define PCI_CHIP_I830_M			0x3577
#define PCI_CHIP_845_G			0x2562
#define PCI_CHIP_I855_GM		0x3582
#define PCI_CHIP_I865_G			0x2572

#define PCI_CHIP_I915_G			0x2582
#define PCI_CHIP_E7221_G		0x258A
#define PCI_CHIP_I915_GM		0x2592
#define PCI_CHIP_I945_G			0x2772
#define PCI_CHIP_I945_GM		0x27A2
#define PCI_CHIP_I945_GME		0x27AE

#define PCI_CHIP_Q35_G			0x29B2
#define PCI_CHIP_G33_G			0x29C2
#define PCI_CHIP_Q33_G			0x29D2

#define PCI_CHIP_IGD_GM			0xA011
#define PCI_CHIP_IGD_G			0xA001

#define IS_IGDGM(devid)	((devid) == PCI_CHIP_IGD_GM)
#define IS_IGDG(devid)	((devid) == PCI_CHIP_IGD_G)
#define IS_IGD(devid)	(IS_IGDG(devid) || IS_IGDGM(devid))

#define PCI_CHIP_I965_G			0x29A2
#define PCI_CHIP_I965_Q			0x2992
#define PCI_CHIP_I965_G_1		0x2982
#define PCI_CHIP_I946_GZ		0x2972
#define PCI_CHIP_I965_GM		0x2A02
#define PCI_CHIP_I965_GME		0x2A12

#define PCI_CHIP_GM45_GM		0x2A42

#define PCI_CHIP_IGD_E_G		0x2E02
#define PCI_CHIP_Q45_G			0x2E12
#define PCI_CHIP_G45_G			0x2E22
#define PCI_CHIP_G41_G			0x2E32

#define PCI_CHIP_ILD_G			0x0042
#define PCI_CHIP_ILM_G			0x0046

/* Mobile parts (affects e.g. tiling/fencing behaviour). */
#define IS_MOBILE(devid)	((devid) == PCI_CHIP_I855_GM || \
				 (devid) == PCI_CHIP_I915_GM || \
				 (devid) == PCI_CHIP_I945_GM || \
				 (devid) == PCI_CHIP_I945_GME || \
				 (devid) == PCI_CHIP_I965_GM || \
				 (devid) == PCI_CHIP_I965_GME || \
				 (devid) == PCI_CHIP_GM45_GM || IS_IGD(devid))

#define IS_G45(devid)		((devid) == PCI_CHIP_IGD_E_G || \
				 (devid) == PCI_CHIP_Q45_G || \
				 (devid) == PCI_CHIP_G45_G || \
				 (devid) == PCI_CHIP_G41_G)
#define IS_GM45(devid)		((devid) == PCI_CHIP_GM45_GM)
#define IS_G4X(devid)		(IS_G45(devid) || IS_GM45(devid))

#define IS_ILD(devid)		((devid) == PCI_CHIP_ILD_G)
#define IS_ILM(devid)		((devid) == PCI_CHIP_ILM_G)
#define IS_IRONLAKE(devid)	(IS_ILD(devid) || IS_ILM(devid))

/* Generation predicates. */
#define IS_915(devid)		((devid) == PCI_CHIP_I915_G || \
				 (devid) == PCI_CHIP_E7221_G || \
				 (devid) == PCI_CHIP_I915_GM)

#define IS_945(devid)		((devid) == PCI_CHIP_I945_G || \
				 (devid) == PCI_CHIP_I945_GM || \
				 (devid) == PCI_CHIP_I945_GME || \
				 (devid) == PCI_CHIP_G33_G || \
				 (devid) == PCI_CHIP_Q33_G || \
				 (devid) == PCI_CHIP_Q35_G || IS_IGD(devid))

#define IS_965(devid)		((devid) == PCI_CHIP_I965_G || \
				 (devid) == PCI_CHIP_I965_Q || \
				 (devid) == PCI_CHIP_I965_G_1 || \
				 (devid) == PCI_CHIP_I965_GM || \
				 (devid) == PCI_CHIP_I965_GME || \
				 (devid) == PCI_CHIP_I946_GZ || \
				 IS_G4X(devid) || \
				 IS_IRONLAKE(devid))

#define IS_9XX(devid)		(IS_915(devid) || \
				 IS_945(devid) || \
				 IS_965(devid))
513
514
515 #endif /* CAIRO_DRM_INTEL_PRIVATE_H */
516