xref: /qemu/hw/display/virtio-gpu-virgl.c (revision 7d2ad4e1)
1*7d2ad4e1SGerd Hoffmann /*
2*7d2ad4e1SGerd Hoffmann  * Virtio GPU Device
3*7d2ad4e1SGerd Hoffmann  *
4*7d2ad4e1SGerd Hoffmann  * Copyright Red Hat, Inc. 2013-2014
5*7d2ad4e1SGerd Hoffmann  *
6*7d2ad4e1SGerd Hoffmann  * Authors:
7*7d2ad4e1SGerd Hoffmann  *     Dave Airlie <airlied@redhat.com>
8*7d2ad4e1SGerd Hoffmann  *     Gerd Hoffmann <kraxel@redhat.com>
9*7d2ad4e1SGerd Hoffmann  *
10*7d2ad4e1SGerd Hoffmann  * This work is licensed under the terms of the GNU GPL, version 2 or later.
11*7d2ad4e1SGerd Hoffmann  * See the COPYING file in the top-level directory.
12*7d2ad4e1SGerd Hoffmann  */
13*7d2ad4e1SGerd Hoffmann 
14*7d2ad4e1SGerd Hoffmann #include "qemu/osdep.h"
15*7d2ad4e1SGerd Hoffmann #include "qemu/iov.h"
16*7d2ad4e1SGerd Hoffmann #include "trace.h"
17*7d2ad4e1SGerd Hoffmann #include "hw/virtio/virtio.h"
18*7d2ad4e1SGerd Hoffmann #include "hw/virtio/virtio-gpu.h"
19*7d2ad4e1SGerd Hoffmann 
20*7d2ad4e1SGerd Hoffmann #include <virglrenderer.h>
21*7d2ad4e1SGerd Hoffmann 
22*7d2ad4e1SGerd Hoffmann static struct virgl_renderer_callbacks virtio_gpu_3d_cbs;
23*7d2ad4e1SGerd Hoffmann 
24*7d2ad4e1SGerd Hoffmann static void virgl_cmd_create_resource_2d(VirtIOGPU *g,
25*7d2ad4e1SGerd Hoffmann                                          struct virtio_gpu_ctrl_command *cmd)
26*7d2ad4e1SGerd Hoffmann {
27*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_resource_create_2d c2d;
28*7d2ad4e1SGerd Hoffmann     struct virgl_renderer_resource_create_args args;
29*7d2ad4e1SGerd Hoffmann 
30*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(c2d);
31*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_res_create_2d(c2d.resource_id, c2d.format,
32*7d2ad4e1SGerd Hoffmann                                        c2d.width, c2d.height);
33*7d2ad4e1SGerd Hoffmann 
34*7d2ad4e1SGerd Hoffmann     args.handle = c2d.resource_id;
35*7d2ad4e1SGerd Hoffmann     args.target = 2;
36*7d2ad4e1SGerd Hoffmann     args.format = c2d.format;
37*7d2ad4e1SGerd Hoffmann     args.bind = (1 << 1);
38*7d2ad4e1SGerd Hoffmann     args.width = c2d.width;
39*7d2ad4e1SGerd Hoffmann     args.height = c2d.height;
40*7d2ad4e1SGerd Hoffmann     args.depth = 1;
41*7d2ad4e1SGerd Hoffmann     args.array_size = 1;
42*7d2ad4e1SGerd Hoffmann     args.last_level = 0;
43*7d2ad4e1SGerd Hoffmann     args.nr_samples = 0;
44*7d2ad4e1SGerd Hoffmann     args.flags = VIRTIO_GPU_RESOURCE_FLAG_Y_0_TOP;
45*7d2ad4e1SGerd Hoffmann     virgl_renderer_resource_create(&args, NULL, 0);
46*7d2ad4e1SGerd Hoffmann }
47*7d2ad4e1SGerd Hoffmann 
48*7d2ad4e1SGerd Hoffmann static void virgl_cmd_create_resource_3d(VirtIOGPU *g,
49*7d2ad4e1SGerd Hoffmann                                          struct virtio_gpu_ctrl_command *cmd)
50*7d2ad4e1SGerd Hoffmann {
51*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_resource_create_3d c3d;
52*7d2ad4e1SGerd Hoffmann     struct virgl_renderer_resource_create_args args;
53*7d2ad4e1SGerd Hoffmann 
54*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(c3d);
55*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_res_create_3d(c3d.resource_id, c3d.format,
56*7d2ad4e1SGerd Hoffmann                                        c3d.width, c3d.height, c3d.depth);
57*7d2ad4e1SGerd Hoffmann 
58*7d2ad4e1SGerd Hoffmann     args.handle = c3d.resource_id;
59*7d2ad4e1SGerd Hoffmann     args.target = c3d.target;
60*7d2ad4e1SGerd Hoffmann     args.format = c3d.format;
61*7d2ad4e1SGerd Hoffmann     args.bind = c3d.bind;
62*7d2ad4e1SGerd Hoffmann     args.width = c3d.width;
63*7d2ad4e1SGerd Hoffmann     args.height = c3d.height;
64*7d2ad4e1SGerd Hoffmann     args.depth = c3d.depth;
65*7d2ad4e1SGerd Hoffmann     args.array_size = c3d.array_size;
66*7d2ad4e1SGerd Hoffmann     args.last_level = c3d.last_level;
67*7d2ad4e1SGerd Hoffmann     args.nr_samples = c3d.nr_samples;
68*7d2ad4e1SGerd Hoffmann     args.flags = c3d.flags;
69*7d2ad4e1SGerd Hoffmann     virgl_renderer_resource_create(&args, NULL, 0);
70*7d2ad4e1SGerd Hoffmann }
71*7d2ad4e1SGerd Hoffmann 
72*7d2ad4e1SGerd Hoffmann static void virgl_cmd_resource_unref(VirtIOGPU *g,
73*7d2ad4e1SGerd Hoffmann                                      struct virtio_gpu_ctrl_command *cmd)
74*7d2ad4e1SGerd Hoffmann {
75*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_resource_unref unref;
76*7d2ad4e1SGerd Hoffmann     struct iovec *res_iovs = NULL;
77*7d2ad4e1SGerd Hoffmann     int num_iovs = 0;
78*7d2ad4e1SGerd Hoffmann 
79*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(unref);
80*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_res_unref(unref.resource_id);
81*7d2ad4e1SGerd Hoffmann 
82*7d2ad4e1SGerd Hoffmann     virgl_renderer_resource_detach_iov(unref.resource_id,
83*7d2ad4e1SGerd Hoffmann                                        &res_iovs,
84*7d2ad4e1SGerd Hoffmann                                        &num_iovs);
85*7d2ad4e1SGerd Hoffmann     if (res_iovs != NULL && num_iovs != 0) {
86*7d2ad4e1SGerd Hoffmann         virtio_gpu_cleanup_mapping_iov(g, res_iovs, num_iovs);
87*7d2ad4e1SGerd Hoffmann     }
88*7d2ad4e1SGerd Hoffmann     virgl_renderer_resource_unref(unref.resource_id);
89*7d2ad4e1SGerd Hoffmann }
90*7d2ad4e1SGerd Hoffmann 
91*7d2ad4e1SGerd Hoffmann static void virgl_cmd_context_create(VirtIOGPU *g,
92*7d2ad4e1SGerd Hoffmann                                      struct virtio_gpu_ctrl_command *cmd)
93*7d2ad4e1SGerd Hoffmann {
94*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_ctx_create cc;
95*7d2ad4e1SGerd Hoffmann 
96*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(cc);
97*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_ctx_create(cc.hdr.ctx_id,
98*7d2ad4e1SGerd Hoffmann                                     cc.debug_name);
99*7d2ad4e1SGerd Hoffmann 
100*7d2ad4e1SGerd Hoffmann     virgl_renderer_context_create(cc.hdr.ctx_id, cc.nlen,
101*7d2ad4e1SGerd Hoffmann                                   cc.debug_name);
102*7d2ad4e1SGerd Hoffmann }
103*7d2ad4e1SGerd Hoffmann 
104*7d2ad4e1SGerd Hoffmann static void virgl_cmd_context_destroy(VirtIOGPU *g,
105*7d2ad4e1SGerd Hoffmann                                       struct virtio_gpu_ctrl_command *cmd)
106*7d2ad4e1SGerd Hoffmann {
107*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_ctx_destroy cd;
108*7d2ad4e1SGerd Hoffmann 
109*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(cd);
110*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_ctx_destroy(cd.hdr.ctx_id);
111*7d2ad4e1SGerd Hoffmann 
112*7d2ad4e1SGerd Hoffmann     virgl_renderer_context_destroy(cd.hdr.ctx_id);
113*7d2ad4e1SGerd Hoffmann }
114*7d2ad4e1SGerd Hoffmann 
115*7d2ad4e1SGerd Hoffmann static void virtio_gpu_rect_update(VirtIOGPU *g, int idx, int x, int y,
116*7d2ad4e1SGerd Hoffmann                                 int width, int height)
117*7d2ad4e1SGerd Hoffmann {
118*7d2ad4e1SGerd Hoffmann     if (!g->parent_obj.scanout[idx].con) {
119*7d2ad4e1SGerd Hoffmann         return;
120*7d2ad4e1SGerd Hoffmann     }
121*7d2ad4e1SGerd Hoffmann 
122*7d2ad4e1SGerd Hoffmann     dpy_gl_update(g->parent_obj.scanout[idx].con, x, y, width, height);
123*7d2ad4e1SGerd Hoffmann }
124*7d2ad4e1SGerd Hoffmann 
125*7d2ad4e1SGerd Hoffmann static void virgl_cmd_resource_flush(VirtIOGPU *g,
126*7d2ad4e1SGerd Hoffmann                                      struct virtio_gpu_ctrl_command *cmd)
127*7d2ad4e1SGerd Hoffmann {
128*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_resource_flush rf;
129*7d2ad4e1SGerd Hoffmann     int i;
130*7d2ad4e1SGerd Hoffmann 
131*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(rf);
132*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_res_flush(rf.resource_id,
133*7d2ad4e1SGerd Hoffmann                                    rf.r.width, rf.r.height, rf.r.x, rf.r.y);
134*7d2ad4e1SGerd Hoffmann 
135*7d2ad4e1SGerd Hoffmann     for (i = 0; i < g->parent_obj.conf.max_outputs; i++) {
136*7d2ad4e1SGerd Hoffmann         if (g->parent_obj.scanout[i].resource_id != rf.resource_id) {
137*7d2ad4e1SGerd Hoffmann             continue;
138*7d2ad4e1SGerd Hoffmann         }
139*7d2ad4e1SGerd Hoffmann         virtio_gpu_rect_update(g, i, rf.r.x, rf.r.y, rf.r.width, rf.r.height);
140*7d2ad4e1SGerd Hoffmann     }
141*7d2ad4e1SGerd Hoffmann }
142*7d2ad4e1SGerd Hoffmann 
143*7d2ad4e1SGerd Hoffmann static void virgl_cmd_set_scanout(VirtIOGPU *g,
144*7d2ad4e1SGerd Hoffmann                                   struct virtio_gpu_ctrl_command *cmd)
145*7d2ad4e1SGerd Hoffmann {
146*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_set_scanout ss;
147*7d2ad4e1SGerd Hoffmann     struct virgl_renderer_resource_info info;
148*7d2ad4e1SGerd Hoffmann     int ret;
149*7d2ad4e1SGerd Hoffmann 
150*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(ss);
151*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_set_scanout(ss.scanout_id, ss.resource_id,
152*7d2ad4e1SGerd Hoffmann                                      ss.r.width, ss.r.height, ss.r.x, ss.r.y);
153*7d2ad4e1SGerd Hoffmann 
154*7d2ad4e1SGerd Hoffmann     if (ss.scanout_id >= g->parent_obj.conf.max_outputs) {
155*7d2ad4e1SGerd Hoffmann         qemu_log_mask(LOG_GUEST_ERROR, "%s: illegal scanout id specified %d",
156*7d2ad4e1SGerd Hoffmann                       __func__, ss.scanout_id);
157*7d2ad4e1SGerd Hoffmann         cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_SCANOUT_ID;
158*7d2ad4e1SGerd Hoffmann         return;
159*7d2ad4e1SGerd Hoffmann     }
160*7d2ad4e1SGerd Hoffmann     g->parent_obj.enable = 1;
161*7d2ad4e1SGerd Hoffmann 
162*7d2ad4e1SGerd Hoffmann     memset(&info, 0, sizeof(info));
163*7d2ad4e1SGerd Hoffmann 
164*7d2ad4e1SGerd Hoffmann     if (ss.resource_id && ss.r.width && ss.r.height) {
165*7d2ad4e1SGerd Hoffmann         ret = virgl_renderer_resource_get_info(ss.resource_id, &info);
166*7d2ad4e1SGerd Hoffmann         if (ret == -1) {
167*7d2ad4e1SGerd Hoffmann             qemu_log_mask(LOG_GUEST_ERROR,
168*7d2ad4e1SGerd Hoffmann                           "%s: illegal resource specified %d\n",
169*7d2ad4e1SGerd Hoffmann                           __func__, ss.resource_id);
170*7d2ad4e1SGerd Hoffmann             cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_RESOURCE_ID;
171*7d2ad4e1SGerd Hoffmann             return;
172*7d2ad4e1SGerd Hoffmann         }
173*7d2ad4e1SGerd Hoffmann         qemu_console_resize(g->parent_obj.scanout[ss.scanout_id].con,
174*7d2ad4e1SGerd Hoffmann                             ss.r.width, ss.r.height);
175*7d2ad4e1SGerd Hoffmann         virgl_renderer_force_ctx_0();
176*7d2ad4e1SGerd Hoffmann         dpy_gl_scanout_texture(
177*7d2ad4e1SGerd Hoffmann             g->parent_obj.scanout[ss.scanout_id].con, info.tex_id,
178*7d2ad4e1SGerd Hoffmann             info.flags & 1 /* FIXME: Y_0_TOP */,
179*7d2ad4e1SGerd Hoffmann             info.width, info.height,
180*7d2ad4e1SGerd Hoffmann             ss.r.x, ss.r.y, ss.r.width, ss.r.height);
181*7d2ad4e1SGerd Hoffmann     } else {
182*7d2ad4e1SGerd Hoffmann         dpy_gfx_replace_surface(
183*7d2ad4e1SGerd Hoffmann             g->parent_obj.scanout[ss.scanout_id].con, NULL);
184*7d2ad4e1SGerd Hoffmann         dpy_gl_scanout_disable(g->parent_obj.scanout[ss.scanout_id].con);
185*7d2ad4e1SGerd Hoffmann     }
186*7d2ad4e1SGerd Hoffmann     g->parent_obj.scanout[ss.scanout_id].resource_id = ss.resource_id;
187*7d2ad4e1SGerd Hoffmann }
188*7d2ad4e1SGerd Hoffmann 
189*7d2ad4e1SGerd Hoffmann static void virgl_cmd_submit_3d(VirtIOGPU *g,
190*7d2ad4e1SGerd Hoffmann                                 struct virtio_gpu_ctrl_command *cmd)
191*7d2ad4e1SGerd Hoffmann {
192*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_cmd_submit cs;
193*7d2ad4e1SGerd Hoffmann     void *buf;
194*7d2ad4e1SGerd Hoffmann     size_t s;
195*7d2ad4e1SGerd Hoffmann 
196*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(cs);
197*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_ctx_submit(cs.hdr.ctx_id, cs.size);
198*7d2ad4e1SGerd Hoffmann 
199*7d2ad4e1SGerd Hoffmann     buf = g_malloc(cs.size);
200*7d2ad4e1SGerd Hoffmann     s = iov_to_buf(cmd->elem.out_sg, cmd->elem.out_num,
201*7d2ad4e1SGerd Hoffmann                    sizeof(cs), buf, cs.size);
202*7d2ad4e1SGerd Hoffmann     if (s != cs.size) {
203*7d2ad4e1SGerd Hoffmann         qemu_log_mask(LOG_GUEST_ERROR, "%s: size mismatch (%zd/%d)",
204*7d2ad4e1SGerd Hoffmann                       __func__, s, cs.size);
205*7d2ad4e1SGerd Hoffmann         cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER;
206*7d2ad4e1SGerd Hoffmann         goto out;
207*7d2ad4e1SGerd Hoffmann     }
208*7d2ad4e1SGerd Hoffmann 
209*7d2ad4e1SGerd Hoffmann     if (virtio_gpu_stats_enabled(g->parent_obj.conf)) {
210*7d2ad4e1SGerd Hoffmann         g->stats.req_3d++;
211*7d2ad4e1SGerd Hoffmann         g->stats.bytes_3d += cs.size;
212*7d2ad4e1SGerd Hoffmann     }
213*7d2ad4e1SGerd Hoffmann 
214*7d2ad4e1SGerd Hoffmann     virgl_renderer_submit_cmd(buf, cs.hdr.ctx_id, cs.size / 4);
215*7d2ad4e1SGerd Hoffmann 
216*7d2ad4e1SGerd Hoffmann out:
217*7d2ad4e1SGerd Hoffmann     g_free(buf);
218*7d2ad4e1SGerd Hoffmann }
219*7d2ad4e1SGerd Hoffmann 
220*7d2ad4e1SGerd Hoffmann static void virgl_cmd_transfer_to_host_2d(VirtIOGPU *g,
221*7d2ad4e1SGerd Hoffmann                                           struct virtio_gpu_ctrl_command *cmd)
222*7d2ad4e1SGerd Hoffmann {
223*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_transfer_to_host_2d t2d;
224*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_box box;
225*7d2ad4e1SGerd Hoffmann 
226*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(t2d);
227*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_res_xfer_toh_2d(t2d.resource_id);
228*7d2ad4e1SGerd Hoffmann 
229*7d2ad4e1SGerd Hoffmann     box.x = t2d.r.x;
230*7d2ad4e1SGerd Hoffmann     box.y = t2d.r.y;
231*7d2ad4e1SGerd Hoffmann     box.z = 0;
232*7d2ad4e1SGerd Hoffmann     box.w = t2d.r.width;
233*7d2ad4e1SGerd Hoffmann     box.h = t2d.r.height;
234*7d2ad4e1SGerd Hoffmann     box.d = 1;
235*7d2ad4e1SGerd Hoffmann 
236*7d2ad4e1SGerd Hoffmann     virgl_renderer_transfer_write_iov(t2d.resource_id,
237*7d2ad4e1SGerd Hoffmann                                       0,
238*7d2ad4e1SGerd Hoffmann                                       0,
239*7d2ad4e1SGerd Hoffmann                                       0,
240*7d2ad4e1SGerd Hoffmann                                       0,
241*7d2ad4e1SGerd Hoffmann                                       (struct virgl_box *)&box,
242*7d2ad4e1SGerd Hoffmann                                       t2d.offset, NULL, 0);
243*7d2ad4e1SGerd Hoffmann }
244*7d2ad4e1SGerd Hoffmann 
245*7d2ad4e1SGerd Hoffmann static void virgl_cmd_transfer_to_host_3d(VirtIOGPU *g,
246*7d2ad4e1SGerd Hoffmann                                           struct virtio_gpu_ctrl_command *cmd)
247*7d2ad4e1SGerd Hoffmann {
248*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_transfer_host_3d t3d;
249*7d2ad4e1SGerd Hoffmann 
250*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(t3d);
251*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_res_xfer_toh_3d(t3d.resource_id);
252*7d2ad4e1SGerd Hoffmann 
253*7d2ad4e1SGerd Hoffmann     virgl_renderer_transfer_write_iov(t3d.resource_id,
254*7d2ad4e1SGerd Hoffmann                                       t3d.hdr.ctx_id,
255*7d2ad4e1SGerd Hoffmann                                       t3d.level,
256*7d2ad4e1SGerd Hoffmann                                       t3d.stride,
257*7d2ad4e1SGerd Hoffmann                                       t3d.layer_stride,
258*7d2ad4e1SGerd Hoffmann                                       (struct virgl_box *)&t3d.box,
259*7d2ad4e1SGerd Hoffmann                                       t3d.offset, NULL, 0);
260*7d2ad4e1SGerd Hoffmann }
261*7d2ad4e1SGerd Hoffmann 
262*7d2ad4e1SGerd Hoffmann static void
263*7d2ad4e1SGerd Hoffmann virgl_cmd_transfer_from_host_3d(VirtIOGPU *g,
264*7d2ad4e1SGerd Hoffmann                                 struct virtio_gpu_ctrl_command *cmd)
265*7d2ad4e1SGerd Hoffmann {
266*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_transfer_host_3d tf3d;
267*7d2ad4e1SGerd Hoffmann 
268*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(tf3d);
269*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_res_xfer_fromh_3d(tf3d.resource_id);
270*7d2ad4e1SGerd Hoffmann 
271*7d2ad4e1SGerd Hoffmann     virgl_renderer_transfer_read_iov(tf3d.resource_id,
272*7d2ad4e1SGerd Hoffmann                                      tf3d.hdr.ctx_id,
273*7d2ad4e1SGerd Hoffmann                                      tf3d.level,
274*7d2ad4e1SGerd Hoffmann                                      tf3d.stride,
275*7d2ad4e1SGerd Hoffmann                                      tf3d.layer_stride,
276*7d2ad4e1SGerd Hoffmann                                      (struct virgl_box *)&tf3d.box,
277*7d2ad4e1SGerd Hoffmann                                      tf3d.offset, NULL, 0);
278*7d2ad4e1SGerd Hoffmann }
279*7d2ad4e1SGerd Hoffmann 
280*7d2ad4e1SGerd Hoffmann 
281*7d2ad4e1SGerd Hoffmann static void virgl_resource_attach_backing(VirtIOGPU *g,
282*7d2ad4e1SGerd Hoffmann                                           struct virtio_gpu_ctrl_command *cmd)
283*7d2ad4e1SGerd Hoffmann {
284*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_resource_attach_backing att_rb;
285*7d2ad4e1SGerd Hoffmann     struct iovec *res_iovs;
286*7d2ad4e1SGerd Hoffmann     uint32_t res_niov;
287*7d2ad4e1SGerd Hoffmann     int ret;
288*7d2ad4e1SGerd Hoffmann 
289*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(att_rb);
290*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_res_back_attach(att_rb.resource_id);
291*7d2ad4e1SGerd Hoffmann 
292*7d2ad4e1SGerd Hoffmann     ret = virtio_gpu_create_mapping_iov(g, &att_rb, cmd, NULL, &res_iovs, &res_niov);
293*7d2ad4e1SGerd Hoffmann     if (ret != 0) {
294*7d2ad4e1SGerd Hoffmann         cmd->error = VIRTIO_GPU_RESP_ERR_UNSPEC;
295*7d2ad4e1SGerd Hoffmann         return;
296*7d2ad4e1SGerd Hoffmann     }
297*7d2ad4e1SGerd Hoffmann 
298*7d2ad4e1SGerd Hoffmann     ret = virgl_renderer_resource_attach_iov(att_rb.resource_id,
299*7d2ad4e1SGerd Hoffmann                                              res_iovs, res_niov);
300*7d2ad4e1SGerd Hoffmann 
301*7d2ad4e1SGerd Hoffmann     if (ret != 0)
302*7d2ad4e1SGerd Hoffmann         virtio_gpu_cleanup_mapping_iov(g, res_iovs, res_niov);
303*7d2ad4e1SGerd Hoffmann }
304*7d2ad4e1SGerd Hoffmann 
305*7d2ad4e1SGerd Hoffmann static void virgl_resource_detach_backing(VirtIOGPU *g,
306*7d2ad4e1SGerd Hoffmann                                           struct virtio_gpu_ctrl_command *cmd)
307*7d2ad4e1SGerd Hoffmann {
308*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_resource_detach_backing detach_rb;
309*7d2ad4e1SGerd Hoffmann     struct iovec *res_iovs = NULL;
310*7d2ad4e1SGerd Hoffmann     int num_iovs = 0;
311*7d2ad4e1SGerd Hoffmann 
312*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(detach_rb);
313*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_res_back_detach(detach_rb.resource_id);
314*7d2ad4e1SGerd Hoffmann 
315*7d2ad4e1SGerd Hoffmann     virgl_renderer_resource_detach_iov(detach_rb.resource_id,
316*7d2ad4e1SGerd Hoffmann                                        &res_iovs,
317*7d2ad4e1SGerd Hoffmann                                        &num_iovs);
318*7d2ad4e1SGerd Hoffmann     if (res_iovs == NULL || num_iovs == 0) {
319*7d2ad4e1SGerd Hoffmann         return;
320*7d2ad4e1SGerd Hoffmann     }
321*7d2ad4e1SGerd Hoffmann     virtio_gpu_cleanup_mapping_iov(g, res_iovs, num_iovs);
322*7d2ad4e1SGerd Hoffmann }
323*7d2ad4e1SGerd Hoffmann 
324*7d2ad4e1SGerd Hoffmann 
325*7d2ad4e1SGerd Hoffmann static void virgl_cmd_ctx_attach_resource(VirtIOGPU *g,
326*7d2ad4e1SGerd Hoffmann                                           struct virtio_gpu_ctrl_command *cmd)
327*7d2ad4e1SGerd Hoffmann {
328*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_ctx_resource att_res;
329*7d2ad4e1SGerd Hoffmann 
330*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(att_res);
331*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_ctx_res_attach(att_res.hdr.ctx_id,
332*7d2ad4e1SGerd Hoffmann                                         att_res.resource_id);
333*7d2ad4e1SGerd Hoffmann 
334*7d2ad4e1SGerd Hoffmann     virgl_renderer_ctx_attach_resource(att_res.hdr.ctx_id, att_res.resource_id);
335*7d2ad4e1SGerd Hoffmann }
336*7d2ad4e1SGerd Hoffmann 
337*7d2ad4e1SGerd Hoffmann static void virgl_cmd_ctx_detach_resource(VirtIOGPU *g,
338*7d2ad4e1SGerd Hoffmann                                           struct virtio_gpu_ctrl_command *cmd)
339*7d2ad4e1SGerd Hoffmann {
340*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_ctx_resource det_res;
341*7d2ad4e1SGerd Hoffmann 
342*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(det_res);
343*7d2ad4e1SGerd Hoffmann     trace_virtio_gpu_cmd_ctx_res_detach(det_res.hdr.ctx_id,
344*7d2ad4e1SGerd Hoffmann                                         det_res.resource_id);
345*7d2ad4e1SGerd Hoffmann 
346*7d2ad4e1SGerd Hoffmann     virgl_renderer_ctx_detach_resource(det_res.hdr.ctx_id, det_res.resource_id);
347*7d2ad4e1SGerd Hoffmann }
348*7d2ad4e1SGerd Hoffmann 
349*7d2ad4e1SGerd Hoffmann static void virgl_cmd_get_capset_info(VirtIOGPU *g,
350*7d2ad4e1SGerd Hoffmann                                       struct virtio_gpu_ctrl_command *cmd)
351*7d2ad4e1SGerd Hoffmann {
352*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_get_capset_info info;
353*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_resp_capset_info resp;
354*7d2ad4e1SGerd Hoffmann 
355*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(info);
356*7d2ad4e1SGerd Hoffmann 
357*7d2ad4e1SGerd Hoffmann     memset(&resp, 0, sizeof(resp));
358*7d2ad4e1SGerd Hoffmann     if (info.capset_index == 0) {
359*7d2ad4e1SGerd Hoffmann         resp.capset_id = VIRTIO_GPU_CAPSET_VIRGL;
360*7d2ad4e1SGerd Hoffmann         virgl_renderer_get_cap_set(resp.capset_id,
361*7d2ad4e1SGerd Hoffmann                                    &resp.capset_max_version,
362*7d2ad4e1SGerd Hoffmann                                    &resp.capset_max_size);
363*7d2ad4e1SGerd Hoffmann     } else if (info.capset_index == 1) {
364*7d2ad4e1SGerd Hoffmann         resp.capset_id = VIRTIO_GPU_CAPSET_VIRGL2;
365*7d2ad4e1SGerd Hoffmann         virgl_renderer_get_cap_set(resp.capset_id,
366*7d2ad4e1SGerd Hoffmann                                    &resp.capset_max_version,
367*7d2ad4e1SGerd Hoffmann                                    &resp.capset_max_size);
368*7d2ad4e1SGerd Hoffmann     } else {
369*7d2ad4e1SGerd Hoffmann         resp.capset_max_version = 0;
370*7d2ad4e1SGerd Hoffmann         resp.capset_max_size = 0;
371*7d2ad4e1SGerd Hoffmann     }
372*7d2ad4e1SGerd Hoffmann     resp.hdr.type = VIRTIO_GPU_RESP_OK_CAPSET_INFO;
373*7d2ad4e1SGerd Hoffmann     virtio_gpu_ctrl_response(g, cmd, &resp.hdr, sizeof(resp));
374*7d2ad4e1SGerd Hoffmann }
375*7d2ad4e1SGerd Hoffmann 
376*7d2ad4e1SGerd Hoffmann static void virgl_cmd_get_capset(VirtIOGPU *g,
377*7d2ad4e1SGerd Hoffmann                                  struct virtio_gpu_ctrl_command *cmd)
378*7d2ad4e1SGerd Hoffmann {
379*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_get_capset gc;
380*7d2ad4e1SGerd Hoffmann     struct virtio_gpu_resp_capset *resp;
381*7d2ad4e1SGerd Hoffmann     uint32_t max_ver, max_size;
382*7d2ad4e1SGerd Hoffmann     VIRTIO_GPU_FILL_CMD(gc);
383*7d2ad4e1SGerd Hoffmann 
384*7d2ad4e1SGerd Hoffmann     virgl_renderer_get_cap_set(gc.capset_id, &max_ver,
385*7d2ad4e1SGerd Hoffmann                                &max_size);
386*7d2ad4e1SGerd Hoffmann     if (!max_size) {
387*7d2ad4e1SGerd Hoffmann         cmd->error = VIRTIO_GPU_RESP_ERR_INVALID_PARAMETER;
388*7d2ad4e1SGerd Hoffmann         return;
389*7d2ad4e1SGerd Hoffmann     }
390*7d2ad4e1SGerd Hoffmann 
391*7d2ad4e1SGerd Hoffmann     resp = g_malloc0(sizeof(*resp) + max_size);
392*7d2ad4e1SGerd Hoffmann     resp->hdr.type = VIRTIO_GPU_RESP_OK_CAPSET;
393*7d2ad4e1SGerd Hoffmann     virgl_renderer_fill_caps(gc.capset_id,
394*7d2ad4e1SGerd Hoffmann                              gc.capset_version,
395*7d2ad4e1SGerd Hoffmann                              (void *)resp->capset_data);
396*7d2ad4e1SGerd Hoffmann     virtio_gpu_ctrl_response(g, cmd, &resp->hdr, sizeof(*resp) + max_size);
397*7d2ad4e1SGerd Hoffmann     g_free(resp);
398*7d2ad4e1SGerd Hoffmann }
399*7d2ad4e1SGerd Hoffmann 
/*
 * Dispatch one control-queue command to its virgl handler, then send the
 * response: an error response if the handler set cmd->error, an immediate
 * OK_NODATA if no fence was requested, or — for fenced commands — ask the
 * renderer to create a fence; the response is deferred to virgl_write_fence.
 */
void virtio_gpu_virgl_process_cmd(VirtIOGPU *g,
                                      struct virtio_gpu_ctrl_command *cmd)
{
    VIRTIO_GPU_FILL_CMD(cmd->cmd_hdr);

    /* All handlers below run with the renderer's context 0 current. */
    virgl_renderer_force_ctx_0();
    switch (cmd->cmd_hdr.type) {
    case VIRTIO_GPU_CMD_CTX_CREATE:
        virgl_cmd_context_create(g, cmd);
        break;
    case VIRTIO_GPU_CMD_CTX_DESTROY:
        virgl_cmd_context_destroy(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_CREATE_2D:
        virgl_cmd_create_resource_2d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_CREATE_3D:
        virgl_cmd_create_resource_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_SUBMIT_3D:
        virgl_cmd_submit_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_TRANSFER_TO_HOST_2D:
        virgl_cmd_transfer_to_host_2d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_TRANSFER_TO_HOST_3D:
        virgl_cmd_transfer_to_host_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_TRANSFER_FROM_HOST_3D:
        virgl_cmd_transfer_from_host_3d(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_ATTACH_BACKING:
        virgl_resource_attach_backing(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_DETACH_BACKING:
        virgl_resource_detach_backing(g, cmd);
        break;
    case VIRTIO_GPU_CMD_SET_SCANOUT:
        virgl_cmd_set_scanout(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_FLUSH:
        virgl_cmd_resource_flush(g, cmd);
        break;
    case VIRTIO_GPU_CMD_RESOURCE_UNREF:
        virgl_cmd_resource_unref(g, cmd);
        break;
    case VIRTIO_GPU_CMD_CTX_ATTACH_RESOURCE:
        /* TODO add security */
        virgl_cmd_ctx_attach_resource(g, cmd);
        break;
    case VIRTIO_GPU_CMD_CTX_DETACH_RESOURCE:
        /* TODO add security */
        virgl_cmd_ctx_detach_resource(g, cmd);
        break;
    case VIRTIO_GPU_CMD_GET_CAPSET_INFO:
        virgl_cmd_get_capset_info(g, cmd);
        break;
    case VIRTIO_GPU_CMD_GET_CAPSET:
        virgl_cmd_get_capset(g, cmd);
        break;
    case VIRTIO_GPU_CMD_GET_DISPLAY_INFO:
        virtio_gpu_get_display_info(g, cmd);
        break;
    case VIRTIO_GPU_CMD_GET_EDID:
        virtio_gpu_get_edid(g, cmd);
        break;
    default:
        cmd->error = VIRTIO_GPU_RESP_ERR_UNSPEC;
        break;
    }

    /* Handler already responded (cmd->finished set)?  Nothing more to do. */
    if (cmd->finished) {
        return;
    }
    if (cmd->error) {
        fprintf(stderr, "%s: ctrl 0x%x, error 0x%x\n", __func__,
                cmd->cmd_hdr.type, cmd->error);
        virtio_gpu_ctrl_response_nodata(g, cmd, cmd->error);
        return;
    }
    if (!(cmd->cmd_hdr.flags & VIRTIO_GPU_FLAG_FENCE)) {
        virtio_gpu_ctrl_response_nodata(g, cmd, VIRTIO_GPU_RESP_OK_NODATA);
        return;
    }

    /* Fenced command: reply is sent from virgl_write_fence() later. */
    trace_virtio_gpu_fence_ctrl(cmd->cmd_hdr.fence_id, cmd->cmd_hdr.type);
    virgl_renderer_create_fence(cmd->cmd_hdr.fence_id, cmd->cmd_hdr.type);
}
488*7d2ad4e1SGerd Hoffmann 
/*
 * virglrenderer callback: fence `fence` has completed.  Complete and free
 * every queued fenced command whose fence id is <= the signalled value.
 *
 * NOTE(review): `fence` is uint32_t while cmd_hdr.fence_id is wider
 * (virgl_renderer_create_fence is handed the full fence_id above), so the
 * `fence_id > fence` comparison mixes widths — looks like it assumes guests
 * keep fence ids within 32 bits; confirm against the virglrenderer v1
 * callback contract.
 */
static void virgl_write_fence(void *opaque, uint32_t fence)
{
    VirtIOGPU *g = opaque;
    struct virtio_gpu_ctrl_command *cmd, *tmp;

    QTAILQ_FOREACH_SAFE(cmd, &g->fenceq, next, tmp) {
        /*
         * the guest can end up emitting fences out of order
         * so we should check all fenced cmds not just the first one.
         */
        if (cmd->cmd_hdr.fence_id > fence) {
            continue;
        }
        trace_virtio_gpu_fence_resp(cmd->cmd_hdr.fence_id);
        virtio_gpu_ctrl_response_nodata(g, cmd, VIRTIO_GPU_RESP_OK_NODATA);
        QTAILQ_REMOVE(&g->fenceq, cmd, next);
        g_free(cmd);
        g->inflight--;
        if (virtio_gpu_stats_enabled(g->parent_obj.conf)) {
            fprintf(stderr, "inflight: %3d (-)\r", g->inflight);
        }
    }
}
512*7d2ad4e1SGerd Hoffmann 
513*7d2ad4e1SGerd Hoffmann static virgl_renderer_gl_context
514*7d2ad4e1SGerd Hoffmann virgl_create_context(void *opaque, int scanout_idx,
515*7d2ad4e1SGerd Hoffmann                      struct virgl_renderer_gl_ctx_param *params)
516*7d2ad4e1SGerd Hoffmann {
517*7d2ad4e1SGerd Hoffmann     VirtIOGPU *g = opaque;
518*7d2ad4e1SGerd Hoffmann     QEMUGLContext ctx;
519*7d2ad4e1SGerd Hoffmann     QEMUGLParams qparams;
520*7d2ad4e1SGerd Hoffmann 
521*7d2ad4e1SGerd Hoffmann     qparams.major_ver = params->major_ver;
522*7d2ad4e1SGerd Hoffmann     qparams.minor_ver = params->minor_ver;
523*7d2ad4e1SGerd Hoffmann 
524*7d2ad4e1SGerd Hoffmann     ctx = dpy_gl_ctx_create(g->parent_obj.scanout[scanout_idx].con, &qparams);
525*7d2ad4e1SGerd Hoffmann     return (virgl_renderer_gl_context)ctx;
526*7d2ad4e1SGerd Hoffmann }
527*7d2ad4e1SGerd Hoffmann 
528*7d2ad4e1SGerd Hoffmann static void virgl_destroy_context(void *opaque, virgl_renderer_gl_context ctx)
529*7d2ad4e1SGerd Hoffmann {
530*7d2ad4e1SGerd Hoffmann     VirtIOGPU *g = opaque;
531*7d2ad4e1SGerd Hoffmann     QEMUGLContext qctx = (QEMUGLContext)ctx;
532*7d2ad4e1SGerd Hoffmann 
533*7d2ad4e1SGerd Hoffmann     dpy_gl_ctx_destroy(g->parent_obj.scanout[0].con, qctx);
534*7d2ad4e1SGerd Hoffmann }
535*7d2ad4e1SGerd Hoffmann 
536*7d2ad4e1SGerd Hoffmann static int virgl_make_context_current(void *opaque, int scanout_idx,
537*7d2ad4e1SGerd Hoffmann                                       virgl_renderer_gl_context ctx)
538*7d2ad4e1SGerd Hoffmann {
539*7d2ad4e1SGerd Hoffmann     VirtIOGPU *g = opaque;
540*7d2ad4e1SGerd Hoffmann     QEMUGLContext qctx = (QEMUGLContext)ctx;
541*7d2ad4e1SGerd Hoffmann 
542*7d2ad4e1SGerd Hoffmann     return dpy_gl_ctx_make_current(g->parent_obj.scanout[scanout_idx].con,
543*7d2ad4e1SGerd Hoffmann                                    qctx);
544*7d2ad4e1SGerd Hoffmann }
545*7d2ad4e1SGerd Hoffmann 
546*7d2ad4e1SGerd Hoffmann static struct virgl_renderer_callbacks virtio_gpu_3d_cbs = {
547*7d2ad4e1SGerd Hoffmann     .version             = 1,
548*7d2ad4e1SGerd Hoffmann     .write_fence         = virgl_write_fence,
549*7d2ad4e1SGerd Hoffmann     .create_gl_context   = virgl_create_context,
550*7d2ad4e1SGerd Hoffmann     .destroy_gl_context  = virgl_destroy_context,
551*7d2ad4e1SGerd Hoffmann     .make_current        = virgl_make_context_current,
552*7d2ad4e1SGerd Hoffmann };
553*7d2ad4e1SGerd Hoffmann 
554*7d2ad4e1SGerd Hoffmann static void virtio_gpu_print_stats(void *opaque)
555*7d2ad4e1SGerd Hoffmann {
556*7d2ad4e1SGerd Hoffmann     VirtIOGPU *g = opaque;
557*7d2ad4e1SGerd Hoffmann 
558*7d2ad4e1SGerd Hoffmann     if (g->stats.requests) {
559*7d2ad4e1SGerd Hoffmann         fprintf(stderr, "stats: vq req %4d, %3d -- 3D %4d (%5d)\n",
560*7d2ad4e1SGerd Hoffmann                 g->stats.requests,
561*7d2ad4e1SGerd Hoffmann                 g->stats.max_inflight,
562*7d2ad4e1SGerd Hoffmann                 g->stats.req_3d,
563*7d2ad4e1SGerd Hoffmann                 g->stats.bytes_3d);
564*7d2ad4e1SGerd Hoffmann         g->stats.requests     = 0;
565*7d2ad4e1SGerd Hoffmann         g->stats.max_inflight = 0;
566*7d2ad4e1SGerd Hoffmann         g->stats.req_3d       = 0;
567*7d2ad4e1SGerd Hoffmann         g->stats.bytes_3d     = 0;
568*7d2ad4e1SGerd Hoffmann     } else {
569*7d2ad4e1SGerd Hoffmann         fprintf(stderr, "stats: idle\r");
570*7d2ad4e1SGerd Hoffmann     }
571*7d2ad4e1SGerd Hoffmann     timer_mod(g->print_stats, qemu_clock_get_ms(QEMU_CLOCK_VIRTUAL) + 1000);
572*7d2ad4e1SGerd Hoffmann }
573*7d2ad4e1SGerd Hoffmann 
574*7d2ad4e1SGerd Hoffmann static void virtio_gpu_fence_poll(void *opaque)
575*7d2ad4e1SGerd Hoffmann {
576*7d2ad4e1SGerd Hoffmann     VirtIOGPU *g = opaque;
577*7d2ad4e1SGerd Hoffmann 
578*7d2ad4e1SGerd Hoffmann     virgl_renderer_poll();
579*7d2ad4e1SGerd Hoffmann     virtio_gpu_process_cmdq(g);
580*7d2ad4e1SGerd Hoffmann     if (!QTAILQ_EMPTY(&g->cmdq) || !QTAILQ_EMPTY(&g->fenceq)) {
581*7d2ad4e1SGerd Hoffmann         timer_mod(g->fence_poll, qemu_clock_get_ms(QEMU_CLOCK_VIRTUAL) + 10);
582*7d2ad4e1SGerd Hoffmann     }
583*7d2ad4e1SGerd Hoffmann }
584*7d2ad4e1SGerd Hoffmann 
585*7d2ad4e1SGerd Hoffmann void virtio_gpu_virgl_fence_poll(VirtIOGPU *g)
586*7d2ad4e1SGerd Hoffmann {
587*7d2ad4e1SGerd Hoffmann     virtio_gpu_fence_poll(g);
588*7d2ad4e1SGerd Hoffmann }
589*7d2ad4e1SGerd Hoffmann 
590*7d2ad4e1SGerd Hoffmann void virtio_gpu_virgl_reset(VirtIOGPU *g)
591*7d2ad4e1SGerd Hoffmann {
592*7d2ad4e1SGerd Hoffmann     int i;
593*7d2ad4e1SGerd Hoffmann 
594*7d2ad4e1SGerd Hoffmann     virgl_renderer_reset();
595*7d2ad4e1SGerd Hoffmann     for (i = 0; i < g->parent_obj.conf.max_outputs; i++) {
596*7d2ad4e1SGerd Hoffmann         dpy_gfx_replace_surface(g->parent_obj.scanout[i].con, NULL);
597*7d2ad4e1SGerd Hoffmann         dpy_gl_scanout_disable(g->parent_obj.scanout[i].con);
598*7d2ad4e1SGerd Hoffmann     }
599*7d2ad4e1SGerd Hoffmann }
600*7d2ad4e1SGerd Hoffmann 
601*7d2ad4e1SGerd Hoffmann int virtio_gpu_virgl_init(VirtIOGPU *g)
602*7d2ad4e1SGerd Hoffmann {
603*7d2ad4e1SGerd Hoffmann     int ret;
604*7d2ad4e1SGerd Hoffmann 
605*7d2ad4e1SGerd Hoffmann     ret = virgl_renderer_init(g, 0, &virtio_gpu_3d_cbs);
606*7d2ad4e1SGerd Hoffmann     if (ret != 0) {
607*7d2ad4e1SGerd Hoffmann         return ret;
608*7d2ad4e1SGerd Hoffmann     }
609*7d2ad4e1SGerd Hoffmann 
610*7d2ad4e1SGerd Hoffmann     g->fence_poll = timer_new_ms(QEMU_CLOCK_VIRTUAL,
611*7d2ad4e1SGerd Hoffmann                                  virtio_gpu_fence_poll, g);
612*7d2ad4e1SGerd Hoffmann 
613*7d2ad4e1SGerd Hoffmann     if (virtio_gpu_stats_enabled(g->parent_obj.conf)) {
614*7d2ad4e1SGerd Hoffmann         g->print_stats = timer_new_ms(QEMU_CLOCK_VIRTUAL,
615*7d2ad4e1SGerd Hoffmann                                       virtio_gpu_print_stats, g);
616*7d2ad4e1SGerd Hoffmann         timer_mod(g->print_stats, qemu_clock_get_ms(QEMU_CLOCK_VIRTUAL) + 1000);
617*7d2ad4e1SGerd Hoffmann     }
618*7d2ad4e1SGerd Hoffmann     return 0;
619*7d2ad4e1SGerd Hoffmann }
620*7d2ad4e1SGerd Hoffmann 
621*7d2ad4e1SGerd Hoffmann int virtio_gpu_virgl_get_num_capsets(VirtIOGPU *g)
622*7d2ad4e1SGerd Hoffmann {
623*7d2ad4e1SGerd Hoffmann     uint32_t capset2_max_ver, capset2_max_size;
624*7d2ad4e1SGerd Hoffmann     virgl_renderer_get_cap_set(VIRTIO_GPU_CAPSET_VIRGL2,
625*7d2ad4e1SGerd Hoffmann                               &capset2_max_ver,
626*7d2ad4e1SGerd Hoffmann                               &capset2_max_size);
627*7d2ad4e1SGerd Hoffmann 
628*7d2ad4e1SGerd Hoffmann     return capset2_max_ver ? 2 : 1;
629*7d2ad4e1SGerd Hoffmann }
630