/*	$NetBSD: vmwgfx_binding.c,v 1.3 2021/12/18 23:45:45 riastradh Exp $	*/

// SPDX-License-Identifier: GPL-2.0 OR MIT
/**************************************************************************
 *
 * Copyright 2015 VMware, Inc., Palo Alto, CA., USA
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/
/*
 * This file implements the vmwgfx context binding manager.
 * The sole reason for having to use this code is that vmware guest
 * backed contexts can be swapped out to their backing mobs by the device
 * at any time, and swapped in again at any time. At swapin time, the device
 * validates the context bindings to make sure they point to valid resources.
 * It's this outside-of-drawcall validation (that can happen at any time)
 * that makes this code necessary.
 *
 * We therefore need to kill any context bindings pointing to a resource
 * when the resource is swapped out. Furthermore, if the vmwgfx driver has
 * swapped out the context we can't swap it in again to kill bindings because
 * of backing mob reservation lockdep violations, so as part of
 * context swapout, we also kill all bindings of a context, so that they are
 * already killed if a resource to which a binding points
 * needs to be swapped out.
 *
 * Note that a resource can be pointed to by bindings from multiple contexts.
 * Therefore we can't easily protect this data by a per-context mutex
 * (unless we use deadlock-safe WW mutexes). So we use a global binding_mutex
 * to protect all binding manager data.
 *
 * Finally, any association between a context and a global resource
 * (surface, shader or even DX query) is conceptually a context binding that
 * needs to be tracked by this code.
 */

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: vmwgfx_binding.c,v 1.3 2021/12/18 23:45:45 riastradh Exp $");

#include "vmwgfx_drv.h"
#include "vmwgfx_binding.h"
#include "device_include/svga3d_reg.h"

#define VMW_BINDING_RT_BIT     0
#define VMW_BINDING_PS_BIT     1
#define VMW_BINDING_SO_BIT     2
#define VMW_BINDING_VB_BIT     3
#define VMW_BINDING_NUM_BITS   4

#define VMW_BINDING_PS_SR_BIT  0

/**
 * struct vmw_ctx_binding_state - per context binding state
 *
 * @dev_priv: Pointer to device private structure.
 * @list: linked list of individual active bindings.
 * @render_targets: Render target bindings.
 * @texture_units: Texture units bindings.
 * @ds_view: Depth-stencil view binding.
 * @so_targets: StreamOutput target bindings.
 * @vertex_buffers: Vertex buffer bindings.
 * @index_buffer: Index buffer binding.
 * @per_shader: Per shader-type bindings.
 * @dirty: Bitmap tracking per binding-type changes that have not yet
 * been emitted to the device.
 * @dirty_vb: Bitmap tracking individual vertex buffer binding changes that
 * have not yet been emitted to the device.
 * @bind_cmd_buffer: Scratch space used to construct binding commands.
 * @bind_cmd_count: Number of binding command data entries in @bind_cmd_buffer
 * @bind_first_slot: Used together with @bind_cmd_buffer to indicate the
 * device binding slot of the first command data entry in @bind_cmd_buffer.
 *
 * Note that this structure also provides storage space for the individual
 * struct vmw_ctx_binding objects, so that no dynamic allocation is needed
 * for individual bindings.
 *
 */
struct vmw_ctx_binding_state {
	struct vmw_private *dev_priv;
	struct list_head list;
	struct vmw_ctx_bindinfo_view render_targets[SVGA3D_RT_MAX];
	struct vmw_ctx_bindinfo_tex texture_units[SVGA3D_NUM_TEXTURE_UNITS];
	struct vmw_ctx_bindinfo_view ds_view;
	struct vmw_ctx_bindinfo_so so_targets[SVGA3D_DX_MAX_SOTARGETS];
	struct vmw_ctx_bindinfo_vb vertex_buffers[SVGA3D_DX_MAX_VERTEXBUFFERS];
	struct vmw_ctx_bindinfo_ib index_buffer;
	struct vmw_dx_shader_bindings per_shader[SVGA3D_NUM_SHADERTYPE_DX10];

	unsigned long dirty;
	DECLARE_BITMAP(dirty_vb, SVGA3D_DX_MAX_VERTEXBUFFERS);

	u32 bind_cmd_buffer[VMW_MAX_VIEW_BINDINGS];
	u32 bind_cmd_count;
	u32 bind_first_slot;
};

static int vmw_binding_scrub_shader(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_render_target(struct vmw_ctx_bindinfo *bi,
					   bool rebind);
static int vmw_binding_scrub_texture(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_cb(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_dx_rt(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_sr(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_so(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_emit_dirty(struct vmw_ctx_binding_state *cbs);
static int vmw_binding_scrub_dx_shader(struct vmw_ctx_bindinfo *bi,
				       bool rebind);
static int vmw_binding_scrub_ib(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_vb(struct vmw_ctx_bindinfo *bi, bool rebind);
static void vmw_binding_build_asserts(void) __attribute__ ((unused));

typedef int (*vmw_scrub_func)(struct vmw_ctx_bindinfo *, bool);

/**
 * struct vmw_binding_info - Per binding type information for the binding
 * manager
 *
 * @size: The size of the struct binding derived from a struct vmw_ctx_bindinfo.
 * @offsets: array[shader_slot] of offsets to the array[slot]
 * of struct bindings for the binding type.
 * @scrub_func: Pointer to the scrub function for this binding type.
 *
 * Holds static information to help optimize the binding manager and avoid
 * an excessive amount of switch statements.
 */
struct vmw_binding_info {
	size_t size;
	const size_t *offsets;
	vmw_scrub_func scrub_func;
};

/*
 * A number of static variables that help determine the scrub func and the
 * location of the struct vmw_ctx_bindinfo slots for each binding type.
 */
static const size_t vmw_binding_shader_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, per_shader[0].shader),
	offsetof(struct vmw_ctx_binding_state, per_shader[1].shader),
	offsetof(struct vmw_ctx_binding_state, per_shader[2].shader),
};
static const size_t vmw_binding_rt_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, render_targets),
};
static const size_t vmw_binding_tex_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, texture_units),
};
static const size_t vmw_binding_cb_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, per_shader[0].const_buffers),
	offsetof(struct vmw_ctx_binding_state, per_shader[1].const_buffers),
	offsetof(struct vmw_ctx_binding_state, per_shader[2].const_buffers),
};
static const size_t vmw_binding_dx_ds_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, ds_view),
};
static const size_t vmw_binding_sr_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, per_shader[0].shader_res),
	offsetof(struct vmw_ctx_binding_state, per_shader[1].shader_res),
	offsetof(struct vmw_ctx_binding_state, per_shader[2].shader_res),
};
static const size_t vmw_binding_so_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, so_targets),
};
static const size_t vmw_binding_vb_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, vertex_buffers),
};
static const size_t vmw_binding_ib_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, index_buffer),
};

static const struct vmw_binding_info vmw_binding_infos[] = {
	[vmw_ctx_binding_shader] = {
		.size = sizeof(struct vmw_ctx_bindinfo_shader),
		.offsets = vmw_binding_shader_offsets,
		.scrub_func = vmw_binding_scrub_shader},
	[vmw_ctx_binding_rt] = {
		.size = sizeof(struct vmw_ctx_bindinfo_view),
		.offsets = vmw_binding_rt_offsets,
		.scrub_func = vmw_binding_scrub_render_target},
	[vmw_ctx_binding_tex] = {
		.size = sizeof(struct vmw_ctx_bindinfo_tex),
		.offsets = vmw_binding_tex_offsets,
		.scrub_func = vmw_binding_scrub_texture},
	[vmw_ctx_binding_cb] = {
		.size = sizeof(struct vmw_ctx_bindinfo_cb),
		.offsets = vmw_binding_cb_offsets,
		.scrub_func = vmw_binding_scrub_cb},
	[vmw_ctx_binding_dx_shader] = {
		.size = sizeof(struct vmw_ctx_bindinfo_shader),
		.offsets = vmw_binding_shader_offsets,
		.scrub_func = vmw_binding_scrub_dx_shader},
	[vmw_ctx_binding_dx_rt] = {
		.size = sizeof(struct vmw_ctx_bindinfo_view),
		.offsets = vmw_binding_rt_offsets,
		.scrub_func = vmw_binding_scrub_dx_rt},
	[vmw_ctx_binding_sr] = {
		.size = sizeof(struct vmw_ctx_bindinfo_view),
		.offsets = vmw_binding_sr_offsets,
		.scrub_func = vmw_binding_scrub_sr},
	[vmw_ctx_binding_ds] = {
		.size = sizeof(struct vmw_ctx_bindinfo_view),
		.offsets = vmw_binding_dx_ds_offsets,
		.scrub_func = vmw_binding_scrub_dx_rt},
	[vmw_ctx_binding_so] = {
		.size = sizeof(struct vmw_ctx_bindinfo_so),
		.offsets = vmw_binding_so_offsets,
		.scrub_func = vmw_binding_scrub_so},
	[vmw_ctx_binding_vb] = {
		.size = sizeof(struct vmw_ctx_bindinfo_vb),
		.offsets = vmw_binding_vb_offsets,
		.scrub_func = vmw_binding_scrub_vb},
	[vmw_ctx_binding_ib] = {
		.size = sizeof(struct vmw_ctx_bindinfo_ib),
		.offsets = vmw_binding_ib_offsets,
		.scrub_func = vmw_binding_scrub_ib},
};

/**
 * vmw_cbs_context - Return a pointer to the context resource of a
 * context binding state tracker.
 *
 * @cbs: The context binding state tracker.
 *
 * Provided there are any active bindings, this function will return an
 * unreferenced pointer to the context resource that owns the context
 * binding state tracker. If there are no active bindings, this function
 * will return NULL. Note that the caller must somehow ensure that a reference
 * is held on the context resource prior to calling this function.
 */
static const struct vmw_resource *
vmw_cbs_context(const struct vmw_ctx_binding_state *cbs)
{
	if (list_empty(&cbs->list))
		return NULL;

	return list_first_entry(&cbs->list, struct vmw_ctx_bindinfo,
				ctx_list)->ctx;
}

/**
 * vmw_binding_loc - determine the struct vmw_ctx_bindinfo slot location.
 *
 * @cbs: Pointer to a struct vmw_ctx_binding_state which holds the slot.
 * @bt: The binding type.
 * @shader_slot: The shader slot of the binding. If none, then set to 0.
 * @slot: The slot of the binding.
 */
static struct vmw_ctx_bindinfo *
vmw_binding_loc(struct vmw_ctx_binding_state *cbs,
		enum vmw_ctx_binding_type bt, u32 shader_slot, u32 slot)
{
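	/*
	 * Each binding type has a table of byte offsets into the binding
	 * state, indexed by shader slot; the binding's own slot then indexes
	 * the per-type slot array by that type's element size.
	 */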
	const struct vmw_binding_info *b = &vmw_binding_infos[bt];
	size_t offset = b->offsets[shader_slot] + b->size*slot;

	return (struct vmw_ctx_bindinfo *)((u8 *) cbs + offset);
}

/**
 * vmw_binding_drop: Stop tracking a context binding
 *
 * @bi: Pointer to binding tracker storage.
 *
 * Stops tracking a context binding, and re-initializes its storage.
 * Typically used when the context binding is replaced with a binding to
 * another (or the same, for that matter) resource.
 */
static void vmw_binding_drop(struct vmw_ctx_bindinfo *bi)
{
	list_del(&bi->ctx_list);
	if (!list_empty(&bi->res_list))
		list_del(&bi->res_list);
	bi->ctx = NULL;
}

/**
 * vmw_binding_add: Start tracking a context binding
 *
 * @cbs: Pointer to the context binding state tracker.
 * @bi: Information about the binding to track.
 * @shader_slot: The shader slot of the binding.
 * @slot: The slot of the binding.
 *
 * Starts tracking the binding in the context binding
 * state structure @cbs.
 */
void vmw_binding_add(struct vmw_ctx_binding_state *cbs,
		    const struct vmw_ctx_bindinfo *bi,
		    u32 shader_slot, u32 slot)
{
	struct vmw_ctx_bindinfo *loc =
		vmw_binding_loc(cbs, bi->bt, shader_slot, slot);
	const struct vmw_binding_info *b = &vmw_binding_infos[bi->bt];

	if (loc->ctx != NULL)
		vmw_binding_drop(loc);

	memcpy(loc, bi, b->size);
	loc->scrubbed = false;
	list_add(&loc->ctx_list, &cbs->list);
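	/*
	 * The binding is not linked to the resource's binding_head here;
	 * that happens when the staged state is committed, in
	 * vmw_binding_transfer().
	 */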
	INIT_LIST_HEAD(&loc->res_list);
}

/**
 * vmw_binding_transfer: Transfer a context binding tracking entry.
 *
 * @cbs: Pointer to the persistent context binding state tracker.
 * @from: Staged context binding state tracker the entry is transferred from.
 * @bi: Information about the binding to track.
 */
static void vmw_binding_transfer(struct vmw_ctx_binding_state *cbs,
				 const struct vmw_ctx_binding_state *from,
				 const struct vmw_ctx_bindinfo *bi)
{
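	/*
	 * The staged (@from) and persistent (@cbs) states share the same
	 * layout, so the entry's byte offset within @from identifies the
	 * corresponding slot in @cbs.
	 */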
	size_t offset = (unsigned long)bi - (unsigned long)from;
	struct vmw_ctx_bindinfo *loc = (struct vmw_ctx_bindinfo *)
		((unsigned long) cbs + offset);

	if (loc->ctx != NULL) {
		WARN_ON(bi->scrubbed);

		vmw_binding_drop(loc);
	}

	if (bi->res != NULL) {
		memcpy(loc, bi, vmw_binding_infos[bi->bt].size);
		list_add_tail(&loc->ctx_list, &cbs->list);
		list_add_tail(&loc->res_list, &loc->res->binding_head);
	}
}

/**
 * vmw_binding_state_kill - Kill all bindings associated with a
 * struct vmw_ctx_binding_state structure, and re-initialize the structure.
 *
 * @cbs: Pointer to the context binding state tracker.
 *
 * Emits commands to scrub all bindings associated with the
 * context binding state tracker. Then re-initializes the whole structure.
 */
void vmw_binding_state_kill(struct vmw_ctx_binding_state *cbs)
{
	struct vmw_ctx_bindinfo *entry, *next;

	vmw_binding_state_scrub(cbs);
	list_for_each_entry_safe(entry, next, &cbs->list, ctx_list)
		vmw_binding_drop(entry);
}

/**
 * vmw_binding_state_scrub - Scrub all bindings associated with a
 * struct vmw_ctx_binding_state structure.
 *
 * @cbs: Pointer to the context binding state tracker.
 *
 * Emits commands to scrub all bindings associated with the
 * context binding state tracker.
 */
void vmw_binding_state_scrub(struct vmw_ctx_binding_state *cbs)
{
	struct vmw_ctx_bindinfo *entry;

	list_for_each_entry(entry, &cbs->list, ctx_list) {
		if (!entry->scrubbed) {
			(void) vmw_binding_infos[entry->bt].scrub_func
				(entry, false);
			entry->scrubbed = true;
		}
	}

	(void) vmw_binding_emit_dirty(cbs);
}

/**
 * vmw_binding_res_list_kill - Kill all bindings on a
 * resource binding list
 *
 * @head: list head of resource binding list
 *
 * Kills all bindings associated with a specific resource. Typically
 * called before the resource is destroyed.
 */
void vmw_binding_res_list_kill(struct list_head *head)
{
	struct vmw_ctx_bindinfo *entry, *next;

	vmw_binding_res_list_scrub(head);
	list_for_each_entry_safe(entry, next, head, res_list)
		vmw_binding_drop(entry);
}

/**
 * vmw_binding_res_list_scrub - Scrub all bindings on a
 * resource binding list
 *
 * @head: list head of resource binding list
 *
 * Scrub all bindings associated with a specific resource. Typically
 * called before the resource is evicted.
 */
void vmw_binding_res_list_scrub(struct list_head *head)
{
	struct vmw_ctx_bindinfo *entry;

	list_for_each_entry(entry, head, res_list) {
		if (!entry->scrubbed) {
			(void) vmw_binding_infos[entry->bt].scrub_func
				(entry, false);
			entry->scrubbed = true;
		}
	}

	list_for_each_entry(entry, head, res_list) {
		struct vmw_ctx_binding_state *cbs =
			vmw_context_binding_state(entry->ctx);

		(void) vmw_binding_emit_dirty(cbs);
	}
}


/**
 * vmw_binding_state_commit - Commit staged binding info
 *
 * @to: Pointer to the persistent binding state tracker to commit the
 * staged binding info to.
 * @from: Staged binding info built during execbuf.
 *
 * Transfers binding info from a temporary structure
 * (typically used by execbuf) to the persistent
 * structure in the context. This can be done once commands have been
 * submitted to hardware.
 */
void vmw_binding_state_commit(struct vmw_ctx_binding_state *to,
			      struct vmw_ctx_binding_state *from)
{
	struct vmw_ctx_bindinfo *entry, *next;

	list_for_each_entry_safe(entry, next, &from->list, ctx_list) {
		vmw_binding_transfer(to, from, entry);
		vmw_binding_drop(entry);
	}
}

/**
 * vmw_binding_rebind_all - Rebind all scrubbed bindings of a context
 *
 * @cbs: Pointer to the context binding state tracker.
 *
 * Walks through the context binding list and rebinds all scrubbed
 * resources.
 */
int vmw_binding_rebind_all(struct vmw_ctx_binding_state *cbs)
{
	struct vmw_ctx_bindinfo *entry;
	int ret;

	list_for_each_entry(entry, &cbs->list, ctx_list) {
		if (likely(!entry->scrubbed))
			continue;

		if ((entry->res == NULL || entry->res->id ==
			    SVGA3D_INVALID_ID))
			continue;

		ret = vmw_binding_infos[entry->bt].scrub_func(entry, true);
		if (unlikely(ret != 0))
			return ret;

		entry->scrubbed = false;
	}

	return vmw_binding_emit_dirty(cbs);
}

/**
 * vmw_binding_scrub_shader - scrub a shader binding from a context.
 *
 * @bi: single binding information.
 * @rebind: Whether to issue a bind instead of scrub command.
 */
static int vmw_binding_scrub_shader(struct vmw_ctx_bindinfo *bi, bool rebind)
{
	struct vmw_ctx_bindinfo_shader *binding =
		container_of(bi, typeof(*binding), bi);
	struct vmw_private *dev_priv = bi->ctx->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdSetShader body;
	} *cmd;

	cmd = VMW_FIFO_RESERVE(dev_priv, sizeof(*cmd));
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_SET_SHADER;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.cid = bi->ctx->id;
	cmd->body.type = binding->shader_slot + SVGA3D_SHADERTYPE_MIN;
	cmd->body.shid = ((rebind) ? bi->res->id : SVGA3D_INVALID_ID);
	vmw_fifo_commit(dev_priv, sizeof(*cmd));

	return 0;
}

/**
 * vmw_binding_scrub_render_target - scrub a render target binding
 * from a context.
 *
 * @bi: single binding information.
 * @rebind: Whether to issue a bind instead of scrub command.
 */
static int vmw_binding_scrub_render_target(struct vmw_ctx_bindinfo *bi,
					   bool rebind)
{
	struct vmw_ctx_bindinfo_view *binding =
		container_of(bi, typeof(*binding), bi);
	struct vmw_private *dev_priv = bi->ctx->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdSetRenderTarget body;
	} *cmd;

	cmd = VMW_FIFO_RESERVE(dev_priv, sizeof(*cmd));
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_SETRENDERTARGET;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.cid = bi->ctx->id;
	cmd->body.type = binding->slot;
	cmd->body.target.sid = ((rebind) ? bi->res->id : SVGA3D_INVALID_ID);
	cmd->body.target.face = 0;
	cmd->body.target.mipmap = 0;
	vmw_fifo_commit(dev_priv, sizeof(*cmd));

	return 0;
}

/**
 * vmw_binding_scrub_texture - scrub a texture binding from a context.
 *
 * @bi: single binding information.
 * @rebind: Whether to issue a bind instead of scrub command.
 *
 * TODO: Possibly complement this function with a function that takes
 * a list of texture bindings and combines them to a single command.
 */
static int vmw_binding_scrub_texture(struct vmw_ctx_bindinfo *bi,
				     bool rebind)
{
	struct vmw_ctx_bindinfo_tex *binding =
		container_of(bi, typeof(*binding), bi);
	struct vmw_private *dev_priv = bi->ctx->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		struct {
			SVGA3dCmdSetTextureState c;
			SVGA3dTextureState s1;
		} body;
	} *cmd;

	cmd = VMW_FIFO_RESERVE(dev_priv, sizeof(*cmd));
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_SETTEXTURESTATE;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.c.cid = bi->ctx->id;
	cmd->body.s1.stage = binding->texture_stage;
	cmd->body.s1.name = SVGA3D_TS_BIND_TEXTURE;
	cmd->body.s1.value = ((rebind) ? bi->res->id : SVGA3D_INVALID_ID);
	vmw_fifo_commit(dev_priv, sizeof(*cmd));

	return 0;
}

/**
 * vmw_binding_scrub_dx_shader - scrub a dx shader binding from a context.
 *
 * @bi: single binding information.
 * @rebind: Whether to issue a bind instead of scrub command.
 */
static int vmw_binding_scrub_dx_shader(struct vmw_ctx_bindinfo *bi, bool rebind)
{
	struct vmw_ctx_bindinfo_shader *binding =
		container_of(bi, typeof(*binding), bi);
	struct vmw_private *dev_priv = bi->ctx->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetShader body;
	} *cmd;

	cmd = VMW_FIFO_RESERVE_DX(dev_priv, sizeof(*cmd), bi->ctx->id);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_SHADER;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.type = binding->shader_slot + SVGA3D_SHADERTYPE_MIN;
	cmd->body.shaderId = ((rebind) ? bi->res->id : SVGA3D_INVALID_ID);
	vmw_fifo_commit(dev_priv, sizeof(*cmd));

	return 0;
}

/**
 * vmw_binding_scrub_cb - scrub a constant buffer binding from a context.
 *
 * @bi: single binding information.
 * @rebind: Whether to issue a bind instead of scrub command.
 */
static int vmw_binding_scrub_cb(struct vmw_ctx_bindinfo *bi, bool rebind)
{
	struct vmw_ctx_bindinfo_cb *binding =
		container_of(bi, typeof(*binding), bi);
	struct vmw_private *dev_priv = bi->ctx->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetSingleConstantBuffer body;
	} *cmd;

	cmd = VMW_FIFO_RESERVE_DX(dev_priv, sizeof(*cmd), bi->ctx->id);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_SINGLE_CONSTANT_BUFFER;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.slot = binding->slot;
	cmd->body.type = binding->shader_slot + SVGA3D_SHADERTYPE_MIN;
	if (rebind) {
		cmd->body.offsetInBytes = binding->offset;
		cmd->body.sizeInBytes = binding->size;
		cmd->body.sid = bi->res->id;
	} else {
		cmd->body.offsetInBytes = 0;
		cmd->body.sizeInBytes = 0;
		cmd->body.sid = SVGA3D_INVALID_ID;
	}
	vmw_fifo_commit(dev_priv, sizeof(*cmd));

	return 0;
}

/**
 * vmw_collect_view_ids - Build view id data for a view binding command
 * without checking which bindings actually need to be emitted
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 * @bi: Pointer to where the binding info array is stored in @cbs
 * @max_num: Maximum number of entries in the @bi array.
 *
 * Scans the @bi array for bindings and builds a buffer of view id data.
 * Stops at the first non-existing binding in the @bi array.
 * On output, @cbs->bind_cmd_count contains the number of bindings to be
 * emitted, @cbs->bind_first_slot is set to zero, and @cbs->bind_cmd_buffer
 * contains the command data.
 */
static void vmw_collect_view_ids(struct vmw_ctx_binding_state *cbs,
				 const struct vmw_ctx_bindinfo *bi,
				 u32 max_num)
{
	const struct vmw_ctx_bindinfo_view *biv =
		container_of(bi, struct vmw_ctx_bindinfo_view, bi);
	unsigned long i;

	cbs->bind_cmd_count = 0;
	cbs->bind_first_slot = 0;

	for (i = 0; i < max_num; ++i, ++biv) {
		if (!biv->bi.ctx)
			break;

		cbs->bind_cmd_buffer[cbs->bind_cmd_count++] =
			((biv->bi.scrubbed) ?
			 SVGA3D_INVALID_ID : biv->bi.res->id);
	}
}

/**
 * vmw_collect_dirty_view_ids - Build view id data for a view binding command
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 * @bi: Pointer to where the binding info array is stored in @cbs
 * @dirty: Bitmap indicating which bindings need to be emitted.
 * @max_num: Maximum number of entries in the @bi array.
 *
 * Scans the @bi array for bindings that need to be emitted and
 * builds a buffer of view id data.
 * On output, @cbs->bind_cmd_count contains the number of bindings to be
 * emitted, @cbs->bind_first_slot indicates the index of the first emitted
 * binding, and @cbs->bind_cmd_buffer contains the command data.
 */
static void vmw_collect_dirty_view_ids(struct vmw_ctx_binding_state *cbs,
				       const struct vmw_ctx_bindinfo *bi,
				       unsigned long *dirty,
				       u32 max_num)
{
	const struct vmw_ctx_bindinfo_view *biv =
		container_of(bi, struct vmw_ctx_bindinfo_view, bi);
	unsigned long i, next_bit;

	cbs->bind_cmd_count = 0;
	i = find_first_bit(dirty, max_num);
	next_bit = i;
	cbs->bind_first_slot = i;

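	/*
	 * Emit one contiguous range: start at the first dirty slot and stop
	 * after the last dirty one, re-emitting any clean slots that happen
	 * to lie in between.
	 */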
	biv += i;
	for (; i < max_num; ++i, ++biv) {
		cbs->bind_cmd_buffer[cbs->bind_cmd_count++] =
			((!biv->bi.ctx || biv->bi.scrubbed) ?
			 SVGA3D_INVALID_ID : biv->bi.res->id);

		if (next_bit == i) {
			next_bit = find_next_bit(dirty, max_num, i + 1);
			if (next_bit >= max_num)
				break;
		}
	}
}

/**
 * vmw_emit_set_sr - Issue delayed DX shader resource binding commands
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 * @shader_slot: The shader slot of the binding.
 */
static int vmw_emit_set_sr(struct vmw_ctx_binding_state *cbs,
			   int shader_slot)
{
	const struct vmw_ctx_bindinfo *loc =
		&cbs->per_shader[shader_slot].shader_res[0].bi;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetShaderResources body;
	} *cmd;
	size_t cmd_size, view_id_size;
	const struct vmw_resource *ctx = vmw_cbs_context(cbs);

	vmw_collect_dirty_view_ids(cbs, loc,
				   cbs->per_shader[shader_slot].dirty_sr,
				   SVGA3D_DX_MAX_SRVIEWS);
	if (cbs->bind_cmd_count == 0)
		return 0;

	view_id_size = cbs->bind_cmd_count*sizeof(uint32);
	cmd_size = sizeof(*cmd) + view_id_size;
	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_SHADER_RESOURCES;
	cmd->header.size = sizeof(cmd->body) + view_id_size;
	cmd->body.type = shader_slot + SVGA3D_SHADERTYPE_MIN;
	cmd->body.startView = cbs->bind_first_slot;

	memcpy(&cmd[1], cbs->bind_cmd_buffer, view_id_size);

	vmw_fifo_commit(ctx->dev_priv, cmd_size);
	bitmap_clear(cbs->per_shader[shader_slot].dirty_sr,
		     cbs->bind_first_slot, cbs->bind_cmd_count);

	return 0;
}

/**
 * vmw_emit_set_rt - Issue delayed DX rendertarget binding commands
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 */
static int vmw_emit_set_rt(struct vmw_ctx_binding_state *cbs)
{
	const struct vmw_ctx_bindinfo *loc = &cbs->render_targets[0].bi;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetRenderTargets body;
	} *cmd;
	size_t cmd_size, view_id_size;
	const struct vmw_resource *ctx = vmw_cbs_context(cbs);

	vmw_collect_view_ids(cbs, loc, SVGA3D_MAX_SIMULTANEOUS_RENDER_TARGETS);
	view_id_size = cbs->bind_cmd_count*sizeof(uint32);
	cmd_size = sizeof(*cmd) + view_id_size;
	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_RENDERTARGETS;
	cmd->header.size = sizeof(cmd->body) + view_id_size;

	if (cbs->ds_view.bi.ctx && !cbs->ds_view.bi.scrubbed)
		cmd->body.depthStencilViewId = cbs->ds_view.bi.res->id;
	else
		cmd->body.depthStencilViewId = SVGA3D_INVALID_ID;

	memcpy(&cmd[1], cbs->bind_cmd_buffer, view_id_size);

	vmw_fifo_commit(ctx->dev_priv, cmd_size);

	return 0;

}

/**
 * vmw_collect_so_targets - Build SVGA3dSoTarget data for a binding command
 * without checking which bindings actually need to be emitted
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 * @bi: Pointer to where the binding info array is stored in @cbs
 * @max_num: Maximum number of entries in the @bi array.
 *
 * Scans the @bi array for bindings and builds a buffer of SVGA3dSoTarget data.
 * Stops at the first non-existing binding in the @bi array.
 * On output, @cbs->bind_cmd_count contains the number of bindings to be
 * emitted, @cbs->bind_first_slot is set to zero, and @cbs->bind_cmd_buffer
 * contains the command data.
 */
static void vmw_collect_so_targets(struct vmw_ctx_binding_state *cbs,
				   const struct vmw_ctx_bindinfo *bi,
				   u32 max_num)
{
	const struct vmw_ctx_bindinfo_so *biso =
		container_of(bi, struct vmw_ctx_bindinfo_so, bi);
	unsigned long i;
	SVGA3dSoTarget *so_buffer = (SVGA3dSoTarget *) cbs->bind_cmd_buffer;

	cbs->bind_cmd_count = 0;
	cbs->bind_first_slot = 0;

	for (i = 0; i < max_num; ++i, ++biso, ++so_buffer,
		    ++cbs->bind_cmd_count) {
		if (!biso->bi.ctx)
			break;

		if (!biso->bi.scrubbed) {
			so_buffer->sid = biso->bi.res->id;
			so_buffer->offset = biso->offset;
			so_buffer->sizeInBytes = biso->size;
		} else {
			so_buffer->sid = SVGA3D_INVALID_ID;
			so_buffer->offset = 0;
			so_buffer->sizeInBytes = 0;
		}
	}
}

/**
 * vmw_emit_set_so - Issue delayed streamout binding commands
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 */
static int vmw_emit_set_so(struct vmw_ctx_binding_state *cbs)
{
	const struct vmw_ctx_bindinfo *loc = &cbs->so_targets[0].bi;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetSOTargets body;
	} *cmd;
	size_t cmd_size, so_target_size;
	const struct vmw_resource *ctx = vmw_cbs_context(cbs);

	vmw_collect_so_targets(cbs, loc, SVGA3D_DX_MAX_SOTARGETS);
	if (cbs->bind_cmd_count == 0)
		return 0;

	so_target_size = cbs->bind_cmd_count*sizeof(SVGA3dSoTarget);
	cmd_size = sizeof(*cmd) + so_target_size;
	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_SOTARGETS;
	cmd->header.size = sizeof(cmd->body) + so_target_size;
	memcpy(&cmd[1], cbs->bind_cmd_buffer, so_target_size);

	vmw_fifo_commit(ctx->dev_priv, cmd_size);

	return 0;

}

/**
 * vmw_binding_emit_dirty_ps - Issue delayed per shader binding commands
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 *
 */
static int vmw_binding_emit_dirty_ps(struct vmw_ctx_binding_state *cbs)
{
	struct vmw_dx_shader_bindings *sb = &cbs->per_shader[0];
	u32 i;
	int ret;

	for (i = 0; i < SVGA3D_NUM_SHADERTYPE_DX10; ++i, ++sb) {
		if (!test_bit(VMW_BINDING_PS_SR_BIT, &sb->dirty))
			continue;

		ret = vmw_emit_set_sr(cbs, i);
		if (ret)
			break;

		__clear_bit(VMW_BINDING_PS_SR_BIT, &sb->dirty);
	}

	return 0;
}

/**
 * vmw_collect_dirty_vbs - Build SVGA3dVertexBuffer data for a
 * SVGA3dCmdDXSetVertexBuffers command
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 * @bi: Pointer to where the binding info array is stored in @cbs
 * @dirty: Bitmap indicating which bindings need to be emitted.
 * @max_num: Maximum number of entries in the @bi array.
 *
 * Scans the @bi array for bindings that need to be emitted and
 * builds a buffer of SVGA3dVertexBuffer data.
 * On output, @cbs->bind_cmd_count contains the number of bindings to be
 * emitted, @cbs->bind_first_slot indicates the index of the first emitted
 * binding, and @cbs->bind_cmd_buffer contains the command data.
 */
static void vmw_collect_dirty_vbs(struct vmw_ctx_binding_state *cbs,
				  const struct vmw_ctx_bindinfo *bi,
				  unsigned long *dirty,
				  u32 max_num)
{
	const struct vmw_ctx_bindinfo_vb *biv =
		container_of(bi, struct vmw_ctx_bindinfo_vb, bi);
	unsigned long i, next_bit;
	SVGA3dVertexBuffer *vbs = (SVGA3dVertexBuffer *) &cbs->bind_cmd_buffer;

	cbs->bind_cmd_count = 0;
	i = find_first_bit(dirty, max_num);
	next_bit = i;
	cbs->bind_first_slot = i;

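	/*
	 * As in vmw_collect_dirty_view_ids(), emit one contiguous range that
	 * spans the first through the last dirty vertex buffer slot.
	 */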
	biv += i;
	for (; i < max_num; ++i, ++biv, ++vbs) {
		if (!biv->bi.ctx || biv->bi.scrubbed) {
			vbs->sid = SVGA3D_INVALID_ID;
			vbs->stride = 0;
			vbs->offset = 0;
		} else {
			vbs->sid = biv->bi.res->id;
			vbs->stride = biv->stride;
			vbs->offset = biv->offset;
		}
		cbs->bind_cmd_count++;
		if (next_bit == i) {
			next_bit = find_next_bit(dirty, max_num, i + 1);
			if (next_bit >= max_num)
				break;
		}
	}
}

/**
 * vmw_emit_set_vb - Issue delayed vertex buffer binding commands
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 *
 */
static int vmw_emit_set_vb(struct vmw_ctx_binding_state *cbs)
{
	const struct vmw_ctx_bindinfo *loc =
		&cbs->vertex_buffers[0].bi;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetVertexBuffers body;
	} *cmd;
	size_t cmd_size, set_vb_size;
	const struct vmw_resource *ctx = vmw_cbs_context(cbs);

	vmw_collect_dirty_vbs(cbs, loc, cbs->dirty_vb,
			     SVGA3D_DX_MAX_VERTEXBUFFERS);
	if (cbs->bind_cmd_count == 0)
		return 0;

	set_vb_size = cbs->bind_cmd_count*sizeof(SVGA3dVertexBuffer);
	cmd_size = sizeof(*cmd) + set_vb_size;
	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_VERTEX_BUFFERS;
	cmd->header.size = sizeof(cmd->body) + set_vb_size;
	cmd->body.startBuffer = cbs->bind_first_slot;

	memcpy(&cmd[1], cbs->bind_cmd_buffer, set_vb_size);

	vmw_fifo_commit(ctx->dev_priv, cmd_size);
	bitmap_clear(cbs->dirty_vb,
		     cbs->bind_first_slot, cbs->bind_cmd_count);

	return 0;
}

1013d350ecf5Sriastradh /**
1014d350ecf5Sriastradh  * vmw_binding_emit_dirty - Issue delayed binding commands
1015d350ecf5Sriastradh  *
1016d350ecf5Sriastradh  * @cbs: Pointer to the context's struct vmw_ctx_binding_state
1017d350ecf5Sriastradh  *
1018d350ecf5Sriastradh  * This function issues the delayed binding commands that arise from
1019d350ecf5Sriastradh  * previous scrub / unscrub calls. These binding commands are typically
1020d350ecf5Sriastradh  * commands that batch a number of bindings and therefore it makes sense
1021d350ecf5Sriastradh  * to delay them.
1022d350ecf5Sriastradh  */
1023d350ecf5Sriastradh static int vmw_binding_emit_dirty(struct vmw_ctx_binding_state *cbs)
1024d350ecf5Sriastradh {
1025d350ecf5Sriastradh 	int ret = 0;
1026d350ecf5Sriastradh 	unsigned long hit = 0;
1027d350ecf5Sriastradh 
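	/*
	 * Each set bit in cbs->dirty selects one batched binding command.
	 * A bit is cleared only after its command has been emitted
	 * successfully, so a failed emission leaves the binding type dirty
	 * for a later retry.
	 */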
1028d350ecf5Sriastradh 	while ((hit = find_next_bit(&cbs->dirty, VMW_BINDING_NUM_BITS, hit))
1029d350ecf5Sriastradh 	      < VMW_BINDING_NUM_BITS) {
1030d350ecf5Sriastradh 
1031d350ecf5Sriastradh 		switch (hit) {
1032d350ecf5Sriastradh 		case VMW_BINDING_RT_BIT:
1033d350ecf5Sriastradh 			ret = vmw_emit_set_rt(cbs);
1034d350ecf5Sriastradh 			break;
1035d350ecf5Sriastradh 		case VMW_BINDING_PS_BIT:
1036d350ecf5Sriastradh 			ret = vmw_binding_emit_dirty_ps(cbs);
1037d350ecf5Sriastradh 			break;
1038d350ecf5Sriastradh 		case VMW_BINDING_SO_BIT:
1039d350ecf5Sriastradh 			ret = vmw_emit_set_so(cbs);
1040d350ecf5Sriastradh 			break;
1041d350ecf5Sriastradh 		case VMW_BINDING_VB_BIT:
1042d350ecf5Sriastradh 			ret = vmw_emit_set_vb(cbs);
1043d350ecf5Sriastradh 			break;
1044d350ecf5Sriastradh 		default:
1045d350ecf5Sriastradh 			BUG();
1046d350ecf5Sriastradh 		}
1047d350ecf5Sriastradh 		if (ret)
1048d350ecf5Sriastradh 			return ret;
1049d350ecf5Sriastradh 
1050d350ecf5Sriastradh 		__clear_bit(hit, &cbs->dirty);
1051d350ecf5Sriastradh 		hit++;
1052d350ecf5Sriastradh 	}
1053d350ecf5Sriastradh 
1054d350ecf5Sriastradh 	return 0;
1055d350ecf5Sriastradh }
1056d350ecf5Sriastradh 
1057d350ecf5Sriastradh /**
1058d350ecf5Sriastradh  * vmw_binding_scrub_sr - Schedule a dx shader resource binding
1059d350ecf5Sriastradh  * scrub from a context
1060d350ecf5Sriastradh  *
1061d350ecf5Sriastradh  * @bi: single binding information.
1062d350ecf5Sriastradh  * @rebind: Whether to issue a bind instead of scrub command.
1063d350ecf5Sriastradh  */
1064d350ecf5Sriastradh static int vmw_binding_scrub_sr(struct vmw_ctx_bindinfo *bi, bool rebind)
1065d350ecf5Sriastradh {
1066d350ecf5Sriastradh 	struct vmw_ctx_bindinfo_view *biv =
1067d350ecf5Sriastradh 		container_of(bi, struct vmw_ctx_bindinfo_view, bi);
1068d350ecf5Sriastradh 	struct vmw_ctx_binding_state *cbs =
1069d350ecf5Sriastradh 		vmw_context_binding_state(bi->ctx);
1070d350ecf5Sriastradh 
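	/*
	 * No device command is issued here; the shader-resource slot and the
	 * per-shader and context dirty bits are only marked so that the
	 * binding can be emitted in a batch by vmw_binding_emit_dirty().
	 */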
1071d350ecf5Sriastradh 	__set_bit(biv->slot, cbs->per_shader[biv->shader_slot].dirty_sr);
1072d350ecf5Sriastradh 	__set_bit(VMW_BINDING_PS_SR_BIT,
1073d350ecf5Sriastradh 		  &cbs->per_shader[biv->shader_slot].dirty);
1074d350ecf5Sriastradh 	__set_bit(VMW_BINDING_PS_BIT, &cbs->dirty);
1075d350ecf5Sriastradh 
1076d350ecf5Sriastradh 	return 0;
1077d350ecf5Sriastradh }
1078d350ecf5Sriastradh 
1079d350ecf5Sriastradh /**
1080d350ecf5Sriastradh  * vmw_binding_scrub_dx_rt - Schedule a dx rendertarget binding
1081d350ecf5Sriastradh  * scrub from a context
1082d350ecf5Sriastradh  *
1083d350ecf5Sriastradh  * @bi: single binding information.
1084d350ecf5Sriastradh  * @rebind: Whether to issue a bind instead of scrub command.
1085d350ecf5Sriastradh  */
1086d350ecf5Sriastradh static int vmw_binding_scrub_dx_rt(struct vmw_ctx_bindinfo *bi, bool rebind)
1087d350ecf5Sriastradh {
1088d350ecf5Sriastradh 	struct vmw_ctx_binding_state *cbs =
1089d350ecf5Sriastradh 		vmw_context_binding_state(bi->ctx);
1090d350ecf5Sriastradh 
1091d350ecf5Sriastradh 	__set_bit(VMW_BINDING_RT_BIT, &cbs->dirty);
1092d350ecf5Sriastradh 
1093d350ecf5Sriastradh 	return 0;
1094d350ecf5Sriastradh }
1095d350ecf5Sriastradh 
1096d350ecf5Sriastradh /**
1097d350ecf5Sriastradh  * vmw_binding_scrub_so - Schedule a dx stream-output buffer binding
1098d350ecf5Sriastradh  * scrub from a context
1099d350ecf5Sriastradh  *
1100d350ecf5Sriastradh  * @bi: single binding information.
1101d350ecf5Sriastradh  * @rebind: Whether to issue a bind instead of scrub command.
1102d350ecf5Sriastradh  */
1103d350ecf5Sriastradh static int vmw_binding_scrub_so(struct vmw_ctx_bindinfo *bi, bool rebind)
1104d350ecf5Sriastradh {
1105d350ecf5Sriastradh 	struct vmw_ctx_binding_state *cbs =
1106d350ecf5Sriastradh 		vmw_context_binding_state(bi->ctx);
1107d350ecf5Sriastradh 
1108d350ecf5Sriastradh 	__set_bit(VMW_BINDING_SO_BIT, &cbs->dirty);
1109d350ecf5Sriastradh 
1110d350ecf5Sriastradh 	return 0;
1111d350ecf5Sriastradh }
1112d350ecf5Sriastradh 
1113d350ecf5Sriastradh /**
1114d350ecf5Sriastradh  * vmw_binding_scrub_vb - Schedule a dx vertex buffer binding
1115d350ecf5Sriastradh  * scrub from a context
1116d350ecf5Sriastradh  *
1117d350ecf5Sriastradh  * @bi: single binding information.
1118d350ecf5Sriastradh  * @rebind: Whether to issue a bind instead of scrub command.
1119d350ecf5Sriastradh  */
1120d350ecf5Sriastradh static int vmw_binding_scrub_vb(struct vmw_ctx_bindinfo *bi, bool rebind)
1121d350ecf5Sriastradh {
1122d350ecf5Sriastradh 	struct vmw_ctx_bindinfo_vb *bivb =
1123d350ecf5Sriastradh 		container_of(bi, struct vmw_ctx_bindinfo_vb, bi);
1124d350ecf5Sriastradh 	struct vmw_ctx_binding_state *cbs =
1125d350ecf5Sriastradh 		vmw_context_binding_state(bi->ctx);
1126d350ecf5Sriastradh 
1127d350ecf5Sriastradh 	__set_bit(bivb->slot, cbs->dirty_vb);
1128d350ecf5Sriastradh 	__set_bit(VMW_BINDING_VB_BIT, &cbs->dirty);
1129d350ecf5Sriastradh 
1130d350ecf5Sriastradh 	return 0;
1131d350ecf5Sriastradh }
1132d350ecf5Sriastradh 
1133d350ecf5Sriastradh /**
1134d350ecf5Sriastradh  * vmw_binding_scrub_ib - scrub a dx index buffer binding from a context
1135d350ecf5Sriastradh  *
1136d350ecf5Sriastradh  * @bi: single binding information.
1137d350ecf5Sriastradh  * @rebind: Whether to issue a bind instead of scrub command.
1138d350ecf5Sriastradh  */
1139d350ecf5Sriastradh static int vmw_binding_scrub_ib(struct vmw_ctx_bindinfo *bi, bool rebind)
1140d350ecf5Sriastradh {
1141d350ecf5Sriastradh 	struct vmw_ctx_bindinfo_ib *binding =
1142d350ecf5Sriastradh 		container_of(bi, typeof(*binding), bi);
1143d350ecf5Sriastradh 	struct vmw_private *dev_priv = bi->ctx->dev_priv;
1144d350ecf5Sriastradh 	struct {
1145d350ecf5Sriastradh 		SVGA3dCmdHeader header;
1146d350ecf5Sriastradh 		SVGA3dCmdDXSetIndexBuffer body;
1147d350ecf5Sriastradh 	} *cmd;
1148d350ecf5Sriastradh 
1149*677dec6eSriastradh 	cmd = VMW_FIFO_RESERVE_DX(dev_priv, sizeof(*cmd), bi->ctx->id);
1150*677dec6eSriastradh 	if (unlikely(cmd == NULL))
1151d350ecf5Sriastradh 		return -ENOMEM;
1152*677dec6eSriastradh 
1153d350ecf5Sriastradh 	cmd->header.id = SVGA_3D_CMD_DX_SET_INDEX_BUFFER;
1154d350ecf5Sriastradh 	cmd->header.size = sizeof(cmd->body);
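	/*
	 * A rebind restores the tracked index-buffer binding; a scrub
	 * replaces it with SVGA3D_INVALID_ID.
	 */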
1155d350ecf5Sriastradh 	if (rebind) {
1156d350ecf5Sriastradh 		cmd->body.sid = bi->res->id;
1157d350ecf5Sriastradh 		cmd->body.format = binding->format;
1158d350ecf5Sriastradh 		cmd->body.offset = binding->offset;
1159d350ecf5Sriastradh 	} else {
1160d350ecf5Sriastradh 		cmd->body.sid = SVGA3D_INVALID_ID;
1161d350ecf5Sriastradh 		cmd->body.format = 0;
1162d350ecf5Sriastradh 		cmd->body.offset = 0;
1163d350ecf5Sriastradh 	}
1164d350ecf5Sriastradh 
1165d350ecf5Sriastradh 	vmw_fifo_commit(dev_priv, sizeof(*cmd));
1166d350ecf5Sriastradh 
1167d350ecf5Sriastradh 	return 0;
1168d350ecf5Sriastradh }
1169d350ecf5Sriastradh 
1170d350ecf5Sriastradh /**
1171d350ecf5Sriastradh  * vmw_binding_state_alloc - Allocate a struct vmw_ctx_binding_state with
1172d350ecf5Sriastradh  * memory accounting.
1173d350ecf5Sriastradh  *
1174d350ecf5Sriastradh  * @dev_priv: Pointer to a device private structure.
1175d350ecf5Sriastradh  *
1176d350ecf5Sriastradh  * Returns a pointer to a newly allocated struct or an error pointer on error.
1177d350ecf5Sriastradh  */
1178d350ecf5Sriastradh struct vmw_ctx_binding_state *
1179d350ecf5Sriastradh vmw_binding_state_alloc(struct vmw_private *dev_priv)
1180d350ecf5Sriastradh {
1181d350ecf5Sriastradh 	struct vmw_ctx_binding_state *cbs;
1182*677dec6eSriastradh 	struct ttm_operation_ctx ctx = {
1183*677dec6eSriastradh 		.interruptible = false,
1184*677dec6eSriastradh 		.no_wait_gpu = false
1185*677dec6eSriastradh 	};
1186d350ecf5Sriastradh 	int ret;
1187d350ecf5Sriastradh 
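	/* Account for the binding state against the TTM global memory limit before the actual allocation. */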
1188d350ecf5Sriastradh 	ret = ttm_mem_global_alloc(vmw_mem_glob(dev_priv), sizeof(*cbs),
1189*677dec6eSriastradh 				&ctx);
1190d350ecf5Sriastradh 	if (ret)
1191d350ecf5Sriastradh 		return ERR_PTR(ret);
1192d350ecf5Sriastradh 
1193d350ecf5Sriastradh 	cbs = vzalloc(sizeof(*cbs));
1194d350ecf5Sriastradh 	if (!cbs) {
1195d350ecf5Sriastradh 		ttm_mem_global_free(vmw_mem_glob(dev_priv), sizeof(*cbs));
1196d350ecf5Sriastradh 		return ERR_PTR(-ENOMEM);
1197d350ecf5Sriastradh 	}
1198d350ecf5Sriastradh 
1199d350ecf5Sriastradh 	cbs->dev_priv = dev_priv;
1200d350ecf5Sriastradh 	INIT_LIST_HEAD(&cbs->list);
1201d350ecf5Sriastradh 
1202d350ecf5Sriastradh 	return cbs;
1203d350ecf5Sriastradh }
1204d350ecf5Sriastradh 
1205d350ecf5Sriastradh /**
1206d350ecf5Sriastradh  * vmw_binding_state_free - Free a struct vmw_ctx_binding_state and its
1207d350ecf5Sriastradh  * memory accounting info.
1208d350ecf5Sriastradh  *
1209d350ecf5Sriastradh  * @cbs: Pointer to the struct vmw_ctx_binding_state to be freed.
1210d350ecf5Sriastradh  */
1211d350ecf5Sriastradh void vmw_binding_state_free(struct vmw_ctx_binding_state *cbs)
1212d350ecf5Sriastradh {
1213d350ecf5Sriastradh 	struct vmw_private *dev_priv = cbs->dev_priv;
1214d350ecf5Sriastradh 
1215d350ecf5Sriastradh 	vfree(cbs);
1216d350ecf5Sriastradh 	ttm_mem_global_free(vmw_mem_glob(dev_priv), sizeof(*cbs));
1217d350ecf5Sriastradh }
1218d350ecf5Sriastradh 
1219d350ecf5Sriastradh /**
1220d350ecf5Sriastradh  * vmw_binding_state_list - Get the binding list of a
1221d350ecf5Sriastradh  * struct vmw_ctx_binding_state
1222d350ecf5Sriastradh  *
1223d350ecf5Sriastradh  * @cbs: Pointer to the struct vmw_ctx_binding_state
1224d350ecf5Sriastradh  *
1225d350ecf5Sriastradh  * Returns the binding list which can be used to traverse through the bindings
1226d350ecf5Sriastradh  * and access the resource information of all bindings.
1227d350ecf5Sriastradh  */
1228d350ecf5Sriastradh struct list_head *vmw_binding_state_list(struct vmw_ctx_binding_state *cbs)
1229d350ecf5Sriastradh {
1230d350ecf5Sriastradh 	return &cbs->list;
1231d350ecf5Sriastradh }
1232d350ecf5Sriastradh 
1233d350ecf5Sriastradh /**
1234d350ecf5Sriastradh  * vmw_binding_state_reset - clear a struct vmw_ctx_binding_state
1235d350ecf5Sriastradh  *
1236d350ecf5Sriastradh  * @cbs: Pointer to the struct vmw_ctx_binding_state to be cleared
1237d350ecf5Sriastradh  *
1238d350ecf5Sriastradh  * Drops all bindings registered in @cbs. No device binding actions are
1239d350ecf5Sriastradh  * performed.
1240d350ecf5Sriastradh  */
1241d350ecf5Sriastradh void vmw_binding_state_reset(struct vmw_ctx_binding_state *cbs)
1242d350ecf5Sriastradh {
1243d350ecf5Sriastradh 	struct vmw_ctx_bindinfo *entry, *next;
1244d350ecf5Sriastradh 
1245d350ecf5Sriastradh 	list_for_each_entry_safe(entry, next, &cbs->list, ctx_list)
1246d350ecf5Sriastradh 		vmw_binding_drop(entry);
1247d350ecf5Sriastradh }
1248d350ecf5Sriastradh 
1249*677dec6eSriastradh /**
1250*677dec6eSriastradh  * vmw_binding_dirtying - Return whether a binding type is dirtying its resource
1251*677dec6eSriastradh  * @binding_type: The binding type
1252*677dec6eSriastradh  *
1253*677dec6eSriastradh  * Each time a resource is put on the validation list as the result of a
1254*677dec6eSriastradh  * context binding referencing it, we need to determine whether that resource
1255*677dec6eSriastradh  * will be dirtied (written to by the GPU) as a result of the corresponding
1256*677dec6eSriastradh  * GPU operation. Currently rendertarget-, depth-stencil-, and
1257*677dec6eSriastradh  * stream-output-target bindings are capable of dirtying their resources.
1258*677dec6eSriastradh  *
1259*677dec6eSriastradh  * Return: Whether the binding type dirties the resource its binding points to.
1260*677dec6eSriastradh  */
1261*677dec6eSriastradh u32 vmw_binding_dirtying(enum vmw_ctx_binding_type binding_type)
1262*677dec6eSriastradh {
1263*677dec6eSriastradh 	static u32 is_binding_dirtying[vmw_ctx_binding_max] = {
1264*677dec6eSriastradh 		[vmw_ctx_binding_rt] = VMW_RES_DIRTY_SET,
1265*677dec6eSriastradh 		[vmw_ctx_binding_dx_rt] = VMW_RES_DIRTY_SET,
1266*677dec6eSriastradh 		[vmw_ctx_binding_ds] = VMW_RES_DIRTY_SET,
1267*677dec6eSriastradh 		[vmw_ctx_binding_so] = VMW_RES_DIRTY_SET,
1268*677dec6eSriastradh 	};
1269*677dec6eSriastradh 
1270*677dec6eSriastradh 	/* Review this function as new bindings are added. */
1271*677dec6eSriastradh 	BUILD_BUG_ON(vmw_ctx_binding_max != 11);
1272*677dec6eSriastradh 	return is_binding_dirtying[binding_type];
1273*677dec6eSriastradh }
1274*677dec6eSriastradh 
1275d350ecf5Sriastradh /*
1276d350ecf5Sriastradh  * This function is unused at run-time, and only used to hold various build
1277d350ecf5Sriastradh  * asserts important for code optimization assumptions.
1278d350ecf5Sriastradh  */
1279d350ecf5Sriastradh static void vmw_binding_build_asserts(void)
1280d350ecf5Sriastradh {
1281d350ecf5Sriastradh 	BUILD_BUG_ON(SVGA3D_NUM_SHADERTYPE_DX10 != 3);
1282d350ecf5Sriastradh 	BUILD_BUG_ON(SVGA3D_MAX_SIMULTANEOUS_RENDER_TARGETS > SVGA3D_RT_MAX);
1283d350ecf5Sriastradh 	BUILD_BUG_ON(sizeof(uint32) != sizeof(u32));
1284d350ecf5Sriastradh 
1285d350ecf5Sriastradh 	/*
1286d350ecf5Sriastradh 	 * struct vmw_ctx_binding_state::bind_cmd_buffer is used for various
1287d350ecf5Sriastradh 	 * view id arrays.
1288d350ecf5Sriastradh 	 */
1289d350ecf5Sriastradh 	BUILD_BUG_ON(VMW_MAX_VIEW_BINDINGS < SVGA3D_RT_MAX);
1290d350ecf5Sriastradh 	BUILD_BUG_ON(VMW_MAX_VIEW_BINDINGS < SVGA3D_DX_MAX_SRVIEWS);
1291d350ecf5Sriastradh 	BUILD_BUG_ON(VMW_MAX_VIEW_BINDINGS < SVGA3D_DX_MAX_CONSTBUFFERS);
1292d350ecf5Sriastradh 
1293d350ecf5Sriastradh 	/*
1294d350ecf5Sriastradh 	 * struct vmw_ctx_binding_state::bind_cmd_buffer is used for
1295d350ecf5Sriastradh 	 * u32 view ids, SVGA3dSoTargets and SVGA3dVertexBuffers
1296d350ecf5Sriastradh 	 */
1297d350ecf5Sriastradh 	BUILD_BUG_ON(SVGA3D_DX_MAX_SOTARGETS*sizeof(SVGA3dSoTarget) >
1298d350ecf5Sriastradh 		     VMW_MAX_VIEW_BINDINGS*sizeof(u32));
1299d350ecf5Sriastradh 	BUILD_BUG_ON(SVGA3D_DX_MAX_VERTEXBUFFERS*sizeof(SVGA3dVertexBuffer) >
1300d350ecf5Sriastradh 		     VMW_MAX_VIEW_BINDINGS*sizeof(u32));
1301d350ecf5Sriastradh }
1302