xref: /dragonfly/sys/dev/drm/radeon/radeon_fb.c (revision 279dd846)
/*
 * Copyright © 2007 David Airlie
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *     David Airlie
 *
 * $FreeBSD: head/sys/dev/drm2/radeon/radeon_fb.c 254885 2013-08-25 19:37:15Z dumbbell $
 */

#include <drm/drmP.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <uapi_drm/radeon_drm.h>
#include "radeon.h"

#include <drm/drm_fb_helper.h>

/* Object hierarchy:
 * a radeon_fbdev contains the drm_fb_helper plus a radeon framebuffer;
 * the helper holds a pointer to the radeon framebuffer base class.
 */
struct radeon_fbdev {
	struct drm_fb_helper helper;
	struct radeon_framebuffer rfb;
	struct list_head fbdev_list;
	struct radeon_device *rdev;
};

#ifdef DUMBBELL_WIP
static struct fb_ops radeonfb_ops = {
	.owner = THIS_MODULE,
	.fb_check_var = drm_fb_helper_check_var,
	.fb_set_par = drm_fb_helper_set_par,
	.fb_fillrect = cfb_fillrect,
	.fb_copyarea = cfb_copyarea,
	.fb_imageblit = cfb_imageblit,
	.fb_pan_display = drm_fb_helper_pan_display,
	.fb_blank = drm_fb_helper_blank,
	.fb_setcmap = drm_fb_helper_setcmap,
	.fb_debug_enter = drm_fb_helper_debug_enter,
	.fb_debug_leave = drm_fb_helper_debug_leave,
};
#endif /* DUMBBELL_WIP */

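/*
 * Round a scanline width (in pixels) up to the pitch alignment the CRTCs
 * require for the given bytes per pixel; AVIVO parts and tiled surfaces
 * use the larger alignment.  For example, at 32 bpp with the large
 * alignment (pitch_mask = 63), a width of 1366 rounds up to
 * (1366 + 63) & ~63 = 1408 pixels.
 */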
int radeon_align_pitch(struct radeon_device *rdev, int width, int bpp, bool tiled)
{
	int aligned = width;
	int align_large = (ASIC_IS_AVIVO(rdev)) || tiled;
	int pitch_mask = 0;

	switch (bpp / 8) {
	case 1:
		pitch_mask = align_large ? 255 : 127;
		break;
	case 2:
		pitch_mask = align_large ? 127 : 31;
		break;
	case 3:
	case 4:
		pitch_mask = align_large ? 63 : 15;
		break;
	}

	aligned += pitch_mask;
	aligned &= ~pitch_mask;
	return aligned;
}

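/*
 * Undo radeonfb_create_pinned_object(): unmap and unpin the buffer
 * object, then drop the GEM reference that kept it alive.
 */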
static void radeonfb_destroy_pinned_object(struct drm_gem_object *gobj)
{
	struct radeon_bo *rbo = gem_to_radeon_bo(gobj);
	int ret;

	ret = radeon_bo_reserve(rbo, false);
	if (likely(ret == 0)) {
		radeon_bo_kunmap(rbo);
		radeon_bo_unpin(rbo);
		radeon_bo_unreserve(rbo);
	}
	drm_gem_object_unreference_unlocked(gobj);
}

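/*
 * Allocate a GEM buffer object in VRAM large enough for the requested
 * mode, apply any tiling/byte-swap flags, pin it for scanout and kmap
 * it so the CPU can write to the console framebuffer.
 */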
static int radeonfb_create_pinned_object(struct radeon_fbdev *rfbdev,
					 struct drm_mode_fb_cmd2 *mode_cmd,
					 struct drm_gem_object **gobj_p)
{
	struct radeon_device *rdev = rfbdev->rdev;
	struct drm_gem_object *gobj = NULL;
	struct radeon_bo *rbo = NULL;
	bool fb_tiled = false; /* useful for testing */
	u32 tiling_flags = 0;
	int ret;
	int aligned_size, size;
	int height = mode_cmd->height;
	u32 bpp, depth;

	drm_fb_get_bpp_depth(mode_cmd->pixel_format, &depth, &bpp);

	/* need to align pitch with crtc limits */
	mode_cmd->pitches[0] = radeon_align_pitch(rdev, mode_cmd->width, bpp,
						  fb_tiled) * ((bpp + 1) / 8);

	if (rdev->family >= CHIP_R600)
		height = ALIGN(mode_cmd->height, 8);
	size = mode_cmd->pitches[0] * height;
	aligned_size = ALIGN(size, PAGE_SIZE);
	ret = radeon_gem_object_create(rdev, aligned_size, 0,
				       RADEON_GEM_DOMAIN_VRAM,
				       0, true, &gobj);
	if (ret) {
		printk(KERN_ERR "failed to allocate framebuffer (%d)\n",
		       aligned_size);
		return -ENOMEM;
	}
	rbo = gem_to_radeon_bo(gobj);

	if (fb_tiled)
		tiling_flags = RADEON_TILING_MACRO;

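	/*
	 * On big-endian hosts, request the GPU byte-swap tiling flags so
	 * 16/32 bpp scanout data appears in native byte order through the
	 * CPU mapping of the framebuffer.
	 */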
#ifdef __BIG_ENDIAN
	switch (bpp) {
	case 32:
		tiling_flags |= RADEON_TILING_SWAP_32BIT;
		break;
	case 16:
		tiling_flags |= RADEON_TILING_SWAP_16BIT;
	default:
		break;
	}
#endif

	if (tiling_flags) {
		ret = radeon_bo_set_tiling_flags(rbo,
						 tiling_flags | RADEON_TILING_SURFACE,
						 mode_cmd->pitches[0]);
		if (ret)
			dev_err(rdev->dev, "FB failed to set tiling flags\n");
	}

	ret = radeon_bo_reserve(rbo, false);
	if (unlikely(ret != 0))
		goto out_unref;
	/* Only 27 bit offset for legacy CRTC */
	ret = radeon_bo_pin_restricted(rbo, RADEON_GEM_DOMAIN_VRAM,
				       ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27,
				       NULL);
	if (ret) {
		radeon_bo_unreserve(rbo);
		goto out_unref;
	}
	if (fb_tiled)
		radeon_bo_check_tiling(rbo, 0, 0);
	ret = radeon_bo_kmap(rbo, NULL);
	radeon_bo_unreserve(rbo);
	if (ret) {
		goto out_unref;
	}

	*gobj_p = gobj;
	return 0;
out_unref:
	radeonfb_destroy_pinned_object(gobj);
	*gobj_p = NULL;
	return ret;
}

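/*
 * drm_fb_helper .fb_probe callback: back the fbdev console with a pinned
 * VRAM object, wrap it in rfbdev->rfb and fill in the fb_info (kernel and
 * physical addresses plus geometry) that the console layer consumes.
 */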
static int radeonfb_create(struct drm_fb_helper *helper,
			   struct drm_fb_helper_surface_size *sizes)
{
	struct radeon_fbdev *rfbdev = (struct radeon_fbdev *)helper;
	struct radeon_device *rdev = rfbdev->rdev;
	struct fb_info *info;
	struct drm_framebuffer *fb = NULL;
	struct drm_mode_fb_cmd2 mode_cmd;
	struct drm_gem_object *gobj = NULL;
	struct radeon_bo *rbo = NULL;
#ifdef DUMBBELL_WIP
	device_t device = rdev->dev;
#endif /* DUMBBELL_WIP */
	device_t vga_dev = device_get_parent(rdev->dev);
	int ret;
	unsigned long tmp;

	mode_cmd.width = sizes->surface_width;
	mode_cmd.height = sizes->surface_height;

	/* avivo can't scanout real 24bpp */
	if ((sizes->surface_bpp == 24) && ASIC_IS_AVIVO(rdev))
		sizes->surface_bpp = 32;

	mode_cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp,
							  sizes->surface_depth);

	ret = radeonfb_create_pinned_object(rfbdev, &mode_cmd, &gobj);
	if (ret) {
		DRM_ERROR("failed to create fbcon object %d\n", ret);
		return ret;
	}

	rbo = gem_to_radeon_bo(gobj);

	info = kmalloc(sizeof(*info), M_DRM, M_WAITOK | M_ZERO);

	ret = radeon_framebuffer_init(rdev->ddev, &rfbdev->rfb, &mode_cmd, gobj);
	if (ret) {
		DRM_ERROR("failed to initialize framebuffer %d\n", ret);
		goto out_unref;
	}

	fb = &rfbdev->rfb.base;

	/* setup helper */
	rfbdev->helper.fb = fb;
	rfbdev->helper.fbdev = info;

	memset(rbo->kptr, 0, radeon_bo_size(rbo));
	tmp = radeon_bo_gpu_offset(rbo) - rdev->mc.vram_start;
	info->vaddr = (vm_offset_t)rbo->kptr;
	info->paddr = rdev->mc.aper_base + tmp;
	info->width = sizes->surface_width;
	info->height = sizes->surface_height;
	info->stride = fb->pitches[0];
	info->depth = sizes->surface_bpp;
	info->is_vga_boot_display = vga_pci_is_boot_display(vga_dev);

	DRM_INFO("fb mappable at 0x%jX\n",  info->paddr);
	DRM_INFO("vram aperture at 0x%lX\n",  (unsigned long)rdev->mc.aper_base);
	DRM_INFO("size %lu\n", (unsigned long)radeon_bo_size(rbo));
	DRM_INFO("fb depth is %d\n", fb->depth);
	DRM_INFO("   pitch is %d\n", fb->pitches[0]);
	return 0;

out_unref:
	if (rbo) {

	}
	if (fb && ret) {
		drm_gem_object_unreference(gobj);
		drm_framebuffer_unregister_private(fb);
		drm_framebuffer_cleanup(fb);
		kfree(fb); /* XXX malloc'd in radeon_user_framebuffer_create? */
	}
	return ret;
}

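/* Output poll/hotplug callback: let the fb helper rescan outputs so the
 * fbdev console follows display changes. */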
void radeon_fb_output_poll_changed(struct radeon_device *rdev)
{
	drm_fb_helper_hotplug_event(&rdev->mode_info.rfbdev->helper);
}

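/*
 * Tear down fbdev state: release the pinned framebuffer object and clean
 * up the fb helper and the DRM framebuffer wrapped around it.  The
 * fb_info unregistration is still under DUMBBELL_WIP.
 */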
static int radeon_fbdev_destroy(struct drm_device *dev, struct radeon_fbdev *rfbdev)
{
	/* XXX unconfigure fb_info from syscons */
#ifdef DUMBBELL_WIP
	struct fb_info *info;
#endif /* DUMBBELL_WIP */
	struct radeon_framebuffer *rfb = &rfbdev->rfb;

#ifdef DUMBBELL_WIP
	if (rfbdev->helper.fbdev) {
		info = rfbdev->helper.fbdev;

		unregister_framebuffer(info);
		if (info->cmap.len)
			fb_dealloc_cmap(&info->cmap);
		framebuffer_release(info);
	}
#endif /* DUMBBELL_WIP */

	if (rfb->obj) {
		DRM_UNLOCK(dev); /* Work around lock recursion. dumbbell@ */
		radeonfb_destroy_pinned_object(rfb->obj);
		DRM_LOCK(dev);
		rfb->obj = NULL;
	}
	drm_fb_helper_fini(&rfbdev->helper);
	drm_framebuffer_unregister_private(&rfb->base);
	drm_framebuffer_cleanup(&rfb->base);

	return 0;
}

static const struct drm_fb_helper_funcs radeon_fb_helper_funcs = {
	.gamma_set = radeon_crtc_fb_gamma_set,
	.gamma_get = radeon_crtc_fb_gamma_get,
	.fb_probe = radeonfb_create,
};

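/*
 * Set up fbdev emulation for the device: choose a console depth,
 * initialize the fb helper, add all connectors and let the helper pick
 * an initial display configuration.
 */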
int radeon_fbdev_init(struct radeon_device *rdev)
{
	struct radeon_fbdev *rfbdev;
	int bpp_sel = 32;
	int ret;

	/* select 8 bpp console on RN50 or cards with 32MB of VRAM or less */
	if (ASIC_IS_RN50(rdev) || rdev->mc.real_vram_size <= (32*1024*1024))
		bpp_sel = 8;

	rfbdev = kzalloc(sizeof(struct radeon_fbdev), GFP_KERNEL);
	if (!rfbdev)
		return -ENOMEM;

	rfbdev->rdev = rdev;
	rdev->mode_info.rfbdev = rfbdev;

	drm_fb_helper_prepare(rdev->ddev, &rfbdev->helper,
			      &radeon_fb_helper_funcs);

	ret = drm_fb_helper_init(rdev->ddev, &rfbdev->helper,
				 rdev->num_crtc,
				 RADEONFB_CONN_LIMIT);
	if (ret) {
		kfree(rfbdev);
		return ret;
	}

	drm_fb_helper_single_add_all_connectors(&rfbdev->helper);

	/* disable all the possible outputs/crtcs before entering KMS mode */
	drm_helper_disable_unused_functions(rdev->ddev);

	drm_fb_helper_initial_config(&rfbdev->helper, bpp_sel);
	return 0;
}

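/* Tear down the fbdev emulation created by radeon_fbdev_init(). */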
void radeon_fbdev_fini(struct radeon_device *rdev)
{
	if (!rdev->mode_info.rfbdev)
		return;

	radeon_fbdev_destroy(rdev->ddev, rdev->mode_info.rfbdev);
	kfree(rdev->mode_info.rfbdev);
	rdev->mode_info.rfbdev = NULL;
}

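/* Suspend or resume the fbdev console.  The fb_set_suspend() call is
 * still under DUMBBELL_WIP, so this is currently a no-op. */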
void radeon_fbdev_set_suspend(struct radeon_device *rdev, int state)
{
#ifdef DUMBBELL_WIP
	fb_set_suspend(rdev->mode_info.rfbdev->helper.fbdev, state);
#endif /* DUMBBELL_WIP */
}

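/* Report the size in bytes of the buffer object backing the fbdev
 * framebuffer. */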
int radeon_fbdev_total_size(struct radeon_device *rdev)
{
	struct radeon_bo *robj;
	int size = 0;

	robj = gem_to_radeon_bo(rdev->mode_info.rfbdev->rfb.obj);
	size += radeon_bo_size(robj);
	return size;
}

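/* Check whether a buffer object is the one backing the fbdev console. */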
bool radeon_fbdev_robj_is_fb(struct radeon_device *rdev, struct radeon_bo *robj)
{
	if (robj == gem_to_radeon_bo(rdev->mode_info.rfbdev->rfb.obj))
		return true;
	return false;
}