1 /*
2 * Copyright 2008 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Author: Stanislaw Skowronek
23 */
24
25 #include <linux/module.h>
26 #include <linux/sched.h>
27 #include <linux/slab.h>
28 #include <linux/string_helpers.h>
29
30 #include <asm/unaligned.h>
31
32 #include <drm/drm_util.h>
33
34 #define ATOM_DEBUG
35
36 #include "atomfirmware.h"
37 #include "atom.h"
38 #include "atom-names.h"
39 #include "atom-bits.h"
40 #include "amdgpu.h"
41
/* Condition codes for the conditional JUMP opcodes (state set by COMPARE). */
#define ATOM_COND_ABOVE 0
#define ATOM_COND_ABOVEOREQUAL 1
#define ATOM_COND_ALWAYS 2
#define ATOM_COND_BELOW 3
#define ATOM_COND_BELOWOREQUAL 4
#define ATOM_COND_EQUAL 5
#define ATOM_COND_NOTEQUAL 6

/* Port selectors for the SET_*_PORT opcodes (see atom_op_setport()). */
#define ATOM_PORT_ATI 0
#define ATOM_PORT_PCI 1
#define ATOM_PORT_SYSIO 2

/* Delay units for the DELAY opcodes (see atom_op_delay()). */
#define ATOM_UNIT_MICROSEC 0
#define ATOM_UNIT_MILLISEC 1

#define PLL_INDEX 2
#define PLL_DATA 3

/* Abort a table that keeps jumping to the same target for longer than this. */
#define ATOM_CMD_TIMEOUT_SEC 20
61
/* Per-invocation execution state for one ATOM command table. */
typedef struct {
	struct atom_context *ctx;	/* shared interpreter/card context */
	uint32_t *ps, *ws;		/* parameter space and workspace */
	int ps_shift;			/* ps offset handed to nested CALL_TABLE (see atom_op_calltable) */
	uint16_t start;			/* base offset of this table; jump/case targets are relative to it */
	unsigned last_jump;		/* last jump target, for stuck-loop detection */
	unsigned long last_jump_jiffies; /* jiffies when that target was first taken */
	bool abort;			/* request to stop execution (nested failure or timeout) */
} atom_exec_context;
71
/* Runtime switch for the DEBUG()/SDEBUG() trace output below. */
int amdgpu_atom_debug;
static int amdgpu_atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t *params);
int amdgpu_atom_execute_table(struct atom_context *ctx, int index, uint32_t *params);

/* Field mask and right-shift for each source alignment encoding
 * (dword, word0/8/16, byte0/8/16/24): mask selects the sub-field of the
 * raw 32-bit value, shift right-aligns it. */
static uint32_t atom_arg_mask[8] =
	{ 0xFFFFFFFF, 0xFFFF, 0xFFFF00, 0xFFFF0000, 0xFF, 0xFF00, 0xFF0000,
	0xFF000000 };
static int atom_arg_shift[8] = { 0, 0, 8, 16, 0, 8, 16, 24 };

static int atom_dst_to_src[8][4] = {
	/* translate destination alignment field to the source alignment encoding */
	{0, 0, 0, 0},
	{1, 2, 3, 0},
	{1, 2, 3, 0},
	{1, 2, 3, 0},
	{4, 5, 6, 7},
	{4, 5, 6, 7},
	{4, 5, 6, 7},
	{4, 5, 6, 7},
};
/* Default destination sub-alignment for each alignment encoding. */
static int atom_def_dst[8] = { 0, 0, 1, 2, 0, 1, 2, 3 };
93
/* Current call-nesting depth; used only to indent SDEBUG() trace output. */
static int debug_depth;
#ifdef ATOM_DEBUG
/* Emit @n single spaces (SDEBUG indentation helper). */
static void debug_print_spaces(int n)
{
	while (n--)
		printk(" ");
}

#ifdef DEBUG
#undef DEBUG
#endif

/* DEBUG(): raw trace line.  SDEBUG(): trace line indented by call depth.
 * Both are runtime-gated on amdgpu_atom_debug and compile to no-ops when
 * ATOM_DEBUG is not defined. */
#define DEBUG(...) do if (amdgpu_atom_debug) { printk(KERN_DEBUG __VA_ARGS__); } while (0)
#define SDEBUG(...) do if (amdgpu_atom_debug) { printk(KERN_DEBUG); debug_print_spaces(debug_depth); printk(__VA_ARGS__); } while (0)
#else
#define DEBUG(...) do { } while (0)
#define SDEBUG(...) do { } while (0)
#endif
112
/*
 * Run one indirect-IO (IIO) program starting at @base in the BIOS image.
 * @temp acts as the accumulator; @index/@data are the caller's register
 * index and write data, merged in by the MOVE_* opcodes.  Returns the
 * accumulator at ATOM_IIO_END, or 0 on an unknown opcode.
 */
static uint32_t atom_iio_execute(struct atom_context *ctx, int base,
				 uint32_t index, uint32_t data)
{
	uint32_t temp = 0xCDCDCDCD;
	uint32_t bits;

	while (1) {
		switch (CU8(base)) {
		case ATOM_IIO_NOP:
			base++;
			break;
		case ATOM_IIO_READ:
			temp = ctx->card->reg_read(ctx->card, CU16(base + 1));
			base += 3;
			break;
		case ATOM_IIO_WRITE:
			ctx->card->reg_write(ctx->card, CU16(base + 1), temp);
			base += 3;
			break;
		case ATOM_IIO_CLEAR:
			/* Clear a CU8(base+1)-bit field at bit CU8(base+2). */
			bits = 0xFFFFFFFF >> (32 - CU8(base + 1));
			temp &= ~(bits << CU8(base + 2));
			base += 3;
			break;
		case ATOM_IIO_SET:
			/* Set a CU8(base+1)-bit field at bit CU8(base+2). */
			bits = 0xFFFFFFFF >> (32 - CU8(base + 1));
			temp |= bits << CU8(base + 2);
			base += 3;
			break;
		case ATOM_IIO_MOVE_INDEX:
			/* Copy a bit-field from @index into the accumulator. */
			bits = 0xFFFFFFFF >> (32 - CU8(base + 1));
			temp &= ~(bits << CU8(base + 3));
			temp |= ((index >> CU8(base + 2)) & bits) << CU8(base + 3);
			base += 4;
			break;
		case ATOM_IIO_MOVE_DATA:
			/* Copy a bit-field from @data into the accumulator. */
			bits = 0xFFFFFFFF >> (32 - CU8(base + 1));
			temp &= ~(bits << CU8(base + 3));
			temp |= ((data >> CU8(base + 2)) & bits) << CU8(base + 3);
			base += 4;
			break;
		case ATOM_IIO_MOVE_ATTR:
			/* Copy a bit-field from the saved IO attributes. */
			bits = 0xFFFFFFFF >> (32 - CU8(base + 1));
			temp &= ~(bits << CU8(base + 3));
			temp |= ((ctx->io_attr >> CU8(base + 2)) & bits) << CU8(base + 3);
			base += 4;
			break;
		case ATOM_IIO_END:
			return temp;
		default:
			pr_info("Unknown IIO opcode\n");
			return 0;
		}
	}
}
184
/*
 * Decode one source operand at *ptr and return its field value.
 *
 * @attr bits 0-2 select the argument type (register, parameter space,
 * workspace, data-table ID, framebuffer scratch, immediate, PLL, MC);
 * bits 3-5 select the alignment, i.e. which byte/word of the raw 32-bit
 * value forms the field.  *ptr is advanced past the operand bytes.
 * If @saved is non-NULL, the full unmasked 32-bit raw value is stored
 * there so atom_put_dst() can later merge non-field bits back.
 * @print enables DEBUG trace output.
 */
static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr,
				 int *ptr, uint32_t *saved, int print)
{
	uint32_t idx, val = 0xCDCDCDCD, align, arg;
	struct atom_context *gctx = ctx->ctx;
	arg = attr & 7;
	align = (attr >> 3) & 7;
	switch (arg) {
	case ATOM_ARG_REG:
		idx = U16(*ptr);
		(*ptr) += 2;
		if (print)
			DEBUG("REG[0x%04X]", idx);
		/* register index is relative to the current SET_REG_BLOCK base */
		idx += gctx->reg_block;
		switch (gctx->io_mode) {
		case ATOM_IO_MM:
			val = gctx->card->reg_read(gctx->card, idx);
			break;
		case ATOM_IO_PCI:
			pr_info("PCI registers are not implemented\n");
			return 0;
		case ATOM_IO_SYSIO:
			pr_info("SYSIO registers are not implemented\n");
			return 0;
		default:
			/* remaining modes are indirect IO: bit 7 set, low
			 * bits select the IIO program */
			if (!(gctx->io_mode & 0x80)) {
				pr_info("Bad IO mode\n");
				return 0;
			}
			if (!gctx->iio[gctx->io_mode & 0x7F]) {
				pr_info("Undefined indirect IO read method %d\n",
					gctx->io_mode & 0x7F);
				return 0;
			}
			val =
			    atom_iio_execute(gctx,
					     gctx->iio[gctx->io_mode & 0x7F],
					     idx, 0);
		}
		break;
	case ATOM_ARG_PS:
		idx = U8(*ptr);
		(*ptr)++;
		/* get_unaligned_le32 avoids unaligned accesses from atombios
		 * tables, noticed on a DEC Alpha. */
		val = get_unaligned_le32((u32 *)&ctx->ps[idx]);
		if (print)
			DEBUG("PS[0x%02X,0x%04X]", idx, val);
		break;
	case ATOM_ARG_WS:
		idx = U8(*ptr);
		(*ptr)++;
		if (print)
			DEBUG("WS[0x%02X]", idx);
		/* high workspace indices are aliases for interpreter state */
		switch (idx) {
		case ATOM_WS_QUOTIENT:
			val = gctx->divmul[0];
			break;
		case ATOM_WS_REMAINDER:
			val = gctx->divmul[1];
			break;
		case ATOM_WS_DATAPTR:
			val = gctx->data_block;
			break;
		case ATOM_WS_SHIFT:
			val = gctx->shift;
			break;
		case ATOM_WS_OR_MASK:
			val = 1 << gctx->shift;
			break;
		case ATOM_WS_AND_MASK:
			val = ~(1 << gctx->shift);
			break;
		case ATOM_WS_FB_WINDOW:
			val = gctx->fb_base;
			break;
		case ATOM_WS_ATTRIBUTES:
			val = gctx->io_attr;
			break;
		case ATOM_WS_REGPTR:
			val = gctx->reg_block;
			break;
		default:
			val = ctx->ws[idx];
		}
		break;
	case ATOM_ARG_ID:
		idx = U16(*ptr);
		(*ptr) += 2;
		if (print) {
			if (gctx->data_block)
				DEBUG("ID[0x%04X+%04X]", idx, gctx->data_block);
			else
				DEBUG("ID[0x%04X]", idx);
		}
		val = U32(idx + gctx->data_block);
		break;
	case ATOM_ARG_FB:
		idx = U8(*ptr);
		(*ptr)++;
		/* bounds-check against the scratch buffer backing FB space */
		if ((gctx->fb_base + (idx * 4)) > gctx->scratch_size_bytes) {
			DRM_ERROR("ATOM: fb read beyond scratch region: %d vs. %d\n",
				  gctx->fb_base + (idx * 4), gctx->scratch_size_bytes);
			val = 0;
		} else
			val = gctx->scratch[(gctx->fb_base / 4) + idx];
		if (print)
			DEBUG("FB[0x%02X]", idx);
		break;
	case ATOM_ARG_IMM:
		/* immediates return directly: no mask/shift step applies */
		switch (align) {
		case ATOM_SRC_DWORD:
			val = U32(*ptr);
			(*ptr) += 4;
			if (print)
				DEBUG("IMM 0x%08X\n", val);
			return val;
		case ATOM_SRC_WORD0:
		case ATOM_SRC_WORD8:
		case ATOM_SRC_WORD16:
			val = U16(*ptr);
			(*ptr) += 2;
			if (print)
				DEBUG("IMM 0x%04X\n", val);
			return val;
		case ATOM_SRC_BYTE0:
		case ATOM_SRC_BYTE8:
		case ATOM_SRC_BYTE16:
		case ATOM_SRC_BYTE24:
			val = U8(*ptr);
			(*ptr)++;
			if (print)
				DEBUG("IMM 0x%02X\n", val);
			return val;
		}
		break;
	case ATOM_ARG_PLL:
		idx = U8(*ptr);
		(*ptr)++;
		if (print)
			DEBUG("PLL[0x%02X]", idx);
		val = gctx->card->pll_read(gctx->card, idx);
		break;
	case ATOM_ARG_MC:
		idx = U8(*ptr);
		(*ptr)++;
		if (print)
			DEBUG("MC[0x%02X]", idx);
		val = gctx->card->mc_read(gctx->card, idx);
		break;
	}
	/* hand the raw value back before extracting the aligned field */
	if (saved)
		*saved = val;
	val &= atom_arg_mask[align];
	val >>= atom_arg_shift[align];
	if (print)
		switch (align) {
		case ATOM_SRC_DWORD:
			DEBUG(".[31:0] -> 0x%08X\n", val);
			break;
		case ATOM_SRC_WORD0:
			DEBUG(".[15:0] -> 0x%04X\n", val);
			break;
		case ATOM_SRC_WORD8:
			DEBUG(".[23:8] -> 0x%04X\n", val);
			break;
		case ATOM_SRC_WORD16:
			DEBUG(".[31:16] -> 0x%04X\n", val);
			break;
		case ATOM_SRC_BYTE0:
			DEBUG(".[7:0] -> 0x%02X\n", val);
			break;
		case ATOM_SRC_BYTE8:
			DEBUG(".[15:8] -> 0x%02X\n", val);
			break;
		case ATOM_SRC_BYTE16:
			DEBUG(".[23:16] -> 0x%02X\n", val);
			break;
		case ATOM_SRC_BYTE24:
			DEBUG(".[31:24] -> 0x%02X\n", val);
			break;
		}
	return val;
}
369
/* Advance *ptr past an encoded source operand without decoding it. */
static void atom_skip_src_int(atom_exec_context *ctx, uint8_t attr, int *ptr)
{
	uint32_t arg = attr & 7;
	uint32_t align = (attr >> 3) & 7;

	switch (arg) {
	case ATOM_ARG_REG:
	case ATOM_ARG_ID:
		/* 16-bit register or data-table offset */
		*ptr += 2;
		break;
	case ATOM_ARG_PLL:
	case ATOM_ARG_MC:
	case ATOM_ARG_PS:
	case ATOM_ARG_WS:
	case ATOM_ARG_FB:
		/* single-byte index */
		*ptr += 1;
		break;
	case ATOM_ARG_IMM:
		/* immediate width follows the alignment encoding */
		switch (align) {
		case ATOM_SRC_DWORD:
			*ptr += 4;
			break;
		case ATOM_SRC_WORD0:
		case ATOM_SRC_WORD8:
		case ATOM_SRC_WORD16:
			*ptr += 2;
			break;
		case ATOM_SRC_BYTE0:
		case ATOM_SRC_BYTE8:
		case ATOM_SRC_BYTE16:
		case ATOM_SRC_BYTE24:
			*ptr += 1;
			break;
		}
		break;
	}
}
405
/* Decode a source operand with tracing enabled and no raw-value save. */
static uint32_t atom_get_src(atom_exec_context *ctx, uint8_t attr, int *ptr)
{
	return atom_get_src_int(ctx, attr, ptr, NULL, 1);
}
410
atom_get_src_direct(atom_exec_context * ctx,uint8_t align,int * ptr)411 static uint32_t atom_get_src_direct(atom_exec_context *ctx, uint8_t align, int *ptr)
412 {
413 uint32_t val = 0xCDCDCDCD;
414
415 switch (align) {
416 case ATOM_SRC_DWORD:
417 val = U32(*ptr);
418 (*ptr) += 4;
419 break;
420 case ATOM_SRC_WORD0:
421 case ATOM_SRC_WORD8:
422 case ATOM_SRC_WORD16:
423 val = U16(*ptr);
424 (*ptr) += 2;
425 break;
426 case ATOM_SRC_BYTE0:
427 case ATOM_SRC_BYTE8:
428 case ATOM_SRC_BYTE16:
429 case ATOM_SRC_BYTE24:
430 val = U8(*ptr);
431 (*ptr)++;
432 break;
433 }
434 return val;
435 }
436
/* Decode a destination operand by translating its alignment field into the
 * corresponding source alignment encoding, then reusing the source decoder. */
static uint32_t atom_get_dst(atom_exec_context *ctx, int arg, uint8_t attr,
			     int *ptr, uint32_t *saved, int print)
{
	int src_align = atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3];

	return atom_get_src_int(ctx, arg | (src_align << 3), ptr, saved, print);
}
445
/* Skip a destination operand; mirrors atom_get_dst()'s alignment mapping. */
static void atom_skip_dst(atom_exec_context *ctx, int arg, uint8_t attr, int *ptr)
{
	int src_align = atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3];

	atom_skip_src_int(ctx, arg | (src_align << 3), ptr);
}
452
/*
 * Store @val into the destination operand at *ptr.
 *
 * @saved must be the raw 32-bit value previously captured by
 * atom_get_dst()/atom_get_src_int(): the bits outside the field selected
 * by the destination alignment are merged back from it so a partial-width
 * store does not clobber neighbouring bits.  *ptr is advanced past the
 * operand bytes.
 */
static void atom_put_dst(atom_exec_context *ctx, int arg, uint8_t attr,
			 int *ptr, uint32_t val, uint32_t saved)
{
	uint32_t align =
	    atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3], old_val =
	    val, idx;
	struct atom_context *gctx = ctx->ctx;
	/* old_val keeps the in-field value for the trace output below */
	old_val &= atom_arg_mask[align] >> atom_arg_shift[align];
	/* position the field, then merge the preserved out-of-field bits */
	val <<= atom_arg_shift[align];
	val &= atom_arg_mask[align];
	saved &= ~atom_arg_mask[align];
	val |= saved;
	switch (arg) {
	case ATOM_ARG_REG:
		idx = U16(*ptr);
		(*ptr) += 2;
		DEBUG("REG[0x%04X]", idx);
		idx += gctx->reg_block;
		switch (gctx->io_mode) {
		case ATOM_IO_MM:
			/* NOTE(review): a write to register 0 is shifted left
			 * by two - presumably a byte-vs-dword index quirk of
			 * MM writes; confirm against the ATOM spec */
			if (idx == 0)
				gctx->card->reg_write(gctx->card, idx,
						      val << 2);
			else
				gctx->card->reg_write(gctx->card, idx, val);
			break;
		case ATOM_IO_PCI:
			pr_info("PCI registers are not implemented\n");
			return;
		case ATOM_IO_SYSIO:
			pr_info("SYSIO registers are not implemented\n");
			return;
		default:
			if (!(gctx->io_mode & 0x80)) {
				pr_info("Bad IO mode\n");
				return;
			}
			/* NOTE(review): the read path in atom_get_src_int()
			 * masks the IIO index with 0x7F, but this write path
			 * uses 0xFF (keeping the ATOM_IO_IIO bit set), so
			 * read and write can index different iio[] entries -
			 * confirm whether this asymmetry is intended */
			if (!gctx->iio[gctx->io_mode & 0xFF]) {
				pr_info("Undefined indirect IO write method %d\n",
					gctx->io_mode & 0x7F);
				return;
			}
			atom_iio_execute(gctx, gctx->iio[gctx->io_mode & 0xFF],
					 idx, val);
		}
		break;
	case ATOM_ARG_PS:
		idx = U8(*ptr);
		(*ptr)++;
		DEBUG("PS[0x%02X]", idx);
		ctx->ps[idx] = cpu_to_le32(val);
		break;
	case ATOM_ARG_WS:
		idx = U8(*ptr);
		(*ptr)++;
		DEBUG("WS[0x%02X]", idx);
		/* high workspace indices alias interpreter state, mirroring
		 * the read path in atom_get_src_int() */
		switch (idx) {
		case ATOM_WS_QUOTIENT:
			gctx->divmul[0] = val;
			break;
		case ATOM_WS_REMAINDER:
			gctx->divmul[1] = val;
			break;
		case ATOM_WS_DATAPTR:
			gctx->data_block = val;
			break;
		case ATOM_WS_SHIFT:
			gctx->shift = val;
			break;
		case ATOM_WS_OR_MASK:
		case ATOM_WS_AND_MASK:
			/* derived read-only aliases: writes are discarded */
			break;
		case ATOM_WS_FB_WINDOW:
			gctx->fb_base = val;
			break;
		case ATOM_WS_ATTRIBUTES:
			gctx->io_attr = val;
			break;
		case ATOM_WS_REGPTR:
			gctx->reg_block = val;
			break;
		default:
			ctx->ws[idx] = val;
		}
		break;
	case ATOM_ARG_FB:
		idx = U8(*ptr);
		(*ptr)++;
		/* bounds-check against the scratch buffer backing FB space */
		if ((gctx->fb_base + (idx * 4)) > gctx->scratch_size_bytes) {
			DRM_ERROR("ATOM: fb write beyond scratch region: %d vs. %d\n",
				  gctx->fb_base + (idx * 4), gctx->scratch_size_bytes);
		} else
			gctx->scratch[(gctx->fb_base / 4) + idx] = val;
		DEBUG("FB[0x%02X]", idx);
		break;
	case ATOM_ARG_PLL:
		idx = U8(*ptr);
		(*ptr)++;
		DEBUG("PLL[0x%02X]", idx);
		gctx->card->pll_write(gctx->card, idx, val);
		break;
	case ATOM_ARG_MC:
		idx = U8(*ptr);
		(*ptr)++;
		DEBUG("MC[0x%02X]", idx);
		gctx->card->mc_write(gctx->card, idx, val);
		return;
	}
	/* trace which sub-field was written and its in-field value */
	switch (align) {
	case ATOM_SRC_DWORD:
		DEBUG(".[31:0] <- 0x%08X\n", old_val);
		break;
	case ATOM_SRC_WORD0:
		DEBUG(".[15:0] <- 0x%04X\n", old_val);
		break;
	case ATOM_SRC_WORD8:
		DEBUG(".[23:8] <- 0x%04X\n", old_val);
		break;
	case ATOM_SRC_WORD16:
		DEBUG(".[31:16] <- 0x%04X\n", old_val);
		break;
	case ATOM_SRC_BYTE0:
		DEBUG(".[7:0] <- 0x%02X\n", old_val);
		break;
	case ATOM_SRC_BYTE8:
		DEBUG(".[15:8] <- 0x%02X\n", old_val);
		break;
	case ATOM_SRC_BYTE16:
		DEBUG(".[23:16] <- 0x%02X\n", old_val);
		break;
	case ATOM_SRC_BYTE24:
		DEBUG(".[31:24] <- 0x%02X\n", old_val);
		break;
	}
}
588
/* ADD: dst = dst + src (non-field bits preserved via @saved). */
static void atom_op_add(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	int dst_ptr = *ptr;	/* re-read position of the dst operand */
	uint32_t saved, dst, src;

	SDEBUG(" dst: ");
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	SDEBUG(" src: ");
	src = atom_get_src(ctx, attr, ptr);
	SDEBUG(" dst: ");
	atom_put_dst(ctx, arg, attr, &dst_ptr, dst + src, saved);
}
602
/* AND: dst = dst & src (non-field bits preserved via @saved). */
static void atom_op_and(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	int dst_ptr = *ptr;	/* re-read position of the dst operand */
	uint32_t saved, dst, src;

	SDEBUG(" dst: ");
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	SDEBUG(" src: ");
	src = atom_get_src(ctx, attr, ptr);
	SDEBUG(" dst: ");
	atom_put_dst(ctx, arg, attr, &dst_ptr, dst & src, saved);
}
616
/* BEEP: nothing to drive here; just log that the BIOS requested it. */
static void atom_op_beep(atom_exec_context *ctx, int *ptr, int arg)
{
	printk("ATOM BIOS beeped!\n");
}
621
/* CALL_TABLE: recursively execute another command table, passing our
 * parameter space shifted by ps_shift.  A failure aborts this table too. */
static void atom_op_calltable(atom_exec_context *ctx, int *ptr, int arg)
{
	int idx = U8((*ptr)++);
	int r = 0;

	if (idx < ATOM_TABLE_NAMES_CNT)
		SDEBUG(" table: %d (%s)\n", idx, atom_table_names[idx]);
	else
		SDEBUG(" table: %d\n", idx);
	/* a zero offset in the command-table directory means "not present" */
	if (U16(ctx->ctx->cmd_table + 4 + 2 * idx))
		r = amdgpu_atom_execute_table_locked(ctx->ctx, idx,
						     ctx->ps + ctx->ps_shift);
	if (r)
		ctx->abort = true;
}
637
/* CLEAR: write zero to dst, preserving bits outside the addressed field. */
static void atom_op_clear(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	int dst_ptr = *ptr;
	uint32_t saved;

	/* force the default destination sub-alignment for this encoding */
	attr = (attr & 0x38) | (atom_def_dst[(attr & 0x38) >> 3] << 6);
	atom_get_dst(ctx, arg, attr, ptr, &saved, 0);
	SDEBUG(" dst: ");
	atom_put_dst(ctx, arg, attr, &dst_ptr, 0, saved);
}
649
/* COMPARE: set the equal/above condition flags from src1 vs src2. */
static void atom_op_compare(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	uint32_t lhs, rhs;

	SDEBUG(" src1: ");
	lhs = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
	SDEBUG(" src2: ");
	rhs = atom_get_src(ctx, attr, ptr);
	ctx->ctx->cs_equal = (lhs == rhs);
	ctx->ctx->cs_above = (lhs > rhs);
	SDEBUG(" result: %s %s\n", ctx->ctx->cs_equal ? "EQ" : "NE",
	       ctx->ctx->cs_above ? "GT" : "LE");
}
663
/* DELAY: busy-wait in microseconds, or sleep/spin in milliseconds
 * depending on whether the current context may sleep. */
static void atom_op_delay(atom_exec_context *ctx, int *ptr, int arg)
{
	unsigned int count = U8(*ptr);

	(*ptr)++;
	SDEBUG(" count: %d\n", count);
	if (arg == ATOM_UNIT_MICROSEC)
		udelay(count);
	else if (drm_can_sleep())
		drm_msleep(count);
	else
		mdelay(count);
}
675
/* DIV: quotient -> divmul[0], remainder -> divmul[1]; both 0 if the
 * divisor is zero (guarding against a divide fault). */
static void atom_op_div(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	uint32_t dividend, divisor;

	SDEBUG(" src1: ");
	dividend = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
	SDEBUG(" src2: ");
	divisor = atom_get_src(ctx, attr, ptr);
	if (divisor == 0) {
		ctx->ctx->divmul[0] = 0;
		ctx->ctx->divmul[1] = 0;
	} else {
		ctx->ctx->divmul[0] = dividend / divisor;
		ctx->ctx->divmul[1] = dividend % divisor;
	}
}
692
/* DIV32: 64/32 division.  The dividend's low word is the operand, its high
 * word is taken from divmul[1] (set by a preceding MUL32/DIV32); the
 * 64-bit quotient is split back into divmul[0]/divmul[1]. */
static void atom_op_div32(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	uint32_t lo, divisor;
	uint64_t val64;

	SDEBUG(" src1: ");
	lo = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
	SDEBUG(" src2: ");
	divisor = atom_get_src(ctx, attr, ptr);
	if (divisor == 0) {
		ctx->ctx->divmul[0] = 0;
		ctx->ctx->divmul[1] = 0;
	} else {
		val64 = (((uint64_t)ctx->ctx->divmul[1]) << 32) | lo;
		do_div(val64, divisor);	/* val64 becomes the quotient */
		ctx->ctx->divmul[0] = lower_32_bits(val64);
		ctx->ctx->divmul[1] = upper_32_bits(val64);
	}
}
713
/* EOT (end of table): dispatch-level no-op; the main loop ends the table. */
static void atom_op_eot(atom_exec_context *ctx, int *ptr, int arg)
{
	/* functionally, a nop */
}
718
/*
 * JUMP family: conditionally move the program counter to @target (relative
 * to the table start).  A watchdog aborts execution if the interpreter
 * keeps jumping to the same target for more than ATOM_CMD_TIMEOUT_SEC
 * seconds, which catches BIOS tables stuck in a polling loop.
 */
static void atom_op_jump(atom_exec_context *ctx, int *ptr, int arg)
{
	int execute = 0, target = U16(*ptr);
	unsigned long cjiffies;

	(*ptr) += 2;
	/* @arg selects which condition (set by COMPARE/TEST) gates the jump */
	switch (arg) {
	case ATOM_COND_ABOVE:
		execute = ctx->ctx->cs_above;
		break;
	case ATOM_COND_ABOVEOREQUAL:
		execute = ctx->ctx->cs_above || ctx->ctx->cs_equal;
		break;
	case ATOM_COND_ALWAYS:
		execute = 1;
		break;
	case ATOM_COND_BELOW:
		execute = !(ctx->ctx->cs_above || ctx->ctx->cs_equal);
		break;
	case ATOM_COND_BELOWOREQUAL:
		execute = !ctx->ctx->cs_above;
		break;
	case ATOM_COND_EQUAL:
		execute = ctx->ctx->cs_equal;
		break;
	case ATOM_COND_NOTEQUAL:
		execute = !ctx->ctx->cs_equal;
		break;
	}
	if (arg != ATOM_COND_ALWAYS)
		SDEBUG(" taken: %s\n", str_yes_no(execute));
	SDEBUG(" target: 0x%04X\n", target);
	if (execute) {
		if (ctx->last_jump == (ctx->start + target)) {
			/* same target as last time: check how long we have
			 * been spinning on it */
			cjiffies = jiffies;
			if (time_after(cjiffies, ctx->last_jump_jiffies)) {
				cjiffies -= ctx->last_jump_jiffies;
				if ((jiffies_to_msecs(cjiffies) > ATOM_CMD_TIMEOUT_SEC*1000)) {
					DRM_ERROR("atombios stuck in loop for more than %dsecs aborting\n",
						  ATOM_CMD_TIMEOUT_SEC);
					ctx->abort = true;
				}
			} else {
				/* jiffies wrap around we will just wait a little longer */
				ctx->last_jump_jiffies = jiffies;
			}
		} else {
			/* new target: restart the watchdog clock */
			ctx->last_jump = ctx->start + target;
			ctx->last_jump_jiffies = jiffies;
		}
		*ptr = ctx->start + target;
	}
}
772
/* MASK: dst = (dst & mask) | src, with mask taken as an inline immediate. */
static void atom_op_mask(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	int dst_ptr = *ptr;	/* re-read position of the dst operand */
	uint32_t saved, dst, mask, src;

	SDEBUG(" dst: ");
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	mask = atom_get_src_direct(ctx, (attr >> 3) & 7, ptr);
	SDEBUG(" mask: 0x%08x", mask);
	SDEBUG(" src: ");
	src = atom_get_src(ctx, attr, ptr);
	SDEBUG(" dst: ");
	atom_put_dst(ctx, arg, attr, &dst_ptr, (dst & mask) | src, saved);
}
789
/* MOVE: dst = src.  For a full-dword store nothing of the old destination
 * survives, so the read is skipped entirely. */
static void atom_op_move(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	int dst_ptr = *ptr;	/* re-read position of the dst operand */
	uint32_t src, saved;

	if (((attr >> 3) & 7) == ATOM_SRC_DWORD) {
		atom_skip_dst(ctx, arg, attr, ptr);
		saved = 0xCDCDCDCD;
	} else {
		atom_get_dst(ctx, arg, attr, ptr, &saved, 0);
	}
	SDEBUG(" src: ");
	src = atom_get_src(ctx, attr, ptr);
	SDEBUG(" dst: ");
	atom_put_dst(ctx, arg, attr, &dst_ptr, src, saved);
}
806
/* MUL: 32-bit product into divmul[0] (MUL32 also fills the high word). */
static void atom_op_mul(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	uint32_t a, b;

	SDEBUG(" src1: ");
	a = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
	SDEBUG(" src2: ");
	b = atom_get_src(ctx, attr, ptr);
	ctx->ctx->divmul[0] = a * b;
}
817
/* MUL32: full 64-bit product split across divmul[0] (low) / divmul[1] (high). */
static void atom_op_mul32(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	uint32_t a, b;
	uint64_t product;

	SDEBUG(" src1: ");
	a = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
	SDEBUG(" src2: ");
	b = atom_get_src(ctx, attr, ptr);
	product = (uint64_t)a * (uint64_t)b;
	ctx->ctx->divmul[0] = lower_32_bits(product);
	ctx->ctx->divmul[1] = upper_32_bits(product);
}
831
/* NOP: consumes only its opcode byte. */
static void atom_op_nop(atom_exec_context *ctx, int *ptr, int arg)
{
	/* nothing */
}
836
/* OR: dst = dst | src (non-field bits preserved via @saved). */
static void atom_op_or(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	int dst_ptr = *ptr;	/* re-read position of the dst operand */
	uint32_t saved, dst, src;

	SDEBUG(" dst: ");
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	SDEBUG(" src: ");
	src = atom_get_src(ctx, attr, ptr);
	SDEBUG(" dst: ");
	atom_put_dst(ctx, arg, attr, &dst_ptr, dst | src, saved);
}
850
/* POST_CARD: no POST card hardware here; trace the byte instead. */
static void atom_op_postcard(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t code = U8(*ptr);

	(*ptr)++;
	SDEBUG("POST card output: 0x%02X\n", code);
}
856
/* REPEAT: not implemented; logs and continues. */
static void atom_op_repeat(atom_exec_context *ctx, int *ptr, int arg)
{
	pr_info("unimplemented!\n");
}
861
/* RESTORE_REG: not implemented; logs and continues. */
static void atom_op_restorereg(atom_exec_context *ctx, int *ptr, int arg)
{
	pr_info("unimplemented!\n");
}
866
/* SAVE_REG: not implemented; logs and continues. */
static void atom_op_savereg(atom_exec_context *ctx, int *ptr, int arg)
{
	pr_info("unimplemented!\n");
}
871
/* SET_DATA_BLOCK: select the base used by ATOM_ARG_ID lookups.
 * Block 0 resets the base; 255 points it at the current table; anything
 * else is looked up in the data-table directory. */
static void atom_op_setdatablock(atom_exec_context *ctx, int *ptr, int arg)
{
	int idx = U8((*ptr)++);

	SDEBUG(" block: %d\n", idx);
	if (idx == 0)
		ctx->ctx->data_block = 0;
	else if (idx == 255)
		ctx->ctx->data_block = ctx->start;
	else
		ctx->ctx->data_block = U16(ctx->ctx->data_table + 4 + 2 * idx);
	SDEBUG(" base: 0x%04X\n", ctx->ctx->data_block);
}
885
/* SET_FB_BASE: move the framebuffer-scratch window used by ATOM_ARG_FB. */
static void atom_op_setfbbase(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);

	SDEBUG(" fb_base: ");
	ctx->ctx->fb_base = atom_get_src(ctx, attr, ptr);
}
892
/* SET_*_PORT: select how subsequent ATOM_ARG_REG accesses are performed. */
static void atom_op_setport(atom_exec_context *ctx, int *ptr, int arg)
{
	int port;

	switch (arg) {
	case ATOM_PORT_ATI:
		port = U16(*ptr);
		if (port < ATOM_IO_NAMES_CNT)
			SDEBUG(" port: %d (%s)\n", port, atom_io_names[port]);
		else
			SDEBUG(" port: %d\n", port);
		/* port 0 means plain MMIO; anything else selects an
		 * indirect-IO program */
		ctx->ctx->io_mode = port ? (ATOM_IO_IIO | port) : ATOM_IO_MM;
		(*ptr) += 2;
		break;
	case ATOM_PORT_PCI:
		ctx->ctx->io_mode = ATOM_IO_PCI;
		(*ptr)++;
		break;
	case ATOM_PORT_SYSIO:
		ctx->ctx->io_mode = ATOM_IO_SYSIO;
		(*ptr)++;
		break;
	}
}
919
/* SET_REG_BLOCK: set the base added to every ATOM_ARG_REG index. */
static void atom_op_setregblock(atom_exec_context *ctx, int *ptr, int arg)
{
	ctx->ctx->reg_block = U16(*ptr);
	*ptr += 2;
	SDEBUG(" base: 0x%04X\n", ctx->ctx->reg_block);
}
926
/* SHIFT_LEFT: dst <<= imm8, operating on the default-aligned field. */
static void atom_op_shift_left(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	uint8_t shift;
	int dst_ptr = *ptr;	/* re-read position of the dst operand */
	uint32_t saved, dst;

	/* force the default destination sub-alignment for this encoding */
	attr = (attr & 0x38) | (atom_def_dst[(attr & 0x38) >> 3] << 6);
	SDEBUG(" dst: ");
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	shift = atom_get_src_direct(ctx, ATOM_SRC_BYTE0, ptr);
	SDEBUG(" shift: %d\n", shift);
	SDEBUG(" dst: ");
	atom_put_dst(ctx, arg, attr, &dst_ptr, dst << shift, saved);
}
942
/* SHIFT_RIGHT: dst >>= imm8, operating on the default-aligned field. */
static void atom_op_shift_right(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	uint8_t shift;
	int dst_ptr = *ptr;	/* re-read position of the dst operand */
	uint32_t saved, dst;

	/* force the default destination sub-alignment for this encoding */
	attr = (attr & 0x38) | (atom_def_dst[(attr & 0x38) >> 3] << 6);
	SDEBUG(" dst: ");
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	shift = atom_get_src_direct(ctx, ATOM_SRC_BYTE0, ptr);
	SDEBUG(" shift: %d\n", shift);
	SDEBUG(" dst: ");
	atom_put_dst(ctx, arg, attr, &dst_ptr, dst >> shift, saved);
}
958
/* SHL: shift the full raw 32-bit destination value (not just the field),
 * then re-extract the destination field from the shifted result. */
static void atom_op_shl(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	uint8_t shift;
	int dst_ptr = *ptr;	/* re-read position of the dst operand */
	uint32_t saved, full;
	uint32_t dst_align = atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3];

	SDEBUG(" dst: ");
	atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	/* the opcode operates on the full (unmasked) destination value */
	full = saved;
	shift = atom_get_src(ctx, attr, ptr);
	SDEBUG(" shift: %d\n", shift);
	full <<= shift;
	full = (full & atom_arg_mask[dst_align]) >> atom_arg_shift[dst_align];
	SDEBUG(" dst: ");
	atom_put_dst(ctx, arg, attr, &dst_ptr, full, saved);
}
977
/* SHR: shift the full raw 32-bit destination value (not just the field),
 * then re-extract the destination field from the shifted result. */
static void atom_op_shr(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	uint8_t shift;
	int dst_ptr = *ptr;	/* re-read position of the dst operand */
	uint32_t saved, full;
	uint32_t dst_align = atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3];

	SDEBUG(" dst: ");
	atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	/* the opcode operates on the full (unmasked) destination value */
	full = saved;
	shift = atom_get_src(ctx, attr, ptr);
	SDEBUG(" shift: %d\n", shift);
	full >>= shift;
	full = (full & atom_arg_mask[dst_align]) >> atom_arg_shift[dst_align];
	SDEBUG(" dst: ");
	atom_put_dst(ctx, arg, attr, &dst_ptr, full, saved);
}
996
/* SUB: dst = dst - src (non-field bits preserved via @saved). */
static void atom_op_sub(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	int dst_ptr = *ptr;	/* re-read position of the dst operand */
	uint32_t saved, dst, src;

	SDEBUG(" dst: ");
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	SDEBUG(" src: ");
	src = atom_get_src(ctx, attr, ptr);
	SDEBUG(" dst: ");
	atom_put_dst(ctx, arg, attr, &dst_ptr, dst - src, saved);
}
1010
/*
 * SWITCH: compare @src against a list of CASE entries; on the first match,
 * jump to that case's target (relative to the table start).  The list is
 * terminated by ATOM_CASE_END, which is skipped when no case matches.
 */
static void atom_op_switch(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	uint32_t src, val, target;
	SDEBUG(" switch: ");
	src = atom_get_src(ctx, attr, ptr);
	while (U16(*ptr) != ATOM_CASE_END)
		/* each case starts with a magic byte, then an immediate of
		 * the switch operand's alignment, then a 16-bit target */
		if (U8(*ptr) == ATOM_CASE_MAGIC) {
			(*ptr)++;
			SDEBUG(" case: ");
			val =
			    atom_get_src(ctx, (attr & 0x38) | ATOM_ARG_IMM,
					 ptr);
			target = U16(*ptr);
			if (val == src) {
				SDEBUG(" target: %04X\n", target);
				*ptr = ctx->start + target;
				return;
			}
			(*ptr) += 2;
		} else {
			pr_info("Bad case\n");
			return;
		}
	/* no case matched: step over the ATOM_CASE_END marker */
	(*ptr) += 2;
}
1037
/* TEST: set the equal flag when the two operands share no set bits. */
static void atom_op_test(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	uint32_t a, b;

	SDEBUG(" src1: ");
	a = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
	SDEBUG(" src2: ");
	b = atom_get_src(ctx, attr, ptr);
	ctx->ctx->cs_equal = ((a & b) == 0);
	SDEBUG(" result: %s\n", ctx->ctx->cs_equal ? "EQ" : "NE");
}
1049
/* XOR: dst = dst ^ src (non-field bits preserved via @saved). */
static void atom_op_xor(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);
	int dst_ptr = *ptr;	/* re-read position of the dst operand */
	uint32_t saved, dst, src;

	SDEBUG(" dst: ");
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	SDEBUG(" src: ");
	src = atom_get_src(ctx, attr, ptr);
	SDEBUG(" dst: ");
	atom_put_dst(ctx, arg, attr, &dst_ptr, dst ^ src, saved);
}
1063
/* DEBUG opcode: consume one byte and trace it. */
static void atom_op_debug(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t code = U8(*ptr);

	(*ptr)++;
	SDEBUG("DEBUG output: 0x%02X\n", code);
}
1069
/* PROCESS_DS: skip the inline data block (16-bit length plus payload). */
static void atom_op_processds(atom_exec_context *ctx, int *ptr, int arg)
{
	uint16_t size = U16(*ptr);

	*ptr += size + 2;
	SDEBUG("PROCESSDS output: 0x%02X\n", size);
}
1076
1077 static struct {
1078 void (*func) (atom_exec_context *, int *, int);
1079 int arg;
1080 } opcode_table[ATOM_OP_CNT] = {
1081 {
1082 NULL, 0}, {
1083 atom_op_move, ATOM_ARG_REG}, {
1084 atom_op_move, ATOM_ARG_PS}, {
1085 atom_op_move, ATOM_ARG_WS}, {
1086 atom_op_move, ATOM_ARG_FB}, {
1087 atom_op_move, ATOM_ARG_PLL}, {
1088 atom_op_move, ATOM_ARG_MC}, {
1089 atom_op_and, ATOM_ARG_REG}, {
1090 atom_op_and, ATOM_ARG_PS}, {
1091 atom_op_and, ATOM_ARG_WS}, {
1092 atom_op_and, ATOM_ARG_FB}, {
1093 atom_op_and, ATOM_ARG_PLL}, {
1094 atom_op_and, ATOM_ARG_MC}, {
1095 atom_op_or, ATOM_ARG_REG}, {
1096 atom_op_or, ATOM_ARG_PS}, {
1097 atom_op_or, ATOM_ARG_WS}, {
1098 atom_op_or, ATOM_ARG_FB}, {
1099 atom_op_or, ATOM_ARG_PLL}, {
1100 atom_op_or, ATOM_ARG_MC}, {
1101 atom_op_shift_left, ATOM_ARG_REG}, {
1102 atom_op_shift_left, ATOM_ARG_PS}, {
1103 atom_op_shift_left, ATOM_ARG_WS}, {
1104 atom_op_shift_left, ATOM_ARG_FB}, {
1105 atom_op_shift_left, ATOM_ARG_PLL}, {
1106 atom_op_shift_left, ATOM_ARG_MC}, {
1107 atom_op_shift_right, ATOM_ARG_REG}, {
1108 atom_op_shift_right, ATOM_ARG_PS}, {
1109 atom_op_shift_right, ATOM_ARG_WS}, {
1110 atom_op_shift_right, ATOM_ARG_FB}, {
1111 atom_op_shift_right, ATOM_ARG_PLL}, {
1112 atom_op_shift_right, ATOM_ARG_MC}, {
1113 atom_op_mul, ATOM_ARG_REG}, {
1114 atom_op_mul, ATOM_ARG_PS}, {
1115 atom_op_mul, ATOM_ARG_WS}, {
1116 atom_op_mul, ATOM_ARG_FB}, {
1117 atom_op_mul, ATOM_ARG_PLL}, {
1118 atom_op_mul, ATOM_ARG_MC}, {
1119 atom_op_div, ATOM_ARG_REG}, {
1120 atom_op_div, ATOM_ARG_PS}, {
1121 atom_op_div, ATOM_ARG_WS}, {
1122 atom_op_div, ATOM_ARG_FB}, {
1123 atom_op_div, ATOM_ARG_PLL}, {
1124 atom_op_div, ATOM_ARG_MC}, {
1125 atom_op_add, ATOM_ARG_REG}, {
1126 atom_op_add, ATOM_ARG_PS}, {
1127 atom_op_add, ATOM_ARG_WS}, {
1128 atom_op_add, ATOM_ARG_FB}, {
1129 atom_op_add, ATOM_ARG_PLL}, {
1130 atom_op_add, ATOM_ARG_MC}, {
1131 atom_op_sub, ATOM_ARG_REG}, {
1132 atom_op_sub, ATOM_ARG_PS}, {
1133 atom_op_sub, ATOM_ARG_WS}, {
1134 atom_op_sub, ATOM_ARG_FB}, {
1135 atom_op_sub, ATOM_ARG_PLL}, {
1136 atom_op_sub, ATOM_ARG_MC}, {
1137 atom_op_setport, ATOM_PORT_ATI}, {
1138 atom_op_setport, ATOM_PORT_PCI}, {
1139 atom_op_setport, ATOM_PORT_SYSIO}, {
1140 atom_op_setregblock, 0}, {
1141 atom_op_setfbbase, 0}, {
1142 atom_op_compare, ATOM_ARG_REG}, {
1143 atom_op_compare, ATOM_ARG_PS}, {
1144 atom_op_compare, ATOM_ARG_WS}, {
1145 atom_op_compare, ATOM_ARG_FB}, {
1146 atom_op_compare, ATOM_ARG_PLL}, {
1147 atom_op_compare, ATOM_ARG_MC}, {
1148 atom_op_switch, 0}, {
1149 atom_op_jump, ATOM_COND_ALWAYS}, {
1150 atom_op_jump, ATOM_COND_EQUAL}, {
1151 atom_op_jump, ATOM_COND_BELOW}, {
1152 atom_op_jump, ATOM_COND_ABOVE}, {
1153 atom_op_jump, ATOM_COND_BELOWOREQUAL}, {
1154 atom_op_jump, ATOM_COND_ABOVEOREQUAL}, {
1155 atom_op_jump, ATOM_COND_NOTEQUAL}, {
1156 atom_op_test, ATOM_ARG_REG}, {
1157 atom_op_test, ATOM_ARG_PS}, {
1158 atom_op_test, ATOM_ARG_WS}, {
1159 atom_op_test, ATOM_ARG_FB}, {
1160 atom_op_test, ATOM_ARG_PLL}, {
1161 atom_op_test, ATOM_ARG_MC}, {
1162 atom_op_delay, ATOM_UNIT_MILLISEC}, {
1163 atom_op_delay, ATOM_UNIT_MICROSEC}, {
1164 atom_op_calltable, 0}, {
1165 atom_op_repeat, 0}, {
1166 atom_op_clear, ATOM_ARG_REG}, {
1167 atom_op_clear, ATOM_ARG_PS}, {
1168 atom_op_clear, ATOM_ARG_WS}, {
1169 atom_op_clear, ATOM_ARG_FB}, {
1170 atom_op_clear, ATOM_ARG_PLL}, {
1171 atom_op_clear, ATOM_ARG_MC}, {
1172 atom_op_nop, 0}, {
1173 atom_op_eot, 0}, {
1174 atom_op_mask, ATOM_ARG_REG}, {
1175 atom_op_mask, ATOM_ARG_PS}, {
1176 atom_op_mask, ATOM_ARG_WS}, {
1177 atom_op_mask, ATOM_ARG_FB}, {
1178 atom_op_mask, ATOM_ARG_PLL}, {
1179 atom_op_mask, ATOM_ARG_MC}, {
1180 atom_op_postcard, 0}, {
1181 atom_op_beep, 0}, {
1182 atom_op_savereg, 0}, {
1183 atom_op_restorereg, 0}, {
1184 atom_op_setdatablock, 0}, {
1185 atom_op_xor, ATOM_ARG_REG}, {
1186 atom_op_xor, ATOM_ARG_PS}, {
1187 atom_op_xor, ATOM_ARG_WS}, {
1188 atom_op_xor, ATOM_ARG_FB}, {
1189 atom_op_xor, ATOM_ARG_PLL}, {
1190 atom_op_xor, ATOM_ARG_MC}, {
1191 atom_op_shl, ATOM_ARG_REG}, {
1192 atom_op_shl, ATOM_ARG_PS}, {
1193 atom_op_shl, ATOM_ARG_WS}, {
1194 atom_op_shl, ATOM_ARG_FB}, {
1195 atom_op_shl, ATOM_ARG_PLL}, {
1196 atom_op_shl, ATOM_ARG_MC}, {
1197 atom_op_shr, ATOM_ARG_REG}, {
1198 atom_op_shr, ATOM_ARG_PS}, {
1199 atom_op_shr, ATOM_ARG_WS}, {
1200 atom_op_shr, ATOM_ARG_FB}, {
1201 atom_op_shr, ATOM_ARG_PLL}, {
1202 atom_op_shr, ATOM_ARG_MC}, {
1203 atom_op_debug, 0}, {
1204 atom_op_processds, 0}, {
1205 atom_op_mul32, ATOM_ARG_PS}, {
1206 atom_op_mul32, ATOM_ARG_WS}, {
1207 atom_op_div32, ATOM_ARG_PS}, {
1208 atom_op_div32, ATOM_ARG_WS},
1209 };
1210
amdgpu_atom_execute_table_locked(struct atom_context * ctx,int index,uint32_t * params)1211 static int amdgpu_atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t *params)
1212 {
1213 int base = CU16(ctx->cmd_table + 4 + 2 * index);
1214 int len, ws, ps, ptr;
1215 unsigned char op;
1216 atom_exec_context ectx;
1217 int ret = 0;
1218
1219 if (!base)
1220 return -EINVAL;
1221
1222 len = CU16(base + ATOM_CT_SIZE_PTR);
1223 ws = CU8(base + ATOM_CT_WS_PTR);
1224 ps = CU8(base + ATOM_CT_PS_PTR) & ATOM_CT_PS_MASK;
1225 ptr = base + ATOM_CT_CODE_PTR;
1226
1227 SDEBUG(">> execute %04X (len %d, WS %d, PS %d)\n", base, len, ws, ps);
1228
1229 ectx.ctx = ctx;
1230 ectx.ps_shift = ps / 4;
1231 ectx.start = base;
1232 ectx.ps = params;
1233 ectx.abort = false;
1234 ectx.last_jump = 0;
1235 if (ws)
1236 ectx.ws = kcalloc(4, ws, GFP_KERNEL);
1237 else
1238 ectx.ws = NULL;
1239
1240 debug_depth++;
1241 while (1) {
1242 op = CU8(ptr++);
1243 if (op < ATOM_OP_NAMES_CNT)
1244 SDEBUG("%s @ 0x%04X\n", atom_op_names[op], ptr - 1);
1245 else
1246 SDEBUG("[%d] @ 0x%04X\n", op, ptr - 1);
1247 if (ectx.abort) {
1248 DRM_ERROR("atombios stuck executing %04X (len %d, WS %d, PS %d) @ 0x%04X\n",
1249 base, len, ws, ps, ptr - 1);
1250 ret = -EINVAL;
1251 goto free;
1252 }
1253
1254 if (op < ATOM_OP_CNT && op > 0)
1255 opcode_table[op].func(&ectx, &ptr,
1256 opcode_table[op].arg);
1257 else
1258 break;
1259
1260 if (op == ATOM_OP_EOT)
1261 break;
1262 }
1263 debug_depth--;
1264 SDEBUG("<<\n");
1265
1266 free:
1267 if (ws)
1268 kfree(ectx.ws);
1269 return ret;
1270 }
1271
amdgpu_atom_execute_table(struct atom_context * ctx,int index,uint32_t * params)1272 int amdgpu_atom_execute_table(struct atom_context *ctx, int index, uint32_t *params)
1273 {
1274 int r;
1275
1276 mutex_lock(&ctx->mutex);
1277 /* reset data block */
1278 ctx->data_block = 0;
1279 /* reset reg block */
1280 ctx->reg_block = 0;
1281 /* reset fb window */
1282 ctx->fb_base = 0;
1283 /* reset io mode */
1284 ctx->io_mode = ATOM_IO_MM;
1285 /* reset divmul */
1286 ctx->divmul[0] = 0;
1287 ctx->divmul[1] = 0;
1288 r = amdgpu_atom_execute_table_locked(ctx, index, params);
1289 mutex_unlock(&ctx->mutex);
1290 return r;
1291 }
1292
1293 static int atom_iio_len[] = { 1, 2, 3, 3, 3, 3, 4, 4, 4, 3 };
1294
atom_index_iio(struct atom_context * ctx,int base)1295 static void atom_index_iio(struct atom_context *ctx, int base)
1296 {
1297 ctx->iio = kzalloc(2 * 256, GFP_KERNEL);
1298 if (!ctx->iio)
1299 return;
1300 while (CU8(base) == ATOM_IIO_START) {
1301 ctx->iio[CU8(base + 1)] = base + 2;
1302 base += 2;
1303 while (CU8(base) != ATOM_IIO_END)
1304 base += atom_iio_len[CU8(base)];
1305 base += 3;
1306 }
1307 }
1308
atom_get_vbios_name(struct atom_context * ctx)1309 static void atom_get_vbios_name(struct atom_context *ctx)
1310 {
1311 unsigned char *p_rom;
1312 unsigned char str_num;
1313 unsigned short off_to_vbios_str;
1314 unsigned char *c_ptr;
1315 int name_size;
1316 int i;
1317
1318 const char *na = "--N/A--";
1319 char *back;
1320
1321 p_rom = ctx->bios;
1322
1323 str_num = *(p_rom + OFFSET_TO_GET_ATOMBIOS_NUMBER_OF_STRINGS);
1324 if (str_num != 0) {
1325 off_to_vbios_str =
1326 *(unsigned short *)(p_rom + OFFSET_TO_GET_ATOMBIOS_STRING_START);
1327
1328 c_ptr = (unsigned char *)(p_rom + off_to_vbios_str);
1329 } else {
1330 /* do not know where to find name */
1331 memcpy(ctx->name, na, 7);
1332 ctx->name[7] = 0;
1333 return;
1334 }
1335
1336 /*
1337 * skip the atombios strings, usually 4
1338 * 1st is P/N, 2nd is ASIC, 3rd is PCI type, 4th is Memory type
1339 */
1340 for (i = 0; i < str_num; i++) {
1341 while (*c_ptr != 0)
1342 c_ptr++;
1343 c_ptr++;
1344 }
1345
1346 /* skip the following 2 chars: 0x0D 0x0A */
1347 c_ptr += 2;
1348
1349 name_size = strnlen(c_ptr, STRLEN_LONG - 1);
1350 memcpy(ctx->name, c_ptr, name_size);
1351 back = ctx->name + name_size;
1352 while ((*--back) == ' ')
1353 ;
1354 *(back + 1) = '\0';
1355 }
1356
atom_get_vbios_date(struct atom_context * ctx)1357 static void atom_get_vbios_date(struct atom_context *ctx)
1358 {
1359 unsigned char *p_rom;
1360 unsigned char *date_in_rom;
1361
1362 p_rom = ctx->bios;
1363
1364 date_in_rom = p_rom + OFFSET_TO_VBIOS_DATE;
1365
1366 ctx->date[0] = '2';
1367 ctx->date[1] = '0';
1368 ctx->date[2] = date_in_rom[6];
1369 ctx->date[3] = date_in_rom[7];
1370 ctx->date[4] = '/';
1371 ctx->date[5] = date_in_rom[0];
1372 ctx->date[6] = date_in_rom[1];
1373 ctx->date[7] = '/';
1374 ctx->date[8] = date_in_rom[3];
1375 ctx->date[9] = date_in_rom[4];
1376 ctx->date[10] = ' ';
1377 ctx->date[11] = date_in_rom[9];
1378 ctx->date[12] = date_in_rom[10];
1379 ctx->date[13] = date_in_rom[11];
1380 ctx->date[14] = date_in_rom[12];
1381 ctx->date[15] = date_in_rom[13];
1382 ctx->date[16] = '\0';
1383 }
1384
atom_find_str_in_rom(struct atom_context * ctx,char * str,int start,int end,int maxlen)1385 static unsigned char *atom_find_str_in_rom(struct atom_context *ctx, char *str, int start,
1386 int end, int maxlen)
1387 {
1388 unsigned long str_off;
1389 unsigned char *p_rom;
1390 unsigned short str_len;
1391
1392 str_off = 0;
1393 str_len = strnlen(str, maxlen);
1394 p_rom = ctx->bios;
1395
1396 for (; start <= end; ++start) {
1397 for (str_off = 0; str_off < str_len; ++str_off) {
1398 if (str[str_off] != *(p_rom + start + str_off))
1399 break;
1400 }
1401
1402 if (str_off == str_len || str[str_off] == 0)
1403 return p_rom + start;
1404 }
1405 return NULL;
1406 }
1407
atom_get_vbios_pn(struct atom_context * ctx)1408 static void atom_get_vbios_pn(struct atom_context *ctx)
1409 {
1410 unsigned char *p_rom;
1411 unsigned short off_to_vbios_str;
1412 unsigned char *vbios_str;
1413 int count;
1414
1415 off_to_vbios_str = 0;
1416 p_rom = ctx->bios;
1417
1418 if (*(p_rom + OFFSET_TO_GET_ATOMBIOS_NUMBER_OF_STRINGS) != 0) {
1419 off_to_vbios_str =
1420 *(unsigned short *)(p_rom + OFFSET_TO_GET_ATOMBIOS_STRING_START);
1421
1422 vbios_str = (unsigned char *)(p_rom + off_to_vbios_str);
1423 } else {
1424 vbios_str = p_rom + OFFSET_TO_VBIOS_PART_NUMBER;
1425 }
1426
1427 if (*vbios_str == 0) {
1428 vbios_str = atom_find_str_in_rom(ctx, BIOS_ATOM_PREFIX, 3, 1024, 64);
1429 if (vbios_str == NULL)
1430 vbios_str += sizeof(BIOS_ATOM_PREFIX) - 1;
1431 }
1432 if (vbios_str != NULL && *vbios_str == 0)
1433 vbios_str++;
1434
1435 if (vbios_str != NULL) {
1436 count = 0;
1437 while ((count < BIOS_STRING_LENGTH) && vbios_str[count] >= ' ' &&
1438 vbios_str[count] <= 'z') {
1439 ctx->vbios_pn[count] = vbios_str[count];
1440 count++;
1441 }
1442
1443 ctx->vbios_pn[count] = 0;
1444 }
1445
1446 pr_info("ATOM BIOS: %s\n", ctx->vbios_pn);
1447 }
1448
/*
 * Extract the VBIOS version string into ctx->vbios_ver_str by locating
 * the BIOS_VERSION_PREFIX anchor in the image; the string is left empty
 * when the anchor is not found.
 */
static void atom_get_vbios_version(struct atom_context *ctx)
{
	unsigned char *vbios_ver;

	/* find anchor ATOMBIOSBK-AMD */
	vbios_ver = atom_find_str_in_rom(ctx, BIOS_VERSION_PREFIX, 3, 1024, 64);
	if (vbios_ver != NULL) {
		/* skip ATOMBIOSBK-AMD VER */
		vbios_ver += 18;
		/* NOTE(review): copies a fixed STRLEN_NORMAL bytes with no
		 * explicit NUL terminator — appears to rely on the zeroed
		 * context and/or the destination buffer being larger than
		 * STRLEN_NORMAL; confirm the vbios_ver_str size. */
		memcpy(ctx->vbios_ver_str, vbios_ver, STRLEN_NORMAL);
	} else {
		ctx->vbios_ver_str[0] = '\0';
	}
}
1463
/*
 * Validate an AtomBIOS image and build the interpreter context for it.
 *
 * @card: card callbacks (register/PLL/MC accessors) used by the scripts
 * @bios: pointer to the BIOS image
 *
 * Verifies the BIOS, ATI and ATOM magic signatures, locates the master
 * command and data tables, indexes the indirect-IO programs and caches
 * the firmware revision plus the name/PN/date/version strings.
 *
 * Returns the new context, or NULL on any validation or allocation
 * failure (nothing is leaked on the error paths).
 */
struct atom_context *amdgpu_atom_parse(struct card_info *card, void *bios)
{
	int base;
	struct atom_context *ctx =
	    kzalloc(sizeof(struct atom_context), GFP_KERNEL);
	struct _ATOM_ROM_HEADER *atom_rom_header;
	struct _ATOM_MASTER_DATA_TABLE *master_table;
	struct _ATOM_FIRMWARE_INFO *atom_fw_info;

	if (!ctx)
		return NULL;

	/* CU16/CSTR below read through ctx->bios, so set it first */
	ctx->card = card;
	ctx->bios = bios;

	if (CU16(0) != ATOM_BIOS_MAGIC) {
		pr_info("Invalid BIOS magic\n");
		kfree(ctx);
		return NULL;
	}
	if (strncmp
	    (CSTR(ATOM_ATI_MAGIC_PTR), ATOM_ATI_MAGIC,
	     strlen(ATOM_ATI_MAGIC))) {
		pr_info("Invalid ATI magic\n");
		kfree(ctx);
		return NULL;
	}

	base = CU16(ATOM_ROM_TABLE_PTR);
	if (strncmp
	    (CSTR(base + ATOM_ROM_MAGIC_PTR), ATOM_ROM_MAGIC,
	     strlen(ATOM_ROM_MAGIC))) {
		pr_info("Invalid ATOM magic\n");
		kfree(ctx);
		return NULL;
	}

	ctx->cmd_table = CU16(base + ATOM_ROM_CMD_PTR);
	ctx->data_table = CU16(base + ATOM_ROM_DATA_PTR);
	atom_index_iio(ctx, CU16(ctx->data_table + ATOM_DATA_IIO_PTR) + 4);
	/* atom_index_iio() leaves ctx->iio NULL on allocation failure */
	if (!ctx->iio) {
		amdgpu_atom_destroy(ctx);
		return NULL;
	}

	/* cache the firmware revision from the FirmwareInfo data table */
	atom_rom_header = (struct _ATOM_ROM_HEADER *)CSTR(base);
	if (atom_rom_header->usMasterDataTableOffset != 0) {
		master_table = (struct _ATOM_MASTER_DATA_TABLE *)
			CSTR(atom_rom_header->usMasterDataTableOffset);
		if (master_table->ListOfDataTables.FirmwareInfo != 0) {
			atom_fw_info = (struct _ATOM_FIRMWARE_INFO *)
				CSTR(master_table->ListOfDataTables.FirmwareInfo);
			ctx->version = atom_fw_info->ulFirmwareRevision;
		}
	}

	/* cache the human-readable identification strings */
	atom_get_vbios_name(ctx);
	atom_get_vbios_pn(ctx);
	atom_get_vbios_date(ctx);
	atom_get_vbios_version(ctx);

	return ctx;
}
1527
amdgpu_atom_asic_init(struct atom_context * ctx)1528 int amdgpu_atom_asic_init(struct atom_context *ctx)
1529 {
1530 int hwi = CU16(ctx->data_table + ATOM_DATA_FWI_PTR);
1531 uint32_t ps[16];
1532 int ret;
1533
1534 memset(ps, 0, 64);
1535
1536 ps[0] = cpu_to_le32(CU32(hwi + ATOM_FWI_DEFSCLK_PTR));
1537 ps[1] = cpu_to_le32(CU32(hwi + ATOM_FWI_DEFMCLK_PTR));
1538 if (!ps[0] || !ps[1])
1539 return 1;
1540
1541 if (!CU16(ctx->cmd_table + 4 + 2 * ATOM_CMD_INIT))
1542 return 1;
1543 ret = amdgpu_atom_execute_table(ctx, ATOM_CMD_INIT, ps);
1544 if (ret)
1545 return ret;
1546
1547 memset(ps, 0, 64);
1548
1549 return ret;
1550 }
1551
amdgpu_atom_destroy(struct atom_context * ctx)1552 void amdgpu_atom_destroy(struct atom_context *ctx)
1553 {
1554 kfree(ctx->iio);
1555 kfree(ctx);
1556 }
1557
amdgpu_atom_parse_data_header(struct atom_context * ctx,int index,uint16_t * size,uint8_t * frev,uint8_t * crev,uint16_t * data_start)1558 bool amdgpu_atom_parse_data_header(struct atom_context *ctx, int index,
1559 uint16_t *size, uint8_t *frev, uint8_t *crev,
1560 uint16_t *data_start)
1561 {
1562 int offset = index * 2 + 4;
1563 int idx = CU16(ctx->data_table + offset);
1564 u16 *mdt = (u16 *)(ctx->bios + ctx->data_table + 4);
1565
1566 if (!mdt[index])
1567 return false;
1568
1569 if (size)
1570 *size = CU16(idx);
1571 if (frev)
1572 *frev = CU8(idx + 2);
1573 if (crev)
1574 *crev = CU8(idx + 3);
1575 *data_start = idx;
1576 return true;
1577 }
1578
amdgpu_atom_parse_cmd_header(struct atom_context * ctx,int index,uint8_t * frev,uint8_t * crev)1579 bool amdgpu_atom_parse_cmd_header(struct atom_context *ctx, int index, uint8_t *frev,
1580 uint8_t *crev)
1581 {
1582 int offset = index * 2 + 4;
1583 int idx = CU16(ctx->cmd_table + offset);
1584 u16 *mct = (u16 *)(ctx->bios + ctx->cmd_table + 4);
1585
1586 if (!mct[index])
1587 return false;
1588
1589 if (frev)
1590 *frev = CU8(idx + 2);
1591 if (crev)
1592 *crev = CU8(idx + 3);
1593 return true;
1594 }
1595
1596