/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Author: Stanislaw Skowronek
 */
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <asm/unaligned.h>

#include "atom.h"
#include "atom-names.h"
#include "atom-bits.h"
#include "radeon.h"
#define ATOM_COND_ABOVE         0
#define ATOM_COND_ABOVEOREQUAL  1
#define ATOM_COND_ALWAYS        2
#define ATOM_COND_BELOW         3
#define ATOM_COND_BELOWOREQUAL  4
#define ATOM_COND_EQUAL         5
#define ATOM_COND_NOTEQUAL      6

#define ATOM_PORT_ATI           0
#define ATOM_PORT_PCI           1
#define ATOM_PORT_SYSIO         2

#define ATOM_UNIT_MICROSEC      0
#define ATOM_UNIT_MILLISEC      1
typedef struct {
        struct atom_context *ctx;
        uint32_t *ps, *ws;
        int ps_shift;
        uint16_t start;
        unsigned last_jump;
        unsigned long last_jump_jiffies;
        bool abort;
} atom_exec_context;

int atom_debug = 0;
static int atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t *params);
int atom_execute_table(struct atom_context *ctx, int index, uint32_t *params);
static uint32_t atom_arg_mask[8] = {
        0xFFFFFFFF, 0xFFFF, 0xFFFF00, 0xFFFF0000, 0xFF, 0xFF00, 0xFF0000, 0xFF000000
};
static int atom_arg_shift[8] = { 0, 0, 8, 16, 0, 8, 16, 24 };
static int atom_dst_to_src[8][4] = {
        /* translate destination alignment field to the source alignment encoding */
        {0, 0, 0, 0},
        {1, 2, 3, 0},
        {1, 2, 3, 0},
        {1, 2, 3, 0},
        {4, 5, 6, 7},
        {4, 5, 6, 7},
        {4, 5, 6, 7},
        {4, 5, 6, 7},
};
static int atom_def_dst[8] = { 0, 0, 1, 2, 0, 1, 2, 3 };
#ifdef ATOM_DEBUG
static int debug_depth = 0;

static void debug_print_spaces(int n)
{
        while (n--)
                printk("   ");
}

#define DEBUG(...) do if (atom_debug) { printk(KERN_DEBUG __VA_ARGS__); } while (0)
#define SDEBUG(...) do if (atom_debug) { printk(KERN_DEBUG); debug_print_spaces(debug_depth); printk(__VA_ARGS__); } while (0)
#else
#define DEBUG(...) do { } while (0)
#define SDEBUG(...) do { } while (0)
#endif
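/*
 * Execute one indirect IO (IIO) program found in the BIOS image.  'base'
 * points at the first IIO opcode; 'index' and 'data' are the values supplied
 * by the caller and are merged into a temporary register as the program
 * directs.  The accumulated value is returned when ATOM_IIO_END is reached.
 */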
static uint32_t atom_iio_execute(struct atom_context *ctx, int base,
                                 uint32_t index, uint32_t data)
{
        struct radeon_device *rdev = ctx->card->dev->dev_private;
        uint32_t temp = 0xCDCDCDCD;

                temp = ctx->card->ioreg_read(ctx->card, CU16(base + 1));
                if (rdev->family == CHIP_RV515)
                        (void)ctx->card->ioreg_read(ctx->card, CU16(base + 1));
                ctx->card->ioreg_write(ctx->card, CU16(base + 1), temp);
                    ~((0xFFFFFFFF >> (32 - CU8(base + 1))) <<
                    (0xFFFFFFFF >> (32 - CU8(base + 1))) << CU8(base +
        case ATOM_IIO_MOVE_INDEX:
                    ~((0xFFFFFFFF >> (32 - CU8(base + 1))) <<
                    ((index >> CU8(base + 2)) &
                     (0xFFFFFFFF >> (32 - CU8(base + 1)))) << CU8(base +
        case ATOM_IIO_MOVE_DATA:
                    ~((0xFFFFFFFF >> (32 - CU8(base + 1))) <<
                    ((data >> CU8(base + 2)) &
                     (0xFFFFFFFF >> (32 - CU8(base + 1)))) << CU8(base +
        case ATOM_IIO_MOVE_ATTR:
                    ~((0xFFFFFFFF >> (32 - CU8(base + 1))) <<
                     io_attr >> CU8(base + 2)) & (0xFFFFFFFF >> (32 -
                printk(KERN_INFO "Unknown IIO opcode.\n");
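/*
 * Fetch a source operand for the current opcode.  'attr' encodes the operand
 * type (register, parameter space, work space, frame buffer, data table,
 * immediate, PLL or MC register) and its alignment within the 32-bit word;
 * 'ptr' is advanced past the operand bytes.  When 'saved' is non-NULL the
 * unmodified destination value is stored there for a later read-modify-write.
 */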
static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr,
                                 int *ptr, uint32_t *saved, int print)
{
        uint32_t idx, val = 0xCDCDCDCD, align, arg;
        struct atom_context *gctx = ctx->ctx;

        align = (attr >> 3) & 7;
                DEBUG("REG[0x%04X]", idx);
                idx += gctx->reg_block;
                switch (gctx->io_mode) {
                        val = gctx->card->reg_read(gctx->card, idx);
                               "PCI registers are not implemented.\n");
                               "SYSIO registers are not implemented.\n");
                        if (!(gctx->io_mode & 0x80)) {
                                printk(KERN_INFO "Bad IO mode.\n");
                        if (!gctx->iio[gctx->io_mode & 0x7F]) {
                                       "Undefined indirect IO read method %d.\n",
                                       gctx->io_mode & 0x7F);
                            atom_iio_execute(gctx,
                                             gctx->iio[gctx->io_mode & 0x7F],
                /* get_unaligned_le32 avoids unaligned accesses from atombios
                 * tables, noticed on a DEC Alpha. */
                val = get_unaligned_le32((u32 *)&ctx->ps[idx]);
                        DEBUG("PS[0x%02X,0x%04X]", idx, val);
                        DEBUG("WS[0x%02X]", idx);
                case ATOM_WS_QUOTIENT:
                        val = gctx->divmul[0];
                case ATOM_WS_REMAINDER:
                        val = gctx->divmul[1];
                case ATOM_WS_DATAPTR:
                        val = gctx->data_block;
                case ATOM_WS_OR_MASK:
                        val = 1 << gctx->shift;
                case ATOM_WS_AND_MASK:
                        val = ~(1 << gctx->shift);
                case ATOM_WS_FB_WINDOW:
                case ATOM_WS_ATTRIBUTES:
                        val = gctx->reg_block;
                if (gctx->data_block)
                        DEBUG("ID[0x%04X+%04X]", idx, gctx->data_block);
                        DEBUG("ID[0x%04X]", idx);
                val = U32(idx + gctx->data_block);
                if ((gctx->fb_base + (idx * 4)) > gctx->scratch_size_bytes) {
                        DRM_ERROR("ATOM: fb read beyond scratch region: %d vs. %d\n",
                                  gctx->fb_base + (idx * 4), gctx->scratch_size_bytes);
                val = gctx->scratch[(gctx->fb_base / 4) + idx];
                        DEBUG("FB[0x%02X]", idx);
                        DEBUG("IMM 0x%08X\n", val);
                case ATOM_SRC_WORD16:
                        DEBUG("IMM 0x%04X\n", val);
                case ATOM_SRC_BYTE16:
                case ATOM_SRC_BYTE24:
                        DEBUG("IMM 0x%02X\n", val);
                DEBUG("PLL[0x%02X]", idx);
                val = gctx->card->pll_read(gctx->card, idx);
                DEBUG("MC[0x%02X]", idx);
                val = gctx->card->mc_read(gctx->card, idx);
        val &= atom_arg_mask[align];
        val >>= atom_arg_shift[align];
                DEBUG(".[31:0] -> 0x%08X\n", val);
                DEBUG(".[15:0] -> 0x%04X\n", val);
                DEBUG(".[23:8] -> 0x%04X\n", val);
        case ATOM_SRC_WORD16:
                DEBUG(".[31:16] -> 0x%04X\n", val);
                DEBUG(".[7:0] -> 0x%02X\n", val);
                DEBUG(".[15:8] -> 0x%02X\n", val);
        case ATOM_SRC_BYTE16:
                DEBUG(".[23:16] -> 0x%02X\n", val);
        case ATOM_SRC_BYTE24:
                DEBUG(".[31:24] -> 0x%02X\n", val);
static void atom_skip_src_int(atom_exec_context *ctx, uint8_t attr, int *ptr)
{
        uint32_t align = (attr >> 3) & 7, arg = attr & 7;
        case ATOM_SRC_WORD16:
        case ATOM_SRC_BYTE16:
        case ATOM_SRC_BYTE24:
static uint32_t atom_get_src(atom_exec_context *ctx, uint8_t attr, int *ptr)
{
        return atom_get_src_int(ctx, attr, ptr, NULL, 1);
}

static uint32_t atom_get_src_direct(atom_exec_context *ctx, uint8_t align, int *ptr)
{
        uint32_t val = 0xCDCDCDCD;
        case ATOM_SRC_WORD16:
        case ATOM_SRC_BYTE16:
        case ATOM_SRC_BYTE24:
static uint32_t atom_get_dst(atom_exec_context *ctx, int arg, uint8_t attr,
                             int *ptr, uint32_t *saved, int print)
{
        return atom_get_src_int(ctx,
                                arg | atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3] << 3,
                                ptr, saved, print);
}

static void atom_skip_dst(atom_exec_context *ctx, int arg, uint8_t attr, int *ptr)
{
        atom_skip_src_int(ctx,
                          arg | atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3] << 3,
                          ptr);
}
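/*
 * Write 'val' back to the destination operand described by 'attr', preserving
 * the bits outside the selected byte/word lane ('saved' holds the original
 * value captured by atom_get_dst()).  The destination types mirror the source
 * types handled in atom_get_src_int().
 */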
static void atom_put_dst(atom_exec_context *ctx, int arg, uint8_t attr,
                         int *ptr, uint32_t val, uint32_t saved)
{
        uint32_t align = atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3];
        uint32_t old_val = val, idx;
        struct atom_context *gctx = ctx->ctx;

        old_val &= atom_arg_mask[align] >> atom_arg_shift[align];
        val <<= atom_arg_shift[align];
        val &= atom_arg_mask[align];
        saved &= ~atom_arg_mask[align];
        val |= saved;
                DEBUG("REG[0x%04X]", idx);
                idx += gctx->reg_block;
                switch (gctx->io_mode) {
                        gctx->card->reg_write(gctx->card, idx,
                        gctx->card->reg_write(gctx->card, idx, val);
                               "PCI registers are not implemented.\n");
                               "SYSIO registers are not implemented.\n");
                        if (!(gctx->io_mode & 0x80)) {
                                printk(KERN_INFO "Bad IO mode.\n");
                        if (!gctx->iio[gctx->io_mode & 0x7F]) {
                                       "Undefined indirect IO write method %d.\n",
                                       gctx->io_mode & 0x7F);
                        atom_iio_execute(gctx, gctx->iio[gctx->io_mode & 0x7F],
                DEBUG("PS[0x%02X]", idx);
                ctx->ps[idx] = cpu_to_le32(val);
                        DEBUG("WS[0x%02X]", idx);
                case ATOM_WS_QUOTIENT:
                        gctx->divmul[0] = val;
                case ATOM_WS_REMAINDER:
                        gctx->divmul[1] = val;
                case ATOM_WS_DATAPTR:
                        gctx->data_block = val;
                case ATOM_WS_OR_MASK:
                case ATOM_WS_AND_MASK:
                case ATOM_WS_FB_WINDOW:
                case ATOM_WS_ATTRIBUTES:
                        gctx->reg_block = val;
                if ((gctx->fb_base + (idx * 4)) > gctx->scratch_size_bytes) {
                        DRM_ERROR("ATOM: fb write beyond scratch region: %d vs. %d\n",
                                  gctx->fb_base + (idx * 4), gctx->scratch_size_bytes);
                gctx->scratch[(gctx->fb_base / 4) + idx] = val;
                DEBUG("FB[0x%02X]", idx);
                DEBUG("PLL[0x%02X]", idx);
                gctx->card->pll_write(gctx->card, idx, val);
                DEBUG("MC[0x%02X]", idx);
                gctx->card->mc_write(gctx->card, idx, val);
        DEBUG(".[31:0] <- 0x%08X\n", old_val);
        DEBUG(".[15:0] <- 0x%04X\n", old_val);
        DEBUG(".[23:8] <- 0x%04X\n", old_val);
        case ATOM_SRC_WORD16:
                DEBUG(".[31:16] <- 0x%04X\n", old_val);
        DEBUG(".[7:0] <- 0x%02X\n", old_val);
        DEBUG(".[15:8] <- 0x%02X\n", old_val);
        case ATOM_SRC_BYTE16:
                DEBUG(".[23:16] <- 0x%02X\n", old_val);
        case ATOM_SRC_BYTE24:
                DEBUG(".[31:24] <- 0x%02X\n", old_val);
static void atom_op_add(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t dst, src, saved;
        int dptr = *ptr;

        dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
        src = atom_get_src(ctx, attr, ptr);
        dst += src;
        atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
}
static void atom_op_and(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t dst, src, saved;
        int dptr = *ptr;

        dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
        src = atom_get_src(ctx, attr, ptr);
        dst &= src;
        atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
}
static void atom_op_beep(atom_exec_context *ctx, int *ptr, int arg)
{
        printk("ATOM BIOS beeped!\n");
}
static void atom_op_calltable(atom_exec_context *ctx, int *ptr, int arg)
{
        int idx = U8((*ptr)++);
        int r = 0;

        if (idx < ATOM_TABLE_NAMES_CNT)
                SDEBUG(" table: %d (%s)\n", idx, atom_table_names[idx]);
        else
                SDEBUG(" table: %d\n", idx);
        if (U16(ctx->ctx->cmd_table + 4 + 2 * idx))
                r = atom_execute_table_locked(ctx->ctx, idx, ctx->ps + ctx->ps_shift);
        if (r)
                ctx->abort = true;
}
static void atom_op_clear(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t saved;
        int dptr = *ptr;

        attr &= 0x38;
        attr |= atom_def_dst[attr >> 3] << 6;
        atom_get_dst(ctx, arg, attr, ptr, &saved, 0);
        atom_put_dst(ctx, arg, attr, &dptr, 0, saved);
}
static void atom_op_compare(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t dst, src;

        dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
        src = atom_get_src(ctx, attr, ptr);
        ctx->ctx->cs_equal = (dst == src);
        ctx->ctx->cs_above = (dst > src);
        SDEBUG(" result: %s %s\n", ctx->ctx->cs_equal ? "EQ" : "NE",
               ctx->ctx->cs_above ? "GT" : "LE");
}
static void atom_op_delay(atom_exec_context *ctx, int *ptr, int arg)
{
        unsigned count = U8((*ptr)++);

        SDEBUG(" count: %d\n", count);
        if (arg == ATOM_UNIT_MICROSEC)
                udelay(count);
        else if (!drm_can_sleep())
                mdelay(count);
        else
                msleep(count);
}
static void atom_op_div(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t dst, src;

        dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
        src = atom_get_src(ctx, attr, ptr);
        if (src != 0) {
                ctx->ctx->divmul[0] = dst / src;
                ctx->ctx->divmul[1] = dst % src;
        } else {
                ctx->ctx->divmul[0] = 0;
                ctx->ctx->divmul[1] = 0;
        }
}
static void atom_op_eot(atom_exec_context *ctx, int *ptr, int arg)
{
        /* functionally, a nop */
}
static void atom_op_jump(atom_exec_context *ctx, int *ptr, int arg)
{
        int execute = 0, target = U16(*ptr);
        unsigned long cjiffies;

        (*ptr) += 2;
        switch (arg) {
        case ATOM_COND_ABOVE:
                execute = ctx->ctx->cs_above;
                break;
        case ATOM_COND_ABOVEOREQUAL:
                execute = ctx->ctx->cs_above || ctx->ctx->cs_equal;
                break;
        case ATOM_COND_ALWAYS:
                execute = 1;
                break;
        case ATOM_COND_BELOW:
                execute = !(ctx->ctx->cs_above || ctx->ctx->cs_equal);
                break;
        case ATOM_COND_BELOWOREQUAL:
                execute = !ctx->ctx->cs_above;
                break;
        case ATOM_COND_EQUAL:
                execute = ctx->ctx->cs_equal;
                break;
        case ATOM_COND_NOTEQUAL:
                execute = !ctx->ctx->cs_equal;
                break;
        }
        if (arg != ATOM_COND_ALWAYS)
                SDEBUG(" taken: %s\n", execute ? "yes" : "no");
        SDEBUG(" target: 0x%04X\n", target);
        if (execute) {
                if (ctx->last_jump == (ctx->start + target)) {
                        cjiffies = jiffies;
                        if (time_after(cjiffies, ctx->last_jump_jiffies)) {
                                cjiffies -= ctx->last_jump_jiffies;
                                if (jiffies_to_msecs(cjiffies) > 5000) {
                                        DRM_ERROR("atombios stuck in loop for more than 5 seconds, aborting\n");
                                        ctx->abort = true;
                                }
                        } else {
                                /* jiffies wrapped around; just wait a little longer */
                                ctx->last_jump_jiffies = jiffies;
                        }
                } else {
                        ctx->last_jump = ctx->start + target;
                        ctx->last_jump_jiffies = jiffies;
                }
                *ptr = ctx->start + target;
        }
}
static void atom_op_mask(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t dst, mask, src, saved;
        int dptr = *ptr;

        dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
        mask = atom_get_src_direct(ctx, ((attr >> 3) & 7), ptr);
        SDEBUG(" mask: 0x%08x", mask);
        src = atom_get_src(ctx, attr, ptr);
        dst &= mask;
        dst |= src;
        atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
}
static void atom_op_move(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t src, saved;
        int dptr = *ptr;

        if (((attr >> 3) & 7) != ATOM_SRC_DWORD)
                atom_get_dst(ctx, arg, attr, ptr, &saved, 0);
        else {
                atom_skip_dst(ctx, arg, attr, ptr);
                saved = 0xCDCDCDCD;
        }
        src = atom_get_src(ctx, attr, ptr);
        atom_put_dst(ctx, arg, attr, &dptr, src, saved);
}
static void atom_op_mul(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t dst, src;

        dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
        src = atom_get_src(ctx, attr, ptr);
        ctx->ctx->divmul[0] = dst * src;
}
static void atom_op_nop(atom_exec_context *ctx, int *ptr, int arg)
{
        /* nothing to do */
}
static void atom_op_or(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t dst, src, saved;
        int dptr = *ptr;

        dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
        src = atom_get_src(ctx, attr, ptr);
        dst |= src;
        atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
}
static void atom_op_postcard(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t val = U8((*ptr)++);

        SDEBUG("POST card output: 0x%02X\n", val);
}
static void atom_op_repeat(atom_exec_context *ctx, int *ptr, int arg)
{
        printk(KERN_INFO "unimplemented!\n");
}

static void atom_op_restorereg(atom_exec_context *ctx, int *ptr, int arg)
{
        printk(KERN_INFO "unimplemented!\n");
}

static void atom_op_savereg(atom_exec_context *ctx, int *ptr, int arg)
{
        printk(KERN_INFO "unimplemented!\n");
}
static void atom_op_setdatablock(atom_exec_context *ctx, int *ptr, int arg)
{
        int idx = U8((*ptr)++);

        SDEBUG(" block: %d\n", idx);
        if (!idx)
                ctx->ctx->data_block = 0;
        else if (idx == 255)
                ctx->ctx->data_block = ctx->start;
        else
                ctx->ctx->data_block = U16(ctx->ctx->data_table + 4 + 2 * idx);
        SDEBUG(" base: 0x%04X\n", ctx->ctx->data_block);
}
static void atom_op_setfbbase(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);

        SDEBUG(" fb_base: ");
        ctx->ctx->fb_base = atom_get_src(ctx, attr, ptr);
}
static void atom_op_setport(atom_exec_context *ctx, int *ptr, int arg)
{
        int port;

        switch (arg) {
        case ATOM_PORT_ATI:
                port = U16(*ptr);
                if (port < ATOM_IO_NAMES_CNT)
                        SDEBUG(" port: %d (%s)\n", port, atom_io_names[port]);
                else
                        SDEBUG(" port: %d\n", port);
                if (!port)
                        ctx->ctx->io_mode = ATOM_IO_MM;
                else
                        ctx->ctx->io_mode = ATOM_IO_IIO | port;
                (*ptr) += 2;
                break;
        case ATOM_PORT_PCI:
                ctx->ctx->io_mode = ATOM_IO_PCI;
                (*ptr)++;
                break;
        case ATOM_PORT_SYSIO:
                ctx->ctx->io_mode = ATOM_IO_SYSIO;
                (*ptr)++;
                break;
        }
}
static void atom_op_setregblock(atom_exec_context *ctx, int *ptr, int arg)
{
        ctx->ctx->reg_block = U16(*ptr);
        (*ptr) += 2;
        SDEBUG(" base: 0x%04X\n", ctx->ctx->reg_block);
}
static void atom_op_shift_left(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++), shift;
        uint32_t saved, dst;
        int dptr = *ptr;

        attr &= 0x38;
        attr |= atom_def_dst[attr >> 3] << 6;
        dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
        shift = atom_get_src_direct(ctx, ATOM_SRC_BYTE0, ptr);
        SDEBUG(" shift: %d\n", shift);
        dst <<= shift;
        atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
}
static void atom_op_shift_right(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++), shift;
        uint32_t saved, dst;
        int dptr = *ptr;

        attr &= 0x38;
        attr |= atom_def_dst[attr >> 3] << 6;
        dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
        shift = atom_get_src_direct(ctx, ATOM_SRC_BYTE0, ptr);
        SDEBUG(" shift: %d\n", shift);
        dst >>= shift;
        atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
}
static void atom_op_shl(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++), shift;
        uint32_t saved, dst;
        int dptr = *ptr;
        uint32_t dst_align = atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3];

        dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
        /* the op needs the full dst value */
        dst = saved;
        shift = atom_get_src(ctx, attr, ptr);
        SDEBUG(" shift: %d\n", shift);
        dst <<= shift;
        dst &= atom_arg_mask[dst_align];
        dst >>= atom_arg_shift[dst_align];
        atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
}
static void atom_op_shr(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++), shift;
        uint32_t saved, dst;
        int dptr = *ptr;
        uint32_t dst_align = atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3];

        dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
        /* the op needs the full dst value */
        dst = saved;
        shift = atom_get_src(ctx, attr, ptr);
        SDEBUG(" shift: %d\n", shift);
        dst >>= shift;
        dst &= atom_arg_mask[dst_align];
        dst >>= atom_arg_shift[dst_align];
        atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
}
static void atom_op_sub(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t dst, src, saved;
        int dptr = *ptr;

        dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
        src = atom_get_src(ctx, attr, ptr);
        dst -= src;
        atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
}
static void atom_op_switch(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t src, val, target;

        src = atom_get_src(ctx, attr, ptr);
        while (U16(*ptr) != ATOM_CASE_END)
                if (U8(*ptr) == ATOM_CASE_MAGIC) {
                        (*ptr)++;
                        val = atom_get_src(ctx, (attr & 0x38) | ATOM_ARG_IMM, ptr);
                        target = U16(*ptr);
                        if (val == src) {
                                SDEBUG(" target: %04X\n", target);
                                *ptr = ctx->start + target;
                                return;
                        }
                        (*ptr) += 2;
                } else {
                        printk(KERN_INFO "Bad case.\n");
                        return;
                }
        (*ptr) += 2;
}
static void atom_op_test(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t dst, src;

        dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
        src = atom_get_src(ctx, attr, ptr);
        ctx->ctx->cs_equal = ((dst & src) == 0);
        SDEBUG(" result: %s\n", ctx->ctx->cs_equal ? "EQ" : "NE");
}
static void atom_op_xor(atom_exec_context *ctx, int *ptr, int arg)
{
        uint8_t attr = U8((*ptr)++);
        uint32_t dst, src, saved;
        int dptr = *ptr;

        dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
        src = atom_get_src(ctx, attr, ptr);
        dst ^= src;
        atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
}
static void atom_op_debug(atom_exec_context *ctx, int *ptr, int arg)
{
        printk(KERN_INFO "unimplemented!\n");
}
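/*
 * Dispatch table indexed by ATOM opcode.  Each entry pairs a handler with the
 * operand-type argument it should be invoked with; entry 0 is unused because
 * opcode 0 is invalid.
 */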
static struct {
        void (*func)(atom_exec_context *, int *, int);
        int arg;
} opcode_table[ATOM_OP_CNT] = {
        { NULL, 0 },
        { atom_op_move, ATOM_ARG_REG },
        { atom_op_move, ATOM_ARG_PS },
        { atom_op_move, ATOM_ARG_WS },
        { atom_op_move, ATOM_ARG_FB },
        { atom_op_move, ATOM_ARG_PLL },
        { atom_op_move, ATOM_ARG_MC },
        { atom_op_and, ATOM_ARG_REG },
        { atom_op_and, ATOM_ARG_PS },
        { atom_op_and, ATOM_ARG_WS },
        { atom_op_and, ATOM_ARG_FB },
        { atom_op_and, ATOM_ARG_PLL },
        { atom_op_and, ATOM_ARG_MC },
        { atom_op_or, ATOM_ARG_REG },
        { atom_op_or, ATOM_ARG_PS },
        { atom_op_or, ATOM_ARG_WS },
        { atom_op_or, ATOM_ARG_FB },
        { atom_op_or, ATOM_ARG_PLL },
        { atom_op_or, ATOM_ARG_MC },
        { atom_op_shift_left, ATOM_ARG_REG },
        { atom_op_shift_left, ATOM_ARG_PS },
        { atom_op_shift_left, ATOM_ARG_WS },
        { atom_op_shift_left, ATOM_ARG_FB },
        { atom_op_shift_left, ATOM_ARG_PLL },
        { atom_op_shift_left, ATOM_ARG_MC },
        { atom_op_shift_right, ATOM_ARG_REG },
        { atom_op_shift_right, ATOM_ARG_PS },
        { atom_op_shift_right, ATOM_ARG_WS },
        { atom_op_shift_right, ATOM_ARG_FB },
        { atom_op_shift_right, ATOM_ARG_PLL },
        { atom_op_shift_right, ATOM_ARG_MC },
        { atom_op_mul, ATOM_ARG_REG },
        { atom_op_mul, ATOM_ARG_PS },
        { atom_op_mul, ATOM_ARG_WS },
        { atom_op_mul, ATOM_ARG_FB },
        { atom_op_mul, ATOM_ARG_PLL },
        { atom_op_mul, ATOM_ARG_MC },
        { atom_op_div, ATOM_ARG_REG },
        { atom_op_div, ATOM_ARG_PS },
        { atom_op_div, ATOM_ARG_WS },
        { atom_op_div, ATOM_ARG_FB },
        { atom_op_div, ATOM_ARG_PLL },
        { atom_op_div, ATOM_ARG_MC },
        { atom_op_add, ATOM_ARG_REG },
        { atom_op_add, ATOM_ARG_PS },
        { atom_op_add, ATOM_ARG_WS },
        { atom_op_add, ATOM_ARG_FB },
        { atom_op_add, ATOM_ARG_PLL },
        { atom_op_add, ATOM_ARG_MC },
        { atom_op_sub, ATOM_ARG_REG },
        { atom_op_sub, ATOM_ARG_PS },
        { atom_op_sub, ATOM_ARG_WS },
        { atom_op_sub, ATOM_ARG_FB },
        { atom_op_sub, ATOM_ARG_PLL },
        { atom_op_sub, ATOM_ARG_MC },
        { atom_op_setport, ATOM_PORT_ATI },
        { atom_op_setport, ATOM_PORT_PCI },
        { atom_op_setport, ATOM_PORT_SYSIO },
        { atom_op_setregblock, 0 },
        { atom_op_setfbbase, 0 },
        { atom_op_compare, ATOM_ARG_REG },
        { atom_op_compare, ATOM_ARG_PS },
        { atom_op_compare, ATOM_ARG_WS },
        { atom_op_compare, ATOM_ARG_FB },
        { atom_op_compare, ATOM_ARG_PLL },
        { atom_op_compare, ATOM_ARG_MC },
        { atom_op_switch, 0 },
        { atom_op_jump, ATOM_COND_ALWAYS },
        { atom_op_jump, ATOM_COND_EQUAL },
        { atom_op_jump, ATOM_COND_BELOW },
        { atom_op_jump, ATOM_COND_ABOVE },
        { atom_op_jump, ATOM_COND_BELOWOREQUAL },
        { atom_op_jump, ATOM_COND_ABOVEOREQUAL },
        { atom_op_jump, ATOM_COND_NOTEQUAL },
        { atom_op_test, ATOM_ARG_REG },
        { atom_op_test, ATOM_ARG_PS },
        { atom_op_test, ATOM_ARG_WS },
        { atom_op_test, ATOM_ARG_FB },
        { atom_op_test, ATOM_ARG_PLL },
        { atom_op_test, ATOM_ARG_MC },
        { atom_op_delay, ATOM_UNIT_MILLISEC },
        { atom_op_delay, ATOM_UNIT_MICROSEC },
        { atom_op_calltable, 0 },
        { atom_op_repeat, 0 },
        { atom_op_clear, ATOM_ARG_REG },
        { atom_op_clear, ATOM_ARG_PS },
        { atom_op_clear, ATOM_ARG_WS },
        { atom_op_clear, ATOM_ARG_FB },
        { atom_op_clear, ATOM_ARG_PLL },
        { atom_op_clear, ATOM_ARG_MC },
        { atom_op_nop, 0 },
        { atom_op_eot, 0 },
        { atom_op_mask, ATOM_ARG_REG },
        { atom_op_mask, ATOM_ARG_PS },
        { atom_op_mask, ATOM_ARG_WS },
        { atom_op_mask, ATOM_ARG_FB },
        { atom_op_mask, ATOM_ARG_PLL },
        { atom_op_mask, ATOM_ARG_MC },
        { atom_op_postcard, 0 },
        { atom_op_beep, 0 },
        { atom_op_savereg, 0 },
        { atom_op_restorereg, 0 },
        { atom_op_setdatablock, 0 },
        { atom_op_xor, ATOM_ARG_REG },
        { atom_op_xor, ATOM_ARG_PS },
        { atom_op_xor, ATOM_ARG_WS },
        { atom_op_xor, ATOM_ARG_FB },
        { atom_op_xor, ATOM_ARG_PLL },
        { atom_op_xor, ATOM_ARG_MC },
        { atom_op_shl, ATOM_ARG_REG },
        { atom_op_shl, ATOM_ARG_PS },
        { atom_op_shl, ATOM_ARG_WS },
        { atom_op_shl, ATOM_ARG_FB },
        { atom_op_shl, ATOM_ARG_PLL },
        { atom_op_shl, ATOM_ARG_MC },
        { atom_op_shr, ATOM_ARG_REG },
        { atom_op_shr, ATOM_ARG_PS },
        { atom_op_shr, ATOM_ARG_WS },
        { atom_op_shr, ATOM_ARG_FB },
        { atom_op_shr, ATOM_ARG_PLL },
        { atom_op_shr, ATOM_ARG_MC },
        { atom_op_debug, 0 },
};
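/*
 * Run one command table with the context mutex already held.  The table
 * header supplies the code length and the parameter/work space sizes; the
 * opcode loop below dispatches through opcode_table[] until it reaches
 * ATOM_OP_EOT or the handlers flag an abort.
 */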
static int atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t *params)
{
        int base = CU16(ctx->cmd_table + 4 + 2 * index);
        int len, ws, ps, ptr;
        atom_exec_context ectx;

        len = CU16(base + ATOM_CT_SIZE_PTR);
        ws = CU8(base + ATOM_CT_WS_PTR);
        ps = CU8(base + ATOM_CT_PS_PTR) & ATOM_CT_PS_MASK;
        ptr = base + ATOM_CT_CODE_PTR;

        SDEBUG(">> execute %04X (len %d, WS %d, PS %d)\n", base, len, ws, ps);

        ectx.ps_shift = ps / 4;
        ectx.ws = kzalloc(4 * ws, GFP_KERNEL);

                if (op < ATOM_OP_NAMES_CNT)
                        SDEBUG("%s @ 0x%04X\n", atom_op_names[op], ptr - 1);
                else
                        SDEBUG("[%d] @ 0x%04X\n", op, ptr - 1);

                        DRM_ERROR("atombios stuck executing %04X (len %d, WS %d, PS %d) @ 0x%04X\n",
                                  base, len, ws, ps, ptr - 1);

                if (op < ATOM_OP_CNT && op > 0)
                        opcode_table[op].func(&ectx, &ptr,
                                              opcode_table[op].arg);

                if (op == ATOM_OP_EOT)
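/*
 * Public entry point: serializes table execution with the context mutex and
 * resets the per-call interpreter state (data block, register block, FB
 * window, IO mode) before running the table.
 */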
int atom_execute_table(struct atom_context *ctx, int index, uint32_t *params)
{
        int r;

        mutex_lock(&ctx->mutex);
        /* reset data block */
        ctx->data_block = 0;
        /* reset reg block */
        ctx->reg_block = 0;
        /* reset fb window */
        ctx->fb_base = 0;
        /* reset io mode */
        ctx->io_mode = ATOM_IO_MM;
        r = atom_execute_table_locked(ctx, index, params);
        mutex_unlock(&ctx->mutex);
        return r;
}
static int atom_iio_len[] = { 1, 2, 3, 3, 3, 3, 4, 4, 4, 3 };
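/*
 * Build an index of the indirect IO programs found in the data table: for
 * each ATOM_IIO_START block, remember where its opcode stream begins so
 * atom_iio_execute() can be pointed at it by port number.
 */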
static void atom_index_iio(struct atom_context *ctx, int base)
{
        ctx->iio = kzalloc(2 * 256, GFP_KERNEL);
        if (!ctx->iio)
                return;
        while (CU8(base) == ATOM_IIO_START) {
                ctx->iio[CU8(base + 1)] = base + 2;
                base += 2;
                while (CU8(base) != ATOM_IIO_END)
                        base += atom_iio_len[CU8(base)];
                base += 3;
        }
}
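/*
 * Parse the BIOS image: verify the BIOS and ATOM ROM signatures, locate the
 * master command and data tables, index the indirect IO programs and report
 * the ROM message string.  Returns NULL on any validation failure.
 */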
struct atom_context *atom_parse(struct card_info *card, void *bios)
{
        int base, i;
        struct atom_context *ctx =
            kzalloc(sizeof(struct atom_context), GFP_KERNEL);
        char *str;
        char name[512];

        if (CU16(0) != ATOM_BIOS_MAGIC) {
                printk(KERN_INFO "Invalid BIOS magic.\n");
                kfree(ctx);
                return NULL;
        }
        if (strncmp(CSTR(ATOM_ATI_MAGIC_PTR), ATOM_ATI_MAGIC,
                    strlen(ATOM_ATI_MAGIC))) {
                printk(KERN_INFO "Invalid ATI magic.\n");
                kfree(ctx);
                return NULL;
        }

        base = CU16(ATOM_ROM_TABLE_PTR);
        if (strncmp(CSTR(base + ATOM_ROM_MAGIC_PTR), ATOM_ROM_MAGIC,
                    strlen(ATOM_ROM_MAGIC))) {
                printk(KERN_INFO "Invalid ATOM magic.\n");
                kfree(ctx);
                return NULL;
        }

        ctx->cmd_table = CU16(base + ATOM_ROM_CMD_PTR);
        ctx->data_table = CU16(base + ATOM_ROM_DATA_PTR);
        atom_index_iio(ctx, CU16(ctx->data_table + ATOM_DATA_IIO_PTR) + 4);

        str = CSTR(CU16(base + ATOM_ROM_MSG_PTR));
        while (*str && ((*str == '\n') || (*str == '\r')))
                str++;
        /* name string isn't always 0 terminated */
        for (i = 0; i < 511; i++) {
                name[i] = str[i];
                if (name[i] < '.' || name[i] > 'z') {
                        name[i] = 0;
                        break;
                }
        }
        printk(KERN_INFO "ATOM BIOS: %s\n", name);

        return ctx;
}
int atom_asic_init(struct atom_context *ctx)
{
        struct radeon_device *rdev = ctx->card->dev->dev_private;
        int hwi = CU16(ctx->data_table + ATOM_DATA_FWI_PTR);
        uint32_t ps[16];
        int ret;

        memset(ps, 0, 64);
        ps[0] = cpu_to_le32(CU32(hwi + ATOM_FWI_DEFSCLK_PTR));
        ps[1] = cpu_to_le32(CU32(hwi + ATOM_FWI_DEFMCLK_PTR));
        if (!ps[0] || !ps[1])
                return 1;

        if (!CU16(ctx->cmd_table + 4 + 2 * ATOM_CMD_INIT))
                return 1;
        ret = atom_execute_table(ctx, ATOM_CMD_INIT, ps);
        if (ret)
                return ret;

        memset(ps, 0, 64);
        if (rdev->family < CHIP_R600) {
                if (CU16(ctx->cmd_table + 4 + 2 * ATOM_CMD_SPDFANCNTL))
                        atom_execute_table(ctx, ATOM_CMD_SPDFANCNTL, ps);
        }
        return ret;
}
void atom_destroy(struct atom_context *ctx)
{
        kfree(ctx->iio);
        kfree(ctx);
}
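/*
 * atom_parse_data_header()/atom_parse_cmd_header() look up entry 'index' in
 * the master data/command table, returning its offset and the format/content
 * revision bytes stored just after the common table header.
 */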
bool atom_parse_data_header(struct atom_context *ctx, int index,
                            uint16_t *size, uint8_t *frev, uint8_t *crev,
                            uint16_t *data_start)
{
        int offset = index * 2 + 4;
        int idx = CU16(ctx->data_table + offset);
        u16 *mdt = (u16 *)(ctx->bios + ctx->data_table + 4);

        if (!mdt[index])
                return false;

        if (size)
                *size = CU16(idx);
        if (frev)
                *frev = CU8(idx + 2);
        if (crev)
                *crev = CU8(idx + 3);
        *data_start = idx;
        return true;
}
bool atom_parse_cmd_header(struct atom_context *ctx, int index, uint8_t *frev,
                           uint8_t *crev)
{
        int offset = index * 2 + 4;
        int idx = CU16(ctx->cmd_table + offset);
        u16 *mct = (u16 *)(ctx->bios + ctx->cmd_table + 4);

        if (!mct[index])
                return false;

        if (frev)
                *frev = CU8(idx + 2);
        if (crev)
                *crev = CU8(idx + 3);
        return true;
}
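/*
 * Reserve the scratch buffer that FB reads/writes in the interpreter are
 * redirected to.  The size comes from the VRAM_UsageByFirmware data table
 * when present, otherwise a 20KB default is used.
 */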
int atom_allocate_fb_scratch(struct atom_context *ctx)
{
        int index = GetIndexIntoMasterTable(DATA, VRAM_UsageByFirmware);
        uint16_t data_offset;
        int usage_bytes = 0;
        struct _ATOM_VRAM_USAGE_BY_FIRMWARE *firmware_usage;

        if (atom_parse_data_header(ctx, index, NULL, NULL, NULL, &data_offset)) {
                firmware_usage = (struct _ATOM_VRAM_USAGE_BY_FIRMWARE *)(ctx->bios + data_offset);

                DRM_DEBUG("atom firmware requested %08x %dkb\n",
                          le32_to_cpu(firmware_usage->asFirmwareVramReserveInfo[0].ulStartAddrUsedByFirmware),
                          le16_to_cpu(firmware_usage->asFirmwareVramReserveInfo[0].usFirmwareUseInKb));

                usage_bytes = le16_to_cpu(firmware_usage->asFirmwareVramReserveInfo[0].usFirmwareUseInKb) * 1024;
        }
        ctx->scratch_size_bytes = 0;
        if (usage_bytes == 0)
                usage_bytes = 20 * 1024;
        /* allocate some scratch memory */
        ctx->scratch = kzalloc(usage_bytes, GFP_KERNEL);
        if (!ctx->scratch)
                return -ENOMEM;
        ctx->scratch_size_bytes = usage_bytes;
        return 0;
}