rc12 preview

git-svn-id: svn://kolibrios.org@2997 a494cfbc-eb01-0410-851d-a64ba20cac60
Sergey Semyonov (Serge) 2012-11-03 02:41:31 +00:00
parent 2d1db4c224
commit acf20d57c8
107 changed files with 28752 additions and 9558 deletions

View File

@ -42,23 +42,24 @@ HFILES:= $(DRV_INCLUDES)/linux/types.h \
NAME_SRC= \
pci.c \
$(DRM_TOPDIR)/drm_mm.c \
$(DRM_TOPDIR)/drm_irq.c \
$(DRM_TOPDIR)/drm_edid.c \
$(DRM_TOPDIR)/drm_modes.c \
$(DRM_TOPDIR)/drm_crtc.c \
$(DRM_TOPDIR)/drm_crtc_helper.c \
$(DRM_TOPDIR)/drm_fb_helper.c \
$(DRM_TOPDIR)/drm_dp_i2c_helper.c \
$(DRM_TOPDIR)/drm_edid.c \
$(DRM_TOPDIR)/drm_fb_helper.c \
$(DRM_TOPDIR)/drm_irq.c \
$(DRM_TOPDIR)/drm_mm.c \
$(DRM_TOPDIR)/drm_modes.c \
$(DRM_TOPDIR)/drm_pci.c \
$(DRM_TOPDIR)/drm_stub.c \
$(DRM_TOPDIR)/i2c/i2c-core.c \
$(DRM_TOPDIR)/i2c/i2c-algo-bit.c \
tracker/bitmap.c \
r700_vs.c \
r600_video.c \
radeon_device.c \
evergreen.c \
evergreen_blit_shaders.c \
evergreen_blit_kms.c \
evergreen_hdmi.c \
cayman_blit_shaders.c \
radeon_clocks.c \
atom.c \
@ -72,6 +73,8 @@ NAME_SRC= \
radeon_connectors.c \
atombios_crtc.c \
atombios_dp.c \
atombios_encoders.c \
atombios_i2c.c \
radeon_encoders.c \
radeon_fence.c \
radeon_gem.c \
@ -84,6 +87,8 @@ NAME_SRC= \
radeon_gart.c \
radeon_ring.c \
radeon_object_kos.c \
radeon_sa.c \
radeon_semaphore.c \
radeon_pm.c \
r100.c \
r200.c \
@ -92,7 +97,6 @@ NAME_SRC= \
rv515.c \
r520.c \
r600.c \
r600_audio.c \
r600_blit_kms.c \
r600_blit_shaders.c \
r600_hdmi.c \
@ -104,6 +108,8 @@ NAME_SRC= \
rdisplay.c \
rdisplay_kms.c \
cmdline.c \
si.c \
si_blit_shaders.c \
fwblob.asm
FW_BINS= \

View File

@ -85,6 +85,7 @@
#define ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA 0x1F
#define ENCODER_OBJECT_ID_INTERNAL_UNIPHY1 0x20
#define ENCODER_OBJECT_ID_INTERNAL_UNIPHY2 0x21
#define ENCODER_OBJECT_ID_INTERNAL_VCE 0x24
#define ENCODER_OBJECT_ID_GENERAL_EXTERNAL_DVO 0xFF
@ -387,6 +388,10 @@
GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
ENCODER_OBJECT_ID_NUTMEG << OBJECT_ID_SHIFT)
#define ENCODER_VCE_ENUM_ID1 ( GRAPH_OBJECT_TYPE_ENCODER << OBJECT_TYPE_SHIFT |\
GRAPH_OBJECT_ENUM_ID1 << ENUM_ID_SHIFT |\
ENCODER_OBJECT_ID_INTERNAL_VCE << OBJECT_ID_SHIFT)
/****************************************************/
/* Connector Object ID definition - Shared with BIOS */
/****************************************************/

View File

@ -277,7 +277,12 @@ static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr,
case ATOM_ARG_FB:
idx = U8(*ptr);
(*ptr)++;
val = gctx->scratch[((gctx->fb_base + idx) / 4)];
if ((gctx->fb_base + (idx * 4)) > gctx->scratch_size_bytes) {
DRM_ERROR("ATOM: fb read beyond scratch region: %d vs. %d\n",
gctx->fb_base + (idx * 4), gctx->scratch_size_bytes);
val = 0;
} else
val = gctx->scratch[(gctx->fb_base / 4) + idx];
if (print)
DEBUG("FB[0x%02X]", idx);
break;
@ -531,7 +536,11 @@ static void atom_put_dst(atom_exec_context *ctx, int arg, uint8_t attr,
case ATOM_ARG_FB:
idx = U8(*ptr);
(*ptr)++;
gctx->scratch[((gctx->fb_base + idx) / 4)] = val;
if ((gctx->fb_base + (idx * 4)) > gctx->scratch_size_bytes) {
DRM_ERROR("ATOM: fb write beyond scratch region: %d vs. %d\n",
gctx->fb_base + (idx * 4), gctx->scratch_size_bytes);
} else
gctx->scratch[(gctx->fb_base / 4) + idx] = val;
DEBUG("FB[0x%02X]", idx);
break;
case ATOM_ARG_PLL:
@ -714,8 +723,25 @@ static void atom_op_jump(atom_exec_context *ctx, int *ptr, int arg)
if (arg != ATOM_COND_ALWAYS)
SDEBUG(" taken: %s\n", execute ? "yes" : "no");
SDEBUG(" target: 0x%04X\n", target);
if (execute)
if (execute) {
if (ctx->last_jump == (ctx->start + target)) {
cjiffies = GetTimerTicks();
if (time_after(cjiffies, ctx->last_jump_jiffies)) {
cjiffies -= ctx->last_jump_jiffies;
if ((jiffies_to_msecs(cjiffies) > 5000)) {
DRM_ERROR("atombios stuck in loop for more than 5secs aborting\n");
ctx->abort = true;
}
} else {
/* jiffies wrapped around; we will just wait a little longer */
ctx->last_jump_jiffies = GetTimerTicks();
}
} else {
ctx->last_jump = ctx->start + target;
ctx->last_jump_jiffies = GetTimerTicks();
}
*ptr = ctx->start + target;
}
}
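A note on the change above: the added watchdog remembers the last jump target and the tick count when that target was first taken, and aborts the table if the interpreter keeps jumping back to the same target for more than five seconds. A minimal standalone sketch of the same pattern, using hypothetical names (jump_watchdog, watchdog_expired) that are not part of this driver:
#include <stdbool.h>
/* Sketch of the loop-watchdog idea used in atom_op_jump(): abort when the
 * same jump target has been taken repeatedly for more than 5000 ms. */
struct jump_watchdog {
    int last_target;            /* last jump destination seen */
    unsigned long last_ticks;   /* tick count when it was first seen */
};
static bool watchdog_expired(struct jump_watchdog *wd, int target,
                             unsigned long now_ticks, unsigned long ticks_per_ms)
{
    if (wd->last_target != target) {
        wd->last_target = target;    /* new target: restart the clock */
        wd->last_ticks = now_ticks;
        return false;
    }
    if (now_ticks < wd->last_ticks) {
        wd->last_ticks = now_ticks;  /* tick counter wrapped; just wait longer */
        return false;
    }
    return (now_ticks - wd->last_ticks) / ticks_per_ms > 5000;
}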
static void atom_op_mask(atom_exec_context *ctx, int *ptr, int arg)
@ -1278,8 +1304,11 @@ struct atom_context *atom_parse(struct card_info *card, void *bios)
int atom_asic_init(struct atom_context *ctx)
{
struct radeon_device *rdev = ctx->card->dev->dev_private;
int hwi = CU16(ctx->data_table + ATOM_DATA_FWI_PTR);
uint32_t ps[16];
int ret;
memset(ps, 0, 64);
ps[0] = cpu_to_le32(CU32(hwi + ATOM_FWI_DEFSCLK_PTR));
@ -1289,7 +1318,17 @@ int atom_asic_init(struct atom_context *ctx)
if (!CU16(ctx->cmd_table + 4 + 2 * ATOM_CMD_INIT))
return 1;
return atom_execute_table(ctx, ATOM_CMD_INIT, ps);
ret = atom_execute_table(ctx, ATOM_CMD_INIT, ps);
if (ret)
return ret;
memset(ps, 0, 64);
if (rdev->family < CHIP_R600) {
if (CU16(ctx->cmd_table + 4 + 2 * ATOM_CMD_SPDFANCNTL))
atom_execute_table(ctx, ATOM_CMD_SPDFANCNTL, ps);
}
return ret;
}
void atom_destroy(struct atom_context *ctx)
@ -1353,11 +1392,13 @@ int atom_allocate_fb_scratch(struct atom_context *ctx)
usage_bytes = firmware_usage->asFirmwareVramReserveInfo[0].usFirmwareUseInKb * 1024;
}
ctx->scratch_size_bytes = 0;
if (usage_bytes == 0)
usage_bytes = 20 * 1024;
/* allocate some scratch memory */
ctx->scratch = kzalloc(usage_bytes, GFP_KERNEL);
if (!ctx->scratch)
return -ENOMEM;
ctx->scratch_size_bytes = usage_bytes;
return 0;
}

View File

@ -26,7 +26,7 @@
#define ATOM_H
#include <linux/types.h>
#include "drmP.h"
#include <drm/drmP.h>
#define ATOM_BIOS_MAGIC 0xAA55
#define ATOM_ATI_MAGIC_PTR 0x30
@ -44,6 +44,7 @@
#define ATOM_CMD_SETSCLK 0x0A
#define ATOM_CMD_SETMCLK 0x0B
#define ATOM_CMD_SETPCLK 0x0C
#define ATOM_CMD_SPDFANCNTL 0x39
#define ATOM_DATA_FWI_PTR 0xC
#define ATOM_DATA_IIO_PTR 0x32
@ -137,6 +138,7 @@ struct atom_context {
int cs_equal, cs_above;
int io_mode;
uint32_t *scratch;
int scratch_size_bytes;
};
extern int atom_debug;

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -22,14 +22,15 @@
*
* Authors: Dave Airlie
* Alex Deucher
* Jerome Glisse
*/
#include "drmP.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "atom.h"
#include "atom-bits.h"
#include "drm_dp_helper.h"
#include <drm/drm_dp_helper.h>
/* move these to drm_dp_helper.c/h */
#define DP_LINK_CONFIGURATION_SIZE 9
@ -63,12 +64,12 @@ static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
memset(&args, 0, sizeof(args));
base = (unsigned char *)rdev->mode_info.atom_context->scratch;
base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);
memcpy(base, send, send_bytes);
args.v1.lpAuxRequest = 0;
args.v1.lpDataOut = 16;
args.v1.lpAuxRequest = 0 + 4;
args.v1.lpDataOut = 16 + 4;
args.v1.ucDataOutLen = 0;
args.v1.ucChannelID = chan->rec.i2c_id;
args.v1.ucDelay = delay / 10;
@ -115,6 +116,7 @@ static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
u8 msg[20];
int msg_bytes = send_bytes + 4;
u8 ack;
unsigned retry;
if (send_bytes > 16)
return -1;
@ -125,20 +127,22 @@ static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
msg[3] = (msg_bytes << 4) | (send_bytes - 1);
memcpy(&msg[4], send, send_bytes);
while (1) {
for (retry = 0; retry < 4; retry++) {
ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
msg, msg_bytes, NULL, 0, delay, &ack);
if (ret < 0)
if (ret == -EBUSY)
continue;
else if (ret < 0)
return ret;
if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
break;
return send_bytes;
else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
udelay(400);
else
return -EIO;
}
return send_bytes;
return -EIO;
}
static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
@ -149,26 +153,31 @@ static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
int msg_bytes = 4;
u8 ack;
int ret;
unsigned retry;
msg[0] = address;
msg[1] = address >> 8;
msg[2] = AUX_NATIVE_READ << 4;
msg[3] = (msg_bytes << 4) | (recv_bytes - 1);
while (1) {
for (retry = 0; retry < 4; retry++) {
ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
msg, msg_bytes, recv, recv_bytes, delay, &ack);
if (ret == 0)
return -EPROTO;
if (ret < 0)
if (ret == -EBUSY)
continue;
else if (ret < 0)
return ret;
if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
return ret;
else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
udelay(400);
else if (ret == 0)
return -EPROTO;
else
return -EIO;
}
return -EIO;
}
static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector,
@ -232,7 +241,9 @@ int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
for (retry = 0; retry < 4; retry++) {
ret = radeon_process_aux_ch(auxch,
msg, msg_bytes, reply, reply_bytes, 0, &ack);
if (ret < 0) {
if (ret == -EBUSY)
continue;
else if (ret < 0) {
DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
return ret;
}
@ -273,7 +284,7 @@ int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
}
}
DRM_ERROR("aux i2c too many retries, giving up\n");
DRM_DEBUG_KMS("aux i2c too many retries, giving up\n");
return -EREMOTEIO;
}
@ -450,7 +461,7 @@ static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
u8 dpcd[DP_DPCD_SIZE],
int pix_clock)
{
int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
int max_link_rate = dp_get_max_link_rate(dpcd);
int max_lane_num = dp_get_max_lane_number(dpcd);
int lane_num;
@ -469,10 +480,11 @@ static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
u8 dpcd[DP_DPCD_SIZE],
int pix_clock)
{
int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
int lane_num, max_pix_clock;
if (radeon_connector_encoder_is_dp_bridge(connector))
if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
ENCODER_OBJECT_ID_NUTMEG)
return 270000;
lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
@ -519,6 +531,23 @@ u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
dig_connector->dp_i2c_bus->rec.i2c_id, 0);
}
static void radeon_dp_probe_oui(struct radeon_connector *radeon_connector)
{
struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
u8 buf[3];
if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
return;
if (radeon_dp_aux_native_read(radeon_connector, DP_SINK_OUI, buf, 3, 0))
DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
buf[0], buf[1], buf[2]);
if (radeon_dp_aux_native_read(radeon_connector, DP_BRANCH_OUI, buf, 3, 0))
DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
buf[0], buf[1], buf[2]);
}
bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
{
struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
@ -532,32 +561,50 @@ bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
for (i = 0; i < 8; i++)
DRM_DEBUG_KMS("%02x ", msg[i]);
DRM_DEBUG_KMS("\n");
radeon_dp_probe_oui(radeon_connector);
return true;
}
dig_connector->dpcd[0] = 0;
return false;
}
static void radeon_dp_set_panel_mode(struct drm_encoder *encoder,
int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
struct drm_connector *connector)
{
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
struct radeon_connector *radeon_connector = to_radeon_connector(connector);
int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector);
u8 tmp;
if (!ASIC_IS_DCE4(rdev))
return;
return panel_mode;
if (radeon_connector_encoder_is_dp_bridge(connector))
if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
/* DP bridge chips */
tmp = radeon_read_dpcd_reg(radeon_connector, DP_EDP_CONFIGURATION_CAP);
if (tmp & 1)
panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
(dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
else
panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
/* eDP */
tmp = radeon_read_dpcd_reg(radeon_connector, DP_EDP_CONFIGURATION_CAP);
if (tmp & 1)
panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
}
atombios_dig_encoder_setup(encoder,
ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
panel_mode);
return panel_mode;
}
void radeon_dp_set_link_config(struct drm_connector *connector,
struct drm_display_mode *mode)
const struct drm_display_mode *mode)
{
struct radeon_connector *radeon_connector = to_radeon_connector(connector);
struct radeon_connector_atom_dig *dig_connector;
@ -603,13 +650,22 @@ static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector,
ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS,
link_status, DP_LINK_STATUS_SIZE, 100);
if (ret <= 0) {
DRM_ERROR("displayport link status failed\n");
return false;
}
DRM_DEBUG_KMS("link status %02x %02x %02x %02x %02x %02x\n",
link_status[0], link_status[1], link_status[2],
link_status[3], link_status[4], link_status[5]);
DRM_DEBUG_KMS("link status %*ph\n", 6, link_status);
return true;
}
bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
{
u8 link_status[DP_LINK_STATUS_SIZE];
struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
if (!radeon_dp_get_link_status(radeon_connector, link_status))
return false;
if (dp_channel_eq_ok(link_status, dig->dp_lane_count))
return false;
return true;
}
@ -679,6 +735,8 @@ static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
{
struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
u8 tmp;
/* power up the sink */
@ -694,11 +752,15 @@ static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
radeon_write_dpcd_reg(dp_info->radeon_connector,
DP_DOWNSPREAD_CTRL, 0);
radeon_dp_set_panel_mode(dp_info->encoder, dp_info->connector);
if ((dp_info->connector->connector_type == DRM_MODE_CONNECTOR_eDP) &&
(dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)) {
radeon_write_dpcd_reg(dp_info->radeon_connector, DP_EDP_CONFIGURATION_SET, 1);
}
/* set the lane count on the sink */
tmp = dp_info->dp_lane_count;
if (dp_info->dpcd[0] >= 0x11)
if (dp_info->dpcd[DP_DPCD_REV] >= 0x11 &&
dp_info->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)
tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp);
@ -764,8 +826,10 @@ static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
else
mdelay(dp_info->rd_interval * 4);
if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status)) {
DRM_ERROR("displayport link status failed\n");
break;
}
if (dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
clock_recovery = true;
@ -827,8 +891,10 @@ static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
else
mdelay(dp_info->rd_interval * 4);
if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status)) {
DRM_ERROR("displayport link status failed\n");
break;
}
if (dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
channel_eq = true;

File diff suppressed because it is too large

View File

@ -0,0 +1,139 @@
/*
* Copyright 2011 Advanced Micro Devices, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* Authors: Alex Deucher
*
*/
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "atom.h"
#define TARGET_HW_I2C_CLOCK 50
/* these are a limitation of ProcessI2cChannelTransaction not the hw */
#define ATOM_MAX_HW_I2C_WRITE 2
#define ATOM_MAX_HW_I2C_READ 255
static int radeon_process_i2c_ch(struct radeon_i2c_chan *chan,
u8 slave_addr, u8 flags,
u8 *buf, u8 num)
{
struct drm_device *dev = chan->dev;
struct radeon_device *rdev = dev->dev_private;
PROCESS_I2C_CHANNEL_TRANSACTION_PS_ALLOCATION args;
int index = GetIndexIntoMasterTable(COMMAND, ProcessI2cChannelTransaction);
unsigned char *base;
u16 out;
memset(&args, 0, sizeof(args));
base = (unsigned char *)rdev->mode_info.atom_context->scratch;
if (flags & HW_I2C_WRITE) {
if (num > ATOM_MAX_HW_I2C_WRITE) {
DRM_ERROR("hw i2c: tried to write too many bytes (%d vs 2)\n", num);
return -EINVAL;
}
memcpy(&out, buf, num);
args.lpI2CDataOut = cpu_to_le16(out);
} else {
if (num > ATOM_MAX_HW_I2C_READ) {
DRM_ERROR("hw i2c: tried to read too many bytes (%d vs 255)\n", num);
return -EINVAL;
}
}
args.ucI2CSpeed = TARGET_HW_I2C_CLOCK;
args.ucRegIndex = 0;
args.ucTransBytes = num;
args.ucSlaveAddr = slave_addr << 1;
args.ucLineNumber = chan->rec.i2c_id;
atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
/* error */
if (args.ucStatus != HW_ASSISTED_I2C_STATUS_SUCCESS) {
DRM_DEBUG_KMS("hw_i2c error\n");
return -EIO;
}
if (!(flags & HW_I2C_WRITE))
memcpy(buf, base, num);
return 0;
}
int radeon_atom_hw_i2c_xfer(struct i2c_adapter *i2c_adap,
struct i2c_msg *msgs, int num)
{
struct radeon_i2c_chan *i2c = i2c_get_adapdata(i2c_adap);
struct i2c_msg *p;
int i, remaining, current_count, buffer_offset, max_bytes, ret;
u8 buf = 0, flags;
/* check for bus probe */
p = &msgs[0];
if ((num == 1) && (p->len == 0)) {
ret = radeon_process_i2c_ch(i2c,
p->addr, HW_I2C_WRITE,
&buf, 1);
if (ret)
return ret;
else
return num;
}
for (i = 0; i < num; i++) {
p = &msgs[i];
remaining = p->len;
buffer_offset = 0;
/* max_bytes is a limitation of ProcessI2cChannelTransaction, not the hw */
if (p->flags & I2C_M_RD) {
max_bytes = ATOM_MAX_HW_I2C_READ;
flags = HW_I2C_READ;
} else {
max_bytes = ATOM_MAX_HW_I2C_WRITE;
flags = HW_I2C_WRITE;
}
while (remaining) {
if (remaining > max_bytes)
current_count = max_bytes;
else
current_count = remaining;
ret = radeon_process_i2c_ch(i2c,
p->addr, flags,
&p->buf[buffer_offset], current_count);
if (ret)
return ret;
remaining -= current_count;
buffer_offset += current_count;
}
}
return num;
}
u32 radeon_atom_hw_i2c_func(struct i2c_adapter *adap)
{
return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL;
}
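These two callbacks are the usual master_xfer/functionality pair of a Linux struct i2c_algorithm; a sketch of how they would typically be wired up (the algorithm and adapter variables below are illustrative, only the .master_xfer and .functionality field names are the stock i2c ones):
/* Sketch: hooking the ATOM hardware i2c engine into the generic i2c core. */
static const struct i2c_algorithm radeon_atom_i2c_algo = {
    .master_xfer   = radeon_atom_hw_i2c_xfer,
    .functionality = radeon_atom_hw_i2c_func,
};
/* when creating a hw-assisted bus:
 *     i2c->adapter.algo = &radeon_atom_i2c_algo;
 *     i2c_add_adapter(&i2c->adapter);
 */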

View File

@ -24,6 +24,7 @@
* Alex Deucher <alexander.deucher@amd.com>
*/
#include <linux/bug.h>
#include <linux/types.h>
#include <linux/kernel.h>

View File

@ -51,6 +51,10 @@ struct tag_display
void (__stdcall *move_cursor)(cursor_t *cursor, int x, int y);
void (__stdcall *restore_cursor)(int x, int y);
void (*disable_mouse)(void);
u32 mask_seqno;
u32 check_mouse;
u32 check_m_pixel;
};
extern display_t *rdisplay;

File diff suppressed because it is too large

View File

@ -24,31 +24,21 @@
* Alex Deucher <alexander.deucher@amd.com>
*/
#include "drmP.h"
#include "drm.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "evergreend.h"
#include "evergreen_blit_shaders.h"
#include "cayman_blit_shaders.h"
#define DI_PT_RECTLIST 0x11
#define DI_INDEX_SIZE_16_BIT 0x0
#define DI_SRC_SEL_AUTO_INDEX 0x2
#define FMT_8 0x1
#define FMT_5_6_5 0x8
#define FMT_8_8_8_8 0x1a
#define COLOR_8 0x1
#define COLOR_5_6_5 0x8
#define COLOR_8_8_8_8 0x1a
#include "radeon_blit_common.h"
/* emits 17 */
static void
set_render_target(struct radeon_device *rdev, int format,
int w, int h, u64 gpu_addr)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
u32 cb_color_info;
int pitch, slice;
@ -56,27 +46,29 @@ set_render_target(struct radeon_device *rdev, int format,
if (h < 8)
h = 8;
cb_color_info = ((format << 2) | (1 << 24) | (1 << 8));
cb_color_info = CB_FORMAT(format) |
CB_SOURCE_FORMAT(CB_SF_EXPORT_NORM) |
CB_ARRAY_MODE(ARRAY_1D_TILED_THIN1);
pitch = (w / 8) - 1;
slice = ((w * h) / 64) - 1;
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 15));
radeon_ring_write(rdev, (CB_COLOR0_BASE - PACKET3_SET_CONTEXT_REG_START) >> 2);
radeon_ring_write(rdev, gpu_addr >> 8);
radeon_ring_write(rdev, pitch);
radeon_ring_write(rdev, slice);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, cb_color_info);
radeon_ring_write(rdev, (1 << 4));
radeon_ring_write(rdev, (w - 1) | ((h - 1) << 16));
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 15));
radeon_ring_write(ring, (CB_COLOR0_BASE - PACKET3_SET_CONTEXT_REG_START) >> 2);
radeon_ring_write(ring, gpu_addr >> 8);
radeon_ring_write(ring, pitch);
radeon_ring_write(ring, slice);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, cb_color_info);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, (w - 1) | ((h - 1) << 16));
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 0);
}
/* emits 5dw */
@ -85,6 +77,7 @@ cp_set_surface_sync(struct radeon_device *rdev,
u32 sync_type, u32 size,
u64 mc_addr)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
u32 cp_coher_size;
if (size == 0xffffffff)
@ -92,35 +85,45 @@ cp_set_surface_sync(struct radeon_device *rdev,
else
cp_coher_size = ((size + 255) >> 8);
radeon_ring_write(rdev, PACKET3(PACKET3_SURFACE_SYNC, 3));
radeon_ring_write(rdev, sync_type);
radeon_ring_write(rdev, cp_coher_size);
radeon_ring_write(rdev, mc_addr >> 8);
radeon_ring_write(rdev, 10); /* poll interval */
if (rdev->family >= CHIP_CAYMAN) {
/* CP_COHER_CNTL2 has to be set manually when submitting a surface_sync
* to the RB directly. For IBs, the CP programs this as part of the
* surface_sync packet.
*/
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
radeon_ring_write(ring, (0x85e8 - PACKET3_SET_CONFIG_REG_START) >> 2);
radeon_ring_write(ring, 0); /* CP_COHER_CNTL2 */
}
radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
radeon_ring_write(ring, sync_type);
radeon_ring_write(ring, cp_coher_size);
radeon_ring_write(ring, mc_addr >> 8);
radeon_ring_write(ring, 10); /* poll interval */
}
/* emits 11dw + 1 surface sync = 16dw */
static void
set_shaders(struct radeon_device *rdev)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
u64 gpu_addr;
/* VS */
gpu_addr = rdev->r600_blit.shader_gpu_addr + rdev->r600_blit.vs_offset;
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 3));
radeon_ring_write(rdev, (SQ_PGM_START_VS - PACKET3_SET_CONTEXT_REG_START) >> 2);
radeon_ring_write(rdev, gpu_addr >> 8);
radeon_ring_write(rdev, 2);
radeon_ring_write(rdev, 0);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 3));
radeon_ring_write(ring, (SQ_PGM_START_VS - PACKET3_SET_CONTEXT_REG_START) >> 2);
radeon_ring_write(ring, gpu_addr >> 8);
radeon_ring_write(ring, 2);
radeon_ring_write(ring, 0);
/* PS */
gpu_addr = rdev->r600_blit.shader_gpu_addr + rdev->r600_blit.ps_offset;
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 4));
radeon_ring_write(rdev, (SQ_PGM_START_PS - PACKET3_SET_CONTEXT_REG_START) >> 2);
radeon_ring_write(rdev, gpu_addr >> 8);
radeon_ring_write(rdev, 1);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 2);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 4));
radeon_ring_write(ring, (SQ_PGM_START_PS - PACKET3_SET_CONTEXT_REG_START) >> 2);
radeon_ring_write(ring, gpu_addr >> 8);
radeon_ring_write(ring, 1);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 2);
gpu_addr = rdev->r600_blit.shader_gpu_addr + rdev->r600_blit.vs_offset;
cp_set_surface_sync(rdev, PACKET3_SH_ACTION_ENA, 512, gpu_addr);
@ -130,26 +133,31 @@ set_shaders(struct radeon_device *rdev)
static void
set_vtx_resource(struct radeon_device *rdev, u64 gpu_addr)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
u32 sq_vtx_constant_word2, sq_vtx_constant_word3;
/* high addr, stride */
sq_vtx_constant_word2 = ((upper_32_bits(gpu_addr) & 0xff) | (16 << 8));
sq_vtx_constant_word2 = SQ_VTXC_BASE_ADDR_HI(upper_32_bits(gpu_addr) & 0xff) |
SQ_VTXC_STRIDE(16);
#ifdef __BIG_ENDIAN
sq_vtx_constant_word2 |= (2 << 30);
sq_vtx_constant_word2 |= SQ_VTXC_ENDIAN_SWAP(SQ_ENDIAN_8IN32);
#endif
/* xyzw swizzles */
sq_vtx_constant_word3 = (0 << 3) | (1 << 6) | (2 << 9) | (3 << 12);
sq_vtx_constant_word3 = SQ_VTCX_SEL_X(SQ_SEL_X) |
SQ_VTCX_SEL_Y(SQ_SEL_Y) |
SQ_VTCX_SEL_Z(SQ_SEL_Z) |
SQ_VTCX_SEL_W(SQ_SEL_W);
radeon_ring_write(rdev, PACKET3(PACKET3_SET_RESOURCE, 8));
radeon_ring_write(rdev, 0x580);
radeon_ring_write(rdev, gpu_addr & 0xffffffff);
radeon_ring_write(rdev, 48 - 1); /* size */
radeon_ring_write(rdev, sq_vtx_constant_word2);
radeon_ring_write(rdev, sq_vtx_constant_word3);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, SQ_TEX_VTX_VALID_BUFFER << 30);
radeon_ring_write(ring, PACKET3(PACKET3_SET_RESOURCE, 8));
radeon_ring_write(ring, 0x580);
radeon_ring_write(ring, gpu_addr & 0xffffffff);
radeon_ring_write(ring, 48 - 1); /* size */
radeon_ring_write(ring, sq_vtx_constant_word2);
radeon_ring_write(ring, sq_vtx_constant_word3);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, S__SQ_CONSTANT_TYPE(SQ_TEX_VTX_VALID_BUFFER));
if ((rdev->family == CHIP_CEDAR) ||
(rdev->family == CHIP_PALM) ||
@ -168,33 +176,42 @@ set_vtx_resource(struct radeon_device *rdev, u64 gpu_addr)
static void
set_tex_resource(struct radeon_device *rdev,
int format, int w, int h, int pitch,
u64 gpu_addr)
u64 gpu_addr, u32 size)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
u32 sq_tex_resource_word0, sq_tex_resource_word1;
u32 sq_tex_resource_word4, sq_tex_resource_word7;
if (h < 1)
h = 1;
sq_tex_resource_word0 = (1 << 0); /* 2D */
sq_tex_resource_word0 = TEX_DIM(SQ_TEX_DIM_2D);
sq_tex_resource_word0 |= ((((pitch >> 3) - 1) << 6) |
((w - 1) << 18));
sq_tex_resource_word1 = ((h - 1) << 0) | (1 << 28);
sq_tex_resource_word1 = ((h - 1) << 0) |
TEX_ARRAY_MODE(ARRAY_1D_TILED_THIN1);
/* xyzw swizzles */
sq_tex_resource_word4 = (0 << 16) | (1 << 19) | (2 << 22) | (3 << 25);
sq_tex_resource_word4 = TEX_DST_SEL_X(SQ_SEL_X) |
TEX_DST_SEL_Y(SQ_SEL_Y) |
TEX_DST_SEL_Z(SQ_SEL_Z) |
TEX_DST_SEL_W(SQ_SEL_W);
sq_tex_resource_word7 = format | (SQ_TEX_VTX_VALID_TEXTURE << 30);
sq_tex_resource_word7 = format |
S__SQ_CONSTANT_TYPE(SQ_TEX_VTX_VALID_TEXTURE);
radeon_ring_write(rdev, PACKET3(PACKET3_SET_RESOURCE, 8));
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, sq_tex_resource_word0);
radeon_ring_write(rdev, sq_tex_resource_word1);
radeon_ring_write(rdev, gpu_addr >> 8);
radeon_ring_write(rdev, gpu_addr >> 8);
radeon_ring_write(rdev, sq_tex_resource_word4);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, sq_tex_resource_word7);
cp_set_surface_sync(rdev,
PACKET3_TC_ACTION_ENA, size, gpu_addr);
radeon_ring_write(ring, PACKET3(PACKET3_SET_RESOURCE, 8));
radeon_ring_write(ring, 0);
radeon_ring_write(ring, sq_tex_resource_word0);
radeon_ring_write(ring, sq_tex_resource_word1);
radeon_ring_write(ring, gpu_addr >> 8);
radeon_ring_write(ring, gpu_addr >> 8);
radeon_ring_write(ring, sq_tex_resource_word4);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, sq_tex_resource_word7);
}
/* emits 12 */
@ -202,53 +219,55 @@ static void
set_scissors(struct radeon_device *rdev, int x1, int y1,
int x2, int y2)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
/* work around some hw bugs */
if (x2 == 0)
x1 = 1;
if (y2 == 0)
y1 = 1;
if (rdev->family == CHIP_CAYMAN) {
if (rdev->family >= CHIP_CAYMAN) {
if ((x2 == 1) && (y2 == 1))
x2 = 2;
}
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
radeon_ring_write(rdev, (PA_SC_SCREEN_SCISSOR_TL - PACKET3_SET_CONTEXT_REG_START) >> 2);
radeon_ring_write(rdev, (x1 << 0) | (y1 << 16));
radeon_ring_write(rdev, (x2 << 0) | (y2 << 16));
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
radeon_ring_write(ring, (PA_SC_SCREEN_SCISSOR_TL - PACKET3_SET_CONTEXT_REG_START) >> 2);
radeon_ring_write(ring, (x1 << 0) | (y1 << 16));
radeon_ring_write(ring, (x2 << 0) | (y2 << 16));
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
radeon_ring_write(rdev, (PA_SC_GENERIC_SCISSOR_TL - PACKET3_SET_CONTEXT_REG_START) >> 2);
radeon_ring_write(rdev, (x1 << 0) | (y1 << 16) | (1 << 31));
radeon_ring_write(rdev, (x2 << 0) | (y2 << 16));
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
radeon_ring_write(ring, (PA_SC_GENERIC_SCISSOR_TL - PACKET3_SET_CONTEXT_REG_START) >> 2);
radeon_ring_write(ring, (x1 << 0) | (y1 << 16) | (1 << 31));
radeon_ring_write(ring, (x2 << 0) | (y2 << 16));
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
radeon_ring_write(rdev, (PA_SC_WINDOW_SCISSOR_TL - PACKET3_SET_CONTEXT_REG_START) >> 2);
radeon_ring_write(rdev, (x1 << 0) | (y1 << 16) | (1 << 31));
radeon_ring_write(rdev, (x2 << 0) | (y2 << 16));
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
radeon_ring_write(ring, (PA_SC_WINDOW_SCISSOR_TL - PACKET3_SET_CONTEXT_REG_START) >> 2);
radeon_ring_write(ring, (x1 << 0) | (y1 << 16) | (1 << 31));
radeon_ring_write(ring, (x2 << 0) | (y2 << 16));
}
/* emits 10 */
static void
draw_auto(struct radeon_device *rdev)
{
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONFIG_REG, 1));
radeon_ring_write(rdev, (VGT_PRIMITIVE_TYPE - PACKET3_SET_CONFIG_REG_START) >> 2);
radeon_ring_write(rdev, DI_PT_RECTLIST);
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
radeon_ring_write(ring, (VGT_PRIMITIVE_TYPE - PACKET3_SET_CONFIG_REG_START) >> 2);
radeon_ring_write(ring, DI_PT_RECTLIST);
radeon_ring_write(rdev, PACKET3(PACKET3_INDEX_TYPE, 0));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET3(PACKET3_INDEX_TYPE, 0));
radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
(2 << 2) |
#endif
DI_INDEX_SIZE_16_BIT);
radeon_ring_write(rdev, PACKET3(PACKET3_NUM_INSTANCES, 0));
radeon_ring_write(rdev, 1);
radeon_ring_write(ring, PACKET3(PACKET3_NUM_INSTANCES, 0));
radeon_ring_write(ring, 1);
radeon_ring_write(rdev, PACKET3(PACKET3_DRAW_INDEX_AUTO, 1));
radeon_ring_write(rdev, 3);
radeon_ring_write(rdev, DI_SRC_SEL_AUTO_INDEX);
radeon_ring_write(ring, PACKET3(PACKET3_DRAW_INDEX_AUTO, 1));
radeon_ring_write(ring, 3);
radeon_ring_write(ring, DI_SRC_SEL_AUTO_INDEX);
}
@ -256,6 +275,7 @@ draw_auto(struct radeon_device *rdev)
static void
set_default_state(struct radeon_device *rdev)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
u32 sq_config, sq_gpr_resource_mgmt_1, sq_gpr_resource_mgmt_2, sq_gpr_resource_mgmt_3;
u32 sq_thread_resource_mgmt, sq_thread_resource_mgmt_2;
u32 sq_stack_resource_mgmt_1, sq_stack_resource_mgmt_2, sq_stack_resource_mgmt_3;
@ -269,8 +289,8 @@ set_default_state(struct radeon_device *rdev)
int dwords;
/* set clear context state */
radeon_ring_write(rdev, PACKET3(PACKET3_CLEAR_STATE, 0));
radeon_ring_write(rdev, 0);
radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
radeon_ring_write(ring, 0);
if (rdev->family < CHIP_CAYMAN) {
switch (rdev->family) {
@ -527,88 +547,63 @@ set_default_state(struct radeon_device *rdev)
NUM_LS_STACK_ENTRIES(num_ls_stack_entries));
/* disable dyn gprs */
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONFIG_REG, 1));
radeon_ring_write(rdev, (SQ_DYN_GPR_CNTL_PS_FLUSH_REQ - PACKET3_SET_CONFIG_REG_START) >> 2);
radeon_ring_write(rdev, 0);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
radeon_ring_write(ring, (SQ_DYN_GPR_CNTL_PS_FLUSH_REQ - PACKET3_SET_CONFIG_REG_START) >> 2);
radeon_ring_write(ring, 0);
/* setup LDS */
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONFIG_REG, 1));
radeon_ring_write(rdev, (SQ_LDS_RESOURCE_MGMT - PACKET3_SET_CONFIG_REG_START) >> 2);
radeon_ring_write(rdev, 0x10001000);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
radeon_ring_write(ring, (SQ_LDS_RESOURCE_MGMT - PACKET3_SET_CONFIG_REG_START) >> 2);
radeon_ring_write(ring, 0x10001000);
/* SQ config */
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONFIG_REG, 11));
radeon_ring_write(rdev, (SQ_CONFIG - PACKET3_SET_CONFIG_REG_START) >> 2);
radeon_ring_write(rdev, sq_config);
radeon_ring_write(rdev, sq_gpr_resource_mgmt_1);
radeon_ring_write(rdev, sq_gpr_resource_mgmt_2);
radeon_ring_write(rdev, sq_gpr_resource_mgmt_3);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, sq_thread_resource_mgmt);
radeon_ring_write(rdev, sq_thread_resource_mgmt_2);
radeon_ring_write(rdev, sq_stack_resource_mgmt_1);
radeon_ring_write(rdev, sq_stack_resource_mgmt_2);
radeon_ring_write(rdev, sq_stack_resource_mgmt_3);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 11));
radeon_ring_write(ring, (SQ_CONFIG - PACKET3_SET_CONFIG_REG_START) >> 2);
radeon_ring_write(ring, sq_config);
radeon_ring_write(ring, sq_gpr_resource_mgmt_1);
radeon_ring_write(ring, sq_gpr_resource_mgmt_2);
radeon_ring_write(ring, sq_gpr_resource_mgmt_3);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, sq_thread_resource_mgmt);
radeon_ring_write(ring, sq_thread_resource_mgmt_2);
radeon_ring_write(ring, sq_stack_resource_mgmt_1);
radeon_ring_write(ring, sq_stack_resource_mgmt_2);
radeon_ring_write(ring, sq_stack_resource_mgmt_3);
}
/* CONTEXT_CONTROL */
radeon_ring_write(rdev, 0xc0012800);
radeon_ring_write(rdev, 0x80000000);
radeon_ring_write(rdev, 0x80000000);
radeon_ring_write(ring, 0xc0012800);
radeon_ring_write(ring, 0x80000000);
radeon_ring_write(ring, 0x80000000);
/* SQ_VTX_BASE_VTX_LOC */
radeon_ring_write(rdev, 0xc0026f00);
radeon_ring_write(rdev, 0x00000000);
radeon_ring_write(rdev, 0x00000000);
radeon_ring_write(rdev, 0x00000000);
radeon_ring_write(ring, 0xc0026f00);
radeon_ring_write(ring, 0x00000000);
radeon_ring_write(ring, 0x00000000);
radeon_ring_write(ring, 0x00000000);
/* SET_SAMPLER */
radeon_ring_write(rdev, 0xc0036e00);
radeon_ring_write(rdev, 0x00000000);
radeon_ring_write(rdev, 0x00000012);
radeon_ring_write(rdev, 0x00000000);
radeon_ring_write(rdev, 0x00000000);
radeon_ring_write(ring, 0xc0036e00);
radeon_ring_write(ring, 0x00000000);
radeon_ring_write(ring, 0x00000012);
radeon_ring_write(ring, 0x00000000);
radeon_ring_write(ring, 0x00000000);
/* set to DX10/11 mode */
radeon_ring_write(rdev, PACKET3(PACKET3_MODE_CONTROL, 0));
radeon_ring_write(rdev, 1);
radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
radeon_ring_write(ring, 1);
/* emit an IB pointing at default state */
dwords = ALIGN(rdev->r600_blit.state_len, 0x10);
gpu_addr = rdev->r600_blit.shader_gpu_addr + rdev->r600_blit.state_offset;
radeon_ring_write(rdev, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
radeon_ring_write(rdev, gpu_addr & 0xFFFFFFFC);
radeon_ring_write(rdev, upper_32_bits(gpu_addr) & 0xFF);
radeon_ring_write(rdev, dwords);
radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
radeon_ring_write(ring, gpu_addr & 0xFFFFFFFC);
radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xFF);
radeon_ring_write(ring, dwords);
}
static inline uint32_t i2f(uint32_t input)
{
u32 result, i, exponent, fraction;
if ((input & 0x3fff) == 0)
result = 0; /* 0 is a special case */
else {
exponent = 140; /* exponent biased by 127; */
fraction = (input & 0x3fff) << 10; /* cheat and only
handle numbers below 2^15 */
for (i = 0; i < 14; i++) {
if (fraction & 0x800000)
break;
else {
fraction = fraction << 1; /* keep
shifting left until top bit = 1 */
exponent = exponent - 1;
}
}
result = exponent << 23 | (fraction & 0x7fffff); /* mask
off top bit; assumed 1 */
}
return result;
}
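i2f() above hand-builds the IEEE-754 single-precision bit pattern of a small non-negative integer (exponent bias 127, shift the fraction left until the hidden bit is set). A quick standalone cross-check against a plain float cast, assuming a host with IEEE-754 floats; the helper below is not part of the driver:
#include <stdint.h>
#include <string.h>
#include <stdio.h>
/* Cross-check for the i2f() idea: for small integers the hand-built
 * bit pattern must equal the bits of (float)x. */
static uint32_t float_bits(uint32_t x)
{
    float f = (float)x;
    uint32_t bits;
    memcpy(&bits, &f, sizeof(bits));
    return bits;
}
int main(void)
{
    /* 5 = 1.25 * 2^2 -> exponent 127+2, fraction 0x200000 -> 0x40a00000 */
    printf("%u -> 0x%08x\n", 5u, float_bits(5));
    /* 8192 = 1.0 * 2^13 -> exponent 127+13, fraction 0 -> 0x46000000 */
    printf("%u -> 0x%08x\n", 8192u, float_bits(8192));
    return 0;
}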
int evergreen_blit_init(struct radeon_device *rdev)
{
u32 obj_size;
@ -616,6 +611,27 @@ int evergreen_blit_init(struct radeon_device *rdev)
void *ptr;
u32 packet2s[16];
int num_packet2s = 0;
#if 0
rdev->r600_blit.primitives.set_render_target = set_render_target;
rdev->r600_blit.primitives.cp_set_surface_sync = cp_set_surface_sync;
rdev->r600_blit.primitives.set_shaders = set_shaders;
rdev->r600_blit.primitives.set_vtx_resource = set_vtx_resource;
rdev->r600_blit.primitives.set_tex_resource = set_tex_resource;
rdev->r600_blit.primitives.set_scissors = set_scissors;
rdev->r600_blit.primitives.draw_auto = draw_auto;
rdev->r600_blit.primitives.set_default_state = set_default_state;
rdev->r600_blit.ring_size_common = 8; /* sync semaphore */
rdev->r600_blit.ring_size_common += 55; /* shaders + def state */
rdev->r600_blit.ring_size_common += 16; /* fence emit for VB IB */
rdev->r600_blit.ring_size_common += 5; /* done copy */
rdev->r600_blit.ring_size_common += 16; /* fence emit for done copy */
rdev->r600_blit.ring_size_per_loop = 74;
if (rdev->family >= CHIP_CAYMAN)
rdev->r600_blit.ring_size_per_loop += 9; /* additional DWs for surface sync */
rdev->r600_blit.max_dim = 16384;
/* pin copy shader into vram if already initialized */
if (rdev->r600_blit.shader_obj)
@ -710,279 +726,8 @@ done:
return r;
}
// radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
#endif
return 0;
}
void evergreen_blit_fini(struct radeon_device *rdev)
{
int r;
// radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
if (rdev->r600_blit.shader_obj == NULL)
return;
/* If we can't reserve the bo, unref should be enough to destroy
* it when it becomes idle.
*/
r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
if (!r) {
radeon_bo_unpin(rdev->r600_blit.shader_obj);
radeon_bo_unreserve(rdev->r600_blit.shader_obj);
}
radeon_bo_unref(&rdev->r600_blit.shader_obj);
}
static int evergreen_vb_ib_get(struct radeon_device *rdev)
{
int r;
r = radeon_ib_get(rdev, &rdev->r600_blit.vb_ib);
if (r) {
DRM_ERROR("failed to get IB for vertex buffer\n");
return r;
}
rdev->r600_blit.vb_total = 64*1024;
rdev->r600_blit.vb_used = 0;
return 0;
}
static void evergreen_vb_ib_put(struct radeon_device *rdev)
{
radeon_fence_emit(rdev, rdev->r600_blit.vb_ib->fence);
radeon_ib_free(rdev, &rdev->r600_blit.vb_ib);
}
int evergreen_blit_prepare_copy(struct radeon_device *rdev, int size_bytes)
{
int r;
int ring_size, line_size;
int max_size;
/* loops of emits + fence emit possible */
int dwords_per_loop = 74, num_loops;
r = evergreen_vb_ib_get(rdev);
if (r)
return r;
/* 8 bpp vs 32 bpp for xfer unit */
if (size_bytes & 3)
line_size = 8192;
else
line_size = 8192 * 4;
max_size = 8192 * line_size;
/* major loops cover the max size transfer */
num_loops = ((size_bytes + max_size) / max_size);
/* minor loops cover the extra non aligned bits */
num_loops += ((size_bytes % line_size) ? 1 : 0);
/* calculate number of loops correctly */
ring_size = num_loops * dwords_per_loop;
/* set default + shaders */
ring_size += 55; /* shaders + def state */
ring_size += 10; /* fence emit for VB IB */
ring_size += 5; /* done copy */
ring_size += 10; /* fence emit for done copy */
r = radeon_ring_lock(rdev, ring_size);
if (r)
return r;
set_default_state(rdev); /* 36 */
set_shaders(rdev); /* 16 */
return 0;
}
void evergreen_blit_done_copy(struct radeon_device *rdev, struct radeon_fence *fence)
{
int r;
if (rdev->r600_blit.vb_ib)
evergreen_vb_ib_put(rdev);
if (fence)
r = radeon_fence_emit(rdev, fence);
radeon_ring_unlock_commit(rdev);
}
void evergreen_kms_blit_copy(struct radeon_device *rdev,
u64 src_gpu_addr, u64 dst_gpu_addr,
int size_bytes)
{
int max_bytes;
u64 vb_gpu_addr;
u32 *vb;
DRM_DEBUG("emitting copy %16llx %16llx %d %d\n", src_gpu_addr, dst_gpu_addr,
size_bytes, rdev->r600_blit.vb_used);
vb = (u32 *)(rdev->r600_blit.vb_ib->ptr + rdev->r600_blit.vb_used);
if ((size_bytes & 3) || (src_gpu_addr & 3) || (dst_gpu_addr & 3)) {
max_bytes = 8192;
while (size_bytes) {
int cur_size = size_bytes;
int src_x = src_gpu_addr & 255;
int dst_x = dst_gpu_addr & 255;
int h = 1;
src_gpu_addr = src_gpu_addr & ~255ULL;
dst_gpu_addr = dst_gpu_addr & ~255ULL;
if (!src_x && !dst_x) {
h = (cur_size / max_bytes);
if (h > 8192)
h = 8192;
if (h == 0)
h = 1;
else
cur_size = max_bytes;
} else {
if (cur_size > max_bytes)
cur_size = max_bytes;
if (cur_size > (max_bytes - dst_x))
cur_size = (max_bytes - dst_x);
if (cur_size > (max_bytes - src_x))
cur_size = (max_bytes - src_x);
}
if ((rdev->r600_blit.vb_used + 48) > rdev->r600_blit.vb_total) {
// WARN_ON(1);
}
vb[0] = i2f(dst_x);
vb[1] = 0;
vb[2] = i2f(src_x);
vb[3] = 0;
vb[4] = i2f(dst_x);
vb[5] = i2f(h);
vb[6] = i2f(src_x);
vb[7] = i2f(h);
vb[8] = i2f(dst_x + cur_size);
vb[9] = i2f(h);
vb[10] = i2f(src_x + cur_size);
vb[11] = i2f(h);
/* src 10 */
set_tex_resource(rdev, FMT_8,
src_x + cur_size, h, src_x + cur_size,
src_gpu_addr);
/* 5 */
cp_set_surface_sync(rdev,
PACKET3_TC_ACTION_ENA, (src_x + cur_size * h), src_gpu_addr);
/* dst 17 */
set_render_target(rdev, COLOR_8,
dst_x + cur_size, h,
dst_gpu_addr);
/* scissors 12 */
set_scissors(rdev, dst_x, 0, dst_x + cur_size, h);
/* 15 */
vb_gpu_addr = rdev->r600_blit.vb_ib->gpu_addr + rdev->r600_blit.vb_used;
set_vtx_resource(rdev, vb_gpu_addr);
/* draw 10 */
draw_auto(rdev);
/* 5 */
cp_set_surface_sync(rdev,
PACKET3_CB_ACTION_ENA | PACKET3_CB0_DEST_BASE_ENA,
cur_size * h, dst_gpu_addr);
vb += 12;
rdev->r600_blit.vb_used += 12 * 4;
src_gpu_addr += cur_size * h;
dst_gpu_addr += cur_size * h;
size_bytes -= cur_size * h;
}
} else {
max_bytes = 8192 * 4;
while (size_bytes) {
int cur_size = size_bytes;
int src_x = (src_gpu_addr & 255);
int dst_x = (dst_gpu_addr & 255);
int h = 1;
src_gpu_addr = src_gpu_addr & ~255ULL;
dst_gpu_addr = dst_gpu_addr & ~255ULL;
if (!src_x && !dst_x) {
h = (cur_size / max_bytes);
if (h > 8192)
h = 8192;
if (h == 0)
h = 1;
else
cur_size = max_bytes;
} else {
if (cur_size > max_bytes)
cur_size = max_bytes;
if (cur_size > (max_bytes - dst_x))
cur_size = (max_bytes - dst_x);
if (cur_size > (max_bytes - src_x))
cur_size = (max_bytes - src_x);
}
if ((rdev->r600_blit.vb_used + 48) > rdev->r600_blit.vb_total) {
// WARN_ON(1);
}
vb[0] = i2f(dst_x / 4);
vb[1] = 0;
vb[2] = i2f(src_x / 4);
vb[3] = 0;
vb[4] = i2f(dst_x / 4);
vb[5] = i2f(h);
vb[6] = i2f(src_x / 4);
vb[7] = i2f(h);
vb[8] = i2f((dst_x + cur_size) / 4);
vb[9] = i2f(h);
vb[10] = i2f((src_x + cur_size) / 4);
vb[11] = i2f(h);
/* src 10 */
set_tex_resource(rdev, FMT_8_8_8_8,
(src_x + cur_size) / 4,
h, (src_x + cur_size) / 4,
src_gpu_addr);
/* 5 */
cp_set_surface_sync(rdev,
PACKET3_TC_ACTION_ENA, (src_x + cur_size * h), src_gpu_addr);
/* dst 17 */
set_render_target(rdev, COLOR_8_8_8_8,
(dst_x + cur_size) / 4, h,
dst_gpu_addr);
/* scissors 12 */
set_scissors(rdev, (dst_x / 4), 0, (dst_x + cur_size / 4), h);
/* Vertex buffer setup 15 */
vb_gpu_addr = rdev->r600_blit.vb_ib->gpu_addr + rdev->r600_blit.vb_used;
set_vtx_resource(rdev, vb_gpu_addr);
/* draw 10 */
draw_auto(rdev);
/* 5 */
cp_set_surface_sync(rdev,
PACKET3_CB_ACTION_ENA | PACKET3_CB0_DEST_BASE_ENA,
cur_size * h, dst_gpu_addr);
/* 74 ring dwords per loop */
vb += 12;
rdev->r600_blit.vb_used += 12 * 4;
src_gpu_addr += cur_size * h;
dst_gpu_addr += cur_size * h;
size_bytes -= cur_size * h;
}
}
}

View File

@ -24,6 +24,7 @@
* Alex Deucher <alexander.deucher@amd.com>
*/
#include <linux/bug.h>
#include <linux/types.h>
#include <linux/kernel.h>

View File

@ -0,0 +1,213 @@
/*
* Copyright 2008 Advanced Micro Devices, Inc.
* Copyright 2008 Red Hat Inc.
* Copyright 2009 Christian König.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* Authors: Christian König
* Rafał Miłecki
*/
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "evergreend.h"
#include "atom.h"
/*
* update the N and CTS parameters for a given pixel clock rate
*/
static void evergreen_hdmi_update_ACR(struct drm_encoder *encoder, uint32_t clock)
{
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
struct radeon_hdmi_acr acr = r600_hdmi_acr(clock);
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
uint32_t offset = dig->afmt->offset;
WREG32(HDMI_ACR_32_0 + offset, HDMI_ACR_CTS_32(acr.cts_32khz));
WREG32(HDMI_ACR_32_1 + offset, acr.n_32khz);
WREG32(HDMI_ACR_44_0 + offset, HDMI_ACR_CTS_44(acr.cts_44_1khz));
WREG32(HDMI_ACR_44_1 + offset, acr.n_44_1khz);
WREG32(HDMI_ACR_48_0 + offset, HDMI_ACR_CTS_48(acr.cts_48khz));
WREG32(HDMI_ACR_48_1 + offset, acr.n_48khz);
}
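The N and CTS values written above follow the HDMI audio clock regeneration relation 128 * fs = f_TMDS * N / CTS, i.e. CTS = f_TMDS * N / (128 * fs). A small standalone check with commonly quoted example values (74.25 MHz TMDS clock, 48 kHz audio, N = 6144); these numbers are illustrative and not read from the driver:
#include <stdio.h>
/* Illustrative check of the HDMI ACR relation 128*fs = f_TMDS*N/CTS. */
int main(void)
{
    unsigned long long f_tmds = 74250000ULL;  /* 74.25 MHz TMDS clock */
    unsigned long long n      = 6144;         /* typical N for 48 kHz */
    unsigned long long fs     = 48000;        /* audio sample rate */
    printf("CTS = %llu\n", f_tmds * n / (128 * fs));  /* prints 74250 */
    return 0;
}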
/*
* calculate the crc for a given info frame
*/
static void evergreen_hdmi_infoframe_checksum(uint8_t packetType,
uint8_t versionNumber,
uint8_t length,
uint8_t *frame)
{
int i;
frame[0] = packetType + versionNumber + length;
for (i = 1; i <= length; i++)
frame[0] += frame[i];
frame[0] = 0x100 - frame[0];
}
/*
* build a HDMI Video Info Frame
*/
static void evergreen_hdmi_videoinfoframe(
struct drm_encoder *encoder,
uint8_t color_format,
int active_information_present,
uint8_t active_format_aspect_ratio,
uint8_t scan_information,
uint8_t colorimetry,
uint8_t ex_colorimetry,
uint8_t quantization,
int ITC,
uint8_t picture_aspect_ratio,
uint8_t video_format_identification,
uint8_t pixel_repetition,
uint8_t non_uniform_picture_scaling,
uint8_t bar_info_data_valid,
uint16_t top_bar,
uint16_t bottom_bar,
uint16_t left_bar,
uint16_t right_bar
)
{
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
uint32_t offset = dig->afmt->offset;
uint8_t frame[14];
frame[0x0] = 0;
frame[0x1] =
(scan_information & 0x3) |
((bar_info_data_valid & 0x3) << 2) |
((active_information_present & 0x1) << 4) |
((color_format & 0x3) << 5);
frame[0x2] =
(active_format_aspect_ratio & 0xF) |
((picture_aspect_ratio & 0x3) << 4) |
((colorimetry & 0x3) << 6);
frame[0x3] =
(non_uniform_picture_scaling & 0x3) |
((quantization & 0x3) << 2) |
((ex_colorimetry & 0x7) << 4) |
((ITC & 0x1) << 7);
frame[0x4] = (video_format_identification & 0x7F);
frame[0x5] = (pixel_repetition & 0xF);
frame[0x6] = (top_bar & 0xFF);
frame[0x7] = (top_bar >> 8);
frame[0x8] = (bottom_bar & 0xFF);
frame[0x9] = (bottom_bar >> 8);
frame[0xA] = (left_bar & 0xFF);
frame[0xB] = (left_bar >> 8);
frame[0xC] = (right_bar & 0xFF);
frame[0xD] = (right_bar >> 8);
evergreen_hdmi_infoframe_checksum(0x82, 0x02, 0x0D, frame);
/* Our header values (type, version, length) should be alright, Intel
* is using the same. Checksum function also seems to be OK, it works
* fine for audio infoframe. However the calculated value is always lower
* by 2 in comparison to fglrx. It breaks displaying anything in case
* of TVs that strictly check the checksum. Hack it manually here to
* work around this issue. */
frame[0x0] += 2;
WREG32(AFMT_AVI_INFO0 + offset,
frame[0x0] | (frame[0x1] << 8) | (frame[0x2] << 16) | (frame[0x3] << 24));
WREG32(AFMT_AVI_INFO1 + offset,
frame[0x4] | (frame[0x5] << 8) | (frame[0x6] << 16) | (frame[0x7] << 24));
WREG32(AFMT_AVI_INFO2 + offset,
frame[0x8] | (frame[0x9] << 8) | (frame[0xA] << 16) | (frame[0xB] << 24));
WREG32(AFMT_AVI_INFO3 + offset,
frame[0xC] | (frame[0xD] << 8));
}
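For reference, evergreen_hdmi_infoframe_checksum() above implements the usual HDMI infoframe rule: the checksum byte is chosen so that the header bytes (type, version, length) plus the payload plus the checksum sum to zero modulo 256. A worked standalone example with made-up payload bytes:
#include <stdint.h>
#include <stdio.h>
/* Worked example of the infoframe checksum rule used above.
 * AVI infoframe header bytes: type 0x82, version 0x02, length 0x0D.
 * Payload bytes below are arbitrary example values. */
int main(void)
{
    uint8_t frame[14] = { 0, 0x12, 0x28, 0x00, 0x10, 0x00,
                          0, 0, 0, 0, 0, 0, 0, 0 };
    unsigned sum = 0x82 + 0x02 + 0x0D;
    for (int i = 1; i <= 0x0D; i++)
        sum += frame[i];
    frame[0] = (uint8_t)(0x100 - (sum & 0xFF));
    /* header + payload + frame[0] now sums to 0 modulo 256 */
    printf("checksum = 0x%02x\n", frame[0]);   /* prints 0x25 */
    return 0;
}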
/*
* update the info frames with the data from the current display mode
*/
void evergreen_hdmi_setmode(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
uint32_t offset;
/* Silent, r600_hdmi_enable will raise WARN for us */
if (!dig->afmt->enabled)
return;
offset = dig->afmt->offset;
// r600_audio_set_clock(encoder, mode->clock);
WREG32(HDMI_VBI_PACKET_CONTROL + offset,
HDMI_NULL_SEND); /* send null packets when required */
WREG32(AFMT_AUDIO_CRC_CONTROL + offset, 0x1000);
WREG32(HDMI_AUDIO_PACKET_CONTROL + offset,
HDMI_AUDIO_DELAY_EN(1) | /* set the default audio delay */
HDMI_AUDIO_PACKETS_PER_LINE(3)); /* should be sufficient for all audio modes and small enough for all hblanks */
WREG32(AFMT_AUDIO_PACKET_CONTROL + offset,
AFMT_AUDIO_SAMPLE_SEND | /* send audio packets */
AFMT_60958_CS_UPDATE); /* allow 60958 channel status fields to be updated */
WREG32(HDMI_ACR_PACKET_CONTROL + offset,
HDMI_ACR_AUTO_SEND | /* allow hw to send ACR packets when required */
HDMI_ACR_SOURCE); /* select SW CTS value */
WREG32(HDMI_VBI_PACKET_CONTROL + offset,
HDMI_NULL_SEND | /* send null packets when required */
HDMI_GC_SEND | /* send general control packets */
HDMI_GC_CONT); /* send general control packets every frame */
WREG32(HDMI_INFOFRAME_CONTROL0 + offset,
HDMI_AVI_INFO_SEND | /* enable AVI info frames */
HDMI_AVI_INFO_CONT | /* send AVI info frames every frame/field */
HDMI_AUDIO_INFO_SEND | /* enable audio info frames (frames won't be set until audio is enabled) */
HDMI_AUDIO_INFO_CONT); /* required for audio info values to be updated */
WREG32(AFMT_INFOFRAME_CONTROL0 + offset,
AFMT_AUDIO_INFO_UPDATE); /* required for audio info values to be updated */
WREG32(HDMI_INFOFRAME_CONTROL1 + offset,
HDMI_AVI_INFO_LINE(2) | /* anything other than 0 */
HDMI_AUDIO_INFO_LINE(2)); /* anything other than 0 */
WREG32(HDMI_GC + offset, 0); /* unset HDMI_GC_AVMUTE */
evergreen_hdmi_videoinfoframe(encoder, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0);
evergreen_hdmi_update_ACR(encoder, mode->clock);
/* it's unknown what these bits do exactly, but it's indeed quite useful for debugging */
WREG32(AFMT_RAMP_CONTROL0 + offset, 0x00FFFFFF);
WREG32(AFMT_RAMP_CONTROL1 + offset, 0x007FFFFF);
WREG32(AFMT_RAMP_CONTROL2 + offset, 0x00000001);
WREG32(AFMT_RAMP_CONTROL3 + offset, 0x00000001);
}

View File

@ -35,6 +35,14 @@
#define EVERGREEN_P1PLL_SS_CNTL 0x414
#define EVERGREEN_P2PLL_SS_CNTL 0x454
# define EVERGREEN_PxPLL_SS_EN (1 << 12)
#define EVERGREEN_AUDIO_PLL1_MUL 0x5b0
#define EVERGREEN_AUDIO_PLL1_DIV 0x5b4
#define EVERGREEN_AUDIO_PLL1_UNK 0x5bc
#define EVERGREEN_AUDIO_ENABLE 0x5e78
#define EVERGREEN_AUDIO_VENDOR_ID 0x5ec0
/* GRPH blocks at 0x6800, 0x7400, 0x10000, 0x10c00, 0x11800, 0x12400 */
#define EVERGREEN_GRPH_ENABLE 0x6800
#define EVERGREEN_GRPH_CONTROL 0x6804
@ -42,6 +50,17 @@
# define EVERGREEN_GRPH_DEPTH_8BPP 0
# define EVERGREEN_GRPH_DEPTH_16BPP 1
# define EVERGREEN_GRPH_DEPTH_32BPP 2
# define EVERGREEN_GRPH_NUM_BANKS(x) (((x) & 0x3) << 2)
# define EVERGREEN_ADDR_SURF_2_BANK 0
# define EVERGREEN_ADDR_SURF_4_BANK 1
# define EVERGREEN_ADDR_SURF_8_BANK 2
# define EVERGREEN_ADDR_SURF_16_BANK 3
# define EVERGREEN_GRPH_Z(x) (((x) & 0x3) << 4)
# define EVERGREEN_GRPH_BANK_WIDTH(x) (((x) & 0x3) << 6)
# define EVERGREEN_ADDR_SURF_BANK_WIDTH_1 0
# define EVERGREEN_ADDR_SURF_BANK_WIDTH_2 1
# define EVERGREEN_ADDR_SURF_BANK_WIDTH_4 2
# define EVERGREEN_ADDR_SURF_BANK_WIDTH_8 3
# define EVERGREEN_GRPH_FORMAT(x) (((x) & 0x7) << 8)
/* 8 BPP */
# define EVERGREEN_GRPH_FORMAT_INDEXED 0
@ -61,6 +80,24 @@
# define EVERGREEN_GRPH_FORMAT_8B_BGRA1010102 5
# define EVERGREEN_GRPH_FORMAT_RGB111110 6
# define EVERGREEN_GRPH_FORMAT_BGR101111 7
# define EVERGREEN_GRPH_BANK_HEIGHT(x) (((x) & 0x3) << 11)
# define EVERGREEN_ADDR_SURF_BANK_HEIGHT_1 0
# define EVERGREEN_ADDR_SURF_BANK_HEIGHT_2 1
# define EVERGREEN_ADDR_SURF_BANK_HEIGHT_4 2
# define EVERGREEN_ADDR_SURF_BANK_HEIGHT_8 3
# define EVERGREEN_GRPH_TILE_SPLIT(x) (((x) & 0x7) << 13)
# define EVERGREEN_ADDR_SURF_TILE_SPLIT_64B 0
# define EVERGREEN_ADDR_SURF_TILE_SPLIT_128B 1
# define EVERGREEN_ADDR_SURF_TILE_SPLIT_256B 2
# define EVERGREEN_ADDR_SURF_TILE_SPLIT_512B 3
# define EVERGREEN_ADDR_SURF_TILE_SPLIT_1KB 4
# define EVERGREEN_ADDR_SURF_TILE_SPLIT_2KB 5
# define EVERGREEN_ADDR_SURF_TILE_SPLIT_4KB 6
# define EVERGREEN_GRPH_MACRO_TILE_ASPECT(x) (((x) & 0x3) << 18)
# define EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1 0
# define EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2 1
# define EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4 2
# define EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8 3
# define EVERGREEN_GRPH_ARRAY_MODE(x) (((x) & 0x7) << 20)
# define EVERGREEN_GRPH_ARRAY_LINEAR_GENERAL 0
# define EVERGREEN_GRPH_ARRAY_LINEAR_ALIGNED 1
@ -181,7 +218,10 @@
#define EVERGREEN_CRTC_CONTROL 0x6e70
# define EVERGREEN_CRTC_MASTER_EN (1 << 0)
# define EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE (1 << 24)
#define EVERGREEN_CRTC_BLANK_CONTROL 0x6e74
# define EVERGREEN_CRTC_BLANK_DATA_EN (1 << 8)
#define EVERGREEN_CRTC_STATUS 0x6e8c
# define EVERGREEN_CRTC_V_BLANK (1 << 0)
#define EVERGREEN_CRTC_STATUS_POSITION 0x6e90
#define EVERGREEN_MASTER_UPDATE_MODE 0x6ef8
#define EVERGREEN_CRTC_UPDATE_LOCK 0x6ed4
@ -191,4 +231,7 @@
#define EVERGREEN_DC_GPIO_HPD_EN 0x64b8
#define EVERGREEN_DC_GPIO_HPD_Y 0x64bc
/* HDMI blocks at 0x7030, 0x7c30, 0x10830, 0x11430, 0x12030, 0x12c30 */
#define EVERGREEN_HDMI_BASE 0x7030
#endif

View File

@ -37,6 +37,15 @@
#define EVERGREEN_MAX_PIPES_MASK 0xFF
#define EVERGREEN_MAX_LDS_NUM 0xFFFF
#define CYPRESS_GB_ADDR_CONFIG_GOLDEN 0x02011003
#define BARTS_GB_ADDR_CONFIG_GOLDEN 0x02011003
#define CAYMAN_GB_ADDR_CONFIG_GOLDEN 0x02011003
#define JUNIPER_GB_ADDR_CONFIG_GOLDEN 0x02010002
#define REDWOOD_GB_ADDR_CONFIG_GOLDEN 0x02010002
#define TURKS_GB_ADDR_CONFIG_GOLDEN 0x02010002
#define CEDAR_GB_ADDR_CONFIG_GOLDEN 0x02010001
#define CAICOS_GB_ADDR_CONFIG_GOLDEN 0x02010001
/* Registers */
#define RCU_IND_INDEX 0x100
@ -54,6 +63,7 @@
#define BACKEND_DISABLE(x) ((x) << 16)
#define GB_ADDR_CONFIG 0x98F8
#define NUM_PIPES(x) ((x) << 0)
#define NUM_PIPES_MASK 0x0000000f
#define PIPE_INTERLEAVE_SIZE(x) ((x) << 4)
#define BANK_INTERLEAVE_SIZE(x) ((x) << 8)
#define NUM_SHADER_ENGINES(x) ((x) << 12)
@ -77,6 +87,15 @@
#define CONFIG_MEMSIZE 0x5428
#define BIF_FB_EN 0x5490
#define FB_READ_EN (1 << 0)
#define FB_WRITE_EN (1 << 1)
#define CP_COHER_BASE 0x85F8
#define CP_STALLED_STAT1 0x8674
#define CP_STALLED_STAT2 0x8678
#define CP_BUSY_STAT 0x867C
#define CP_STAT 0x8680
#define CP_ME_CNTL 0x86D8
#define CP_ME_HALT (1 << 28)
#define CP_PFP_HALT (1 << 26)
@ -108,8 +127,229 @@
#define CP_RB_WPTR_ADDR_HI 0xC11C
#define CP_RB_WPTR_DELAY 0x8704
#define CP_SEM_WAIT_TIMER 0x85BC
#define CP_SEM_INCOMPLETE_TIMER_CNTL 0x85C8
#define CP_DEBUG 0xC1FC
/* Audio clocks */
#define DCCG_AUDIO_DTO_SOURCE 0x05ac
# define DCCG_AUDIO_DTO0_SOURCE_SEL(x) ((x) << 0) /* crtc0 - crtc5 */
# define DCCG_AUDIO_DTO_SEL (1 << 4) /* 0=dto0 1=dto1 */
#define DCCG_AUDIO_DTO0_PHASE 0x05b0
#define DCCG_AUDIO_DTO0_MODULE 0x05b4
#define DCCG_AUDIO_DTO0_LOAD 0x05b8
#define DCCG_AUDIO_DTO0_CNTL 0x05bc
#define DCCG_AUDIO_DTO1_PHASE 0x05c0
#define DCCG_AUDIO_DTO1_MODULE 0x05c4
#define DCCG_AUDIO_DTO1_LOAD 0x05c8
#define DCCG_AUDIO_DTO1_CNTL 0x05cc
/* DCE 4.0 AFMT */
#define HDMI_CONTROL 0x7030
# define HDMI_KEEPOUT_MODE (1 << 0)
# define HDMI_PACKET_GEN_VERSION (1 << 4) /* 0 = r6xx compat */
# define HDMI_ERROR_ACK (1 << 8)
# define HDMI_ERROR_MASK (1 << 9)
# define HDMI_DEEP_COLOR_ENABLE (1 << 24)
# define HDMI_DEEP_COLOR_DEPTH (((x) & 3) << 28)
# define HDMI_24BIT_DEEP_COLOR 0
# define HDMI_30BIT_DEEP_COLOR 1
# define HDMI_36BIT_DEEP_COLOR 2
#define HDMI_STATUS 0x7034
# define HDMI_ACTIVE_AVMUTE (1 << 0)
# define HDMI_AUDIO_PACKET_ERROR (1 << 16)
# define HDMI_VBI_PACKET_ERROR (1 << 20)
#define HDMI_AUDIO_PACKET_CONTROL 0x7038
# define HDMI_AUDIO_DELAY_EN(x) (((x) & 3) << 4)
# define HDMI_AUDIO_PACKETS_PER_LINE(x) (((x) & 0x1f) << 16)
#define HDMI_ACR_PACKET_CONTROL 0x703c
# define HDMI_ACR_SEND (1 << 0)
# define HDMI_ACR_CONT (1 << 1)
# define HDMI_ACR_SELECT(x) (((x) & 3) << 4)
# define HDMI_ACR_HW 0
# define HDMI_ACR_32 1
# define HDMI_ACR_44 2
# define HDMI_ACR_48 3
# define HDMI_ACR_SOURCE (1 << 8) /* 0 - hw; 1 - cts value */
# define HDMI_ACR_AUTO_SEND (1 << 12)
# define HDMI_ACR_N_MULTIPLE(x) (((x) & 7) << 16)
# define HDMI_ACR_X1 1
# define HDMI_ACR_X2 2
# define HDMI_ACR_X4 4
# define HDMI_ACR_AUDIO_PRIORITY (1 << 31)
#define HDMI_VBI_PACKET_CONTROL 0x7040
# define HDMI_NULL_SEND (1 << 0)
# define HDMI_GC_SEND (1 << 4)
# define HDMI_GC_CONT (1 << 5) /* 0 - once; 1 - every frame */
#define HDMI_INFOFRAME_CONTROL0 0x7044
# define HDMI_AVI_INFO_SEND (1 << 0)
# define HDMI_AVI_INFO_CONT (1 << 1)
# define HDMI_AUDIO_INFO_SEND (1 << 4)
# define HDMI_AUDIO_INFO_CONT (1 << 5)
# define HDMI_MPEG_INFO_SEND (1 << 8)
# define HDMI_MPEG_INFO_CONT (1 << 9)
#define HDMI_INFOFRAME_CONTROL1 0x7048
# define HDMI_AVI_INFO_LINE(x) (((x) & 0x3f) << 0)
# define HDMI_AUDIO_INFO_LINE(x) (((x) & 0x3f) << 8)
# define HDMI_MPEG_INFO_LINE(x) (((x) & 0x3f) << 16)
#define HDMI_GENERIC_PACKET_CONTROL 0x704c
# define HDMI_GENERIC0_SEND (1 << 0)
# define HDMI_GENERIC0_CONT (1 << 1)
# define HDMI_GENERIC1_SEND (1 << 4)
# define HDMI_GENERIC1_CONT (1 << 5)
# define HDMI_GENERIC0_LINE(x) (((x) & 0x3f) << 16)
# define HDMI_GENERIC1_LINE(x) (((x) & 0x3f) << 24)
#define HDMI_GC 0x7058
# define HDMI_GC_AVMUTE (1 << 0)
# define HDMI_GC_AVMUTE_CONT (1 << 2)
#define AFMT_AUDIO_PACKET_CONTROL2 0x705c
# define AFMT_AUDIO_LAYOUT_OVRD (1 << 0)
# define AFMT_AUDIO_LAYOUT_SELECT (1 << 1)
# define AFMT_60958_CS_SOURCE (1 << 4)
# define AFMT_AUDIO_CHANNEL_ENABLE(x) (((x) & 0xff) << 8)
# define AFMT_DP_AUDIO_STREAM_ID(x) (((x) & 0xff) << 16)
#define AFMT_AVI_INFO0 0x7084
# define AFMT_AVI_INFO_CHECKSUM(x) (((x) & 0xff) << 0)
# define AFMT_AVI_INFO_S(x) (((x) & 3) << 8)
# define AFMT_AVI_INFO_B(x) (((x) & 3) << 10)
# define AFMT_AVI_INFO_A(x) (((x) & 1) << 12)
# define AFMT_AVI_INFO_Y(x) (((x) & 3) << 13)
# define AFMT_AVI_INFO_Y_RGB 0
# define AFMT_AVI_INFO_Y_YCBCR422 1
# define AFMT_AVI_INFO_Y_YCBCR444 2
# define AFMT_AVI_INFO_Y_A_B_S(x) (((x) & 0xff) << 8)
# define AFMT_AVI_INFO_R(x) (((x) & 0xf) << 16)
# define AFMT_AVI_INFO_M(x) (((x) & 0x3) << 20)
# define AFMT_AVI_INFO_C(x) (((x) & 0x3) << 22)
# define AFMT_AVI_INFO_C_M_R(x) (((x) & 0xff) << 16)
# define AFMT_AVI_INFO_SC(x) (((x) & 0x3) << 24)
# define AFMT_AVI_INFO_Q(x) (((x) & 0x3) << 26)
# define AFMT_AVI_INFO_EC(x) (((x) & 0x3) << 28)
# define AFMT_AVI_INFO_ITC(x) (((x) & 0x1) << 31)
# define AFMT_AVI_INFO_ITC_EC_Q_SC(x) (((x) & 0xff) << 24)
#define AFMT_AVI_INFO1 0x7088
# define AFMT_AVI_INFO_VIC(x) (((x) & 0x7f) << 0) /* don't use avi infoframe v1 */
# define AFMT_AVI_INFO_PR(x) (((x) & 0xf) << 8) /* don't use avi infoframe v1 */
# define AFMT_AVI_INFO_CN(x) (((x) & 0x3) << 12)
# define AFMT_AVI_INFO_YQ(x) (((x) & 0x3) << 14)
# define AFMT_AVI_INFO_TOP(x) (((x) & 0xffff) << 16)
#define AFMT_AVI_INFO2 0x708c
# define AFMT_AVI_INFO_BOTTOM(x) (((x) & 0xffff) << 0)
# define AFMT_AVI_INFO_LEFT(x) (((x) & 0xffff) << 16)
#define AFMT_AVI_INFO3 0x7090
# define AFMT_AVI_INFO_RIGHT(x) (((x) & 0xffff) << 0)
# define AFMT_AVI_INFO_VERSION(x) (((x) & 3) << 24)
#define AFMT_MPEG_INFO0 0x7094
# define AFMT_MPEG_INFO_CHECKSUM(x) (((x) & 0xff) << 0)
# define AFMT_MPEG_INFO_MB0(x) (((x) & 0xff) << 8)
# define AFMT_MPEG_INFO_MB1(x) (((x) & 0xff) << 16)
# define AFMT_MPEG_INFO_MB2(x) (((x) & 0xff) << 24)
#define AFMT_MPEG_INFO1 0x7098
# define AFMT_MPEG_INFO_MB3(x) (((x) & 0xff) << 0)
# define AFMT_MPEG_INFO_MF(x) (((x) & 3) << 8)
# define AFMT_MPEG_INFO_FR(x) (((x) & 1) << 12)
#define AFMT_GENERIC0_HDR 0x709c
#define AFMT_GENERIC0_0 0x70a0
#define AFMT_GENERIC0_1 0x70a4
#define AFMT_GENERIC0_2 0x70a8
#define AFMT_GENERIC0_3 0x70ac
#define AFMT_GENERIC0_4 0x70b0
#define AFMT_GENERIC0_5 0x70b4
#define AFMT_GENERIC0_6 0x70b8
#define AFMT_GENERIC1_HDR 0x70bc
#define AFMT_GENERIC1_0 0x70c0
#define AFMT_GENERIC1_1 0x70c4
#define AFMT_GENERIC1_2 0x70c8
#define AFMT_GENERIC1_3 0x70cc
#define AFMT_GENERIC1_4 0x70d0
#define AFMT_GENERIC1_5 0x70d4
#define AFMT_GENERIC1_6 0x70d8
#define HDMI_ACR_32_0 0x70dc
# define HDMI_ACR_CTS_32(x) (((x) & 0xfffff) << 12)
#define HDMI_ACR_32_1 0x70e0
# define HDMI_ACR_N_32(x) (((x) & 0xfffff) << 0)
#define HDMI_ACR_44_0 0x70e4
# define HDMI_ACR_CTS_44(x) (((x) & 0xfffff) << 12)
#define HDMI_ACR_44_1 0x70e8
# define HDMI_ACR_N_44(x) (((x) & 0xfffff) << 0)
#define HDMI_ACR_48_0 0x70ec
# define HDMI_ACR_CTS_48(x) (((x) & 0xfffff) << 12)
#define HDMI_ACR_48_1 0x70f0
# define HDMI_ACR_N_48(x) (((x) & 0xfffff) << 0)
#define HDMI_ACR_STATUS_0 0x70f4
#define HDMI_ACR_STATUS_1 0x70f8
#define AFMT_AUDIO_INFO0 0x70fc
# define AFMT_AUDIO_INFO_CHECKSUM(x) (((x) & 0xff) << 0)
# define AFMT_AUDIO_INFO_CC(x) (((x) & 7) << 8)
# define AFMT_AUDIO_INFO_CT(x) (((x) & 0xf) << 11)
# define AFMT_AUDIO_INFO_CHECKSUM_OFFSET(x) (((x) & 0xff) << 16)
# define AFMT_AUDIO_INFO_CXT(x) (((x) & 0x1f) << 24)
#define AFMT_AUDIO_INFO1 0x7100
# define AFMT_AUDIO_INFO_CA(x) (((x) & 0xff) << 0)
# define AFMT_AUDIO_INFO_LSV(x) (((x) & 0xf) << 11)
# define AFMT_AUDIO_INFO_DM_INH(x) (((x) & 1) << 15)
# define AFMT_AUDIO_INFO_DM_INH_LSV(x) (((x) & 0xff) << 8)
# define AFMT_AUDIO_INFO_LFEBPL(x) (((x) & 3) << 16)
#define AFMT_60958_0 0x7104
# define AFMT_60958_CS_A(x) (((x) & 1) << 0)
# define AFMT_60958_CS_B(x) (((x) & 1) << 1)
# define AFMT_60958_CS_C(x) (((x) & 1) << 2)
# define AFMT_60958_CS_D(x) (((x) & 3) << 3)
# define AFMT_60958_CS_MODE(x) (((x) & 3) << 6)
# define AFMT_60958_CS_CATEGORY_CODE(x) (((x) & 0xff) << 8)
# define AFMT_60958_CS_SOURCE_NUMBER(x) (((x) & 0xf) << 16)
# define AFMT_60958_CS_CHANNEL_NUMBER_L(x) (((x) & 0xf) << 20)
# define AFMT_60958_CS_SAMPLING_FREQUENCY(x) (((x) & 0xf) << 24)
# define AFMT_60958_CS_CLOCK_ACCURACY(x) (((x) & 3) << 28)
#define AFMT_60958_1 0x7108
# define AFMT_60958_CS_WORD_LENGTH(x) (((x) & 0xf) << 0)
# define AFMT_60958_CS_ORIGINAL_SAMPLING_FREQUENCY(x) (((x) & 0xf) << 4)
# define AFMT_60958_CS_VALID_L(x) (((x) & 1) << 16)
# define AFMT_60958_CS_VALID_R(x) (((x) & 1) << 18)
# define AFMT_60958_CS_CHANNEL_NUMBER_R(x) (((x) & 0xf) << 20)
#define AFMT_AUDIO_CRC_CONTROL 0x710c
# define AFMT_AUDIO_CRC_EN (1 << 0)
#define AFMT_RAMP_CONTROL0 0x7110
# define AFMT_RAMP_MAX_COUNT(x) (((x) & 0xffffff) << 0)
# define AFMT_RAMP_DATA_SIGN (1 << 31)
#define AFMT_RAMP_CONTROL1 0x7114
# define AFMT_RAMP_MIN_COUNT(x) (((x) & 0xffffff) << 0)
# define AFMT_AUDIO_TEST_CH_DISABLE(x) (((x) & 0xff) << 24)
#define AFMT_RAMP_CONTROL2 0x7118
# define AFMT_RAMP_INC_COUNT(x) (((x) & 0xffffff) << 0)
#define AFMT_RAMP_CONTROL3 0x711c
# define AFMT_RAMP_DEC_COUNT(x) (((x) & 0xffffff) << 0)
#define AFMT_60958_2 0x7120
# define AFMT_60958_CS_CHANNEL_NUMBER_2(x) (((x) & 0xf) << 0)
# define AFMT_60958_CS_CHANNEL_NUMBER_3(x) (((x) & 0xf) << 4)
# define AFMT_60958_CS_CHANNEL_NUMBER_4(x) (((x) & 0xf) << 8)
# define AFMT_60958_CS_CHANNEL_NUMBER_5(x) (((x) & 0xf) << 12)
# define AFMT_60958_CS_CHANNEL_NUMBER_6(x) (((x) & 0xf) << 16)
# define AFMT_60958_CS_CHANNEL_NUMBER_7(x) (((x) & 0xf) << 20)
#define AFMT_STATUS 0x7128
# define AFMT_AUDIO_ENABLE (1 << 4)
# define AFMT_AUDIO_HBR_ENABLE (1 << 8)
# define AFMT_AZ_FORMAT_WTRIG (1 << 28)
# define AFMT_AZ_FORMAT_WTRIG_INT (1 << 29)
# define AFMT_AZ_AUDIO_ENABLE_CHG (1 << 30)
#define AFMT_AUDIO_PACKET_CONTROL 0x712c
# define AFMT_AUDIO_SAMPLE_SEND (1 << 0)
# define AFMT_RESET_FIFO_WHEN_AUDIO_DIS (1 << 11) /* set to 1 */
# define AFMT_AUDIO_TEST_EN (1 << 12)
# define AFMT_AUDIO_CHANNEL_SWAP (1 << 24)
# define AFMT_60958_CS_UPDATE (1 << 26)
# define AFMT_AZ_AUDIO_ENABLE_CHG_MASK (1 << 27)
# define AFMT_AZ_FORMAT_WTRIG_MASK (1 << 28)
# define AFMT_AZ_FORMAT_WTRIG_ACK (1 << 29)
# define AFMT_AZ_AUDIO_ENABLE_CHG_ACK (1 << 30)
#define AFMT_VBI_PACKET_CONTROL 0x7130
# define AFMT_GENERIC0_UPDATE (1 << 2)
#define AFMT_INFOFRAME_CONTROL0 0x7134
# define AFMT_AUDIO_INFO_SOURCE (1 << 6) /* 0 - sound block; 1 - afmt regs */
# define AFMT_AUDIO_INFO_UPDATE (1 << 7)
# define AFMT_MPEG_INFO_UPDATE (1 << 10)
#define AFMT_GENERIC0_7 0x7138
#define GC_USER_SHADER_PIPE_CONFIG 0x8954
#define INACTIVE_QD_PIPES(x) ((x) << 8)
@ -194,6 +434,9 @@
#define NOOFCHAN_MASK 0x00003000
#define MC_SHARED_CHREMAP 0x2008
#define MC_SHARED_BLACKOUT_CNTL 0x20ac
#define BLACKOUT_MODE_MASK 0x00000007
#define MC_ARB_RAMCFG 0x2760
#define NOOFBANK_SHIFT 0
#define NOOFBANK_MASK 0x00000003
@ -230,6 +473,7 @@
#define MC_VM_MD_L1_TLB0_CNTL 0x2654
#define MC_VM_MD_L1_TLB1_CNTL 0x2658
#define MC_VM_MD_L1_TLB2_CNTL 0x265C
#define MC_VM_MD_L1_TLB3_CNTL 0x2698
#define FUS_MC_VM_MD_L1_TLB0_CNTL 0x265C
#define FUS_MC_VM_MD_L1_TLB1_CNTL 0x2660
@ -242,6 +486,7 @@
#define PA_CL_ENHANCE 0x8A14
#define CLIP_VTX_REORDER_ENA (1 << 0)
#define NUM_CLIP_SEQ(x) ((x) << 1)
#define PA_SC_ENHANCE 0x8BF0
#define PA_SC_AA_CONFIG 0x28C04
#define MSAA_NUM_SAMPLES_SHIFT 0
#define MSAA_NUM_SAMPLES_MASK 0x3
@ -269,6 +514,7 @@
#define SCRATCH_UMSK 0x8540
#define SCRATCH_ADDR 0x8544
#define SMX_SAR_CTL0 0xA008
#define SMX_DC_CTL0 0xA020
#define USE_HASH_FUNCTION (1 << 0)
#define NUMBER_OF_SETS(x) ((x) << 1)
@ -319,6 +565,8 @@
#define SQ_GPR_RESOURCE_MGMT_3 0x8C0C
#define NUM_HS_GPRS(x) ((x) << 0)
#define NUM_LS_GPRS(x) ((x) << 16)
#define SQ_GLOBAL_GPR_RESOURCE_MGMT_1 0x8C10
#define SQ_GLOBAL_GPR_RESOURCE_MGMT_2 0x8C14
#define SQ_THREAD_RESOURCE_MGMT 0x8C18
#define NUM_PS_THREADS(x) ((x) << 0)
#define NUM_VS_THREADS(x) ((x) << 8)
@ -337,6 +585,10 @@
#define NUM_HS_STACK_ENTRIES(x) ((x) << 0)
#define NUM_LS_STACK_ENTRIES(x) ((x) << 16)
#define SQ_DYN_GPR_CNTL_PS_FLUSH_REQ 0x8D8C
#define SQ_DYN_GPR_SIMD_LOCK_EN 0x8D94
#define SQ_STATIC_THREAD_MGMT_1 0x8E20
#define SQ_STATIC_THREAD_MGMT_2 0x8E24
#define SQ_STATIC_THREAD_MGMT_3 0x8E28
#define SQ_LDS_RESOURCE_MGMT 0x8E2C
#define SQ_MS_FIFO_SIZES 0x8CF0
@ -691,6 +943,7 @@
#define PACKET3_DRAW_INDEX_MULTI_ELEMENT 0x36
#define PACKET3_MEM_SEMAPHORE 0x39
#define PACKET3_MPEG_INDEX 0x3A
#define PACKET3_COPY_DW 0x3B
#define PACKET3_WAIT_REG_MEM 0x3C
#define PACKET3_MEM_WRITE 0x3D
#define PACKET3_INDIRECT_BUFFER 0x32
@ -768,6 +1021,8 @@
#define SQ_TEX_VTX_VALID_TEXTURE 0x2
#define SQ_TEX_VTX_VALID_BUFFER 0x3
#define VGT_VTX_VECT_EJECT_REG 0x88b0
#define SQ_CONST_MEM_BASE 0x8df8
#define SQ_ESGS_RING_BASE 0x8c40
@ -892,19 +1147,162 @@
#define PA_SC_SCREEN_SCISSOR_TL 0x28030
#define PA_SC_GENERIC_SCISSOR_TL 0x28240
#define PA_SC_WINDOW_SCISSOR_TL 0x28204
#define VGT_PRIMITIVE_TYPE 0x8958
#define VGT_INDEX_TYPE 0x895C
#define VGT_NUM_INDICES 0x8970
#define VGT_COMPUTE_DIM_X 0x8990
#define VGT_COMPUTE_DIM_Y 0x8994
#define VGT_COMPUTE_DIM_Z 0x8998
#define VGT_COMPUTE_START_X 0x899C
#define VGT_COMPUTE_START_Y 0x89A0
#define VGT_COMPUTE_START_Z 0x89A4
#define VGT_COMPUTE_INDEX 0x89A8
#define VGT_COMPUTE_THREAD_GROUP_SIZE 0x89AC
#define VGT_HS_OFFCHIP_PARAM 0x89B0
#define DB_DEBUG 0x9830
#define DB_DEBUG2 0x9834
#define DB_DEBUG3 0x9838
#define DB_DEBUG4 0x983C
#define DB_WATERMARKS 0x9854
#define DB_DEPTH_CONTROL 0x28800
#define R_028800_DB_DEPTH_CONTROL 0x028800
#define S_028800_STENCIL_ENABLE(x) (((x) & 0x1) << 0)
#define G_028800_STENCIL_ENABLE(x) (((x) >> 0) & 0x1)
#define C_028800_STENCIL_ENABLE 0xFFFFFFFE
#define S_028800_Z_ENABLE(x) (((x) & 0x1) << 1)
#define G_028800_Z_ENABLE(x) (((x) >> 1) & 0x1)
#define C_028800_Z_ENABLE 0xFFFFFFFD
#define S_028800_Z_WRITE_ENABLE(x) (((x) & 0x1) << 2)
#define G_028800_Z_WRITE_ENABLE(x) (((x) >> 2) & 0x1)
#define C_028800_Z_WRITE_ENABLE 0xFFFFFFFB
#define S_028800_ZFUNC(x) (((x) & 0x7) << 4)
#define G_028800_ZFUNC(x) (((x) >> 4) & 0x7)
#define C_028800_ZFUNC 0xFFFFFF8F
#define S_028800_BACKFACE_ENABLE(x) (((x) & 0x1) << 7)
#define G_028800_BACKFACE_ENABLE(x) (((x) >> 7) & 0x1)
#define C_028800_BACKFACE_ENABLE 0xFFFFFF7F
#define S_028800_STENCILFUNC(x) (((x) & 0x7) << 8)
#define G_028800_STENCILFUNC(x) (((x) >> 8) & 0x7)
#define C_028800_STENCILFUNC 0xFFFFF8FF
#define V_028800_STENCILFUNC_NEVER 0x00000000
#define V_028800_STENCILFUNC_LESS 0x00000001
#define V_028800_STENCILFUNC_EQUAL 0x00000002
#define V_028800_STENCILFUNC_LEQUAL 0x00000003
#define V_028800_STENCILFUNC_GREATER 0x00000004
#define V_028800_STENCILFUNC_NOTEQUAL 0x00000005
#define V_028800_STENCILFUNC_GEQUAL 0x00000006
#define V_028800_STENCILFUNC_ALWAYS 0x00000007
#define S_028800_STENCILFAIL(x) (((x) & 0x7) << 11)
#define G_028800_STENCILFAIL(x) (((x) >> 11) & 0x7)
#define C_028800_STENCILFAIL 0xFFFFC7FF
#define V_028800_STENCIL_KEEP 0x00000000
#define V_028800_STENCIL_ZERO 0x00000001
#define V_028800_STENCIL_REPLACE 0x00000002
#define V_028800_STENCIL_INCR 0x00000003
#define V_028800_STENCIL_DECR 0x00000004
#define V_028800_STENCIL_INVERT 0x00000005
#define V_028800_STENCIL_INCR_WRAP 0x00000006
#define V_028800_STENCIL_DECR_WRAP 0x00000007
#define S_028800_STENCILZPASS(x) (((x) & 0x7) << 14)
#define G_028800_STENCILZPASS(x) (((x) >> 14) & 0x7)
#define C_028800_STENCILZPASS 0xFFFE3FFF
#define S_028800_STENCILZFAIL(x) (((x) & 0x7) << 17)
#define G_028800_STENCILZFAIL(x) (((x) >> 17) & 0x7)
#define C_028800_STENCILZFAIL 0xFFF1FFFF
#define S_028800_STENCILFUNC_BF(x) (((x) & 0x7) << 20)
#define G_028800_STENCILFUNC_BF(x) (((x) >> 20) & 0x7)
#define C_028800_STENCILFUNC_BF 0xFF8FFFFF
#define S_028800_STENCILFAIL_BF(x) (((x) & 0x7) << 23)
#define G_028800_STENCILFAIL_BF(x) (((x) >> 23) & 0x7)
#define C_028800_STENCILFAIL_BF 0xFC7FFFFF
#define S_028800_STENCILZPASS_BF(x) (((x) & 0x7) << 26)
#define G_028800_STENCILZPASS_BF(x) (((x) >> 26) & 0x7)
#define C_028800_STENCILZPASS_BF 0xE3FFFFFF
#define S_028800_STENCILZFAIL_BF(x) (((x) & 0x7) << 29)
#define G_028800_STENCILZFAIL_BF(x) (((x) >> 29) & 0x7)
#define C_028800_STENCILZFAIL_BF 0x1FFFFFFF
#define DB_DEPTH_VIEW 0x28008
#define R_028008_DB_DEPTH_VIEW 0x00028008
#define S_028008_SLICE_START(x) (((x) & 0x7FF) << 0)
#define G_028008_SLICE_START(x) (((x) >> 0) & 0x7FF)
#define C_028008_SLICE_START 0xFFFFF800
#define S_028008_SLICE_MAX(x) (((x) & 0x7FF) << 13)
#define G_028008_SLICE_MAX(x) (((x) >> 13) & 0x7FF)
#define C_028008_SLICE_MAX 0xFF001FFF
#define DB_HTILE_DATA_BASE 0x28014
#define DB_HTILE_SURFACE 0x28abc
#define S_028ABC_HTILE_WIDTH(x) (((x) & 0x1) << 0)
#define G_028ABC_HTILE_WIDTH(x) (((x) >> 0) & 0x1)
#define C_028ABC_HTILE_WIDTH 0xFFFFFFFE
#define S_028ABC_HTILE_HEIGHT(x) (((x) & 0x1) << 1)
#define G_028ABC_HTILE_HEIGHT(x) (((x) >> 1) & 0x1)
#define C_028ABC_HTILE_HEIGHT 0xFFFFFFFD
#define G_028ABC_LINEAR(x) (((x) >> 2) & 0x1)
#define DB_Z_INFO 0x28040
# define Z_ARRAY_MODE(x) ((x) << 4)
# define DB_TILE_SPLIT(x) (((x) & 0x7) << 8)
# define DB_NUM_BANKS(x) (((x) & 0x3) << 12)
# define DB_BANK_WIDTH(x) (((x) & 0x3) << 16)
# define DB_BANK_HEIGHT(x) (((x) & 0x3) << 20)
# define DB_MACRO_TILE_ASPECT(x) (((x) & 0x3) << 24)
#define R_028040_DB_Z_INFO 0x028040
#define S_028040_FORMAT(x) (((x) & 0x3) << 0)
#define G_028040_FORMAT(x) (((x) >> 0) & 0x3)
#define C_028040_FORMAT 0xFFFFFFFC
#define V_028040_Z_INVALID 0x00000000
#define V_028040_Z_16 0x00000001
#define V_028040_Z_24 0x00000002
#define V_028040_Z_32_FLOAT 0x00000003
#define S_028040_ARRAY_MODE(x) (((x) & 0xF) << 4)
#define G_028040_ARRAY_MODE(x) (((x) >> 4) & 0xF)
#define C_028040_ARRAY_MODE 0xFFFFFF0F
#define S_028040_READ_SIZE(x) (((x) & 0x1) << 28)
#define G_028040_READ_SIZE(x) (((x) >> 28) & 0x1)
#define C_028040_READ_SIZE 0xEFFFFFFF
#define S_028040_TILE_SURFACE_ENABLE(x) (((x) & 0x1) << 29)
#define G_028040_TILE_SURFACE_ENABLE(x) (((x) >> 29) & 0x1)
#define C_028040_TILE_SURFACE_ENABLE 0xDFFFFFFF
#define S_028040_ZRANGE_PRECISION(x) (((x) & 0x1) << 31)
#define G_028040_ZRANGE_PRECISION(x) (((x) >> 31) & 0x1)
#define C_028040_ZRANGE_PRECISION 0x7FFFFFFF
#define S_028040_TILE_SPLIT(x) (((x) & 0x7) << 8)
#define G_028040_TILE_SPLIT(x) (((x) >> 8) & 0x7)
#define S_028040_NUM_BANKS(x) (((x) & 0x3) << 12)
#define G_028040_NUM_BANKS(x) (((x) >> 12) & 0x3)
#define S_028040_BANK_WIDTH(x) (((x) & 0x3) << 16)
#define G_028040_BANK_WIDTH(x) (((x) >> 16) & 0x3)
#define S_028040_BANK_HEIGHT(x) (((x) & 0x3) << 20)
#define G_028040_BANK_HEIGHT(x) (((x) >> 20) & 0x3)
#define S_028040_MACRO_TILE_ASPECT(x) (((x) & 0x3) << 24)
#define G_028040_MACRO_TILE_ASPECT(x) (((x) >> 24) & 0x3)
#define DB_STENCIL_INFO 0x28044
#define R_028044_DB_STENCIL_INFO 0x028044
#define S_028044_FORMAT(x) (((x) & 0x1) << 0)
#define G_028044_FORMAT(x) (((x) >> 0) & 0x1)
#define C_028044_FORMAT 0xFFFFFFFE
#define V_028044_STENCIL_INVALID 0
#define V_028044_STENCIL_8 1
#define G_028044_TILE_SPLIT(x) (((x) >> 8) & 0x7)
#define DB_Z_READ_BASE 0x28048
#define DB_STENCIL_READ_BASE 0x2804c
#define DB_Z_WRITE_BASE 0x28050
#define DB_STENCIL_WRITE_BASE 0x28054
#define DB_DEPTH_SIZE 0x28058
#define R_028058_DB_DEPTH_SIZE 0x028058
#define S_028058_PITCH_TILE_MAX(x) (((x) & 0x7FF) << 0)
#define G_028058_PITCH_TILE_MAX(x) (((x) >> 0) & 0x7FF)
#define C_028058_PITCH_TILE_MAX 0xFFFFF800
#define S_028058_HEIGHT_TILE_MAX(x) (((x) & 0x7FF) << 11)
#define G_028058_HEIGHT_TILE_MAX(x) (((x) >> 11) & 0x7FF)
#define C_028058_HEIGHT_TILE_MAX 0xFFC007FF
#define R_02805C_DB_DEPTH_SLICE 0x02805C
#define S_02805C_SLICE_TILE_MAX(x) (((x) & 0x3FFFFF) << 0)
#define G_02805C_SLICE_TILE_MAX(x) (((x) >> 0) & 0x3FFFFF)
#define C_02805C_SLICE_TILE_MAX 0xFFC00000
#define SQ_PGM_START_PS 0x28840
#define SQ_PGM_START_VS 0x2885c
@ -914,6 +1312,14 @@
#define SQ_PGM_START_HS 0x288b8
#define SQ_PGM_START_LS 0x288d0
#define VGT_STRMOUT_BUFFER_BASE_0 0x28AD8
#define VGT_STRMOUT_BUFFER_BASE_1 0x28AE8
#define VGT_STRMOUT_BUFFER_BASE_2 0x28AF8
#define VGT_STRMOUT_BUFFER_BASE_3 0x28B08
#define VGT_STRMOUT_BUFFER_SIZE_0 0x28AD0
#define VGT_STRMOUT_BUFFER_SIZE_1 0x28AE0
#define VGT_STRMOUT_BUFFER_SIZE_2 0x28AF0
#define VGT_STRMOUT_BUFFER_SIZE_3 0x28B00
#define VGT_STRMOUT_CONFIG 0x28b94
#define VGT_STRMOUT_BUFFER_CONFIG 0x28b98
@ -940,13 +1346,163 @@
#define CB_COLOR0_PITCH 0x28c64
#define CB_COLOR0_SLICE 0x28c68
#define CB_COLOR0_VIEW 0x28c6c
#define R_028C6C_CB_COLOR0_VIEW 0x00028C6C
#define S_028C6C_SLICE_START(x) (((x) & 0x7FF) << 0)
#define G_028C6C_SLICE_START(x) (((x) >> 0) & 0x7FF)
#define C_028C6C_SLICE_START 0xFFFFF800
#define S_028C6C_SLICE_MAX(x) (((x) & 0x7FF) << 13)
#define G_028C6C_SLICE_MAX(x) (((x) >> 13) & 0x7FF)
#define C_028C6C_SLICE_MAX 0xFF001FFF
#define R_028C70_CB_COLOR0_INFO 0x028C70
#define S_028C70_ENDIAN(x) (((x) & 0x3) << 0)
#define G_028C70_ENDIAN(x) (((x) >> 0) & 0x3)
#define C_028C70_ENDIAN 0xFFFFFFFC
#define S_028C70_FORMAT(x) (((x) & 0x3F) << 2)
#define G_028C70_FORMAT(x) (((x) >> 2) & 0x3F)
#define C_028C70_FORMAT 0xFFFFFF03
#define V_028C70_COLOR_INVALID 0x00000000
#define V_028C70_COLOR_8 0x00000001
#define V_028C70_COLOR_4_4 0x00000002
#define V_028C70_COLOR_3_3_2 0x00000003
#define V_028C70_COLOR_16 0x00000005
#define V_028C70_COLOR_16_FLOAT 0x00000006
#define V_028C70_COLOR_8_8 0x00000007
#define V_028C70_COLOR_5_6_5 0x00000008
#define V_028C70_COLOR_6_5_5 0x00000009
#define V_028C70_COLOR_1_5_5_5 0x0000000A
#define V_028C70_COLOR_4_4_4_4 0x0000000B
#define V_028C70_COLOR_5_5_5_1 0x0000000C
#define V_028C70_COLOR_32 0x0000000D
#define V_028C70_COLOR_32_FLOAT 0x0000000E
#define V_028C70_COLOR_16_16 0x0000000F
#define V_028C70_COLOR_16_16_FLOAT 0x00000010
#define V_028C70_COLOR_8_24 0x00000011
#define V_028C70_COLOR_8_24_FLOAT 0x00000012
#define V_028C70_COLOR_24_8 0x00000013
#define V_028C70_COLOR_24_8_FLOAT 0x00000014
#define V_028C70_COLOR_10_11_11 0x00000015
#define V_028C70_COLOR_10_11_11_FLOAT 0x00000016
#define V_028C70_COLOR_11_11_10 0x00000017
#define V_028C70_COLOR_11_11_10_FLOAT 0x00000018
#define V_028C70_COLOR_2_10_10_10 0x00000019
#define V_028C70_COLOR_8_8_8_8 0x0000001A
#define V_028C70_COLOR_10_10_10_2 0x0000001B
#define V_028C70_COLOR_X24_8_32_FLOAT 0x0000001C
#define V_028C70_COLOR_32_32 0x0000001D
#define V_028C70_COLOR_32_32_FLOAT 0x0000001E
#define V_028C70_COLOR_16_16_16_16 0x0000001F
#define V_028C70_COLOR_16_16_16_16_FLOAT 0x00000020
#define V_028C70_COLOR_32_32_32_32 0x00000022
#define V_028C70_COLOR_32_32_32_32_FLOAT 0x00000023
#define V_028C70_COLOR_32_32_32_FLOAT 0x00000030
#define S_028C70_ARRAY_MODE(x) (((x) & 0xF) << 8)
#define G_028C70_ARRAY_MODE(x) (((x) >> 8) & 0xF)
#define C_028C70_ARRAY_MODE 0xFFFFF0FF
#define V_028C70_ARRAY_LINEAR_GENERAL 0x00000000
#define V_028C70_ARRAY_LINEAR_ALIGNED 0x00000001
#define V_028C70_ARRAY_1D_TILED_THIN1 0x00000002
#define V_028C70_ARRAY_2D_TILED_THIN1 0x00000004
#define S_028C70_NUMBER_TYPE(x) (((x) & 0x7) << 12)
#define G_028C70_NUMBER_TYPE(x) (((x) >> 12) & 0x7)
#define C_028C70_NUMBER_TYPE 0xFFFF8FFF
#define V_028C70_NUMBER_UNORM 0x00000000
#define V_028C70_NUMBER_SNORM 0x00000001
#define V_028C70_NUMBER_USCALED 0x00000002
#define V_028C70_NUMBER_SSCALED 0x00000003
#define V_028C70_NUMBER_UINT 0x00000004
#define V_028C70_NUMBER_SINT 0x00000005
#define V_028C70_NUMBER_SRGB 0x00000006
#define V_028C70_NUMBER_FLOAT 0x00000007
#define S_028C70_COMP_SWAP(x) (((x) & 0x3) << 15)
#define G_028C70_COMP_SWAP(x) (((x) >> 15) & 0x3)
#define C_028C70_COMP_SWAP 0xFFFE7FFF
#define V_028C70_SWAP_STD 0x00000000
#define V_028C70_SWAP_ALT 0x00000001
#define V_028C70_SWAP_STD_REV 0x00000002
#define V_028C70_SWAP_ALT_REV 0x00000003
#define S_028C70_FAST_CLEAR(x) (((x) & 0x1) << 17)
#define G_028C70_FAST_CLEAR(x) (((x) >> 17) & 0x1)
#define C_028C70_FAST_CLEAR 0xFFFDFFFF
#define S_028C70_COMPRESSION(x) (((x) & 0x3) << 18)
#define G_028C70_COMPRESSION(x) (((x) >> 18) & 0x3)
#define C_028C70_COMPRESSION 0xFFF3FFFF
#define S_028C70_BLEND_CLAMP(x) (((x) & 0x1) << 19)
#define G_028C70_BLEND_CLAMP(x) (((x) >> 19) & 0x1)
#define C_028C70_BLEND_CLAMP 0xFFF7FFFF
#define S_028C70_BLEND_BYPASS(x) (((x) & 0x1) << 20)
#define G_028C70_BLEND_BYPASS(x) (((x) >> 20) & 0x1)
#define C_028C70_BLEND_BYPASS 0xFFEFFFFF
#define S_028C70_SIMPLE_FLOAT(x) (((x) & 0x1) << 21)
#define G_028C70_SIMPLE_FLOAT(x) (((x) >> 21) & 0x1)
#define C_028C70_SIMPLE_FLOAT 0xFFDFFFFF
#define S_028C70_ROUND_MODE(x) (((x) & 0x1) << 22)
#define G_028C70_ROUND_MODE(x) (((x) >> 22) & 0x1)
#define C_028C70_ROUND_MODE 0xFFBFFFFF
#define S_028C70_TILE_COMPACT(x) (((x) & 0x1) << 23)
#define G_028C70_TILE_COMPACT(x) (((x) >> 23) & 0x1)
#define C_028C70_TILE_COMPACT 0xFF7FFFFF
#define S_028C70_SOURCE_FORMAT(x) (((x) & 0x3) << 24)
#define G_028C70_SOURCE_FORMAT(x) (((x) >> 24) & 0x3)
#define C_028C70_SOURCE_FORMAT 0xFCFFFFFF
#define V_028C70_EXPORT_4C_32BPC 0x0
#define V_028C70_EXPORT_4C_16BPC 0x1
#define V_028C70_EXPORT_2C_32BPC 0x2 /* Do not use */
#define S_028C70_RAT(x) (((x) & 0x1) << 26)
#define G_028C70_RAT(x) (((x) >> 26) & 0x1)
#define C_028C70_RAT 0xFBFFFFFF
#define S_028C70_RESOURCE_TYPE(x) (((x) & 0x7) << 27)
#define G_028C70_RESOURCE_TYPE(x) (((x) >> 27) & 0x7)
#define C_028C70_RESOURCE_TYPE 0xC7FFFFFF
#define CB_COLOR0_INFO 0x28c70
# define CB_FORMAT(x) ((x) << 2)
# define CB_ARRAY_MODE(x) ((x) << 8)
# define ARRAY_LINEAR_GENERAL 0
# define ARRAY_LINEAR_ALIGNED 1
# define ARRAY_1D_TILED_THIN1 2
# define ARRAY_2D_TILED_THIN1 4
# define CB_SOURCE_FORMAT(x) ((x) << 24)
# define CB_SF_EXPORT_FULL 0
# define CB_SF_EXPORT_NORM 1
#define R_028C74_CB_COLOR0_ATTRIB 0x028C74
#define S_028C74_NON_DISP_TILING_ORDER(x) (((x) & 0x1) << 4)
#define G_028C74_NON_DISP_TILING_ORDER(x) (((x) >> 4) & 0x1)
#define C_028C74_NON_DISP_TILING_ORDER 0xFFFFFFEF
#define S_028C74_TILE_SPLIT(x) (((x) & 0xf) << 5)
#define G_028C74_TILE_SPLIT(x) (((x) >> 5) & 0xf)
#define S_028C74_NUM_BANKS(x) (((x) & 0x3) << 10)
#define G_028C74_NUM_BANKS(x) (((x) >> 10) & 0x3)
#define S_028C74_BANK_WIDTH(x) (((x) & 0x3) << 13)
#define G_028C74_BANK_WIDTH(x) (((x) >> 13) & 0x3)
#define S_028C74_BANK_HEIGHT(x) (((x) & 0x3) << 16)
#define G_028C74_BANK_HEIGHT(x) (((x) >> 16) & 0x3)
#define S_028C74_MACRO_TILE_ASPECT(x) (((x) & 0x3) << 19)
#define G_028C74_MACRO_TILE_ASPECT(x) (((x) >> 19) & 0x3)
#define CB_COLOR0_ATTRIB 0x28c74
# define CB_TILE_SPLIT(x) (((x) & 0x7) << 5)
# define ADDR_SURF_TILE_SPLIT_64B 0
# define ADDR_SURF_TILE_SPLIT_128B 1
# define ADDR_SURF_TILE_SPLIT_256B 2
# define ADDR_SURF_TILE_SPLIT_512B 3
# define ADDR_SURF_TILE_SPLIT_1KB 4
# define ADDR_SURF_TILE_SPLIT_2KB 5
# define ADDR_SURF_TILE_SPLIT_4KB 6
# define CB_NUM_BANKS(x) (((x) & 0x3) << 10)
# define ADDR_SURF_2_BANK 0
# define ADDR_SURF_4_BANK 1
# define ADDR_SURF_8_BANK 2
# define ADDR_SURF_16_BANK 3
# define CB_BANK_WIDTH(x) (((x) & 0x3) << 13)
# define ADDR_SURF_BANK_WIDTH_1 0
# define ADDR_SURF_BANK_WIDTH_2 1
# define ADDR_SURF_BANK_WIDTH_4 2
# define ADDR_SURF_BANK_WIDTH_8 3
# define CB_BANK_HEIGHT(x) (((x) & 0x3) << 16)
# define ADDR_SURF_BANK_HEIGHT_1 0
# define ADDR_SURF_BANK_HEIGHT_2 1
# define ADDR_SURF_BANK_HEIGHT_4 2
# define ADDR_SURF_BANK_HEIGHT_8 3
# define CB_MACRO_TILE_ASPECT(x) (((x) & 0x3) << 19)
#define CB_COLOR0_DIM 0x28c78
/* only CB0-7 blocks have these regs */
#define CB_COLOR0_CMASK 0x28c7c
@ -1107,17 +1663,226 @@
#define CB_COLOR7_CLEAR_WORD3 0x28e3c
#define SQ_TEX_RESOURCE_WORD0_0 0x30000
# define TEX_DIM(x) ((x) << 0)
# define SQ_TEX_DIM_1D 0
# define SQ_TEX_DIM_2D 1
# define SQ_TEX_DIM_3D 2
# define SQ_TEX_DIM_CUBEMAP 3
# define SQ_TEX_DIM_1D_ARRAY 4
# define SQ_TEX_DIM_2D_ARRAY 5
# define SQ_TEX_DIM_2D_MSAA 6
# define SQ_TEX_DIM_2D_ARRAY_MSAA 7
#define SQ_TEX_RESOURCE_WORD1_0 0x30004
# define TEX_ARRAY_MODE(x) ((x) << 28)
#define SQ_TEX_RESOURCE_WORD2_0 0x30008
#define SQ_TEX_RESOURCE_WORD3_0 0x3000C
#define SQ_TEX_RESOURCE_WORD4_0 0x30010
# define TEX_DST_SEL_X(x) ((x) << 16)
# define TEX_DST_SEL_Y(x) ((x) << 19)
# define TEX_DST_SEL_Z(x) ((x) << 22)
# define TEX_DST_SEL_W(x) ((x) << 25)
# define SQ_SEL_X 0
# define SQ_SEL_Y 1
# define SQ_SEL_Z 2
# define SQ_SEL_W 3
# define SQ_SEL_0 4
# define SQ_SEL_1 5
#define SQ_TEX_RESOURCE_WORD5_0 0x30014
#define SQ_TEX_RESOURCE_WORD6_0 0x30018
# define TEX_TILE_SPLIT(x) (((x) & 0x7) << 29)
#define SQ_TEX_RESOURCE_WORD7_0 0x3001c
# define MACRO_TILE_ASPECT(x) (((x) & 0x3) << 6)
# define TEX_BANK_WIDTH(x) (((x) & 0x3) << 8)
# define TEX_BANK_HEIGHT(x) (((x) & 0x3) << 10)
# define TEX_NUM_BANKS(x) (((x) & 0x3) << 16)
#define R_030000_SQ_TEX_RESOURCE_WORD0_0 0x030000
#define S_030000_DIM(x) (((x) & 0x7) << 0)
#define G_030000_DIM(x) (((x) >> 0) & 0x7)
#define C_030000_DIM 0xFFFFFFF8
#define V_030000_SQ_TEX_DIM_1D 0x00000000
#define V_030000_SQ_TEX_DIM_2D 0x00000001
#define V_030000_SQ_TEX_DIM_3D 0x00000002
#define V_030000_SQ_TEX_DIM_CUBEMAP 0x00000003
#define V_030000_SQ_TEX_DIM_1D_ARRAY 0x00000004
#define V_030000_SQ_TEX_DIM_2D_ARRAY 0x00000005
#define V_030000_SQ_TEX_DIM_2D_MSAA 0x00000006
#define V_030000_SQ_TEX_DIM_2D_ARRAY_MSAA 0x00000007
#define S_030000_NON_DISP_TILING_ORDER(x) (((x) & 0x1) << 5)
#define G_030000_NON_DISP_TILING_ORDER(x) (((x) >> 5) & 0x1)
#define C_030000_NON_DISP_TILING_ORDER 0xFFFFFFDF
#define S_030000_PITCH(x) (((x) & 0xFFF) << 6)
#define G_030000_PITCH(x) (((x) >> 6) & 0xFFF)
#define C_030000_PITCH 0xFFFC003F
#define S_030000_TEX_WIDTH(x) (((x) & 0x3FFF) << 18)
#define G_030000_TEX_WIDTH(x) (((x) >> 18) & 0x3FFF)
#define C_030000_TEX_WIDTH 0x0003FFFF
#define R_030004_SQ_TEX_RESOURCE_WORD1_0 0x030004
#define S_030004_TEX_HEIGHT(x) (((x) & 0x3FFF) << 0)
#define G_030004_TEX_HEIGHT(x) (((x) >> 0) & 0x3FFF)
#define C_030004_TEX_HEIGHT 0xFFFFC000
#define S_030004_TEX_DEPTH(x) (((x) & 0x1FFF) << 14)
#define G_030004_TEX_DEPTH(x) (((x) >> 14) & 0x1FFF)
#define C_030004_TEX_DEPTH 0xF8003FFF
#define S_030004_ARRAY_MODE(x) (((x) & 0xF) << 28)
#define G_030004_ARRAY_MODE(x) (((x) >> 28) & 0xF)
#define C_030004_ARRAY_MODE 0x0FFFFFFF
#define R_030008_SQ_TEX_RESOURCE_WORD2_0 0x030008
#define S_030008_BASE_ADDRESS(x) (((x) & 0xFFFFFFFF) << 0)
#define G_030008_BASE_ADDRESS(x) (((x) >> 0) & 0xFFFFFFFF)
#define C_030008_BASE_ADDRESS 0x00000000
#define R_03000C_SQ_TEX_RESOURCE_WORD3_0 0x03000C
#define S_03000C_MIP_ADDRESS(x) (((x) & 0xFFFFFFFF) << 0)
#define G_03000C_MIP_ADDRESS(x) (((x) >> 0) & 0xFFFFFFFF)
#define C_03000C_MIP_ADDRESS 0x00000000
#define R_030010_SQ_TEX_RESOURCE_WORD4_0 0x030010
#define S_030010_FORMAT_COMP_X(x) (((x) & 0x3) << 0)
#define G_030010_FORMAT_COMP_X(x) (((x) >> 0) & 0x3)
#define C_030010_FORMAT_COMP_X 0xFFFFFFFC
#define V_030010_SQ_FORMAT_COMP_UNSIGNED 0x00000000
#define V_030010_SQ_FORMAT_COMP_SIGNED 0x00000001
#define V_030010_SQ_FORMAT_COMP_UNSIGNED_BIASED 0x00000002
#define S_030010_FORMAT_COMP_Y(x) (((x) & 0x3) << 2)
#define G_030010_FORMAT_COMP_Y(x) (((x) >> 2) & 0x3)
#define C_030010_FORMAT_COMP_Y 0xFFFFFFF3
#define S_030010_FORMAT_COMP_Z(x) (((x) & 0x3) << 4)
#define G_030010_FORMAT_COMP_Z(x) (((x) >> 4) & 0x3)
#define C_030010_FORMAT_COMP_Z 0xFFFFFFCF
#define S_030010_FORMAT_COMP_W(x) (((x) & 0x3) << 6)
#define G_030010_FORMAT_COMP_W(x) (((x) >> 6) & 0x3)
#define C_030010_FORMAT_COMP_W 0xFFFFFF3F
#define S_030010_NUM_FORMAT_ALL(x) (((x) & 0x3) << 8)
#define G_030010_NUM_FORMAT_ALL(x) (((x) >> 8) & 0x3)
#define C_030010_NUM_FORMAT_ALL 0xFFFFFCFF
#define V_030010_SQ_NUM_FORMAT_NORM 0x00000000
#define V_030010_SQ_NUM_FORMAT_INT 0x00000001
#define V_030010_SQ_NUM_FORMAT_SCALED 0x00000002
#define S_030010_SRF_MODE_ALL(x) (((x) & 0x1) << 10)
#define G_030010_SRF_MODE_ALL(x) (((x) >> 10) & 0x1)
#define C_030010_SRF_MODE_ALL 0xFFFFFBFF
#define V_030010_SRF_MODE_ZERO_CLAMP_MINUS_ONE 0x00000000
#define V_030010_SRF_MODE_NO_ZERO 0x00000001
#define S_030010_FORCE_DEGAMMA(x) (((x) & 0x1) << 11)
#define G_030010_FORCE_DEGAMMA(x) (((x) >> 11) & 0x1)
#define C_030010_FORCE_DEGAMMA 0xFFFFF7FF
#define S_030010_ENDIAN_SWAP(x) (((x) & 0x3) << 12)
#define G_030010_ENDIAN_SWAP(x) (((x) >> 12) & 0x3)
#define C_030010_ENDIAN_SWAP 0xFFFFCFFF
#define S_030010_DST_SEL_X(x) (((x) & 0x7) << 16)
#define G_030010_DST_SEL_X(x) (((x) >> 16) & 0x7)
#define C_030010_DST_SEL_X 0xFFF8FFFF
#define V_030010_SQ_SEL_X 0x00000000
#define V_030010_SQ_SEL_Y 0x00000001
#define V_030010_SQ_SEL_Z 0x00000002
#define V_030010_SQ_SEL_W 0x00000003
#define V_030010_SQ_SEL_0 0x00000004
#define V_030010_SQ_SEL_1 0x00000005
#define S_030010_DST_SEL_Y(x) (((x) & 0x7) << 19)
#define G_030010_DST_SEL_Y(x) (((x) >> 19) & 0x7)
#define C_030010_DST_SEL_Y 0xFFC7FFFF
#define S_030010_DST_SEL_Z(x) (((x) & 0x7) << 22)
#define G_030010_DST_SEL_Z(x) (((x) >> 22) & 0x7)
#define C_030010_DST_SEL_Z 0xFE3FFFFF
#define S_030010_DST_SEL_W(x) (((x) & 0x7) << 25)
#define G_030010_DST_SEL_W(x) (((x) >> 25) & 0x7)
#define C_030010_DST_SEL_W 0xF1FFFFFF
#define S_030010_BASE_LEVEL(x) (((x) & 0xF) << 28)
#define G_030010_BASE_LEVEL(x) (((x) >> 28) & 0xF)
#define C_030010_BASE_LEVEL 0x0FFFFFFF
#define R_030014_SQ_TEX_RESOURCE_WORD5_0 0x030014
#define S_030014_LAST_LEVEL(x) (((x) & 0xF) << 0)
#define G_030014_LAST_LEVEL(x) (((x) >> 0) & 0xF)
#define C_030014_LAST_LEVEL 0xFFFFFFF0
#define S_030014_BASE_ARRAY(x) (((x) & 0x1FFF) << 4)
#define G_030014_BASE_ARRAY(x) (((x) >> 4) & 0x1FFF)
#define C_030014_BASE_ARRAY 0xFFFE000F
#define S_030014_LAST_ARRAY(x) (((x) & 0x1FFF) << 17)
#define G_030014_LAST_ARRAY(x) (((x) >> 17) & 0x1FFF)
#define C_030014_LAST_ARRAY 0xC001FFFF
#define R_030018_SQ_TEX_RESOURCE_WORD6_0 0x030018
#define S_030018_MAX_ANISO(x) (((x) & 0x7) << 0)
#define G_030018_MAX_ANISO(x) (((x) >> 0) & 0x7)
#define C_030018_MAX_ANISO 0xFFFFFFF8
#define S_030018_PERF_MODULATION(x) (((x) & 0x7) << 3)
#define G_030018_PERF_MODULATION(x) (((x) >> 3) & 0x7)
#define C_030018_PERF_MODULATION 0xFFFFFFC7
#define S_030018_INTERLACED(x) (((x) & 0x1) << 6)
#define G_030018_INTERLACED(x) (((x) >> 6) & 0x1)
#define C_030018_INTERLACED 0xFFFFFFBF
#define S_030018_TILE_SPLIT(x) (((x) & 0x7) << 29)
#define G_030018_TILE_SPLIT(x) (((x) >> 29) & 0x7)
#define R_03001C_SQ_TEX_RESOURCE_WORD7_0 0x03001C
#define S_03001C_MACRO_TILE_ASPECT(x) (((x) & 0x3) << 6)
#define G_03001C_MACRO_TILE_ASPECT(x) (((x) >> 6) & 0x3)
#define S_03001C_BANK_WIDTH(x) (((x) & 0x3) << 8)
#define G_03001C_BANK_WIDTH(x) (((x) >> 8) & 0x3)
#define S_03001C_BANK_HEIGHT(x) (((x) & 0x3) << 10)
#define G_03001C_BANK_HEIGHT(x) (((x) >> 10) & 0x3)
#define S_03001C_NUM_BANKS(x) (((x) & 0x3) << 16)
#define G_03001C_NUM_BANKS(x) (((x) >> 16) & 0x3)
#define S_03001C_TYPE(x) (((x) & 0x3) << 30)
#define G_03001C_TYPE(x) (((x) >> 30) & 0x3)
#define C_03001C_TYPE 0x3FFFFFFF
#define V_03001C_SQ_TEX_VTX_INVALID_TEXTURE 0x00000000
#define V_03001C_SQ_TEX_VTX_INVALID_BUFFER 0x00000001
#define V_03001C_SQ_TEX_VTX_VALID_TEXTURE 0x00000002
#define V_03001C_SQ_TEX_VTX_VALID_BUFFER 0x00000003
#define S_03001C_DATA_FORMAT(x) (((x) & 0x3F) << 0)
#define G_03001C_DATA_FORMAT(x) (((x) >> 0) & 0x3F)
#define C_03001C_DATA_FORMAT 0xFFFFFFC0
#define SQ_VTX_CONSTANT_WORD0_0 0x30000
#define SQ_VTX_CONSTANT_WORD1_0 0x30004
#define SQ_VTX_CONSTANT_WORD2_0 0x30008
# define SQ_VTXC_BASE_ADDR_HI(x) ((x) << 0)
# define SQ_VTXC_STRIDE(x) ((x) << 8)
# define SQ_VTXC_ENDIAN_SWAP(x) ((x) << 30)
# define SQ_ENDIAN_NONE 0
# define SQ_ENDIAN_8IN16 1
# define SQ_ENDIAN_8IN32 2
#define SQ_VTX_CONSTANT_WORD3_0 0x3000C
# define SQ_VTCX_SEL_X(x) ((x) << 3)
# define SQ_VTCX_SEL_Y(x) ((x) << 6)
# define SQ_VTCX_SEL_Z(x) ((x) << 9)
# define SQ_VTCX_SEL_W(x) ((x) << 12)
#define SQ_VTX_CONSTANT_WORD4_0 0x30010
#define SQ_VTX_CONSTANT_WORD5_0 0x30014
#define SQ_VTX_CONSTANT_WORD6_0 0x30018
#define SQ_VTX_CONSTANT_WORD7_0 0x3001c
#define TD_PS_BORDER_COLOR_INDEX 0xA400
#define TD_PS_BORDER_COLOR_RED 0xA404
#define TD_PS_BORDER_COLOR_GREEN 0xA408
#define TD_PS_BORDER_COLOR_BLUE 0xA40C
#define TD_PS_BORDER_COLOR_ALPHA 0xA410
#define TD_VS_BORDER_COLOR_INDEX 0xA414
#define TD_VS_BORDER_COLOR_RED 0xA418
#define TD_VS_BORDER_COLOR_GREEN 0xA41C
#define TD_VS_BORDER_COLOR_BLUE 0xA420
#define TD_VS_BORDER_COLOR_ALPHA 0xA424
#define TD_GS_BORDER_COLOR_INDEX 0xA428
#define TD_GS_BORDER_COLOR_RED 0xA42C
#define TD_GS_BORDER_COLOR_GREEN 0xA430
#define TD_GS_BORDER_COLOR_BLUE 0xA434
#define TD_GS_BORDER_COLOR_ALPHA 0xA438
#define TD_HS_BORDER_COLOR_INDEX 0xA43C
#define TD_HS_BORDER_COLOR_RED 0xA440
#define TD_HS_BORDER_COLOR_GREEN 0xA444
#define TD_HS_BORDER_COLOR_BLUE 0xA448
#define TD_HS_BORDER_COLOR_ALPHA 0xA44C
#define TD_LS_BORDER_COLOR_INDEX 0xA450
#define TD_LS_BORDER_COLOR_RED 0xA454
#define TD_LS_BORDER_COLOR_GREEN 0xA458
#define TD_LS_BORDER_COLOR_BLUE 0xA45C
#define TD_LS_BORDER_COLOR_ALPHA 0xA460
#define TD_CS_BORDER_COLOR_INDEX 0xA464
#define TD_CS_BORDER_COLOR_RED 0xA468
#define TD_CS_BORDER_COLOR_GREEN 0xA46C
#define TD_CS_BORDER_COLOR_BLUE 0xA470
#define TD_CS_BORDER_COLOR_ALPHA 0xA474
/* cayman 3D regs */
#define CAYMAN_VGT_OFFCHIP_LDS_BASE 0x89B4
#define CAYMAN_SQ_EX_ALLOC_TABLE_SLOTS 0x8E48
#define CAYMAN_DB_EQAA 0x28804
#define CAYMAN_DB_DEPTH_INFO 0x2803C
#define CAYMAN_PA_SC_AA_CONFIG 0x28BE0
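
A note on the naming convention used throughout this header: for a field FOO of register R_xxxxxx, S_xxxxxx_FOO(x) shifts a value into place, G_xxxxxx_FOO(x) extracts it, and C_xxxxxx_FOO is the complement mask used for read-modify-write updates. A small illustrative sketch using the CB_COLOR0_INFO fields defined above (reg32 and fmt are just local placeholders):

u32 reg32 = 0;

/* pack: update FORMAT and NUMBER_TYPE without disturbing other fields */
reg32 = (reg32 & C_028C70_FORMAT)      | S_028C70_FORMAT(V_028C70_COLOR_8_8_8_8);
reg32 = (reg32 & C_028C70_NUMBER_TYPE) | S_028C70_NUMBER_TYPE(V_028C70_NUMBER_UNORM);

/* unpack: G_ recovers the raw field value (0x1A == COLOR_8_8_8_8 here) */
u32 fmt = G_028C70_FORMAT(reg32);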

Binary files not shown (15 files).

View File

@ -119,7 +119,7 @@ ___start_builtin_fw:
dd (SUMO2ME_END - SUMO2ME_START)
macro ni_code [arg]
macro NI_code [arg]
{
dd FIRMWARE_#arg#_ME
dd arg#ME_START
@ -135,7 +135,7 @@ macro ni_code [arg]
}
ni_code BARTS, TURKS, CAICOS, CAYMAN
NI_code BARTS, TURKS, CAICOS, CAYMAN
dd FIRMWARE_RV610_PFP
dd RV610PFP_START
@ -235,6 +235,31 @@ ni_code BARTS, TURKS, CAICOS, CAYMAN
dd SUMORLC_START
dd (SUMORLC_END - SUMORLC_START)
macro SI_code [arg]
{
dd FIRMWARE_#arg#_PFP
dd arg#_PFP_START
dd (arg#_PFP_END - arg#_PFP_START)
dd FIRMWARE_#arg#_ME
dd arg#_ME_START
dd (arg#_ME_END - arg#_ME_START)
dd FIRMWARE_#arg#_CE
dd arg#_CE_START
dd (arg#_CE_END - arg#_CE_START)
dd FIRMWARE_#arg#_MC
dd arg#_MC_START
dd (arg#_MC_END - arg#_MC_START)
dd FIRMWARE_#arg#_RLC
dd arg#_RLC_START
dd (arg#_RLC_END - arg#_RLC_START)
}
SI_code TAHITI, PITCAIRN, VERDE
___end_builtin_fw:
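
Each of these macros emits one table entry per firmware image between ___start_builtin_fw and ___end_builtin_fw: a pointer to the file-name string, the start address of the embedded blob, and its length. The sketch below shows how the C side of the driver could walk that table; the struct layout simply mirrors the dd triplets, while the builtin_fw name and the extern symbols are assumptions here:

#include <string.h>   /* strcmp - assumed available in the port's libc */

struct builtin_fw {
        char          *name;   /* FIRMWARE_..._PFP / _ME / ... string  */
        void          *data;   /* ..._START label                      */
        unsigned long  size;   /* ..._END - ..._START                  */
};

extern struct builtin_fw __start_builtin_fw[];  /* ___start_builtin_fw */
extern struct builtin_fw __end_builtin_fw[];    /* ___end_builtin_fw   */

static const struct builtin_fw *find_builtin_fw(const char *name)
{
        const struct builtin_fw *fw;

        for (fw = __start_builtin_fw; fw < __end_builtin_fw; fw++)
                if (strcmp(name, fw->name) == 0)
                        return fw;     /* e.g. "radeon/TAHITI_pfp.bin" */
        return NULL;
}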
@ -315,6 +340,47 @@ FIRMWARE_TURKS_MC db 'radeon/TURKS_mc.bin',0
FIRMWARE_CAICOS_MC db 'radeon/CAICOS_mc.bin',0
FIRMWARE_CAYMAN_MC db 'radeon/CAYMAN_mc.bin',0
macro SI_firmware [arg]
{
forward
FIRMWARE_#arg#_PFP db 'radeon/',`arg,'_pfp.bin',0
FIRMWARE_#arg#_ME db 'radeon/',`arg,'_me.bin',0
FIRMWARE_#arg#_CE db 'radeon/',`arg,'_ce.bin',0
FIRMWARE_#arg#_MC db 'radeon/',`arg,'_mc.bin',0
FIRMWARE_#arg#_RLC db 'radeon/',`arg,'_rlc.bin',0
forward
align 16
arg#_PFP_START:
file "firmware/"#`arg#"_pfp.bin"
arg#_PFP_END:
align 16
arg#_ME_START:
file "firmware/"#`arg#"_me.bin"
arg#_ME_END:
align 16
arg#_CE_START:
file "firmware/"#`arg#"_ce.bin"
arg#_CE_END:
align 16
arg#_MC_START:
file "firmware/"#`arg#"_mc.bin"
arg#_MC_END:
align 16
arg#_RLC_START:
file "firmware/"#`arg#"_rlc.bin"
arg#_RLC_END:
}
SI_firmware TAHITI,PITCAIRN,VERDE
align 16
R100CP_START:
@ -627,3 +693,5 @@ align 16
CAYMANMC_START:
file 'firmware/CAYMAN_mc.bin'
CAYMANMC_END:

File diff suppressed because it is too large

View File

@ -41,7 +41,13 @@
#define CAYMAN_MAX_TCC 16
#define CAYMAN_MAX_TCC_MASK 0xFF
#define CAYMAN_GB_ADDR_CONFIG_GOLDEN 0x02011003
#define ARUBA_GB_ADDR_CONFIG_GOLDEN 0x12010001
#define DMIF_ADDR_CONFIG 0xBD4
#define SRBM_GFX_CNTL 0x0E44
#define RINGID(x) (((x) & 0x3) << 0)
#define VMID(x) (((x) & 0x7) << 0)
#define SRBM_STATUS 0x0E50
#define VM_CONTEXT0_REQUEST_RESPONSE 0x1470
@ -103,6 +109,7 @@
#define SYSTEM_ACCESS_MODE_NOT_IN_SYS (3 << 3)
#define SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU (0 << 5)
#define ENABLE_ADVANCED_DRIVER_MODEL (1 << 6)
#define FUS_MC_VM_FB_OFFSET 0x2068
#define MC_SHARED_BLACKOUT_CNTL 0x20ac
#define MC_ARB_RAMCFG 0x2760
@ -144,6 +151,8 @@
#define CGTS_SYS_TCC_DISABLE 0x3F90
#define CGTS_USER_SYS_TCC_DISABLE 0x3F94
#define RLC_GFX_INDEX 0x3FC4
#define CONFIG_MEMSIZE 0x5428
#define HDP_MEM_COHERENCY_FLUSH_CNTL 0x5480
@ -208,6 +217,12 @@
#define SOFT_RESET_VGT (1 << 14)
#define SOFT_RESET_IA (1 << 15)
#define GRBM_GFX_INDEX 0x802C
#define INSTANCE_INDEX(x) ((x) << 0)
#define SE_INDEX(x) ((x) << 16)
#define INSTANCE_BROADCAST_WRITES (1 << 30)
#define SE_BROADCAST_WRITES (1 << 31)
#define SCRATCH_REG0 0x8500
#define SCRATCH_REG1 0x8504
#define SCRATCH_REG2 0x8508
@ -219,6 +234,12 @@
#define SCRATCH_UMSK 0x8540
#define SCRATCH_ADDR 0x8544
#define CP_SEM_WAIT_TIMER 0x85BC
#define CP_SEM_INCOMPLETE_TIMER_CNTL 0x85C8
#define CP_COHER_CNTL2 0x85E8
#define CP_STALLED_STAT1 0x8674
#define CP_STALLED_STAT2 0x8678
#define CP_BUSY_STAT 0x867C
#define CP_STAT 0x8680
#define CP_ME_CNTL 0x86D8
#define CP_ME_HALT (1 << 28)
#define CP_PFP_HALT (1 << 26)
@ -394,6 +415,12 @@
#define CP_RB0_RPTR_ADDR 0xC10C
#define CP_RB0_RPTR_ADDR_HI 0xC110
#define CP_RB0_WPTR 0xC114
#define CP_INT_CNTL 0xC124
# define CNTX_BUSY_INT_ENABLE (1 << 19)
# define CNTX_EMPTY_INT_ENABLE (1 << 20)
# define TIME_STAMP_INT_ENABLE (1 << 26)
#define CP_RB1_BASE 0xC180
#define CP_RB1_CNTL 0xC184
#define CP_RB1_RPTR_ADDR 0xC188
@ -411,6 +438,10 @@
#define CP_ME_RAM_DATA 0xC160
#define CP_DEBUG 0xC1FC
#define VGT_EVENT_INITIATOR 0x28a90
# define CACHE_FLUSH_AND_INV_EVENT_TS (0x14 << 0)
# define CACHE_FLUSH_AND_INV_EVENT (0x16 << 0)
/*
* PM4
*/
@ -445,6 +476,7 @@
#define PACKET3_DISPATCH_DIRECT 0x15
#define PACKET3_DISPATCH_INDIRECT 0x16
#define PACKET3_INDIRECT_BUFFER_END 0x17
#define PACKET3_MODE_CONTROL 0x18
#define PACKET3_SET_PREDICATION 0x20
#define PACKET3_REG_RMW 0x21
#define PACKET3_COND_EXEC 0x22
@ -470,6 +502,7 @@
#define PACKET3_MPEG_INDEX 0x3A
#define PACKET3_WAIT_REG_MEM 0x3C
#define PACKET3_MEM_WRITE 0x3D
#define PACKET3_PFP_SYNC_ME 0x42
#define PACKET3_SURFACE_SYNC 0x43
# define PACKET3_CB0_DEST_BASE_ENA (1 << 6)
# define PACKET3_CB1_DEST_BASE_ENA (1 << 7)
@ -494,7 +527,27 @@
#define PACKET3_ME_INITIALIZE_DEVICE_ID(x) ((x) << 16)
#define PACKET3_COND_WRITE 0x45
#define PACKET3_EVENT_WRITE 0x46
#define EVENT_TYPE(x) ((x) << 0)
#define EVENT_INDEX(x) ((x) << 8)
/* 0 - any non-TS event
* 1 - ZPASS_DONE
* 2 - SAMPLE_PIPELINESTAT
* 3 - SAMPLE_STREAMOUTSTAT*
* 4 - *S_PARTIAL_FLUSH
* 5 - TS events
*/
#define PACKET3_EVENT_WRITE_EOP 0x47
#define DATA_SEL(x) ((x) << 29)
/* 0 - discard
* 1 - send low 32bit data
* 2 - send 64bit data
* 3 - send 64bit counter value
*/
#define INT_SEL(x) ((x) << 24)
/* 0 - none
* 1 - interrupt only (DATA_SEL = 0)
* 2 - interrupt when data write is confirmed
*/
#define PACKET3_EVENT_WRITE_EOS 0x48
#define PACKET3_PREAMBLE_CNTL 0x4A
# define PACKET3_PREAMBLE_BEGIN_CLEAR_STATE (2 << 28)
@ -533,6 +586,7 @@
#define PACKET3_SET_CONTEXT_REG_INDIRECT 0x73
#define PACKET3_SET_RESOURCE_INDIRECT 0x74
#define PACKET3_SET_APPEND_CNT 0x75
#define PACKET3_ME_WRITE 0x7A
#endif
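
The EVENT_INDEX/DATA_SEL/INT_SEL comments above describe how an EVENT_WRITE_EOP packet signals a fence: the GPU writes a value to a given address and can raise an interrupt once the write is confirmed. A hedged sketch of a fence emit in the style of the radeon ring code; radeon_ring_write(), PACKET3() and upper_32_bits() come from other parts of the driver and are assumed here:

/* flush caches, write seq to gpu_addr, irq when the write has landed */
radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT_TS) | EVENT_INDEX(5));
radeon_ring_write(ring, gpu_addr & 0xffffffff);            /* address low    */
radeon_ring_write(ring, (upper_32_bits(gpu_addr) & 0xff) |
                        DATA_SEL(1) |    /* send low 32-bit data             */
                        INT_SEL(2));     /* irq when data write is confirmed */
radeon_ring_write(ring, seq);                              /* data low       */
radeon_ring_write(ring, 0);                                /* data high      */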

View File

@ -1,16 +1,16 @@
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/mutex.h>
#include <linux/mod_devicetable.h>
#include <errno-base.h>
#include <pci.h>
#include <syscall.h>
extern int pci_scan_filter(u32_t id, u32_t busnr, u32_t devfn);
static LIST_HEAD(devices);
static pci_dev_t* pci_scan_device(u32_t bus, int devfn);
/* PCI control bits. Shares IORESOURCE_BITS with above PCI ROM. */
#define IORESOURCE_PCI_FIXED (1<<4) /* Do not move resource */
@ -345,14 +345,17 @@ static pci_dev_t* pci_scan_device(u32_t busnr, int devfn)
}
};
if( pci_scan_filter(id, busnr, devfn) == 0)
return NULL;
hdr = PciRead8(busnr, devfn, PCI_HEADER_TYPE);
dev = (pci_dev_t*)kzalloc(sizeof(pci_dev_t), 0);
if(unlikely(dev == NULL))
return NULL;
INIT_LIST_HEAD(&dev->link);
dev->pci_dev.busnr = busnr;
dev->pci_dev.devfn = devfn;
@ -367,6 +370,9 @@ static pci_dev_t* pci_scan_device(u32_t busnr, int devfn)
};
int pci_scan_slot(u32_t bus, int devfn)
{
int func, nr = 0;
@ -405,49 +411,6 @@ int pci_scan_slot(u32_t bus, int devfn)
return nr;
};
void pci_scan_bus(u32_t bus)
{
u32_t devfn;
pci_dev_t *dev;
for (devfn = 0; devfn < 0x100; devfn += 8)
pci_scan_slot(bus, devfn);
}
int enum_pci_devices()
{
pci_dev_t *dev;
u32_t last_bus;
u32_t bus = 0 , devfn = 0;
// list_initialize(&devices);
last_bus = PciApi(1);
if( unlikely(last_bus == -1))
return -1;
for(;bus <= last_bus; bus++)
pci_scan_bus(bus);
// for(dev = (dev_t*)devices.next;
// &dev->link != &devices;
// dev = (dev_t*)dev->link.next)
// {
// dbgprintf("PCI device %x:%x bus:%x devfn:%x\n",
// dev->pci_dev.vendor,
// dev->pci_dev.device,
// dev->pci_dev.bus,
// dev->pci_dev.devfn);
//
// }
return 0;
}
#define PCI_FIND_CAP_TTL 48
static int __pci_find_next_cap_ttl(unsigned int bus, unsigned int devfn,
@ -513,244 +476,46 @@ int pci_find_capability(struct pci_dev *dev, int cap)
}
#if 0
/**
* pci_set_power_state - Set the power state of a PCI device
* @dev: PCI device to be suspended
* @state: PCI power state (D0, D1, D2, D3hot, D3cold) we're entering
*
* Transition a device to a new power state, using the Power Management
* Capabilities in the device's config space.
*
* RETURN VALUE:
* -EINVAL if trying to enter a lower state than we're already in.
* 0 if we're already in the requested state.
* -EIO if device does not support PCI PM.
* 0 if we can successfully change the power state.
*/
int
pci_set_power_state(struct pci_dev *dev, pci_power_t state)
int enum_pci_devices()
{
int pm, need_restore = 0;
u16 pmcsr, pmc;
/* bound the state we're entering */
if (state > PCI_D3hot)
state = PCI_D3hot;
/*
* If the device or the parent bridge can't support PCI PM, ignore
* the request if we're doing anything besides putting it into D0
* (which would only happen on boot).
*/
if ((state == PCI_D1 || state == PCI_D2) && pci_no_d1d2(dev))
return 0;
/* find PCI PM capability in list */
pm = pci_find_capability(dev, PCI_CAP_ID_PM);
/* abort if the device doesn't support PM capabilities */
if (!pm)
return -EIO;
/* Validate current state:
* Can enter D0 from any state, but if we can only go deeper
* to sleep if we're already in a low power state
*/
if (state != PCI_D0 && dev->current_state > state) {
printk(KERN_ERR "%s(): %s: state=%d, current state=%d\n",
__FUNCTION__, pci_name(dev), state, dev->current_state);
return -EINVAL;
} else if (dev->current_state == state)
return 0; /* we're already there */
pci_dev_t *dev;
u32_t last_bus;
u32_t bus = 0 , devfn = 0;
pci_read_config_word(dev,pm + PCI_PM_PMC,&pmc);
if ((pmc & PCI_PM_CAP_VER_MASK) > 3) {
printk(KERN_DEBUG
"PCI: %s has unsupported PM cap regs version (%u)\n",
pci_name(dev), pmc & PCI_PM_CAP_VER_MASK);
return -EIO;
}
last_bus = PciApi(1);
/* check if this device supports the desired state */
if (state == PCI_D1 && !(pmc & PCI_PM_CAP_D1))
return -EIO;
else if (state == PCI_D2 && !(pmc & PCI_PM_CAP_D2))
return -EIO;
pci_read_config_word(dev, pm + PCI_PM_CTRL, &pmcsr);
if( unlikely(last_bus == -1))
return -1;
/* If we're (effectively) in D3, force entire word to 0.
* This doesn't affect PME_Status, disables PME_En, and
* sets PowerState to 0.
*/
switch (dev->current_state) {
case PCI_D0:
case PCI_D1:
case PCI_D2:
pmcsr &= ~PCI_PM_CTRL_STATE_MASK;
pmcsr |= state;
break;
case PCI_UNKNOWN: /* Boot-up */
if ((pmcsr & PCI_PM_CTRL_STATE_MASK) == PCI_D3hot
&& !(pmcsr & PCI_PM_CTRL_NO_SOFT_RESET))
need_restore = 1;
/* Fall-through: force to D0 */
default:
pmcsr = 0;
break;
}
/* enter specified state */
pci_write_config_word(dev, pm + PCI_PM_CTRL, pmcsr);
/* Mandatory power management transition delays */
/* see PCI PM 1.1 5.6.1 table 18 */
if (state == PCI_D3hot || dev->current_state == PCI_D3hot)
msleep(pci_pm_d3_delay);
else if (state == PCI_D2 || dev->current_state == PCI_D2)
udelay(200);
/*
* Give firmware a chance to be called, such as ACPI _PRx, _PSx
* Firmware method after native method ?
*/
if (platform_pci_set_power_state)
platform_pci_set_power_state(dev, state);
dev->current_state = state;
/* According to section 5.4.1 of the "PCI BUS POWER MANAGEMENT
* INTERFACE SPECIFICATION, REV. 1.2", a device transitioning
* from D3hot to D0 _may_ perform an internal reset, thereby
* going to "D0 Uninitialized" rather than "D0 Initialized".
* For example, at least some versions of the 3c905B and the
* 3c556B exhibit this behaviour.
*
* At least some laptop BIOSen (e.g. the Thinkpad T21) leave
* devices in a D3hot state at boot. Consequently, we need to
* restore at least the BARs so that the device will be
* accessible to its driver.
*/
if (need_restore)
pci_restore_bars(dev);
return 0;
}
#endif
int pcibios_enable_resources(struct pci_dev *dev, int mask)
{
u16_t cmd, old_cmd;
int idx;
struct resource *r;
cmd = PciRead16(dev->busnr, dev->devfn, PCI_COMMAND);
old_cmd = cmd;
for (idx = 0; idx < PCI_NUM_RESOURCES; idx++)
for(;bus <= last_bus; bus++)
{
/* Only set up the requested stuff */
if (!(mask & (1 << idx)))
continue;
for (devfn = 0; devfn < 0x100; devfn += 8)
pci_scan_slot(bus, devfn);
r = &dev->resource[idx];
if (!(r->flags & (IORESOURCE_IO | IORESOURCE_MEM)))
continue;
if ((idx == PCI_ROM_RESOURCE) &&
(!(r->flags & IORESOURCE_ROM_ENABLE)))
continue;
if (!r->start && r->end) {
printk(KERN_ERR "PCI: Device %s not available "
"because of resource %d collisions\n",
pci_name(dev), idx);
return -EINVAL;
}
if (r->flags & IORESOURCE_IO)
cmd |= PCI_COMMAND_IO;
if (r->flags & IORESOURCE_MEM)
cmd |= PCI_COMMAND_MEMORY;
}
if (cmd != old_cmd) {
printk("PCI: Enabling device %s (%04x -> %04x)\n",
pci_name(dev), old_cmd, cmd);
PciWrite16(dev->busnr, dev->devfn, PCI_COMMAND, cmd);
for(dev = (pci_dev_t*)devices.next;
&dev->link != &devices;
dev = (pci_dev_t*)dev->link.next)
{
dbgprintf("PCI device %x:%x bus:%x devfn:%x\n",
dev->pci_dev.vendor,
dev->pci_dev.device,
dev->pci_dev.busnr,
dev->pci_dev.devfn);
}
return 0;
}
int pcibios_enable_device(struct pci_dev *dev, int mask)
{
int err;
if ((err = pcibios_enable_resources(dev, mask)) < 0)
return err;
// if (!dev->msi_enabled)
// return pcibios_enable_irq(dev);
return 0;
}
static int do_pci_enable_device(struct pci_dev *dev, int bars)
{
int err;
// err = pci_set_power_state(dev, PCI_D0);
// if (err < 0 && err != -EIO)
// return err;
err = pcibios_enable_device(dev, bars);
// if (err < 0)
// return err;
// pci_fixup_device(pci_fixup_enable, dev);
return 0;
}
static int __pci_enable_device_flags(struct pci_dev *dev,
resource_size_t flags)
{
int err;
int i, bars = 0;
// if (atomic_add_return(1, &dev->enable_cnt) > 1)
// return 0; /* already enabled */
for (i = 0; i < DEVICE_COUNT_RESOURCE; i++)
if (dev->resource[i].flags & flags)
bars |= (1 << i);
err = do_pci_enable_device(dev, bars);
// if (err < 0)
// atomic_dec(&dev->enable_cnt);
return err;
}
/**
* pci_enable_device - Initialize device before it's used by a driver.
* @dev: PCI device to be initialized
*
* Initialize device before it's used by a driver. Ask low-level code
* to enable I/O and memory. Wake up the device if it was suspended.
* Beware, this function can fail.
*
* Note we don't actually enable the device many times if we call
* this function repeatedly (we just increment the count).
*/
int pci_enable_device(struct pci_dev *dev)
{
return __pci_enable_device_flags(dev, IORESOURCE_MEM | IORESOURCE_IO);
}
struct pci_device_id* find_pci_device(pci_dev_t* pdev, struct pci_device_id *idlist)
const struct pci_device_id* find_pci_device(pci_dev_t* pdev, const struct pci_device_id *idlist)
{
pci_dev_t *dev;
struct pci_device_id *ent;
const struct pci_device_id *ent;
for(dev = (pci_dev_t*)devices.next;
&dev->link != &devices;
@ -772,58 +537,303 @@ struct pci_device_id* find_pci_device(pci_dev_t* pdev, struct pci_device_id *idl
return NULL;
};
struct pci_dev *
pci_get_device(unsigned int vendor, unsigned int device, struct pci_dev *from)
{
pci_dev_t *dev;
dev = (pci_dev_t*)devices.next;
if(from != NULL)
{
for(; &dev->link != &devices;
dev = (pci_dev_t*)dev->link.next)
{
if( &dev->pci_dev == from)
{
dev = (pci_dev_t*)dev->link.next;
break;
};
}
};
for(; &dev->link != &devices;
dev = (pci_dev_t*)dev->link.next)
{
if( dev->pci_dev.vendor != vendor )
continue;
if(dev->pci_dev.device == device)
{
return &dev->pci_dev;
}
}
return NULL;
};
struct pci_dev * pci_get_bus_and_slot(unsigned int bus, unsigned int devfn)
{
pci_dev_t *dev;
for(dev = (pci_dev_t*)devices.next;
&dev->link != &devices;
dev = (pci_dev_t*)dev->link.next)
{
if ( dev->pci_dev.busnr == bus && dev->pci_dev.devfn == devfn)
return &dev->pci_dev;
}
return NULL;
}
struct pci_dev *pci_get_class(unsigned int class, struct pci_dev *from)
{
pci_dev_t *dev;
dev = (pci_dev_t*)devices.next;
if(from != NULL)
{
for(; &dev->link != &devices;
dev = (pci_dev_t*)dev->link.next)
{
if( &dev->pci_dev == from)
{
dev = (pci_dev_t*)dev->link.next;
break;
};
}
};
for(; &dev->link != &devices;
dev = (pci_dev_t*)dev->link.next)
{
if( dev->pci_dev.class == class)
{
return &dev->pci_dev;
}
}
return NULL;
}
#define PIO_OFFSET 0x10000UL
#define PIO_MASK 0x0ffffUL
#define PIO_RESERVED 0x40000UL
#define IO_COND(addr, is_pio, is_mmio) do { \
unsigned long port = (unsigned long __force)addr; \
if (port >= PIO_RESERVED) { \
is_mmio; \
} else if (port > PIO_OFFSET) { \
port &= PIO_MASK; \
is_pio; \
}; \
} while (0)
/* Create a virtual mapping cookie for an IO port range */
void __iomem *ioport_map(unsigned long port, unsigned int nr)
{
return (void __iomem *) port;
}
void __iomem *pci_iomap(struct pci_dev *dev, int bar, unsigned long maxlen)
{
resource_size_t start = pci_resource_start(dev, bar);
resource_size_t len = pci_resource_len(dev, bar);
unsigned long flags = pci_resource_flags(dev, bar);
if (!len || !start)
return NULL;
if (maxlen && len > maxlen)
len = maxlen;
if (flags & IORESOURCE_IO)
return ioport_map(start, len);
if (flags & IORESOURCE_MEM) {
return ioremap(start, len);
}
/* What? */
return NULL;
}
void pci_iounmap(struct pci_dev *dev, void __iomem * addr)
{
IO_COND(addr, /* nothing */, iounmap(addr));
}
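
pci_iomap() above picks ioport_map() or ioremap() based on the BAR's resource flags, so callers do not need to know whether the BAR is I/O- or memory-mapped. A typical mapping of a register BAR might look like this (BAR 2 is only an example; error handling trimmed):

void __iomem *mmio = pci_iomap(pdev, 2, 0);   /* maxlen 0 = map the whole BAR */

if (mmio) {
        /* ... access registers through readl()/writel() ... */
        pci_iounmap(pdev, mmio);
}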
struct pci_bus_region {
resource_size_t start;
resource_size_t end;
};
static inline void
pcibios_resource_to_bus(struct pci_dev *dev, struct pci_bus_region *region,
struct resource *res)
{
region->start = res->start;
region->end = res->end;
}
static inline int pci_read_config_dword(struct pci_dev *dev, int where,
u32 *val)
{
*val = PciRead32(dev->busnr, dev->devfn, where);
return 1;
}
static inline int pci_write_config_dword(struct pci_dev *dev, int where,
u32 val)
{
PciWrite32(dev->busnr, dev->devfn, where, val);
return 1;
}
static inline int pci_read_config_word(struct pci_dev *dev, int where,
u16 *val)
{
*val = PciRead16(dev->busnr, dev->devfn, where);
return 1;
}
static inline int pci_write_config_word(struct pci_dev *dev, int where,
u16 val)
{
PciWrite16(dev->busnr, dev->devfn, where, val);
return 1;
}
int pci_enable_rom(struct pci_dev *pdev)
{
struct resource *res = pdev->resource + PCI_ROM_RESOURCE;
struct pci_bus_region region;
u32 rom_addr;
if (!res->flags)
return -1;
pcibios_resource_to_bus(pdev, &region, res);
pci_read_config_dword(pdev, pdev->rom_base_reg, &rom_addr);
rom_addr &= ~PCI_ROM_ADDRESS_MASK;
rom_addr |= region.start | PCI_ROM_ADDRESS_ENABLE;
pci_write_config_dword(pdev, pdev->rom_base_reg, rom_addr);
return 0;
}
void pci_disable_rom(struct pci_dev *pdev)
{
u32 rom_addr;
pci_read_config_dword(pdev, pdev->rom_base_reg, &rom_addr);
rom_addr &= ~PCI_ROM_ADDRESS_ENABLE;
pci_write_config_dword(pdev, pdev->rom_base_reg, rom_addr);
}
/**
* pci_get_rom_size - obtain the actual size of the ROM image
* @pdev: target PCI device
* @rom: kernel virtual pointer to image of ROM
* @size: size of PCI window
* return: size of actual ROM image
*
* Determine the actual length of the ROM image.
* The PCI window size could be much larger than the
* actual image size.
*/
size_t pci_get_rom_size(struct pci_dev *pdev, void __iomem *rom, size_t size)
{
void __iomem *image;
int last_image;
image = rom;
do {
void __iomem *pds;
/* Standard PCI ROMs start out with these bytes 55 AA */
if (readb(image) != 0x55) {
dev_err(&pdev->dev, "Invalid ROM contents\n");
break;
}
if (readb(image + 1) != 0xAA)
break;
/* get the PCI data structure and check its signature */
pds = image + readw(image + 24);
if (readb(pds) != 'P')
break;
if (readb(pds + 1) != 'C')
break;
if (readb(pds + 2) != 'I')
break;
if (readb(pds + 3) != 'R')
break;
last_image = readb(pds + 21) & 0x80;
/* this length is reliable */
image += readw(pds + 16) * 512;
} while (!last_image);
/* never return a size larger than the PCI resource window */
/* there are known ROMs that get the size wrong */
return min((size_t)(image - rom), size);
}
/**
* pci_map_rom - map a PCI ROM to kernel space
* @pdev: pointer to pci device struct
* @size: pointer to receive size of pci window over ROM
* @return: kernel virtual pointer to image of ROM
*
* Return: kernel virtual pointer to image of ROM
*
* Map a PCI ROM into kernel space. If ROM is boot video ROM,
* the shadow BIOS copy will be returned instead of the
* actual ROM.
*/
#define legacyBIOSLocation 0xC0000
#define OS_BASE 0x80000000
void __iomem *pci_map_rom(struct pci_dev *pdev, size_t *size)
{
struct resource *res = &pdev->resource[PCI_ROM_RESOURCE];
loff_t start;
void __iomem *rom;
#if 0
// ENTER();
// dbgprintf("resource start %x end %x flags %x\n",
// res->start, res->end, res->flags);
/*
* IORESOURCE_ROM_SHADOW set on x86, x86_64 and IA64 supports legacy
* memory map if the VGA enable bit of the Bridge Control register is
* set for embedded VGA.
*/
start = (loff_t)0xC0000;
*size = 0x20000; /* cover C000:0 through E000:0 */
#if 0
if (res->flags & IORESOURCE_ROM_SHADOW) {
/* primary video rom always starts here */
start = (u32_t)0xC0000;
start = (loff_t)0xC0000;
*size = 0x20000; /* cover C000:0 through E000:0 */
} else {
if (res->flags & (IORESOURCE_ROM_COPY | IORESOURCE_ROM_BIOS_COPY)) {
*size = pci_resource_len(pdev, PCI_ROM_RESOURCE);
return (void *)(unsigned long)
pci_resource_start(pdev, PCI_ROM_RESOURCE);
return (void __iomem *)(unsigned long)
pci_resource_start(pdev, PCI_ROM_RESOURCE);
} else {
/* assign the ROM an address if it doesn't have one */
//if (res->parent == NULL &&
// pci_assign_resource(pdev,PCI_ROM_RESOURCE))
// return NULL;
start = pci_resource_start(pdev, PCI_ROM_RESOURCE);
*size = pci_resource_len(pdev, PCI_ROM_RESOURCE);
if (*size == 0)
// if (res->parent == NULL &&
// pci_assign_resource(pdev,PCI_ROM_RESOURCE))
return NULL;
// start = pci_resource_start(pdev, PCI_ROM_RESOURCE);
// *size = pci_resource_len(pdev, PCI_ROM_RESOURCE);
// if (*size == 0)
// return NULL;
/* Enable ROM space decodes */
if (pci_enable_rom(pdev))
return NULL;
// if (pci_enable_rom(pdev))
// return NULL;
}
}
#endif
rom = ioremap(start, *size);
if (!rom) {
@ -840,37 +850,237 @@ void *pci_map_rom(struct pci_dev *pdev, size_t *size)
* size is much larger than the actual size of the ROM.
* True size is important if the ROM is going to be copied.
*/
*size = pci_get_rom_size(rom, *size);
#endif
unsigned char tmp[32];
rom = NULL;
dbgprintf("Getting BIOS copy from legacy VBIOS location\n");
memcpy(tmp,(char*)(OS_BASE+legacyBIOSLocation), 32);
*size = tmp[2] * 512;
if (*size > 0x10000 )
{
*size = 0;
dbgprintf("Invalid BIOS length field\n");
}
else
rom = (void*)( OS_BASE+legacyBIOSLocation);
*size = pci_get_rom_size(pdev, rom, *size);
// LEAVE();
return rom;
}
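/*
 * Illustration (hypothetical caller, not from the original sources): the
 * usual consumption pattern for pci_map_rom() above in a KMS driver --
 * map the ROM, copy the video BIOS out of the window, then release it
 * with pci_unmap_rom(). Assumes the stock kmalloc()/memcpy_fromio()
 * helpers are available in this environment.
 */
static void *copy_vbios(struct pci_dev *pdev, size_t *len)
{
	void __iomem *rom;
	void *copy = NULL;
	size_t size = 0;

	rom = pci_map_rom(pdev, &size);		/* size comes back trimmed by pci_get_rom_size() */
	if (!rom || size == 0)
		return NULL;

	copy = kmalloc(size, GFP_KERNEL);
	if (copy)
		memcpy_fromio(copy, rom, size);	/* copy out of the I/O window */

	pci_unmap_rom(pdev, rom);
	*len = copy ? size : 0;
	return copy;
}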
int
pci_set_dma_mask(struct pci_dev *dev, u64 mask)
void pci_unmap_rom(struct pci_dev *pdev, void __iomem *rom)
{
// if (!pci_dma_supported(dev, mask))
// return -EIO;
struct resource *res = &pdev->resource[PCI_ROM_RESOURCE];
dev->dma_mask = mask;
if (res->flags & (IORESOURCE_ROM_COPY | IORESOURCE_ROM_BIOS_COPY))
return;
return 0;
iounmap(rom);
/* Disable again before continuing, leave enabled if pci=rom */
if (!(res->flags & (IORESOURCE_ROM_ENABLE | IORESOURCE_ROM_SHADOW)))
pci_disable_rom(pdev);
}
int pci_set_dma_mask(struct pci_dev *dev, u64 mask)
{
dev->dma_mask = mask;
return 0;
}
static void __pci_set_master(struct pci_dev *dev, bool enable)
{
u16 old_cmd, cmd;
pci_read_config_word(dev, PCI_COMMAND, &old_cmd);
if (enable)
cmd = old_cmd | PCI_COMMAND_MASTER;
else
cmd = old_cmd & ~PCI_COMMAND_MASTER;
if (cmd != old_cmd) {
pci_write_config_word(dev, PCI_COMMAND, cmd);
}
dev->is_busmaster = enable;
}
/* pci_set_master - enables bus-mastering for device dev
* @dev: the PCI device to enable
*
* Enables bus-mastering on the device and calls pcibios_set_master()
* to do the needed arch specific settings.
*/
void pci_set_master(struct pci_dev *dev)
{
__pci_set_master(dev, true);
// pcibios_set_master(dev);
}
/**
* pci_clear_master - disables bus-mastering for device dev
* @dev: the PCI device to disable
*/
void pci_clear_master(struct pci_dev *dev)
{
__pci_set_master(dev, false);
}
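/*
 * Illustration (hypothetical helper): pci_set_master()/pci_clear_master()
 * above only toggle PCI_COMMAND_MASTER in the command register, so the
 * current state can be verified with a plain config read.
 */
static bool pci_busmaster_enabled(struct pci_dev *dev)
{
	u16 cmd = 0;

	pci_read_config_word(dev, PCI_COMMAND, &cmd);
	return (cmd & PCI_COMMAND_MASTER) != 0;
}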
static inline int pcie_cap_version(const struct pci_dev *dev)
{
return dev->pcie_flags_reg & PCI_EXP_FLAGS_VERS;
}
static inline bool pcie_cap_has_devctl(const struct pci_dev *dev)
{
return true;
}
static inline bool pcie_cap_has_lnkctl(const struct pci_dev *dev)
{
int type = pci_pcie_type(dev);
return pcie_cap_version(dev) > 1 ||
type == PCI_EXP_TYPE_ROOT_PORT ||
type == PCI_EXP_TYPE_ENDPOINT ||
type == PCI_EXP_TYPE_LEG_END;
}
static inline bool pcie_cap_has_sltctl(const struct pci_dev *dev)
{
int type = pci_pcie_type(dev);
return pcie_cap_version(dev) > 1 ||
type == PCI_EXP_TYPE_ROOT_PORT ||
(type == PCI_EXP_TYPE_DOWNSTREAM &&
dev->pcie_flags_reg & PCI_EXP_FLAGS_SLOT);
}
static inline bool pcie_cap_has_rtctl(const struct pci_dev *dev)
{
int type = pci_pcie_type(dev);
return pcie_cap_version(dev) > 1 ||
type == PCI_EXP_TYPE_ROOT_PORT ||
type == PCI_EXP_TYPE_RC_EC;
}
static bool pcie_capability_reg_implemented(struct pci_dev *dev, int pos)
{
if (!pci_is_pcie(dev))
return false;
switch (pos) {
case PCI_EXP_FLAGS_TYPE:
return true;
case PCI_EXP_DEVCAP:
case PCI_EXP_DEVCTL:
case PCI_EXP_DEVSTA:
return pcie_cap_has_devctl(dev);
case PCI_EXP_LNKCAP:
case PCI_EXP_LNKCTL:
case PCI_EXP_LNKSTA:
return pcie_cap_has_lnkctl(dev);
case PCI_EXP_SLTCAP:
case PCI_EXP_SLTCTL:
case PCI_EXP_SLTSTA:
return pcie_cap_has_sltctl(dev);
case PCI_EXP_RTCTL:
case PCI_EXP_RTCAP:
case PCI_EXP_RTSTA:
return pcie_cap_has_rtctl(dev);
case PCI_EXP_DEVCAP2:
case PCI_EXP_DEVCTL2:
case PCI_EXP_LNKCAP2:
case PCI_EXP_LNKCTL2:
case PCI_EXP_LNKSTA2:
return pcie_cap_version(dev) > 1;
default:
return false;
}
}
/*
* Note that these accessor functions are only for the "PCI Express
* Capability" (see PCIe spec r3.0, sec 7.8). They do not apply to the
* other "PCI Express Extended Capabilities" (AER, VC, ACS, MFVC, etc.)
*/
int pcie_capability_read_word(struct pci_dev *dev, int pos, u16 *val)
{
int ret;
*val = 0;
if (pos & 1)
return -EINVAL;
if (pcie_capability_reg_implemented(dev, pos)) {
ret = pci_read_config_word(dev, pci_pcie_cap(dev) + pos, val);
/*
* Reset *val to 0 if pci_read_config_word() fails; it may
* have been written as 0xFFFF if a hardware error happened
* during pci_read_config_word().
*/
if (ret)
*val = 0;
return ret;
}
/*
* For Functions that do not implement the Slot Capabilities,
* Slot Status, and Slot Control registers, these spaces must
* be hardwired to 0b, with the exception of the Presence Detect
* State bit in the Slot Status register of Downstream Ports,
* which must be hardwired to 1b. (PCIe Base Spec 3.0, sec 7.8)
*/
if (pci_is_pcie(dev) && pos == PCI_EXP_SLTSTA &&
pci_pcie_type(dev) == PCI_EXP_TYPE_DOWNSTREAM) {
*val = PCI_EXP_SLTSTA_PDS;
}
return 0;
}
EXPORT_SYMBOL(pcie_capability_read_word);
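/*
 * Illustration (hypothetical helper): typical use of
 * pcie_capability_read_word() above -- fetch the Link Status register and
 * decode the negotiated speed and width. The 0x000f and 0x03f0 field
 * masks follow the PCIe spec (PCI_EXP_LNKSTA_CLS / PCI_EXP_LNKSTA_NLW).
 */
static void pcie_report_link(struct pci_dev *dev)
{
	u16 lnksta = 0;

	if (pcie_capability_read_word(dev, PCI_EXP_LNKSTA, &lnksta))
		return;

	dev_info(&dev->dev, "PCIe link: gen%u x%u\n",
		 (unsigned)(lnksta & 0x000f),		/* current link speed, 1 = 2.5 GT/s */
		 (unsigned)((lnksta >> 4) & 0x003f));	/* negotiated link width */
}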
int pcie_capability_read_dword(struct pci_dev *dev, int pos, u32 *val)
{
int ret;
*val = 0;
if (pos & 3)
return -EINVAL;
if (pcie_capability_reg_implemented(dev, pos)) {
ret = pci_read_config_dword(dev, pci_pcie_cap(dev) + pos, val);
/*
* Reset *val to 0 if pci_read_config_dword() fails; it may
* have been written as 0xFFFFFFFF if a hardware error happened
* during pci_read_config_dword().
*/
if (ret)
*val = 0;
return ret;
}
if (pci_is_pcie(dev) && pos == PCI_EXP_SLTCTL &&
pci_pcie_type(dev) == PCI_EXP_TYPE_DOWNSTREAM) {
*val = PCI_EXP_SLTSTA_PDS;
}
return 0;
}
EXPORT_SYMBOL(pcie_capability_read_dword);
int pcie_capability_write_word(struct pci_dev *dev, int pos, u16 val)
{
if (pos & 1)
return -EINVAL;
if (!pcie_capability_reg_implemented(dev, pos))
return 0;
return pci_write_config_word(dev, pci_pcie_cap(dev) + pos, val);
}
EXPORT_SYMBOL(pcie_capability_write_word);
int pcie_capability_write_dword(struct pci_dev *dev, int pos, u32 val)
{
if (pos & 3)
return -EINVAL;
if (!pcie_capability_reg_implemented(dev, pos))
return 0;
return pci_write_config_dword(dev, pci_pcie_cap(dev) + pos, val);
}
EXPORT_SYMBOL(pcie_capability_write_dword);
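/*
 * Illustration (hypothetical helper): read-modify-write through the
 * accessors above, here clearing relaxed ordering in Device Control.
 * Assumes the standard PCI_EXP_DEVCTL_RELAX_EN bit definition (0x0010)
 * from pci_regs.h.
 */
static int pcie_disable_relaxed_ordering(struct pci_dev *dev)
{
	u16 devctl = 0;
	int ret;

	ret = pcie_capability_read_word(dev, PCI_EXP_DEVCTL, &devctl);
	if (ret)
		return ret;

	devctl &= ~PCI_EXP_DEVCTL_RELAX_EN;
	return pcie_capability_write_word(dev, PCI_EXP_DEVCTL, devctl);
}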

File diff suppressed because it is too large

View File

@ -25,9 +25,8 @@
* Alex Deucher
* Jerome Glisse
*/
#include "drmP.h"
#include "drm.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon_reg.h"
#include "radeon.h"
#include "radeon_asic.h"
@ -87,44 +86,45 @@ static int r200_get_vtx_size_0(uint32_t vtx_fmt_0)
int r200_copy_dma(struct radeon_device *rdev,
uint64_t src_offset,
uint64_t dst_offset,
unsigned num_pages,
struct radeon_fence *fence)
unsigned num_gpu_pages,
struct radeon_fence **fence)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
uint32_t size;
uint32_t cur_size;
int i, num_loops;
int r = 0;
/* radeon pitch is /64 */
size = num_pages << PAGE_SHIFT;
size = num_gpu_pages << RADEON_GPU_PAGE_SHIFT;
num_loops = DIV_ROUND_UP(size, 0x1FFFFF);
r = radeon_ring_lock(rdev, num_loops * 4 + 64);
r = radeon_ring_lock(rdev, ring, num_loops * 4 + 64);
if (r) {
DRM_ERROR("radeon: moving bo (%d).\n", r);
return r;
}
/* Must wait for 2D idle & clean before DMA or hangs might happen */
radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0));
radeon_ring_write(rdev, (1 << 16));
radeon_ring_write(ring, PACKET0(RADEON_WAIT_UNTIL, 0));
radeon_ring_write(ring, (1 << 16));
for (i = 0; i < num_loops; i++) {
cur_size = size;
if (cur_size > 0x1FFFFF) {
cur_size = 0x1FFFFF;
}
size -= cur_size;
radeon_ring_write(rdev, PACKET0(0x720, 2));
radeon_ring_write(rdev, src_offset);
radeon_ring_write(rdev, dst_offset);
radeon_ring_write(rdev, cur_size | (1 << 31) | (1 << 30));
radeon_ring_write(ring, PACKET0(0x720, 2));
radeon_ring_write(ring, src_offset);
radeon_ring_write(ring, dst_offset);
radeon_ring_write(ring, cur_size | (1 << 31) | (1 << 30));
src_offset += cur_size;
dst_offset += cur_size;
}
radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0));
radeon_ring_write(rdev, RADEON_WAIT_DMA_GUI_IDLE);
radeon_ring_write(ring, PACKET0(RADEON_WAIT_UNTIL, 0));
radeon_ring_write(ring, RADEON_WAIT_DMA_GUI_IDLE);
if (fence) {
r = radeon_fence_emit(rdev, fence);
r = radeon_fence_emit(rdev, fence, RADEON_RING_TYPE_GFX_INDEX);
}
radeon_ring_unlock_commit(rdev);
radeon_ring_unlock_commit(rdev, ring);
return r;
}
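/*
 * Illustration (standalone arithmetic check, not driver code): the loop
 * budget used by r200_copy_dma() above. Each pass moves at most 0x1FFFFF
 * bytes and costs 4 ring dwords, plus 64 dwords of fixed overhead.
 * RADEON_GPU_PAGE_SHIFT is assumed to be 12 (4 KiB GPU pages).
 */
#include <stdio.h>

#define GPU_PAGE_SHIFT 12
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	unsigned int num_gpu_pages = 1024;
	unsigned int size = num_gpu_pages << GPU_PAGE_SHIFT;	/* 4 MiB */
	unsigned int num_loops = DIV_ROUND_UP(size, 0x1FFFFF);	/* 3 */
	unsigned int ring_dwords = num_loops * 4 + 64;		/* 76 */

	printf("size=%u bytes, loops=%u, ring dwords=%u\n",
	       size, num_loops, ring_dwords);
	return 0;
}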
#if 0
@ -156,7 +156,7 @@ int r200_packet0_check(struct radeon_cs_parser *p,
u32 tile_flags = 0;
u32 idx_value;
ib = p->ib->ptr;
ib = p->ib.ptr;
track = (struct r100_cs_track *)p->track;
idx_value = radeon_get_ib_value(p, idx);
switch (reg) {
@ -217,6 +217,16 @@ int r200_packet0_check(struct radeon_cs_parser *p,
r100_cs_dump_packet(p, pkt);
return r;
}
if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) {
if (reloc->lobj.tiling_flags & RADEON_TILING_MACRO)
tile_flags |= R200_TXO_MACRO_TILE;
if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO)
tile_flags |= R200_TXO_MICRO_TILE;
tmp = idx_value & ~(0x7 << 2);
tmp |= tile_flags;
ib[idx] = tmp + ((u32)reloc->lobj.gpu_offset);
} else
ib[idx] = idx_value + ((u32)reloc->lobj.gpu_offset);
track->textures[i].robj = reloc->robj;
track->tex_dirty = true;
@ -279,6 +289,7 @@ int r200_packet0_check(struct radeon_cs_parser *p,
return r;
}
if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) {
if (reloc->lobj.tiling_flags & RADEON_TILING_MACRO)
tile_flags |= RADEON_COLOR_TILE_ENABLE;
if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO)
@ -287,6 +298,8 @@ int r200_packet0_check(struct radeon_cs_parser *p,
tmp = idx_value & ~(0x7 << 16);
tmp |= tile_flags;
ib[idx] = tmp;
} else
ib[idx] = idx_value;
track->cb[0].pitch = idx_value & RADEON_COLORPITCH_MASK;
track->cb_dirty = true;

View File

@ -33,7 +33,7 @@
#include "radeon_reg.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_drm.h"
#include <drm/radeon_drm.h>
#include "r300d.h"
#include "rv350d.h"
@ -74,7 +74,7 @@ void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev)
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr)
{
void __iomem *ptr = (void *)rdev->gart.table.vram.ptr;
void __iomem *ptr = rdev->gart.ptr;
if (i < 0 || i > rdev->gart.num_gpu_pages) {
return -EINVAL;
@ -93,7 +93,7 @@ int rv370_pcie_gart_init(struct radeon_device *rdev)
{
int r;
if (rdev->gart.table.vram.robj) {
if (rdev->gart.robj) {
WARN(1, "RV370 PCIE GART already initialized\n");
return 0;
}
@ -105,8 +105,8 @@ int rv370_pcie_gart_init(struct radeon_device *rdev)
if (r)
DRM_ERROR("Failed to register debugfs file for PCIE gart !\n");
rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;
rdev->asic->gart_tlb_flush = &rv370_pcie_gart_tlb_flush;
rdev->asic->gart_set_page = &rv370_pcie_gart_set_page;
rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush;
rdev->asic->gart.set_page = &rv370_pcie_gart_set_page;
return radeon_gart_table_vram_alloc(rdev);
}
@ -116,7 +116,7 @@ int rv370_pcie_gart_enable(struct radeon_device *rdev)
uint32_t tmp;
int r;
if (rdev->gart.table.vram.robj == NULL) {
if (rdev->gart.robj == NULL) {
dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
return -EINVAL;
}
@ -144,8 +144,9 @@ int rv370_pcie_gart_enable(struct radeon_device *rdev)
tmp |= RADEON_PCIE_TX_GART_UNMAPPED_ACCESS_DISCARD;
WREG32_PCIE(RADEON_PCIE_TX_GART_CNTL, tmp);
rv370_pcie_gart_tlb_flush(rdev);
DRM_INFO("PCIE GART of %uM enabled (table at 0x%08X).\n",
(unsigned)(rdev->mc.gtt_size >> 20), table_addr);
DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
(unsigned)(rdev->mc.gtt_size >> 20),
(unsigned long long)table_addr);
rdev->gart.ready = true;
return 0;
}
@ -153,7 +154,6 @@ int rv370_pcie_gart_enable(struct radeon_device *rdev)
void rv370_pcie_gart_disable(struct radeon_device *rdev)
{
u32 tmp;
int r;
WREG32_PCIE(RADEON_PCIE_TX_GART_START_LO, 0);
WREG32_PCIE(RADEON_PCIE_TX_GART_END_LO, 0);
@ -162,14 +162,7 @@ void rv370_pcie_gart_disable(struct radeon_device *rdev)
tmp = RREG32_PCIE(RADEON_PCIE_TX_GART_CNTL);
tmp |= RADEON_PCIE_TX_GART_UNMAPPED_ACCESS_DISCARD;
WREG32_PCIE(RADEON_PCIE_TX_GART_CNTL, tmp & ~RADEON_PCIE_TX_GART_EN);
if (rdev->gart.table.vram.robj) {
r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
if (likely(r == 0)) {
radeon_bo_kunmap(rdev->gart.table.vram.robj);
radeon_bo_unpin(rdev->gart.table.vram.robj);
radeon_bo_unreserve(rdev->gart.table.vram.robj);
}
}
radeon_gart_table_vram_unpin(rdev);
}
void rv370_pcie_gart_fini(struct radeon_device *rdev)
@ -182,36 +175,38 @@ void rv370_pcie_gart_fini(struct radeon_device *rdev)
void r300_fence_ring_emit(struct radeon_device *rdev,
struct radeon_fence *fence)
{
struct radeon_ring *ring = &rdev->ring[fence->ring];
/* Whoever calls radeon_fence_emit should call ring_lock and ask
* for enough space (today callers are ib schedule and buffer move) */
/* Write SC register so SC & US assert idle */
radeon_ring_write(rdev, PACKET0(R300_RE_SCISSORS_TL, 0));
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, PACKET0(R300_RE_SCISSORS_BR, 0));
radeon_ring_write(rdev, 0);
radeon_ring_write(ring, PACKET0(R300_RE_SCISSORS_TL, 0));
radeon_ring_write(ring, 0);
radeon_ring_write(ring, PACKET0(R300_RE_SCISSORS_BR, 0));
radeon_ring_write(ring, 0);
/* Flush 3D cache */
radeon_ring_write(rdev, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
radeon_ring_write(rdev, R300_RB3D_DC_FLUSH);
radeon_ring_write(rdev, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0));
radeon_ring_write(rdev, R300_ZC_FLUSH);
radeon_ring_write(ring, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
radeon_ring_write(ring, R300_RB3D_DC_FLUSH);
radeon_ring_write(ring, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0));
radeon_ring_write(ring, R300_ZC_FLUSH);
/* Wait until IDLE & CLEAN */
radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0));
radeon_ring_write(rdev, (RADEON_WAIT_3D_IDLECLEAN |
radeon_ring_write(ring, PACKET0(RADEON_WAIT_UNTIL, 0));
radeon_ring_write(ring, (RADEON_WAIT_3D_IDLECLEAN |
RADEON_WAIT_2D_IDLECLEAN |
RADEON_WAIT_DMA_GUI_IDLE));
radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
radeon_ring_write(rdev, rdev->config.r300.hdp_cntl |
radeon_ring_write(ring, PACKET0(RADEON_HOST_PATH_CNTL, 0));
radeon_ring_write(ring, rdev->config.r300.hdp_cntl |
RADEON_HDP_READ_BUFFER_INVALIDATE);
radeon_ring_write(rdev, PACKET0(RADEON_HOST_PATH_CNTL, 0));
radeon_ring_write(rdev, rdev->config.r300.hdp_cntl);
radeon_ring_write(ring, PACKET0(RADEON_HOST_PATH_CNTL, 0));
radeon_ring_write(ring, rdev->config.r300.hdp_cntl);
/* Emit fence sequence & fire IRQ */
radeon_ring_write(rdev, PACKET0(rdev->fence_drv.scratch_reg, 0));
radeon_ring_write(rdev, fence->seq);
radeon_ring_write(rdev, PACKET0(RADEON_GEN_INT_STATUS, 0));
radeon_ring_write(rdev, RADEON_SW_INT_FIRE);
radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0));
radeon_ring_write(ring, fence->seq);
radeon_ring_write(ring, PACKET0(RADEON_GEN_INT_STATUS, 0));
radeon_ring_write(ring, RADEON_SW_INT_FIRE);
}
void r300_ring_start(struct radeon_device *rdev)
void r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring)
{
unsigned gb_tile_config;
int r;
@ -234,44 +229,44 @@ void r300_ring_start(struct radeon_device *rdev)
break;
}
r = radeon_ring_lock(rdev, 64);
r = radeon_ring_lock(rdev, ring, 64);
if (r) {
return;
}
radeon_ring_write(rdev, PACKET0(RADEON_ISYNC_CNTL, 0));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET0(RADEON_ISYNC_CNTL, 0));
radeon_ring_write(ring,
RADEON_ISYNC_ANY2D_IDLE3D |
RADEON_ISYNC_ANY3D_IDLE2D |
RADEON_ISYNC_WAIT_IDLEGUI |
RADEON_ISYNC_CPSCRATCH_IDLEGUI);
radeon_ring_write(rdev, PACKET0(R300_GB_TILE_CONFIG, 0));
radeon_ring_write(rdev, gb_tile_config);
radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET0(R300_GB_TILE_CONFIG, 0));
radeon_ring_write(ring, gb_tile_config);
radeon_ring_write(ring, PACKET0(RADEON_WAIT_UNTIL, 0));
radeon_ring_write(ring,
RADEON_WAIT_2D_IDLECLEAN |
RADEON_WAIT_3D_IDLECLEAN);
radeon_ring_write(rdev, PACKET0(R300_DST_PIPE_CONFIG, 0));
radeon_ring_write(rdev, R300_PIPE_AUTO_CONFIG);
radeon_ring_write(rdev, PACKET0(R300_GB_SELECT, 0));
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, PACKET0(R300_GB_ENABLE, 0));
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
radeon_ring_write(rdev, R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE);
radeon_ring_write(rdev, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0));
radeon_ring_write(rdev, R300_ZC_FLUSH | R300_ZC_FREE);
radeon_ring_write(rdev, PACKET0(RADEON_WAIT_UNTIL, 0));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET0(R300_DST_PIPE_CONFIG, 0));
radeon_ring_write(ring, R300_PIPE_AUTO_CONFIG);
radeon_ring_write(ring, PACKET0(R300_GB_SELECT, 0));
radeon_ring_write(ring, 0);
radeon_ring_write(ring, PACKET0(R300_GB_ENABLE, 0));
radeon_ring_write(ring, 0);
radeon_ring_write(ring, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
radeon_ring_write(ring, R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE);
radeon_ring_write(ring, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0));
radeon_ring_write(ring, R300_ZC_FLUSH | R300_ZC_FREE);
radeon_ring_write(ring, PACKET0(RADEON_WAIT_UNTIL, 0));
radeon_ring_write(ring,
RADEON_WAIT_2D_IDLECLEAN |
RADEON_WAIT_3D_IDLECLEAN);
radeon_ring_write(rdev, PACKET0(R300_GB_AA_CONFIG, 0));
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
radeon_ring_write(rdev, R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE);
radeon_ring_write(rdev, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0));
radeon_ring_write(rdev, R300_ZC_FLUSH | R300_ZC_FREE);
radeon_ring_write(rdev, PACKET0(R300_GB_MSPOS0, 0));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET0(R300_GB_AA_CONFIG, 0));
radeon_ring_write(ring, 0);
radeon_ring_write(ring, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
radeon_ring_write(ring, R300_RB3D_DC_FLUSH | R300_RB3D_DC_FREE);
radeon_ring_write(ring, PACKET0(R300_RB3D_ZCACHE_CTLSTAT, 0));
radeon_ring_write(ring, R300_ZC_FLUSH | R300_ZC_FREE);
radeon_ring_write(ring, PACKET0(R300_GB_MSPOS0, 0));
radeon_ring_write(ring,
((6 << R300_MS_X0_SHIFT) |
(6 << R300_MS_Y0_SHIFT) |
(6 << R300_MS_X1_SHIFT) |
@ -280,8 +275,8 @@ void r300_ring_start(struct radeon_device *rdev)
(6 << R300_MS_Y2_SHIFT) |
(6 << R300_MSBD0_Y_SHIFT) |
(6 << R300_MSBD0_X_SHIFT)));
radeon_ring_write(rdev, PACKET0(R300_GB_MSPOS1, 0));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET0(R300_GB_MSPOS1, 0));
radeon_ring_write(ring,
((6 << R300_MS_X3_SHIFT) |
(6 << R300_MS_Y3_SHIFT) |
(6 << R300_MS_X4_SHIFT) |
@ -289,19 +284,19 @@ void r300_ring_start(struct radeon_device *rdev)
(6 << R300_MS_X5_SHIFT) |
(6 << R300_MS_Y5_SHIFT) |
(6 << R300_MSBD1_SHIFT)));
radeon_ring_write(rdev, PACKET0(R300_GA_ENHANCE, 0));
radeon_ring_write(rdev, R300_GA_DEADLOCK_CNTL | R300_GA_FASTSYNC_CNTL);
radeon_ring_write(rdev, PACKET0(R300_GA_POLY_MODE, 0));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET0(R300_GA_ENHANCE, 0));
radeon_ring_write(ring, R300_GA_DEADLOCK_CNTL | R300_GA_FASTSYNC_CNTL);
radeon_ring_write(ring, PACKET0(R300_GA_POLY_MODE, 0));
radeon_ring_write(ring,
R300_FRONT_PTYPE_TRIANGE | R300_BACK_PTYPE_TRIANGE);
radeon_ring_write(rdev, PACKET0(R300_GA_ROUND_MODE, 0));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET0(R300_GA_ROUND_MODE, 0));
radeon_ring_write(ring,
R300_GEOMETRY_ROUND_NEAREST |
R300_COLOR_ROUND_NEAREST);
radeon_ring_unlock_commit(rdev);
radeon_ring_unlock_commit(rdev, ring);
}
void r300_errata(struct radeon_device *rdev)
static void r300_errata(struct radeon_device *rdev)
{
rdev->pll_errata = 0;
@ -327,7 +322,7 @@ int r300_mc_wait_for_idle(struct radeon_device *rdev)
return -1;
}
void r300_gpu_init(struct radeon_device *rdev)
static void r300_gpu_init(struct radeon_device *rdev)
{
uint32_t gb_tile_config, tmp;
@ -382,28 +377,6 @@ void r300_gpu_init(struct radeon_device *rdev)
rdev->num_gb_pipes, rdev->num_z_pipes);
}
bool r300_gpu_is_lockup(struct radeon_device *rdev)
{
u32 rbbm_status;
int r;
rbbm_status = RREG32(R_000E40_RBBM_STATUS);
if (!G_000E40_GUI_ACTIVE(rbbm_status)) {
r100_gpu_lockup_update(&rdev->config.r300.lockup, &rdev->cp);
return false;
}
/* force CP activities */
r = radeon_ring_lock(rdev, 2);
if (!r) {
/* PACKET2 NOP */
radeon_ring_write(rdev, 0x80000000);
radeon_ring_write(rdev, 0x80000000);
radeon_ring_unlock_commit(rdev);
}
rdev->cp.rptr = RREG32(RADEON_CP_RB_RPTR);
return r100_gpu_cp_is_lockup(rdev, &rdev->config.r300.lockup, &rdev->cp);
}
int r300_asic_reset(struct radeon_device *rdev)
{
struct r100_mc_save save;
@ -454,7 +427,6 @@ int r300_asic_reset(struct radeon_device *rdev)
/* Check if GPU is idle */
if (G_000E40_GA_BUSY(status) || G_000E40_VAP_BUSY(status)) {
dev_err(rdev->dev, "failed to reset GPU\n");
rdev->gpu_lockup = true;
ret = -1;
} else
dev_info(rdev->dev, "GPU reset succeed\n");
@ -635,7 +607,7 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
int r;
u32 idx_value;
ib = p->ib->ptr;
ib = p->ib.ptr;
track = (struct r100_cs_track *)p->track;
idx_value = radeon_get_ib_value(p, idx);
@ -711,6 +683,10 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
return r;
}
if (p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS) {
ib[idx] = (idx_value & 31) | /* keep the 1st 5 bits */
((idx_value & ~31) + (u32)reloc->lobj.gpu_offset);
} else {
if (reloc->lobj.tiling_flags & RADEON_TILING_MACRO)
tile_flags |= R300_TXO_MACRO_TILE;
if (reloc->lobj.tiling_flags & RADEON_TILING_MICRO)
@ -721,6 +697,7 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
tmp = idx_value + ((u32)reloc->lobj.gpu_offset);
tmp |= tile_flags;
ib[idx] = tmp;
}
track->textures[i].robj = reloc->robj;
track->tex_dirty = true;
break;
@ -770,6 +747,7 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
/* RB3D_COLORPITCH1 */
/* RB3D_COLORPITCH2 */
/* RB3D_COLORPITCH3 */
if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) {
r = r100_cs_packet_next_reloc(p, &reloc);
if (r) {
DRM_ERROR("No reloc for ib[%d]=0x%04X\n",
@ -788,6 +766,7 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
tmp = idx_value & ~(0x7 << 16);
tmp |= tile_flags;
ib[idx] = tmp;
}
i = (reg - 0x4E38) >> 2;
track->cb[i].pitch = idx_value & 0x3FFE;
switch (((idx_value >> 21) & 0xF)) {
@ -853,6 +832,7 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
break;
case 0x4F24:
/* ZB_DEPTHPITCH */
if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) {
r = r100_cs_packet_next_reloc(p, &reloc);
if (r) {
DRM_ERROR("No reloc for ib[%d]=0x%04X\n",
@ -871,7 +851,7 @@ static int r300_packet0_check(struct radeon_cs_parser *p,
tmp = idx_value & ~(0x7 << 16);
tmp |= tile_flags;
ib[idx] = tmp;
}
track->zb.pitch = idx_value & 0x3FFC;
track->zb_dirty = true;
break;
@ -1169,7 +1149,7 @@ static int r300_packet3_check(struct radeon_cs_parser *p,
unsigned idx;
int r;
ib = p->ib->ptr;
ib = p->ib.ptr;
idx = pkt->idx + 1;
track = (struct r100_cs_track *)p->track;
switch(pkt->opcode) {
@ -1409,11 +1389,13 @@ static int r300_startup(struct radeon_device *rdev)
dev_err(rdev->dev, "failed initializing CP (%d).\n", r);
return r;
}
r = r100_ib_init(rdev);
r = radeon_ib_pool_init(rdev);
if (r) {
dev_err(rdev->dev, "failed initializing IB (%d).\n", r);
dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
return r;
}
return 0;
}
@ -1492,6 +1474,7 @@ int r300_init(struct radeon_device *rdev)
return r;
}
r300_set_reg_safe(rdev);
rdev->accel_working = true;
r = r300_startup(rdev);
if (r) {

View File

@ -27,7 +27,7 @@
*/
#include <linux/seq_file.h>
#include <linux/slab.h>
#include "drmP.h"
#include <drm/drmP.h>
#include "radeon_reg.h"
#include "radeon.h"
#include "radeon_asic.h"
@ -160,6 +160,8 @@ static void r420_clock_resume(struct radeon_device *rdev)
static void r420_cp_errata_init(struct radeon_device *rdev)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
/* RV410 and R420 can lock up if CP DMA to host memory happens
* while the 2D engine is busy.
*
@ -167,22 +169,24 @@ static void r420_cp_errata_init(struct radeon_device *rdev)
* of the CP init, apparently.
*/
radeon_scratch_get(rdev, &rdev->config.r300.resync_scratch);
radeon_ring_lock(rdev, 8);
radeon_ring_write(rdev, PACKET0(R300_CP_RESYNC_ADDR, 1));
radeon_ring_write(rdev, rdev->config.r300.resync_scratch);
radeon_ring_write(rdev, 0xDEADBEEF);
radeon_ring_unlock_commit(rdev);
radeon_ring_lock(rdev, ring, 8);
radeon_ring_write(ring, PACKET0(R300_CP_RESYNC_ADDR, 1));
radeon_ring_write(ring, rdev->config.r300.resync_scratch);
radeon_ring_write(ring, 0xDEADBEEF);
radeon_ring_unlock_commit(rdev, ring);
}
static void r420_cp_errata_fini(struct radeon_device *rdev)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
/* Catch the RESYNC we dispatched all the way back,
* at the very beginning of the CP init.
*/
radeon_ring_lock(rdev, 8);
radeon_ring_write(rdev, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
radeon_ring_write(rdev, R300_RB3D_DC_FINISH);
radeon_ring_unlock_commit(rdev);
radeon_ring_lock(rdev, ring, 8);
radeon_ring_write(ring, PACKET0(R300_RB3D_DSTCACHE_CTLSTAT, 0));
radeon_ring_write(ring, R300_RB3D_DC_FINISH);
radeon_ring_unlock_commit(rdev, ring);
radeon_scratch_free(rdev, rdev->config.r300.resync_scratch);
}
@ -225,41 +229,18 @@ static int r420_startup(struct radeon_device *rdev)
return r;
}
r420_cp_errata_init(rdev);
r = r100_ib_init(rdev);
r = radeon_ib_pool_init(rdev);
if (r) {
dev_err(rdev->dev, "failed initializing IB (%d).\n", r);
dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
return r;
}
return 0;
}
int r420_resume(struct radeon_device *rdev)
{
/* Make sure GART is not working */
if (rdev->flags & RADEON_IS_PCIE)
rv370_pcie_gart_disable(rdev);
if (rdev->flags & RADEON_IS_PCI)
r100_pci_gart_disable(rdev);
/* Resume clock before doing reset */
r420_clock_resume(rdev);
/* Reset gpu before posting otherwise ATOM will enter infinite loop */
if (radeon_asic_reset(rdev)) {
dev_warn(rdev->dev, "GPU reset failed ! (0xE40=0x%08X, 0x7C0=0x%08X)\n",
RREG32(R_000E40_RBBM_STATUS),
RREG32(R_0007C0_CP_STAT));
}
/* check if cards are posted or not */
if (rdev->is_atom_bios) {
atom_asic_init(rdev->mode_info.atom_context);
} else {
radeon_combios_asic_init(rdev->ddev);
}
/* Resume clock after posting */
r420_clock_resume(rdev);
/* Initialize surface registers */
radeon_surface_init(rdev);
return r420_startup(rdev);
}
@ -341,6 +322,7 @@ int r420_init(struct radeon_device *rdev)
return r;
}
r420_set_reg_safe(rdev);
rdev->accel_working = true;
r = r420_startup(rdev);
if (r) {

View File

@ -351,6 +351,8 @@
#define AVIVO_D1CRTC_BLANK_CONTROL 0x6084
#define AVIVO_D1CRTC_INTERLACE_CONTROL 0x6088
#define AVIVO_D1CRTC_INTERLACE_STATUS 0x608c
#define AVIVO_D1CRTC_STATUS 0x609c
# define AVIVO_D1CRTC_V_BLANK (1 << 0)
#define AVIVO_D1CRTC_STATUS_POSITION 0x60a0
#define AVIVO_D1CRTC_FRAME_COUNT 0x60a4
#define AVIVO_D1CRTC_STEREO_CONTROL 0x60c4
@ -573,6 +575,7 @@
#define AVIVO_TMDSA_CNTL 0x7880
# define AVIVO_TMDSA_CNTL_ENABLE (1 << 0)
# define AVIVO_TMDSA_CNTL_HDMI_EN (1 << 2)
# define AVIVO_TMDSA_CNTL_HPD_MASK (1 << 4)
# define AVIVO_TMDSA_CNTL_HPD_SELECT (1 << 8)
# define AVIVO_TMDSA_CNTL_SYNC_PHASE (1 << 12)
@ -633,6 +636,7 @@
#define AVIVO_LVTMA_CNTL 0x7a80
# define AVIVO_LVTMA_CNTL_ENABLE (1 << 0)
# define AVIVO_LVTMA_CNTL_HDMI_EN (1 << 2)
# define AVIVO_LVTMA_CNTL_HPD_MASK (1 << 4)
# define AVIVO_LVTMA_CNTL_HPD_SELECT (1 << 8)
# define AVIVO_LVTMA_CNTL_SYNC_PHASE (1 << 12)

View File

@ -25,7 +25,7 @@
* Alex Deucher
* Jerome Glisse
*/
#include "drmP.h"
#include <drm/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "atom.h"
@ -33,7 +33,7 @@
/* This files gather functions specifics to: r520,rv530,rv560,rv570,r580 */
static int r520_mc_wait_for_idle(struct radeon_device *rdev)
int r520_mc_wait_for_idle(struct radeon_device *rdev)
{
unsigned i;
uint32_t tmp;
@ -119,7 +119,7 @@ static void r520_vram_get_type(struct radeon_device *rdev)
rdev->mc.vram_width *= 2;
}
void r520_mc_init(struct radeon_device *rdev)
static void r520_mc_init(struct radeon_device *rdev)
{
r520_vram_get_type(rdev);
@ -131,7 +131,7 @@ void r520_mc_init(struct radeon_device *rdev)
radeon_update_bandwidth_info(rdev);
}
void r520_mc_program(struct radeon_device *rdev)
static void r520_mc_program(struct radeon_device *rdev)
{
struct rv515_mc_save save;
@ -196,11 +196,13 @@ static int r520_startup(struct radeon_device *rdev)
dev_err(rdev->dev, "failed initializing CP (%d).\n", r);
return r;
}
r = r100_ib_init(rdev);
r = radeon_ib_pool_init(rdev);
if (r) {
dev_err(rdev->dev, "failed initializing IB (%d).\n", r);
dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
return r;
}
return 0;
}
@ -272,6 +274,7 @@ int r520_init(struct radeon_device *rdev)
if (r)
return r;
rv515_set_safe_registers(rdev);
rdev->accel_working = true;
r = r520_startup(rdev);
if (r) {

File diff suppressed because it is too large

View File

@ -29,122 +29,119 @@
#include "radeon_asic.h"
#include "atom.h"
#define AUDIO_TIMER_INTERVALL 100 /* 1/10 second should be enough */
/*
* check if enc_priv stores radeon_encoder_atom_dig
*/
static bool radeon_dig_encoder(struct drm_encoder *encoder)
{
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
switch (radeon_encoder->encoder_id) {
case ENCODER_OBJECT_ID_INTERNAL_LVDS:
case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
case ENCODER_OBJECT_ID_INTERNAL_DVO1:
case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
case ENCODER_OBJECT_ID_INTERNAL_DDI:
case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
return true;
}
return false;
}
/*
* check if the chipset is supported
*/
static int r600_audio_chipset_supported(struct radeon_device *rdev)
{
return (rdev->family >= CHIP_R600 && rdev->family < CHIP_CEDAR)
return (rdev->family >= CHIP_R600 && !ASIC_IS_DCE6(rdev))
|| rdev->family == CHIP_RS600
|| rdev->family == CHIP_RS690
|| rdev->family == CHIP_RS740;
}
/*
* current number of channels
*/
int r600_audio_channels(struct radeon_device *rdev)
struct r600_audio r600_audio_status(struct radeon_device *rdev)
{
return (RREG32(R600_AUDIO_RATE_BPS_CHANNEL) & 0x7) + 1;
}
struct r600_audio status;
uint32_t value;
/*
* current bits per sample
*/
int r600_audio_bits_per_sample(struct radeon_device *rdev)
{
uint32_t value = (RREG32(R600_AUDIO_RATE_BPS_CHANNEL) & 0xF0) >> 4;
switch (value) {
case 0x0: return 8;
case 0x1: return 16;
case 0x2: return 20;
case 0x3: return 24;
case 0x4: return 32;
value = RREG32(R600_AUDIO_RATE_BPS_CHANNEL);
/* number of channels */
status.channels = (value & 0x7) + 1;
/* bits per sample */
switch ((value & 0xF0) >> 4) {
case 0x0:
status.bits_per_sample = 8;
break;
case 0x1:
status.bits_per_sample = 16;
break;
case 0x2:
status.bits_per_sample = 20;
break;
case 0x3:
status.bits_per_sample = 24;
break;
case 0x4:
status.bits_per_sample = 32;
break;
default:
dev_err(rdev->dev, "Unknown bits per sample 0x%x, using 16\n",
(int)value);
status.bits_per_sample = 16;
}
dev_err(rdev->dev, "Unknown bits per sample 0x%x using 16 instead\n",
(int)value);
return 16;
}
/*
* current sampling rate in HZ
*/
int r600_audio_rate(struct radeon_device *rdev)
{
uint32_t value = RREG32(R600_AUDIO_RATE_BPS_CHANNEL);
uint32_t result;
/* current sampling rate in HZ */
if (value & 0x4000)
result = 44100;
status.rate = 44100;
else
result = 48000;
status.rate = 48000;
status.rate *= ((value >> 11) & 0x7) + 1;
status.rate /= ((value >> 8) & 0x7) + 1;
result *= ((value >> 11) & 0x7) + 1;
result /= ((value >> 8) & 0x7) + 1;
value = RREG32(R600_AUDIO_STATUS_BITS);
return result;
}
/* iec 60958 status bits */
status.status_bits = value & 0xff;
/*
* iec 60958 status bits
*/
uint8_t r600_audio_status_bits(struct radeon_device *rdev)
{
return RREG32(R600_AUDIO_STATUS_BITS) & 0xff;
}
/* iec 60958 category code */
status.category_code = (value >> 8) & 0xff;
/*
* iec 60958 category code
*/
uint8_t r600_audio_category_code(struct radeon_device *rdev)
{
return (RREG32(R600_AUDIO_STATUS_BITS) >> 8) & 0xff;
return status;
}
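/*
 * Illustration (standalone decoder, not driver code): the bit layout that
 * r600_audio_status() above extracts from R600_AUDIO_RATE_BPS_CHANNEL --
 * channel count in bits 2:0, bits-per-sample code in bits 7:4, 44.1/48 kHz
 * base select in bit 14, rate multiplier in bits 13:11 and divider in
 * bits 10:8. The register value below is made up for the demonstration.
 */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uint32_t value = 0x00004011;		/* hypothetical register readout */
	unsigned channels = (value & 0x7) + 1;
	unsigned code = (value & 0xF0) >> 4;
	unsigned bps = (code == 0) ? 8 : (code == 1) ? 16 :
		       (code == 2) ? 20 : (code == 3) ? 24 :
		       (code == 4) ? 32 : 16;	/* unknown codes fall back to 16 */
	unsigned rate = (value & 0x4000) ? 44100 : 48000;

	rate *= ((value >> 11) & 0x7) + 1;
	rate /= ((value >> 8) & 0x7) + 1;

	printf("%u channel(s), %u bits per sample, %u Hz\n", channels, bps, rate);
	return 0;	/* prints: 2 channel(s), 16 bits per sample, 44100 Hz */
}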
/*
* update all hdmi interfaces with current audio parameters
*/
static void r600_audio_update_hdmi(unsigned long param)
void r600_audio_update_hdmi(struct work_struct *work)
{
struct radeon_device *rdev = (struct radeon_device *)param;
struct radeon_device *rdev = container_of(work, struct radeon_device,
audio_work);
struct drm_device *dev = rdev->ddev;
int channels = r600_audio_channels(rdev);
int rate = r600_audio_rate(rdev);
int bps = r600_audio_bits_per_sample(rdev);
uint8_t status_bits = r600_audio_status_bits(rdev);
uint8_t category_code = r600_audio_category_code(rdev);
struct r600_audio audio_status = r600_audio_status(rdev);
struct drm_encoder *encoder;
int changes = 0, still_going = 0;
bool changed = false;
changes |= channels != rdev->audio_channels;
changes |= rate != rdev->audio_rate;
changes |= bps != rdev->audio_bits_per_sample;
changes |= status_bits != rdev->audio_status_bits;
changes |= category_code != rdev->audio_category_code;
if (changes) {
rdev->audio_channels = channels;
rdev->audio_rate = rate;
rdev->audio_bits_per_sample = bps;
rdev->audio_status_bits = status_bits;
rdev->audio_category_code = category_code;
if (rdev->audio_status.channels != audio_status.channels ||
rdev->audio_status.rate != audio_status.rate ||
rdev->audio_status.bits_per_sample != audio_status.bits_per_sample ||
rdev->audio_status.status_bits != audio_status.status_bits ||
rdev->audio_status.category_code != audio_status.category_code) {
rdev->audio_status = audio_status;
changed = true;
}
list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
still_going |= radeon_encoder->audio_polling_active;
if (changes || r600_hdmi_buffer_status_changed(encoder))
if (!radeon_dig_encoder(encoder))
continue;
if (changed || r600_hdmi_buffer_status_changed(encoder))
r600_hdmi_update_audio_settings(encoder);
}
// mod_timer(&rdev->audio_timer,
// jiffies + msecs_to_jiffies(AUDIO_TIMER_INTERVALL));
}
/*
@ -152,13 +149,23 @@ static void r600_audio_update_hdmi(unsigned long param)
*/
static void r600_audio_engine_enable(struct radeon_device *rdev, bool enable)
{
u32 value = 0;
DRM_INFO("%s audio support\n", enable ? "Enabling" : "Disabling");
WREG32_P(R600_AUDIO_ENABLE, enable ? 0x81000000 : 0x0, ~0x81000000);
if (ASIC_IS_DCE4(rdev)) {
if (enable) {
value |= 0x81000000; /* Required to enable audio */
value |= 0x0e1000f0; /* fglrx sets that too */
}
WREG32(EVERGREEN_AUDIO_ENABLE, value);
} else {
WREG32_P(R600_AUDIO_ENABLE,
enable ? 0x81000000 : 0x0, ~0x81000000);
}
rdev->audio_enabled = enable;
}
/*
* initialize the audio vars and register the update timer
* initialize the audio vars
*/
int r600_audio_init(struct radeon_device *rdev)
{
@ -167,18 +174,11 @@ int r600_audio_init(struct radeon_device *rdev)
r600_audio_engine_enable(rdev, true);
rdev->audio_channels = -1;
rdev->audio_rate = -1;
rdev->audio_bits_per_sample = -1;
rdev->audio_status_bits = 0;
rdev->audio_category_code = 0;
// setup_timer(
// &rdev->audio_timer,
// r600_audio_update_hdmi,
// (unsigned long)rdev);
// mod_timer(&rdev->audio_timer, jiffies + 1);
rdev->audio_status.channels = -1;
rdev->audio_status.rate = -1;
rdev->audio_status.bits_per_sample = -1;
rdev->audio_status.status_bits = 0;
rdev->audio_status.category_code = 0;
return 0;
}
@ -192,6 +192,7 @@ void r600_audio_set_clock(struct drm_encoder *encoder, int clock)
struct radeon_device *rdev = dev->dev_private;
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
int base_rate = 48000;
switch (radeon_encoder->encoder_id) {
@ -211,6 +212,15 @@ void r600_audio_set_clock(struct drm_encoder *encoder, int clock)
return;
}
if (ASIC_IS_DCE4(rdev)) {
/* TODO: other PLLs? */
WREG32(EVERGREEN_AUDIO_PLL1_MUL, base_rate * 10);
WREG32(EVERGREEN_AUDIO_PLL1_DIV, clock * 10);
WREG32(EVERGREEN_AUDIO_PLL1_UNK, 0x00000071);
/* Select DTO source */
WREG32(0x5ac, radeon_crtc->crtc_id);
} else {
switch (dig->dig_encoder) {
case 0:
WREG32(R600_AUDIO_PLL1_MUL, base_rate * 50);
@ -224,10 +234,12 @@ void r600_audio_set_clock(struct drm_encoder *encoder, int clock)
WREG32(R600_AUDIO_CLK_SRCSEL, 1);
break;
default:
dev_err(rdev->dev, "Unsupported DIG on encoder 0x%02X\n",
dev_err(rdev->dev,
"Unsupported DIG on encoder 0x%02X\n",
radeon_encoder->encoder_id);
return;
}
}
}
/*
@ -239,7 +251,5 @@ void r600_audio_fini(struct radeon_device *rdev)
if (!rdev->audio_enabled)
return;
// del_timer(&rdev->audio_timer);
r600_audio_engine_enable(rdev, false);
}

View File

@ -23,30 +23,20 @@
*
*/
#include "drmP.h"
#include "drm.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "r600d.h"
#include "r600_blit_shaders.h"
#define DI_PT_RECTLIST 0x11
#define DI_INDEX_SIZE_16_BIT 0x0
#define DI_SRC_SEL_AUTO_INDEX 0x2
#define FMT_8 0x1
#define FMT_5_6_5 0x8
#define FMT_8_8_8_8 0x1a
#define COLOR_8 0x1
#define COLOR_5_6_5 0x8
#define COLOR_8_8_8_8 0x1a
#include "radeon_blit_common.h"
/* emits 21 on rv770+, 23 on r600 */
static void
set_render_target(struct radeon_device *rdev, int format,
int w, int h, u64 gpu_addr)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
u32 cb_color_info;
int pitch, slice;
@ -54,42 +44,44 @@ set_render_target(struct radeon_device *rdev, int format,
if (h < 8)
h = 8;
cb_color_info = ((format << 2) | (1 << 27) | (1 << 8));
cb_color_info = CB_FORMAT(format) |
CB_SOURCE_FORMAT(CB_SF_EXPORT_NORM) |
CB_ARRAY_MODE(ARRAY_1D_TILED_THIN1);
pitch = (w / 8) - 1;
slice = ((w * h) / 64) - 1;
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (CB_COLOR0_BASE - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, gpu_addr >> 8);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (CB_COLOR0_BASE - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, gpu_addr >> 8);
if (rdev->family > CHIP_R600 && rdev->family < CHIP_RV770) {
radeon_ring_write(rdev, PACKET3(PACKET3_SURFACE_BASE_UPDATE, 0));
radeon_ring_write(rdev, 2 << 0);
radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_BASE_UPDATE, 0));
radeon_ring_write(ring, 2 << 0);
}
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (CB_COLOR0_SIZE - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, (pitch << 0) | (slice << 10));
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (CB_COLOR0_SIZE - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, (pitch << 0) | (slice << 10));
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (CB_COLOR0_VIEW - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, 0);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (CB_COLOR0_VIEW - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, 0);
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (CB_COLOR0_INFO - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, cb_color_info);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (CB_COLOR0_INFO - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, cb_color_info);
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (CB_COLOR0_TILE - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, 0);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (CB_COLOR0_TILE - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, 0);
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (CB_COLOR0_FRAG - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, 0);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (CB_COLOR0_FRAG - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, 0);
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (CB_COLOR0_MASK - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, 0);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (CB_COLOR0_MASK - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, 0);
}
/* emits 5dw */
@ -98,6 +90,7 @@ cp_set_surface_sync(struct radeon_device *rdev,
u32 sync_type, u32 size,
u64 mc_addr)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
u32 cp_coher_size;
if (size == 0xffffffff)
@ -105,17 +98,18 @@ cp_set_surface_sync(struct radeon_device *rdev,
else
cp_coher_size = ((size + 255) >> 8);
radeon_ring_write(rdev, PACKET3(PACKET3_SURFACE_SYNC, 3));
radeon_ring_write(rdev, sync_type);
radeon_ring_write(rdev, cp_coher_size);
radeon_ring_write(rdev, mc_addr >> 8);
radeon_ring_write(rdev, 10); /* poll interval */
radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
radeon_ring_write(ring, sync_type);
radeon_ring_write(ring, cp_coher_size);
radeon_ring_write(ring, mc_addr >> 8);
radeon_ring_write(ring, 10); /* poll interval */
}
/* emits 21dw + 1 surface sync = 26dw */
static void
set_shaders(struct radeon_device *rdev)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
u64 gpu_addr;
u32 sq_pgm_resources;
@ -124,35 +118,35 @@ set_shaders(struct radeon_device *rdev)
/* VS */
gpu_addr = rdev->r600_blit.shader_gpu_addr + rdev->r600_blit.vs_offset;
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (SQ_PGM_START_VS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, gpu_addr >> 8);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (SQ_PGM_START_VS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, gpu_addr >> 8);
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (SQ_PGM_RESOURCES_VS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, sq_pgm_resources);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (SQ_PGM_RESOURCES_VS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, sq_pgm_resources);
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (SQ_PGM_CF_OFFSET_VS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, 0);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (SQ_PGM_CF_OFFSET_VS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, 0);
/* PS */
gpu_addr = rdev->r600_blit.shader_gpu_addr + rdev->r600_blit.ps_offset;
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (SQ_PGM_START_PS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, gpu_addr >> 8);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (SQ_PGM_START_PS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, gpu_addr >> 8);
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (SQ_PGM_RESOURCES_PS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, sq_pgm_resources | (1 << 28));
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (SQ_PGM_RESOURCES_PS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, sq_pgm_resources | (1 << 28));
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (SQ_PGM_EXPORTS_PS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, 2);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (SQ_PGM_EXPORTS_PS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, 2);
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(rdev, (SQ_PGM_CF_OFFSET_PS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, 0);
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 1));
radeon_ring_write(ring, (SQ_PGM_CF_OFFSET_PS - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, 0);
gpu_addr = rdev->r600_blit.shader_gpu_addr + rdev->r600_blit.vs_offset;
cp_set_surface_sync(rdev, PACKET3_SH_ACTION_ENA, 512, gpu_addr);
@ -162,22 +156,24 @@ set_shaders(struct radeon_device *rdev)
static void
set_vtx_resource(struct radeon_device *rdev, u64 gpu_addr)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
u32 sq_vtx_constant_word2;
sq_vtx_constant_word2 = ((upper_32_bits(gpu_addr) & 0xff) | (16 << 8));
sq_vtx_constant_word2 = SQ_VTXC_BASE_ADDR_HI(upper_32_bits(gpu_addr) & 0xff) |
SQ_VTXC_STRIDE(16);
#ifdef __BIG_ENDIAN
sq_vtx_constant_word2 |= (2 << 30);
sq_vtx_constant_word2 |= SQ_VTXC_ENDIAN_SWAP(SQ_ENDIAN_8IN32);
#endif
radeon_ring_write(rdev, PACKET3(PACKET3_SET_RESOURCE, 7));
radeon_ring_write(rdev, 0x460);
radeon_ring_write(rdev, gpu_addr & 0xffffffff);
radeon_ring_write(rdev, 48 - 1);
radeon_ring_write(rdev, sq_vtx_constant_word2);
radeon_ring_write(rdev, 1 << 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, SQ_TEX_VTX_VALID_BUFFER << 30);
radeon_ring_write(ring, PACKET3(PACKET3_SET_RESOURCE, 7));
radeon_ring_write(ring, 0x460);
radeon_ring_write(ring, gpu_addr & 0xffffffff);
radeon_ring_write(ring, 48 - 1);
radeon_ring_write(ring, sq_vtx_constant_word2);
radeon_ring_write(ring, 1 << 0);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, SQ_TEX_VTX_VALID_BUFFER << 30);
if ((rdev->family == CHIP_RV610) ||
(rdev->family == CHIP_RV620) ||
@ -195,35 +191,40 @@ set_vtx_resource(struct radeon_device *rdev, u64 gpu_addr)
static void
set_tex_resource(struct radeon_device *rdev,
int format, int w, int h, int pitch,
u64 gpu_addr)
u64 gpu_addr, u32 size)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
uint32_t sq_tex_resource_word0, sq_tex_resource_word1, sq_tex_resource_word4;
if (h < 1)
h = 1;
sq_tex_resource_word0 = (1 << 0) | (1 << 3);
sq_tex_resource_word0 |= ((((pitch >> 3) - 1) << 8) |
((w - 1) << 19));
sq_tex_resource_word0 = S_038000_DIM(V_038000_SQ_TEX_DIM_2D) |
S_038000_TILE_MODE(V_038000_ARRAY_1D_TILED_THIN1);
sq_tex_resource_word0 |= S_038000_PITCH((pitch >> 3) - 1) |
S_038000_TEX_WIDTH(w - 1);
sq_tex_resource_word1 = (format << 26);
sq_tex_resource_word1 |= ((h - 1) << 0);
sq_tex_resource_word1 = S_038004_DATA_FORMAT(format);
sq_tex_resource_word1 |= S_038004_TEX_HEIGHT(h - 1);
sq_tex_resource_word4 = ((1 << 14) |
(0 << 16) |
(1 << 19) |
(2 << 22) |
(3 << 25));
sq_tex_resource_word4 = S_038010_REQUEST_SIZE(1) |
S_038010_DST_SEL_X(SQ_SEL_X) |
S_038010_DST_SEL_Y(SQ_SEL_Y) |
S_038010_DST_SEL_Z(SQ_SEL_Z) |
S_038010_DST_SEL_W(SQ_SEL_W);
radeon_ring_write(rdev, PACKET3(PACKET3_SET_RESOURCE, 7));
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, sq_tex_resource_word0);
radeon_ring_write(rdev, sq_tex_resource_word1);
radeon_ring_write(rdev, gpu_addr >> 8);
radeon_ring_write(rdev, gpu_addr >> 8);
radeon_ring_write(rdev, sq_tex_resource_word4);
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, SQ_TEX_VTX_VALID_TEXTURE << 30);
cp_set_surface_sync(rdev,
PACKET3_TC_ACTION_ENA, size, gpu_addr);
radeon_ring_write(ring, PACKET3(PACKET3_SET_RESOURCE, 7));
radeon_ring_write(ring, 0);
radeon_ring_write(ring, sq_tex_resource_word0);
radeon_ring_write(ring, sq_tex_resource_word1);
radeon_ring_write(ring, gpu_addr >> 8);
radeon_ring_write(ring, gpu_addr >> 8);
radeon_ring_write(ring, sq_tex_resource_word4);
radeon_ring_write(ring, 0);
radeon_ring_write(ring, SQ_TEX_VTX_VALID_TEXTURE << 30);
}
/* emits 12 */
@ -231,43 +232,45 @@ static void
set_scissors(struct radeon_device *rdev, int x1, int y1,
int x2, int y2)
{
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
radeon_ring_write(rdev, (PA_SC_SCREEN_SCISSOR_TL - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, (x1 << 0) | (y1 << 16));
radeon_ring_write(rdev, (x2 << 0) | (y2 << 16));
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
radeon_ring_write(ring, (PA_SC_SCREEN_SCISSOR_TL - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, (x1 << 0) | (y1 << 16));
radeon_ring_write(ring, (x2 << 0) | (y2 << 16));
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
radeon_ring_write(rdev, (PA_SC_GENERIC_SCISSOR_TL - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, (x1 << 0) | (y1 << 16) | (1 << 31));
radeon_ring_write(rdev, (x2 << 0) | (y2 << 16));
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
radeon_ring_write(ring, (PA_SC_GENERIC_SCISSOR_TL - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, (x1 << 0) | (y1 << 16) | (1 << 31));
radeon_ring_write(ring, (x2 << 0) | (y2 << 16));
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
radeon_ring_write(rdev, (PA_SC_WINDOW_SCISSOR_TL - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(rdev, (x1 << 0) | (y1 << 16) | (1 << 31));
radeon_ring_write(rdev, (x2 << 0) | (y2 << 16));
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
radeon_ring_write(ring, (PA_SC_WINDOW_SCISSOR_TL - PACKET3_SET_CONTEXT_REG_OFFSET) >> 2);
radeon_ring_write(ring, (x1 << 0) | (y1 << 16) | (1 << 31));
radeon_ring_write(ring, (x2 << 0) | (y2 << 16));
}
/* emits 10 */
static void
draw_auto(struct radeon_device *rdev)
{
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONFIG_REG, 1));
radeon_ring_write(rdev, (VGT_PRIMITIVE_TYPE - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
radeon_ring_write(rdev, DI_PT_RECTLIST);
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
radeon_ring_write(ring, (VGT_PRIMITIVE_TYPE - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
radeon_ring_write(ring, DI_PT_RECTLIST);
radeon_ring_write(rdev, PACKET3(PACKET3_INDEX_TYPE, 0));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET3(PACKET3_INDEX_TYPE, 0));
radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
(2 << 2) |
#endif
DI_INDEX_SIZE_16_BIT);
radeon_ring_write(rdev, PACKET3(PACKET3_NUM_INSTANCES, 0));
radeon_ring_write(rdev, 1);
radeon_ring_write(ring, PACKET3(PACKET3_NUM_INSTANCES, 0));
radeon_ring_write(ring, 1);
radeon_ring_write(rdev, PACKET3(PACKET3_DRAW_INDEX_AUTO, 1));
radeon_ring_write(rdev, 3);
radeon_ring_write(rdev, DI_SRC_SEL_AUTO_INDEX);
radeon_ring_write(ring, PACKET3(PACKET3_DRAW_INDEX_AUTO, 1));
radeon_ring_write(ring, 3);
radeon_ring_write(ring, DI_SRC_SEL_AUTO_INDEX);
}
@ -275,6 +278,7 @@ draw_auto(struct radeon_device *rdev)
static void
set_default_state(struct radeon_device *rdev)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
u32 sq_config, sq_gpr_resource_mgmt_1, sq_gpr_resource_mgmt_2;
u32 sq_thread_resource_mgmt, sq_stack_resource_mgmt_1, sq_stack_resource_mgmt_2;
int num_ps_gprs, num_vs_gprs, num_temp_gprs, num_gs_gprs, num_es_gprs;
@ -430,49 +434,24 @@ set_default_state(struct radeon_device *rdev)
/* emit an IB pointing at default state */
dwords = ALIGN(rdev->r600_blit.state_len, 0x10);
gpu_addr = rdev->r600_blit.shader_gpu_addr + rdev->r600_blit.state_offset;
radeon_ring_write(rdev, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
(2 << 0) |
#endif
(gpu_addr & 0xFFFFFFFC));
radeon_ring_write(rdev, upper_32_bits(gpu_addr) & 0xFF);
radeon_ring_write(rdev, dwords);
radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xFF);
radeon_ring_write(ring, dwords);
/* SQ config */
radeon_ring_write(rdev, PACKET3(PACKET3_SET_CONFIG_REG, 6));
radeon_ring_write(rdev, (SQ_CONFIG - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
radeon_ring_write(rdev, sq_config);
radeon_ring_write(rdev, sq_gpr_resource_mgmt_1);
radeon_ring_write(rdev, sq_gpr_resource_mgmt_2);
radeon_ring_write(rdev, sq_thread_resource_mgmt);
radeon_ring_write(rdev, sq_stack_resource_mgmt_1);
radeon_ring_write(rdev, sq_stack_resource_mgmt_2);
}
static inline uint32_t i2f(uint32_t input)
{
u32 result, i, exponent, fraction;
if ((input & 0x3fff) == 0)
result = 0; /* 0 is a special case */
else {
exponent = 140; /* exponent biased by 127; */
fraction = (input & 0x3fff) << 10; /* cheat and only
handle numbers below 2^15 */
for (i = 0; i < 14; i++) {
if (fraction & 0x800000)
break;
else {
fraction = fraction << 1; /* keep
shifting left until top bit = 1 */
exponent = exponent - 1;
}
}
result = exponent << 23 | (fraction & 0x7fffff); /* mask
off top bit; assumed 1 */
}
return result;
radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 6));
radeon_ring_write(ring, (SQ_CONFIG - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
radeon_ring_write(ring, sq_config);
radeon_ring_write(ring, sq_gpr_resource_mgmt_1);
radeon_ring_write(ring, sq_gpr_resource_mgmt_2);
radeon_ring_write(ring, sq_thread_resource_mgmt);
radeon_ring_write(ring, sq_stack_resource_mgmt_1);
radeon_ring_write(ring, sq_stack_resource_mgmt_2);
}
int r600_blit_init(struct radeon_device *rdev)
@ -483,11 +462,27 @@ int r600_blit_init(struct radeon_device *rdev)
u32 packet2s[16];
int num_packet2s = 0;
/* pin copy shader into vram if already initialized */
if (rdev->r600_blit.shader_obj)
goto done;
rdev->r600_blit.primitives.set_render_target = set_render_target;
rdev->r600_blit.primitives.cp_set_surface_sync = cp_set_surface_sync;
rdev->r600_blit.primitives.set_shaders = set_shaders;
rdev->r600_blit.primitives.set_vtx_resource = set_vtx_resource;
rdev->r600_blit.primitives.set_tex_resource = set_tex_resource;
rdev->r600_blit.primitives.set_scissors = set_scissors;
rdev->r600_blit.primitives.draw_auto = draw_auto;
rdev->r600_blit.primitives.set_default_state = set_default_state;
rdev->r600_blit.ring_size_common = 8; /* sync semaphore */
rdev->r600_blit.ring_size_common += 40; /* shaders + def state */
rdev->r600_blit.ring_size_common += 5; /* done copy */
rdev->r600_blit.ring_size_common += 16; /* fence emit for done copy */
rdev->r600_blit.ring_size_per_loop = 76;
/* set_render_target emits 2 extra dwords on rv6xx */
if (rdev->family > CHIP_R600 && rdev->family < CHIP_RV770)
rdev->r600_blit.ring_size_per_loop += 2;
rdev->r600_blit.max_dim = 8192;
mutex_init(&rdev->r600_blit.mutex);
rdev->r600_blit.state_offset = 0;
if (rdev->family >= CHIP_RV770)
@ -512,12 +507,27 @@ int r600_blit_init(struct radeon_device *rdev)
obj_size += r6xx_ps_size * 4;
obj_size = ALIGN(obj_size, 256);
r = radeon_bo_create(rdev, obj_size, PAGE_SIZE, true, RADEON_GEM_DOMAIN_VRAM,
&rdev->r600_blit.shader_obj);
/* pin copy shader into vram if not already initialized */
if (rdev->r600_blit.shader_obj == NULL) {
r = radeon_bo_create(rdev, obj_size, PAGE_SIZE, true,
RADEON_GEM_DOMAIN_VRAM,
NULL, &rdev->r600_blit.shader_obj);
if (r) {
DRM_ERROR("r600 failed to allocate shader\n");
return r;
}
r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
if (unlikely(r != 0))
return r;
r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
&rdev->r600_blit.shader_gpu_addr);
radeon_bo_unreserve(rdev->r600_blit.shader_obj);
if (r) {
DRM_ERROR("r600 failed to allocate shader\n");
dev_err(rdev->dev, "(%d) pin blit object failed\n", r);
return r;
}
}
DRM_DEBUG("r6xx blit allocated bo %08x vs %08x ps %08x\n",
obj_size,
@ -547,20 +557,7 @@ int r600_blit_init(struct radeon_device *rdev)
radeon_bo_kunmap(rdev->r600_blit.shader_obj);
radeon_bo_unreserve(rdev->r600_blit.shader_obj);
done:
r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
if (unlikely(r != 0))
return r;
r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
&rdev->r600_blit.shader_gpu_addr);
radeon_bo_unreserve(rdev->r600_blit.shader_obj);
if (r) {
dev_err(rdev->dev, "(%d) pin blit object failed\n", r);
return r;
}
// radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
return 0;
}
@ -582,263 +579,176 @@ void r600_blit_fini(struct radeon_device *rdev)
radeon_bo_unref(&rdev->r600_blit.shader_obj);
}
static int r600_vb_ib_get(struct radeon_device *rdev)
static unsigned r600_blit_create_rect(unsigned num_gpu_pages,
int *width, int *height, int max_dim)
{
unsigned max_pages;
unsigned pages = num_gpu_pages;
int w, h;
if (num_gpu_pages == 0) {
/* not supposed to be called with no pages, but just in case */
h = 0;
w = 0;
pages = 0;
WARN_ON(1);
} else {
int rect_order = 2;
h = RECT_UNIT_H;
while (num_gpu_pages / rect_order) {
h *= 2;
rect_order *= 4;
if (h >= max_dim) {
h = max_dim;
break;
}
}
max_pages = (max_dim * h) / (RECT_UNIT_W * RECT_UNIT_H);
if (pages > max_pages)
pages = max_pages;
w = (pages * RECT_UNIT_W * RECT_UNIT_H) / h;
w = (w / RECT_UNIT_W) * RECT_UNIT_W;
pages = (w * h) / (RECT_UNIT_W * RECT_UNIT_H);
BUG_ON(pages == 0);
}
DRM_DEBUG("blit_rectangle: h=%d, w=%d, pages=%d\n", h, w, pages);
/* return width and height only if the caller wants it */
if (height)
*height = h;
if (width)
*width = w;
return pages;
}
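/*
 * Illustration (standalone sketch, not driver code): the rectangle sizing
 * done by r600_blit_create_rect() above, pulled out so the doubling loop
 * can be traced by hand. RECT_UNIT_W/RECT_UNIT_H are assumed to be 32x32
 * (one 4 KiB GPU page of 32-bit pixels, per radeon_blit_common.h); the
 * zero-pages special case is omitted for brevity.
 */
#include <stdio.h>

#define RECT_UNIT_W 32	/* assumed */
#define RECT_UNIT_H 32	/* assumed */

static unsigned create_rect(unsigned pages, int *width, int *height, int max_dim)
{
	unsigned max_pages;
	int w, h = RECT_UNIT_H;
	int rect_order = 2;

	while (pages / rect_order) {	/* grow height while enough pages remain */
		h *= 2;
		rect_order *= 4;
		if (h >= max_dim) {
			h = max_dim;
			break;
		}
	}
	max_pages = (max_dim * h) / (RECT_UNIT_W * RECT_UNIT_H);
	if (pages > max_pages)
		pages = max_pages;
	w = (pages * RECT_UNIT_W * RECT_UNIT_H) / h;	/* width covering the remaining pages */
	w = (w / RECT_UNIT_W) * RECT_UNIT_W;		/* round down to whole units */
	pages = (w * h) / (RECT_UNIT_W * RECT_UNIT_H);

	*width = w;
	*height = h;
	return pages;					/* pages covered by this rectangle */
}

int main(void)
{
	int w, h;
	unsigned covered = create_rect(1000, &w, &h, 8192);

	printf("1000 pages -> %dx%d covering %u pages\n", w, h, covered);
	return 0;
}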
int r600_blit_prepare_copy(struct radeon_device *rdev, unsigned num_gpu_pages,
struct radeon_fence **fence, struct radeon_sa_bo **vb,
struct radeon_semaphore **sem)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
int r;
r = radeon_ib_get(rdev, &rdev->r600_blit.vb_ib);
int ring_size;
int num_loops = 0;
int dwords_per_loop = rdev->r600_blit.ring_size_per_loop;
/* num loops */
while (num_gpu_pages) {
num_gpu_pages -=
r600_blit_create_rect(num_gpu_pages, NULL, NULL,
rdev->r600_blit.max_dim);
num_loops++;
}
/* 48 bytes for vertex per loop */
r = radeon_sa_bo_new(rdev, &rdev->ring_tmp_bo, vb,
(num_loops*48)+256, 256, true);
if (r) {
DRM_ERROR("failed to get IB for vertex buffer\n");
return r;
}
rdev->r600_blit.vb_total = 64*1024;
rdev->r600_blit.vb_used = 0;
return 0;
}
static void r600_vb_ib_put(struct radeon_device *rdev)
{
radeon_fence_emit(rdev, rdev->r600_blit.vb_ib->fence);
radeon_ib_free(rdev, &rdev->r600_blit.vb_ib);
}
int r600_blit_prepare_copy(struct radeon_device *rdev, int size_bytes)
{
int r;
int ring_size, line_size;
int max_size;
/* loops of emits 64 + fence emit possible */
int dwords_per_loop = 76, num_loops;
r = r600_vb_ib_get(rdev);
if (r)
r = radeon_semaphore_create(rdev, sem);
if (r) {
radeon_sa_bo_free(rdev, vb, NULL);
return r;
}
/* set_render_target emits 2 extra dwords on rv6xx */
if (rdev->family > CHIP_R600 && rdev->family < CHIP_RV770)
dwords_per_loop += 2;
/* 8 bpp vs 32 bpp for xfer unit */
if (size_bytes & 3)
line_size = 8192;
else
line_size = 8192*4;
max_size = 8192 * line_size;
/* major loops cover the max size transfer */
num_loops = ((size_bytes + max_size) / max_size);
/* minor loops cover the extra non aligned bits */
num_loops += ((size_bytes % line_size) ? 1 : 0);
/* calculate number of loops correctly */
ring_size = num_loops * dwords_per_loop;
/* set default + shaders */
ring_size += 40; /* shaders + def state */
ring_size += 10; /* fence emit for VB IB */
ring_size += 5; /* done copy */
ring_size += 10; /* fence emit for done copy */
r = radeon_ring_lock(rdev, ring_size);
if (r)
ring_size += rdev->r600_blit.ring_size_common;
r = radeon_ring_lock(rdev, ring, ring_size);
if (r) {
radeon_sa_bo_free(rdev, vb, NULL);
radeon_semaphore_free(rdev, sem, NULL);
return r;
}
set_default_state(rdev); /* 14 */
set_shaders(rdev); /* 26 */
if (radeon_fence_need_sync(*fence, RADEON_RING_TYPE_GFX_INDEX)) {
radeon_semaphore_sync_rings(rdev, *sem, (*fence)->ring,
RADEON_RING_TYPE_GFX_INDEX);
radeon_fence_note_sync(*fence, RADEON_RING_TYPE_GFX_INDEX);
} else {
radeon_semaphore_free(rdev, sem, NULL);
}
rdev->r600_blit.primitives.set_default_state(rdev);
rdev->r600_blit.primitives.set_shaders(rdev);
return 0;
}
void r600_blit_done_copy(struct radeon_device *rdev, struct radeon_fence *fence)
void r600_blit_done_copy(struct radeon_device *rdev, struct radeon_fence **fence,
struct radeon_sa_bo *vb, struct radeon_semaphore *sem)
{
struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
int r;
if (rdev->r600_blit.vb_ib)
r600_vb_ib_put(rdev);
r = radeon_fence_emit(rdev, fence, RADEON_RING_TYPE_GFX_INDEX);
if (r) {
radeon_ring_unlock_undo(rdev, ring);
return;
}
if (fence)
r = radeon_fence_emit(rdev, fence);
radeon_ring_unlock_commit(rdev);
radeon_ring_unlock_commit(rdev, ring);
radeon_sa_bo_free(rdev, &vb, *fence);
radeon_semaphore_free(rdev, &sem, *fence);
}
void r600_kms_blit_copy(struct radeon_device *rdev,
u64 src_gpu_addr, u64 dst_gpu_addr,
int size_bytes)
unsigned num_gpu_pages,
struct radeon_sa_bo *vb)
{
int max_bytes;
u64 vb_gpu_addr;
u32 *vb;
u32 *vb_cpu_addr;
DRM_DEBUG("emitting copy %16llx %16llx %d %d\n", src_gpu_addr, dst_gpu_addr,
size_bytes, rdev->r600_blit.vb_used);
vb = (u32 *)(rdev->r600_blit.vb_ib->ptr + rdev->r600_blit.vb_used);
if ((size_bytes & 3) || (src_gpu_addr & 3) || (dst_gpu_addr & 3)) {
max_bytes = 8192;
DRM_DEBUG("emitting copy %16llx %16llx %d\n",
src_gpu_addr, dst_gpu_addr, num_gpu_pages);
vb_cpu_addr = (u32 *)radeon_sa_bo_cpu_addr(vb);
vb_gpu_addr = radeon_sa_bo_gpu_addr(vb);
while (size_bytes) {
int cur_size = size_bytes;
int src_x = src_gpu_addr & 255;
int dst_x = dst_gpu_addr & 255;
int h = 1;
src_gpu_addr = src_gpu_addr & ~255ULL;
dst_gpu_addr = dst_gpu_addr & ~255ULL;
while (num_gpu_pages) {
int w, h;
unsigned size_in_bytes;
unsigned pages_per_loop =
r600_blit_create_rect(num_gpu_pages, &w, &h,
rdev->r600_blit.max_dim);
if (!src_x && !dst_x) {
h = (cur_size / max_bytes);
if (h > 8192)
h = 8192;
if (h == 0)
h = 1;
else
cur_size = max_bytes;
} else {
if (cur_size > max_bytes)
cur_size = max_bytes;
if (cur_size > (max_bytes - dst_x))
cur_size = (max_bytes - dst_x);
if (cur_size > (max_bytes - src_x))
cur_size = (max_bytes - src_x);
}
size_in_bytes = pages_per_loop * RADEON_GPU_PAGE_SIZE;
DRM_DEBUG("rectangle w=%d h=%d\n", w, h);
if ((rdev->r600_blit.vb_used + 48) > rdev->r600_blit.vb_total) {
// WARN_ON(1);
}
vb_cpu_addr[0] = 0;
vb_cpu_addr[1] = 0;
vb_cpu_addr[2] = 0;
vb_cpu_addr[3] = 0;
vb[0] = i2f(dst_x);
vb[1] = 0;
vb[2] = i2f(src_x);
vb[3] = 0;
vb_cpu_addr[4] = 0;
vb_cpu_addr[5] = int2float(h);
vb_cpu_addr[6] = 0;
vb_cpu_addr[7] = int2float(h);
vb[4] = i2f(dst_x);
vb[5] = i2f(h);
vb[6] = i2f(src_x);
vb[7] = i2f(h);
vb_cpu_addr[8] = int2float(w);
vb_cpu_addr[9] = int2float(h);
vb_cpu_addr[10] = int2float(w);
vb_cpu_addr[11] = int2float(h);
vb[8] = i2f(dst_x + cur_size);
vb[9] = i2f(h);
vb[10] = i2f(src_x + cur_size);
vb[11] = i2f(h);
/* src 9 */
set_tex_resource(rdev, FMT_8,
src_x + cur_size, h, src_x + cur_size,
src_gpu_addr);
/* 5 */
cp_set_surface_sync(rdev,
PACKET3_TC_ACTION_ENA, (src_x + cur_size * h), src_gpu_addr);
/* dst 23 */
set_render_target(rdev, COLOR_8,
dst_x + cur_size, h,
dst_gpu_addr);
/* scissors 12 */
set_scissors(rdev, dst_x, 0, dst_x + cur_size, h);
/* 14 */
vb_gpu_addr = rdev->r600_blit.vb_ib->gpu_addr + rdev->r600_blit.vb_used;
set_vtx_resource(rdev, vb_gpu_addr);
/* draw 10 */
draw_auto(rdev);
/* 5 */
cp_set_surface_sync(rdev,
rdev->r600_blit.primitives.set_tex_resource(rdev, FMT_8_8_8_8,
w, h, w, src_gpu_addr, size_in_bytes);
rdev->r600_blit.primitives.set_render_target(rdev, COLOR_8_8_8_8,
w, h, dst_gpu_addr);
rdev->r600_blit.primitives.set_scissors(rdev, 0, 0, w, h);
rdev->r600_blit.primitives.set_vtx_resource(rdev, vb_gpu_addr);
rdev->r600_blit.primitives.draw_auto(rdev);
rdev->r600_blit.primitives.cp_set_surface_sync(rdev,
PACKET3_CB_ACTION_ENA | PACKET3_CB0_DEST_BASE_ENA,
cur_size * h, dst_gpu_addr);
size_in_bytes, dst_gpu_addr);
vb += 12;
rdev->r600_blit.vb_used += 12 * 4;
src_gpu_addr += cur_size * h;
dst_gpu_addr += cur_size * h;
size_bytes -= cur_size * h;
vb_cpu_addr += 12;
vb_gpu_addr += 4*12;
src_gpu_addr += size_in_bytes;
dst_gpu_addr += size_in_bytes;
num_gpu_pages -= pages_per_loop;
}
} else {
max_bytes = 8192 * 4;
while (size_bytes) {
int cur_size = size_bytes;
int src_x = (src_gpu_addr & 255);
int dst_x = (dst_gpu_addr & 255);
int h = 1;
src_gpu_addr = src_gpu_addr & ~255ULL;
dst_gpu_addr = dst_gpu_addr & ~255ULL;
if (!src_x && !dst_x) {
h = (cur_size / max_bytes);
if (h > 8192)
h = 8192;
if (h == 0)
h = 1;
else
cur_size = max_bytes;
} else {
if (cur_size > max_bytes)
cur_size = max_bytes;
if (cur_size > (max_bytes - dst_x))
cur_size = (max_bytes - dst_x);
if (cur_size > (max_bytes - src_x))
cur_size = (max_bytes - src_x);
}
if ((rdev->r600_blit.vb_used + 48) > rdev->r600_blit.vb_total) {
// WARN_ON(1);
}
vb[0] = i2f(dst_x / 4);
vb[1] = 0;
vb[2] = i2f(src_x / 4);
vb[3] = 0;
vb[4] = i2f(dst_x / 4);
vb[5] = i2f(h);
vb[6] = i2f(src_x / 4);
vb[7] = i2f(h);
vb[8] = i2f((dst_x + cur_size) / 4);
vb[9] = i2f(h);
vb[10] = i2f((src_x + cur_size) / 4);
vb[11] = i2f(h);
/* src 9 */
set_tex_resource(rdev, FMT_8_8_8_8,
(src_x + cur_size) / 4,
h, (src_x + cur_size) / 4,
src_gpu_addr);
/* 5 */
cp_set_surface_sync(rdev,
PACKET3_TC_ACTION_ENA, (src_x + cur_size * h), src_gpu_addr);
/* dst 23 */
set_render_target(rdev, COLOR_8_8_8_8,
(dst_x + cur_size) / 4, h,
dst_gpu_addr);
/* scissors 12 */
set_scissors(rdev, (dst_x / 4), 0, (dst_x + cur_size / 4), h);
/* Vertex buffer setup 14 */
vb_gpu_addr = rdev->r600_blit.vb_ib->gpu_addr + rdev->r600_blit.vb_used;
set_vtx_resource(rdev, vb_gpu_addr);
/* draw 10 */
draw_auto(rdev);
/* 5 */
cp_set_surface_sync(rdev,
PACKET3_CB_ACTION_ENA | PACKET3_CB0_DEST_BASE_ENA,
cur_size * h, dst_gpu_addr);
/* 78 ring dwords per loop */
vb += 12;
rdev->r600_blit.vb_used += 12 * 4;
src_gpu_addr += cur_size * h;
dst_gpu_addr += cur_size * h;
size_bytes -= cur_size * h;
}
}
}
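/* Layout note (a sketch of what the 12 dwords written per loop above encode,
 * as far as the code shows): three vertices of a screen-aligned rectangle,
 * each stored as { dst_x, dst_y, src_u, src_v } in IEEE-754 float bits
 * produced by int2float()/i2f():
 *
 *	vertex 0: (0, 0)  tex (0, 0)
 *	vertex 1: (0, h)  tex (0, h)
 *	vertex 2: (w, h)  tex (w, h)
 *
 * draw_auto() then rasterizes this rectangle, sampling the source surface
 * bound by set_tex_resource() into the render target bound by
 * set_render_target(). */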

View File

@ -313,6 +313,10 @@ const u32 r6xx_default_state[] =
0x00000000, /* VGT_REUSE_OFF */
0x00000000, /* VGT_VTX_CNT_EN */
0xc0016900,
0x000000d4,
0x00000000, /* SX_MISC */
0xc0016900,
0x000002c8,
0x00000000, /* VGT_STRMOUT_BUFFER_EN */
@ -625,6 +629,10 @@ const u32 r7xx_default_state[] =
0x00000000, /* VGT_REUSE_OFF */
0x00000000, /* VGT_VTX_CNT_EN */
0xc0016900,
0x000000d4,
0x00000000, /* SX_MISC */
0xc0016900,
0x000002c8,
0x00000000, /* VGT_STRMOUT_BUFFER_EN */

View File

@ -35,4 +35,5 @@ extern const u32 r6xx_default_state[];
extern const u32 r6xx_ps_size, r6xx_vs_size;
extern const u32 r6xx_default_size, r7xx_default_size;
__pure uint32_t int2float(uint32_t x);
#endif
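A minimal sketch, assuming a GCC-style __builtin_clz() is available, of what an int2float()-style helper has to do: pack a small unsigned integer into its IEEE-754 single-precision bit pattern so the blit code can write float vertex data with plain 32-bit stores. The name int2float_sketch is illustrative, not the file's actual implementation.

#include <stdint.h>

static uint32_t int2float_sketch(uint32_t x)
{
	uint32_t msb, exponent, fraction;

	if (x == 0)
		return 0;				/* +0.0f */

	msb = 31 - __builtin_clz(x);			/* highest set bit */
	exponent = (msb + 127) << 23;			/* biased exponent */
	if (msb <= 23)					/* left-align the mantissa */
		fraction = (x << (23 - msb)) & 0x7fffff;
	else						/* drop bits that do not fit */
		fraction = (x >> (msb - 23)) & 0x7fffff;

	return exponent | fraction;			/* 1 -> 0x3f800000 (1.0f) */
}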

View File

@ -23,10 +23,11 @@
*
* Authors: Christian König
*/
#include "drmP.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "r600d.h"
#include "atom.h"
/*
@ -52,19 +53,7 @@ enum r600_hdmi_iec_status_bits {
AUDIO_STATUS_LEVEL = 0x80
};
struct {
uint32_t Clock;
int N_32kHz;
int CTS_32kHz;
int N_44_1kHz;
int CTS_44_1kHz;
int N_48kHz;
int CTS_48kHz;
} r600_hdmi_ACR[] = {
static const struct radeon_hdmi_acr r600_hdmi_predefined_acr[] = {
/* 32kHz 44.1kHz 48kHz */
/* Clock N CTS N CTS N CTS */
{ 25174, 4576, 28125, 7007, 31250, 6864, 28125 }, /* 25,20/1.001 MHz */
@ -83,7 +72,7 @@ struct {
/*
* calculate CTS value if it's not found in the table
*/
static void r600_hdmi_calc_CTS(uint32_t clock, int *CTS, int N, int freq)
static void r600_hdmi_calc_cts(uint32_t clock, int *CTS, int N, int freq)
{
if (*CTS == 0)
*CTS = clock * N / (128 * freq) * 1000;
@ -91,6 +80,24 @@ static void r600_hdmi_calc_CTS(uint32_t clock, int *CTS, int N, int freq)
N, *CTS, freq);
}
struct radeon_hdmi_acr r600_hdmi_acr(uint32_t clock)
{
struct radeon_hdmi_acr res;
u8 i;
for (i = 0; r600_hdmi_predefined_acr[i].clock != clock &&
r600_hdmi_predefined_acr[i].clock != 0; i++)
;
res = r600_hdmi_predefined_acr[i];
/* In case some CTS are missing */
r600_hdmi_calc_cts(clock, &res.cts_32khz, res.n_32khz, 32000);
r600_hdmi_calc_cts(clock, &res.cts_44_1khz, res.n_44_1khz, 44100);
r600_hdmi_calc_cts(clock, &res.cts_48khz, res.n_48khz, 48000);
return res;
}
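/* Background note (hedged, derived from the table and r600_hdmi_calc_cts()
 * above): HDMI Audio Clock Regeneration ties the audio sample rate to the
 * TMDS clock via  128 * audio_rate = tmds_clock * N / CTS.  The table holds
 * the recommended N/CTS pairs for the common TMDS clocks; for anything else
 * CTS is recomputed as  clock_in_kHz * N / (128 * audio_rate) * 1000, which
 * is what r600_hdmi_calc_cts() fills in whenever a table entry is 0. */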
/*
* update the N and CTS parameters for a given pixel clock rate
*/
@ -98,30 +105,19 @@ static void r600_hdmi_update_ACR(struct drm_encoder *encoder, uint32_t clock)
{
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;
int CTS;
int N;
int i;
struct radeon_hdmi_acr acr = r600_hdmi_acr(clock);
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
uint32_t offset = dig->afmt->offset;
for (i = 0; r600_hdmi_ACR[i].Clock != clock && r600_hdmi_ACR[i].Clock != 0; i++);
WREG32(HDMI0_ACR_32_0 + offset, HDMI0_ACR_CTS_32(acr.cts_32khz));
WREG32(HDMI0_ACR_32_1 + offset, acr.n_32khz);
CTS = r600_hdmi_ACR[i].CTS_32kHz;
N = r600_hdmi_ACR[i].N_32kHz;
r600_hdmi_calc_CTS(clock, &CTS, N, 32000);
WREG32(offset+R600_HDMI_32kHz_CTS, CTS << 12);
WREG32(offset+R600_HDMI_32kHz_N, N);
WREG32(HDMI0_ACR_44_0 + offset, HDMI0_ACR_CTS_44(acr.cts_44_1khz));
WREG32(HDMI0_ACR_44_1 + offset, acr.n_44_1khz);
CTS = r600_hdmi_ACR[i].CTS_44_1kHz;
N = r600_hdmi_ACR[i].N_44_1kHz;
r600_hdmi_calc_CTS(clock, &CTS, N, 44100);
WREG32(offset+R600_HDMI_44_1kHz_CTS, CTS << 12);
WREG32(offset+R600_HDMI_44_1kHz_N, N);
CTS = r600_hdmi_ACR[i].CTS_48kHz;
N = r600_hdmi_ACR[i].N_48kHz;
r600_hdmi_calc_CTS(clock, &CTS, N, 48000);
WREG32(offset+R600_HDMI_48kHz_CTS, CTS << 12);
WREG32(offset+R600_HDMI_48kHz_N, N);
WREG32(HDMI0_ACR_48_0 + offset, HDMI0_ACR_CTS_48(acr.cts_48khz));
WREG32(HDMI0_ACR_48_1 + offset, acr.n_48khz);
}
/*
@ -165,7 +161,9 @@ static void r600_hdmi_videoinfoframe(
{
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
uint32_t offset = dig->afmt->offset;
uint8_t frame[14];
@ -196,14 +194,21 @@ static void r600_hdmi_videoinfoframe(
frame[0xD] = (right_bar >> 8);
r600_hdmi_infoframe_checksum(0x82, 0x02, 0x0D, frame);
/* Our header values (type, version, length) should be alright; Intel
 * uses the same ones.  The checksum function also seems to be OK (it
 * works fine for the audio infoframe), but the calculated value is
 * always lower by 2 than what fglrx produces, which breaks display on
 * TVs that strictly check the checksum.  Hack it manually here to
 * work around this issue. */
frame[0x0] += 2;
WREG32(offset+R600_HDMI_VIDEOINFOFRAME_0,
WREG32(HDMI0_AVI_INFO0 + offset,
frame[0x0] | (frame[0x1] << 8) | (frame[0x2] << 16) | (frame[0x3] << 24));
WREG32(offset+R600_HDMI_VIDEOINFOFRAME_1,
WREG32(HDMI0_AVI_INFO1 + offset,
frame[0x4] | (frame[0x5] << 8) | (frame[0x6] << 16) | (frame[0x7] << 24));
WREG32(offset+R600_HDMI_VIDEOINFOFRAME_2,
WREG32(HDMI0_AVI_INFO2 + offset,
frame[0x8] | (frame[0x9] << 8) | (frame[0xA] << 16) | (frame[0xB] << 24));
WREG32(offset+R600_HDMI_VIDEOINFOFRAME_3,
WREG32(HDMI0_AVI_INFO3 + offset,
frame[0xC] | (frame[0xD] << 8));
}
@ -224,7 +229,9 @@ static void r600_hdmi_audioinfoframe(
{
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
uint32_t offset = dig->afmt->offset;
uint8_t frame[11];
@ -242,22 +249,24 @@ static void r600_hdmi_audioinfoframe(
r600_hdmi_infoframe_checksum(0x84, 0x01, 0x0A, frame);
WREG32(offset+R600_HDMI_AUDIOINFOFRAME_0,
WREG32(HDMI0_AUDIO_INFO0 + offset,
frame[0x0] | (frame[0x1] << 8) | (frame[0x2] << 16) | (frame[0x3] << 24));
WREG32(offset+R600_HDMI_AUDIOINFOFRAME_1,
WREG32(HDMI0_AUDIO_INFO1 + offset,
frame[0x4] | (frame[0x5] << 8) | (frame[0x6] << 16) | (frame[0x8] << 24));
}
/*
* test if audio buffer is filled enough to start playing
*/
static int r600_hdmi_is_audio_buffer_filled(struct drm_encoder *encoder)
static bool r600_hdmi_is_audio_buffer_filled(struct drm_encoder *encoder)
{
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
uint32_t offset = dig->afmt->offset;
return (RREG32(offset+R600_HDMI_STATUS) & 0x10) != 0;
return (RREG32(HDMI0_STATUS + offset) & 0x10) != 0;
}
/*
@ -266,14 +275,15 @@ static int r600_hdmi_is_audio_buffer_filled(struct drm_encoder *encoder)
int r600_hdmi_buffer_status_changed(struct drm_encoder *encoder)
{
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
int status, result;
if (!radeon_encoder->hdmi_offset)
if (!dig->afmt || !dig->afmt->enabled)
return 0;
status = r600_hdmi_is_audio_buffer_filled(encoder);
result = radeon_encoder->hdmi_buffer_status != status;
radeon_encoder->hdmi_buffer_status = status;
result = dig->afmt->last_buffer_filled_status != status;
dig->afmt->last_buffer_filled_status = status;
return result;
}
@ -281,26 +291,23 @@ int r600_hdmi_buffer_status_changed(struct drm_encoder *encoder)
/*
* write the audio workaround status to the hardware
*/
void r600_hdmi_audio_workaround(struct drm_encoder *encoder)
static void r600_hdmi_audio_workaround(struct drm_encoder *encoder)
{
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
uint32_t offset = radeon_encoder->hdmi_offset;
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
uint32_t offset = dig->afmt->offset;
bool hdmi_audio_workaround = false; /* FIXME */
u32 value;
if (!offset)
return;
if (!radeon_encoder->hdmi_audio_workaround ||
r600_hdmi_is_audio_buffer_filled(encoder)) {
/* disable audio workaround */
WREG32_P(offset+R600_HDMI_CNTL, 0x00000001, ~0x00001001);
} else {
/* enable audio workaround */
WREG32_P(offset+R600_HDMI_CNTL, 0x00001001, ~0x00001001);
}
if (!hdmi_audio_workaround ||
r600_hdmi_is_audio_buffer_filled(encoder))
value = 0; /* disable workaround */
else
value = HDMI0_AUDIO_TEST_EN; /* enable workaround */
WREG32_P(HDMI0_AUDIO_PACKET_CONTROL + offset,
value, ~HDMI0_AUDIO_TEST_EN);
}
@ -311,41 +318,74 @@ void r600_hdmi_setmode(struct drm_encoder *encoder, struct drm_display_mode *mod
{
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
uint32_t offset;
if (ASIC_IS_DCE4(rdev))
/* Silent, r600_hdmi_enable will raise WARN for us */
if (!dig->afmt->enabled)
return;
offset = dig->afmt->offset;
if (!offset)
return;
// r600_audio_set_clock(encoder, mode->clock);
r600_audio_set_clock(encoder, mode->clock);
WREG32(HDMI0_VBI_PACKET_CONTROL + offset,
HDMI0_NULL_SEND); /* send null packets when required */
WREG32(offset+R600_HDMI_UNKNOWN_0, 0x1000);
WREG32(offset+R600_HDMI_UNKNOWN_1, 0x0);
WREG32(offset+R600_HDMI_UNKNOWN_2, 0x1000);
WREG32(HDMI0_AUDIO_CRC_CONTROL + offset, 0x1000);
r600_hdmi_update_ACR(encoder, mode->clock);
if (ASIC_IS_DCE32(rdev)) {
WREG32(HDMI0_AUDIO_PACKET_CONTROL + offset,
HDMI0_AUDIO_DELAY_EN(1) | /* default audio delay */
HDMI0_AUDIO_PACKETS_PER_LINE(3)); /* should be sufficient for all audio modes and small enough for all hblanks */
WREG32(AFMT_AUDIO_PACKET_CONTROL + offset,
AFMT_AUDIO_SAMPLE_SEND | /* send audio packets */
AFMT_60958_CS_UPDATE); /* allow 60958 channel status fields to be updated */
} else {
WREG32(HDMI0_AUDIO_PACKET_CONTROL + offset,
HDMI0_AUDIO_SAMPLE_SEND | /* send audio packets */
HDMI0_AUDIO_DELAY_EN(1) | /* default audio delay */
HDMI0_AUDIO_PACKETS_PER_LINE(3) | /* should be sufficient for all audio modes and small enough for all hblanks */
HDMI0_60958_CS_UPDATE); /* allow 60958 channel status fields to be updated */
}
WREG32(offset+R600_HDMI_VIDEOCNTL, 0x13);
WREG32(HDMI0_ACR_PACKET_CONTROL + offset,
HDMI0_ACR_AUTO_SEND | /* allow hw to send ACR packets when required */
HDMI0_ACR_SOURCE); /* select SW CTS value */
WREG32(offset+R600_HDMI_VERSION, 0x202);
WREG32(HDMI0_VBI_PACKET_CONTROL + offset,
HDMI0_NULL_SEND | /* send null packets when required */
HDMI0_GC_SEND | /* send general control packets */
HDMI0_GC_CONT); /* send general control packets every frame */
/* TODO: HDMI0_AUDIO_INFO_UPDATE */
WREG32(HDMI0_INFOFRAME_CONTROL0 + offset,
HDMI0_AVI_INFO_SEND | /* enable AVI info frames */
HDMI0_AVI_INFO_CONT | /* send AVI info frames every frame/field */
HDMI0_AUDIO_INFO_SEND | /* enable audio info frames (frames won't be set until audio is enabled) */
HDMI0_AUDIO_INFO_CONT); /* send audio info frames every frame/field */
WREG32(HDMI0_INFOFRAME_CONTROL1 + offset,
HDMI0_AVI_INFO_LINE(2) | /* anything other than 0 */
HDMI0_AUDIO_INFO_LINE(2)); /* anything other than 0 */
WREG32(HDMI0_GC + offset, 0); /* unset HDMI0_GC_AVMUTE */
r600_hdmi_videoinfoframe(encoder, RGB, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
r600_hdmi_update_ACR(encoder, mode->clock);
/* it's unknown what these bits do exactly, but they're quite useful for debugging */
WREG32(offset+R600_HDMI_AUDIO_DEBUG_0, 0x00FFFFFF);
WREG32(offset+R600_HDMI_AUDIO_DEBUG_1, 0x007FFFFF);
WREG32(offset+R600_HDMI_AUDIO_DEBUG_2, 0x00000001);
WREG32(offset+R600_HDMI_AUDIO_DEBUG_3, 0x00000001);
WREG32(HDMI0_RAMP_CONTROL0 + offset, 0x00FFFFFF);
WREG32(HDMI0_RAMP_CONTROL1 + offset, 0x007FFFFF);
WREG32(HDMI0_RAMP_CONTROL2 + offset, 0x00000001);
WREG32(HDMI0_RAMP_CONTROL3 + offset, 0x00000001);
r600_hdmi_audio_workaround(encoder);
/* audio packets per line, does anyone know how to calc this ? */
WREG32_P(offset+R600_HDMI_CNTL, 0x00040000, ~0x001F0000);
}
#if 0
/*
* update settings with current parameters from audio engine
*/
@ -353,126 +393,82 @@ void r600_hdmi_update_audio_settings(struct drm_encoder *encoder)
{
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
uint32_t offset = to_radeon_encoder(encoder)->hdmi_offset;
int channels = r600_audio_channels(rdev);
int rate = r600_audio_rate(rdev);
int bps = r600_audio_bits_per_sample(rdev);
uint8_t status_bits = r600_audio_status_bits(rdev);
uint8_t category_code = r600_audio_category_code(rdev);
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
struct r600_audio audio = r600_audio_status(rdev);
uint32_t offset;
uint32_t iec;
if (!offset)
if (!dig->afmt || !dig->afmt->enabled)
return;
offset = dig->afmt->offset;
DRM_DEBUG("%s with %d channels, %d Hz sampling rate, %d bits per sample,\n",
r600_hdmi_is_audio_buffer_filled(encoder) ? "playing" : "stopped",
channels, rate, bps);
audio.channels, audio.rate, audio.bits_per_sample);
DRM_DEBUG("0x%02X IEC60958 status bits and 0x%02X category code\n",
(int)status_bits, (int)category_code);
(int)audio.status_bits, (int)audio.category_code);
iec = 0;
if (status_bits & AUDIO_STATUS_PROFESSIONAL)
if (audio.status_bits & AUDIO_STATUS_PROFESSIONAL)
iec |= 1 << 0;
if (status_bits & AUDIO_STATUS_NONAUDIO)
if (audio.status_bits & AUDIO_STATUS_NONAUDIO)
iec |= 1 << 1;
if (status_bits & AUDIO_STATUS_COPYRIGHT)
if (audio.status_bits & AUDIO_STATUS_COPYRIGHT)
iec |= 1 << 2;
if (status_bits & AUDIO_STATUS_EMPHASIS)
if (audio.status_bits & AUDIO_STATUS_EMPHASIS)
iec |= 1 << 3;
iec |= category_code << 8;
iec |= HDMI0_60958_CS_CATEGORY_CODE(audio.category_code);
switch (rate) {
case 32000: iec |= 0x3 << 24; break;
case 44100: iec |= 0x0 << 24; break;
case 88200: iec |= 0x8 << 24; break;
case 176400: iec |= 0xc << 24; break;
case 48000: iec |= 0x2 << 24; break;
case 96000: iec |= 0xa << 24; break;
case 192000: iec |= 0xe << 24; break;
switch (audio.rate) {
case 32000:
iec |= HDMI0_60958_CS_SAMPLING_FREQUENCY(0x3);
break;
case 44100:
iec |= HDMI0_60958_CS_SAMPLING_FREQUENCY(0x0);
break;
case 48000:
iec |= HDMI0_60958_CS_SAMPLING_FREQUENCY(0x2);
break;
case 88200:
iec |= HDMI0_60958_CS_SAMPLING_FREQUENCY(0x8);
break;
case 96000:
iec |= HDMI0_60958_CS_SAMPLING_FREQUENCY(0xa);
break;
case 176400:
iec |= HDMI0_60958_CS_SAMPLING_FREQUENCY(0xc);
break;
case 192000:
iec |= HDMI0_60958_CS_SAMPLING_FREQUENCY(0xe);
break;
}
WREG32(offset+R600_HDMI_IEC60958_1, iec);
WREG32(HDMI0_60958_0 + offset, iec);
iec = 0;
switch (bps) {
case 16: iec |= 0x2; break;
case 20: iec |= 0x3; break;
case 24: iec |= 0xb; break;
}
if (status_bits & AUDIO_STATUS_V)
switch (audio.bits_per_sample) {
case 16:
iec |= HDMI0_60958_CS_WORD_LENGTH(0x2);
break;
case 20:
iec |= HDMI0_60958_CS_WORD_LENGTH(0x3);
break;
case 24:
iec |= HDMI0_60958_CS_WORD_LENGTH(0xb);
break;
}
if (audio.status_bits & AUDIO_STATUS_V)
iec |= 0x5 << 16;
WREG32_P(HDMI0_60958_1 + offset, iec, ~0x5000f);
WREG32_P(offset+R600_HDMI_IEC60958_2, iec, ~0x5000f);
/* 0x021 or 0x031 sets the audio frame length */
WREG32(offset+R600_HDMI_AUDIOCNTL, 0x31);
r600_hdmi_audioinfoframe(encoder, channels-1, 0, 0, 0, 0, 0, 0, 0);
r600_hdmi_audioinfoframe(encoder, audio.channels - 1, 0, 0, 0, 0, 0, 0,
0);
r600_hdmi_audio_workaround(encoder);
}
static int r600_hdmi_find_free_block(struct drm_device *dev)
{
struct radeon_device *rdev = dev->dev_private;
struct drm_encoder *encoder;
struct radeon_encoder *radeon_encoder;
bool free_blocks[3] = { true, true, true };
list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
radeon_encoder = to_radeon_encoder(encoder);
switch (radeon_encoder->hdmi_offset) {
case R600_HDMI_BLOCK1:
free_blocks[0] = false;
break;
case R600_HDMI_BLOCK2:
free_blocks[1] = false;
break;
case R600_HDMI_BLOCK3:
free_blocks[2] = false;
break;
}
}
if (rdev->family == CHIP_RS600 || rdev->family == CHIP_RS690 ||
rdev->family == CHIP_RS740) {
return free_blocks[0] ? R600_HDMI_BLOCK1 : 0;
} else if (rdev->family >= CHIP_R600) {
if (free_blocks[0])
return R600_HDMI_BLOCK1;
else if (free_blocks[1])
return R600_HDMI_BLOCK2;
}
return 0;
}
static void r600_hdmi_assign_block(struct drm_encoder *encoder)
{
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
if (!dig) {
dev_err(rdev->dev, "Enabling HDMI on non-dig encoder\n");
return;
}
if (ASIC_IS_DCE4(rdev)) {
/* TODO */
} else if (ASIC_IS_DCE3(rdev)) {
radeon_encoder->hdmi_offset = dig->dig_encoder ?
R600_HDMI_BLOCK3 : R600_HDMI_BLOCK1;
if (ASIC_IS_DCE32(rdev))
radeon_encoder->hdmi_config_offset = dig->dig_encoder ?
R600_HDMI_CONFIG2 : R600_HDMI_CONFIG1;
} else if (rdev->family >= CHIP_R600 || rdev->family == CHIP_RS600 ||
rdev->family == CHIP_RS690 || rdev->family == CHIP_RS740) {
radeon_encoder->hdmi_offset = r600_hdmi_find_free_block(dev);
}
}
#endif
/*
* enable the HDMI engine
@ -482,56 +478,56 @@ void r600_hdmi_enable(struct drm_encoder *encoder)
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
uint32_t offset;
u32 hdmi;
if (ASIC_IS_DCE4(rdev))
if (ASIC_IS_DCE6(rdev))
return;
if (!radeon_encoder->hdmi_offset) {
r600_hdmi_assign_block(encoder);
if (!radeon_encoder->hdmi_offset) {
dev_warn(rdev->dev, "Could not find HDMI block for "
"0x%x encoder\n", radeon_encoder->encoder_id);
/* Silent, r600_hdmi_enable will raise WARN for us */
if (dig->afmt->enabled)
return;
}
}
offset = dig->afmt->offset;
offset = radeon_encoder->hdmi_offset;
if (ASIC_IS_DCE32(rdev) && !ASIC_IS_DCE4(rdev)) {
WREG32_P(radeon_encoder->hdmi_config_offset + 0x4, 0x1, ~0x1);
} else if (rdev->family >= CHIP_R600 && !ASIC_IS_DCE3(rdev)) {
/* Older chipsets require setting HDMI and routing manually */
if (rdev->family >= CHIP_R600 && !ASIC_IS_DCE3(rdev)) {
hdmi = HDMI0_ERROR_ACK | HDMI0_ENABLE;
switch (radeon_encoder->encoder_id) {
case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
WREG32_P(AVIVO_TMDSA_CNTL, 0x4, ~0x4);
WREG32(offset + R600_HDMI_ENABLE, 0x101);
WREG32_P(AVIVO_TMDSA_CNTL, AVIVO_TMDSA_CNTL_HDMI_EN,
~AVIVO_TMDSA_CNTL_HDMI_EN);
hdmi |= HDMI0_STREAM(HDMI0_STREAM_TMDSA);
break;
case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
WREG32_P(AVIVO_LVTMA_CNTL, 0x4, ~0x4);
WREG32(offset + R600_HDMI_ENABLE, 0x105);
WREG32_P(AVIVO_LVTMA_CNTL, AVIVO_LVTMA_CNTL_HDMI_EN,
~AVIVO_LVTMA_CNTL_HDMI_EN);
hdmi |= HDMI0_STREAM(HDMI0_STREAM_LVTMA);
break;
case ENCODER_OBJECT_ID_INTERNAL_DDI:
WREG32_P(DDIA_CNTL, DDIA_HDMI_EN, ~DDIA_HDMI_EN);
hdmi |= HDMI0_STREAM(HDMI0_STREAM_DDIA);
break;
case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
hdmi |= HDMI0_STREAM(HDMI0_STREAM_DVOA);
break;
default:
dev_err(rdev->dev, "Unknown HDMI output type\n");
dev_err(rdev->dev, "Invalid encoder for HDMI: 0x%X\n",
radeon_encoder->encoder_id);
break;
}
WREG32(HDMI0_CONTROL + offset, hdmi);
}
#if 0
if (rdev->irq.installed
&& rdev->family != CHIP_RS600
&& rdev->family != CHIP_RS690
&& rdev->family != CHIP_RS740) {
if (rdev->irq.installed) {
/* if irq is available use it */
rdev->irq.hdmi[offset == R600_HDMI_BLOCK1 ? 0 : 1] = true;
radeon_irq_set(rdev);
r600_audio_disable_polling(encoder);
} else {
/* if not fallback to polling */
r600_audio_enable_polling(encoder);
// radeon_irq_kms_enable_afmt(rdev, dig->afmt->id);
}
#endif
dig->afmt->enabled = true;
DRM_DEBUG("Enabling HDMI interface @ 0x%04X for encoder 0x%x\n",
radeon_encoder->hdmi_offset, radeon_encoder->encoder_id);
offset, radeon_encoder->encoder_id);
}
/*
@ -542,38 +538,50 @@ void r600_hdmi_disable(struct drm_encoder *encoder)
struct drm_device *dev = encoder->dev;
struct radeon_device *rdev = dev->dev_private;
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
uint32_t offset;
if (ASIC_IS_DCE4(rdev))
if (ASIC_IS_DCE6(rdev))
return;
offset = radeon_encoder->hdmi_offset;
if (!offset) {
dev_err(rdev->dev, "Disabling not enabled HDMI\n");
/* Called for ATOM_ENCODER_MODE_HDMI only */
if (!dig || !dig->afmt) {
WARN_ON(1);
return;
}
if (!dig->afmt->enabled)
return;
offset = dig->afmt->offset;
DRM_DEBUG("Disabling HDMI interface @ 0x%04X for encoder 0x%x\n",
offset, radeon_encoder->encoder_id);
if (ASIC_IS_DCE32(rdev) && !ASIC_IS_DCE4(rdev)) {
WREG32_P(radeon_encoder->hdmi_config_offset + 0x4, 0, ~0x1);
} else if (rdev->family >= CHIP_R600 && !ASIC_IS_DCE3(rdev)) {
/* disable irq */
// radeon_irq_kms_disable_afmt(rdev, dig->afmt->id);
/* Older chipsets not handled by AtomBIOS */
if (rdev->family >= CHIP_R600 && !ASIC_IS_DCE3(rdev)) {
switch (radeon_encoder->encoder_id) {
case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
WREG32_P(AVIVO_TMDSA_CNTL, 0, ~0x4);
WREG32(offset + R600_HDMI_ENABLE, 0);
WREG32_P(AVIVO_TMDSA_CNTL, 0,
~AVIVO_TMDSA_CNTL_HDMI_EN);
break;
case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
WREG32_P(AVIVO_LVTMA_CNTL, 0, ~0x4);
WREG32(offset + R600_HDMI_ENABLE, 0);
WREG32_P(AVIVO_LVTMA_CNTL, 0,
~AVIVO_LVTMA_CNTL_HDMI_EN);
break;
case ENCODER_OBJECT_ID_INTERNAL_DDI:
WREG32_P(DDIA_CNTL, 0, ~DDIA_HDMI_EN);
break;
case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
break;
default:
dev_err(rdev->dev, "Unknown HDMI output type\n");
dev_err(rdev->dev, "Invalid encoder for HDMI: 0x%X\n",
radeon_encoder->encoder_id);
break;
}
WREG32(HDMI0_CONTROL + offset, HDMI0_ERROR_ACK);
}
radeon_encoder->hdmi_offset = 0;
radeon_encoder->hdmi_config_offset = 0;
dig->afmt->enabled = false;
}

View File

@ -156,45 +156,10 @@
#define R600_AUDIO_PIN_WIDGET_CNTL 0x73d4
#define R600_AUDIO_STATUS_BITS 0x73d8
/* HDMI base register addresses */
#define R600_HDMI_BLOCK1 0x7400
#define R600_HDMI_BLOCK2 0x7700
#define R600_HDMI_BLOCK3 0x7800
/* HDMI registers */
#define R600_HDMI_ENABLE 0x00
#define R600_HDMI_STATUS 0x04
# define R600_HDMI_INT_PENDING (1 << 29)
#define R600_HDMI_CNTL 0x08
# define R600_HDMI_INT_EN (1 << 28)
# define R600_HDMI_INT_ACK (1 << 29)
#define R600_HDMI_UNKNOWN_0 0x0C
#define R600_HDMI_AUDIOCNTL 0x10
#define R600_HDMI_VIDEOCNTL 0x14
#define R600_HDMI_VERSION 0x18
#define R600_HDMI_UNKNOWN_1 0x28
#define R600_HDMI_VIDEOINFOFRAME_0 0x54
#define R600_HDMI_VIDEOINFOFRAME_1 0x58
#define R600_HDMI_VIDEOINFOFRAME_2 0x5c
#define R600_HDMI_VIDEOINFOFRAME_3 0x60
#define R600_HDMI_32kHz_CTS 0xac
#define R600_HDMI_32kHz_N 0xb0
#define R600_HDMI_44_1kHz_CTS 0xb4
#define R600_HDMI_44_1kHz_N 0xb8
#define R600_HDMI_48kHz_CTS 0xbc
#define R600_HDMI_48kHz_N 0xc0
#define R600_HDMI_AUDIOINFOFRAME_0 0xcc
#define R600_HDMI_AUDIOINFOFRAME_1 0xd0
#define R600_HDMI_IEC60958_1 0xd4
#define R600_HDMI_IEC60958_2 0xd8
#define R600_HDMI_UNKNOWN_2 0xdc
#define R600_HDMI_AUDIO_DEBUG_0 0xe0
#define R600_HDMI_AUDIO_DEBUG_1 0xe4
#define R600_HDMI_AUDIO_DEBUG_2 0xe8
#define R600_HDMI_AUDIO_DEBUG_3 0xec
/* HDMI additional config base register addresses */
#define R600_HDMI_CONFIG1 0x7600
#define R600_HDMI_CONFIG2 0x7a00
#define DCE2_HDMI_OFFSET0 (0x7400 - 0x7400)
#define DCE2_HDMI_OFFSET1 (0x7700 - 0x7400)
/* DCE3.2 second instance starts at 0x7800 */
#define DCE3_HDMI_OFFSET0 (0x7400 - 0x7400)
#define DCE3_HDMI_OFFSET1 (0x7800 - 0x7400)
#endif
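A short usage note, inferred from the r600_hdmi_*() functions above: dig->afmt->offset holds one of these DCE*_HDMI_OFFSET* values, and it is simply added to the HDMI0_* register addresses, for example:

	status = RREG32(HDMI0_STATUS + dig->afmt->offset);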

View File

@ -709,7 +709,7 @@ int r600_video_blit(uint64_t src_offset, int x, int y,
mutex_lock(&rdev->r600_video.mutex);
rdev->r600_video.vb_ib = NULL;
r = r600_video_prepare_copy(rdev, h*pitch);
r = r600_video_prepare_copy(rdev, w*4);
if (r) {
// if (rdev->r600_blit.vb_ib)
// radeon_ib_free(rdev, &rdev->r600_blit.vb_ib);

View File

@ -66,6 +66,14 @@
#define CC_RB_BACKEND_DISABLE 0x98F4
#define BACKEND_DISABLE(x) ((x) << 16)
#define R_028808_CB_COLOR_CONTROL 0x28808
#define S_028808_SPECIAL_OP(x) (((x) & 0x7) << 4)
#define G_028808_SPECIAL_OP(x) (((x) >> 4) & 0x7)
#define C_028808_SPECIAL_OP 0xFFFFFF8F
#define V_028808_SPECIAL_NORMAL 0x00
#define V_028808_SPECIAL_DISABLE 0x01
#define V_028808_SPECIAL_RESOLVE_BOX 0x07
#define CB_COLOR0_BASE 0x28040
#define CB_COLOR1_BASE 0x28044
#define CB_COLOR2_BASE 0x28048
@ -78,7 +86,40 @@
#define CB_COLOR0_SIZE 0x28060
#define CB_COLOR0_VIEW 0x28080
#define R_028080_CB_COLOR0_VIEW 0x028080
#define S_028080_SLICE_START(x) (((x) & 0x7FF) << 0)
#define G_028080_SLICE_START(x) (((x) >> 0) & 0x7FF)
#define C_028080_SLICE_START 0xFFFFF800
#define S_028080_SLICE_MAX(x) (((x) & 0x7FF) << 13)
#define G_028080_SLICE_MAX(x) (((x) >> 13) & 0x7FF)
#define C_028080_SLICE_MAX 0xFF001FFF
#define R_028084_CB_COLOR1_VIEW 0x028084
#define R_028088_CB_COLOR2_VIEW 0x028088
#define R_02808C_CB_COLOR3_VIEW 0x02808C
#define R_028090_CB_COLOR4_VIEW 0x028090
#define R_028094_CB_COLOR5_VIEW 0x028094
#define R_028098_CB_COLOR6_VIEW 0x028098
#define R_02809C_CB_COLOR7_VIEW 0x02809C
#define R_028100_CB_COLOR0_MASK 0x028100
#define S_028100_CMASK_BLOCK_MAX(x) (((x) & 0xFFF) << 0)
#define G_028100_CMASK_BLOCK_MAX(x) (((x) >> 0) & 0xFFF)
#define C_028100_CMASK_BLOCK_MAX 0xFFFFF000
#define S_028100_FMASK_TILE_MAX(x) (((x) & 0xFFFFF) << 12)
#define G_028100_FMASK_TILE_MAX(x) (((x) >> 12) & 0xFFFFF)
#define C_028100_FMASK_TILE_MAX 0x00000FFF
#define R_028104_CB_COLOR1_MASK 0x028104
#define R_028108_CB_COLOR2_MASK 0x028108
#define R_02810C_CB_COLOR3_MASK 0x02810C
#define R_028110_CB_COLOR4_MASK 0x028110
#define R_028114_CB_COLOR5_MASK 0x028114
#define R_028118_CB_COLOR6_MASK 0x028118
#define R_02811C_CB_COLOR7_MASK 0x02811C
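/* Usage sketch (illustrative, but it follows directly from the macro shapes
 * above): S_* packs a field, G_* extracts it, and C_* is the clear mask for
 * a read-modify-write, e.g.
 *
 *	u32 v = RREG32(R_028100_CB_COLOR0_MASK);
 *	v = (v & C_028100_CMASK_BLOCK_MAX) | S_028100_CMASK_BLOCK_MAX(max);
 *	WREG32(R_028100_CB_COLOR0_MASK, v);
 */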
#define CB_COLOR0_INFO 0x280a0
# define CB_FORMAT(x) ((x) << 2)
# define CB_ARRAY_MODE(x) ((x) << 8)
# define CB_SOURCE_FORMAT(x) ((x) << 27)
# define CB_SF_EXPORT_FULL 0
# define CB_SF_EXPORT_NORM 1
#define CB_COLOR0_TILE 0x280c0
#define CB_COLOR0_FRAG 0x280e0
#define CB_COLOR0_MASK 0x28100
@ -134,6 +175,9 @@
#define CONFIG_MEMSIZE 0x5428
#define CONFIG_CNTL 0x5424
#define CP_STALLED_STAT1 0x8674
#define CP_STALLED_STAT2 0x8678
#define CP_BUSY_STAT 0x867C
#define CP_STAT 0x8680
#define CP_COHER_BASE 0x85F8
#define CP_DEBUG 0xC1FC
@ -176,6 +220,14 @@
#define PREZ_MUST_WAIT_FOR_POSTZ_DONE (1 << 31)
#define DB_DEPTH_BASE 0x2800C
#define DB_HTILE_DATA_BASE 0x28014
#define DB_HTILE_SURFACE 0x28D24
#define S_028D24_HTILE_WIDTH(x) (((x) & 0x1) << 0)
#define G_028D24_HTILE_WIDTH(x) (((x) >> 0) & 0x1)
#define C_028D24_HTILE_WIDTH 0xFFFFFFFE
#define S_028D24_HTILE_HEIGHT(x) (((x) & 0x1) << 1)
#define G_028D24_HTILE_HEIGHT(x) (((x) >> 1) & 0x1)
#define C_028D24_HTILE_HEIGHT 0xFFFFFFFD
#define G_028D24_LINEAR(x) (((x) >> 2) & 0x1)
#define DB_WATERMARKS 0x9838
#define DEPTH_FREE(x) ((x) << 0)
#define DEPTH_FLUSH(x) ((x) << 5)
@ -192,6 +244,8 @@
#define BACKEND_MAP(x) ((x) << 16)
#define GB_TILING_CONFIG 0x98F0
#define PIPE_TILING__SHIFT 1
#define PIPE_TILING__MASK 0x0000000e
#define GC_USER_SHADER_PIPE_CONFIG 0x8954
#define INACTIVE_QD_PIPES(x) ((x) << 8)
@ -417,6 +471,17 @@
#define SQ_PGM_START_VS 0x28858
#define SQ_PGM_RESOURCES_VS 0x28868
#define SQ_PGM_CF_OFFSET_VS 0x288d0
#define SQ_VTX_CONSTANT_WORD0_0 0x30000
#define SQ_VTX_CONSTANT_WORD1_0 0x30004
#define SQ_VTX_CONSTANT_WORD2_0 0x30008
# define SQ_VTXC_BASE_ADDR_HI(x) ((x) << 0)
# define SQ_VTXC_STRIDE(x) ((x) << 8)
# define SQ_VTXC_ENDIAN_SWAP(x) ((x) << 30)
# define SQ_ENDIAN_NONE 0
# define SQ_ENDIAN_8IN16 1
# define SQ_ENDIAN_8IN32 2
#define SQ_VTX_CONSTANT_WORD3_0 0x3000c
#define SQ_VTX_CONSTANT_WORD6_0 0x38018
#define S__SQ_VTX_CONSTANT_TYPE(x) (((x) & 3) << 30)
#define G__SQ_VTX_CONSTANT_TYPE(x) (((x) >> 30) & 3)
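/* Illustrative only (field widths beyond these macros are not shown here):
 * WORD2 of a vertex-fetch constant is assembled from the helpers above, e.g.
 * for the 16-byte-per-vertex buffer used by the blit code:
 *
 *	u32 word2 = SQ_VTXC_BASE_ADDR_HI(upper_32_bits(gpu_addr) & 0xff) |
 *		    SQ_VTXC_STRIDE(16) |
 *		    SQ_VTXC_ENDIAN_SWAP(SQ_ENDIAN_NONE);
 */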
@ -445,6 +510,7 @@
#define TC_L2_SIZE(x) ((x)<<5)
#define L2_DISABLE_LATE_HIT (1<<9)
#define VC_ENHANCE 0x9714
#define VGT_CACHE_INVALIDATION 0x88C4
#define CACHE_INVALIDATION(x) ((x)<<0)
@ -477,6 +543,11 @@
#define VGT_STRMOUT_BUFFER_OFFSET_1 0x28AEC
#define VGT_STRMOUT_BUFFER_OFFSET_2 0x28AFC
#define VGT_STRMOUT_BUFFER_OFFSET_3 0x28B0C
#define VGT_STRMOUT_BUFFER_SIZE_0 0x28AD0
#define VGT_STRMOUT_BUFFER_SIZE_1 0x28AE0
#define VGT_STRMOUT_BUFFER_SIZE_2 0x28AF0
#define VGT_STRMOUT_BUFFER_SIZE_3 0x28B00
#define VGT_STRMOUT_EN 0x28AB0
#define VGT_VERTEX_REUSE_BLOCK_CNTL 0x28C58
#define VTX_REUSE_DEPTH_MASK 0x000000FF
@ -553,11 +624,18 @@
#define RLC_HB_WPTR 0x3f1c
#define RLC_HB_WPTR_LSB_ADDR 0x3f14
#define RLC_HB_WPTR_MSB_ADDR 0x3f18
#define RLC_GPU_CLOCK_COUNT_LSB 0x3f38
#define RLC_GPU_CLOCK_COUNT_MSB 0x3f3c
#define RLC_CAPTURE_GPU_CLOCK_COUNT 0x3f40
#define RLC_MC_CNTL 0x3f44
#define RLC_UCODE_CNTL 0x3f48
#define RLC_UCODE_ADDR 0x3f2c
#define RLC_UCODE_DATA 0x3f30
/* new for TN */
#define TN_RLC_SAVE_AND_RESTORE_BASE 0x3f10
#define TN_RLC_CLEAR_STATE_RESTORE_BASE 0x3f20
#define SRBM_SOFT_RESET 0xe60
# define SOFT_RESET_RLC (1 << 13)
@ -777,6 +855,239 @@
# define TARGET_LINK_SPEED_MASK (0xf << 0)
# define SELECTABLE_DEEMPHASIS (1 << 6)
/* Audio clocks */
#define DCCG_AUDIO_DTO0_PHASE 0x0514
#define DCCG_AUDIO_DTO0_MODULE 0x0518
#define DCCG_AUDIO_DTO0_LOAD 0x051c
# define DTO_LOAD (1 << 31)
#define DCCG_AUDIO_DTO0_CNTL 0x0520
#define DCCG_AUDIO_DTO1_PHASE 0x0524
#define DCCG_AUDIO_DTO1_MODULE 0x0528
#define DCCG_AUDIO_DTO1_LOAD 0x052c
#define DCCG_AUDIO_DTO1_CNTL 0x0530
#define DCCG_AUDIO_DTO_SELECT 0x0534
/* digital blocks */
#define TMDSA_CNTL 0x7880
# define TMDSA_HDMI_EN (1 << 2)
#define LVTMA_CNTL 0x7a80
# define LVTMA_HDMI_EN (1 << 2)
#define DDIA_CNTL 0x7200
# define DDIA_HDMI_EN (1 << 2)
#define DIG0_CNTL 0x75a0
# define DIG_MODE(x) (((x) & 7) << 8)
# define DIG_MODE_DP 0
# define DIG_MODE_LVDS 1
# define DIG_MODE_TMDS_DVI 2
# define DIG_MODE_TMDS_HDMI 3
# define DIG_MODE_SDVO 4
#define DIG1_CNTL 0x79a0
/* rs6xx/rs740 and r6xx share the same HDMI blocks; however, rs6xx has only one
 * instance of the blocks while r6xx has two.  DCE 3.0 cards are slightly
 * different due to the new DIG blocks, but also have two instances.
 * DCE 3.0 HDMI blocks are part of each DIG encoder.
 */
/* rs6xx/rs740/r6xx/dce3 */
#define HDMI0_CONTROL 0x7400
/* rs6xx/rs740/r6xx */
# define HDMI0_ENABLE (1 << 0)
# define HDMI0_STREAM(x) (((x) & 3) << 2)
# define HDMI0_STREAM_TMDSA 0
# define HDMI0_STREAM_LVTMA 1
# define HDMI0_STREAM_DVOA 2
# define HDMI0_STREAM_DDIA 3
/* rs6xx/r6xx/dce3 */
# define HDMI0_ERROR_ACK (1 << 8)
# define HDMI0_ERROR_MASK (1 << 9)
#define HDMI0_STATUS 0x7404
# define HDMI0_ACTIVE_AVMUTE (1 << 0)
# define HDMI0_AUDIO_ENABLE (1 << 4)
# define HDMI0_AZ_FORMAT_WTRIG (1 << 28)
# define HDMI0_AZ_FORMAT_WTRIG_INT (1 << 29)
#define HDMI0_AUDIO_PACKET_CONTROL 0x7408
# define HDMI0_AUDIO_SAMPLE_SEND (1 << 0)
# define HDMI0_AUDIO_DELAY_EN(x) (((x) & 3) << 4)
# define HDMI0_AUDIO_SEND_MAX_PACKETS (1 << 8)
# define HDMI0_AUDIO_TEST_EN (1 << 12)
# define HDMI0_AUDIO_PACKETS_PER_LINE(x) (((x) & 0x1f) << 16)
# define HDMI0_AUDIO_CHANNEL_SWAP (1 << 24)
# define HDMI0_60958_CS_UPDATE (1 << 26)
# define HDMI0_AZ_FORMAT_WTRIG_MASK (1 << 28)
# define HDMI0_AZ_FORMAT_WTRIG_ACK (1 << 29)
#define HDMI0_AUDIO_CRC_CONTROL 0x740c
# define HDMI0_AUDIO_CRC_EN (1 << 0)
#define HDMI0_VBI_PACKET_CONTROL 0x7410
# define HDMI0_NULL_SEND (1 << 0)
# define HDMI0_GC_SEND (1 << 4)
# define HDMI0_GC_CONT (1 << 5) /* 0 - once; 1 - every frame */
#define HDMI0_INFOFRAME_CONTROL0 0x7414
# define HDMI0_AVI_INFO_SEND (1 << 0)
# define HDMI0_AVI_INFO_CONT (1 << 1)
# define HDMI0_AUDIO_INFO_SEND (1 << 4)
# define HDMI0_AUDIO_INFO_CONT (1 << 5)
# define HDMI0_AUDIO_INFO_SOURCE (1 << 6) /* 0 - sound block; 1 - hdmi regs */
# define HDMI0_AUDIO_INFO_UPDATE (1 << 7)
# define HDMI0_MPEG_INFO_SEND (1 << 8)
# define HDMI0_MPEG_INFO_CONT (1 << 9)
# define HDMI0_MPEG_INFO_UPDATE (1 << 10)
#define HDMI0_INFOFRAME_CONTROL1 0x7418
# define HDMI0_AVI_INFO_LINE(x) (((x) & 0x3f) << 0)
# define HDMI0_AUDIO_INFO_LINE(x) (((x) & 0x3f) << 8)
# define HDMI0_MPEG_INFO_LINE(x) (((x) & 0x3f) << 16)
#define HDMI0_GENERIC_PACKET_CONTROL 0x741c
# define HDMI0_GENERIC0_SEND (1 << 0)
# define HDMI0_GENERIC0_CONT (1 << 1)
# define HDMI0_GENERIC0_UPDATE (1 << 2)
# define HDMI0_GENERIC1_SEND (1 << 4)
# define HDMI0_GENERIC1_CONT (1 << 5)
# define HDMI0_GENERIC0_LINE(x) (((x) & 0x3f) << 16)
# define HDMI0_GENERIC1_LINE(x) (((x) & 0x3f) << 24)
#define HDMI0_GC 0x7428
# define HDMI0_GC_AVMUTE (1 << 0)
#define HDMI0_AVI_INFO0 0x7454
# define HDMI0_AVI_INFO_CHECKSUM(x) (((x) & 0xff) << 0)
# define HDMI0_AVI_INFO_S(x) (((x) & 3) << 8)
# define HDMI0_AVI_INFO_B(x) (((x) & 3) << 10)
# define HDMI0_AVI_INFO_A(x) (((x) & 1) << 12)
# define HDMI0_AVI_INFO_Y(x) (((x) & 3) << 13)
# define HDMI0_AVI_INFO_Y_RGB 0
# define HDMI0_AVI_INFO_Y_YCBCR422 1
# define HDMI0_AVI_INFO_Y_YCBCR444 2
# define HDMI0_AVI_INFO_Y_A_B_S(x) (((x) & 0xff) << 8)
# define HDMI0_AVI_INFO_R(x) (((x) & 0xf) << 16)
# define HDMI0_AVI_INFO_M(x) (((x) & 0x3) << 20)
# define HDMI0_AVI_INFO_C(x) (((x) & 0x3) << 22)
# define HDMI0_AVI_INFO_C_M_R(x) (((x) & 0xff) << 16)
# define HDMI0_AVI_INFO_SC(x) (((x) & 0x3) << 24)
# define HDMI0_AVI_INFO_ITC_EC_Q_SC(x) (((x) & 0xff) << 24)
#define HDMI0_AVI_INFO1 0x7458
# define HDMI0_AVI_INFO_VIC(x) (((x) & 0x7f) << 0) /* don't use avi infoframe v1 */
# define HDMI0_AVI_INFO_PR(x) (((x) & 0xf) << 8) /* don't use avi infoframe v1 */
# define HDMI0_AVI_INFO_TOP(x) (((x) & 0xffff) << 16)
#define HDMI0_AVI_INFO2 0x745c
# define HDMI0_AVI_INFO_BOTTOM(x) (((x) & 0xffff) << 0)
# define HDMI0_AVI_INFO_LEFT(x) (((x) & 0xffff) << 16)
#define HDMI0_AVI_INFO3 0x7460
# define HDMI0_AVI_INFO_RIGHT(x) (((x) & 0xffff) << 0)
# define HDMI0_AVI_INFO_VERSION(x) (((x) & 3) << 24)
#define HDMI0_MPEG_INFO0 0x7464
# define HDMI0_MPEG_INFO_CHECKSUM(x) (((x) & 0xff) << 0)
# define HDMI0_MPEG_INFO_MB0(x) (((x) & 0xff) << 8)
# define HDMI0_MPEG_INFO_MB1(x) (((x) & 0xff) << 16)
# define HDMI0_MPEG_INFO_MB2(x) (((x) & 0xff) << 24)
#define HDMI0_MPEG_INFO1 0x7468
# define HDMI0_MPEG_INFO_MB3(x) (((x) & 0xff) << 0)
# define HDMI0_MPEG_INFO_MF(x) (((x) & 3) << 8)
# define HDMI0_MPEG_INFO_FR(x) (((x) & 1) << 12)
#define HDMI0_GENERIC0_HDR 0x746c
#define HDMI0_GENERIC0_0 0x7470
#define HDMI0_GENERIC0_1 0x7474
#define HDMI0_GENERIC0_2 0x7478
#define HDMI0_GENERIC0_3 0x747c
#define HDMI0_GENERIC0_4 0x7480
#define HDMI0_GENERIC0_5 0x7484
#define HDMI0_GENERIC0_6 0x7488
#define HDMI0_GENERIC1_HDR 0x748c
#define HDMI0_GENERIC1_0 0x7490
#define HDMI0_GENERIC1_1 0x7494
#define HDMI0_GENERIC1_2 0x7498
#define HDMI0_GENERIC1_3 0x749c
#define HDMI0_GENERIC1_4 0x74a0
#define HDMI0_GENERIC1_5 0x74a4
#define HDMI0_GENERIC1_6 0x74a8
#define HDMI0_ACR_32_0 0x74ac
# define HDMI0_ACR_CTS_32(x) (((x) & 0xfffff) << 12)
#define HDMI0_ACR_32_1 0x74b0
# define HDMI0_ACR_N_32(x) (((x) & 0xfffff) << 0)
#define HDMI0_ACR_44_0 0x74b4
# define HDMI0_ACR_CTS_44(x) (((x) & 0xfffff) << 12)
#define HDMI0_ACR_44_1 0x74b8
# define HDMI0_ACR_N_44(x) (((x) & 0xfffff) << 0)
#define HDMI0_ACR_48_0 0x74bc
# define HDMI0_ACR_CTS_48(x) (((x) & 0xfffff) << 12)
#define HDMI0_ACR_48_1 0x74c0
# define HDMI0_ACR_N_48(x) (((x) & 0xfffff) << 0)
#define HDMI0_ACR_STATUS_0 0x74c4
#define HDMI0_ACR_STATUS_1 0x74c8
#define HDMI0_AUDIO_INFO0 0x74cc
# define HDMI0_AUDIO_INFO_CHECKSUM(x) (((x) & 0xff) << 0)
# define HDMI0_AUDIO_INFO_CC(x) (((x) & 7) << 8)
#define HDMI0_AUDIO_INFO1 0x74d0
# define HDMI0_AUDIO_INFO_CA(x) (((x) & 0xff) << 0)
# define HDMI0_AUDIO_INFO_LSV(x) (((x) & 0xf) << 11)
# define HDMI0_AUDIO_INFO_DM_INH(x) (((x) & 1) << 15)
# define HDMI0_AUDIO_INFO_DM_INH_LSV(x) (((x) & 0xff) << 8)
#define HDMI0_60958_0 0x74d4
# define HDMI0_60958_CS_A(x) (((x) & 1) << 0)
# define HDMI0_60958_CS_B(x) (((x) & 1) << 1)
# define HDMI0_60958_CS_C(x) (((x) & 1) << 2)
# define HDMI0_60958_CS_D(x) (((x) & 3) << 3)
# define HDMI0_60958_CS_MODE(x) (((x) & 3) << 6)
# define HDMI0_60958_CS_CATEGORY_CODE(x) (((x) & 0xff) << 8)
# define HDMI0_60958_CS_SOURCE_NUMBER(x) (((x) & 0xf) << 16)
# define HDMI0_60958_CS_CHANNEL_NUMBER_L(x) (((x) & 0xf) << 20)
# define HDMI0_60958_CS_SAMPLING_FREQUENCY(x) (((x) & 0xf) << 24)
# define HDMI0_60958_CS_CLOCK_ACCURACY(x) (((x) & 3) << 28)
#define HDMI0_60958_1 0x74d8
# define HDMI0_60958_CS_WORD_LENGTH(x) (((x) & 0xf) << 0)
# define HDMI0_60958_CS_ORIGINAL_SAMPLING_FREQUENCY(x) (((x) & 0xf) << 4)
# define HDMI0_60958_CS_VALID_L(x) (((x) & 1) << 16)
# define HDMI0_60958_CS_VALID_R(x) (((x) & 1) << 18)
# define HDMI0_60958_CS_CHANNEL_NUMBER_R(x) (((x) & 0xf) << 20)
#define HDMI0_ACR_PACKET_CONTROL 0x74dc
# define HDMI0_ACR_SEND (1 << 0)
# define HDMI0_ACR_CONT (1 << 1)
# define HDMI0_ACR_SELECT(x) (((x) & 3) << 4)
# define HDMI0_ACR_HW 0
# define HDMI0_ACR_32 1
# define HDMI0_ACR_44 2
# define HDMI0_ACR_48 3
# define HDMI0_ACR_SOURCE (1 << 8) /* 0 - hw; 1 - cts value */
# define HDMI0_ACR_AUTO_SEND (1 << 12)
#define HDMI0_RAMP_CONTROL0 0x74e0
# define HDMI0_RAMP_MAX_COUNT(x) (((x) & 0xffffff) << 0)
#define HDMI0_RAMP_CONTROL1 0x74e4
# define HDMI0_RAMP_MIN_COUNT(x) (((x) & 0xffffff) << 0)
#define HDMI0_RAMP_CONTROL2 0x74e8
# define HDMI0_RAMP_INC_COUNT(x) (((x) & 0xffffff) << 0)
#define HDMI0_RAMP_CONTROL3 0x74ec
# define HDMI0_RAMP_DEC_COUNT(x) (((x) & 0xffffff) << 0)
/* HDMI0_60958_2 is r7xx only */
#define HDMI0_60958_2 0x74f0
# define HDMI0_60958_CS_CHANNEL_NUMBER_2(x) (((x) & 0xf) << 0)
# define HDMI0_60958_CS_CHANNEL_NUMBER_3(x) (((x) & 0xf) << 4)
# define HDMI0_60958_CS_CHANNEL_NUMBER_4(x) (((x) & 0xf) << 8)
# define HDMI0_60958_CS_CHANNEL_NUMBER_5(x) (((x) & 0xf) << 12)
# define HDMI0_60958_CS_CHANNEL_NUMBER_6(x) (((x) & 0xf) << 16)
# define HDMI0_60958_CS_CHANNEL_NUMBER_7(x) (((x) & 0xf) << 20)
/* r6xx only; second instance starts at 0x7700 */
#define HDMI1_CONTROL 0x7700
#define HDMI1_STATUS 0x7704
#define HDMI1_AUDIO_PACKET_CONTROL 0x7708
/* DCE3; second instance starts at 0x7800 NOT 0x7700 */
#define DCE3_HDMI1_CONTROL 0x7800
#define DCE3_HDMI1_STATUS 0x7804
#define DCE3_HDMI1_AUDIO_PACKET_CONTROL 0x7808
/* DCE3.2 (for interrupts) */
#define AFMT_STATUS 0x7600
# define AFMT_AUDIO_ENABLE (1 << 4)
# define AFMT_AZ_FORMAT_WTRIG (1 << 28)
# define AFMT_AZ_FORMAT_WTRIG_INT (1 << 29)
# define AFMT_AZ_AUDIO_ENABLE_CHG (1 << 30)
#define AFMT_AUDIO_PACKET_CONTROL 0x7604
# define AFMT_AUDIO_SAMPLE_SEND (1 << 0)
# define AFMT_AUDIO_TEST_EN (1 << 12)
# define AFMT_AUDIO_CHANNEL_SWAP (1 << 24)
# define AFMT_60958_CS_UPDATE (1 << 26)
# define AFMT_AZ_AUDIO_ENABLE_CHG_MASK (1 << 27)
# define AFMT_AZ_FORMAT_WTRIG_MASK (1 << 28)
# define AFMT_AZ_FORMAT_WTRIG_ACK (1 << 29)
# define AFMT_AZ_AUDIO_ENABLE_CHG_ACK (1 << 30)
/*
* PM4
*/
@ -815,7 +1126,11 @@
#define PACKET3_STRMOUT_BUFFER_UPDATE 0x34
#define PACKET3_INDIRECT_BUFFER_MP 0x38
#define PACKET3_MEM_SEMAPHORE 0x39
# define PACKET3_SEM_WAIT_ON_SIGNAL (0x1 << 12)
# define PACKET3_SEM_SEL_SIGNAL (0x6 << 29)
# define PACKET3_SEM_SEL_WAIT (0x7 << 29)
#define PACKET3_MPEG_INDEX 0x3A
#define PACKET3_COPY_DW 0x3B
#define PACKET3_WAIT_REG_MEM 0x3C
#define PACKET3_MEM_WRITE 0x3D
#define PACKET3_INDIRECT_BUFFER 0x32
@ -877,6 +1192,7 @@
#define PACKET3_SET_CTL_CONST 0x6F
#define PACKET3_SET_CTL_CONST_OFFSET 0x0003cff0
#define PACKET3_SET_CTL_CONST_END 0x0003e200
#define PACKET3_STRMOUT_BASE_UPDATE 0x72 /* r7xx */
#define PACKET3_SURFACE_BASE_UPDATE 0x73
@ -1106,6 +1422,9 @@
#define S_0280A0_TILE_MODE(x) (((x) & 0x3) << 18)
#define G_0280A0_TILE_MODE(x) (((x) >> 18) & 0x3)
#define C_0280A0_TILE_MODE 0xFFF3FFFF
#define V_0280A0_TILE_DISABLE 0
#define V_0280A0_CLEAR_ENABLE 1
#define V_0280A0_FRAG_ENABLE 2
#define S_0280A0_BLEND_CLAMP(x) (((x) & 0x1) << 20)
#define G_0280A0_BLEND_CLAMP(x) (((x) >> 20) & 0x1)
#define C_0280A0_BLEND_CLAMP 0xFFEFFFFF
@ -1352,6 +1671,12 @@
#define S_038010_DST_SEL_W(x) (((x) & 0x7) << 25)
#define G_038010_DST_SEL_W(x) (((x) >> 25) & 0x7)
#define C_038010_DST_SEL_W 0xF1FFFFFF
# define SQ_SEL_X 0
# define SQ_SEL_Y 1
# define SQ_SEL_Z 2
# define SQ_SEL_W 3
# define SQ_SEL_0 4
# define SQ_SEL_1 5
#define S_038010_BASE_LEVEL(x) (((x) & 0xF) << 28)
#define G_038010_BASE_LEVEL(x) (((x) >> 28) & 0xF)
#define C_038010_BASE_LEVEL 0x0FFFFFFF

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -42,6 +42,12 @@ uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
void atombios_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 atombios_get_backlight_level(struct radeon_encoder *radeon_encoder);
void radeon_legacy_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level);
u8 radeon_legacy_get_backlight_level(struct radeon_encoder *radeon_encoder);
/*
* r100,rv100,rs100,rv200,rs200
*/
@ -58,32 +64,35 @@ void r100_fini(struct radeon_device *rdev);
int r100_suspend(struct radeon_device *rdev);
int r100_resume(struct radeon_device *rdev);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
bool r100_gpu_is_lockup(struct radeon_device *rdev);
bool r100_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r100_asic_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
struct radeon_fence *fence);
void r100_semaphore_ring_emit(struct radeon_device *rdev,
struct radeon_ring *cp,
struct radeon_semaphore *semaphore,
bool emit_wait);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
uint64_t src_offset,
uint64_t dst_offset,
unsigned num_pages,
struct radeon_fence *fence);
unsigned num_gpu_pages,
struct radeon_fence **fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
uint32_t tiling_flags, uint32_t pitch,
uint32_t offset, uint32_t obj_size);
void r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);
int r100_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
@ -100,13 +109,7 @@ int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
int r100_debugfs_mc_info_init(struct radeon_device *rdev);
int r100_gui_wait_for_idle(struct radeon_device *rdev);
void r100_gpu_lockup_update(struct r100_gpu_lockup *lockup,
struct radeon_cp *cp);
bool r100_gpu_cp_is_lockup(struct radeon_device *rdev,
struct r100_gpu_lockup *lockup,
struct radeon_cp *cp);
void r100_ib_fini(struct radeon_device *rdev);
int r100_ib_init(struct radeon_device *rdev);
int r100_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r100_irq_disable(struct radeon_device *rdev);
void r100_mc_stop(struct radeon_device *rdev, struct r100_mc_save *save);
void r100_mc_resume(struct radeon_device *rdev, struct r100_mc_save *save);
@ -136,6 +139,8 @@ extern void r100_pm_get_dynpm_state(struct radeon_device *rdev);
extern void r100_pre_page_flip(struct radeon_device *rdev, int crtc);
extern u32 r100_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
extern void r100_post_page_flip(struct radeon_device *rdev, int crtc);
extern void r100_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int r100_mc_wait_for_idle(struct radeon_device *rdev);
/*
* r200,rv250,rs300,rv280
@ -143,8 +148,8 @@ extern void r100_post_page_flip(struct radeon_device *rdev, int crtc);
extern int r200_copy_dma(struct radeon_device *rdev,
uint64_t src_offset,
uint64_t dst_offset,
unsigned num_pages,
struct radeon_fence *fence);
unsigned num_gpu_pages,
struct radeon_fence **fence);
void r200_set_safe_registers(struct radeon_device *rdev);
/*
@ -154,9 +159,8 @@ extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern bool r300_gpu_is_lockup(struct radeon_device *rdev);
extern int r300_asic_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
@ -173,6 +177,7 @@ extern int rv370_pcie_gart_init(struct radeon_device *rdev);
extern void rv370_pcie_gart_fini(struct radeon_device *rdev);
extern int rv370_pcie_gart_enable(struct radeon_device *rdev);
extern void rv370_pcie_gart_disable(struct radeon_device *rdev);
extern int r300_mc_wait_for_idle(struct radeon_device *rdev);
/*
* r420,r423,rv410
@ -203,6 +208,7 @@ int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_adjust_size(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
extern int rs400_mc_wait_for_idle(struct radeon_device *rdev);
/*
* rs600.
@ -233,7 +239,8 @@ extern void rs600_pre_page_flip(struct radeon_device *rdev, int crtc);
extern u32 rs600_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
extern void rs600_post_page_flip(struct radeon_device *rdev, int crtc);
void rs600_set_safe_registers(struct radeon_device *rdev);
extern void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc);
extern int rs600_mc_wait_for_idle(struct radeon_device *rdev);
/*
* rs690,rs740
@ -248,23 +255,21 @@ void rs690_bandwidth_update(struct radeon_device *rdev);
void rs690_line_buffer_adjust(struct radeon_device *rdev,
struct drm_display_mode *mode1,
struct drm_display_mode *mode2);
extern int rs690_mc_wait_for_idle(struct radeon_device *rdev);
/*
* rv515
*/
struct rv515_mc_save {
u32 d1vga_control;
u32 d2vga_control;
u32 vga_render_control;
u32 vga_hdp_control;
u32 d1crtc_control;
u32 d2crtc_control;
};
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
void rv515_ring_start(struct radeon_device *rdev, struct radeon_ring *ring);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
@ -275,13 +280,14 @@ void rv515_mc_stop(struct radeon_device *rdev, struct rv515_mc_save *save);
void rv515_mc_resume(struct radeon_device *rdev, struct rv515_mc_save *save);
void rv515_clock_startup(struct radeon_device *rdev);
void rv515_debugfs(struct radeon_device *rdev);
int rv515_mc_wait_for_idle(struct radeon_device *rdev);
/*
* r520,rv530,rv560,rv570,r580
*/
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
int r520_mc_wait_for_idle(struct radeon_device *rdev);
/*
* r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
@ -293,25 +299,28 @@ int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
struct radeon_fence *fence);
bool r600_gpu_is_lockup(struct radeon_device *rdev);
void r600_semaphore_ring_emit(struct radeon_device *rdev,
struct radeon_ring *cp,
struct radeon_semaphore *semaphore,
bool emit_wait);
bool r600_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_asic_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
uint32_t tiling_flags, uint32_t pitch,
uint32_t offset, uint32_t obj_size);
void r600_clear_surface_reg(struct radeon_device *rdev, int reg);
int r600_ib_test(struct radeon_device *rdev);
int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
int r600_copy_blit(struct radeon_device *rdev,
uint64_t src_offset, uint64_t dst_offset,
unsigned num_pages, struct radeon_fence *fence);
unsigned num_gpu_pages, struct radeon_fence **fence);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
@ -328,7 +337,7 @@ extern int r600_get_pcie_lanes(struct radeon_device *rdev);
bool r600_card_posted(struct radeon_device *rdev);
void r600_cp_stop(struct radeon_device *rdev);
int r600_cp_start(struct radeon_device *rdev);
void r600_ring_init(struct radeon_device *rdev, unsigned ring_size);
void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ring_size);
int r600_cp_resume(struct radeon_device *rdev);
void r600_cp_fini(struct radeon_device *rdev);
int r600_count_pipe_bits(uint32_t val);
@ -349,26 +358,23 @@ void r600_disable_interrupts(struct radeon_device *rdev);
void r600_rlc_stop(struct radeon_device *rdev);
/* r600 audio */
int r600_audio_init(struct radeon_device *rdev);
int r600_audio_tmds_index(struct drm_encoder *encoder);
void r600_audio_set_clock(struct drm_encoder *encoder, int clock);
int r600_audio_channels(struct radeon_device *rdev);
int r600_audio_bits_per_sample(struct radeon_device *rdev);
int r600_audio_rate(struct radeon_device *rdev);
uint8_t r600_audio_status_bits(struct radeon_device *rdev);
uint8_t r600_audio_category_code(struct radeon_device *rdev);
void r600_audio_schedule_polling(struct radeon_device *rdev);
void r600_audio_enable_polling(struct drm_encoder *encoder);
void r600_audio_disable_polling(struct drm_encoder *encoder);
struct r600_audio r600_audio_status(struct radeon_device *rdev);
void r600_audio_fini(struct radeon_device *rdev);
void r600_hdmi_init(struct drm_encoder *encoder);
int r600_hdmi_buffer_status_changed(struct drm_encoder *encoder);
void r600_hdmi_update_audio_settings(struct drm_encoder *encoder);
/* r600 blit */
int r600_blit_prepare_copy(struct radeon_device *rdev, int size_bytes);
void r600_blit_done_copy(struct radeon_device *rdev, struct radeon_fence *fence);
int r600_blit_prepare_copy(struct radeon_device *rdev, unsigned num_gpu_pages,
struct radeon_fence **fence, struct radeon_sa_bo **vb,
struct radeon_semaphore **sem);
void r600_blit_done_copy(struct radeon_device *rdev, struct radeon_fence **fence,
struct radeon_sa_bo *vb, struct radeon_semaphore *sem);
void r600_kms_blit_copy(struct radeon_device *rdev,
u64 src_gpu_addr, u64 dst_gpu_addr,
int size_bytes);
unsigned num_gpu_pages,
struct radeon_sa_bo *vb);
int r600_mc_wait_for_idle(struct radeon_device *rdev);
uint64_t r600_get_gpu_clock(struct radeon_device *rdev);
/*
* rv770,rv730,rv710,rv740
@ -387,23 +393,20 @@ void r700_cp_fini(struct radeon_device *rdev);
* evergreen
*/
struct evergreen_mc_save {
u32 vga_control[6];
u32 vga_render_control;
u32 vga_hdp_control;
u32 crtc_control[6];
bool crtc_enabled[RADEON_MAX_CRTCS];
};
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev);
int evergreen_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
int evergreen_suspend(struct radeon_device *rdev);
int evergreen_resume(struct radeon_device *rdev);
bool evergreen_gpu_is_lockup(struct radeon_device *rdev);
bool evergreen_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int evergreen_asic_reset(struct radeon_device *rdev);
void evergreen_bandwidth_update(struct radeon_device *rdev);
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int evergreen_copy_blit(struct radeon_device *rdev,
uint64_t src_offset, uint64_t dst_offset,
unsigned num_pages, struct radeon_fence *fence);
void evergreen_hpd_init(struct radeon_device *rdev);
void evergreen_hpd_fini(struct radeon_device *rdev);
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
@ -416,28 +419,62 @@ extern int evergreen_cs_parse(struct radeon_cs_parser *p);
extern void evergreen_pm_misc(struct radeon_device *rdev);
extern void evergreen_pm_prepare(struct radeon_device *rdev);
extern void evergreen_pm_finish(struct radeon_device *rdev);
extern void sumo_pm_init_profile(struct radeon_device *rdev);
extern void btc_pm_init_profile(struct radeon_device *rdev);
extern void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc);
extern u32 evergreen_page_flip(struct radeon_device *rdev, int crtc, u64 crtc_base);
extern void evergreen_post_page_flip(struct radeon_device *rdev, int crtc);
extern void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc);
void evergreen_disable_interrupt_state(struct radeon_device *rdev);
int evergreen_blit_init(struct radeon_device *rdev);
void evergreen_blit_fini(struct radeon_device *rdev);
/* evergreen blit */
int evergreen_blit_prepare_copy(struct radeon_device *rdev, int size_bytes);
void evergreen_blit_done_copy(struct radeon_device *rdev, struct radeon_fence *fence);
void evergreen_kms_blit_copy(struct radeon_device *rdev,
u64 src_gpu_addr, u64 dst_gpu_addr,
int size_bytes);
int evergreen_mc_wait_for_idle(struct radeon_device *rdev);
/*
* cayman
*/
void cayman_fence_ring_emit(struct radeon_device *rdev,
struct radeon_fence *fence);
void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev);
int cayman_init(struct radeon_device *rdev);
void cayman_fini(struct radeon_device *rdev);
int cayman_suspend(struct radeon_device *rdev);
int cayman_resume(struct radeon_device *rdev);
bool cayman_gpu_is_lockup(struct radeon_device *rdev);
int cayman_asic_reset(struct radeon_device *rdev);
void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int cayman_vm_init(struct radeon_device *rdev);
void cayman_vm_fini(struct radeon_device *rdev);
void cayman_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);
uint32_t cayman_vm_page_flags(struct radeon_device *rdev, uint32_t flags);
void cayman_vm_set_page(struct radeon_device *rdev, uint64_t pe,
uint64_t addr, unsigned count,
uint32_t incr, uint32_t flags);
int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
/* DCE6 - SI */
void dce6_bandwidth_update(struct radeon_device *rdev);
/*
* si
*/
void si_fence_ring_emit(struct radeon_device *rdev,
struct radeon_fence *fence);
void si_pcie_gart_tlb_flush(struct radeon_device *rdev);
int si_init(struct radeon_device *rdev);
void si_fini(struct radeon_device *rdev);
int si_suspend(struct radeon_device *rdev);
int si_resume(struct radeon_device *rdev);
bool si_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *cp);
int si_asic_reset(struct radeon_device *rdev);
void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int si_irq_set(struct radeon_device *rdev);
int si_irq_process(struct radeon_device *rdev);
int si_vm_init(struct radeon_device *rdev);
void si_vm_fini(struct radeon_device *rdev);
void si_vm_set_page(struct radeon_device *rdev, uint64_t pe,
uint64_t addr, unsigned count,
uint32_t incr, uint32_t flags);
void si_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm);
int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
uint64_t si_get_gpu_clock(struct radeon_device *rdev);
#endif

View File

@ -23,8 +23,8 @@
* Authors: Dave Airlie
* Alex Deucher
*/
#include "drmP.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "atom.h"
@ -56,38 +56,35 @@ extern void
radeon_add_legacy_encoder(struct drm_device *dev, uint32_t encoder_enum,
uint32_t supported_device);
/* local */
static int radeon_atom_get_max_vddc(struct radeon_device *rdev, u8 voltage_type,
u16 voltage_id, u16 *voltage);
union atom_supported_devices {
struct _ATOM_SUPPORTED_DEVICES_INFO info;
struct _ATOM_SUPPORTED_DEVICES_INFO_2 info_2;
struct _ATOM_SUPPORTED_DEVICES_INFO_2d1 info_2d1;
};
static inline struct radeon_i2c_bus_rec radeon_lookup_i2c_gpio(struct radeon_device *rdev,
uint8_t id)
static void radeon_lookup_i2c_gpio_quirks(struct radeon_device *rdev,
ATOM_GPIO_I2C_ASSIGMENT *gpio,
u8 index)
{
struct atom_context *ctx = rdev->mode_info.atom_context;
ATOM_GPIO_I2C_ASSIGMENT *gpio;
struct radeon_i2c_bus_rec i2c;
int index = GetIndexIntoMasterTable(DATA, GPIO_I2C_Info);
struct _ATOM_GPIO_I2C_INFO *i2c_info;
uint16_t data_offset, size;
int i, num_indices;
memset(&i2c, 0, sizeof(struct radeon_i2c_bus_rec));
i2c.valid = false;
if (atom_parse_data_header(ctx, index, &size, NULL, NULL, &data_offset)) {
i2c_info = (struct _ATOM_GPIO_I2C_INFO *)(ctx->bios + data_offset);
num_indices = (size - sizeof(ATOM_COMMON_TABLE_HEADER)) /
sizeof(ATOM_GPIO_I2C_ASSIGMENT);
for (i = 0; i < num_indices; i++) {
gpio = &i2c_info->asGPIO_Info[i];
/* r4xx mask is technically not used by the hw, so patch in the legacy mask bits */
if ((rdev->family == CHIP_R420) ||
(rdev->family == CHIP_R423) ||
(rdev->family == CHIP_RV410)) {
if ((le16_to_cpu(gpio->usClkMaskRegisterIndex) == 0x0018) ||
(le16_to_cpu(gpio->usClkMaskRegisterIndex) == 0x0019) ||
(le16_to_cpu(gpio->usClkMaskRegisterIndex) == 0x001a)) {
gpio->ucClkMaskShift = 0x19;
gpio->ucDataMaskShift = 0x18;
}
}
/* some evergreen boards have bad data for this entry */
if (ASIC_IS_DCE4(rdev)) {
if ((i == 7) &&
if ((index == 7) &&
(le16_to_cpu(gpio->usClkMaskRegisterIndex) == 0x1936) &&
(gpio->sucI2cId.ucAccess == 0)) {
gpio->sucI2cId.ucAccess = 0x97;
@ -100,13 +97,19 @@ static inline struct radeon_i2c_bus_rec radeon_lookup_i2c_gpio(struct radeon_dev
/* some DCE3 boards have bad data for this entry */
if (ASIC_IS_DCE3(rdev)) {
if ((i == 4) &&
if ((index == 4) &&
(le16_to_cpu(gpio->usClkMaskRegisterIndex) == 0x1fda) &&
(gpio->sucI2cId.ucAccess == 0x94))
gpio->sucI2cId.ucAccess = 0x14;
}
}
static struct radeon_i2c_bus_rec radeon_get_bus_rec_for_i2c_gpio(ATOM_GPIO_I2C_ASSIGMENT *gpio)
{
struct radeon_i2c_bus_rec i2c;
memset(&i2c, 0, sizeof(struct radeon_i2c_bus_rec));
if (gpio->sucI2cId.ucAccess == id) {
i2c.mask_clk_reg = le16_to_cpu(gpio->usClkMaskRegisterIndex) * 4;
i2c.mask_data_reg = le16_to_cpu(gpio->usDataMaskRegisterIndex) * 4;
i2c.en_clk_reg = le16_to_cpu(gpio->usClkEnRegisterIndex) * 4;
@ -138,10 +141,43 @@ static inline struct radeon_i2c_bus_rec radeon_lookup_i2c_gpio(struct radeon_dev
if (i2c.mask_clk_reg)
i2c.valid = true;
break;
}
}
}
else
i2c.valid = false;
return i2c;
}
static struct radeon_i2c_bus_rec radeon_lookup_i2c_gpio(struct radeon_device *rdev,
uint8_t id)
{
struct atom_context *ctx = rdev->mode_info.atom_context;
ATOM_GPIO_I2C_ASSIGMENT *gpio;
struct radeon_i2c_bus_rec i2c;
int index = GetIndexIntoMasterTable(DATA, GPIO_I2C_Info);
struct _ATOM_GPIO_I2C_INFO *i2c_info;
uint16_t data_offset, size;
int i, num_indices;
memset(&i2c, 0, sizeof(struct radeon_i2c_bus_rec));
i2c.valid = false;
if (atom_parse_data_header(ctx, index, &size, NULL, NULL, &data_offset)) {
i2c_info = (struct _ATOM_GPIO_I2C_INFO *)(ctx->bios + data_offset);
num_indices = (size - sizeof(ATOM_COMMON_TABLE_HEADER)) /
sizeof(ATOM_GPIO_I2C_ASSIGMENT);
for (i = 0; i < num_indices; i++) {
gpio = &i2c_info->asGPIO_Info[i];
radeon_lookup_i2c_gpio_quirks(rdev, gpio, i);
if (gpio->sucI2cId.ucAccess == id) {
i2c = radeon_get_bus_rec_for_i2c_gpio(gpio);
break;
}
}
}
return i2c;
}
@ -157,8 +193,6 @@ void radeon_atombios_i2c_init(struct radeon_device *rdev)
int i, num_indices;
char stmp[32];
memset(&i2c, 0, sizeof(struct radeon_i2c_bus_rec));
if (atom_parse_data_header(ctx, index, &size, NULL, NULL, &data_offset)) {
i2c_info = (struct _ATOM_GPIO_I2C_INFO *)(ctx->bios + data_offset);
@ -167,60 +201,12 @@ void radeon_atombios_i2c_init(struct radeon_device *rdev)
for (i = 0; i < num_indices; i++) {
gpio = &i2c_info->asGPIO_Info[i];
i2c.valid = false;
/* some evergreen boards have bad data for this entry */
if (ASIC_IS_DCE4(rdev)) {
if ((i == 7) &&
(le16_to_cpu(gpio->usClkMaskRegisterIndex) == 0x1936) &&
(gpio->sucI2cId.ucAccess == 0)) {
gpio->sucI2cId.ucAccess = 0x97;
gpio->ucDataMaskShift = 8;
gpio->ucDataEnShift = 8;
gpio->ucDataY_Shift = 8;
gpio->ucDataA_Shift = 8;
}
}
radeon_lookup_i2c_gpio_quirks(rdev, gpio, i);
/* some DCE3 boards have bad data for this entry */
if (ASIC_IS_DCE3(rdev)) {
if ((i == 4) &&
(le16_to_cpu(gpio->usClkMaskRegisterIndex) == 0x1fda) &&
(gpio->sucI2cId.ucAccess == 0x94))
gpio->sucI2cId.ucAccess = 0x14;
}
i2c = radeon_get_bus_rec_for_i2c_gpio(gpio);
i2c.mask_clk_reg = le16_to_cpu(gpio->usClkMaskRegisterIndex) * 4;
i2c.mask_data_reg = le16_to_cpu(gpio->usDataMaskRegisterIndex) * 4;
i2c.en_clk_reg = le16_to_cpu(gpio->usClkEnRegisterIndex) * 4;
i2c.en_data_reg = le16_to_cpu(gpio->usDataEnRegisterIndex) * 4;
i2c.y_clk_reg = le16_to_cpu(gpio->usClkY_RegisterIndex) * 4;
i2c.y_data_reg = le16_to_cpu(gpio->usDataY_RegisterIndex) * 4;
i2c.a_clk_reg = le16_to_cpu(gpio->usClkA_RegisterIndex) * 4;
i2c.a_data_reg = le16_to_cpu(gpio->usDataA_RegisterIndex) * 4;
i2c.mask_clk_mask = (1 << gpio->ucClkMaskShift);
i2c.mask_data_mask = (1 << gpio->ucDataMaskShift);
i2c.en_clk_mask = (1 << gpio->ucClkEnShift);
i2c.en_data_mask = (1 << gpio->ucDataEnShift);
i2c.y_clk_mask = (1 << gpio->ucClkY_Shift);
i2c.y_data_mask = (1 << gpio->ucDataY_Shift);
i2c.a_clk_mask = (1 << gpio->ucClkA_Shift);
i2c.a_data_mask = (1 << gpio->ucDataA_Shift);
if (gpio->sucI2cId.sbfAccess.bfHW_Capable)
i2c.hw_capable = true;
else
i2c.hw_capable = false;
if (gpio->sucI2cId.ucAccess == 0xa0)
i2c.mm_i2c = true;
else
i2c.mm_i2c = false;
i2c.i2c_id = gpio->sucI2cId.ucAccess;
if (i2c.mask_clk_reg) {
i2c.valid = true;
if (i2c.valid) {
sprintf(stmp, "0x%x", i2c.i2c_id);
rdev->i2c_bus[i] = radeon_i2c_create(rdev->ddev, &i2c, stmp);
}
@ -228,7 +214,7 @@ void radeon_atombios_i2c_init(struct radeon_device *rdev)
}
}
static inline struct radeon_gpio_rec radeon_lookup_gpio(struct radeon_device *rdev,
static struct radeon_gpio_rec radeon_lookup_gpio(struct radeon_device *rdev,
u8 id)
{
struct atom_context *ctx = rdev->mode_info.atom_context;
@ -271,7 +257,9 @@ static struct radeon_hpd radeon_atom_get_hpd_info_from_gpio(struct radeon_device
memset(&hpd, 0, sizeof(struct radeon_hpd));
if (ASIC_IS_DCE4(rdev))
if (ASIC_IS_DCE6(rdev))
reg = SI_DC_GPIO_HPD_A;
else if (ASIC_IS_DCE4(rdev))
reg = EVERGREEN_DC_GPIO_HPD_A;
else
reg = AVIVO_DC_GPIO_HPD_A;
@ -456,10 +444,26 @@ static bool radeon_atom_apply_quirks(struct drm_device *dev,
*/
if ((dev->pdev->device == 0x9498) &&
(dev->pdev->subsystem_vendor == 0x1682) &&
(dev->pdev->subsystem_device == 0x2452)) {
(dev->pdev->subsystem_device == 0x2452) &&
(i2c_bus->valid == false) &&
!(supported_device & (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))) {
struct radeon_device *rdev = dev->dev_private;
*i2c_bus = radeon_lookup_i2c_gpio(rdev, 0x93);
}
/* Fujitsu D3003-S2 board lists DVI-I as DVI-D and VGA */
if (((dev->pdev->device == 0x9802) || (dev->pdev->device == 0x9806)) &&
(dev->pdev->subsystem_vendor == 0x1734) &&
(dev->pdev->subsystem_device == 0x11bd)) {
if (*connector_type == DRM_MODE_CONNECTOR_VGA) {
*connector_type = DRM_MODE_CONNECTOR_DVII;
*line_mux = 0x3103;
} else if (*connector_type == DRM_MODE_CONNECTOR_DVID) {
*connector_type = DRM_MODE_CONNECTOR_DVII;
}
}
return true;
}
@ -1250,6 +1254,10 @@ bool radeon_atom_get_clock_info(struct drm_device *dev)
if (rdev->clock.max_pixel_clock == 0)
rdev->clock.max_pixel_clock = 40000;
/* not technically a clock, but... */
rdev->mode_info.firmware_flags =
le16_to_cpu(firmware_info->info.usFirmwareCapability.susAccess);
return true;
}
@ -1259,6 +1267,8 @@ bool radeon_atom_get_clock_info(struct drm_device *dev)
union igp_info {
struct _ATOM_INTEGRATED_SYSTEM_INFO info;
struct _ATOM_INTEGRATED_SYSTEM_INFO_V2 info_2;
struct _ATOM_INTEGRATED_SYSTEM_INFO_V6 info_6;
struct _ATOM_INTEGRATED_SYSTEM_INFO_V1_7 info_7;
};
bool radeon_atombios_sideport_present(struct radeon_device *rdev)
@ -1386,27 +1396,50 @@ static void radeon_atombios_get_igp_ss_overrides(struct radeon_device *rdev,
struct radeon_mode_info *mode_info = &rdev->mode_info;
int index = GetIndexIntoMasterTable(DATA, IntegratedSystemInfo);
u16 data_offset, size;
struct _ATOM_INTEGRATED_SYSTEM_INFO_V6 *igp_info;
union igp_info *igp_info;
u8 frev, crev;
u16 percentage = 0, rate = 0;
/* get any igp specific overrides */
if (atom_parse_data_header(mode_info->atom_context, index, &size,
&frev, &crev, &data_offset)) {
igp_info = (struct _ATOM_INTEGRATED_SYSTEM_INFO_V6 *)
igp_info = (union igp_info *)
(mode_info->atom_context->bios + data_offset);
switch (crev) {
case 6:
switch (id) {
case ASIC_INTERNAL_SS_ON_TMDS:
percentage = le16_to_cpu(igp_info->info_6.usDVISSPercentage);
rate = le16_to_cpu(igp_info->info_6.usDVISSpreadRateIn10Hz);
break;
case ASIC_INTERNAL_SS_ON_HDMI:
percentage = le16_to_cpu(igp_info->info_6.usHDMISSPercentage);
rate = le16_to_cpu(igp_info->info_6.usHDMISSpreadRateIn10Hz);
break;
case ASIC_INTERNAL_SS_ON_LVDS:
percentage = le16_to_cpu(igp_info->info_6.usLvdsSSPercentage);
rate = le16_to_cpu(igp_info->info_6.usLvdsSSpreadRateIn10Hz);
break;
}
break;
case 7:
switch (id) {
case ASIC_INTERNAL_SS_ON_TMDS:
percentage = le16_to_cpu(igp_info->usDVISSPercentage);
rate = le16_to_cpu(igp_info->usDVISSpreadRateIn10Hz);
percentage = le16_to_cpu(igp_info->info_7.usDVISSPercentage);
rate = le16_to_cpu(igp_info->info_7.usDVISSpreadRateIn10Hz);
break;
case ASIC_INTERNAL_SS_ON_HDMI:
percentage = le16_to_cpu(igp_info->usHDMISSPercentage);
rate = le16_to_cpu(igp_info->usHDMISSpreadRateIn10Hz);
percentage = le16_to_cpu(igp_info->info_7.usHDMISSPercentage);
rate = le16_to_cpu(igp_info->info_7.usHDMISSpreadRateIn10Hz);
break;
case ASIC_INTERNAL_SS_ON_LVDS:
percentage = le16_to_cpu(igp_info->usLvdsSSPercentage);
rate = le16_to_cpu(igp_info->usLvdsSSpreadRateIn10Hz);
percentage = le16_to_cpu(igp_info->info_7.usLvdsSSPercentage);
rate = le16_to_cpu(igp_info->info_7.usLvdsSSpreadRateIn10Hz);
break;
}
break;
default:
DRM_ERROR("Unsupported IGP table: %d %d\n", frev, crev);
break;
}
if (percentage)
@ -1892,6 +1925,8 @@ static const char *pp_lib_thermal_controller_names[] = {
"emc2103",
"Sumo",
"Northern Islands",
"Southern Islands",
"lm96163",
};
union power_info {
@ -1908,6 +1943,7 @@ union pplib_clock_info {
struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
struct _ATOM_PPLIB_SI_CLOCK_INFO si;
};
union pplib_power_state {
@ -1973,7 +2009,8 @@ static int radeon_atombios_parse_power_table_1_3(struct radeon_device *rdev)
power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
/* add the i2c bus for thermal/fan chip */
if (power_info->info.ucOverdriveThermalController > 0) {
if ((power_info->info.ucOverdriveThermalController > 0) &&
(power_info->info.ucOverdriveThermalController < ARRAY_SIZE(thermal_controller_names))) {
DRM_INFO("Possible %s thermal controller at 0x%02x\n",
thermal_controller_names[power_info->info.ucOverdriveThermalController],
power_info->info.ucOverdriveControllerAddress >> 1);
@ -1996,10 +2033,14 @@ static int radeon_atombios_parse_power_table_1_3(struct radeon_device *rdev)
return state_index;
/* last mode is usually default, array is low to high */
for (i = 0; i < num_modes; i++) {
rdev->pm.power_state[state_index].clock_info =
kzalloc(sizeof(struct radeon_pm_clock_info) * 1, GFP_KERNEL);
if (!rdev->pm.power_state[state_index].clock_info)
return state_index;
rdev->pm.power_state[state_index].num_clock_modes = 1;
rdev->pm.power_state[state_index].clock_info[0].voltage.type = VOLTAGE_NONE;
switch (frev) {
case 1:
rdev->pm.power_state[state_index].num_clock_modes = 1;
rdev->pm.power_state[state_index].clock_info[0].mclk =
le16_to_cpu(power_info->info.asPowerPlayInfo[i].usMemoryClock);
rdev->pm.power_state[state_index].clock_info[0].sclk =
@ -2035,7 +2076,6 @@ static int radeon_atombios_parse_power_table_1_3(struct radeon_device *rdev)
state_index++;
break;
case 2:
rdev->pm.power_state[state_index].num_clock_modes = 1;
rdev->pm.power_state[state_index].clock_info[0].mclk =
le32_to_cpu(power_info->info_2.asPowerPlayInfo[i].ulMemoryClock);
rdev->pm.power_state[state_index].clock_info[0].sclk =
@ -2072,7 +2112,6 @@ static int radeon_atombios_parse_power_table_1_3(struct radeon_device *rdev)
state_index++;
break;
case 3:
rdev->pm.power_state[state_index].num_clock_modes = 1;
rdev->pm.power_state[state_index].clock_info[0].mclk =
le32_to_cpu(power_info->info_3.asPowerPlayInfo[i].ulMemoryClock);
rdev->pm.power_state[state_index].clock_info[0].sclk =
@ -2163,6 +2202,11 @@ static void radeon_atombios_add_pplib_thermal_controller(struct radeon_device *r
(controller->ucFanParameters &
ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
rdev->pm.int_thermal_type = THERMAL_TYPE_NI;
} else if (controller->ucType == ATOM_PP_THERMALCONTROLLER_SISLANDS) {
DRM_INFO("Internal thermal controller %s fan control\n",
(controller->ucFanParameters &
ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
rdev->pm.int_thermal_type = THERMAL_TYPE_SI;
} else if ((controller->ucType ==
ATOM_PP_THERMALCONTROLLER_EXTERNAL_GPIO) ||
(controller->ucType ==
@ -2170,7 +2214,7 @@ static void radeon_atombios_add_pplib_thermal_controller(struct radeon_device *r
(controller->ucType ==
ATOM_PP_THERMALCONTROLLER_EMC2103_WITH_INTERNAL)) {
DRM_INFO("Special thermal controller config\n");
} else {
} else if (controller->ucType < ARRAY_SIZE(pp_lib_thermal_controller_names)) {
DRM_INFO("Possible %s thermal controller at 0x%02x %s fan control\n",
pp_lib_thermal_controller_names[controller->ucType],
controller->ucI2cAddress >> 1,
@ -2185,6 +2229,12 @@ static void radeon_atombios_add_pplib_thermal_controller(struct radeon_device *r
strlcpy(info.type, name, sizeof(info.type));
i2c_new_device(&rdev->pm.i2c_bus->adapter, &info);
}
} else {
DRM_INFO("Unknown thermal controller type %d at 0x%02x %s fan control\n",
controller->ucType,
controller->ucI2cAddress >> 1,
(controller->ucFanParameters &
ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
}
}
}
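The two hunks above stop indexing the controller-name tables with an unchecked ucType by bounding it against ARRAY_SIZE first. A minimal standalone sketch of that guard, assuming nothing from the driver (the table entries and names here are placeholders, not the driver's real list):

#include <stdio.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

static const char *names[] = { "NONE", "lm63", "adm1032" };   /* placeholder entries */

static const char *controller_name(unsigned int type)
{
    /* only index the table when the type is in range, as the hunks above do */
    return (type < ARRAY_SIZE(names)) ? names[type] : "unknown";
}

int main(void)
{
    printf("%s\n", controller_name(1));    /* prints "lm63" */
    printf("%s\n", controller_name(99));   /* prints "unknown": index out of range */
    return 0;
}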
@ -2257,7 +2307,7 @@ static void radeon_atombios_parse_pplib_non_clock_info(struct radeon_device *rde
rdev->pm.default_power_state_index = state_index;
rdev->pm.power_state[state_index].default_clock_mode =
&rdev->pm.power_state[state_index].clock_info[mode_index - 1];
if (ASIC_IS_DCE5(rdev)) {
if (ASIC_IS_DCE5(rdev) && !(rdev->flags & RADEON_IS_IGP)) {
/* NI chips post without MC ucode, so default clocks are strobe mode only */
rdev->pm.default_sclk = rdev->pm.power_state[state_index].clock_info[0].sclk;
rdev->pm.default_mclk = rdev->pm.power_state[state_index].clock_info[0].mclk;
@ -2283,6 +2333,7 @@ static bool radeon_atombios_parse_pplib_clock_info(struct radeon_device *rdev,
union pplib_clock_info *clock_info)
{
u32 sclk, mclk;
u16 vddc;
if (rdev->flags & RADEON_IS_IGP) {
if (rdev->family >= CHIP_PALM) {
@ -2294,6 +2345,19 @@ static bool radeon_atombios_parse_pplib_clock_info(struct radeon_device *rdev,
sclk |= clock_info->rs780.ucLowEngineClockHigh << 16;
rdev->pm.power_state[state_index].clock_info[mode_index].sclk = sclk;
}
} else if (ASIC_IS_DCE6(rdev)) {
sclk = le16_to_cpu(clock_info->si.usEngineClockLow);
sclk |= clock_info->si.ucEngineClockHigh << 16;
mclk = le16_to_cpu(clock_info->si.usMemoryClockLow);
mclk |= clock_info->si.ucMemoryClockHigh << 16;
rdev->pm.power_state[state_index].clock_info[mode_index].mclk = mclk;
rdev->pm.power_state[state_index].clock_info[mode_index].sclk = sclk;
rdev->pm.power_state[state_index].clock_info[mode_index].voltage.type =
VOLTAGE_SW;
rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage =
le16_to_cpu(clock_info->si.usVDDC);
rdev->pm.power_state[state_index].clock_info[mode_index].voltage.vddci =
le16_to_cpu(clock_info->si.usVDDCI);
} else if (ASIC_IS_DCE4(rdev)) {
sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
@ -2321,11 +2385,18 @@ static bool radeon_atombios_parse_pplib_clock_info(struct radeon_device *rdev,
}
/* patch up vddc if necessary */
if (rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage == 0xff01) {
u16 vddc;
if (radeon_atom_get_max_vddc(rdev, &vddc) == 0)
switch (rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage) {
case ATOM_VIRTUAL_VOLTAGE_ID0:
case ATOM_VIRTUAL_VOLTAGE_ID1:
case ATOM_VIRTUAL_VOLTAGE_ID2:
case ATOM_VIRTUAL_VOLTAGE_ID3:
if (radeon_atom_get_max_vddc(rdev, VOLTAGE_TYPE_VDDC,
rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage,
&vddc) == 0)
rdev->pm.power_state[state_index].clock_info[mode_index].voltage.voltage = vddc;
break;
default:
break;
}
if (rdev->flags & RADEON_IS_IGP) {
@ -2377,6 +2448,13 @@ static int radeon_atombios_parse_power_table_4_5(struct radeon_device *rdev)
le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
(power_state->v1.ucNonClockStateIndex *
power_info->pplib.ucNonClockSize));
rdev->pm.power_state[i].clock_info = kzalloc(sizeof(struct radeon_pm_clock_info) *
((power_info->pplib.ucStateEntrySize - 1) ?
(power_info->pplib.ucStateEntrySize - 1) : 1),
GFP_KERNEL);
if (!rdev->pm.power_state[i].clock_info)
return state_index;
if (power_info->pplib.ucStateEntrySize - 1) {
for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
clock_info = (union pplib_clock_info *)
(mode_info->atom_context->bios + data_offset +
@ -2389,6 +2467,13 @@ static int radeon_atombios_parse_power_table_4_5(struct radeon_device *rdev)
if (valid)
mode_index++;
}
} else {
rdev->pm.power_state[state_index].clock_info[0].mclk =
rdev->clock.default_mclk;
rdev->pm.power_state[state_index].clock_info[0].sclk =
rdev->clock.default_sclk;
mode_index++;
}
rdev->pm.power_state[state_index].num_clock_modes = mode_index;
if (mode_index) {
radeon_atombios_parse_pplib_non_clock_info(rdev, state_index, mode_index,
@ -2421,9 +2506,9 @@ static int radeon_atombios_parse_power_table_6(struct radeon_device *rdev)
int i, j, non_clock_array_index, clock_array_index;
int state_index = 0, mode_index = 0;
union pplib_clock_info *clock_info;
struct StateArray *state_array;
struct ClockInfoArray *clock_info_array;
struct NonClockInfoArray *non_clock_info_array;
struct _StateArray *state_array;
struct _ClockInfoArray *clock_info_array;
struct _NonClockInfoArray *non_clock_info_array;
bool valid;
union power_info *power_info;
int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
@ -2436,13 +2521,13 @@ static int radeon_atombios_parse_power_table_6(struct radeon_device *rdev)
power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
radeon_atombios_add_pplib_thermal_controller(rdev, &power_info->pplib.sThermalController);
state_array = (struct StateArray *)
state_array = (struct _StateArray *)
(mode_info->atom_context->bios + data_offset +
le16_to_cpu(power_info->pplib.usStateArrayOffset));
clock_info_array = (struct ClockInfoArray *)
clock_info_array = (struct _ClockInfoArray *)
(mode_info->atom_context->bios + data_offset +
le16_to_cpu(power_info->pplib.usClockInfoArrayOffset));
non_clock_info_array = (struct NonClockInfoArray *)
non_clock_info_array = (struct _NonClockInfoArray *)
(mode_info->atom_context->bios + data_offset +
le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset));
rdev->pm.power_state = kzalloc(sizeof(struct radeon_power_state) *
@ -2456,19 +2541,33 @@ static int radeon_atombios_parse_power_table_6(struct radeon_device *rdev)
non_clock_array_index = i; /* power_state->v2.nonClockInfoIndex */
non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
&non_clock_info_array->nonClockInfo[non_clock_array_index];
rdev->pm.power_state[i].clock_info = kzalloc(sizeof(struct radeon_pm_clock_info) *
(power_state->v2.ucNumDPMLevels ?
power_state->v2.ucNumDPMLevels : 1),
GFP_KERNEL);
if (!rdev->pm.power_state[i].clock_info)
return state_index;
if (power_state->v2.ucNumDPMLevels) {
for (j = 0; j < power_state->v2.ucNumDPMLevels; j++) {
clock_array_index = power_state->v2.clockInfoIndex[j];
/* XXX this might be an inagua bug... */
if (clock_array_index >= clock_info_array->ucNumEntries)
continue;
clock_info = (union pplib_clock_info *)
&clock_info_array->clockInfo[clock_array_index];
&clock_info_array->clockInfo[clock_array_index * clock_info_array->ucEntrySize];
valid = radeon_atombios_parse_pplib_clock_info(rdev,
state_index, mode_index,
clock_info);
if (valid)
mode_index++;
}
} else {
rdev->pm.power_state[state_index].clock_info[0].mclk =
rdev->clock.default_mclk;
rdev->pm.power_state[state_index].clock_info[0].sclk =
rdev->clock.default_sclk;
mode_index++;
}
rdev->pm.power_state[state_index].num_clock_modes = mode_index;
if (mode_index) {
radeon_atombios_parse_pplib_non_clock_info(rdev, state_index, mode_index,
@ -2524,6 +2623,9 @@ void radeon_atombios_get_power_modes(struct radeon_device *rdev)
} else {
rdev->pm.power_state = kzalloc(sizeof(struct radeon_power_state), GFP_KERNEL);
if (rdev->pm.power_state) {
rdev->pm.power_state[0].clock_info =
kzalloc(sizeof(struct radeon_pm_clock_info) * 1, GFP_KERNEL);
if (rdev->pm.power_state[0].clock_info) {
/* add the default mode */
rdev->pm.power_state[state_index].type =
POWER_STATE_TYPE_DEFAULT;
@ -2539,12 +2641,17 @@ void radeon_atombios_get_power_modes(struct radeon_device *rdev)
state_index++;
}
}
}
rdev->pm.num_power_states = state_index;
rdev->pm.current_power_state_index = rdev->pm.default_power_state_index;
rdev->pm.current_clock_mode_index = 0;
rdev->pm.current_vddc = rdev->pm.power_state[rdev->pm.default_power_state_index].clock_info[0].voltage.voltage;
if (rdev->pm.default_power_state_index >= 0)
rdev->pm.current_vddc =
rdev->pm.power_state[rdev->pm.default_power_state_index].clock_info[0].voltage.voltage;
else
rdev->pm.current_vddc = 0;
}
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable)
@ -2604,6 +2711,7 @@ union set_voltage {
struct _SET_VOLTAGE_PS_ALLOCATION alloc;
struct _SET_VOLTAGE_PARAMETERS v1;
struct _SET_VOLTAGE_PARAMETERS_V2 v2;
struct _SET_VOLTAGE_PARAMETERS_V1_3 v3;
};
void radeon_atom_set_voltage(struct radeon_device *rdev, u16 voltage_level, u8 voltage_type)
@ -2630,6 +2738,11 @@ void radeon_atom_set_voltage(struct radeon_device *rdev, u16 voltage_level, u8 v
args.v2.ucVoltageMode = SET_ASIC_VOLTAGE_MODE_SET_VOLTAGE;
args.v2.usVoltageLevel = cpu_to_le16(voltage_level);
break;
case 3:
args.v3.ucVoltageType = voltage_type;
args.v3.ucVoltageMode = ATOM_SET_VOLTAGE;
args.v3.usVoltageLevel = cpu_to_le16(voltage_level);
break;
default:
DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
return;
@ -2638,8 +2751,8 @@ void radeon_atom_set_voltage(struct radeon_device *rdev, u16 voltage_level, u8 v
atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}
int radeon_atom_get_max_vddc(struct radeon_device *rdev,
u16 *voltage)
static int radeon_atom_get_max_vddc(struct radeon_device *rdev, u8 voltage_type,
u16 voltage_id, u16 *voltage)
{
union set_voltage args;
int index = GetIndexIntoMasterTable(COMMAND, SetVoltage);
@ -2660,6 +2773,15 @@ int radeon_atom_get_max_vddc(struct radeon_device *rdev,
*voltage = le16_to_cpu(args.v2.usVoltageLevel);
break;
case 3:
args.v3.ucVoltageType = voltage_type;
args.v3.ucVoltageMode = ATOM_GET_VOLTAGE_LEVEL;
args.v3.usVoltageLevel = cpu_to_le16(voltage_id);
atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
*voltage = le16_to_cpu(args.v3.usVoltageLevel);
break;
default:
DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
return -EINVAL;
@ -2911,6 +3033,20 @@ radeon_atombios_connected_scratch_regs(struct drm_connector *connector,
bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
}
}
if ((radeon_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
(radeon_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
if (connected) {
DRM_DEBUG_KMS("DFP6 connected\n");
bios_0_scratch |= ATOM_S0_DFP6;
bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
} else {
DRM_DEBUG_KMS("DFP6 disconnected\n");
bios_0_scratch &= ~ATOM_S0_DFP6;
bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
}
}
if (rdev->family >= CHIP_R600) {
WREG32(R600_BIOS_0_SCRATCH, bios_0_scratch);
@ -2931,6 +3067,9 @@ radeon_atombios_encoder_crtc_scratch_regs(struct drm_encoder *encoder, int crtc)
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
uint32_t bios_3_scratch;
if (ASIC_IS_DCE4(rdev))
return;
if (rdev->family >= CHIP_R600)
bios_3_scratch = RREG32(R600_BIOS_3_SCRATCH);
else
@ -2983,6 +3122,9 @@ radeon_atombios_encoder_dpms_scratch_regs(struct drm_encoder *encoder, bool on)
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
uint32_t bios_2_scratch;
if (ASIC_IS_DCE4(rdev))
return;
if (rdev->family >= CHIP_R600)
bios_2_scratch = RREG32(R600_BIOS_2_SCRATCH);
else

View File

@ -26,33 +26,81 @@
#include "radeon_reg.h"
#include "radeon.h"
unsigned int inline jiffies_to_msecs(const unsigned long j)
#define RADEON_BENCHMARK_COPY_BLIT 1
#define RADEON_BENCHMARK_COPY_DMA 0
#define RADEON_BENCHMARK_ITERATIONS 1024
#define RADEON_BENCHMARK_COMMON_MODES_N 17
static int radeon_benchmark_do_move(struct radeon_device *rdev, unsigned size,
uint64_t saddr, uint64_t daddr,
int flag, int n)
{
return (10 * j);
};
unsigned long start_jiffies;
unsigned long end_jiffies;
struct radeon_fence *fence = NULL;
int i, r;
start_jiffies = jiffies;
for (i = 0; i < n; i++) {
switch (flag) {
case RADEON_BENCHMARK_COPY_DMA:
r = radeon_copy_dma(rdev, saddr, daddr,
size / RADEON_GPU_PAGE_SIZE,
&fence);
break;
case RADEON_BENCHMARK_COPY_BLIT:
r = radeon_copy_blit(rdev, saddr, daddr,
size / RADEON_GPU_PAGE_SIZE,
&fence);
break;
default:
DRM_ERROR("Unknown copy method\n");
r = -EINVAL;
}
if (r)
goto exit_do_move;
r = radeon_fence_wait(fence, false);
if (r)
goto exit_do_move;
radeon_fence_unref(&fence);
}
end_jiffies = GetTimerTicks();
r = jiffies_to_msecs(end_jiffies - start_jiffies);
exit_do_move:
if (fence)
radeon_fence_unref(&fence);
return r;
}
void radeon_benchmark_move(struct radeon_device *rdev, unsigned bsize,
static void radeon_benchmark_log_results(int n, unsigned size,
unsigned int time,
unsigned sdomain, unsigned ddomain,
char *kind)
{
unsigned int throughput = (n * (size >> 10)) / time;
DRM_INFO("radeon: %s %u bo moves of %u kB from"
" %d to %d in %u ms, throughput: %u Mb/s or %u MB/s\n",
kind, n, size >> 10, sdomain, ddomain, time,
throughput * 8, throughput);
}
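To make the units in radeon_benchmark_log_results() concrete, here is a small worked example using hypothetical numbers (1024 iterations, 1 MiB per move, 2048 ms elapsed); the kB/ms value computed by the same formula is what the log line reports as MB/s:

#include <stdio.h>

int main(void)
{
    unsigned int n = 1024;              /* RADEON_BENCHMARK_ITERATIONS */
    unsigned int size = 1024 * 1024;    /* bytes moved per iteration (assumed) */
    unsigned int time_ms = 2048;        /* hypothetical elapsed time */

    /* (1024 * 1024 kB) / 2048 ms = 512 kB/ms */
    unsigned int throughput = (n * (size >> 10)) / time_ms;

    printf("throughput: %u Mb/s or %u MB/s\n", throughput * 8, throughput);
    return 0;
}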
static void radeon_benchmark_move(struct radeon_device *rdev, unsigned size,
unsigned sdomain, unsigned ddomain)
{
struct radeon_bo *dobj = NULL;
struct radeon_bo *sobj = NULL;
struct radeon_fence *fence = NULL;
uint64_t saddr, daddr;
unsigned long start_jiffies;
unsigned long end_jiffies;
unsigned long time;
unsigned i, n, size;
int r;
int r, n;
int time;
ENTER();
size = bsize;
n = 4; //1024;
dbgprintf("source domain %x\n", sdomain);
r = radeon_bo_create(rdev, size, PAGE_SIZE, true, sdomain, &sobj);
n = RADEON_BENCHMARK_ITERATIONS;
r = radeon_bo_create(rdev, size, PAGE_SIZE, true, sdomain, NULL, &sobj);
if (r) {
goto out_cleanup;
}
@ -64,10 +112,7 @@ void radeon_benchmark_move(struct radeon_device *rdev, unsigned bsize,
if (r) {
goto out_cleanup;
}
dbgprintf("destination domain %x\n", ddomain);
r = radeon_bo_create(rdev, size, PAGE_SIZE, true, ddomain, &dobj);
r = radeon_bo_create(rdev, size, PAGE_SIZE, true, ddomain, NULL, &dobj);
if (r) {
goto out_cleanup;
}
@ -82,75 +127,27 @@ void radeon_benchmark_move(struct radeon_device *rdev, unsigned bsize,
dbgprintf("done\n");
/* r100 doesn't have dma engine so skip the test */
if (rdev->asic->copy_dma) {
dbgprintf("copy dma\n");
start_jiffies = GetTimerTicks();
for (i = 0; i < n; i++) {
r = radeon_fence_create(rdev, &fence);
if (r) {
goto out_cleanup;
}
r = radeon_copy_dma(rdev, saddr, daddr,
size / RADEON_GPU_PAGE_SIZE, fence);
if (r) {
goto out_cleanup;
}
}
r = radeon_fence_wait(fence, false);
if (r) {
goto out_cleanup;
}
radeon_fence_unref(&fence);
end_jiffies = GetTimerTicks();
time = end_jiffies - start_jiffies;
time = jiffies_to_msecs(time);
if (time > 0) {
i = ((n * size) >> 10) / time;
printk(KERN_INFO "radeon: dma %u bo moves of %ukb from"
" %d to %d in %lums (%ukb/ms %ukb/s %uM/s)\n",
n, size >> 10,
sdomain, ddomain, time,
i, i * 1000, (i * 1000) / 1024);
}
/* also, VRAM-to-VRAM test doesn't make much sense for DMA */
/* skip it as well if domains are the same */
if ((rdev->asic->copy.dma) && (sdomain != ddomain)) {
time = radeon_benchmark_do_move(rdev, size, saddr, daddr,
RADEON_BENCHMARK_COPY_DMA, n);
if (time < 0)
goto out_cleanup;
if (time > 0)
radeon_benchmark_log_results(n, size, time,
sdomain, ddomain, "dma");
}
start_jiffies = GetTimerTicks();
for (i = 0; i < n; i++) {
r = radeon_fence_create(rdev, &fence);
if (r) {
goto out_cleanup;
}
r = radeon_copy_blit(rdev, saddr, daddr, size / RADEON_GPU_PAGE_SIZE, fence);
if (r) {
goto out_cleanup;
}
}
time = radeon_benchmark_do_move(rdev, size, saddr, daddr,
RADEON_BENCHMARK_COPY_BLIT, n);
if (time < 0)
goto out_cleanup;
if (time > 0)
radeon_benchmark_log_results(n, size, time,
sdomain, ddomain, "blit");
r = radeon_fence_wait(fence, false);
if (r) {
goto out_cleanup;
}
radeon_fence_unref(&fence);
end_jiffies = GetTimerTicks();
time = end_jiffies - start_jiffies;
time = jiffies_to_msecs(time);
if (time > 0) {
i = ((n * size) >> 10) / time;
printk(KERN_INFO "radeon: blit %u bo moves of %ukb from %d to %d"
" in %lums (%ukb/ms %ukb/s %uM/s)\n", n, size >> 10,
sdomain, ddomain, time, i, i * 1000, (i * 1000) / 1024);
}
out_cleanup:
dbgprintf("cleanup\n");
if (sobj) {
r = radeon_bo_reserve(sobj, false);
if (likely(r == 0)) {
@ -167,23 +164,95 @@ out_cleanup:
}
radeon_bo_unref(&dobj);
}
if (fence) {
radeon_fence_unref(&fence);
}
if (r) {
printk(KERN_WARNING "Error while benchmarking BO move.\n");
DRM_ERROR("Error while benchmarking BO move.\n");
}
LEAVE();
}
void radeon_benchmark(struct radeon_device *rdev)
void radeon_benchmark(struct radeon_device *rdev, int test_number)
{
radeon_benchmark_move(rdev, 4096*4096, RADEON_GEM_DOMAIN_GTT,
int i;
int common_modes[RADEON_BENCHMARK_COMMON_MODES_N] = {
640 * 480 * 4,
720 * 480 * 4,
800 * 600 * 4,
848 * 480 * 4,
1024 * 768 * 4,
1152 * 768 * 4,
1280 * 720 * 4,
1280 * 800 * 4,
1280 * 854 * 4,
1280 * 960 * 4,
1280 * 1024 * 4,
1440 * 900 * 4,
1400 * 1050 * 4,
1680 * 1050 * 4,
1600 * 1200 * 4,
1920 * 1080 * 4,
1920 * 1200 * 4
};
switch (test_number) {
case 1:
/* simple test, VRAM to GTT and GTT to VRAM */
radeon_benchmark_move(rdev, 1024*1024, RADEON_GEM_DOMAIN_GTT,
RADEON_GEM_DOMAIN_VRAM);
radeon_benchmark_move(rdev, 1024*1024, RADEON_GEM_DOMAIN_VRAM,
RADEON_GEM_DOMAIN_GTT);
break;
case 2:
/* simple test, VRAM to VRAM */
radeon_benchmark_move(rdev, 1024*1024, RADEON_GEM_DOMAIN_VRAM,
RADEON_GEM_DOMAIN_VRAM);
break;
case 3:
/* GTT to VRAM, buffer size sweep, powers of 2 */
for (i = 1; i <= 16384; i <<= 1)
radeon_benchmark_move(rdev, i * RADEON_GPU_PAGE_SIZE,
RADEON_GEM_DOMAIN_GTT,
RADEON_GEM_DOMAIN_VRAM);
break;
case 4:
/* VRAM to GTT, buffer size sweep, powers of 2 */
for (i = 1; i <= 16384; i <<= 1)
radeon_benchmark_move(rdev, i * RADEON_GPU_PAGE_SIZE,
RADEON_GEM_DOMAIN_VRAM,
RADEON_GEM_DOMAIN_GTT);
break;
case 5:
/* VRAM to VRAM, buffer size sweep, powers of 2 */
for (i = 1; i <= 16384; i <<= 1)
radeon_benchmark_move(rdev, i * RADEON_GPU_PAGE_SIZE,
RADEON_GEM_DOMAIN_VRAM,
RADEON_GEM_DOMAIN_VRAM);
break;
case 6:
/* GTT to VRAM, buffer size sweep, common modes */
for (i = 0; i < RADEON_BENCHMARK_COMMON_MODES_N; i++)
radeon_benchmark_move(rdev, common_modes[i],
RADEON_GEM_DOMAIN_GTT,
RADEON_GEM_DOMAIN_VRAM);
radeon_benchmark_move(rdev, 4096*4096, RADEON_GEM_DOMAIN_VRAM,
break;
case 7:
/* VRAM to GTT, buffer size sweep, common modes */
for (i = 0; i < RADEON_BENCHMARK_COMMON_MODES_N; i++)
radeon_benchmark_move(rdev, common_modes[i],
RADEON_GEM_DOMAIN_VRAM,
RADEON_GEM_DOMAIN_GTT);
radeon_benchmark_move(rdev, 4096*4096, RADEON_GEM_DOMAIN_VRAM,
break;
case 8:
/* VRAM to VRAM, buffer size sweep, common modes */
for (i = 0; i < RADEON_BENCHMARK_COMMON_MODES_N; i++)
radeon_benchmark_move(rdev, common_modes[i],
RADEON_GEM_DOMAIN_VRAM,
RADEON_GEM_DOMAIN_VRAM);
break;
default:
DRM_ERROR("Unknown benchmark\n");
}
}
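For reference, the power-of-two sweeps in tests 3-5 run from 1 to 16384 GPU pages; assuming RADEON_GPU_PAGE_SIZE is 4096 bytes (defined in radeon.h, not shown in this diff), that is 4 KiB to 64 MiB per move. A throwaway sketch that prints the swept sizes:

#include <stdio.h>

int main(void)
{
    const unsigned int gpu_page = 4096;   /* assumed RADEON_GPU_PAGE_SIZE */
    unsigned int i;

    /* same loop bounds as the benchmark sweeps above */
    for (i = 1; i <= 16384; i <<= 1)
        printf("%5u pages -> %8u KiB\n", i, (i * gpu_page) >> 10);
    return 0;
}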

View File

@ -25,7 +25,7 @@
* Alex Deucher
* Jerome Glisse
*/
#include "drmP.h"
#include <drm/drmP.h>
#include "radeon_reg.h"
#include "radeon.h"
#include "atom.h"
@ -99,16 +99,81 @@ static bool radeon_read_bios(struct radeon_device *rdev)
return true;
}
#ifdef CONFIG_ACPI
/* ATRM is used to get the BIOS on the discrete cards in
* dual-gpu systems.
*/
/* retrieve the ROM in 4k blocks */
#define ATRM_BIOS_PAGE 4096
/**
* radeon_atrm_call - fetch a chunk of the vbios
*
* @atrm_handle: acpi ATRM handle
* @bios: vbios image pointer
* @offset: offset of vbios image data to fetch
* @len: length of vbios image data to fetch
*
* Executes ATRM to fetch a chunk of the discrete
* vbios image on PX systems (all asics).
* Returns the length of the buffer fetched.
*/
static int radeon_atrm_call(acpi_handle atrm_handle, uint8_t *bios,
int offset, int len)
{
acpi_status status;
union acpi_object atrm_arg_elements[2], *obj;
struct acpi_object_list atrm_arg;
struct acpi_buffer buffer = { ACPI_ALLOCATE_BUFFER, NULL};
atrm_arg.count = 2;
atrm_arg.pointer = &atrm_arg_elements[0];
atrm_arg_elements[0].type = ACPI_TYPE_INTEGER;
atrm_arg_elements[0].integer.value = offset;
atrm_arg_elements[1].type = ACPI_TYPE_INTEGER;
atrm_arg_elements[1].integer.value = len;
status = acpi_evaluate_object(atrm_handle, NULL, &atrm_arg, &buffer);
if (ACPI_FAILURE(status)) {
printk("failed to evaluate ATRM got %s\n", acpi_format_exception(status));
return -ENODEV;
}
obj = (union acpi_object *)buffer.pointer;
memcpy(bios+offset, obj->buffer.pointer, obj->buffer.length);
len = obj->buffer.length;
kfree(buffer.pointer);
return len;
}
static bool radeon_atrm_get_bios(struct radeon_device *rdev)
{
int ret;
int size = 256 * 1024;
int i;
struct pci_dev *pdev = NULL;
acpi_handle dhandle, atrm_handle;
acpi_status status;
bool found = false;
if (!radeon_atrm_supported(rdev->pdev))
/* ATRM is for the discrete card only */
if (rdev->flags & RADEON_IS_IGP)
return false;
while ((pdev = pci_get_class(PCI_CLASS_DISPLAY_VGA << 8, pdev)) != NULL) {
dhandle = DEVICE_ACPI_HANDLE(&pdev->dev);
if (!dhandle)
continue;
status = acpi_get_handle(dhandle, "ATRM", &atrm_handle);
if (!ACPI_FAILURE(status)) {
found = true;
break;
}
}
if (!found)
return false;
rdev->bios = kmalloc(size, GFP_KERNEL);
@ -118,10 +183,11 @@ static bool radeon_atrm_get_bios(struct radeon_device *rdev)
}
for (i = 0; i < size / ATRM_BIOS_PAGE; i++) {
ret = radeon_atrm_get_bios_chunk(rdev->bios,
ret = radeon_atrm_call(atrm_handle,
rdev->bios,
(i * ATRM_BIOS_PAGE),
ATRM_BIOS_PAGE);
if (ret <= 0)
if (ret < ATRM_BIOS_PAGE)
break;
}
@ -131,6 +197,12 @@ static bool radeon_atrm_get_bios(struct radeon_device *rdev)
}
return true;
}
#else
static bool radeon_atrm_get_bios(struct radeon_device *rdev)
{
return false;
}
#endif
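A minimal sketch (not driver code) of the chunked-fetch pattern used by radeon_atrm_get_bios() above: keep requesting ATRM_BIOS_PAGE-sized pieces and stop at the first short piece. read_page_t and fake_read_page are hypothetical stand-ins for radeon_atrm_call():

#include <stdio.h>
#include <string.h>

#define ATRM_BIOS_PAGE 4096

typedef int (*read_page_t)(unsigned char *dst, int offset, int len);

static int fetch_rom(unsigned char *bios, int size, read_page_t read_page)
{
    int i, ret, total = 0;

    for (i = 0; i < size / ATRM_BIOS_PAGE; i++) {
        ret = read_page(bios, i * ATRM_BIOS_PAGE, ATRM_BIOS_PAGE);
        if (ret < 0)
            return ret;                 /* propagate errors */
        total += ret;
        if (ret < ATRM_BIOS_PAGE)
            break;                      /* short read marks the end of the image */
    }
    return total;
}

/* stub standing in for radeon_atrm_call(): pretend the image is 10000 bytes */
static int fake_read_page(unsigned char *dst, int offset, int len)
{
    int remaining = 10000 - offset;

    if (remaining <= 0)
        return 0;
    if (len > remaining)
        len = remaining;
    memset(dst + offset, 0xAA, len);
    return len;
}

int main(void)
{
    static unsigned char bios[256 * 1024];

    printf("fetched %d bytes\n", fetch_rom(bios, sizeof(bios), fake_read_page));
    return 0;
}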
static bool ni_read_disabled_bios(struct radeon_device *rdev)
{
@ -477,6 +549,61 @@ static bool radeon_read_disabled_bios(struct radeon_device *rdev)
return legacy_read_disabled_bios(rdev);
}
#ifdef CONFIG_ACPI
static bool radeon_acpi_vfct_bios(struct radeon_device *rdev)
{
bool ret = false;
struct acpi_table_header *hdr;
acpi_size tbl_size;
UEFI_ACPI_VFCT *vfct;
GOP_VBIOS_CONTENT *vbios;
VFCT_IMAGE_HEADER *vhdr;
if (!ACPI_SUCCESS(acpi_get_table_with_size("VFCT", 1, &hdr, &tbl_size)))
return false;
if (tbl_size < sizeof(UEFI_ACPI_VFCT)) {
DRM_ERROR("ACPI VFCT table present but broken (too short #1)\n");
goto out_unmap;
}
vfct = (UEFI_ACPI_VFCT *)hdr;
if (vfct->VBIOSImageOffset + sizeof(VFCT_IMAGE_HEADER) > tbl_size) {
DRM_ERROR("ACPI VFCT table present but broken (too short #2)\n");
goto out_unmap;
}
vbios = (GOP_VBIOS_CONTENT *)((char *)hdr + vfct->VBIOSImageOffset);
vhdr = &vbios->VbiosHeader;
DRM_INFO("ACPI VFCT contains a BIOS for %02x:%02x.%d %04x:%04x, size %d\n",
vhdr->PCIBus, vhdr->PCIDevice, vhdr->PCIFunction,
vhdr->VendorID, vhdr->DeviceID, vhdr->ImageLength);
if (vhdr->PCIBus != rdev->pdev->bus->number ||
vhdr->PCIDevice != PCI_SLOT(rdev->pdev->devfn) ||
vhdr->PCIFunction != PCI_FUNC(rdev->pdev->devfn) ||
vhdr->VendorID != rdev->pdev->vendor ||
vhdr->DeviceID != rdev->pdev->device) {
DRM_INFO("ACPI VFCT table is not for this card\n");
goto out_unmap;
};
if (vfct->VBIOSImageOffset + sizeof(VFCT_IMAGE_HEADER) + vhdr->ImageLength > tbl_size) {
DRM_ERROR("ACPI VFCT image truncated\n");
goto out_unmap;
}
rdev->bios = kmemdup(&vbios->VbiosContent, vhdr->ImageLength, GFP_KERNEL);
ret = !!rdev->bios;
out_unmap:
return ret;
}
#else
static inline bool radeon_acpi_vfct_bios(struct radeon_device *rdev)
{
return false;
}
#endif
bool radeon_get_bios(struct radeon_device *rdev)
{
@ -484,10 +611,12 @@ bool radeon_get_bios(struct radeon_device *rdev)
uint16_t tmp;
r = radeon_atrm_get_bios(rdev);
if (r == false)
r = radeon_acpi_vfct_bios(rdev);
if (r == false)
r = igp_read_bios_from_vram(rdev);
if (r == false)
r = radeon_read_bios(rdev);
if (r == false)
r = radeon_read_bios(rdev);
if (r == false) {
r = radeon_read_disabled_bios(rdev);
}

View File

@ -0,0 +1,44 @@
/*
* Copyright 2009 Advanced Micro Devices, Inc.
* Copyright 2009 Red Hat Inc.
* Copyright 2012 Alcatel-Lucent, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER(S) AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
*/
#ifndef __RADEON_BLIT_COMMON_H__
#define DI_PT_RECTLIST 0x11
#define DI_INDEX_SIZE_16_BIT 0x0
#define DI_SRC_SEL_AUTO_INDEX 0x2
#define FMT_8 0x1
#define FMT_5_6_5 0x8
#define FMT_8_8_8_8 0x1a
#define COLOR_8 0x1
#define COLOR_5_6_5 0x8
#define COLOR_8_8_8_8 0x1a
#define RECT_UNIT_H 32
#define RECT_UNIT_W (RADEON_GPU_PAGE_SIZE / 4 / RECT_UNIT_H)
#define __RADEON_BLIT_COMMON_H__
#endif
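The RECT_UNIT_* values in this new header are easier to read with the arithmetic spelled out: assuming RADEON_GPU_PAGE_SIZE is 4096 bytes (defined in radeon.h, not in this header), RECT_UNIT_W works out to 32, so one RECT_UNIT_W x RECT_UNIT_H rect at 4 bytes per pixel covers exactly one GPU page. A throwaway check:

#include <assert.h>

#define RADEON_GPU_PAGE_SIZE 4096   /* assumption: matches radeon.h */
#define RECT_UNIT_H 32
#define RECT_UNIT_W (RADEON_GPU_PAGE_SIZE / 4 / RECT_UNIT_H)

int main(void)
{
    /* 4096 / 4 / 32 = 32 pixels wide */
    assert(RECT_UNIT_W == 32);
    /* 32 * 32 pixels * 4 bytes/pixel = one 4 KiB GPU page */
    assert(RECT_UNIT_W * RECT_UNIT_H * 4 == RADEON_GPU_PAGE_SIZE);
    return 0;
}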

View File

@ -25,8 +25,8 @@
* Alex Deucher
* Jerome Glisse
*/
#include "drmP.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon_reg.h"
#include "radeon.h"
#include "atom.h"
@ -219,6 +219,9 @@ void radeon_get_clock_info(struct drm_device *dev)
} else {
DRM_INFO("Using generic clock info\n");
/* may need to be per card */
rdev->clock.max_pixel_clock = 35000;
if (rdev->flags & RADEON_IS_IGP) {
p1pll->reference_freq = 1432;
p2pll->reference_freq = 1432;
@ -331,7 +334,7 @@ void radeon_get_clock_info(struct drm_device *dev)
if (!rdev->clock.default_sclk)
rdev->clock.default_sclk = radeon_get_engine_clock(rdev);
if ((!rdev->clock.default_mclk) && rdev->asic->get_memory_clock)
if ((!rdev->clock.default_mclk) && rdev->asic->pm.get_memory_clock)
rdev->clock.default_mclk = radeon_get_memory_clock(rdev);
rdev->pm.current_sclk = rdev->clock.default_sclk;
@ -630,7 +633,7 @@ void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable)
tmp &= ~(R300_SCLK_FORCE_VAP);
tmp |= RADEON_SCLK_FORCE_CP;
WREG32_PLL(RADEON_SCLK_CNTL, tmp);
udelay(15000);
mdelay(15);
tmp = RREG32_PLL(R300_SCLK_CNTL2);
tmp &= ~(R300_SCLK_FORCE_TCL |
@ -648,12 +651,12 @@ void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable)
tmp |= (RADEON_ENGIN_DYNCLK_MODE |
(0x01 << RADEON_ACTIVE_HILO_LAT_SHIFT));
WREG32_PLL(RADEON_CLK_PWRMGT_CNTL, tmp);
udelay(15000);
mdelay(15);
tmp = RREG32_PLL(RADEON_CLK_PIN_CNTL);
tmp |= RADEON_SCLK_DYN_START_CNTL;
WREG32_PLL(RADEON_CLK_PIN_CNTL, tmp);
udelay(15000);
mdelay(15);
/* When DRI is enabled, setting DYN_STOP_LAT to zero can cause some R200
to lockup randomly, leave them as set by BIOS.
@ -693,7 +696,7 @@ void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable)
tmp |= RADEON_SCLK_MORE_FORCEON;
}
WREG32_PLL(RADEON_SCLK_MORE_CNTL, tmp);
udelay(15000);
mdelay(15);
}
/* RV200::A11 A12, RV250::A11 A12 */
@ -706,7 +709,7 @@ void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable)
tmp |= RADEON_TCL_BYPASS_DISABLE;
WREG32_PLL(RADEON_PLL_PWRMGT_CNTL, tmp);
}
udelay(15000);
mdelay(15);
/*enable dynamic mode for display clocks (PIXCLK and PIX2CLK) */
tmp = RREG32_PLL(RADEON_PIXCLKS_CNTL);
@ -719,14 +722,14 @@ void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable)
RADEON_PIXCLK_TMDS_ALWAYS_ONb);
WREG32_PLL(RADEON_PIXCLKS_CNTL, tmp);
udelay(15000);
mdelay(15);
tmp = RREG32_PLL(RADEON_VCLK_ECP_CNTL);
tmp |= (RADEON_PIXCLK_ALWAYS_ONb |
RADEON_PIXCLK_DAC_ALWAYS_ONb);
WREG32_PLL(RADEON_VCLK_ECP_CNTL, tmp);
udelay(15000);
mdelay(15);
}
} else {
/* Turn everything OFF (ForceON to everything) */
@ -858,7 +861,7 @@ void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable)
}
WREG32_PLL(RADEON_SCLK_CNTL, tmp);
udelay(16000);
mdelay(16);
if ((rdev->family == CHIP_R300) ||
(rdev->family == CHIP_R350)) {
@ -867,7 +870,7 @@ void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable)
R300_SCLK_FORCE_GA |
R300_SCLK_FORCE_CBA);
WREG32_PLL(R300_SCLK_CNTL2, tmp);
udelay(16000);
mdelay(16);
}
if (rdev->flags & RADEON_IS_IGP) {
@ -875,7 +878,7 @@ void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable)
tmp &= ~(RADEON_FORCEON_MCLKA |
RADEON_FORCEON_YCLKA);
WREG32_PLL(RADEON_MCLK_CNTL, tmp);
udelay(16000);
mdelay(16);
}
if ((rdev->family == CHIP_RV200) ||
@ -884,7 +887,7 @@ void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable)
tmp = RREG32_PLL(RADEON_SCLK_MORE_CNTL);
tmp |= RADEON_SCLK_MORE_FORCEON;
WREG32_PLL(RADEON_SCLK_MORE_CNTL, tmp);
udelay(16000);
mdelay(16);
}
tmp = RREG32_PLL(RADEON_PIXCLKS_CNTL);
@ -897,7 +900,7 @@ void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable)
RADEON_PIXCLK_TMDS_ALWAYS_ONb);
WREG32_PLL(RADEON_PIXCLKS_CNTL, tmp);
udelay(16000);
mdelay(16);
tmp = RREG32_PLL(RADEON_VCLK_ECP_CNTL);
tmp &= ~(RADEON_PIXCLK_ALWAYS_ONb |

View File

@ -24,8 +24,8 @@
* Authors: Dave Airlie
* Alex Deucher
*/
#include "drmP.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "atom.h"
@ -620,8 +620,8 @@ static struct radeon_i2c_bus_rec combios_setup_i2c_bus(struct radeon_device *rde
i2c.y_data_mask = 0x80;
} else {
/* default masks for ddc pads */
i2c.mask_clk_mask = RADEON_GPIO_EN_1;
i2c.mask_data_mask = RADEON_GPIO_EN_0;
i2c.mask_clk_mask = RADEON_GPIO_MASK_1;
i2c.mask_data_mask = RADEON_GPIO_MASK_0;
i2c.a_clk_mask = RADEON_GPIO_A_1;
i2c.a_data_mask = RADEON_GPIO_A_0;
i2c.en_clk_mask = RADEON_GPIO_EN_1;
@ -719,6 +719,34 @@ static struct radeon_i2c_bus_rec combios_setup_i2c_bus(struct radeon_device *rde
return i2c;
}
static struct radeon_i2c_bus_rec radeon_combios_get_i2c_info_from_table(struct radeon_device *rdev)
{
struct drm_device *dev = rdev->ddev;
struct radeon_i2c_bus_rec i2c;
u16 offset;
u8 id, blocks, clk, data;
int i;
i2c.valid = false;
offset = combios_get_table_offset(dev, COMBIOS_I2C_INFO_TABLE);
if (offset) {
blocks = RBIOS8(offset + 2);
for (i = 0; i < blocks; i++) {
id = RBIOS8(offset + 3 + (i * 5) + 0);
if (id == 136) {
clk = RBIOS8(offset + 3 + (i * 5) + 3);
data = RBIOS8(offset + 3 + (i * 5) + 4);
/* gpiopad */
i2c = combios_setup_i2c_bus(rdev, DDC_MONID,
(1 << clk), (1 << data));
break;
}
}
}
return i2c;
}
void radeon_combios_i2c_init(struct radeon_device *rdev)
{
struct drm_device *dev = rdev->ddev;
@ -755,30 +783,14 @@ void radeon_combios_i2c_init(struct radeon_device *rdev)
} else if (rdev->family == CHIP_RS300 ||
rdev->family == CHIP_RS400 ||
rdev->family == CHIP_RS480) {
u16 offset;
u8 id, blocks, clk, data;
int i;
/* 0x68 */
i2c = combios_setup_i2c_bus(rdev, DDC_CRT2, 0, 0);
rdev->i2c_bus[3] = radeon_i2c_create(dev, &i2c, "MONID");
offset = combios_get_table_offset(dev, COMBIOS_I2C_INFO_TABLE);
if (offset) {
blocks = RBIOS8(offset + 2);
for (i = 0; i < blocks; i++) {
id = RBIOS8(offset + 3 + (i * 5) + 0);
if (id == 136) {
clk = RBIOS8(offset + 3 + (i * 5) + 3);
data = RBIOS8(offset + 3 + (i * 5) + 4);
/* gpiopad */
i2c = combios_setup_i2c_bus(rdev, DDC_MONID,
(1 << clk), (1 << data));
i2c = radeon_combios_get_i2c_info_from_table(rdev);
if (i2c.valid)
rdev->i2c_bus[4] = radeon_i2c_create(dev, &i2c, "GPIOPAD_MASK");
break;
}
}
}
} else if ((rdev->family == CHIP_R200) ||
(rdev->family >= CHIP_R300)) {
/* 0x68 */
@ -1561,6 +1573,11 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
(rdev->pdev->subsystem_device == 0x4150)) {
/* Mac G5 tower 9600 */
rdev->mode_info.connector_table = CT_MAC_G5_9600;
} else if ((rdev->pdev->device == 0x4c66) &&
(rdev->pdev->subsystem_vendor == 0x1002) &&
(rdev->pdev->subsystem_device == 0x4c66)) {
/* SAM440ep RV250 embedded board */
rdev->mode_info.connector_table = CT_SAM440EP;
} else
#endif /* CONFIG_PPC_PMAC */
#ifdef CONFIG_PPC64
@ -2134,6 +2151,67 @@ bool radeon_get_legacy_connector_info_from_table(struct drm_device *dev)
CONNECTOR_OBJECT_ID_SVIDEO,
&hpd);
break;
case CT_SAM440EP:
DRM_INFO("Connector Table: %d (SAM440ep embedded board)\n",
rdev->mode_info.connector_table);
/* LVDS */
ddc_i2c = combios_setup_i2c_bus(rdev, DDC_NONE_DETECTED, 0, 0);
hpd.hpd = RADEON_HPD_NONE;
radeon_add_legacy_encoder(dev,
radeon_get_encoder_enum(dev,
ATOM_DEVICE_LCD1_SUPPORT,
0),
ATOM_DEVICE_LCD1_SUPPORT);
radeon_add_legacy_connector(dev, 0, ATOM_DEVICE_LCD1_SUPPORT,
DRM_MODE_CONNECTOR_LVDS, &ddc_i2c,
CONNECTOR_OBJECT_ID_LVDS,
&hpd);
/* DVI-I - secondary dac, int tmds */
ddc_i2c = combios_setup_i2c_bus(rdev, DDC_DVI, 0, 0);
hpd.hpd = RADEON_HPD_1; /* ??? */
radeon_add_legacy_encoder(dev,
radeon_get_encoder_enum(dev,
ATOM_DEVICE_DFP1_SUPPORT,
0),
ATOM_DEVICE_DFP1_SUPPORT);
radeon_add_legacy_encoder(dev,
radeon_get_encoder_enum(dev,
ATOM_DEVICE_CRT2_SUPPORT,
2),
ATOM_DEVICE_CRT2_SUPPORT);
radeon_add_legacy_connector(dev, 1,
ATOM_DEVICE_DFP1_SUPPORT |
ATOM_DEVICE_CRT2_SUPPORT,
DRM_MODE_CONNECTOR_DVII, &ddc_i2c,
CONNECTOR_OBJECT_ID_SINGLE_LINK_DVI_I,
&hpd);
/* VGA - primary dac */
ddc_i2c = combios_setup_i2c_bus(rdev, DDC_VGA, 0, 0);
hpd.hpd = RADEON_HPD_NONE;
radeon_add_legacy_encoder(dev,
radeon_get_encoder_enum(dev,
ATOM_DEVICE_CRT1_SUPPORT,
1),
ATOM_DEVICE_CRT1_SUPPORT);
radeon_add_legacy_connector(dev, 2,
ATOM_DEVICE_CRT1_SUPPORT,
DRM_MODE_CONNECTOR_VGA, &ddc_i2c,
CONNECTOR_OBJECT_ID_VGA,
&hpd);
/* TV - TV DAC */
ddc_i2c.valid = false;
hpd.hpd = RADEON_HPD_NONE;
radeon_add_legacy_encoder(dev,
radeon_get_encoder_enum(dev,
ATOM_DEVICE_TV1_SUPPORT,
2),
ATOM_DEVICE_TV1_SUPPORT);
radeon_add_legacy_connector(dev, 3, ATOM_DEVICE_TV1_SUPPORT,
DRM_MODE_CONNECTOR_SVIDEO,
&ddc_i2c,
CONNECTOR_OBJECT_ID_SVIDEO,
&hpd);
break;
default:
DRM_INFO("Connector table: %d (invalid)\n",
rdev->mode_info.connector_table);
@ -2255,6 +2333,9 @@ bool radeon_get_legacy_connector_info_from_bios(struct drm_device *dev)
connector = (tmp >> 12) & 0xf;
ddc_type = (tmp >> 8) & 0xf;
if (ddc_type == 5)
ddc_i2c = radeon_combios_get_i2c_info_from_table(rdev);
else
ddc_i2c = combios_setup_i2c_bus(rdev, ddc_type, 0, 0);
switch (connector) {
@ -2563,14 +2644,17 @@ void radeon_combios_get_power_modes(struct radeon_device *rdev)
/* allocate 2 power states */
rdev->pm.power_state = kzalloc(sizeof(struct radeon_power_state) * 2, GFP_KERNEL);
if (!rdev->pm.power_state) {
rdev->pm.default_power_state_index = state_index;
rdev->pm.num_power_states = 0;
rdev->pm.current_power_state_index = rdev->pm.default_power_state_index;
rdev->pm.current_clock_mode_index = 0;
return;
}
if (rdev->pm.power_state) {
/* allocate 1 clock mode per state */
rdev->pm.power_state[0].clock_info =
kzalloc(sizeof(struct radeon_pm_clock_info) * 1, GFP_KERNEL);
rdev->pm.power_state[1].clock_info =
kzalloc(sizeof(struct radeon_pm_clock_info) * 1, GFP_KERNEL);
if (!rdev->pm.power_state[0].clock_info ||
!rdev->pm.power_state[1].clock_info)
goto pm_failed;
} else
goto pm_failed;
/* check for a thermal chip */
offset = combios_get_table_offset(dev, COMBIOS_OVERDRIVE_INFO_TABLE);
@ -2617,6 +2701,25 @@ void radeon_combios_get_power_modes(struct radeon_device *rdev)
i2c_new_device(&rdev->pm.i2c_bus->adapter, &info);
}
}
} else {
/* boards with a thermal chip, but no overdrive table */
/* Asus 9600xt has an f75375 on the monid bus */
if ((dev->pdev->device == 0x4152) &&
(dev->pdev->subsystem_vendor == 0x1043) &&
(dev->pdev->subsystem_device == 0xc002)) {
i2c_bus = combios_setup_i2c_bus(rdev, DDC_MONID, 0, 0);
rdev->pm.i2c_bus = radeon_i2c_lookup(rdev, &i2c_bus);
if (rdev->pm.i2c_bus) {
struct i2c_board_info info = { };
const char *name = "f75375";
info.addr = 0x28;
strlcpy(info.type, name, sizeof(info.type));
i2c_new_device(&rdev->pm.i2c_bus->adapter, &info);
DRM_INFO("Possible %s thermal controller at 0x%02x\n",
name, info.addr);
}
}
}
if (rdev->flags & RADEON_IS_MOBILITY) {
@ -2714,6 +2817,14 @@ default_mode:
rdev->pm.default_power_state_index = state_index;
rdev->pm.num_power_states = state_index + 1;
rdev->pm.current_power_state_index = rdev->pm.default_power_state_index;
rdev->pm.current_clock_mode_index = 0;
return;
pm_failed:
rdev->pm.default_power_state_index = state_index;
rdev->pm.num_power_states = 0;
rdev->pm.current_power_state_index = rdev->pm.default_power_state_index;
rdev->pm.current_clock_mode_index = 0;
}
@ -2815,7 +2926,7 @@ bool radeon_combios_external_tmds_setup(struct drm_encoder *encoder)
case 4:
val = RBIOS16(index);
index += 2;
udelay(val * 1000);
mdelay(val);
break;
case 6:
slave_addr = id & 0xff;
@ -3014,7 +3125,7 @@ static void combios_parse_pll_table(struct drm_device *dev, uint16_t offset)
udelay(150);
break;
case 2:
udelay(1000);
mdelay(1);
break;
case 3:
while (tmp--) {
@ -3045,13 +3156,13 @@ static void combios_parse_pll_table(struct drm_device *dev, uint16_t offset)
/*mclk_cntl |= 0x00001111;*//* ??? */
WREG32_PLL(RADEON_MCLK_CNTL,
mclk_cntl);
udelay(10000);
mdelay(10);
#endif
WREG32_PLL
(RADEON_CLK_PWRMGT_CNTL,
tmp &
~RADEON_CG_NO1_DEBUG_0);
udelay(10000);
mdelay(10);
}
break;
default:
@ -3208,15 +3319,6 @@ static void combios_write_ram_size(struct drm_device *dev)
WREG32(RADEON_CONFIG_MEMSIZE, mem_size);
}
void radeon_combios_dyn_clk_setup(struct drm_device *dev, int enable)
{
uint16_t dyn_clk_info =
combios_get_table_offset(dev, COMBIOS_DYN_CLK_1_TABLE);
if (dyn_clk_info)
combios_parse_pll_table(dev, dyn_clk_info);
}
void radeon_combios_asic_init(struct drm_device *dev)
{
struct radeon_device *rdev = dev->dev_private;
@ -3279,6 +3381,14 @@ void radeon_combios_asic_init(struct drm_device *dev)
rdev->pdev->subsystem_device == 0x30a4)
return;
/* quirk for rs4xx Compaq Presario V5245EU laptop to make it resume
* - it hangs on resume inside the dynclk 1 table.
*/
if (rdev->family == CHIP_RS480 &&
rdev->pdev->subsystem_vendor == 0x103c &&
rdev->pdev->subsystem_device == 0x30ae)
return;
/* DYN CLK 1 */
table = combios_get_table_offset(dev, COMBIOS_DYN_CLK_1_TABLE);
if (table)


@ -23,11 +23,11 @@
* Authors: Dave Airlie
* Alex Deucher
*/
#include "drmP.h"
#include "drm_edid.h"
#include "drm_crtc_helper.h"
#include "drm_fb_helper.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/drm_edid.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "atom.h"
@ -40,12 +40,6 @@ radeon_atombios_connected_scratch_regs(struct drm_connector *connector,
struct drm_encoder *encoder,
bool connected);
extern void
radeon_legacy_backlight_init(struct radeon_encoder *radeon_encoder,
struct drm_connector *drm_connector);
bool radeon_connector_encoder_is_dp_bridge(struct drm_connector *connector);
void radeon_connector_hotplug(struct drm_connector *connector)
{
struct drm_device *dev = connector->dev;
@ -60,19 +54,40 @@ void radeon_connector_hotplug(struct drm_connector *connector)
radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
/* powering up/down the eDP panel generates hpd events which
* can interfere with modesetting.
*/
if (connector->connector_type == DRM_MODE_CONNECTOR_eDP)
/* if the connector is already off, don't turn it back on */
if (connector->dpms != DRM_MODE_DPMS_ON)
return;
/* pre-r600 did not always have the hpd pins mapped accurately to connectors */
if (rdev->family >= CHIP_R600) {
if (radeon_hpd_sense(rdev, radeon_connector->hpd.hpd))
drm_helper_connector_dpms(connector, DRM_MODE_DPMS_ON);
else
/* just deal with DP (not eDP) here. */
if (connector->connector_type == DRM_MODE_CONNECTOR_DisplayPort) {
struct radeon_connector_atom_dig *dig_connector =
radeon_connector->con_priv;
/* if existing sink type was not DP no need to retrain */
if (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT)
return;
/* first get sink type as it may be reset after (un)plug */
dig_connector->dp_sink_type = radeon_dp_getsinktype(radeon_connector);
/* don't do anything if sink is not display port, i.e.,
* passive dp->(dvi|hdmi) adaptor
*/
if (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) {
int saved_dpms = connector->dpms;
/* Only turn off the display if it's physically disconnected */
if (!radeon_hpd_sense(rdev, radeon_connector->hpd.hpd)) {
drm_helper_connector_dpms(connector, DRM_MODE_DPMS_OFF);
} else if (radeon_dp_needs_link_train(radeon_connector)) {
/* set it to OFF so that drm_helper_connector_dpms()
* won't return immediately since the current state
* is ON at this point.
*/
connector->dpms = DRM_MODE_DPMS_OFF;
drm_helper_connector_dpms(connector, DRM_MODE_DPMS_ON);
}
connector->dpms = saved_dpms;
}
}
}
static void radeon_property_change_mode(struct drm_encoder *encoder)
@ -84,6 +99,62 @@ static void radeon_property_change_mode(struct drm_encoder *encoder)
crtc->x, crtc->y, crtc->fb);
}
}
int radeon_get_monitor_bpc(struct drm_connector *connector)
{
struct drm_device *dev = connector->dev;
struct radeon_device *rdev = dev->dev_private;
struct radeon_connector *radeon_connector = to_radeon_connector(connector);
struct radeon_connector_atom_dig *dig_connector;
int bpc = 8;
switch (connector->connector_type) {
case DRM_MODE_CONNECTOR_DVII:
case DRM_MODE_CONNECTOR_HDMIB:
if (radeon_connector->use_digital) {
if (drm_detect_hdmi_monitor(radeon_connector->edid)) {
if (connector->display_info.bpc)
bpc = connector->display_info.bpc;
}
}
break;
case DRM_MODE_CONNECTOR_DVID:
case DRM_MODE_CONNECTOR_HDMIA:
if (drm_detect_hdmi_monitor(radeon_connector->edid)) {
if (connector->display_info.bpc)
bpc = connector->display_info.bpc;
}
break;
case DRM_MODE_CONNECTOR_DisplayPort:
dig_connector = radeon_connector->con_priv;
if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
(dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP) ||
drm_detect_hdmi_monitor(radeon_connector->edid)) {
if (connector->display_info.bpc)
bpc = connector->display_info.bpc;
}
break;
case DRM_MODE_CONNECTOR_eDP:
case DRM_MODE_CONNECTOR_LVDS:
if (connector->display_info.bpc)
bpc = connector->display_info.bpc;
else if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
struct drm_connector_helper_funcs *connector_funcs =
connector->helper_private;
struct drm_encoder *encoder = connector_funcs->best_encoder(connector);
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
if (dig->lcd_misc & ATOM_PANEL_MISC_V13_6BIT_PER_COLOR)
bpc = 6;
else if (dig->lcd_misc & ATOM_PANEL_MISC_V13_8BIT_PER_COLOR)
bpc = 8;
}
break;
}
return bpc;
}
static void
radeon_connector_update_scratch_regs(struct drm_connector *connector, enum drm_connector_status status)
{
@ -123,7 +194,7 @@ radeon_connector_update_scratch_regs(struct drm_connector *connector, enum drm_c
}
}
struct drm_encoder *radeon_find_encoder(struct drm_connector *connector, int encoder_type)
static struct drm_encoder *radeon_find_encoder(struct drm_connector *connector, int encoder_type)
{
struct drm_mode_object *obj;
struct drm_encoder *encoder;
@ -144,7 +215,7 @@ struct drm_encoder *radeon_find_encoder(struct drm_connector *connector, int enc
return NULL;
}
struct drm_encoder *radeon_best_single_encoder(struct drm_connector *connector)
static struct drm_encoder *radeon_best_single_encoder(struct drm_connector *connector)
{
int enc_id = connector->encoder_ids[0];
struct drm_mode_object *obj;
@ -295,7 +366,7 @@ static void radeon_add_common_modes(struct drm_encoder *encoder, struct drm_conn
}
}
int radeon_connector_set_property(struct drm_connector *connector, struct drm_property *property,
static int radeon_connector_set_property(struct drm_connector *connector, struct drm_property *property,
uint64_t val)
{
struct drm_device *dev = connector->dev;
@ -430,55 +501,24 @@ int radeon_connector_set_property(struct drm_connector *connector, struct drm_pr
return 0;
}
/*
* Some integrated ATI Radeon chipset implementations (e. g.
* Asus M2A-VM HDMI) may indicate the availability of a DDC,
* even when there's no monitor connected. For these connectors
* following DDC probe extension will be applied: check also for the
* availability of EDID with at least a correct EDID header. Only then,
* DDC is assumed to be available. This prevents drm_get_edid() and
* drm_edid_block_valid() from periodically dumping data and kernel
* errors into the logs and onto the terminal.
*/
static bool radeon_connector_needs_extended_probe(struct radeon_device *dev,
uint32_t supported_device,
int connector_type)
{
/* Asus M2A-VM HDMI board sends data to i2c bus even,
* if HDMI add-on card is not plugged in or HDMI is disabled in
* BIOS. Valid DDC can only be assumed, if also a valid EDID header
* can be retrieved via i2c bus during DDC probe */
if ((dev->pdev->device == 0x791e) &&
(dev->pdev->subsystem_vendor == 0x1043) &&
(dev->pdev->subsystem_device == 0x826d)) {
if ((connector_type == DRM_MODE_CONNECTOR_HDMIA) &&
(supported_device == ATOM_DEVICE_DFP2_SUPPORT))
return true;
}
/* ECS A740GM-M with ATI RADEON 2100 sends data to i2c bus
* for a DVI connector that is not implemented */
if ((dev->pdev->device == 0x796e) &&
(dev->pdev->subsystem_vendor == 0x1019) &&
(dev->pdev->subsystem_device == 0x2615)) {
if ((connector_type == DRM_MODE_CONNECTOR_DVID) &&
(supported_device == ATOM_DEVICE_DFP2_SUPPORT))
return true;
}
/* Default: no EDID header probe required for DDC probing */
return false;
}
static void radeon_fixup_lvds_native_mode(struct drm_encoder *encoder,
struct drm_connector *connector)
{
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct drm_display_mode *native_mode = &radeon_encoder->native_mode;
struct drm_display_mode *t, *mode;
/* If the EDID preferred mode doesn't match the native mode, use it */
list_for_each_entry_safe(mode, t, &connector->probed_modes, head) {
if (mode->type & DRM_MODE_TYPE_PREFERRED) {
if (mode->hdisplay != native_mode->hdisplay ||
mode->vdisplay != native_mode->vdisplay)
memcpy(native_mode, mode, sizeof(*mode));
}
}
/* Try to get native mode details from EDID if necessary */
if (!native_mode->clock) {
struct drm_display_mode *t, *mode;
list_for_each_entry_safe(mode, t, &connector->probed_modes, head) {
if (mode->hdisplay == native_mode->hdisplay &&
mode->vdisplay == native_mode->vdisplay) {
@ -489,6 +529,7 @@ static void radeon_fixup_lvds_native_mode(struct drm_encoder *encoder,
}
}
}
if (!native_mode->clock) {
DRM_DEBUG_KMS("No LVDS native mode details, disabling RMX\n");
radeon_encoder->rmx_type = RMX_OFF;
@ -605,7 +646,7 @@ static void radeon_connector_destroy(struct drm_connector *connector)
if (radeon_connector->edid)
kfree(radeon_connector->edid);
kfree(radeon_connector->con_priv);
drm_sysfs_connector_remove(connector);
// drm_sysfs_connector_remove(connector);
drm_connector_cleanup(connector);
kfree(connector);
}
@ -646,13 +687,13 @@ static int radeon_lvds_set_property(struct drm_connector *connector,
}
struct drm_connector_helper_funcs radeon_lvds_connector_helper_funcs = {
static const struct drm_connector_helper_funcs radeon_lvds_connector_helper_funcs = {
.get_modes = radeon_lvds_get_modes,
.mode_valid = radeon_lvds_mode_valid,
.best_encoder = radeon_best_single_encoder,
};
struct drm_connector_funcs radeon_lvds_connector_funcs = {
static const struct drm_connector_funcs radeon_lvds_connector_funcs = {
.dpms = drm_helper_connector_dpms,
.detect = radeon_lvds_detect,
.fill_modes = drm_helper_probe_single_connector_modes,
@ -700,9 +741,9 @@ radeon_vga_detect(struct drm_connector *connector, bool force)
ret = connector_status_disconnected;
if (radeon_connector->ddc_bus)
dret = radeon_ddc_probe(radeon_connector,
radeon_connector->requires_extended_probe);
dret = radeon_ddc_probe(radeon_connector);
if (dret) {
radeon_connector->detected_by_load = false;
if (radeon_connector->edid) {
kfree(radeon_connector->edid);
radeon_connector->edid = NULL;
@ -729,12 +770,21 @@ radeon_vga_detect(struct drm_connector *connector, bool force)
} else {
/* if we aren't forcing don't do destructive polling */
if (!force)
if (!force) {
/* only return the previous status if we last
* detected a monitor via load.
*/
if (radeon_connector->detected_by_load)
return connector->status;
else
return ret;
}
if (radeon_connector->dac_load_detect && encoder) {
encoder_funcs = encoder->helper_private;
ret = encoder_funcs->detect(encoder, connector);
if (ret != connector_status_disconnected)
radeon_connector->detected_by_load = true;
}
}
@ -755,13 +805,13 @@ radeon_vga_detect(struct drm_connector *connector, bool force)
return ret;
}
struct drm_connector_helper_funcs radeon_vga_connector_helper_funcs = {
static const struct drm_connector_helper_funcs radeon_vga_connector_helper_funcs = {
.get_modes = radeon_vga_get_modes,
.mode_valid = radeon_vga_mode_valid,
.best_encoder = radeon_best_single_encoder,
};
struct drm_connector_funcs radeon_vga_connector_funcs = {
static const struct drm_connector_funcs radeon_vga_connector_funcs = {
.dpms = drm_helper_connector_dpms,
.detect = radeon_vga_detect,
.fill_modes = drm_helper_probe_single_connector_modes,
@ -825,13 +875,13 @@ radeon_tv_detect(struct drm_connector *connector, bool force)
return ret;
}
struct drm_connector_helper_funcs radeon_tv_connector_helper_funcs = {
static const struct drm_connector_helper_funcs radeon_tv_connector_helper_funcs = {
.get_modes = radeon_tv_get_modes,
.mode_valid = radeon_tv_mode_valid,
.best_encoder = radeon_best_single_encoder,
};
struct drm_connector_funcs radeon_tv_connector_funcs = {
static const struct drm_connector_funcs radeon_tv_connector_funcs = {
.dpms = drm_helper_connector_dpms,
.detect = radeon_tv_detect,
.fill_modes = drm_helper_probe_single_connector_modes,
@ -848,6 +898,27 @@ static int radeon_dvi_get_modes(struct drm_connector *connector)
return ret;
}
static bool radeon_check_hpd_status_unchanged(struct drm_connector *connector)
{
struct drm_device *dev = connector->dev;
struct radeon_device *rdev = dev->dev_private;
struct radeon_connector *radeon_connector = to_radeon_connector(connector);
enum drm_connector_status status;
/* We only trust HPD on R600 and newer ASICS. */
if (rdev->family >= CHIP_R600
&& radeon_connector->hpd.hpd != RADEON_HPD_NONE) {
if (radeon_hpd_sense(rdev, radeon_connector->hpd.hpd))
status = connector_status_connected;
else
status = connector_status_disconnected;
if (connector->status == status)
return true;
}
return false;
}
/*
* DVI is complicated
* Do a DDC probe, if DDC probe passes, get the full EDID so
@ -872,10 +943,13 @@ radeon_dvi_detect(struct drm_connector *connector, bool force)
enum drm_connector_status ret = connector_status_disconnected;
bool dret = false;
if (!force && radeon_check_hpd_status_unchanged(connector))
return connector->status;
if (radeon_connector->ddc_bus)
dret = radeon_ddc_probe(radeon_connector,
radeon_connector->requires_extended_probe);
dret = radeon_ddc_probe(radeon_connector);
if (dret) {
radeon_connector->detected_by_load = false;
if (radeon_connector->edid) {
kfree(radeon_connector->edid);
radeon_connector->edid = NULL;
@ -938,7 +1012,17 @@ radeon_dvi_detect(struct drm_connector *connector, bool force)
if ((ret == connector_status_connected) && (radeon_connector->use_digital == true))
goto out;
/* DVI-D and HDMI-A are digital only */
if ((connector->connector_type == DRM_MODE_CONNECTOR_DVID) ||
(connector->connector_type == DRM_MODE_CONNECTOR_HDMIA))
goto out;
/* if we aren't forcing don't do destructive polling */
if (!force) {
/* only return the previous status if we last
* detected a monitor via load.
*/
if (radeon_connector->detected_by_load)
ret = connector->status;
goto out;
}
@ -957,6 +1041,10 @@ radeon_dvi_detect(struct drm_connector *connector, bool force)
encoder = obj_to_encoder(obj);
if (encoder->encoder_type != DRM_MODE_ENCODER_DAC &&
encoder->encoder_type != DRM_MODE_ENCODER_TVDAC)
continue;
encoder_funcs = encoder->helper_private;
if (encoder_funcs->detect) {
if (ret != connector_status_connected) {
@ -964,6 +1052,8 @@ radeon_dvi_detect(struct drm_connector *connector, bool force)
if (ret == connector_status_connected) {
radeon_connector->use_digital = false;
}
if (ret != connector_status_disconnected)
radeon_connector->detected_by_load = true;
}
break;
}
@ -981,6 +1071,7 @@ radeon_dvi_detect(struct drm_connector *connector, bool force)
* cases the DVI port is actually a virtual KVM port connected to the service
* processor.
*/
out:
if ((!rdev->is_atom_bios) &&
(ret == connector_status_disconnected) &&
rdev->mode_info.bios_hardcoded_edid_size) {
@ -988,14 +1079,13 @@ radeon_dvi_detect(struct drm_connector *connector, bool force)
ret = connector_status_connected;
}
out:
/* updated in get modes as well since we need to know if it's analog or digital */
radeon_connector_update_scratch_regs(connector, ret);
return ret;
}
/* okay need to be smart in here about which encoder to pick */
struct drm_encoder *radeon_dvi_encoder(struct drm_connector *connector)
static struct drm_encoder *radeon_dvi_encoder(struct drm_connector *connector)
{
int enc_id = connector->encoder_ids[0];
struct radeon_connector *radeon_connector = to_radeon_connector(connector);
@ -1066,7 +1156,7 @@ static int radeon_dvi_mode_valid(struct drm_connector *connector,
(radeon_connector->connector_object_id == CONNECTOR_OBJECT_ID_HDMI_TYPE_B))
return MODE_OK;
else if (radeon_connector->connector_object_id == CONNECTOR_OBJECT_ID_HDMI_TYPE_A) {
if (ASIC_IS_DCE3(rdev)) {
if (ASIC_IS_DCE6(rdev)) {
/* HDMI 1.3+ supports max clock of 340 Mhz */
if (mode->clock > 340000)
return MODE_CLOCK_HIGH;
@ -1085,13 +1175,13 @@ static int radeon_dvi_mode_valid(struct drm_connector *connector,
return MODE_OK;
}
struct drm_connector_helper_funcs radeon_dvi_connector_helper_funcs = {
static const struct drm_connector_helper_funcs radeon_dvi_connector_helper_funcs = {
.get_modes = radeon_dvi_get_modes,
.mode_valid = radeon_dvi_mode_valid,
.best_encoder = radeon_dvi_encoder,
};
struct drm_connector_funcs radeon_dvi_connector_funcs = {
static const struct drm_connector_funcs radeon_dvi_connector_funcs = {
.dpms = drm_helper_connector_dpms,
.detect = radeon_dvi_detect,
.fill_modes = drm_helper_probe_single_connector_modes,
@ -1110,7 +1200,7 @@ static void radeon_dp_connector_destroy(struct drm_connector *connector)
if (radeon_dig_connector->dp_i2c_bus)
radeon_i2c_destroy(radeon_dig_connector->dp_i2c_bus);
kfree(radeon_connector->con_priv);
drm_sysfs_connector_remove(connector);
// drm_sysfs_connector_remove(connector);
drm_connector_cleanup(connector);
kfree(connector);
}
@ -1126,6 +1216,7 @@ static int radeon_dp_get_modes(struct drm_connector *connector)
(connector->connector_type == DRM_MODE_CONNECTOR_LVDS)) {
struct drm_display_mode *mode;
if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
if (!radeon_dig_connector->edp_on)
atombios_set_edp_panel_power(connector,
ATOM_TRANSMITTER_ACTION_POWER_ON);
@ -1133,6 +1224,15 @@ static int radeon_dp_get_modes(struct drm_connector *connector)
if (!radeon_dig_connector->edp_on)
atombios_set_edp_panel_power(connector,
ATOM_TRANSMITTER_ACTION_POWER_OFF);
} else {
/* need to setup ddc on the bridge */
if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) !=
ENCODER_OBJECT_ID_NONE) {
if (encoder)
radeon_atom_ext_encoder_setup_ddc(encoder);
}
ret = radeon_ddc_get_modes(radeon_connector);
}
if (ret > 0) {
if (encoder) {
@ -1143,7 +1243,6 @@ static int radeon_dp_get_modes(struct drm_connector *connector)
return ret;
}
encoder = radeon_best_single_encoder(connector);
if (!encoder)
return 0;
@ -1160,7 +1259,8 @@ static int radeon_dp_get_modes(struct drm_connector *connector)
}
} else {
/* need to setup ddc on the bridge */
if (radeon_connector_encoder_is_dp_bridge(connector)) {
if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) !=
ENCODER_OBJECT_ID_NONE) {
if (encoder)
radeon_atom_ext_encoder_setup_ddc(encoder);
}
@ -1170,13 +1270,12 @@ static int radeon_dp_get_modes(struct drm_connector *connector)
return ret;
}
bool radeon_connector_encoder_is_dp_bridge(struct drm_connector *connector)
u16 radeon_connector_encoder_get_dp_bridge_encoder_id(struct drm_connector *connector)
{
struct drm_mode_object *obj;
struct drm_encoder *encoder;
struct radeon_encoder *radeon_encoder;
int i;
bool found = false;
for (i = 0; i < DRM_CONNECTOR_MAX_ENCODER; i++) {
if (connector->encoder_ids[i] == 0)
@ -1192,14 +1291,13 @@ bool radeon_connector_encoder_is_dp_bridge(struct drm_connector *connector)
switch (radeon_encoder->encoder_id) {
case ENCODER_OBJECT_ID_TRAVIS:
case ENCODER_OBJECT_ID_NUTMEG:
found = true;
break;
return radeon_encoder->encoder_id;
default:
break;
}
}
return found;
return ENCODER_OBJECT_ID_NONE;
}
bool radeon_connector_encoder_is_hbr2(struct drm_connector *connector)
@ -1251,6 +1349,9 @@ radeon_dp_detect(struct drm_connector *connector, bool force)
struct radeon_connector_atom_dig *radeon_dig_connector = radeon_connector->con_priv;
struct drm_encoder *encoder = radeon_best_single_encoder(connector);
if (!force && radeon_check_hpd_status_unchanged(connector))
return connector->status;
if (radeon_connector->edid) {
kfree(radeon_connector->edid);
radeon_connector->edid = NULL;
@ -1276,12 +1377,24 @@ radeon_dp_detect(struct drm_connector *connector, bool force)
if (!radeon_dig_connector->edp_on)
atombios_set_edp_panel_power(connector,
ATOM_TRANSMITTER_ACTION_POWER_OFF);
} else {
/* need to setup ddc on the bridge */
if (radeon_connector_encoder_is_dp_bridge(connector)) {
if (encoder)
} else if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) !=
ENCODER_OBJECT_ID_NONE) {
/* DP bridges are always DP */
radeon_dig_connector->dp_sink_type = CONNECTOR_OBJECT_ID_DISPLAYPORT;
/* get the DPCD from the bridge */
radeon_dp_getdpcd(radeon_connector);
if (encoder) {
/* setup ddc on the bridge */
radeon_atom_ext_encoder_setup_ddc(encoder);
if (radeon_ddc_probe(radeon_connector)) /* try DDC */
ret = connector_status_connected;
else if (radeon_connector->dac_load_detect) { /* try load detection */
struct drm_encoder_helper_funcs *encoder_funcs = encoder->helper_private;
ret = encoder_funcs->detect(encoder, connector);
}
}
} else {
radeon_dig_connector->dp_sink_type = radeon_dp_getsinktype(radeon_connector);
if (radeon_hpd_sense(rdev, radeon_connector->hpd.hpd)) {
ret = connector_status_connected;
@ -1292,21 +1405,10 @@ radeon_dp_detect(struct drm_connector *connector, bool force)
if (radeon_dp_getdpcd(radeon_connector))
ret = connector_status_connected;
} else {
if (radeon_ddc_probe(radeon_connector,
radeon_connector->requires_extended_probe))
if (radeon_ddc_probe(radeon_connector))
ret = connector_status_connected;
}
}
if ((ret == connector_status_disconnected) &&
radeon_connector->dac_load_detect) {
struct drm_encoder *encoder = radeon_best_single_encoder(connector);
struct drm_encoder_helper_funcs *encoder_funcs;
if (encoder) {
encoder_funcs = encoder->helper_private;
ret = encoder_funcs->detect(encoder, connector);
}
}
}
radeon_connector_update_scratch_regs(connector, ret);
@ -1356,13 +1458,13 @@ static int radeon_dp_mode_valid(struct drm_connector *connector,
}
}
struct drm_connector_helper_funcs radeon_dp_connector_helper_funcs = {
static const struct drm_connector_helper_funcs radeon_dp_connector_helper_funcs = {
.get_modes = radeon_dp_get_modes,
.mode_valid = radeon_dp_mode_valid,
.best_encoder = radeon_dvi_encoder,
};
struct drm_connector_funcs radeon_dp_connector_funcs = {
static const struct drm_connector_funcs radeon_dp_connector_funcs = {
.dpms = drm_helper_connector_dpms,
.detect = radeon_dp_detect,
.fill_modes = drm_helper_probe_single_connector_modes,
@ -1448,9 +1550,7 @@ radeon_add_atom_connector(struct drm_device *dev,
radeon_connector->shared_ddc = shared_ddc;
radeon_connector->connector_object_id = connector_object_id;
radeon_connector->hpd = *hpd;
radeon_connector->requires_extended_probe =
radeon_connector_needs_extended_probe(rdev, supported_device,
connector_type);
radeon_connector->router = *router;
if (router->ddc_valid || router->cd_valid) {
radeon_connector->router_bus = radeon_i2c_lookup(rdev, &router->i2c_info);
@ -1746,7 +1846,7 @@ radeon_add_atom_connector(struct drm_device *dev,
connector->polled = DRM_CONNECTOR_POLL_HPD;
connector->display_info.subpixel_order = subpixel_order;
drm_sysfs_connector_add(connector);
// drm_sysfs_connector_add(connector);
return;
failed:
@ -1797,9 +1897,7 @@ radeon_add_legacy_connector(struct drm_device *dev,
radeon_connector->devices = supported_device;
radeon_connector->connector_object_id = connector_object_id;
radeon_connector->hpd = *hpd;
radeon_connector->requires_extended_probe =
radeon_connector_needs_extended_probe(rdev, supported_device,
connector_type);
switch (connector_type) {
case DRM_MODE_CONNECTOR_VGA:
drm_connector_init(dev, &radeon_connector->base, &radeon_vga_connector_funcs, connector_type);
@ -1905,16 +2003,5 @@ radeon_add_legacy_connector(struct drm_device *dev,
} else
connector->polled = DRM_CONNECTOR_POLL_HPD;
connector->display_info.subpixel_order = subpixel_order;
drm_sysfs_connector_add(connector);
if (connector_type == DRM_MODE_CONNECTOR_LVDS) {
struct drm_encoder *drm_encoder;
list_for_each_entry(drm_encoder, &dev->mode_config.encoder_list, head) {
struct radeon_encoder *radeon_encoder;
radeon_encoder = to_radeon_encoder(drm_encoder);
if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_LVDS)
radeon_legacy_backlight_init(radeon_encoder, connector);
}
}
// drm_sysfs_connector_add(connector);
}


@ -26,7 +26,7 @@
* Jerome Glisse
*/
//#include <linux/console.h>
#include <linux/slab.h>
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/radeon_drm.h>
@ -55,6 +55,8 @@ int radeon_audio = 1;
int radeon_hw_i2c = 0;
int radeon_pcie_gen2 = 0;
int radeon_disp_priority = 0;
int radeon_lockup_timeout = 10000;
int irq_override = 0;
@ -65,7 +67,7 @@ void parse_cmdline(char *cmdline, videomode_t *mode, char *log, int *kms);
int init_display(struct radeon_device *rdev, videomode_t *mode);
int init_display_kms(struct radeon_device *rdev, videomode_t *mode);
int get_modes(videomode_t *mode, int *count);
int get_modes(videomode_t *mode, u32_t *count);
int set_user_mode(videomode_t *mode);
int r100_2D_test(struct radeon_device *rdev);
@ -132,11 +134,19 @@ static const char radeon_family_name[][16] = {
"TURKS",
"CAICOS",
"CAYMAN",
"ARUBA",
"TAHITI",
"PITCAIRN",
"VERDE",
"LAST",
};
/*
* Clear GPU surface registers.
/**
* radeon_surface_init - Clear GPU surface registers.
*
* @rdev: radeon_device pointer
*
* Clear GPU surface registers (r1xx-r5xx).
*/
void radeon_surface_init(struct radeon_device *rdev)
{
@ -155,6 +165,13 @@ void radeon_surface_init(struct radeon_device *rdev)
/*
* GPU scratch registers helpers function.
*/
/**
* radeon_scratch_init - Init scratch register driver information.
*
* @rdev: radeon_device pointer
*
* Init CP scratch register driver information (r1xx-r5xx)
*/
void radeon_scratch_init(struct radeon_device *rdev)
{
int i;
@ -172,6 +189,15 @@ void radeon_scratch_init(struct radeon_device *rdev)
}
}
/**
* radeon_scratch_get - Allocate a scratch register
*
* @rdev: radeon_device pointer
* @reg: scratch register mmio offset
*
* Allocate a CP scratch register for use by the driver (all asics).
* Returns 0 on success or -EINVAL on failure.
*/
int radeon_scratch_get(struct radeon_device *rdev, uint32_t *reg)
{
int i;
@ -186,6 +212,14 @@ int radeon_scratch_get(struct radeon_device *rdev, uint32_t *reg)
return -EINVAL;
}
/**
* radeon_scratch_free - Free a scratch register
*
* @rdev: radeon_device pointer
* @reg: scratch register mmio offset
*
* Free a CP scratch register allocated for use by the driver (all asics)
*/
void radeon_scratch_free(struct radeon_device *rdev, uint32_t reg)
{
int i;
@ -198,6 +232,20 @@ void radeon_scratch_free(struct radeon_device *rdev, uint32_t reg)
}
}
/*
* radeon_wb_*()
* Writeback is the method by which the GPU updates special pages
* in memory with the status of certain GPU events (fences, ring pointers,
* etc.).
*/
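/*
 * Illustrative sketch (not part of this patch): with writeback enabled, a
 * ring read pointer or fence value is fetched from the writeback page in
 * system memory instead of an MMIO register read, roughly:
 *
 *	if (rdev->wb.enabled)
 *		rptr = le32_to_cpu(rdev->wb.wb[ring->rptr_offs / 4]);
 *	else
 *		rptr = RREG32(ring->rptr_reg);
 *
 * Field names (wb.wb, rptr_offs, rptr_reg) follow the upstream radeon
 * driver and may differ slightly in this port.
 */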
/**
* radeon_wb_disable - Disable Writeback
*
* @rdev: radeon_device pointer
*
* Disables Writeback (all asics). Used for suspend.
*/
void radeon_wb_disable(struct radeon_device *rdev)
{
int r;
@ -213,6 +261,14 @@ void radeon_wb_disable(struct radeon_device *rdev)
rdev->wb.enabled = false;
}
/**
* radeon_wb_fini - Disable Writeback and free memory
*
* @rdev: radeon_device pointer
*
* Disables Writeback and frees the Writeback memory (all asics).
* Used at driver shutdown.
*/
void radeon_wb_fini(struct radeon_device *rdev)
{
radeon_wb_disable(rdev);
@ -223,13 +279,22 @@ void radeon_wb_fini(struct radeon_device *rdev)
}
}
/**
* radeon_wb_init - Init Writeback driver info and allocate memory
*
* @rdev: radeon_device pointer
*
* Initializes Writeback and allocates the Writeback memory (all asics).
* Used at driver startup.
* Returns 0 on success or a negative error code on failure.
*/
int radeon_wb_init(struct radeon_device *rdev)
{
int r;
if (rdev->wb.wb_obj == NULL) {
r = radeon_bo_create(rdev, RADEON_GPU_PAGE_SIZE, PAGE_SIZE, true,
RADEON_GEM_DOMAIN_GTT, &rdev->wb.wb_obj);
RADEON_GEM_DOMAIN_GTT, NULL, &rdev->wb.wb_obj);
if (r) {
dev_warn(rdev->dev, "(%d) create WB bo failed\n", r);
return r;
@ -261,21 +326,25 @@ int radeon_wb_init(struct radeon_device *rdev)
/* disable event_write fences */
rdev->wb.use_event = false;
/* disabled via module param */
if (radeon_no_wb == 1)
if (radeon_no_wb == 1) {
rdev->wb.enabled = false;
else {
} else {
if (rdev->flags & RADEON_IS_AGP) {
/* often unreliable on AGP */
// if (rdev->flags & RADEON_IS_AGP) {
// rdev->wb.enabled = false;
// } else {
rdev->wb.enabled = false;
} else if (rdev->family < CHIP_R300) {
/* often unreliable on pre-r300 */
rdev->wb.enabled = false;
} else {
rdev->wb.enabled = true;
/* event_write fences are only available on r600+ */
if (rdev->family >= CHIP_R600)
if (rdev->family >= CHIP_R600) {
rdev->wb.use_event = true;
// }
}
/* always use writeback/events on NI */
if (ASIC_IS_DCE5(rdev)) {
}
}
/* always use writeback/events on NI, APUs */
if (rdev->family >= CHIP_PALM) {
rdev->wb.enabled = true;
rdev->wb.use_event = true;
}
@ -328,6 +397,8 @@ int radeon_wb_init(struct radeon_device *rdev)
*/
void radeon_vram_location(struct radeon_device *rdev, struct radeon_mc *mc, u64 base)
{
uint64_t limit = (uint64_t)radeon_vram_limit << 20;
mc->vram_start = base;
if (mc->mc_vram_size > (0xFFFFFFFF - base + 1)) {
dev_warn(rdev->dev, "limiting VRAM to PCI aperture size\n");
@ -341,6 +412,8 @@ void radeon_vram_location(struct radeon_device *rdev, struct radeon_mc *mc, u64
mc->mc_vram_size = mc->aper_size;
}
mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
if (limit && limit < mc->real_vram_size)
mc->real_vram_size = limit;
dev_info(rdev->dev, "VRAM: %lluM 0x%016llX - 0x%016llX (%lluM used)\n",
mc->mc_vram_size >> 20, mc->vram_start,
mc->vram_end, mc->real_vram_size >> 20);
@ -385,6 +458,15 @@ void radeon_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
/*
* GPU helpers function.
*/
/**
* radeon_card_posted - check if the hw has already been initialized
*
* @rdev: radeon_device pointer
*
* Check if the asic has been initialized (all asics).
* Used at driver startup.
* Returns true if initialized or false if not.
*/
bool radeon_card_posted(struct radeon_device *rdev)
{
uint32_t reg;
@ -431,6 +513,14 @@ bool radeon_card_posted(struct radeon_device *rdev)
}
/**
* radeon_update_bandwidth_info - update display bandwidth params
*
* @rdev: radeon_device pointer
*
* Used when sclk/mclk are switched or display modes are set.
* params are used to calculate display watermarks (all asics)
*/
void radeon_update_bandwidth_info(struct radeon_device *rdev)
{
fixed20_12 a;
@ -451,6 +541,15 @@ void radeon_update_bandwidth_info(struct radeon_device *rdev)
}
}
/**
* radeon_boot_test_post_card - check and possibly initialize the hw
*
* @rdev: radeon_device pointer
*
* Check if the asic is initialized and if not, attempt to initialize
* it (all asics).
* Returns true if initialized or false if not.
*/
bool radeon_boot_test_post_card(struct radeon_device *rdev)
{
if (radeon_card_posted(rdev))
@ -469,14 +568,24 @@ bool radeon_boot_test_post_card(struct radeon_device *rdev)
}
}
/**
* radeon_dummy_page_init - init dummy page used by the driver
*
* @rdev: radeon_device pointer
*
* Allocate the dummy page used by the driver (all asics).
* This dummy page is used by the driver as a filler for gart entries
* when pages are taken out of the GART.
* Returns 0 on success, -ENOMEM on failure.
*/
int radeon_dummy_page_init(struct radeon_device *rdev)
{
if (rdev->dummy_page.page)
return 0;
rdev->dummy_page.page = AllocPage();
rdev->dummy_page.page = (void*)AllocPage();
if (rdev->dummy_page.page == NULL)
return -ENOMEM;
rdev->dummy_page.addr = MapIoMem(rdev->dummy_page.page, 4096, 5);
rdev->dummy_page.addr = MapIoMem((addr_t)rdev->dummy_page.page, 4096, 3);
if (!rdev->dummy_page.addr) {
// __free_page(rdev->dummy_page.page);
rdev->dummy_page.page = NULL;
@ -485,16 +594,40 @@ int radeon_dummy_page_init(struct radeon_device *rdev)
return 0;
}
/**
* radeon_dummy_page_fini - free dummy page used by the driver
*
* @rdev: radeon_device pointer
*
* Frees the dummy page used by the driver (all asics).
*/
void radeon_dummy_page_fini(struct radeon_device *rdev)
{
if (rdev->dummy_page.page == NULL)
return;
KernelFree(rdev->dummy_page.addr);
KernelFree((void*)rdev->dummy_page.addr);
rdev->dummy_page.page = NULL;
}
/* ATOM accessor methods */
/*
* ATOM is an interpreted byte code stored in tables in the vbios. The
* driver registers callbacks to access registers and the interpreter
* in the driver parses the tables and executes them to program specific
* actions (set display modes, asic init, etc.). See radeon_atombios.c,
* atombios.h, and atom.c
*/
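/*
 * Illustrative sketch (not part of this patch): radeon_atombios_init()
 * below registers these cail_* helpers with the interpreter through the
 * struct card_info callbacks, roughly:
 *
 *	atom_card_info->reg_read = cail_reg_read;
 *	atom_card_info->reg_write = cail_reg_write;
 *	atom_card_info->mc_read = cail_mc_read;
 *	atom_card_info->mc_write = cail_mc_write;
 *	atom_card_info->pll_read = cail_pll_read;
 *	atom_card_info->pll_write = cail_pll_write;
 *	rdev->mode_info.atom_context = atom_parse(atom_card_info, rdev->bios);
 *
 * See the card_info definition in atom.h for the full callback set.
 */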
/**
* cail_pll_read - read PLL register
*
* @info: atom card_info pointer
* @reg: PLL register offset
*
* Provides a PLL register accessor for the atom interpreter (r4xx+).
* Returns the value of the PLL register.
*/
static uint32_t cail_pll_read(struct card_info *info, uint32_t reg)
{
struct radeon_device *rdev = info->dev->dev_private;
@ -504,6 +637,15 @@ static uint32_t cail_pll_read(struct card_info *info, uint32_t reg)
return r;
}
/**
* cail_pll_write - write PLL register
*
* @info: atom card_info pointer
* @reg: PLL register offset
* @val: value to write to the pll register
*
* Provides a PLL register accessor for the atom interpreter (r4xx+).
*/
static void cail_pll_write(struct card_info *info, uint32_t reg, uint32_t val)
{
struct radeon_device *rdev = info->dev->dev_private;
@ -511,6 +653,15 @@ static void cail_pll_write(struct card_info *info, uint32_t reg, uint32_t val)
rdev->pll_wreg(rdev, reg, val);
}
/**
* cail_mc_read - read MC (Memory Controller) register
*
* @info: atom card_info pointer
* @reg: MC register offset
*
* Provides an MC register accessor for the atom interpreter (r4xx+).
* Returns the value of the MC register.
*/
static uint32_t cail_mc_read(struct card_info *info, uint32_t reg)
{
struct radeon_device *rdev = info->dev->dev_private;
@ -520,6 +671,15 @@ static uint32_t cail_mc_read(struct card_info *info, uint32_t reg)
return r;
}
/**
* cail_mc_write - write MC (Memory Controller) register
*
* @info: atom card_info pointer
* @reg: MC register offset
* @val: value to write to the MC register
*
* Provides an MC register accessor for the atom interpreter (r4xx+).
*/
static void cail_mc_write(struct card_info *info, uint32_t reg, uint32_t val)
{
struct radeon_device *rdev = info->dev->dev_private;
@ -527,6 +687,15 @@ static void cail_mc_write(struct card_info *info, uint32_t reg, uint32_t val)
rdev->mc_wreg(rdev, reg, val);
}
/**
* cail_reg_write - write MMIO register
*
* @info: atom card_info pointer
* @reg: MMIO register offset
* @val: value to write to the MMIO register
*
* Provides an MMIO register accessor for the atom interpreter (r4xx+).
*/
static void cail_reg_write(struct card_info *info, uint32_t reg, uint32_t val)
{
struct radeon_device *rdev = info->dev->dev_private;
@ -534,6 +703,15 @@ static void cail_reg_write(struct card_info *info, uint32_t reg, uint32_t val)
WREG32(reg*4, val);
}
/**
* cail_reg_read - read MMIO register
*
* @info: atom card_info pointer
* @reg: MMIO register offset
*
* Provides an MMIO register accessor for the atom interpreter (r4xx+).
* Returns the value of the MMIO register.
*/
static uint32_t cail_reg_read(struct card_info *info, uint32_t reg)
{
struct radeon_device *rdev = info->dev->dev_private;
@ -543,6 +721,15 @@ static uint32_t cail_reg_read(struct card_info *info, uint32_t reg)
return r;
}
/**
* cail_ioreg_write - write IO register
*
* @info: atom card_info pointer
* @reg: IO register offset
* @val: value to write to the IO register
*
* Provides an IO register accessor for the atom interpreter (r4xx+).
*/
static void cail_ioreg_write(struct card_info *info, uint32_t reg, uint32_t val)
{
struct radeon_device *rdev = info->dev->dev_private;
@ -550,6 +737,15 @@ static void cail_ioreg_write(struct card_info *info, uint32_t reg, uint32_t val)
WREG32_IO(reg*4, val);
}
/**
* cail_ioreg_read - read IO register
*
* @info: atom card_info pointer
* @reg: IO register offset
*
* Provides an IO register accessor for the atom interpreter (r4xx+).
* Returns the value of the IO register.
*/
static uint32_t cail_ioreg_read(struct card_info *info, uint32_t reg)
{
struct radeon_device *rdev = info->dev->dev_private;
@ -559,6 +755,16 @@ static uint32_t cail_ioreg_read(struct card_info *info, uint32_t reg)
return r;
}
/**
* radeon_atombios_init - init the driver info and callbacks for atombios
*
* @rdev: radeon_device pointer
*
* Initializes the driver info and register access callbacks for the
* ATOM interpreter (r4xx+).
* Returns 0 on success, -ENOMEM on failure.
* Called at driver startup.
*/
int radeon_atombios_init(struct radeon_device *rdev)
{
struct card_info *atom_card_info =
@ -592,6 +798,15 @@ int radeon_atombios_init(struct radeon_device *rdev)
return 0;
}
/**
* radeon_atombios_fini - free the driver info and callbacks for atombios
*
* @rdev: radeon_device pointer
*
* Frees the driver info and register access callbacks for the ATOM
* interpreter (r4xx+).
* Called at driver shutdown.
*/
void radeon_atombios_fini(struct radeon_device *rdev)
{
if (rdev->mode_info.atom_context) {
@ -601,17 +816,50 @@ void radeon_atombios_fini(struct radeon_device *rdev)
kfree(rdev->mode_info.atom_card_info);
}
/* COMBIOS */
/*
* COMBIOS is the bios format prior to ATOM. It provides
* command tables similar to ATOM, but doesn't have a unified
* parser. See radeon_combios.c
*/
/**
* radeon_combios_init - init the driver info for combios
*
* @rdev: radeon_device pointer
*
* Initializes the driver info for combios (r1xx-r3xx).
* Returns 0 on success.
* Called at driver startup.
*/
int radeon_combios_init(struct radeon_device *rdev)
{
radeon_combios_initialize_bios_scratch_regs(rdev->ddev);
return 0;
}
/**
* radeon_combios_fini - free the driver info for combios
*
* @rdev: radeon_device pointer
*
* Frees the driver info for combios (r1xx-r3xx).
* Called at driver shutdown.
*/
void radeon_combios_fini(struct radeon_device *rdev)
{
}
/* if we get transitioned to only one device, tak VGA back */
/* if we get transitioned to only one device, take VGA back */
/**
* radeon_vga_set_decode - enable/disable vga decode
*
* @cookie: radeon_device pointer
* @state: enable/disable vga decode
*
* Enable/disable vga decode (all asics).
* Returns VGA resource flags.
*/
static unsigned int radeon_vga_set_decode(void *cookie, bool state)
{
struct radeon_device *rdev = cookie;
@ -623,55 +871,49 @@ static unsigned int radeon_vga_set_decode(void *cookie, bool state)
return VGA_RSRC_NORMAL_IO | VGA_RSRC_NORMAL_MEM;
}
void radeon_check_arguments(struct radeon_device *rdev)
/**
* radeon_check_pot_argument - check that argument is a power of two
*
* @arg: value to check
*
* Validates that a certain argument is a power of two (all asics).
* Returns true if argument is valid.
*/
static bool radeon_check_pot_argument(int arg)
{
return (arg & (arg - 1)) == 0;
}
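/*
 * Worked example (illustrative): 64 & 63 == 0b1000000 & 0b0111111 == 0,
 * so 64 is accepted; 96 & 95 == 0b1100000 & 0b1011111 == 0b1000000 != 0,
 * so 96 is rejected. Note that 0 also passes the check, which lets the
 * default of 0 (no VRAM limit) through unchanged in the caller below.
 */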
/**
* radeon_check_arguments - validate module params
*
* @rdev: radeon_device pointer
*
* Validates certain module parameters and updates
* the associated values used by the driver (all asics).
*/
static void radeon_check_arguments(struct radeon_device *rdev)
{
/* vramlimit must be a power of two */
switch (radeon_vram_limit) {
case 0:
case 4:
case 8:
case 16:
case 32:
case 64:
case 128:
case 256:
case 512:
case 1024:
case 2048:
case 4096:
break;
default:
if (!radeon_check_pot_argument(radeon_vram_limit)) {
dev_warn(rdev->dev, "vram limit (%d) must be a power of 2\n",
radeon_vram_limit);
radeon_vram_limit = 0;
break;
}
radeon_vram_limit = radeon_vram_limit << 20;
/* gtt size must be power of two and greater or equal to 32M */
switch (radeon_gart_size) {
case 4:
case 8:
case 16:
if (radeon_gart_size < 32) {
dev_warn(rdev->dev, "gart size (%d) too small forcing to 512M\n",
radeon_gart_size);
radeon_gart_size = 512;
break;
case 32:
case 64:
case 128:
case 256:
case 512:
case 1024:
case 2048:
case 4096:
break;
default:
} else if (!radeon_check_pot_argument(radeon_gart_size)) {
dev_warn(rdev->dev, "gart size (%d) must be a power of 2\n",
radeon_gart_size);
radeon_gart_size = 512;
break;
}
rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024;
rdev->mc.gtt_size = (uint64_t)radeon_gart_size << 20;
/* AGP mode can only be -1, 1, 2, 4, 8 */
switch (radeon_agpmode) {
case -1:
@ -705,25 +947,38 @@ int radeon_device_init(struct radeon_device *rdev,
rdev->is_atom_bios = false;
rdev->usec_timeout = RADEON_MAX_USEC_TIMEOUT;
rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024;
rdev->gpu_lockup = false;
rdev->accel_working = false;
/* set up ring ids */
for (i = 0; i < RADEON_NUM_RINGS; i++) {
rdev->ring[i].idx = i;
}
DRM_INFO("initializing kernel modesetting (%s 0x%04X:0x%04X).\n",
radeon_family_name[rdev->family], pdev->vendor, pdev->device);
DRM_INFO("initializing kernel modesetting (%s 0x%04X:0x%04X 0x%04X:0x%04X).\n",
radeon_family_name[rdev->family], pdev->vendor, pdev->device,
pdev->subsystem_vendor, pdev->subsystem_device);
/* mutex initialization are all done here so we
* can recall function without having locking issues */
mutex_init(&rdev->cs_mutex);
mutex_init(&rdev->ib_pool.mutex);
mutex_init(&rdev->cp.mutex);
mutex_init(&rdev->ring_lock);
mutex_init(&rdev->dc_hw_i2c_mutex);
if (rdev->family >= CHIP_R600)
spin_lock_init(&rdev->ih.lock);
atomic_set(&rdev->ih.lock, 0);
mutex_init(&rdev->gem.mutex);
mutex_init(&rdev->pm.mutex);
mutex_init(&rdev->vram_mutex);
rwlock_init(&rdev->fence_drv.lock);
INIT_LIST_HEAD(&rdev->gem.objects);
mutex_init(&rdev->gpu_clock_mutex);
init_rwsem(&rdev->pm.mclk_lock);
init_rwsem(&rdev->exclusive_lock);
init_waitqueue_head(&rdev->irq.vblank_queue);
r = radeon_gem_init(rdev);
if (r)
return r;
/* initialize vm here */
mutex_init(&rdev->vm_manager.lock);
/* Adjust VM size here.
* Currently set to 4GB ((1 << 20) 4k pages).
* Max GPUVM size for cayman and SI is 40 bits.
*/
rdev->vm_manager.max_pfn = 1 << 20;
INIT_LIST_HEAD(&rdev->vm_manager.lru_vm);
/* Set asic functions */
r = radeon_asic_init(rdev);
@ -745,20 +1000,22 @@ int radeon_device_init(struct radeon_device *rdev,
/* set DMA mask + need_dma32 flags.
* PCIE - can handle 40-bits.
* IGP - can handle 40-bits (in theory)
* IGP - can handle 40-bits
* AGP - generally dma32 is safest
* PCI - only dma32
* PCI - dma32 for legacy pci gart, 40 bits on newer asics
*/
rdev->need_dma32 = false;
if (rdev->flags & RADEON_IS_AGP)
rdev->need_dma32 = true;
if (rdev->flags & RADEON_IS_PCI)
if ((rdev->flags & RADEON_IS_PCI) &&
(rdev->family <= CHIP_RS740))
rdev->need_dma32 = true;
dma_bits = rdev->need_dma32 ? 32 : 40;
r = pci_set_dma_mask(rdev->pdev, DMA_BIT_MASK(dma_bits));
if (r) {
rdev->need_dma32 = true;
dma_bits = 32;
printk(KERN_WARNING "radeon: No suitable DMA available.\n");
}
@ -766,16 +1023,25 @@ int radeon_device_init(struct radeon_device *rdev,
/* TODO: block userspace mapping of io register */
rdev->rmmio_base = pci_resource_start(rdev->pdev, 2);
rdev->rmmio_size = pci_resource_len(rdev->pdev, 2);
rdev->rmmio = (void*)MapIoMem(rdev->rmmio_base, rdev->rmmio_size,
PG_SW+PG_NOCACHE);
rdev->rmmio = ioremap(rdev->rmmio_base, rdev->rmmio_size);
if (rdev->rmmio == NULL) {
return -ENOMEM;
}
DRM_INFO("register mmio base: 0x%08X\n", (uint32_t)rdev->rmmio_base);
DRM_INFO("register mmio size: %u\n", (unsigned)rdev->rmmio_size);
/* io port mapping */
for (i = 0; i < DEVICE_COUNT_RESOURCE; i++) {
if (pci_resource_flags(rdev->pdev, i) & IORESOURCE_IO) {
rdev->rio_mem_size = pci_resource_len(rdev->pdev, i);
rdev->rio_mem = pci_iomap(rdev->pdev, i, rdev->rio_mem_size);
break;
}
}
if (rdev->rio_mem == NULL)
DRM_ERROR("Unable to find PCI I/O BAR\n");
r = radeon_init(rdev);
if (r)
return r;
@ -794,12 +1060,93 @@ int radeon_device_init(struct radeon_device *rdev,
// if (radeon_testing) {
// radeon_test_moves(rdev);
// }
// if ((radeon_testing & 2)) {
// radeon_test_syncing(rdev);
// }
if (radeon_benchmarking) {
radeon_benchmark(rdev);
radeon_benchmark(rdev, radeon_benchmarking);
}
return 0;
}
/**
* radeon_gpu_reset - reset the asic
*
* @rdev: radeon device pointer
*
* Attempt to reset the GPU if it has hung (all asics).
* Returns 0 for success or an error on failure.
*/
int radeon_gpu_reset(struct radeon_device *rdev)
{
unsigned ring_sizes[RADEON_NUM_RINGS];
uint32_t *ring_data[RADEON_NUM_RINGS];
bool saved = false;
int i, r;
int resched;
// down_write(&rdev->exclusive_lock);
radeon_save_bios_scratch_regs(rdev);
/* block TTM */
// resched = ttm_bo_lock_delayed_workqueue(&rdev->mman.bdev);
radeon_suspend(rdev);
for (i = 0; i < RADEON_NUM_RINGS; ++i) {
ring_sizes[i] = radeon_ring_backup(rdev, &rdev->ring[i],
&ring_data[i]);
if (ring_sizes[i]) {
saved = true;
dev_info(rdev->dev, "Saved %d dwords of commands "
"on ring %d.\n", ring_sizes[i], i);
}
}
retry:
r = radeon_asic_reset(rdev);
if (!r) {
dev_info(rdev->dev, "GPU reset succeeded, trying to resume\n");
radeon_resume(rdev);
}
radeon_restore_bios_scratch_regs(rdev);
drm_helper_resume_force_mode(rdev->ddev);
if (!r) {
for (i = 0; i < RADEON_NUM_RINGS; ++i) {
radeon_ring_restore(rdev, &rdev->ring[i],
ring_sizes[i], ring_data[i]);
ring_sizes[i] = 0;
ring_data[i] = NULL;
}
r = radeon_ib_ring_tests(rdev);
if (r) {
dev_err(rdev->dev, "ib ring test failed (%d).\n", r);
if (saved) {
saved = false;
radeon_suspend(rdev);
goto retry;
}
}
} else {
for (i = 0; i < RADEON_NUM_RINGS; ++i) {
kfree(ring_data[i]);
}
}
// ttm_bo_unlock_delayed_workqueue(&rdev->mman.bdev, resched);
if (r) {
/* bad news, how to tell it to userspace ? */
dev_info(rdev->dev, "GPU reset failed\n");
}
// up_write(&rdev->exclusive_lock);
return r;
}
/*
* Driver load/unload
@ -901,15 +1248,6 @@ int drm_get_dev(struct pci_dev *pdev, const struct pci_device_id *ent)
init_display(dev->dev_private, &usermode);
uint32_t route0 = PciRead32(0, 31<<3, 0x60);
uint32_t route1 = PciRead32(0, 31<<3, 0x68);
uint8_t elcr0 = in8(0x4D0);
uint8_t elcr1 = in8(0x4D1);
dbgprintf("pci route: %x %x elcr: %x %x\n", route0, route1, elcr0, elcr1);
LEAVE();
return 0;
@ -981,12 +1319,12 @@ static struct pci_device_id pciidlist[] = {
#define API_VERSION 0x01000100
#define SRV_GETVERSION 0
#define SRV_ENUM_MODES 1
#define SRV_SET_MODE 2
#define SRV_GETVERSION 0
#define SRV_ENUM_MODES 1
#define SRV_SET_MODE 2
#define SRV_CREATE_VIDEO 9
#define SRV_BLIT_VIDEO 10
#define SRV_CREATE_VIDEO 9
#define SRV_BLIT_VIDEO 10
#define SRV_CREATE_BITMAP 11
@ -1014,15 +1352,14 @@ int _stdcall display_handler(ioctl_t *io)
{
case SRV_GETVERSION:
check_output(4);
*outp = API_VERSION;
retval = 0;
*outp = API_VERSION;
retval = 0;
break;
case SRV_ENUM_MODES:
dbgprintf("SRV_ENUM_MODES inp %x inp_size %x out_size %x\n",
inp, io->inp_size, io->out_size );
check_output(4);
check_input(*outp * sizeof(videomode_t));
if( radeon_modeset)
retval = get_modes((videomode_t*)inp, outp);
break;
@ -1036,12 +1373,12 @@ int _stdcall display_handler(ioctl_t *io)
break;
case SRV_CREATE_VIDEO:
retval = r600_create_video(inp[0], inp[1], outp);
// retval = r600_create_video(inp[0], inp[1], outp);
break;
case SRV_BLIT_VIDEO:
r600_video_blit( ((uint64_t*)inp)[0], inp[2], inp[3],
inp[4], inp[5], inp[6]);
// r600_video_blit( ((uint64_t*)inp)[0], inp[2], inp[3],
// inp[4], inp[5], inp[6]);
retval = 0;
break;
@ -1049,7 +1386,7 @@ int _stdcall display_handler(ioctl_t *io)
case SRV_CREATE_BITMAP:
check_input(8);
check_output(4);
retval = create_bitmap(outp, inp[0], inp[1]);
// retval = create_bitmap(outp, inp[0], inp[1]);
break;
};
@ -1064,7 +1401,7 @@ u32_t drvEntry(int action, char *cmdline)
{
struct radeon_device *rdev = NULL;
struct pci_device_id *ent;
const struct pci_device_id *ent;
int err;
u32_t retval = 0;
@ -1088,7 +1425,7 @@ u32_t drvEntry(int action, char *cmdline)
return 0;
};
}
dbgprintf("Radeon RC11 cmdline %s\n", cmdline);
dbgprintf("Radeon RC12 preview 1 cmdline %s\n", cmdline);
enum_pci_devices();
@ -1121,3 +1458,26 @@ void drm_vblank_post_modeset(struct drm_device *dev, int crtc)
void drm_vblank_pre_modeset(struct drm_device *dev, int crtc)
{};
#define PCI_CLASS_REVISION 0x08
#define PCI_CLASS_DISPLAY_VGA 0x0300
int pci_scan_filter(u32_t id, u32_t busnr, u32_t devfn)
{
u16_t vendor, device;
u32_t class;
int ret = 0;
vendor = id & 0xffff;
device = (id >> 16) & 0xffff;
if(vendor == 0x1002)
{
class = PciRead32(busnr, devfn, PCI_CLASS_REVISION);
class >>= 16;
if( class == PCI_CLASS_DISPLAY_VGA)
ret = 1;
}
return ret;
}


@ -23,17 +23,15 @@
* Authors: Dave Airlie
* Alex Deucher
*/
#include "drmP.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "atom.h"
#include <asm/div64.h>
#include "drm_crtc_helper.h"
#include "drm_edid.h"
static int radeon_ddc_dump(struct drm_connector *connector);
#include <drm/drm_crtc_helper.h>
#include <drm/drm_edid.h>
static void avivo_crtc_load_lut(struct drm_crtc *crtc)
{
@ -288,7 +286,7 @@ static void radeon_crtc_init(struct drm_device *dev, int index)
radeon_legacy_init_crtc(dev, radeon_crtc);
}
static const char *encoder_names[36] = {
static const char *encoder_names[37] = {
"NONE",
"INTERNAL_LVDS",
"INTERNAL_TMDS1",
@ -325,24 +323,7 @@ static const char *encoder_names[36] = {
"INTERNAL_UNIPHY2",
"NUTMEG",
"TRAVIS",
};
static const char *connector_names[15] = {
"Unknown",
"VGA",
"DVI-I",
"DVI-D",
"DVI-A",
"Composite",
"S-video",
"LVDS",
"Component",
"DIN",
"DisplayPort",
"HDMI-A",
"HDMI-B",
"TV",
"eDP",
"INTERNAL_VCE"
};
static const char *hpd_names[6] = {
@ -367,7 +348,7 @@ static void radeon_print_display_setup(struct drm_device *dev)
list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
radeon_connector = to_radeon_connector(connector);
DRM_INFO("Connector %d:\n", i);
DRM_INFO(" %s\n", connector_names[connector->connector_type]);
DRM_INFO(" %s\n", drm_get_connector_name(connector));
if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
DRM_INFO(" %s\n", hpd_names[radeon_connector->hpd.hpd]);
if (radeon_connector->ddc_bus) {
@ -433,7 +414,6 @@ static void radeon_print_display_setup(struct drm_device *dev)
static bool radeon_setup_enc_conn(struct drm_device *dev)
{
struct radeon_device *rdev = dev->dev_private;
struct drm_connector *drm_connector;
bool ret = false;
if (rdev->bios) {
@ -453,8 +433,6 @@ static bool radeon_setup_enc_conn(struct drm_device *dev)
if (ret) {
radeon_setup_encoder_clones(dev);
radeon_print_display_setup(dev);
list_for_each_entry(drm_connector, &dev->mode_config.connector_list, head)
radeon_ddc_dump(drm_connector);
}
return ret;
@ -471,16 +449,22 @@ int radeon_ddc_get_modes(struct radeon_connector *radeon_connector)
radeon_router_select_ddc_port(radeon_connector);
if ((radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_DisplayPort) ||
(radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_eDP)) {
(radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_eDP) ||
(radeon_connector_encoder_get_dp_bridge_encoder_id(&radeon_connector->base) !=
ENCODER_OBJECT_ID_NONE)) {
struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
if ((dig->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT ||
dig->dp_sink_type == CONNECTOR_OBJECT_ID_eDP) && dig->dp_i2c_bus)
radeon_connector->edid = drm_get_edid(&radeon_connector->base, &dig->dp_i2c_bus->adapter);
}
if (!radeon_connector->ddc_bus)
return -1;
if (!radeon_connector->edid) {
radeon_connector->edid = drm_get_edid(&radeon_connector->base, &radeon_connector->ddc_bus->adapter);
radeon_connector->edid = drm_get_edid(&radeon_connector->base,
&dig->dp_i2c_bus->adapter);
else if (radeon_connector->ddc_bus && !radeon_connector->edid)
radeon_connector->edid = drm_get_edid(&radeon_connector->base,
&radeon_connector->ddc_bus->adapter);
} else {
if (radeon_connector->ddc_bus && !radeon_connector->edid)
radeon_connector->edid = drm_get_edid(&radeon_connector->base,
&radeon_connector->ddc_bus->adapter);
}
if (!radeon_connector->edid) {
@ -502,34 +486,6 @@ int radeon_ddc_get_modes(struct radeon_connector *radeon_connector)
return 0;
}
static int radeon_ddc_dump(struct drm_connector *connector)
{
struct edid *edid;
struct radeon_connector *radeon_connector = to_radeon_connector(connector);
int ret = 0;
/* on hw with routers, select right port */
if (radeon_connector->router.ddc_valid)
radeon_router_select_ddc_port(radeon_connector);
if (!radeon_connector->ddc_bus)
return -1;
edid = drm_get_edid(connector, &radeon_connector->ddc_bus->adapter);
/* Log EDID retrieval status here. In particular with regard to
* connectors with requires_extended_probe flag set, that will prevent
* function radeon_dvi_detect() to fetch EDID on this connector,
* as long as there is no valid EDID header found */
if (edid) {
DRM_INFO("Radeon display connector %s: Found valid EDID",
drm_get_connector_name(connector));
kfree(edid);
} else {
DRM_INFO("Radeon display connector %s: No monitor connected or invalid EDID",
drm_get_connector_name(connector));
}
return ret;
}
/* avivo */
static void avivo_get_fb_div(struct radeon_pll *pll,
u32 target_clock,
@ -867,15 +823,25 @@ static const struct drm_framebuffer_funcs radeon_fb_funcs = {
.create_handle = radeon_user_framebuffer_create_handle,
};
void
int
radeon_framebuffer_init(struct drm_device *dev,
struct radeon_framebuffer *rfb,
struct drm_mode_fb_cmd *mode_cmd,
struct drm_mode_fb_cmd2 *mode_cmd,
struct drm_gem_object *obj)
{
int ret;
ENTER();
rfb->obj = obj;
drm_framebuffer_init(dev, &rfb->base, &radeon_fb_funcs);
ret = drm_framebuffer_init(dev, &rfb->base, &radeon_fb_funcs);
if (ret) {
rfb->obj = NULL;
return ret;
}
drm_helper_mode_fill_fb_struct(&rfb->base, mode_cmd);
LEAVE();
return 0;
}
@ -885,11 +851,6 @@ static const struct drm_mode_config_funcs radeon_mode_funcs = {
// .output_poll_changed = radeon_output_poll_changed
};
struct drm_prop_enum_list {
int type;
char *name;
};
static struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
{ { 0, "driver" },
{ 1, "bios" },
@ -914,86 +875,53 @@ static struct drm_prop_enum_list radeon_underscan_enum_list[] =
static int radeon_modeset_create_props(struct radeon_device *rdev)
{
int i, sz;
int sz;
if (rdev->is_atom_bios) {
rdev->mode_info.coherent_mode_property =
drm_property_create(rdev->ddev,
DRM_MODE_PROP_RANGE,
"coherent", 2);
drm_property_create_range(rdev->ddev, 0 , "coherent", 0, 1);
if (!rdev->mode_info.coherent_mode_property)
return -ENOMEM;
rdev->mode_info.coherent_mode_property->values[0] = 0;
rdev->mode_info.coherent_mode_property->values[1] = 1;
}
if (!ASIC_IS_AVIVO(rdev)) {
sz = ARRAY_SIZE(radeon_tmds_pll_enum_list);
rdev->mode_info.tmds_pll_property =
drm_property_create(rdev->ddev,
DRM_MODE_PROP_ENUM,
"tmds_pll", sz);
for (i = 0; i < sz; i++) {
drm_property_add_enum(rdev->mode_info.tmds_pll_property,
i,
radeon_tmds_pll_enum_list[i].type,
radeon_tmds_pll_enum_list[i].name);
}
drm_property_create_enum(rdev->ddev, 0,
"tmds_pll",
radeon_tmds_pll_enum_list, sz);
}
rdev->mode_info.load_detect_property =
drm_property_create(rdev->ddev,
DRM_MODE_PROP_RANGE,
"load detection", 2);
drm_property_create_range(rdev->ddev, 0, "load detection", 0, 1);
if (!rdev->mode_info.load_detect_property)
return -ENOMEM;
rdev->mode_info.load_detect_property->values[0] = 0;
rdev->mode_info.load_detect_property->values[1] = 1;
drm_mode_create_scaling_mode_property(rdev->ddev);
sz = ARRAY_SIZE(radeon_tv_std_enum_list);
rdev->mode_info.tv_std_property =
drm_property_create(rdev->ddev,
DRM_MODE_PROP_ENUM,
"tv standard", sz);
for (i = 0; i < sz; i++) {
drm_property_add_enum(rdev->mode_info.tv_std_property,
i,
radeon_tv_std_enum_list[i].type,
radeon_tv_std_enum_list[i].name);
}
drm_property_create_enum(rdev->ddev, 0,
"tv standard",
radeon_tv_std_enum_list, sz);
sz = ARRAY_SIZE(radeon_underscan_enum_list);
rdev->mode_info.underscan_property =
drm_property_create(rdev->ddev,
DRM_MODE_PROP_ENUM,
"underscan", sz);
for (i = 0; i < sz; i++) {
drm_property_add_enum(rdev->mode_info.underscan_property,
i,
radeon_underscan_enum_list[i].type,
radeon_underscan_enum_list[i].name);
}
drm_property_create_enum(rdev->ddev, 0,
"underscan",
radeon_underscan_enum_list, sz);
rdev->mode_info.underscan_hborder_property =
drm_property_create(rdev->ddev,
DRM_MODE_PROP_RANGE,
"underscan hborder", 2);
drm_property_create_range(rdev->ddev, 0,
"underscan hborder", 0, 128);
if (!rdev->mode_info.underscan_hborder_property)
return -ENOMEM;
rdev->mode_info.underscan_hborder_property->values[0] = 0;
rdev->mode_info.underscan_hborder_property->values[1] = 128;
rdev->mode_info.underscan_vborder_property =
drm_property_create(rdev->ddev,
DRM_MODE_PROP_RANGE,
"underscan vborder", 2);
drm_property_create_range(rdev->ddev, 0,
"underscan vborder", 0, 128);
if (!rdev->mode_info.underscan_vborder_property)
return -ENOMEM;
rdev->mode_info.underscan_vborder_property->values[0] = 0;
rdev->mode_info.underscan_vborder_property->values[1] = 128;
return 0;
}
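The open-coded drm_property_create() plus drm_property_add_enum() loops above give way to the combined DRM helpers, and struct drm_prop_enum_list now comes from drm_crtc.h, which is why the local definition is dropped earlier in this hunk. A rough sketch of the new pattern (the list contents are only illustrative):

#include <linux/kernel.h>
#include <drm/drm_crtc.h>

static struct drm_property *example_make_tmds_pll_prop(struct drm_device *dev)
{
        static struct drm_prop_enum_list tmds_pll_list[] = {
                { 0, "driver" },
                { 1, "bios" },
        };

        /* one call allocates the property and registers every enum entry;
         * drm_property_create_range() plays the same role for min/max pairs */
        return drm_property_create_enum(dev, 0, "tmds_pll",
                                        tmds_pll_list, ARRAY_SIZE(tmds_pll_list));
}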
@ -1018,6 +946,93 @@ void radeon_update_display_priority(struct radeon_device *rdev)
}
/*
* Allocate hdmi structs and determine register offsets
*/
static void radeon_afmt_init(struct radeon_device *rdev)
{
int i;
for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++)
rdev->mode_info.afmt[i] = NULL;
if (ASIC_IS_DCE6(rdev)) {
/* todo */
} else if (ASIC_IS_DCE4(rdev)) {
/* DCE4/5 has 6 audio blocks tied to DIG encoders */
/* DCE4.1 has 2 audio blocks tied to DIG encoders */
rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
if (rdev->mode_info.afmt[0]) {
rdev->mode_info.afmt[0]->offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
rdev->mode_info.afmt[0]->id = 0;
}
rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
if (rdev->mode_info.afmt[1]) {
rdev->mode_info.afmt[1]->offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
rdev->mode_info.afmt[1]->id = 1;
}
if (!ASIC_IS_DCE41(rdev)) {
rdev->mode_info.afmt[2] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
if (rdev->mode_info.afmt[2]) {
rdev->mode_info.afmt[2]->offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
rdev->mode_info.afmt[2]->id = 2;
}
rdev->mode_info.afmt[3] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
if (rdev->mode_info.afmt[3]) {
rdev->mode_info.afmt[3]->offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
rdev->mode_info.afmt[3]->id = 3;
}
rdev->mode_info.afmt[4] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
if (rdev->mode_info.afmt[4]) {
rdev->mode_info.afmt[4]->offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
rdev->mode_info.afmt[4]->id = 4;
}
rdev->mode_info.afmt[5] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
if (rdev->mode_info.afmt[5]) {
rdev->mode_info.afmt[5]->offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
rdev->mode_info.afmt[5]->id = 5;
}
}
} else if (ASIC_IS_DCE3(rdev)) {
/* DCE3.x has 2 audio blocks tied to DIG encoders */
rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
if (rdev->mode_info.afmt[0]) {
rdev->mode_info.afmt[0]->offset = DCE3_HDMI_OFFSET0;
rdev->mode_info.afmt[0]->id = 0;
}
rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
if (rdev->mode_info.afmt[1]) {
rdev->mode_info.afmt[1]->offset = DCE3_HDMI_OFFSET1;
rdev->mode_info.afmt[1]->id = 1;
}
} else if (ASIC_IS_DCE2(rdev)) {
/* DCE2 has at least 1 routable audio block */
rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
if (rdev->mode_info.afmt[0]) {
rdev->mode_info.afmt[0]->offset = DCE2_HDMI_OFFSET0;
rdev->mode_info.afmt[0]->id = 0;
}
/* r6xx has 2 routable audio blocks */
if (rdev->family >= CHIP_R600) {
rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
if (rdev->mode_info.afmt[1]) {
rdev->mode_info.afmt[1]->offset = DCE2_HDMI_OFFSET1;
rdev->mode_info.afmt[1]->id = 1;
}
}
}
}
static void radeon_afmt_fini(struct radeon_device *rdev)
{
int i;
for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++) {
kfree(rdev->mode_info.afmt[i]);
rdev->mode_info.afmt[i] = NULL;
}
}
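The DCE4/5 branch of radeon_afmt_init() above spells out each of the six audio blocks by hand; the same logic could hypothetically be written as one loop over the per-CRTC register offsets (only a compression for readability, not how the driver is structured):

static void example_afmt_alloc_dce4(struct radeon_device *rdev, int count)
{
        static const u32 offsets[] = {
                EVERGREEN_CRTC0_REGISTER_OFFSET, EVERGREEN_CRTC1_REGISTER_OFFSET,
                EVERGREEN_CRTC2_REGISTER_OFFSET, EVERGREEN_CRTC3_REGISTER_OFFSET,
                EVERGREEN_CRTC4_REGISTER_OFFSET, EVERGREEN_CRTC5_REGISTER_OFFSET,
        };
        int i;

        for (i = 0; i < count; i++) {
                struct radeon_afmt *afmt = kzalloc(sizeof(*afmt), GFP_KERNEL);

                if (!afmt)
                        continue;       /* allocation failures are tolerated, as above */
                afmt->offset = offsets[i];
                afmt->id = i;
                rdev->mode_info.afmt[i] = afmt;
        }
}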
int radeon_modeset_init(struct radeon_device *rdev)
{
int i;
@ -1026,7 +1041,7 @@ int radeon_modeset_init(struct radeon_device *rdev)
drm_mode_config_init(rdev->ddev);
rdev->mode_info.mode_config_initialized = true;
rdev->ddev->mode_config.funcs = (void *)&radeon_mode_funcs;
rdev->ddev->mode_config.funcs = &radeon_mode_funcs;
if (ASIC_IS_DCE5(rdev)) {
rdev->ddev->mode_config.max_width = 16384;
@ -1039,6 +1054,9 @@ int radeon_modeset_init(struct radeon_device *rdev)
rdev->ddev->mode_config.max_height = 4096;
}
rdev->ddev->mode_config.preferred_depth = 24;
rdev->ddev->mode_config.prefer_shadow = 1;
rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;
ret = radeon_modeset_create_props(rdev);
@ -1066,13 +1084,18 @@ int radeon_modeset_init(struct radeon_device *rdev)
return ret;
}
/* init dig PHYs */
if (rdev->is_atom_bios)
/* init dig PHYs, disp eng pll */
if (rdev->is_atom_bios) {
radeon_atom_encoder_init(rdev);
radeon_atom_disp_eng_pll_init(rdev);
}
/* initialize hpd */
// radeon_hpd_init(rdev);
/* setup afmt */
// radeon_afmt_init(rdev);
/* Initialize power management */
// radeon_pm_init(rdev);
@ -1087,6 +1110,7 @@ void radeon_modeset_fini(struct radeon_device *rdev)
kfree(rdev->mode_info.bios_hardcoded_edid);
if (rdev->mode_info.mode_config_initialized) {
// radeon_afmt_fini(rdev);
// drm_kms_helper_poll_fini(rdev->ddev);
// radeon_hpd_fini(rdev);
drm_mode_config_cleanup(rdev->ddev);
@ -1096,7 +1120,7 @@ void radeon_modeset_fini(struct radeon_device *rdev)
radeon_i2c_fini(rdev);
}
static bool is_hdtv_mode(struct drm_display_mode *mode)
static bool is_hdtv_mode(const struct drm_display_mode *mode)
{
/* try and guess if this is a tv or a monitor */
if ((mode->vdisplay == 480 && mode->hdisplay == 720) || /* 480p */
@ -1109,7 +1133,7 @@ static bool is_hdtv_mode(struct drm_display_mode *mode)
}
bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
struct drm_display_mode *mode,
const struct drm_display_mode *mode,
struct drm_display_mode *adjusted_mode)
{
struct drm_device *dev = crtc->dev;
@ -1126,6 +1150,8 @@ bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
radeon_crtc->h_border = 0;
radeon_crtc->v_border = 0;
ENTER();
list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
if (encoder->crtc != crtc)
continue;
@ -1133,6 +1159,10 @@ bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
connector = radeon_get_connector_for_encoder(encoder);
radeon_connector = to_radeon_connector(connector);
dbgprintf("native_hdisplay %d vdisplay %d\n",
radeon_encoder->native_mode.hdisplay,
radeon_encoder->native_mode.vdisplay);
if (first) {
/* set scaling */
if (radeon_encoder->rmx_type == RMX_OFF)
@ -1198,6 +1228,9 @@ bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
radeon_crtc->vsc.full = dfixed_const(1);
radeon_crtc->hsc.full = dfixed_const(1);
}
LEAVE();
return true;
}

File diff suppressed because it is too large


@ -87,6 +87,10 @@ enum radeon_family {
CHIP_TURKS,
CHIP_CAICOS,
CHIP_CAYMAN,
CHIP_ARUBA,
CHIP_TAHITI,
CHIP_PITCAIRN,
CHIP_VERDE,
CHIP_LAST,
};


@ -27,20 +27,17 @@
#include <linux/slab.h>
#include <linux/fb.h>
#include "drmP.h"
#include "drm.h"
#include "drm_crtc.h"
#include "drm_crtc_helper.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "drm_fb_helper.h"
#include <drm/drm_fb_helper.h>
#include <drm_mm.h>
#include "radeon_object.h"
int radeonfb_create_object(struct radeon_fbdev *rfbdev,
struct drm_mode_fb_cmd *mode_cmd,
int radeonfb_create_pinned_object(struct radeon_fbdev *rfbdev,
struct drm_mode_fb_cmd2 *mode_cmd,
struct drm_gem_object **gobj_p);
/* object hierarchy -
@ -55,7 +52,7 @@ struct radeon_fbdev {
};
static struct fb_ops radeonfb_ops = {
// .owner = THIS_MODULE,
.owner = THIS_MODULE,
.fb_check_var = drm_fb_helper_check_var,
.fb_set_par = drm_fb_helper_set_par,
// .fb_fillrect = cfb_fillrect,
@ -98,7 +95,7 @@ static int radeonfb_create(struct radeon_fbdev *rfbdev,
struct radeon_device *rdev = rfbdev->rdev;
struct fb_info *info;
struct drm_framebuffer *fb = NULL;
struct drm_mode_fb_cmd mode_cmd;
struct drm_mode_fb_cmd2 mode_cmd;
struct drm_gem_object *gobj = NULL;
struct radeon_bo *rbo = NULL;
struct device *device = &rdev->pdev->dev;
@ -114,22 +111,32 @@ static int radeonfb_create(struct radeon_fbdev *rfbdev,
if ((sizes->surface_bpp == 24) && ASIC_IS_AVIVO(rdev))
sizes->surface_bpp = 32;
mode_cmd.bpp = sizes->surface_bpp;
mode_cmd.depth = sizes->surface_depth;
mode_cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp,
sizes->surface_depth);
ret = radeonfb_create_pinned_object(rfbdev, &mode_cmd, &gobj);
if (ret) {
DRM_ERROR("failed to create fbcon object %d\n", ret);
return ret;
}
ret = radeonfb_create_object(rfbdev, &mode_cmd, &gobj);
rbo = gem_to_radeon_bo(gobj);
/* okay we have an object now allocate the framebuffer */
info = framebuffer_alloc(0, device);
if (info == NULL) {
dbgprintf("framebuffer_alloc\n");
ret = -ENOMEM;
goto out_unref;
}
info->par = rfbdev;
radeon_framebuffer_init(rdev->ddev, &rfbdev->rfb, &mode_cmd, gobj);
ret = radeon_framebuffer_init(rdev->ddev, &rfbdev->rfb, &mode_cmd, gobj);
if (ret) {
DRM_ERROR("failed to initalise framebuffer %d\n", ret);
goto out_unref;
}
fb = &rfbdev->rfb.base;
@ -141,7 +148,7 @@ static int radeonfb_create(struct radeon_fbdev *rfbdev,
strcpy(info->fix.id, "radeondrmfb");
drm_fb_helper_fill_fix(info, fb->pitch, fb->depth);
drm_fb_helper_fill_fix(info, fb->pitches[0], fb->depth);
info->flags = FBINFO_DEFAULT | FBINFO_CAN_FORCE_OUTPUT;
info->fbops = &radeonfb_ops;
@ -163,21 +170,17 @@ static int radeonfb_create(struct radeon_fbdev *rfbdev,
info->apertures->ranges[0].base = rdev->ddev->mode_config.fb_base;
info->apertures->ranges[0].size = rdev->mc.aper_size;
// info->pixmap.size = 64*1024;
// info->pixmap.buf_align = 8;
// info->pixmap.access_align = 32;
// info->pixmap.flags = FB_PIXMAP_SYSTEM;
// info->pixmap.scan_align = 1;
/* Use default scratch pixmap (info->pixmap.flags = FB_PIXMAP_SYSTEM) */
if (info->screen_base == NULL) {
ret = -ENOSPC;
goto out_unref;
}
// if (info->screen_base == NULL) {
// ret = -ENOSPC;
// goto out_unref;
// }
DRM_INFO("fb mappable at 0x%lX\n", info->fix.smem_start);
DRM_INFO("vram apper at 0x%lX\n", (unsigned long)rdev->mc.aper_base);
DRM_INFO("size %lu\n", (unsigned long)radeon_bo_size(rbo));
DRM_INFO("fb depth is %d\n", fb->depth);
DRM_INFO(" pitch is %d\n", fb->pitch);
DRM_INFO(" pitch is %d\n", fb->pitches[0]);
LEAVE();

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -25,14 +25,14 @@
* Alex Deucher
* Jerome Glisse
*/
#include "drmP.h"
#include "drm.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
int radeon_gem_object_init(struct drm_gem_object *obj)
{
/* we do nothing here */
BUG();
return 0;
}
@ -46,11 +46,12 @@ void radeon_gem_object_free(struct drm_gem_object *gobj)
}
int radeon_gem_object_create(struct radeon_device *rdev, int size,
int alignment, int initial_domain,
bool discardable, bool kernel,
struct drm_gem_object **obj)
int alignment, int initial_domain,
bool discardable, bool kernel,
struct drm_gem_object **obj)
{
struct radeon_bo *robj;
unsigned long max_size;
int r;
*obj = NULL;
@ -58,11 +59,26 @@ int radeon_gem_object_create(struct radeon_device *rdev, int size,
if (alignment < PAGE_SIZE) {
alignment = PAGE_SIZE;
}
r = radeon_bo_create(rdev, size, alignment, kernel, initial_domain, &robj);
/* maximum bo size is the minimum of visible vram and gtt size */
max_size = min(rdev->mc.visible_vram_size, rdev->mc.gtt_size);
if (size > max_size) {
printk(KERN_WARNING "%s:%d alloc size %dMb bigger than %ldMb limit\n",
__func__, __LINE__, size >> 20, max_size >> 20);
return -ENOMEM;
}
retry:
r = radeon_bo_create(rdev, size, alignment, kernel, initial_domain, NULL, &robj);
if (r) {
if (r != -ERESTARTSYS)
if (r != -ERESTARTSYS) {
if (initial_domain == RADEON_GEM_DOMAIN_VRAM) {
initial_domain |= RADEON_GEM_DOMAIN_GTT;
goto retry;
}
DRM_ERROR("Failed to allocate GEM object (%d, %d, %u, %d)\n",
size, initial_domain, alignment, r);
}
return r;
}
*obj = &robj->gem_base;
@ -116,7 +132,7 @@ int radeon_gem_set_domain(struct drm_gem_object *gobj,
}
if (!domain) {
/* Do nothing */
printk(KERN_WARNING "Set domain withou domain !\n");
printk(KERN_WARNING "Set domain without domain !\n");
return 0;
}
if (domain == RADEON_GEM_DOMAIN_CPU) {
@ -151,6 +167,7 @@ int radeon_gem_info_ioctl(struct drm_device *dev, void *data,
struct radeon_device *rdev = dev->dev_private;
struct drm_radeon_gem_info *args = data;
struct ttm_mem_type_manager *man;
unsigned i;
man = &rdev->mman.bdev.man[TTM_PL_VRAM];
@ -159,8 +176,9 @@ int radeon_gem_info_ioctl(struct drm_device *dev, void *data,
if (rdev->stollen_vga_memory)
args->vram_visible -= radeon_bo_size(rdev->stollen_vga_memory);
args->vram_visible -= radeon_fbdev_total_size(rdev);
args->gart_size = rdev->mc.gtt_size - rdev->cp.ring_size - 4096 -
RADEON_IB_POOL_SIZE*64*1024;
args->gart_size = rdev->mc.gtt_size - 4096 - RADEON_IB_POOL_SIZE*64*1024;
for(i = 0; i < RADEON_NUM_RINGS; ++i)
args->gart_size -= rdev->ring[i].ring_size;
return 0;
}
@ -189,21 +207,27 @@ int radeon_gem_create_ioctl(struct drm_device *dev, void *data,
uint32_t handle;
int r;
down_read(&rdev->exclusive_lock);
/* create a gem object to contain this object in */
args->size = roundup(args->size, PAGE_SIZE);
r = radeon_gem_object_create(rdev, args->size, args->alignment,
args->initial_domain, false,
false, &gobj);
if (r) {
up_read(&rdev->exclusive_lock);
r = radeon_gem_handle_lockup(rdev, r);
return r;
}
r = drm_gem_handle_create(filp, gobj, &handle);
/* drop reference from allocate - handle holds it now */
drm_gem_object_unreference_unlocked(gobj);
if (r) {
up_read(&rdev->exclusive_lock);
r = radeon_gem_handle_lockup(rdev, r);
return r;
}
args->handle = handle;
up_read(&rdev->exclusive_lock);
return 0;
}
@ -212,6 +236,7 @@ int radeon_gem_set_domain_ioctl(struct drm_device *dev, void *data,
{
/* transition the BO to a domain -
* just validate the BO into a certain domain */
struct radeon_device *rdev = dev->dev_private;
struct drm_radeon_gem_set_domain *args = data;
struct drm_gem_object *gobj;
struct radeon_bo *robj;
@ -219,10 +244,12 @@ int radeon_gem_set_domain_ioctl(struct drm_device *dev, void *data,
/* for now if someone requests domain CPU -
* just make sure the buffer is finished with */
down_read(&rdev->exclusive_lock);
/* just do a BO wait for now */
gobj = drm_gem_object_lookup(dev, filp, args->handle);
if (gobj == NULL) {
up_read(&rdev->exclusive_lock);
return -ENOENT;
}
robj = gem_to_radeon_bo(gobj);
@ -230,6 +257,8 @@ int radeon_gem_set_domain_ioctl(struct drm_device *dev, void *data,
r = radeon_gem_set_domain(gobj, args->read_domains, args->write_domain);
drm_gem_object_unreference_unlocked(gobj);
up_read(&rdev->exclusive_lock);
r = radeon_gem_handle_lockup(robj->rdev, r);
return r;
}
@ -261,6 +290,7 @@ int radeon_gem_mmap_ioctl(struct drm_device *dev, void *data,
int radeon_gem_busy_ioctl(struct drm_device *dev, void *data,
struct drm_file *filp)
{
struct radeon_device *rdev = dev->dev_private;
struct drm_radeon_gem_busy *args = data;
struct drm_gem_object *gobj;
struct radeon_bo *robj;
@ -286,12 +316,14 @@ int radeon_gem_busy_ioctl(struct drm_device *dev, void *data,
break;
}
drm_gem_object_unreference_unlocked(gobj);
r = radeon_gem_handle_lockup(rdev, r);
return r;
}
int radeon_gem_wait_idle_ioctl(struct drm_device *dev, void *data,
struct drm_file *filp)
{
struct radeon_device *rdev = dev->dev_private;
struct drm_radeon_gem_wait_idle *args = data;
struct drm_gem_object *gobj;
struct radeon_bo *robj;
@ -304,9 +336,10 @@ int radeon_gem_wait_idle_ioctl(struct drm_device *dev, void *data,
robj = gem_to_radeon_bo(gobj);
r = radeon_bo_wait(robj, NULL, false);
/* callback hw specific functions if any */
if (robj->rdev->asic->ioctl_wait_idle)
robj->rdev->asic->ioctl_wait_idle(robj->rdev, robj);
if (rdev->asic->ioctl_wait_idle)
robj->rdev->asic->ioctl_wait_idle(rdev, robj);
drm_gem_object_unreference_unlocked(gobj);
r = radeon_gem_handle_lockup(rdev, r);
return r;
}


@ -23,39 +23,42 @@
* Authors: Dave Airlie
* Alex Deucher
*/
#include "drmP.h"
#include "radeon_drm.h"
#include <linux/export.h>
#include <drm/drmP.h>
#include <drm/drm_edid.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "atom.h"
extern int radeon_atom_hw_i2c_xfer(struct i2c_adapter *i2c_adap,
struct i2c_msg *msgs, int num);
extern u32 radeon_atom_hw_i2c_func(struct i2c_adapter *adap);
/**
* radeon_ddc_probe
*
*/
bool radeon_ddc_probe(struct radeon_connector *radeon_connector, bool requires_extended_probe)
bool radeon_ddc_probe(struct radeon_connector *radeon_connector)
{
u8 out = 0x0;
u8 buf[8];
int ret;
struct i2c_msg msgs[] = {
{
.addr = 0x50,
.addr = DDC_ADDR,
.flags = 0,
.len = 1,
.buf = &out,
},
{
.addr = 0x50,
.addr = DDC_ADDR,
.flags = I2C_M_RD,
.len = 1,
.len = 8,
.buf = buf,
}
};
/* Read 8 bytes from i2c for extended probe of EDID header */
if (requires_extended_probe)
msgs[1].len = 8;
/* on hw with routers, select right port */
if (radeon_connector->router.ddc_valid)
radeon_router_select_ddc_port(radeon_connector);
@ -64,7 +67,6 @@ bool radeon_ddc_probe(struct radeon_connector *radeon_connector, bool requires_e
if (ret != 2)
/* Couldn't find an accessible DDC on this connector */
return false;
if (requires_extended_probe) {
/* Probe also for valid EDID header
* EDID header starts with:
* 0x00,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0x00.
@ -75,14 +77,14 @@ bool radeon_ddc_probe(struct radeon_connector *radeon_connector, bool requires_e
* connector */
return false;
}
}
return true;
}
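With the requires_extended_probe flag gone, radeon_ddc_probe() always reads eight bytes from DDC_ADDR and only reports a working DDC when they form the fixed EDID preamble; the check amounts to the comparison below (the name example_edid_header_ok is ours, drm_edid.c has an equivalent drm_edid_header_is_valid() helper):

#include <linux/types.h>
#include <linux/string.h>

static bool example_edid_header_ok(const u8 buf[8])
{
        static const u8 edid_header[8] = {
                0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00
        };

        /* a connector only counts as present if the preamble matches */
        return memcmp(buf, edid_header, sizeof(edid_header)) == 0;
}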
/* bit banging i2c */
static void radeon_i2c_do_lock(struct radeon_i2c_chan *i2c, int lock_state)
static int pre_xfer(struct i2c_adapter *i2c_adap)
{
struct radeon_i2c_chan *i2c = i2c_get_adapdata(i2c_adap);
struct radeon_device *rdev = i2c->dev->dev_private;
struct radeon_i2c_bus_rec *rec = &i2c->rec;
uint32_t temp;
@ -137,19 +139,30 @@ static void radeon_i2c_do_lock(struct radeon_i2c_chan *i2c, int lock_state)
WREG32(rec->en_data_reg, temp);
/* mask the gpio pins for software use */
temp = RREG32(rec->mask_clk_reg);
if (lock_state)
temp |= rec->mask_clk_mask;
else
temp &= ~rec->mask_clk_mask;
temp = RREG32(rec->mask_clk_reg) | rec->mask_clk_mask;
WREG32(rec->mask_clk_reg, temp);
temp = RREG32(rec->mask_clk_reg);
temp = RREG32(rec->mask_data_reg) | rec->mask_data_mask;
WREG32(rec->mask_data_reg, temp);
temp = RREG32(rec->mask_data_reg);
if (lock_state)
temp |= rec->mask_data_mask;
else
temp &= ~rec->mask_data_mask;
return 0;
}
static void post_xfer(struct i2c_adapter *i2c_adap)
{
struct radeon_i2c_chan *i2c = i2c_get_adapdata(i2c_adap);
struct radeon_device *rdev = i2c->dev->dev_private;
struct radeon_i2c_bus_rec *rec = &i2c->rec;
uint32_t temp;
/* unmask the gpio pins for software use */
temp = RREG32(rec->mask_clk_reg) & ~rec->mask_clk_mask;
WREG32(rec->mask_clk_reg, temp);
temp = RREG32(rec->mask_clk_reg);
temp = RREG32(rec->mask_data_reg) & ~rec->mask_data_mask;
WREG32(rec->mask_data_reg, temp);
temp = RREG32(rec->mask_data_reg);
}
@ -209,22 +222,6 @@ static void set_data(void *i2c_priv, int data)
WREG32(rec->en_data_reg, val);
}
static int pre_xfer(struct i2c_adapter *i2c_adap)
{
struct radeon_i2c_chan *i2c = i2c_get_adapdata(i2c_adap);
radeon_i2c_do_lock(i2c, 1);
return 0;
}
static void post_xfer(struct i2c_adapter *i2c_adap)
{
struct radeon_i2c_chan *i2c = i2c_get_adapdata(i2c_adap);
radeon_i2c_do_lock(i2c, 0);
}
/* hw i2c */
static u32 radeon_get_i2c_prescale(struct radeon_device *rdev)
@ -890,6 +887,11 @@ static const struct i2c_algorithm radeon_i2c_algo = {
.functionality = radeon_hw_i2c_func,
};
static const struct i2c_algorithm radeon_atom_i2c_algo = {
.master_xfer = radeon_atom_hw_i2c_xfer,
.functionality = radeon_atom_hw_i2c_func,
};
struct radeon_i2c_chan *radeon_i2c_create(struct drm_device *dev,
struct radeon_i2c_bus_rec *rec,
const char *name)
@ -898,12 +900,17 @@ struct radeon_i2c_chan *radeon_i2c_create(struct drm_device *dev,
struct radeon_i2c_chan *i2c;
int ret;
/* don't add the mm_i2c bus unless hw_i2c is enabled */
if (rec->mm_i2c && (radeon_hw_i2c == 0))
return NULL;
i2c = kzalloc(sizeof(struct radeon_i2c_chan), GFP_KERNEL);
if (i2c == NULL)
return NULL;
i2c->rec = *rec;
// i2c->adapter.owner = THIS_MODULE;
i2c->adapter.owner = THIS_MODULE;
i2c->adapter.class = I2C_CLASS_DDC;
i2c->dev = dev;
i2c_set_adapdata(&i2c->adapter, i2c);
if (rec->mm_i2c ||
@ -915,11 +922,23 @@ struct radeon_i2c_chan *radeon_i2c_create(struct drm_device *dev,
snprintf(i2c->adapter.name, sizeof(i2c->adapter.name),
"Radeon i2c hw bus %s", name);
i2c->adapter.algo = &radeon_i2c_algo;
// ret = i2c_add_adapter(&i2c->adapter);
// if (ret) {
// DRM_ERROR("Failed to register hw i2c %s\n", name);
// goto out_free;
// }
// ret = i2c_add_adapter(&i2c->adapter);
// if (ret) {
// DRM_ERROR("Failed to register hw i2c %s\n", name);
// goto out_free;
// }
} else if (rec->hw_capable &&
radeon_hw_i2c &&
ASIC_IS_DCE3(rdev)) {
/* hw i2c using atom */
snprintf(i2c->adapter.name, sizeof(i2c->adapter.name),
"Radeon i2c hw bus %s", name);
i2c->adapter.algo = &radeon_atom_i2c_algo;
// ret = i2c_add_adapter(&i2c->adapter);
// if (ret) {
// DRM_ERROR("Failed to register hw i2c %s\n", name);
// goto out_free;
// }
} else {
/* set the radeon bit adapter */
snprintf(i2c->adapter.name, sizeof(i2c->adapter.name),
@ -931,10 +950,8 @@ struct radeon_i2c_chan *radeon_i2c_create(struct drm_device *dev,
i2c->algo.bit.setscl = set_clock;
i2c->algo.bit.getsda = get_data;
i2c->algo.bit.getscl = get_clock;
i2c->algo.bit.udelay = 20;
/* vesa says 2.2 ms is enough, 1 jiffy doesn't seem to always
* make this, 2 jiffies is a lot more reliable */
i2c->algo.bit.timeout = 2;
i2c->algo.bit.udelay = 10;
i2c->algo.bit.timeout = usecs_to_jiffies(2200); /* from VESA */
i2c->algo.bit.data = i2c;
ret = i2c_bit_add_bus(&i2c->adapter);
if (ret) {
@ -962,7 +979,7 @@ struct radeon_i2c_chan *radeon_i2c_create_dp(struct drm_device *dev,
return NULL;
i2c->rec = *rec;
// i2c->adapter.owner = THIS_MODULE;
i2c->adapter.owner = THIS_MODULE;
i2c->adapter.class = I2C_CLASS_DDC;
i2c->dev = dev;
snprintf(i2c->adapter.name, sizeof(i2c->adapter.name),


@ -25,75 +25,120 @@
* Alex Deucher
* Jerome Glisse
*/
#include "drmP.h"
#include "drm_crtc_helper.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/radeon_drm.h>
#include "radeon_reg.h"
#include "radeon.h"
#include "atom.h"
#define RADEON_WAIT_IDLE_TIMEOUT 200
struct radeon_device *main_device;
extern int irq_override;
/**
* radeon_driver_irq_handler_kms - irq handler for KMS
*
* @DRM_IRQ_ARGS: args
*
* This is the irq handler for the radeon KMS driver (all asics).
* radeon_irq_process is a macro that points to the per-asic
* irq handler callback.
*/
void irq_handler_kms()
{
// dbgprintf("%s\n",__FUNCTION__);
radeon_irq_process(main_device);
}
static void radeon_irq_preinstall(struct radeon_device *rdev)
/**
* radeon_driver_irq_preinstall_kms - drm irq preinstall callback
*
* @dev: drm dev pointer
*
* Gets the hw ready to enable irqs (all asics).
* This function disables all interrupt sources on the GPU.
*/
void radeon_irq_preinstall_kms(struct radeon_device *rdev)
{
unsigned long irqflags;
unsigned i;
spin_lock_irqsave(&rdev->irq.lock, irqflags);
/* Disable *all* interrupts */
rdev->irq.sw_int = false;
rdev->irq.gui_idle = false;
for (i = 0; i < rdev->num_crtc; i++)
for (i = 0; i < RADEON_NUM_RINGS; i++)
atomic_set(&rdev->irq.ring_int[i], 0);
for (i = 0; i < RADEON_MAX_HPD_PINS; i++)
rdev->irq.hpd[i] = false;
for (i = 0; i < RADEON_MAX_CRTCS; i++) {
rdev->irq.crtc_vblank_int[i] = false;
for (i = 0; i < 6; i++) {
rdev->irq.hpd[i] = false;
rdev->irq.pflip[i] = false;
atomic_set(&rdev->irq.pflip[i], 0);
rdev->irq.afmt[i] = false;
}
radeon_irq_set(rdev);
spin_unlock_irqrestore(&rdev->irq.lock, irqflags);
/* Clear bits */
radeon_irq_process(rdev);
}
int radeon_driver_irq_postinstall(struct radeon_device *rdev)
/**
* radeon_driver_irq_postinstall_kms - drm irq postinstall callback
*
* @dev: drm dev pointer
*
* Handles stuff to be done after enabling irqs (all asics).
* Returns 0 on success.
*/
int radeon_driver_irq_postinstall_kms(struct radeon_device *rdev)
{
// struct radeon_device *rdev = dev->dev_private;
// dev->max_vblank_count = 0x001fffff;
rdev->irq.sw_int = true;
radeon_irq_set(rdev);
return 0;
}
/**
* radeon_irq_kms_init - init driver interrupt info
*
* @rdev: radeon device pointer
*
* Sets up the work irq handlers, vblank init, MSIs, etc. (all asics).
* Returns 0 for success, error for failure.
*/
int radeon_irq_kms_init(struct radeon_device *rdev)
{
int i;
int irq_line;
int r = 0;
ENTER();
// INIT_WORK(&rdev->hotplug_work, radeon_hotplug_work_func);
// INIT_WORK(&rdev->hotplug_work, radeon_hotplug_work_func);
// INIT_WORK(&rdev->audio_work, r600_audio_update_hdmi);
spin_lock_init(&rdev->irq.sw_lock);
for (i = 0; i < rdev->num_crtc; i++)
spin_lock_init(&rdev->irq.pflip_lock[i]);
spin_lock_init(&rdev->irq.lock);
// r = drm_vblank_init(rdev->ddev, rdev->num_crtc);
// if (r) {
// return r;
// }
/* enable msi */
rdev->msi_enabled = 0;
// if (radeon_msi_ok(rdev)) {
// int ret = pci_enable_msi(rdev->pdev);
// if (!ret) {
// rdev->msi_enabled = 1;
// dev_info(rdev->dev, "radeon: using MSI.\n");
// }
// }
rdev->irq.installed = true;
main_device = rdev;
radeon_irq_preinstall(rdev);
radeon_irq_preinstall_kms(rdev);
if (irq_override)
irq_line = irq_override;
@ -106,41 +151,81 @@ int radeon_irq_kms_init(struct radeon_device *rdev)
// r = drm_irq_install(rdev->ddev);
r = radeon_driver_irq_postinstall(rdev);
r = radeon_driver_irq_postinstall_kms(rdev);
if (r) {
rdev->irq.installed = false;
LEAVE();
return r;
}
DRM_INFO("radeon: irq initialized.\n");
return 0;
}
/**
* radeon_irq_kms_fini - tear down driver interrupt info
*
* @rdev: radeon device pointer
*
* Tears down the work irq handlers, vblank handlers, MSIs, etc. (all asics).
*/
void radeon_irq_kms_fini(struct radeon_device *rdev)
{
// drm_vblank_cleanup(rdev->ddev);
if (rdev->irq.installed) {
// drm_irq_uninstall(rdev->ddev);
rdev->irq.installed = false;
// if (rdev->msi_enabled)
// pci_disable_msi(rdev->pdev);
}
// flush_work(&rdev->hotplug_work);
}
void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev)
/**
* radeon_irq_kms_sw_irq_get - enable software interrupt
*
* @rdev: radeon device pointer
* @ring: ring whose interrupt you want to enable
*
* Enables the software interrupt for a specific ring (all asics).
* The software interrupt is generally used to signal a fence on
* a particular ring.
*/
void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev, int ring)
{
unsigned long irqflags;
spin_lock_irqsave(&rdev->irq.sw_lock, irqflags);
if (rdev->ddev->irq_enabled && (++rdev->irq.sw_refcount == 1)) {
rdev->irq.sw_int = true;
if (!rdev->ddev->irq_enabled)
return;
if (atomic_inc_return(&rdev->irq.ring_int[ring]) == 1) {
spin_lock_irqsave(&rdev->irq.lock, irqflags);
radeon_irq_set(rdev);
spin_unlock_irqrestore(&rdev->irq.lock, irqflags);
}
spin_unlock_irqrestore(&rdev->irq.sw_lock, irqflags);
}
void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev)
/**
* radeon_irq_kms_sw_irq_put - disable software interrupt
*
* @rdev: radeon device pointer
* @ring: ring whose interrupt you want to disable
*
* Disables the software interrupt for a specific ring (all asics).
* The software interrupt is generally used to signal a fence on
* a particular ring.
*/
void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev, int ring)
{
unsigned long irqflags;
spin_lock_irqsave(&rdev->irq.sw_lock, irqflags);
BUG_ON(rdev->ddev->irq_enabled && rdev->irq.sw_refcount <= 0);
if (rdev->ddev->irq_enabled && (--rdev->irq.sw_refcount == 0)) {
rdev->irq.sw_int = false;
if (!rdev->ddev->irq_enabled)
return;
if (atomic_dec_and_test(&rdev->irq.ring_int[ring])) {
spin_lock_irqsave(&rdev->irq.lock, irqflags);
radeon_irq_set(rdev);
spin_unlock_irqrestore(&rdev->irq.lock, irqflags);
}
spin_unlock_irqrestore(&rdev->irq.sw_lock, irqflags);
}
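The software interrupt is now reference counted per ring with atomics under irq.lock rather than through a single sw_refcount under sw_lock. A sketch of the intended calling pattern, paraphrasing a fence-wait path rather than quoting one from this commit:

void example_wait_with_sw_irq(struct radeon_device *rdev, int ring)
{
        radeon_irq_kms_sw_irq_get(rdev, ring);  /* first reference programs the irq */

        /* ... sleep until the fence for this ring signals ... */

        radeon_irq_kms_sw_irq_put(rdev, ring);  /* last reference disables it again */
}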


@ -206,11 +206,6 @@ static void radeon_legacy_rmx_mode_set(struct drm_crtc *crtc,
WREG32(RADEON_FP_CRTC_V_TOTAL_DISP, fp_crtc_v_total_disp);
}
void radeon_restore_common_regs(struct drm_device *dev)
{
/* don't need this yet */
}
static void radeon_pll_wait_for_read_update_complete(struct drm_device *dev)
{
struct radeon_device *rdev = dev->dev_private;
@ -295,7 +290,7 @@ static uint8_t radeon_compute_pll_gain(uint16_t ref_freq, uint16_t ref_div,
return 1;
}
void radeon_crtc_dpms(struct drm_crtc *crtc, int mode)
static void radeon_crtc_dpms(struct drm_crtc *crtc, int mode)
{
struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
struct drm_device *dev = crtc->dev;
@ -419,6 +414,7 @@ int radeon_crtc_do_set_base(struct drm_crtc *crtc,
r = radeon_bo_reserve(rbo, false);
if (unlikely(r != 0))
return r;
/* Only 27 bit offset for legacy CRTC */
r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &base);
if (unlikely(r != 0)) {
radeon_bo_unreserve(rbo);
@ -437,7 +433,7 @@ int radeon_crtc_do_set_base(struct drm_crtc *crtc,
crtc_offset_cntl = 0;
pitch_pixels = target_fb->pitch / (target_fb->bits_per_pixel / 8);
pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
crtc_pitch = (((pitch_pixels * target_fb->bits_per_pixel) +
((target_fb->bits_per_pixel * 8) - 1)) /
(target_fb->bits_per_pixel * 8));
@ -988,15 +984,9 @@ static void radeon_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
}
static bool radeon_crtc_mode_fixup(struct drm_crtc *crtc,
struct drm_display_mode *mode,
const struct drm_display_mode *mode,
struct drm_display_mode *adjusted_mode)
{
struct drm_device *dev = crtc->dev;
struct radeon_device *rdev = dev->dev_private;
/* adjust pm to upcoming mode change */
radeon_pm_compute_clocks(rdev);
if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
return false;
return true;
@ -1029,9 +1019,11 @@ static int radeon_crtc_mode_set(struct drm_crtc *crtc,
static void radeon_crtc_prepare(struct drm_crtc *crtc)
{
struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
struct drm_device *dev = crtc->dev;
struct drm_crtc *crtci;
radeon_crtc->in_mode_set = true;
/*
* The hardware wedges sometimes if you reconfigure one CRTC
* whilst another is running (see fdo bug #24611).
@ -1042,6 +1034,7 @@ static void radeon_crtc_prepare(struct drm_crtc *crtc)
static void radeon_crtc_commit(struct drm_crtc *crtc)
{
struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
struct drm_device *dev = crtc->dev;
struct drm_crtc *crtci;
@ -1052,6 +1045,7 @@ static void radeon_crtc_commit(struct drm_crtc *crtc)
if (crtci->enabled)
radeon_crtc_dpms(crtci, DRM_MODE_DPMS_ON);
}
radeon_crtc->in_mode_set = false;
}
static const struct drm_crtc_helper_funcs legacy_helper_funcs = {


@ -23,11 +23,15 @@
* Authors: Dave Airlie
* Alex Deucher
*/
#include "drmP.h"
#include "drm_crtc_helper.h"
#include "radeon_drm.h"
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "atom.h"
#include <linux/backlight.h>
#ifdef CONFIG_PMAC_BACKLIGHT
#include <asm/backlight.h>
#endif
static void radeon_legacy_encoder_disable(struct drm_encoder *encoder)
{
@ -84,7 +88,7 @@ static void radeon_legacy_lvds_update(struct drm_encoder *encoder, int mode)
lvds_pll_cntl = RREG32(RADEON_LVDS_PLL_CNTL);
lvds_pll_cntl |= RADEON_LVDS_PLL_EN;
WREG32(RADEON_LVDS_PLL_CNTL, lvds_pll_cntl);
udelay(1000);
mdelay(1);
lvds_pll_cntl = RREG32(RADEON_LVDS_PLL_CNTL);
lvds_pll_cntl &= ~RADEON_LVDS_PLL_RESET;
@ -97,7 +101,7 @@ static void radeon_legacy_lvds_update(struct drm_encoder *encoder, int mode)
(backlight_level << RADEON_LVDS_BL_MOD_LEVEL_SHIFT));
if (is_mac)
lvds_gen_cntl |= RADEON_LVDS_BL_MOD_EN;
udelay(panel_pwr_delay * 1000);
mdelay(panel_pwr_delay);
WREG32(RADEON_LVDS_GEN_CNTL, lvds_gen_cntl);
break;
case DRM_MODE_DPMS_STANDBY:
@ -114,10 +118,10 @@ static void radeon_legacy_lvds_update(struct drm_encoder *encoder, int mode)
WREG32(RADEON_LVDS_GEN_CNTL, lvds_gen_cntl);
lvds_gen_cntl &= ~(RADEON_LVDS_ON | RADEON_LVDS_BLON | RADEON_LVDS_EN | RADEON_LVDS_DIGON);
}
udelay(panel_pwr_delay * 1000);
mdelay(panel_pwr_delay);
WREG32(RADEON_LVDS_GEN_CNTL, lvds_gen_cntl);
WREG32_PLL(RADEON_PIXCLKS_CNTL, pixclks_cntl);
udelay(panel_pwr_delay * 1000);
mdelay(panel_pwr_delay);
break;
}
@ -240,7 +244,7 @@ static void radeon_legacy_lvds_mode_set(struct drm_encoder *encoder,
}
static bool radeon_legacy_mode_fixup(struct drm_encoder *encoder,
struct drm_display_mode *mode,
const struct drm_display_mode *mode,
struct drm_display_mode *adjusted_mode)
{
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
@ -265,15 +269,49 @@ static const struct drm_encoder_helper_funcs radeon_legacy_lvds_helper_funcs = {
.disable = radeon_legacy_encoder_disable,
};
u8
radeon_legacy_get_backlight_level(struct radeon_encoder *radeon_encoder)
{
struct drm_device *dev = radeon_encoder->base.dev;
struct radeon_device *rdev = dev->dev_private;
u8 backlight_level;
backlight_level = (RREG32(RADEON_LVDS_GEN_CNTL) >>
RADEON_LVDS_BL_MOD_LEVEL_SHIFT) & 0xff;
return backlight_level;
}
void
radeon_legacy_set_backlight_level(struct radeon_encoder *radeon_encoder, u8 level)
{
struct drm_device *dev = radeon_encoder->base.dev;
struct radeon_device *rdev = dev->dev_private;
int dpms_mode = DRM_MODE_DPMS_ON;
if (radeon_encoder->enc_priv) {
if (rdev->is_atom_bios) {
struct radeon_encoder_atom_dig *lvds = radeon_encoder->enc_priv;
if (lvds->backlight_level > 0)
dpms_mode = lvds->dpms_mode;
else
dpms_mode = DRM_MODE_DPMS_OFF;
lvds->backlight_level = level;
} else {
struct radeon_encoder_lvds *lvds = radeon_encoder->enc_priv;
if (lvds->backlight_level > 0)
dpms_mode = lvds->dpms_mode;
else
dpms_mode = DRM_MODE_DPMS_OFF;
lvds->backlight_level = level;
}
}
radeon_legacy_lvds_update(&radeon_encoder->base, dpms_mode);
}
#if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
#define MAX_RADEON_LEVEL 0xFF
struct radeon_backlight_privdata {
struct radeon_encoder *encoder;
uint8_t negative;
};
static uint8_t radeon_legacy_lvds_level(struct backlight_device *bd)
{
struct radeon_backlight_privdata *pdata = bl_get_data(bd);
@ -282,13 +320,13 @@ static uint8_t radeon_legacy_lvds_level(struct backlight_device *bd)
/* Convert brightness to hardware level */
if (bd->props.brightness < 0)
level = 0;
else if (bd->props.brightness > MAX_RADEON_LEVEL)
level = MAX_RADEON_LEVEL;
else if (bd->props.brightness > RADEON_MAX_BL_LEVEL)
level = RADEON_MAX_BL_LEVEL;
else
level = bd->props.brightness;
if (pdata->negative)
level = MAX_RADEON_LEVEL - level;
level = RADEON_MAX_BL_LEVEL - level;
return level;
}
@ -297,26 +335,9 @@ static int radeon_legacy_backlight_update_status(struct backlight_device *bd)
{
struct radeon_backlight_privdata *pdata = bl_get_data(bd);
struct radeon_encoder *radeon_encoder = pdata->encoder;
struct drm_device *dev = radeon_encoder->base.dev;
struct radeon_device *rdev = dev->dev_private;
int dpms_mode = DRM_MODE_DPMS_ON;
if (radeon_encoder->enc_priv) {
if (rdev->is_atom_bios) {
struct radeon_encoder_atom_dig *lvds = radeon_encoder->enc_priv;
dpms_mode = lvds->dpms_mode;
lvds->backlight_level = radeon_legacy_lvds_level(bd);
} else {
struct radeon_encoder_lvds *lvds = radeon_encoder->enc_priv;
dpms_mode = lvds->dpms_mode;
lvds->backlight_level = radeon_legacy_lvds_level(bd);
}
}
if (bd->props.brightness > 0)
radeon_legacy_lvds_update(&radeon_encoder->base, dpms_mode);
else
radeon_legacy_lvds_update(&radeon_encoder->base, DRM_MODE_DPMS_OFF);
radeon_legacy_set_backlight_level(radeon_encoder,
radeon_legacy_lvds_level(bd));
return 0;
}
@ -332,7 +353,7 @@ static int radeon_legacy_backlight_get_brightness(struct backlight_device *bd)
backlight_level = (RREG32(RADEON_LVDS_GEN_CNTL) >>
RADEON_LVDS_BL_MOD_LEVEL_SHIFT) & 0xff;
return pdata->negative ? MAX_RADEON_LEVEL - backlight_level : backlight_level;
return pdata->negative ? RADEON_MAX_BL_LEVEL - backlight_level : backlight_level;
}
static const struct backlight_ops radeon_backlight_ops = {
@ -349,6 +370,7 @@ void radeon_legacy_backlight_init(struct radeon_encoder *radeon_encoder,
struct backlight_properties props;
struct radeon_backlight_privdata *pdata;
uint8_t backlight_level;
char bl_name[16];
if (!radeon_encoder->enc_priv)
return;
@ -365,9 +387,12 @@ void radeon_legacy_backlight_init(struct radeon_encoder *radeon_encoder,
goto error;
}
props.max_brightness = MAX_RADEON_LEVEL;
memset(&props, 0, sizeof(props));
props.max_brightness = RADEON_MAX_BL_LEVEL;
props.type = BACKLIGHT_RAW;
bd = backlight_device_register("radeon_bl", &drm_connector->kdev,
snprintf(bl_name, sizeof(bl_name),
"radeon_bl%d", dev->primary->index);
bd = backlight_device_register(bl_name, &drm_connector->kdev,
pdata, &radeon_backlight_ops, &props);
if (IS_ERR(bd)) {
DRM_ERROR("Backlight registration failed\n");
@ -444,7 +469,7 @@ static void radeon_legacy_backlight_exit(struct radeon_encoder *radeon_encoder)
}
if (bd) {
struct radeon_legacy_backlight_privdata *pdata;
struct radeon_backlight_privdata *pdata;
pdata = bl_get_data(bd);
backlight_device_unregister(bd);
@ -652,7 +677,7 @@ static enum drm_connector_status radeon_legacy_primary_dac_detect(struct drm_enc
WREG32(RADEON_DAC_MACRO_CNTL, tmp);
udelay(2000);
mdelay(2);
if (RREG32(RADEON_DAC_CNTL) & RADEON_DAC_CMP_OUTPUT)
found = connector_status_connected;
@ -969,11 +994,7 @@ static void radeon_legacy_tmds_ext_mode_set(struct drm_encoder *encoder,
static void radeon_ext_tmds_enc_destroy(struct drm_encoder *encoder)
{
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
struct radeon_encoder_ext_tmds *tmds = radeon_encoder->enc_priv;
if (tmds) {
if (tmds->i2c_bus)
radeon_i2c_destroy(tmds->i2c_bus);
}
/* don't destroy the i2c bus record here, this will be done in radeon_i2c_fini */
kfree(radeon_encoder->enc_priv);
drm_encoder_cleanup(encoder);
kfree(radeon_encoder);
@ -1495,7 +1516,7 @@ static enum drm_connector_status radeon_legacy_tv_dac_detect(struct drm_encoder
tmp = dac_cntl2 | RADEON_DAC2_DAC2_CLK_SEL | RADEON_DAC2_CMP_EN;
WREG32(RADEON_DAC_CNTL2, tmp);
udelay(10000);
mdelay(10);
if (ASIC_IS_R300(rdev)) {
if (RREG32(RADEON_DAC_CNTL2) & RADEON_DAC2_CMP_OUT_B)


@ -1,5 +1,5 @@
#include "drmP.h"
#include "drm_crtc_helper.h"
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include "radeon.h"
/*
@ -864,7 +864,7 @@ void radeon_legacy_tv_adjust_crtc_reg(struct drm_encoder *encoder,
*v_sync_strt_wid = tmp;
}
static inline int get_post_div(int value)
static int get_post_div(int value)
{
int post_div;
switch (value) {


@ -30,12 +30,11 @@
#ifndef RADEON_MODE_H
#define RADEON_MODE_H
#include <drm_crtc.h>
#include <drm_mode.h>
#include <drm_edid.h>
#include <drm_dp_helper.h>
#include <drm_fixed.h>
#include <drm_crtc_helper.h>
#include <drm/drm_crtc.h>
#include <drm/drm_edid.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_fixed.h>
#include <drm/drm_crtc_helper.h>
#include <linux/i2c.h>
#include <linux/i2c-algo-bit.h>
@ -210,6 +209,7 @@ enum radeon_connector_table {
CT_RN50_POWER,
CT_MAC_X800,
CT_MAC_G5_9600,
CT_SAM440EP
};
enum radeon_dvo_chip {
@ -219,12 +219,20 @@ enum radeon_dvo_chip {
struct radeon_fbdev;
struct radeon_afmt {
bool enabled;
int offset;
bool last_buffer_filled_status;
int id;
};
struct radeon_mode_info {
struct atom_context *atom_context;
struct card_info *atom_card_info;
enum radeon_connector_table connector_table;
bool mode_config_initialized;
struct radeon_crtc *crtcs[6];
struct radeon_afmt *afmt[6];
/* DVI-I properties */
struct drm_property *coherent_mode_property;
/* DAC enable load detect */
@ -243,8 +251,23 @@ struct radeon_mode_info {
/* pointer to fbdev info structure */
struct radeon_fbdev *rfbdev;
/* firmware flags */
u16 firmware_flags;
/* pointer to backlight encoder */
struct radeon_encoder *bl_encoder;
};
#define RADEON_MAX_BL_LEVEL 0xFF
#if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
struct radeon_backlight_privdata {
struct radeon_encoder *encoder;
uint8_t negative;
};
#endif
#define MAX_H_CODE_TIMING_LEN 32
#define MAX_V_CODE_TIMING_LEN 32
@ -260,12 +283,25 @@ struct radeon_tv_regs {
uint16_t v_code_timing[MAX_V_CODE_TIMING_LEN];
};
struct radeon_atom_ss {
uint16_t percentage;
uint8_t type;
uint16_t step;
uint8_t delay;
uint8_t range;
uint8_t refdiv;
/* asic_ss */
uint16_t rate;
uint16_t amount;
};
struct radeon_crtc {
struct drm_crtc base;
int crtc_id;
u16 lut_r[256], lut_g[256], lut_b[256];
bool enabled;
bool can_tile;
bool in_mode_set;
uint32_t crtc_offset;
struct drm_gem_object *cursor_bo;
uint64_t cursor_addr;
@ -281,6 +317,16 @@ struct radeon_crtc {
struct drm_display_mode native_mode;
int pll_id;
int deferred_flip_completion;
/* pll sharing */
struct radeon_atom_ss ss;
bool ss_enabled;
u32 adjusted_clock;
int bpc;
u32 pll_reference_div;
u32 pll_post_div;
u32 pll_flags;
struct drm_encoder *encoder;
struct drm_connector *connector;
};
struct radeon_encoder_primary_dac {
@ -334,18 +380,6 @@ struct radeon_encoder_ext_tmds {
};
/* spread spectrum */
struct radeon_atom_ss {
uint16_t percentage;
uint8_t type;
uint16_t step;
uint8_t delay;
uint8_t range;
uint8_t refdiv;
/* asic_ss */
uint16_t rate;
uint16_t amount;
};
struct radeon_encoder_atom_dig {
bool linkb;
/* atom dig */
@ -360,6 +394,8 @@ struct radeon_encoder_atom_dig {
struct backlight_device *bl_dev;
int dpms_mode;
uint8_t backlight_level;
int panel_mode;
struct radeon_afmt *afmt;
};
struct radeon_encoder_atom_dac {
@ -381,10 +417,6 @@ struct radeon_encoder {
struct drm_display_mode native_mode;
void *enc_priv;
int audio_polling_active;
int hdmi_offset;
int hdmi_config_offset;
int hdmi_audio_workaround;
int hdmi_buffer_status;
bool is_ext_encoder;
u16 caps;
};
@ -436,15 +468,13 @@ struct radeon_connector {
struct radeon_i2c_chan *ddc_bus;
/* some systems have an hdmi and vga port with a shared ddc line */
bool shared_ddc;
/* for some Radeon chip families we apply an additional EDID header
check as part of the DDC probe */
bool requires_extended_probe;
bool use_digital;
/* we need to mind the EDID between detect
and get modes due to analog/digital/tvencoder */
struct edid *edid;
void *con_priv;
bool dac_load_detect;
bool detected_by_load; /* if the connection status was determined by load */
uint16_t connector_object_id;
struct radeon_hpd hpd;
struct radeon_router router;
@ -456,6 +486,8 @@ struct radeon_framebuffer {
struct drm_gem_object *obj;
};
#define ENCODER_MODE_IS_DP(em) (((em) == ATOM_ENCODER_MODE_DP) || \
((em) == ATOM_ENCODER_MODE_DP_MST))
extern enum radeon_tv_std
radeon_combios_get_tv_info(struct radeon_device *rdev);
@ -464,28 +496,37 @@ radeon_atombios_get_tv_info(struct radeon_device *rdev);
extern struct drm_connector *
radeon_get_connector_for_encoder(struct drm_encoder *encoder);
extern struct drm_connector *
radeon_get_connector_for_encoder_init(struct drm_encoder *encoder);
extern bool radeon_dig_monitor_is_duallink(struct drm_encoder *encoder,
u32 pixel_clock);
extern bool radeon_encoder_is_dp_bridge(struct drm_encoder *encoder);
extern bool radeon_connector_encoder_is_dp_bridge(struct drm_connector *connector);
extern u16 radeon_encoder_get_dp_bridge_encoder_id(struct drm_encoder *encoder);
extern u16 radeon_connector_encoder_get_dp_bridge_encoder_id(struct drm_connector *connector);
extern bool radeon_connector_encoder_is_hbr2(struct drm_connector *connector);
extern bool radeon_connector_is_dp12_capable(struct drm_connector *connector);
extern int radeon_get_monitor_bpc(struct drm_connector *connector);
extern void radeon_connector_hotplug(struct drm_connector *connector);
extern int radeon_dp_mode_valid_helper(struct drm_connector *connector,
struct drm_display_mode *mode);
extern void radeon_dp_set_link_config(struct drm_connector *connector,
struct drm_display_mode *mode);
const struct drm_display_mode *mode);
extern void radeon_dp_link_train(struct drm_encoder *encoder,
struct drm_connector *connector);
extern bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector);
extern u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector);
extern bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector);
extern int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
struct drm_connector *connector);
extern void atombios_dig_encoder_setup(struct drm_encoder *encoder, int action, int panel_mode);
extern void radeon_atom_encoder_init(struct radeon_device *rdev);
extern void radeon_atom_disp_eng_pll_init(struct radeon_device *rdev);
extern void atombios_dig_transmitter_setup(struct drm_encoder *encoder,
int action, uint8_t lane_num,
uint8_t lane_set);
extern void radeon_atom_ext_encoder_setup_ddc(struct drm_encoder *encoder);
extern struct drm_encoder *radeon_atom_get_external_encoder(struct drm_encoder *encoder);
extern struct drm_encoder *radeon_get_external_encoder(struct drm_encoder *encoder);
extern int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
u8 write_byte, u8 *read_byte);
@ -515,8 +556,7 @@ extern void radeon_i2c_put_byte(struct radeon_i2c_chan *i2c,
u8 val);
extern void radeon_router_select_ddc_port(struct radeon_connector *radeon_connector);
extern void radeon_router_select_cd_port(struct radeon_connector *radeon_connector);
extern bool radeon_ddc_probe(struct radeon_connector *radeon_connector,
bool requires_extended_probe);
extern bool radeon_ddc_probe(struct radeon_connector *radeon_connector);
extern int radeon_ddc_get_modes(struct radeon_connector *radeon_connector);
extern struct drm_encoder *radeon_best_encoder(struct drm_connector *connector);
@ -639,9 +679,9 @@ extern void radeon_crtc_fb_gamma_set(struct drm_crtc *crtc, u16 red, u16 green,
u16 blue, int regno);
extern void radeon_crtc_fb_gamma_get(struct drm_crtc *crtc, u16 *red, u16 *green,
u16 *blue, int regno);
void radeon_framebuffer_init(struct drm_device *dev,
int radeon_framebuffer_init(struct drm_device *dev,
struct radeon_framebuffer *rfb,
struct drm_mode_fb_cmd *mode_cmd,
struct drm_mode_fb_cmd2 *mode_cmd,
struct drm_gem_object *obj);
int radeonfb_remove(struct drm_device *dev, struct drm_framebuffer *fb);
@ -661,7 +701,7 @@ void radeon_enc_destroy(struct drm_encoder *encoder);
void radeon_copy_fb(struct drm_device *dev, struct drm_gem_object *dst_obj);
void radeon_combios_asic_init(struct drm_device *dev);
bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
struct drm_display_mode *mode,
const struct drm_display_mode *mode,
struct drm_display_mode *adjusted_mode);
void radeon_panel_mode_fixup(struct drm_encoder *encoder,
struct drm_display_mode *adjusted_mode);


@ -31,6 +31,8 @@
#include <drm/radeon_drm.h>
#include "radeon.h"
struct sg_table;
/**
* radeon_mem_type_to_domain - return domain corresponding to mem_type
* @mem_type: ttm memory type
@ -52,16 +54,7 @@ static inline unsigned radeon_mem_type_to_domain(u32 mem_type)
return 0;
}
/**
* radeon_bo_reserve - reserve bo
* @bo: bo structure
* @no_wait: don't sleep while trying to reserve (return -EBUSY)
*
* Returns:
* -EBUSY: buffer is busy and @no_wait is true
* -ERESTARTSYS: A wait for the buffer to become unreserved was interrupted by
* a signal. Release all buffer reservations and return to user-space.
*/
int radeon_bo_reserve(struct radeon_bo *bo, bool no_intr);
static inline void radeon_bo_unreserve(struct radeon_bo *bo)
{
@ -92,6 +85,16 @@ static inline bool radeon_bo_is_reserved(struct radeon_bo *bo)
return !!atomic_read(&bo->tbo.reserved);
}
static inline unsigned radeon_bo_ngpu_pages(struct radeon_bo *bo)
{
return (bo->tbo.num_pages << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;
}
static inline unsigned radeon_bo_gpu_page_alignment(struct radeon_bo *bo)
{
return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;
}
/**
* radeon_bo_mmap_offset - return mmap offset of bo
* @bo: radeon object for which we query the offset
@ -106,32 +109,20 @@ static inline u64 radeon_bo_mmap_offset(struct radeon_bo *bo)
return bo->tbo.addr_space_offset;
}
static inline int radeon_bo_wait(struct radeon_bo *bo, u32 *mem_type,
bool no_wait)
{
int r;
r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, 0);
if (unlikely(r != 0))
return r;
// spin_lock(&bo->tbo.bdev->fence_lock);
if (mem_type)
*mem_type = bo->tbo.mem.mem_type;
if (bo->tbo.sync_obj)
r = ttm_bo_wait(&bo->tbo, true, true, no_wait);
// spin_unlock(&bo->tbo.bdev->fence_lock);
ttm_bo_unreserve(&bo->tbo);
return r;
}
extern int radeon_bo_wait(struct radeon_bo *bo, u32 *mem_type,
bool no_wait);
extern int radeon_bo_create(struct radeon_device *rdev,
unsigned long size, int byte_align,
bool kernel, u32 domain,
struct sg_table *sg,
struct radeon_bo **bo_ptr);
extern int radeon_bo_kmap(struct radeon_bo *bo, void **ptr);
extern void radeon_bo_kunmap(struct radeon_bo *bo);
extern void radeon_bo_unref(struct radeon_bo **bo);
extern int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr);
extern int radeon_bo_pin_restricted(struct radeon_bo *bo, u32 domain,
u64 max_offset, u64 *gpu_addr);
extern int radeon_bo_unpin(struct radeon_bo *bo);
extern int radeon_bo_evict_vram(struct radeon_device *rdev);
extern void radeon_bo_force_delete(struct radeon_device *rdev);
@ -152,4 +143,41 @@ extern void radeon_bo_move_notify(struct ttm_buffer_object *bo,
struct ttm_mem_reg *mem);
extern int radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo);
extern int radeon_bo_get_surface_reg(struct radeon_bo *bo);
/*
* sub allocation
*/
static inline uint64_t radeon_sa_bo_gpu_addr(struct radeon_sa_bo *sa_bo)
{
return sa_bo->manager->gpu_addr + sa_bo->soffset;
}
static inline void * radeon_sa_bo_cpu_addr(struct radeon_sa_bo *sa_bo)
{
return sa_bo->manager->cpu_ptr + sa_bo->soffset;
}
extern int radeon_sa_bo_manager_init(struct radeon_device *rdev,
struct radeon_sa_manager *sa_manager,
unsigned size, u32 domain);
extern void radeon_sa_bo_manager_fini(struct radeon_device *rdev,
struct radeon_sa_manager *sa_manager);
extern int radeon_sa_bo_manager_start(struct radeon_device *rdev,
struct radeon_sa_manager *sa_manager);
extern int radeon_sa_bo_manager_suspend(struct radeon_device *rdev,
struct radeon_sa_manager *sa_manager);
extern int radeon_sa_bo_new(struct radeon_device *rdev,
struct radeon_sa_manager *sa_manager,
struct radeon_sa_bo **sa_bo,
unsigned size, unsigned align, bool block);
extern void radeon_sa_bo_free(struct radeon_device *rdev,
struct radeon_sa_bo **sa_bo,
struct radeon_fence *fence);
#if defined(CONFIG_DEBUG_FS)
extern void radeon_sa_bo_dump_debug_info(struct radeon_sa_manager *sa_manager,
struct seq_file *m);
#endif
#endif
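The sub-allocator declarations above back the radeon_sa.c and radeon_semaphore.c files added to the makefile. A hypothetical caller, based only on these prototypes (the 4 KiB size and 256-byte alignment are arbitrary):

int example_sa_usage(struct radeon_device *rdev,
                     struct radeon_sa_manager *mgr,
                     struct radeon_fence *fence)
{
        struct radeon_sa_bo *sa_bo;
        int r;

        r = radeon_sa_bo_new(rdev, mgr, &sa_bo, 4096, 256, true);
        if (r)
                return r;

        /* the CPU fills radeon_sa_bo_cpu_addr(sa_bo); the GPU reads it at
         * radeon_sa_bo_gpu_addr(sa_bo) */

        radeon_sa_bo_free(rdev, &sa_bo, fence); /* reclaimed once the fence signals */
        return 0;
}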


@ -122,9 +122,11 @@ void ttm_bo_unreserve(struct ttm_buffer_object *bo)
bo->reserved.counter = 1;
}
struct sg_table;
int radeon_bo_create(struct radeon_device *rdev,
unsigned long size, int byte_align, bool kernel, u32 domain,
struct radeon_bo **bo_ptr)
unsigned long size, int byte_align, bool kernel, u32 domain,
struct sg_table *sg, struct radeon_bo **bo_ptr)
{
struct radeon_bo *bo;
enum ttm_bo_type type;
@ -218,7 +220,7 @@ int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr)
pagelist = &((u32_t*)page_tabs)[(u32_t)bo->kptr >> 12];
dbgprintf("pagelist %x\n", pagelist);
radeon_gart_bind(bo->rdev, bo->tbo.offset,
bo->tbo.vm_node->size, pagelist);
bo->tbo.vm_node->size, pagelist, NULL);
bo->tbo.offset += (u64)bo->rdev->mc.gtt_start;
}
else


@ -20,20 +20,18 @@
* Authors: Rafał Miłecki <zajec5@gmail.com>
* Alex Deucher <alexdeucher@gmail.com>
*/
#include "drmP.h"
#include <drm/drmP.h>
#include "radeon.h"
#include "avivod.h"
#include "atom.h"
#define DRM_DEBUG_DRIVER(fmt, args...)
#define RADEON_IDLE_LOOP_MS 100
#define RADEON_RECLOCK_DELAY_MS 200
#define RADEON_WAIT_VBLANK_TIMEOUT 200
#define RADEON_WAIT_IDLE_TIMEOUT 200
static const char *radeon_pm_state_type_name[5] = {
"Default",
"",
"Powersave",
"Battery",
"Balanced",
@ -47,24 +45,26 @@ static bool radeon_pm_debug_check_in_vbl(struct radeon_device *rdev, bool finish
static void radeon_pm_update_profile(struct radeon_device *rdev);
static void radeon_pm_set_clocks(struct radeon_device *rdev);
static inline int power_supply_is_system_supplied(void) { return -ENOSYS; }
#define ACPI_AC_CLASS "ac_adapter"
#ifdef CONFIG_ACPI
static int radeon_acpi_event(struct notifier_block *nb,
unsigned long val,
void *data)
int radeon_pm_get_type_index(struct radeon_device *rdev,
enum radeon_pm_state_type ps_type,
int instance)
{
struct radeon_device *rdev = container_of(nb, struct radeon_device, acpi_nb);
struct acpi_bus_event *entry = (struct acpi_bus_event *)data;
int i;
int found_instance = -1;
if (strcmp(entry->device_class, ACPI_AC_CLASS) == 0) {
if (power_supply_is_system_supplied() > 0)
DRM_DEBUG_DRIVER("pm: AC\n");
else
DRM_DEBUG_DRIVER("pm: DC\n");
for (i = 0; i < rdev->pm.num_power_states; i++) {
if (rdev->pm.power_state[i].type == ps_type) {
found_instance++;
if (found_instance == instance)
return i;
}
}
/* return default if no match */
return rdev->pm.default_power_state_index;
}
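radeon_pm_get_type_index() appears here in place of the old ACPI notifier body and lets callers translate a power-state type plus instance into an index into rdev->pm.power_state, falling back to the default state. A sketch of the intended use (POWER_STATE_TYPE_BATTERY is assumed to be one of the radeon_pm_state_type values from radeon.h):

int example_pick_battery_state(struct radeon_device *rdev)
{
        /* first battery state, or the default index if none is defined */
        return radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
}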
void radeon_pm_acpi_event_handler(struct radeon_device *rdev)
{
if (rdev->pm.pm_method == PM_METHOD_PROFILE) {
if (rdev->pm.profile == PM_PROFILE_AUTO) {
mutex_lock(&rdev->pm.mutex);
@ -73,11 +73,7 @@ static int radeon_acpi_event(struct notifier_block *nb,
mutex_unlock(&rdev->pm.mutex);
}
}
}
return NOTIFY_OK;
}
#endif
static void radeon_pm_update_profile(struct radeon_device *rdev)
{
@ -140,6 +136,15 @@ static void radeon_unmap_vram_bos(struct radeon_device *rdev)
}
static void radeon_sync_with_vblank(struct radeon_device *rdev)
{
if (rdev->pm.active_crtcs) {
rdev->pm.vblank_sync = false;
// wait_event_timeout(
// rdev->irq.vblank_queue, rdev->pm.vblank_sync,
// msecs_to_jiffies(RADEON_WAIT_VBLANK_TIMEOUT));
}
}
static void radeon_set_power_state(struct radeon_device *rdev)
{
@ -156,8 +161,21 @@ static void radeon_set_power_state(struct radeon_device *rdev)
if (sclk > rdev->pm.default_sclk)
sclk = rdev->pm.default_sclk;
/* starting with BTC, there is one state that is used for both
* MH and SH. Difference is that we always use the high clock index for
* mclk.
*/
if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
(rdev->family >= CHIP_BARTS) &&
rdev->pm.active_crtc_count &&
((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
(rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
mclk = rdev->pm.power_state[rdev->pm.requested_power_state_index].
clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].mclk;
else
mclk = rdev->pm.power_state[rdev->pm.requested_power_state_index].
clock_info[rdev->pm.requested_clock_mode_index].mclk;
if (mclk > rdev->pm.default_mclk)
mclk = rdev->pm.default_mclk;
@ -165,7 +183,7 @@ static void radeon_set_power_state(struct radeon_device *rdev)
if (sclk < rdev->pm.current_sclk)
misc_after = true;
// radeon_sync_with_vblank(rdev);
radeon_sync_with_vblank(rdev);
if (rdev->pm.pm_method == PM_METHOD_DYNPM) {
if (!radeon_pm_in_vbl(rdev))
@ -188,7 +206,7 @@ static void radeon_set_power_state(struct radeon_device *rdev)
}
/* set memory clock */
if (rdev->asic->set_memory_clock && (mclk != rdev->pm.current_mclk)) {
if (rdev->asic->pm.set_memory_clock && (mclk != rdev->pm.current_mclk)) {
radeon_pm_debug_check_in_vbl(rdev, false);
radeon_set_memory_clock(rdev, mclk);
radeon_pm_debug_check_in_vbl(rdev, true);
@ -218,27 +236,16 @@ static void radeon_pm_set_clocks(struct radeon_device *rdev)
return;
mutex_lock(&rdev->ddev->struct_mutex);
mutex_lock(&rdev->vram_mutex);
mutex_lock(&rdev->cp.mutex);
// down_write(&rdev->pm.mclk_lock);
mutex_lock(&rdev->ring_lock);
/* gui idle int has issues on older chips it seems */
if (rdev->family >= CHIP_R600) {
if (rdev->irq.installed) {
/* wait for GPU idle */
rdev->pm.gui_idle = false;
rdev->irq.gui_idle = true;
}
} else {
if (rdev->cp.ready) {
// struct radeon_fence *fence;
// radeon_ring_alloc(rdev, 64);
// radeon_fence_create(rdev, &fence);
// radeon_fence_emit(rdev, fence);
// radeon_ring_commit(rdev);
// radeon_fence_wait(fence, false);
// radeon_fence_unref(&fence);
}
/* wait for the rings to drain */
for (i = 0; i < RADEON_NUM_RINGS; i++) {
struct radeon_ring *ring = &rdev->ring[i];
if (ring->ready)
radeon_fence_wait_empty_locked(rdev, i);
}
radeon_unmap_vram_bos(rdev);
if (rdev->irq.installed) {
@ -268,8 +275,8 @@ static void radeon_pm_set_clocks(struct radeon_device *rdev)
rdev->pm.dynpm_planned_action = DYNPM_ACTION_NONE;
mutex_unlock(&rdev->cp.mutex);
mutex_unlock(&rdev->vram_mutex);
mutex_unlock(&rdev->ring_lock);
// up_write(&rdev->pm.mclk_lock);
mutex_unlock(&rdev->ddev->struct_mutex);
}
@ -294,17 +301,15 @@ static void radeon_pm_print_states(struct radeon_device *rdev)
for (j = 0; j < power_state->num_clock_modes; j++) {
clock_info = &(power_state->clock_info[j]);
if (rdev->flags & RADEON_IS_IGP)
DRM_DEBUG_DRIVER("\t\t%d e: %d%s\n",
DRM_DEBUG_DRIVER("\t\t%d e: %d\n",
j,
clock_info->sclk * 10,
clock_info->flags & RADEON_PM_MODE_NO_DISPLAY ? "\tNo display only" : "");
clock_info->sclk * 10);
else
DRM_DEBUG_DRIVER("\t\t%d e: %d\tm: %d\tv: %d%s\n",
DRM_DEBUG_DRIVER("\t\t%d e: %d\tm: %d\tv: %d\n",
j,
clock_info->sclk * 10,
clock_info->mclk * 10,
clock_info->voltage.voltage,
clock_info->flags & RADEON_PM_MODE_NO_DISPLAY ? "\tNo display only" : "");
clock_info->voltage.voltage);
}
}
}
@@ -313,8 +318,15 @@ static ssize_t radeon_get_pm_profile(struct device *dev,
struct device_attribute *attr,
char *buf)
{
struct drm_device *ddev = pci_get_drvdata(to_pci_dev(dev));
struct radeon_device *rdev = ddev->dev_private;
int cp = rdev->pm.profile;
return snprintf(buf, PAGE_SIZE, "%s\n", "default");
return snprintf(buf, PAGE_SIZE, "%s\n",
(cp == PM_PROFILE_AUTO) ? "auto" :
(cp == PM_PROFILE_LOW) ? "low" :
(cp == PM_PROFILE_MID) ? "mid" :
(cp == PM_PROFILE_HIGH) ? "high" : "default");
}
static ssize_t radeon_set_pm_profile(struct device *dev,
@@ -326,11 +338,26 @@ static ssize_t radeon_set_pm_profile(struct device *dev,
struct radeon_device *rdev = ddev->dev_private;
mutex_lock(&rdev->pm.mutex);
if (rdev->pm.pm_method == PM_METHOD_PROFILE) {
if (strncmp("default", buf, strlen("default")) == 0)
rdev->pm.profile = PM_PROFILE_DEFAULT;
else if (strncmp("auto", buf, strlen("auto")) == 0)
rdev->pm.profile = PM_PROFILE_AUTO;
else if (strncmp("low", buf, strlen("low")) == 0)
rdev->pm.profile = PM_PROFILE_LOW;
else if (strncmp("mid", buf, strlen("mid")) == 0)
rdev->pm.profile = PM_PROFILE_MID;
else if (strncmp("high", buf, strlen("high")) == 0)
rdev->pm.profile = PM_PROFILE_HIGH;
else {
count = -EINVAL;
goto fail;
}
radeon_pm_update_profile(rdev);
radeon_pm_set_clocks(rdev);
} else
count = -EINVAL;
fail:
mutex_unlock(&rdev->pm.mutex);
@@ -373,7 +400,7 @@ static ssize_t radeon_set_pm_method(struct device *dev,
mutex_unlock(&rdev->pm.mutex);
// cancel_delayed_work_sync(&rdev->pm.dynpm_idle_work);
} else {
DRM_ERROR("invalid power method!\n");
count = -EINVAL;
goto fail;
}
radeon_pm_compute_clocks(rdev);
@@ -381,13 +408,16 @@ fail:
return count;
}
//static DEVICE_ATTR(power_profile, S_IRUGO | S_IWUSR, radeon_get_pm_profile, radeon_set_pm_profile);
//static DEVICE_ATTR(power_method, S_IRUGO | S_IWUSR, radeon_get_pm_method, radeon_set_pm_method);
static ssize_t radeon_hwmon_show_temp(struct device *dev,
struct device_attribute *attr,
char *buf)
{
struct drm_device *ddev = pci_get_drvdata(to_pci_dev(dev));
struct radeon_device *rdev = ddev->dev_private;
u32 temp;
int temp;
switch (rdev->pm.int_thermal_type) {
case THERMAL_TYPE_RV6XX:
@@ -400,6 +430,12 @@ static ssize_t radeon_hwmon_show_temp(struct device *dev,
case THERMAL_TYPE_NI:
temp = evergreen_get_temp(rdev);
break;
case THERMAL_TYPE_SUMO:
temp = sumo_get_temp(rdev);
break;
case THERMAL_TYPE_SI:
temp = si_get_temp(rdev);
break;
default:
temp = 0;
break;

View File

@@ -56,6 +56,7 @@
#include "r600_reg.h"
#include "evergreen_reg.h"
#include "ni_reg.h"
#include "si_reg.h"
#define RADEON_MC_AGP_LOCATION 0x014c
#define RADEON_MC_AGP_START_MASK 0x0000FFFF
@@ -539,9 +540,11 @@
#define RADEON_CRTC2_PITCH 0x032c
#define RADEON_CRTC_STATUS 0x005c
# define RADEON_CRTC_VBLANK_CUR (1 << 0)
# define RADEON_CRTC_VBLANK_SAVE (1 << 1)
# define RADEON_CRTC_VBLANK_SAVE_CLEAR (1 << 1)
#define RADEON_CRTC2_STATUS 0x03fc
# define RADEON_CRTC2_VBLANK_CUR (1 << 0)
# define RADEON_CRTC2_VBLANK_SAVE (1 << 1)
# define RADEON_CRTC2_VBLANK_SAVE_CLEAR (1 << 1)
#define RADEON_CRTC_V_SYNC_STRT_WID 0x020c

File diff suppressed because it is too large

View File

@@ -0,0 +1,419 @@
/*
* Copyright 2011 Red Hat Inc.
* All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
* THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
* USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
*/
/*
* Authors:
* Jerome Glisse <glisse@freedesktop.org>
*/
/* Algorithm:
*
 * We store the last allocated bo in "hole"; we always try to allocate
 * right after the last allocated bo. The principle is that, in a linear
 * GPU ring progression, whatever comes after "last" is the oldest bo we
 * allocated and thus the first one that should no longer be in use by
 * the GPU.
 *
 * If that is not the case, we skip over the bo after "last" to the
 * closest done bo, if one exists. If none exists and we are not asked
 * to block, we report failure to allocate.
 *
 * If we are asked to block, we collect the oldest fence of every ring
 * and wait for any of those fences to complete.
*/
#include <drm/drmP.h>
#include "radeon.h"
static void radeon_sa_bo_remove_locked(struct radeon_sa_bo *sa_bo);
static void radeon_sa_bo_try_free(struct radeon_sa_manager *sa_manager);
int radeon_sa_bo_manager_init(struct radeon_device *rdev,
struct radeon_sa_manager *sa_manager,
unsigned size, u32 domain)
{
int i, r;
init_waitqueue_head(&sa_manager->wq);
sa_manager->bo = NULL;
sa_manager->size = size;
sa_manager->domain = domain;
sa_manager->hole = &sa_manager->olist;
INIT_LIST_HEAD(&sa_manager->olist);
for (i = 0; i < RADEON_NUM_RINGS; ++i) {
INIT_LIST_HEAD(&sa_manager->flist[i]);
}
r = radeon_bo_create(rdev, size, RADEON_GPU_PAGE_SIZE, true,
RADEON_GEM_DOMAIN_CPU, NULL, &sa_manager->bo);
if (r) {
dev_err(rdev->dev, "(%d) failed to allocate bo for manager\n", r);
return r;
}
return r;
}
void radeon_sa_bo_manager_fini(struct radeon_device *rdev,
struct radeon_sa_manager *sa_manager)
{
struct radeon_sa_bo *sa_bo, *tmp;
if (!list_empty(&sa_manager->olist)) {
sa_manager->hole = &sa_manager->olist,
radeon_sa_bo_try_free(sa_manager);
if (!list_empty(&sa_manager->olist)) {
dev_err(rdev->dev, "sa_manager is not empty, clearing anyway\n");
}
}
list_for_each_entry_safe(sa_bo, tmp, &sa_manager->olist, olist) {
radeon_sa_bo_remove_locked(sa_bo);
}
radeon_bo_unref(&sa_manager->bo);
sa_manager->size = 0;
}
int radeon_sa_bo_manager_start(struct radeon_device *rdev,
struct radeon_sa_manager *sa_manager)
{
int r;
if (sa_manager->bo == NULL) {
dev_err(rdev->dev, "no bo for sa manager\n");
return -EINVAL;
}
/* map the buffer */
r = radeon_bo_reserve(sa_manager->bo, false);
if (r) {
dev_err(rdev->dev, "(%d) failed to reserve manager bo\n", r);
return r;
}
r = radeon_bo_pin(sa_manager->bo, sa_manager->domain, &sa_manager->gpu_addr);
if (r) {
radeon_bo_unreserve(sa_manager->bo);
dev_err(rdev->dev, "(%d) failed to pin manager bo\n", r);
return r;
}
r = radeon_bo_kmap(sa_manager->bo, &sa_manager->cpu_ptr);
radeon_bo_unreserve(sa_manager->bo);
return r;
}
int radeon_sa_bo_manager_suspend(struct radeon_device *rdev,
struct radeon_sa_manager *sa_manager)
{
int r;
if (sa_manager->bo == NULL) {
dev_err(rdev->dev, "no bo for sa manager\n");
return -EINVAL;
}
r = radeon_bo_reserve(sa_manager->bo, false);
if (!r) {
radeon_bo_kunmap(sa_manager->bo);
radeon_bo_unpin(sa_manager->bo);
radeon_bo_unreserve(sa_manager->bo);
}
return r;
}
static void radeon_sa_bo_remove_locked(struct radeon_sa_bo *sa_bo)
{
struct radeon_sa_manager *sa_manager = sa_bo->manager;
if (sa_manager->hole == &sa_bo->olist) {
sa_manager->hole = sa_bo->olist.prev;
}
list_del_init(&sa_bo->olist);
list_del_init(&sa_bo->flist);
radeon_fence_unref(&sa_bo->fence);
kfree(sa_bo);
}
static void radeon_sa_bo_try_free(struct radeon_sa_manager *sa_manager)
{
struct radeon_sa_bo *sa_bo, *tmp;
if (sa_manager->hole->next == &sa_manager->olist)
return;
sa_bo = list_entry(sa_manager->hole->next, struct radeon_sa_bo, olist);
list_for_each_entry_safe_from(sa_bo, tmp, &sa_manager->olist, olist) {
if (sa_bo->fence == NULL || !radeon_fence_signaled(sa_bo->fence)) {
return;
}
radeon_sa_bo_remove_locked(sa_bo);
}
}
static inline unsigned radeon_sa_bo_hole_soffset(struct radeon_sa_manager *sa_manager)
{
struct list_head *hole = sa_manager->hole;
if (hole != &sa_manager->olist) {
return list_entry(hole, struct radeon_sa_bo, olist)->eoffset;
}
return 0;
}
static inline unsigned radeon_sa_bo_hole_eoffset(struct radeon_sa_manager *sa_manager)
{
struct list_head *hole = sa_manager->hole;
if (hole->next != &sa_manager->olist) {
return list_entry(hole->next, struct radeon_sa_bo, olist)->soffset;
}
return sa_manager->size;
}
static bool radeon_sa_bo_try_alloc(struct radeon_sa_manager *sa_manager,
struct radeon_sa_bo *sa_bo,
unsigned size, unsigned align)
{
unsigned soffset, eoffset, wasted;
soffset = radeon_sa_bo_hole_soffset(sa_manager);
eoffset = radeon_sa_bo_hole_eoffset(sa_manager);
wasted = (align - (soffset % align)) % align;
if ((eoffset - soffset) >= (size + wasted)) {
soffset += wasted;
sa_bo->manager = sa_manager;
sa_bo->soffset = soffset;
sa_bo->eoffset = soffset + size;
list_add(&sa_bo->olist, sa_manager->hole);
INIT_LIST_HEAD(&sa_bo->flist);
sa_manager->hole = &sa_bo->olist;
return true;
}
return false;
}
/**
* radeon_sa_event - Check if we can stop waiting
*
* @sa_manager: pointer to the sa_manager
* @size: number of bytes we want to allocate
* @align: alignment we need to match
*
* Check if either there is a fence we can wait for or
* enough free memory to satisfy the allocation directly
*/
static bool radeon_sa_event(struct radeon_sa_manager *sa_manager,
unsigned size, unsigned align)
{
unsigned soffset, eoffset, wasted;
int i;
for (i = 0; i < RADEON_NUM_RINGS; ++i) {
if (!list_empty(&sa_manager->flist[i])) {
return true;
}
}
soffset = radeon_sa_bo_hole_soffset(sa_manager);
eoffset = radeon_sa_bo_hole_eoffset(sa_manager);
wasted = (align - (soffset % align)) % align;
if ((eoffset - soffset) >= (size + wasted)) {
return true;
}
return false;
}
static bool radeon_sa_bo_next_hole(struct radeon_sa_manager *sa_manager,
struct radeon_fence **fences,
unsigned *tries)
{
struct radeon_sa_bo *best_bo = NULL;
unsigned i, soffset, best, tmp;
/* if hole points to the end of the buffer */
if (sa_manager->hole->next == &sa_manager->olist) {
/* try again with its beginning */
sa_manager->hole = &sa_manager->olist;
return true;
}
soffset = radeon_sa_bo_hole_soffset(sa_manager);
/* to handle wrap around we add sa_manager->size */
best = sa_manager->size * 2;
	/* go over all the fence lists and try to find the sa_bo
	 * closest to the current last
	 */
for (i = 0; i < RADEON_NUM_RINGS; ++i) {
struct radeon_sa_bo *sa_bo;
if (list_empty(&sa_manager->flist[i])) {
continue;
}
sa_bo = list_first_entry(&sa_manager->flist[i],
struct radeon_sa_bo, flist);
if (!radeon_fence_signaled(sa_bo->fence)) {
fences[i] = sa_bo->fence;
continue;
}
/* limit the number of tries each ring gets */
if (tries[i] > 2) {
continue;
}
tmp = sa_bo->soffset;
if (tmp < soffset) {
/* wrap around, pretend it's after */
tmp += sa_manager->size;
}
tmp -= soffset;
if (tmp < best) {
/* this sa bo is the closest one */
best = tmp;
best_bo = sa_bo;
}
}
if (best_bo) {
++tries[best_bo->fence->ring];
sa_manager->hole = best_bo->olist.prev;
		/* we know that this one is signaled,
		   so it's safe to remove it */
radeon_sa_bo_remove_locked(best_bo);
return true;
}
return false;
}
int radeon_sa_bo_new(struct radeon_device *rdev,
struct radeon_sa_manager *sa_manager,
struct radeon_sa_bo **sa_bo,
unsigned size, unsigned align, bool block)
{
struct radeon_fence *fences[RADEON_NUM_RINGS];
unsigned tries[RADEON_NUM_RINGS];
int i, r;
BUG_ON(align > RADEON_GPU_PAGE_SIZE);
BUG_ON(size > sa_manager->size);
*sa_bo = kmalloc(sizeof(struct radeon_sa_bo), GFP_KERNEL);
if ((*sa_bo) == NULL) {
return -ENOMEM;
}
(*sa_bo)->manager = sa_manager;
(*sa_bo)->fence = NULL;
INIT_LIST_HEAD(&(*sa_bo)->olist);
INIT_LIST_HEAD(&(*sa_bo)->flist);
spin_lock(&sa_manager->wq.lock);
do {
for (i = 0; i < RADEON_NUM_RINGS; ++i) {
fences[i] = NULL;
tries[i] = 0;
}
do {
radeon_sa_bo_try_free(sa_manager);
if (radeon_sa_bo_try_alloc(sa_manager, *sa_bo,
size, align)) {
spin_unlock(&sa_manager->wq.lock);
return 0;
}
/* see if we can skip over some allocations */
} while (radeon_sa_bo_next_hole(sa_manager, fences, tries));
spin_unlock(&sa_manager->wq.lock);
r = radeon_fence_wait_any(rdev, fences, false);
spin_lock(&sa_manager->wq.lock);
		/* if we have nothing to wait for, block */
if (r == -ENOENT && block) {
// r = wait_event_interruptible_locked(
// sa_manager->wq,
// radeon_sa_event(sa_manager, size, align)
// );
} else if (r == -ENOENT) {
r = -ENOMEM;
}
} while (!r);
spin_unlock(&sa_manager->wq.lock);
kfree(*sa_bo);
*sa_bo = NULL;
return r;
}
void radeon_sa_bo_free(struct radeon_device *rdev, struct radeon_sa_bo **sa_bo,
struct radeon_fence *fence)
{
struct radeon_sa_manager *sa_manager;
if (sa_bo == NULL || *sa_bo == NULL) {
return;
}
sa_manager = (*sa_bo)->manager;
spin_lock(&sa_manager->wq.lock);
if (fence && !radeon_fence_signaled(fence)) {
(*sa_bo)->fence = radeon_fence_ref(fence);
list_add_tail(&(*sa_bo)->flist,
&sa_manager->flist[fence->ring]);
} else {
radeon_sa_bo_remove_locked(*sa_bo);
}
// wake_up_all_locked(&sa_manager->wq);
spin_unlock(&sa_manager->wq.lock);
*sa_bo = NULL;
}
#if defined(CONFIG_DEBUG_FS)
void radeon_sa_bo_dump_debug_info(struct radeon_sa_manager *sa_manager,
struct seq_file *m)
{
struct radeon_sa_bo *i;
spin_lock(&sa_manager->wq.lock);
list_for_each_entry(i, &sa_manager->olist, olist) {
if (&i->olist == sa_manager->hole) {
seq_printf(m, ">");
} else {
seq_printf(m, " ");
}
seq_printf(m, "[0x%08x 0x%08x] size %8d",
i->soffset, i->eoffset, i->eoffset - i->soffset);
if (i->fence) {
seq_printf(m, " protected by 0x%016llx on ring %d",
i->fence->seq, i->fence->ring);
}
seq_printf(m, "\n");
}
spin_unlock(&sa_manager->wq.lock);
}
#endif
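
Editor's aside (not part of the commit): the allocation strategy described at the top of radeon_sa.c above — place each allocation right after the previous one and wrap around at the end of the arena, otherwise fall back to freed blocks or wait on fences — can be sketched in a few lines of plain userspace C. The snippet below is illustrative only; it assumes a fixed-size arena, deliberately omits the fence/"done" bookkeeping, and the toy_* names are hypothetical, not driver symbols.

#include <stdbool.h>
#include <stdio.h>

#define TOY_ARENA_SIZE 1024u

/* Place "size" bytes right after the previous allocation, wrapping to
 * the start of the arena when the end is reached. Collision checks
 * against still-busy blocks are intentionally left out of this toy. */
static bool toy_ring_alloc(unsigned *last, unsigned size, unsigned *offset)
{
	if (size > TOY_ARENA_SIZE)
		return false;
	if (*last + size <= TOY_ARENA_SIZE)
		*offset = *last;	/* fits before the end of the arena */
	else
		*offset = 0;		/* wrap around, as the real allocator does */
	*last = *offset + size;
	return true;
}

int main(void)
{
	unsigned last = 0, offset;

	if (toy_ring_alloc(&last, 256, &offset))
		printf("first allocation at %u\n", offset);	/* 0 */
	if (toy_ring_alloc(&last, 900, &offset))
		printf("wrapped allocation at %u\n", offset);	/* 0 again: wrapped */
	return 0;
}

The real radeon_sa_bo_new() additionally skips to the closest signaled bo (radeon_sa_bo_next_hole) or waits on the oldest fences when nothing has been freed yet.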

View File

@@ -0,0 +1,115 @@
/*
* Copyright 2011 Christian König.
* All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
* THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
* USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
*/
/*
* Authors:
* Christian König <deathsimple@vodafone.de>
*/
#include <drm/drmP.h>
#include "radeon.h"
int radeon_semaphore_create(struct radeon_device *rdev,
struct radeon_semaphore **semaphore)
{
int r;
*semaphore = kmalloc(sizeof(struct radeon_semaphore), GFP_KERNEL);
if (*semaphore == NULL) {
return -ENOMEM;
}
r = radeon_sa_bo_new(rdev, &rdev->ring_tmp_bo,
&(*semaphore)->sa_bo, 8, 8, true);
if (r) {
kfree(*semaphore);
*semaphore = NULL;
return r;
}
(*semaphore)->waiters = 0;
(*semaphore)->gpu_addr = radeon_sa_bo_gpu_addr((*semaphore)->sa_bo);
*((uint64_t*)radeon_sa_bo_cpu_addr((*semaphore)->sa_bo)) = 0;
return 0;
}
void radeon_semaphore_emit_signal(struct radeon_device *rdev, int ring,
struct radeon_semaphore *semaphore)
{
--semaphore->waiters;
radeon_semaphore_ring_emit(rdev, ring, &rdev->ring[ring], semaphore, false);
}
void radeon_semaphore_emit_wait(struct radeon_device *rdev, int ring,
struct radeon_semaphore *semaphore)
{
++semaphore->waiters;
radeon_semaphore_ring_emit(rdev, ring, &rdev->ring[ring], semaphore, true);
}
/* caller must hold ring lock */
int radeon_semaphore_sync_rings(struct radeon_device *rdev,
struct radeon_semaphore *semaphore,
int signaler, int waiter)
{
int r;
/* no need to signal and wait on the same ring */
if (signaler == waiter) {
return 0;
}
/* prevent GPU deadlocks */
if (!rdev->ring[signaler].ready) {
dev_err(rdev->dev, "Trying to sync to a disabled ring!");
return -EINVAL;
}
r = radeon_ring_alloc(rdev, &rdev->ring[signaler], 8);
if (r) {
return r;
}
radeon_semaphore_emit_signal(rdev, signaler, semaphore);
radeon_ring_commit(rdev, &rdev->ring[signaler]);
	/* we assume the caller has already allocated space on the waiter's ring */
radeon_semaphore_emit_wait(rdev, waiter, semaphore);
return 0;
}
void radeon_semaphore_free(struct radeon_device *rdev,
struct radeon_semaphore **semaphore,
struct radeon_fence *fence)
{
if (semaphore == NULL || *semaphore == NULL) {
return;
}
if ((*semaphore)->waiters > 0) {
dev_err(rdev->dev, "semaphore %p has more waiters than signalers,"
" hardware lockup imminent!\n", *semaphore);
}
radeon_sa_bo_free(rdev, &(*semaphore)->sa_bo, fence);
kfree(*semaphore);
*semaphore = NULL;
}
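
Editor's aside (not part of the commit): radeon_semaphore_emit_wait() increments and radeon_semaphore_emit_signal() decrements the waiters counter, and radeon_semaphore_free() warns when the count is still positive, since an unpaired wait would leave a ring blocked. A tiny self-contained C model of that bookkeeping (illustrative only; the toy_* names are hypothetical):

#include <stdio.h>

/* toy stand-in for the waiters counter kept in struct radeon_semaphore */
struct toy_sem { int waiters; };

static void toy_emit_wait(struct toy_sem *s)   { ++s->waiters; }
static void toy_emit_signal(struct toy_sem *s) { --s->waiters; }

int main(void)
{
	struct toy_sem s = { 0 };

	toy_emit_wait(&s);	/* the waiting ring blocks on the semaphore */
	toy_emit_signal(&s);	/* the signalling ring releases it: balanced */

	toy_emit_wait(&s);	/* an unpaired wait ... */
	if (s.waiters > 0)	/* ... is what the free path warns about */
		printf("unbalanced: %d wait(s) without a signal\n", s.waiters);
	return 0;
}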

View File

@@ -33,9 +33,11 @@
#include <ttm/ttm_bo_driver.h>
#include <ttm/ttm_placement.h>
#include <ttm/ttm_module.h>
#include <ttm/ttm_page_alloc.h>
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include "radeon_reg.h"
#include "radeon.h"
@@ -57,28 +59,28 @@ static struct radeon_device *radeon_get_rdev(struct ttm_bo_device *bdev)
/*
* Global memory.
*/
static int radeon_ttm_mem_global_init(struct ttm_global_reference *ref)
static int radeon_ttm_mem_global_init(struct drm_global_reference *ref)
{
return ttm_mem_global_init(ref->object);
}
static void radeon_ttm_mem_global_release(struct ttm_global_reference *ref)
static void radeon_ttm_mem_global_release(struct drm_global_reference *ref)
{
ttm_mem_global_release(ref->object);
}
static int radeon_ttm_global_init(struct radeon_device *rdev)
{
struct ttm_global_reference *global_ref;
struct drm_global_reference *global_ref;
int r;
rdev->mman.mem_global_referenced = false;
global_ref = &rdev->mman.mem_global_ref;
global_ref->global_type = TTM_GLOBAL_TTM_MEM;
global_ref->global_type = DRM_GLOBAL_TTM_MEM;
global_ref->size = sizeof(struct ttm_mem_global);
global_ref->init = &radeon_ttm_mem_global_init;
global_ref->release = &radeon_ttm_mem_global_release;
r = ttm_global_item_ref(global_ref);
r = drm_global_item_ref(global_ref);
if (r != 0) {
DRM_ERROR("Failed setting up TTM memory accounting "
"subsystem.\n");
@@ -88,14 +90,14 @@ static int radeon_ttm_global_init(struct radeon_device *rdev)
rdev->mman.bo_global_ref.mem_glob =
rdev->mman.mem_global_ref.object;
global_ref = &rdev->mman.bo_global_ref.ref;
global_ref->global_type = TTM_GLOBAL_TTM_BO;
global_ref->global_type = DRM_GLOBAL_TTM_BO;
global_ref->size = sizeof(struct ttm_bo_global);
global_ref->init = &ttm_bo_global_init;
global_ref->release = &ttm_bo_global_release;
r = ttm_global_item_ref(global_ref);
r = drm_global_item_ref(global_ref);
if (r != 0) {
DRM_ERROR("Failed setting up TTM BO subsystem.\n");
ttm_global_item_unref(&rdev->mman.mem_global_ref);
drm_global_item_unref(&rdev->mman.mem_global_ref);
return r;
}
@@ -104,8 +106,10 @@ static int radeon_ttm_global_init(struct radeon_device *rdev)
}
struct ttm_backend *radeon_ttm_backend_create(struct radeon_device *rdev);
static int radeon_invalidate_caches(struct ttm_bo_device *bdev, uint32_t flags)
{
return 0;
}
static int radeon_init_mem_type(struct ttm_bo_device *bdev, uint32_t type,
struct ttm_mem_type_manager *man)
@@ -122,7 +126,8 @@ static int radeon_init_mem_type(struct ttm_bo_device *bdev, uint32_t type,
man->default_caching = TTM_PL_FLAG_CACHED;
break;
case TTM_PL_TT:
man->gpu_offset = rdev->mc.gtt_location;
man->func = &ttm_bo_manager_func;
man->gpu_offset = rdev->mc.gtt_start;
man->available_caching = TTM_PL_MASK_CACHING;
man->default_caching = TTM_PL_FLAG_CACHED;
man->flags = TTM_MEMTYPE_FLAG_MAPPABLE | TTM_MEMTYPE_FLAG_CMA;
@@ -133,34 +138,22 @@ static int radeon_init_mem_type(struct ttm_bo_device *bdev, uint32_t type,
(unsigned)type);
return -EINVAL;
}
man->io_offset = rdev->mc.agp_base;
man->io_size = rdev->mc.gtt_size;
man->io_addr = NULL;
if (!rdev->ddev->agp->cant_use_aperture)
man->flags = TTM_MEMTYPE_FLAG_NEEDS_IOREMAP |
TTM_MEMTYPE_FLAG_MAPPABLE;
man->flags = TTM_MEMTYPE_FLAG_MAPPABLE;
man->available_caching = TTM_PL_FLAG_UNCACHED |
TTM_PL_FLAG_WC;
man->default_caching = TTM_PL_FLAG_WC;
} else
#endif
{
man->io_offset = 0;
man->io_size = 0;
man->io_addr = NULL;
}
#endif
break;
case TTM_PL_VRAM:
/* "On-card" video ram */
man->gpu_offset = rdev->mc.vram_location;
man->func = &ttm_bo_manager_func;
man->gpu_offset = rdev->mc.vram_start;
man->flags = TTM_MEMTYPE_FLAG_FIXED |
TTM_MEMTYPE_FLAG_NEEDS_IOREMAP |
TTM_MEMTYPE_FLAG_MAPPABLE;
man->available_caching = TTM_PL_FLAG_UNCACHED | TTM_PL_FLAG_WC;
man->default_caching = TTM_PL_FLAG_WC;
man->io_addr = NULL;
man->io_offset = rdev->mc.aper_base;
man->io_size = rdev->mc.aper_size;
break;
default:
DRM_ERROR("Unsupported memory type %u\n", (unsigned)type);

View File

@@ -33,7 +33,7 @@ int init_cursor(cursor_t *cursor)
rdev = (struct radeon_device *)rdisplay->ddev->dev_private;
r = radeon_bo_create(rdev, CURSOR_WIDTH*CURSOR_HEIGHT*4,
PAGE_SIZE, false, RADEON_GEM_DOMAIN_VRAM, &cursor->robj);
PAGE_SIZE, false, RADEON_GEM_DOMAIN_VRAM, NULL, &cursor->robj);
if (unlikely(r != 0))
return r;
@@ -294,3 +294,34 @@ void framebuffer_release(struct fb_info *info)
}
/* 23 bits of float fractional data */
#define I2F_FRAC_BITS 23
#define I2F_MASK ((1 << I2F_FRAC_BITS) - 1)
/*
* Converts unsigned integer into 32-bit IEEE floating point representation.
* Will be exact from 0 to 2^24. Above that, we round towards zero
* as the fractional bits will not fit in a float. (It would be better to
* round towards even as the fpu does, but that is slower.)
*/
__pure uint32_t int2float(uint32_t x)
{
uint32_t msb, exponent, fraction;
/* Zero is special */
if (!x) return 0;
/* Get location of the most significant bit */
msb = __fls(x);
/*
* Use a rotate instead of a shift because that works both leftwards
* and rightwards due to the mod(32) behaviour. This means we don't
* need to check to see if we are above 2^24 or not.
*/
fraction = ror32(x, (msb - I2F_FRAC_BITS) & 0x1f) & I2F_MASK;
exponent = (127 + msb) << I2F_FRAC_BITS;
return fraction + exponent;
}
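
Editor's aside (not part of the commit): two worked examples of the encoding implemented by int2float() above. For x = 1 the msb is 0, so the fraction field is 0 and the biased exponent is 127, giving 0x3F800000; for x = 16 the msb is 4, giving an exponent of 131 and the value 0x41800000. The small self-contained program below checks those expected bit patterns against the compiler's own float conversion (it does not call the driver function itself):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* reinterpret a float as its raw IEEE-754 bit pattern */
static uint32_t float_bits(float f)
{
	uint32_t u;
	memcpy(&u, &f, sizeof(u));
	return u;
}

int main(void)
{
	assert(float_bits(1.0f)  == 0x3F800000u);	/* expected int2float(1)  */
	assert(float_bits(16.0f) == 0x41800000u);	/* expected int2float(16) */
	printf("1 -> 0x%08X, 16 -> 0x%08X\n",
	       (unsigned)float_bits(1.0f), (unsigned)float_bits(16.0f));
	return 0;
}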

View File

@@ -272,8 +272,11 @@ do_set:
fb->width = reqmode->width;
fb->height = reqmode->height;
fb->pitch = radeon_align_pitch(dev->dev_private, reqmode->width, 32, false) * ((32 + 1) / 8);
fb->pitches[0] = fb->pitches[1] = fb->pitches[2] =
fb->pitches[3] = radeon_align_pitch(dev->dev_private, reqmode->width, 32, false) * ((32 + 1) / 8);
fb->bits_per_pixel = 32;
fb->depth = 24;
crtc->fb = fb;
crtc->enabled = true;
@@ -288,13 +291,13 @@
{
rdisplay->width = fb->width;
rdisplay->height = fb->height;
rdisplay->pitch = fb->pitch;
rdisplay->pitch = fb->pitches[0];
rdisplay->vrefresh = drm_mode_vrefresh(mode);
sysSetScreen(fb->width, fb->height, fb->pitch);
sysSetScreen(fb->width, fb->height, fb->pitches[0]);
dbgprintf("new mode %d x %d pitch %d\n",
fb->width, fb->height, fb->pitch);
fb->width, fb->height, fb->pitches[0]);
}
else
DRM_ERROR("failed to set mode %d_%d on crtc %p\n",
@@ -363,6 +366,14 @@ bool init_display_kms(struct radeon_device *rdev, videomode_t *usermode)
{
struct drm_device *dev;
struct drm_connector *connector;
struct drm_connector_helper_funcs *connector_funcs;
struct drm_encoder *encoder;
struct drm_crtc *crtc = NULL;
struct drm_framebuffer *fb;
struct drm_display_mode *native;
cursor_t *cursor;
bool retval = false;
u32_t ifl;
@@ -374,9 +385,99 @@ bool init_display_kms(struct radeon_device *rdev, videomode_t *usermode)
ENTER();
rdisplay = GetDisplay();
dev = rdev->ddev;
list_for_each_entry(connector, &dev->mode_config.connector_list, head)
{
if( connector->status != connector_status_connected)
continue;
connector_funcs = connector->helper_private;
encoder = connector_funcs->best_encoder(connector);
if( encoder == NULL)
{
dbgprintf("CONNECTOR %x ID: %d no active encoders\n",
connector, connector->base.id);
continue;
}
connector->encoder = encoder;
dbgprintf("CONNECTOR %x ID: %d status %d encoder %x\n crtc %x\n",
connector, connector->base.id,
connector->status, connector->encoder,
encoder->crtc);
crtc = encoder->crtc;
break;
};
if(connector == NULL)
{
dbgprintf("No active connectors!\n");
return -1;
};
{
struct drm_display_mode *tmp;
list_for_each_entry(tmp, &connector->modes, head) {
if (drm_mode_width(tmp) > 16384 ||
drm_mode_height(tmp) > 16384)
continue;
if (tmp->type & DRM_MODE_TYPE_PREFERRED)
{
native = tmp;
break;
};
}
}
if( ASIC_IS_AVIVO(rdev) && native )
{
dbgprintf("native w %d h %d\n", native->hdisplay, native->vdisplay);
struct radeon_encoder *radeon_encoder = to_radeon_encoder(connector->encoder);
radeon_encoder->rmx_type = RMX_FULL;
radeon_encoder->native_mode = *native;
};
if(crtc == NULL)
{
struct drm_crtc *tmp_crtc;
int crtc_mask = 1;
list_for_each_entry(tmp_crtc, &dev->mode_config.crtc_list, head)
{
if (encoder->possible_crtcs & crtc_mask)
{
crtc = tmp_crtc;
encoder->crtc = crtc;
break;
};
crtc_mask <<= 1;
};
};
if(crtc == NULL)
{
dbgprintf("No CRTC for encoder %d\n", encoder->base.id);
return -1;
};
dbgprintf("[Select CRTC:%d]\n", crtc->base.id);
// drm_helper_connector_dpms(connector, DRM_MODE_DPMS_ON);
rdisplay = GetDisplay();
rdisplay->ddev = dev;
rdisplay->connector = connector;
rdisplay->crtc = crtc;
rdisplay->supported_modes = count_connector_modes(connector);
dev = rdisplay->ddev = rdev->ddev;
ifl = safe_cli();
{
@@ -384,52 +485,17 @@ bool init_display_kms(struct radeon_device *rdev, videomode_t *usermode)
{
init_cursor(cursor);
};
};
safe_sti(ifl);
rfbdev = rdev->mode_info.rfbdev;
fb_helper = &rfbdev->helper;
// for (i = 0; i < fb_helper->crtc_count; i++)
// {
struct drm_mode_set *mode_set = &fb_helper->crtc_info[0].mode_set;
struct drm_crtc *crtc;
struct drm_display_mode *mode;
crtc = mode_set->crtc;
// if (!crtc->enabled)
// continue;
mode = mode_set->mode;
dbgprintf("crtc %d width %d height %d vrefresh %d\n",
crtc->base.id,
drm_mode_width(mode), drm_mode_height(mode),
drm_mode_vrefresh(mode));
// }
rdisplay->connector = get_def_connector(dev);
if( rdisplay->connector == 0 )
{
dbgprintf("no active connectors\n");
return false;
};
rdisplay->crtc = rdisplay->connector->encoder->crtc = crtc;
rdisplay->supported_modes = count_connector_modes(rdisplay->connector);
dbgprintf("current mode %d x %d x %d\n",
rdisplay->width, rdisplay->height, rdisplay->vrefresh);
dbgprintf("user mode mode %d x %d x %d\n",
usermode->width, usermode->height, usermode->freq);
if( (usermode->width != 0) &&
(usermode->height != 0) &&
( (usermode->width != rdisplay->width) ||
@@ -439,6 +505,13 @@ bool init_display_kms(struct radeon_device *rdev, videomode_t *usermode)
retval = set_mode(dev, rdisplay->connector, usermode, false);
}
else
{
usermode->width = rdisplay->width;
usermode->height = rdisplay->height;
usermode->freq = 60;
retval = set_mode(dev, rdisplay->connector, usermode, false);
};
ifl = safe_cli();
{
@@ -464,7 +537,7 @@ int get_modes(videomode_t *mode, int *count)
{
int err = -1;
ENTER();
// ENTER();
dbgprintf("mode %x count %d\n", mode, *count);
@@ -497,7 +570,7 @@ int get_modes(videomode_t *mode, int *count)
*count = i;
err = 0;
};
LEAVE();
// LEAVE();
return err;
}
@@ -505,7 +578,7 @@ int set_user_mode(videomode_t *mode)
{
int err = -1;
ENTER();
// ENTER();
dbgprintf("width %d height %d vrefresh %d\n",
mode->width, mode->height, mode->freq);
@@ -521,14 +594,13 @@ int set_user_mode(videomode_t *mode)
err = 0;
};
LEAVE();
// LEAVE();
return err;
};
int radeonfb_create_object(struct radeon_fbdev *rfbdev,
struct drm_mode_fb_cmd *mode_cmd,
int radeonfb_create_pinned_object(struct radeon_fbdev *rfbdev,
struct drm_mode_fb_cmd2 *mode_cmd,
struct drm_gem_object **gobj_p)
{
struct radeon_device *rdev = rfbdev->rdev;
@@ -539,21 +611,29 @@ int radeonfb_create_object(struct radeon_fbdev *rfbdev,
int ret;
int aligned_size, size;
int height = mode_cmd->height;
u32 bpp, depth;
static struct radeon_bo kos_bo;
static struct drm_mm_node vm_node;
drm_fb_get_bpp_depth(mode_cmd->pixel_format, &depth, &bpp);
/* need to align pitch with crtc limits */
mode_cmd->pitch = radeon_align_pitch(rdev, mode_cmd->width, mode_cmd->bpp, fb_tiled) * ((mode_cmd->bpp + 1) / 8);
mode_cmd->pitches[0] = radeon_align_pitch(rdev, mode_cmd->width, bpp,
fb_tiled) * ((bpp + 1) / 8);
if (rdev->family >= CHIP_R600)
height = ALIGN(mode_cmd->height, 8);
size = mode_cmd->pitch * height;
size = mode_cmd->pitches[0] * height;
aligned_size = ALIGN(size, PAGE_SIZE);
ret = drm_gem_object_init(rdev->ddev, &kos_bo.gem_base, aligned_size);
if (unlikely(ret)) {
return ret;
printk(KERN_ERR "failed to allocate framebuffer (%d)\n",
aligned_size);
return -ENOMEM;
}
kos_bo.rdev = rdev;
@@ -569,10 +649,13 @@ int radeonfb_create_object(struct radeon_fbdev *rfbdev,
if (fb_tiled)
tiling_flags = RADEON_TILING_MACRO;
if (tiling_flags) {
rbo->tiling_flags = tiling_flags | RADEON_TILING_SURFACE;
rbo->pitch = mode_cmd->pitch;
}
// if (tiling_flags) {
// ret = radeon_bo_set_tiling_flags(rbo,
// tiling_flags | RADEON_TILING_SURFACE,
// mode_cmd->pitches[0]);
// if (ret)
// dev_err(rdev->dev, "FB failed to set tiling flags\n");
// }
vm_node.size = 0xC00000 >> 12;
vm_node.start = 0;
@@ -584,13 +667,9 @@ int radeonfb_create_object(struct radeon_fbdev *rfbdev,
rbo->kptr = (void*)0xFE000000;
rbo->pin_count = 1;
// if (fb_tiled)
// radeon_bo_check_tiling(rbo, 0, 0);
*gobj_p = gobj;
return 0;
}

View File

@@ -0,0 +1,641 @@
cayman 0x9400
0x0000802C GRBM_GFX_INDEX
0x000084FC CP_STRMOUT_CNTL
0x000085F0 CP_COHER_CNTL
0x000085F4 CP_COHER_SIZE
0x000088B0 VGT_VTX_VECT_EJECT_REG
0x000088C4 VGT_CACHE_INVALIDATION
0x000088D4 VGT_GS_VERTEX_REUSE
0x00008958 VGT_PRIMITIVE_TYPE
0x0000895C VGT_INDEX_TYPE
0x00008970 VGT_NUM_INDICES
0x00008974 VGT_NUM_INSTANCES
0x00008990 VGT_COMPUTE_DIM_X
0x00008994 VGT_COMPUTE_DIM_Y
0x00008998 VGT_COMPUTE_DIM_Z
0x0000899C VGT_COMPUTE_START_X
0x000089A0 VGT_COMPUTE_START_Y
0x000089A4 VGT_COMPUTE_START_Z
0x000089A8 VGT_COMPUTE_INDEX
0x000089AC VGT_COMPUTE_THREAD_GOURP_SIZE
0x000089B0 VGT_HS_OFFCHIP_PARAM
0x00008A14 PA_CL_ENHANCE
0x00008A60 PA_SC_LINE_STIPPLE_VALUE
0x00008B10 PA_SC_LINE_STIPPLE_STATE
0x00008BF0 PA_SC_ENHANCE
0x00008D8C SQ_DYN_GPR_CNTL_PS_FLUSH_REQ
0x00008D94 SQ_DYN_GPR_SIMD_LOCK_EN
0x00008C00 SQ_CONFIG
0x00008C04 SQ_GPR_RESOURCE_MGMT_1
0x00008C10 SQ_GLOBAL_GPR_RESOURCE_MGMT_1
0x00008C14 SQ_GLOBAL_GPR_RESOURCE_MGMT_2
0x00008DF8 SQ_CONST_MEM_BASE
0x00008E20 SQ_STATIC_THREAD_MGMT_1
0x00008E24 SQ_STATIC_THREAD_MGMT_2
0x00008E28 SQ_STATIC_THREAD_MGMT_3
0x00008E48 SQ_EX_ALLOC_TABLE_SLOTS
0x00009100 SPI_CONFIG_CNTL
0x0000913C SPI_CONFIG_CNTL_1
0x00009508 TA_CNTL_AUX
0x00009830 DB_DEBUG
0x00009834 DB_DEBUG2
0x00009838 DB_DEBUG3
0x0000983C DB_DEBUG4
0x00009854 DB_WATERMARKS
0x0000A400 TD_PS_BORDER_COLOR_INDEX
0x0000A404 TD_PS_BORDER_COLOR_RED
0x0000A408 TD_PS_BORDER_COLOR_GREEN
0x0000A40C TD_PS_BORDER_COLOR_BLUE
0x0000A410 TD_PS_BORDER_COLOR_ALPHA
0x0000A414 TD_VS_BORDER_COLOR_INDEX
0x0000A418 TD_VS_BORDER_COLOR_RED
0x0000A41C TD_VS_BORDER_COLOR_GREEN
0x0000A420 TD_VS_BORDER_COLOR_BLUE
0x0000A424 TD_VS_BORDER_COLOR_ALPHA
0x0000A428 TD_GS_BORDER_COLOR_INDEX
0x0000A42C TD_GS_BORDER_COLOR_RED
0x0000A430 TD_GS_BORDER_COLOR_GREEN
0x0000A434 TD_GS_BORDER_COLOR_BLUE
0x0000A438 TD_GS_BORDER_COLOR_ALPHA
0x0000A43C TD_HS_BORDER_COLOR_INDEX
0x0000A440 TD_HS_BORDER_COLOR_RED
0x0000A444 TD_HS_BORDER_COLOR_GREEN
0x0000A448 TD_HS_BORDER_COLOR_BLUE
0x0000A44C TD_HS_BORDER_COLOR_ALPHA
0x0000A450 TD_LS_BORDER_COLOR_INDEX
0x0000A454 TD_LS_BORDER_COLOR_RED
0x0000A458 TD_LS_BORDER_COLOR_GREEN
0x0000A45C TD_LS_BORDER_COLOR_BLUE
0x0000A460 TD_LS_BORDER_COLOR_ALPHA
0x0000A464 TD_CS_BORDER_COLOR_INDEX
0x0000A468 TD_CS_BORDER_COLOR_RED
0x0000A46C TD_CS_BORDER_COLOR_GREEN
0x0000A470 TD_CS_BORDER_COLOR_BLUE
0x0000A474 TD_CS_BORDER_COLOR_ALPHA
0x00028000 DB_RENDER_CONTROL
0x00028004 DB_COUNT_CONTROL
0x0002800C DB_RENDER_OVERRIDE
0x00028010 DB_RENDER_OVERRIDE2
0x00028028 DB_STENCIL_CLEAR
0x0002802C DB_DEPTH_CLEAR
0x00028030 PA_SC_SCREEN_SCISSOR_TL
0x00028034 PA_SC_SCREEN_SCISSOR_BR
0x00028140 SQ_ALU_CONST_BUFFER_SIZE_PS_0
0x00028144 SQ_ALU_CONST_BUFFER_SIZE_PS_1
0x00028148 SQ_ALU_CONST_BUFFER_SIZE_PS_2
0x0002814C SQ_ALU_CONST_BUFFER_SIZE_PS_3
0x00028150 SQ_ALU_CONST_BUFFER_SIZE_PS_4
0x00028154 SQ_ALU_CONST_BUFFER_SIZE_PS_5
0x00028158 SQ_ALU_CONST_BUFFER_SIZE_PS_6
0x0002815C SQ_ALU_CONST_BUFFER_SIZE_PS_7
0x00028160 SQ_ALU_CONST_BUFFER_SIZE_PS_8
0x00028164 SQ_ALU_CONST_BUFFER_SIZE_PS_9
0x00028168 SQ_ALU_CONST_BUFFER_SIZE_PS_10
0x0002816C SQ_ALU_CONST_BUFFER_SIZE_PS_11
0x00028170 SQ_ALU_CONST_BUFFER_SIZE_PS_12
0x00028174 SQ_ALU_CONST_BUFFER_SIZE_PS_13
0x00028178 SQ_ALU_CONST_BUFFER_SIZE_PS_14
0x0002817C SQ_ALU_CONST_BUFFER_SIZE_PS_15
0x00028180 SQ_ALU_CONST_BUFFER_SIZE_VS_0
0x00028184 SQ_ALU_CONST_BUFFER_SIZE_VS_1
0x00028188 SQ_ALU_CONST_BUFFER_SIZE_VS_2
0x0002818C SQ_ALU_CONST_BUFFER_SIZE_VS_3
0x00028190 SQ_ALU_CONST_BUFFER_SIZE_VS_4
0x00028194 SQ_ALU_CONST_BUFFER_SIZE_VS_5
0x00028198 SQ_ALU_CONST_BUFFER_SIZE_VS_6
0x0002819C SQ_ALU_CONST_BUFFER_SIZE_VS_7
0x000281A0 SQ_ALU_CONST_BUFFER_SIZE_VS_8
0x000281A4 SQ_ALU_CONST_BUFFER_SIZE_VS_9
0x000281A8 SQ_ALU_CONST_BUFFER_SIZE_VS_10
0x000281AC SQ_ALU_CONST_BUFFER_SIZE_VS_11
0x000281B0 SQ_ALU_CONST_BUFFER_SIZE_VS_12
0x000281B4 SQ_ALU_CONST_BUFFER_SIZE_VS_13
0x000281B8 SQ_ALU_CONST_BUFFER_SIZE_VS_14
0x000281BC SQ_ALU_CONST_BUFFER_SIZE_VS_15
0x000281C0 SQ_ALU_CONST_BUFFER_SIZE_GS_0
0x000281C4 SQ_ALU_CONST_BUFFER_SIZE_GS_1
0x000281C8 SQ_ALU_CONST_BUFFER_SIZE_GS_2
0x000281CC SQ_ALU_CONST_BUFFER_SIZE_GS_3
0x000281D0 SQ_ALU_CONST_BUFFER_SIZE_GS_4
0x000281D4 SQ_ALU_CONST_BUFFER_SIZE_GS_5
0x000281D8 SQ_ALU_CONST_BUFFER_SIZE_GS_6
0x000281DC SQ_ALU_CONST_BUFFER_SIZE_GS_7
0x000281E0 SQ_ALU_CONST_BUFFER_SIZE_GS_8
0x000281E4 SQ_ALU_CONST_BUFFER_SIZE_GS_9
0x000281E8 SQ_ALU_CONST_BUFFER_SIZE_GS_10
0x000281EC SQ_ALU_CONST_BUFFER_SIZE_GS_11
0x000281F0 SQ_ALU_CONST_BUFFER_SIZE_GS_12
0x000281F4 SQ_ALU_CONST_BUFFER_SIZE_GS_13
0x000281F8 SQ_ALU_CONST_BUFFER_SIZE_GS_14
0x000281FC SQ_ALU_CONST_BUFFER_SIZE_GS_15
0x00028200 PA_SC_WINDOW_OFFSET
0x00028204 PA_SC_WINDOW_SCISSOR_TL
0x00028208 PA_SC_WINDOW_SCISSOR_BR
0x0002820C PA_SC_CLIPRECT_RULE
0x00028210 PA_SC_CLIPRECT_0_TL
0x00028214 PA_SC_CLIPRECT_0_BR
0x00028218 PA_SC_CLIPRECT_1_TL
0x0002821C PA_SC_CLIPRECT_1_BR
0x00028220 PA_SC_CLIPRECT_2_TL
0x00028224 PA_SC_CLIPRECT_2_BR
0x00028228 PA_SC_CLIPRECT_3_TL
0x0002822C PA_SC_CLIPRECT_3_BR
0x00028230 PA_SC_EDGERULE
0x00028234 PA_SU_HARDWARE_SCREEN_OFFSET
0x00028240 PA_SC_GENERIC_SCISSOR_TL
0x00028244 PA_SC_GENERIC_SCISSOR_BR
0x00028250 PA_SC_VPORT_SCISSOR_0_TL
0x00028254 PA_SC_VPORT_SCISSOR_0_BR
0x00028258 PA_SC_VPORT_SCISSOR_1_TL
0x0002825C PA_SC_VPORT_SCISSOR_1_BR
0x00028260 PA_SC_VPORT_SCISSOR_2_TL
0x00028264 PA_SC_VPORT_SCISSOR_2_BR
0x00028268 PA_SC_VPORT_SCISSOR_3_TL
0x0002826C PA_SC_VPORT_SCISSOR_3_BR
0x00028270 PA_SC_VPORT_SCISSOR_4_TL
0x00028274 PA_SC_VPORT_SCISSOR_4_BR
0x00028278 PA_SC_VPORT_SCISSOR_5_TL
0x0002827C PA_SC_VPORT_SCISSOR_5_BR
0x00028280 PA_SC_VPORT_SCISSOR_6_TL
0x00028284 PA_SC_VPORT_SCISSOR_6_BR
0x00028288 PA_SC_VPORT_SCISSOR_7_TL
0x0002828C PA_SC_VPORT_SCISSOR_7_BR
0x00028290 PA_SC_VPORT_SCISSOR_8_TL
0x00028294 PA_SC_VPORT_SCISSOR_8_BR
0x00028298 PA_SC_VPORT_SCISSOR_9_TL
0x0002829C PA_SC_VPORT_SCISSOR_9_BR
0x000282A0 PA_SC_VPORT_SCISSOR_10_TL
0x000282A4 PA_SC_VPORT_SCISSOR_10_BR
0x000282A8 PA_SC_VPORT_SCISSOR_11_TL
0x000282AC PA_SC_VPORT_SCISSOR_11_BR
0x000282B0 PA_SC_VPORT_SCISSOR_12_TL
0x000282B4 PA_SC_VPORT_SCISSOR_12_BR
0x000282B8 PA_SC_VPORT_SCISSOR_13_TL
0x000282BC PA_SC_VPORT_SCISSOR_13_BR
0x000282C0 PA_SC_VPORT_SCISSOR_14_TL
0x000282C4 PA_SC_VPORT_SCISSOR_14_BR
0x000282C8 PA_SC_VPORT_SCISSOR_15_TL
0x000282CC PA_SC_VPORT_SCISSOR_15_BR
0x000282D0 PA_SC_VPORT_ZMIN_0
0x000282D4 PA_SC_VPORT_ZMAX_0
0x000282D8 PA_SC_VPORT_ZMIN_1
0x000282DC PA_SC_VPORT_ZMAX_1
0x000282E0 PA_SC_VPORT_ZMIN_2
0x000282E4 PA_SC_VPORT_ZMAX_2
0x000282E8 PA_SC_VPORT_ZMIN_3
0x000282EC PA_SC_VPORT_ZMAX_3
0x000282F0 PA_SC_VPORT_ZMIN_4
0x000282F4 PA_SC_VPORT_ZMAX_4
0x000282F8 PA_SC_VPORT_ZMIN_5
0x000282FC PA_SC_VPORT_ZMAX_5
0x00028300 PA_SC_VPORT_ZMIN_6
0x00028304 PA_SC_VPORT_ZMAX_6
0x00028308 PA_SC_VPORT_ZMIN_7
0x0002830C PA_SC_VPORT_ZMAX_7
0x00028310 PA_SC_VPORT_ZMIN_8
0x00028314 PA_SC_VPORT_ZMAX_8
0x00028318 PA_SC_VPORT_ZMIN_9
0x0002831C PA_SC_VPORT_ZMAX_9
0x00028320 PA_SC_VPORT_ZMIN_10
0x00028324 PA_SC_VPORT_ZMAX_10
0x00028328 PA_SC_VPORT_ZMIN_11
0x0002832C PA_SC_VPORT_ZMAX_11
0x00028330 PA_SC_VPORT_ZMIN_12
0x00028334 PA_SC_VPORT_ZMAX_12
0x00028338 PA_SC_VPORT_ZMIN_13
0x0002833C PA_SC_VPORT_ZMAX_13
0x00028340 PA_SC_VPORT_ZMIN_14
0x00028344 PA_SC_VPORT_ZMAX_14
0x00028348 PA_SC_VPORT_ZMIN_15
0x0002834C PA_SC_VPORT_ZMAX_15
0x00028354 SX_SURFACE_SYNC
0x0002835C SX_SCATTER_EXPORT_SIZE
0x00028380 SQ_VTX_SEMANTIC_0
0x00028384 SQ_VTX_SEMANTIC_1
0x00028388 SQ_VTX_SEMANTIC_2
0x0002838C SQ_VTX_SEMANTIC_3
0x00028390 SQ_VTX_SEMANTIC_4
0x00028394 SQ_VTX_SEMANTIC_5
0x00028398 SQ_VTX_SEMANTIC_6
0x0002839C SQ_VTX_SEMANTIC_7
0x000283A0 SQ_VTX_SEMANTIC_8
0x000283A4 SQ_VTX_SEMANTIC_9
0x000283A8 SQ_VTX_SEMANTIC_10
0x000283AC SQ_VTX_SEMANTIC_11
0x000283B0 SQ_VTX_SEMANTIC_12
0x000283B4 SQ_VTX_SEMANTIC_13
0x000283B8 SQ_VTX_SEMANTIC_14
0x000283BC SQ_VTX_SEMANTIC_15
0x000283C0 SQ_VTX_SEMANTIC_16
0x000283C4 SQ_VTX_SEMANTIC_17
0x000283C8 SQ_VTX_SEMANTIC_18
0x000283CC SQ_VTX_SEMANTIC_19
0x000283D0 SQ_VTX_SEMANTIC_20
0x000283D4 SQ_VTX_SEMANTIC_21
0x000283D8 SQ_VTX_SEMANTIC_22
0x000283DC SQ_VTX_SEMANTIC_23
0x000283E0 SQ_VTX_SEMANTIC_24
0x000283E4 SQ_VTX_SEMANTIC_25
0x000283E8 SQ_VTX_SEMANTIC_26
0x000283EC SQ_VTX_SEMANTIC_27
0x000283F0 SQ_VTX_SEMANTIC_28
0x000283F4 SQ_VTX_SEMANTIC_29
0x000283F8 SQ_VTX_SEMANTIC_30
0x000283FC SQ_VTX_SEMANTIC_31
0x00028400 VGT_MAX_VTX_INDX
0x00028404 VGT_MIN_VTX_INDX
0x00028408 VGT_INDX_OFFSET
0x0002840C VGT_MULTI_PRIM_IB_RESET_INDX
0x00028410 SX_ALPHA_TEST_CONTROL
0x00028414 CB_BLEND_RED
0x00028418 CB_BLEND_GREEN
0x0002841C CB_BLEND_BLUE
0x00028420 CB_BLEND_ALPHA
0x00028430 DB_STENCILREFMASK
0x00028434 DB_STENCILREFMASK_BF
0x00028438 SX_ALPHA_REF
0x0002843C PA_CL_VPORT_XSCALE_0
0x00028440 PA_CL_VPORT_XOFFSET_0
0x00028444 PA_CL_VPORT_YSCALE_0
0x00028448 PA_CL_VPORT_YOFFSET_0
0x0002844C PA_CL_VPORT_ZSCALE_0
0x00028450 PA_CL_VPORT_ZOFFSET_0
0x00028454 PA_CL_VPORT_XSCALE_1
0x00028458 PA_CL_VPORT_XOFFSET_1
0x0002845C PA_CL_VPORT_YSCALE_1
0x00028460 PA_CL_VPORT_YOFFSET_1
0x00028464 PA_CL_VPORT_ZSCALE_1
0x00028468 PA_CL_VPORT_ZOFFSET_1
0x0002846C PA_CL_VPORT_XSCALE_2
0x00028470 PA_CL_VPORT_XOFFSET_2
0x00028474 PA_CL_VPORT_YSCALE_2
0x00028478 PA_CL_VPORT_YOFFSET_2
0x0002847C PA_CL_VPORT_ZSCALE_2
0x00028480 PA_CL_VPORT_ZOFFSET_2
0x00028484 PA_CL_VPORT_XSCALE_3
0x00028488 PA_CL_VPORT_XOFFSET_3
0x0002848C PA_CL_VPORT_YSCALE_3
0x00028490 PA_CL_VPORT_YOFFSET_3
0x00028494 PA_CL_VPORT_ZSCALE_3
0x00028498 PA_CL_VPORT_ZOFFSET_3
0x0002849C PA_CL_VPORT_XSCALE_4
0x000284A0 PA_CL_VPORT_XOFFSET_4
0x000284A4 PA_CL_VPORT_YSCALE_4
0x000284A8 PA_CL_VPORT_YOFFSET_4
0x000284AC PA_CL_VPORT_ZSCALE_4
0x000284B0 PA_CL_VPORT_ZOFFSET_4
0x000284B4 PA_CL_VPORT_XSCALE_5
0x000284B8 PA_CL_VPORT_XOFFSET_5
0x000284BC PA_CL_VPORT_YSCALE_5
0x000284C0 PA_CL_VPORT_YOFFSET_5
0x000284C4 PA_CL_VPORT_ZSCALE_5
0x000284C8 PA_CL_VPORT_ZOFFSET_5
0x000284CC PA_CL_VPORT_XSCALE_6
0x000284D0 PA_CL_VPORT_XOFFSET_6
0x000284D4 PA_CL_VPORT_YSCALE_6
0x000284D8 PA_CL_VPORT_YOFFSET_6
0x000284DC PA_CL_VPORT_ZSCALE_6
0x000284E0 PA_CL_VPORT_ZOFFSET_6
0x000284E4 PA_CL_VPORT_XSCALE_7
0x000284E8 PA_CL_VPORT_XOFFSET_7
0x000284EC PA_CL_VPORT_YSCALE_7
0x000284F0 PA_CL_VPORT_YOFFSET_7
0x000284F4 PA_CL_VPORT_ZSCALE_7
0x000284F8 PA_CL_VPORT_ZOFFSET_7
0x000284FC PA_CL_VPORT_XSCALE_8
0x00028500 PA_CL_VPORT_XOFFSET_8
0x00028504 PA_CL_VPORT_YSCALE_8
0x00028508 PA_CL_VPORT_YOFFSET_8
0x0002850C PA_CL_VPORT_ZSCALE_8
0x00028510 PA_CL_VPORT_ZOFFSET_8
0x00028514 PA_CL_VPORT_XSCALE_9
0x00028518 PA_CL_VPORT_XOFFSET_9
0x0002851C PA_CL_VPORT_YSCALE_9
0x00028520 PA_CL_VPORT_YOFFSET_9
0x00028524 PA_CL_VPORT_ZSCALE_9
0x00028528 PA_CL_VPORT_ZOFFSET_9
0x0002852C PA_CL_VPORT_XSCALE_10
0x00028530 PA_CL_VPORT_XOFFSET_10
0x00028534 PA_CL_VPORT_YSCALE_10
0x00028538 PA_CL_VPORT_YOFFSET_10
0x0002853C PA_CL_VPORT_ZSCALE_10
0x00028540 PA_CL_VPORT_ZOFFSET_10
0x00028544 PA_CL_VPORT_XSCALE_11
0x00028548 PA_CL_VPORT_XOFFSET_11
0x0002854C PA_CL_VPORT_YSCALE_11
0x00028550 PA_CL_VPORT_YOFFSET_11
0x00028554 PA_CL_VPORT_ZSCALE_11
0x00028558 PA_CL_VPORT_ZOFFSET_11
0x0002855C PA_CL_VPORT_XSCALE_12
0x00028560 PA_CL_VPORT_XOFFSET_12
0x00028564 PA_CL_VPORT_YSCALE_12
0x00028568 PA_CL_VPORT_YOFFSET_12
0x0002856C PA_CL_VPORT_ZSCALE_12
0x00028570 PA_CL_VPORT_ZOFFSET_12
0x00028574 PA_CL_VPORT_XSCALE_13
0x00028578 PA_CL_VPORT_XOFFSET_13
0x0002857C PA_CL_VPORT_YSCALE_13
0x00028580 PA_CL_VPORT_YOFFSET_13
0x00028584 PA_CL_VPORT_ZSCALE_13
0x00028588 PA_CL_VPORT_ZOFFSET_13
0x0002858C PA_CL_VPORT_XSCALE_14
0x00028590 PA_CL_VPORT_XOFFSET_14
0x00028594 PA_CL_VPORT_YSCALE_14
0x00028598 PA_CL_VPORT_YOFFSET_14
0x0002859C PA_CL_VPORT_ZSCALE_14
0x000285A0 PA_CL_VPORT_ZOFFSET_14
0x000285A4 PA_CL_VPORT_XSCALE_15
0x000285A8 PA_CL_VPORT_XOFFSET_15
0x000285AC PA_CL_VPORT_YSCALE_15
0x000285B0 PA_CL_VPORT_YOFFSET_15
0x000285B4 PA_CL_VPORT_ZSCALE_15
0x000285B8 PA_CL_VPORT_ZOFFSET_15
0x000285BC PA_CL_UCP_0_X
0x000285C0 PA_CL_UCP_0_Y
0x000285C4 PA_CL_UCP_0_Z
0x000285C8 PA_CL_UCP_0_W
0x000285CC PA_CL_UCP_1_X
0x000285D0 PA_CL_UCP_1_Y
0x000285D4 PA_CL_UCP_1_Z
0x000285D8 PA_CL_UCP_1_W
0x000285DC PA_CL_UCP_2_X
0x000285E0 PA_CL_UCP_2_Y
0x000285E4 PA_CL_UCP_2_Z
0x000285E8 PA_CL_UCP_2_W
0x000285EC PA_CL_UCP_3_X
0x000285F0 PA_CL_UCP_3_Y
0x000285F4 PA_CL_UCP_3_Z
0x000285F8 PA_CL_UCP_3_W
0x000285FC PA_CL_UCP_4_X
0x00028600 PA_CL_UCP_4_Y
0x00028604 PA_CL_UCP_4_Z
0x00028608 PA_CL_UCP_4_W
0x0002860C PA_CL_UCP_5_X
0x00028610 PA_CL_UCP_5_Y
0x00028614 PA_CL_UCP_5_Z
0x00028618 PA_CL_UCP_5_W
0x0002861C SPI_VS_OUT_ID_0
0x00028620 SPI_VS_OUT_ID_1
0x00028624 SPI_VS_OUT_ID_2
0x00028628 SPI_VS_OUT_ID_3
0x0002862C SPI_VS_OUT_ID_4
0x00028630 SPI_VS_OUT_ID_5
0x00028634 SPI_VS_OUT_ID_6
0x00028638 SPI_VS_OUT_ID_7
0x0002863C SPI_VS_OUT_ID_8
0x00028640 SPI_VS_OUT_ID_9
0x00028644 SPI_PS_INPUT_CNTL_0
0x00028648 SPI_PS_INPUT_CNTL_1
0x0002864C SPI_PS_INPUT_CNTL_2
0x00028650 SPI_PS_INPUT_CNTL_3
0x00028654 SPI_PS_INPUT_CNTL_4
0x00028658 SPI_PS_INPUT_CNTL_5
0x0002865C SPI_PS_INPUT_CNTL_6
0x00028660 SPI_PS_INPUT_CNTL_7
0x00028664 SPI_PS_INPUT_CNTL_8
0x00028668 SPI_PS_INPUT_CNTL_9
0x0002866C SPI_PS_INPUT_CNTL_10
0x00028670 SPI_PS_INPUT_CNTL_11
0x00028674 SPI_PS_INPUT_CNTL_12
0x00028678 SPI_PS_INPUT_CNTL_13
0x0002867C SPI_PS_INPUT_CNTL_14
0x00028680 SPI_PS_INPUT_CNTL_15
0x00028684 SPI_PS_INPUT_CNTL_16
0x00028688 SPI_PS_INPUT_CNTL_17
0x0002868C SPI_PS_INPUT_CNTL_18
0x00028690 SPI_PS_INPUT_CNTL_19
0x00028694 SPI_PS_INPUT_CNTL_20
0x00028698 SPI_PS_INPUT_CNTL_21
0x0002869C SPI_PS_INPUT_CNTL_22
0x000286A0 SPI_PS_INPUT_CNTL_23
0x000286A4 SPI_PS_INPUT_CNTL_24
0x000286A8 SPI_PS_INPUT_CNTL_25
0x000286AC SPI_PS_INPUT_CNTL_26
0x000286B0 SPI_PS_INPUT_CNTL_27
0x000286B4 SPI_PS_INPUT_CNTL_28
0x000286B8 SPI_PS_INPUT_CNTL_29
0x000286BC SPI_PS_INPUT_CNTL_30
0x000286C0 SPI_PS_INPUT_CNTL_31
0x000286C4 SPI_VS_OUT_CONFIG
0x000286C8 SPI_THREAD_GROUPING
0x000286CC SPI_PS_IN_CONTROL_0
0x000286D0 SPI_PS_IN_CONTROL_1
0x000286D4 SPI_INTERP_CONTROL_0
0x000286D8 SPI_INPUT_Z
0x000286DC SPI_FOG_CNTL
0x000286E0 SPI_BARYC_CNTL
0x000286E4 SPI_PS_IN_CONTROL_2
0x000286E8 SPI_COMPUTE_INPUT_CNTL
0x000286EC SPI_COMPUTE_NUM_THREAD_X
0x000286F0 SPI_COMPUTE_NUM_THREAD_Y
0x000286F4 SPI_COMPUTE_NUM_THREAD_Z
0x000286F8 SPI_GPR_MGMT
0x000286FC SPI_LDS_MGMT
0x00028700 SPI_STACK_MGMT
0x00028704 SPI_WAVE_MGMT_1
0x00028708 SPI_WAVE_MGMT_2
0x00028720 GDS_ADDR_BASE
0x00028724 GDS_ADDR_SIZE
0x00028780 CB_BLEND0_CONTROL
0x00028784 CB_BLEND1_CONTROL
0x00028788 CB_BLEND2_CONTROL
0x0002878C CB_BLEND3_CONTROL
0x00028790 CB_BLEND4_CONTROL
0x00028794 CB_BLEND5_CONTROL
0x00028798 CB_BLEND6_CONTROL
0x0002879C CB_BLEND7_CONTROL
0x000287CC CS_COPY_STATE
0x000287D0 GFX_COPY_STATE
0x000287D4 PA_CL_POINT_X_RAD
0x000287D8 PA_CL_POINT_Y_RAD
0x000287DC PA_CL_POINT_SIZE
0x000287E0 PA_CL_POINT_CULL_RAD
0x00028808 CB_COLOR_CONTROL
0x0002880C DB_SHADER_CONTROL
0x00028810 PA_CL_CLIP_CNTL
0x00028814 PA_SU_SC_MODE_CNTL
0x00028818 PA_CL_VTE_CNTL
0x0002881C PA_CL_VS_OUT_CNTL
0x00028820 PA_CL_NANINF_CNTL
0x00028824 PA_SU_LINE_STIPPLE_CNTL
0x00028828 PA_SU_LINE_STIPPLE_SCALE
0x0002882C PA_SU_PRIM_FILTER_CNTL
0x00028844 SQ_PGM_RESOURCES_PS
0x00028848 SQ_PGM_RESOURCES_2_PS
0x0002884C SQ_PGM_EXPORTS_PS
0x00028860 SQ_PGM_RESOURCES_VS
0x00028864 SQ_PGM_RESOURCES_2_VS
0x00028878 SQ_PGM_RESOURCES_GS
0x0002887C SQ_PGM_RESOURCES_2_GS
0x00028890 SQ_PGM_RESOURCES_ES
0x00028894 SQ_PGM_RESOURCES_2_ES
0x000288A8 SQ_PGM_RESOURCES_FS
0x000288BC SQ_PGM_RESOURCES_HS
0x000288C0 SQ_PGM_RESOURCES_2_HS
0x000288D4 SQ_PGM_RESOURCES_LS
0x000288D8 SQ_PGM_RESOURCES_2_LS
0x000288E8 SQ_LDS_ALLOC
0x000288EC SQ_LDS_ALLOC_PS
0x000288F0 SQ_VTX_SEMANTIC_CLEAR
0x00028A00 PA_SU_POINT_SIZE
0x00028A04 PA_SU_POINT_MINMAX
0x00028A08 PA_SU_LINE_CNTL
0x00028A0C PA_SC_LINE_STIPPLE
0x00028A10 VGT_OUTPUT_PATH_CNTL
0x00028A14 VGT_HOS_CNTL
0x00028A18 VGT_HOS_MAX_TESS_LEVEL
0x00028A1C VGT_HOS_MIN_TESS_LEVEL
0x00028A20 VGT_HOS_REUSE_DEPTH
0x00028A24 VGT_GROUP_PRIM_TYPE
0x00028A28 VGT_GROUP_FIRST_DECR
0x00028A2C VGT_GROUP_DECR
0x00028A30 VGT_GROUP_VECT_0_CNTL
0x00028A34 VGT_GROUP_VECT_1_CNTL
0x00028A38 VGT_GROUP_VECT_0_FMT_CNTL
0x00028A3C VGT_GROUP_VECT_1_FMT_CNTL
0x00028A40 VGT_GS_MODE
0x00028A48 PA_SC_MODE_CNTL_0
0x00028A4C PA_SC_MODE_CNTL_1
0x00028A50 VGT_ENHANCE
0x00028A54 VGT_GS_PER_ES
0x00028A58 VGT_ES_PER_GS
0x00028A5C VGT_GS_PER_VS
0x00028A6C VGT_GS_OUT_PRIM_TYPE
0x00028A70 IA_ENHANCE
0x00028A84 VGT_PRIMITIVEID_EN
0x00028A94 VGT_MULTI_PRIM_IB_RESET_EN
0x00028AA0 VGT_INSTANCE_STEP_RATE_0
0x00028AA4 VGT_INSTANCE_STEP_RATE_1
0x00028AA8 IA_MULTI_VGT_PARAM
0x00028AB4 VGT_REUSE_OFF
0x00028AB8 VGT_VTX_CNT_EN
0x00028AC0 DB_SRESULTS_COMPARE_STATE0
0x00028AC4 DB_SRESULTS_COMPARE_STATE1
0x00028AC8 DB_PRELOAD_CONTROL
0x00028AD4 VGT_STRMOUT_VTX_STRIDE_0
0x00028AE4 VGT_STRMOUT_VTX_STRIDE_1
0x00028AF4 VGT_STRMOUT_VTX_STRIDE_2
0x00028B04 VGT_STRMOUT_VTX_STRIDE_3
0x00028B28 VGT_STRMOUT_DRAW_OPAQUE_OFFSET
0x00028B2C VGT_STRMOUT_DRAW_OPAQUE_BUFFER_FILLED_SIZE
0x00028B30 VGT_STRMOUT_DRAW_OPAQUE_VERTEX_STRIDE
0x00028B38 VGT_GS_MAX_VERT_OUT
0x00028B54 VGT_SHADER_STAGES_EN
0x00028B58 VGT_LS_HS_CONFIG
0x00028B6C VGT_TF_PARAM
0x00028B70 DB_ALPHA_TO_MASK
0x00028B74 VGT_DISPATCH_INITIATOR
0x00028B78 PA_SU_POLY_OFFSET_DB_FMT_CNTL
0x00028B7C PA_SU_POLY_OFFSET_CLAMP
0x00028B80 PA_SU_POLY_OFFSET_FRONT_SCALE
0x00028B84 PA_SU_POLY_OFFSET_FRONT_OFFSET
0x00028B88 PA_SU_POLY_OFFSET_BACK_SCALE
0x00028B8C PA_SU_POLY_OFFSET_BACK_OFFSET
0x00028B74 VGT_GS_INSTANCE_CNT
0x00028BD4 PA_SC_CENTROID_PRIORITY_0
0x00028BD8 PA_SC_CENTROID_PRIORITY_1
0x00028BDC PA_SC_LINE_CNTL
0x00028BE4 PA_SU_VTX_CNTL
0x00028BE8 PA_CL_GB_VERT_CLIP_ADJ
0x00028BEC PA_CL_GB_VERT_DISC_ADJ
0x00028BF0 PA_CL_GB_HORZ_CLIP_ADJ
0x00028BF4 PA_CL_GB_HORZ_DISC_ADJ
0x00028BF8 PA_SC_AA_SAMPLE_LOCS_PIXEL_X0_Y0_0
0x00028BFC PA_SC_AA_SAMPLE_LOCS_PIXEL_X0_Y0_1
0x00028C00 PA_SC_AA_SAMPLE_LOCS_PIXEL_X0_Y0_2
0x00028C04 PA_SC_AA_SAMPLE_LOCS_PIXEL_X0_Y0_3
0x00028C08 PA_SC_AA_SAMPLE_LOCS_PIXEL_X1_Y0_0
0x00028C0C PA_SC_AA_SAMPLE_LOCS_PIXEL_X1_Y0_1
0x00028C10 PA_SC_AA_SAMPLE_LOCS_PIXEL_X1_Y0_2
0x00028C14 PA_SC_AA_SAMPLE_LOCS_PIXEL_X1_Y0_3
0x00028C18 PA_SC_AA_SAMPLE_LOCS_PIXEL_X0_Y1_0
0x00028C1C PA_SC_AA_SAMPLE_LOCS_PIXEL_X0_Y1_1
0x00028C20 PA_SC_AA_SAMPLE_LOCS_PIXEL_X0_Y1_2
0x00028C24 PA_SC_AA_SAMPLE_LOCS_PIXEL_X0_Y1_3
0x00028C28 PA_SC_AA_SAMPLE_LOCS_PIXEL_X1_Y1_0
0x00028C2C PA_SC_AA_SAMPLE_LOCS_PIXEL_X1_Y1_1
0x00028C30 PA_SC_AA_SAMPLE_LOCS_PIXEL_X1_Y1_2
0x00028C34 PA_SC_AA_SAMPLE_LOCS_PIXEL_X1_Y1_3
0x00028C38 PA_SC_AA_MASK_X0_Y0_X1_Y0
0x00028C3C PA_SC_AA_MASK_X0_Y1_X1_Y1
0x00028C78 CB_COLOR0_DIM
0x00028CB4 CB_COLOR1_DIM
0x00028CF0 CB_COLOR2_DIM
0x00028D2C CB_COLOR3_DIM
0x00028D68 CB_COLOR4_DIM
0x00028DA4 CB_COLOR5_DIM
0x00028DE0 CB_COLOR6_DIM
0x00028E1C CB_COLOR7_DIM
0x00028E58 CB_COLOR8_DIM
0x00028E74 CB_COLOR9_DIM
0x00028E90 CB_COLOR10_DIM
0x00028EAC CB_COLOR11_DIM
0x00028C8C CB_COLOR0_CLEAR_WORD0
0x00028C90 CB_COLOR0_CLEAR_WORD1
0x00028C94 CB_COLOR0_CLEAR_WORD2
0x00028C98 CB_COLOR0_CLEAR_WORD3
0x00028CC8 CB_COLOR1_CLEAR_WORD0
0x00028CCC CB_COLOR1_CLEAR_WORD1
0x00028CD0 CB_COLOR1_CLEAR_WORD2
0x00028CD4 CB_COLOR1_CLEAR_WORD3
0x00028D04 CB_COLOR2_CLEAR_WORD0
0x00028D08 CB_COLOR2_CLEAR_WORD1
0x00028D0C CB_COLOR2_CLEAR_WORD2
0x00028D10 CB_COLOR2_CLEAR_WORD3
0x00028D40 CB_COLOR3_CLEAR_WORD0
0x00028D44 CB_COLOR3_CLEAR_WORD1
0x00028D48 CB_COLOR3_CLEAR_WORD2
0x00028D4C CB_COLOR3_CLEAR_WORD3
0x00028D7C CB_COLOR4_CLEAR_WORD0
0x00028D80 CB_COLOR4_CLEAR_WORD1
0x00028D84 CB_COLOR4_CLEAR_WORD2
0x00028D88 CB_COLOR4_CLEAR_WORD3
0x00028DB8 CB_COLOR5_CLEAR_WORD0
0x00028DBC CB_COLOR5_CLEAR_WORD1
0x00028DC0 CB_COLOR5_CLEAR_WORD2
0x00028DC4 CB_COLOR5_CLEAR_WORD3
0x00028DF4 CB_COLOR6_CLEAR_WORD0
0x00028DF8 CB_COLOR6_CLEAR_WORD1
0x00028DFC CB_COLOR6_CLEAR_WORD2
0x00028E00 CB_COLOR6_CLEAR_WORD3
0x00028E30 CB_COLOR7_CLEAR_WORD0
0x00028E34 CB_COLOR7_CLEAR_WORD1
0x00028E38 CB_COLOR7_CLEAR_WORD2
0x00028E3C CB_COLOR7_CLEAR_WORD3
0x00028F80 SQ_ALU_CONST_BUFFER_SIZE_HS_0
0x00028F84 SQ_ALU_CONST_BUFFER_SIZE_HS_1
0x00028F88 SQ_ALU_CONST_BUFFER_SIZE_HS_2
0x00028F8C SQ_ALU_CONST_BUFFER_SIZE_HS_3
0x00028F90 SQ_ALU_CONST_BUFFER_SIZE_HS_4
0x00028F94 SQ_ALU_CONST_BUFFER_SIZE_HS_5
0x00028F98 SQ_ALU_CONST_BUFFER_SIZE_HS_6
0x00028F9C SQ_ALU_CONST_BUFFER_SIZE_HS_7
0x00028FA0 SQ_ALU_CONST_BUFFER_SIZE_HS_8
0x00028FA4 SQ_ALU_CONST_BUFFER_SIZE_HS_9
0x00028FA8 SQ_ALU_CONST_BUFFER_SIZE_HS_10
0x00028FAC SQ_ALU_CONST_BUFFER_SIZE_HS_11
0x00028FB0 SQ_ALU_CONST_BUFFER_SIZE_HS_12
0x00028FB4 SQ_ALU_CONST_BUFFER_SIZE_HS_13
0x00028FB8 SQ_ALU_CONST_BUFFER_SIZE_HS_14
0x00028FBC SQ_ALU_CONST_BUFFER_SIZE_HS_15
0x00028FC0 SQ_ALU_CONST_BUFFER_SIZE_LS_0
0x00028FC4 SQ_ALU_CONST_BUFFER_SIZE_LS_1
0x00028FC8 SQ_ALU_CONST_BUFFER_SIZE_LS_2
0x00028FCC SQ_ALU_CONST_BUFFER_SIZE_LS_3
0x00028FD0 SQ_ALU_CONST_BUFFER_SIZE_LS_4
0x00028FD4 SQ_ALU_CONST_BUFFER_SIZE_LS_5
0x00028FD8 SQ_ALU_CONST_BUFFER_SIZE_LS_6
0x00028FDC SQ_ALU_CONST_BUFFER_SIZE_LS_7
0x00028FE0 SQ_ALU_CONST_BUFFER_SIZE_LS_8
0x00028FE4 SQ_ALU_CONST_BUFFER_SIZE_LS_9
0x00028FE8 SQ_ALU_CONST_BUFFER_SIZE_LS_10
0x00028FEC SQ_ALU_CONST_BUFFER_SIZE_LS_11
0x00028FF0 SQ_ALU_CONST_BUFFER_SIZE_LS_12
0x00028FF4 SQ_ALU_CONST_BUFFER_SIZE_LS_13
0x00028FF8 SQ_ALU_CONST_BUFFER_SIZE_LS_14
0x00028FFC SQ_ALU_CONST_BUFFER_SIZE_LS_15
0x0003CFF0 SQ_VTX_BASE_VTX_LOC
0x0003CFF4 SQ_VTX_START_INST_LOC
0x0003FF00 SQ_TEX_SAMPLER_CLEAR
0x0003FF04 SQ_TEX_RESOURCE_CLEAR
0x0003FF08 SQ_LOOP_BOOL_CLEAR

View File

@@ -0,0 +1,644 @@
evergreen 0x9400
0x0000802C GRBM_GFX_INDEX
0x00008040 WAIT_UNTIL
0x00008044 WAIT_UNTIL_POLL_CNTL
0x00008048 WAIT_UNTIL_POLL_MASK
0x0000804c WAIT_UNTIL_POLL_REFDATA
0x000084FC CP_STRMOUT_CNTL
0x000085F0 CP_COHER_CNTL
0x000085F4 CP_COHER_SIZE
0x000088B0 VGT_VTX_VECT_EJECT_REG
0x000088C4 VGT_CACHE_INVALIDATION
0x000088D4 VGT_GS_VERTEX_REUSE
0x00008958 VGT_PRIMITIVE_TYPE
0x0000895C VGT_INDEX_TYPE
0x00008970 VGT_NUM_INDICES
0x00008974 VGT_NUM_INSTANCES
0x00008990 VGT_COMPUTE_DIM_X
0x00008994 VGT_COMPUTE_DIM_Y
0x00008998 VGT_COMPUTE_DIM_Z
0x0000899C VGT_COMPUTE_START_X
0x000089A0 VGT_COMPUTE_START_Y
0x000089A4 VGT_COMPUTE_START_Z
0x000089AC VGT_COMPUTE_THREAD_GOURP_SIZE
0x00008A14 PA_CL_ENHANCE
0x00008A60 PA_SC_LINE_STIPPLE_VALUE
0x00008B10 PA_SC_LINE_STIPPLE_STATE
0x00008BF0 PA_SC_ENHANCE
0x00008D8C SQ_DYN_GPR_CNTL_PS_FLUSH_REQ
0x00008D90 SQ_DYN_GPR_OPTIMIZATION
0x00008D94 SQ_DYN_GPR_SIMD_LOCK_EN
0x00008D98 SQ_DYN_GPR_THREAD_LIMIT
0x00008D9C SQ_DYN_GPR_LDS_LIMIT
0x00008C00 SQ_CONFIG
0x00008C04 SQ_GPR_RESOURCE_MGMT_1
0x00008C08 SQ_GPR_RESOURCE_MGMT_2
0x00008C0C SQ_GPR_RESOURCE_MGMT_3
0x00008C10 SQ_GLOBAL_GPR_RESOURCE_MGMT_1
0x00008C14 SQ_GLOBAL_GPR_RESOURCE_MGMT_2
0x00008C18 SQ_THREAD_RESOURCE_MGMT
0x00008C1C SQ_THREAD_RESOURCE_MGMT_2
0x00008C20 SQ_STACK_RESOURCE_MGMT_1
0x00008C24 SQ_STACK_RESOURCE_MGMT_2
0x00008C28 SQ_STACK_RESOURCE_MGMT_3
0x00008DF8 SQ_CONST_MEM_BASE
0x00008E20 SQ_STATIC_THREAD_MGMT_1
0x00008E24 SQ_STATIC_THREAD_MGMT_2
0x00008E28 SQ_STATIC_THREAD_MGMT_3
0x00008E2C SQ_LDS_RESOURCE_MGMT
0x00008E48 SQ_EX_ALLOC_TABLE_SLOTS
0x00009014 SX_MEMORY_EXPORT_SIZE
0x00009100 SPI_CONFIG_CNTL
0x0000913C SPI_CONFIG_CNTL_1
0x00009508 TA_CNTL_AUX
0x00009700 VC_CNTL
0x00009714 VC_ENHANCE
0x00009830 DB_DEBUG
0x00009834 DB_DEBUG2
0x00009838 DB_DEBUG3
0x0000983C DB_DEBUG4
0x00009854 DB_WATERMARKS
0x0000A400 TD_PS_BORDER_COLOR_INDEX
0x0000A404 TD_PS_BORDER_COLOR_RED
0x0000A408 TD_PS_BORDER_COLOR_GREEN
0x0000A40C TD_PS_BORDER_COLOR_BLUE
0x0000A410 TD_PS_BORDER_COLOR_ALPHA
0x0000A414 TD_VS_BORDER_COLOR_INDEX
0x0000A418 TD_VS_BORDER_COLOR_RED
0x0000A41C TD_VS_BORDER_COLOR_GREEN
0x0000A420 TD_VS_BORDER_COLOR_BLUE
0x0000A424 TD_VS_BORDER_COLOR_ALPHA
0x0000A428 TD_GS_BORDER_COLOR_INDEX
0x0000A42C TD_GS_BORDER_COLOR_RED
0x0000A430 TD_GS_BORDER_COLOR_GREEN
0x0000A434 TD_GS_BORDER_COLOR_BLUE
0x0000A438 TD_GS_BORDER_COLOR_ALPHA
0x0000A43C TD_HS_BORDER_COLOR_INDEX
0x0000A440 TD_HS_BORDER_COLOR_RED
0x0000A444 TD_HS_BORDER_COLOR_GREEN
0x0000A448 TD_HS_BORDER_COLOR_BLUE
0x0000A44C TD_HS_BORDER_COLOR_ALPHA
0x0000A450 TD_LS_BORDER_COLOR_INDEX
0x0000A454 TD_LS_BORDER_COLOR_RED
0x0000A458 TD_LS_BORDER_COLOR_GREEN
0x0000A45C TD_LS_BORDER_COLOR_BLUE
0x0000A460 TD_LS_BORDER_COLOR_ALPHA
0x0000A464 TD_CS_BORDER_COLOR_INDEX
0x0000A468 TD_CS_BORDER_COLOR_RED
0x0000A46C TD_CS_BORDER_COLOR_GREEN
0x0000A470 TD_CS_BORDER_COLOR_BLUE
0x0000A474 TD_CS_BORDER_COLOR_ALPHA
0x00028000 DB_RENDER_CONTROL
0x00028004 DB_COUNT_CONTROL
0x0002800C DB_RENDER_OVERRIDE
0x00028010 DB_RENDER_OVERRIDE2
0x00028028 DB_STENCIL_CLEAR
0x0002802C DB_DEPTH_CLEAR
0x00028030 PA_SC_SCREEN_SCISSOR_TL
0x00028034 PA_SC_SCREEN_SCISSOR_BR
0x00028140 SQ_ALU_CONST_BUFFER_SIZE_PS_0
0x00028144 SQ_ALU_CONST_BUFFER_SIZE_PS_1
0x00028148 SQ_ALU_CONST_BUFFER_SIZE_PS_2
0x0002814C SQ_ALU_CONST_BUFFER_SIZE_PS_3
0x00028150 SQ_ALU_CONST_BUFFER_SIZE_PS_4
0x00028154 SQ_ALU_CONST_BUFFER_SIZE_PS_5
0x00028158 SQ_ALU_CONST_BUFFER_SIZE_PS_6
0x0002815C SQ_ALU_CONST_BUFFER_SIZE_PS_7
0x00028160 SQ_ALU_CONST_BUFFER_SIZE_PS_8
0x00028164 SQ_ALU_CONST_BUFFER_SIZE_PS_9
0x00028168 SQ_ALU_CONST_BUFFER_SIZE_PS_10
0x0002816C SQ_ALU_CONST_BUFFER_SIZE_PS_11
0x00028170 SQ_ALU_CONST_BUFFER_SIZE_PS_12
0x00028174 SQ_ALU_CONST_BUFFER_SIZE_PS_13
0x00028178 SQ_ALU_CONST_BUFFER_SIZE_PS_14
0x0002817C SQ_ALU_CONST_BUFFER_SIZE_PS_15
0x00028180 SQ_ALU_CONST_BUFFER_SIZE_VS_0
0x00028184 SQ_ALU_CONST_BUFFER_SIZE_VS_1
0x00028188 SQ_ALU_CONST_BUFFER_SIZE_VS_2
0x0002818C SQ_ALU_CONST_BUFFER_SIZE_VS_3
0x00028190 SQ_ALU_CONST_BUFFER_SIZE_VS_4
0x00028194 SQ_ALU_CONST_BUFFER_SIZE_VS_5
0x00028198 SQ_ALU_CONST_BUFFER_SIZE_VS_6
0x0002819C SQ_ALU_CONST_BUFFER_SIZE_VS_7
0x000281A0 SQ_ALU_CONST_BUFFER_SIZE_VS_8
0x000281A4 SQ_ALU_CONST_BUFFER_SIZE_VS_9
0x000281A8 SQ_ALU_CONST_BUFFER_SIZE_VS_10
0x000281AC SQ_ALU_CONST_BUFFER_SIZE_VS_11
0x000281B0 SQ_ALU_CONST_BUFFER_SIZE_VS_12
0x000281B4 SQ_ALU_CONST_BUFFER_SIZE_VS_13
0x000281B8 SQ_ALU_CONST_BUFFER_SIZE_VS_14
0x000281BC SQ_ALU_CONST_BUFFER_SIZE_VS_15
0x000281C0 SQ_ALU_CONST_BUFFER_SIZE_GS_0
0x000281C4 SQ_ALU_CONST_BUFFER_SIZE_GS_1
0x000281C8 SQ_ALU_CONST_BUFFER_SIZE_GS_2
0x000281CC SQ_ALU_CONST_BUFFER_SIZE_GS_3
0x000281D0 SQ_ALU_CONST_BUFFER_SIZE_GS_4
0x000281D4 SQ_ALU_CONST_BUFFER_SIZE_GS_5
0x000281D8 SQ_ALU_CONST_BUFFER_SIZE_GS_6
0x000281DC SQ_ALU_CONST_BUFFER_SIZE_GS_7
0x000281E0 SQ_ALU_CONST_BUFFER_SIZE_GS_8
0x000281E4 SQ_ALU_CONST_BUFFER_SIZE_GS_9
0x000281E8 SQ_ALU_CONST_BUFFER_SIZE_GS_10
0x000281EC SQ_ALU_CONST_BUFFER_SIZE_GS_11
0x000281F0 SQ_ALU_CONST_BUFFER_SIZE_GS_12
0x000281F4 SQ_ALU_CONST_BUFFER_SIZE_GS_13
0x000281F8 SQ_ALU_CONST_BUFFER_SIZE_GS_14
0x000281FC SQ_ALU_CONST_BUFFER_SIZE_GS_15
0x00028200 PA_SC_WINDOW_OFFSET
0x00028204 PA_SC_WINDOW_SCISSOR_TL
0x00028208 PA_SC_WINDOW_SCISSOR_BR
0x0002820C PA_SC_CLIPRECT_RULE
0x00028210 PA_SC_CLIPRECT_0_TL
0x00028214 PA_SC_CLIPRECT_0_BR
0x00028218 PA_SC_CLIPRECT_1_TL
0x0002821C PA_SC_CLIPRECT_1_BR
0x00028220 PA_SC_CLIPRECT_2_TL
0x00028224 PA_SC_CLIPRECT_2_BR
0x00028228 PA_SC_CLIPRECT_3_TL
0x0002822C PA_SC_CLIPRECT_3_BR
0x00028230 PA_SC_EDGERULE
0x00028234 PA_SU_HARDWARE_SCREEN_OFFSET
0x00028240 PA_SC_GENERIC_SCISSOR_TL
0x00028244 PA_SC_GENERIC_SCISSOR_BR
0x00028250 PA_SC_VPORT_SCISSOR_0_TL
0x00028254 PA_SC_VPORT_SCISSOR_0_BR
0x00028258 PA_SC_VPORT_SCISSOR_1_TL
0x0002825C PA_SC_VPORT_SCISSOR_1_BR
0x00028260 PA_SC_VPORT_SCISSOR_2_TL
0x00028264 PA_SC_VPORT_SCISSOR_2_BR
0x00028268 PA_SC_VPORT_SCISSOR_3_TL
0x0002826C PA_SC_VPORT_SCISSOR_3_BR
0x00028270 PA_SC_VPORT_SCISSOR_4_TL
0x00028274 PA_SC_VPORT_SCISSOR_4_BR
0x00028278 PA_SC_VPORT_SCISSOR_5_TL
0x0002827C PA_SC_VPORT_SCISSOR_5_BR
0x00028280 PA_SC_VPORT_SCISSOR_6_TL
0x00028284 PA_SC_VPORT_SCISSOR_6_BR
0x00028288 PA_SC_VPORT_SCISSOR_7_TL
0x0002828C PA_SC_VPORT_SCISSOR_7_BR
0x00028290 PA_SC_VPORT_SCISSOR_8_TL
0x00028294 PA_SC_VPORT_SCISSOR_8_BR
0x00028298 PA_SC_VPORT_SCISSOR_9_TL
0x0002829C PA_SC_VPORT_SCISSOR_9_BR
0x000282A0 PA_SC_VPORT_SCISSOR_10_TL
0x000282A4 PA_SC_VPORT_SCISSOR_10_BR
0x000282A8 PA_SC_VPORT_SCISSOR_11_TL
0x000282AC PA_SC_VPORT_SCISSOR_11_BR
0x000282B0 PA_SC_VPORT_SCISSOR_12_TL
0x000282B4 PA_SC_VPORT_SCISSOR_12_BR
0x000282B8 PA_SC_VPORT_SCISSOR_13_TL
0x000282BC PA_SC_VPORT_SCISSOR_13_BR
0x000282C0 PA_SC_VPORT_SCISSOR_14_TL
0x000282C4 PA_SC_VPORT_SCISSOR_14_BR
0x000282C8 PA_SC_VPORT_SCISSOR_15_TL
0x000282CC PA_SC_VPORT_SCISSOR_15_BR
0x000282D0 PA_SC_VPORT_ZMIN_0
0x000282D4 PA_SC_VPORT_ZMAX_0
0x000282D8 PA_SC_VPORT_ZMIN_1
0x000282DC PA_SC_VPORT_ZMAX_1
0x000282E0 PA_SC_VPORT_ZMIN_2
0x000282E4 PA_SC_VPORT_ZMAX_2
0x000282E8 PA_SC_VPORT_ZMIN_3
0x000282EC PA_SC_VPORT_ZMAX_3
0x000282F0 PA_SC_VPORT_ZMIN_4
0x000282F4 PA_SC_VPORT_ZMAX_4
0x000282F8 PA_SC_VPORT_ZMIN_5
0x000282FC PA_SC_VPORT_ZMAX_5
0x00028300 PA_SC_VPORT_ZMIN_6
0x00028304 PA_SC_VPORT_ZMAX_6
0x00028308 PA_SC_VPORT_ZMIN_7
0x0002830C PA_SC_VPORT_ZMAX_7
0x00028310 PA_SC_VPORT_ZMIN_8
0x00028314 PA_SC_VPORT_ZMAX_8
0x00028318 PA_SC_VPORT_ZMIN_9
0x0002831C PA_SC_VPORT_ZMAX_9
0x00028320 PA_SC_VPORT_ZMIN_10
0x00028324 PA_SC_VPORT_ZMAX_10
0x00028328 PA_SC_VPORT_ZMIN_11
0x0002832C PA_SC_VPORT_ZMAX_11
0x00028330 PA_SC_VPORT_ZMIN_12
0x00028334 PA_SC_VPORT_ZMAX_12
0x00028338 PA_SC_VPORT_ZMIN_13
0x0002833C PA_SC_VPORT_ZMAX_13
0x00028340 PA_SC_VPORT_ZMIN_14
0x00028344 PA_SC_VPORT_ZMAX_14
0x00028348 PA_SC_VPORT_ZMIN_15
0x0002834C PA_SC_VPORT_ZMAX_15
0x00028354 SX_SURFACE_SYNC
0x00028380 SQ_VTX_SEMANTIC_0
0x00028384 SQ_VTX_SEMANTIC_1
0x00028388 SQ_VTX_SEMANTIC_2
0x0002838C SQ_VTX_SEMANTIC_3
0x00028390 SQ_VTX_SEMANTIC_4
0x00028394 SQ_VTX_SEMANTIC_5
0x00028398 SQ_VTX_SEMANTIC_6
0x0002839C SQ_VTX_SEMANTIC_7
0x000283A0 SQ_VTX_SEMANTIC_8
0x000283A4 SQ_VTX_SEMANTIC_9
0x000283A8 SQ_VTX_SEMANTIC_10
0x000283AC SQ_VTX_SEMANTIC_11
0x000283B0 SQ_VTX_SEMANTIC_12
0x000283B4 SQ_VTX_SEMANTIC_13
0x000283B8 SQ_VTX_SEMANTIC_14
0x000283BC SQ_VTX_SEMANTIC_15
0x000283C0 SQ_VTX_SEMANTIC_16
0x000283C4 SQ_VTX_SEMANTIC_17
0x000283C8 SQ_VTX_SEMANTIC_18
0x000283CC SQ_VTX_SEMANTIC_19
0x000283D0 SQ_VTX_SEMANTIC_20
0x000283D4 SQ_VTX_SEMANTIC_21
0x000283D8 SQ_VTX_SEMANTIC_22
0x000283DC SQ_VTX_SEMANTIC_23
0x000283E0 SQ_VTX_SEMANTIC_24
0x000283E4 SQ_VTX_SEMANTIC_25
0x000283E8 SQ_VTX_SEMANTIC_26
0x000283EC SQ_VTX_SEMANTIC_27
0x000283F0 SQ_VTX_SEMANTIC_28
0x000283F4 SQ_VTX_SEMANTIC_29
0x000283F8 SQ_VTX_SEMANTIC_30
0x000283FC SQ_VTX_SEMANTIC_31
0x00028400 VGT_MAX_VTX_INDX
0x00028404 VGT_MIN_VTX_INDX
0x00028408 VGT_INDX_OFFSET
0x0002840C VGT_MULTI_PRIM_IB_RESET_INDX
0x00028410 SX_ALPHA_TEST_CONTROL
0x00028414 CB_BLEND_RED
0x00028418 CB_BLEND_GREEN
0x0002841C CB_BLEND_BLUE
0x00028420 CB_BLEND_ALPHA
0x00028430 DB_STENCILREFMASK
0x00028434 DB_STENCILREFMASK_BF
0x00028438 SX_ALPHA_REF
0x0002843C PA_CL_VPORT_XSCALE_0
0x00028440 PA_CL_VPORT_XOFFSET_0
0x00028444 PA_CL_VPORT_YSCALE_0
0x00028448 PA_CL_VPORT_YOFFSET_0
0x0002844C PA_CL_VPORT_ZSCALE_0
0x00028450 PA_CL_VPORT_ZOFFSET_0
0x00028454 PA_CL_VPORT_XSCALE_1
0x00028458 PA_CL_VPORT_XOFFSET_1
0x0002845C PA_CL_VPORT_YSCALE_1
0x00028460 PA_CL_VPORT_YOFFSET_1
0x00028464 PA_CL_VPORT_ZSCALE_1
0x00028468 PA_CL_VPORT_ZOFFSET_1
0x0002846C PA_CL_VPORT_XSCALE_2
0x00028470 PA_CL_VPORT_XOFFSET_2
0x00028474 PA_CL_VPORT_YSCALE_2
0x00028478 PA_CL_VPORT_YOFFSET_2
0x0002847C PA_CL_VPORT_ZSCALE_2
0x00028480 PA_CL_VPORT_ZOFFSET_2
0x00028484 PA_CL_VPORT_XSCALE_3
0x00028488 PA_CL_VPORT_XOFFSET_3
0x0002848C PA_CL_VPORT_YSCALE_3
0x00028490 PA_CL_VPORT_YOFFSET_3
0x00028494 PA_CL_VPORT_ZSCALE_3
0x00028498 PA_CL_VPORT_ZOFFSET_3
0x0002849C PA_CL_VPORT_XSCALE_4
0x000284A0 PA_CL_VPORT_XOFFSET_4
0x000284A4 PA_CL_VPORT_YSCALE_4
0x000284A8 PA_CL_VPORT_YOFFSET_4
0x000284AC PA_CL_VPORT_ZSCALE_4
0x000284B0 PA_CL_VPORT_ZOFFSET_4
0x000284B4 PA_CL_VPORT_XSCALE_5
0x000284B8 PA_CL_VPORT_XOFFSET_5
0x000284BC PA_CL_VPORT_YSCALE_5
0x000284C0 PA_CL_VPORT_YOFFSET_5
0x000284C4 PA_CL_VPORT_ZSCALE_5
0x000284C8 PA_CL_VPORT_ZOFFSET_5
0x000284CC PA_CL_VPORT_XSCALE_6
0x000284D0 PA_CL_VPORT_XOFFSET_6
0x000284D4 PA_CL_VPORT_YSCALE_6
0x000284D8 PA_CL_VPORT_YOFFSET_6
0x000284DC PA_CL_VPORT_ZSCALE_6
0x000284E0 PA_CL_VPORT_ZOFFSET_6
0x000284E4 PA_CL_VPORT_XSCALE_7
0x000284E8 PA_CL_VPORT_XOFFSET_7
0x000284EC PA_CL_VPORT_YSCALE_7
0x000284F0 PA_CL_VPORT_YOFFSET_7
0x000284F4 PA_CL_VPORT_ZSCALE_7
0x000284F8 PA_CL_VPORT_ZOFFSET_7
0x000284FC PA_CL_VPORT_XSCALE_8
0x00028500 PA_CL_VPORT_XOFFSET_8
0x00028504 PA_CL_VPORT_YSCALE_8
0x00028508 PA_CL_VPORT_YOFFSET_8
0x0002850C PA_CL_VPORT_ZSCALE_8
0x00028510 PA_CL_VPORT_ZOFFSET_8
0x00028514 PA_CL_VPORT_XSCALE_9
0x00028518 PA_CL_VPORT_XOFFSET_9
0x0002851C PA_CL_VPORT_YSCALE_9
0x00028520 PA_CL_VPORT_YOFFSET_9
0x00028524 PA_CL_VPORT_ZSCALE_9
0x00028528 PA_CL_VPORT_ZOFFSET_9
0x0002852C PA_CL_VPORT_XSCALE_10
0x00028530 PA_CL_VPORT_XOFFSET_10
0x00028534 PA_CL_VPORT_YSCALE_10
0x00028538 PA_CL_VPORT_YOFFSET_10
0x0002853C PA_CL_VPORT_ZSCALE_10
0x00028540 PA_CL_VPORT_ZOFFSET_10
0x00028544 PA_CL_VPORT_XSCALE_11
0x00028548 PA_CL_VPORT_XOFFSET_11
0x0002854C PA_CL_VPORT_YSCALE_11
0x00028550 PA_CL_VPORT_YOFFSET_11
0x00028554 PA_CL_VPORT_ZSCALE_11
0x00028558 PA_CL_VPORT_ZOFFSET_11
0x0002855C PA_CL_VPORT_XSCALE_12
0x00028560 PA_CL_VPORT_XOFFSET_12
0x00028564 PA_CL_VPORT_YSCALE_12
0x00028568 PA_CL_VPORT_YOFFSET_12
0x0002856C PA_CL_VPORT_ZSCALE_12
0x00028570 PA_CL_VPORT_ZOFFSET_12
0x00028574 PA_CL_VPORT_XSCALE_13
0x00028578 PA_CL_VPORT_XOFFSET_13
0x0002857C PA_CL_VPORT_YSCALE_13
0x00028580 PA_CL_VPORT_YOFFSET_13
0x00028584 PA_CL_VPORT_ZSCALE_13
0x00028588 PA_CL_VPORT_ZOFFSET_13
0x0002858C PA_CL_VPORT_XSCALE_14
0x00028590 PA_CL_VPORT_XOFFSET_14
0x00028594 PA_CL_VPORT_YSCALE_14
0x00028598 PA_CL_VPORT_YOFFSET_14
0x0002859C PA_CL_VPORT_ZSCALE_14
0x000285A0 PA_CL_VPORT_ZOFFSET_14
0x000285A4 PA_CL_VPORT_XSCALE_15
0x000285A8 PA_CL_VPORT_XOFFSET_15
0x000285AC PA_CL_VPORT_YSCALE_15
0x000285B0 PA_CL_VPORT_YOFFSET_15
0x000285B4 PA_CL_VPORT_ZSCALE_15
0x000285B8 PA_CL_VPORT_ZOFFSET_15
0x000285BC PA_CL_UCP_0_X
0x000285C0 PA_CL_UCP_0_Y
0x000285C4 PA_CL_UCP_0_Z
0x000285C8 PA_CL_UCP_0_W
0x000285CC PA_CL_UCP_1_X
0x000285D0 PA_CL_UCP_1_Y
0x000285D4 PA_CL_UCP_1_Z
0x000285D8 PA_CL_UCP_1_W
0x000285DC PA_CL_UCP_2_X
0x000285E0 PA_CL_UCP_2_Y
0x000285E4 PA_CL_UCP_2_Z
0x000285E8 PA_CL_UCP_2_W
0x000285EC PA_CL_UCP_3_X
0x000285F0 PA_CL_UCP_3_Y
0x000285F4 PA_CL_UCP_3_Z
0x000285F8 PA_CL_UCP_3_W
0x000285FC PA_CL_UCP_4_X
0x00028600 PA_CL_UCP_4_Y
0x00028604 PA_CL_UCP_4_Z
0x00028608 PA_CL_UCP_4_W
0x0002860C PA_CL_UCP_5_X
0x00028610 PA_CL_UCP_5_Y
0x00028614 PA_CL_UCP_5_Z
0x00028618 PA_CL_UCP_5_W
0x0002861C SPI_VS_OUT_ID_0
0x00028620 SPI_VS_OUT_ID_1
0x00028624 SPI_VS_OUT_ID_2
0x00028628 SPI_VS_OUT_ID_3
0x0002862C SPI_VS_OUT_ID_4
0x00028630 SPI_VS_OUT_ID_5
0x00028634 SPI_VS_OUT_ID_6
0x00028638 SPI_VS_OUT_ID_7
0x0002863C SPI_VS_OUT_ID_8
0x00028640 SPI_VS_OUT_ID_9
0x00028644 SPI_PS_INPUT_CNTL_0
0x00028648 SPI_PS_INPUT_CNTL_1
0x0002864C SPI_PS_INPUT_CNTL_2
0x00028650 SPI_PS_INPUT_CNTL_3
0x00028654 SPI_PS_INPUT_CNTL_4
0x00028658 SPI_PS_INPUT_CNTL_5
0x0002865C SPI_PS_INPUT_CNTL_6
0x00028660 SPI_PS_INPUT_CNTL_7
0x00028664 SPI_PS_INPUT_CNTL_8
0x00028668 SPI_PS_INPUT_CNTL_9
0x0002866C SPI_PS_INPUT_CNTL_10
0x00028670 SPI_PS_INPUT_CNTL_11
0x00028674 SPI_PS_INPUT_CNTL_12
0x00028678 SPI_PS_INPUT_CNTL_13
0x0002867C SPI_PS_INPUT_CNTL_14
0x00028680 SPI_PS_INPUT_CNTL_15
0x00028684 SPI_PS_INPUT_CNTL_16
0x00028688 SPI_PS_INPUT_CNTL_17
0x0002868C SPI_PS_INPUT_CNTL_18
0x00028690 SPI_PS_INPUT_CNTL_19
0x00028694 SPI_PS_INPUT_CNTL_20
0x00028698 SPI_PS_INPUT_CNTL_21
0x0002869C SPI_PS_INPUT_CNTL_22
0x000286A0 SPI_PS_INPUT_CNTL_23
0x000286A4 SPI_PS_INPUT_CNTL_24
0x000286A8 SPI_PS_INPUT_CNTL_25
0x000286AC SPI_PS_INPUT_CNTL_26
0x000286B0 SPI_PS_INPUT_CNTL_27
0x000286B4 SPI_PS_INPUT_CNTL_28
0x000286B8 SPI_PS_INPUT_CNTL_29
0x000286BC SPI_PS_INPUT_CNTL_30
0x000286C0 SPI_PS_INPUT_CNTL_31
0x000286C4 SPI_VS_OUT_CONFIG
0x000286C8 SPI_THREAD_GROUPING
0x000286CC SPI_PS_IN_CONTROL_0
0x000286D0 SPI_PS_IN_CONTROL_1
0x000286D4 SPI_INTERP_CONTROL_0
0x000286D8 SPI_INPUT_Z
0x000286DC SPI_FOG_CNTL
0x000286E0 SPI_BARYC_CNTL
0x000286E4 SPI_PS_IN_CONTROL_2
0x000286E8 SPI_COMPUTE_INPUT_CNTL
0x000286EC SPI_COMPUTE_NUM_THREAD_X
0x000286F0 SPI_COMPUTE_NUM_THREAD_Y
0x000286F4 SPI_COMPUTE_NUM_THREAD_Z
0x00028720 GDS_ADDR_BASE
0x00028724 GDS_ADDR_SIZE
0x00028728 GDS_ORDERED_WAVE_PER_SE
0x00028780 CB_BLEND0_CONTROL
0x00028784 CB_BLEND1_CONTROL
0x00028788 CB_BLEND2_CONTROL
0x0002878C CB_BLEND3_CONTROL
0x00028790 CB_BLEND4_CONTROL
0x00028794 CB_BLEND5_CONTROL
0x00028798 CB_BLEND6_CONTROL
0x0002879C CB_BLEND7_CONTROL
0x000287CC CS_COPY_STATE
0x000287D0 GFX_COPY_STATE
0x000287D4 PA_CL_POINT_X_RAD
0x000287D8 PA_CL_POINT_Y_RAD
0x000287DC PA_CL_POINT_SIZE
0x000287E0 PA_CL_POINT_CULL_RAD
0x00028808 CB_COLOR_CONTROL
0x0002880C DB_SHADER_CONTROL
0x00028810 PA_CL_CLIP_CNTL
0x00028814 PA_SU_SC_MODE_CNTL
0x00028818 PA_CL_VTE_CNTL
0x0002881C PA_CL_VS_OUT_CNTL
0x00028820 PA_CL_NANINF_CNTL
0x00028824 PA_SU_LINE_STIPPLE_CNTL
0x00028828 PA_SU_LINE_STIPPLE_SCALE
0x0002882C PA_SU_PRIM_FILTER_CNTL
0x00028838 SQ_DYN_GPR_RESOURCE_LIMIT_1
0x00028844 SQ_PGM_RESOURCES_PS
0x00028848 SQ_PGM_RESOURCES_2_PS
0x0002884C SQ_PGM_EXPORTS_PS
0x00028860 SQ_PGM_RESOURCES_VS
0x00028864 SQ_PGM_RESOURCES_2_VS
0x00028878 SQ_PGM_RESOURCES_GS
0x0002887C SQ_PGM_RESOURCES_2_GS
0x00028890 SQ_PGM_RESOURCES_ES
0x00028894 SQ_PGM_RESOURCES_2_ES
0x000288A8 SQ_PGM_RESOURCES_FS
0x000288BC SQ_PGM_RESOURCES_HS
0x000288C0 SQ_PGM_RESOURCES_2_HS
0x000288D4 SQ_PGM_RESOURCES_LS
0x000288D8 SQ_PGM_RESOURCES_2_LS
0x000288E8 SQ_LDS_ALLOC
0x000288EC SQ_LDS_ALLOC_PS
0x000288F0 SQ_VTX_SEMANTIC_CLEAR
0x00028A00 PA_SU_POINT_SIZE
0x00028A04 PA_SU_POINT_MINMAX
0x00028A08 PA_SU_LINE_CNTL
0x00028A0C PA_SC_LINE_STIPPLE
0x00028A10 VGT_OUTPUT_PATH_CNTL
0x00028A14 VGT_HOS_CNTL
0x00028A18 VGT_HOS_MAX_TESS_LEVEL
0x00028A1C VGT_HOS_MIN_TESS_LEVEL
0x00028A20 VGT_HOS_REUSE_DEPTH
0x00028A24 VGT_GROUP_PRIM_TYPE
0x00028A28 VGT_GROUP_FIRST_DECR
0x00028A2C VGT_GROUP_DECR
0x00028A30 VGT_GROUP_VECT_0_CNTL
0x00028A34 VGT_GROUP_VECT_1_CNTL
0x00028A38 VGT_GROUP_VECT_0_FMT_CNTL
0x00028A3C VGT_GROUP_VECT_1_FMT_CNTL
0x00028A40 VGT_GS_MODE
0x00028A48 PA_SC_MODE_CNTL_0
0x00028A4C PA_SC_MODE_CNTL_1
0x00028A50 VGT_ENHANCE
0x00028A54 VGT_GS_PER_ES
0x00028A58 VGT_ES_PER_GS
0x00028A5C VGT_GS_PER_VS
0x00028A6C VGT_GS_OUT_PRIM_TYPE
0x00028A84 VGT_PRIMITIVEID_EN
0x00028A94 VGT_MULTI_PRIM_IB_RESET_EN
0x00028AA0 VGT_INSTANCE_STEP_RATE_0
0x00028AA4 VGT_INSTANCE_STEP_RATE_1
0x00028AB4 VGT_REUSE_OFF
0x00028AB8 VGT_VTX_CNT_EN
0x00028AC0 DB_SRESULTS_COMPARE_STATE0
0x00028AC4 DB_SRESULTS_COMPARE_STATE1
0x00028AC8 DB_PRELOAD_CONTROL
0x00028AD4 VGT_STRMOUT_VTX_STRIDE_0
0x00028AE4 VGT_STRMOUT_VTX_STRIDE_1
0x00028AF4 VGT_STRMOUT_VTX_STRIDE_2
0x00028B04 VGT_STRMOUT_VTX_STRIDE_3
0x00028B28 VGT_STRMOUT_DRAW_OPAQUE_OFFSET
0x00028B2C VGT_STRMOUT_DRAW_OPAQUE_BUFFER_FILLED_SIZE
0x00028B30 VGT_STRMOUT_DRAW_OPAQUE_VERTEX_STRIDE
0x00028B38 VGT_GS_MAX_VERT_OUT
0x00028B54 VGT_SHADER_STAGES_EN
0x00028B58 VGT_LS_HS_CONFIG
0x00028B5C VGT_LS_SIZE
0x00028B60 VGT_HS_SIZE
0x00028B64 VGT_LS_HS_ALLOC
0x00028B68 VGT_HS_PATCH_CONST
0x00028B6C VGT_TF_PARAM
0x00028B70 DB_ALPHA_TO_MASK
0x00028B74 VGT_DISPATCH_INITIATOR
0x00028B78 PA_SU_POLY_OFFSET_DB_FMT_CNTL
0x00028B7C PA_SU_POLY_OFFSET_CLAMP
0x00028B80 PA_SU_POLY_OFFSET_FRONT_SCALE
0x00028B84 PA_SU_POLY_OFFSET_FRONT_OFFSET
0x00028B88 PA_SU_POLY_OFFSET_BACK_SCALE
0x00028B8C PA_SU_POLY_OFFSET_BACK_OFFSET
0x00028B74 VGT_GS_INSTANCE_CNT
0x00028C00 PA_SC_LINE_CNTL
0x00028C08 PA_SU_VTX_CNTL
0x00028C0C PA_CL_GB_VERT_CLIP_ADJ
0x00028C10 PA_CL_GB_VERT_DISC_ADJ
0x00028C14 PA_CL_GB_HORZ_CLIP_ADJ
0x00028C18 PA_CL_GB_HORZ_DISC_ADJ
0x00028C1C PA_SC_AA_SAMPLE_LOCS_0
0x00028C20 PA_SC_AA_SAMPLE_LOCS_1
0x00028C24 PA_SC_AA_SAMPLE_LOCS_2
0x00028C28 PA_SC_AA_SAMPLE_LOCS_3
0x00028C2C PA_SC_AA_SAMPLE_LOCS_4
0x00028C30 PA_SC_AA_SAMPLE_LOCS_5
0x00028C34 PA_SC_AA_SAMPLE_LOCS_6
0x00028C38 PA_SC_AA_SAMPLE_LOCS_7
0x00028C3C PA_SC_AA_MASK
0x00028C78 CB_COLOR0_DIM
0x00028CB4 CB_COLOR1_DIM
0x00028CF0 CB_COLOR2_DIM
0x00028D2C CB_COLOR3_DIM
0x00028D68 CB_COLOR4_DIM
0x00028DA4 CB_COLOR5_DIM
0x00028DE0 CB_COLOR6_DIM
0x00028E1C CB_COLOR7_DIM
0x00028E58 CB_COLOR8_DIM
0x00028E74 CB_COLOR9_DIM
0x00028E90 CB_COLOR10_DIM
0x00028EAC CB_COLOR11_DIM
0x00028C8C CB_COLOR0_CLEAR_WORD0
0x00028C90 CB_COLOR0_CLEAR_WORD1
0x00028C94 CB_COLOR0_CLEAR_WORD2
0x00028C98 CB_COLOR0_CLEAR_WORD3
0x00028CC8 CB_COLOR1_CLEAR_WORD0
0x00028CCC CB_COLOR1_CLEAR_WORD1
0x00028CD0 CB_COLOR1_CLEAR_WORD2
0x00028CD4 CB_COLOR1_CLEAR_WORD3
0x00028D04 CB_COLOR2_CLEAR_WORD0
0x00028D08 CB_COLOR2_CLEAR_WORD1
0x00028D0C CB_COLOR2_CLEAR_WORD2
0x00028D10 CB_COLOR2_CLEAR_WORD3
0x00028D40 CB_COLOR3_CLEAR_WORD0
0x00028D44 CB_COLOR3_CLEAR_WORD1
0x00028D48 CB_COLOR3_CLEAR_WORD2
0x00028D4C CB_COLOR3_CLEAR_WORD3
0x00028D7C CB_COLOR4_CLEAR_WORD0
0x00028D80 CB_COLOR4_CLEAR_WORD1
0x00028D84 CB_COLOR4_CLEAR_WORD2
0x00028D88 CB_COLOR4_CLEAR_WORD3
0x00028DB8 CB_COLOR5_CLEAR_WORD0
0x00028DBC CB_COLOR5_CLEAR_WORD1
0x00028DC0 CB_COLOR5_CLEAR_WORD2
0x00028DC4 CB_COLOR5_CLEAR_WORD3
0x00028DF4 CB_COLOR6_CLEAR_WORD0
0x00028DF8 CB_COLOR6_CLEAR_WORD1
0x00028DFC CB_COLOR6_CLEAR_WORD2
0x00028E00 CB_COLOR6_CLEAR_WORD3
0x00028E30 CB_COLOR7_CLEAR_WORD0
0x00028E34 CB_COLOR7_CLEAR_WORD1
0x00028E38 CB_COLOR7_CLEAR_WORD2
0x00028E3C CB_COLOR7_CLEAR_WORD3
0x00028F80 SQ_ALU_CONST_BUFFER_SIZE_HS_0
0x00028F84 SQ_ALU_CONST_BUFFER_SIZE_HS_1
0x00028F88 SQ_ALU_CONST_BUFFER_SIZE_HS_2
0x00028F8C SQ_ALU_CONST_BUFFER_SIZE_HS_3
0x00028F90 SQ_ALU_CONST_BUFFER_SIZE_HS_4
0x00028F94 SQ_ALU_CONST_BUFFER_SIZE_HS_5
0x00028F98 SQ_ALU_CONST_BUFFER_SIZE_HS_6
0x00028F9C SQ_ALU_CONST_BUFFER_SIZE_HS_7
0x00028FA0 SQ_ALU_CONST_BUFFER_SIZE_HS_8
0x00028FA4 SQ_ALU_CONST_BUFFER_SIZE_HS_9
0x00028FA8 SQ_ALU_CONST_BUFFER_SIZE_HS_10
0x00028FAC SQ_ALU_CONST_BUFFER_SIZE_HS_11
0x00028FB0 SQ_ALU_CONST_BUFFER_SIZE_HS_12
0x00028FB4 SQ_ALU_CONST_BUFFER_SIZE_HS_13
0x00028FB8 SQ_ALU_CONST_BUFFER_SIZE_HS_14
0x00028FBC SQ_ALU_CONST_BUFFER_SIZE_HS_15
0x00028FC0 SQ_ALU_CONST_BUFFER_SIZE_LS_0
0x00028FC4 SQ_ALU_CONST_BUFFER_SIZE_LS_1
0x00028FC8 SQ_ALU_CONST_BUFFER_SIZE_LS_2
0x00028FCC SQ_ALU_CONST_BUFFER_SIZE_LS_3
0x00028FD0 SQ_ALU_CONST_BUFFER_SIZE_LS_4
0x00028FD4 SQ_ALU_CONST_BUFFER_SIZE_LS_5
0x00028FD8 SQ_ALU_CONST_BUFFER_SIZE_LS_6
0x00028FDC SQ_ALU_CONST_BUFFER_SIZE_LS_7
0x00028FE0 SQ_ALU_CONST_BUFFER_SIZE_LS_8
0x00028FE4 SQ_ALU_CONST_BUFFER_SIZE_LS_9
0x00028FE8 SQ_ALU_CONST_BUFFER_SIZE_LS_10
0x00028FEC SQ_ALU_CONST_BUFFER_SIZE_LS_11
0x00028FF0 SQ_ALU_CONST_BUFFER_SIZE_LS_12
0x00028FF4 SQ_ALU_CONST_BUFFER_SIZE_LS_13
0x00028FF8 SQ_ALU_CONST_BUFFER_SIZE_LS_14
0x00028FFC SQ_ALU_CONST_BUFFER_SIZE_LS_15
0x0003CFF0 SQ_VTX_BASE_VTX_LOC
0x0003CFF4 SQ_VTX_START_INST_LOC
0x0003FF00 SQ_TEX_SAMPLER_CLEAR
0x0003FF04 SQ_TEX_RESOURCE_CLEAR
0x0003FF08 SQ_LOOP_BOOL_CLEAR

View File

@@ -0,0 +1,105 @@
r100 0x3294
0x1434 SRC_Y_X
0x1438 DST_Y_X
0x143C DST_HEIGHT_WIDTH
0x146C DP_GUI_MASTER_CNTL
0x1474 BRUSH_Y_X
0x1478 DP_BRUSH_BKGD_CLR
0x147C DP_BRUSH_FRGD_CLR
0x1480 BRUSH_DATA0
0x1484 BRUSH_DATA1
0x1598 DST_WIDTH_HEIGHT
0x15C0 CLR_CMP_CNTL
0x15C4 CLR_CMP_CLR_SRC
0x15C8 CLR_CMP_CLR_DST
0x15CC CLR_CMP_MSK
0x15D8 DP_SRC_FRGD_CLR
0x15DC DP_SRC_BKGD_CLR
0x1600 DST_LINE_START
0x1604 DST_LINE_END
0x1608 DST_LINE_PATCOUNT
0x16C0 DP_CNTL
0x16CC DP_WRITE_MSK
0x16D0 DP_CNTL_XDIR_YDIR_YMAJOR
0x16E8 DEFAULT_SC_BOTTOM_RIGHT
0x16EC SC_TOP_LEFT
0x16F0 SC_BOTTOM_RIGHT
0x16F4 SRC_SC_BOTTOM_RIGHT
0x1714 DSTCACHE_CTLSTAT
0x1720 WAIT_UNTIL
0x172C RBBM_GUICNTL
0x1810 FOG_3D_TABLE_START
0x1814 FOG_3D_TABLE_END
0x1a14 FOG_TABLE_INDEX
0x1a18 FOG_TABLE_DATA
0x1c14 PP_MISC
0x1c18 PP_FOG_COLOR
0x1c1c RE_SOLID_COLOR
0x1c20 RB3D_BLENDCNTL
0x1c4c SE_CNTL
0x1c50 SE_COORD_FMT
0x1c60 PP_TXCBLEND_0
0x1c64 PP_TXABLEND_0
0x1c68 PP_TFACTOR_0
0x1c78 PP_TXCBLEND_1
0x1c7c PP_TXABLEND_1
0x1c80 PP_TFACTOR_1
0x1c90 PP_TXCBLEND_2
0x1c94 PP_TXABLEND_2
0x1c98 PP_TFACTOR_2
0x1cc8 RE_STIPPLE_ADDR
0x1ccc RE_STIPPLE_DATA
0x1cd0 RE_LINE_PATTERN
0x1cd4 RE_LINE_STATE
0x1d40 PP_BORDER_COLOR0
0x1d44 PP_BORDER_COLOR1
0x1d48 PP_BORDER_COLOR2
0x1d7c RB3D_STENCILREFMASK
0x1d80 RB3D_ROPCNTL
0x1d84 RB3D_PLANEMASK
0x1d98 VAP_VPORT_XSCALE
0x1d9c VAP_VPORT_XOFFSET
0x1da0 VAP_VPORT_YSCALE
0x1da4 VAP_VPORT_YOFFSET
0x1da8 VAP_VPORT_ZSCALE
0x1dac VAP_VPORT_ZOFFSET
0x1db0 SE_ZBIAS_FACTOR
0x1db4 SE_ZBIAS_CONSTANT
0x1db8 SE_LINE_WIDTH
0x2140 SE_CNTL_STATUS
0x2200 SE_TCL_VECTOR_INDX_REG
0x2204 SE_TCL_VECTOR_DATA_REG
0x2208 SE_TCL_SCALAR_INDX_REG
0x220c SE_TCL_SCALAR_DATA_REG
0x2210 SE_TCL_MATERIAL_EMISSIVE_RED
0x2214 SE_TCL_MATERIAL_EMISSIVE_GREEN
0x2218 SE_TCL_MATERIAL_EMISSIVE_BLUE
0x221c SE_TCL_MATERIAL_EMISSIVE_ALPHA
0x2220 SE_TCL_MATERIAL_AMBIENT_RED
0x2224 SE_TCL_MATERIAL_AMBIENT_GREEN
0x2228 SE_TCL_MATERIAL_AMBIENT_BLUE
0x222c SE_TCL_MATERIAL_AMBIENT_ALPHA
0x2230 SE_TCL_MATERIAL_DIFFUSE_RED
0x2234 SE_TCL_MATERIAL_DIFFUSE_GREEN
0x2238 SE_TCL_MATERIAL_DIFFUSE_BLUE
0x223c SE_TCL_MATERIAL_DIFFUSE_ALPHA
0x2240 SE_TCL_MATERIAL_SPECULAR_RED
0x2244 SE_TCL_MATERIAL_SPECULAR_GREEN
0x2248 SE_TCL_MATERIAL_SPECULAR_BLUE
0x224c SE_TCL_MATERIAL_SPECULAR_ALPHA
0x2250 SE_TCL_SHININESS
0x2254 SE_TCL_OUTPUT_VTX_FMT
0x2258 SE_TCL_OUTPUT_VTX_SEL
0x225c SE_TCL_MATRIX_SELECT_0
0x2260 SE_TCL_MATRIX_SELECT_1
0x2264 SE_TCL_UCP_VERT_BLEND_CNTL
0x2268 SE_TCL_TEXTURE_PROC_CTL
0x226c SE_TCL_LIGHT_MODEL_CTL
0x2270 SE_TCL_PER_LIGHT_CTL_0
0x2274 SE_TCL_PER_LIGHT_CTL_1
0x2278 SE_TCL_PER_LIGHT_CTL_2
0x227c SE_TCL_PER_LIGHT_CTL_3
0x2284 SE_TCL_STATE_FLUSH
0x26c0 RE_TOP_LEFT
0x26c4 RE_MISC
0x3290 RB3D_ZPASS_DATA

View File

@@ -0,0 +1,186 @@
r200 0x3294
0x1434 SRC_Y_X
0x1438 DST_Y_X
0x143C DST_HEIGHT_WIDTH
0x146C DP_GUI_MASTER_CNTL
0x1474 BRUSH_Y_X
0x1478 DP_BRUSH_BKGD_CLR
0x147C DP_BRUSH_FRGD_CLR
0x1480 BRUSH_DATA0
0x1484 BRUSH_DATA1
0x1598 DST_WIDTH_HEIGHT
0x15C0 CLR_CMP_CNTL
0x15C4 CLR_CMP_CLR_SRC
0x15C8 CLR_CMP_CLR_DST
0x15CC CLR_CMP_MSK
0x15D8 DP_SRC_FRGD_CLR
0x15DC DP_SRC_BKGD_CLR
0x1600 DST_LINE_START
0x1604 DST_LINE_END
0x1608 DST_LINE_PATCOUNT
0x16C0 DP_CNTL
0x16CC DP_WRITE_MSK
0x16D0 DP_CNTL_XDIR_YDIR_YMAJOR
0x16E8 DEFAULT_SC_BOTTOM_RIGHT
0x16EC SC_TOP_LEFT
0x16F0 SC_BOTTOM_RIGHT
0x16F4 SRC_SC_BOTTOM_RIGHT
0x1714 DSTCACHE_CTLSTAT
0x1720 WAIT_UNTIL
0x172C RBBM_GUICNTL
0x1c14 PP_MISC
0x1c18 PP_FOG_COLOR
0x1c1c RE_SOLID_COLOR
0x1c20 RB3D_BLENDCNTL
0x1c4c SE_CNTL
0x1c50 RE_CNTL
0x1cc8 RE_STIPPLE_ADDR
0x1ccc RE_STIPPLE_DATA
0x1cd0 RE_LINE_PATTERN
0x1cd4 RE_LINE_STATE
0x1cd8 RE_SCISSOR_TL_0
0x1cdc RE_SCISSOR_BR_0
0x1ce0 RE_SCISSOR_TL_1
0x1ce4 RE_SCISSOR_BR_1
0x1ce8 RE_SCISSOR_TL_2
0x1cec RE_SCISSOR_BR_2
0x1d60 RB3D_DEPTHXY_OFFSET
0x1d7c RB3D_STENCILREFMASK
0x1d80 RB3D_ROPCNTL
0x1d84 RB3D_PLANEMASK
0x1d98 VAP_VPORT_XSCALE
0x1d9c VAP_VPORT_XOFFSET
0x1da0 VAP_VPORT_YSCALE
0x1da4 VAP_VPORT_YOFFSET
0x1da8 VAP_VPORT_ZSCALE
0x1dac VAP_VPORT_ZOFFSET
0x1db0 SE_ZBIAS_FACTOR
0x1db4 SE_ZBIAS_CONSTANT
0x1db8 SE_LINE_WIDTH
0x2080 SE_VAP_CNTL
0x2090 SE_TCL_OUTPUT_VTX_FMT_0
0x2094 SE_TCL_OUTPUT_VTX_FMT_1
0x20b0 SE_VTE_CNTL
0x2140 SE_CNTL_STATUS
0x2180 SE_VTX_STATE_CNTL
0x2200 SE_TCL_VECTOR_INDX_REG
0x2204 SE_TCL_VECTOR_DATA_REG
0x2208 SE_TCL_SCALAR_INDX_REG
0x220c SE_TCL_SCALAR_DATA_REG
0x2230 SE_TCL_MATRIX_SEL_0
0x2234 SE_TCL_MATRIX_SEL_1
0x2238 SE_TCL_MATRIX_SEL_2
0x223c SE_TCL_MATRIX_SEL_3
0x2240 SE_TCL_MATRIX_SEL_4
0x2250 SE_TCL_OUTPUT_VTX_COMP_SEL
0x2254 SE_TCL_INPUT_VTX_VECTOR_ADDR_0
0x2258 SE_TCL_INPUT_VTX_VECTOR_ADDR_1
0x225c SE_TCL_INPUT_VTX_VECTOR_ADDR_2
0x2260 SE_TCL_INPUT_VTX_VECTOR_ADDR_3
0x2268 SE_TCL_LIGHT_MODEL_CTL_0
0x226c SE_TCL_LIGHT_MODEL_CTL_1
0x2270 SE_TCL_PER_LIGHT_CTL_0
0x2274 SE_TCL_PER_LIGHT_CTL_1
0x2278 SE_TCL_PER_LIGHT_CTL_2
0x227c SE_TCL_PER_LIGHT_CTL_3
0x2284 VAP_PVS_STATE_FLUSH_REG
0x22a8 SE_TCL_TEX_PROC_CTL_2
0x22ac SE_TCL_TEX_PROC_CTL_3
0x22b0 SE_TCL_TEX_PROC_CTL_0
0x22b4 SE_TCL_TEX_PROC_CTL_1
0x22b8 SE_TCL_TEX_CYL_WRAP_CTL
0x22c0 SE_TCL_UCP_VERT_BLEND_CNTL
0x22c4 SE_TCL_POINT_SPRITE_CNTL
0x22d0 SE_PVS_CNTL
0x22d4 SE_PVS_CONST_CNTL
0x2648 RE_POINTSIZE
0x26c0 RE_TOP_LEFT
0x26c4 RE_MISC
0x26f0 RE_AUX_SCISSOR_CNTL
0x2c14 PP_BORDER_COLOR_0
0x2c34 PP_BORDER_COLOR_1
0x2c54 PP_BORDER_COLOR_2
0x2c74 PP_BORDER_COLOR_3
0x2c94 PP_BORDER_COLOR_4
0x2cb4 PP_BORDER_COLOR_5
0x2cc4 PP_CNTL_X
0x2cf8 PP_TRI_PERF
0x2cfc PP_PERF_CNTL
0x2d9c PP_TAM_DEBUG3
0x2ee0 PP_TFACTOR_0
0x2ee4 PP_TFACTOR_1
0x2ee8 PP_TFACTOR_2
0x2eec PP_TFACTOR_3
0x2ef0 PP_TFACTOR_4
0x2ef4 PP_TFACTOR_5
0x2ef8 PP_TFACTOR_6
0x2efc PP_TFACTOR_7
0x2f00 PP_TXCBLEND_0
0x2f04 PP_TXCBLEND2_0
0x2f08 PP_TXABLEND_0
0x2f0c PP_TXABLEND2_0
0x2f10 PP_TXCBLEND_1
0x2f14 PP_TXCBLEND2_1
0x2f18 PP_TXABLEND_1
0x2f1c PP_TXABLEND2_1
0x2f20 PP_TXCBLEND_2
0x2f24 PP_TXCBLEND2_2
0x2f28 PP_TXABLEND_2
0x2f2c PP_TXABLEND2_2
0x2f30 PP_TXCBLEND_3
0x2f34 PP_TXCBLEND2_3
0x2f38 PP_TXABLEND_3
0x2f3c PP_TXABLEND2_3
0x2f40 PP_TXCBLEND_4
0x2f44 PP_TXCBLEND2_4
0x2f48 PP_TXABLEND_4
0x2f4c PP_TXABLEND2_4
0x2f50 PP_TXCBLEND_5
0x2f54 PP_TXCBLEND2_5
0x2f58 PP_TXABLEND_5
0x2f5c PP_TXABLEND2_5
0x2f60 PP_TXCBLEND_6
0x2f64 PP_TXCBLEND2_6
0x2f68 PP_TXABLEND_6
0x2f6c PP_TXABLEND2_6
0x2f70 PP_TXCBLEND_7
0x2f74 PP_TXCBLEND2_7
0x2f78 PP_TXABLEND_7
0x2f7c PP_TXABLEND2_7
0x2f80 PP_TXCBLEND_8
0x2f84 PP_TXCBLEND2_8
0x2f88 PP_TXABLEND_8
0x2f8c PP_TXABLEND2_8
0x2f90 PP_TXCBLEND_9
0x2f94 PP_TXCBLEND2_9
0x2f98 PP_TXABLEND_9
0x2f9c PP_TXABLEND2_9
0x2fa0 PP_TXCBLEND_10
0x2fa4 PP_TXCBLEND2_10
0x2fa8 PP_TXABLEND_10
0x2fac PP_TXABLEND2_10
0x2fb0 PP_TXCBLEND_11
0x2fb4 PP_TXCBLEND2_11
0x2fb8 PP_TXABLEND_11
0x2fbc PP_TXABLEND2_11
0x2fc0 PP_TXCBLEND_12
0x2fc4 PP_TXCBLEND2_12
0x2fc8 PP_TXABLEND_12
0x2fcc PP_TXABLEND2_12
0x2fd0 PP_TXCBLEND_13
0x2fd4 PP_TXCBLEND2_13
0x2fd8 PP_TXABLEND_13
0x2fdc PP_TXABLEND2_13
0x2fe0 PP_TXCBLEND_14
0x2fe4 PP_TXCBLEND2_14
0x2fe8 PP_TXABLEND_14
0x2fec PP_TXABLEND2_14
0x2ff0 PP_TXCBLEND_15
0x2ff4 PP_TXCBLEND2_15
0x2ff8 PP_TXABLEND_15
0x2ffc PP_TXABLEND2_15
0x3218 RB3D_BLENCOLOR
0x321c RB3D_ABLENDCNTL
0x3220 RB3D_CBLENDCNTL
0x3290 RB3D_ZPASS_DATA

View File

@@ -0,0 +1,714 @@
r300 0x4f60
0x1434 SRC_Y_X
0x1438 DST_Y_X
0x143C DST_HEIGHT_WIDTH
0x146C DP_GUI_MASTER_CNTL
0x1474 BRUSH_Y_X
0x1478 DP_BRUSH_BKGD_CLR
0x147C DP_BRUSH_FRGD_CLR
0x1480 BRUSH_DATA0
0x1484 BRUSH_DATA1
0x1598 DST_WIDTH_HEIGHT
0x15C0 CLR_CMP_CNTL
0x15C4 CLR_CMP_CLR_SRC
0x15C8 CLR_CMP_CLR_DST
0x15CC CLR_CMP_MSK
0x15D8 DP_SRC_FRGD_CLR
0x15DC DP_SRC_BKGD_CLR
0x1600 DST_LINE_START
0x1604 DST_LINE_END
0x1608 DST_LINE_PATCOUNT
0x16C0 DP_CNTL
0x16CC DP_WRITE_MSK
0x16D0 DP_CNTL_XDIR_YDIR_YMAJOR
0x16E8 DEFAULT_SC_BOTTOM_RIGHT
0x16EC SC_TOP_LEFT
0x16F0 SC_BOTTOM_RIGHT
0x16F4 SRC_SC_BOTTOM_RIGHT
0x1714 DSTCACHE_CTLSTAT
0x1720 WAIT_UNTIL
0x172C RBBM_GUICNTL
0x1D98 VAP_VPORT_XSCALE
0x1D9C VAP_VPORT_XOFFSET
0x1DA0 VAP_VPORT_YSCALE
0x1DA4 VAP_VPORT_YOFFSET
0x1DA8 VAP_VPORT_ZSCALE
0x1DAC VAP_VPORT_ZOFFSET
0x2080 VAP_CNTL
0x2090 VAP_OUT_VTX_FMT_0
0x2094 VAP_OUT_VTX_FMT_1
0x20B0 VAP_VTE_CNTL
0x2138 VAP_VF_MIN_VTX_INDX
0x2140 VAP_CNTL_STATUS
0x2150 VAP_PROG_STREAM_CNTL_0
0x2154 VAP_PROG_STREAM_CNTL_1
0x2158 VAP_PROG_STREAM_CNTL_2
0x215C VAP_PROG_STREAM_CNTL_3
0x2160 VAP_PROG_STREAM_CNTL_4
0x2164 VAP_PROG_STREAM_CNTL_5
0x2168 VAP_PROG_STREAM_CNTL_6
0x216C VAP_PROG_STREAM_CNTL_7
0x2180 VAP_VTX_STATE_CNTL
0x2184 VAP_VSM_VTX_ASSM
0x2188 VAP_VTX_STATE_IND_REG_0
0x218C VAP_VTX_STATE_IND_REG_1
0x2190 VAP_VTX_STATE_IND_REG_2
0x2194 VAP_VTX_STATE_IND_REG_3
0x2198 VAP_VTX_STATE_IND_REG_4
0x219C VAP_VTX_STATE_IND_REG_5
0x21A0 VAP_VTX_STATE_IND_REG_6
0x21A4 VAP_VTX_STATE_IND_REG_7
0x21A8 VAP_VTX_STATE_IND_REG_8
0x21AC VAP_VTX_STATE_IND_REG_9
0x21B0 VAP_VTX_STATE_IND_REG_10
0x21B4 VAP_VTX_STATE_IND_REG_11
0x21B8 VAP_VTX_STATE_IND_REG_12
0x21BC VAP_VTX_STATE_IND_REG_13
0x21C0 VAP_VTX_STATE_IND_REG_14
0x21C4 VAP_VTX_STATE_IND_REG_15
0x21DC VAP_PSC_SGN_NORM_CNTL
0x21E0 VAP_PROG_STREAM_CNTL_EXT_0
0x21E4 VAP_PROG_STREAM_CNTL_EXT_1
0x21E8 VAP_PROG_STREAM_CNTL_EXT_2
0x21EC VAP_PROG_STREAM_CNTL_EXT_3
0x21F0 VAP_PROG_STREAM_CNTL_EXT_4
0x21F4 VAP_PROG_STREAM_CNTL_EXT_5
0x21F8 VAP_PROG_STREAM_CNTL_EXT_6
0x21FC VAP_PROG_STREAM_CNTL_EXT_7
0x2200 VAP_PVS_VECTOR_INDX_REG
0x2204 VAP_PVS_VECTOR_DATA_REG
0x2208 VAP_PVS_VECTOR_DATA_REG_128
0x221C VAP_CLIP_CNTL
0x2220 VAP_GB_VERT_CLIP_ADJ
0x2224 VAP_GB_VERT_DISC_ADJ
0x2228 VAP_GB_HORZ_CLIP_ADJ
0x222C VAP_GB_HORZ_DISC_ADJ
0x2230 VAP_PVS_FLOW_CNTL_ADDRS_0
0x2234 VAP_PVS_FLOW_CNTL_ADDRS_1
0x2238 VAP_PVS_FLOW_CNTL_ADDRS_2
0x223C VAP_PVS_FLOW_CNTL_ADDRS_3
0x2240 VAP_PVS_FLOW_CNTL_ADDRS_4
0x2244 VAP_PVS_FLOW_CNTL_ADDRS_5
0x2248 VAP_PVS_FLOW_CNTL_ADDRS_6
0x224C VAP_PVS_FLOW_CNTL_ADDRS_7
0x2250 VAP_PVS_FLOW_CNTL_ADDRS_8
0x2254 VAP_PVS_FLOW_CNTL_ADDRS_9
0x2258 VAP_PVS_FLOW_CNTL_ADDRS_10
0x225C VAP_PVS_FLOW_CNTL_ADDRS_11
0x2260 VAP_PVS_FLOW_CNTL_ADDRS_12
0x2264 VAP_PVS_FLOW_CNTL_ADDRS_13
0x2268 VAP_PVS_FLOW_CNTL_ADDRS_14
0x226C VAP_PVS_FLOW_CNTL_ADDRS_15
0x2284 VAP_PVS_STATE_FLUSH_REG
0x2288 VAP_PVS_VTX_TIMEOUT_REG
0x2290 VAP_PVS_FLOW_CNTL_LOOP_INDEX_0
0x2294 VAP_PVS_FLOW_CNTL_LOOP_INDEX_1
0x2298 VAP_PVS_FLOW_CNTL_LOOP_INDEX_2
0x229C VAP_PVS_FLOW_CNTL_LOOP_INDEX_3
0x22A0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_4
0x22A4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_5
0x22A8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_6
0x22AC VAP_PVS_FLOW_CNTL_LOOP_INDEX_7
0x22B0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_8
0x22B4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_9
0x22B8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_10
0x22BC VAP_PVS_FLOW_CNTL_LOOP_INDEX_11
0x22C0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_12
0x22C4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_13
0x22C8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_14
0x22CC VAP_PVS_FLOW_CNTL_LOOP_INDEX_15
0x22D0 VAP_PVS_CODE_CNTL_0
0x22D4 VAP_PVS_CONST_CNTL
0x22D8 VAP_PVS_CODE_CNTL_1
0x22DC VAP_PVS_FLOW_CNTL_OPC
0x342C RB2D_DSTCACHE_CTLSTAT
0x4000 GB_VAP_RASTER_VTX_FMT_0
0x4004 GB_VAP_RASTER_VTX_FMT_1
0x4008 GB_ENABLE
0x4010 GB_MSPOS0
0x4014 GB_MSPOS1
0x401C GB_SELECT
0x4020 GB_AA_CONFIG
0x4024 GB_FIFO_SIZE
0x4100 TX_INVALTAGS
0x4200 GA_POINT_S0
0x4204 GA_POINT_T0
0x4208 GA_POINT_S1
0x420C GA_POINT_T1
0x4214 GA_TRIANGLE_STIPPLE
0x421C GA_POINT_SIZE
0x4230 GA_POINT_MINMAX
0x4234 GA_LINE_CNTL
0x4238 GA_LINE_STIPPLE_CONFIG
0x4260 GA_LINE_STIPPLE_VALUE
0x4264 GA_LINE_S0
0x4268 GA_LINE_S1
0x4278 GA_COLOR_CONTROL
0x427C GA_SOLID_RG
0x4280 GA_SOLID_BA
0x4288 GA_POLY_MODE
0x428C GA_ROUND_MODE
0x4290 GA_OFFSET
0x4294 GA_FOG_SCALE
0x4298 GA_FOG_OFFSET
0x42A0 SU_TEX_WRAP
0x42A4 SU_POLY_OFFSET_FRONT_SCALE
0x42A8 SU_POLY_OFFSET_FRONT_OFFSET
0x42AC SU_POLY_OFFSET_BACK_SCALE
0x42B0 SU_POLY_OFFSET_BACK_OFFSET
0x42B4 SU_POLY_OFFSET_ENABLE
0x42B8 SU_CULL_MODE
0x42C0 SU_DEPTH_SCALE
0x42C4 SU_DEPTH_OFFSET
0x42C8 SU_REG_DEST
0x4300 RS_COUNT
0x4304 RS_INST_COUNT
0x4310 RS_IP_0
0x4314 RS_IP_1
0x4318 RS_IP_2
0x431C RS_IP_3
0x4320 RS_IP_4
0x4324 RS_IP_5
0x4328 RS_IP_6
0x432C RS_IP_7
0x4330 RS_INST_0
0x4334 RS_INST_1
0x4338 RS_INST_2
0x433C RS_INST_3
0x4340 RS_INST_4
0x4344 RS_INST_5
0x4348 RS_INST_6
0x434C RS_INST_7
0x4350 RS_INST_8
0x4354 RS_INST_9
0x4358 RS_INST_10
0x435C RS_INST_11
0x4360 RS_INST_12
0x4364 RS_INST_13
0x4368 RS_INST_14
0x436C RS_INST_15
0x43A8 SC_EDGERULE
0x43B0 SC_CLIP_0_A
0x43B4 SC_CLIP_0_B
0x43B8 SC_CLIP_1_A
0x43BC SC_CLIP_1_B
0x43C0 SC_CLIP_2_A
0x43C4 SC_CLIP_2_B
0x43C8 SC_CLIP_3_A
0x43CC SC_CLIP_3_B
0x43D0 SC_CLIP_RULE
0x43E0 SC_SCISSOR0
0x43E8 SC_SCREENDOOR
0x4440 TX_FILTER1_0
0x4444 TX_FILTER1_1
0x4448 TX_FILTER1_2
0x444C TX_FILTER1_3
0x4450 TX_FILTER1_4
0x4454 TX_FILTER1_5
0x4458 TX_FILTER1_6
0x445C TX_FILTER1_7
0x4460 TX_FILTER1_8
0x4464 TX_FILTER1_9
0x4468 TX_FILTER1_10
0x446C TX_FILTER1_11
0x4470 TX_FILTER1_12
0x4474 TX_FILTER1_13
0x4478 TX_FILTER1_14
0x447C TX_FILTER1_15
0x4580 TX_CHROMA_KEY_0
0x4584 TX_CHROMA_KEY_1
0x4588 TX_CHROMA_KEY_2
0x458C TX_CHROMA_KEY_3
0x4590 TX_CHROMA_KEY_4
0x4594 TX_CHROMA_KEY_5
0x4598 TX_CHROMA_KEY_6
0x459C TX_CHROMA_KEY_7
0x45A0 TX_CHROMA_KEY_8
0x45A4 TX_CHROMA_KEY_9
0x45A8 TX_CHROMA_KEY_10
0x45AC TX_CHROMA_KEY_11
0x45B0 TX_CHROMA_KEY_12
0x45B4 TX_CHROMA_KEY_13
0x45B8 TX_CHROMA_KEY_14
0x45BC TX_CHROMA_KEY_15
0x45C0 TX_BORDER_COLOR_0
0x45C4 TX_BORDER_COLOR_1
0x45C8 TX_BORDER_COLOR_2
0x45CC TX_BORDER_COLOR_3
0x45D0 TX_BORDER_COLOR_4
0x45D4 TX_BORDER_COLOR_5
0x45D8 TX_BORDER_COLOR_6
0x45DC TX_BORDER_COLOR_7
0x45E0 TX_BORDER_COLOR_8
0x45E4 TX_BORDER_COLOR_9
0x45E8 TX_BORDER_COLOR_10
0x45EC TX_BORDER_COLOR_11
0x45F0 TX_BORDER_COLOR_12
0x45F4 TX_BORDER_COLOR_13
0x45F8 TX_BORDER_COLOR_14
0x45FC TX_BORDER_COLOR_15
0x4600 US_CONFIG
0x4604 US_PIXSIZE
0x4608 US_CODE_OFFSET
0x460C US_RESET
0x4610 US_CODE_ADDR_0
0x4614 US_CODE_ADDR_1
0x4618 US_CODE_ADDR_2
0x461C US_CODE_ADDR_3
0x4620 US_TEX_INST_0
0x4624 US_TEX_INST_1
0x4628 US_TEX_INST_2
0x462C US_TEX_INST_3
0x4630 US_TEX_INST_4
0x4634 US_TEX_INST_5
0x4638 US_TEX_INST_6
0x463C US_TEX_INST_7
0x4640 US_TEX_INST_8
0x4644 US_TEX_INST_9
0x4648 US_TEX_INST_10
0x464C US_TEX_INST_11
0x4650 US_TEX_INST_12
0x4654 US_TEX_INST_13
0x4658 US_TEX_INST_14
0x465C US_TEX_INST_15
0x4660 US_TEX_INST_16
0x4664 US_TEX_INST_17
0x4668 US_TEX_INST_18
0x466C US_TEX_INST_19
0x4670 US_TEX_INST_20
0x4674 US_TEX_INST_21
0x4678 US_TEX_INST_22
0x467C US_TEX_INST_23
0x4680 US_TEX_INST_24
0x4684 US_TEX_INST_25
0x4688 US_TEX_INST_26
0x468C US_TEX_INST_27
0x4690 US_TEX_INST_28
0x4694 US_TEX_INST_29
0x4698 US_TEX_INST_30
0x469C US_TEX_INST_31
0x46A4 US_OUT_FMT_0
0x46A8 US_OUT_FMT_1
0x46AC US_OUT_FMT_2
0x46B0 US_OUT_FMT_3
0x46B4 US_W_FMT
0x46C0 US_ALU_RGB_ADDR_0
0x46C4 US_ALU_RGB_ADDR_1
0x46C8 US_ALU_RGB_ADDR_2
0x46CC US_ALU_RGB_ADDR_3
0x46D0 US_ALU_RGB_ADDR_4
0x46D4 US_ALU_RGB_ADDR_5
0x46D8 US_ALU_RGB_ADDR_6
0x46DC US_ALU_RGB_ADDR_7
0x46E0 US_ALU_RGB_ADDR_8
0x46E4 US_ALU_RGB_ADDR_9
0x46E8 US_ALU_RGB_ADDR_10
0x46EC US_ALU_RGB_ADDR_11
0x46F0 US_ALU_RGB_ADDR_12
0x46F4 US_ALU_RGB_ADDR_13
0x46F8 US_ALU_RGB_ADDR_14
0x46FC US_ALU_RGB_ADDR_15
0x4700 US_ALU_RGB_ADDR_16
0x4704 US_ALU_RGB_ADDR_17
0x4708 US_ALU_RGB_ADDR_18
0x470C US_ALU_RGB_ADDR_19
0x4710 US_ALU_RGB_ADDR_20
0x4714 US_ALU_RGB_ADDR_21
0x4718 US_ALU_RGB_ADDR_22
0x471C US_ALU_RGB_ADDR_23
0x4720 US_ALU_RGB_ADDR_24
0x4724 US_ALU_RGB_ADDR_25
0x4728 US_ALU_RGB_ADDR_26
0x472C US_ALU_RGB_ADDR_27
0x4730 US_ALU_RGB_ADDR_28
0x4734 US_ALU_RGB_ADDR_29
0x4738 US_ALU_RGB_ADDR_30
0x473C US_ALU_RGB_ADDR_31
0x4740 US_ALU_RGB_ADDR_32
0x4744 US_ALU_RGB_ADDR_33
0x4748 US_ALU_RGB_ADDR_34
0x474C US_ALU_RGB_ADDR_35
0x4750 US_ALU_RGB_ADDR_36
0x4754 US_ALU_RGB_ADDR_37
0x4758 US_ALU_RGB_ADDR_38
0x475C US_ALU_RGB_ADDR_39
0x4760 US_ALU_RGB_ADDR_40
0x4764 US_ALU_RGB_ADDR_41
0x4768 US_ALU_RGB_ADDR_42
0x476C US_ALU_RGB_ADDR_43
0x4770 US_ALU_RGB_ADDR_44
0x4774 US_ALU_RGB_ADDR_45
0x4778 US_ALU_RGB_ADDR_46
0x477C US_ALU_RGB_ADDR_47
0x4780 US_ALU_RGB_ADDR_48
0x4784 US_ALU_RGB_ADDR_49
0x4788 US_ALU_RGB_ADDR_50
0x478C US_ALU_RGB_ADDR_51
0x4790 US_ALU_RGB_ADDR_52
0x4794 US_ALU_RGB_ADDR_53
0x4798 US_ALU_RGB_ADDR_54
0x479C US_ALU_RGB_ADDR_55
0x47A0 US_ALU_RGB_ADDR_56
0x47A4 US_ALU_RGB_ADDR_57
0x47A8 US_ALU_RGB_ADDR_58
0x47AC US_ALU_RGB_ADDR_59
0x47B0 US_ALU_RGB_ADDR_60
0x47B4 US_ALU_RGB_ADDR_61
0x47B8 US_ALU_RGB_ADDR_62
0x47BC US_ALU_RGB_ADDR_63
0x47C0 US_ALU_ALPHA_ADDR_0
0x47C4 US_ALU_ALPHA_ADDR_1
0x47C8 US_ALU_ALPHA_ADDR_2
0x47CC US_ALU_ALPHA_ADDR_3
0x47D0 US_ALU_ALPHA_ADDR_4
0x47D4 US_ALU_ALPHA_ADDR_5
0x47D8 US_ALU_ALPHA_ADDR_6
0x47DC US_ALU_ALPHA_ADDR_7
0x47E0 US_ALU_ALPHA_ADDR_8
0x47E4 US_ALU_ALPHA_ADDR_9
0x47E8 US_ALU_ALPHA_ADDR_10
0x47EC US_ALU_ALPHA_ADDR_11
0x47F0 US_ALU_ALPHA_ADDR_12
0x47F4 US_ALU_ALPHA_ADDR_13
0x47F8 US_ALU_ALPHA_ADDR_14
0x47FC US_ALU_ALPHA_ADDR_15
0x4800 US_ALU_ALPHA_ADDR_16
0x4804 US_ALU_ALPHA_ADDR_17
0x4808 US_ALU_ALPHA_ADDR_18
0x480C US_ALU_ALPHA_ADDR_19
0x4810 US_ALU_ALPHA_ADDR_20
0x4814 US_ALU_ALPHA_ADDR_21
0x4818 US_ALU_ALPHA_ADDR_22
0x481C US_ALU_ALPHA_ADDR_23
0x4820 US_ALU_ALPHA_ADDR_24
0x4824 US_ALU_ALPHA_ADDR_25
0x4828 US_ALU_ALPHA_ADDR_26
0x482C US_ALU_ALPHA_ADDR_27
0x4830 US_ALU_ALPHA_ADDR_28
0x4834 US_ALU_ALPHA_ADDR_29
0x4838 US_ALU_ALPHA_ADDR_30
0x483C US_ALU_ALPHA_ADDR_31
0x4840 US_ALU_ALPHA_ADDR_32
0x4844 US_ALU_ALPHA_ADDR_33
0x4848 US_ALU_ALPHA_ADDR_34
0x484C US_ALU_ALPHA_ADDR_35
0x4850 US_ALU_ALPHA_ADDR_36
0x4854 US_ALU_ALPHA_ADDR_37
0x4858 US_ALU_ALPHA_ADDR_38
0x485C US_ALU_ALPHA_ADDR_39
0x4860 US_ALU_ALPHA_ADDR_40
0x4864 US_ALU_ALPHA_ADDR_41
0x4868 US_ALU_ALPHA_ADDR_42
0x486C US_ALU_ALPHA_ADDR_43
0x4870 US_ALU_ALPHA_ADDR_44
0x4874 US_ALU_ALPHA_ADDR_45
0x4878 US_ALU_ALPHA_ADDR_46
0x487C US_ALU_ALPHA_ADDR_47
0x4880 US_ALU_ALPHA_ADDR_48
0x4884 US_ALU_ALPHA_ADDR_49
0x4888 US_ALU_ALPHA_ADDR_50
0x488C US_ALU_ALPHA_ADDR_51
0x4890 US_ALU_ALPHA_ADDR_52
0x4894 US_ALU_ALPHA_ADDR_53
0x4898 US_ALU_ALPHA_ADDR_54
0x489C US_ALU_ALPHA_ADDR_55
0x48A0 US_ALU_ALPHA_ADDR_56
0x48A4 US_ALU_ALPHA_ADDR_57
0x48A8 US_ALU_ALPHA_ADDR_58
0x48AC US_ALU_ALPHA_ADDR_59
0x48B0 US_ALU_ALPHA_ADDR_60
0x48B4 US_ALU_ALPHA_ADDR_61
0x48B8 US_ALU_ALPHA_ADDR_62
0x48BC US_ALU_ALPHA_ADDR_63
0x48C0 US_ALU_RGB_INST_0
0x48C4 US_ALU_RGB_INST_1
0x48C8 US_ALU_RGB_INST_2
0x48CC US_ALU_RGB_INST_3
0x48D0 US_ALU_RGB_INST_4
0x48D4 US_ALU_RGB_INST_5
0x48D8 US_ALU_RGB_INST_6
0x48DC US_ALU_RGB_INST_7
0x48E0 US_ALU_RGB_INST_8
0x48E4 US_ALU_RGB_INST_9
0x48E8 US_ALU_RGB_INST_10
0x48EC US_ALU_RGB_INST_11
0x48F0 US_ALU_RGB_INST_12
0x48F4 US_ALU_RGB_INST_13
0x48F8 US_ALU_RGB_INST_14
0x48FC US_ALU_RGB_INST_15
0x4900 US_ALU_RGB_INST_16
0x4904 US_ALU_RGB_INST_17
0x4908 US_ALU_RGB_INST_18
0x490C US_ALU_RGB_INST_19
0x4910 US_ALU_RGB_INST_20
0x4914 US_ALU_RGB_INST_21
0x4918 US_ALU_RGB_INST_22
0x491C US_ALU_RGB_INST_23
0x4920 US_ALU_RGB_INST_24
0x4924 US_ALU_RGB_INST_25
0x4928 US_ALU_RGB_INST_26
0x492C US_ALU_RGB_INST_27
0x4930 US_ALU_RGB_INST_28
0x4934 US_ALU_RGB_INST_29
0x4938 US_ALU_RGB_INST_30
0x493C US_ALU_RGB_INST_31
0x4940 US_ALU_RGB_INST_32
0x4944 US_ALU_RGB_INST_33
0x4948 US_ALU_RGB_INST_34
0x494C US_ALU_RGB_INST_35
0x4950 US_ALU_RGB_INST_36
0x4954 US_ALU_RGB_INST_37
0x4958 US_ALU_RGB_INST_38
0x495C US_ALU_RGB_INST_39
0x4960 US_ALU_RGB_INST_40
0x4964 US_ALU_RGB_INST_41
0x4968 US_ALU_RGB_INST_42
0x496C US_ALU_RGB_INST_43
0x4970 US_ALU_RGB_INST_44
0x4974 US_ALU_RGB_INST_45
0x4978 US_ALU_RGB_INST_46
0x497C US_ALU_RGB_INST_47
0x4980 US_ALU_RGB_INST_48
0x4984 US_ALU_RGB_INST_49
0x4988 US_ALU_RGB_INST_50
0x498C US_ALU_RGB_INST_51
0x4990 US_ALU_RGB_INST_52
0x4994 US_ALU_RGB_INST_53
0x4998 US_ALU_RGB_INST_54
0x499C US_ALU_RGB_INST_55
0x49A0 US_ALU_RGB_INST_56
0x49A4 US_ALU_RGB_INST_57
0x49A8 US_ALU_RGB_INST_58
0x49AC US_ALU_RGB_INST_59
0x49B0 US_ALU_RGB_INST_60
0x49B4 US_ALU_RGB_INST_61
0x49B8 US_ALU_RGB_INST_62
0x49BC US_ALU_RGB_INST_63
0x49C0 US_ALU_ALPHA_INST_0
0x49C4 US_ALU_ALPHA_INST_1
0x49C8 US_ALU_ALPHA_INST_2
0x49CC US_ALU_ALPHA_INST_3
0x49D0 US_ALU_ALPHA_INST_4
0x49D4 US_ALU_ALPHA_INST_5
0x49D8 US_ALU_ALPHA_INST_6
0x49DC US_ALU_ALPHA_INST_7
0x49E0 US_ALU_ALPHA_INST_8
0x49E4 US_ALU_ALPHA_INST_9
0x49E8 US_ALU_ALPHA_INST_10
0x49EC US_ALU_ALPHA_INST_11
0x49F0 US_ALU_ALPHA_INST_12
0x49F4 US_ALU_ALPHA_INST_13
0x49F8 US_ALU_ALPHA_INST_14
0x49FC US_ALU_ALPHA_INST_15
0x4A00 US_ALU_ALPHA_INST_16
0x4A04 US_ALU_ALPHA_INST_17
0x4A08 US_ALU_ALPHA_INST_18
0x4A0C US_ALU_ALPHA_INST_19
0x4A10 US_ALU_ALPHA_INST_20
0x4A14 US_ALU_ALPHA_INST_21
0x4A18 US_ALU_ALPHA_INST_22
0x4A1C US_ALU_ALPHA_INST_23
0x4A20 US_ALU_ALPHA_INST_24
0x4A24 US_ALU_ALPHA_INST_25
0x4A28 US_ALU_ALPHA_INST_26
0x4A2C US_ALU_ALPHA_INST_27
0x4A30 US_ALU_ALPHA_INST_28
0x4A34 US_ALU_ALPHA_INST_29
0x4A38 US_ALU_ALPHA_INST_30
0x4A3C US_ALU_ALPHA_INST_31
0x4A40 US_ALU_ALPHA_INST_32
0x4A44 US_ALU_ALPHA_INST_33
0x4A48 US_ALU_ALPHA_INST_34
0x4A4C US_ALU_ALPHA_INST_35
0x4A50 US_ALU_ALPHA_INST_36
0x4A54 US_ALU_ALPHA_INST_37
0x4A58 US_ALU_ALPHA_INST_38
0x4A5C US_ALU_ALPHA_INST_39
0x4A60 US_ALU_ALPHA_INST_40
0x4A64 US_ALU_ALPHA_INST_41
0x4A68 US_ALU_ALPHA_INST_42
0x4A6C US_ALU_ALPHA_INST_43
0x4A70 US_ALU_ALPHA_INST_44
0x4A74 US_ALU_ALPHA_INST_45
0x4A78 US_ALU_ALPHA_INST_46
0x4A7C US_ALU_ALPHA_INST_47
0x4A80 US_ALU_ALPHA_INST_48
0x4A84 US_ALU_ALPHA_INST_49
0x4A88 US_ALU_ALPHA_INST_50
0x4A8C US_ALU_ALPHA_INST_51
0x4A90 US_ALU_ALPHA_INST_52
0x4A94 US_ALU_ALPHA_INST_53
0x4A98 US_ALU_ALPHA_INST_54
0x4A9C US_ALU_ALPHA_INST_55
0x4AA0 US_ALU_ALPHA_INST_56
0x4AA4 US_ALU_ALPHA_INST_57
0x4AA8 US_ALU_ALPHA_INST_58
0x4AAC US_ALU_ALPHA_INST_59
0x4AB0 US_ALU_ALPHA_INST_60
0x4AB4 US_ALU_ALPHA_INST_61
0x4AB8 US_ALU_ALPHA_INST_62
0x4ABC US_ALU_ALPHA_INST_63
0x4BC0 FG_FOG_BLEND
0x4BC4 FG_FOG_FACTOR
0x4BC8 FG_FOG_COLOR_R
0x4BCC FG_FOG_COLOR_G
0x4BD0 FG_FOG_COLOR_B
0x4BD4 FG_ALPHA_FUNC
0x4BD8 FG_DEPTH_SRC
0x4C00 US_ALU_CONST_R_0
0x4C04 US_ALU_CONST_G_0
0x4C08 US_ALU_CONST_B_0
0x4C0C US_ALU_CONST_A_0
0x4C10 US_ALU_CONST_R_1
0x4C14 US_ALU_CONST_G_1
0x4C18 US_ALU_CONST_B_1
0x4C1C US_ALU_CONST_A_1
0x4C20 US_ALU_CONST_R_2
0x4C24 US_ALU_CONST_G_2
0x4C28 US_ALU_CONST_B_2
0x4C2C US_ALU_CONST_A_2
0x4C30 US_ALU_CONST_R_3
0x4C34 US_ALU_CONST_G_3
0x4C38 US_ALU_CONST_B_3
0x4C3C US_ALU_CONST_A_3
0x4C40 US_ALU_CONST_R_4
0x4C44 US_ALU_CONST_G_4
0x4C48 US_ALU_CONST_B_4
0x4C4C US_ALU_CONST_A_4
0x4C50 US_ALU_CONST_R_5
0x4C54 US_ALU_CONST_G_5
0x4C58 US_ALU_CONST_B_5
0x4C5C US_ALU_CONST_A_5
0x4C60 US_ALU_CONST_R_6
0x4C64 US_ALU_CONST_G_6
0x4C68 US_ALU_CONST_B_6
0x4C6C US_ALU_CONST_A_6
0x4C70 US_ALU_CONST_R_7
0x4C74 US_ALU_CONST_G_7
0x4C78 US_ALU_CONST_B_7
0x4C7C US_ALU_CONST_A_7
0x4C80 US_ALU_CONST_R_8
0x4C84 US_ALU_CONST_G_8
0x4C88 US_ALU_CONST_B_8
0x4C8C US_ALU_CONST_A_8
0x4C90 US_ALU_CONST_R_9
0x4C94 US_ALU_CONST_G_9
0x4C98 US_ALU_CONST_B_9
0x4C9C US_ALU_CONST_A_9
0x4CA0 US_ALU_CONST_R_10
0x4CA4 US_ALU_CONST_G_10
0x4CA8 US_ALU_CONST_B_10
0x4CAC US_ALU_CONST_A_10
0x4CB0 US_ALU_CONST_R_11
0x4CB4 US_ALU_CONST_G_11
0x4CB8 US_ALU_CONST_B_11
0x4CBC US_ALU_CONST_A_11
0x4CC0 US_ALU_CONST_R_12
0x4CC4 US_ALU_CONST_G_12
0x4CC8 US_ALU_CONST_B_12
0x4CCC US_ALU_CONST_A_12
0x4CD0 US_ALU_CONST_R_13
0x4CD4 US_ALU_CONST_G_13
0x4CD8 US_ALU_CONST_B_13
0x4CDC US_ALU_CONST_A_13
0x4CE0 US_ALU_CONST_R_14
0x4CE4 US_ALU_CONST_G_14
0x4CE8 US_ALU_CONST_B_14
0x4CEC US_ALU_CONST_A_14
0x4CF0 US_ALU_CONST_R_15
0x4CF4 US_ALU_CONST_G_15
0x4CF8 US_ALU_CONST_B_15
0x4CFC US_ALU_CONST_A_15
0x4D00 US_ALU_CONST_R_16
0x4D04 US_ALU_CONST_G_16
0x4D08 US_ALU_CONST_B_16
0x4D0C US_ALU_CONST_A_16
0x4D10 US_ALU_CONST_R_17
0x4D14 US_ALU_CONST_G_17
0x4D18 US_ALU_CONST_B_17
0x4D1C US_ALU_CONST_A_17
0x4D20 US_ALU_CONST_R_18
0x4D24 US_ALU_CONST_G_18
0x4D28 US_ALU_CONST_B_18
0x4D2C US_ALU_CONST_A_18
0x4D30 US_ALU_CONST_R_19
0x4D34 US_ALU_CONST_G_19
0x4D38 US_ALU_CONST_B_19
0x4D3C US_ALU_CONST_A_19
0x4D40 US_ALU_CONST_R_20
0x4D44 US_ALU_CONST_G_20
0x4D48 US_ALU_CONST_B_20
0x4D4C US_ALU_CONST_A_20
0x4D50 US_ALU_CONST_R_21
0x4D54 US_ALU_CONST_G_21
0x4D58 US_ALU_CONST_B_21
0x4D5C US_ALU_CONST_A_21
0x4D60 US_ALU_CONST_R_22
0x4D64 US_ALU_CONST_G_22
0x4D68 US_ALU_CONST_B_22
0x4D6C US_ALU_CONST_A_22
0x4D70 US_ALU_CONST_R_23
0x4D74 US_ALU_CONST_G_23
0x4D78 US_ALU_CONST_B_23
0x4D7C US_ALU_CONST_A_23
0x4D80 US_ALU_CONST_R_24
0x4D84 US_ALU_CONST_G_24
0x4D88 US_ALU_CONST_B_24
0x4D8C US_ALU_CONST_A_24
0x4D90 US_ALU_CONST_R_25
0x4D94 US_ALU_CONST_G_25
0x4D98 US_ALU_CONST_B_25
0x4D9C US_ALU_CONST_A_25
0x4DA0 US_ALU_CONST_R_26
0x4DA4 US_ALU_CONST_G_26
0x4DA8 US_ALU_CONST_B_26
0x4DAC US_ALU_CONST_A_26
0x4DB0 US_ALU_CONST_R_27
0x4DB4 US_ALU_CONST_G_27
0x4DB8 US_ALU_CONST_B_27
0x4DBC US_ALU_CONST_A_27
0x4DC0 US_ALU_CONST_R_28
0x4DC4 US_ALU_CONST_G_28
0x4DC8 US_ALU_CONST_B_28
0x4DCC US_ALU_CONST_A_28
0x4DD0 US_ALU_CONST_R_29
0x4DD4 US_ALU_CONST_G_29
0x4DD8 US_ALU_CONST_B_29
0x4DDC US_ALU_CONST_A_29
0x4DE0 US_ALU_CONST_R_30
0x4DE4 US_ALU_CONST_G_30
0x4DE8 US_ALU_CONST_B_30
0x4DEC US_ALU_CONST_A_30
0x4DF0 US_ALU_CONST_R_31
0x4DF4 US_ALU_CONST_G_31
0x4DF8 US_ALU_CONST_B_31
0x4DFC US_ALU_CONST_A_31
0x4E08 RB3D_ABLENDCNTL_R3
0x4E10 RB3D_CONSTANT_COLOR
0x4E14 RB3D_COLOR_CLEAR_VALUE
0x4E18 RB3D_ROPCNTL_R3
0x4E1C RB3D_CLRCMP_FLIPE_R3
0x4E20 RB3D_CLRCMP_CLR_R3
0x4E24 RB3D_CLRCMP_MSK_R3
0x4E48 RB3D_DEBUG_CTL
0x4E4C RB3D_DSTCACHE_CTLSTAT_R3
0x4E50 RB3D_DITHER_CTL
0x4E54 RB3D_CMASK_OFFSET0
0x4E58 RB3D_CMASK_OFFSET1
0x4E5C RB3D_CMASK_OFFSET2
0x4E60 RB3D_CMASK_OFFSET3
0x4E64 RB3D_CMASK_PITCH0
0x4E68 RB3D_CMASK_PITCH1
0x4E6C RB3D_CMASK_PITCH2
0x4E70 RB3D_CMASK_PITCH3
0x4E74 RB3D_CMASK_WRINDEX
0x4E78 RB3D_CMASK_DWORD
0x4E7C RB3D_CMASK_RDINDEX
0x4EA0 RB3D_DISCARD_SRC_PIXEL_LTE_THRESHOLD
0x4EA4 RB3D_DISCARD_SRC_PIXEL_GTE_THRESHOLD
0x4F04 ZB_ZSTENCILCNTL
0x4F08 ZB_STENCILREFMASK
0x4F14 ZB_ZTOP
0x4F18 ZB_ZCACHE_CTLSTAT
0x4F28 ZB_DEPTHCLEARVALUE
0x4F58 ZB_ZPASS_DATA

View File

@@ -0,0 +1,780 @@
r420 0x4f60
0x1434 SRC_Y_X
0x1438 DST_Y_X
0x143C DST_HEIGHT_WIDTH
0x146C DP_GUI_MASTER_CNTL
0x1474 BRUSH_Y_X
0x1478 DP_BRUSH_BKGD_CLR
0x147C DP_BRUSH_FRGD_CLR
0x1480 BRUSH_DATA0
0x1484 BRUSH_DATA1
0x1598 DST_WIDTH_HEIGHT
0x15C0 CLR_CMP_CNTL
0x15C4 CLR_CMP_CLR_SRC
0x15C8 CLR_CMP_CLR_DST
0x15CC CLR_CMP_MSK
0x15D8 DP_SRC_FRGD_CLR
0x15DC DP_SRC_BKGD_CLR
0x1600 DST_LINE_START
0x1604 DST_LINE_END
0x1608 DST_LINE_PATCOUNT
0x16C0 DP_CNTL
0x16CC DP_WRITE_MSK
0x16D0 DP_CNTL_XDIR_YDIR_YMAJOR
0x16E8 DEFAULT_SC_BOTTOM_RIGHT
0x16EC SC_TOP_LEFT
0x16F0 SC_BOTTOM_RIGHT
0x16F4 SRC_SC_BOTTOM_RIGHT
0x1714 DSTCACHE_CTLSTAT
0x1720 WAIT_UNTIL
0x172C RBBM_GUICNTL
0x1D98 VAP_VPORT_XSCALE
0x1D9C VAP_VPORT_XOFFSET
0x1DA0 VAP_VPORT_YSCALE
0x1DA4 VAP_VPORT_YOFFSET
0x1DA8 VAP_VPORT_ZSCALE
0x1DAC VAP_VPORT_ZOFFSET
0x2080 VAP_CNTL
0x2090 VAP_OUT_VTX_FMT_0
0x2094 VAP_OUT_VTX_FMT_1
0x20B0 VAP_VTE_CNTL
0x2138 VAP_VF_MIN_VTX_INDX
0x2140 VAP_CNTL_STATUS
0x2150 VAP_PROG_STREAM_CNTL_0
0x2154 VAP_PROG_STREAM_CNTL_1
0x2158 VAP_PROG_STREAM_CNTL_2
0x215C VAP_PROG_STREAM_CNTL_3
0x2160 VAP_PROG_STREAM_CNTL_4
0x2164 VAP_PROG_STREAM_CNTL_5
0x2168 VAP_PROG_STREAM_CNTL_6
0x216C VAP_PROG_STREAM_CNTL_7
0x2180 VAP_VTX_STATE_CNTL
0x2184 VAP_VSM_VTX_ASSM
0x2188 VAP_VTX_STATE_IND_REG_0
0x218C VAP_VTX_STATE_IND_REG_1
0x2190 VAP_VTX_STATE_IND_REG_2
0x2194 VAP_VTX_STATE_IND_REG_3
0x2198 VAP_VTX_STATE_IND_REG_4
0x219C VAP_VTX_STATE_IND_REG_5
0x21A0 VAP_VTX_STATE_IND_REG_6
0x21A4 VAP_VTX_STATE_IND_REG_7
0x21A8 VAP_VTX_STATE_IND_REG_8
0x21AC VAP_VTX_STATE_IND_REG_9
0x21B0 VAP_VTX_STATE_IND_REG_10
0x21B4 VAP_VTX_STATE_IND_REG_11
0x21B8 VAP_VTX_STATE_IND_REG_12
0x21BC VAP_VTX_STATE_IND_REG_13
0x21C0 VAP_VTX_STATE_IND_REG_14
0x21C4 VAP_VTX_STATE_IND_REG_15
0x21DC VAP_PSC_SGN_NORM_CNTL
0x21E0 VAP_PROG_STREAM_CNTL_EXT_0
0x21E4 VAP_PROG_STREAM_CNTL_EXT_1
0x21E8 VAP_PROG_STREAM_CNTL_EXT_2
0x21EC VAP_PROG_STREAM_CNTL_EXT_3
0x21F0 VAP_PROG_STREAM_CNTL_EXT_4
0x21F4 VAP_PROG_STREAM_CNTL_EXT_5
0x21F8 VAP_PROG_STREAM_CNTL_EXT_6
0x21FC VAP_PROG_STREAM_CNTL_EXT_7
0x2200 VAP_PVS_VECTOR_INDX_REG
0x2204 VAP_PVS_VECTOR_DATA_REG
0x2208 VAP_PVS_VECTOR_DATA_REG_128
0x221C VAP_CLIP_CNTL
0x2220 VAP_GB_VERT_CLIP_ADJ
0x2224 VAP_GB_VERT_DISC_ADJ
0x2228 VAP_GB_HORZ_CLIP_ADJ
0x222C VAP_GB_HORZ_DISC_ADJ
0x2230 VAP_PVS_FLOW_CNTL_ADDRS_0
0x2234 VAP_PVS_FLOW_CNTL_ADDRS_1
0x2238 VAP_PVS_FLOW_CNTL_ADDRS_2
0x223C VAP_PVS_FLOW_CNTL_ADDRS_3
0x2240 VAP_PVS_FLOW_CNTL_ADDRS_4
0x2244 VAP_PVS_FLOW_CNTL_ADDRS_5
0x2248 VAP_PVS_FLOW_CNTL_ADDRS_6
0x224C VAP_PVS_FLOW_CNTL_ADDRS_7
0x2250 VAP_PVS_FLOW_CNTL_ADDRS_8
0x2254 VAP_PVS_FLOW_CNTL_ADDRS_9
0x2258 VAP_PVS_FLOW_CNTL_ADDRS_10
0x225C VAP_PVS_FLOW_CNTL_ADDRS_11
0x2260 VAP_PVS_FLOW_CNTL_ADDRS_12
0x2264 VAP_PVS_FLOW_CNTL_ADDRS_13
0x2268 VAP_PVS_FLOW_CNTL_ADDRS_14
0x226C VAP_PVS_FLOW_CNTL_ADDRS_15
0x2284 VAP_PVS_STATE_FLUSH_REG
0x2288 VAP_PVS_VTX_TIMEOUT_REG
0x2290 VAP_PVS_FLOW_CNTL_LOOP_INDEX_0
0x2294 VAP_PVS_FLOW_CNTL_LOOP_INDEX_1
0x2298 VAP_PVS_FLOW_CNTL_LOOP_INDEX_2
0x229C VAP_PVS_FLOW_CNTL_LOOP_INDEX_3
0x22A0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_4
0x22A4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_5
0x22A8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_6
0x22AC VAP_PVS_FLOW_CNTL_LOOP_INDEX_7
0x22B0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_8
0x22B4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_9
0x22B8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_10
0x22BC VAP_PVS_FLOW_CNTL_LOOP_INDEX_11
0x22C0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_12
0x22C4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_13
0x22C8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_14
0x22CC VAP_PVS_FLOW_CNTL_LOOP_INDEX_15
0x22D0 VAP_PVS_CODE_CNTL_0
0x22D4 VAP_PVS_CONST_CNTL
0x22D8 VAP_PVS_CODE_CNTL_1
0x22DC VAP_PVS_FLOW_CNTL_OPC
0x342C RB2D_DSTCACHE_CTLSTAT
0x4000 GB_VAP_RASTER_VTX_FMT_0
0x4004 GB_VAP_RASTER_VTX_FMT_1
0x4008 GB_ENABLE
0x4010 GB_MSPOS0
0x4014 GB_MSPOS1
0x401C GB_SELECT
0x4020 GB_AA_CONFIG
0x4024 GB_FIFO_SIZE
0x4100 TX_INVALTAGS
0x4200 GA_POINT_S0
0x4204 GA_POINT_T0
0x4208 GA_POINT_S1
0x420C GA_POINT_T1
0x4214 GA_TRIANGLE_STIPPLE
0x421C GA_POINT_SIZE
0x4230 GA_POINT_MINMAX
0x4234 GA_LINE_CNTL
0x4238 GA_LINE_STIPPLE_CONFIG
0x4260 GA_LINE_STIPPLE_VALUE
0x4264 GA_LINE_S0
0x4268 GA_LINE_S1
0x4278 GA_COLOR_CONTROL
0x427C GA_SOLID_RG
0x4280 GA_SOLID_BA
0x4288 GA_POLY_MODE
0x428C GA_ROUND_MODE
0x4290 GA_OFFSET
0x4294 GA_FOG_SCALE
0x4298 GA_FOG_OFFSET
0x42A0 SU_TEX_WRAP
0x42A4 SU_POLY_OFFSET_FRONT_SCALE
0x42A8 SU_POLY_OFFSET_FRONT_OFFSET
0x42AC SU_POLY_OFFSET_BACK_SCALE
0x42B0 SU_POLY_OFFSET_BACK_OFFSET
0x42B4 SU_POLY_OFFSET_ENABLE
0x42B8 SU_CULL_MODE
0x42C0 SU_DEPTH_SCALE
0x42C4 SU_DEPTH_OFFSET
0x42C8 SU_REG_DEST
0x4300 RS_COUNT
0x4304 RS_INST_COUNT
0x4310 RS_IP_0
0x4314 RS_IP_1
0x4318 RS_IP_2
0x431C RS_IP_3
0x4320 RS_IP_4
0x4324 RS_IP_5
0x4328 RS_IP_6
0x432C RS_IP_7
0x4330 RS_INST_0
0x4334 RS_INST_1
0x4338 RS_INST_2
0x433C RS_INST_3
0x4340 RS_INST_4
0x4344 RS_INST_5
0x4348 RS_INST_6
0x434C RS_INST_7
0x4350 RS_INST_8
0x4354 RS_INST_9
0x4358 RS_INST_10
0x435C RS_INST_11
0x4360 RS_INST_12
0x4364 RS_INST_13
0x4368 RS_INST_14
0x436C RS_INST_15
0x43A8 SC_EDGERULE
0x43B0 SC_CLIP_0_A
0x43B4 SC_CLIP_0_B
0x43B8 SC_CLIP_1_A
0x43BC SC_CLIP_1_B
0x43C0 SC_CLIP_2_A
0x43C4 SC_CLIP_2_B
0x43C8 SC_CLIP_3_A
0x43CC SC_CLIP_3_B
0x43D0 SC_CLIP_RULE
0x43E0 SC_SCISSOR0
0x43E8 SC_SCREENDOOR
0x4440 TX_FILTER1_0
0x4444 TX_FILTER1_1
0x4448 TX_FILTER1_2
0x444C TX_FILTER1_3
0x4450 TX_FILTER1_4
0x4454 TX_FILTER1_5
0x4458 TX_FILTER1_6
0x445C TX_FILTER1_7
0x4460 TX_FILTER1_8
0x4464 TX_FILTER1_9
0x4468 TX_FILTER1_10
0x446C TX_FILTER1_11
0x4470 TX_FILTER1_12
0x4474 TX_FILTER1_13
0x4478 TX_FILTER1_14
0x447C TX_FILTER1_15
0x4580 TX_CHROMA_KEY_0
0x4584 TX_CHROMA_KEY_1
0x4588 TX_CHROMA_KEY_2
0x458C TX_CHROMA_KEY_3
0x4590 TX_CHROMA_KEY_4
0x4594 TX_CHROMA_KEY_5
0x4598 TX_CHROMA_KEY_6
0x459C TX_CHROMA_KEY_7
0x45A0 TX_CHROMA_KEY_8
0x45A4 TX_CHROMA_KEY_9
0x45A8 TX_CHROMA_KEY_10
0x45AC TX_CHROMA_KEY_11
0x45B0 TX_CHROMA_KEY_12
0x45B4 TX_CHROMA_KEY_13
0x45B8 TX_CHROMA_KEY_14
0x45BC TX_CHROMA_KEY_15
0x45C0 TX_BORDER_COLOR_0
0x45C4 TX_BORDER_COLOR_1
0x45C8 TX_BORDER_COLOR_2
0x45CC TX_BORDER_COLOR_3
0x45D0 TX_BORDER_COLOR_4
0x45D4 TX_BORDER_COLOR_5
0x45D8 TX_BORDER_COLOR_6
0x45DC TX_BORDER_COLOR_7
0x45E0 TX_BORDER_COLOR_8
0x45E4 TX_BORDER_COLOR_9
0x45E8 TX_BORDER_COLOR_10
0x45EC TX_BORDER_COLOR_11
0x45F0 TX_BORDER_COLOR_12
0x45F4 TX_BORDER_COLOR_13
0x45F8 TX_BORDER_COLOR_14
0x45FC TX_BORDER_COLOR_15
0x4600 US_CONFIG
0x4604 US_PIXSIZE
0x4608 US_CODE_OFFSET
0x460C US_RESET
0x4610 US_CODE_ADDR_0
0x4614 US_CODE_ADDR_1
0x4618 US_CODE_ADDR_2
0x461C US_CODE_ADDR_3
0x4620 US_TEX_INST_0
0x4624 US_TEX_INST_1
0x4628 US_TEX_INST_2
0x462C US_TEX_INST_3
0x4630 US_TEX_INST_4
0x4634 US_TEX_INST_5
0x4638 US_TEX_INST_6
0x463C US_TEX_INST_7
0x4640 US_TEX_INST_8
0x4644 US_TEX_INST_9
0x4648 US_TEX_INST_10
0x464C US_TEX_INST_11
0x4650 US_TEX_INST_12
0x4654 US_TEX_INST_13
0x4658 US_TEX_INST_14
0x465C US_TEX_INST_15
0x4660 US_TEX_INST_16
0x4664 US_TEX_INST_17
0x4668 US_TEX_INST_18
0x466C US_TEX_INST_19
0x4670 US_TEX_INST_20
0x4674 US_TEX_INST_21
0x4678 US_TEX_INST_22
0x467C US_TEX_INST_23
0x4680 US_TEX_INST_24
0x4684 US_TEX_INST_25
0x4688 US_TEX_INST_26
0x468C US_TEX_INST_27
0x4690 US_TEX_INST_28
0x4694 US_TEX_INST_29
0x4698 US_TEX_INST_30
0x469C US_TEX_INST_31
0x46A4 US_OUT_FMT_0
0x46A8 US_OUT_FMT_1
0x46AC US_OUT_FMT_2
0x46B0 US_OUT_FMT_3
0x46B4 US_W_FMT
0x46B8 US_CODE_BANK
0x46BC US_CODE_EXT
0x46C0 US_ALU_RGB_ADDR_0
0x46C4 US_ALU_RGB_ADDR_1
0x46C8 US_ALU_RGB_ADDR_2
0x46CC US_ALU_RGB_ADDR_3
0x46D0 US_ALU_RGB_ADDR_4
0x46D4 US_ALU_RGB_ADDR_5
0x46D8 US_ALU_RGB_ADDR_6
0x46DC US_ALU_RGB_ADDR_7
0x46E0 US_ALU_RGB_ADDR_8
0x46E4 US_ALU_RGB_ADDR_9
0x46E8 US_ALU_RGB_ADDR_10
0x46EC US_ALU_RGB_ADDR_11
0x46F0 US_ALU_RGB_ADDR_12
0x46F4 US_ALU_RGB_ADDR_13
0x46F8 US_ALU_RGB_ADDR_14
0x46FC US_ALU_RGB_ADDR_15
0x4700 US_ALU_RGB_ADDR_16
0x4704 US_ALU_RGB_ADDR_17
0x4708 US_ALU_RGB_ADDR_18
0x470C US_ALU_RGB_ADDR_19
0x4710 US_ALU_RGB_ADDR_20
0x4714 US_ALU_RGB_ADDR_21
0x4718 US_ALU_RGB_ADDR_22
0x471C US_ALU_RGB_ADDR_23
0x4720 US_ALU_RGB_ADDR_24
0x4724 US_ALU_RGB_ADDR_25
0x4728 US_ALU_RGB_ADDR_26
0x472C US_ALU_RGB_ADDR_27
0x4730 US_ALU_RGB_ADDR_28
0x4734 US_ALU_RGB_ADDR_29
0x4738 US_ALU_RGB_ADDR_30
0x473C US_ALU_RGB_ADDR_31
0x4740 US_ALU_RGB_ADDR_32
0x4744 US_ALU_RGB_ADDR_33
0x4748 US_ALU_RGB_ADDR_34
0x474C US_ALU_RGB_ADDR_35
0x4750 US_ALU_RGB_ADDR_36
0x4754 US_ALU_RGB_ADDR_37
0x4758 US_ALU_RGB_ADDR_38
0x475C US_ALU_RGB_ADDR_39
0x4760 US_ALU_RGB_ADDR_40
0x4764 US_ALU_RGB_ADDR_41
0x4768 US_ALU_RGB_ADDR_42
0x476C US_ALU_RGB_ADDR_43
0x4770 US_ALU_RGB_ADDR_44
0x4774 US_ALU_RGB_ADDR_45
0x4778 US_ALU_RGB_ADDR_46
0x477C US_ALU_RGB_ADDR_47
0x4780 US_ALU_RGB_ADDR_48
0x4784 US_ALU_RGB_ADDR_49
0x4788 US_ALU_RGB_ADDR_50
0x478C US_ALU_RGB_ADDR_51
0x4790 US_ALU_RGB_ADDR_52
0x4794 US_ALU_RGB_ADDR_53
0x4798 US_ALU_RGB_ADDR_54
0x479C US_ALU_RGB_ADDR_55
0x47A0 US_ALU_RGB_ADDR_56
0x47A4 US_ALU_RGB_ADDR_57
0x47A8 US_ALU_RGB_ADDR_58
0x47AC US_ALU_RGB_ADDR_59
0x47B0 US_ALU_RGB_ADDR_60
0x47B4 US_ALU_RGB_ADDR_61
0x47B8 US_ALU_RGB_ADDR_62
0x47BC US_ALU_RGB_ADDR_63
0x47C0 US_ALU_ALPHA_ADDR_0
0x47C4 US_ALU_ALPHA_ADDR_1
0x47C8 US_ALU_ALPHA_ADDR_2
0x47CC US_ALU_ALPHA_ADDR_3
0x47D0 US_ALU_ALPHA_ADDR_4
0x47D4 US_ALU_ALPHA_ADDR_5
0x47D8 US_ALU_ALPHA_ADDR_6
0x47DC US_ALU_ALPHA_ADDR_7
0x47E0 US_ALU_ALPHA_ADDR_8
0x47E4 US_ALU_ALPHA_ADDR_9
0x47E8 US_ALU_ALPHA_ADDR_10
0x47EC US_ALU_ALPHA_ADDR_11
0x47F0 US_ALU_ALPHA_ADDR_12
0x47F4 US_ALU_ALPHA_ADDR_13
0x47F8 US_ALU_ALPHA_ADDR_14
0x47FC US_ALU_ALPHA_ADDR_15
0x4800 US_ALU_ALPHA_ADDR_16
0x4804 US_ALU_ALPHA_ADDR_17
0x4808 US_ALU_ALPHA_ADDR_18
0x480C US_ALU_ALPHA_ADDR_19
0x4810 US_ALU_ALPHA_ADDR_20
0x4814 US_ALU_ALPHA_ADDR_21
0x4818 US_ALU_ALPHA_ADDR_22
0x481C US_ALU_ALPHA_ADDR_23
0x4820 US_ALU_ALPHA_ADDR_24
0x4824 US_ALU_ALPHA_ADDR_25
0x4828 US_ALU_ALPHA_ADDR_26
0x482C US_ALU_ALPHA_ADDR_27
0x4830 US_ALU_ALPHA_ADDR_28
0x4834 US_ALU_ALPHA_ADDR_29
0x4838 US_ALU_ALPHA_ADDR_30
0x483C US_ALU_ALPHA_ADDR_31
0x4840 US_ALU_ALPHA_ADDR_32
0x4844 US_ALU_ALPHA_ADDR_33
0x4848 US_ALU_ALPHA_ADDR_34
0x484C US_ALU_ALPHA_ADDR_35
0x4850 US_ALU_ALPHA_ADDR_36
0x4854 US_ALU_ALPHA_ADDR_37
0x4858 US_ALU_ALPHA_ADDR_38
0x485C US_ALU_ALPHA_ADDR_39
0x4860 US_ALU_ALPHA_ADDR_40
0x4864 US_ALU_ALPHA_ADDR_41
0x4868 US_ALU_ALPHA_ADDR_42
0x486C US_ALU_ALPHA_ADDR_43
0x4870 US_ALU_ALPHA_ADDR_44
0x4874 US_ALU_ALPHA_ADDR_45
0x4878 US_ALU_ALPHA_ADDR_46
0x487C US_ALU_ALPHA_ADDR_47
0x4880 US_ALU_ALPHA_ADDR_48
0x4884 US_ALU_ALPHA_ADDR_49
0x4888 US_ALU_ALPHA_ADDR_50
0x488C US_ALU_ALPHA_ADDR_51
0x4890 US_ALU_ALPHA_ADDR_52
0x4894 US_ALU_ALPHA_ADDR_53
0x4898 US_ALU_ALPHA_ADDR_54
0x489C US_ALU_ALPHA_ADDR_55
0x48A0 US_ALU_ALPHA_ADDR_56
0x48A4 US_ALU_ALPHA_ADDR_57
0x48A8 US_ALU_ALPHA_ADDR_58
0x48AC US_ALU_ALPHA_ADDR_59
0x48B0 US_ALU_ALPHA_ADDR_60
0x48B4 US_ALU_ALPHA_ADDR_61
0x48B8 US_ALU_ALPHA_ADDR_62
0x48BC US_ALU_ALPHA_ADDR_63
0x48C0 US_ALU_RGB_INST_0
0x48C4 US_ALU_RGB_INST_1
0x48C8 US_ALU_RGB_INST_2
0x48CC US_ALU_RGB_INST_3
0x48D0 US_ALU_RGB_INST_4
0x48D4 US_ALU_RGB_INST_5
0x48D8 US_ALU_RGB_INST_6
0x48DC US_ALU_RGB_INST_7
0x48E0 US_ALU_RGB_INST_8
0x48E4 US_ALU_RGB_INST_9
0x48E8 US_ALU_RGB_INST_10
0x48EC US_ALU_RGB_INST_11
0x48F0 US_ALU_RGB_INST_12
0x48F4 US_ALU_RGB_INST_13
0x48F8 US_ALU_RGB_INST_14
0x48FC US_ALU_RGB_INST_15
0x4900 US_ALU_RGB_INST_16
0x4904 US_ALU_RGB_INST_17
0x4908 US_ALU_RGB_INST_18
0x490C US_ALU_RGB_INST_19
0x4910 US_ALU_RGB_INST_20
0x4914 US_ALU_RGB_INST_21
0x4918 US_ALU_RGB_INST_22
0x491C US_ALU_RGB_INST_23
0x4920 US_ALU_RGB_INST_24
0x4924 US_ALU_RGB_INST_25
0x4928 US_ALU_RGB_INST_26
0x492C US_ALU_RGB_INST_27
0x4930 US_ALU_RGB_INST_28
0x4934 US_ALU_RGB_INST_29
0x4938 US_ALU_RGB_INST_30
0x493C US_ALU_RGB_INST_31
0x4940 US_ALU_RGB_INST_32
0x4944 US_ALU_RGB_INST_33
0x4948 US_ALU_RGB_INST_34
0x494C US_ALU_RGB_INST_35
0x4950 US_ALU_RGB_INST_36
0x4954 US_ALU_RGB_INST_37
0x4958 US_ALU_RGB_INST_38
0x495C US_ALU_RGB_INST_39
0x4960 US_ALU_RGB_INST_40
0x4964 US_ALU_RGB_INST_41
0x4968 US_ALU_RGB_INST_42
0x496C US_ALU_RGB_INST_43
0x4970 US_ALU_RGB_INST_44
0x4974 US_ALU_RGB_INST_45
0x4978 US_ALU_RGB_INST_46
0x497C US_ALU_RGB_INST_47
0x4980 US_ALU_RGB_INST_48
0x4984 US_ALU_RGB_INST_49
0x4988 US_ALU_RGB_INST_50
0x498C US_ALU_RGB_INST_51
0x4990 US_ALU_RGB_INST_52
0x4994 US_ALU_RGB_INST_53
0x4998 US_ALU_RGB_INST_54
0x499C US_ALU_RGB_INST_55
0x49A0 US_ALU_RGB_INST_56
0x49A4 US_ALU_RGB_INST_57
0x49A8 US_ALU_RGB_INST_58
0x49AC US_ALU_RGB_INST_59
0x49B0 US_ALU_RGB_INST_60
0x49B4 US_ALU_RGB_INST_61
0x49B8 US_ALU_RGB_INST_62
0x49BC US_ALU_RGB_INST_63
0x49C0 US_ALU_ALPHA_INST_0
0x49C4 US_ALU_ALPHA_INST_1
0x49C8 US_ALU_ALPHA_INST_2
0x49CC US_ALU_ALPHA_INST_3
0x49D0 US_ALU_ALPHA_INST_4
0x49D4 US_ALU_ALPHA_INST_5
0x49D8 US_ALU_ALPHA_INST_6
0x49DC US_ALU_ALPHA_INST_7
0x49E0 US_ALU_ALPHA_INST_8
0x49E4 US_ALU_ALPHA_INST_9
0x49E8 US_ALU_ALPHA_INST_10
0x49EC US_ALU_ALPHA_INST_11
0x49F0 US_ALU_ALPHA_INST_12
0x49F4 US_ALU_ALPHA_INST_13
0x49F8 US_ALU_ALPHA_INST_14
0x49FC US_ALU_ALPHA_INST_15
0x4A00 US_ALU_ALPHA_INST_16
0x4A04 US_ALU_ALPHA_INST_17
0x4A08 US_ALU_ALPHA_INST_18
0x4A0C US_ALU_ALPHA_INST_19
0x4A10 US_ALU_ALPHA_INST_20
0x4A14 US_ALU_ALPHA_INST_21
0x4A18 US_ALU_ALPHA_INST_22
0x4A1C US_ALU_ALPHA_INST_23
0x4A20 US_ALU_ALPHA_INST_24
0x4A24 US_ALU_ALPHA_INST_25
0x4A28 US_ALU_ALPHA_INST_26
0x4A2C US_ALU_ALPHA_INST_27
0x4A30 US_ALU_ALPHA_INST_28
0x4A34 US_ALU_ALPHA_INST_29
0x4A38 US_ALU_ALPHA_INST_30
0x4A3C US_ALU_ALPHA_INST_31
0x4A40 US_ALU_ALPHA_INST_32
0x4A44 US_ALU_ALPHA_INST_33
0x4A48 US_ALU_ALPHA_INST_34
0x4A4C US_ALU_ALPHA_INST_35
0x4A50 US_ALU_ALPHA_INST_36
0x4A54 US_ALU_ALPHA_INST_37
0x4A58 US_ALU_ALPHA_INST_38
0x4A5C US_ALU_ALPHA_INST_39
0x4A60 US_ALU_ALPHA_INST_40
0x4A64 US_ALU_ALPHA_INST_41
0x4A68 US_ALU_ALPHA_INST_42
0x4A6C US_ALU_ALPHA_INST_43
0x4A70 US_ALU_ALPHA_INST_44
0x4A74 US_ALU_ALPHA_INST_45
0x4A78 US_ALU_ALPHA_INST_46
0x4A7C US_ALU_ALPHA_INST_47
0x4A80 US_ALU_ALPHA_INST_48
0x4A84 US_ALU_ALPHA_INST_49
0x4A88 US_ALU_ALPHA_INST_50
0x4A8C US_ALU_ALPHA_INST_51
0x4A90 US_ALU_ALPHA_INST_52
0x4A94 US_ALU_ALPHA_INST_53
0x4A98 US_ALU_ALPHA_INST_54
0x4A9C US_ALU_ALPHA_INST_55
0x4AA0 US_ALU_ALPHA_INST_56
0x4AA4 US_ALU_ALPHA_INST_57
0x4AA8 US_ALU_ALPHA_INST_58
0x4AAC US_ALU_ALPHA_INST_59
0x4AB0 US_ALU_ALPHA_INST_60
0x4AB4 US_ALU_ALPHA_INST_61
0x4AB8 US_ALU_ALPHA_INST_62
0x4ABC US_ALU_ALPHA_INST_63
0x4AC0 US_ALU_EXT_ADDR_0
0x4AC4 US_ALU_EXT_ADDR_1
0x4AC8 US_ALU_EXT_ADDR_2
0x4ACC US_ALU_EXT_ADDR_3
0x4AD0 US_ALU_EXT_ADDR_4
0x4AD4 US_ALU_EXT_ADDR_5
0x4AD8 US_ALU_EXT_ADDR_6
0x4ADC US_ALU_EXT_ADDR_7
0x4AE0 US_ALU_EXT_ADDR_8
0x4AE4 US_ALU_EXT_ADDR_9
0x4AE8 US_ALU_EXT_ADDR_10
0x4AEC US_ALU_EXT_ADDR_11
0x4AF0 US_ALU_EXT_ADDR_12
0x4AF4 US_ALU_EXT_ADDR_13
0x4AF8 US_ALU_EXT_ADDR_14
0x4AFC US_ALU_EXT_ADDR_15
0x4B00 US_ALU_EXT_ADDR_16
0x4B04 US_ALU_EXT_ADDR_17
0x4B08 US_ALU_EXT_ADDR_18
0x4B0C US_ALU_EXT_ADDR_19
0x4B10 US_ALU_EXT_ADDR_20
0x4B14 US_ALU_EXT_ADDR_21
0x4B18 US_ALU_EXT_ADDR_22
0x4B1C US_ALU_EXT_ADDR_23
0x4B20 US_ALU_EXT_ADDR_24
0x4B24 US_ALU_EXT_ADDR_25
0x4B28 US_ALU_EXT_ADDR_26
0x4B2C US_ALU_EXT_ADDR_27
0x4B30 US_ALU_EXT_ADDR_28
0x4B34 US_ALU_EXT_ADDR_29
0x4B38 US_ALU_EXT_ADDR_30
0x4B3C US_ALU_EXT_ADDR_31
0x4B40 US_ALU_EXT_ADDR_32
0x4B44 US_ALU_EXT_ADDR_33
0x4B48 US_ALU_EXT_ADDR_34
0x4B4C US_ALU_EXT_ADDR_35
0x4B50 US_ALU_EXT_ADDR_36
0x4B54 US_ALU_EXT_ADDR_37
0x4B58 US_ALU_EXT_ADDR_38
0x4B5C US_ALU_EXT_ADDR_39
0x4B60 US_ALU_EXT_ADDR_40
0x4B64 US_ALU_EXT_ADDR_41
0x4B68 US_ALU_EXT_ADDR_42
0x4B6C US_ALU_EXT_ADDR_43
0x4B70 US_ALU_EXT_ADDR_44
0x4B74 US_ALU_EXT_ADDR_45
0x4B78 US_ALU_EXT_ADDR_46
0x4B7C US_ALU_EXT_ADDR_47
0x4B80 US_ALU_EXT_ADDR_48
0x4B84 US_ALU_EXT_ADDR_49
0x4B88 US_ALU_EXT_ADDR_50
0x4B8C US_ALU_EXT_ADDR_51
0x4B90 US_ALU_EXT_ADDR_52
0x4B94 US_ALU_EXT_ADDR_53
0x4B98 US_ALU_EXT_ADDR_54
0x4B9C US_ALU_EXT_ADDR_55
0x4BA0 US_ALU_EXT_ADDR_56
0x4BA4 US_ALU_EXT_ADDR_57
0x4BA8 US_ALU_EXT_ADDR_58
0x4BAC US_ALU_EXT_ADDR_59
0x4BB0 US_ALU_EXT_ADDR_60
0x4BB4 US_ALU_EXT_ADDR_61
0x4BB8 US_ALU_EXT_ADDR_62
0x4BBC US_ALU_EXT_ADDR_63
0x4BC0 FG_FOG_BLEND
0x4BC4 FG_FOG_FACTOR
0x4BC8 FG_FOG_COLOR_R
0x4BCC FG_FOG_COLOR_G
0x4BD0 FG_FOG_COLOR_B
0x4BD4 FG_ALPHA_FUNC
0x4BD8 FG_DEPTH_SRC
0x4C00 US_ALU_CONST_R_0
0x4C04 US_ALU_CONST_G_0
0x4C08 US_ALU_CONST_B_0
0x4C0C US_ALU_CONST_A_0
0x4C10 US_ALU_CONST_R_1
0x4C14 US_ALU_CONST_G_1
0x4C18 US_ALU_CONST_B_1
0x4C1C US_ALU_CONST_A_1
0x4C20 US_ALU_CONST_R_2
0x4C24 US_ALU_CONST_G_2
0x4C28 US_ALU_CONST_B_2
0x4C2C US_ALU_CONST_A_2
0x4C30 US_ALU_CONST_R_3
0x4C34 US_ALU_CONST_G_3
0x4C38 US_ALU_CONST_B_3
0x4C3C US_ALU_CONST_A_3
0x4C40 US_ALU_CONST_R_4
0x4C44 US_ALU_CONST_G_4
0x4C48 US_ALU_CONST_B_4
0x4C4C US_ALU_CONST_A_4
0x4C50 US_ALU_CONST_R_5
0x4C54 US_ALU_CONST_G_5
0x4C58 US_ALU_CONST_B_5
0x4C5C US_ALU_CONST_A_5
0x4C60 US_ALU_CONST_R_6
0x4C64 US_ALU_CONST_G_6
0x4C68 US_ALU_CONST_B_6
0x4C6C US_ALU_CONST_A_6
0x4C70 US_ALU_CONST_R_7
0x4C74 US_ALU_CONST_G_7
0x4C78 US_ALU_CONST_B_7
0x4C7C US_ALU_CONST_A_7
0x4C80 US_ALU_CONST_R_8
0x4C84 US_ALU_CONST_G_8
0x4C88 US_ALU_CONST_B_8
0x4C8C US_ALU_CONST_A_8
0x4C90 US_ALU_CONST_R_9
0x4C94 US_ALU_CONST_G_9
0x4C98 US_ALU_CONST_B_9
0x4C9C US_ALU_CONST_A_9
0x4CA0 US_ALU_CONST_R_10
0x4CA4 US_ALU_CONST_G_10
0x4CA8 US_ALU_CONST_B_10
0x4CAC US_ALU_CONST_A_10
0x4CB0 US_ALU_CONST_R_11
0x4CB4 US_ALU_CONST_G_11
0x4CB8 US_ALU_CONST_B_11
0x4CBC US_ALU_CONST_A_11
0x4CC0 US_ALU_CONST_R_12
0x4CC4 US_ALU_CONST_G_12
0x4CC8 US_ALU_CONST_B_12
0x4CCC US_ALU_CONST_A_12
0x4CD0 US_ALU_CONST_R_13
0x4CD4 US_ALU_CONST_G_13
0x4CD8 US_ALU_CONST_B_13
0x4CDC US_ALU_CONST_A_13
0x4CE0 US_ALU_CONST_R_14
0x4CE4 US_ALU_CONST_G_14
0x4CE8 US_ALU_CONST_B_14
0x4CEC US_ALU_CONST_A_14
0x4CF0 US_ALU_CONST_R_15
0x4CF4 US_ALU_CONST_G_15
0x4CF8 US_ALU_CONST_B_15
0x4CFC US_ALU_CONST_A_15
0x4D00 US_ALU_CONST_R_16
0x4D04 US_ALU_CONST_G_16
0x4D08 US_ALU_CONST_B_16
0x4D0C US_ALU_CONST_A_16
0x4D10 US_ALU_CONST_R_17
0x4D14 US_ALU_CONST_G_17
0x4D18 US_ALU_CONST_B_17
0x4D1C US_ALU_CONST_A_17
0x4D20 US_ALU_CONST_R_18
0x4D24 US_ALU_CONST_G_18
0x4D28 US_ALU_CONST_B_18
0x4D2C US_ALU_CONST_A_18
0x4D30 US_ALU_CONST_R_19
0x4D34 US_ALU_CONST_G_19
0x4D38 US_ALU_CONST_B_19
0x4D3C US_ALU_CONST_A_19
0x4D40 US_ALU_CONST_R_20
0x4D44 US_ALU_CONST_G_20
0x4D48 US_ALU_CONST_B_20
0x4D4C US_ALU_CONST_A_20
0x4D50 US_ALU_CONST_R_21
0x4D54 US_ALU_CONST_G_21
0x4D58 US_ALU_CONST_B_21
0x4D5C US_ALU_CONST_A_21
0x4D60 US_ALU_CONST_R_22
0x4D64 US_ALU_CONST_G_22
0x4D68 US_ALU_CONST_B_22
0x4D6C US_ALU_CONST_A_22
0x4D70 US_ALU_CONST_R_23
0x4D74 US_ALU_CONST_G_23
0x4D78 US_ALU_CONST_B_23
0x4D7C US_ALU_CONST_A_23
0x4D80 US_ALU_CONST_R_24
0x4D84 US_ALU_CONST_G_24
0x4D88 US_ALU_CONST_B_24
0x4D8C US_ALU_CONST_A_24
0x4D90 US_ALU_CONST_R_25
0x4D94 US_ALU_CONST_G_25
0x4D98 US_ALU_CONST_B_25
0x4D9C US_ALU_CONST_A_25
0x4DA0 US_ALU_CONST_R_26
0x4DA4 US_ALU_CONST_G_26
0x4DA8 US_ALU_CONST_B_26
0x4DAC US_ALU_CONST_A_26
0x4DB0 US_ALU_CONST_R_27
0x4DB4 US_ALU_CONST_G_27
0x4DB8 US_ALU_CONST_B_27
0x4DBC US_ALU_CONST_A_27
0x4DC0 US_ALU_CONST_R_28
0x4DC4 US_ALU_CONST_G_28
0x4DC8 US_ALU_CONST_B_28
0x4DCC US_ALU_CONST_A_28
0x4DD0 US_ALU_CONST_R_29
0x4DD4 US_ALU_CONST_G_29
0x4DD8 US_ALU_CONST_B_29
0x4DDC US_ALU_CONST_A_29
0x4DE0 US_ALU_CONST_R_30
0x4DE4 US_ALU_CONST_G_30
0x4DE8 US_ALU_CONST_B_30
0x4DEC US_ALU_CONST_A_30
0x4DF0 US_ALU_CONST_R_31
0x4DF4 US_ALU_CONST_G_31
0x4DF8 US_ALU_CONST_B_31
0x4DFC US_ALU_CONST_A_31
0x4E08 RB3D_ABLENDCNTL_R3
0x4E10 RB3D_CONSTANT_COLOR
0x4E14 RB3D_COLOR_CLEAR_VALUE
0x4E18 RB3D_ROPCNTL_R3
0x4E1C RB3D_CLRCMP_FLIPE_R3
0x4E20 RB3D_CLRCMP_CLR_R3
0x4E24 RB3D_CLRCMP_MSK_R3
0x4E48 RB3D_DEBUG_CTL
0x4E4C RB3D_DSTCACHE_CTLSTAT_R3
0x4E50 RB3D_DITHER_CTL
0x4E54 RB3D_CMASK_OFFSET0
0x4E58 RB3D_CMASK_OFFSET1
0x4E5C RB3D_CMASK_OFFSET2
0x4E60 RB3D_CMASK_OFFSET3
0x4E64 RB3D_CMASK_PITCH0
0x4E68 RB3D_CMASK_PITCH1
0x4E6C RB3D_CMASK_PITCH2
0x4E70 RB3D_CMASK_PITCH3
0x4E74 RB3D_CMASK_WRINDEX
0x4E78 RB3D_CMASK_DWORD
0x4E7C RB3D_CMASK_RDINDEX
0x4EA0 RB3D_DISCARD_SRC_PIXEL_LTE_THRESHOLD
0x4EA4 RB3D_DISCARD_SRC_PIXEL_GTE_THRESHOLD
0x4F04 ZB_ZSTENCILCNTL
0x4F08 ZB_STENCILREFMASK
0x4F14 ZB_ZTOP
0x4F18 ZB_ZCACHE_CTLSTAT
0x4F28 ZB_DEPTHCLEARVALUE
0x4F58 ZB_ZPASS_DATA

View File

@@ -0,0 +1,755 @@
r600 0x9400
0x000287A0 R7xx_CB_SHADER_CONTROL
0x00028230 R7xx_PA_SC_EDGERULE
0x000286C8 R7xx_SPI_THREAD_GROUPING
0x00008D8C R7xx_SQ_DYN_GPR_CNTL_PS_FLUSH_REQ
0x00008490 CP_STRMOUT_CNTL
0x000085F0 CP_COHER_CNTL
0x000085F4 CP_COHER_SIZE
0x000088C4 VGT_CACHE_INVALIDATION
0x00028A50 VGT_ENHANCE
0x000088CC VGT_ES_PER_GS
0x00028A2C VGT_GROUP_DECR
0x00028A28 VGT_GROUP_FIRST_DECR
0x00028A24 VGT_GROUP_PRIM_TYPE
0x00028A30 VGT_GROUP_VECT_0_CNTL
0x00028A38 VGT_GROUP_VECT_0_FMT_CNTL
0x00028A34 VGT_GROUP_VECT_1_CNTL
0x00028A3C VGT_GROUP_VECT_1_FMT_CNTL
0x00028A40 VGT_GS_MODE
0x00028A6C VGT_GS_OUT_PRIM_TYPE
0x000088C8 VGT_GS_PER_ES
0x000088E8 VGT_GS_PER_VS
0x000088D4 VGT_GS_VERTEX_REUSE
0x00028A14 VGT_HOS_CNTL
0x00028A18 VGT_HOS_MAX_TESS_LEVEL
0x00028A1C VGT_HOS_MIN_TESS_LEVEL
0x00028A20 VGT_HOS_REUSE_DEPTH
0x0000895C VGT_INDEX_TYPE
0x00028408 VGT_INDX_OFFSET
0x00028AA0 VGT_INSTANCE_STEP_RATE_0
0x00028AA4 VGT_INSTANCE_STEP_RATE_1
0x00028400 VGT_MAX_VTX_INDX
0x00028404 VGT_MIN_VTX_INDX
0x00028A94 VGT_MULTI_PRIM_IB_RESET_EN
0x0002840C VGT_MULTI_PRIM_IB_RESET_INDX
0x00008970 VGT_NUM_INDICES
0x00008974 VGT_NUM_INSTANCES
0x00028A10 VGT_OUTPUT_PATH_CNTL
0x00028A84 VGT_PRIMITIVEID_EN
0x00008958 VGT_PRIMITIVE_TYPE
0x00028AB4 VGT_REUSE_OFF
0x00028AB8 VGT_VTX_CNT_EN
0x000088B0 VGT_VTX_VECT_EJECT_REG
0x00028AD4 VGT_STRMOUT_VTX_STRIDE_0
0x00028AE4 VGT_STRMOUT_VTX_STRIDE_1
0x00028AF4 VGT_STRMOUT_VTX_STRIDE_2
0x00028B04 VGT_STRMOUT_VTX_STRIDE_3
0x00028B28 VGT_STRMOUT_DRAW_OPAQUE_OFFSET
0x00028B2C VGT_STRMOUT_DRAW_OPAQUE_BUFFER_FILLED_SIZE
0x00028B30 VGT_STRMOUT_DRAW_OPAQUE_VERTEX_STRIDE
0x00028810 PA_CL_CLIP_CNTL
0x00008A14 PA_CL_ENHANCE
0x00028C14 PA_CL_GB_HORZ_CLIP_ADJ
0x00028C18 PA_CL_GB_HORZ_DISC_ADJ
0x00028C0C PA_CL_GB_VERT_CLIP_ADJ
0x00028C10 PA_CL_GB_VERT_DISC_ADJ
0x00028820 PA_CL_NANINF_CNTL
0x00028E1C PA_CL_POINT_CULL_RAD
0x00028E18 PA_CL_POINT_SIZE
0x00028E10 PA_CL_POINT_X_RAD
0x00028E14 PA_CL_POINT_Y_RAD
0x00028E2C PA_CL_UCP_0_W
0x00028E3C PA_CL_UCP_1_W
0x00028E4C PA_CL_UCP_2_W
0x00028E5C PA_CL_UCP_3_W
0x00028E6C PA_CL_UCP_4_W
0x00028E7C PA_CL_UCP_5_W
0x00028E20 PA_CL_UCP_0_X
0x00028E30 PA_CL_UCP_1_X
0x00028E40 PA_CL_UCP_2_X
0x00028E50 PA_CL_UCP_3_X
0x00028E60 PA_CL_UCP_4_X
0x00028E70 PA_CL_UCP_5_X
0x00028E24 PA_CL_UCP_0_Y
0x00028E34 PA_CL_UCP_1_Y
0x00028E44 PA_CL_UCP_2_Y
0x00028E54 PA_CL_UCP_3_Y
0x00028E64 PA_CL_UCP_4_Y
0x00028E74 PA_CL_UCP_5_Y
0x00028E28 PA_CL_UCP_0_Z
0x00028E38 PA_CL_UCP_1_Z
0x00028E48 PA_CL_UCP_2_Z
0x00028E58 PA_CL_UCP_3_Z
0x00028E68 PA_CL_UCP_4_Z
0x00028E78 PA_CL_UCP_5_Z
0x00028440 PA_CL_VPORT_XOFFSET_0
0x00028458 PA_CL_VPORT_XOFFSET_1
0x00028470 PA_CL_VPORT_XOFFSET_2
0x00028488 PA_CL_VPORT_XOFFSET_3
0x000284A0 PA_CL_VPORT_XOFFSET_4
0x000284B8 PA_CL_VPORT_XOFFSET_5
0x000284D0 PA_CL_VPORT_XOFFSET_6
0x000284E8 PA_CL_VPORT_XOFFSET_7
0x00028500 PA_CL_VPORT_XOFFSET_8
0x00028518 PA_CL_VPORT_XOFFSET_9
0x00028530 PA_CL_VPORT_XOFFSET_10
0x00028548 PA_CL_VPORT_XOFFSET_11
0x00028560 PA_CL_VPORT_XOFFSET_12
0x00028578 PA_CL_VPORT_XOFFSET_13
0x00028590 PA_CL_VPORT_XOFFSET_14
0x000285A8 PA_CL_VPORT_XOFFSET_15
0x0002843C PA_CL_VPORT_XSCALE_0
0x00028454 PA_CL_VPORT_XSCALE_1
0x0002846C PA_CL_VPORT_XSCALE_2
0x00028484 PA_CL_VPORT_XSCALE_3
0x0002849C PA_CL_VPORT_XSCALE_4
0x000284B4 PA_CL_VPORT_XSCALE_5
0x000284CC PA_CL_VPORT_XSCALE_6
0x000284E4 PA_CL_VPORT_XSCALE_7
0x000284FC PA_CL_VPORT_XSCALE_8
0x00028514 PA_CL_VPORT_XSCALE_9
0x0002852C PA_CL_VPORT_XSCALE_10
0x00028544 PA_CL_VPORT_XSCALE_11
0x0002855C PA_CL_VPORT_XSCALE_12
0x00028574 PA_CL_VPORT_XSCALE_13
0x0002858C PA_CL_VPORT_XSCALE_14
0x000285A4 PA_CL_VPORT_XSCALE_15
0x00028448 PA_CL_VPORT_YOFFSET_0
0x00028460 PA_CL_VPORT_YOFFSET_1
0x00028478 PA_CL_VPORT_YOFFSET_2
0x00028490 PA_CL_VPORT_YOFFSET_3
0x000284A8 PA_CL_VPORT_YOFFSET_4
0x000284C0 PA_CL_VPORT_YOFFSET_5
0x000284D8 PA_CL_VPORT_YOFFSET_6
0x000284F0 PA_CL_VPORT_YOFFSET_7
0x00028508 PA_CL_VPORT_YOFFSET_8
0x00028520 PA_CL_VPORT_YOFFSET_9
0x00028538 PA_CL_VPORT_YOFFSET_10
0x00028550 PA_CL_VPORT_YOFFSET_11
0x00028568 PA_CL_VPORT_YOFFSET_12
0x00028580 PA_CL_VPORT_YOFFSET_13
0x00028598 PA_CL_VPORT_YOFFSET_14
0x000285B0 PA_CL_VPORT_YOFFSET_15
0x00028444 PA_CL_VPORT_YSCALE_0
0x0002845C PA_CL_VPORT_YSCALE_1
0x00028474 PA_CL_VPORT_YSCALE_2
0x0002848C PA_CL_VPORT_YSCALE_3
0x000284A4 PA_CL_VPORT_YSCALE_4
0x000284BC PA_CL_VPORT_YSCALE_5
0x000284D4 PA_CL_VPORT_YSCALE_6
0x000284EC PA_CL_VPORT_YSCALE_7
0x00028504 PA_CL_VPORT_YSCALE_8
0x0002851C PA_CL_VPORT_YSCALE_9
0x00028534 PA_CL_VPORT_YSCALE_10
0x0002854C PA_CL_VPORT_YSCALE_11
0x00028564 PA_CL_VPORT_YSCALE_12
0x0002857C PA_CL_VPORT_YSCALE_13
0x00028594 PA_CL_VPORT_YSCALE_14
0x000285AC PA_CL_VPORT_YSCALE_15
0x00028450 PA_CL_VPORT_ZOFFSET_0
0x00028468 PA_CL_VPORT_ZOFFSET_1
0x00028480 PA_CL_VPORT_ZOFFSET_2
0x00028498 PA_CL_VPORT_ZOFFSET_3
0x000284B0 PA_CL_VPORT_ZOFFSET_4
0x000284C8 PA_CL_VPORT_ZOFFSET_5
0x000284E0 PA_CL_VPORT_ZOFFSET_6
0x000284F8 PA_CL_VPORT_ZOFFSET_7
0x00028510 PA_CL_VPORT_ZOFFSET_8
0x00028528 PA_CL_VPORT_ZOFFSET_9
0x00028540 PA_CL_VPORT_ZOFFSET_10
0x00028558 PA_CL_VPORT_ZOFFSET_11
0x00028570 PA_CL_VPORT_ZOFFSET_12
0x00028588 PA_CL_VPORT_ZOFFSET_13
0x000285A0 PA_CL_VPORT_ZOFFSET_14
0x000285B8 PA_CL_VPORT_ZOFFSET_15
0x0002844C PA_CL_VPORT_ZSCALE_0
0x00028464 PA_CL_VPORT_ZSCALE_1
0x0002847C PA_CL_VPORT_ZSCALE_2
0x00028494 PA_CL_VPORT_ZSCALE_3
0x000284AC PA_CL_VPORT_ZSCALE_4
0x000284C4 PA_CL_VPORT_ZSCALE_5
0x000284DC PA_CL_VPORT_ZSCALE_6
0x000284F4 PA_CL_VPORT_ZSCALE_7
0x0002850C PA_CL_VPORT_ZSCALE_8
0x00028524 PA_CL_VPORT_ZSCALE_9
0x0002853C PA_CL_VPORT_ZSCALE_10
0x00028554 PA_CL_VPORT_ZSCALE_11
0x0002856C PA_CL_VPORT_ZSCALE_12
0x00028584 PA_CL_VPORT_ZSCALE_13
0x0002859C PA_CL_VPORT_ZSCALE_14
0x000285B4 PA_CL_VPORT_ZSCALE_15
0x0002881C PA_CL_VS_OUT_CNTL
0x00028818 PA_CL_VTE_CNTL
0x00028C48 PA_SC_AA_MASK
0x00008B40 PA_SC_AA_SAMPLE_LOCS_2S
0x00008B44 PA_SC_AA_SAMPLE_LOCS_4S
0x00008B48 PA_SC_AA_SAMPLE_LOCS_8S_WD0
0x00008B4C PA_SC_AA_SAMPLE_LOCS_8S_WD1
0x00028C20 PA_SC_AA_SAMPLE_LOCS_8S_WD1_MCTX
0x00028C1C PA_SC_AA_SAMPLE_LOCS_MCTX
0x00028214 PA_SC_CLIPRECT_0_BR
0x0002821C PA_SC_CLIPRECT_1_BR
0x00028224 PA_SC_CLIPRECT_2_BR
0x0002822C PA_SC_CLIPRECT_3_BR
0x00028210 PA_SC_CLIPRECT_0_TL
0x00028218 PA_SC_CLIPRECT_1_TL
0x00028220 PA_SC_CLIPRECT_2_TL
0x00028228 PA_SC_CLIPRECT_3_TL
0x0002820C PA_SC_CLIPRECT_RULE
0x00008BF0 PA_SC_ENHANCE
0x00028244 PA_SC_GENERIC_SCISSOR_BR
0x00028240 PA_SC_GENERIC_SCISSOR_TL
0x00028C00 PA_SC_LINE_CNTL
0x00028A0C PA_SC_LINE_STIPPLE
0x00008B10 PA_SC_LINE_STIPPLE_STATE
0x00028A4C PA_SC_MODE_CNTL
0x00028A48 PA_SC_MPASS_PS_CNTL
0x00008B20 PA_SC_MULTI_CHIP_CNTL
0x00028034 PA_SC_SCREEN_SCISSOR_BR
0x00028030 PA_SC_SCREEN_SCISSOR_TL
0x00028254 PA_SC_VPORT_SCISSOR_0_BR
0x0002825C PA_SC_VPORT_SCISSOR_1_BR
0x00028264 PA_SC_VPORT_SCISSOR_2_BR
0x0002826C PA_SC_VPORT_SCISSOR_3_BR
0x00028274 PA_SC_VPORT_SCISSOR_4_BR
0x0002827C PA_SC_VPORT_SCISSOR_5_BR
0x00028284 PA_SC_VPORT_SCISSOR_6_BR
0x0002828C PA_SC_VPORT_SCISSOR_7_BR
0x00028294 PA_SC_VPORT_SCISSOR_8_BR
0x0002829C PA_SC_VPORT_SCISSOR_9_BR
0x000282A4 PA_SC_VPORT_SCISSOR_10_BR
0x000282AC PA_SC_VPORT_SCISSOR_11_BR
0x000282B4 PA_SC_VPORT_SCISSOR_12_BR
0x000282BC PA_SC_VPORT_SCISSOR_13_BR
0x000282C4 PA_SC_VPORT_SCISSOR_14_BR
0x000282CC PA_SC_VPORT_SCISSOR_15_BR
0x00028250 PA_SC_VPORT_SCISSOR_0_TL
0x00028258 PA_SC_VPORT_SCISSOR_1_TL
0x00028260 PA_SC_VPORT_SCISSOR_2_TL
0x00028268 PA_SC_VPORT_SCISSOR_3_TL
0x00028270 PA_SC_VPORT_SCISSOR_4_TL
0x00028278 PA_SC_VPORT_SCISSOR_5_TL
0x00028280 PA_SC_VPORT_SCISSOR_6_TL
0x00028288 PA_SC_VPORT_SCISSOR_7_TL
0x00028290 PA_SC_VPORT_SCISSOR_8_TL
0x00028298 PA_SC_VPORT_SCISSOR_9_TL
0x000282A0 PA_SC_VPORT_SCISSOR_10_TL
0x000282A8 PA_SC_VPORT_SCISSOR_11_TL
0x000282B0 PA_SC_VPORT_SCISSOR_12_TL
0x000282B8 PA_SC_VPORT_SCISSOR_13_TL
0x000282C0 PA_SC_VPORT_SCISSOR_14_TL
0x000282C8 PA_SC_VPORT_SCISSOR_15_TL
0x000282D4 PA_SC_VPORT_ZMAX_0
0x000282DC PA_SC_VPORT_ZMAX_1
0x000282E4 PA_SC_VPORT_ZMAX_2
0x000282EC PA_SC_VPORT_ZMAX_3
0x000282F4 PA_SC_VPORT_ZMAX_4
0x000282FC PA_SC_VPORT_ZMAX_5
0x00028304 PA_SC_VPORT_ZMAX_6
0x0002830C PA_SC_VPORT_ZMAX_7
0x00028314 PA_SC_VPORT_ZMAX_8
0x0002831C PA_SC_VPORT_ZMAX_9
0x00028324 PA_SC_VPORT_ZMAX_10
0x0002832C PA_SC_VPORT_ZMAX_11
0x00028334 PA_SC_VPORT_ZMAX_12
0x0002833C PA_SC_VPORT_ZMAX_13
0x00028344 PA_SC_VPORT_ZMAX_14
0x0002834C PA_SC_VPORT_ZMAX_15
0x000282D0 PA_SC_VPORT_ZMIN_0
0x000282D8 PA_SC_VPORT_ZMIN_1
0x000282E0 PA_SC_VPORT_ZMIN_2
0x000282E8 PA_SC_VPORT_ZMIN_3
0x000282F0 PA_SC_VPORT_ZMIN_4
0x000282F8 PA_SC_VPORT_ZMIN_5
0x00028300 PA_SC_VPORT_ZMIN_6
0x00028308 PA_SC_VPORT_ZMIN_7
0x00028310 PA_SC_VPORT_ZMIN_8
0x00028318 PA_SC_VPORT_ZMIN_9
0x00028320 PA_SC_VPORT_ZMIN_10
0x00028328 PA_SC_VPORT_ZMIN_11
0x00028330 PA_SC_VPORT_ZMIN_12
0x00028338 PA_SC_VPORT_ZMIN_13
0x00028340 PA_SC_VPORT_ZMIN_14
0x00028348 PA_SC_VPORT_ZMIN_15
0x00028200 PA_SC_WINDOW_OFFSET
0x00028208 PA_SC_WINDOW_SCISSOR_BR
0x00028204 PA_SC_WINDOW_SCISSOR_TL
0x00028A08 PA_SU_LINE_CNTL
0x00028A04 PA_SU_POINT_MINMAX
0x00028A00 PA_SU_POINT_SIZE
0x00028E0C PA_SU_POLY_OFFSET_BACK_OFFSET
0x00028E08 PA_SU_POLY_OFFSET_BACK_SCALE
0x00028DFC PA_SU_POLY_OFFSET_CLAMP
0x00028DF8 PA_SU_POLY_OFFSET_DB_FMT_CNTL
0x00028E04 PA_SU_POLY_OFFSET_FRONT_OFFSET
0x00028E00 PA_SU_POLY_OFFSET_FRONT_SCALE
0x00028814 PA_SU_SC_MODE_CNTL
0x00028C08 PA_SU_VTX_CNTL
0x00008C04 SQ_GPR_RESOURCE_MGMT_1
0x00008C08 SQ_GPR_RESOURCE_MGMT_2
0x00008C10 SQ_STACK_RESOURCE_MGMT_1
0x00008C14 SQ_STACK_RESOURCE_MGMT_2
0x00008C0C SQ_THREAD_RESOURCE_MGMT
0x00028380 SQ_VTX_SEMANTIC_0
0x00028384 SQ_VTX_SEMANTIC_1
0x00028388 SQ_VTX_SEMANTIC_2
0x0002838C SQ_VTX_SEMANTIC_3
0x00028390 SQ_VTX_SEMANTIC_4
0x00028394 SQ_VTX_SEMANTIC_5
0x00028398 SQ_VTX_SEMANTIC_6
0x0002839C SQ_VTX_SEMANTIC_7
0x000283A0 SQ_VTX_SEMANTIC_8
0x000283A4 SQ_VTX_SEMANTIC_9
0x000283A8 SQ_VTX_SEMANTIC_10
0x000283AC SQ_VTX_SEMANTIC_11
0x000283B0 SQ_VTX_SEMANTIC_12
0x000283B4 SQ_VTX_SEMANTIC_13
0x000283B8 SQ_VTX_SEMANTIC_14
0x000283BC SQ_VTX_SEMANTIC_15
0x000283C0 SQ_VTX_SEMANTIC_16
0x000283C4 SQ_VTX_SEMANTIC_17
0x000283C8 SQ_VTX_SEMANTIC_18
0x000283CC SQ_VTX_SEMANTIC_19
0x000283D0 SQ_VTX_SEMANTIC_20
0x000283D4 SQ_VTX_SEMANTIC_21
0x000283D8 SQ_VTX_SEMANTIC_22
0x000283DC SQ_VTX_SEMANTIC_23
0x000283E0 SQ_VTX_SEMANTIC_24
0x000283E4 SQ_VTX_SEMANTIC_25
0x000283E8 SQ_VTX_SEMANTIC_26
0x000283EC SQ_VTX_SEMANTIC_27
0x000283F0 SQ_VTX_SEMANTIC_28
0x000283F4 SQ_VTX_SEMANTIC_29
0x000283F8 SQ_VTX_SEMANTIC_30
0x000283FC SQ_VTX_SEMANTIC_31
0x000288E0 SQ_VTX_SEMANTIC_CLEAR
0x0003CFF4 SQ_VTX_START_INST_LOC
0x000281C0 SQ_ALU_CONST_BUFFER_SIZE_GS_0
0x000281C4 SQ_ALU_CONST_BUFFER_SIZE_GS_1
0x000281C8 SQ_ALU_CONST_BUFFER_SIZE_GS_2
0x000281CC SQ_ALU_CONST_BUFFER_SIZE_GS_3
0x000281D0 SQ_ALU_CONST_BUFFER_SIZE_GS_4
0x000281D4 SQ_ALU_CONST_BUFFER_SIZE_GS_5
0x000281D8 SQ_ALU_CONST_BUFFER_SIZE_GS_6
0x000281DC SQ_ALU_CONST_BUFFER_SIZE_GS_7
0x000281E0 SQ_ALU_CONST_BUFFER_SIZE_GS_8
0x000281E4 SQ_ALU_CONST_BUFFER_SIZE_GS_9
0x000281E8 SQ_ALU_CONST_BUFFER_SIZE_GS_10
0x000281EC SQ_ALU_CONST_BUFFER_SIZE_GS_11
0x000281F0 SQ_ALU_CONST_BUFFER_SIZE_GS_12
0x000281F4 SQ_ALU_CONST_BUFFER_SIZE_GS_13
0x000281F8 SQ_ALU_CONST_BUFFER_SIZE_GS_14
0x000281FC SQ_ALU_CONST_BUFFER_SIZE_GS_15
0x00028140 SQ_ALU_CONST_BUFFER_SIZE_PS_0
0x00028144 SQ_ALU_CONST_BUFFER_SIZE_PS_1
0x00028148 SQ_ALU_CONST_BUFFER_SIZE_PS_2
0x0002814C SQ_ALU_CONST_BUFFER_SIZE_PS_3
0x00028150 SQ_ALU_CONST_BUFFER_SIZE_PS_4
0x00028154 SQ_ALU_CONST_BUFFER_SIZE_PS_5
0x00028158 SQ_ALU_CONST_BUFFER_SIZE_PS_6
0x0002815C SQ_ALU_CONST_BUFFER_SIZE_PS_7
0x00028160 SQ_ALU_CONST_BUFFER_SIZE_PS_8
0x00028164 SQ_ALU_CONST_BUFFER_SIZE_PS_9
0x00028168 SQ_ALU_CONST_BUFFER_SIZE_PS_10
0x0002816C SQ_ALU_CONST_BUFFER_SIZE_PS_11
0x00028170 SQ_ALU_CONST_BUFFER_SIZE_PS_12
0x00028174 SQ_ALU_CONST_BUFFER_SIZE_PS_13
0x00028178 SQ_ALU_CONST_BUFFER_SIZE_PS_14
0x0002817C SQ_ALU_CONST_BUFFER_SIZE_PS_15
0x00028180 SQ_ALU_CONST_BUFFER_SIZE_VS_0
0x00028184 SQ_ALU_CONST_BUFFER_SIZE_VS_1
0x00028188 SQ_ALU_CONST_BUFFER_SIZE_VS_2
0x0002818C SQ_ALU_CONST_BUFFER_SIZE_VS_3
0x00028190 SQ_ALU_CONST_BUFFER_SIZE_VS_4
0x00028194 SQ_ALU_CONST_BUFFER_SIZE_VS_5
0x00028198 SQ_ALU_CONST_BUFFER_SIZE_VS_6
0x0002819C SQ_ALU_CONST_BUFFER_SIZE_VS_7
0x000281A0 SQ_ALU_CONST_BUFFER_SIZE_VS_8
0x000281A4 SQ_ALU_CONST_BUFFER_SIZE_VS_9
0x000281A8 SQ_ALU_CONST_BUFFER_SIZE_VS_10
0x000281AC SQ_ALU_CONST_BUFFER_SIZE_VS_11
0x000281B0 SQ_ALU_CONST_BUFFER_SIZE_VS_12
0x000281B4 SQ_ALU_CONST_BUFFER_SIZE_VS_13
0x000281B8 SQ_ALU_CONST_BUFFER_SIZE_VS_14
0x000281BC SQ_ALU_CONST_BUFFER_SIZE_VS_15
0x000288D8 SQ_PGM_CF_OFFSET_ES
0x000288DC SQ_PGM_CF_OFFSET_FS
0x000288D4 SQ_PGM_CF_OFFSET_GS
0x000288CC SQ_PGM_CF_OFFSET_PS
0x000288D0 SQ_PGM_CF_OFFSET_VS
0x00028854 SQ_PGM_EXPORTS_PS
0x00028890 SQ_PGM_RESOURCES_ES
0x000288A4 SQ_PGM_RESOURCES_FS
0x0002887C SQ_PGM_RESOURCES_GS
0x00028850 SQ_PGM_RESOURCES_PS
0x00028868 SQ_PGM_RESOURCES_VS
0x00009100 SPI_CONFIG_CNTL
0x0000913C SPI_CONFIG_CNTL_1
0x000286DC SPI_FOG_CNTL
0x000286E4 SPI_FOG_FUNC_BIAS
0x000286E0 SPI_FOG_FUNC_SCALE
0x000286D8 SPI_INPUT_Z
0x000286D4 SPI_INTERP_CONTROL_0
0x00028644 SPI_PS_INPUT_CNTL_0
0x00028648 SPI_PS_INPUT_CNTL_1
0x0002864C SPI_PS_INPUT_CNTL_2
0x00028650 SPI_PS_INPUT_CNTL_3
0x00028654 SPI_PS_INPUT_CNTL_4
0x00028658 SPI_PS_INPUT_CNTL_5
0x0002865C SPI_PS_INPUT_CNTL_6
0x00028660 SPI_PS_INPUT_CNTL_7
0x00028664 SPI_PS_INPUT_CNTL_8
0x00028668 SPI_PS_INPUT_CNTL_9
0x0002866C SPI_PS_INPUT_CNTL_10
0x00028670 SPI_PS_INPUT_CNTL_11
0x00028674 SPI_PS_INPUT_CNTL_12
0x00028678 SPI_PS_INPUT_CNTL_13
0x0002867C SPI_PS_INPUT_CNTL_14
0x00028680 SPI_PS_INPUT_CNTL_15
0x00028684 SPI_PS_INPUT_CNTL_16
0x00028688 SPI_PS_INPUT_CNTL_17
0x0002868C SPI_PS_INPUT_CNTL_18
0x00028690 SPI_PS_INPUT_CNTL_19
0x00028694 SPI_PS_INPUT_CNTL_20
0x00028698 SPI_PS_INPUT_CNTL_21
0x0002869C SPI_PS_INPUT_CNTL_22
0x000286A0 SPI_PS_INPUT_CNTL_23
0x000286A4 SPI_PS_INPUT_CNTL_24
0x000286A8 SPI_PS_INPUT_CNTL_25
0x000286AC SPI_PS_INPUT_CNTL_26
0x000286B0 SPI_PS_INPUT_CNTL_27
0x000286B4 SPI_PS_INPUT_CNTL_28
0x000286B8 SPI_PS_INPUT_CNTL_29
0x000286BC SPI_PS_INPUT_CNTL_30
0x000286C0 SPI_PS_INPUT_CNTL_31
0x000286CC SPI_PS_IN_CONTROL_0
0x000286D0 SPI_PS_IN_CONTROL_1
0x000286C4 SPI_VS_OUT_CONFIG
0x00028614 SPI_VS_OUT_ID_0
0x00028618 SPI_VS_OUT_ID_1
0x0002861C SPI_VS_OUT_ID_2
0x00028620 SPI_VS_OUT_ID_3
0x00028624 SPI_VS_OUT_ID_4
0x00028628 SPI_VS_OUT_ID_5
0x0002862C SPI_VS_OUT_ID_6
0x00028630 SPI_VS_OUT_ID_7
0x00028634 SPI_VS_OUT_ID_8
0x00028638 SPI_VS_OUT_ID_9
0x00028438 SX_ALPHA_REF
0x00028410 SX_ALPHA_TEST_CONTROL
0x00028354 SX_SURFACE_SYNC
0x00009014 SX_MEMORY_EXPORT_SIZE
0x00009604 TC_INVALIDATE
0x00009400 TD_FILTER4
0x00009404 TD_FILTER4_1
0x00009408 TD_FILTER4_2
0x0000940C TD_FILTER4_3
0x00009410 TD_FILTER4_4
0x00009414 TD_FILTER4_5
0x00009418 TD_FILTER4_6
0x0000941C TD_FILTER4_7
0x00009420 TD_FILTER4_8
0x00009424 TD_FILTER4_9
0x00009428 TD_FILTER4_10
0x0000942C TD_FILTER4_11
0x00009430 TD_FILTER4_12
0x00009434 TD_FILTER4_13
0x00009438 TD_FILTER4_14
0x0000943C TD_FILTER4_15
0x00009440 TD_FILTER4_16
0x00009444 TD_FILTER4_17
0x00009448 TD_FILTER4_18
0x0000944C TD_FILTER4_19
0x00009450 TD_FILTER4_20
0x00009454 TD_FILTER4_21
0x00009458 TD_FILTER4_22
0x0000945C TD_FILTER4_23
0x00009460 TD_FILTER4_24
0x00009464 TD_FILTER4_25
0x00009468 TD_FILTER4_26
0x0000946C TD_FILTER4_27
0x00009470 TD_FILTER4_28
0x00009474 TD_FILTER4_29
0x00009478 TD_FILTER4_30
0x0000947C TD_FILTER4_31
0x00009480 TD_FILTER4_32
0x00009484 TD_FILTER4_33
0x00009488 TD_FILTER4_34
0x0000948C TD_FILTER4_35
0x0000A80C TD_GS_SAMPLER0_BORDER_ALPHA
0x0000A81C TD_GS_SAMPLER1_BORDER_ALPHA
0x0000A82C TD_GS_SAMPLER2_BORDER_ALPHA
0x0000A83C TD_GS_SAMPLER3_BORDER_ALPHA
0x0000A84C TD_GS_SAMPLER4_BORDER_ALPHA
0x0000A85C TD_GS_SAMPLER5_BORDER_ALPHA
0x0000A86C TD_GS_SAMPLER6_BORDER_ALPHA
0x0000A87C TD_GS_SAMPLER7_BORDER_ALPHA
0x0000A88C TD_GS_SAMPLER8_BORDER_ALPHA
0x0000A89C TD_GS_SAMPLER9_BORDER_ALPHA
0x0000A8AC TD_GS_SAMPLER10_BORDER_ALPHA
0x0000A8BC TD_GS_SAMPLER11_BORDER_ALPHA
0x0000A8CC TD_GS_SAMPLER12_BORDER_ALPHA
0x0000A8DC TD_GS_SAMPLER13_BORDER_ALPHA
0x0000A8EC TD_GS_SAMPLER14_BORDER_ALPHA
0x0000A8FC TD_GS_SAMPLER15_BORDER_ALPHA
0x0000A90C TD_GS_SAMPLER16_BORDER_ALPHA
0x0000A91C TD_GS_SAMPLER17_BORDER_ALPHA
0x0000A808 TD_GS_SAMPLER0_BORDER_BLUE
0x0000A818 TD_GS_SAMPLER1_BORDER_BLUE
0x0000A828 TD_GS_SAMPLER2_BORDER_BLUE
0x0000A838 TD_GS_SAMPLER3_BORDER_BLUE
0x0000A848 TD_GS_SAMPLER4_BORDER_BLUE
0x0000A858 TD_GS_SAMPLER5_BORDER_BLUE
0x0000A868 TD_GS_SAMPLER6_BORDER_BLUE
0x0000A878 TD_GS_SAMPLER7_BORDER_BLUE
0x0000A888 TD_GS_SAMPLER8_BORDER_BLUE
0x0000A898 TD_GS_SAMPLER9_BORDER_BLUE
0x0000A8A8 TD_GS_SAMPLER10_BORDER_BLUE
0x0000A8B8 TD_GS_SAMPLER11_BORDER_BLUE
0x0000A8C8 TD_GS_SAMPLER12_BORDER_BLUE
0x0000A8D8 TD_GS_SAMPLER13_BORDER_BLUE
0x0000A8E8 TD_GS_SAMPLER14_BORDER_BLUE
0x0000A8F8 TD_GS_SAMPLER15_BORDER_BLUE
0x0000A908 TD_GS_SAMPLER16_BORDER_BLUE
0x0000A918 TD_GS_SAMPLER17_BORDER_BLUE
0x0000A804 TD_GS_SAMPLER0_BORDER_GREEN
0x0000A814 TD_GS_SAMPLER1_BORDER_GREEN
0x0000A824 TD_GS_SAMPLER2_BORDER_GREEN
0x0000A834 TD_GS_SAMPLER3_BORDER_GREEN
0x0000A844 TD_GS_SAMPLER4_BORDER_GREEN
0x0000A854 TD_GS_SAMPLER5_BORDER_GREEN
0x0000A864 TD_GS_SAMPLER6_BORDER_GREEN
0x0000A874 TD_GS_SAMPLER7_BORDER_GREEN
0x0000A884 TD_GS_SAMPLER8_BORDER_GREEN
0x0000A894 TD_GS_SAMPLER9_BORDER_GREEN
0x0000A8A4 TD_GS_SAMPLER10_BORDER_GREEN
0x0000A8B4 TD_GS_SAMPLER11_BORDER_GREEN
0x0000A8C4 TD_GS_SAMPLER12_BORDER_GREEN
0x0000A8D4 TD_GS_SAMPLER13_BORDER_GREEN
0x0000A8E4 TD_GS_SAMPLER14_BORDER_GREEN
0x0000A8F4 TD_GS_SAMPLER15_BORDER_GREEN
0x0000A904 TD_GS_SAMPLER16_BORDER_GREEN
0x0000A914 TD_GS_SAMPLER17_BORDER_GREEN
0x0000A800 TD_GS_SAMPLER0_BORDER_RED
0x0000A810 TD_GS_SAMPLER1_BORDER_RED
0x0000A820 TD_GS_SAMPLER2_BORDER_RED
0x0000A830 TD_GS_SAMPLER3_BORDER_RED
0x0000A840 TD_GS_SAMPLER4_BORDER_RED
0x0000A850 TD_GS_SAMPLER5_BORDER_RED
0x0000A860 TD_GS_SAMPLER6_BORDER_RED
0x0000A870 TD_GS_SAMPLER7_BORDER_RED
0x0000A880 TD_GS_SAMPLER8_BORDER_RED
0x0000A890 TD_GS_SAMPLER9_BORDER_RED
0x0000A8A0 TD_GS_SAMPLER10_BORDER_RED
0x0000A8B0 TD_GS_SAMPLER11_BORDER_RED
0x0000A8C0 TD_GS_SAMPLER12_BORDER_RED
0x0000A8D0 TD_GS_SAMPLER13_BORDER_RED
0x0000A8E0 TD_GS_SAMPLER14_BORDER_RED
0x0000A8F0 TD_GS_SAMPLER15_BORDER_RED
0x0000A900 TD_GS_SAMPLER16_BORDER_RED
0x0000A910 TD_GS_SAMPLER17_BORDER_RED
0x0000A40C TD_PS_SAMPLER0_BORDER_ALPHA
0x0000A41C TD_PS_SAMPLER1_BORDER_ALPHA
0x0000A42C TD_PS_SAMPLER2_BORDER_ALPHA
0x0000A43C TD_PS_SAMPLER3_BORDER_ALPHA
0x0000A44C TD_PS_SAMPLER4_BORDER_ALPHA
0x0000A45C TD_PS_SAMPLER5_BORDER_ALPHA
0x0000A46C TD_PS_SAMPLER6_BORDER_ALPHA
0x0000A47C TD_PS_SAMPLER7_BORDER_ALPHA
0x0000A48C TD_PS_SAMPLER8_BORDER_ALPHA
0x0000A49C TD_PS_SAMPLER9_BORDER_ALPHA
0x0000A4AC TD_PS_SAMPLER10_BORDER_ALPHA
0x0000A4BC TD_PS_SAMPLER11_BORDER_ALPHA
0x0000A4CC TD_PS_SAMPLER12_BORDER_ALPHA
0x0000A4DC TD_PS_SAMPLER13_BORDER_ALPHA
0x0000A4EC TD_PS_SAMPLER14_BORDER_ALPHA
0x0000A4FC TD_PS_SAMPLER15_BORDER_ALPHA
0x0000A50C TD_PS_SAMPLER16_BORDER_ALPHA
0x0000A51C TD_PS_SAMPLER17_BORDER_ALPHA
0x0000A408 TD_PS_SAMPLER0_BORDER_BLUE
0x0000A418 TD_PS_SAMPLER1_BORDER_BLUE
0x0000A428 TD_PS_SAMPLER2_BORDER_BLUE
0x0000A438 TD_PS_SAMPLER3_BORDER_BLUE
0x0000A448 TD_PS_SAMPLER4_BORDER_BLUE
0x0000A458 TD_PS_SAMPLER5_BORDER_BLUE
0x0000A468 TD_PS_SAMPLER6_BORDER_BLUE
0x0000A478 TD_PS_SAMPLER7_BORDER_BLUE
0x0000A488 TD_PS_SAMPLER8_BORDER_BLUE
0x0000A498 TD_PS_SAMPLER9_BORDER_BLUE
0x0000A4A8 TD_PS_SAMPLER10_BORDER_BLUE
0x0000A4B8 TD_PS_SAMPLER11_BORDER_BLUE
0x0000A4C8 TD_PS_SAMPLER12_BORDER_BLUE
0x0000A4D8 TD_PS_SAMPLER13_BORDER_BLUE
0x0000A4E8 TD_PS_SAMPLER14_BORDER_BLUE
0x0000A4F8 TD_PS_SAMPLER15_BORDER_BLUE
0x0000A508 TD_PS_SAMPLER16_BORDER_BLUE
0x0000A518 TD_PS_SAMPLER17_BORDER_BLUE
0x0000A404 TD_PS_SAMPLER0_BORDER_GREEN
0x0000A414 TD_PS_SAMPLER1_BORDER_GREEN
0x0000A424 TD_PS_SAMPLER2_BORDER_GREEN
0x0000A434 TD_PS_SAMPLER3_BORDER_GREEN
0x0000A444 TD_PS_SAMPLER4_BORDER_GREEN
0x0000A454 TD_PS_SAMPLER5_BORDER_GREEN
0x0000A464 TD_PS_SAMPLER6_BORDER_GREEN
0x0000A474 TD_PS_SAMPLER7_BORDER_GREEN
0x0000A484 TD_PS_SAMPLER8_BORDER_GREEN
0x0000A494 TD_PS_SAMPLER9_BORDER_GREEN
0x0000A4A4 TD_PS_SAMPLER10_BORDER_GREEN
0x0000A4B4 TD_PS_SAMPLER11_BORDER_GREEN
0x0000A4C4 TD_PS_SAMPLER12_BORDER_GREEN
0x0000A4D4 TD_PS_SAMPLER13_BORDER_GREEN
0x0000A4E4 TD_PS_SAMPLER14_BORDER_GREEN
0x0000A4F4 TD_PS_SAMPLER15_BORDER_GREEN
0x0000A504 TD_PS_SAMPLER16_BORDER_GREEN
0x0000A514 TD_PS_SAMPLER17_BORDER_GREEN
0x0000A400 TD_PS_SAMPLER0_BORDER_RED
0x0000A410 TD_PS_SAMPLER1_BORDER_RED
0x0000A420 TD_PS_SAMPLER2_BORDER_RED
0x0000A430 TD_PS_SAMPLER3_BORDER_RED
0x0000A440 TD_PS_SAMPLER4_BORDER_RED
0x0000A450 TD_PS_SAMPLER5_BORDER_RED
0x0000A460 TD_PS_SAMPLER6_BORDER_RED
0x0000A470 TD_PS_SAMPLER7_BORDER_RED
0x0000A480 TD_PS_SAMPLER8_BORDER_RED
0x0000A490 TD_PS_SAMPLER9_BORDER_RED
0x0000A4A0 TD_PS_SAMPLER10_BORDER_RED
0x0000A4B0 TD_PS_SAMPLER11_BORDER_RED
0x0000A4C0 TD_PS_SAMPLER12_BORDER_RED
0x0000A4D0 TD_PS_SAMPLER13_BORDER_RED
0x0000A4E0 TD_PS_SAMPLER14_BORDER_RED
0x0000A4F0 TD_PS_SAMPLER15_BORDER_RED
0x0000A500 TD_PS_SAMPLER16_BORDER_RED
0x0000A510 TD_PS_SAMPLER17_BORDER_RED
0x0000AA00 TD_PS_SAMPLER0_CLEARTYPE_KERNEL
0x0000AA04 TD_PS_SAMPLER1_CLEARTYPE_KERNEL
0x0000AA08 TD_PS_SAMPLER2_CLEARTYPE_KERNEL
0x0000AA0C TD_PS_SAMPLER3_CLEARTYPE_KERNEL
0x0000AA10 TD_PS_SAMPLER4_CLEARTYPE_KERNEL
0x0000AA14 TD_PS_SAMPLER5_CLEARTYPE_KERNEL
0x0000AA18 TD_PS_SAMPLER6_CLEARTYPE_KERNEL
0x0000AA1C TD_PS_SAMPLER7_CLEARTYPE_KERNEL
0x0000AA20 TD_PS_SAMPLER8_CLEARTYPE_KERNEL
0x0000AA24 TD_PS_SAMPLER9_CLEARTYPE_KERNEL
0x0000AA28 TD_PS_SAMPLER10_CLEARTYPE_KERNEL
0x0000AA2C TD_PS_SAMPLER11_CLEARTYPE_KERNEL
0x0000AA30 TD_PS_SAMPLER12_CLEARTYPE_KERNEL
0x0000AA34 TD_PS_SAMPLER13_CLEARTYPE_KERNEL
0x0000AA38 TD_PS_SAMPLER14_CLEARTYPE_KERNEL
0x0000AA3C TD_PS_SAMPLER15_CLEARTYPE_KERNEL
0x0000AA40 TD_PS_SAMPLER16_CLEARTYPE_KERNEL
0x0000AA44 TD_PS_SAMPLER17_CLEARTYPE_KERNEL
0x0000A60C TD_VS_SAMPLER0_BORDER_ALPHA
0x0000A61C TD_VS_SAMPLER1_BORDER_ALPHA
0x0000A62C TD_VS_SAMPLER2_BORDER_ALPHA
0x0000A63C TD_VS_SAMPLER3_BORDER_ALPHA
0x0000A64C TD_VS_SAMPLER4_BORDER_ALPHA
0x0000A65C TD_VS_SAMPLER5_BORDER_ALPHA
0x0000A66C TD_VS_SAMPLER6_BORDER_ALPHA
0x0000A67C TD_VS_SAMPLER7_BORDER_ALPHA
0x0000A68C TD_VS_SAMPLER8_BORDER_ALPHA
0x0000A69C TD_VS_SAMPLER9_BORDER_ALPHA
0x0000A6AC TD_VS_SAMPLER10_BORDER_ALPHA
0x0000A6BC TD_VS_SAMPLER11_BORDER_ALPHA
0x0000A6CC TD_VS_SAMPLER12_BORDER_ALPHA
0x0000A6DC TD_VS_SAMPLER13_BORDER_ALPHA
0x0000A6EC TD_VS_SAMPLER14_BORDER_ALPHA
0x0000A6FC TD_VS_SAMPLER15_BORDER_ALPHA
0x0000A70C TD_VS_SAMPLER16_BORDER_ALPHA
0x0000A71C TD_VS_SAMPLER17_BORDER_ALPHA
0x0000A608 TD_VS_SAMPLER0_BORDER_BLUE
0x0000A618 TD_VS_SAMPLER1_BORDER_BLUE
0x0000A628 TD_VS_SAMPLER2_BORDER_BLUE
0x0000A638 TD_VS_SAMPLER3_BORDER_BLUE
0x0000A648 TD_VS_SAMPLER4_BORDER_BLUE
0x0000A658 TD_VS_SAMPLER5_BORDER_BLUE
0x0000A668 TD_VS_SAMPLER6_BORDER_BLUE
0x0000A678 TD_VS_SAMPLER7_BORDER_BLUE
0x0000A688 TD_VS_SAMPLER8_BORDER_BLUE
0x0000A698 TD_VS_SAMPLER9_BORDER_BLUE
0x0000A6A8 TD_VS_SAMPLER10_BORDER_BLUE
0x0000A6B8 TD_VS_SAMPLER11_BORDER_BLUE
0x0000A6C8 TD_VS_SAMPLER12_BORDER_BLUE
0x0000A6D8 TD_VS_SAMPLER13_BORDER_BLUE
0x0000A6E8 TD_VS_SAMPLER14_BORDER_BLUE
0x0000A6F8 TD_VS_SAMPLER15_BORDER_BLUE
0x0000A708 TD_VS_SAMPLER16_BORDER_BLUE
0x0000A718 TD_VS_SAMPLER17_BORDER_BLUE
0x0000A604 TD_VS_SAMPLER0_BORDER_GREEN
0x0000A614 TD_VS_SAMPLER1_BORDER_GREEN
0x0000A624 TD_VS_SAMPLER2_BORDER_GREEN
0x0000A634 TD_VS_SAMPLER3_BORDER_GREEN
0x0000A644 TD_VS_SAMPLER4_BORDER_GREEN
0x0000A654 TD_VS_SAMPLER5_BORDER_GREEN
0x0000A664 TD_VS_SAMPLER6_BORDER_GREEN
0x0000A674 TD_VS_SAMPLER7_BORDER_GREEN
0x0000A684 TD_VS_SAMPLER8_BORDER_GREEN
0x0000A694 TD_VS_SAMPLER9_BORDER_GREEN
0x0000A6A4 TD_VS_SAMPLER10_BORDER_GREEN
0x0000A6B4 TD_VS_SAMPLER11_BORDER_GREEN
0x0000A6C4 TD_VS_SAMPLER12_BORDER_GREEN
0x0000A6D4 TD_VS_SAMPLER13_BORDER_GREEN
0x0000A6E4 TD_VS_SAMPLER14_BORDER_GREEN
0x0000A6F4 TD_VS_SAMPLER15_BORDER_GREEN
0x0000A704 TD_VS_SAMPLER16_BORDER_GREEN
0x0000A714 TD_VS_SAMPLER17_BORDER_GREEN
0x0000A600 TD_VS_SAMPLER0_BORDER_RED
0x0000A610 TD_VS_SAMPLER1_BORDER_RED
0x0000A620 TD_VS_SAMPLER2_BORDER_RED
0x0000A630 TD_VS_SAMPLER3_BORDER_RED
0x0000A640 TD_VS_SAMPLER4_BORDER_RED
0x0000A650 TD_VS_SAMPLER5_BORDER_RED
0x0000A660 TD_VS_SAMPLER6_BORDER_RED
0x0000A670 TD_VS_SAMPLER7_BORDER_RED
0x0000A680 TD_VS_SAMPLER8_BORDER_RED
0x0000A690 TD_VS_SAMPLER9_BORDER_RED
0x0000A6A0 TD_VS_SAMPLER10_BORDER_RED
0x0000A6B0 TD_VS_SAMPLER11_BORDER_RED
0x0000A6C0 TD_VS_SAMPLER12_BORDER_RED
0x0000A6D0 TD_VS_SAMPLER13_BORDER_RED
0x0000A6E0 TD_VS_SAMPLER14_BORDER_RED
0x0000A6F0 TD_VS_SAMPLER15_BORDER_RED
0x0000A700 TD_VS_SAMPLER16_BORDER_RED
0x0000A710 TD_VS_SAMPLER17_BORDER_RED
0x00009508 TA_CNTL_AUX
0x0002802C DB_DEPTH_CLEAR
0x00028D34 DB_PREFETCH_LIMIT
0x00028D30 DB_PRELOAD_CONTROL
0x00028D0C DB_RENDER_CONTROL
0x00028D10 DB_RENDER_OVERRIDE
0x0002880C DB_SHADER_CONTROL
0x00028D28 DB_SRESULTS_COMPARE_STATE0
0x00028D2C DB_SRESULTS_COMPARE_STATE1
0x00028430 DB_STENCILREFMASK
0x00028434 DB_STENCILREFMASK_BF
0x00028028 DB_STENCIL_CLEAR
0x00028780 CB_BLEND0_CONTROL
0x00028784 CB_BLEND1_CONTROL
0x00028788 CB_BLEND2_CONTROL
0x0002878C CB_BLEND3_CONTROL
0x00028790 CB_BLEND4_CONTROL
0x00028794 CB_BLEND5_CONTROL
0x00028798 CB_BLEND6_CONTROL
0x0002879C CB_BLEND7_CONTROL
0x00028804 CB_BLEND_CONTROL
0x00028420 CB_BLEND_ALPHA
0x0002841C CB_BLEND_BLUE
0x00028418 CB_BLEND_GREEN
0x00028414 CB_BLEND_RED
0x0002812C CB_CLEAR_ALPHA
0x00028128 CB_CLEAR_BLUE
0x00028124 CB_CLEAR_GREEN
0x00028120 CB_CLEAR_RED
0x00028C30 CB_CLRCMP_CONTROL
0x00028C38 CB_CLRCMP_DST
0x00028C3C CB_CLRCMP_MSK
0x00028C34 CB_CLRCMP_SRC
0x0002842C CB_FOG_BLUE
0x00028428 CB_FOG_GREEN
0x00028424 CB_FOG_RED
0x00008040 WAIT_UNTIL
0x00009714 VC_ENHANCE
0x00009830 DB_DEBUG
0x00009838 DB_WATERMARKS
0x00028D44 DB_ALPHA_TO_MASK
0x00009700 VC_CNTL

View File

@@ -0,0 +1,30 @@
rn50 0x3294
0x1434 SRC_Y_X
0x1438 DST_Y_X
0x143C DST_HEIGHT_WIDTH
0x146C DP_GUI_MASTER_CNTL
0x1474 BRUSH_Y_X
0x1478 DP_BRUSH_BKGD_CLR
0x147C DP_BRUSH_FRGD_CLR
0x1480 BRUSH_DATA0
0x1484 BRUSH_DATA1
0x1598 DST_WIDTH_HEIGHT
0x15C0 CLR_CMP_CNTL
0x15C4 CLR_CMP_CLR_SRC
0x15C8 CLR_CMP_CLR_DST
0x15CC CLR_CMP_MSK
0x15D8 DP_SRC_FRGD_CLR
0x15DC DP_SRC_BKGD_CLR
0x1600 DST_LINE_START
0x1604 DST_LINE_END
0x1608 DST_LINE_PATCOUNT
0x16C0 DP_CNTL
0x16CC DP_WRITE_MSK
0x16D0 DP_CNTL_XDIR_YDIR_YMAJOR
0x16E8 DEFAULT_SC_BOTTOM_RIGHT
0x16EC SC_TOP_LEFT
0x16F0 SC_BOTTOM_RIGHT
0x16F4 SRC_SC_BOTTOM_RIGHT
0x1714 DSTCACHE_CTLSTAT
0x1720 WAIT_UNTIL
0x172C RBBM_GUICNTL
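
Editor's note, not part of the commit: the register lists added here ("r600 0x9400", "rn50 0x3294", "rs600 0x6d40", "rv515 0x6d40") all share one format -- a header line naming the chip family and the highest checked register offset, followed by one "0xOFFSET NAME" entry per register that the command-stream checker may accept. The sketch below, in the spirit of the Linux radeon mkregtable generator, shows how such a list can be folded into a per-dword bitmap; the input file name "rn50_list.txt" and the output array naming are illustrative assumptions only.

/*
 * Minimal sketch: parse a "chip max_offset" safe-register list and emit a
 * bitmap where bit (offset / 4) marks that dword register as safe.
 */
#include <stdio.h>
#include <stdlib.h>

int main(int argc, char **argv)
{
    char chip[64];
    char line[256];
    unsigned max_off, off;
    unsigned *bm;
    size_t words;
    FILE *f = fopen(argc > 1 ? argv[1] : "rn50_list.txt", "r");   /* assumed file name */

    if (!f || !fgets(line, sizeof(line), f) ||
        sscanf(line, "%63s %x", chip, &max_off) != 2) {
        fprintf(stderr, "bad or missing safe-list header\n");
        return 1;
    }

    /* one bit per 32-bit register, i.e. per 4-byte offset */
    words = ((max_off / 4) + 31) / 32;
    bm = calloc(words, sizeof(*bm));
    if (!bm)
        return 1;

    while (fgets(line, sizeof(line), f)) {
        if (sscanf(line, "%x", &off) != 1 || off >= max_off)
            continue;                 /* skip blanks and out-of-range entries */
        bm[(off / 4) / 32] |= 1u << ((off / 4) & 31);
    }
    fclose(f);

    /* emit a C table; the "_reg_safe_bm" suffix mirrors the driver's naming */
    printf("static const unsigned %s_reg_safe_bm[%zu] = {\n", chip, words);
    for (size_t i = 0; i < words; i++)
        printf("\t0x%08X,%s", bm[i], (i % 4 == 3) ? "\n" : " ");
    printf("\n};\n");

    free(bm);
    return 0;
}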

View File

@@ -0,0 +1,780 @@
rs600 0x6d40
0x1434 SRC_Y_X
0x1438 DST_Y_X
0x143C DST_HEIGHT_WIDTH
0x146C DP_GUI_MASTER_CNTL
0x1474 BRUSH_Y_X
0x1478 DP_BRUSH_BKGD_CLR
0x147C DP_BRUSH_FRGD_CLR
0x1480 BRUSH_DATA0
0x1484 BRUSH_DATA1
0x1598 DST_WIDTH_HEIGHT
0x15C0 CLR_CMP_CNTL
0x15C4 CLR_CMP_CLR_SRC
0x15C8 CLR_CMP_CLR_DST
0x15CC CLR_CMP_MSK
0x15D8 DP_SRC_FRGD_CLR
0x15DC DP_SRC_BKGD_CLR
0x1600 DST_LINE_START
0x1604 DST_LINE_END
0x1608 DST_LINE_PATCOUNT
0x16C0 DP_CNTL
0x16CC DP_WRITE_MSK
0x16D0 DP_CNTL_XDIR_YDIR_YMAJOR
0x16E8 DEFAULT_SC_BOTTOM_RIGHT
0x16EC SC_TOP_LEFT
0x16F0 SC_BOTTOM_RIGHT
0x16F4 SRC_SC_BOTTOM_RIGHT
0x1714 DSTCACHE_CTLSTAT
0x1720 WAIT_UNTIL
0x172C RBBM_GUICNTL
0x1D98 VAP_VPORT_XSCALE
0x1D9C VAP_VPORT_XOFFSET
0x1DA0 VAP_VPORT_YSCALE
0x1DA4 VAP_VPORT_YOFFSET
0x1DA8 VAP_VPORT_ZSCALE
0x1DAC VAP_VPORT_ZOFFSET
0x2080 VAP_CNTL
0x2090 VAP_OUT_VTX_FMT_0
0x2094 VAP_OUT_VTX_FMT_1
0x20B0 VAP_VTE_CNTL
0x2138 VAP_VF_MIN_VTX_INDX
0x2140 VAP_CNTL_STATUS
0x2150 VAP_PROG_STREAM_CNTL_0
0x2154 VAP_PROG_STREAM_CNTL_1
0x2158 VAP_PROG_STREAM_CNTL_2
0x215C VAP_PROG_STREAM_CNTL_3
0x2160 VAP_PROG_STREAM_CNTL_4
0x2164 VAP_PROG_STREAM_CNTL_5
0x2168 VAP_PROG_STREAM_CNTL_6
0x216C VAP_PROG_STREAM_CNTL_7
0x2180 VAP_VTX_STATE_CNTL
0x2184 VAP_VSM_VTX_ASSM
0x2188 VAP_VTX_STATE_IND_REG_0
0x218C VAP_VTX_STATE_IND_REG_1
0x2190 VAP_VTX_STATE_IND_REG_2
0x2194 VAP_VTX_STATE_IND_REG_3
0x2198 VAP_VTX_STATE_IND_REG_4
0x219C VAP_VTX_STATE_IND_REG_5
0x21A0 VAP_VTX_STATE_IND_REG_6
0x21A4 VAP_VTX_STATE_IND_REG_7
0x21A8 VAP_VTX_STATE_IND_REG_8
0x21AC VAP_VTX_STATE_IND_REG_9
0x21B0 VAP_VTX_STATE_IND_REG_10
0x21B4 VAP_VTX_STATE_IND_REG_11
0x21B8 VAP_VTX_STATE_IND_REG_12
0x21BC VAP_VTX_STATE_IND_REG_13
0x21C0 VAP_VTX_STATE_IND_REG_14
0x21C4 VAP_VTX_STATE_IND_REG_15
0x21DC VAP_PSC_SGN_NORM_CNTL
0x21E0 VAP_PROG_STREAM_CNTL_EXT_0
0x21E4 VAP_PROG_STREAM_CNTL_EXT_1
0x21E8 VAP_PROG_STREAM_CNTL_EXT_2
0x21EC VAP_PROG_STREAM_CNTL_EXT_3
0x21F0 VAP_PROG_STREAM_CNTL_EXT_4
0x21F4 VAP_PROG_STREAM_CNTL_EXT_5
0x21F8 VAP_PROG_STREAM_CNTL_EXT_6
0x21FC VAP_PROG_STREAM_CNTL_EXT_7
0x2200 VAP_PVS_VECTOR_INDX_REG
0x2204 VAP_PVS_VECTOR_DATA_REG
0x2208 VAP_PVS_VECTOR_DATA_REG_128
0x221C VAP_CLIP_CNTL
0x2220 VAP_GB_VERT_CLIP_ADJ
0x2224 VAP_GB_VERT_DISC_ADJ
0x2228 VAP_GB_HORZ_CLIP_ADJ
0x222C VAP_GB_HORZ_DISC_ADJ
0x2230 VAP_PVS_FLOW_CNTL_ADDRS_0
0x2234 VAP_PVS_FLOW_CNTL_ADDRS_1
0x2238 VAP_PVS_FLOW_CNTL_ADDRS_2
0x223C VAP_PVS_FLOW_CNTL_ADDRS_3
0x2240 VAP_PVS_FLOW_CNTL_ADDRS_4
0x2244 VAP_PVS_FLOW_CNTL_ADDRS_5
0x2248 VAP_PVS_FLOW_CNTL_ADDRS_6
0x224C VAP_PVS_FLOW_CNTL_ADDRS_7
0x2250 VAP_PVS_FLOW_CNTL_ADDRS_8
0x2254 VAP_PVS_FLOW_CNTL_ADDRS_9
0x2258 VAP_PVS_FLOW_CNTL_ADDRS_10
0x225C VAP_PVS_FLOW_CNTL_ADDRS_11
0x2260 VAP_PVS_FLOW_CNTL_ADDRS_12
0x2264 VAP_PVS_FLOW_CNTL_ADDRS_13
0x2268 VAP_PVS_FLOW_CNTL_ADDRS_14
0x226C VAP_PVS_FLOW_CNTL_ADDRS_15
0x2284 VAP_PVS_STATE_FLUSH_REG
0x2288 VAP_PVS_VTX_TIMEOUT_REG
0x2290 VAP_PVS_FLOW_CNTL_LOOP_INDEX_0
0x2294 VAP_PVS_FLOW_CNTL_LOOP_INDEX_1
0x2298 VAP_PVS_FLOW_CNTL_LOOP_INDEX_2
0x229C VAP_PVS_FLOW_CNTL_LOOP_INDEX_3
0x22A0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_4
0x22A4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_5
0x22A8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_6
0x22AC VAP_PVS_FLOW_CNTL_LOOP_INDEX_7
0x22B0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_8
0x22B4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_9
0x22B8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_10
0x22BC VAP_PVS_FLOW_CNTL_LOOP_INDEX_11
0x22C0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_12
0x22C4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_13
0x22C8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_14
0x22CC VAP_PVS_FLOW_CNTL_LOOP_INDEX_15
0x22D0 VAP_PVS_CODE_CNTL_0
0x22D4 VAP_PVS_CONST_CNTL
0x22D8 VAP_PVS_CODE_CNTL_1
0x22DC VAP_PVS_FLOW_CNTL_OPC
0x342C RB2D_DSTCACHE_CTLSTAT
0x4000 GB_VAP_RASTER_VTX_FMT_0
0x4004 GB_VAP_RASTER_VTX_FMT_1
0x4008 GB_ENABLE
0x4010 GB_MSPOS0
0x4014 GB_MSPOS1
0x401C GB_SELECT
0x4020 GB_AA_CONFIG
0x4024 GB_FIFO_SIZE
0x4100 TX_INVALTAGS
0x4200 GA_POINT_S0
0x4204 GA_POINT_T0
0x4208 GA_POINT_S1
0x420C GA_POINT_T1
0x4214 GA_TRIANGLE_STIPPLE
0x421C GA_POINT_SIZE
0x4230 GA_POINT_MINMAX
0x4234 GA_LINE_CNTL
0x4238 GA_LINE_STIPPLE_CONFIG
0x4260 GA_LINE_STIPPLE_VALUE
0x4264 GA_LINE_S0
0x4268 GA_LINE_S1
0x4278 GA_COLOR_CONTROL
0x427C GA_SOLID_RG
0x4280 GA_SOLID_BA
0x4288 GA_POLY_MODE
0x428C GA_ROUND_MODE
0x4290 GA_OFFSET
0x4294 GA_FOG_SCALE
0x4298 GA_FOG_OFFSET
0x42A0 SU_TEX_WRAP
0x42A4 SU_POLY_OFFSET_FRONT_SCALE
0x42A8 SU_POLY_OFFSET_FRONT_OFFSET
0x42AC SU_POLY_OFFSET_BACK_SCALE
0x42B0 SU_POLY_OFFSET_BACK_OFFSET
0x42B4 SU_POLY_OFFSET_ENABLE
0x42B8 SU_CULL_MODE
0x42C0 SU_DEPTH_SCALE
0x42C4 SU_DEPTH_OFFSET
0x42C8 SU_REG_DEST
0x4300 RS_COUNT
0x4304 RS_INST_COUNT
0x4310 RS_IP_0
0x4314 RS_IP_1
0x4318 RS_IP_2
0x431C RS_IP_3
0x4320 RS_IP_4
0x4324 RS_IP_5
0x4328 RS_IP_6
0x432C RS_IP_7
0x4330 RS_INST_0
0x4334 RS_INST_1
0x4338 RS_INST_2
0x433C RS_INST_3
0x4340 RS_INST_4
0x4344 RS_INST_5
0x4348 RS_INST_6
0x434C RS_INST_7
0x4350 RS_INST_8
0x4354 RS_INST_9
0x4358 RS_INST_10
0x435C RS_INST_11
0x4360 RS_INST_12
0x4364 RS_INST_13
0x4368 RS_INST_14
0x436C RS_INST_15
0x43A8 SC_EDGERULE
0x43B0 SC_CLIP_0_A
0x43B4 SC_CLIP_0_B
0x43B8 SC_CLIP_1_A
0x43BC SC_CLIP_1_B
0x43C0 SC_CLIP_2_A
0x43C4 SC_CLIP_2_B
0x43C8 SC_CLIP_3_A
0x43CC SC_CLIP_3_B
0x43D0 SC_CLIP_RULE
0x43E0 SC_SCISSOR0
0x43E8 SC_SCREENDOOR
0x4440 TX_FILTER1_0
0x4444 TX_FILTER1_1
0x4448 TX_FILTER1_2
0x444C TX_FILTER1_3
0x4450 TX_FILTER1_4
0x4454 TX_FILTER1_5
0x4458 TX_FILTER1_6
0x445C TX_FILTER1_7
0x4460 TX_FILTER1_8
0x4464 TX_FILTER1_9
0x4468 TX_FILTER1_10
0x446C TX_FILTER1_11
0x4470 TX_FILTER1_12
0x4474 TX_FILTER1_13
0x4478 TX_FILTER1_14
0x447C TX_FILTER1_15
0x4580 TX_CHROMA_KEY_0
0x4584 TX_CHROMA_KEY_1
0x4588 TX_CHROMA_KEY_2
0x458C TX_CHROMA_KEY_3
0x4590 TX_CHROMA_KEY_4
0x4594 TX_CHROMA_KEY_5
0x4598 TX_CHROMA_KEY_6
0x459C TX_CHROMA_KEY_7
0x45A0 TX_CHROMA_KEY_8
0x45A4 TX_CHROMA_KEY_9
0x45A8 TX_CHROMA_KEY_10
0x45AC TX_CHROMA_KEY_11
0x45B0 TX_CHROMA_KEY_12
0x45B4 TX_CHROMA_KEY_13
0x45B8 TX_CHROMA_KEY_14
0x45BC TX_CHROMA_KEY_15
0x45C0 TX_BORDER_COLOR_0
0x45C4 TX_BORDER_COLOR_1
0x45C8 TX_BORDER_COLOR_2
0x45CC TX_BORDER_COLOR_3
0x45D0 TX_BORDER_COLOR_4
0x45D4 TX_BORDER_COLOR_5
0x45D8 TX_BORDER_COLOR_6
0x45DC TX_BORDER_COLOR_7
0x45E0 TX_BORDER_COLOR_8
0x45E4 TX_BORDER_COLOR_9
0x45E8 TX_BORDER_COLOR_10
0x45EC TX_BORDER_COLOR_11
0x45F0 TX_BORDER_COLOR_12
0x45F4 TX_BORDER_COLOR_13
0x45F8 TX_BORDER_COLOR_14
0x45FC TX_BORDER_COLOR_15
0x4600 US_CONFIG
0x4604 US_PIXSIZE
0x4608 US_CODE_OFFSET
0x460C US_RESET
0x4610 US_CODE_ADDR_0
0x4614 US_CODE_ADDR_1
0x4618 US_CODE_ADDR_2
0x461C US_CODE_ADDR_3
0x4620 US_TEX_INST_0
0x4624 US_TEX_INST_1
0x4628 US_TEX_INST_2
0x462C US_TEX_INST_3
0x4630 US_TEX_INST_4
0x4634 US_TEX_INST_5
0x4638 US_TEX_INST_6
0x463C US_TEX_INST_7
0x4640 US_TEX_INST_8
0x4644 US_TEX_INST_9
0x4648 US_TEX_INST_10
0x464C US_TEX_INST_11
0x4650 US_TEX_INST_12
0x4654 US_TEX_INST_13
0x4658 US_TEX_INST_14
0x465C US_TEX_INST_15
0x4660 US_TEX_INST_16
0x4664 US_TEX_INST_17
0x4668 US_TEX_INST_18
0x466C US_TEX_INST_19
0x4670 US_TEX_INST_20
0x4674 US_TEX_INST_21
0x4678 US_TEX_INST_22
0x467C US_TEX_INST_23
0x4680 US_TEX_INST_24
0x4684 US_TEX_INST_25
0x4688 US_TEX_INST_26
0x468C US_TEX_INST_27
0x4690 US_TEX_INST_28
0x4694 US_TEX_INST_29
0x4698 US_TEX_INST_30
0x469C US_TEX_INST_31
0x46A4 US_OUT_FMT_0
0x46A8 US_OUT_FMT_1
0x46AC US_OUT_FMT_2
0x46B0 US_OUT_FMT_3
0x46B4 US_W_FMT
0x46B8 US_CODE_BANK
0x46BC US_CODE_EXT
0x46C0 US_ALU_RGB_ADDR_0
0x46C4 US_ALU_RGB_ADDR_1
0x46C8 US_ALU_RGB_ADDR_2
0x46CC US_ALU_RGB_ADDR_3
0x46D0 US_ALU_RGB_ADDR_4
0x46D4 US_ALU_RGB_ADDR_5
0x46D8 US_ALU_RGB_ADDR_6
0x46DC US_ALU_RGB_ADDR_7
0x46E0 US_ALU_RGB_ADDR_8
0x46E4 US_ALU_RGB_ADDR_9
0x46E8 US_ALU_RGB_ADDR_10
0x46EC US_ALU_RGB_ADDR_11
0x46F0 US_ALU_RGB_ADDR_12
0x46F4 US_ALU_RGB_ADDR_13
0x46F8 US_ALU_RGB_ADDR_14
0x46FC US_ALU_RGB_ADDR_15
0x4700 US_ALU_RGB_ADDR_16
0x4704 US_ALU_RGB_ADDR_17
0x4708 US_ALU_RGB_ADDR_18
0x470C US_ALU_RGB_ADDR_19
0x4710 US_ALU_RGB_ADDR_20
0x4714 US_ALU_RGB_ADDR_21
0x4718 US_ALU_RGB_ADDR_22
0x471C US_ALU_RGB_ADDR_23
0x4720 US_ALU_RGB_ADDR_24
0x4724 US_ALU_RGB_ADDR_25
0x4728 US_ALU_RGB_ADDR_26
0x472C US_ALU_RGB_ADDR_27
0x4730 US_ALU_RGB_ADDR_28
0x4734 US_ALU_RGB_ADDR_29
0x4738 US_ALU_RGB_ADDR_30
0x473C US_ALU_RGB_ADDR_31
0x4740 US_ALU_RGB_ADDR_32
0x4744 US_ALU_RGB_ADDR_33
0x4748 US_ALU_RGB_ADDR_34
0x474C US_ALU_RGB_ADDR_35
0x4750 US_ALU_RGB_ADDR_36
0x4754 US_ALU_RGB_ADDR_37
0x4758 US_ALU_RGB_ADDR_38
0x475C US_ALU_RGB_ADDR_39
0x4760 US_ALU_RGB_ADDR_40
0x4764 US_ALU_RGB_ADDR_41
0x4768 US_ALU_RGB_ADDR_42
0x476C US_ALU_RGB_ADDR_43
0x4770 US_ALU_RGB_ADDR_44
0x4774 US_ALU_RGB_ADDR_45
0x4778 US_ALU_RGB_ADDR_46
0x477C US_ALU_RGB_ADDR_47
0x4780 US_ALU_RGB_ADDR_48
0x4784 US_ALU_RGB_ADDR_49
0x4788 US_ALU_RGB_ADDR_50
0x478C US_ALU_RGB_ADDR_51
0x4790 US_ALU_RGB_ADDR_52
0x4794 US_ALU_RGB_ADDR_53
0x4798 US_ALU_RGB_ADDR_54
0x479C US_ALU_RGB_ADDR_55
0x47A0 US_ALU_RGB_ADDR_56
0x47A4 US_ALU_RGB_ADDR_57
0x47A8 US_ALU_RGB_ADDR_58
0x47AC US_ALU_RGB_ADDR_59
0x47B0 US_ALU_RGB_ADDR_60
0x47B4 US_ALU_RGB_ADDR_61
0x47B8 US_ALU_RGB_ADDR_62
0x47BC US_ALU_RGB_ADDR_63
0x47C0 US_ALU_ALPHA_ADDR_0
0x47C4 US_ALU_ALPHA_ADDR_1
0x47C8 US_ALU_ALPHA_ADDR_2
0x47CC US_ALU_ALPHA_ADDR_3
0x47D0 US_ALU_ALPHA_ADDR_4
0x47D4 US_ALU_ALPHA_ADDR_5
0x47D8 US_ALU_ALPHA_ADDR_6
0x47DC US_ALU_ALPHA_ADDR_7
0x47E0 US_ALU_ALPHA_ADDR_8
0x47E4 US_ALU_ALPHA_ADDR_9
0x47E8 US_ALU_ALPHA_ADDR_10
0x47EC US_ALU_ALPHA_ADDR_11
0x47F0 US_ALU_ALPHA_ADDR_12
0x47F4 US_ALU_ALPHA_ADDR_13
0x47F8 US_ALU_ALPHA_ADDR_14
0x47FC US_ALU_ALPHA_ADDR_15
0x4800 US_ALU_ALPHA_ADDR_16
0x4804 US_ALU_ALPHA_ADDR_17
0x4808 US_ALU_ALPHA_ADDR_18
0x480C US_ALU_ALPHA_ADDR_19
0x4810 US_ALU_ALPHA_ADDR_20
0x4814 US_ALU_ALPHA_ADDR_21
0x4818 US_ALU_ALPHA_ADDR_22
0x481C US_ALU_ALPHA_ADDR_23
0x4820 US_ALU_ALPHA_ADDR_24
0x4824 US_ALU_ALPHA_ADDR_25
0x4828 US_ALU_ALPHA_ADDR_26
0x482C US_ALU_ALPHA_ADDR_27
0x4830 US_ALU_ALPHA_ADDR_28
0x4834 US_ALU_ALPHA_ADDR_29
0x4838 US_ALU_ALPHA_ADDR_30
0x483C US_ALU_ALPHA_ADDR_31
0x4840 US_ALU_ALPHA_ADDR_32
0x4844 US_ALU_ALPHA_ADDR_33
0x4848 US_ALU_ALPHA_ADDR_34
0x484C US_ALU_ALPHA_ADDR_35
0x4850 US_ALU_ALPHA_ADDR_36
0x4854 US_ALU_ALPHA_ADDR_37
0x4858 US_ALU_ALPHA_ADDR_38
0x485C US_ALU_ALPHA_ADDR_39
0x4860 US_ALU_ALPHA_ADDR_40
0x4864 US_ALU_ALPHA_ADDR_41
0x4868 US_ALU_ALPHA_ADDR_42
0x486C US_ALU_ALPHA_ADDR_43
0x4870 US_ALU_ALPHA_ADDR_44
0x4874 US_ALU_ALPHA_ADDR_45
0x4878 US_ALU_ALPHA_ADDR_46
0x487C US_ALU_ALPHA_ADDR_47
0x4880 US_ALU_ALPHA_ADDR_48
0x4884 US_ALU_ALPHA_ADDR_49
0x4888 US_ALU_ALPHA_ADDR_50
0x488C US_ALU_ALPHA_ADDR_51
0x4890 US_ALU_ALPHA_ADDR_52
0x4894 US_ALU_ALPHA_ADDR_53
0x4898 US_ALU_ALPHA_ADDR_54
0x489C US_ALU_ALPHA_ADDR_55
0x48A0 US_ALU_ALPHA_ADDR_56
0x48A4 US_ALU_ALPHA_ADDR_57
0x48A8 US_ALU_ALPHA_ADDR_58
0x48AC US_ALU_ALPHA_ADDR_59
0x48B0 US_ALU_ALPHA_ADDR_60
0x48B4 US_ALU_ALPHA_ADDR_61
0x48B8 US_ALU_ALPHA_ADDR_62
0x48BC US_ALU_ALPHA_ADDR_63
0x48C0 US_ALU_RGB_INST_0
0x48C4 US_ALU_RGB_INST_1
0x48C8 US_ALU_RGB_INST_2
0x48CC US_ALU_RGB_INST_3
0x48D0 US_ALU_RGB_INST_4
0x48D4 US_ALU_RGB_INST_5
0x48D8 US_ALU_RGB_INST_6
0x48DC US_ALU_RGB_INST_7
0x48E0 US_ALU_RGB_INST_8
0x48E4 US_ALU_RGB_INST_9
0x48E8 US_ALU_RGB_INST_10
0x48EC US_ALU_RGB_INST_11
0x48F0 US_ALU_RGB_INST_12
0x48F4 US_ALU_RGB_INST_13
0x48F8 US_ALU_RGB_INST_14
0x48FC US_ALU_RGB_INST_15
0x4900 US_ALU_RGB_INST_16
0x4904 US_ALU_RGB_INST_17
0x4908 US_ALU_RGB_INST_18
0x490C US_ALU_RGB_INST_19
0x4910 US_ALU_RGB_INST_20
0x4914 US_ALU_RGB_INST_21
0x4918 US_ALU_RGB_INST_22
0x491C US_ALU_RGB_INST_23
0x4920 US_ALU_RGB_INST_24
0x4924 US_ALU_RGB_INST_25
0x4928 US_ALU_RGB_INST_26
0x492C US_ALU_RGB_INST_27
0x4930 US_ALU_RGB_INST_28
0x4934 US_ALU_RGB_INST_29
0x4938 US_ALU_RGB_INST_30
0x493C US_ALU_RGB_INST_31
0x4940 US_ALU_RGB_INST_32
0x4944 US_ALU_RGB_INST_33
0x4948 US_ALU_RGB_INST_34
0x494C US_ALU_RGB_INST_35
0x4950 US_ALU_RGB_INST_36
0x4954 US_ALU_RGB_INST_37
0x4958 US_ALU_RGB_INST_38
0x495C US_ALU_RGB_INST_39
0x4960 US_ALU_RGB_INST_40
0x4964 US_ALU_RGB_INST_41
0x4968 US_ALU_RGB_INST_42
0x496C US_ALU_RGB_INST_43
0x4970 US_ALU_RGB_INST_44
0x4974 US_ALU_RGB_INST_45
0x4978 US_ALU_RGB_INST_46
0x497C US_ALU_RGB_INST_47
0x4980 US_ALU_RGB_INST_48
0x4984 US_ALU_RGB_INST_49
0x4988 US_ALU_RGB_INST_50
0x498C US_ALU_RGB_INST_51
0x4990 US_ALU_RGB_INST_52
0x4994 US_ALU_RGB_INST_53
0x4998 US_ALU_RGB_INST_54
0x499C US_ALU_RGB_INST_55
0x49A0 US_ALU_RGB_INST_56
0x49A4 US_ALU_RGB_INST_57
0x49A8 US_ALU_RGB_INST_58
0x49AC US_ALU_RGB_INST_59
0x49B0 US_ALU_RGB_INST_60
0x49B4 US_ALU_RGB_INST_61
0x49B8 US_ALU_RGB_INST_62
0x49BC US_ALU_RGB_INST_63
0x49C0 US_ALU_ALPHA_INST_0
0x49C4 US_ALU_ALPHA_INST_1
0x49C8 US_ALU_ALPHA_INST_2
0x49CC US_ALU_ALPHA_INST_3
0x49D0 US_ALU_ALPHA_INST_4
0x49D4 US_ALU_ALPHA_INST_5
0x49D8 US_ALU_ALPHA_INST_6
0x49DC US_ALU_ALPHA_INST_7
0x49E0 US_ALU_ALPHA_INST_8
0x49E4 US_ALU_ALPHA_INST_9
0x49E8 US_ALU_ALPHA_INST_10
0x49EC US_ALU_ALPHA_INST_11
0x49F0 US_ALU_ALPHA_INST_12
0x49F4 US_ALU_ALPHA_INST_13
0x49F8 US_ALU_ALPHA_INST_14
0x49FC US_ALU_ALPHA_INST_15
0x4A00 US_ALU_ALPHA_INST_16
0x4A04 US_ALU_ALPHA_INST_17
0x4A08 US_ALU_ALPHA_INST_18
0x4A0C US_ALU_ALPHA_INST_19
0x4A10 US_ALU_ALPHA_INST_20
0x4A14 US_ALU_ALPHA_INST_21
0x4A18 US_ALU_ALPHA_INST_22
0x4A1C US_ALU_ALPHA_INST_23
0x4A20 US_ALU_ALPHA_INST_24
0x4A24 US_ALU_ALPHA_INST_25
0x4A28 US_ALU_ALPHA_INST_26
0x4A2C US_ALU_ALPHA_INST_27
0x4A30 US_ALU_ALPHA_INST_28
0x4A34 US_ALU_ALPHA_INST_29
0x4A38 US_ALU_ALPHA_INST_30
0x4A3C US_ALU_ALPHA_INST_31
0x4A40 US_ALU_ALPHA_INST_32
0x4A44 US_ALU_ALPHA_INST_33
0x4A48 US_ALU_ALPHA_INST_34
0x4A4C US_ALU_ALPHA_INST_35
0x4A50 US_ALU_ALPHA_INST_36
0x4A54 US_ALU_ALPHA_INST_37
0x4A58 US_ALU_ALPHA_INST_38
0x4A5C US_ALU_ALPHA_INST_39
0x4A60 US_ALU_ALPHA_INST_40
0x4A64 US_ALU_ALPHA_INST_41
0x4A68 US_ALU_ALPHA_INST_42
0x4A6C US_ALU_ALPHA_INST_43
0x4A70 US_ALU_ALPHA_INST_44
0x4A74 US_ALU_ALPHA_INST_45
0x4A78 US_ALU_ALPHA_INST_46
0x4A7C US_ALU_ALPHA_INST_47
0x4A80 US_ALU_ALPHA_INST_48
0x4A84 US_ALU_ALPHA_INST_49
0x4A88 US_ALU_ALPHA_INST_50
0x4A8C US_ALU_ALPHA_INST_51
0x4A90 US_ALU_ALPHA_INST_52
0x4A94 US_ALU_ALPHA_INST_53
0x4A98 US_ALU_ALPHA_INST_54
0x4A9C US_ALU_ALPHA_INST_55
0x4AA0 US_ALU_ALPHA_INST_56
0x4AA4 US_ALU_ALPHA_INST_57
0x4AA8 US_ALU_ALPHA_INST_58
0x4AAC US_ALU_ALPHA_INST_59
0x4AB0 US_ALU_ALPHA_INST_60
0x4AB4 US_ALU_ALPHA_INST_61
0x4AB8 US_ALU_ALPHA_INST_62
0x4ABC US_ALU_ALPHA_INST_63
0x4AC0 US_ALU_EXT_ADDR_0
0x4AC4 US_ALU_EXT_ADDR_1
0x4AC8 US_ALU_EXT_ADDR_2
0x4ACC US_ALU_EXT_ADDR_3
0x4AD0 US_ALU_EXT_ADDR_4
0x4AD4 US_ALU_EXT_ADDR_5
0x4AD8 US_ALU_EXT_ADDR_6
0x4ADC US_ALU_EXT_ADDR_7
0x4AE0 US_ALU_EXT_ADDR_8
0x4AE4 US_ALU_EXT_ADDR_9
0x4AE8 US_ALU_EXT_ADDR_10
0x4AEC US_ALU_EXT_ADDR_11
0x4AF0 US_ALU_EXT_ADDR_12
0x4AF4 US_ALU_EXT_ADDR_13
0x4AF8 US_ALU_EXT_ADDR_14
0x4AFC US_ALU_EXT_ADDR_15
0x4B00 US_ALU_EXT_ADDR_16
0x4B04 US_ALU_EXT_ADDR_17
0x4B08 US_ALU_EXT_ADDR_18
0x4B0C US_ALU_EXT_ADDR_19
0x4B10 US_ALU_EXT_ADDR_20
0x4B14 US_ALU_EXT_ADDR_21
0x4B18 US_ALU_EXT_ADDR_22
0x4B1C US_ALU_EXT_ADDR_23
0x4B20 US_ALU_EXT_ADDR_24
0x4B24 US_ALU_EXT_ADDR_25
0x4B28 US_ALU_EXT_ADDR_26
0x4B2C US_ALU_EXT_ADDR_27
0x4B30 US_ALU_EXT_ADDR_28
0x4B34 US_ALU_EXT_ADDR_29
0x4B38 US_ALU_EXT_ADDR_30
0x4B3C US_ALU_EXT_ADDR_31
0x4B40 US_ALU_EXT_ADDR_32
0x4B44 US_ALU_EXT_ADDR_33
0x4B48 US_ALU_EXT_ADDR_34
0x4B4C US_ALU_EXT_ADDR_35
0x4B50 US_ALU_EXT_ADDR_36
0x4B54 US_ALU_EXT_ADDR_37
0x4B58 US_ALU_EXT_ADDR_38
0x4B5C US_ALU_EXT_ADDR_39
0x4B60 US_ALU_EXT_ADDR_40
0x4B64 US_ALU_EXT_ADDR_41
0x4B68 US_ALU_EXT_ADDR_42
0x4B6C US_ALU_EXT_ADDR_43
0x4B70 US_ALU_EXT_ADDR_44
0x4B74 US_ALU_EXT_ADDR_45
0x4B78 US_ALU_EXT_ADDR_46
0x4B7C US_ALU_EXT_ADDR_47
0x4B80 US_ALU_EXT_ADDR_48
0x4B84 US_ALU_EXT_ADDR_49
0x4B88 US_ALU_EXT_ADDR_50
0x4B8C US_ALU_EXT_ADDR_51
0x4B90 US_ALU_EXT_ADDR_52
0x4B94 US_ALU_EXT_ADDR_53
0x4B98 US_ALU_EXT_ADDR_54
0x4B9C US_ALU_EXT_ADDR_55
0x4BA0 US_ALU_EXT_ADDR_56
0x4BA4 US_ALU_EXT_ADDR_57
0x4BA8 US_ALU_EXT_ADDR_58
0x4BAC US_ALU_EXT_ADDR_59
0x4BB0 US_ALU_EXT_ADDR_60
0x4BB4 US_ALU_EXT_ADDR_61
0x4BB8 US_ALU_EXT_ADDR_62
0x4BBC US_ALU_EXT_ADDR_63
0x4BC0 FG_FOG_BLEND
0x4BC4 FG_FOG_FACTOR
0x4BC8 FG_FOG_COLOR_R
0x4BCC FG_FOG_COLOR_G
0x4BD0 FG_FOG_COLOR_B
0x4BD4 FG_ALPHA_FUNC
0x4BD8 FG_DEPTH_SRC
0x4C00 US_ALU_CONST_R_0
0x4C04 US_ALU_CONST_G_0
0x4C08 US_ALU_CONST_B_0
0x4C0C US_ALU_CONST_A_0
0x4C10 US_ALU_CONST_R_1
0x4C14 US_ALU_CONST_G_1
0x4C18 US_ALU_CONST_B_1
0x4C1C US_ALU_CONST_A_1
0x4C20 US_ALU_CONST_R_2
0x4C24 US_ALU_CONST_G_2
0x4C28 US_ALU_CONST_B_2
0x4C2C US_ALU_CONST_A_2
0x4C30 US_ALU_CONST_R_3
0x4C34 US_ALU_CONST_G_3
0x4C38 US_ALU_CONST_B_3
0x4C3C US_ALU_CONST_A_3
0x4C40 US_ALU_CONST_R_4
0x4C44 US_ALU_CONST_G_4
0x4C48 US_ALU_CONST_B_4
0x4C4C US_ALU_CONST_A_4
0x4C50 US_ALU_CONST_R_5
0x4C54 US_ALU_CONST_G_5
0x4C58 US_ALU_CONST_B_5
0x4C5C US_ALU_CONST_A_5
0x4C60 US_ALU_CONST_R_6
0x4C64 US_ALU_CONST_G_6
0x4C68 US_ALU_CONST_B_6
0x4C6C US_ALU_CONST_A_6
0x4C70 US_ALU_CONST_R_7
0x4C74 US_ALU_CONST_G_7
0x4C78 US_ALU_CONST_B_7
0x4C7C US_ALU_CONST_A_7
0x4C80 US_ALU_CONST_R_8
0x4C84 US_ALU_CONST_G_8
0x4C88 US_ALU_CONST_B_8
0x4C8C US_ALU_CONST_A_8
0x4C90 US_ALU_CONST_R_9
0x4C94 US_ALU_CONST_G_9
0x4C98 US_ALU_CONST_B_9
0x4C9C US_ALU_CONST_A_9
0x4CA0 US_ALU_CONST_R_10
0x4CA4 US_ALU_CONST_G_10
0x4CA8 US_ALU_CONST_B_10
0x4CAC US_ALU_CONST_A_10
0x4CB0 US_ALU_CONST_R_11
0x4CB4 US_ALU_CONST_G_11
0x4CB8 US_ALU_CONST_B_11
0x4CBC US_ALU_CONST_A_11
0x4CC0 US_ALU_CONST_R_12
0x4CC4 US_ALU_CONST_G_12
0x4CC8 US_ALU_CONST_B_12
0x4CCC US_ALU_CONST_A_12
0x4CD0 US_ALU_CONST_R_13
0x4CD4 US_ALU_CONST_G_13
0x4CD8 US_ALU_CONST_B_13
0x4CDC US_ALU_CONST_A_13
0x4CE0 US_ALU_CONST_R_14
0x4CE4 US_ALU_CONST_G_14
0x4CE8 US_ALU_CONST_B_14
0x4CEC US_ALU_CONST_A_14
0x4CF0 US_ALU_CONST_R_15
0x4CF4 US_ALU_CONST_G_15
0x4CF8 US_ALU_CONST_B_15
0x4CFC US_ALU_CONST_A_15
0x4D00 US_ALU_CONST_R_16
0x4D04 US_ALU_CONST_G_16
0x4D08 US_ALU_CONST_B_16
0x4D0C US_ALU_CONST_A_16
0x4D10 US_ALU_CONST_R_17
0x4D14 US_ALU_CONST_G_17
0x4D18 US_ALU_CONST_B_17
0x4D1C US_ALU_CONST_A_17
0x4D20 US_ALU_CONST_R_18
0x4D24 US_ALU_CONST_G_18
0x4D28 US_ALU_CONST_B_18
0x4D2C US_ALU_CONST_A_18
0x4D30 US_ALU_CONST_R_19
0x4D34 US_ALU_CONST_G_19
0x4D38 US_ALU_CONST_B_19
0x4D3C US_ALU_CONST_A_19
0x4D40 US_ALU_CONST_R_20
0x4D44 US_ALU_CONST_G_20
0x4D48 US_ALU_CONST_B_20
0x4D4C US_ALU_CONST_A_20
0x4D50 US_ALU_CONST_R_21
0x4D54 US_ALU_CONST_G_21
0x4D58 US_ALU_CONST_B_21
0x4D5C US_ALU_CONST_A_21
0x4D60 US_ALU_CONST_R_22
0x4D64 US_ALU_CONST_G_22
0x4D68 US_ALU_CONST_B_22
0x4D6C US_ALU_CONST_A_22
0x4D70 US_ALU_CONST_R_23
0x4D74 US_ALU_CONST_G_23
0x4D78 US_ALU_CONST_B_23
0x4D7C US_ALU_CONST_A_23
0x4D80 US_ALU_CONST_R_24
0x4D84 US_ALU_CONST_G_24
0x4D88 US_ALU_CONST_B_24
0x4D8C US_ALU_CONST_A_24
0x4D90 US_ALU_CONST_R_25
0x4D94 US_ALU_CONST_G_25
0x4D98 US_ALU_CONST_B_25
0x4D9C US_ALU_CONST_A_25
0x4DA0 US_ALU_CONST_R_26
0x4DA4 US_ALU_CONST_G_26
0x4DA8 US_ALU_CONST_B_26
0x4DAC US_ALU_CONST_A_26
0x4DB0 US_ALU_CONST_R_27
0x4DB4 US_ALU_CONST_G_27
0x4DB8 US_ALU_CONST_B_27
0x4DBC US_ALU_CONST_A_27
0x4DC0 US_ALU_CONST_R_28
0x4DC4 US_ALU_CONST_G_28
0x4DC8 US_ALU_CONST_B_28
0x4DCC US_ALU_CONST_A_28
0x4DD0 US_ALU_CONST_R_29
0x4DD4 US_ALU_CONST_G_29
0x4DD8 US_ALU_CONST_B_29
0x4DDC US_ALU_CONST_A_29
0x4DE0 US_ALU_CONST_R_30
0x4DE4 US_ALU_CONST_G_30
0x4DE8 US_ALU_CONST_B_30
0x4DEC US_ALU_CONST_A_30
0x4DF0 US_ALU_CONST_R_31
0x4DF4 US_ALU_CONST_G_31
0x4DF8 US_ALU_CONST_B_31
0x4DFC US_ALU_CONST_A_31
0x4E08 RB3D_ABLENDCNTL_R3
0x4E10 RB3D_CONSTANT_COLOR
0x4E14 RB3D_COLOR_CLEAR_VALUE
0x4E18 RB3D_ROPCNTL_R3
0x4E1C RB3D_CLRCMP_FLIPE_R3
0x4E20 RB3D_CLRCMP_CLR_R3
0x4E24 RB3D_CLRCMP_MSK_R3
0x4E48 RB3D_DEBUG_CTL
0x4E4C RB3D_DSTCACHE_CTLSTAT_R3
0x4E50 RB3D_DITHER_CTL
0x4E54 RB3D_CMASK_OFFSET0
0x4E58 RB3D_CMASK_OFFSET1
0x4E5C RB3D_CMASK_OFFSET2
0x4E60 RB3D_CMASK_OFFSET3
0x4E64 RB3D_CMASK_PITCH0
0x4E68 RB3D_CMASK_PITCH1
0x4E6C RB3D_CMASK_PITCH2
0x4E70 RB3D_CMASK_PITCH3
0x4E74 RB3D_CMASK_WRINDEX
0x4E78 RB3D_CMASK_DWORD
0x4E7C RB3D_CMASK_RDINDEX
0x4EA0 RB3D_DISCARD_SRC_PIXEL_LTE_THRESHOLD
0x4EA4 RB3D_DISCARD_SRC_PIXEL_GTE_THRESHOLD
0x4F04 ZB_ZSTENCILCNTL
0x4F08 ZB_STENCILREFMASK
0x4F14 ZB_ZTOP
0x4F18 ZB_ZCACHE_CTLSTAT
0x4F28 ZB_DEPTHCLEARVALUE
0x4F58 ZB_ZPASS_DATA

View File

@@ -0,0 +1,494 @@
rv515 0x6d40
0x1434 SRC_Y_X
0x1438 DST_Y_X
0x143C DST_HEIGHT_WIDTH
0x146C DP_GUI_MASTER_CNTL
0x1474 BRUSH_Y_X
0x1478 DP_BRUSH_BKGD_CLR
0x147C DP_BRUSH_FRGD_CLR
0x1480 BRUSH_DATA0
0x1484 BRUSH_DATA1
0x1598 DST_WIDTH_HEIGHT
0x15C0 CLR_CMP_CNTL
0x15C4 CLR_CMP_CLR_SRC
0x15C8 CLR_CMP_CLR_DST
0x15CC CLR_CMP_MSK
0x15D8 DP_SRC_FRGD_CLR
0x15DC DP_SRC_BKGD_CLR
0x1600 DST_LINE_START
0x1604 DST_LINE_END
0x1608 DST_LINE_PATCOUNT
0x16C0 DP_CNTL
0x16CC DP_WRITE_MSK
0x16D0 DP_CNTL_XDIR_YDIR_YMAJOR
0x16E8 DEFAULT_SC_BOTTOM_RIGHT
0x16EC SC_TOP_LEFT
0x16F0 SC_BOTTOM_RIGHT
0x16F4 SRC_SC_BOTTOM_RIGHT
0x1714 DSTCACHE_CTLSTAT
0x1720 WAIT_UNTIL
0x172C RBBM_GUICNTL
0x1D98 VAP_VPORT_XSCALE
0x1D9C VAP_VPORT_XOFFSET
0x1DA0 VAP_VPORT_YSCALE
0x1DA4 VAP_VPORT_YOFFSET
0x1DA8 VAP_VPORT_ZSCALE
0x1DAC VAP_VPORT_ZOFFSET
0x2080 VAP_CNTL
0x208C VAP_INDEX_OFFSET
0x2090 VAP_OUT_VTX_FMT_0
0x2094 VAP_OUT_VTX_FMT_1
0x20B0 VAP_VTE_CNTL
0x2138 VAP_VF_MIN_VTX_INDX
0x2140 VAP_CNTL_STATUS
0x2150 VAP_PROG_STREAM_CNTL_0
0x2154 VAP_PROG_STREAM_CNTL_1
0x2158 VAP_PROG_STREAM_CNTL_2
0x215C VAP_PROG_STREAM_CNTL_3
0x2160 VAP_PROG_STREAM_CNTL_4
0x2164 VAP_PROG_STREAM_CNTL_5
0x2168 VAP_PROG_STREAM_CNTL_6
0x216C VAP_PROG_STREAM_CNTL_7
0x2180 VAP_VTX_STATE_CNTL
0x2184 VAP_VSM_VTX_ASSM
0x2188 VAP_VTX_STATE_IND_REG_0
0x218C VAP_VTX_STATE_IND_REG_1
0x2190 VAP_VTX_STATE_IND_REG_2
0x2194 VAP_VTX_STATE_IND_REG_3
0x2198 VAP_VTX_STATE_IND_REG_4
0x219C VAP_VTX_STATE_IND_REG_5
0x21A0 VAP_VTX_STATE_IND_REG_6
0x21A4 VAP_VTX_STATE_IND_REG_7
0x21A8 VAP_VTX_STATE_IND_REG_8
0x21AC VAP_VTX_STATE_IND_REG_9
0x21B0 VAP_VTX_STATE_IND_REG_10
0x21B4 VAP_VTX_STATE_IND_REG_11
0x21B8 VAP_VTX_STATE_IND_REG_12
0x21BC VAP_VTX_STATE_IND_REG_13
0x21C0 VAP_VTX_STATE_IND_REG_14
0x21C4 VAP_VTX_STATE_IND_REG_15
0x21DC VAP_PSC_SGN_NORM_CNTL
0x21E0 VAP_PROG_STREAM_CNTL_EXT_0
0x21E4 VAP_PROG_STREAM_CNTL_EXT_1
0x21E8 VAP_PROG_STREAM_CNTL_EXT_2
0x21EC VAP_PROG_STREAM_CNTL_EXT_3
0x21F0 VAP_PROG_STREAM_CNTL_EXT_4
0x21F4 VAP_PROG_STREAM_CNTL_EXT_5
0x21F8 VAP_PROG_STREAM_CNTL_EXT_6
0x21FC VAP_PROG_STREAM_CNTL_EXT_7
0x2200 VAP_PVS_VECTOR_INDX_REG
0x2204 VAP_PVS_VECTOR_DATA_REG
0x2208 VAP_PVS_VECTOR_DATA_REG_128
0x2218 VAP_TEX_TO_COLOR_CNTL
0x221C VAP_CLIP_CNTL
0x2220 VAP_GB_VERT_CLIP_ADJ
0x2224 VAP_GB_VERT_DISC_ADJ
0x2228 VAP_GB_HORZ_CLIP_ADJ
0x222C VAP_GB_HORZ_DISC_ADJ
0x2230 VAP_PVS_FLOW_CNTL_ADDRS_0
0x2234 VAP_PVS_FLOW_CNTL_ADDRS_1
0x2238 VAP_PVS_FLOW_CNTL_ADDRS_2
0x223C VAP_PVS_FLOW_CNTL_ADDRS_3
0x2240 VAP_PVS_FLOW_CNTL_ADDRS_4
0x2244 VAP_PVS_FLOW_CNTL_ADDRS_5
0x2248 VAP_PVS_FLOW_CNTL_ADDRS_6
0x224C VAP_PVS_FLOW_CNTL_ADDRS_7
0x2250 VAP_PVS_FLOW_CNTL_ADDRS_8
0x2254 VAP_PVS_FLOW_CNTL_ADDRS_9
0x2258 VAP_PVS_FLOW_CNTL_ADDRS_10
0x225C VAP_PVS_FLOW_CNTL_ADDRS_11
0x2260 VAP_PVS_FLOW_CNTL_ADDRS_12
0x2264 VAP_PVS_FLOW_CNTL_ADDRS_13
0x2268 VAP_PVS_FLOW_CNTL_ADDRS_14
0x226C VAP_PVS_FLOW_CNTL_ADDRS_15
0x2284 VAP_PVS_STATE_FLUSH_REG
0x2288 VAP_PVS_VTX_TIMEOUT_REG
0x2290 VAP_PVS_FLOW_CNTL_LOOP_INDEX_0
0x2294 VAP_PVS_FLOW_CNTL_LOOP_INDEX_1
0x2298 VAP_PVS_FLOW_CNTL_LOOP_INDEX_2
0x229C VAP_PVS_FLOW_CNTL_LOOP_INDEX_3
0x22A0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_4
0x22A4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_5
0x22A8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_6
0x22AC VAP_PVS_FLOW_CNTL_LOOP_INDEX_7
0x22B0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_8
0x22B4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_9
0x22B8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_10
0x22BC VAP_PVS_FLOW_CNTL_LOOP_INDEX_11
0x22C0 VAP_PVS_FLOW_CNTL_LOOP_INDEX_12
0x22C4 VAP_PVS_FLOW_CNTL_LOOP_INDEX_13
0x22C8 VAP_PVS_FLOW_CNTL_LOOP_INDEX_14
0x22CC VAP_PVS_FLOW_CNTL_LOOP_INDEX_15
0x22D0 VAP_PVS_CODE_CNTL_0
0x22D4 VAP_PVS_CONST_CNTL
0x22D8 VAP_PVS_CODE_CNTL_1
0x22DC VAP_PVS_FLOW_CNTL_OPC
0x2500 VAP_PVS_FLOW_CNTL_ADDRS_LW_0
0x2504 VAP_PVS_FLOW_CNTL_ADDRS_UW_0
0x2508 VAP_PVS_FLOW_CNTL_ADDRS_LW_1
0x250C VAP_PVS_FLOW_CNTL_ADDRS_UW_1
0x2510 VAP_PVS_FLOW_CNTL_ADDRS_LW_2
0x2514 VAP_PVS_FLOW_CNTL_ADDRS_UW_2
0x2518 VAP_PVS_FLOW_CNTL_ADDRS_LW_3
0x251C VAP_PVS_FLOW_CNTL_ADDRS_UW_3
0x2520 VAP_PVS_FLOW_CNTL_ADDRS_LW_4
0x2524 VAP_PVS_FLOW_CNTL_ADDRS_UW_4
0x2528 VAP_PVS_FLOW_CNTL_ADDRS_LW_5
0x252C VAP_PVS_FLOW_CNTL_ADDRS_UW_5
0x2530 VAP_PVS_FLOW_CNTL_ADDRS_LW_6
0x2534 VAP_PVS_FLOW_CNTL_ADDRS_UW_6
0x2538 VAP_PVS_FLOW_CNTL_ADDRS_LW_7
0x253C VAP_PVS_FLOW_CNTL_ADDRS_UW_7
0x2540 VAP_PVS_FLOW_CNTL_ADDRS_LW_8
0x2544 VAP_PVS_FLOW_CNTL_ADDRS_UW_8
0x2548 VAP_PVS_FLOW_CNTL_ADDRS_LW_9
0x254C VAP_PVS_FLOW_CNTL_ADDRS_UW_9
0x2550 VAP_PVS_FLOW_CNTL_ADDRS_LW_10
0x2554 VAP_PVS_FLOW_CNTL_ADDRS_UW_10
0x2558 VAP_PVS_FLOW_CNTL_ADDRS_LW_11
0x255C VAP_PVS_FLOW_CNTL_ADDRS_UW_11
0x2560 VAP_PVS_FLOW_CNTL_ADDRS_LW_12
0x2564 VAP_PVS_FLOW_CNTL_ADDRS_UW_12
0x2568 VAP_PVS_FLOW_CNTL_ADDRS_LW_13
0x256C VAP_PVS_FLOW_CNTL_ADDRS_UW_13
0x2570 VAP_PVS_FLOW_CNTL_ADDRS_LW_14
0x2574 VAP_PVS_FLOW_CNTL_ADDRS_UW_14
0x2578 VAP_PVS_FLOW_CNTL_ADDRS_LW_15
0x257C VAP_PVS_FLOW_CNTL_ADDRS_UW_15
0x342C RB2D_DSTCACHE_CTLSTAT
0x4000 GB_VAP_RASTER_VTX_FMT_0
0x4004 GB_VAP_RASTER_VTX_FMT_1
0x4008 GB_ENABLE
0x4010 GB_MSPOS0
0x4014 GB_MSPOS1
0x401C GB_SELECT
0x4020 GB_AA_CONFIG
0x4024 GB_FIFO_SIZE
0x4100 TX_INVALTAGS
0x4114 SU_TEX_WRAP_PS3
0x4118 PS3_ENABLE
0x411c PS3_VTX_FMT
0x4120 PS3_TEX_SOURCE
0x4200 GA_POINT_S0
0x4204 GA_POINT_T0
0x4208 GA_POINT_S1
0x420C GA_POINT_T1
0x4214 GA_TRIANGLE_STIPPLE
0x421C GA_POINT_SIZE
0x4230 GA_POINT_MINMAX
0x4234 GA_LINE_CNTL
0x4238 GA_LINE_STIPPLE_CONFIG
0x4258 GA_COLOR_CONTROL_PS3
0x4260 GA_LINE_STIPPLE_VALUE
0x4264 GA_LINE_S0
0x4268 GA_LINE_S1
0x4278 GA_COLOR_CONTROL
0x427C GA_SOLID_RG
0x4280 GA_SOLID_BA
0x4288 GA_POLY_MODE
0x428C GA_ROUND_MODE
0x4290 GA_OFFSET
0x4294 GA_FOG_SCALE
0x4298 GA_FOG_OFFSET
0x42A0 SU_TEX_WRAP
0x42A4 SU_POLY_OFFSET_FRONT_SCALE
0x42A8 SU_POLY_OFFSET_FRONT_OFFSET
0x42AC SU_POLY_OFFSET_BACK_SCALE
0x42B0 SU_POLY_OFFSET_BACK_OFFSET
0x42B4 SU_POLY_OFFSET_ENABLE
0x42B8 SU_CULL_MODE
0x42C0 SU_DEPTH_SCALE
0x42C4 SU_DEPTH_OFFSET
0x42C8 SU_REG_DEST
0x4300 RS_COUNT
0x4304 RS_INST_COUNT
0x4074 RS_IP_0
0x4078 RS_IP_1
0x407C RS_IP_2
0x4080 RS_IP_3
0x4084 RS_IP_4
0x4088 RS_IP_5
0x408C RS_IP_6
0x4090 RS_IP_7
0x4094 RS_IP_8
0x4098 RS_IP_9
0x409C RS_IP_10
0x40A0 RS_IP_11
0x40A4 RS_IP_12
0x40A8 RS_IP_13
0x40AC RS_IP_14
0x40B0 RS_IP_15
0x4320 RS_INST_0
0x4324 RS_INST_1
0x4328 RS_INST_2
0x432C RS_INST_3
0x4330 RS_INST_4
0x4334 RS_INST_5
0x4338 RS_INST_6
0x433C RS_INST_7
0x4340 RS_INST_8
0x4344 RS_INST_9
0x4348 RS_INST_10
0x434C RS_INST_11
0x4350 RS_INST_12
0x4354 RS_INST_13
0x4358 RS_INST_14
0x435C RS_INST_15
0x43A8 SC_EDGERULE
0x43B0 SC_CLIP_0_A
0x43B4 SC_CLIP_0_B
0x43B8 SC_CLIP_1_A
0x43BC SC_CLIP_1_B
0x43C0 SC_CLIP_2_A
0x43C4 SC_CLIP_2_B
0x43C8 SC_CLIP_3_A
0x43CC SC_CLIP_3_B
0x43D0 SC_CLIP_RULE
0x43E0 SC_SCISSOR0
0x43E8 SC_SCREENDOOR
0x4440 TX_FILTER1_0
0x4444 TX_FILTER1_1
0x4448 TX_FILTER1_2
0x444C TX_FILTER1_3
0x4450 TX_FILTER1_4
0x4454 TX_FILTER1_5
0x4458 TX_FILTER1_6
0x445C TX_FILTER1_7
0x4460 TX_FILTER1_8
0x4464 TX_FILTER1_9
0x4468 TX_FILTER1_10
0x446C TX_FILTER1_11
0x4470 TX_FILTER1_12
0x4474 TX_FILTER1_13
0x4478 TX_FILTER1_14
0x447C TX_FILTER1_15
0x4580 TX_CHROMA_KEY_0
0x4584 TX_CHROMA_KEY_1
0x4588 TX_CHROMA_KEY_2
0x458C TX_CHROMA_KEY_3
0x4590 TX_CHROMA_KEY_4
0x4594 TX_CHROMA_KEY_5
0x4598 TX_CHROMA_KEY_6
0x459C TX_CHROMA_KEY_7
0x45A0 TX_CHROMA_KEY_8
0x45A4 TX_CHROMA_KEY_9
0x45A8 TX_CHROMA_KEY_10
0x45AC TX_CHROMA_KEY_11
0x45B0 TX_CHROMA_KEY_12
0x45B4 TX_CHROMA_KEY_13
0x45B8 TX_CHROMA_KEY_14
0x45BC TX_CHROMA_KEY_15
0x45C0 TX_BORDER_COLOR_0
0x45C4 TX_BORDER_COLOR_1
0x45C8 TX_BORDER_COLOR_2
0x45CC TX_BORDER_COLOR_3
0x45D0 TX_BORDER_COLOR_4
0x45D4 TX_BORDER_COLOR_5
0x45D8 TX_BORDER_COLOR_6
0x45DC TX_BORDER_COLOR_7
0x45E0 TX_BORDER_COLOR_8
0x45E4 TX_BORDER_COLOR_9
0x45E8 TX_BORDER_COLOR_10
0x45EC TX_BORDER_COLOR_11
0x45F0 TX_BORDER_COLOR_12
0x45F4 TX_BORDER_COLOR_13
0x45F8 TX_BORDER_COLOR_14
0x45FC TX_BORDER_COLOR_15
0x4250 GA_US_VECTOR_INDEX
0x4254 GA_US_VECTOR_DATA
0x4600 US_CONFIG
0x4604 US_PIXSIZE
0x4620 US_FC_BOOL_CONST
0x4624 US_FC_CTRL
0x4630 US_CODE_ADDR
0x4634 US_CODE_RANGE
0x4638 US_CODE_OFFSET
0x4640 US_FORMAT0_0
0x4644 US_FORMAT0_1
0x4648 US_FORMAT0_2
0x464C US_FORMAT0_3
0x4650 US_FORMAT0_4
0x4654 US_FORMAT0_5
0x4658 US_FORMAT0_6
0x465C US_FORMAT0_7
0x4660 US_FORMAT0_8
0x4664 US_FORMAT0_9
0x4668 US_FORMAT0_10
0x466C US_FORMAT0_11
0x4670 US_FORMAT0_12
0x4674 US_FORMAT0_13
0x4678 US_FORMAT0_14
0x467C US_FORMAT0_15
0x46A4 US_OUT_FMT_0
0x46A8 US_OUT_FMT_1
0x46AC US_OUT_FMT_2
0x46B0 US_OUT_FMT_3
0x46B4 US_W_FMT
0x4BC0 FG_FOG_BLEND
0x4BC4 FG_FOG_FACTOR
0x4BC8 FG_FOG_COLOR_R
0x4BCC FG_FOG_COLOR_G
0x4BD0 FG_FOG_COLOR_B
0x4BD4 FG_ALPHA_FUNC
0x4BD8 FG_DEPTH_SRC
0x4BE0 FG_ALPHA_VALUE
0x4C00 US_ALU_CONST_R_0
0x4C04 US_ALU_CONST_G_0
0x4C08 US_ALU_CONST_B_0
0x4C0C US_ALU_CONST_A_0
0x4C10 US_ALU_CONST_R_1
0x4C14 US_ALU_CONST_G_1
0x4C18 US_ALU_CONST_B_1
0x4C1C US_ALU_CONST_A_1
0x4C20 US_ALU_CONST_R_2
0x4C24 US_ALU_CONST_G_2
0x4C28 US_ALU_CONST_B_2
0x4C2C US_ALU_CONST_A_2
0x4C30 US_ALU_CONST_R_3
0x4C34 US_ALU_CONST_G_3
0x4C38 US_ALU_CONST_B_3
0x4C3C US_ALU_CONST_A_3
0x4C40 US_ALU_CONST_R_4
0x4C44 US_ALU_CONST_G_4
0x4C48 US_ALU_CONST_B_4
0x4C4C US_ALU_CONST_A_4
0x4C50 US_ALU_CONST_R_5
0x4C54 US_ALU_CONST_G_5
0x4C58 US_ALU_CONST_B_5
0x4C5C US_ALU_CONST_A_5
0x4C60 US_ALU_CONST_R_6
0x4C64 US_ALU_CONST_G_6
0x4C68 US_ALU_CONST_B_6
0x4C6C US_ALU_CONST_A_6
0x4C70 US_ALU_CONST_R_7
0x4C74 US_ALU_CONST_G_7
0x4C78 US_ALU_CONST_B_7
0x4C7C US_ALU_CONST_A_7
0x4C80 US_ALU_CONST_R_8
0x4C84 US_ALU_CONST_G_8
0x4C88 US_ALU_CONST_B_8
0x4C8C US_ALU_CONST_A_8
0x4C90 US_ALU_CONST_R_9
0x4C94 US_ALU_CONST_G_9
0x4C98 US_ALU_CONST_B_9
0x4C9C US_ALU_CONST_A_9
0x4CA0 US_ALU_CONST_R_10
0x4CA4 US_ALU_CONST_G_10
0x4CA8 US_ALU_CONST_B_10
0x4CAC US_ALU_CONST_A_10
0x4CB0 US_ALU_CONST_R_11
0x4CB4 US_ALU_CONST_G_11
0x4CB8 US_ALU_CONST_B_11
0x4CBC US_ALU_CONST_A_11
0x4CC0 US_ALU_CONST_R_12
0x4CC4 US_ALU_CONST_G_12
0x4CC8 US_ALU_CONST_B_12
0x4CCC US_ALU_CONST_A_12
0x4CD0 US_ALU_CONST_R_13
0x4CD4 US_ALU_CONST_G_13
0x4CD8 US_ALU_CONST_B_13
0x4CDC US_ALU_CONST_A_13
0x4CE0 US_ALU_CONST_R_14
0x4CE4 US_ALU_CONST_G_14
0x4CE8 US_ALU_CONST_B_14
0x4CEC US_ALU_CONST_A_14
0x4CF0 US_ALU_CONST_R_15
0x4CF4 US_ALU_CONST_G_15
0x4CF8 US_ALU_CONST_B_15
0x4CFC US_ALU_CONST_A_15
0x4D00 US_ALU_CONST_R_16
0x4D04 US_ALU_CONST_G_16
0x4D08 US_ALU_CONST_B_16
0x4D0C US_ALU_CONST_A_16
0x4D10 US_ALU_CONST_R_17
0x4D14 US_ALU_CONST_G_17
0x4D18 US_ALU_CONST_B_17
0x4D1C US_ALU_CONST_A_17
0x4D20 US_ALU_CONST_R_18
0x4D24 US_ALU_CONST_G_18
0x4D28 US_ALU_CONST_B_18
0x4D2C US_ALU_CONST_A_18
0x4D30 US_ALU_CONST_R_19
0x4D34 US_ALU_CONST_G_19
0x4D38 US_ALU_CONST_B_19
0x4D3C US_ALU_CONST_A_19
0x4D40 US_ALU_CONST_R_20
0x4D44 US_ALU_CONST_G_20
0x4D48 US_ALU_CONST_B_20
0x4D4C US_ALU_CONST_A_20
0x4D50 US_ALU_CONST_R_21
0x4D54 US_ALU_CONST_G_21
0x4D58 US_ALU_CONST_B_21
0x4D5C US_ALU_CONST_A_21
0x4D60 US_ALU_CONST_R_22
0x4D64 US_ALU_CONST_G_22
0x4D68 US_ALU_CONST_B_22
0x4D6C US_ALU_CONST_A_22
0x4D70 US_ALU_CONST_R_23
0x4D74 US_ALU_CONST_G_23
0x4D78 US_ALU_CONST_B_23
0x4D7C US_ALU_CONST_A_23
0x4D80 US_ALU_CONST_R_24
0x4D84 US_ALU_CONST_G_24
0x4D88 US_ALU_CONST_B_24
0x4D8C US_ALU_CONST_A_24
0x4D90 US_ALU_CONST_R_25
0x4D94 US_ALU_CONST_G_25
0x4D98 US_ALU_CONST_B_25
0x4D9C US_ALU_CONST_A_25
0x4DA0 US_ALU_CONST_R_26
0x4DA4 US_ALU_CONST_G_26
0x4DA8 US_ALU_CONST_B_26
0x4DAC US_ALU_CONST_A_26
0x4DB0 US_ALU_CONST_R_27
0x4DB4 US_ALU_CONST_G_27
0x4DB8 US_ALU_CONST_B_27
0x4DBC US_ALU_CONST_A_27
0x4DC0 US_ALU_CONST_R_28
0x4DC4 US_ALU_CONST_G_28
0x4DC8 US_ALU_CONST_B_28
0x4DCC US_ALU_CONST_A_28
0x4DD0 US_ALU_CONST_R_29
0x4DD4 US_ALU_CONST_G_29
0x4DD8 US_ALU_CONST_B_29
0x4DDC US_ALU_CONST_A_29
0x4DE0 US_ALU_CONST_R_30
0x4DE4 US_ALU_CONST_G_30
0x4DE8 US_ALU_CONST_B_30
0x4DEC US_ALU_CONST_A_30
0x4DF0 US_ALU_CONST_R_31
0x4DF4 US_ALU_CONST_G_31
0x4DF8 US_ALU_CONST_B_31
0x4DFC US_ALU_CONST_A_31
0x4E08 RB3D_ABLENDCNTL_R3
0x4E10 RB3D_CONSTANT_COLOR
0x4E14 RB3D_COLOR_CLEAR_VALUE
0x4E18 RB3D_ROPCNTL_R3
0x4E1C RB3D_CLRCMP_FLIPE_R3
0x4E20 RB3D_CLRCMP_CLR_R3
0x4E24 RB3D_CLRCMP_MSK_R3
0x4E48 RB3D_DEBUG_CTL
0x4E4C RB3D_DSTCACHE_CTLSTAT_R3
0x4E50 RB3D_DITHER_CTL
0x4E54 RB3D_CMASK_OFFSET0
0x4E58 RB3D_CMASK_OFFSET1
0x4E5C RB3D_CMASK_OFFSET2
0x4E60 RB3D_CMASK_OFFSET3
0x4E64 RB3D_CMASK_PITCH0
0x4E68 RB3D_CMASK_PITCH1
0x4E6C RB3D_CMASK_PITCH2
0x4E70 RB3D_CMASK_PITCH3
0x4E74 RB3D_CMASK_WRINDEX
0x4E78 RB3D_CMASK_DWORD
0x4E7C RB3D_CMASK_RDINDEX
0x4EA0 RB3D_DISCARD_SRC_PIXEL_LTE_THRESHOLD
0x4EA4 RB3D_DISCARD_SRC_PIXEL_GTE_THRESHOLD
0x4EF8 RB3D_CONSTANT_COLOR_AR
0x4EFC RB3D_CONSTANT_COLOR_GB
0x4F04 ZB_ZSTENCILCNTL
0x4F08 ZB_STENCILREFMASK
0x4F14 ZB_ZTOP
0x4F18 ZB_ZCACHE_CTLSTAT
0x4F58 ZB_ZPASS_DATA
0x4F28 ZB_DEPTHCLEARVALUE
0x4FD4 ZB_STENCILREFMASK_BF


@ -77,7 +77,7 @@ int rs400_gart_init(struct radeon_device *rdev)
{
int r;
if (rdev->gart.table.ram.ptr) {
if (rdev->gart.ptr) {
WARN(1, "RS400 GART already initialized\n");
return 0;
}
@ -182,6 +182,9 @@ int rs400_gart_enable(struct radeon_device *rdev)
/* Enable gart */
WREG32_MC(RS480_AGP_ADDRESS_SPACE_SIZE, (RS480_GART_EN | size_reg));
rs400_gart_tlb_flush(rdev);
DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
(unsigned)(rdev->mc.gtt_size >> 20),
(unsigned long long)rdev->gart.table_addr);
rdev->gart.ready = true;
return 0;
}
@ -209,6 +212,7 @@ void rs400_gart_fini(struct radeon_device *rdev)
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr)
{
uint32_t entry;
u32 *gtt = rdev->gart.ptr;
if (i < 0 || i > rdev->gart.num_gpu_pages) {
return -EINVAL;
@ -218,7 +222,7 @@ int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr)
((upper_32_bits(addr) & 0xff) << 4) |
RS400_PTE_WRITEABLE | RS400_PTE_READABLE;
entry = cpu_to_le32(entry);
rdev->gart.table.ram.ptr[i] = entry;
gtt[i] = entry;
return 0;
}
@ -238,7 +242,7 @@ int rs400_mc_wait_for_idle(struct radeon_device *rdev)
return -1;
}
void rs400_gpu_init(struct radeon_device *rdev)
static void rs400_gpu_init(struct radeon_device *rdev)
{
/* FIXME: is this correct ? */
r420_pipes_init(rdev);
@ -248,7 +252,7 @@ void rs400_gpu_init(struct radeon_device *rdev)
}
}
void rs400_mc_init(struct radeon_device *rdev)
static void rs400_mc_init(struct radeon_device *rdev)
{
u64 base;
@ -366,7 +370,7 @@ static int rs400_debugfs_pcie_gart_info_init(struct radeon_device *rdev)
#endif
}
void rs400_mc_program(struct radeon_device *rdev)
static void rs400_mc_program(struct radeon_device *rdev)
{
struct r100_mc_save save;
@ -415,11 +419,13 @@ static int rs400_startup(struct radeon_device *rdev)
dev_err(rdev->dev, "failed initializing CP (%d).\n", r);
return r;
}
r = r100_ib_init(rdev);
r = radeon_ib_pool_init(rdev);
if (r) {
dev_err(rdev->dev, "failed initializing IB (%d).\n", r);
dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
return r;
}
return 0;
}
@ -482,6 +488,7 @@ int rs400_init(struct radeon_device *rdev)
if (r)
return r;
r300_set_reg_safe(rdev);
rdev->accel_working = true;
r = rs400_startup(rdev);
if (r) {


@ -35,7 +35,7 @@
* close to the one of the R600 family (R600 likely being an evolution
* of the RS600 GART block).
*/
#include "drmP.h"
#include <drm/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "atom.h"
@ -43,9 +43,35 @@
#include "rs600_reg_safe.h"
void rs600_gpu_init(struct radeon_device *rdev);
static void rs600_gpu_init(struct radeon_device *rdev);
int rs600_mc_wait_for_idle(struct radeon_device *rdev);
static const u32 crtc_offsets[2] =
{
0,
AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL
};
void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
int i;
if (crtc >= rdev->num_crtc)
return;
if (RREG32(AVIVO_D1CRTC_CONTROL + crtc_offsets[crtc]) & AVIVO_CRTC_EN) {
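/* wait for any in-progress vertical blank to end, then for the next one to begin */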
for (i = 0; i < rdev->usec_timeout; i++) {
if (!(RREG32(AVIVO_D1CRTC_STATUS + crtc_offsets[crtc]) & AVIVO_D1CRTC_V_BLANK))
break;
udelay(1);
}
for (i = 0; i < rdev->usec_timeout; i++) {
if (RREG32(AVIVO_D1CRTC_STATUS + crtc_offsets[crtc]) & AVIVO_D1CRTC_V_BLANK)
break;
udelay(1);
}
}
}
/* hpd for digital panel detect/disconnect */
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
@ -101,6 +127,7 @@ void rs600_hpd_init(struct radeon_device *rdev)
{
struct drm_device *dev = rdev->ddev;
struct drm_connector *connector;
unsigned enable = 0;
list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
struct radeon_connector *radeon_connector = to_radeon_connector(connector);
@ -108,25 +135,25 @@ void rs600_hpd_init(struct radeon_device *rdev)
case RADEON_HPD_1:
WREG32(R_007D00_DC_HOT_PLUG_DETECT1_CONTROL,
S_007D00_DC_HOT_PLUG_DETECT1_EN(1));
rdev->irq.hpd[0] = true;
break;
case RADEON_HPD_2:
WREG32(R_007D10_DC_HOT_PLUG_DETECT2_CONTROL,
S_007D10_DC_HOT_PLUG_DETECT2_EN(1));
rdev->irq.hpd[1] = true;
break;
default:
break;
}
enable |= 1 << radeon_connector->hpd.hpd;
radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
}
if (rdev->irq.installed)
rs600_irq_set(rdev);
// radeon_irq_kms_enable_hpd(rdev, enable);
}
void rs600_hpd_fini(struct radeon_device *rdev)
{
struct drm_device *dev = rdev->ddev;
struct drm_connector *connector;
unsigned disable = 0;
list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
struct radeon_connector *radeon_connector = to_radeon_connector(connector);
@ -134,27 +161,17 @@ void rs600_hpd_fini(struct radeon_device *rdev)
case RADEON_HPD_1:
WREG32(R_007D00_DC_HOT_PLUG_DETECT1_CONTROL,
S_007D00_DC_HOT_PLUG_DETECT1_EN(0));
rdev->irq.hpd[0] = false;
break;
case RADEON_HPD_2:
WREG32(R_007D10_DC_HOT_PLUG_DETECT2_CONTROL,
S_007D10_DC_HOT_PLUG_DETECT2_EN(0));
rdev->irq.hpd[1] = false;
break;
default:
break;
}
disable |= 1 << radeon_connector->hpd.hpd;
}
}
void rs600_bm_disable(struct radeon_device *rdev)
{
u32 tmp;
/* disable bus mastering */
tmp = PciRead16(rdev->pdev->bus, rdev->pdev->devfn, 0x4);
PciWrite16(rdev->pdev->bus, rdev->pdev->devfn, 0x4, tmp & 0xFFFB);
mdelay(1);
// radeon_irq_kms_disable_hpd(rdev, disable);
}
int rs600_asic_reset(struct radeon_device *rdev)
@ -180,7 +197,8 @@ int rs600_asic_reset(struct radeon_device *rdev)
WREG32(RADEON_CP_RB_CNTL, tmp);
// pci_save_state(rdev->pdev);
/* disable bus mastering */
rs600_bm_disable(rdev);
// pci_clear_master(rdev->pdev);
mdelay(1);
/* reset GA+VAP */
WREG32(R_0000F0_RBBM_SOFT_RESET, S_0000F0_SOFT_RESET_VAP(1) |
S_0000F0_SOFT_RESET_GA(1));
@ -211,7 +229,6 @@ int rs600_asic_reset(struct radeon_device *rdev)
/* Check if GPU is idle */
if (G_000E40_GA_BUSY(status) || G_000E40_VAP_BUSY(status)) {
dev_err(rdev->dev, "failed to reset GPU\n");
rdev->gpu_lockup = true;
ret = -1;
} else
dev_info(rdev->dev, "GPU reset succeed\n");
@ -240,11 +257,11 @@ void rs600_gart_tlb_flush(struct radeon_device *rdev)
tmp = RREG32_MC(R_000100_MC_PT0_CNTL);
}
int rs600_gart_init(struct radeon_device *rdev)
static int rs600_gart_init(struct radeon_device *rdev)
{
int r;
if (rdev->gart.table.vram.robj) {
if (rdev->gart.robj) {
WARN(1, "RS600 GART already initialized\n");
return 0;
}
@ -262,7 +279,7 @@ static int rs600_gart_enable(struct radeon_device *rdev)
u32 tmp;
int r, i;
if (rdev->gart.table.vram.robj == NULL) {
if (rdev->gart.robj == NULL) {
dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
return -EINVAL;
}
@ -315,30 +332,25 @@ static int rs600_gart_enable(struct radeon_device *rdev)
tmp = RREG32_MC(R_000009_MC_CNTL1);
WREG32_MC(R_000009_MC_CNTL1, (tmp | S_000009_ENABLE_PAGE_TABLES(1)));
rs600_gart_tlb_flush(rdev);
DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
(unsigned)(rdev->mc.gtt_size >> 20),
(unsigned long long)rdev->gart.table_addr);
rdev->gart.ready = true;
return 0;
}
void rs600_gart_disable(struct radeon_device *rdev)
static void rs600_gart_disable(struct radeon_device *rdev)
{
u32 tmp;
int r;
/* FIXME: disable out of gart access */
WREG32_MC(R_000100_MC_PT0_CNTL, 0);
tmp = RREG32_MC(R_000009_MC_CNTL1);
WREG32_MC(R_000009_MC_CNTL1, tmp & C_000009_ENABLE_PAGE_TABLES);
if (rdev->gart.table.vram.robj) {
r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
if (r == 0) {
radeon_bo_kunmap(rdev->gart.table.vram.robj);
radeon_bo_unpin(rdev->gart.table.vram.robj);
radeon_bo_unreserve(rdev->gart.table.vram.robj);
}
}
radeon_gart_table_vram_unpin(rdev);
}
void rs600_gart_fini(struct radeon_device *rdev)
static void rs600_gart_fini(struct radeon_device *rdev)
{
radeon_gart_fini(rdev);
rs600_gart_disable(rdev);
@ -353,7 +365,7 @@ void rs600_gart_fini(struct radeon_device *rdev)
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr)
{
void __iomem *ptr = (void *)rdev->gart.table.vram.ptr;
void __iomem *ptr = (void *)rdev->gart.ptr;
if (i < 0 || i > rdev->gart.num_gpu_pages) {
return -EINVAL;
@ -373,24 +385,27 @@ int rs600_irq_set(struct radeon_device *rdev)
~S_007D08_DC_HOT_PLUG_DETECT1_INT_EN(1);
u32 hpd2 = RREG32(R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL) &
~S_007D18_DC_HOT_PLUG_DETECT2_INT_EN(1);
u32 hdmi0;
if (ASIC_IS_DCE2(rdev))
hdmi0 = RREG32(R_007408_HDMI0_AUDIO_PACKET_CONTROL) &
~S_007408_HDMI0_AZ_FORMAT_WTRIG_MASK(1);
else
hdmi0 = 0;
if (!rdev->irq.installed) {
WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
WREG32(R_000040_GEN_INT_CNTL, 0);
return -EINVAL;
}
if (rdev->irq.sw_int) {
if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
tmp |= S_000040_SW_INT_EN(1);
}
if (rdev->irq.gui_idle) {
tmp |= S_000040_GUI_IDLE(1);
}
if (rdev->irq.crtc_vblank_int[0] ||
rdev->irq.pflip[0]) {
atomic_read(&rdev->irq.pflip[0])) {
mode_int |= S_006540_D1MODE_VBLANK_INT_MASK(1);
}
if (rdev->irq.crtc_vblank_int[1] ||
rdev->irq.pflip[1]) {
atomic_read(&rdev->irq.pflip[1])) {
mode_int |= S_006540_D2MODE_VBLANK_INT_MASK(1);
}
if (rdev->irq.hpd[0]) {
@ -399,10 +414,15 @@ int rs600_irq_set(struct radeon_device *rdev)
if (rdev->irq.hpd[1]) {
hpd2 |= S_007D18_DC_HOT_PLUG_DETECT2_INT_EN(1);
}
if (rdev->irq.afmt[0]) {
hdmi0 |= S_007408_HDMI0_AZ_FORMAT_WTRIG_MASK(1);
}
WREG32(R_000040_GEN_INT_CNTL, tmp);
WREG32(R_006540_DxMODE_INT_MASK, mode_int);
WREG32(R_007D08_DC_HOT_PLUG_DETECT1_INT_CONTROL, hpd1);
WREG32(R_007D18_DC_HOT_PLUG_DETECT2_INT_CONTROL, hpd2);
if (ASIC_IS_DCE2(rdev))
WREG32(R_007408_HDMI0_AUDIO_PACKET_CONTROL, hdmi0);
return 0;
}
@ -412,12 +432,6 @@ static inline u32 rs600_irq_ack(struct radeon_device *rdev)
uint32_t irq_mask = S_000044_SW_INT(1);
u32 tmp;
/* the interrupt works, but the status bit is permanently asserted */
if (rdev->irq.gui_idle && radeon_gui_idle(rdev)) {
if (!rdev->irq.gui_idle_acked)
irq_mask |= S_000044_GUI_IDLE_STAT(1);
}
if (G_000044_DISPLAY_INT_STAT(irqs)) {
rdev->irq.stat_regs.r500.disp_int = RREG32(R_007EDC_DISP_INTERRUPT_STATUS);
if (G_007EDC_LB_D1_VBLANK_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) {
@ -442,6 +456,17 @@ static inline u32 rs600_irq_ack(struct radeon_device *rdev)
rdev->irq.stat_regs.r500.disp_int = 0;
}
if (ASIC_IS_DCE2(rdev)) {
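/* latch the HDMI0 audio-format write trigger status and acknowledge it if raised */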
rdev->irq.stat_regs.r500.hdmi0_status = RREG32(R_007404_HDMI0_STATUS) &
S_007404_HDMI0_AZ_FORMAT_WTRIG(1);
if (G_007404_HDMI0_AZ_FORMAT_WTRIG(rdev->irq.stat_regs.r500.hdmi0_status)) {
tmp = RREG32(R_007408_HDMI0_AUDIO_PACKET_CONTROL);
tmp |= S_007408_HDMI0_AZ_FORMAT_WTRIG_ACK(1);
WREG32(R_007408_HDMI0_AUDIO_PACKET_CONTROL, tmp);
}
} else
rdev->irq.stat_regs.r500.hdmi0_status = 0;
if (irqs) {
WREG32(R_000044_GEN_INT_STATUS, irqs);
}
@ -450,6 +475,9 @@ static inline u32 rs600_irq_ack(struct radeon_device *rdev)
void rs600_irq_disable(struct radeon_device *rdev)
{
u32 hdmi0 = RREG32(R_007408_HDMI0_AUDIO_PACKET_CONTROL) &
~S_007408_HDMI0_AZ_FORMAT_WTRIG_MASK(1);
WREG32(R_007408_HDMI0_AUDIO_PACKET_CONTROL, hdmi0);
WREG32(R_000040_GEN_INT_CNTL, 0);
WREG32(R_006540_DxMODE_INT_MASK, 0);
/* Wait and acknowledge irq */
@ -461,24 +489,20 @@ int rs600_irq_process(struct radeon_device *rdev)
{
u32 status, msi_rearm;
bool queue_hotplug = false;
/* reset gui idle ack. the status bit is broken */
rdev->irq.gui_idle_acked = false;
bool queue_hdmi = false;
status = rs600_irq_ack(rdev);
if (!status && !rdev->irq.stat_regs.r500.disp_int) {
if (!status &&
!rdev->irq.stat_regs.r500.disp_int &&
!rdev->irq.stat_regs.r500.hdmi0_status) {
return IRQ_NONE;
}
while (status || rdev->irq.stat_regs.r500.disp_int) {
while (status ||
rdev->irq.stat_regs.r500.disp_int ||
rdev->irq.stat_regs.r500.hdmi0_status) {
/* SW interrupt */
if (G_000044_SW_INT(status)) {
radeon_fence_process(rdev);
}
/* GUI idle */
if (G_000040_GUI_IDLE(status)) {
rdev->irq.gui_idle_acked = true;
rdev->pm.gui_idle = true;
// wake_up(&rdev->irq.idle_queue);
radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
}
/* Vertical blank interrupts */
if (G_007EDC_LB_D1_VBLANK_INTERRUPT(rdev->irq.stat_regs.r500.disp_int)) {
@ -507,12 +531,16 @@ int rs600_irq_process(struct radeon_device *rdev)
queue_hotplug = true;
DRM_DEBUG("HPD2\n");
}
if (G_007404_HDMI0_AZ_FORMAT_WTRIG(rdev->irq.stat_regs.r500.hdmi0_status)) {
queue_hdmi = true;
DRM_DEBUG("HDMI0\n");
}
status = rs600_irq_ack(rdev);
}
/* reset gui idle ack. the status bit is broken */
rdev->irq.gui_idle_acked = false;
// if (queue_hotplug)
// schedule_work(&rdev->hotplug_work);
// if (queue_hdmi)
// schedule_work(&rdev->audio_work);
if (rdev->msi_enabled) {
switch (rdev->family) {
case CHIP_RS600:
@ -523,9 +551,7 @@ int rs600_irq_process(struct radeon_device *rdev)
WREG32(RADEON_BUS_CNTL, msi_rearm | RS600_MSI_REARM);
break;
default:
msi_rearm = RREG32(RADEON_MSI_REARM_EN) & ~RV370_MSI_REARM_EN;
WREG32(RADEON_MSI_REARM_EN, msi_rearm);
WREG32(RADEON_MSI_REARM_EN, msi_rearm | RV370_MSI_REARM_EN);
WREG32(RADEON_MSI_REARM_EN, RV370_MSI_REARM_EN);
break;
}
}
@ -552,7 +578,7 @@ int rs600_mc_wait_for_idle(struct radeon_device *rdev)
return -1;
}
void rs600_gpu_init(struct radeon_device *rdev)
static void rs600_gpu_init(struct radeon_device *rdev)
{
r420_pipes_init(rdev);
/* Wait for mc idle */
@ -560,7 +586,7 @@ void rs600_gpu_init(struct radeon_device *rdev)
dev_warn(rdev->dev, "Wait MC idle timeout before updating MC.\n");
}
void rs600_mc_init(struct radeon_device *rdev)
static void rs600_mc_init(struct radeon_device *rdev)
{
u64 base;
@ -622,7 +648,7 @@ void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
WREG32(R_000074_MC_IND_DATA, v);
}
void rs600_debugfs(struct radeon_device *rdev)
static void rs600_debugfs(struct radeon_device *rdev)
{
if (r100_debugfs_rbbm_init(rdev))
DRM_ERROR("Failed to register debugfs file for RBBM !\n");
@ -688,11 +714,14 @@ static int rs600_startup(struct radeon_device *rdev)
dev_err(rdev->dev, "failed initializing CP (%d).\n", r);
return r;
}
r = r100_ib_init(rdev);
r = radeon_ib_pool_init(rdev);
if (r) {
dev_err(rdev->dev, "failed initializing IB (%d).\n", r);
dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
return r;
}
return 0;
}
@ -754,6 +783,7 @@ int rs600_init(struct radeon_device *rdev)
if (r)
return r;
rs600_set_safe_registers(rdev);
rdev->accel_working = true;
r = rs600_startup(rdev);
if (r) {


@ -485,6 +485,20 @@
#define S_007D18_DC_HOT_PLUG_DETECT2_INT_EN(x) (((x) & 0x1) << 16)
#define G_007D18_DC_HOT_PLUG_DETECT2_INT_EN(x) (((x) >> 16) & 0x1)
#define C_007D18_DC_HOT_PLUG_DETECT2_INT_EN 0xFFFEFFFF
#define R_007404_HDMI0_STATUS 0x007404
#define S_007404_HDMI0_AZ_FORMAT_WTRIG(x) (((x) & 0x1) << 28)
#define G_007404_HDMI0_AZ_FORMAT_WTRIG(x) (((x) >> 28) & 0x1)
#define C_007404_HDMI0_AZ_FORMAT_WTRIG 0xEFFFFFFF
#define S_007404_HDMI0_AZ_FORMAT_WTRIG_INT(x) (((x) & 0x1) << 29)
#define G_007404_HDMI0_AZ_FORMAT_WTRIG_INT(x) (((x) >> 29) & 0x1)
#define C_007404_HDMI0_AZ_FORMAT_WTRIG_INT 0xDFFFFFFF
#define R_007408_HDMI0_AUDIO_PACKET_CONTROL 0x007408
#define S_007408_HDMI0_AZ_FORMAT_WTRIG_MASK(x) (((x) & 0x1) << 28)
#define G_007408_HDMI0_AZ_FORMAT_WTRIG_MASK(x) (((x) >> 28) & 0x1)
#define C_007408_HDMI0_AZ_FORMAT_WTRIG_MASK 0xEFFFFFFF
#define S_007408_HDMI0_AZ_FORMAT_WTRIG_ACK(x) (((x) & 0x1) << 29)
#define G_007408_HDMI0_AZ_FORMAT_WTRIG_ACK(x) (((x) >> 29) & 0x1)
#define C_007408_HDMI0_AZ_FORMAT_WTRIG_ACK 0xDFFFFFFF
/* MC registers */
#define R_000000_MC_STATUS 0x000000


@ -25,13 +25,13 @@
* Alex Deucher
* Jerome Glisse
*/
#include "drmP.h"
#include <drm/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "atom.h"
#include "rs690d.h"
static int rs690_mc_wait_for_idle(struct radeon_device *rdev)
int rs690_mc_wait_for_idle(struct radeon_device *rdev)
{
unsigned i;
uint32_t tmp;
@ -145,7 +145,7 @@ void rs690_pm_info(struct radeon_device *rdev)
rdev->pm.sideport_bandwidth.full = dfixed_div(rdev->pm.sideport_bandwidth, tmp);
}
void rs690_mc_init(struct radeon_device *rdev)
static void rs690_mc_init(struct radeon_device *rdev)
{
u64 base;
@ -224,7 +224,7 @@ struct rs690_watermark {
fixed20_12 sclk;
};
void rs690_crtc_bandwidth_compute(struct radeon_device *rdev,
static void rs690_crtc_bandwidth_compute(struct radeon_device *rdev,
struct radeon_crtc *crtc,
struct rs690_watermark *wm)
{
@ -581,7 +581,7 @@ void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
WREG32(R_000078_MC_INDEX, 0x7F);
}
void rs690_mc_program(struct radeon_device *rdev)
static void rs690_mc_program(struct radeon_device *rdev)
{
struct rv515_mc_save save;
@ -630,11 +630,14 @@ static int rs690_startup(struct radeon_device *rdev)
dev_err(rdev->dev, "failed initializing CP (%d).\n", r);
return r;
}
r = r100_ib_init(rdev);
r = radeon_ib_pool_init(rdev);
if (r) {
dev_err(rdev->dev, "failed initializing IB (%d).\n", r);
dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
return r;
}
return 0;
}
@ -698,6 +701,7 @@ int rs690_init(struct radeon_device *rdev)
if (r)
return r;
rs600_set_safe_registers(rdev);
rdev->accel_working = true;
r = rs690_startup(rdev);
if (r) {


@ -27,7 +27,7 @@
*/
#include <linux/seq_file.h>
#include <linux/slab.h>
#include "drmP.h"
#include <drm/drmP.h>
#include "rv515d.h"
#include "radeon.h"
#include "radeon_asic.h"
@ -35,9 +35,9 @@
#include "rv515_reg_safe.h"
/* This file gathers functions specific to rv515 */
int rv515_debugfs_pipes_info_init(struct radeon_device *rdev);
int rv515_debugfs_ga_info_init(struct radeon_device *rdev);
void rv515_gpu_init(struct radeon_device *rdev);
static int rv515_debugfs_pipes_info_init(struct radeon_device *rdev);
static int rv515_debugfs_ga_info_init(struct radeon_device *rdev);
static void rv515_gpu_init(struct radeon_device *rdev);
int rv515_mc_wait_for_idle(struct radeon_device *rdev);
void rv515_debugfs(struct radeon_device *rdev)
@ -53,46 +53,46 @@ void rv515_debugfs(struct radeon_device *rdev)
}
}
void rv515_ring_start(struct radeon_device *rdev)
void rv515_ring_start(struct radeon_device *rdev, struct radeon_ring *ring)
{
int r;
r = radeon_ring_lock(rdev, 64);
r = radeon_ring_lock(rdev, ring, 64);
if (r) {
return;
}
radeon_ring_write(rdev, PACKET0(ISYNC_CNTL, 0));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET0(ISYNC_CNTL, 0));
radeon_ring_write(ring,
ISYNC_ANY2D_IDLE3D |
ISYNC_ANY3D_IDLE2D |
ISYNC_WAIT_IDLEGUI |
ISYNC_CPSCRATCH_IDLEGUI);
radeon_ring_write(rdev, PACKET0(WAIT_UNTIL, 0));
radeon_ring_write(rdev, WAIT_2D_IDLECLEAN | WAIT_3D_IDLECLEAN);
radeon_ring_write(rdev, PACKET0(R300_DST_PIPE_CONFIG, 0));
radeon_ring_write(rdev, R300_PIPE_AUTO_CONFIG);
radeon_ring_write(rdev, PACKET0(GB_SELECT, 0));
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, PACKET0(GB_ENABLE, 0));
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, PACKET0(R500_SU_REG_DEST, 0));
radeon_ring_write(rdev, (1 << rdev->num_gb_pipes) - 1);
radeon_ring_write(rdev, PACKET0(VAP_INDEX_OFFSET, 0));
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, PACKET0(RB3D_DSTCACHE_CTLSTAT, 0));
radeon_ring_write(rdev, RB3D_DC_FLUSH | RB3D_DC_FREE);
radeon_ring_write(rdev, PACKET0(ZB_ZCACHE_CTLSTAT, 0));
radeon_ring_write(rdev, ZC_FLUSH | ZC_FREE);
radeon_ring_write(rdev, PACKET0(WAIT_UNTIL, 0));
radeon_ring_write(rdev, WAIT_2D_IDLECLEAN | WAIT_3D_IDLECLEAN);
radeon_ring_write(rdev, PACKET0(GB_AA_CONFIG, 0));
radeon_ring_write(rdev, 0);
radeon_ring_write(rdev, PACKET0(RB3D_DSTCACHE_CTLSTAT, 0));
radeon_ring_write(rdev, RB3D_DC_FLUSH | RB3D_DC_FREE);
radeon_ring_write(rdev, PACKET0(ZB_ZCACHE_CTLSTAT, 0));
radeon_ring_write(rdev, ZC_FLUSH | ZC_FREE);
radeon_ring_write(rdev, PACKET0(GB_MSPOS0, 0));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET0(WAIT_UNTIL, 0));
radeon_ring_write(ring, WAIT_2D_IDLECLEAN | WAIT_3D_IDLECLEAN);
radeon_ring_write(ring, PACKET0(R300_DST_PIPE_CONFIG, 0));
radeon_ring_write(ring, R300_PIPE_AUTO_CONFIG);
radeon_ring_write(ring, PACKET0(GB_SELECT, 0));
radeon_ring_write(ring, 0);
radeon_ring_write(ring, PACKET0(GB_ENABLE, 0));
radeon_ring_write(ring, 0);
radeon_ring_write(ring, PACKET0(R500_SU_REG_DEST, 0));
radeon_ring_write(ring, (1 << rdev->num_gb_pipes) - 1);
radeon_ring_write(ring, PACKET0(VAP_INDEX_OFFSET, 0));
radeon_ring_write(ring, 0);
radeon_ring_write(ring, PACKET0(RB3D_DSTCACHE_CTLSTAT, 0));
radeon_ring_write(ring, RB3D_DC_FLUSH | RB3D_DC_FREE);
radeon_ring_write(ring, PACKET0(ZB_ZCACHE_CTLSTAT, 0));
radeon_ring_write(ring, ZC_FLUSH | ZC_FREE);
radeon_ring_write(ring, PACKET0(WAIT_UNTIL, 0));
radeon_ring_write(ring, WAIT_2D_IDLECLEAN | WAIT_3D_IDLECLEAN);
radeon_ring_write(ring, PACKET0(GB_AA_CONFIG, 0));
radeon_ring_write(ring, 0);
radeon_ring_write(ring, PACKET0(RB3D_DSTCACHE_CTLSTAT, 0));
radeon_ring_write(ring, RB3D_DC_FLUSH | RB3D_DC_FREE);
radeon_ring_write(ring, PACKET0(ZB_ZCACHE_CTLSTAT, 0));
radeon_ring_write(ring, ZC_FLUSH | ZC_FREE);
radeon_ring_write(ring, PACKET0(GB_MSPOS0, 0));
radeon_ring_write(ring,
((6 << MS_X0_SHIFT) |
(6 << MS_Y0_SHIFT) |
(6 << MS_X1_SHIFT) |
@ -101,8 +101,8 @@ void rv515_ring_start(struct radeon_device *rdev)
(6 << MS_Y2_SHIFT) |
(6 << MSBD0_Y_SHIFT) |
(6 << MSBD0_X_SHIFT)));
radeon_ring_write(rdev, PACKET0(GB_MSPOS1, 0));
radeon_ring_write(rdev,
radeon_ring_write(ring, PACKET0(GB_MSPOS1, 0));
radeon_ring_write(ring,
((6 << MS_X3_SHIFT) |
(6 << MS_Y3_SHIFT) |
(6 << MS_X4_SHIFT) |
@ -110,15 +110,15 @@ void rv515_ring_start(struct radeon_device *rdev)
(6 << MS_X5_SHIFT) |
(6 << MS_Y5_SHIFT) |
(6 << MSBD1_SHIFT)));
radeon_ring_write(rdev, PACKET0(GA_ENHANCE, 0));
radeon_ring_write(rdev, GA_DEADLOCK_CNTL | GA_FASTSYNC_CNTL);
radeon_ring_write(rdev, PACKET0(GA_POLY_MODE, 0));
radeon_ring_write(rdev, FRONT_PTYPE_TRIANGE | BACK_PTYPE_TRIANGE);
radeon_ring_write(rdev, PACKET0(GA_ROUND_MODE, 0));
radeon_ring_write(rdev, GEOMETRY_ROUND_NEAREST | COLOR_ROUND_NEAREST);
radeon_ring_write(rdev, PACKET0(0x20C8, 0));
radeon_ring_write(rdev, 0);
radeon_ring_unlock_commit(rdev);
radeon_ring_write(ring, PACKET0(GA_ENHANCE, 0));
radeon_ring_write(ring, GA_DEADLOCK_CNTL | GA_FASTSYNC_CNTL);
radeon_ring_write(ring, PACKET0(GA_POLY_MODE, 0));
radeon_ring_write(ring, FRONT_PTYPE_TRIANGE | BACK_PTYPE_TRIANGE);
radeon_ring_write(ring, PACKET0(GA_ROUND_MODE, 0));
radeon_ring_write(ring, GEOMETRY_ROUND_NEAREST | COLOR_ROUND_NEAREST);
radeon_ring_write(ring, PACKET0(0x20C8, 0));
radeon_ring_write(ring, 0);
radeon_ring_unlock_commit(rdev, ring);
}
int rv515_mc_wait_for_idle(struct radeon_device *rdev)
@ -143,13 +143,13 @@ void rv515_vga_render_disable(struct radeon_device *rdev)
RREG32(R_000300_VGA_RENDER_CONTROL) & C_000300_VGA_VSTATUS_CNTL);
}
void rv515_gpu_init(struct radeon_device *rdev)
static void rv515_gpu_init(struct radeon_device *rdev)
{
unsigned pipe_select_current, gb_pipe_select, tmp;
if (r100_gui_wait_for_idle(rdev)) {
printk(KERN_WARNING "Failed to wait GUI idle while "
"reseting GPU. Bad things might happen.\n");
"resetting GPU. Bad things might happen.\n");
}
rv515_vga_render_disable(rdev);
r420_pipes_init(rdev);
@ -161,7 +161,7 @@ void rv515_gpu_init(struct radeon_device *rdev)
WREG32_PLL(0x000D, tmp);
if (r100_gui_wait_for_idle(rdev)) {
printk(KERN_WARNING "Failed to wait GUI idle while "
"reseting GPU. Bad things might happen.\n");
"resetting GPU. Bad things might happen.\n");
}
if (rv515_mc_wait_for_idle(rdev)) {
printk(KERN_WARNING "Failed to wait MC idle while "
@ -189,7 +189,7 @@ static void rv515_vram_get_type(struct radeon_device *rdev)
}
}
void rv515_mc_init(struct radeon_device *rdev)
static void rv515_mc_init(struct radeon_device *rdev)
{
rv515_vram_get_type(rdev);
@ -261,7 +261,7 @@ static struct drm_info_list rv515_ga_info_list[] = {
};
#endif
int rv515_debugfs_pipes_info_init(struct radeon_device *rdev)
static int rv515_debugfs_pipes_info_init(struct radeon_device *rdev)
{
#if defined(CONFIG_DEBUG_FS)
return radeon_debugfs_add_files(rdev, rv515_pipes_info_list, 1);
@ -270,7 +270,7 @@ int rv515_debugfs_pipes_info_init(struct radeon_device *rdev)
#endif
}
int rv515_debugfs_ga_info_init(struct radeon_device *rdev)
static int rv515_debugfs_ga_info_init(struct radeon_device *rdev)
{
#if defined(CONFIG_DEBUG_FS)
return radeon_debugfs_add_files(rdev, rv515_ga_info_list, 1);
@ -281,12 +281,8 @@ int rv515_debugfs_ga_info_init(struct radeon_device *rdev)
void rv515_mc_stop(struct radeon_device *rdev, struct rv515_mc_save *save)
{
save->d1vga_control = RREG32(R_000330_D1VGA_CONTROL);
save->d2vga_control = RREG32(R_000338_D2VGA_CONTROL);
save->vga_render_control = RREG32(R_000300_VGA_RENDER_CONTROL);
save->vga_hdp_control = RREG32(R_000328_VGA_HDP_CONTROL);
save->d1crtc_control = RREG32(R_006080_D1CRTC_CONTROL);
save->d2crtc_control = RREG32(R_006880_D2CRTC_CONTROL);
/* Stop all video */
WREG32(R_0068E8_D2CRTC_UPDATE_LOCK, 0);
@ -311,19 +307,10 @@ void rv515_mc_resume(struct radeon_device *rdev, struct rv515_mc_save *save)
/* Unlock host access */
WREG32(R_000328_VGA_HDP_CONTROL, save->vga_hdp_control);
mdelay(1);
/* Restore video state */
WREG32(R_000330_D1VGA_CONTROL, save->d1vga_control);
WREG32(R_000338_D2VGA_CONTROL, save->d2vga_control);
WREG32(R_0060E8_D1CRTC_UPDATE_LOCK, 1);
WREG32(R_0068E8_D2CRTC_UPDATE_LOCK, 1);
WREG32(R_006080_D1CRTC_CONTROL, save->d1crtc_control);
WREG32(R_006880_D2CRTC_CONTROL, save->d2crtc_control);
WREG32(R_0060E8_D1CRTC_UPDATE_LOCK, 0);
WREG32(R_0068E8_D2CRTC_UPDATE_LOCK, 0);
WREG32(R_000300_VGA_RENDER_CONTROL, save->vga_render_control);
}
void rv515_mc_program(struct radeon_device *rdev)
static void rv515_mc_program(struct radeon_device *rdev)
{
struct rv515_mc_save save;
@ -401,11 +388,13 @@ static int rv515_startup(struct radeon_device *rdev)
dev_err(rdev->dev, "failed initializing CP (%d).\n", r);
return r;
}
r = r100_ib_init(rdev);
r = radeon_ib_pool_init(rdev);
if (r) {
dev_err(rdev->dev, "failed initializing IB (%d).\n", r);
dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
return r;
}
return 0;
}
@ -477,6 +466,7 @@ int rv515_init(struct radeon_device *rdev)
if (r)
return r;
rv515_set_safe_registers(rdev);
rdev->accel_working = true;
r = rv515_startup(rdev);
if (r) {
@ -735,7 +725,7 @@ struct rv515_watermark {
fixed20_12 sclk;
};
void rv515_crtc_bandwidth_compute(struct radeon_device *rdev,
static void rv515_crtc_bandwidth_compute(struct radeon_device *rdev,
struct radeon_crtc *crtc,
struct rv515_watermark *wm)
{

Some files were not shown because too many files have changed in this diff.