Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 6f34be50 authored by Alex Deucher, committed by Dave Airlie
Browse files

drm/radeon/kms: add pageflip ioctl support (v3)



This adds support for dri2 pageflipping.

v2: precision updates from Mario Kleiner.
v3: Multihead fixes from Mario Kleiner; missing crtc offset
    add note about update pending bit on pre-avivo chips

Signed-off-by: Alex Deucher <alexdeucher@gmail.com>
Signed-off-by: Mario Kleiner <mario.kleiner@tuebingen.mpg.de>
Signed-off-by: Dave Airlie <airlied@redhat.com>
parent f5a80209
Loading
Loading
Loading
Loading
+200 −101
Original line number Original line Diff line number Diff line
@@ -40,6 +40,61 @@
static void evergreen_gpu_init(struct radeon_device *rdev);
static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);


/* Prepare a CRTC for a pageflip on evergreen-family hardware.
 *
 * Configures the GRPH flip control so the new surface address latches
 * during vertical blank (not horizontal blank), allows the update to
 * happen anywhere in the vblank interval, and enables the per-crtc
 * pageflip interrupt.  Paired with evergreen_post_page_flip().
 *
 * @rdev: radeon device
 * @crtc: crtc index into rdev->mode_info.crtcs[]
 */
void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc];
	u32 tmp;

	/* make sure flip is at vb rather than hb */
	tmp = RREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
	tmp &= ~EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN;
	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);

	/* set pageflip to happen anywhere in vblank interval */
	WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);

	/* enable the pflip int */
	radeon_irq_kms_pflip_irq_get(rdev, crtc);
}

/* Tear down after a completed pageflip: drop the reference on the
 * per-crtc pageflip interrupt taken by evergreen_pre_page_flip().
 */
void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
{
	/* disable the pflip int */
	radeon_irq_kms_pflip_irq_put(rdev, crtc);
}

/* Program a pageflip on @crtc_id so the CRTC scans out from @crtc_base.
 *
 * Takes the GRPH update lock, writes the new primary and secondary
 * surface addresses (both halves of the 64-bit address), waits for the
 * hardware to signal that an update is pending, then releases the lock
 * so the double-buffered registers latch inside vblank.
 *
 * Returns the current GRPH_SURFACE_UPDATE_PENDING status bit (non-zero
 * while the flip has not yet completed).
 */
u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
	int i;

	/* Lock the graphics update lock */
	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	/* Wait for update_pending to go high, but bound the busy-wait so a
	 * wedged GPU cannot hang the CPU forever (the original unbounded
	 * "while (!(...));" spin would never return if the bit never set).
	 */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
			break;
		udelay(1);
	}
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* Return current update_pending status: */
	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
}

/* get temperature in millidegrees */
/* get temperature in millidegrees */
u32 evergreen_get_temp(struct radeon_device *rdev)
u32 evergreen_get_temp(struct radeon_device *rdev)
{
{
@@ -2060,6 +2115,7 @@ int evergreen_irq_set(struct radeon_device *rdev)
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 grbm_int_cntl = 0;
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;


	if (!rdev->irq.installed) {
	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
@@ -2085,27 +2141,33 @@ int evergreen_irq_set(struct radeon_device *rdev)
		cp_int_cntl |= RB_INT_ENABLE;
		cp_int_cntl |= RB_INT_ENABLE;
		cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		cp_int_cntl |= TIME_STAMP_INT_ENABLE;
	}
	}
	if (rdev->irq.crtc_vblank_int[0]) {
	if (rdev->irq.crtc_vblank_int[0] ||
	    rdev->irq.pflip[0]) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
		crtc1 |= VBLANK_INT_MASK;
	}
	}
	if (rdev->irq.crtc_vblank_int[1]) {
	if (rdev->irq.crtc_vblank_int[1] ||
	    rdev->irq.pflip[1]) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
		crtc2 |= VBLANK_INT_MASK;
	}
	}
	if (rdev->irq.crtc_vblank_int[2]) {
	if (rdev->irq.crtc_vblank_int[2] ||
	    rdev->irq.pflip[2]) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
		crtc3 |= VBLANK_INT_MASK;
	}
	}
	if (rdev->irq.crtc_vblank_int[3]) {
	if (rdev->irq.crtc_vblank_int[3] ||
	    rdev->irq.pflip[3]) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
		crtc4 |= VBLANK_INT_MASK;
	}
	}
	if (rdev->irq.crtc_vblank_int[4]) {
	if (rdev->irq.crtc_vblank_int[4] ||
	    rdev->irq.pflip[4]) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
		crtc5 |= VBLANK_INT_MASK;
	}
	}
	if (rdev->irq.crtc_vblank_int[5]) {
	if (rdev->irq.crtc_vblank_int[5] ||
	    rdev->irq.pflip[5]) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
		crtc6 |= VBLANK_INT_MASK;
	}
	}
@@ -2148,6 +2210,13 @@ int evergreen_irq_set(struct radeon_device *rdev)
	WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
	WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
	WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);


	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
@@ -2158,79 +2227,92 @@ int evergreen_irq_set(struct radeon_device *rdev)
	return 0;
	return 0;
}
}


static inline void evergreen_irq_ack(struct radeon_device *rdev,
static inline void evergreen_irq_ack(struct radeon_device *rdev)
				     u32 *disp_int,
				     u32 *disp_int_cont,
				     u32 *disp_int_cont2,
				     u32 *disp_int_cont3,
				     u32 *disp_int_cont4,
				     u32 *disp_int_cont5)
{
{
	u32 tmp;
	u32 tmp;


	*disp_int = RREG32(DISP_INTERRUPT_STATUS);
	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
	*disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
	*disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
	*disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
	*disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
	*disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);

	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
	if (*disp_int & LB_D1_VBLANK_INTERRUPT)
	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);

	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);

	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
	if (*disp_int & LB_D1_VLINE_INTERRUPT)
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);


	if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
	if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);


	if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
	if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
		WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);


	if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
	if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
		WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);


	if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
	if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
		WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);


	if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
	if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
		WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);


	if (*disp_int & DC_HPD1_INTERRUPT) {
	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
		WREG32(DC_HPD1_INT_CONTROL, tmp);
	}
	}
	if (*disp_int_cont & DC_HPD2_INTERRUPT) {
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
		WREG32(DC_HPD2_INT_CONTROL, tmp);
	}
	}
	if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
		WREG32(DC_HPD3_INT_CONTROL, tmp);
	}
	}
	if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	}
	if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
		WREG32(DC_HPD5_INT_CONTROL, tmp);
	}
	}
	if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
		WREG32(DC_HPD6_INT_CONTROL, tmp);
@@ -2239,14 +2321,10 @@ static inline void evergreen_irq_ack(struct radeon_device *rdev,


void evergreen_irq_disable(struct radeon_device *rdev)
void evergreen_irq_disable(struct radeon_device *rdev)
{
{
	u32 disp_int, disp_int_cont, disp_int_cont2;
	u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;

	r600_disable_interrupts(rdev);
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	/* Wait and acknowledge irq */
	mdelay(1);
	mdelay(1);
	evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
	evergreen_irq_ack(rdev);
			  &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
	evergreen_disable_interrupt_state(rdev);
	evergreen_disable_interrupt_state(rdev);
}
}


@@ -2286,8 +2364,6 @@ int evergreen_irq_process(struct radeon_device *rdev)
	u32 rptr = rdev->ih.rptr;
	u32 rptr = rdev->ih.rptr;
	u32 src_id, src_data;
	u32 src_id, src_data;
	u32 ring_index;
	u32 ring_index;
	u32 disp_int, disp_int_cont, disp_int_cont2;
	u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
	unsigned long flags;
	unsigned long flags;
	bool queue_hotplug = false;
	bool queue_hotplug = false;


@@ -2308,8 +2384,7 @@ int evergreen_irq_process(struct radeon_device *rdev)


restart_ih:
restart_ih:
	/* display interrupts */
	/* display interrupts */
	evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
	evergreen_irq_ack(rdev);
			  &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);


	rdev->ih.wptr = wptr;
	rdev->ih.wptr = wptr;
	while (rptr != wptr) {
	while (rptr != wptr) {
@@ -2322,17 +2397,21 @@ int evergreen_irq_process(struct radeon_device *rdev)
		case 1: /* D1 vblank/vline */
		case 1: /* D1 vblank/vline */
			switch (src_data) {
			switch (src_data) {
			case 0: /* D1 vblank */
			case 0: /* D1 vblank */
				if (disp_int & LB_D1_VBLANK_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
					if (rdev->irq.pflip[0])
						radeon_crtc_handle_flip(rdev, 0);
					if (rdev->irq.crtc_vblank_int[0]) {
						drm_handle_vblank(rdev->ddev, 0);
						drm_handle_vblank(rdev->ddev, 0);
						rdev->pm.vblank_sync = true;
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
						wake_up(&rdev->irq.vblank_queue);
					disp_int &= ~LB_D1_VBLANK_INTERRUPT;
					}
					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D1 vblank\n");
					DRM_DEBUG("IH: D1 vblank\n");
				}
				}
				break;
				break;
			case 1: /* D1 vline */
			case 1: /* D1 vline */
				if (disp_int & LB_D1_VLINE_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
					disp_int &= ~LB_D1_VLINE_INTERRUPT;
					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D1 vline\n");
					DRM_DEBUG("IH: D1 vline\n");
				}
				}
				break;
				break;
@@ -2344,17 +2423,21 @@ int evergreen_irq_process(struct radeon_device *rdev)
		case 2: /* D2 vblank/vline */
		case 2: /* D2 vblank/vline */
			switch (src_data) {
			switch (src_data) {
			case 0: /* D2 vblank */
			case 0: /* D2 vblank */
				if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
					if (rdev->irq.pflip[1])
						radeon_crtc_handle_flip(rdev, 1);
					if (rdev->irq.crtc_vblank_int[1]) {
						drm_handle_vblank(rdev->ddev, 1);
						drm_handle_vblank(rdev->ddev, 1);
						rdev->pm.vblank_sync = true;
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
						wake_up(&rdev->irq.vblank_queue);
					disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
					}
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D2 vblank\n");
					DRM_DEBUG("IH: D2 vblank\n");
				}
				}
				break;
				break;
			case 1: /* D2 vline */
			case 1: /* D2 vline */
				if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
					disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D2 vline\n");
					DRM_DEBUG("IH: D2 vline\n");
				}
				}
				break;
				break;
@@ -2366,17 +2449,21 @@ int evergreen_irq_process(struct radeon_device *rdev)
		case 3: /* D3 vblank/vline */
		case 3: /* D3 vblank/vline */
			switch (src_data) {
			switch (src_data) {
			case 0: /* D3 vblank */
			case 0: /* D3 vblank */
				if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[2]) {
						drm_handle_vblank(rdev->ddev, 2);
						drm_handle_vblank(rdev->ddev, 2);
						rdev->pm.vblank_sync = true;
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
						wake_up(&rdev->irq.vblank_queue);
					disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
					}
					if (rdev->irq.pflip[2])
						radeon_crtc_handle_flip(rdev, 2);
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D3 vblank\n");
					DRM_DEBUG("IH: D3 vblank\n");
				}
				}
				break;
				break;
			case 1: /* D3 vline */
			case 1: /* D3 vline */
				if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
					disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D3 vline\n");
					DRM_DEBUG("IH: D3 vline\n");
				}
				}
				break;
				break;
@@ -2388,17 +2475,21 @@ int evergreen_irq_process(struct radeon_device *rdev)
		case 4: /* D4 vblank/vline */
		case 4: /* D4 vblank/vline */
			switch (src_data) {
			switch (src_data) {
			case 0: /* D4 vblank */
			case 0: /* D4 vblank */
				if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[3]) {
						drm_handle_vblank(rdev->ddev, 3);
						drm_handle_vblank(rdev->ddev, 3);
						rdev->pm.vblank_sync = true;
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
						wake_up(&rdev->irq.vblank_queue);
					disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
					}
					if (rdev->irq.pflip[3])
						radeon_crtc_handle_flip(rdev, 3);
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D4 vblank\n");
					DRM_DEBUG("IH: D4 vblank\n");
				}
				}
				break;
				break;
			case 1: /* D4 vline */
			case 1: /* D4 vline */
				if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
					disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D4 vline\n");
					DRM_DEBUG("IH: D4 vline\n");
				}
				}
				break;
				break;
@@ -2410,17 +2501,21 @@ int evergreen_irq_process(struct radeon_device *rdev)
		case 5: /* D5 vblank/vline */
		case 5: /* D5 vblank/vline */
			switch (src_data) {
			switch (src_data) {
			case 0: /* D5 vblank */
			case 0: /* D5 vblank */
				if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[4]) {
						drm_handle_vblank(rdev->ddev, 4);
						drm_handle_vblank(rdev->ddev, 4);
						rdev->pm.vblank_sync = true;
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
						wake_up(&rdev->irq.vblank_queue);
					disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
					}
					if (rdev->irq.pflip[4])
						radeon_crtc_handle_flip(rdev, 4);
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D5 vblank\n");
					DRM_DEBUG("IH: D5 vblank\n");
				}
				}
				break;
				break;
			case 1: /* D5 vline */
			case 1: /* D5 vline */
				if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
					disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D5 vline\n");
					DRM_DEBUG("IH: D5 vline\n");
				}
				}
				break;
				break;
@@ -2432,17 +2527,21 @@ int evergreen_irq_process(struct radeon_device *rdev)
		case 6: /* D6 vblank/vline */
		case 6: /* D6 vblank/vline */
			switch (src_data) {
			switch (src_data) {
			case 0: /* D6 vblank */
			case 0: /* D6 vblank */
				if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[5]) {
						drm_handle_vblank(rdev->ddev, 5);
						drm_handle_vblank(rdev->ddev, 5);
						rdev->pm.vblank_sync = true;
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
						wake_up(&rdev->irq.vblank_queue);
					disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
					}
					if (rdev->irq.pflip[5])
						radeon_crtc_handle_flip(rdev, 5);
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D6 vblank\n");
					DRM_DEBUG("IH: D6 vblank\n");
				}
				}
				break;
				break;
			case 1: /* D6 vline */
			case 1: /* D6 vline */
				if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
					disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D6 vline\n");
					DRM_DEBUG("IH: D6 vline\n");
				}
				}
				break;
				break;
@@ -2454,43 +2553,43 @@ int evergreen_irq_process(struct radeon_device *rdev)
		case 42: /* HPD hotplug */
		case 42: /* HPD hotplug */
			switch (src_data) {
			switch (src_data) {
			case 0:
			case 0:
				if (disp_int & DC_HPD1_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
					disp_int &= ~DC_HPD1_INTERRUPT;
					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
					queue_hotplug = true;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD1\n");
					DRM_DEBUG("IH: HPD1\n");
				}
				}
				break;
				break;
			case 1:
			case 1:
				if (disp_int_cont & DC_HPD2_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
					disp_int_cont &= ~DC_HPD2_INTERRUPT;
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
					queue_hotplug = true;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD2\n");
					DRM_DEBUG("IH: HPD2\n");
				}
				}
				break;
				break;
			case 2:
			case 2:
				if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
					disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
					queue_hotplug = true;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD3\n");
					DRM_DEBUG("IH: HPD3\n");
				}
				}
				break;
				break;
			case 3:
			case 3:
				if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
					disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
					queue_hotplug = true;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD4\n");
					DRM_DEBUG("IH: HPD4\n");
				}
				}
				break;
				break;
			case 4:
			case 4:
				if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
					disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
					queue_hotplug = true;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD5\n");
					DRM_DEBUG("IH: HPD5\n");
				}
				}
				break;
				break;
			case 5:
			case 5:
				if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
					disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
					queue_hotplug = true;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD6\n");
					DRM_DEBUG("IH: HPD6\n");
				}
				}
+6 −0
Original line number Original line Diff line number Diff line
@@ -105,6 +105,11 @@
#define EVERGREEN_GRPH_Y_START                          0x6830
#define EVERGREEN_GRPH_Y_START                          0x6830
#define EVERGREEN_GRPH_X_END                            0x6834
#define EVERGREEN_GRPH_X_END                            0x6834
#define EVERGREEN_GRPH_Y_END                            0x6838
#define EVERGREEN_GRPH_Y_END                            0x6838
#define EVERGREEN_GRPH_UPDATE                           0x6844
#       define EVERGREEN_GRPH_SURFACE_UPDATE_PENDING    (1 << 2)
#       define EVERGREEN_GRPH_UPDATE_LOCK               (1 << 16)
#define EVERGREEN_GRPH_FLIP_CONTROL                     0x6848
#       define EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN (1 << 0)


/* CUR blocks at 0x6998, 0x7598, 0x10198, 0x10d98, 0x11998, 0x12598 */
/* CUR blocks at 0x6998, 0x7598, 0x10198, 0x10d98, 0x11998, 0x12598 */
#define EVERGREEN_CUR_CONTROL                           0x6998
#define EVERGREEN_CUR_CONTROL                           0x6998
@@ -178,6 +183,7 @@
#       define EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE (1 << 24)
#       define EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE (1 << 24)
#define EVERGREEN_CRTC_STATUS                           0x6e8c
#define EVERGREEN_CRTC_STATUS                           0x6e8c
#define EVERGREEN_CRTC_STATUS_POSITION                  0x6e90
#define EVERGREEN_CRTC_STATUS_POSITION                  0x6e90
#define EVERGREEN_MASTER_UPDATE_MODE                    0x6ef8
#define EVERGREEN_CRTC_UPDATE_LOCK                      0x6ed4
#define EVERGREEN_CRTC_UPDATE_LOCK                      0x6ed4


#define EVERGREEN_DC_GPIO_HPD_MASK                      0x64b0
#define EVERGREEN_DC_GPIO_HPD_MASK                      0x64b0
+66 −8
Original line number Original line Diff line number Diff line
@@ -68,6 +68,54 @@ MODULE_FIRMWARE(FIRMWARE_R520);
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
 */


/* Prepare a CRTC for a pageflip on pre-AVIVO (r1xx/r2xx/r3xx/r4xx) hardware.
 *
 * Forces the CRTC offset to latch during vertical blank rather than
 * horizontal blank, programs the flip to occur as late as possible in
 * the vblank interval, and enables the per-crtc pageflip interrupt.
 * Paired with r100_post_page_flip().
 *
 * @rdev: radeon device
 * @crtc: crtc index into rdev->mode_info.crtcs[]
 */
void r100_pre_page_flip(struct radeon_device *rdev, int crtc)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc];
	u32 tmp;

	/* make sure flip is at vb rather than hb */
	tmp = RREG32(RADEON_CRTC_OFFSET_CNTL + radeon_crtc->crtc_offset);
	tmp &= ~RADEON_CRTC_OFFSET_FLIP_CNTL;
	WREG32(RADEON_CRTC_OFFSET_CNTL + radeon_crtc->crtc_offset, tmp);

	/* set pageflip to happen as late as possible in the vblank interval.
	 * same field for crtc1/2
	 */
	tmp = RREG32(RADEON_CRTC_GEN_CNTL);
	tmp &= ~RADEON_CRTC_VSTAT_MODE_MASK;
	WREG32(RADEON_CRTC_GEN_CNTL, tmp);

	/* enable the pflip int */
	radeon_irq_kms_pflip_irq_get(rdev, crtc);
}

/* Tear down after a completed pageflip: drop the reference on the
 * per-crtc pageflip interrupt taken by r100_pre_page_flip().
 */
void r100_post_page_flip(struct radeon_device *rdev, int crtc)
{
	/* disable the pflip int */
	radeon_irq_kms_pflip_irq_put(rdev, crtc);
}

/* Program a pageflip on @crtc_id so the CRTC scans out from @crtc_base.
 *
 * Pre-AVIVO variant: the lock bit and the new offset live in the same
 * CRTC_OFFSET register, so the locked address is written in a single
 * register write, then the lock is released with a second write.
 * Note the offset is truncated to 32 bits — assumes the framebuffer
 * lives in the low 4GB of the GPU address space on these asics.
 *
 * Returns the current GUI_TRIG_OFFSET (update pending) status bit.
 */
u32 r100_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	u32 tmp = ((u32)crtc_base) | RADEON_CRTC_OFFSET__OFFSET_LOCK;

	/* Lock the graphics update lock */
	/* update the scanout addresses */
	WREG32(RADEON_CRTC_OFFSET + radeon_crtc->crtc_offset, tmp);

	/* Note: We don't wait for update_pending to assert, as this never
	 * happens for some reason on R1xx - R4xx. Adds a bit of imprecision.
	 */

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~RADEON_CRTC_OFFSET__OFFSET_LOCK;
	WREG32(RADEON_CRTC_OFFSET + radeon_crtc->crtc_offset, tmp);

	/* Return current update_pending status: */
	return RREG32(RADEON_CRTC_OFFSET + radeon_crtc->crtc_offset) & RADEON_CRTC_OFFSET__GUI_TRIG_OFFSET;
}

void r100_pm_get_dynpm_state(struct radeon_device *rdev)
void r100_pm_get_dynpm_state(struct radeon_device *rdev)
{
{
	int i;
	int i;
@@ -526,10 +574,12 @@ int r100_irq_set(struct radeon_device *rdev)
	if (rdev->irq.gui_idle) {
	if (rdev->irq.gui_idle) {
		tmp |= RADEON_GUI_IDLE_MASK;
		tmp |= RADEON_GUI_IDLE_MASK;
	}
	}
	if (rdev->irq.crtc_vblank_int[0]) {
	if (rdev->irq.crtc_vblank_int[0] ||
	    rdev->irq.pflip[0]) {
		tmp |= RADEON_CRTC_VBLANK_MASK;
		tmp |= RADEON_CRTC_VBLANK_MASK;
	}
	}
	if (rdev->irq.crtc_vblank_int[1]) {
	if (rdev->irq.crtc_vblank_int[1] ||
	    rdev->irq.pflip[1]) {
		tmp |= RADEON_CRTC2_VBLANK_MASK;
		tmp |= RADEON_CRTC2_VBLANK_MASK;
	}
	}
	if (rdev->irq.hpd[0]) {
	if (rdev->irq.hpd[0]) {
@@ -600,15 +650,23 @@ int r100_irq_process(struct radeon_device *rdev)
		}
		}
		/* Vertical blank interrupts */
		/* Vertical blank interrupts */
		if (status & RADEON_CRTC_VBLANK_STAT) {
		if (status & RADEON_CRTC_VBLANK_STAT) {
			if (rdev->irq.pflip[0])
				radeon_crtc_handle_flip(rdev, 0);
			if (rdev->irq.crtc_vblank_int[0]) {
				drm_handle_vblank(rdev->ddev, 0);
				drm_handle_vblank(rdev->ddev, 0);
				rdev->pm.vblank_sync = true;
				rdev->pm.vblank_sync = true;
				wake_up(&rdev->irq.vblank_queue);
				wake_up(&rdev->irq.vblank_queue);
			}
			}
		}
		if (status & RADEON_CRTC2_VBLANK_STAT) {
		if (status & RADEON_CRTC2_VBLANK_STAT) {
			if (rdev->irq.pflip[1])
				radeon_crtc_handle_flip(rdev, 1);
			if (rdev->irq.crtc_vblank_int[1]) {
				drm_handle_vblank(rdev->ddev, 1);
				drm_handle_vblank(rdev->ddev, 1);
				rdev->pm.vblank_sync = true;
				rdev->pm.vblank_sync = true;
				wake_up(&rdev->irq.vblank_queue);
				wake_up(&rdev->irq.vblank_queue);
			}
			}
		}
		if (status & RADEON_FP_DETECT_STAT) {
		if (status & RADEON_FP_DETECT_STAT) {
			queue_hotplug = true;
			queue_hotplug = true;
			DRM_DEBUG("HPD1\n");
			DRM_DEBUG("HPD1\n");
+4 −0
Original line number Original line Diff line number Diff line
@@ -355,6 +355,8 @@
#define AVIVO_D1CRTC_FRAME_COUNT                                0x60a4
#define AVIVO_D1CRTC_FRAME_COUNT                                0x60a4
#define AVIVO_D1CRTC_STEREO_CONTROL                             0x60c4
#define AVIVO_D1CRTC_STEREO_CONTROL                             0x60c4


#define AVIVO_D1MODE_MASTER_UPDATE_MODE                         0x60e4

/* master controls */
/* master controls */
#define AVIVO_DC_CRTC_MASTER_EN                                 0x60f8
#define AVIVO_DC_CRTC_MASTER_EN                                 0x60f8
#define AVIVO_DC_CRTC_TV_CONTROL                                0x60fc
#define AVIVO_DC_CRTC_TV_CONTROL                                0x60fc
@@ -409,8 +411,10 @@
#define AVIVO_D1GRPH_X_END                                      0x6134
#define AVIVO_D1GRPH_X_END                                      0x6134
#define AVIVO_D1GRPH_Y_END                                      0x6138
#define AVIVO_D1GRPH_Y_END                                      0x6138
#define AVIVO_D1GRPH_UPDATE                                     0x6144
#define AVIVO_D1GRPH_UPDATE                                     0x6144
#       define AVIVO_D1GRPH_SURFACE_UPDATE_PENDING              (1 << 2)
#       define AVIVO_D1GRPH_UPDATE_LOCK                         (1 << 16)
#       define AVIVO_D1GRPH_UPDATE_LOCK                         (1 << 16)
#define AVIVO_D1GRPH_FLIP_CONTROL                               0x6148
#define AVIVO_D1GRPH_FLIP_CONTROL                               0x6148
#       define AVIVO_D1GRPH_SURFACE_UPDATE_H_RETRACE_EN         (1 << 0)


#define AVIVO_D1CUR_CONTROL                     0x6400
#define AVIVO_D1CUR_CONTROL                     0x6400
#       define AVIVO_D1CURSOR_EN                (1 << 0)
#       define AVIVO_D1CURSOR_EN                (1 << 0)
+71 −55

File changed.

Preview size limit exceeded, changes collapsed.

Loading