Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 38726840 authored by Gopikrishnaiah Anandan's avatar Gopikrishnaiah Anandan
Browse files

drm: msm: sde: Fix the reg dma last command sequence



The control trigger should be sent only for the last command of a frame
sequence. Currently, the driver sends the trigger for every command
packet, which causes the SDE hardware to stall. This change fixes the
last-command sequence.

Change-Id: Ibce3b1d42b1d5e0f8ff87b6dbcc9dd1c2f5de84c
Signed-off-by: default avatarGopikrishnaiah Anandan <agopik@codeaurora.org>
parent 14416b24
Loading
Loading
Loading
Loading
+2 −1
Original line number Diff line number Diff line
@@ -2479,7 +2479,8 @@ static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
				phys->split_role == ENC_ROLE_SLAVE) &&
				phys->split_role != ENC_ROLE_SKIP)
			set_bit(i, sde_enc->frame_busy_mask);

		if (phys->hw_ctl->ops.reg_dma_flush)
			phys->hw_ctl->ops.reg_dma_flush(phys->hw_ctl);
		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_sde_encoder_trigger_flush(&sde_enc->base, phys, 0x0);
+10 −4
Original line number Diff line number Diff line
@@ -112,10 +112,6 @@ static u32 sde_hw_ctl_get_pending_flush(struct sde_hw_ctl *ctx)

static inline void sde_hw_ctl_trigger_flush(struct sde_hw_ctl *ctx)
{
	struct sde_hw_reg_dma_ops *ops = sde_reg_dma_get_ops();

	if (ops && ops->last_command)
		ops->last_command(ctx, DMA_CTL_QUEUE0);

	SDE_REG_WRITE(&ctx->hw, CTL_FLUSH, ctx->pending_flush_mask);
}
@@ -538,6 +534,14 @@ static void sde_hw_ctl_setup_sbuf_cfg(struct sde_hw_ctl *ctx,
	SDE_REG_WRITE(c, CTL_ROT_TOP, val);
}

static void sde_hw_reg_dma_flush(struct sde_hw_ctl *ctx)
{
	struct sde_hw_reg_dma_ops *ops = sde_reg_dma_get_ops();

	if (ops && ops->last_command)
		ops->last_command(ctx, DMA_CTL_QUEUE0);
}

static void _setup_ctl_ops(struct sde_hw_ctl_ops *ops,
		unsigned long cap)
{
@@ -559,6 +563,8 @@ static void _setup_ctl_ops(struct sde_hw_ctl_ops *ops,
	ops->get_bitmask_intf = sde_hw_ctl_get_bitmask_intf;
	ops->get_bitmask_cdm = sde_hw_ctl_get_bitmask_cdm;
	ops->get_bitmask_wb = sde_hw_ctl_get_bitmask_wb;
	ops->reg_dma_flush = sde_hw_reg_dma_flush;

	if (cap & BIT(SDE_CTL_SBUF)) {
		ops->get_bitmask_rot = sde_hw_ctl_get_bitmask_rot;
		ops->setup_sbuf_cfg = sde_hw_ctl_setup_sbuf_cfg;
+7 −0
Original line number Diff line number Diff line
@@ -206,6 +206,13 @@ struct sde_hw_ctl_ops {

	void (*setup_sbuf_cfg)(struct sde_hw_ctl *ctx,
		struct sde_ctl_sbuf_cfg *cfg);

	/**
	 * Flush the reg dma by sending last command.
	 * @ctx       : ctl path ctx pointer
	 */
	void (*reg_dma_flush)(struct sde_hw_ctl *ctx);

};

/**
+4 −4
Original line number Diff line number Diff line
@@ -50,7 +50,6 @@
			(cfg)->dma_buf->index)

#define REG_DMA_DECODE_SEL 0x180AC060
#define REG_DMA_LAST_CMD 0x180AC004
#define SINGLE_REG_WRITE_OPCODE (BIT(28))
#define REL_ADDR_OPCODE (BIT(27))
#define HW_INDEX_REG_WRITE_OPCODE (BIT(28) | BIT(29))
@@ -471,6 +470,7 @@ static int write_kick_off_v1(struct sde_reg_dma_kickoff_cfg *cfg)
			cfg->dma_buf->iova);
	SDE_REG_WRITE(&hw, reg_dma_ctl_queue_off[cfg->ctl->idx] + 0x4,
			cmd1);
	if (cfg->last_command)
		SDE_REG_WRITE(&cfg->ctl->hw, REG_DMA_CTL_TRIGGER_OFF,
			queue_sel[cfg->queue_select]);

@@ -754,8 +754,8 @@ static int write_last_cmd(struct sde_reg_dma_setup_ops_cfg *cfg)

	loc =  (u32 *)((u8 *)cfg->dma_buf->vaddr +
			cfg->dma_buf->index);
	loc[0] = REG_DMA_LAST_CMD;
	loc[1] = BIT(0);
	loc[0] = REG_DMA_DECODE_SEL;
	loc[1] = 0;
	cfg->dma_buf->index = sizeof(u32) * 2;
	cfg->dma_buf->ops_completed = REG_WRITE_OP | DECODE_SEL_OP;
	cfg->dma_buf->next_op_allowed = REG_WRITE_OP;