Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 7d070947 authored by Lloyd Atkinson's avatar Lloyd Atkinson Committed by Narendra Muppalla
Browse files

drm/msm/sde: reset kickoff count atomically before irq enable



Rework physical encoder kickoff counting logic to address issues
where an interrupt could be missed, or completion could get out
of sync due to a race condition between kickoff count and irq
enable. It also generalizes the kickoff count across the video
and command interfaces.

Change-Id: Id88f5b2859f74ee1aced8fbd7552e9023155d01c
Signed-off-by: Lloyd Atkinson <latkinso@codeaurora.org>
parent cd43ca63
Loading
Loading
Loading
Loading
+21 −8
Original line number Diff line number Diff line
@@ -52,7 +52,7 @@
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @spin_lock:		Lock for IRQ purposes
 * @enc_spin_lock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
@@ -72,7 +72,7 @@
 */
struct sde_encoder_virt {
	struct drm_encoder base;
	spinlock_t spin_lock;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;
@@ -527,10 +527,10 @@ static void sde_encoder_vblank_callback(struct drm_encoder *drm_enc,

	sde_enc = to_sde_encoder_virt(drm_enc);

	spin_lock_irqsave(&sde_enc->spin_lock, lock_flags);
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
	if (sde_enc->crtc_vblank_cb)
		sde_enc->crtc_vblank_cb(sde_enc->crtc_vblank_cb_data);
	spin_unlock_irqrestore(&sde_enc->spin_lock, lock_flags);
	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);

	atomic_inc(&phy_enc->vsync_cnt);
}
@@ -561,10 +561,10 @@ void sde_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
	SDE_DEBUG_ENC(sde_enc, "\n");
	SDE_EVT32(DRMID(drm_enc), enable);

	spin_lock_irqsave(&sde_enc->spin_lock, lock_flags);
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);
	sde_enc->crtc_vblank_cb = vbl_cb;
	sde_enc->crtc_vblank_cb_data = vbl_data;
	spin_unlock_irqrestore(&sde_enc->spin_lock, lock_flags);
	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);

	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
@@ -676,6 +676,8 @@ static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
{
	struct sde_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;
	int pending_kickoff_cnt;

	if (!sde_enc) {
		SDE_ERROR("invalid encoder\n");
@@ -684,12 +686,20 @@ static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)

	pending_flush = 0x0;

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&sde_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < sde_enc->num_phys_encs; i++) {
		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
		if (!phys || phys->enable_state == SDE_ENC_DISABLED)
			continue;

		pending_kickoff_cnt = sde_encoder_phys_inc_pending(phys);
		SDE_EVT32(DRMID(&sde_enc->base), i, pending_kickoff_cnt);

		ctl = phys->hw_ctl;
		if (!ctl || phys->enable_state == SDE_ENC_DISABLED)
		if (!ctl)
			continue;

		if (!phys->ops.needs_split_flush ||
@@ -708,6 +718,8 @@ static void _sde_encoder_kickoff_phys(struct sde_encoder_virt *sde_enc)
	}

	_sde_encoder_trigger_start(sde_enc->cur_master);

	spin_unlock_irqrestore(&sde_enc->enc_spinlock, lock_flags);
}

void sde_encoder_schedule_kickoff(struct drm_encoder *drm_enc)
@@ -1040,6 +1052,7 @@ static int sde_encoder_setup_display(struct sde_encoder_virt *sde_enc,
	phys_params.sde_kms = sde_kms;
	phys_params.parent = &sde_enc->base;
	phys_params.parent_ops = parent_ops;
	phys_params.enc_spinlock = &sde_enc->enc_spinlock;

	SDE_DEBUG("\n");

@@ -1152,7 +1165,7 @@ struct drm_encoder *sde_encoder_init(
		goto fail;

	sde_enc->cur_master = NULL;
	spin_lock_init(&sde_enc->spin_lock);
	spin_lock_init(&sde_enc->enc_spinlock);
	drm_enc = &sde_enc->base;
	drm_encoder_init(dev, drm_enc, &sde_encoder_funcs, drm_enc_mode);
	drm_encoder_helper_add(drm_enc, &sde_encoder_helper_funcs);
+16 −13
Original line number Diff line number Diff line
@@ -170,11 +170,16 @@ enum sde_intr_idx {
 * @split_role:		Role to play in a split-panel configuration
 * @intf_mode:		Interface mode
 * @intf_idx:		Interface index on sde hardware
 * @spin_lock:		Lock for IRQ purposes
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @enable_state:	Enable state tracking
 * @vblank_refcount:	Reference count of vblank request
 * @vsync_cnt:		Vsync count for the physical encoder
 * @underrun_cnt:	Underrun count for the physical encoder
 * @pending_kickoff_cnt:	Atomic counter tracking the number of kickoffs
 *				vs. the number of done/vblank irqs. Should hover
 *				between 0-2 Incremented when a new kickoff is
 *				scheduled. Decremented in irq handler
 * @pending_kickoff_wq:		Wait queue for blocking until kickoff completes
 */
struct sde_encoder_phys {
	struct drm_encoder *parent;
@@ -192,13 +197,20 @@ struct sde_encoder_phys {
	enum sde_enc_split_role split_role;
	enum sde_intf_mode intf_mode;
	enum sde_intf intf_idx;
	spinlock_t spin_lock;
	spinlock_t *enc_spinlock;
	enum sde_enc_enable_state enable_state;
	atomic_t vblank_refcount;
	atomic_t vsync_cnt;
	atomic_t underrun_cnt;
	atomic_t pending_kickoff_cnt;
	wait_queue_head_t pending_kickoff_wq;
};

/**
 * sde_encoder_phys_inc_pending - increment this encoder's pending kickoff
 *	count and return the new value.
 * @phys: physical encoder whose pending_kickoff_cnt is bumped
 *
 * pending_kickoff_cnt tracks kickoffs scheduled vs. done/vblank irqs and is
 * decremented in the irq handlers; callers in this change invoke this helper
 * while holding enc_spinlock so the increment is atomic with respect to ctl
 * flush / irq enable (NOTE(review): lock requirement inferred from the call
 * sites in this diff — confirm against the full sources).
 */
static inline int sde_encoder_phys_inc_pending(struct sde_encoder_phys *phys)
{
	return atomic_inc_return(&phys->pending_kickoff_cnt);
}

/**
 * struct sde_encoder_phys_vid - sub-class of sde_encoder_phys to handle video
 *	mode specific operations
@@ -206,14 +218,12 @@ struct sde_encoder_phys {
 * @irq_idx:	IRQ interface lookup index
 * @irq_cb:	interrupt callback
 * @hw_intf:	Hardware interface to the intf registers
 * @vblank_completion:	Completion event signaled on reception of the vsync irq
 */
struct sde_encoder_phys_vid {
	struct sde_encoder_phys base;
	int irq_idx[INTR_IDX_MAX];
	struct sde_irq_callback irq_cb[INTR_IDX_MAX];
	struct sde_hw_intf *hw_intf;
	struct completion vblank_completion;
};

/**
@@ -226,13 +236,6 @@ struct sde_encoder_phys_vid {
 *			For CMD encoders, VBLANK is driven by the PP RD Done IRQ
 * @pp_tx_done_irq_idx:	IRQ signifying frame transmission to panel complete
 * @irq_cb:	interrupt callback
 * @pp_tx_done_wq:	Wait queue that tracks when a commit is flushed
 *			to hardware after the reception of pp_done
 *			Used to prevent back to back commits
 * @pending_cnt:	Atomic counter tracking the number of kickoffs vs.
 *			the number of pp_done irqs. Should hover between 0-2
 *			Incremented when a new kickoff is scheduled
 *			Decremented in pp_done irq
 */
struct sde_encoder_phys_cmd {
	struct sde_encoder_phys base;
@@ -240,8 +243,6 @@ struct sde_encoder_phys_cmd {
	int stream_sel;
	int irq_idx[INTR_IDX_MAX];
	struct sde_irq_callback irq_cb[INTR_IDX_MAX];
	wait_queue_head_t pp_tx_done_wq;
	atomic_t pending_cnt;
};

/**
@@ -298,6 +299,7 @@ struct sde_encoder_phys_wb {
 * @split_role:		Role to play in a split-panel configuration
 * @intf_idx:		Interface index this phys_enc will control
 * @wb_idx:		Writeback index this phys_enc will control
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 */
struct sde_enc_phys_init_params {
	struct sde_kms *sde_kms;
@@ -306,6 +308,7 @@ struct sde_enc_phys_init_params {
	enum sde_enc_split_role split_role;
	enum sde_intf intf_idx;
	enum sde_wb wb_idx;
	spinlock_t *enc_spinlock;
};

/**
+21 −21
Original line number Diff line number Diff line
@@ -97,6 +97,7 @@ static void sde_encoder_phys_cmd_pp_tx_done_irq(void *arg, int irq_idx)
{
	struct sde_encoder_phys_cmd *cmd_enc = arg;
	struct sde_encoder_phys *phys_enc;
	unsigned long lock_flags;
	int new_cnt;

	if (!cmd_enc)
@@ -104,12 +105,15 @@ static void sde_encoder_phys_cmd_pp_tx_done_irq(void *arg, int irq_idx)

	phys_enc = &cmd_enc->base;

	new_cnt = atomic_add_unless(&cmd_enc->pending_cnt, -1, 0);
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	SDE_EVT32_IRQ(DRMID(phys_enc->parent),
			phys_enc->hw_pp->idx - PINGPONG_0, new_cnt);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&cmd_enc->pp_tx_done_wq);
	wake_up_all(&phys_enc->pending_kickoff_wq);
}

static void sde_encoder_phys_cmd_pp_rd_ptr_irq(void *arg, int irq_idx)
@@ -134,9 +138,8 @@ static int _sde_encoder_phys_cmd_wait_for_idle(
	int ret;

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state != SDE_ENC_ENABLED) {
		SDE_ERROR_CMDENC(cmd_enc, "encoder not enabled, state: %d\n",
				phys_enc->enable_state);
	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR_CMDENC(cmd_enc, "encoder is disabled\n");
		return -EWOULDBLOCK;
	}

@@ -144,8 +147,8 @@ static int _sde_encoder_phys_cmd_wait_for_idle(
	ret = sde_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			phys_enc->hw_pp->idx - PINGPONG_0,
			&cmd_enc->pp_tx_done_wq,
			&cmd_enc->pending_cnt,
			&phys_enc->pending_kickoff_wq,
			&phys_enc->pending_kickoff_cnt,
			KICKOFF_TIMEOUT_MS);
	if (ret <= 0) {
		irq_status = sde_core_irq_read(phys_enc->sde_kms,
@@ -528,9 +531,12 @@ static void sde_encoder_phys_cmd_disable(struct sde_encoder_phys *phys_enc)

	ret = _sde_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (ret) {
		atomic_set(&cmd_enc->pending_cnt, 0);
		SDE_ERROR("failure waiting for idle before disable: %d\n", ret);
		SDE_EVT32(DRMID(phys_enc->parent), phys_enc->hw_pp->idx, ret);
		atomic_set(&phys_enc->pending_kickoff_cnt, 0);
		SDE_ERROR_CMDENC(cmd_enc,
				"pp %d failed wait for idle at disable: %d\n",
				phys_enc->hw_pp->idx - PINGPONG_0, ret);
		SDE_EVT32(DRMID(phys_enc->parent),
				phys_enc->hw_pp->idx - PINGPONG_0, ret);
	}

	sde_encoder_phys_cmd_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
@@ -591,7 +597,6 @@ static void sde_encoder_phys_cmd_prepare_for_kickoff(
{
	struct sde_encoder_phys_cmd *cmd_enc =
			to_sde_encoder_phys_cmd(phys_enc);
	int new_pending_cnt;
	int ret;

	if (!phys_enc) {
@@ -607,16 +612,12 @@ static void sde_encoder_phys_cmd_prepare_for_kickoff(
	 */
	ret = _sde_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (ret) {
		/* force pending_cnt 0 to discard failed kickoff */
		atomic_set(&cmd_enc->pending_cnt, 0);
		/* force pending_kickoff_cnt 0 to discard failed kickoff */
		atomic_set(&phys_enc->pending_kickoff_cnt, 0);
		SDE_EVT32(DRMID(phys_enc->parent),
				phys_enc->hw_pp->idx - PINGPONG_0);
		SDE_ERROR("failed wait_for_idle: %d\n", ret);
	}

	new_pending_cnt = atomic_inc_return(&cmd_enc->pending_cnt);
	SDE_EVT32(DRMID(phys_enc->parent), phys_enc->hw_pp->idx,
			new_pending_cnt);
}

static void sde_encoder_phys_cmd_init_ops(
@@ -671,15 +672,14 @@ struct sde_encoder_phys *sde_encoder_phys_cmd_init(
	phys_enc->sde_kms = p->sde_kms;
	phys_enc->split_role = p->split_role;
	phys_enc->intf_mode = INTF_MODE_CMD;
	spin_lock_init(&phys_enc->spin_lock);
	phys_enc->enc_spinlock = p->enc_spinlock;
	cmd_enc->stream_sel = 0;
	phys_enc->enable_state = SDE_ENC_DISABLED;
	atomic_set(&cmd_enc->pending_cnt, 0);
	for (i = 0; i < INTR_IDX_MAX; i++)
		INIT_LIST_HEAD(&cmd_enc->irq_cb[i].list);
	atomic_set(&phys_enc->vblank_refcount, 0);

	init_waitqueue_head(&cmd_enc->pp_tx_done_wq);
	atomic_set(&phys_enc->pending_kickoff_cnt, 0);
	init_waitqueue_head(&phys_enc->pending_kickoff_wq);

	SDE_DEBUG_CMDENC(cmd_enc, "created\n");

+109 −105
Original line number Diff line number Diff line
@@ -42,24 +42,6 @@ static bool sde_encoder_phys_vid_is_master(
	return ret;
}

static void sde_encoder_phys_vid_wait_for_vblank(
		struct sde_encoder_phys_vid *vid_enc)
{
	int rc = 0;

	if (!vid_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}
	SDE_DEBUG_VIDENC(vid_enc, "\n");
	rc = wait_for_completion_timeout(&vid_enc->vblank_completion,
			KICKOFF_TIMEOUT_JIFFIES);
	if (rc == 0) {
		SDE_ERROR_VIDENC(vid_enc, "timed out waiting for vblank irq\n");
		SDE_DBG_DUMP("panic");
	}
}

static void drm_mode_to_intf_timing_params(
		const struct sde_encoder_phys_vid *vid_enc,
		const struct drm_display_mode *mode,
@@ -220,9 +202,9 @@ static void programmable_fetch_config(struct sde_encoder_phys *phys_enc,
		"vfp_fetch_lines %u vfp_fetch_start_vsync_counter %u\n",
		vfp_fetch_lines, vfp_fetch_start_vsync_counter);

	spin_lock_irqsave(&phys_enc->spin_lock, lock_flags);
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	vid_enc->hw_intf->ops.setup_prg_fetch(vid_enc->hw_intf, &f);
	spin_unlock_irqrestore(&phys_enc->spin_lock, lock_flags);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
}

static bool sde_encoder_phys_vid_mode_fixup(
@@ -288,11 +270,11 @@ static void sde_encoder_phys_vid_setup_timing_engine(
	intf_cfg.stream_sel = 0; /* Don't care value for video mode */
	intf_cfg.mode_3d = sde_encoder_helper_get_3d_blend_mode(phys_enc);

	spin_lock_irqsave(&phys_enc->spin_lock, lock_flags);
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	vid_enc->hw_intf->ops.setup_timing_gen(vid_enc->hw_intf,
			&timing_params, fmt);
	phys_enc->hw_ctl->ops.setup_intf_cfg(phys_enc->hw_ctl, &intf_cfg);
	spin_unlock_irqrestore(&phys_enc->spin_lock, lock_flags);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	programmable_fetch_config(phys_enc, &timing_params);
}
@@ -301,6 +283,8 @@ static void sde_encoder_phys_vid_vblank_irq(void *arg, int irq_idx)
{
	struct sde_encoder_phys_vid *vid_enc = arg;
	struct sde_encoder_phys *phys_enc;
	unsigned long lock_flags;
	int new_cnt;

	if (!vid_enc)
		return;
@@ -310,8 +294,14 @@ static void sde_encoder_phys_vid_vblank_irq(void *arg, int irq_idx)
		phys_enc->parent_ops.handle_vblank_virt(phys_enc->parent,
				phys_enc);

	/* signal VBLANK completion */
	complete_all(&vid_enc->vblank_completion);
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	SDE_EVT32_IRQ(DRMID(phys_enc->parent), vid_enc->hw_intf->idx - INTF_0,
			new_cnt);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
}

static void sde_encoder_phys_vid_underrun_irq(void *arg, int irq_idx)
@@ -354,9 +344,9 @@ static void _sde_encoder_phys_vid_split_config(
	if (hw_mdptop && hw_mdptop->ops.setup_split_pipe) {
		unsigned long lock_flags;

		spin_lock_irqsave(&phys_enc->spin_lock, lock_flags);
		spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
		hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		spin_unlock_irqrestore(&phys_enc->spin_lock, lock_flags);
		spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
	}
}

@@ -573,59 +563,6 @@ static void sde_encoder_phys_vid_enable(struct sde_encoder_phys *phys_enc)
	return;
}

static void sde_encoder_phys_vid_disable(struct sde_encoder_phys *phys_enc)
{
	unsigned long lock_flags;
	struct sde_encoder_phys_vid *vid_enc;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	vid_enc = to_sde_encoder_phys_vid(phys_enc);
	if (!vid_enc->hw_intf || !phys_enc->hw_ctl) {
		SDE_ERROR("invalid hw_intf %d hw_ctl %d\n",
				vid_enc->hw_intf != 0, phys_enc->hw_ctl != 0);
		return;
	}

	SDE_DEBUG_VIDENC(vid_enc, "\n");

	if (WARN_ON(!vid_enc->hw_intf->ops.enable_timing))
		return;

	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR("already disabled\n");
		return;
	}

	spin_lock_irqsave(&phys_enc->spin_lock, lock_flags);
	vid_enc->hw_intf->ops.enable_timing(vid_enc->hw_intf, 0);
	reinit_completion(&vid_enc->vblank_completion);
	phys_enc->enable_state = SDE_ENC_DISABLED;
	spin_unlock_irqrestore(&phys_enc->spin_lock, lock_flags);

	/*
	 * Wait for a vsync so we know the ENABLE=0 latched before
	 * the (connector) source of the vsync's gets disabled,
	 * otherwise we end up in a funny state if we re-enable
	 * before the disable latches, which results that some of
	 * the settings changes for the new modeset (like new
	 * scanout buffer) don't latch properly..
	 */
	if (sde_encoder_phys_vid_is_master(phys_enc)) {
		sde_encoder_phys_vid_wait_for_vblank(vid_enc);
		sde_encoder_phys_vid_control_vblank_irq(phys_enc, false);
	}

	if (atomic_read(&phys_enc->vblank_refcount))
		SDE_ERROR("enc:%d role:%d invalid vblank refcount %d\n",
				phys_enc->parent->base.id,
				phys_enc->split_role,
				atomic_read(&phys_enc->vblank_refcount));
}

static void sde_encoder_phys_vid_destroy(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_vid *vid_enc;
@@ -666,9 +603,10 @@ static void sde_encoder_phys_vid_get_hw_resources(
static int sde_encoder_phys_vid_wait_for_commit_done(
		struct sde_encoder_phys *phys_enc)
{
	unsigned long ret;
	struct sde_encoder_phys_vid *vid_enc =
			to_sde_encoder_phys_vid(phys_enc);
	u32 irq_status;
	int ret;

	if (!sde_encoder_phys_vid_is_master(phys_enc))
		return 0;
@@ -678,33 +616,99 @@ static int sde_encoder_phys_vid_wait_for_commit_done(
		return -EWOULDBLOCK;
	}

	SDE_EVT32(DRMID(phys_enc->parent), vid_enc->hw_intf->idx,
	SDE_EVT32(DRMID(phys_enc->parent), vid_enc->hw_intf->idx - INTF_0,
			SDE_EVTLOG_FUNC_ENTRY);

	ret = wait_for_completion_timeout(&vid_enc->vblank_completion,
			KICKOFF_TIMEOUT_JIFFIES);
	if (!ret) {
		SDE_DEBUG_VIDENC(vid_enc, "wait %u ms timed out\n",
	/* Wait for kickoff to complete */
	ret = sde_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			vid_enc->hw_intf->idx - INTF_0,
			&phys_enc->pending_kickoff_wq,
			&phys_enc->pending_kickoff_cnt,
			KICKOFF_TIMEOUT_MS);
		SDE_EVT32(DRMID(phys_enc->parent), vid_enc->hw_intf->idx,
				KICKOFF_TIMEOUT_MS);
		return -ETIMEDOUT;
	if (ret <= 0) {
		irq_status = sde_core_irq_read(phys_enc->sde_kms,
				INTR_IDX_VSYNC, true);
		if (irq_status) {
			SDE_EVT32(DRMID(phys_enc->parent),
					vid_enc->hw_intf->idx - INTF_0);
			SDE_DEBUG_VIDENC(vid_enc, "done, irq not triggered\n");
			sde_encoder_phys_vid_vblank_irq(vid_enc,
					INTR_IDX_VSYNC);
			ret = 0;
		} else {
			SDE_EVT32(DRMID(phys_enc->parent),
					vid_enc->hw_intf->idx - INTF_0);
			SDE_ERROR_VIDENC(vid_enc, "kickoff timed out\n");
			ret = -ETIMEDOUT;
		}
	} else {
		ret = 0;
	}

	SDE_EVT32(DRMID(phys_enc->parent), vid_enc->hw_intf->idx,
			SDE_EVTLOG_FUNC_EXIT);

	return 0;
}

static void sde_encoder_phys_vid_prepare_for_kickoff(
		struct sde_encoder_phys *phys_enc)
static void sde_encoder_phys_vid_disable(struct sde_encoder_phys *phys_enc)
{
	struct sde_encoder_phys_vid *vid_enc =
			to_sde_encoder_phys_vid(phys_enc);
	struct sde_encoder_phys_vid *vid_enc;
	unsigned long lock_flags;
	int ret;

	if (!phys_enc) {
		SDE_ERROR("invalid encoder\n");
		return;
	}

	vid_enc = to_sde_encoder_phys_vid(phys_enc);
	if (!vid_enc->hw_intf || !phys_enc->hw_ctl) {
		SDE_ERROR("invalid hw_intf %d hw_ctl %d\n",
				vid_enc->hw_intf != 0, phys_enc->hw_ctl != 0);
		return;
	}

	SDE_DEBUG_VIDENC(vid_enc, "\n");

	/* Reset completion to wait for the next vblank */
	reinit_completion(&vid_enc->vblank_completion);
	if (WARN_ON(!vid_enc->hw_intf->ops.enable_timing))
		return;

	if (phys_enc->enable_state == SDE_ENC_DISABLED) {
		SDE_ERROR("already disabled\n");
		return;
	}

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	vid_enc->hw_intf->ops.enable_timing(vid_enc->hw_intf, 0);
	if (sde_encoder_phys_vid_is_master(phys_enc))
		sde_encoder_phys_inc_pending(phys_enc);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	/*
	 * Wait for a vsync so we know the ENABLE=0 latched before
	 * the (connector) source of the vsync's gets disabled,
	 * otherwise we end up in a funny state if we re-enable
	 * before the disable latches, which results that some of
	 * the settings changes for the new modeset (like new
	 * scanout buffer) don't latch properly..
	 */
	if (sde_encoder_phys_vid_is_master(phys_enc)) {
		ret = sde_encoder_phys_vid_wait_for_commit_done(phys_enc);
		if (ret) {
			atomic_set(&phys_enc->pending_kickoff_cnt, 0);
			SDE_ERROR_VIDENC(vid_enc,
					"failure waiting for disable: %d\n",
					ret);
			SDE_EVT32(DRMID(phys_enc->parent),
					vid_enc->hw_intf->idx - INTF_0, ret);
		}
		sde_encoder_phys_vid_control_vblank_irq(phys_enc, false);
	}

	if (atomic_read(&phys_enc->vblank_refcount))
		SDE_ERROR_VIDENC(vid_enc, "invalid vblank refcount %d\n",
				atomic_read(&phys_enc->vblank_refcount));

	phys_enc->enable_state = SDE_ENC_DISABLED;
}

static void sde_encoder_phys_vid_handle_post_kickoff(
@@ -726,10 +730,11 @@ static void sde_encoder_phys_vid_handle_post_kickoff(
	 * Video encoders need to turn on their interfaces now
	 */
	if (phys_enc->enable_state == SDE_ENC_ENABLING) {
		SDE_EVT32(DRMID(phys_enc->parent), vid_enc->hw_intf->idx);
		spin_lock_irqsave(&phys_enc->spin_lock, lock_flags);
		SDE_EVT32(DRMID(phys_enc->parent),
				vid_enc->hw_intf->idx - INTF_0);
		spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
		vid_enc->hw_intf->ops.enable_timing(vid_enc->hw_intf, 1);
		spin_unlock_irqrestore(&phys_enc->spin_lock, lock_flags);
		spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
		phys_enc->enable_state = SDE_ENC_ENABLED;
	}
}
@@ -765,7 +770,6 @@ static void sde_encoder_phys_vid_init_ops(struct sde_encoder_phys_ops *ops)
	ops->get_hw_resources = sde_encoder_phys_vid_get_hw_resources;
	ops->control_vblank_irq = sde_encoder_phys_vid_control_vblank_irq;
	ops->wait_for_commit_done = sde_encoder_phys_vid_wait_for_commit_done;
	ops->prepare_for_kickoff = sde_encoder_phys_vid_prepare_for_kickoff;
	ops->handle_post_kickoff = sde_encoder_phys_vid_handle_post_kickoff;
	ops->needs_split_flush = sde_encoder_phys_vid_needs_split_flush;
	ops->setup_misr = sde_encoder_phys_vid_setup_misr;
@@ -791,7 +795,6 @@ struct sde_encoder_phys *sde_encoder_phys_vid_init(
		ret = -ENOMEM;
		goto fail;
	}
	init_completion(&vid_enc->vblank_completion);

	phys_enc = &vid_enc->base;

@@ -839,11 +842,12 @@ struct sde_encoder_phys *sde_encoder_phys_vid_init(
	phys_enc->sde_kms = p->sde_kms;
	phys_enc->split_role = p->split_role;
	phys_enc->intf_mode = INTF_MODE_VIDEO;
	spin_lock_init(&phys_enc->spin_lock);
	init_completion(&vid_enc->vblank_completion);
	phys_enc->enc_spinlock = p->enc_spinlock;
	for (i = 0; i < INTR_IDX_MAX; i++)
		INIT_LIST_HEAD(&vid_enc->irq_cb[i].list);
	atomic_set(&phys_enc->vblank_refcount, 0);
	atomic_set(&phys_enc->pending_kickoff_cnt, 0);
	init_waitqueue_head(&phys_enc->pending_kickoff_wq);
	phys_enc->enable_state = SDE_ENC_DISABLED;

	SDE_DEBUG_VIDENC(vid_enc, "created intf idx:%d\n", p->intf_idx);
+1 −1
Original line number Diff line number Diff line
@@ -1063,7 +1063,7 @@ struct sde_encoder_phys *sde_encoder_phys_wb_init(
	phys_enc->split_role = p->split_role;
	phys_enc->intf_mode = INTF_MODE_WB_LINE;
	phys_enc->intf_idx = p->intf_idx;
	spin_lock_init(&phys_enc->spin_lock);
	phys_enc->enc_spinlock = p->enc_spinlock;
	INIT_LIST_HEAD(&wb_enc->irq_cb.list);

	ret = sde_encoder_phys_wb_init_debugfs(phys_enc, p->sde_kms);