Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit f3900e66 authored by Linux Build Service Account's avatar Linux Build Service Account Committed by Gerrit - the friendly Code Review server
Browse files

Merge "drm/msm/sde: separate the power notify and idle notify events"

parents f8f33ca8 c9e213b4
Loading
Loading
Loading
Loading
+71 −78
Original line number Diff line number Diff line
@@ -61,12 +61,15 @@ static int sde_crtc_power_interrupt_handler(struct drm_crtc *crtc_drm,
	bool en, struct sde_irq_callback *ad_irq);
static int sde_crtc_idle_interrupt_handler(struct drm_crtc *crtc_drm,
	bool en, struct sde_irq_callback *idle_irq);
static int sde_crtc_pm_event_handler(struct drm_crtc *crtc, bool en,
		struct sde_irq_callback *noirq);

static struct sde_crtc_custom_events custom_events[] = {
	{DRM_EVENT_AD_BACKLIGHT, sde_cp_ad_interrupt},
	{DRM_EVENT_CRTC_POWER, sde_crtc_power_interrupt_handler},
	{DRM_EVENT_IDLE_NOTIFY, sde_crtc_idle_interrupt_handler},
	{DRM_EVENT_HISTOGRAM, sde_cp_hist_interrupt},
	{DRM_EVENT_SDE_POWER, sde_crtc_pm_event_handler},
};

/* default input fence timeout, in ms */
@@ -2276,47 +2279,6 @@ static void _sde_crtc_retire_event(struct drm_crtc *crtc, ktime_t ts)
	SDE_ATRACE_END("signal_retire_fence");
}

/*
 * _sde_crtc_idle_notify - signal idle timeout to client
 * @sde_crtc: Pointer to sde crtc; NULL is rejected with an error log
 *
 * Sends a DRM_EVENT_IDLE_NOTIFY custom event for this crtc's mode object
 * via msm_mode_object_event_notify(). The event payload is a single u32,
 * always 0 here (carried in @ret).
 */
static void _sde_crtc_idle_notify(struct sde_crtc *sde_crtc)
{
	struct drm_crtc *crtc;
	struct drm_event event;
	int ret = 0;	/* event payload; always 0 in this path */

	if (!sde_crtc) {
		SDE_ERROR("invalid sde crtc\n");
		return;
	}

	crtc = &sde_crtc->base;
	event.type = DRM_EVENT_IDLE_NOTIFY;
	event.length = sizeof(u32);
	/* deliver to any user-space listener registered on this crtc object */
	msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
								(u8 *)&ret);

	SDE_DEBUG("crtc:%d idle timeout notified\n", crtc->base.id);
}

/*
 * _sde_crtc_handle_event - handle a crtc frame event in caller context.
 * @sde_crtc: Pointer to sde crtc
 * @event: Bitmask of SDE_ENCODER_FRAME_EVENT_* flags
 *
 * This API must manage only non-IRQ context events.
 *
 * Return: true if the event was fully consumed here and needs no further
 * (worker-based) processing; false otherwise.
 */
static bool _sde_crtc_handle_event(struct sde_crtc *sde_crtc, u32 event)
{
	bool event_processed = false;

	/**
	 * idle events are originated from commit thread and can be processed
	 * in same context
	 */
	if (event & SDE_ENCODER_FRAME_EVENT_IDLE) {
		_sde_crtc_idle_notify(sde_crtc);
		event_processed = true;
	}

	return event_processed;
}

static void sde_crtc_frame_event_work(struct kthread_work *work)
{
	struct msm_drm_private *priv;
@@ -2410,15 +2372,6 @@ static void sde_crtc_frame_event_work(struct kthread_work *work)
	SDE_ATRACE_END("crtc_frame_event");
}

/*
 * sde_crtc_frame_event_cb - crtc frame event callback API. CRTC module
 * registers this API to encoder for all frame event callbacks like
 * release_fence, retire_fence, frame_error, frame_done, idle_timeout,
 * etc. Encoder may call different events from different context - IRQ,
 * user thread, commit_thread, etc. Each event should be carefully
 * reviewed and should be processed in the proper task context to avoid
 * scheduling delays and to properly manage the irq context's bottom half
 * processing.
 */
static void sde_crtc_frame_event_cb(void *data, u32 event)
{
	struct drm_crtc *crtc = (struct drm_crtc *)data;
@@ -2427,7 +2380,6 @@ static void sde_crtc_frame_event_cb(void *data, u32 event)
	struct sde_crtc_frame_event *fevent;
	unsigned long flags;
	u32 crtc_id;
	bool event_processed = false;

	if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
		SDE_ERROR("invalid parameters\n");
@@ -2440,11 +2392,6 @@ static void sde_crtc_frame_event_cb(void *data, u32 event)
	SDE_DEBUG("crtc%d\n", crtc->base.id);
	SDE_EVT32_VERBOSE(DRMID(crtc), event);

	/* try to process the event in caller context */
	event_processed = _sde_crtc_handle_event(sde_crtc, event);
	if (event_processed)
		return;

	spin_lock_irqsave(&sde_crtc->spin_lock, flags);
	fevent = list_first_entry_or_null(&sde_crtc->frame_event_list,
			struct sde_crtc_frame_event, list);
@@ -2485,24 +2432,6 @@ void sde_crtc_complete_commit(struct drm_crtc *crtc,
		sde_crtc_secure_ctrl(crtc, true);
}

/*
 * _sde_crtc_set_idle_timeout - update idle timeout wait duration
 * @crtc: Pointer to drm crtc; NULL is rejected with an error log
 * @val: New idle timeout value; truncated to u32 before being applied
 *
 * Propagates the timeout to every encoder currently attached to this crtc
 * via sde_encoder_set_idle_timeout(). Encoders bound to other crtcs are
 * skipped.
 */
static void _sde_crtc_set_idle_timeout(struct drm_crtc *crtc, u64 val)
{
	struct drm_encoder *encoder;

	if (!crtc) {
		SDE_ERROR("invalid crtc\n");
		return;
	}

	drm_for_each_encoder(encoder, crtc->dev) {
		if (encoder->crtc != crtc)
			continue;

		/* property is u64 but the encoder API takes a u32 */
		sde_encoder_set_idle_timeout(encoder, (u32) val);
	}
}

/**
 * _sde_crtc_set_input_fence_timeout - update ns version of in fence timeout
 * @cstate: Pointer to sde crtc state
@@ -3158,6 +3087,12 @@ static void sde_crtc_atomic_begin(struct drm_crtc *crtc,
	_sde_crtc_blend_setup(crtc, true);
	_sde_crtc_dest_scaler_setup(crtc);

	/* cancel the idle notify delayed work */
	if (sde_encoder_check_mode(sde_crtc->mixers[0].encoder,
					MSM_DISPLAY_CAP_VID_MODE) &&
		kthread_cancel_delayed_work_sync(&sde_crtc->idle_notify_work))
		SDE_DEBUG("idle notify work cancelled\n");

	/*
	 * Since CP properties use AXI buffer to program the
	 * HW, check if context bank is in attached
@@ -3189,6 +3124,7 @@ static void sde_crtc_atomic_flush(struct drm_crtc *crtc,
	struct msm_drm_thread *event_thread;
	unsigned long flags;
	struct sde_crtc_state *cstate;
	int idle_time = 0;

	if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
		SDE_ERROR("invalid crtc\n");
@@ -3214,6 +3150,7 @@ static void sde_crtc_atomic_flush(struct drm_crtc *crtc,
	}

	event_thread = &priv->event_thread[crtc->index];
	idle_time = sde_crtc_get_property(cstate, CRTC_PROP_IDLE_TIMEOUT);

	if (sde_crtc->event) {
		SDE_DEBUG("already received sde_crtc->event\n");
@@ -3244,6 +3181,15 @@ static void sde_crtc_atomic_flush(struct drm_crtc *crtc,
	/* wait for acquire fences before anything else is done */
	_sde_crtc_wait_for_fences(crtc);

	/* schedule the idle notify delayed work */
	if (idle_time && sde_encoder_check_mode(sde_crtc->mixers[0].encoder,
						MSM_DISPLAY_CAP_VID_MODE)) {
		kthread_queue_delayed_work(&event_thread->worker,
					&sde_crtc->idle_notify_work,
					msecs_to_jiffies(idle_time));
		SDE_DEBUG("schedule idle notify work in %dms\n", idle_time);
	}

	if (!cstate->rsc_update) {
		drm_for_each_encoder(encoder, dev) {
			if (encoder->crtc != crtc)
@@ -3942,10 +3888,11 @@ static void sde_crtc_handle_power_event(u32 event_type, void *arg)
	struct drm_plane *plane;
	struct drm_encoder *encoder;
	struct sde_crtc_mixer *m;
	u32 i, misr_status;
	u32 i, misr_status, power_on;
	unsigned long flags;
	struct sde_crtc_irq_info *node = NULL;
	int ret = 0;
	struct drm_event event;

	if (!crtc) {
		SDE_ERROR("invalid crtc\n");
@@ -3981,6 +3928,12 @@ static void sde_crtc_handle_power_event(u32 event_type, void *arg)

		sde_cp_crtc_post_ipc(crtc);

		event.type = DRM_EVENT_SDE_POWER;
		event.length = sizeof(power_on);
		power_on = 1;
		msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
				(u8 *)&power_on);

		for (i = 0; i < sde_crtc->num_mixers; ++i) {
			m = &sde_crtc->mixers[i];
			if (!m->hw_lm || !m->hw_lm->ops.setup_misr ||
@@ -4049,6 +4002,11 @@ static void sde_crtc_handle_power_event(u32 event_type, void *arg)
		if (cstate->num_ds_enabled)
			sde_crtc->ds_reconfig = true;

		event.type = DRM_EVENT_SDE_POWER;
		event.length = sizeof(power_on);
		power_on = 0;
		msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
				(u8 *)&power_on);
		break;
	default:
		SDE_DEBUG("event:%d not handled\n", event_type);
@@ -4890,7 +4848,7 @@ static void sde_crtc_install_properties(struct drm_crtc *crtc,
			CRTC_PROP_ROT_CLK);

	msm_property_install_range(&sde_crtc->property_info,
		"idle_time", IDLE_TIMEOUT, 0, U64_MAX, 0,
		"idle_time", 0, 0, U64_MAX, 0,
		CRTC_PROP_IDLE_TIMEOUT);

	msm_property_install_blob(&sde_crtc->property_info, "capabilities",
@@ -5079,8 +5037,6 @@ static int sde_crtc_atomic_set_property(struct drm_crtc *crtc,
				cstate->bw_control = true;
				cstate->bw_split_vote = true;
				break;
			case CRTC_PROP_IDLE_TIMEOUT:
				_sde_crtc_set_idle_timeout(crtc, val);
			default:
				/* nothing to do */
				break;
@@ -5682,6 +5638,30 @@ static int _sde_crtc_init_events(struct sde_crtc *sde_crtc)
	return rc;
}

/*
 * __sde_crtc_idle_notify_work - signal idle timeout to user space
 * @work: Embedded kthread_work inside sde_crtc::idle_notify_work
 *
 * Delayed-work handler that sends a DRM_EVENT_IDLE_NOTIFY custom event
 * (u32 payload, always 0 here) for the owning crtc.
 *
 * NOTE(review): sde_crtc comes from container_of() on a live work item,
 * so the NULL check below looks unreachable in practice — confirm before
 * relying on the error branch.
 */
static void __sde_crtc_idle_notify_work(struct kthread_work *work)
{
	struct sde_crtc *sde_crtc = container_of(work, struct sde_crtc,
				idle_notify_work.work);
	struct drm_crtc *crtc;
	struct drm_event event;
	int ret = 0;

	if (!sde_crtc) {
		SDE_ERROR("invalid sde crtc\n");
	} else {
		crtc = &sde_crtc->base;
		event.type = DRM_EVENT_IDLE_NOTIFY;
		event.length = sizeof(u32);
		msm_mode_object_event_notify(&crtc->base, crtc->dev,
				&event, (u8 *)&ret);

		SDE_DEBUG("crtc[%d]: idle timeout notified\n", crtc->base.id);
	}
}

/* initialize crtc */
struct drm_crtc *sde_crtc_init(struct drm_device *dev, struct drm_plane *plane)
{
@@ -5753,6 +5733,9 @@ struct drm_crtc *sde_crtc_init(struct drm_device *dev, struct drm_plane *plane)
	sde_cp_crtc_init(crtc);
	sde_cp_crtc_install_properties(crtc);

	kthread_init_delayed_work(&sde_crtc->idle_notify_work,
					__sde_crtc_idle_notify_work);

	SDE_DEBUG("%s: successfully initialized crtc\n", sde_crtc->name);
	return crtc;
}
@@ -5894,6 +5877,16 @@ static int sde_crtc_power_interrupt_handler(struct drm_crtc *crtc_drm,
	return 0;
}

/*
 * sde_crtc_pm_event_handler - enable/disable handler for DRM_EVENT_SDE_POWER
 * @crtc: Pointer to drm crtc (unused)
 * @en: true to enable, false to disable (unused)
 * @noirq: Unused irq callback object
 *
 * Intentionally a no-op: the power event is delivered directly from
 * sde_crtc_handle_power_event(), so no crtc interrupt needs to be
 * registered or torn down here.
 *
 * Return: Always 0.
 */
static int sde_crtc_pm_event_handler(struct drm_crtc *crtc, bool en,
		struct sde_irq_callback *noirq)
{
	/*
	 * IRQ object noirq is not being used here since there is
	 * no crtc irq from pm event.
	 */
	return 0;
}

static int sde_crtc_idle_interrupt_handler(struct drm_crtc *crtc_drm,
	bool en, struct sde_irq_callback *irq)
{
+2 −0
Original line number Diff line number Diff line
@@ -212,6 +212,7 @@ struct sde_crtc_event {
 * @misr_data     : store misr data before turning off the clocks.
 * @sbuf_flush_mask: flush mask for inline rotator
 * @sbuf_flush_mask_old: inline rotator flush mask for previous commit
 * @idle_notify_work: delayed worker to notify idle timeout to user space
 * @power_event   : registered power event handle
 * @cur_perf      : current performance committed to clock/bandwidth driver
 * @rp_lock       : serialization lock for resource pool
@@ -276,6 +277,7 @@ struct sde_crtc {

	u32 sbuf_flush_mask;
	u32 sbuf_flush_mask_old;
	struct kthread_delayed_work idle_notify_work;

	struct sde_power_event *power_event;

+10 −28
Original line number Diff line number Diff line
@@ -92,7 +92,7 @@
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_TIMEOUT time.
 *	IDLE_PC state after IDLE_POWERCOLLAPSE_DURATION time.
 * @SDE_ENC_RC_EVENT_PRE_STOP:
 *	This event happens at NORMAL priority.
 *	This event, when received during the ON state, set RSC to IDLE, and
@@ -118,9 +118,9 @@
 *	with new vtotal.
 * @SDE_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for IDLE_TIMEOUT time.
 *	This would disable MDP/DSI core clocks and request RSC with IDLE state
 *	and change the resource state to IDLE.
 *	Event signals that there were no frame updates for
 *	IDLE_POWERCOLLAPSE_DURATION time. This would disable MDP/DSI core clocks
 *	and request RSC with IDLE state and change the resource state to IDLE.
 */
enum sde_enc_rc_events {
	SDE_ENC_RC_EVENT_KICKOFF = 1,
@@ -199,7 +199,6 @@ enum sde_enc_rc_states {
 * @rsc_config:			rsc configuration for display vtotal, fps, etc.
 * @cur_conn_roi:		current connector roi
 * @prv_conn_roi:		previous connector roi to optimize if unchanged
 * @idle_timeout:		idle timeout duration in milliseconds
 */
struct sde_encoder_virt {
	struct drm_encoder base;
@@ -244,8 +243,6 @@ struct sde_encoder_virt {
	struct sde_rsc_cmd_config rsc_config;
	struct sde_rect cur_conn_roi;
	struct sde_rect prv_conn_roi;

	u32 idle_timeout;
};

#define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)
@@ -315,17 +312,6 @@ static bool _sde_encoder_is_dsc_enabled(struct drm_encoder *drm_enc)
	return (comp_info->comp_type == MSM_DISPLAY_COMPRESSION_DSC);
}

/*
 * sde_encoder_set_idle_timeout - set the idle timeout for this encoder
 * @drm_enc: Pointer to drm encoder; NULL is silently ignored
 * @idle_timeout: Idle timeout duration in milliseconds
 *
 * Stores the value in the virtual encoder; it is later consumed by the
 * encoder's resource-control idle-power-collapse scheduling.
 */
void sde_encoder_set_idle_timeout(struct drm_encoder *drm_enc, u32 idle_timeout)
{
	struct sde_encoder_virt *sde_enc;

	if (!drm_enc)
		return;

	sde_enc = to_sde_encoder_virt(drm_enc);
	sde_enc->idle_timeout = idle_timeout;
}

bool sde_encoder_is_dsc_merge(struct drm_encoder *drm_enc)
{
	enum sde_rm_topology_name topology;
@@ -1717,7 +1703,7 @@ static int sde_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{
	bool autorefresh_enabled = false;
	unsigned int lp, idle_timeout;
	unsigned int lp, idle_pc_duration;
	struct sde_encoder_virt *sde_enc;
	struct msm_drm_private *priv;
	struct msm_drm_thread *disp_thread;
@@ -1841,18 +1827,18 @@ static int sde_encoder_resource_control(struct drm_encoder *drm_enc,
			lp = SDE_MODE_DPMS_ON;

		if (lp == SDE_MODE_DPMS_LP2)
			idle_timeout = IDLE_SHORT_TIMEOUT;
			idle_pc_duration = IDLE_SHORT_TIMEOUT;
		else
			idle_timeout = sde_enc->idle_timeout;
			idle_pc_duration = IDLE_POWERCOLLAPSE_DURATION;

		if (!autorefresh_enabled && idle_timeout)
		if (!autorefresh_enabled)
			kthread_queue_delayed_work(
				&disp_thread->worker,
				&sde_enc->delayed_off_work,
				msecs_to_jiffies(idle_timeout));
				msecs_to_jiffies(idle_pc_duration));
		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
				autorefresh_enabled,
				idle_timeout, SDE_EVTLOG_FUNC_CASE2);
				idle_pc_duration, SDE_EVTLOG_FUNC_CASE2);
		SDE_DEBUG_ENC(sde_enc, "sw_event:%d, work scheduled\n",
				sw_event);
		break;
@@ -2588,9 +2574,6 @@ static void sde_encoder_off_work(struct kthread_work *work)

	sde_encoder_resource_control(&sde_enc->base,
						SDE_ENC_RC_EVENT_ENTER_IDLE);

	sde_encoder_frame_done_callback(&sde_enc->base, NULL,
				SDE_ENCODER_FRAME_EVENT_IDLE);
}

/**
@@ -3990,7 +3973,6 @@ struct drm_encoder *sde_encoder_init(
	mutex_init(&sde_enc->rc_lock);
	kthread_init_delayed_work(&sde_enc->delayed_off_work,
			sde_encoder_off_work);
	sde_enc->idle_timeout = IDLE_TIMEOUT;
	sde_enc->vblank_enabled = false;

	kthread_init_work(&sde_enc->vsync_event_work,
+1 −11
Original line number Diff line number Diff line
@@ -29,9 +29,8 @@
#define SDE_ENCODER_FRAME_EVENT_PANEL_DEAD		BIT(2)
#define SDE_ENCODER_FRAME_EVENT_SIGNAL_RELEASE_FENCE	BIT(3)
#define SDE_ENCODER_FRAME_EVENT_SIGNAL_RETIRE_FENCE	BIT(4)
#define SDE_ENCODER_FRAME_EVENT_IDLE			BIT(5)

#define IDLE_TIMEOUT	(66 - 16/2)
#define IDLE_POWERCOLLAPSE_DURATION	(66 - 16/2)

/**
 * Encoder functions and data types
@@ -213,15 +212,6 @@ void sde_encoder_destroy(struct drm_encoder *drm_enc);
 */
void sde_encoder_prepare_commit(struct drm_encoder *drm_enc);

/**
 * sde_encoder_set_idle_timeout - set the idle timeout for video
 *                    and command mode encoders.
 * @drm_enc:    Pointer to previously created drm encoder structure
 * @idle_timeout:    idle timeout duration in milliseconds
 */
void sde_encoder_set_idle_timeout(struct drm_encoder *drm_enc,
							u32 idle_timeout);

/**
 * sde_encoder_update_caps_for_cont_splash - update encoder settings during
 *	device bootup when cont_splash is enabled