
Commit 5ebb06d3 authored by Sultan Alsawaf, committed by Pranav Vashi

soc: qcom: scm: Fix scm_call_count when used with GCC and LSE atomics



LSE atomic increments and decrements clobber the x0 and x1 registers,
and since these registers are used in volatile inline assembly for SCM
calls, GCC does not preserve their values across the atomic_inc() and
atomic_dec() calls. This results in x0 and x1 containing garbage values
before and after the SCM call, breaking it entirely.

Moving the atomic_inc() and atomic_dec() calls out of the SCM call
functions and into thin wrappers around them fixes the issue.

Change-Id: Ia2a192efb2be3c2458ddbe49eaef0edcad4378aa
Signed-off-by: Sultan Alsawaf <sultan@kerneltoast.com>
Signed-off-by: xNombre <kartapolska@gmail.com>
Signed-off-by: Pranav Vashi <neobuddy89@gmail.com>
parent 876fa5f9
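
For readers outside the kernel tree, here is a minimal, self-contained userspace sketch of the pattern the patch adopts; it is not the kernel code itself. It assumes an AArch64 target and the GCC/Clang "register ... asm("x0")" extension, and every name in it (fw_call, __fw_call, call_count) is illustrative. The inline assembly that binds C variables to fixed registers sits alone in a leaf function, while the in-flight counter is maintained by a thin wrapper around it, so no compiler-emitted call (such as an out-of-line or LSE atomic helper) can clobber x0/x1 between the register setup and the trapping instruction.

/*
 * Minimal userspace sketch (AArch64 + GCC extensions only) of the shape of
 * the fix. Names are illustrative, not taken from the kernel source.
 */
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

static atomic_int call_count;

/*
 * Inner leaf function: nothing but the register bindings and the asm, so the
 * compiler cannot schedule any function call (e.g. an atomic helper) between
 * loading x0/x1 and the instruction that consumes them.
 */
static uint64_t __fw_call(uint64_t a0, uint64_t a1)
{
	register uint64_t r0 asm("x0") = a0;
	register uint64_t r1 asm("x1") = a1;

	/* Stand-in for the real "smc #0"; it just adds the two arguments. */
	asm volatile("add %0, %0, %1"
		     : "+r" (r0)
		     : "r" (r1));

	return r0;
}

/* Outer wrapper: the atomic bookkeeping lives here, outside the asm body. */
static uint64_t fw_call(uint64_t a0, uint64_t a1)
{
	uint64_t ret;

	atomic_fetch_add(&call_count, 1);
	ret = __fw_call(a0, a1);
	atomic_fetch_sub(&call_count, 1);

	return ret;
}

int main(void)
{
	printf("result = %llu, in-flight calls = %d\n",
	       (unsigned long long)fw_call(40, 2),
	       atomic_load(&call_count));
	return 0;
}

This mirrors the structure of the patch below: each __scm_call_armv8_* body containing the inline assembly is renamed to ___scm_call_armv8_*, and a new outer __scm_call_armv8_* wrapper performs the atomic_inc()/atomic_dec() around the call.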
+39 −12
@@ -381,7 +381,7 @@ int scm_call_noalloc(u32 svc_id, u32 cmd_id, const void *cmd_buf,
 
 #ifdef CONFIG_ARM64
 
-static int __scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
+static int ___scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
 				u64 *ret1, u64 *ret2, u64 *ret3)
 {
 	register u64 r0 asm("x0") = x0;
@@ -392,7 +392,6 @@ static int __scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
 	register u64 r5 asm("x5") = x5;
 	register u64 r6 asm("x6") = 0;
 
-	atomic_inc(&scm_call_count);
 	do {
 		asm volatile(
 			__asmeq("%0", R0_STR)
@@ -421,8 +420,6 @@ static int __scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
 			  "x14", "x15", "x16", "x17");
 	} while (r0 == SCM_INTERRUPTED);
 
-	atomic_dec(&scm_call_count);
-
 	if (ret1)
 		*ret1 = r1;
 	if (ret2)
@@ -433,7 +430,19 @@ static int __scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
 	return r0;
 }
 
-static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
+static int __scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
+				u64 *ret1, u64 *ret2, u64 *ret3)
+{
+	int ret;
+
+	atomic_inc(&scm_call_count);
+	ret = ___scm_call_armv8_64(x0, x1, x2, x3, x4, x5, ret1, ret2, ret3);
+	atomic_dec(&scm_call_count);
+
+	return ret;
+}
+
+static int ___scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
 				u64 *ret1, u64 *ret2, u64 *ret3)
 {
 	register u32 r0 asm("w0") = w0;
@@ -444,7 +453,6 @@ static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
 	register u32 r5 asm("w5") = w5;
 	register u32 r6 asm("w6") = 0;
 
-	atomic_inc(&scm_call_count);
 	do {
 		asm volatile(
 			__asmeq("%0", R0_STR)
@@ -474,8 +482,6 @@ static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
 
 	} while (r0 == SCM_INTERRUPTED);
 
-	atomic_dec(&scm_call_count);
-
 	if (ret1)
 		*ret1 = r1;
 	if (ret2)
@@ -486,9 +492,21 @@ static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
 	return r0;
 }
 
+static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
+				u64 *ret1, u64 *ret2, u64 *ret3)
+{
+	int ret;
+
+	atomic_inc(&scm_call_count);
+	ret = ___scm_call_armv8_32(w0, w1, w2, w3, w4, w5, ret1, ret2, ret3);
+	atomic_dec(&scm_call_count);
+
+	return ret;
+}
+
 #else
 
-static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
+static int ___scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
 				u64 *ret1, u64 *ret2, u64 *ret3)
 {
 	register u32 r0 asm("r0") = w0;
@@ -499,7 +517,6 @@ static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
 	register u32 r5 asm("r5") = w5;
 	register u32 r6 asm("r6") = 0;
 
-	atomic_inc(&scm_call_count);
 	do {
 		asm volatile(
 			__asmeq("%0", R0_STR)
@@ -527,8 +544,6 @@ static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
 
 	} while (r0 == SCM_INTERRUPTED);
 
-	atomic_dec(&scm_call_count);
-
 	if (ret1)
 		*ret1 = r1;
 	if (ret2)
@@ -539,6 +554,18 @@ static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
 	return r0;
 }
 
+static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
+				u64 *ret1, u64 *ret2, u64 *ret3)
+{
+	int ret;
+
+	atomic_inc(&scm_call_count);
+	ret = ___scm_call_armv8_32(w0, w1, w2, w3, w4, w5, ret1, ret2, ret3);
+	atomic_dec(&scm_call_count);
+
+	return ret;
+}
+
 static int __scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
 				u64 *ret1, u64 *ret2, u64 *ret3)
 {