Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit e2a3d402 authored by Linus Torvalds
Browse files

power: improve inline asm memory constraints



Use "+m" rather than a combination of "=m" and "m" for improved
clarity and consistency.

Signed-off-by: Linus Torvalds <torvalds@osdl.org>
parent a496e25d
Loading
Loading
Loading
Loading
+16 −16
Original line number Diff line number Diff line
@@ -27,8 +27,8 @@ static __inline__ void atomic_add(int a, atomic_t *v)
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (a), "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (a), "r" (&v->counter)
	: "cc");
}

@@ -63,8 +63,8 @@ static __inline__ void atomic_sub(int a, atomic_t *v)
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (a), "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (a), "r" (&v->counter)
	: "cc");
}

@@ -97,8 +97,8 @@ static __inline__ void atomic_inc(atomic_t *v)
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (&v->counter)
	: "cc");
}

@@ -141,8 +141,8 @@ static __inline__ void atomic_dec(atomic_t *v)
	PPC405_ERR77(0,%2)\
"	stwcx.	%0,0,%2\n\
	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (&v->counter)
	: "cc");
}

@@ -253,8 +253,8 @@ static __inline__ void atomic64_add(long a, atomic64_t *v)
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (a), "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (a), "r" (&v->counter)
	: "cc");
}

@@ -287,8 +287,8 @@ static __inline__ void atomic64_sub(long a, atomic64_t *v)
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (a), "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (a), "r" (&v->counter)
	: "cc");
}

@@ -319,8 +319,8 @@ static __inline__ void atomic64_inc(atomic64_t *v)
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (&v->counter)
	: "cc");
}

@@ -361,8 +361,8 @@ static __inline__ void atomic64_dec(atomic64_t *v)
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
-	: "=&r" (t), "=m" (v->counter)
-	: "r" (&v->counter), "m" (v->counter)
+	: "=&r" (t), "+m" (v->counter)
+	: "r" (&v->counter)
	: "cc");
}

+8 −8
Original line number Diff line number Diff line
@@ -65,8 +65,8 @@ static __inline__ void set_bit(int nr, volatile unsigned long *addr)
	PPC405_ERR77(0,%3)
	PPC_STLCX "%0,0,%3\n"
	"bne-	1b"
-	: "=&r"(old), "=m"(*p)
-	: "r"(mask), "r"(p), "m"(*p)
+	: "=&r" (old), "+m" (*p)
+	: "r" (mask), "r" (p)
	: "cc" );
}

@@ -82,8 +82,8 @@ static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
	PPC405_ERR77(0,%3)
	PPC_STLCX "%0,0,%3\n"
	"bne-	1b"
-	: "=&r"(old), "=m"(*p)
-	: "r"(mask), "r"(p), "m"(*p)
+	: "=&r" (old), "+m" (*p)
+	: "r" (mask), "r" (p)
	: "cc" );
}

@@ -99,8 +99,8 @@ static __inline__ void change_bit(int nr, volatile unsigned long *addr)
	PPC405_ERR77(0,%3)
	PPC_STLCX "%0,0,%3\n"
	"bne-	1b"
-	: "=&r"(old), "=m"(*p)
-	: "r"(mask), "r"(p), "m"(*p)
+	: "=&r" (old), "+m" (*p)
+	: "r" (mask), "r" (p)
	: "cc" );
}

@@ -179,8 +179,8 @@ static __inline__ void set_bits(unsigned long mask, unsigned long *addr)
	"or	%0,%0,%2\n"
	PPC_STLCX "%0,0,%3\n"
	"bne-	1b"
-	: "=&r" (old), "=m" (*addr)
-	: "r" (mask), "r" (addr), "m" (*addr)
+	: "=&r" (old), "+m" (*addr)
+	: "r" (mask), "r" (addr)
	: "cc");
}

+8 −8
Original line number Diff line number Diff line
@@ -220,8 +220,8 @@ __xchg_u32(volatile void *p, unsigned long val)
"	stwcx.	%3,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
-	: "=&r" (prev), "=m" (*(volatile unsigned int *)p)
-	: "r" (p), "r" (val), "m" (*(volatile unsigned int *)p)
+	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
+	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
@@ -240,8 +240,8 @@ __xchg_u64(volatile void *p, unsigned long val)
"	stdcx.	%3,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
-	: "=&r" (prev), "=m" (*(volatile unsigned long *)p)
-	: "r" (p), "r" (val), "m" (*(volatile unsigned long *)p)
+	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
+	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
@@ -299,8 +299,8 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
	ISYNC_ON_SMP
	"\n\
2:"
-	: "=&r" (prev), "=m" (*p)
-	: "r" (p), "r" (old), "r" (new), "m" (*p)
+	: "=&r" (prev), "+m" (*p)
+	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
@@ -322,8 +322,8 @@ __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
	ISYNC_ON_SMP
	"\n\
2:"
-	: "=&r" (prev), "=m" (*p)
-	: "r" (p), "r" (old), "r" (new), "m" (*p)
+	: "=&r" (prev), "+m" (*p)
+	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;