Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 1ad752ad authored by Linux Build Service Account, committed by Gerrit - the friendly Code Review server
Browse files

Merge "Perf: ARM: Support 32bit armv8-pmuv3 driver"

parents 0b63aa50 1255505e
Loading
Loading
Loading
Loading
+87 −0
Original line number Diff line number Diff line
@@ -26,4 +26,91 @@ extern unsigned long perf_misc_flags(struct pt_regs *regs);
	(regs)->ARM_cpsr = SVC_MODE; \
}

/*
 * AArch32 (CP15) read accessors for the ARMv8 PMU registers.
 * Each helper wraps a single mrc instruction; the target register is
 * encoded in the CRn/CRm/opc2 fields of the asm string, and the name
 * of each function identifies the PMU register it reads.
 */

/* Read PMCR, the PMU control register (CP15 c9,c12,0). */
static inline u32 armv8pmu_pmcr_read_reg(void)
{
	u32 val;

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	return val;
}

/* Read PMCCNTR, the cycle counter (CP15 c9,c13,0). */
static inline u32 armv8pmu_pmccntr_read_reg(void)
{
	u32 val;

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
	return val;
}

/*
 * Read PMXEVCNTR, the event counter currently selected via PMSELR
 * (CP15 c9,c13,2). Callers must select the counter first.
 */
static inline u32 armv8pmu_pmxevcntr_read_reg(void)
{
	u32 val;

	asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
	return val;
}

/* Read the overflow status register (CP15 c9,c12,3). */
static inline u32 armv8pmu_pmovsclr_read_reg(void)
{
	u32 val;

	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
	return val;
}

/*
 * AArch32 (CP15) write accessors for the ARMv8 PMU registers.
 * Each helper wraps a single mcr instruction; the function name
 * identifies the PMU register written. None of these insert
 * barriers — callers are responsible for any isb() required.
 */

/* Write PMCR, the PMU control register. */
static inline void armv8pmu_pmcr_write_reg(u32 val)
{
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r" (val));
}

/* Write PMSELR to select which event counter PMXEV* accesses target. */
static inline void armv8pmu_pmselr_write_reg(u32 val)
{
	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (val));
}

/* Write PMCCNTR, the cycle counter. */
static inline void armv8pmu_pmccntr_write_reg(u32 val)
{
	asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (val));
}

/* Write the event counter currently selected via PMSELR. */
static inline void armv8pmu_pmxevcntr_write_reg(u32 val)
{
	asm volatile("mcr p15, 0, %0, c9, c13, 2" : : "r" (val));
}

/* Write the event type for the counter currently selected via PMSELR. */
static inline void armv8pmu_pmxevtyper_write_reg(u32 val)
{
	asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
}

/* Set counter-enable bits (write-one-to-set). */
static inline void armv8pmu_pmcntenset_write_reg(u32 val)
{
	asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (val));
}

/* Clear counter-enable bits (write-one-to-clear). */
static inline void armv8pmu_pmcntenclr_write_reg(u32 val)
{
	asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (val));
}

/* Set overflow-interrupt-enable bits (write-one-to-set). */
static inline void armv8pmu_pmintenset_write_reg(u32 val)
{
	asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (val));
}

/* Clear overflow-interrupt-enable bits (write-one-to-clear). */
static inline void armv8pmu_pmintenclr_write_reg(u32 val)
{
	asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (val));
}

/* Clear overflow status bits (write-one-to-clear). */
static inline void armv8pmu_pmovsclr_write_reg(u32 val)
{
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));
}

/* Write PMUSERENR, controlling userspace access to the PMU. */
static inline void armv8pmu_pmuserenr_write_reg(u32 val)
{
	asm volatile("mcr p15, 0, %0, c9, c14, 0" : : "r" (val));
}

#endif /* __ARM_PERF_EVENT_H__ */
+87 −0
Original line number Diff line number Diff line
@@ -31,4 +31,91 @@ extern unsigned long perf_misc_flags(struct pt_regs *regs);
	(regs)->pstate = PSR_MODE_EL1h;	\
}

/*
 * AArch64 system-register read accessors for the ARMv8 PMU.
 * Same interface as the AArch32 (CP15) variants, so the common
 * PMU driver can call these without caring about the ISA.
 */

/* Read PMCR_EL0, the PMU control register. */
static inline u32 armv8pmu_pmcr_read_reg(void)
{
	u32 val;

	asm volatile("mrs %0, pmcr_el0" : "=r" (val));
	return val;
}

/*
 * Read the cycle counter.
 * NOTE(review): PMCCNTR_EL0 is a 64-bit register but val is u32, so
 * only the low 32 bits are returned — presumably intentional to match
 * the AArch32 accessor; confirm against the driver's counter handling.
 */
static inline u32 armv8pmu_pmccntr_read_reg(void)
{
	u32 val;

	asm volatile("mrs %0, pmccntr_el0" : "=r" (val));
	return val;
}

/* Read the event counter currently selected via PMSELR_EL0. */
static inline u32 armv8pmu_pmxevcntr_read_reg(void)
{
	u32 val;

	asm volatile("mrs %0, pmxevcntr_el0" : "=r" (val));
	return val;
}

/* Read the overflow status register. */
static inline u32 armv8pmu_pmovsclr_read_reg(void)
{
	u32 val;

	asm volatile("mrs %0, pmovsclr_el0" : "=r" (val));
	return val;
}

/*
 * AArch64 system-register write accessors for the ARMv8 PMU.
 * One msr per helper; no barriers are inserted here — callers
 * add isb() where ordering matters.
 */

/* Write PMCR_EL0, the PMU control register. */
static inline void armv8pmu_pmcr_write_reg(u32 val)
{
	asm volatile("msr pmcr_el0, %0" :: "r" (val));
}

/* Select which event counter the PMXEV* registers target. */
static inline void armv8pmu_pmselr_write_reg(u32 val)
{
	asm volatile("msr pmselr_el0, %0" :: "r" (val));
}

/* Write the cycle counter. */
static inline void armv8pmu_pmccntr_write_reg(u32 val)
{
	asm volatile("msr pmccntr_el0, %0" :: "r" (val));
}

/* Write the event counter currently selected via PMSELR_EL0. */
static inline void armv8pmu_pmxevcntr_write_reg(u32 val)
{
	asm volatile("msr pmxevcntr_el0, %0" :: "r" (val));
}

/* Write the event type for the counter selected via PMSELR_EL0. */
static inline void armv8pmu_pmxevtyper_write_reg(u32 val)
{
	asm volatile("msr pmxevtyper_el0, %0" :: "r" (val));
}

/* Set counter-enable bits (write-one-to-set). */
static inline void armv8pmu_pmcntenset_write_reg(u32 val)
{
	asm volatile("msr pmcntenset_el0, %0" :: "r" (val));
}

/* Clear counter-enable bits (write-one-to-clear). */
static inline void armv8pmu_pmcntenclr_write_reg(u32 val)
{
	asm volatile("msr pmcntenclr_el0, %0" :: "r" (val));
}

/* Set overflow-interrupt-enable bits (EL1 register, write-one-to-set). */
static inline void armv8pmu_pmintenset_write_reg(u32 val)
{
	asm volatile("msr pmintenset_el1, %0" :: "r" (val));
}

/* Clear overflow-interrupt-enable bits (EL1 register, write-one-to-clear). */
static inline void armv8pmu_pmintenclr_write_reg(u32 val)
{
	asm volatile("msr pmintenclr_el1, %0" :: "r" (val));
}

/* Clear overflow status bits (write-one-to-clear). */
static inline void armv8pmu_pmovsclr_write_reg(u32 val)
{
	asm volatile("msr pmovsclr_el0, %0" :: "r" (val));
}

/* Write PMUSERENR_EL0, controlling userspace access to the PMU. */
static inline void armv8pmu_pmuserenr_write_reg(u32 val)
{
	asm volatile("msr pmuserenr_el0, %0" :: "r" (val));
}

#endif
+1 −2
Original line number Diff line number Diff line
@@ -31,8 +31,7 @@ arm64-obj-$(CONFIG_FUNCTION_TRACER) += ftrace.o entry-ftrace.o
arm64-obj-$(CONFIG_MODULES)		+= arm64ksyms.o module.o
arm64-obj-$(CONFIG_ARM64_MODULE_PLTS)	+= module-plts.o
arm64-obj-$(CONFIG_PERF_EVENTS)		+= perf_regs.o perf_callchain.o
# perf_event.o dropped here: the armv8 PMU driver now builds from drivers/perf.
arm64-obj-$(CONFIG_HW_PERF_EVENTS)	+= perf_debug.o	perf_trace_counters.o	\
					   perf_trace_user.o
arm64-obj-$(CONFIG_HAVE_HW_BREAKPOINT)	+= hw_breakpoint.o
arm64-obj-$(CONFIG_CPU_PM)		+= sleep.o suspend.o
+1 −0
Original line number Diff line number Diff line
# ARM PMU framework core, plus the ARMv8 PMUv3 back-end driver.
obj-$(CONFIG_ARM_PMU) += arm_pmu.o
obj-$(CONFIG_HW_PERF_EVENTS) += perf_event_armv8.o
+18 −19
Original line number Diff line number Diff line
@@ -20,6 +20,7 @@
 */

#include <asm/irq_regs.h>
#include <asm/perf_event.h>

#include <linux/of.h>
#include <linux/perf/arm_pmu.h>
@@ -239,16 +240,14 @@ struct arm_pmu_and_idle_nb {

/*
 * Read the PMU control register through the per-arch accessor,
 * which hides the AArch32 (mrc) vs AArch64 (mrs) difference.
 */
static inline u32 armv8pmu_pmcr_read(void)
{
	return armv8pmu_pmcr_read_reg();
}

/*
 * Write the PMU control register. Only the architected control bits
 * (ARMV8_PMCR_MASK) are written; the isb() orders prior PMU register
 * programming before the control update takes effect.
 */
inline void armv8pmu_pmcr_write(u32 val)
{
	val &= ARMV8_PMCR_MASK;
	isb();
	armv8pmu_pmcr_write_reg(val);
}

static inline int armv8pmu_has_overflowed(u32 pmovsr)
@@ -270,7 +269,7 @@ static inline int armv8pmu_counter_has_overflowed(u32 pmnc, int idx)
static inline int armv8pmu_select_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	asm volatile("msr pmselr_el0, %0" :: "r" (counter));
	armv8pmu_pmselr_write_reg(counter);
	isb();

	return idx;
@@ -287,9 +286,9 @@ static inline u32 armv8pmu_read_counter(struct perf_event *event)
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV8_IDX_CYCLE_COUNTER)
		asm volatile("mrs %0, pmccntr_el0" : "=r" (value));
		value = armv8pmu_pmccntr_read_reg();
	else if (armv8pmu_select_counter(idx) == idx)
		asm volatile("mrs %0, pmxevcntr_el0" : "=r" (value));
		value = armv8pmu_pmxevcntr_read_reg();

	return value;
}
@@ -304,47 +303,47 @@ static inline void armv8pmu_write_counter(struct perf_event *event, u32 value)
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV8_IDX_CYCLE_COUNTER)
		asm volatile("msr pmccntr_el0, %0" :: "r" (value));
		armv8pmu_pmccntr_write_reg(value);
	else if (armv8pmu_select_counter(idx) == idx)
		asm volatile("msr pmxevcntr_el0, %0" :: "r" (value));
		armv8pmu_pmxevcntr_write_reg(value);
}

/*
 * Program the event type for counter @idx: select the counter, mask
 * @val to the architected event-type bits, then write PMXEVTYPER
 * through the per-arch accessor.
 */
inline void armv8pmu_write_evtype(int idx, u32 val)
{
	if (armv8pmu_select_counter(idx) == idx) {
		val &= ARMV8_EVTYPE_MASK;
		armv8pmu_pmxevtyper_write_reg(val);
	}
}

/* Enable counter @idx by setting its bit in the counter-enable-set register. */
inline int armv8pmu_enable_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);

	armv8pmu_pmcntenset_write_reg(BIT(counter));
	return idx;
}

/* Disable counter @idx by setting its bit in the counter-enable-clear register. */
inline int armv8pmu_disable_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);

	armv8pmu_pmcntenclr_write_reg(BIT(counter));
	return idx;
}

/* Enable the overflow interrupt for counter @idx via the interrupt-enable-set register. */
inline int armv8pmu_enable_intens(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);

	armv8pmu_pmintenset_write_reg(BIT(counter));
	return idx;
}

inline int armv8pmu_disable_intens(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	asm volatile("msr pmintenclr_el1, %0" :: "r" (BIT(counter)));
	armv8pmu_pmintenclr_write_reg(BIT(counter));
	isb();
	/* Clear the overflow flag in case an interrupt is pending. */
	asm volatile("msr pmovsclr_el0, %0" :: "r" (BIT(counter)));
	armv8pmu_pmovsclr_write_reg(BIT(counter));
	isb();

	return idx;
@@ -355,11 +354,11 @@ inline u32 armv8pmu_getreset_flags(void)
	u32 value;

	/* Read */
	asm volatile("mrs %0, pmovsclr_el0" : "=r" (value));
	value = armv8pmu_pmovsclr_read_reg();

	/* Write to clear flags */
	value &= ARMV8_OVSR_MASK;
	asm volatile("msr pmovsclr_el0, %0" :: "r" (value));
	armv8pmu_pmovsclr_write_reg(value);

	return value;
}
@@ -566,14 +565,14 @@ static int armv8pmu_set_event_filter(struct hw_perf_event *event,
/* Grant userspace access to the PMU via the user-enable register. */
static void armv8pmu_init_usermode(void)
{
	/* Enable access from userspace. */
	armv8pmu_pmuserenr_write_reg(0xF);
}
#else
/* Usermode access disabled in this configuration: clear the user-enable register. */
static inline void armv8pmu_init_usermode(void)
{
	/* Disable access from userspace. */
	armv8pmu_pmuserenr_write_reg(0);
}
#endif