Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit bed4f130 authored by Ingo Molnar's avatar Ingo Molnar
Browse files

Merge branch 'x86/irq' into x86/core

parents 3e5621ed bf8bd66d
Loading
Loading
Loading
Loading
+66 −31
Original line number Diff line number Diff line
@@ -6,10 +6,10 @@
#endif

/*
   Macros for dwarf2 CFI unwind table entries.
   See "as.info" for details on these pseudo ops. Unfortunately
   they are only supported in very new binutils, so define them
   away for older version.
 * Macros for dwarf2 CFI unwind table entries.
 * See "as.info" for details on these pseudo ops. Unfortunately
 * they are only supported in very new binutils, so define them
 * away for older versions.
 */

#ifdef CONFIG_AS_CFI
@@ -36,8 +36,10 @@

#else

/* Due to the structure of pre-existing code, don't use assembler line
   comment character # to ignore the arguments. Instead, use a dummy macro. */
/*
 * Due to the structure of pre-existing code, don't use assembler line
 * comment character # to ignore the arguments. Instead, use a dummy macro.
 */
.macro cfi_ignore a=0, b=0, c=0, d=0
.endm

@@ -58,4 +60,37 @@

#endif

/*
 * An attempt to make CFI annotations more or less
 * correct and shorter. It is implied that you know
 * what you're doing if you use them.
 */
#ifdef __ASSEMBLY__
#ifdef CONFIG_X86_64
	.macro pushq_cfi reg
	pushq \reg
	CFI_ADJUST_CFA_OFFSET 8
	.endm

	.macro popq_cfi reg
	popq \reg
	CFI_ADJUST_CFA_OFFSET -8
	.endm

	.macro movq_cfi reg offset=0
	movq %\reg, \offset(%rsp)
	CFI_REL_OFFSET \reg, \offset
	.endm

	.macro movq_cfi_restore offset reg
	movq \offset(%rsp), %\reg
	CFI_RESTORE \reg
	.endm
#else /*!CONFIG_X86_64*/

	/* 32-bit definitions are still missing */

#endif /*!CONFIG_X86_64*/
#endif /*__ASSEMBLY__*/

#endif /* _ASM_X86_DWARF2_H */
+2 −0
Original line number Diff line number Diff line
@@ -22,6 +22,8 @@ DECLARE_PER_CPU(irq_cpustat_t, irq_stat);
#define __ARCH_IRQ_STAT
#define __IRQ_STAT(cpu, member) (per_cpu(irq_stat, cpu).member)

#define inc_irq_stat(member)	(__get_cpu_var(irq_stat).member++)

void ack_bad_irq(unsigned int irq);
#include <linux/irq_cpustat.h>

+2 −0
Original line number Diff line number Diff line
@@ -11,6 +11,8 @@

#define __ARCH_IRQ_STAT 1

#define inc_irq_stat(member)	add_pda(member, 1)

#define local_softirq_pending() read_pda(__softirq_pending)

#define __ARCH_SET_SOFTIRQ_PENDING 1
+1 −3
Original line number Diff line number Diff line
@@ -109,9 +109,7 @@ extern asmlinkage void smp_invalidate_interrupt(struct pt_regs *);
#endif
#endif

#ifdef CONFIG_X86_32
extern void (*const interrupt[NR_VECTORS])(void);
#endif
extern void (*__initconst interrupt[NR_VECTORS-FIRST_EXTERNAL_VECTOR])(void);

typedef int vector_irq_t[NR_VECTORS];
DECLARE_PER_CPU(vector_irq_t, vector_irq);
+60 −0
Original line number Diff line number Diff line
@@ -57,5 +57,65 @@
#define __ALIGN_STR ".align 16,0x90"
#endif

/*
 * Check that ENTRY_X86/END_X86 and
 * KPROBE_ENTRY_X86/KPROBE_END_X86 pairs are not
 * unbalanced, missing, or mixed with one another.
 */
#define __set_entry_x86		.set ENTRY_X86_IN, 0
#define __unset_entry_x86	.set ENTRY_X86_IN, 1
#define __set_kprobe_x86	.set KPROBE_X86_IN, 0
#define __unset_kprobe_x86	.set KPROBE_X86_IN, 1

#define __macro_err_x86 .error "ENTRY_X86/KPROBE_X86 unbalanced,missed,mixed"

#define __check_entry_x86	\
	.ifdef ENTRY_X86_IN;	\
	.ifeq ENTRY_X86_IN;	\
	__macro_err_x86;	\
	.abort;			\
	.endif;			\
	.endif

#define __check_kprobe_x86	\
	.ifdef KPROBE_X86_IN;	\
	.ifeq KPROBE_X86_IN;	\
	__macro_err_x86;	\
	.abort;			\
	.endif;			\
	.endif

#define __check_entry_kprobe_x86	\
	__check_entry_x86;		\
	__check_kprobe_x86

#define ENTRY_KPROBE_FINAL_X86 __check_entry_kprobe_x86

#define ENTRY_X86(name)			\
	__check_entry_kprobe_x86;	\
	__set_entry_x86;		\
	.globl name;			\
	__ALIGN;			\
	name:

#define END_X86(name)			\
	__unset_entry_x86;		\
	__check_entry_kprobe_x86;	\
	.size name, .-name

#define KPROBE_ENTRY_X86(name)		\
	__check_entry_kprobe_x86;	\
	__set_kprobe_x86;		\
	.pushsection .kprobes.text, "ax"; \
	.globl name;			\
	__ALIGN;			\
	name:

#define KPROBE_END_X86(name)		\
	__unset_kprobe_x86;		\
	__check_entry_kprobe_x86;	\
	.size name, .-name;		\
	.popsection

#endif /* _ASM_X86_LINKAGE_H */
Loading