Commit 0909c8c2 authored by Benjamin Herrenschmidt, committed by Paul Mackerras

[POWERPC] Support feature fixups in vdso's



This patch reworks the feature fixup mechanism so vdso's can be fixed up.
The main issue was that the construct:

        .long   label  (or .llong on 64-bit)

will not work in the case of a shared library like the vdso. It will
generate an empty placeholder in the fixup table along with a reloc,
which is not something we can deal with in the vdso.

The idea here (thanks Alan Modra!) is to instead use something like:

1:
        .long   label - 1b

That is, the feature fixup tables no longer contain addresses of bits of
code to patch, but offsets of such code from the fixup table entry
itself. That is properly resolved by ld when building the .so's. I've
modified the fixup mechanism generically to use that method for the rest
of the kernel as well.
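
For reference, a stripped-down C sketch of how such an entry is consumed
at runtime (simplified from the do_feature_fixups() change in the diff
below; the struct layout and nop patching mirror the patch, while the
helper name apply_fixup() is only for illustration):

	struct fixup_entry {
		unsigned long	mask;		/* feature bits to test */
		unsigned long	value;		/* required feature bits */
		long		start_off;	/* code start, relative to this entry */
		long		end_off;	/* code end, relative to this entry */
	};

	static void apply_fixup(struct fixup_entry *fcur, unsigned long features)
	{
		unsigned int *p, *pstart, *pend;

		if ((features & fcur->mask) == fcur->value)
			return;		/* feature present: keep the code */

		/* Offsets are relative to the entry itself, in bytes. */
		pstart = ((unsigned int *)fcur) + (fcur->start_off / 4);
		pend = ((unsigned int *)fcur) + (fcur->end_off / 4);

		/* Feature absent: overwrite the range with nops. */
		for (p = pstart; p < pend; p++)
			*p = 0x60000000u;	/* ppc "nop" */
	}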

Another trick is that the 32-bit vDSO included in the 64-bit kernel
needs to have a table in the 64-bit format. However, gas does not
support 32-bit code with a statement of the form:

        .llong  label - 1b  (or even just .llong label)

That is, it cannot emit the right fixup/relocation for the linker to use
to assign a 32-bit address to an .llong field. Thus, in the specific
case of the 32-bit vdso built as part of the 64-bit kernel, we use
a modified macro that generates:

        .long   0xffffffff
        .long   label - 1b

Note that this assumes the value is negative, which is enforced by
the .lds (those offsets are always negative, as the .text is always
before the fixup table, and gas doesn't support emitting the reloc the
other way around).
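
To see why the all-ones high word yields a correct 64-bit entry when the
offset is negative, here is a small stand-alone C illustration (the
offset value below is made up; the point is that the high 32 bits of a
sign-extended negative 64-bit value are all ones):

	#include <stdint.h>
	#include <stdio.h>

	int main(void)
	{
		int32_t  off32 = -0x1234;	/* hypothetical negative offset (label - 1b) */
		uint32_t hi = 0xffffffffu;	/* emitted by ".long 0xffffffff" */
		uint32_t lo = (uint32_t)off32;	/* emitted by ".long label - 1b" */

		/* Read back as one big-endian 64-bit field: high word, then low word. */
		int64_t entry = (int64_t)(((uint64_t)hi << 32) | lo);

		printf("%lld\n", (long long)entry);	/* prints -4660, i.e. -0x1234 */
		return 0;
	}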

Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
Signed-off-by: Paul Mackerras <paulus@samba.org>
parent 7aeb7324
+5 −6
@@ -1202,14 +1202,13 @@ struct cpu_spec *identify_cpu(unsigned long offset)
 	return NULL;
 }
 
-void do_feature_fixups(unsigned long offset, unsigned long value,
-		       void *fixup_start, void *fixup_end)
+void do_feature_fixups(unsigned long value, void *fixup_start, void *fixup_end)
 {
 	struct fixup_entry {
 		unsigned long	mask;
 		unsigned long	value;
-		unsigned int	*start;
-		unsigned int	*end;
+		long		start_off;
+		long		end_off;
 	} *fcur, *fend;
 
 	fcur = fixup_start;
@@ -1224,8 +1223,8 @@ void do_feature_fixups(unsigned long offset, unsigned long value,
 		/* These PTRRELOCs will disappear once the new scheme for
 		 * modules and vdso is implemented
 		 */
-		pstart = PTRRELOC(fcur->start);
-		pend = PTRRELOC(fcur->end);
+		pstart = ((unsigned int *)fcur) + (fcur->start_off / 4);
+		pend = ((unsigned int *)fcur) + (fcur->end_off / 4);
 
 		for (p = pstart; p < pend; p++) {
 			*p = 0x60000000u;
+1 −1
@@ -103,7 +103,7 @@ unsigned long __init early_init(unsigned long dt_ptr)
 	 */
 	spec = identify_cpu(offset);
 
-	do_feature_fixups(offset, spec->cpu_features,
+	do_feature_fixups(spec->cpu_features,
 			  PTRRELOC(&__start___ftr_fixup),
 			  PTRRELOC(&__stop___ftr_fixup));
 
+2 −2
@@ -354,9 +354,9 @@ void __init setup_system(void)
 	/* Apply the CPUs-specific and firmware specific fixups to kernel
 	 * text (nop out sections not relevant to this CPU or this firmware)
 	 */
-	do_feature_fixups(0, cur_cpu_spec->cpu_features,
+	do_feature_fixups(cur_cpu_spec->cpu_features,
 			  &__start___ftr_fixup, &__stop___ftr_fixup);
-	do_feature_fixups(0, powerpc_firmware_features,
+	do_feature_fixups(powerpc_firmware_features,
 			  &__start___fw_ftr_fixup, &__stop___fw_ftr_fixup);
 
 	/*
+43 −0
@@ -36,6 +36,8 @@
 #include <asm/vdso.h>
 #include <asm/vdso_datapage.h>
 
+#include "setup.h"
+
 #undef DEBUG
 
 #ifdef DEBUG
@@ -586,6 +588,43 @@ static __init int vdso_fixup_datapage(struct lib32_elfinfo *v32,
 	return 0;
 }
 
+
+static __init int vdso_fixup_features(struct lib32_elfinfo *v32,
+				      struct lib64_elfinfo *v64)
+{
+	void *start32;
+	unsigned long size32;
+
+#ifdef CONFIG_PPC64
+	void *start64;
+	unsigned long size64;
+
+	start64 = find_section64(v64->hdr, "__ftr_fixup", &size64);
+	if (start64)
+		do_feature_fixups(cur_cpu_spec->cpu_features,
+				  start64, start64 + size64);
+
+	start64 = find_section64(v64->hdr, "__fw_ftr_fixup", &size64);
+	if (start64)
+		do_feature_fixups(powerpc_firmware_features,
+				  start64, start64 + size64);
+#endif /* CONFIG_PPC64 */
+
+	start32 = find_section32(v32->hdr, "__ftr_fixup", &size32);
+	if (start32)
+		do_feature_fixups(cur_cpu_spec->cpu_features,
+				  start32, start32 + size32);
+
+#ifdef CONFIG_PPC64
+	start32 = find_section32(v32->hdr, "__fw_ftr_fixup", &size32);
+	if (start32)
+		do_feature_fixups(powerpc_firmware_features,
+				  start32, start32 + size32);
+#endif /* CONFIG_PPC64 */
+
+	return 0;
+}
+
 static __init int vdso_fixup_alt_funcs(struct lib32_elfinfo *v32,
 				       struct lib64_elfinfo *v64)
 {
@@ -634,6 +673,9 @@ static __init int vdso_setup(void)
 	if (vdso_fixup_datapage(&v32, &v64))
 		return -1;
 
+	if (vdso_fixup_features(&v32, &v64))
+		return -1;
+
 	if (vdso_fixup_alt_funcs(&v32, &v64))
 		return -1;
 
@@ -714,6 +756,7 @@ void __init vdso_init(void)
 	 * Setup the syscall map in the vDOS
 	 */
 	vdso_setup_syscall_map();
+
 	/*
 	 * Initialize the vDSO images in memory, that is do necessary
 	 * fixups of vDSO symbols, locate trampolines, etc...
+12 −0
@@ -32,6 +32,18 @@ SECTIONS
   PROVIDE (_etext = .);
   PROVIDE (etext = .);
 
+  . = ALIGN(8);
+  __ftr_fixup : {
+    *(__ftr_fixup)
+  }
+
+#ifdef CONFIG_PPC64
+  . = ALIGN(8);
+  __fw_ftr_fixup : {
+    *(__fw_ftr_fixup)
+  }
+#endif
+
   /* Other stuff is appended to the text segment: */
   .rodata		: { *(.rodata .rodata.* .gnu.linkonce.r.*) }
   .rodata1		: { *(.rodata1) }