Commit 5235c7e2 authored by Mark Rutland's avatar Mark Rutland Committed by Catalin Marinas
Browse files

arm64: alternatives: use cpucap naming



To more clearly align the various users of the cpucap enumeration, this patch
changes the alternative code to use the term `cpucap` in favour of `feature`.
The alternative_has_feature_{likely,unlikely}() functions are renamed to
alternative_has_cap_{likely,unlikely}() to more clearly align with the
cpus_have_{const_,}cap() helpers.

At the same time remove the stale comment referring to the "ARM64_CB
bit", which is evidently a typo for ARM64_CB_PATCH, which was removed in
commit:

  4c0bd995 ("arm64: alternatives: have callbacks take a cap")

There should be no functional change as a result of this patch; this is
purely a renaming exercise.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Suzuki K Poulose <suzuki.poulose@arm.com>
Cc: Marc Zyngier <maz@kernel.org>
Cc: Mark Brown <broonie@kernel.org>
Cc: Will Deacon <will@kernel.org>
Link: https://lore.kernel.org/r/20230607164846.3967305-3-mark.rutland@arm.com


Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
parent 7f242982
Loading
Loading
Loading
Loading
+27 −27
Original line number Diff line number Diff line
@@ -23,17 +23,17 @@

#include <linux/stringify.h>

#define ALTINSTR_ENTRY(feature)					              \
#define ALTINSTR_ENTRY(cpucap)					              \
	" .word 661b - .\n"				/* label           */ \
	" .word 663f - .\n"				/* new instruction */ \
	" .hword " __stringify(feature) "\n"		/* feature bit     */ \
	" .hword " __stringify(cpucap) "\n"		/* cpucap          */ \
	" .byte 662b-661b\n"				/* source len      */ \
	" .byte 664f-663f\n"				/* replacement len */

#define ALTINSTR_ENTRY_CB(feature, cb)					      \
#define ALTINSTR_ENTRY_CB(cpucap, cb)					      \
	" .word 661b - .\n"				/* label           */ \
	" .word " __stringify(cb) "- .\n"		/* callback        */ \
	" .hword " __stringify(feature) "\n"		/* feature bit     */ \
	" .hword " __stringify(cpucap) "\n"		/* cpucap          */ \
	" .byte 662b-661b\n"				/* source len      */ \
	" .byte 664f-663f\n"				/* replacement len */

@@ -53,13 +53,13 @@
 *
 * Alternatives with callbacks do not generate replacement instructions.
 */
#define __ALTERNATIVE_CFG(oldinstr, newinstr, feature, cfg_enabled)	\
#define __ALTERNATIVE_CFG(oldinstr, newinstr, cpucap, cfg_enabled)	\
	".if "__stringify(cfg_enabled)" == 1\n"				\
	"661:\n\t"							\
	oldinstr "\n"							\
	"662:\n"							\
	".pushsection .altinstructions,\"a\"\n"				\
	ALTINSTR_ENTRY(feature)						\
	ALTINSTR_ENTRY(cpucap)						\
	".popsection\n"							\
	".subsection 1\n"						\
	"663:\n\t"							\
@@ -70,31 +70,31 @@
	".previous\n"							\
	".endif\n"

#define __ALTERNATIVE_CFG_CB(oldinstr, feature, cfg_enabled, cb)	\
#define __ALTERNATIVE_CFG_CB(oldinstr, cpucap, cfg_enabled, cb)	\
	".if "__stringify(cfg_enabled)" == 1\n"				\
	"661:\n\t"							\
	oldinstr "\n"							\
	"662:\n"							\
	".pushsection .altinstructions,\"a\"\n"				\
	ALTINSTR_ENTRY_CB(feature, cb)					\
	ALTINSTR_ENTRY_CB(cpucap, cb)					\
	".popsection\n"							\
	"663:\n\t"							\
	"664:\n\t"							\
	".endif\n"

#define _ALTERNATIVE_CFG(oldinstr, newinstr, feature, cfg, ...)	\
	__ALTERNATIVE_CFG(oldinstr, newinstr, feature, IS_ENABLED(cfg))
#define _ALTERNATIVE_CFG(oldinstr, newinstr, cpucap, cfg, ...)	\
	__ALTERNATIVE_CFG(oldinstr, newinstr, cpucap, IS_ENABLED(cfg))

#define ALTERNATIVE_CB(oldinstr, feature, cb) \
	__ALTERNATIVE_CFG_CB(oldinstr, (1 << ARM64_CB_SHIFT) | (feature), 1, cb)
#define ALTERNATIVE_CB(oldinstr, cpucap, cb) \
	__ALTERNATIVE_CFG_CB(oldinstr, (1 << ARM64_CB_SHIFT) | (cpucap), 1, cb)
#else

#include <asm/assembler.h>

.macro altinstruction_entry orig_offset alt_offset feature orig_len alt_len
.macro altinstruction_entry orig_offset alt_offset cpucap orig_len alt_len
	.word \orig_offset - .
	.word \alt_offset - .
	.hword (\feature)
	.hword (\cpucap)
	.byte \orig_len
	.byte \alt_len
.endm
@@ -210,9 +210,9 @@ alternative_endif
#endif  /*  __ASSEMBLY__  */

/*
 * Usage: asm(ALTERNATIVE(oldinstr, newinstr, feature));
 * Usage: asm(ALTERNATIVE(oldinstr, newinstr, cpucap));
 *
 * Usage: asm(ALTERNATIVE(oldinstr, newinstr, feature, CONFIG_FOO));
 * Usage: asm(ALTERNATIVE(oldinstr, newinstr, cpucap, CONFIG_FOO));
 * N.B. If CONFIG_FOO is specified, but not selected, the whole block
 *      will be omitted, including oldinstr.
 */
@@ -224,15 +224,15 @@ alternative_endif
#include <linux/types.h>

static __always_inline bool
alternative_has_feature_likely(const unsigned long feature)
alternative_has_cap_likely(const unsigned long cpucap)
{
	compiletime_assert(feature < ARM64_NCAPS,
			   "feature must be < ARM64_NCAPS");
	compiletime_assert(cpucap < ARM64_NCAPS,
			   "cpucap must be < ARM64_NCAPS");

	asm_volatile_goto(
	ALTERNATIVE_CB("b	%l[l_no]", %[feature], alt_cb_patch_nops)
	ALTERNATIVE_CB("b	%l[l_no]", %[cpucap], alt_cb_patch_nops)
	:
	: [feature] "i" (feature)
	: [cpucap] "i" (cpucap)
	:
	: l_no);

@@ -242,15 +242,15 @@ alternative_has_feature_likely(const unsigned long feature)
}

static __always_inline bool
alternative_has_feature_unlikely(const unsigned long feature)
alternative_has_cap_unlikely(const unsigned long cpucap)
{
	compiletime_assert(feature < ARM64_NCAPS,
			   "feature must be < ARM64_NCAPS");
	compiletime_assert(cpucap < ARM64_NCAPS,
			   "cpucap must be < ARM64_NCAPS");

	asm_volatile_goto(
	ALTERNATIVE("nop", "b	%l[l_yes]", %[feature])
	ALTERNATIVE("nop", "b	%l[l_yes]", %[cpucap])
	:
	: [feature] "i" (feature)
	: [cpucap] "i" (cpucap)
	:
	: l_yes);

+2 −2
Original line number Diff line number Diff line
@@ -13,7 +13,7 @@
struct alt_instr {
	s32 orig_offset;	/* offset to original instruction */
	s32 alt_offset;		/* offset to replacement instruction */
	u16 cpufeature;		/* cpufeature bit set for replacement */
	u16 cpucap;		/* cpucap bit set for replacement */
	u8  orig_len;		/* size of original instruction(s) */
	u8  alt_len;		/* size of new instruction(s), <= orig_len */
};
@@ -23,7 +23,7 @@ typedef void (*alternative_cb_t)(struct alt_instr *alt,

void __init apply_boot_alternatives(void);
void __init apply_alternatives_all(void);
bool alternative_is_applied(u16 cpufeature);
bool alternative_is_applied(u16 cpucap);

#ifdef CONFIG_MODULES
void apply_alternatives_module(void *start, size_t length);
+2 −2
Original line number Diff line number Diff line
@@ -437,7 +437,7 @@ unsigned long cpu_get_elf_hwcap2(void);

static __always_inline bool system_capabilities_finalized(void)
{
	return alternative_has_feature_likely(ARM64_ALWAYS_SYSTEM);
	return alternative_has_cap_likely(ARM64_ALWAYS_SYSTEM);
}

/*
@@ -464,7 +464,7 @@ static __always_inline bool __cpus_have_const_cap(int num)
{
	if (num >= ARM64_NCAPS)
		return false;
	return alternative_has_feature_unlikely(num);
	return alternative_has_cap_unlikely(num);
}

/*
+1 −1
Original line number Diff line number Diff line
@@ -24,7 +24,7 @@
static __always_inline bool __irqflags_uses_pmr(void)
{
	return IS_ENABLED(CONFIG_ARM64_PSEUDO_NMI) &&
	       alternative_has_feature_unlikely(ARM64_HAS_GIC_PRIO_MASKING);
	       alternative_has_cap_unlikely(ARM64_HAS_GIC_PRIO_MASKING);
}

static __always_inline void __daif_local_irq_enable(void)
+1 −1
Original line number Diff line number Diff line
@@ -18,7 +18,7 @@

static __always_inline bool system_uses_lse_atomics(void)
{
	return alternative_has_feature_likely(ARM64_HAS_LSE_ATOMICS);
	return alternative_has_cap_likely(ARM64_HAS_LSE_ATOMICS);
}

#define __lse_ll_sc_body(op, ...)					\
Loading