Commit be1dd669 authored by Linus Torvalds
Browse files

Merge tag 'perf-tools-fixes-for-v5.10-2020-11-17' of...

Merge tag 'perf-tools-fixes-for-v5.10-2020-11-17' of git://git.kernel.org/pub/scm/linux/kernel/git/acme/linux

Pull perf tools fixes from Arnaldo Carvalho de Melo:

 - Fix file corruption due to event deletion in 'perf inject'.

 - Update arch/x86/lib/mem{cpy,set}_64.S copies used in 'perf bench mem
   memcpy', silencing perf build warning.

 - Avoid an msan warning in a copied stack in 'perf test'.

 - Correct tracepoint field name "flags" in ARM's CS-ETM hardware
   tracing 'perf test' entry.

 - Update branch sample pattern for cs-etm to cope with excluding guest
   in userspace counting.

 - Don't free "lock_seq_stat" if read_count isn't zero in 'perf lock'.

* tag 'perf-tools-fixes-for-v5.10-2020-11-17' of git://git.kernel.org/pub/scm/linux/kernel/git/acme/linux:
  perf test: Avoid an msan warning in a copied stack.
  perf inject: Fix file corruption due to event deletion
  perf test: Update branch sample pattern for cs-etm
  perf test: Fix a typo in cs-etm testing
  tools arch: Update arch/x86/lib/mem{cpy,set}_64.S copies used in 'perf bench mem memcpy'
  perf lock: Don't free "lock_seq_stat" if read_count isn't zero
  perf lock: Correct field name "flags"
parents 9dacf44c 568beb27
Loading
Loading
Loading
Loading
+3 −5
Original line number Diff line number Diff line
@@ -16,8 +16,6 @@
 * to a jmp to memcpy_erms which does the REP; MOVSB mem copy.
 */

.weak memcpy

/*
 * memcpy - Copy a memory block.
 *
@@ -30,7 +28,7 @@
 * rax original destination
 */
SYM_FUNC_START_ALIAS(__memcpy)
SYM_FUNC_START_LOCAL(memcpy)
SYM_FUNC_START_WEAK(memcpy)
	ALTERNATIVE_2 "jmp memcpy_orig", "", X86_FEATURE_REP_GOOD, \
		      "jmp memcpy_erms", X86_FEATURE_ERMS

@@ -51,14 +49,14 @@ EXPORT_SYMBOL(__memcpy)
 * memcpy_erms() - enhanced fast string memcpy. This is faster and
 * simpler than memcpy. Use memcpy_erms when possible.
 */
SYM_FUNC_START(memcpy_erms)
SYM_FUNC_START_LOCAL(memcpy_erms)
	movq %rdi, %rax
	movq %rdx, %rcx
	rep movsb
	ret
SYM_FUNC_END(memcpy_erms)

SYM_FUNC_START(memcpy_orig)
SYM_FUNC_START_LOCAL(memcpy_orig)
	movq %rdi, %rax

	cmpq $0x20, %rdx
+6 −5
Original line number Diff line number Diff line
@@ -4,8 +4,7 @@
#include <linux/linkage.h>
#include <asm/cpufeatures.h>
#include <asm/alternative-asm.h>

.weak memset
#include <asm/export.h>

/*
 * ISO C memset - set a memory block to a byte value. This function uses fast
@@ -18,7 +17,7 @@
 *
 * rax   original destination
 */
SYM_FUNC_START_ALIAS(memset)
SYM_FUNC_START_WEAK(memset)
SYM_FUNC_START(__memset)
	/*
	 * Some CPUs support enhanced REP MOVSB/STOSB feature. It is recommended
@@ -44,6 +43,8 @@ SYM_FUNC_START(__memset)
	ret
SYM_FUNC_END(__memset)
SYM_FUNC_END_ALIAS(memset)
EXPORT_SYMBOL(memset)
EXPORT_SYMBOL(__memset)

/*
 * ISO C memset - set a memory block to a byte value. This function uses
@@ -56,7 +57,7 @@ SYM_FUNC_END_ALIAS(memset)
 *
 * rax   original destination
 */
SYM_FUNC_START(memset_erms)
SYM_FUNC_START_LOCAL(memset_erms)
	movq %rdi,%r9
	movb %sil,%al
	movq %rdx,%rcx
@@ -65,7 +66,7 @@ SYM_FUNC_START(memset_erms)
	ret
SYM_FUNC_END(memset_erms)

SYM_FUNC_START(memset_orig)
SYM_FUNC_START_LOCAL(memset_orig)
	movq %rdi,%r10

	/* expand byte value  */
+7 −0
Original line number Diff line number Diff line
@@ -38,6 +38,13 @@ static int sample_ustack(struct perf_sample *sample,
	stack_size = stack_size > STACK_SIZE ? STACK_SIZE : stack_size;

	memcpy(buf, (void *) sp, stack_size);
#ifdef MEMORY_SANITIZER
	/*
	 * Copying the stack may copy msan poison, avoid false positives in the
	 * unwinder by removing the poison here.
	 */
	__msan_unpoison(buf, stack_size);
#endif
	stack->data = (char *) buf;
	stack->size = stack_size;
	return 0;
+3 −0
Original line number Diff line number Diff line
@@ -2,6 +2,9 @@

/* Various wrappers to make the kernel .S file build in user-space: */

// memcpy_orig and memcpy_erms are being defined as SYM_L_LOCAL but we need it
#define SYM_FUNC_START_LOCAL(name)                      \
        SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)
#define memcpy MEMCPY /* don't hide glibc's memcpy() */
#define altinstr_replacement text
#define globl p2align 4; .globl
+3 −0
Original line number Diff line number Diff line
/* SPDX-License-Identifier: GPL-2.0 */
// memset_orig and memset_erms are being defined as SYM_L_LOCAL but we need it
#define SYM_FUNC_START_LOCAL(name)                      \
        SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)
#define memset MEMSET /* don't hide glibc's memset() */
#define altinstr_replacement text
#define globl p2align 4; .globl
Loading