Commit bf1a1bad authored by Linus Torvalds's avatar Linus Torvalds
Browse files

Merge tag 'riscv-for-linus-6.3-mw2' of git://git.kernel.org/pub/scm/linux/kernel/git/riscv/linux

Pull more RISC-V updates from Palmer Dabbelt:

 - Some cleanups and fixes for the Zbb-optimized string routines

 - Support for custom (vendor or implementation defined) perf events

 - COMMAND_LINE_SIZE has been increased to 1024

* tag 'riscv-for-linus-6.3-mw2' of git://git.kernel.org/pub/scm/linux/kernel/git/riscv/linux:
  riscv: Bump COMMAND_LINE_SIZE value to 1024
  drivers/perf: RISC-V: Allow programming custom firmware events
  riscv, lib: Fix Zbb strncmp
  RISC-V: improve string-function assembly
parents 271d8939 61fc1ee8
Loading
Loading
Loading
Loading
+8 −0
Original line number Diff line number Diff line
/* SPDX-License-Identifier: GPL-2.0-only WITH Linux-syscall-note */

#ifndef _UAPI_ASM_RISCV_SETUP_H
#define _UAPI_ASM_RISCV_SETUP_H

/*
 * Maximum size, in bytes, of the kernel boot command line buffer.
 * NOTE(review): this is a uapi header (guard prefix _UAPI_), so the
 * value is exported to userspace — changing it is ABI-visible.
 */
#define COMMAND_LINE_SIZE	1024

#endif /* _UAPI_ASM_RISCV_SETUP_H */
+4 −2
Original line number Diff line number Diff line
@@ -40,7 +40,9 @@ SYM_FUNC_START(strcmp)
	ret

/*
 * Variant of strcmp using the ZBB extension if available
 * Variant of strcmp using the ZBB extension if available.
 * The code was published as part of the bitmanip manual
 * in Appendix A.
 */
#ifdef CONFIG_RISCV_ISA_ZBB
strcmp_zbb:
@@ -57,7 +59,7 @@ strcmp_zbb:
	 *   a1 - string2
	 *
	 * Clobbers
	 *   t0, t1, t2, t3, t4, t5
	 *   t0, t1, t2, t3, t4
	 */

	or	t2, a0, a1
+5 −5
Original line number Diff line number Diff line
@@ -96,7 +96,7 @@ strlen_zbb:
	 * of valid bytes in this chunk.
	 */
	srli	a0, t1, 3
	bgtu	t3, a0, 3f
	bgtu	t3, a0, 2f

	/* Prepare for the word comparison loop. */
	addi	t2, t0, SZREG
@@ -112,20 +112,20 @@ strlen_zbb:
	addi	t0, t0, SZREG
	orc.b	t1, t1
	beq	t1, t3, 1b
2:

	not	t1, t1
	CZ	t1, t1
	srli	t1, t1, 3

	/* Get number of processed words.  */
	/* Get number of processed bytes. */
	sub	t2, t0, t2

	/* Add number of characters in the first word.  */
	add	a0, a0, t2
	srli	t1, t1, 3

	/* Add number of characters in the last word.  */
	add	a0, a0, t1
3:
2:
	ret

.option pop
+10 −10
Original line number Diff line number Diff line
@@ -70,7 +70,7 @@ strncmp_zbb:
	li	t5, -1
	and	t2, t2, SZREG-1
	add	t4, a0, a2
	bnez	t2, 4f
	bnez	t2, 3f

	/* Adjust limit for fast-path.  */
	andi	t6, t4, -SZREG
@@ -78,11 +78,13 @@ strncmp_zbb:
	/* Main loop for aligned string.  */
	.p2align 3
1:
	bgt	a0, t6, 3f
	bge	a0, t6, 3f
	REG_L	t0, 0(a0)
	REG_L	t1, 0(a1)
	orc.b	t3, t0
	bne	t3, t5, 2f
	orc.b	t3, t1
	bne	t3, t5, 2f
	addi	a0, a0, SZREG
	addi	a1, a1, SZREG
	beq	t0, t1, 1b
@@ -114,23 +116,21 @@ strncmp_zbb:
	ret

	/* Simple loop for misaligned strings.  */
3:
	/* Restore limit for slow-path.  */
	.p2align 3
4:
	bge	a0, t4, 6f
3:
	bge	a0, t4, 5f
	lbu	t0, 0(a0)
	lbu	t1, 0(a1)
	addi	a0, a0, 1
	addi	a1, a1, 1
	bne	t0, t1, 5f
	bnez	t0, 4b
	bne	t0, t1, 4f
	bnez	t0, 3b

5:
4:
	sub	a0, t0, t1
	ret

6:
5:
	li	a0, 0
	ret

+2 −5
Original line number Diff line number Diff line
@@ -436,11 +436,8 @@ static int pmu_sbi_event_map(struct perf_event *event, u64 *econfig)
		bSoftware = config >> 63;
		raw_config_val = config & RISCV_PMU_RAW_EVENT_MASK;
		if (bSoftware) {
			if (raw_config_val < SBI_PMU_FW_MAX)
			ret = (raw_config_val & 0xFFFF) |
				(SBI_PMU_EVENT_TYPE_FW << 16);
			else
				return -EINVAL;
		} else {
			ret = RISCV_PMU_RAW_EVENT_IDX;
			*econfig = raw_config_val;