Documentation/filesystems/proc.txt  (+3 −1)

--- a/Documentation/filesystems/proc.txt
+++ b/Documentation/filesystems/proc.txt
@@ -494,7 +494,9 @@ manner. The codes are the following:
 
 Note that there is no guarantee that every flag and associated mnemonic will
 be present in all further kernel releases. Things get changed, the flags may
-be vanished or the reverse -- new added.
+be vanished or the reverse -- new added. Interpretation of their meaning
+might change in future as well. So each consumer of these flags has to
+follow each specific kernel version for the exact semantic.
 
 This file is only present if the CONFIG_MMU kernel configuration option is
 enabled.
Makefile  (+1 −1)

--- a/Makefile
+++ b/Makefile
@@ -1,7 +1,7 @@
 # SPDX-License-Identifier: GPL-2.0
 VERSION = 4
 PATCHLEVEL = 14
-SUBLEVEL = 95
+SUBLEVEL = 97
 EXTRAVERSION =
 NAME = Petit Gorille
 
arch/arc/include/asm/perf_event.h  (+2 −1)

--- a/arch/arc/include/asm/perf_event.h
+++ b/arch/arc/include/asm/perf_event.h
@@ -103,7 +103,8 @@ static const char * const arc_pmu_ev_hw_map[] = {
 
         /* counts condition */
         [PERF_COUNT_HW_INSTRUCTIONS] = "iall",
-        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = "ijmp", /* Excludes ZOL jumps */
+        /* All jump instructions that are taken */
+        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = "ijmptak",
         [PERF_COUNT_ARC_BPOK] = "bpok", /* NP-NT, PT-T, PNT-NT */
 #ifdef CONFIG_ISA_ARCV2
         [PERF_COUNT_HW_BRANCH_MISSES] = "bpmp",
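Note on the remap: "ijmp" counted all jump instructions (excluding ZOL jumps), while "ijmptak" counts only jumps that are actually taken, which better lines up PERF_COUNT_HW_BRANCH_INSTRUCTIONS with the mispredict counter "bpmp" below it. As a rough illustration of how a name table like arc_pmu_ev_hw_map[] gets consumed, here is a minimal userspace C sketch that string-matches a hardware condition name back to a generic perf event id; map_hw_condition() is a hypothetical helper for illustration, not the kernel's actual probe code.

#include <stdio.h>
#include <string.h>

#define PERF_COUNT_HW_BRANCH_INSTRUCTIONS 4     /* value from the perf UAPI enum */

static const char * const arc_pmu_ev_hw_map[] = {
        /* All jump instructions that are taken */
        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = "ijmptak",
};

/* Hypothetical helper: resolve a hardware condition name to a generic event id */
static int map_hw_condition(const char *cc_name)
{
        int i;

        for (i = 0; i < (int)(sizeof(arc_pmu_ev_hw_map) /
                              sizeof(arc_pmu_ev_hw_map[0])); i++) {
                if (arc_pmu_ev_hw_map[i] && !strcmp(arc_pmu_ev_hw_map[i], cc_name))
                        return i;
        }
        return -1;      /* condition name not mapped to any generic event */
}

int main(void)
{
        printf("ijmptak -> %d\n", map_hw_condition("ijmptak"));  /* 4 */
        printf("ijmp    -> %d\n", map_hw_condition("ijmp"));     /* -1 after this change */
        return 0;
}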
arch/arc/lib/memset-archs.S  (+32 −8)

--- a/arch/arc/lib/memset-archs.S
+++ b/arch/arc/lib/memset-archs.S
@@ -7,11 +7,39 @@
  */
 
 #include <linux/linkage.h>
+#include <asm/cache.h>
 
-#undef PREALLOC_NOT_AVAIL
+/*
+ * The memset implementation below is optimized to use prefetchw and prealloc
+ * instruction in case of CPU with 64B L1 data cache line (L1_CACHE_SHIFT == 6)
+ * If you want to implement optimized memset for other possible L1 data cache
+ * line lengths (32B and 128B) you should rewrite code carefully checking
+ * we don't call any prefetchw/prealloc instruction for L1 cache lines which
+ * don't belongs to memset area.
+ */
+
+#if L1_CACHE_SHIFT == 6
+
+.macro PREALLOC_INSTR   reg, off
+        prealloc        [\reg, \off]
+.endm
+
+.macro PREFETCHW_INSTR  reg, off
+        prefetchw       [\reg, \off]
+.endm
+
+#else
+
+.macro PREALLOC_INSTR
+.endm
+
+.macro PREFETCHW_INSTR
+.endm
+
+#endif
 
 ENTRY_CFI(memset)
-        prefetchw [r0]          ; Prefetch the write location
+        PREFETCHW_INSTR r0, 0   ; Prefetch the first write location
         mov.f   0, r2
 ;;; if size is zero
         jz.d    [blink]
@@ -48,11 +76,8 @@ ENTRY_CFI(memset)
 
         lpnz    @.Lset64bytes
         ;; LOOP START
-#ifdef PREALLOC_NOT_AVAIL
-        prefetchw [r3, 64]      ;Prefetch the next write location
-#else
-        prealloc  [r3, 64]
-#endif
+        PREALLOC_INSTR  r3, 64  ; alloc next line w/o fetching
+
 #ifdef CONFIG_ARC_HAS_LL64
         std.ab  r4, [r3, 8]
         std.ab  r4, [r3, 8]
@@ -85,7 +110,6 @@ ENTRY_CFI(memset)
         lsr.f   lp_count, r2, 5 ;Last remaining max 124 bytes
         lpnz    .Lset32bytes
         ;; LOOP START
-        prefetchw   [r3, 32]    ;Prefetch the next write location
 #ifdef CONFIG_ARC_HAS_LL64
         std.ab  r4, [r3, 8]
         std.ab  r4, [r3, 8]
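The point of the new PREALLOC_INSTR / PREFETCHW_INSTR macros is the rule spelled out in the added comment: prealloc allocates a cache line without fetching its previous contents, so issuing it (or prefetchw) for a line outside the memset destination can clobber unrelated memory; that is also why the stray prefetchw in the 32-byte tail loop is removed. Below is a minimal C sketch of that bounds invariant, under the assumption of a 64B line; since C has no prealloc, __builtin_prefetch (a GCC/Clang builtin) stands in for the write hint.

#include <stddef.h>

#define L1_LINE 64      /* line size the optimized path assumes */

static void memset_sketch(unsigned char *buf, unsigned char v, size_t n)
{
        unsigned char *p = buf;
        unsigned char *end = buf + n;
        size_t i;

        while ((size_t)(end - p) >= L1_LINE) {
                /*
                 * Hint the *next* line only while it still lies entirely
                 * inside [buf, buf + n); a prealloc-style hint past the
                 * buffer could destroy that line's previous contents.
                 */
                if ((size_t)(end - p) >= 2 * L1_LINE)
                        __builtin_prefetch(p + L1_LINE, 1 /* prepare for write */);
                for (i = 0; i < L1_LINE; i++)
                        p[i] = v;
                p += L1_LINE;
        }
        while (p < end)         /* tail bytes, no prefetch at all */
                *p++ = v;
}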
arch/arc/mm/init.c  (+2 −1)

--- a/arch/arc/mm/init.c
+++ b/arch/arc/mm/init.c
@@ -138,7 +138,8 @@ void __init setup_arch_memory(void)
          */
 
         memblock_add_node(low_mem_start, low_mem_sz, 0);
-        memblock_reserve(low_mem_start, __pa(_end) - low_mem_start);
+        memblock_reserve(CONFIG_LINUX_LINK_BASE,
+                         __pa(_end) - CONFIG_LINUX_LINK_BASE);
 
 #ifdef CONFIG_BLK_DEV_INITRD
         if (initrd_start)
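The old call reserved everything from low_mem_start up to __pa(_end); when the kernel image is not linked at the very start of low memory, that range is not what the image actually occupies, and the fix reserves exactly [CONFIG_LINUX_LINK_BASE, __pa(_end)). A tiny arithmetic sketch of the difference, using made-up addresses (all three values below are hypothetical, chosen only to illustrate a layout where the image sits 1 MiB above the start of low memory):

#include <stdio.h>

int main(void)
{
        unsigned long low_mem_start = 0x80000000UL;  /* hypothetical DDR base        */
        unsigned long link_base     = 0x80100000UL;  /* hypothetical LINUX_LINK_BASE */
        unsigned long pa_end        = 0x80800000UL;  /* hypothetical __pa(_end)      */

        /* Old: 8 MiB reserved, including 1 MiB of free RAM below the image */
        printf("old: %lu KiB at 0x%lx\n", (pa_end - low_mem_start) >> 10, low_mem_start);

        /* New: only the 7 MiB actually occupied by the kernel image */
        printf("new: %lu KiB at 0x%lx\n", (pa_end - link_base) >> 10, link_base);
        return 0;
}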