Subject: [PATCH 5.9 372/391] arm64: Change .weak to SYM_FUNC_START_WEAK_PI for arch/arm64/lib/mem*.S
Date: 2020-11-03
    From: Fangrui Song <maskray@google.com>

    commit ec9d78070de986ecf581ea204fd322af4d2477ec upstream.

    Commit 39d114ddc682 ("arm64: add KASAN support") added .weak directives to
    arch/arm64/lib/mem*.S instead of changing the existing SYM_FUNC_START_PI
    macros. This can lead to the assembly snippet `.weak memcpy ... .globl
    memcpy` which will produce a STB_WEAK memcpy with GNU as but STB_GLOBAL
    memcpy with LLVM's integrated assembler before LLVM 12. LLVM 12 (since
    https://reviews.llvm.org/D90108) will error on such an overridden symbol
    binding.
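
    A minimal standalone reproducer (not part of this patch; the file
    name repro.s is arbitrary) shows the divergence:

    	.weak memcpy
    	.globl memcpy
    memcpy:
    	ret

    GNU as keeps the WEAK binding, while LLVM's integrated assembler
    before LLVM 12 lets the later .globl win, and LLVM 12 rejects the
    input with an error about the changed binding:

    $ as repro.s -o repro.o && readelf -s repro.o | grep memcpy  # WEAK with GNU as
    $ clang -c repro.s   # GLOBAL before LLVM 12; error on LLVM >= 12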

    Use the appropriate SYM_FUNC_START_WEAK_PI instead.
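
    For reference, a rough sketch of the _PI macros in
    arch/arm64/include/asm/linkage.h (paraphrased; the exact
    definitions in a given tree may differ):

    #define SYM_FUNC_START_PI(x)			\
    		SYM_FUNC_START_ALIAS(__pi_##x)	\
    		SYM_FUNC_START(x)

    #define SYM_FUNC_START_WEAK_PI(x)		\
    		SYM_FUNC_START_ALIAS(__pi_##x)	\
    		SYM_FUNC_START_WEAK(x)

    SYM_FUNC_START_WEAK(x) emits .weak x as the only binding directive
    for x, whereas SYM_FUNC_START(x) emits .globl x, so the conflicting
    .weak/.globl pair never arises.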

    Fixes: 39d114ddc682 ("arm64: add KASAN support")
    Reported-by: Sami Tolvanen <samitolvanen@google.com>
    Signed-off-by: Fangrui Song <maskray@google.com>
    Tested-by: Sami Tolvanen <samitolvanen@google.com>
    Tested-by: Nick Desaulniers <ndesaulniers@google.com>
    Reviewed-by: Nick Desaulniers <ndesaulniers@google.com>
    Cc: <stable@vger.kernel.org>
    Link: https://lore.kernel.org/r/20201029181951.1866093-1-maskray@google.com
    Signed-off-by: Will Deacon <will@kernel.org>
    Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>

    ---
 arch/arm64/lib/memcpy.S  | 3 +--
 arch/arm64/lib/memmove.S | 3 +--
 arch/arm64/lib/memset.S  | 3 +--
 3 files changed, 3 insertions(+), 6 deletions(-)

--- a/arch/arm64/lib/memcpy.S
+++ b/arch/arm64/lib/memcpy.S
@@ -56,9 +56,8 @@
 	stp \reg1, \reg2, [\ptr], \val
 	.endm

-	.weak memcpy
 SYM_FUNC_START_ALIAS(__memcpy)
-SYM_FUNC_START_PI(memcpy)
+SYM_FUNC_START_WEAK_PI(memcpy)
 #include "copy_template.S"
 	ret
 SYM_FUNC_END_PI(memcpy)
--- a/arch/arm64/lib/memmove.S
+++ b/arch/arm64/lib/memmove.S
@@ -45,9 +45,8 @@ C_h	.req	x12
 D_l	.req	x13
 D_h	.req	x14

-	.weak memmove
 SYM_FUNC_START_ALIAS(__memmove)
-SYM_FUNC_START_PI(memmove)
+SYM_FUNC_START_WEAK_PI(memmove)
 	cmp	dstin, src
 	b.lo	__memcpy
 	add	tmp1, src, count
--- a/arch/arm64/lib/memset.S
+++ b/arch/arm64/lib/memset.S
@@ -42,9 +42,8 @@ dst	.req	x8
 tmp3w	.req	w9
 tmp3	.req	x9

-	.weak memset
 SYM_FUNC_START_ALIAS(__memset)
-SYM_FUNC_START_PI(memset)
+SYM_FUNC_START_WEAK_PI(memset)
 	mov	dst, dstin	/* Preserve return value. */
 	and	A_lw, val, #255
 	orr	A_lw, A_lw, A_lw, lsl #8
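
After the change, one way to sanity-check the result (a sketch; the
object path depends on the build tree) is to inspect the symbol table
of the built object with either toolchain:

    $ llvm-readelf -s arch/arm64/lib/memcpy.o | grep -i memcpy

Both assemblers should now agree: memcpy is WEAK (so KASAN's
interceptor can override it), while __memcpy and __pi_memcpy stay
GLOBAL.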
