--- /dev/null
+From fc585d4a5cf614727f64d86550b794bcad29d5c3 Mon Sep 17 00:00:00 2001
+From: Olof Johansson <olof@lixom.net>
+Date: Mon, 16 Dec 2019 20:06:31 -0800
+Subject: riscv: Less inefficient gcc tishift helpers (and export their symbols)
+
+From: Olof Johansson <olof@lixom.net>
+
+commit fc585d4a5cf614727f64d86550b794bcad29d5c3 upstream.
+
+The existing __lshrti3 was really inefficient, and the other two helpers
+are also needed to compile some modules.
+
+Add the missing versions, and export all of the symbols like arm64
+already does.
+
+This code is based on the assembly generated by libgcc builds.
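+
+For reference, a minimal C sketch of the semantics __lshrti3 below
+implements (a 128-bit logical right shift handled as low/high 64-bit
+halves, mirroring what libgcc generates); the u128/lshrti3_ref names are
+illustrative only, not part of the kernel:
+
+	typedef unsigned long long u64;
+
+	struct u128 { u64 lo, hi; };	/* a0 = lo, a1 = hi on RV64 */
+
+	/* Reference 128-bit logical right shift by n (0 <= n < 128). */
+	static struct u128 lshrti3_ref(struct u128 v, unsigned int n)
+	{
+		struct u128 r;
+
+		if (n == 0)
+			return v;	/* beqz a2, .L1: return operand unchanged */
+
+		if (n < 64) {
+			/* Bits shifted out of hi fill the top of lo. */
+			r.lo = (v.lo >> n) | (v.hi << (64 - n));
+			r.hi = v.hi >> n;
+		} else {
+			/* Shift of 64 or more: only bits from hi survive. */
+			r.lo = v.hi >> (n - 64);
+			r.hi = 0;
+		}
+		return r;
+	}
+
+__ashrti3 follows the same pattern with arithmetic shifts of the high
+half (sign-filling instead of zero-filling), and __ashlti3 is the mirror
+image for left shifts.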
+
+This fixes a build break triggered by ubsan:
+
+riscv64-unknown-linux-gnu-ld: lib/ubsan.o: in function `.L2':
+ubsan.c:(.text.unlikely+0x38): undefined reference to `__ashlti3'
+riscv64-unknown-linux-gnu-ld: ubsan.c:(.text.unlikely+0x42): undefined reference to `__ashrti3'
+
+Signed-off-by: Olof Johansson <olof@lixom.net>
+[paul.walmsley@sifive.com: use SYM_FUNC_{START,END} instead of
+ ENTRY/ENDPROC; note libgcc origin]
+Signed-off-by: Paul Walmsley <paul.walmsley@sifive.com>
+Cc: Kefeng Wang <wangkefeng.wang@huawei.com>
+Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
+
+---
+ arch/riscv/include/asm/asm-prototypes.h | 4 +
+ arch/riscv/lib/tishift.S | 75 ++++++++++++++++++++++++--------
+ 2 files changed, 61 insertions(+), 18 deletions(-)
+
+--- a/arch/riscv/include/asm/asm-prototypes.h
++++ b/arch/riscv/include/asm/asm-prototypes.h
+@@ -4,4 +4,8 @@
+ #include <linux/ftrace.h>
+ #include <asm-generic/asm-prototypes.h>
+
++long long __lshrti3(long long a, int b);
++long long __ashrti3(long long a, int b);
++long long __ashlti3(long long a, int b);
++
+ #endif /* _ASM_RISCV_PROTOTYPES_H */
+--- a/arch/riscv/lib/tishift.S
++++ b/arch/riscv/lib/tishift.S
+@@ -4,34 +4,73 @@
+ */
+
+ #include <linux/linkage.h>
++#include <asm-generic/export.h>
+
+-ENTRY(__lshrti3)
++SYM_FUNC_START(__lshrti3)
+ beqz a2, .L1
+ li a5,64
+ sub a5,a5,a2
+- addi sp,sp,-16
+ sext.w a4,a5
+ blez a5, .L2
+ sext.w a2,a2
+- sll a4,a1,a4
+ srl a0,a0,a2
+- srl a1,a1,a2
++ sll a4,a1,a4
++ srl a2,a1,a2
+ or a0,a0,a4
+- sd a1,8(sp)
+- sd a0,0(sp)
+- ld a0,0(sp)
+- ld a1,8(sp)
+- addi sp,sp,16
+- ret
++ mv a1,a2
+ .L1:
+ ret
+ .L2:
+- negw a4,a4
+- srl a1,a1,a4
+- sd a1,0(sp)
+- sd zero,8(sp)
+- ld a0,0(sp)
+- ld a1,8(sp)
+- addi sp,sp,16
++ negw a0,a4
++ li a2,0
++ srl a0,a1,a0
++ mv a1,a2
++ ret
++SYM_FUNC_END(__lshrti3)
++EXPORT_SYMBOL(__lshrti3)
++
++SYM_FUNC_START(__ashrti3)
++ beqz a2, .L3
++ li a5,64
++ sub a5,a5,a2
++ sext.w a4,a5
++ blez a5, .L4
++ sext.w a2,a2
++ srl a0,a0,a2
++ sll a4,a1,a4
++ sra a2,a1,a2
++ or a0,a0,a4
++ mv a1,a2
++.L3:
++ ret
++.L4:
++ negw a0,a4
++ srai a2,a1,0x3f
++ sra a0,a1,a0
++ mv a1,a2
++ ret
++SYM_FUNC_END(__ashrti3)
++EXPORT_SYMBOL(__ashrti3)
++
++SYM_FUNC_START(__ashlti3)
++ beqz a2, .L5
++ li a5,64
++ sub a5,a5,a2
++ sext.w a4,a5
++ blez a5, .L6
++ sext.w a2,a2
++ sll a1,a1,a2
++ srl a4,a0,a4
++ sll a2,a0,a2
++ or a1,a1,a4
++ mv a0,a2
++.L5:
++ ret
++.L6:
++ negw a1,a4
++ li a2,0
++ sll a1,a0,a1
++ mv a0,a2
+ ret
+-ENDPROC(__lshrti3)
++SYM_FUNC_END(__ashlti3)
++EXPORT_SYMBOL(__ashlti3)