git.baikalelectronics.ru Git - kernel.git/commitdiff
s390: introduce proper type handling call_on_stack() macro
author: Heiko Carstens <hca@linux.ibm.com>
Mon, 5 Jul 2021 18:16:10 +0000 (20:16 +0200)
committer: Vasily Gorbik <gor@linux.ibm.com>
Thu, 8 Jul 2021 20:12:17 +0000 (22:12 +0200)
The existing CALL_ON_STACK() macro allows for subtle bugs:

- There is no type checking of the function that is being called. That
  is: missing or too many arguments do not cause any compile error or
  warning. The same is true if the return type of the called function
  changes. This can lead to quite random bugs.

- Sign and zero extension of arguments is missing. Given that the s390
  C ABI requires that the caller of a function performs proper sign
  and zero extension this can also lead to subtle bugs.

- If arguments to the CALL_ON_STACK() macro contain function calls,
  register corruption can happen due to register asm constructs being
  used.

Therefore introduce a new call_on_stack() macro which is supposed to
fix all these problems.

Reviewed-by: Sven Schnelle <svens@linux.ibm.com>
Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
Signed-off-by: Vasily Gorbik <gor@linux.ibm.com>
arch/s390/include/asm/stacktrace.h

index 76c6034428be8d23cb60d60c9a6291d51370d7ff..b4d936580fbf93cfc6fde16e39d46d7740845768 100644 (file)
@@ -129,6 +129,103 @@ struct stack_frame {
        r2;                                                             \
 })
 
+#define CALL_LARGS_0(...)                                              \
+       long dummy = 0
+#define CALL_LARGS_1(t1, a1)                                           \
+       long arg1  = (long)(t1)(a1)
+#define CALL_LARGS_2(t1, a1, t2, a2)                                   \
+       CALL_LARGS_1(t1, a1);                                           \
+       long arg2 = (long)(t2)(a2)
+#define CALL_LARGS_3(t1, a1, t2, a2, t3, a3)                           \
+       CALL_LARGS_2(t1, a1, t2, a2);                                   \
+       long arg3 = (long)(t3)(a3)
+#define CALL_LARGS_4(t1, a1, t2, a2, t3, a3, t4, a4)                   \
+       CALL_LARGS_3(t1, a1, t2, a2, t3, a3);                           \
+       long arg4  = (long)(t4)(a4)
+#define CALL_LARGS_5(t1, a1, t2, a2, t3, a3, t4, a4, t5, a5)           \
+       CALL_LARGS_4(t1, a1, t2, a2, t3, a3, t4, a4);                   \
+       long arg5 = (long)(t5)(a5)
+
+#define CALL_REGS_0                                                    \
+       register long r2 asm("2") = dummy
+#define CALL_REGS_1                                                    \
+       register long r2 asm("2") = arg1
+#define CALL_REGS_2                                                    \
+       CALL_REGS_1;                                                    \
+       register long r3 asm("3") = arg2
+#define CALL_REGS_3                                                    \
+       CALL_REGS_2;                                                    \
+       register long r4 asm("4") = arg3
+#define CALL_REGS_4                                                    \
+       CALL_REGS_3;                                                    \
+       register long r5 asm("5") = arg4
+#define CALL_REGS_5                                                    \
+       CALL_REGS_4;                                                    \
+       register long r6 asm("6") = arg5
+
+#define CALL_TYPECHECK_0(...)
+#define CALL_TYPECHECK_1(t, a, ...)                                    \
+       typecheck(t, a)
+#define CALL_TYPECHECK_2(t, a, ...)                                    \
+       CALL_TYPECHECK_1(__VA_ARGS__);                                  \
+       typecheck(t, a)
+#define CALL_TYPECHECK_3(t, a, ...)                                    \
+       CALL_TYPECHECK_2(__VA_ARGS__);                                  \
+       typecheck(t, a)
+#define CALL_TYPECHECK_4(t, a, ...)                                    \
+       CALL_TYPECHECK_3(__VA_ARGS__);                                  \
+       typecheck(t, a)
+#define CALL_TYPECHECK_5(t, a, ...)                                    \
+       CALL_TYPECHECK_4(__VA_ARGS__);                                  \
+       typecheck(t, a)
+
+#define CALL_PARM_0(...) void
+#define CALL_PARM_1(t, a, ...) t
+#define CALL_PARM_2(t, a, ...) t, CALL_PARM_1(__VA_ARGS__)
+#define CALL_PARM_3(t, a, ...) t, CALL_PARM_2(__VA_ARGS__)
+#define CALL_PARM_4(t, a, ...) t, CALL_PARM_3(__VA_ARGS__)
+#define CALL_PARM_5(t, a, ...) t, CALL_PARM_4(__VA_ARGS__)
+#define CALL_PARM_6(t, a, ...) t, CALL_PARM_5(__VA_ARGS__)
+
+/*
+ * Use call_on_stack() to call a function switching to a specified
+ * stack. Proper sign and zero extension of function arguments is
+ * done. Usage:
+ *
+ * rc = call_on_stack(nr, stack, rettype, fn, t1, a1, t2, a2, ...)
+ *
+ * - nr specifies the number of function arguments of fn.
+ * - stack specifies the stack to be used.
+ * - fn is the function to be called.
+ * - rettype is the return type of fn.
+ * - t1, a1, ... are pairs, where t1 must match the type of the first
+ *   argument of fn, t2 the second, etc. a1 is the corresponding
+ *   first function argument (not name), etc.
+ */
+#define call_on_stack(nr, stack, rettype, fn, ...)                     \
+({                                                                     \
+       rettype (*__fn)(CALL_PARM_##nr(__VA_ARGS__)) = fn;              \
+       unsigned long frame = current_frame_address();                  \
+       unsigned long __stack = stack;                                  \
+       unsigned long prev;                                             \
+       CALL_LARGS_##nr(__VA_ARGS__);                                   \
+       CALL_REGS_##nr;                                                 \
+                                                                       \
+       CALL_TYPECHECK_##nr(__VA_ARGS__);                               \
+       asm volatile(                                                   \
+               "       lgr     %[_prev],15\n"                          \
+               "       lg      15,%[_stack]\n"                         \
+               "       stg     %[_frame],%[_bc](15)\n"                 \
+               "       brasl   14,%[_fn]\n"                            \
+               "       lgr     15,%[_prev]\n"                          \
+               : [_prev] "=&d" (prev), CALL_FMT_##nr                   \
+               : [_stack] "R" (__stack),                               \
+                 [_bc] "i" (offsetof(struct stack_frame, back_chain)), \
+                 [_frame] "d" (frame),                                 \
+                 [_fn] "X" (__fn) : CALL_CLOBBER_##nr);                \
+       (rettype)r2;                                                    \
+})
+
 #define CALL_ON_STACK_NORETURN(fn, stack)                              \
 ({                                                                     \
        asm volatile(                                                   \