sched: Harden PREEMPT_DYNAMIC
author    Peter Zijlstra <peterz@infradead.org>
          Mon, 25 Jan 2021 15:26:50 +0000 (16:26 +0100)
committer Ingo Molnar <mingo@kernel.org>
          Wed, 17 Feb 2021 13:12:42 +0000 (14:12 +0100)
Use the new EXPORT_STATIC_CALL_TRAMP() / static_call_mod() to unexport
the static_call_key for the PREEMPT_DYNAMIC calls such that modules
can no longer update these calls.

Having modules change/hijack the preemption calls would be horrible.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
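
For context, a condensed sketch of how the trampoline-only export and
static_call_mod() fit together. This is simplified from the static_call
machinery of that era (GPL variants and the CONFIG_HAVE_STATIC_CALL_INLINE
details are omitted), not the verbatim kernel definitions:

/* Exports both the key and the trampoline: modules can emit the call
 * and can also retarget it via static_call_update(). */
#define EXPORT_STATIC_CALL(name)					\
	EXPORT_SYMBOL(STATIC_CALL_KEY(name));				\
	EXPORT_SYMBOL(STATIC_CALL_TRAMP(name))

/* Exports only the trampoline: modules can still emit the call, but
 * static_call_update() from a module cannot resolve the (unexported)
 * key, so the call target cannot be changed from module code. */
#define EXPORT_STATIC_CALL_TRAMP(name)					\
	EXPORT_SYMBOL(STATIC_CALL_TRAMP(name))

/* static_call_mod(): like static_call(), but when built as part of a
 * module it goes through the trampoline without emitting a reference
 * to the key, so no undefined symbol for the unexported key appears. */
#ifdef MODULE
#define static_call_mod(name)	__raw_static_call(name)	/* trampoline only */
#else
#define static_call_mod(name)	__static_call(name)	/* key stays addressable */
#endif

The diff below switches the PREEMPT_DYNAMIC call sites to exactly this
pattern: the call sites use static_call_mod() (or, on x86, the
__STATIC_CALL_MOD_ADDRESSABLE + trampoline asm call), and the definitions
in kernel/sched/core.c export only the trampoline.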
arch/x86/include/asm/preempt.h
include/linux/kernel.h
include/linux/sched.h
kernel/sched/core.c

index 9b12dce9bda551ff6f34e6bc09223fd5606d4b09..0aa96f824af1639948d090f7c09c86cbfe572f9a 100644 (file)
@@ -114,7 +114,7 @@ DECLARE_STATIC_CALL(preempt_schedule, __preempt_schedule_func);
 
 #define __preempt_schedule() \
 do { \
-       __ADDRESSABLE(STATIC_CALL_KEY(preempt_schedule)); \
+       __STATIC_CALL_MOD_ADDRESSABLE(preempt_schedule); \
        asm volatile ("call " STATIC_CALL_TRAMP_STR(preempt_schedule) : ASM_CALL_CONSTRAINT); \
 } while (0)
 
@@ -127,7 +127,7 @@ DECLARE_STATIC_CALL(preempt_schedule_notrace, __preempt_schedule_notrace_func);
 
 #define __preempt_schedule_notrace() \
 do { \
-       __ADDRESSABLE(STATIC_CALL_KEY(preempt_schedule_notrace)); \
+       __STATIC_CALL_MOD_ADDRESSABLE(preempt_schedule_notrace); \
        asm volatile ("call " STATIC_CALL_TRAMP_STR(preempt_schedule_notrace) : ASM_CALL_CONSTRAINT); \
 } while (0)
 
index cfd3d349f905eae78a7abd927741eb70015a88c5..5b7ed6dc99accf89059190f01f73adbfa13287ca 100644 (file)
@@ -93,7 +93,7 @@ DECLARE_STATIC_CALL(might_resched, __cond_resched);
 
 static __always_inline void might_resched(void)
 {
-       static_call(might_resched)();
+       static_call_mod(might_resched)();
 }
 
 #else
index 2f35594b8b5353f26c4adcb53533433b91fb9bc3..4d568288abf9f66c85cf96297f076b94af962a07 100644 (file)
@@ -1880,7 +1880,7 @@ DECLARE_STATIC_CALL(cond_resched, __cond_resched);
 
 static __always_inline int _cond_resched(void)
 {
-       return static_call(cond_resched)();
+       return static_call_mod(cond_resched)();
 }
 
 #else
index 4a17bb5f28b0f359a4063da31a09d12ae35e63a5..cec507be460c38837d0c36e0fcd1aff095326b43 100644 (file)
@@ -5267,7 +5267,7 @@ EXPORT_SYMBOL(preempt_schedule);
 
 #ifdef CONFIG_PREEMPT_DYNAMIC
 DEFINE_STATIC_CALL(preempt_schedule, __preempt_schedule_func);
-EXPORT_STATIC_CALL(preempt_schedule);
+EXPORT_STATIC_CALL_TRAMP(preempt_schedule);
 #endif
 
 
@@ -5325,7 +5325,7 @@ EXPORT_SYMBOL_GPL(preempt_schedule_notrace);
 
 #ifdef CONFIG_PREEMPT_DYNAMIC
 DEFINE_STATIC_CALL(preempt_schedule_notrace, __preempt_schedule_notrace_func);
-EXPORT_STATIC_CALL(preempt_schedule_notrace);
+EXPORT_STATIC_CALL_TRAMP(preempt_schedule_notrace);
 #endif
 
 #endif /* CONFIG_PREEMPTION */
@@ -6997,10 +6997,10 @@ EXPORT_SYMBOL(__cond_resched);
 
 #ifdef CONFIG_PREEMPT_DYNAMIC
 DEFINE_STATIC_CALL_RET0(cond_resched, __cond_resched);
-EXPORT_STATIC_CALL(cond_resched);
+EXPORT_STATIC_CALL_TRAMP(cond_resched);
 
 DEFINE_STATIC_CALL_RET0(might_resched, __cond_resched);
-EXPORT_STATIC_CALL(might_resched);
+EXPORT_STATIC_CALL_TRAMP(might_resched);
 #endif
 
 /*