git.baikalelectronics.ru Git - kernel.git/commitdiff
bitops: define const_*() versions of the non-atomics
author: Alexander Lobakin <alexandr.lobakin@intel.com>
Fri, 24 Jun 2022 12:13:08 +0000 (14:13 +0200)
committer: Yury Norov <yury.norov@gmail.com>
Fri, 1 Jul 2022 02:52:41 +0000 (19:52 -0700)
Define const_*() variants of the non-atomic bitops to be used when
the input arguments are compile-time constants, so that the compiler
will be always able to resolve those to compile-time constants as
well. Those are mostly direct aliases for generic_*() with one
exception for const_test_bit(): the original one is declared
atomic-safe and thus doesn't discard the `volatile` qualifier, so,
in order to let the compiler optimize the code, define it separately,
disregarding the qualifier.
Add them to the compile-time type checks as well just in case.

Suggested-by: Marco Elver <elver@google.com>
Signed-off-by: Alexander Lobakin <alexandr.lobakin@intel.com>
Reviewed-by: Marco Elver <elver@google.com>
Reviewed-by: Andy Shevchenko <andriy.shevchenko@linux.intel.com>
Signed-off-by: Yury Norov <yury.norov@gmail.com>
include/asm-generic/bitops/generic-non-atomic.h
include/linux/bitops.h

index b85b8a2ac239ee15299c21a7464d895a3c2e1def..3d5ebd24652b9ff2412c45fd829a5fe8b9308e0e 100644 (file)
@@ -127,4 +127,35 @@ generic_test_bit(unsigned long nr, const volatile unsigned long *addr)
        return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
 }
 
+/*
+ * const_*() definitions provide good compile-time optimizations when
+ * the passed arguments can be resolved at compile time.
+ */
+#define const___set_bit                        generic___set_bit
+#define const___clear_bit              generic___clear_bit
+#define const___change_bit             generic___change_bit
+#define const___test_and_set_bit       generic___test_and_set_bit
+#define const___test_and_clear_bit     generic___test_and_clear_bit
+#define const___test_and_change_bit    generic___test_and_change_bit
+
+/**
+ * const_test_bit - Determine whether a bit is set
+ * @nr: bit number to test
+ * @addr: Address to start counting from
+ *
+ * A version of generic_test_bit() which discards the `volatile` qualifier to
+ * allow a compiler to optimize code harder. Non-atomic and to be called only
+ * for testing compile-time constants, e.g. by the corresponding macros, not
+ * directly from "regular" code.
+ */
+static __always_inline bool
+const_test_bit(unsigned long nr, const volatile unsigned long *addr)
+{
+       const unsigned long *p = (const unsigned long *)addr + BIT_WORD(nr);
+       unsigned long mask = BIT_MASK(nr);
+       unsigned long val = *p;
+
+       return !!(val & mask);
+}
+
 #endif /* __ASM_GENERIC_BITOPS_GENERIC_NON_ATOMIC_H */
index 87087454a2883523ac316f84fa30bebd92104472..d393297287d5292cede0e94e84fa6551d1ef84d1 100644 (file)
@@ -37,6 +37,7 @@ extern unsigned long __sw_hweight64(__u64 w);
 /* Check that the bitops prototypes are sane */
 #define __check_bitop_pr(name)                                         \
        static_assert(__same_type(arch_##name, generic_##name) &&       \
+                     __same_type(const_##name, generic_##name) &&      \
                      __same_type(name, generic_##name))
 
 __check_bitop_pr(__set_bit);