s390/bitops: remove small optimization to fix clang build
clang does not know about the '%b1' operand-modifier construct used in the
bitops inline assembly. Since the plan is to switch to compiler atomic
builtins anyway, there is no point in requesting clang support for this,
especially since the kernel seems to be the only user of the construct.
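
As an aside, a minimal sketch of what the compiler-atomic-builtins direction
could look like is shown below; the helper name, the word/bit arithmetic and
the memory order are illustrative assumptions and not part of this commit:

    /*
     * Illustrative sketch only, not taken from the kernel: a set_bit()
     * style helper written with the __atomic_or_fetch() builtin that
     * both gcc and clang provide, instead of hand-written inline asm.
     */
    #define SKETCH_BITS_PER_LONG (8 * sizeof(unsigned long))

    static inline void sketch_set_bit(unsigned long nr, unsigned long *ptr)
    {
            unsigned long *addr = ptr + (nr / SKETCH_BITS_PER_LONG);
            unsigned long mask = 1UL << (nr % SKETCH_BITS_PER_LONG);

            __atomic_or_fetch(addr, mask, __ATOMIC_SEQ_CST);
    }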

Removing this small optimization makes it possible to compile the kernel
with -march=zEC12 and higher using clang as well.

Build error:

In file included from ./include/linux/bitops.h:32:
./arch/s390/include/asm/bitops.h:69:4: error: invalid operand in inline asm: 'oi        $0,${1:b}'
                        "oi     %0,%b1\n"
                        ^

Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
Signed-off-by: Vasily Gorbik <gor@linux.ibm.com>
Heiko Carstens authored and Vasily Gorbik committed Jan 19, 2021
1 parent 6110cce commit efe5e0f
Showing 1 changed file with 0 additions and 36 deletions.

--- a/arch/s390/include/asm/bitops.h
+++ b/arch/s390/include/asm/bitops.h
@@ -61,18 +61,6 @@ static __always_inline void arch_set_bit(unsigned long nr, volatile unsigned lon
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long mask;
 
-#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
-	if (__builtin_constant_p(nr)) {
-		unsigned char *caddr = __bitops_byte(nr, ptr);
-
-		asm volatile(
-			"oi	%0,%b1\n"
-			: "+Q" (*caddr)
-			: "i" (1 << (nr & 7))
-			: "cc", "memory");
-		return;
-	}
-#endif
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
 	__atomic64_or(mask, (long *)addr);
 }
@@ -82,18 +70,6 @@ static __always_inline void arch_clear_bit(unsigned long nr, volatile unsigned l
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long mask;
 
-#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
-	if (__builtin_constant_p(nr)) {
-		unsigned char *caddr = __bitops_byte(nr, ptr);
-
-		asm volatile(
-			"ni	%0,%b1\n"
-			: "+Q" (*caddr)
-			: "i" (~(1 << (nr & 7)))
-			: "cc", "memory");
-		return;
-	}
-#endif
 	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
 	__atomic64_and(mask, (long *)addr);
 }
@@ -104,18 +80,6 @@ static __always_inline void arch_change_bit(unsigned long nr,
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long mask;
 
-#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
-	if (__builtin_constant_p(nr)) {
-		unsigned char *caddr = __bitops_byte(nr, ptr);
-
-		asm volatile(
-			"xi	%0,%b1\n"
-			: "+Q" (*caddr)
-			: "i" (1 << (nr & 7))
-			: "cc", "memory");
-		return;
-	}
-#endif
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
 	__atomic64_xor(mask, (long *)addr);
 }
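
After the change, each of the three helpers simply falls through to the
interlocked-update path. For example, arch_set_bit() as left by this patch,
reassembled from the context lines of the first hunk above (with the
truncated prototype completed with the usual volatile unsigned long *
argument):

    static __always_inline void arch_set_bit(unsigned long nr,
                                              volatile unsigned long *ptr)
    {
            unsigned long *addr = __bitops_word(nr, ptr);
            unsigned long mask;

            mask = 1UL << (nr & (BITS_PER_LONG - 1));
            __atomic64_or(mask, (long *)addr);
    }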
