#define _ASM_S390_ARCH_HWEIGHT_H
#include <linux/types.h>
+#include <asm/march.h>
static __always_inline unsigned long popcnt_z196(unsigned long w)
{
static __always_inline unsigned long __arch_hweight64(__u64 w)
{
- if (IS_ENABLED(CONFIG_HAVE_MARCH_Z15_FEATURES))
+ if (__is_defined(MARCH_HAS_Z15_FEATURES))
return popcnt_z15(w);
- if (IS_ENABLED(CONFIG_HAVE_MARCH_Z196_FEATURES)) {
+ if (__is_defined(MARCH_HAS_Z196_FEATURES)) {
w = popcnt_z196(w);
w += w >> 32;
w += w >> 16;
static __always_inline unsigned int __arch_hweight32(unsigned int w)
{
- if (IS_ENABLED(CONFIG_HAVE_MARCH_Z15_FEATURES))
+ if (__is_defined(MARCH_HAS_Z15_FEATURES))
return popcnt_z15(w);
- if (IS_ENABLED(CONFIG_HAVE_MARCH_Z196_FEATURES)) {
+ if (__is_defined(MARCH_HAS_Z196_FEATURES)) {
w = popcnt_z196(w);
w += w >> 16;
w += w >> 8;
static __always_inline unsigned int __arch_hweight16(unsigned int w)
{
- if (IS_ENABLED(CONFIG_HAVE_MARCH_Z15_FEATURES))
+ if (__is_defined(MARCH_HAS_Z15_FEATURES))
return popcnt_z15((unsigned short)w);
- if (IS_ENABLED(CONFIG_HAVE_MARCH_Z196_FEATURES)) {
+ if (__is_defined(MARCH_HAS_Z196_FEATURES)) {
w = popcnt_z196(w);
w += w >> 8;
return w & 0xff;
static __always_inline unsigned int __arch_hweight8(unsigned int w)
{
- if (IS_ENABLED(CONFIG_HAVE_MARCH_Z196_FEATURES))
+ if (__is_defined(MARCH_HAS_Z196_FEATURES))
return popcnt_z196((unsigned char)w);
return __sw_hweight8(w);
}
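The shift-and-add folding after popcnt_z196() is there because the z196 POPCNT instruction returns a separate population count in each byte of the result, while the z15 form used by popcnt_z15() already delivers the total. The folds sum the eight byte counts into the low byte; the 32-, 16- and 8-bit helpers do the same, just starting fewer steps up. A standalone sketch of that folding on a plain 64-bit value (hypothetical helper name; assumes every input byte holds a count of at most 8, so no fold can carry across a byte boundary):

#include <stdint.h>

/*
 * Sum eight per-byte counts into one total, mirroring the folding done
 * after popcnt_z196(); illustrative sketch, not the kernel helper.
 */
static inline unsigned int fold_byte_counts(uint64_t w)
{
	w += w >> 32;	/* byte i += byte i+4: each byte now holds <= 16 */
	w += w >> 16;	/* fold halfwords: <= 32 per byte */
	w += w >> 8;	/* fold bytes: the total (<= 64) lands in byte 0 */
	return w & 0xff;
}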
#define __ARCH_S390_ATOMIC_OPS__
#include <linux/limits.h>
+#include <asm/march.h>
static __always_inline int __atomic_read(const atomic_t *v)
{
}
}
-#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
+#ifdef MARCH_HAS_Z196_FEATURES
#define __ATOMIC_OP(op_name, op_type, op_string, op_barrier) \
static __always_inline op_type op_name(op_type val, op_type *ptr) \
#undef __ATOMIC_CONST_OPS
#undef __ATOMIC_CONST_OP
-#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
+#else /* MARCH_HAS_Z196_FEATURES */
#define __ATOMIC_OP(op_name, op_string) \
static __always_inline int op_name(int val, int *ptr) \
#define __atomic64_add_const(val, ptr) __atomic64_add(val, ptr)
#define __atomic64_add_const_barrier(val, ptr) __atomic64_add(val, ptr)
-#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
+#endif /* MARCH_HAS_Z196_FEATURES */
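With MARCH_HAS_Z196_FEATURES defined, the __ATOMIC_OP macro can emit the interlocked-access facility instructions (the load-and-add/and/or/xor family) instead of the compare-and-swap retry loops generated in the #else branch, which is also why the *_add_const variants there simply alias the plain add. A hedged sketch of the kind of wrapper such a macro expands to, using the z196 "laa" (load and add) instruction; the helper name and operand constraints are illustrative, not the kernel's exact expansion:

/*
 * Atomically add val to *ptr and return the previous value via the
 * interlocked "laa" instruction; illustrative sketch only.
 */
static inline int atomic_add_return_old_sketch(int val, int *ptr)
{
	int old;

	asm volatile("laa	%[old],%[val],%[ptr]\n"
		     : [old] "=d" (old), [ptr] "+Q" (*ptr)
		     : [val] "d" (val)
		     : "cc", "memory");
	return old;
}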
static __always_inline int __atomic_cmpxchg(int *ptr, int old, int new)
{
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H
+#include <asm/march.h>
+
/*
* Force strict CPU ordering.
* And yes, this is required on UP too when we're talking
* to devices.
*/
-#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
+#ifdef MARCH_HAS_Z196_FEATURES
/* Fast-BCR without checkpoint synchronization */
#define __ASM_BCR_SERIALIZE "bcr 14,0\n"
#else
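The barrier header only has to pick the serializing instruction: from z196 on, "bcr 14,0" serializes execution without the slower checkpoint synchronization, while the fallback branch (cut off here) presumably keeps the classic fully serializing form. A minimal sketch of how such a macro is typically consumed, with a hypothetical helper name; the real header wires it into the kernel's mb()/rmb()/wmb() definitions:

/*
 * Emit the BCR selected by __ASM_BCR_SERIALIZE; the "memory" clobber
 * also makes this a compiler barrier. Illustrative sketch only.
 */
static inline void bcr_serialize_sketch(void)
{
	asm volatile(__ASM_BCR_SERIALIZE : : : "memory");
}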
#include <linux/preempt.h>
#include <asm/cmpxchg.h>
+#include <asm/march.h>
/*
* s390 uses its own implementation for per cpu data, the offset of
#define this_cpu_or_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)
-#ifndef CONFIG_HAVE_MARCH_Z196_FEATURES
+#ifndef MARCH_HAS_Z196_FEATURES
#define this_cpu_add_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_or_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)
-#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
+#else /* MARCH_HAS_Z196_FEATURES */
#define arch_this_cpu_add(pcp, val, op1, op2, szcast) \
{ \
#define this_cpu_or_4(pcp, val) arch_this_cpu_to_op(pcp, val, "lao")
#define this_cpu_or_8(pcp, val) arch_this_cpu_to_op(pcp, val, "laog")
-#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
+#endif /* MARCH_HAS_Z196_FEATURES */
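The per-cpu header makes the same split: with the interlocked-access instructions available, the 4- and 8-byte add/and/or ops map onto single instructions such as "lao"/"laog" above; without them, everything funnels through arch_this_cpu_to_op_simple(), which pins the task to its CPU by disabling preemption and then retries a compare-and-swap until the update lands, so the op also stays safe against interrupts. A hedged sketch of that fallback pattern on a raw pointer (illustrative helper; the real macro also resolves the per-cpu address itself):

#include <linux/atomic.h>
#include <linux/compiler.h>
#include <linux/preempt.h>

/*
 * Non-interlocked fallback pattern: stay on this CPU and cmpxchg-retry
 * the update. Illustrative sketch, not arch_this_cpu_to_op_simple().
 */
static inline int this_cpu_add_simple_sketch(int *pcp_slot, int val)
{
	int old, new;

	preempt_disable();
	do {
		old = READ_ONCE(*pcp_slot);
		new = old + val;
	} while (cmpxchg(pcp_slot, old, new) != old);
	preempt_enable();
	return new;
}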
#define arch_this_cpu_cmpxchg(pcp, oval, nval) \
({ \
#include <asm/current.h>
#include <linux/thread_info.h>
#include <asm/atomic_ops.h>
+#include <asm/march.h>
-#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
+#ifdef MARCH_HAS_Z196_FEATURES
/* We use the MSB mostly because it's available */
#define PREEMPT_NEED_RESCHED 0x80000000
preempt_offset);
}
-#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
+#else /* MARCH_HAS_Z196_FEATURES */
#define PREEMPT_ENABLED (0)
tif_need_resched());
}
-#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */
+#endif /* MARCH_HAS_Z196_FEATURES */
#define init_task_preempt_count(p) do { } while (0)
/* Deferred to CPU bringup time */
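On z196 and later the preempt count keeps the "need reschedule" state folded into its MSB with inverted polarity (the same convention other architectures use): PREEMPT_NEED_RESCHED stays set while no reschedule is pending and is cleared when one is requested, so "preempt count is zero and a reschedule is wanted" collapses into a single compare of the raw word against zero. The #else branch drops the folding and tests tif_need_resched() separately. A sketch of the combined check with illustrative names; the real code reads the count from lowcore:

#define NEED_RESCHED_FLAG	0x80000000u	/* inverted: set means no resched pending */

/*
 * The raw word is zero only when the preempt count proper is zero AND
 * the inverted flag has been cleared by a reschedule request.
 * Illustrative sketch, not the kernel's should_resched().
 */
static inline int should_resched_sketch(unsigned int raw_count)
{
	return raw_count == 0;
}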
#include <asm/ftrace.h>
#include <asm/nospec-insn.h>
#include <asm/ptrace.h>
+#include <asm/march.h>
#define STACK_FRAME_SIZE_PTREGS (STACK_FRAME_OVERHEAD + __PT_SIZE)
#define STACK_PTREGS (STACK_FRAME_OVERHEAD)
SYM_CODE_END(ftrace_caller)
SYM_CODE_START(ftrace_common)
-#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
+#ifdef MARCH_HAS_Z196_FEATURES
aghik %r2,%r0,-MCOUNT_INSN_SIZE
lgrl %r4,function_trace_op
lgrl %r1,ftrace_func
.Lftrace_graph_caller_end:
#endif
lg %r0,(STACK_FREGS_PTREGS_PSW+8)(%r15)
-#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
+#ifdef MARCH_HAS_Z196_FEATURES
ltg %r1,STACK_FREGS_PTREGS_ORIG_GPR2(%r15)
locgrz %r1,%r0
#else
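In ftrace_common the z196 path leans on two newer instructions: aghik derives the traced instruction address in %r2 directly from %r0 minus MCOUNT_INSN_SIZE, and the ltg/locgrz pair at the end picks the return target for direct calls, taking the trampoline address from ORIG_GPR2 when it is non-zero and otherwise the saved PSW address loaded into %r0. The #else branch, cut off here, presumably reaches the same result with an explicit test and branch. A C rendering of what the selection computes, purely illustrative:

/*
 * ltg sets the condition code from orig_gpr2; locgrz then replaces it
 * with the saved PSW address only if it was zero. Illustrative helper,
 * not kernel code.
 */
static inline unsigned long pick_return_target(unsigned long orig_gpr2,
					       unsigned long psw_addr)
{
	return orig_gpr2 ? orig_gpr2 : psw_addr;
}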