Commit 3363fbdd6fb4992ffe6c17c0dd7388ffa22d99e6

Authored by Martin Schwidefsky
Committed by Linus Torvalds
1 parent 40ac6b204c

[PATCH] s390: futex atomic operations

Add support for atomic futex operations.

Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>

Showing 1 changed file with 119 additions and 4 deletions

include/asm-s390/futex.h
-#ifndef _ASM_FUTEX_H
-#define _ASM_FUTEX_H
+#ifndef _ASM_S390_FUTEX_H
+#define _ASM_S390_FUTEX_H
 
-#include <asm-generic/futex.h>
+#ifdef __KERNEL__
 
-#endif
+#include <linux/futex.h>
+#include <asm/errno.h>
+#include <asm/uaccess.h>
+
+#ifndef __s390x__
+#define __futex_atomic_fixup \
+        ".section __ex_table,\"a\"\n" \
+        " .align 4\n" \
+        " .long 0b,2b,1b,2b\n" \
+        ".previous"
+#else /* __s390x__ */
+#define __futex_atomic_fixup \
+        ".section __ex_table,\"a\"\n" \
+        " .align 8\n" \
+        " .quad 0b,2b,1b,2b\n" \
+        ".previous"
+#endif /* __s390x__ */
+
+#define __futex_atomic_op(insn, ret, oldval, newval, uaddr, oparg) \
+        asm volatile(" l %1,0(%6)\n" \
+                     "0: " insn \
+                     " cs %1,%2,0(%6)\n" \
+                     "1: jl 0b\n" \
+                     " lhi %0,0\n" \
+                     "2:\n" \
+                     __futex_atomic_fixup \
+                     : "=d" (ret), "=&d" (oldval), "=&d" (newval), \
+                       "=m" (*uaddr) \
+                     : "0" (-EFAULT), "d" (oparg), "a" (uaddr), \
+                       "m" (*uaddr) : "cc" );
+
+static inline int futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
+{
+        int op = (encoded_op >> 28) & 7;
+        int cmp = (encoded_op >> 24) & 15;
+        int oparg = (encoded_op << 8) >> 20;
+        int cmparg = (encoded_op << 20) >> 20;
+        int oldval = 0, newval, ret;
+        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
+                oparg = 1 << oparg;
+
+        if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
+                return -EFAULT;
+
+        inc_preempt_count();
+
+        switch (op) {
+        case FUTEX_OP_SET:
+                __futex_atomic_op("lr %2,%5\n",
+                                  ret, oldval, newval, uaddr, oparg);
+                break;
+        case FUTEX_OP_ADD:
+                __futex_atomic_op("lr %2,%1\nar %2,%5\n",
+                                  ret, oldval, newval, uaddr, oparg);
+                break;
+        case FUTEX_OP_OR:
+                __futex_atomic_op("lr %2,%1\nor %2,%5\n",
+                                  ret, oldval, newval, uaddr, oparg);
+                break;
+        case FUTEX_OP_ANDN:
+                __futex_atomic_op("lr %2,%1\nnr %2,%5\n",
+                                  ret, oldval, newval, uaddr, oparg);
+                break;
+        case FUTEX_OP_XOR:
+                __futex_atomic_op("lr %2,%1\nxr %2,%5\n",
+                                  ret, oldval, newval, uaddr, oparg);
+                break;
+        default:
+                ret = -ENOSYS;
+        }
+
+        dec_preempt_count();
+
+        if (!ret) {
+                switch (cmp) {
+                case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
+                case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
+                case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
+                case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
+                case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
+                case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
+                default: ret = -ENOSYS;
+                }
+        }
+        return ret;
+}
+
+static inline int
+futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
+{
+        int ret;
+
+        if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
+                return -EFAULT;
+        asm volatile(" cs %1,%4,0(%5)\n"
+                     "0: lr %0,%1\n"
+                     "1:\n"
+#ifndef __s390x__
+                     ".section __ex_table,\"a\"\n"
+                     " .align 4\n"
+                     " .long 0b,1b\n"
+                     ".previous"
+#else /* __s390x__ */
+                     ".section __ex_table,\"a\"\n"
+                     " .align 8\n"
+                     " .quad 0b,1b\n"
+                     ".previous"
+#endif /* __s390x__ */
+                     : "=d" (ret), "+d" (oldval), "=m" (*uaddr)
+                     : "0" (-EFAULT), "d" (newval), "a" (uaddr), "m" (*uaddr)
+                     : "cc", "memory" );
+        return oldval;
+}
+
+#endif /* __KERNEL__ */
+#endif /* _ASM_S390_FUTEX_H */
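
What the new primitives implement, in portable terms: futex_atomic_op_inuser() decodes the 32-bit encoded_op word passed in by sys_futex(FUTEX_WAKE_OP), applies the requested arithmetic/logic operation to the futex word with a load / compare-and-swap retry loop, and then evaluates the comparison against the old value. Below is a minimal user-space sketch of those semantics, assuming GCC's __atomic builtins in place of the s390 l/cs/jl sequence and the __ex_table fault fixup (which have no user-space counterpart). The FUTEX_OP_* constants and the FUTEX_OP() packing macro mirror <linux/futex.h>; the name futex_op_sketch() and the demo in main() are illustrative only and not part of the patch.

/*
 * Illustrative sketch only -- not kernel code.  futex_op_sketch() is a
 * hypothetical stand-in for futex_atomic_op_inuser(), with GCC __atomic
 * builtins replacing the s390 inline assembly and the exception-table fixup.
 */
#include <stdio.h>

#define FUTEX_OP_SET            0       /* *uaddr = oparg */
#define FUTEX_OP_ADD            1       /* *uaddr += oparg */
#define FUTEX_OP_OR             2       /* *uaddr |= oparg */
#define FUTEX_OP_ANDN           3       /* see the note in the switch below */
#define FUTEX_OP_XOR            4       /* *uaddr ^= oparg */
#define FUTEX_OP_OPARG_SHIFT    8       /* use 1 << oparg as the operand */

#define FUTEX_OP_CMP_EQ         0
#define FUTEX_OP_CMP_NE         1
#define FUTEX_OP_CMP_LT         2
#define FUTEX_OP_CMP_GE         3
#define FUTEX_OP_CMP_LE         4
#define FUTEX_OP_CMP_GT         5

/* op:4 | cmp:4 | oparg:12 | cmparg:12, as packed by the FUTEX_OP() macro */
#define FUTEX_OP(op, oparg, cmp, cmparg) \
        (((op & 0xf) << 28) | ((cmp & 0xf) << 24) | \
         ((oparg & 0xfff) << 12) | (cmparg & 0xfff))

static int futex_op_sketch(int encoded_op, int *uaddr)
{
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
        int oparg = (encoded_op << 8) >> 20;    /* sign-extend the 12-bit field */
        int cmparg = (encoded_op << 20) >> 20;  /* sign-extend the 12-bit field */
        int oldval, newval;

        if ((encoded_op >> 28) & FUTEX_OP_OPARG_SHIFT)  /* OPARG_SHIFT flag, bit 31 */
                oparg = 1 << oparg;

        /*
         * The l/cs/jl sequence of __futex_atomic_op(): load the old value,
         * compute the new one, and retry the compare-and-swap until no other
         * CPU has changed the word in between.
         */
        oldval = __atomic_load_n(uaddr, __ATOMIC_RELAXED);
        do {
                switch (op) {
                case FUTEX_OP_SET:  newval = oparg; break;
                case FUTEX_OP_ADD:  newval = oldval + oparg; break;
                case FUTEX_OP_OR:   newval = oldval | oparg; break;
                /* 'nr' is a plain AND; this mirrors the ANDN insn string above */
                case FUTEX_OP_ANDN: newval = oldval & oparg; break;
                case FUTEX_OP_XOR:  newval = oldval ^ oparg; break;
                default: return -1;     /* the kernel returns -ENOSYS here */
                }
        } while (!__atomic_compare_exchange_n(uaddr, &oldval, newval, 0,
                                              __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));

        /* Second half of futex_atomic_op_inuser(): test the *old* value. */
        switch (cmp) {
        case FUTEX_OP_CMP_EQ: return oldval == cmparg;
        case FUTEX_OP_CMP_NE: return oldval != cmparg;
        case FUTEX_OP_CMP_LT: return oldval < cmparg;
        case FUTEX_OP_CMP_GE: return oldval >= cmparg;
        case FUTEX_OP_CMP_LE: return oldval <= cmparg;
        case FUTEX_OP_CMP_GT: return oldval > cmparg;
        default: return -1;
        }
}

int main(void)
{
        int word = 3;
        /* FUTEX_WAKE_OP-style request: word += 4, then test old value == 3 */
        int r = futex_op_sketch(FUTEX_OP(FUTEX_OP_ADD, 4, FUTEX_OP_CMP_EQ, 3), &word);
        printf("word=%d cmp=%d\n", word, r);    /* prints: word=7 cmp=1 */
        return 0;
}

On s390, cs compares %1 with the word at the memory operand and stores the new value only when they match; on a mismatch it loads the current memory value into %1 and sets condition code 1, which "jl 0b" uses to retry, so the inline assembly in the patch follows the same retry pattern as the compare_exchange loop in the sketch. futex_atomic_cmpxchg_inatomic() is the single-shot form of the same instruction: one cs with no retry, returning the value it found at uaddr.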