```diff
+/* SPDX-License-Identifier: GPL-2.0-or-later */
 /*
  * INET		An implementation of the TCP/IP protocol suite for the LINUX
  *		operating system.  INET is implemented using the  BSD Socket
..
  *		Arnt Gulbrandsen, <agulbra@nvg.unit.no>
  *		Borrows very liberally from tcp.c and ip.c, see those
  *		files for more names.
- *
- *	This program is free software; you can redistribute it and/or
- *	modify it under the terms of the GNU General Public License
- *	as published by the Free Software Foundation; either version
- *	2 of the License, or (at your option) any later version.
  */

 #ifndef _CHECKSUM_H
..
 #include <asm/checksum.h>

 #ifndef _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
-static inline
+static __always_inline
 __wsum csum_and_copy_from_user (const void __user *src, void *dst,
-				      int len, __wsum sum, int *err_ptr)
+				      int len)
 {
-	if (access_ok(VERIFY_READ, src, len))
-		return csum_partial_copy_from_user(src, dst, len, sum, err_ptr);
-
-	if (len)
-		*err_ptr = -EFAULT;
-
-	return sum;
+	if (copy_from_user(dst, src, len))
+		return 0;
+	return csum_partial(dst, len, ~0U);
 }
 #endif

 #ifndef HAVE_CSUM_COPY_USER
-static __inline__ __wsum csum_and_copy_to_user
-(const void *src, void __user *dst, int len, __wsum sum, int *err_ptr)
+static __always_inline __wsum csum_and_copy_to_user
+(const void *src, void __user *dst, int len)
 {
-	sum = csum_partial(src, len, sum);
+	__wsum sum = csum_partial(src, len, ~0U);

-	if (access_ok(VERIFY_WRITE, dst, len)) {
-		if (copy_to_user(dst, src, len) == 0)
-			return sum;
-	}
-	if (len)
-		*err_ptr = -EFAULT;
+	if (copy_to_user(dst, src, len) == 0)
+		return sum;
+	return 0;
+}
+#endif

-	return (__force __wsum)-1; /* invalid checksum */
+#ifndef _HAVE_ARCH_CSUM_AND_COPY
+static __always_inline __wsum
+csum_partial_copy_nocheck(const void *src, void *dst, int len)
+{
+	memcpy(dst, src, len);
+	return csum_partial(dst, len, 0);
 }
 #endif

 #ifndef HAVE_ARCH_CSUM_ADD
-static inline __wsum csum_add(__wsum csum, __wsum addend)
+static __always_inline __wsum csum_add(__wsum csum, __wsum addend)
 {
 	u32 res = (__force u32)csum;
 	res += (__force u32)addend;
..
 }
 #endif

-static inline __wsum csum_sub(__wsum csum, __wsum addend)
+static __always_inline __wsum csum_sub(__wsum csum, __wsum addend)
 {
 	return csum_add(csum, ~addend);
 }

-static inline __sum16 csum16_add(__sum16 csum, __be16 addend)
+static __always_inline __sum16 csum16_add(__sum16 csum, __be16 addend)
 {
 	u16 res = (__force u16)csum;

..
 	return (__force __sum16)(res + (res < (__force u16)addend));
 }

-static inline __sum16 csum16_sub(__sum16 csum, __be16 addend)
+static __always_inline __sum16 csum16_sub(__sum16 csum, __be16 addend)
 {
 	return csum16_add(csum, ~addend);
 }

-static inline __wsum
+static __always_inline __wsum
 csum_block_add(__wsum csum, __wsum csum2, int offset)
 {
 	u32 sum = (__force u32)csum2;
..
 	return csum_add(csum, (__force __wsum)sum);
 }

-static inline __wsum
+static __always_inline __wsum
 csum_block_add_ext(__wsum csum, __wsum csum2, int offset, int len)
 {
 	return csum_block_add(csum, csum2, offset);
 }

-static inline __wsum
+static __always_inline __wsum
 csum_block_sub(__wsum csum, __wsum csum2, int offset)
 {
 	return csum_block_add(csum, ~csum2, offset);
 }

-static inline __wsum csum_unfold(__sum16 n)
+static __always_inline __wsum csum_unfold(__sum16 n)
 {
 	return (__force __wsum)n;
 }

-static inline __wsum csum_partial_ext(const void *buff, int len, __wsum sum)
+static __always_inline
+__wsum csum_partial_ext(const void *buff, int len, __wsum sum)
 {
 	return csum_partial(buff, len, sum);
 }

 #define CSUM_MANGLED_0 ((__force __sum16)0xffff)

-static inline void csum_replace_by_diff(__sum16 *sum, __wsum diff)
+static __always_inline void csum_replace_by_diff(__sum16 *sum, __wsum diff)
 {
 	*sum = csum_fold(csum_add(diff, ~csum_unfold(*sum)));
 }

-static inline void csum_replace4(__sum16 *sum, __be32 from, __be32 to)
+static __always_inline void csum_replace4(__sum16 *sum, __be32 from, __be32 to)
 {
 	__wsum tmp = csum_sub(~csum_unfold(*sum), (__force __wsum)from);

..
  * m : old value of a 16bit field
  * m' : new value of a 16bit field
  */
-static inline void csum_replace2(__sum16 *sum, __be16 old, __be16 new)
+static __always_inline void csum_replace2(__sum16 *sum, __be16 old, __be16 new)
 {
 	*sum = ~csum16_add(csum16_sub(~(*sum), old), new);
 }
..
 void inet_proto_csum_replace_by_diff(__sum16 *sum, struct sk_buff *skb,
 				     __wsum diff, bool pseudohdr);

-static inline void inet_proto_csum_replace2(__sum16 *sum, struct sk_buff *skb,
-					    __be16 from, __be16 to,
-					    bool pseudohdr)
+static __always_inline
+void inet_proto_csum_replace2(__sum16 *sum, struct sk_buff *skb,
+			      __be16 from, __be16 to, bool pseudohdr)
 {
 	inet_proto_csum_replace4(sum, skb, (__force __be32)from,
 				 (__force __be32)to, pseudohdr);
 }

-static inline __wsum remcsum_adjust(void *ptr, __wsum csum,
-				    int start, int offset)
+static __always_inline __wsum remcsum_adjust(void *ptr, __wsum csum,
+					     int start, int offset)
 {
 	__sum16 *psum = (__sum16 *)(ptr + offset);
 	__wsum delta;
..
 	return delta;
 }

-static inline void remcsum_unadjust(__sum16 *psum, __wsum delta)
+static __always_inline void remcsum_unadjust(__sum16 *psum, __wsum delta)
 {
 	*psum = csum_fold(csum_sub(delta, (__force __wsum)*psum));
 }
```
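
For callers, the visible change is the calling convention of the copy-and-checksum helpers: the `sum` seed and `*err_ptr` out-parameter are gone, a fault is reported by returning 0, and the internal `~0U` seed guarantees that a successful copy never produces 0. A minimal caller sketch under those assumptions (the function name `copy_chunk_and_csum` and its parameters are hypothetical and not part of this patch; `csum_and_copy_from_user()` and `csum_block_add()` are the helpers from this header):

```c
#include <linux/errno.h>
#include <linux/uaccess.h>
#include <net/checksum.h>

/* Hypothetical caller: copy one chunk from userspace and fold its
 * checksum into a running sum for the whole buffer. */
static int copy_chunk_and_csum(const void __user *from, void *to, int len,
			       __wsum *csump, int offset)
{
	/* Returns 0 only if copy_from_user() faulted; the ~0U seed keeps
	 * every successful checksum non-zero. */
	__wsum csum = csum_and_copy_from_user(from, to, len);

	if (!csum)
		return -EFAULT;

	/* Fold this chunk into the running sum at its byte offset.
	 * The ~0U seed is one's-complement negative zero, so it does
	 * not disturb the folded 16-bit result. */
	*csump = csum_block_add(*csump, csum, offset);
	return 0;
}
```

Seeding with `~0U` is what makes the zero return unambiguous: in one's complement arithmetic `0xffffffff` behaves as zero, so folding real data into it can never yield 0, and the only way to observe 0 is the `copy_from_user()` failure path.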
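
The `csum_replace2()` family touched above implements the RFC 1624 incremental update `HC' = ~(~HC + ~m + m')`, which lets a single 16-bit header field be rewritten without recomputing the checksum over the whole packet. A small usage sketch, assuming an IPv4 header rewrite (the helper `ipv4_set_tot_len` is hypothetical; `struct iphdr` and `csum_replace2()` are the real kernel type and helper):

```c
#include <linux/ip.h>
#include <net/checksum.h>

/* Hypothetical helper: change the IPv4 total-length field in place
 * and patch the header checksum incrementally. */
static void ipv4_set_tot_len(struct iphdr *iph, __be16 new_len)
{
	/* RFC 1624 update for the one 16-bit field that changes,
	 * then store the new field value. */
	csum_replace2(&iph->check, iph->tot_len, new_len);
	iph->tot_len = new_len;
}
```

The same pattern scales up: `csum_replace4()` handles a 32-bit field, and `inet_proto_csum_replace2()/4()` additionally keep `skb->csum` consistent when the packet was received with a complete hardware checksum.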