author     Sam Ravnborg <sam@ravnborg.org>        2014-05-16 23:25:50 +0200
committer  David S. Miller <davem@davemloft.net>  2014-05-18 19:01:29 -0700
commit     f05a68653e56ca2f23bccf7e50be69486886f052 (patch)
tree       c312c5d8fe927962ba365d85cd43de6592bd6642 /arch/sparc/include/asm/checksum_64.h
parent     77e39a79f36ece60769787a33fe5ae0b8b4621ba (diff)
sparc: drop use of extern for prototypes in arch/sparc/include/asm
Drop extern for all prototypes and adjust alignment of parameters as required after the removal. In a few rare cases adjust line length to conform to the 80-char maximum, and likewise in a few rare cases adjust alignment of parameters to static functions.

Signed-off-by: Sam Ravnborg <sam@ravnborg.org>
Signed-off-by: David S. Miller <davem@davemloft.net>
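For reference, the pattern applied throughout the header is simply to drop the leading extern and realign the continuation parameters, shown here with csum_partial_copy_nocheck from the hunks below (whitespace is illustrative):

    /* before: extern keyword, continuation aligned past "extern " */
    extern __wsum csum_partial_copy_nocheck(const void *src, void *dst,
                                            int len, __wsum sum);

    /* after: extern dropped, continuation realigned under the opening parenthesis */
    __wsum csum_partial_copy_nocheck(const void *src, void *dst,
                                     int len, __wsum sum);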
Diffstat (limited to 'arch/sparc/include/asm/checksum_64.h')
-rw-r--r--  arch/sparc/include/asm/checksum_64.h  |  32
1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/arch/sparc/include/asm/checksum_64.h b/arch/sparc/include/asm/checksum_64.h
index 019b9615e43c..b3c8b9ded364 100644
--- a/arch/sparc/include/asm/checksum_64.h
+++ b/arch/sparc/include/asm/checksum_64.h
@@ -29,7 +29,7 @@
*
* it's best to have buff aligned on a 32-bit boundary
*/
-extern __wsum csum_partial(const void * buff, int len, __wsum sum);
+__wsum csum_partial(const void * buff, int len, __wsum sum);
/* the same as csum_partial, but copies from user space while it
* checksums
@@ -37,12 +37,12 @@ extern __wsum csum_partial(const void * buff, int len, __wsum sum);
* here even more important to align src and dst on a 32-bit (or even
* better 64-bit) boundary
*/
-extern __wsum csum_partial_copy_nocheck(const void *src, void *dst,
- int len, __wsum sum);
+__wsum csum_partial_copy_nocheck(const void *src, void *dst,
+ int len, __wsum sum);
-extern long __csum_partial_copy_from_user(const void __user *src,
- void *dst, int len,
- __wsum sum);
+long __csum_partial_copy_from_user(const void __user *src,
+ void *dst, int len,
+ __wsum sum);
static inline __wsum
csum_partial_copy_from_user(const void __user *src,
@@ -59,9 +59,9 @@ csum_partial_copy_from_user(const void __user *src,
* Copy and checksum to user
*/
#define HAVE_CSUM_COPY_USER
-extern long __csum_partial_copy_to_user(const void *src,
- void __user *dst, int len,
- __wsum sum);
+long __csum_partial_copy_to_user(const void *src,
+ void __user *dst, int len,
+ __wsum sum);
static inline __wsum
csum_and_copy_to_user(const void *src,
@@ -77,7 +77,7 @@ csum_and_copy_to_user(const void *src,
/* ihl is always 5 or greater, almost always is 5, and iph is word aligned
* the majority of the time.
*/
-extern __sum16 ip_fast_csum(const void *iph, unsigned int ihl);
+__sum16 ip_fast_csum(const void *iph, unsigned int ihl);
/* Fold a partial checksum without adding pseudo headers. */
static inline __sum16 csum_fold(__wsum sum)
@@ -96,9 +96,9 @@ static inline __sum16 csum_fold(__wsum sum)
}
static inline __wsum csum_tcpudp_nofold(__be32 saddr, __be32 daddr,
- unsigned int len,
- unsigned short proto,
- __wsum sum)
+ unsigned int len,
+ unsigned short proto,
+ __wsum sum)
{
__asm__ __volatile__(
" addcc %1, %0, %0\n"
@@ -116,9 +116,9 @@ static inline __wsum csum_tcpudp_nofold(__be32 saddr, __be32 daddr,
* returns a 16-bit checksum, already complemented
*/
static inline __sum16 csum_tcpudp_magic(__be32 saddr, __be32 daddr,
- unsigned short len,
- unsigned short proto,
- __wsum sum)
+ unsigned short len,
+ unsigned short proto,
+ __wsum sum)
{
return csum_fold(csum_tcpudp_nofold(saddr,daddr,len,proto,sum));
}
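As context for the declarations touched above, here is a minimal sketch of how these helpers typically compose when checksumming a UDP datagram. The wrapper function and its name are hypothetical and not part of this patch; only csum_partial, csum_tcpudp_magic and IPPROTO_UDP are existing kernel identifiers:

    /* Illustrative only, not part of this patch. */
    static __sum16 example_udp_csum(__be32 saddr, __be32 daddr,
                                    const void *payload, unsigned short len)
    {
            /* 32-bit partial sum over the payload ... */
            __wsum partial = csum_partial(payload, len, 0);
            /* ... then add the pseudo-header and fold to a complemented 16-bit checksum */
            return csum_tcpudp_magic(saddr, daddr, len, IPPROTO_UDP, partial);
    }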