x86/csum: Move csum_tail result parameter to a local variable.

Message ID 20230530135953.3341174-1-trix@redhat.com
State New
Series x86/csum: Move csum_tail result parameter to a local variable.

Commit Message

Tom Rix May 30, 2023, 1:59 p.m. UTC
  clang with W=1 reports
arch/x86/lib/csum-partial_64.c:74:20: error: variable
  'result' is uninitialized when used here [-Werror,-Wuninitialized]
                return csum_tail(result, temp64, odd);
                                 ^~~~~~
This is a false positive: result is only ever assigned inside
csum_tail(), so there is no intermediate value to pass in. Move
result from a parameter of csum_tail() to a local variable.

Signed-off-by: Tom Rix <trix@redhat.com>
---
 arch/x86/lib/csum-partial_64.c | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)
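
For readers, the shape of the problem and of the fix can be seen in a minimal
standalone sketch. The names below (tail_before, tail_after, caller_before,
caller_after) are hypothetical and this is not the kernel code, only an
illustration of the pattern clang flags: the caller hands over a value it
never initialized, the callee only ever overwrites it, so the parameter can
simply become a local.

/* Before: 'result' arrives uninitialized and is immediately overwritten. */
static unsigned int tail_before(unsigned int result, unsigned long long acc)
{
	result = (unsigned int)(acc >> 32) + (unsigned int)acc;
	return result;
}

/* After: 'result' is a local, so no uninitialized value crosses the call. */
static unsigned int tail_after(unsigned long long acc)
{
	unsigned int result;

	result = (unsigned int)(acc >> 32) + (unsigned int)acc;
	return result;
}

unsigned int caller_before(unsigned long long acc)
{
	unsigned int result;			/* never assigned */

	return tail_before(result, acc);	/* clang: 'result' is uninitialized when used here */
}

unsigned int caller_after(unsigned long long acc)
{
	return tail_after(acc);			/* nothing uninitialized crosses the call */
}

Compiling caller_before() with clang -Wall (or in a kernel W=1 build) should
reproduce the same class of warning; caller_after() passes no uninitialized
value, so it should not.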
  

Comments

Nathan Chancellor May 30, 2023, 3:20 p.m. UTC | #1
Hi Tom,

On Tue, May 30, 2023 at 09:59:53AM -0400, Tom Rix wrote:
> clang with W=1 reports
> arch/x86/lib/csum-partial_64.c:74:20: error: variable
>   'result' is uninitialized when used here [-Werror,-Wuninitialized]
>                 return csum_tail(result, temp64, odd);
>                                  ^~~~~~
> This is a false positive: result is only ever assigned inside
> csum_tail(), so there is no intermediate value to pass in. Move
> result from a parameter of csum_tail() to a local variable.
> 
> Signed-off-by: Tom Rix <trix@redhat.com>

Thanks for the patch. I sent the same one last Friday, which should be
in your inbox:

https://lore.kernel.org/20230526-csum_partial-wuninitialized-v1-1-ebc0108dcec1@kernel.org/

Dave picked it up yesterday, I guess -tip's auto-latest was not
refreshed for today's -next:

https://git.kernel.org/tip/2fe1e67e6987b6f05329740da79c8150a2205b0d

Cheers,
Nathan

Dave Hansen June 1, 2023, 4:58 p.m. UTC | #2
On 5/30/23 08:20, Nathan Chancellor wrote:
> Dave picked it up yesterday, I guess -tip's auto-latest was not
> refreshed for today's -next:
> 
> https://git.kernel.org/tip/2fe1e67e6987b6f05329740da79c8150a2205b0d

BTW, thank you _both_ for the patches!
  

Patch

diff --git a/arch/x86/lib/csum-partial_64.c b/arch/x86/lib/csum-partial_64.c
index fe5861951b15..cea25ca8b8cf 100644
--- a/arch/x86/lib/csum-partial_64.c
+++ b/arch/x86/lib/csum-partial_64.c
@@ -21,8 +21,10 @@ static inline unsigned short from32to16(unsigned a)
 	return b;
 }
 
-static inline __wsum csum_tail(unsigned int result, u64 temp64, int odd)
+static inline __wsum csum_tail(u64 temp64, int odd)
 {
+	unsigned int result;
+
 	result = add32_with_carry(temp64 >> 32, temp64 & 0xffffffff);
 	if (unlikely(odd)) {
 		result = from32to16(result);
@@ -45,7 +47,7 @@ static inline __wsum csum_tail(unsigned int result, u64 temp64, int odd)
 __wsum csum_partial(const void *buff, int len, __wsum sum)
 {
 	u64 temp64 = (__force u64)sum;
-	unsigned odd, result;
+	unsigned odd;
 
 	odd = 1 & (unsigned long) buff;
 	if (unlikely(odd)) {
@@ -71,7 +73,7 @@ __wsum csum_partial(const void *buff, int len, __wsum sum)
 		    "adcq $0,%[res]"
 		    : [res] "+r"(temp64)
 		    : [src] "r"(buff), "m"(*(const char(*)[40])buff));
-		return csum_tail(result, temp64, odd);
+		return csum_tail(temp64, odd);
 	}
 	if (unlikely(len >= 64)) {
 		/*
@@ -141,7 +143,7 @@ __wsum csum_partial(const void *buff, int len, __wsum sum)
 		    : [res] "+r"(temp64)
 		    : [trail] "r"(trail));
 	}
-	return csum_tail(result, temp64, odd);
+	return csum_tail(temp64, odd);
 }
 EXPORT_SYMBOL(csum_partial);
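
For context on what the code above computes, here is a plain-C sketch of the
folding done by csum_tail(). The helper names (fold64_to_32, fold32_to_16,
tail_sketch) are hypothetical stand-ins: in the kernel, add32_with_carry() and
from32to16() perform the same end-around-carry folds with inline asm, and the
odd-address byte swap noted below comes from the upstream csum_tail() body,
part of which is truncated in the hunks shown here.

#include <stdint.h>

/* Fold a 64-bit accumulator into 32 bits with an end-around carry
 * (plain-C stand-in for add32_with_carry(temp64 >> 32, temp64 & 0xffffffff)). */
static uint32_t fold64_to_32(uint64_t acc)
{
	uint32_t lo = (uint32_t)acc;
	uint32_t sum = lo + (uint32_t)(acc >> 32);

	return sum + (sum < lo);		/* wrap the carry back in */
}

/* Fold 32 bits down to 16, again wrapping carries around
 * (plain-C stand-in for from32to16()). */
static uint16_t fold32_to_16(uint32_t sum)
{
	sum = (sum & 0xffff) + (sum >> 16);
	sum = (sum & 0xffff) + (sum >> 16);
	return (uint16_t)sum;
}

uint32_t tail_sketch(uint64_t acc, int odd)
{
	uint32_t result = fold64_to_32(acc);

	if (odd) {
		/*
		 * When the buffer started on an odd address, the upstream
		 * function also folds to 16 bits and swaps the bytes back,
		 * undoing the one-byte shift applied at the start of
		 * csum_partial() (that part is not visible in the truncated
		 * hunk above).
		 */
		uint16_t r = fold32_to_16(result);

		result = (uint16_t)((r >> 8) | (r << 8));
	}
	return result;
}

The returned value is the 32-bit ones'-complement partial sum; callers of
csum_partial() later fold it down to the final 16-bit checksum (in the kernel,
via csum_fold()).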