This allows us to move duplicated code in <asm/atomic.h>
(atomic_inc_not_zero() for now) to <linux/atomic.h>

Signed-off-by: Arun Sharma <asharma@fb.com>
Reviewed-by: Eric Dumazet <eric.dumazet@gmail.com>
Cc: Ingo Molnar <mingo@elte.hu>
Cc: David Miller <davem@davemloft.net>
Cc: Eric Dumazet <eric.dumazet@gmail.com>
Acked-by: Mike Frysinger <vapier@gentoo.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
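
For context, the duplication being removed has a simple shape: atomic_inc_not_zero() is just atomic_add_unless() with fixed arguments, so it can be defined once in <linux/atomic.h> instead of separately in every architecture's <asm/atomic.h>. A minimal sketch of such a generic definition (the names match the kernel's; treat this as an illustration of the idea rather than the literal patch contents):

/*
 * Generic "increment unless zero": atomic_add_unless(v, a, u) adds
 * a to *v unless *v == u, returning non-zero if the add happened.
 * With a = 1 and u = 0 that is exactly "increment unless zero".
 */
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)

The file below is one of the call sites now including <linux/atomic.h> directly.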
#include <linux/module.h>
#include <linux/spinlock.h>
#include <linux/atomic.h>

/*
 * This is an implementation of the notion of "decrement a
 * reference count, and return locked if it decremented to zero".
 *
 * NOTE NOTE NOTE! This is _not_ equivalent to
 *
 *	if (atomic_dec_and_test(&atomic)) {
 *		spin_lock(&lock);
 *		return 1;
 *	}
 *	return 0;
 *
 * because the spin-lock and the decrement must be
 * "atomic".
 */
int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
{
	/* Subtract 1 from counter unless that drops it to 0 (ie. it was 1) */
	if (atomic_add_unless(atomic, -1, 1))
		return 0;

	/* Otherwise do it the slow way */
	spin_lock(lock);
	if (atomic_dec_and_test(atomic))
		return 1;
	spin_unlock(lock);
	return 0;
}

EXPORT_SYMBOL(_atomic_dec_and_lock);
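
Callers normally reach this helper through the atomic_dec_and_lock() wrapper declared in <linux/spinlock.h>. A sketch of the typical use, with a hypothetical refcounted struct foo invented purely for illustration: the last reference must be dropped with the lock held, so a concurrent lookup cannot revive the object between the count reaching zero and the unlink.

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/atomic.h>

/* Hypothetical object: a refcounted entry on a lock-protected list. */
struct foo {
	atomic_t refcount;
	spinlock_t *list_lock;		/* protects the list 'node' is on */
	struct list_head node;
};

static void foo_put(struct foo *f)
{
	/*
	 * Drop one reference. If it was the last one, we return with
	 * *list_lock held, so the object can be unlinked and freed
	 * without racing a concurrent lookup that takes the same lock.
	 */
	if (atomic_dec_and_lock(&f->refcount, f->list_lock)) {
		list_del(&f->node);
		spin_unlock(f->list_lock);
		kfree(f);
	}
}

Note the fast path in _atomic_dec_and_lock() above: while the counter is greater than 1, atomic_add_unless() performs the decrement without touching the lock at all, which is what makes this cheaper than unconditionally wrapping atomic_dec_and_test() in the spinlock.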