404

[ Avaa Bypassed ]




Upload:

Command:

botdev@13.59.219.20: ~ $
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/barrier.h>


#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic64_read(v)	READ_ONCE((v)->counter)

#define atomic_set(v,i)		WRITE_ONCE(((v)->counter), (i))
#define atomic64_set(v,i)	WRITE_ONCE(((v)->counter), (i))

/*
 * ATOMIC_OP(op, c_op) - emit ia64_atomic_<op>(i, v): atomically apply
 * "counter c_op i" and return the NEW value.
 *
 * Implemented as a cmpxchg.acq retry loop: snapshot the counter,
 * compute the updated value, and retry whenever the compare-exchange
 * reports that another CPU modified the counter in between.
 * CMPXCHG_BUGCHECK* are debug hooks from <asm/intrinsics.h>.
 */
#define ATOMIC_OP(op, c_op)						\
static __inline__ int							\
ia64_atomic_##op (int i, atomic_t *v)					\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return new;							\
}

/*
 * ATOMIC_FETCH_OP(op, c_op) - like ATOMIC_OP, but emits
 * ia64_atomic_fetch_<op>(i, v) which returns the OLD value of the
 * counter (fetch-and-op semantics).  Same cmpxchg.acq retry loop.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static __inline__ int							\
ia64_atomic_fetch_##op (int i, atomic_t *v)				\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return old;							\
}

/* Instantiate both the return-new and the fetch (return-old) variant. */
#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

/* ia64_atomic_{add,sub}() and ia64_atomic_fetch_{add,sub}() */
ATOMIC_OPS(add, +)
ATOMIC_OPS(sub, -)

/*
 * The ia64 fetchadd instruction only accepts increments from the set
 * {-16, -8, -4, -1, 1, 4, 8, 16}, so the fetchadd fast path is usable
 * only when the increment is a compile-time constant in that set.
 * __builtin_constant_p() only folds to a constant reliably when the
 * optimizer runs, hence the __OPTIMIZE__ guard: without optimization
 * we always take the generic cmpxchg loop.
 */
#ifdef __OPTIMIZE__
/* 1 if @i is a compile-time constant fetchadd can handle, else 0. */
#define __ia64_atomic_const(i)	__builtin_constant_p(i) ?		\
		((i) == 1 || (i) == 4 || (i) == 8 || (i) == 16 ||	\
		 (i) == -1 || (i) == -4 || (i) == -8 || (i) == -16) : 0

/* Atomically add @i to @v and return the NEW value. */
#define atomic_add_return(i, v)						\
({									\
	int __i = (i);							\
	static const int __ia64_atomic_p = __ia64_atomic_const(i);	\
	__ia64_atomic_p ? ia64_fetch_and_add(__i, &(v)->counter) :	\
				ia64_atomic_add(__i, v);		\
})

/* Atomically subtract @i from @v and return the NEW value. */
#define atomic_sub_return(i, v)						\
({									\
	int __i = (i);							\
	static const int __ia64_atomic_p = __ia64_atomic_const(i);	\
	__ia64_atomic_p ? ia64_fetch_and_add(-__i, &(v)->counter) :	\
				ia64_atomic_sub(__i, v);		\
})
#else
#define atomic_add_return(i, v)	ia64_atomic_add(i, v)
#define atomic_sub_return(i, v)	ia64_atomic_sub(i, v)
#endif

/*
 * atomic_fetch_add - add @i to @v and return the OLD value.
 * Constant increments in {+-1, +-4, +-8, +-16} use the fetchadd fast
 * path (with acquire semantics); anything else falls back to the
 * cmpxchg loop in ia64_atomic_fetch_add().
 */
#define atomic_fetch_add(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_add(__ia64_aar_i, v);		\
})

/*
 * atomic_fetch_sub - subtract @i from @v and return the OLD value.
 * Mirror of atomic_fetch_add: constant decrements use fetchadd with a
 * negated increment, everything else takes the cmpxchg loop.
 */
#define atomic_fetch_sub(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==   1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==   8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i ==  -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i ==  -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_sub(__ia64_asr_i, v);		\
})

/* Bitwise ops only need the fetch (return-old) flavor. */
ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

/* Void-returning forms: perform the op and discard the fetched value. */
#define atomic_and(i,v)	(void)ia64_atomic_fetch_and(i,v)
#define atomic_or(i,v)	(void)ia64_atomic_fetch_or(i,v)
#define atomic_xor(i,v)	(void)ia64_atomic_fetch_xor(i,v)

/* Fetch forms: return the value the counter held before the op. */
#define atomic_fetch_and(i,v)	ia64_atomic_fetch_and(i,v)
#define atomic_fetch_or(i,v)	ia64_atomic_fetch_or(i,v)
#define atomic_fetch_xor(i,v)	ia64_atomic_fetch_xor(i,v)

/* The 32-bit generator macros are no longer needed. */
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP

/*
 * ATOMIC64_OP(op, c_op) - 64-bit counterpart of ATOMIC_OP: emits
 * ia64_atomic64_<op>(i, v), returning the NEW value after a
 * cmpxchg.acq retry loop on the 64-bit counter.
 */
#define ATOMIC64_OP(op, c_op)						\
static __inline__ long							\
ia64_atomic64_##op (__s64 i, atomic64_t *v)				\
{									\
	__s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return new;							\
}

/*
 * ATOMIC64_FETCH_OP(op, c_op) - 64-bit counterpart of ATOMIC_FETCH_OP:
 * emits ia64_atomic64_fetch_<op>(i, v), which returns the OLD value.
 */
#define ATOMIC64_FETCH_OP(op, c_op)					\
static __inline__ long							\
ia64_atomic64_fetch_##op (__s64 i, atomic64_t *v)			\
{									\
	__s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return old;							\
}

/* Instantiate both variants for the 64-bit counter. */
#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_FETCH_OP(op, c_op)

/* ia64_atomic64_{add,sub}() and ia64_atomic64_fetch_{add,sub}() */
ATOMIC64_OPS(add, +)
ATOMIC64_OPS(sub, -)

/*
 * atomic64_add_return - add @i to @v and return the NEW value.
 * Constant increments in {+-1, +-4, +-8, +-16} use the fetchadd fast
 * path; anything else falls back to the cmpxchg loop.
 */
#define atomic64_add_return(i,v)					\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic64_add(__ia64_aar_i, v);			\
})

/* Subtract @i from @v and return the NEW value (negated fetchadd). */
#define atomic64_sub_return(i,v)					\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==   1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==   8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i ==  -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i ==  -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic64_sub(__ia64_asr_i, v);			\
})

/*
 * atomic64_fetch_add - add @i to @v and return the OLD value.
 * Same constant-increment fast path as atomic64_add_return, but using
 * ia64_fetchadd with acquire semantics.
 */
#define atomic64_fetch_add(i,v)						\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_add(__ia64_aar_i, v);		\
})

/* Subtract @i from @v and return the OLD value. */
#define atomic64_fetch_sub(i,v)						\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==   1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==   8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i ==  -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i ==  -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_sub(__ia64_asr_i, v);		\
})

/* 64-bit bitwise ops only need the fetch (return-old) flavor. */
ATOMIC64_FETCH_OP(and, &)
ATOMIC64_FETCH_OP(or, |)
ATOMIC64_FETCH_OP(xor, ^)

/* Void-returning forms: perform the op and discard the fetched value. */
#define atomic64_and(i,v)	(void)ia64_atomic64_fetch_and(i,v)
#define atomic64_or(i,v)	(void)ia64_atomic64_fetch_or(i,v)
#define atomic64_xor(i,v)	(void)ia64_atomic64_fetch_xor(i,v)

/* Fetch forms: return the value the counter held before the op. */
#define atomic64_fetch_and(i,v)	ia64_atomic64_fetch_and(i,v)
#define atomic64_fetch_or(i,v)	ia64_atomic64_fetch_or(i,v)
#define atomic64_fetch_xor(i,v)	ia64_atomic64_fetch_xor(i,v)

/* The 64-bit generator macros are no longer needed. */
#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP

/* Thin wrappers: cmpxchg()/xchg() applied to the embedded counter. */
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) \
	(cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/*
 * Atomically add @a to @v, unless @v currently equals @u.
 * Returns the value @v held before the (possible) addition, so the
 * caller can tell whether the add happened (it did iff return != @u).
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int cur = atomic_read(v);
	int seen;

	while (cur != u) {
		seen = atomic_cmpxchg(v, cur, cur + a);
		if (seen == cur)
			break;		/* our update landed */
		cur = seen;		/* lost a race: retry with fresh value */
	}
	return cur;
}


/*
 * Atomically add @a to the 64-bit counter @v, unless it currently
 * equals @u.  Returns non-zero if the addition was performed.
 */
static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long cur = atomic64_read(v);
	long seen;

	while (cur != u) {
		seen = atomic64_cmpxchg(v, cur, cur + a);
		if (seen == cur)
			break;		/* our update landed */
		cur = seen;		/* lost a race: retry with fresh value */
	}
	return cur != u;
}

/* Increment @v unless it is zero; non-zero result means it was incremented. */
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/*
 * Atomically decrement @v unless the result would be negative.
 * Returns the decremented value; a negative return means no decrement
 * took place.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long cur = atomic64_read(v);
	long next, seen;

	for (;;) {
		next = cur - 1;
		if (next < 0)
			break;		/* would go negative: leave @v untouched */
		seen = atomic64_cmpxchg(v, cur, next);
		if (seen == cur)
			break;		/* our update landed */
		cur = seen;		/* lost a race: retry with fresh value */
	}
	return next;
}

/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

/* 64-bit counterpart of atomic_add_negative(). */
static __inline__ long
atomic64_add_negative (__s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

/* inc/dec are add/sub of 1; the *_return forms yield the new value. */
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))

/* *_and_test: perform the operation and report whether the result is 0. */
#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
#define atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)

/* Void-returning arithmetic helpers built on the *_return forms. */
#define atomic_add(i,v)			(void)atomic_add_return((i), (v))
#define atomic_sub(i,v)			(void)atomic_sub_return((i), (v))
#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))

#define atomic64_add(i,v)		(void)atomic64_add_return((i), (v))
#define atomic64_sub(i,v)		(void)atomic64_sub_return((i), (v))
#define atomic64_inc(v)			atomic64_add(1, (v))
#define atomic64_dec(v)			atomic64_sub(1, (v))

#endif /* _ASM_IA64_ATOMIC_H */

Filemanager

Name Type Size Permission Actions
native Folder 0755
sn Folder 0755
uv Folder 0755
Kbuild File 224 B 0644
acenv.h File 1.27 KB 0644
acpi-ext.h File 590 B 0644
acpi.h File 4.1 KB 0644
agp.h File 857 B 0644
asm-offsets.h File 35 B 0644
asm-prototypes.h File 890 B 0644
asmmacro.h File 3.29 KB 0644
atomic.h File 9.47 KB 0644
barrier.h File 2.36 KB 0644
bitops.h File 10.84 KB 0644
bug.h File 404 B 0644
bugs.h File 436 B 0644
cache.h File 771 B 0644
cacheflush.h File 1.71 KB 0644
checksum.h File 2.1 KB 0644
clocksource.h File 276 B 0644
cpu.h File 456 B 0644
cputime.h File 855 B 0644
current.h File 418 B 0644
cyclone.h File 442 B 0644
delay.h File 1.7 KB 0644
device.h File 323 B 0644
div64.h File 31 B 0644
dma-mapping.h File 1.17 KB 0644
dma.h File 466 B 0644
dmi.h File 343 B 0644
early_ioremap.h File 428 B 0644
elf.h File 9.83 KB 0644
emergency-restart.h File 149 B 0644
esi.h File 887 B 0644
exception.h File 1.13 KB 0644
export.h File 115 B 0644
extable.h File 330 B 0644
fb.h File 569 B 0644
fpswa.h File 1.88 KB 0644
ftrace.h File 748 B 0644
futex.h File 2.56 KB 0644
gcc_intrin.h File 368 B 0644
hardirq.h File 564 B 0644
hpsim.h File 364 B 0644
hugetlb.h File 1.67 KB 0644
hw_irq.h File 6.33 KB 0644
idle.h File 200 B 0644
intrinsics.h File 306 B 0644
io.h File 11.77 KB 0644
iommu.h File 555 B 0644
iommu_table.h File 175 B 0644
iosapic.h File 3.16 KB 0644
irq.h File 1.02 KB 0644
irq_regs.h File 34 B 0644
irq_remapping.h File 142 B 0644
irqflags.h File 2.11 KB 0644
kdebug.h File 1.64 KB 0644
kexec.h File 1.57 KB 0644
kmap_types.h File 260 B 0644
kprobes.h File 3.82 KB 0644
kregs.h File 6.73 KB 0644
libata-portmap.h File 225 B 0644
linkage.h File 398 B 0644
local.h File 31 B 0644
local64.h File 33 B 0644
machvec.h File 12.1 KB 0644
machvec_dig.h File 449 B 0644
machvec_dig_vtd.h File 558 B 0644
machvec_hpsim.h File 544 B 0644
machvec_hpzx1.h File 544 B 0644
machvec_hpzx1_swiotlb.h File 632 B 0644
machvec_init.h File 1.33 KB 0644
machvec_sn2.h File 4.71 KB 0644
machvec_uv.h File 684 B 0644
mca.h File 5.91 KB 0644
mca_asm.h File 7.18 KB 0644
meminit.h File 2.24 KB 0644
mman.h File 432 B 0644
mmu.h File 374 B 0644
mmu_context.h File 5.29 KB 0644
mmzone.h File 1.1 KB 0644
module.h File 1.1 KB 0644
msidef.h File 1.4 KB 0644
nodedata.h File 1.85 KB 0644
numa.h File 2.18 KB 0644
page.h File 6.49 KB 0644
pal.h File 53.39 KB 0644
param.h File 439 B 0644
parport.h File 534 B 0644
patch.h File 1.19 KB 0644
pci.h File 2.83 KB 0644
percpu.h File 1.32 KB 0644
perfmon.h File 4.33 KB 0644
pgalloc.h File 2.84 KB 0644
pgtable.h File 20.92 KB 0644
processor.h File 17.98 KB 0644
ptrace.h File 5.2 KB 0644
rwsem.h File 3.82 KB 0644
sal.h File 26.51 KB 0644
sections.h File 1.35 KB 0644
segment.h File 162 B 0644
serial.h File 446 B 0644
shmparam.h File 445 B 0644
signal.h File 749 B 0644
smp.h File 3.21 KB 0644
sparsemem.h File 621 B 0644
spinlock.h File 6.92 KB 0644
spinlock_types.h File 475 B 0644
string.h File 659 B 0644
swiotlb.h File 344 B 0644
switch_to.h File 2.89 KB 0644
syscall.h File 2.06 KB 0644
termios.h File 1.88 KB 0644
thread_info.h File 4.66 KB 0644
timex.h File 1.47 KB 0644
tlb.h File 8.42 KB 0644
tlbflush.h File 2.33 KB 0644
topology.h File 1.58 KB 0644
types.h File 828 B 0644
uaccess.h File 9.86 KB 0644
unaligned.h File 337 B 0644
uncached.h File 463 B 0644
unistd.h File 1.45 KB 0644
unwind.h File 5.74 KB 0644
user.h File 2.25 KB 0644
ustack.h File 403 B 0644
vga.h File 657 B 0644
xor.h File 1.12 KB 0644