OXIESEC PANEL
Current Dir: /usr/src/linux-headers-4.15.0-197/arch/mips/include/asm
Server IP: 139.59.38.164
Name | Size | Modified | Perms
📁 .. | - | 11/17/2022 06:42:15 AM | rwxr-xr-x
📄 Kbuild | 577 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 abi.h | 853 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 addrspace.h | 4.1 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 amon.h | 409 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 arch_hweight.h | 792 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 asm-eva.h | 6.82 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 asm-offsets.h | 35 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 asm-prototypes.h | 197 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 asm.h | 8.47 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 asmmacro-32.h | 2.47 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 asmmacro-64.h | 1.22 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 asmmacro.h | 14.07 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 atomic.h | 19.73 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 barrier.h | 8.03 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 bcache.h | 2.04 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 bitops.h | 15.46 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 bitrev.h | 608 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 bmips-spaces.h | 268 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 bmips.h | 3.45 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 bootinfo.h | 5.08 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 branch.h | 2.35 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 break.h | 787 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 bug.h | 759 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 bugs.h | 944 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 cache.h | 546 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 cacheflush.h | 4.99 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 cacheops.h | 3.71 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 cdmm.h | 3.67 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 cevt-r4k.h | 823 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 checksum.h | 6.43 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 clock.h | 997 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 clocksource.h | 884 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 cmp.h | 492 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 cmpxchg.h | 5.28 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 compat-signal.h | 640 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 compat.h | 6.66 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 compiler.h | 2.96 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 cop2.h | 1.77 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 cpu-features.h | 19.46 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 cpu-info.h | 5.84 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 cpu-type.h | 4.13 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 cpu.h | 15.54 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 cpufeature.h | 717 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 debug.h | 654 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📁 dec | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 delay.h | 841 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 device.h | 347 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 div64.h | 2.17 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 dma-coherence.h | 813 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 dma-mapping.h | 981 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 dma.h | 9.92 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 ds1287.h | 1019 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 dsemul.h | 3.24 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 dsp.h | 1.91 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 edac.h | 819 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 elf.h | 15.04 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📁 emma | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 errno.h | 429 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 eva.h | 796 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 exec.h | 579 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 extable.h | 241 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 fb.h | 372 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 fixmap.h | 2.29 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 floppy.h | 1.57 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 fpregdef.h | 2.66 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 fpu.h | 5.21 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 fpu_emulator.h | 5.74 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 ftrace.h | 2.11 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 futex.h | 4.87 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📁 fw | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 gio_device.h | 1.5 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 gt64120.h | 19.37 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 hardirq.h | 544 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 hazards.h | 8.36 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 highmem.h | 1.72 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 hpet.h | 1.93 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 hugetlb.h | 2.76 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 hw_irq.h | 475 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 i8259.h | 2.52 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 ide.h | 330 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 idle.h | 689 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 inst.h | 2.34 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 io.h | 18.44 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📁 ip32 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 irq.h | 2.26 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 irq_cpu.h | 708 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 irq_gt641xx.h | 2.69 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 irq_regs.h | 744 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 irqflags.h | 4.04 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 isa-rev.h | 556 bytes | 11/01/2022 04:52:05 PM | rw-r--r--
📄 isadep.h | 603 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 jazz.h | 8 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 jazzdma.h | 2.97 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 jump_label.h | 1.4 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 kdebug.h | 303 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 kexec.h | 1.53 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 kgdb.h | 1.19 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 kmap_types.h | 221 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 kprobes.h | 2.68 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 kvm_host.h | 37.88 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 kvm_para.h | 2.09 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📁 lasat | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 linkage.h | 306 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 llsc.h | 623 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 local.h | 4.99 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 m48t37.h | 732 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 maar.h | 4.04 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📁 mach-ar7 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-ath25 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-ath79 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-au1x00 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-bcm47xx | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-bcm63xx | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-bmips | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-cavium-octeon | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-cobalt | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-db1x00 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-dec | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-emma2rh | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-generic | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-ip22 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-ip27 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-ip28 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-ip32 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-jazz | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-jz4740 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-lantiq | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-lasat | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-loongson32 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-loongson64 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-malta | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-netlogic | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-paravirt | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-pic32 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-pistachio | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-pmcs-msp71xx | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-pnx833x | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-ralink | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-rc32434 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-rm | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-sibyte | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-tx39xx | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-tx49xx | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-vr41xx | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📁 mach-xilfpga | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 machine.h | 2.93 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 mc146818-time.h | 3.69 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 mc146818rtc.h | 450 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📁 mips-boards | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 mips-cm.h | 15.86 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 mips-cpc.h | 5.83 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 mips-cps.h | 6.55 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 mips-gic.h | 12.3 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 mips-r2-to-r6-emul.h | 2.05 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 mips_machine.h | 1.32 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 mips_mt.h | 707 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 mipsmtregs.h | 10.9 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 mipsprom.h | 2.1 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 mipsregs.h | 88.1 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 mmu.h | 550 bytes | 11/01/2022 04:52:05 PM | rw-r--r--
📄 mmu_context.h | 5.41 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 mmzone.h | 561 bytes | 11/01/2022 04:52:05 PM | rw-r--r--
📄 module.h | 4.45 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 msa.h | 8.01 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 msc01_ic.h | 6.55 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📁 netlogic | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 nile4.h | 10.33 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📁 octeon | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 paccess.h | 3.07 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 page.h | 7.19 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📁 pci | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 pci.h | 4.08 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 perf_event.h | 482 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 pgalloc.h | 3.21 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 pgtable-32.h | 7.31 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 pgtable-64.h | 10.87 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 pgtable-bits.h | 7.36 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 pgtable.h | 17.34 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 pm-cps.h | 1.68 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 pm.h | 3.99 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 pmon.h | 1.64 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 prefetch.h | 2.1 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 processor.h | 11.71 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 prom.h | 845 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 ptrace.h | 5.55 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 r4k-timer.h | 604 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 r4kcache.h | 26.34 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 reboot.h | 440 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 reg.h | 26 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 regdef.h | 2.63 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 rtlx.h | 2.1 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 seccomp.h | 800 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 serial.h | 607 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 setup.h | 884 bytes | 11/01/2022 04:52:05 PM | rw-r--r--
📁 sgi | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 sgialib.h | 2.45 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 sgiarcs.h | 15.32 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 shmparam.h | 352 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📁 sibyte | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 sigcontext.h | 1.04 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 signal.h | 1.02 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 sim.h | 2.32 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 smp-cps.h | 1.18 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 smp-ops.h | 2.33 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 smp.h | 3.31 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📁 sn | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 sni.h | 7.27 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 socket.h | 1.34 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 sparsemem.h | 486 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 spinlock.h | 459 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 spinlock_types.h | 188 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 spram.h | 262 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 stackframe.h | 10.82 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 stackprotector.h | 1.15 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 stacktrace.h | 2.15 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 string.h | 2.94 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 switch_to.h | 4.19 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 syscall.h | 3.57 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 termios.h | 2.89 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 thread_info.h | 6.63 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 time.h | 2.13 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 timex.h | 2.87 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 tlb.h | 1.09 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 tlbdebug.h | 403 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 tlbex.h | 788 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 tlbflush.h | 1.67 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 tlbmisc.h | 320 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 topology.h | 619 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 traps.h | 1.25 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📁 txx9 | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 txx9irq.h | 743 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 txx9pio.h | 592 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 txx9tmr.h | 1.59 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 types.h | 487 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 uaccess.h | 22.2 KB | 11/01/2022 04:52:05 PM | rw-r--r--
📄 uasm.h | 9.18 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 unistd.h | 1.9 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 uprobes.h | 1.11 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 vdso.h | 3.72 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 vga.h | 1.26 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 vpe.h | 2.7 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📁 vr41xx | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 war.h | 7.48 KB | 01/28/2018 09:20:33 PM | rw-r--r--
📄 watch.h | 827 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📄 wbflush.h | 694 bytes | 01/28/2018 09:20:33 PM | rw-r--r--
📁 xtalk | - | 11/17/2022 06:42:20 AM | rwxr-xr-x
📄 yamon-dt.h | 1.88 KB | 01/28/2018 09:20:33 PM | rw-r--r--
Editing: atomic.h
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much more slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)    READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)  WRITE_ONCE((v)->counter, (i))

#define ATOMIC_OP(op, c_op, asm_op) \
static __inline__ void atomic_##op(int i, atomic_t * v) \
{ \
        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
                int temp; \
 \
                __asm__ __volatile__( \
                " .set arch=r4000 \n" \
                "1: ll %0, %1 # atomic_" #op " \n" \
                " " #asm_op " %0, %2 \n" \
                " sc %0, %1 \n" \
                " beqzl %0, 1b \n" \
                " .set mips0 \n" \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
                : "Ir" (i)); \
        } else if (kernel_uses_llsc) { \
                int temp; \
 \
                do { \
                        __asm__ __volatile__( \
                        " .set "MIPS_ISA_LEVEL" \n" \
                        " ll %0, %1 # atomic_" #op "\n" \
                        " " #asm_op " %0, %2 \n" \
                        " sc %0, %1 \n" \
                        " .set mips0 \n" \
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i)); \
                } while (unlikely(!temp)); \
        } else { \
                unsigned long flags; \
 \
                raw_local_irq_save(flags); \
                v->counter c_op i; \
                raw_local_irq_restore(flags); \
        } \
}

#define ATOMIC_OP_RETURN(op, c_op, asm_op) \
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
{ \
        int result; \
 \
        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
                int temp; \
 \
                __asm__ __volatile__( \
                " .set arch=r4000 \n" \
                "1: ll %1, %2 # atomic_" #op "_return \n" \
                " " #asm_op " %0, %1, %3 \n" \
                " sc %0, %2 \n" \
                " beqzl %0, 1b \n" \
                " " #asm_op " %0, %1, %3 \n" \
                " .set mips0 \n" \
                : "=&r" (result), "=&r" (temp), \
                  "+" GCC_OFF_SMALL_ASM() (v->counter) \
                : "Ir" (i)); \
        } else if (kernel_uses_llsc) { \
                int temp; \
 \
                do { \
                        __asm__ __volatile__( \
                        " .set "MIPS_ISA_LEVEL" \n" \
                        " ll %1, %2 # atomic_" #op "_return \n" \
                        " " #asm_op " %0, %1, %3 \n" \
                        " sc %0, %2 \n" \
                        " .set mips0 \n" \
                        : "=&r" (result), "=&r" (temp), \
                          "+" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i)); \
                } while (unlikely(!result)); \
 \
                result = temp; result c_op i; \
        } else { \
                unsigned long flags; \
 \
                raw_local_irq_save(flags); \
                result = v->counter; \
                result c_op i; \
                v->counter = result; \
                raw_local_irq_restore(flags); \
        } \
 \
        return result; \
}

#define ATOMIC_FETCH_OP(op, c_op, asm_op) \
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v) \
{ \
        int result; \
 \
        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
                int temp; \
 \
                __asm__ __volatile__( \
                " .set arch=r4000 \n" \
                "1: ll %1, %2 # atomic_fetch_" #op " \n" \
                " " #asm_op " %0, %1, %3 \n" \
                " sc %0, %2 \n" \
                " beqzl %0, 1b \n" \
                " move %0, %1 \n" \
                " .set mips0 \n" \
                : "=&r" (result), "=&r" (temp), \
                  "+" GCC_OFF_SMALL_ASM() (v->counter) \
                : "Ir" (i)); \
        } else if (kernel_uses_llsc) { \
                int temp; \
 \
                do { \
                        __asm__ __volatile__( \
                        " .set "MIPS_ISA_LEVEL" \n" \
                        " ll %1, %2 # atomic_fetch_" #op " \n" \
                        " " #asm_op " %0, %1, %3 \n" \
                        " sc %0, %2 \n" \
                        " .set mips0 \n" \
                        : "=&r" (result), "=&r" (temp), \
                          "+" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i)); \
                } while (unlikely(!result)); \
 \
                result = temp; \
        } else { \
                unsigned long flags; \
 \
                raw_local_irq_save(flags); \
                result = v->counter; \
                v->counter c_op i; \
                raw_local_irq_restore(flags); \
        } \
 \
        return result; \
}

#define ATOMIC_OPS(op, c_op, asm_op) \
        ATOMIC_OP(op, c_op, asm_op) \
        ATOMIC_OP_RETURN(op, c_op, asm_op) \
        ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#define atomic_add_return_relaxed   atomic_add_return_relaxed
#define atomic_sub_return_relaxed   atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed    atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed    atomic_fetch_sub_relaxed

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op) \
        ATOMIC_OP(op, c_op, asm_op) \
        ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#define atomic_fetch_and_relaxed    atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed     atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed    atomic_fetch_xor_relaxed

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater or equal than @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                " .set arch=r4000 \n"
                "1: ll %1, %2 # atomic_sub_if_positive\n"
                " subu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " sc %0, %2 \n"
                " .set noreorder \n"
                " beqzl %0, 1b \n"
                " subu %0, %1, %3 \n"
                " .set reorder \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                " .set "MIPS_ISA_LEVEL" \n"
                "1: ll %1, %2 # atomic_sub_if_positive\n"
                " subu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " sc %0, %2 \n"
                " .set noreorder \n"
                " beqz %0, 1b \n"
                " subu %0, %1, %3 \n"
                " .set reorder \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 */
#define atomic64_read(v)    READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)  WRITE_ONCE((v)->counter, (i))

#define ATOMIC64_OP(op, c_op, asm_op) \
static __inline__ void atomic64_##op(long i, atomic64_t * v) \
{ \
        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
                long temp; \
 \
                __asm__ __volatile__( \
                " .set arch=r4000 \n" \
                "1: lld %0, %1 # atomic64_" #op " \n" \
                " " #asm_op " %0, %2 \n" \
                " scd %0, %1 \n" \
                " beqzl %0, 1b \n" \
                " .set mips0 \n" \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
                : "Ir" (i)); \
        } else if (kernel_uses_llsc) { \
                long temp; \
 \
                do { \
                        __asm__ __volatile__( \
                        " .set "MIPS_ISA_LEVEL" \n" \
                        " lld %0, %1 # atomic64_" #op "\n" \
                        " " #asm_op " %0, %2 \n" \
                        " scd %0, %1 \n" \
                        " .set mips0 \n" \
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i)); \
                } while (unlikely(!temp)); \
        } else { \
                unsigned long flags; \
 \
                raw_local_irq_save(flags); \
                v->counter c_op i; \
                raw_local_irq_restore(flags); \
        } \
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op) \
static __inline__ long atomic64_##op##_return_relaxed(long i, atomic64_t * v) \
{ \
        long result; \
 \
        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
                long temp; \
 \
                __asm__ __volatile__( \
                " .set arch=r4000 \n" \
                "1: lld %1, %2 # atomic64_" #op "_return\n" \
                " " #asm_op " %0, %1, %3 \n" \
                " scd %0, %2 \n" \
                " beqzl %0, 1b \n" \
                " " #asm_op " %0, %1, %3 \n" \
                " .set mips0 \n" \
                : "=&r" (result), "=&r" (temp), \
                  "+" GCC_OFF_SMALL_ASM() (v->counter) \
                : "Ir" (i)); \
        } else if (kernel_uses_llsc) { \
                long temp; \
 \
                do { \
                        __asm__ __volatile__( \
                        " .set "MIPS_ISA_LEVEL" \n" \
                        " lld %1, %2 # atomic64_" #op "_return\n" \
                        " " #asm_op " %0, %1, %3 \n" \
                        " scd %0, %2 \n" \
                        " .set mips0 \n" \
                        : "=&r" (result), "=&r" (temp), \
                          "=" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
                        : "memory"); \
                } while (unlikely(!result)); \
 \
                result = temp; result c_op i; \
        } else { \
                unsigned long flags; \
 \
                raw_local_irq_save(flags); \
                result = v->counter; \
                result c_op i; \
                v->counter = result; \
                raw_local_irq_restore(flags); \
        } \
 \
        return result; \
}

#define ATOMIC64_FETCH_OP(op, c_op, asm_op) \
static __inline__ long atomic64_fetch_##op##_relaxed(long i, atomic64_t * v) \
{ \
        long result; \
 \
        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
                long temp; \
 \
                __asm__ __volatile__( \
                " .set arch=r4000 \n" \
                "1: lld %1, %2 # atomic64_fetch_" #op "\n" \
                " " #asm_op " %0, %1, %3 \n" \
                " scd %0, %2 \n" \
                " beqzl %0, 1b \n" \
                " move %0, %1 \n" \
                " .set mips0 \n" \
                : "=&r" (result), "=&r" (temp), \
                  "+" GCC_OFF_SMALL_ASM() (v->counter) \
                : "Ir" (i)); \
        } else if (kernel_uses_llsc) { \
                long temp; \
 \
                do { \
                        __asm__ __volatile__( \
                        " .set "MIPS_ISA_LEVEL" \n" \
                        " lld %1, %2 # atomic64_fetch_" #op "\n" \
                        " " #asm_op " %0, %1, %3 \n" \
                        " scd %0, %2 \n" \
                        " .set mips0 \n" \
                        : "=&r" (result), "=&r" (temp), \
                          "=" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
                        : "memory"); \
                } while (unlikely(!result)); \
 \
                result = temp; \
        } else { \
                unsigned long flags; \
 \
                raw_local_irq_save(flags); \
                result = v->counter; \
                v->counter c_op i; \
                raw_local_irq_restore(flags); \
        } \
 \
        return result; \
}

#define ATOMIC64_OPS(op, c_op, asm_op) \
        ATOMIC64_OP(op, c_op, asm_op) \
        ATOMIC64_OP_RETURN(op, c_op, asm_op) \
        ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#define atomic64_add_return_relaxed   atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed   atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed    atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed    atomic64_fetch_sub_relaxed

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op) \
        ATOMIC64_OP(op, c_op, asm_op) \
        ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#define atomic64_fetch_and_relaxed    atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed     atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed    atomic64_fetch_xor_relaxed

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *                            variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater or equal than @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                " .set arch=r4000 \n"
                "1: lld %1, %2 # atomic64_sub_if_positive\n"
                " dsubu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " scd %0, %2 \n"
                " .set noreorder \n"
                " beqzl %0, 1b \n"
                " dsubu %0, %1, %3 \n"
                " .set reorder \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp),
                  "=" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                " .set "MIPS_ISA_LEVEL" \n"
                "1: lld %1, %2 # atomic64_sub_if_positive\n"
                " dsubu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " scd %0, %2 \n"
                " .set noreorder \n"
                " beqz %0, 1b \n"
                " dsubu %0, %1, %3 \n"
                " .set reorder \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */
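
For orientation, here is a minimal, hypothetical usage sketch (not part of atomic.h or this kernel tree) showing how the 32-bit API declared above is typically called from kernel code; the struct and function names (conn_table, conn_get, conn_put, conn_get_unless_full) are invented for illustration.

#include <linux/atomic.h>
#include <linux/printk.h>
#include <linux/types.h>

/* Hypothetical object holding a count of live connections. */
struct conn_table {
        atomic_t active;
};

static void conn_get(struct conn_table *t)
{
        atomic_inc(&t->active);         /* expands to atomic_add(1, ...) */
}

static void conn_put(struct conn_table *t)
{
        /* atomic_dec_and_test() is true only when the count reaches zero. */
        if (atomic_dec_and_test(&t->active))
                pr_debug("last connection released\n");
}

static bool conn_get_unless_full(struct conn_table *t, int limit)
{
        /*
         * __atomic_add_unless() adds 1 unless the current value equals
         * @limit and returns the old value, so a return equal to @limit
         * means the increment was refused.
         */
        return __atomic_add_unless(&t->active, 1, limit) != limit;
}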