- Sort by: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 2,114 for Xadd (0.04 sec)
-
src/cmd/link/internal/arm64/asm.go
if label != 0 { xadd = ldr.SymValue(rs) + xadd - ldr.SymValue(label) rs = label } if xadd != signext24(xadd) { ldr.Errorf(s, "internal error: relocation addend overflow: %s+0x%x", ldr.SymName(rs), xadd) } } if rt == objabi.R_CALLARM64 && xadd != 0 { label := ldr.Lookup(offsetLabelName(ldr, rs, xadd), ldr.SymVersion(rs)) if label != 0 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Jan 30 20:09:45 UTC 2024 - 47K bytes - Viewed (0) -
src/cmd/link/internal/loong64/asm.go
out.Write64(uint64(r.Xadd)) case 8: out.Write64(uint64(sectoff)) out.Write64(uint64(elf.R_LARCH_64) | uint64(elfsym)<<32) out.Write64(uint64(r.Xadd)) default: return false } case objabi.R_LOONG64_TLS_LE_LO: out.Write64(uint64(sectoff)) out.Write64(uint64(elf.R_LARCH_TLS_LE_LO12) | uint64(elfsym)<<32) out.Write64(uint64(r.Xadd)) case objabi.R_LOONG64_TLS_LE_HI:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Feb 27 17:26:07 UTC 2024 - 7.5K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_mipsx.s
MOVW R1, ret+4(FP) RET TEXT ·Load8(SB),NOSPLIT,$0-5 MOVW ptr+0(FP), R1 SYNC MOVB 0(R1), R1 SYNC MOVB R1, ret+4(FP) RET // uint32 Xadd(uint32 volatile *val, int32 delta) // Atomically: // *val += delta; // return *val; TEXT ·Xadd(SB),NOSPLIT,$0-12 MOVW ptr+0(FP), R2 MOVW delta+4(FP), R3 SYNC try_xadd: LL (R2), R1 // R1 = *R2 ADDU R1, R3, R4 MOVW R4, R1 SC R4, (R2) // *R2 = R4
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat May 11 21:29:34 UTC 2024 - 4.9K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_386.s
JMP ·Store64(SB) TEXT ·Storeuintptr(SB), NOSPLIT, $0-8 JMP ·Store(SB) TEXT ·Xadduintptr(SB), NOSPLIT, $0-12 JMP ·Xadd(SB) TEXT ·Loadint32(SB), NOSPLIT, $0-8 JMP ·Load(SB) TEXT ·Loadint64(SB), NOSPLIT, $0-12 JMP ·Load64(SB) TEXT ·Xaddint32(SB), NOSPLIT, $0-12 JMP ·Xadd(SB) TEXT ·Xaddint64(SB), NOSPLIT, $0-20 JMP ·Xadd64(SB) // bool ·Cas64(uint64 *val, uint64 old, uint64 new) // Atomically:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 6.5K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_amd64.s
JMP ·Cas64(SB) TEXT ·Casuintptr(SB), NOSPLIT, $0-25 JMP ·Cas64(SB) TEXT ·CasRel(SB), NOSPLIT, $0-17 JMP ·Cas(SB) // uint32 Xadd(uint32 volatile *val, int32 delta) // Atomically: // *val += delta; // return *val; TEXT ·Xadd(SB), NOSPLIT, $0-20 MOVQ ptr+0(FP), BX MOVL delta+8(FP), AX MOVL AX, CX LOCK XADDL AX, 0(BX) ADDL CX, AX MOVL AX, ret+16(FP) RET
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 5.2K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_mips64x.s
JMP ·Load64(SB) TEXT ·Xaddint32(SB), NOSPLIT, $0-20 JMP ·Xadd(SB) TEXT ·Xaddint64(SB), NOSPLIT, $0-24 JMP ·Xadd64(SB) // bool casp(void **val, void *old, void *new) // Atomically: // if(*val == old){ // *val = new; // return 1; // } else // return 0; TEXT ·Casp1(SB), NOSPLIT, $0-25 JMP ·Cas64(SB) // uint32 xadd(uint32 volatile *ptr, int32 delta) // Atomically: // *val += delta;
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat May 11 21:29:34 UTC 2024 - 7.2K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_wasm.go
// //go:linkname Load //go:linkname Loadp //go:linkname Load64 //go:linkname Loadint32 //go:linkname Loadint64 //go:linkname Loaduintptr //go:linkname LoadAcquintptr //go:linkname Xadd //go:linkname Xaddint32 //go:linkname Xaddint64 //go:linkname Xadd64 //go:linkname Xadduintptr //go:linkname Xchg //go:linkname Xchg64 //go:linkname Xchgint32 //go:linkname Xchgint64 //go:linkname Xchguintptr
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 19:57:43 UTC 2024 - 5.4K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_arm.go
} func addrLock(addr *uint64) *spinlock { return &locktab[(uintptr(unsafe.Pointer(addr))>>3)%uintptr(len(locktab))].l } // Atomic add and return new value. // //go:nosplit func Xadd(val *uint32, delta int32) uint32 { for { oval := *val nval := oval + uint32(delta) if Cas(val, oval, nval) { return nval } } } //go:noescape
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 19:57:43 UTC 2024 - 4.8K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_riscv64.s
// return *val; // func Xadd(ptr *uint32, delta int32) uint32 TEXT ·Xadd(SB), NOSPLIT, $0-20 MOV ptr+0(FP), A0 MOVW delta+8(FP), A1 AMOADDW A1, (A0), A2 ADD A2,A1,A0 MOVW A0, ret+16(FP) RET // func Xadd64(ptr *uint64, delta int64) uint64 TEXT ·Xadd64(SB), NOSPLIT, $0-24 MOV ptr+0(FP), A0 MOV delta+8(FP), A1 AMOADDD A1, (A0), A2 ADD A2, A1, A0 MOV A0, ret+16(FP) RET
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 7K bytes - Viewed (0) -
src/cmd/link/internal/mips/asm.go
// set up addend for eventual relocation via outer symbol. _, off := ld.FoldSubSymbolOffset(ldr, rs) xadd := r.Add() + off return applyrel(target.Arch, ldr, r.Type(), r.Off(), s, val, xadd), 1, true case objabi.R_ADDRMIPSTLS, objabi.R_CALLMIPS, objabi.R_JMPMIPS: return applyrel(target.Arch, ldr, r.Type(), r.Off(), s, val, r.Add()), 1, true } } const isOk = true const noExtReloc = 0 switch rt := r.Type(); rt {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Aug 23 05:58:20 UTC 2023 - 5.5K bytes - Viewed (0)