Results 21 - 30 of 36 for Xadduintptr (0.15 sec)
- src/internal/runtime/atomic/atomic_arm64.s

  #ifndef GOARM64_LSE
  load_store_loop:
      LDAXR   (R0), R2
      AND     R1, R2, R3
      STLXR   R3, (R0), R4
      CBNZ    R4, load_store_loop
      MOVD    R2, ret+16(FP)
      RET
  #endif

  // func Anduintptr(addr *uintptr, v uintptr) old uintptr
  TEXT ·Anduintptr(SB), NOSPLIT, $0-24
      B   ·And64(SB)

  // func Oruintptr(addr *uintptr, v uintptr) old uintptr
  TEXT ·Oruintptr(SB), NOSPLIT, $0-24

  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 9K bytes - Viewed (0)
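On arm64, the Anduintptr and Oruintptr stubs above simply branch to the 64-bit variants because uintptr is 8 bytes on that target. A minimal check of that size assumption (a sketch, not part of the excerpt):

    package main

    import (
        "fmt"
        "unsafe"
    )

    func main() {
        // On 64-bit targets such as arm64, uintptr has the same size as
        // uint64, which is why Anduintptr can tail-branch to And64.
        fmt.Println(unsafe.Sizeof(uintptr(0)) == unsafe.Sizeof(uint64(0)))
    }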
- src/internal/runtime/atomic/atomic_ppc64x.s

      MOVD    ptr+0(FP), R3
      MOVD    val+8(FP), R4
      LWSYNC
  again:
      LDAR    (R3), R6
      AND     R4, R6, R7
      STDCCC  R7, (R3)
      BNE     again
      MOVD    R6, ret+16(FP)
      RET

  // func Anduintptr(addr *uintptr, v uintptr) old uintptr
  TEXT ·Anduintptr(SB), NOSPLIT, $0-24
      JMP ·And64(SB)

  // func Oruintptr(addr *uintptr, v uintptr) old uintptr
  TEXT ·Oruintptr(SB), NOSPLIT, $0-24

  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 7.5K bytes - Viewed (0)
- src/internal/runtime/atomic/types.go

  // the new updated value.
  //
  // This operation wraps around in the usual
  // two's-complement way.
  //
  //go:nosplit
  func (u *Uintptr) Add(delta uintptr) uintptr {
      return Xadduintptr(&u.value, delta)
  }

  // Float64 is an atomically accessed float64 value.
  //
  // 8-byte aligned on all platforms, unlike a regular float64.
  //
  // A Float64 must not be copied.
  type Float64 struct {

  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 14.2K bytes - Viewed (0)
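The Uintptr.Add comment above notes that addition wraps around in two's complement. The internal runtime type is not importable, but the same wrap-around can be seen with the exported sync/atomic.Uintptr; a small sketch:

    package main

    import (
        "fmt"
        "sync/atomic"
    )

    func main() {
        var n atomic.Uintptr
        n.Store(1)

        // Adding ^uintptr(0) (all bits set, i.e. -1 modulo 2^N) wraps around
        // in the usual two's-complement way, acting as an atomic decrement.
        n.Add(^uintptr(0))

        fmt.Println(n.Load()) // 0
    }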
- src/runtime/export_test.go

  var F32to64 = f32to64
  var Fcmp64 = fcmp64
  var Fintto64 = fintto64
  var F64toint = f64toint
  var Entersyscall = entersyscall
  var Exitsyscall = exitsyscall
  var LockedOSThread = lockedOSThread
  var Xadduintptr = atomic.Xadduintptr
  var ReadRandomFailed = &readRandomFailed
  var Fastlog2 = fastlog2
  var Atoi = atoi
  var Atoi32 = atoi32
  var ParseByteCount = parseByteCount
  var Nanotime = nanotime

  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 30 17:50:53 UTC 2024 - 46.1K bytes - Viewed (0)
- src/sync/atomic/asm.s

      JMP internal∕runtime∕atomic·Cas64(SB)

  TEXT ·AddInt32(SB),NOSPLIT,$0
      JMP internal∕runtime∕atomic·Xadd(SB)

  TEXT ·AddUint32(SB),NOSPLIT,$0
      JMP internal∕runtime∕atomic·Xadd(SB)

  TEXT ·AddUintptr(SB),NOSPLIT,$0
      JMP internal∕runtime∕atomic·Xadduintptr(SB)

  TEXT ·AddInt64(SB),NOSPLIT,$0
      JMP internal∕runtime∕atomic·Xadd64(SB)

  TEXT ·AddUint64(SB),NOSPLIT,$0
      JMP internal∕runtime∕atomic·Xadd64(SB)

  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:37:29 UTC 2024 - 3K bytes - Viewed (0)
- src/sync/atomic/doc.go

  // (particularly if you target 32-bit platforms; see the bugs section).
  func AddUint64(addr *uint64, delta uint64) (new uint64)

  // AddUintptr atomically adds delta to *addr and returns the new value.
  // Consider using the more ergonomic and less error-prone [Uintptr.Add] instead.
  func AddUintptr(addr *uintptr, delta uintptr) (new uintptr)

  // AndInt32 atomically performs a bitwise AND operation on *addr using the bitmask provided as mask

  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Jun 07 21:14:51 UTC 2024 - 11.7K bytes - Viewed (0)
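The doc comment above recommends the method form over the function form. A short sketch contrasting the two (both update the value atomically and return the new value):

    package main

    import (
        "fmt"
        "sync/atomic"
    )

    func main() {
        // Function form: operates on a plain uintptr passed by address.
        var legacy uintptr
        atomic.AddUintptr(&legacy, 8)

        // Method form recommended by the doc comment: an atomic.Uintptr
        // cannot be read or written non-atomically by accident.
        var counter atomic.Uintptr
        counter.Add(8)

        fmt.Println(legacy, counter.Load()) // 8 8
    }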
- src/runtime/mheap.go

  func (b *gcBitsArena) tryAlloc(bytes uintptr) *gcBits {
      if b == nil || atomic.Loaduintptr(&b.free)+bytes > uintptr(len(b.bits)) {
          return nil
      }
      // Try to allocate from this block.
      end := atomic.Xadduintptr(&b.free, bytes)
      if end > uintptr(len(b.bits)) {
          return nil
      }
      // There was enough room.
      start := end - bytes
      return &b.bits[start]
  }

  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 22:31:00 UTC 2024 - 78K bytes - Viewed (0)
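tryAlloc above is a lock-free bump allocator: Xadduintptr reserves a byte range by advancing the free cursor, and the allocation fails if the reservation overruns the buffer. A standalone sketch of the same pattern on the exported API (bumpArena is a hypothetical stand-in for gcBitsArena, not runtime code):

    package main

    import (
        "fmt"
        "sync/atomic"
    )

    // bumpArena mimics the shape of gcBitsArena for illustration only.
    type bumpArena struct {
        free atomic.Uintptr // next free offset into bits
        bits []byte
    }

    // tryAlloc reserves n bytes by atomically advancing the free cursor,
    // returning nil if the arena does not have enough room.
    func (b *bumpArena) tryAlloc(n uintptr) []byte {
        if b == nil || b.free.Load()+n > uintptr(len(b.bits)) {
            return nil
        }
        end := b.free.Add(n) // claim [end-n, end)
        if end > uintptr(len(b.bits)) {
            return nil // another goroutine claimed the remaining space first
        }
        return b.bits[end-n : end]
    }

    func main() {
        a := &bumpArena{bits: make([]byte, 16)}
        fmt.Println(len(a.tryAlloc(8)), a.tryAlloc(16) == nil) // 8 true
    }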
- src/internal/runtime/atomic/atomic_andor_generic.go

  //go:build arm || wasm

  // Export some functions via linkname to assembly in sync/atomic.
  //
  //go:linkname And32
  //go:linkname Or32
  //go:linkname And64
  //go:linkname Or64
  //go:linkname Anduintptr
  //go:linkname Oruintptr

  package atomic

  import _ "unsafe" // For linkname

  //go:nosplit
  func And32(ptr *uint32, val uint32) uint32 {
      for {
          old := *ptr
          if Cas(ptr, old, old&val) {

  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 20:08:37 UTC 2024 - 1.2K bytes - Viewed (0)
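The truncated And32 above is a compare-and-swap retry loop: read the current value, compute old&val, and retry until the CAS installs it, finally returning the old value. The same shape on the exported API (and32 here is a hypothetical helper, not part of sync/atomic):

    package main

    import (
        "fmt"
        "sync/atomic"
    )

    // and32 atomically ANDs val into *ptr and returns the previous value,
    // using the same CAS retry loop as the generic And32 above.
    func and32(ptr *uint32, val uint32) uint32 {
        for {
            old := atomic.LoadUint32(ptr)
            if atomic.CompareAndSwapUint32(ptr, old, old&val) {
                return old
            }
        }
    }

    func main() {
        x := uint32(0b1110)
        fmt.Println(and32(&x, 0b0111), x) // 14 6
    }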
- src/sync/atomic/type.go

  // Add atomically adds delta to x and returns the new value.
  func (x *Uintptr) Add(delta uintptr) (new uintptr) { return AddUintptr(&x.v, delta) }

  // And atomically performs a bitwise AND operation on x using the bitmask
  // provided as mask and returns the old value.
  func (x *Uintptr) And(mask uintptr) (old uintptr) { return AndUintptr(&x.v, mask) }

  // Or atomically performs a bitwise OR operation on x using the bitmask

  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:37:29 UTC 2024 - 8.5K bytes - Viewed (0)
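Note the asymmetry visible in the excerpt: Add returns the new value, while And and Or return the old one. A quick demonstration (the And/Or methods on the atomic types are available from Go 1.23):

    package main

    import (
        "fmt"
        "sync/atomic"
    )

    func main() {
        var flags atomic.Uintptr
        flags.Store(0b1010)

        fmt.Println(flags.Add(1))      // 11: Add returns the new value
        fmt.Println(flags.Or(0b0100))  // 11: Or returns the old value
        fmt.Println(flags.And(0b0110)) // 15: And returns the old value
        fmt.Println(flags.Load())      // 6
    }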
- src/cmd/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go

      call, ok := right.(*ast.CallExpr)
      if !ok {
          continue
      }
      fn := typeutil.StaticCallee(pass.TypesInfo, call)
      if analysisutil.IsFunctionNamed(fn, "sync/atomic", "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr") {
          checkAtomicAddAssignment(pass, n.Lhs[i], call)
      }
      }
  })
  return nil, nil
  }

  // checkAtomicAddAssignment walks the atomic.Add* method calls checking

  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Oct 19 23:33:33 UTC 2023 - 2.3K bytes - Viewed (0)
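This vet analyzer resolves each call on the right-hand side of an assignment and, for the sync/atomic Add* functions, checks whether the result is assigned straight back to the variable that was passed by address; that pattern is reported because the plain store re-introduces a race. A sketch of code it would flag, and the fix:

    package main

    import "sync/atomic"

    var counter uint64

    func flagged() {
        // Reported by the atomic analyzer: the assignment back to counter is
        // an ordinary (non-atomic) store that can race with other goroutines
        // updating counter atomically.
        counter = atomic.AddUint64(&counter, 1)
    }

    func fixed() {
        // AddUint64 already stores the new value into *addr atomically;
        // no assignment of the result is needed.
        atomic.AddUint64(&counter, 1)
    }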