- Sort: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 19 for Loaduintptr (0.22 sec)
-
src/runtime/lock_sema.go
const ( locked uintptr = 1 active_spin = 4 active_spin_cnt = 30 passive_spin = 1 ) func mutexContended(l *mutex) bool { return atomic.Loaduintptr(&l.key) > locked } func lock(l *mutex) { lockWithRank(l, getLockRank(l)) } func lock2(l *mutex) { gp := getg() if gp.m.locks < 0 { throw("runtime·lock: lock count") } gp.m.locks++
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 30 17:57:37 UTC 2024 - 6.8K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_wasm.go
// Export some functions via linkname to assembly in sync/atomic. // //go:linkname Load //go:linkname Loadp //go:linkname Load64 //go:linkname Loadint32 //go:linkname Loadint64 //go:linkname Loaduintptr //go:linkname LoadAcquintptr //go:linkname Xadd //go:linkname Xaddint32 //go:linkname Xaddint64 //go:linkname Xadd64 //go:linkname Xadduintptr //go:linkname Xchg //go:linkname Xchg64 //go:linkname Xchgint32
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 19:57:43 UTC 2024 - 5.4K bytes - Viewed (0) -
src/sync/atomic/asm.s
JMP internal∕runtime∕atomic·Load(SB) TEXT ·LoadInt64(SB),NOSPLIT,$0 JMP internal∕runtime∕atomic·Load64(SB) TEXT ·LoadUint64(SB),NOSPLIT,$0 JMP internal∕runtime∕atomic·Load64(SB) TEXT ·LoadUintptr(SB),NOSPLIT,$0 JMP internal∕runtime∕atomic·Loaduintptr(SB) TEXT ·LoadPointer(SB),NOSPLIT,$0 JMP internal∕runtime∕atomic·Loadp(SB) TEXT ·StoreInt32(SB),NOSPLIT,$0 JMP internal∕runtime∕atomic·Store(SB)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:37:29 UTC 2024 - 3K bytes - Viewed (0) -
src/sync/atomic/doc.go
// (particularly if you target 32-bit platforms; see the bugs section). func LoadUint64(addr *uint64) (val uint64) // LoadUintptr atomically loads *addr. // Consider using the more ergonomic and less error-prone [Uintptr.Load] instead. func LoadUintptr(addr *uintptr) (val uintptr) // LoadPointer atomically loads *addr. // Consider using the more ergonomic and less error-prone [Pointer.Load] instead.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Jun 07 21:14:51 UTC 2024 - 11.7K bytes - Viewed (0) -
src/runtime/signal_unix.go
// handle a particular signal (e.g., signal occurred on a non-Go thread). // See sigfwdgo for more information on when the signals are forwarded. // // This is read by the signal handler; accesses should use // atomic.Loaduintptr and atomic.Storeuintptr. var fwdSig [_NSIG]uintptr // handlingSig is indexed by signal number and is non-zero if we are // currently handling the signal. Or, to put it another way, whether
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 10 16:04:54 UTC 2024 - 45K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_mipsx.s
JMP ·Cas(SB) TEXT ·Casint64(SB),NOSPLIT,$0-21 JMP ·Cas64(SB) TEXT ·Casuintptr(SB),NOSPLIT,$0-13 JMP ·Cas(SB) TEXT ·CasRel(SB),NOSPLIT,$0-13 JMP ·Cas(SB) TEXT ·Loaduintptr(SB),NOSPLIT,$0-8 JMP ·Load(SB) TEXT ·Loaduint(SB),NOSPLIT,$0-8 JMP ·Load(SB) TEXT ·Loadp(SB),NOSPLIT,$-0-8 JMP ·Load(SB) TEXT ·Storeint32(SB),NOSPLIT,$0-8 JMP ·Store(SB)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat May 11 21:29:34 UTC 2024 - 4.9K bytes - Viewed (0) -
src/cmd/compile/internal/test/inl_test.go
// internal/runtime/atomic.Loaduintptr is only intrinsified on these platforms. want["runtime"] = append(want["runtime"], "traceAcquire") } if bits.UintSize == 64 { // mix is only defined on 64-bit architectures
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Apr 09 04:07:57 UTC 2024 - 10.7K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_mips64x.s
JMP ·Cas(SB) TEXT ·Casint64(SB), NOSPLIT, $0-25 JMP ·Cas64(SB) TEXT ·Casuintptr(SB), NOSPLIT, $0-25 JMP ·Cas64(SB) TEXT ·CasRel(SB), NOSPLIT, $0-17 JMP ·Cas(SB) TEXT ·Loaduintptr(SB), NOSPLIT|NOFRAME, $0-16 JMP ·Load64(SB) TEXT ·Loaduint(SB), NOSPLIT|NOFRAME, $0-16 JMP ·Load64(SB) TEXT ·Storeint32(SB), NOSPLIT, $0-12 JMP ·Store(SB) TEXT ·Storeint64(SB), NOSPLIT, $0-16
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat May 11 21:29:34 UTC 2024 - 7.2K bytes - Viewed (0) -
src/sync/atomic/type.go
// A Uintptr is an atomic uintptr. The zero value is zero. type Uintptr struct { _ noCopy v uintptr } // Load atomically loads and returns the value stored in x. func (x *Uintptr) Load() uintptr { return LoadUintptr(&x.v) } // Store atomically stores val into x. func (x *Uintptr) Store(val uintptr) { StoreUintptr(&x.v, val) } // Swap atomically stores new into x and returns the previous value.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:37:29 UTC 2024 - 8.5K bytes - Viewed (0) -
src/sync/pool.go
return x } } // Try the victim cache. We do this after attempting to steal // from all primary caches because we want objects in the // victim cache to age out if at all possible. size = atomic.LoadUintptr(&p.victimSize) if uintptr(pid) >= size { return nil } locals = p.victim l := indexLocal(locals, pid) if x := l.private; x != nil { l.private = nil return x }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Jun 07 21:14:51 UTC 2024 - 9.4K bytes - Viewed (0)