- Sort by: Score
- Results per page: 10
- Languages: All
Results 21 - 30 of 814 for Xadd64 (0.34 sec)
-
src/internal/runtime/atomic/bench_test.go
for pb.Next() { atomic.Xadd(ptr, 1) } }) } func BenchmarkXadd64(b *testing.B) { var x uint64 ptr := &x b.RunParallel(func(pb *testing.PB) { for pb.Next() { atomic.Xadd64(ptr, 1) } }) } func BenchmarkCas(b *testing.B) { var x uint32 x = 1 ptr := &x b.RunParallel(func(pb *testing.PB) { for pb.Next() { atomic.Cas(ptr, 1, 0)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 3.2K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_test.go
shouldPanic(t, "Load64", func() { atomic.Load64(up64) }) shouldPanic(t, "Loadint64", func() { atomic.Loadint64(p64) }) shouldPanic(t, "Store64", func() { atomic.Store64(up64, 0) }) shouldPanic(t, "Xadd64", func() { atomic.Xadd64(up64, 1) }) shouldPanic(t, "Xchg64", func() { atomic.Xchg64(up64, 1) }) shouldPanic(t, "Cas64", func() { atomic.Cas64(up64, 1, 2) }) } func TestAnd8(t *testing.T) { // Basic sanity check.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 8.5K bytes - Viewed (0) -
src/runtime/runtime1.go
} atomic.Store64(&test_z64, (1<<40)+1) if atomic.Load64(&test_z64) != (1<<40)+1 { throw("store64 failed") } if atomic.Xadd64(&test_z64, (1<<40)+1) != (2<<40)+2 { throw("xadd64 failed") } if atomic.Load64(&test_z64) != (2<<40)+2 { throw("xadd64 failed") } if atomic.Xchg64(&test_z64, (3<<40)+3) != (2<<40)+2 { throw("xchg64 failed") } if atomic.Load64(&test_z64) != (3<<40)+3 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 30 17:52:17 UTC 2024 - 19.3K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_arm.go
//go:noescape func LoadAcquintptr(ptr *uintptr) uintptr //go:noescape func Cas64(addr *uint64, old, new uint64) bool //go:noescape func CasRel(addr *uint32, old, new uint32) bool //go:noescape func Xadd64(addr *uint64, delta int64) uint64 //go:noescape func Xchg64(addr *uint64, v uint64) uint64 //go:noescape func Load64(addr *uint64) uint64 //go:noescape func Store8(addr *uint8, v uint8)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 19:57:43 UTC 2024 - 4.8K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_mipsx.s
TEXT ·Loadint32(SB),NOSPLIT,$0-8 JMP ·Load(SB) TEXT ·Loadint64(SB),NOSPLIT,$0-12 JMP ·Load64(SB) TEXT ·Xaddint32(SB),NOSPLIT,$0-12 JMP ·Xadd(SB) TEXT ·Xaddint64(SB),NOSPLIT,$0-20 JMP ·Xadd64(SB) TEXT ·Casp1(SB),NOSPLIT,$0-13 JMP ·Cas(SB) TEXT ·Xchgint32(SB),NOSPLIT,$0-12 JMP ·Xchg(SB) TEXT ·Xchgint64(SB),NOSPLIT,$0-20 JMP ·Xchg64(SB) TEXT ·Xchguintptr(SB),NOSPLIT,$0-12
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat May 11 21:29:34 UTC 2024 - 4.9K bytes - Viewed (0) -
src/runtime/arena.go
stats := memstats.heapStats.acquire() atomic.Xaddint64(&stats.committed, -int64(s.npages*pageSize)) atomic.Xaddint64(&stats.inHeap, -int64(s.npages*pageSize)) atomic.Xadd64(&stats.largeFreeCount, 1) atomic.Xadd64(&stats.largeFree, int64(s.elemsize)) memstats.heapStats.release() // This counts as a free, so update heapLive. gcController.update(-int64(s.elemsize), 0)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 08 17:44:56 UTC 2024 - 37.9K bytes - Viewed (0) -
src/runtime/mgcsweep.go
// value. If it does so, then metrics that subtract object footprint // from inHeap might overflow. See #67019. stats := memstats.heapStats.acquire() atomic.Xadd64(&stats.largeFreeCount, 1) atomic.Xadd64(&stats.largeFree, int64(size)) memstats.heapStats.release() // Count the free in the inconsistent, internal stats. gcController.totalFree.Add(int64(size))
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 08 17:52:18 UTC 2024 - 32.9K bytes - Viewed (0) -
src/runtime/mgcwork.go
} if w.bytesMarked != 0 { // dispose happens relatively infrequently. If this // atomic becomes a problem, we should first try to // dispose less and if necessary aggregate in a per-P // counter. atomic.Xadd64(&work.bytesMarked, int64(w.bytesMarked)) w.bytesMarked = 0 } if w.heapScanWork != 0 { gcController.heapScanWork.Add(w.heapScanWork) w.heapScanWork = 0 } }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 12.9K bytes - Viewed (0) -
src/internal/runtime/atomic/types.go
// the new updated value. // // This operation wraps around in the usual // two's-complement way. // //go:nosplit func (u *Uint64) Add(delta int64) uint64 { return Xadd64(&u.value, delta) } // Uintptr is an atomically accessed uintptr value. // // A Uintptr must not be copied. type Uintptr struct { noCopy noCopy value uintptr }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 14.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssagen/ssa.go
alias("sync/atomic", "AddInt64", "internal/runtime/atomic", "Xadd64", all...) alias("sync/atomic", "AddUint32", "internal/runtime/atomic", "Xadd", all...) alias("sync/atomic", "AddUint64", "internal/runtime/atomic", "Xadd64", all...) alias("sync/atomic", "AddUintptr", "internal/runtime/atomic", "Xadd", p4...) alias("sync/atomic", "AddUintptr", "internal/runtime/atomic", "Xadd64", p8...)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Jun 10 19:44:43 UTC 2024 - 284.9K bytes - Viewed (0)