- Sort by: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 12 for StorepNoWB (0.17 sec)
-
src/internal/runtime/atomic/atomic_386.go
//go:noescape func StoreRel(ptr *uint32, val uint32) //go:noescape func StoreReluintptr(ptr *uintptr, val uintptr) // NO go:noescape annotation; see atomic_pointer.go.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 19:57:43 UTC 2024 - 2.2K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_mips64x.go
//go:noescape func Store(ptr *uint32, val uint32) //go:noescape func Store8(ptr *uint8, val uint8) //go:noescape func Store64(ptr *uint64, val uint64) // NO go:noescape annotation; see atomic_pointer.go. func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer) //go:noescape func StoreRel(ptr *uint32, val uint32) //go:noescape func StoreRel64(ptr *uint64, val uint64) //go:noescape
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat May 11 21:29:34 UTC 2024 - 2.1K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_wasm.go
*ptr = val } //go:nosplit //go:noinline func Store64(ptr *uint64, val uint64) { *ptr = val } // StorepNoWB performs *ptr = val atomically and without a write // barrier. // // NO go:noescape annotation; see atomic_pointer.go. func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer) //go:nosplit //go:noinline func Casint32(ptr *int32, old, new int32) bool { if *ptr == old {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 19:57:43 UTC 2024 - 5.4K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_mipsx.go
//go:noescape func Store(ptr *uint32, val uint32) //go:noescape func Store8(ptr *uint8, val uint8) // NO go:noescape annotation; see atomic_pointer.go. func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer) //go:noescape func StoreRel(ptr *uint32, val uint32) //go:noescape func StoreReluintptr(ptr *uintptr, val uintptr) //go:noescape
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 20:08:37 UTC 2024 - 3.2K bytes - Viewed (0) -
src/runtime/atomic_pointer.go
func atomicstorep(ptr unsafe.Pointer, new unsafe.Pointer) { if writeBarrier.enabled { atomicwb((*unsafe.Pointer)(ptr), new) } if goexperiment.CgoCheck2 { cgoCheckPtrWrite((*unsafe.Pointer)(ptr), new) } atomic.StorepNoWB(noescape(ptr), new) } // atomic_storePointer is the implementation of runtime/internal/UnsafePointer.Store // (like StoreNoWB but with the write barrier). // //go:nosplit
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 16:25:21 UTC 2024 - 4K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_arm.go
} } } //go:nosplit func Xchguintptr(addr *uintptr, v uintptr) uintptr { return uintptr(Xchg((*uint32)(unsafe.Pointer(addr)), uint32(v))) } // Not noescape -- it installs a pointer to addr. func StorepNoWB(addr unsafe.Pointer, v unsafe.Pointer) //go:noescape func Store(addr *uint32, v uint32) //go:noescape func StoreRel(addr *uint32, v uint32) //go:noescape func StoreReluintptr(addr *uintptr, v uintptr)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 19:57:43 UTC 2024 - 4.8K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_mipsx.s
JMP ·Cas(SB) TEXT ·Xchgint32(SB),NOSPLIT,$0-12 JMP ·Xchg(SB) TEXT ·Xchgint64(SB),NOSPLIT,$0-20 JMP ·Xchg64(SB) TEXT ·Xchguintptr(SB),NOSPLIT,$0-12 JMP ·Xchg(SB) TEXT ·StorepNoWB(SB),NOSPLIT,$0-8 JMP ·Store(SB) TEXT ·StoreRel(SB),NOSPLIT,$0-8 JMP ·Store(SB) TEXT ·StoreReluintptr(SB),NOSPLIT,$0-8 JMP ·Store(SB) // void Or8(byte volatile*, byte);
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat May 11 21:29:34 UTC 2024 - 4.9K bytes - Viewed (0) -
src/internal/runtime/atomic/atomic_mips64x.s
SYNC RET TEXT ·Xchgint32(SB), NOSPLIT, $0-20 JMP ·Xchg(SB) TEXT ·Xchgint64(SB), NOSPLIT, $0-24 JMP ·Xchg64(SB) TEXT ·Xchguintptr(SB), NOSPLIT, $0-24 JMP ·Xchg64(SB) TEXT ·StorepNoWB(SB), NOSPLIT, $0-16 JMP ·Store64(SB) TEXT ·StoreRel(SB), NOSPLIT, $0-12 JMP ·Store(SB) TEXT ·StoreRel64(SB), NOSPLIT, $0-16 JMP ·Store64(SB) TEXT ·StoreReluintptr(SB), NOSPLIT, $0-16
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat May 11 21:29:34 UTC 2024 - 7.2K bytes - Viewed (0) -
src/runtime/iface.go
} if m2 == nil { // Use atomic write here so if a reader sees m, it also // sees the correctly initialized fields of m. // NoWB is ok because m is not in heap memory. // *p = m atomic.StorepNoWB(unsafe.Pointer(p), unsafe.Pointer(m)) t.count++ return } h += i h &= mask } } // itabInit fills in the m.Fun array with all the code pointers for
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 17:58:53 UTC 2024 - 22.5K bytes - Viewed (0) -
src/runtime/malloc.go
} if h.arenasHugePages { sysHugePage(unsafe.Pointer(l2), unsafe.Sizeof(*l2)) } else { sysNoHugePage(unsafe.Pointer(l2), unsafe.Sizeof(*l2)) } atomic.StorepNoWB(unsafe.Pointer(&h.arenas[ri.l1()]), unsafe.Pointer(l2)) } if l2[ri.l2()] != nil { throw("arena already initialized") } var r *heapArena
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 17:58:53 UTC 2024 - 59.6K bytes - Viewed (0)