- Sort Score
- Results 10 results
- Languages All
Results 1 - 10 of 336 for zeroing (0.15 sec)
-
src/internal/bytealg/bytealg.go
h -= pow * uint32(s[i+n]) if h == hashss && string(s[i:i+n]) == string(sep) { return i } } return -1 } // MakeNoZero makes a slice of length n and capacity of at least n Bytes // without zeroing the bytes (including the bytes between len and cap). // It is the caller's responsibility to ensure uninitialized bytes // do not leak to the end user.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Feb 19 19:51:15 UTC 2024 - 3.1K bytes - Viewed (0) -
src/runtime/HACKING.md
before they become visible as GC roots. Otherwise, the GC may observe stale heap pointers. See "Zero-initialization versus zeroing". Zero-initialization versus zeroing ================================== There are two types of zeroing in the runtime, depending on whether the memory is already initialized to a type-safe state. If memory is not in a type-safe state, meaning it potentially contains
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 13.9K bytes - Viewed (0) -
pkg/slices/slices.go
func FilterInPlace[E any](s []E, f func(E) bool) []E { n := 0 for _, val := range s { if f(val) { s[n] = val n++ } } // If those elements contain pointers you might consider zeroing those elements // so that objects they reference can be garbage collected." var empty E for i := n; i < len(s); i++ { s[i] = empty } s = s[:n] return s }
Registered: Fri Jun 14 15:00:06 UTC 2024 - Last Modified: Wed May 15 06:28:11 UTC 2024 - 7.9K bytes - Viewed (0) -
src/runtime/slice.go
var to unsafe.Pointer if !et.Pointers() { to = mallocgc(tomem, nil, false) if copymem < tomem { memclrNoHeapPointers(add(to, copymem), tomem-copymem) } } else { // Note: can't use rawmem (which avoids zeroing of memory), because then GC can scan uninitialized memory. to = mallocgc(tomem, et, true) if copymem > 0 && writeBarrier.enabled { // Only shade the pointers in old.array since we know the destination slice to
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 16:25:21 UTC 2024 - 12.2K bytes - Viewed (0) -
src/vendor/golang.org/x/crypto/sha3/sha3.go
func (d *state) BlockSize() int { return d.rate } // Size returns the output size of the hash function in bytes. func (d *state) Size() int { return d.outputLen } // Reset clears the internal state by zeroing the sponge state and // the buffer indexes, and setting Sponge.state to absorbing. func (d *state) Reset() { // Zero the permutation's state. for i := range d.a { d.a[i] = 0 } d.state = spongeAbsorbing
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Jun 04 16:19:04 UTC 2024 - 5.4K bytes - Viewed (0) -
src/runtime/mbarrier.go
if goexperiment.CgoCheck2 { cgoCheckMemmove2(typ, dst, src, 0, typ.Size_) } } // wbZero performs the write barrier operations necessary before // zeroing a region of memory at address dst of type typ. // Does not actually do the zeroing. // //go:nowritebarrierrec //go:nosplit func wbZero(typ *_type, dst unsafe.Pointer) { // This always copies a full value of type typ so it's safe
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 17:58:53 UTC 2024 - 15.7K bytes - Viewed (0) -
src/sync/poolqueue.go
break } } // We now own slot. val := *(*any)(unsafe.Pointer(slot)) if val == dequeueNil(nil) { val = nil } // Tell pushHead that we're done with this slot. Zeroing the // slot is also important so we don't leave behind references // that could keep this object live longer than necessary. // // We write to val first and then publish that we're done with
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Feb 26 18:12:29 UTC 2024 - 8.3K bytes - Viewed (0) -
src/cmd/compile/internal/walk/assign.go
} else { n.(*ir.AssignStmt).X = left } as := n.(*ir.AssignStmt) if oaslit(as, init) { return ir.NewBlockStmt(as.Pos(), nil) } if as.Y == nil { // TODO(austin): Check all "implicit zeroing" return as } if !base.Flag.Cfg.Instrumenting && ir.IsZero(as.Y) { return as } switch as.Y.Op() { default: as.Y = walkExpr(as.Y, init) case ir.ORECV:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 08 17:09:06 UTC 2024 - 20.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssagen/pgen.go
// Sort pointer-typed before non-pointer types. // Keeps the stack's GC bitmap compact. ap := a.Type().HasPointers() bp := b.Type().HasPointers() if ap != bp { return ap } // Group variables that need zeroing, so we can efficiently zero // them altogether. ap = a.Needzero() bp = b.Needzero() if ap != bp { return ap } // Sort variables in descending alignment order, so we can optimally
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 15:44:14 UTC 2024 - 13.1K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/RISCV64.rules
(MOVDstore ptr (MOVDconst [0]) mem)))) // Medium 8-aligned zeroing uses a Duff's device // 8 and 128 are magic constants, see runtime/mkduff.go (Zero [s] {t} ptr mem) && s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice => (DUFFZERO [8 * (128 - s/8)] ptr mem) // Generic zeroing uses a loop (Zero [s] {t} ptr mem) => (LoweredZero [t.Alignment()] ptr
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Mar 07 14:57:07 UTC 2024 - 40.3K bytes - Viewed (0)