- Sort: Score
- Results per page: 10
- Languages: All
Results 11 - 20 of 444 for zeroing (0.15 sec)
-
src/cmd/compile/internal/walk/range.go
fn := typecheck.LookupRuntime("mapclear", t.Key(), t.Elem()) n := mkcallstmt1(fn, rtyp, m) return walkStmt(typecheck.Stmt(n)) } // Lower n into runtime·memclr if possible, for // fast zeroing of slices and arrays (issue 5373). // Look for instances of // // for i := range a { // a[i] = zero // } // // in which the evaluation of a is side-effect-free. //
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Sep 20 14:52:33 UTC 2023 - 17.6K bytes - Viewed (0) -
src/runtime/mkduff.go
// Copyright 2015 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. //go:build ignore // runtime·duffzero is a Duff's device for zeroing memory. // The compiler jumps to computed addresses within // the routine to zero chunks of memory. // Do not change duffzero without also // changing the uses in cmd/compile/internal/*/*.go.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Nov 21 19:04:21 UTC 2023 - 8K bytes - Viewed (0) -
src/sync/poolqueue.go
break } } // We now own slot. val := *(*any)(unsafe.Pointer(slot)) if val == dequeueNil(nil) { val = nil } // Tell pushHead that we're done with this slot. Zeroing the // slot is also important so we don't leave behind references // that could keep this object live longer than necessary. // // We write to val first and then publish that we're done with
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Feb 26 18:12:29 UTC 2024 - 8.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/LOONG64.rules
(MOVVstore [0] ptr (MOVVconst [0]) mem))) // medium zeroing uses a duff device // 8, and 128 are magic constants, see runtime/mkduff.go (Zero [s] {t} ptr mem) && s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice => (DUFFZERO [8 * (128 - s/8)] ptr mem) // large or unaligned zeroing uses a loop (Zero [s] {t} ptr mem)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Nov 21 19:26:25 UTC 2023 - 31.8K bytes - Viewed (0) -
src/cmd/compile/internal/walk/assign.go
} else { n.(*ir.AssignStmt).X = left } as := n.(*ir.AssignStmt) if oaslit(as, init) { return ir.NewBlockStmt(as.Pos(), nil) } if as.Y == nil { // TODO(austin): Check all "implicit zeroing" return as } if !base.Flag.Cfg.Instrumenting && ir.IsZero(as.Y) { return as } switch as.Y.Op() { default: as.Y = walkExpr(as.Y, init) case ir.ORECV:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 08 17:09:06 UTC 2024 - 20.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssagen/pgen.go
// Sort pointer-typed before non-pointer types. // Keeps the stack's GC bitmap compact. ap := a.Type().HasPointers() bp := b.Type().HasPointers() if ap != bp { return ap } // Group variables that need zeroing, so we can efficiently zero // them altogether. ap = a.Needzero() bp = b.Needzero() if ap != bp { return ap } // Sort variables in descending alignment order, so we can optimally
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 15:44:14 UTC 2024 - 13.1K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/RISCV64.rules
(MOVDstore ptr (MOVDconst [0]) mem)))) // Medium 8-aligned zeroing uses a Duff's device // 8 and 128 are magic constants, see runtime/mkduff.go (Zero [s] {t} ptr mem) && s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice => (DUFFZERO [8 * (128 - s/8)] ptr mem) // Generic zeroing uses a loop (Zero [s] {t} ptr mem) => (LoweredZero [t.Alignment()] ptr
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Mar 07 14:57:07 UTC 2024 - 40.3K bytes - Viewed (0) -
src/runtime/arena.go
// not Linux decides to back this memory with transparent huge // pages. There's latency involved in this zeroing, but the hugepage // gains are almost always worth it. Note: it's important that we // clear even if it's freshly mapped and we know there's no point // to zeroing as *that* is the critical signal to use huge pages. memclrNoHeapPointers(unsafe.Pointer(s.base()), s.elemsize) s.needzero = 0
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 08 17:44:56 UTC 2024 - 37.9K bytes - Viewed (0) -
src/runtime/malloc.go
// already zeroed. Otherwise if needzero is true, objects are zeroed as // they are allocated. There are various benefits to delaying zeroing // this way: // // 1. Stack frame allocation can avoid zeroing altogether. // // 2. It exhibits better temporal locality, since the program is // probably about to write to the memory. // // 3. We don't zero pages that never get reused.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 17:58:53 UTC 2024 - 59.6K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/block.go
b.Kind = kind b.ResetControls() b.Aux = nil b.AuxInt = 0 b.Controls[0] = v b.Controls[1] = w v.Uses++ w.Uses++ } // truncateValues truncates b.Values at the ith element, zeroing subsequent elements. // The values in b.Values after i must already have had their args reset, // to maintain correct value uses counts. func (b *Block) truncateValues(i int) { tail := b.Values[i:]
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 15:44:14 UTC 2024 - 12.2K bytes - Viewed (0)