Results 1 - 10 of 14 for Needzero (0.16 sec)
src/cmd/compile/internal/ssagen/pgen.go
ap := a.Type().HasPointers()
bp := b.Type().HasPointers()
if ap != bp {
	return ap
}

// Group variables that need zeroing, so we can efficiently zero
// them altogether.
ap = a.Needzero()
bp = b.Needzero()
if ap != bp {
	return ap
}

// Sort variables in descending alignment order, so we can optimally
// pack variables into the frame.
if a.Type().Alignment() != b.Type().Alignment() {
Last Modified: Wed May 15 15:44:14 UTC 2024 - 13.1K bytes
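This is the stack-frame layout comparator in the compiler. A minimal sketch of the same three-key ordering, using a hypothetical frameVar type in place of the compiler's *ir.Name:

package main

import (
	"fmt"
	"sort"
)

// frameVar is a hypothetical stand-in for the compiler's *ir.Name,
// modeling only the fields the ordering above consults.
type frameVar struct {
	name        string
	hasPointers bool
	needZero    bool
	alignment   int64
}

// less mirrors the snippet: pointer-containing variables first, then
// variables that need zeroing (so they can be cleared as one contiguous
// block), then descending alignment for tight packing.
func less(a, b frameVar) bool {
	if a.hasPointers != b.hasPointers {
		return a.hasPointers
	}
	if a.needZero != b.needZero {
		return a.needZero
	}
	return a.alignment > b.alignment
}

func main() {
	vars := []frameVar{
		{"x", false, false, 8},
		{"p", true, true, 8},
		{"buf", false, true, 16},
	}
	sort.Slice(vars, func(i, j int) bool { return less(vars[i], vars[j]) })
	fmt.Println(vars) // p first (has pointers), then buf (needs zero), then x
}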
src/runtime/malloc.go
//
// Allocating and freeing a large object uses the mheap
// directly, bypassing the mcache and mcentral.
//
// If mspan.needzero is false, then free object slots in the mspan are
// already zeroed. Otherwise if needzero is true, objects are zeroed as
// they are allocated. There are various benefits to delaying zeroing
// this way:
//
//	1. Stack frame allocation can avoid zeroing altogether.
Last Modified: Wed May 29 17:58:53 UTC 2024 - 59.6K bytes
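A toy sketch (not the runtime's code) of the needzero protocol the comment describes: zeroing is only paid for when a span's free slots may hold stale data:

package main

import "fmt"

// toySpan models the invariant from the comment: if needZero is false,
// every free slot is already zero; if true, slots must be cleared as
// they are handed out.
type toySpan struct {
	slots    [][8]byte
	free     []int
	needZero bool
}

func (s *toySpan) alloc() *[8]byte {
	i := s.free[len(s.free)-1]
	s.free = s.free[:len(s.free)-1]
	p := &s.slots[i]
	if s.needZero {
		*p = [8]byte{} // clear stale contents left by a previous owner
	}
	return p
}

func main() {
	s := &toySpan{slots: make([][8]byte, 2), free: []int{0, 1}}
	p := s.alloc() // fresh span: needZero is false, nothing to clear
	p[0] = 42
	s.free = append(s.free, 1) // "sweep" slot 1 back onto the free list
	s.needZero = true          // freed slots may now hold stale data
	fmt.Println(s.alloc()[0])  // 0: the slot was re-zeroed on allocation
}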
src/runtime/mheap.go
spanclass        spanClass     // size class and noscan (uint8)
state            mSpanStateBox // mSpanInUse etc; accessed atomically (get/set methods)
needzero         uint8         // needs to be zeroed before allocation
isUserArenaChunk bool          // whether or not this span represents a user arena
Last Modified: Wed May 22 22:31:00 UTC 2024 - 78K bytes
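The spanclass field packs the size class and the noscan bit into one uint8. A self-contained sketch of that encoding; the helper shapes mirror the runtime's makeSpanClass/sizeclass/noscan but are written here for illustration:

package main

import "fmt"

// spanClass packs a size class and a noscan flag into a single byte:
// the size class occupies the high 7 bits and the noscan flag bit 0.
type spanClass uint8

func makeSpanClass(sizeclass uint8, noscan bool) spanClass {
	sc := spanClass(sizeclass << 1)
	if noscan {
		sc |= 1
	}
	return sc
}

func (sc spanClass) sizeclass() uint8 { return uint8(sc >> 1) }
func (sc spanClass) noscan() bool     { return sc&1 != 0 }

func main() {
	sc := makeSpanClass(5, true)
	fmt.Println(sc.sizeclass(), sc.noscan()) // 5 true
}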
src/cmd/compile/internal/liveness/plive.go
// occur (almost) anywhere. Because it is live
// everywhere, it must be zeroed on entry.
livedefer.Set(int32(i))
// It was already marked as Needzero when created.
if !n.Needzero() {
	base.Fatalf("all pointer-containing defer arg slots should have Needzero set")
}
		}
	}
}

// We must analyze the entry block first. The runtime assumes
// the function entry map is index 0. Conveniently, layout
Last Modified: Fri Jun 07 15:22:22 UTC 2024 - 45.2K bytes
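The Fatalf enforces an invariant: defer argument slots are live everywhere, so they must be zeroed on function entry. A rough sketch of the marking pattern, with a hypothetical bitVec standing in for the compiler's bit-vector type:

package main

import "fmt"

// bitVec is a hypothetical stand-in for the compiler's bit-vector type.
type bitVec struct{ bits []uint32 }

func newBitVec(n int32) bitVec { return bitVec{bits: make([]uint32, (n+31)/32)} }

func (b bitVec) Set(i int32)      { b.bits[i/32] |= 1 << uint(i%32) }
func (b bitVec) Get(i int32) bool { return b.bits[i/32]&(1<<uint(i%32)) != 0 }

func main() {
	// Mark defer-arg slots as live everywhere, asserting each one was
	// flagged for zeroing when it was created, as the snippet does.
	needZero := []bool{true, true, true}
	livedefer := newBitVec(int32(len(needZero)))
	for i, nz := range needZero {
		livedefer.Set(int32(i))
		if !nz {
			panic("all pointer-containing defer arg slots should have Needzero set")
		}
	}
	fmt.Println(livedefer.Get(0), livedefer.Get(2)) // true true
}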
src/runtime/arena.go
	if ok {
		ptr = unsafe.Pointer(v)
	}
}
if ptr == nil {
	// Failed to allocate.
	mp.mallocing = 0
	releasem(mp)
	return nil
}
if s.needzero != 0 {
	throw("arena chunk needs zeroing, but should already be zeroed")
}
// Set up heap bitmap and do extra accounting.
if typ.Pointers() {
	if cap >= 0 {
		userArenaHeapBitsSetSliceType(typ, cap, ptr, s)
Last Modified: Wed May 08 17:44:56 UTC 2024 - 37.9K bytes
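This assertion says a chunk handed to a user arena must already be zeroed. From the user side, arenas are reached through the experimental arena package (build with GOEXPERIMENT=arenas; the API may change):

package main

import (
	"arena"
	"fmt"
)

func main() {
	a := arena.NewArena()
	defer a.Free() // releases the whole arena at once

	p := arena.New[int](a)              // allocated from a pre-zeroed chunk
	s := arena.MakeSlice[byte](a, 8, 8) // likewise
	fmt.Println(*p, s[0])               // 0 0: fresh arena memory reads as zero
}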
src/runtime/mgcsweep.go
// Only mark the span as needing zeroing if we've freed any
// objects, because a fresh span that had been allocated into,
// wasn't totally filled, but then swept, still has all of its
// free slots zeroed.
s.needzero = 1
stats := memstats.heapStats.acquire()
atomic.Xadd64(&stats.smallFreeCount[spc.sizeclass()], int64(nfreed))
memstats.heapStats.release()
// Count the frees in the inconsistent, internal stats.
Last Modified: Wed May 08 17:52:18 UTC 2024 - 32.9K bytes
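A condensed sketch of the rule in the comment: sweeping marks a span as needing zeroing only when it actually freed objects, since a fresh span that was allocated into but never freed from still has zeroed free slots:

package main

import "fmt"

// toySpan carries just the flag the sweeper manipulates.
type toySpan struct{ needZero bool }

// sweep mirrors the logic around the snippet: only mark the span as
// needing zeroing if this sweep actually freed objects.
func sweep(s *toySpan, nfreed int) {
	if nfreed > 0 {
		s.needZero = true
	}
}

func main() {
	fresh, used := &toySpan{}, &toySpan{}
	sweep(fresh, 0) // nothing freed: free slots are still zero
	sweep(used, 3)  // freed slots may be dirty: zero on next allocation
	fmt.Println(fresh.needZero, used.needZero) // false true
}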
src/cmd/compile/internal/ssagen/ssa.go
var state uint32

// Iterate through declarations. Autos are sorted in decreasing
// frame offset order.
for _, n := range e.curfn.Dcl {
	if !n.Needzero() {
		continue
	}
	if n.Class != ir.PAUTO {
		e.Fatalf(n.Pos(), "needzero class %d", n.Class)
	}
	if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
Last Modified: Mon Jun 10 19:44:43 UTC 2024 - 284.9K bytes
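Here the compiler collects autos that must be zeroed in the prologue, insisting that their sizes and frame offsets are pointer-size multiples. A small sketch of that filter with a hypothetical autoVar type:

package main

import "fmt"

const ptrSize = 8

// autoVar is a hypothetical stand-in for a function-local variable as
// the code above sees it: a frame offset and a size, both of which must
// be multiples of the pointer size for the zeroing loop to work.
type autoVar struct {
	name     string
	offset   int64
	size     int64
	needZero bool
}

// zeroRanges collects the (offset, size) ranges the prologue must
// clear, mirroring the filter and alignment checks in the snippet.
func zeroRanges(vars []autoVar) [][2]int64 {
	var out [][2]int64
	for _, v := range vars {
		if !v.needZero {
			continue
		}
		if v.size%ptrSize != 0 || v.offset%ptrSize != 0 || v.size == 0 {
			panic(fmt.Sprintf("misaligned needzero var %s", v.name))
		}
		out = append(out, [2]int64{v.offset, v.size})
	}
	return out
}

func main() {
	vars := []autoVar{
		{"a", 0, 16, true},
		{"b", 16, 8, false},
		{"c", 24, 8, true},
	}
	fmt.Println(zeroRanges(vars)) // [[0 16] [24 8]]
}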
src/encoding/gob/enc_helpers.go
	slice, ok := v.Interface().([]bool)
	if !ok {
		// It is kind bool but not type bool. TODO: We can handle this unsafely.
		return false
	}
	for _, x := range slice {
		if x != false || state.sendZero {
			if x {
				state.encodeUint(1)
			} else {
				state.encodeUint(0)
			}
		}
	}
	return true
}

func encComplex64Array(state *encoderState, v reflect.Value) bool {
Last Modified: Sat Mar 10 17:50:11 UTC 2018 - 9.9K bytes
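This result matches on state.sendZero rather than Needzero. The x != false || state.sendZero test is gob's zero suppression: zero values are normally omitted from the wire, which a short end-to-end example can show:

package main

import (
	"bytes"
	"encoding/gob"
	"fmt"
	"log"
)

type flags struct {
	A, B bool
}

func main() {
	var zero, mixed bytes.Buffer
	// gob omits zero-valued struct fields, which is what the
	// x != false || state.sendZero check above implements.
	if err := gob.NewEncoder(&zero).Encode(flags{}); err != nil {
		log.Fatal(err)
	}
	if err := gob.NewEncoder(&mixed).Encode(flags{A: true}); err != nil {
		log.Fatal(err)
	}
	// The all-zero value encodes to fewer bytes: no field data is sent.
	fmt.Println(zero.Len() < mixed.Len()) // true
}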
src/encoding/gob/encode.go
	b := v.Bool()
	if b || state.sendZero {
		state.update(i)
		if b {
			state.encodeUint(1)
		} else {
			state.encodeUint(0)
		}
	}
}

// encInt encodes the signed integer (int int8 int16 int32 int64) referenced by v.
func encInt(i *encInstr, state *encoderState, v reflect.Value) {
	value := v.Int()
	if value != 0 || state.sendZero {
		state.update(i)
		state.encodeInt(value)
	}
Last Modified: Thu May 16 02:00:26 UTC 2024 - 19K bytes
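state.encodeInt folds the sign into the low bit before the unsigned wire encoding, as the gob package documentation describes. A sketch of that mapping and its inverse (names here are illustrative):

package main

import "fmt"

// foldSign maps a signed integer onto an unsigned one the way gob's
// encodeInt does, per the package docs: the value is shifted left one
// bit and, if negative, complemented first, so bit 0 records the sign.
func foldSign(i int64) uint64 {
	if i < 0 {
		return uint64(^i<<1) | 1
	}
	return uint64(i << 1)
}

// unfoldSign inverts the mapping on the decode side.
func unfoldSign(u uint64) int64 {
	if u&1 != 0 {
		return ^int64(u >> 1)
	}
	return int64(u >> 1)
}

func main() {
	for _, i := range []int64{-3, -1, 0, 1, 3} {
		fmt.Println(i, foldSign(i), unfoldSign(foldSign(i)))
	}
	// -3 5 -3; -1 1 -1; 0 0 0; 1 2 1; 3 6 3
}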
src/internal/bytealg/count_amd64.s
	PCMPEQB X0, X1
	PMOVMSKB X1, DX

	// Apply mask
	ANDQ R10, DX
	POPCNTL DX, DX
	ADDQ DX, R12

end:
	MOVQ R12, (R8)
	RET

// handle for lengths < 16
small:
	TESTQ BX, BX
	JEQ endzero

	// Check if we'll load across a page boundary.
	LEAQ 16(SI), AX
	TESTW $0xff0, AX
	JEQ endofpage

	// We must ignore high bytes as they aren't part of our slice.
	// Create mask.
	MOVB BX, CX
Last Modified: Fri Oct 06 20:54:43 UTC 2023 - 4.7K bytes
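This result matches on the endzero label. The assembly is the single-byte fast path of bytes.Count: PCMPEQB compares 16 bytes at a time, PMOVMSKB extracts the match bitmask, and POPCNT tallies it. A scalar Go equivalent of what the loop computes:

package main

import (
	"bytes"
	"fmt"
)

// countByte is the scalar equivalent of the vectorized loop above: the
// assembly handles 16 bytes per PCMPEQB, turns the matches into a
// bitmask with PMOVMSKB, and adds its POPCNT to a running total.
func countByte(b []byte, c byte) int {
	n := 0
	for _, x := range b {
		if x == c {
			n++
		}
	}
	return n
}

func main() {
	b := []byte("needzero, needzero")
	fmt.Println(countByte(b, 'e'))           // 6
	fmt.Println(bytes.Count(b, []byte{'e'})) // 6, via the assembly above
}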