- Sort: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 82 for mheap (0.05 sec)
-
src/internal/trace/gc.go
// Keep the first window. goto keep } else { // Replace it with this window. heap.Remove(&acc.wHeap, i) break } } } heap.Push(&acc.wHeap, UtilWindow{time, mu}) if len(acc.wHeap) > acc.nWorst { heap.Pop(&acc.wHeap) } keep: } if len(acc.wHeap) < acc.nWorst { // We don't have N windows yet, so keep accumulating. acc.bound = 1.0 } else {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:48:18 UTC 2024 - 26K bytes - Viewed (0) -
src/runtime/mbarrier.go
// the ptr object regardless of the slot's color. // // Another place where we intentionally omit memory barriers is when // accessing mheap_.arena_used to check if a pointer points into the // heap. On relaxed memory machines, it's possible for a mutator to // extend the size of the heap by updating arena_used, allocate an // object from this new region, and publish a pointer to that object,
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 17:58:53 UTC 2024 - 15.7K bytes - Viewed (0) -
src/runtime/export_test.go
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 30 17:50:53 UTC 2024 - 46.1K bytes - Viewed (0) -
src/runtime/stack.go
s = stackLarge.free[log2npage].first stackLarge.free[log2npage].remove(s) } unlock(&stackLarge.lock) lockWithRankMayAcquire(&mheap_.lock, lockRankMheap) if s == nil { // Allocate a new stack from the heap. s = mheap_.allocManual(npage, spanAllocStack) if s == nil { throw("out of memory") } osStackAlloc(s) s.elemsize = uintptr(n) }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 22:31:00 UTC 2024 - 41.1K bytes - Viewed (0) -
src/runtime/pprof/pprof.go
// output to a writer during profiling. // // # Heap profile // // The heap profile reports statistics as of the most recently completed // garbage collection; it elides more recent allocation to avoid skewing // the profile away from live data and toward garbage. // If there has been no garbage collection at all, the heap profile reports // all known allocations. This exception helps mainly in programs running
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 30 17:52:17 UTC 2024 - 30.6K bytes - Viewed (0) -
src/runtime/extern.go
#% percentage of time spent in GC since program start #+...+# wall-clock/CPU times for the phases of the GC #->#-># MB heap size at GC start, at GC end, and live heap, or /gc/scan/heap:bytes # MB goal goal heap size, or /gc/heap/goal:bytes # MB stacks estimated scannable stack size, or /gc/scan/stack:bytes # MB globals scannable global size, or /gc/scan/globals:bytes
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 30 17:52:17 UTC 2024 - 18.9K bytes - Viewed (0) -
platforms/software/dependency-management/src/test/groovy/org/gradle/api/internal/artifacts/ivyservice/ivyresolve/CachingModuleComponentRepositoryTest.groovy
false | MetadataFetchingCost.CHEAP | MetadataFetchingCost.CHEAP false | MetadataFetchingCost.FAST | MetadataFetchingCost.CHEAP false | MetadataFetchingCost.EXPENSIVE | MetadataFetchingCost.CHEAP true | MetadataFetchingCost.CHEAP | MetadataFetchingCost.CHEAP
Registered: Wed Jun 12 18:38:38 UTC 2024 - Last Modified: Wed May 15 00:21:07 UTC 2024 - 11.6K bytes - Viewed (0) -
src/runtime/metrics/doc.go
Cumulative count of heap allocations whose storage was freed by the garbage collector. Note that this does not include tiny objects as defined by /gc/heap/tiny/allocs:objects, only tiny blocks. /gc/heap/goal:bytes Heap size target for the end of the GC cycle. /gc/heap/live:bytes Heap memory occupied by live objects that were marked by the previous GC. /gc/heap/objects:objects
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 22:58:43 UTC 2024 - 20K bytes - Viewed (0) -
src/runtime/gc_test.go
for i := range blocks { blocks[i] = new([blockSize]byte) } // Check that the running page count matches reality. pagesInUse, counted := runtime.CountPagesInUse() if pagesInUse != counted { t.Fatalf("mheap_.pagesInUse is %d, but direct count is %d", pagesInUse, counted) } } func init() { // Enable ReadMemStats' double-check mode. *runtime.DoubleCheckReadMemStats = true }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Jun 05 22:33:52 UTC 2024 - 17.6K bytes - Viewed (0) -
src/internal/trace/event/go122/event.go
// Experimental heap span events. Added in Go 1.23. EvSpan // heap span exists [timestamp, id, npages, type/class] EvSpanAlloc // heap span alloc [timestamp, id, npages, type/class] EvSpanFree // heap span free [timestamp, id] // Experimental heap object events. Added in Go 1.23. EvHeapObject // heap object exists [timestamp, id, type] EvHeapObjectAlloc // heap object alloc [timestamp, id, type]
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:48:18 UTC 2024 - 14.8K bytes - Viewed (0)