- Sort Score
- Results per page 10
- Languages All
Results 11 - 20 of 468 for heapUp (0.21 sec)
-
src/runtime/mheap.go
// arenaHint is a hint for where to grow the heap arenas. See // mheap_.arenaHints. type arenaHint struct { _ sys.NotInHeap addr uintptr down bool next *arenaHint } // An mspan is a run of pages. // // When a mspan is in the heap free treap, state == mSpanFree // and heapmap(s->start) == span, heapmap(s->start+s->npages-1) == span. // If the mspan is in the heap scav treap, then in addition to the
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 22:31:00 UTC 2024 - 78K bytes - Viewed (0) -
src/runtime/mpagealloc.go
// // A note on latency: for sufficiently small heaps (<10s of GiB) this function will take constant // time, but may take time proportional to the size of the mapped heap beyond that. // // The heap lock must not be held over this operation, since it will briefly acquire // the heap lock. // // Must be called on the system stack because it acquires the heap lock. // //go:systemstack
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 39.2K bytes - Viewed (0) -
testing/soak/src/integTest/kotlin/org/gradle/kotlin/dsl/caching/AbstractScriptCachingIntegrationTest.kt
executerForCacheInspection(*arguments).run() protected fun buildWithDaemonHeapSize(heapMb: Int, vararg arguments: String): ExecutionResult = executerForCacheInspection(*arguments) .withBuildJvmOpts("-Xms${heapMb}m", "-Xmx${heapMb}m") .run() private fun executerForCacheInspection(vararg arguments: String): GradleExecuter = gradleExecuterFor(
Registered: Wed Jun 12 18:38:38 UTC 2024 - Last Modified: Fri Jun 07 11:33:23 UTC 2024 - 1.9K bytes - Viewed (0) -
src/cmd/vendor/github.com/google/pprof/profile/legacy_profile.go
} return addrs, nil } // scaleHeapSample adjusts the data from a heapz Sample to // account for its probability of appearing in the collected // data. heapz profiles are a sampling of the memory allocations // requests in a program. We estimate the unsampled value by dividing // each collected sample by its probability of appearing in the // profile. heapz v2 profiles rely on a poisson process to determine
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Nov 22 18:58:12 UTC 2022 - 32.8K bytes - Viewed (0) -
test/fixedbugs/issue13799.go
// Heap -> stack pointer eventually causes badness when stack reallocation // occurs. var fn func() // ERROR "moved to heap: fn$" i := 0 // ERROR "moved to heap: i$" for ; i < maxI; i++ { // var fn func() // this makes it work, because fn stays off heap j := 0 // ERROR "moved to heap: j$" fn = func() { // ERROR "func literal escapes to heap$" m[i] = append(m[i], 0) if j < 25 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Sep 08 18:50:24 UTC 2023 - 4.9K bytes - Viewed (0) -
test/escape_indir.go
x := &ConstPtr{} // ERROR "&ConstPtr{} does not escape" x.p = &i return *x } func constptr03() **ConstPtr { i := 0 // ERROR "moved to heap: i" x := &ConstPtr{} // ERROR "&ConstPtr{} escapes to heap" "moved to heap: x" x.p = &i return &x } func constptr1() { i := 0 // ERROR "moved to heap: i" x := &ConstPtr{} // ERROR "&ConstPtr{} escapes to heap" x.p = &i sink = x }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat Sep 12 08:31:49 UTC 2020 - 3.3K bytes - Viewed (0) -
src/internal/trace/batchcursor_test.go
heap[len(heap)-1].ev.time = 21 heapUpdate(heap, len(heap)-1) checkHeap(t, heap) if heap[len(heap)-1].ev.time != 21 { t.Fatalf("heap update failed, expected %d as heap min: %s", 21, heapDebugString(heap)) } // Update the last element to be smaller. heap[len(heap)-1].ev.time = 7 heapUpdate(heap, len(heap)-1) checkHeap(t, heap) if heap[len(heap)-1].ev.time == 21 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:48:18 UTC 2024 - 3K bytes - Viewed (0) -
src/runtime/debuglog.go
throw("failed to allocate debug log") } l.w.r.data = &l.w.data l.owned.Store(1) // Prepend to allDloggers list. headp := (*uintptr)(unsafe.Pointer(&allDloggers)) for { head := atomic.Loaduintptr(headp) l.allLink = (*dlogger)(unsafe.Pointer(head)) if atomic.Casuintptr(headp, head, uintptr(unsafe.Pointer(l))) { break } } } // If the time delta is getting too high, write a new sync
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Apr 02 15:10:48 UTC 2024 - 18.3K bytes - Viewed (0) -
test/escape_closure.go
func ClosureCallArgs9() { // BAD: x should not leak x := 0 // ERROR "moved to heap: x" for { defer func(p *int) { // ERROR "func literal escapes to heap" "p does not escape" *p = 1 }(&x) } } func ClosureCallArgs10() { for { x := 0 // ERROR "moved to heap: x" defer func(p *int) { // ERROR "func literal escapes to heap" "p does not escape" *p = 1 }(&x) } }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Aug 17 16:36:09 UTC 2023 - 4.6K bytes - Viewed (0) -
src/internal/trace/batchcursor.go
heapSiftDown(heap, i) } func heapRemove(heap []*batchCursor, i int) []*batchCursor { // Sift index i up to the root, ignoring actual values. for i > 0 { heap[(i-1)/2], heap[i] = heap[i], heap[(i-1)/2] i = (i - 1) / 2 } // Swap the root with the last element, then remove it. heap[0], heap[len(heap)-1] = heap[len(heap)-1], heap[0] heap = heap[:len(heap)-1] // Sift the root down. heapSiftDown(heap, 0)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:48:18 UTC 2024 - 4.1K bytes - Viewed (0)