- Sort Score
- Results per page: 10
- Languages All
Results 31 - 40 of 273 for Stack (0.07 sec)
-
src/cmd/internal/obj/inl.go
Parent: parent, Pos: pos, Func: func_, Name: name, } tree.nodes = append(tree.nodes, call) return r } // AllParents invokes do on each InlinedCall in the inlining call // stack, from outermost to innermost. // // That is, if inlIndex corresponds to f inlining g inlining h, // AllParents invokes do with the call for inlining g into f, and then // inlining h into g.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon May 22 22:47:15 UTC 2023 - 4.4K bytes - Viewed (0) -
src/internal/trace/traceviewer/format/format.go
Time float64 `json:"ts"` Dur float64 `json:"dur,omitempty"` PID uint64 `json:"pid"` TID uint64 `json:"tid"` ID uint64 `json:"id,omitempty"` BindPoint string `json:"bp,omitempty"` Stack int `json:"sf,omitempty"` EndStack int `json:"esf,omitempty"` Arg any `json:"args,omitempty"` Cname string `json:"cname,omitempty"` Category string `json:"cat,omitempty"` }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Nov 21 20:45:06 UTC 2023 - 2K bytes - Viewed (0) -
src/runtime/testdata/testprog/checkptr.go
} // CheckPtrAlignmentNilPtr tests that checkptrAlignment doesn't crash // on nil pointers (#47430). func CheckPtrAlignmentNilPtr() { var do func(int) do = func(n int) { // Inflate the stack so runtime.shrinkstack gets called during GC if n > 0 { do(n - 1) } var p unsafe.Pointer _ = (*int)(p) } go func() { for { runtime.GC() } }() go func() {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Aug 31 17:15:15 UTC 2022 - 2.4K bytes - Viewed (0) -
src/runtime/mpagecache.go
} // flush empties out unallocated free pages in the given cache // into s. Then, it clears the cache, such that empty returns // true. // // p.mheapLock must be held. // // Must run on the system stack because p.mheapLock must be held. // //go:systemstack func (c *pageCache) flush(p *pageAlloc) { assertLockHeld(p.mheapLock) if c.empty() { return } ci := chunkIndex(c.base)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Apr 19 14:30:00 UTC 2023 - 5.6K bytes - Viewed (0) -
src/cmd/cgo/internal/test/callback_windows.go
} return i; #else return 0; #endif } */ import "C" import ( "internal/testenv" "reflect" "runtime" "strings" "testing" "unsafe" ) // Test that the stack can be unwound through a call out and call back // into Go. func testCallbackCallersSEH(t *testing.T) { testenv.SkipIfOptimizationOff(t) // This test requires inlining. if runtime.Compiler != "gc" {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Nov 29 16:01:37 UTC 2023 - 2.6K bytes - Viewed (0) -
src/cmd/trace/jsontrace_test.go
} } // filterStackRootFunc returns an event filter that returns true if the function // at the root of the stack trace is named name. func filterStackRootFunc(name string) eventFilterFn { return func(e *format.Event, data *format.Data) bool { frames := stackFrames(data, e.Stack) rootFrame := frames[len(frames)-1] return strings.HasPrefix(rootFrame, name+":") } }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:48:18 UTC 2024 - 7.8K bytes - Viewed (0) -
src/syscall/exec_bsd.go
// they might have been locked at the time of the fork. This means // no rescheduling, no malloc calls, and no new stack segments. // For the same reason compiler does not race instrument it. // The calls to RawSyscall are okay because they are assembly // functions that do not grow the stack. // //go:norace
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Sep 29 18:51:35 UTC 2023 - 7.9K bytes - Viewed (0) -
src/net/packetconn_test.go
// license that can be found in the LICENSE file. // This file implements API tests across platforms and should never have a build // constraint. package net import ( "os" "testing" ) // The full stack test cases for IPConn have been moved to the // following: // golang.org/x/net/ipv4 // golang.org/x/net/ipv6 // golang.org/x/net/icmp func packetConnTestData(t *testing.T, network string) ([]byte, func()) {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Sep 18 17:20:52 UTC 2023 - 3K bytes - Viewed (0) -
src/runtime/histogram_test.go
package runtime_test import ( "math" . "runtime" "testing" ) var dummyTimeHistogram TimeHistogram func TestTimeHistogram(t *testing.T) { // We need to use a global dummy because this // could get stack-allocated with a non-8-byte alignment. // The result of this bad alignment is a segfault on // 32-bit platforms when calling Record. h := &dummyTimeHistogram // Record exactly one sample in each bucket.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Sep 16 16:32:01 UTC 2022 - 3.5K bytes - Viewed (0) -
src/cmd/cgo/internal/testsanitizers/testdata/msan6.go
} */ import "C" // allocateStack extends the stack so that stack copying doesn't // confuse the msan data structures. // //go:noinline func allocateStack(i int) int { if i == 0 { return i } return allocateStack(i - 1) } // F1 marks a chunk of stack as uninitialized. // C.f returns an uninitialized struct on the stack, so msan will mark // the stack as uninitialized. // //go:noinline func F1() uintptr {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 12 11:59:56 UTC 2023 - 1.4K bytes - Viewed (0)