- Sort Score
- Results per page: 10
- Languages All
Results 101 - 110 of 224 for stack (0.1 sec)
-
src/internal/trace/testdata/testprog/cpu-profile.go
pprofStacks[stack] += samples } } for stack, samples := range pprofStacks { fmt.Fprintf(os.Stderr, "%s\t%d\n", stack, samples) } } func cpuHogger(f func(x int) int, y *int, dur time.Duration) { // We only need to get one 100 Hz clock tick, so we've got // a large safety buffer. // But do at least 500 iterations (which should take about 100ms), // otherwise TestCPUProfileMultithreaded can fail if only one
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:48:18 UTC 2024 - 3.8K bytes - Viewed (0) -
src/cmd/vendor/golang.org/x/sys/unix/zerrors_linux_386.go
{11, "SIGSEGV", "segmentation fault"}, {12, "SIGUSR2", "user defined signal 2"}, {13, "SIGPIPE", "broken pipe"}, {14, "SIGALRM", "alarm clock"}, {15, "SIGTERM", "terminated"}, {16, "SIGSTKFLT", "stack fault"}, {17, "SIGCHLD", "child exited"}, {18, "SIGCONT", "continued"}, {19, "SIGSTOP", "stopped (signal)"}, {20, "SIGTSTP", "stopped"}, {21, "SIGTTIN", "stopped (tty input)"},
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Jun 04 16:19:04 UTC 2024 - 34.2K bytes - Viewed (0) -
src/runtime/debug/stack_test.go
func (t *T) ptrmethod() []byte { return Stack() } func (t T) method() []byte { return t.ptrmethod() } /* The traceback should look something like this, modulo line numbers and hex constants. Don't worry much about the base levels, but check the ones in our own package. goroutine 10 [running]: runtime/debug.Stack(0x0, 0x0, 0x0) /Users/r/go/src/runtime/debug/stack.go:28 +0x80
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 16 15:19:04 UTC 2024 - 5.5K bytes - Viewed (0) -
src/runtime/mbarrier.go
// the stack to the heap, but this requires first having a pointer // hidden on the stack. Immediately after a stack is scanned, it only // points to shaded objects, so it's not hiding anything, and the // shade(*slot) prevents it from hiding any other pointers on its // stack. // // For a detailed description of this barrier and proof of
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 17:58:53 UTC 2024 - 15.7K bytes - Viewed (0) -
src/internal/trace/order.go
go122.EvGoSwitch: (*ordering).advanceGoSwitch, go122.EvGoSwitchDestroy: (*ordering).advanceGoSwitch, go122.EvGoCreateBlocked: (*ordering).advanceGoCreate, // GoStatus event with a stack. Added in Go 1.23. go122.EvGoStatusStack: (*ordering).advanceGoStatus, // Experimental events. // Experimental heap span events. Added in Go 1.23. go122.EvSpan: (*ordering).advanceAllocFree,
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Jun 03 14:56:25 UTC 2024 - 52.4K bytes - Viewed (0) -
src/cmd/compile/internal/ssagen/abi.go
fn.SetABIWrapper(true) fn.SetDupok(true) // ABI0-to-ABIInternal wrappers will be mainly loading params from // stack into registers (and/or storing stack locations back to // registers after the wrapped call); in most cases they won't // need to allocate stack space, so it should be OK to mark them // as NOSPLIT in these cases. In addition, my assumption is that
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 19:57:43 UTC 2024 - 13.8K bytes - Viewed (0) -
src/runtime/crash_cgo_test.go
t.Skip("skipping windows specific test") } testenv.SkipFlaky(t, 22575) o := runTestProg(t, "testprogcgo", "StackMemory") stackUsage, err := strconv.Atoi(o) if err != nil { t.Fatalf("Failed to read stack usage: %v", err) } if expected, got := 100<<10, stackUsage; got > expected { t.Fatalf("expected < %d bytes of memory per thread, got %d", expected, got) } } func TestSigStackSwapping(t *testing.T) {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Apr 15 16:44:47 UTC 2024 - 22.2K bytes - Viewed (0) -
src/cmd/trace/viewer.go
package main import ( "fmt" "internal/trace" "internal/trace/traceviewer" "time" ) // viewerFrames returns the frames of the stack of ev. The given frame slice is // used to store the frames to reduce allocations. func viewerFrames(stk trace.Stack) []*trace.Frame { var frames []*trace.Frame stk.Frames(func(f trace.StackFrame) bool { frames = append(frames, &trace.Frame{ PC: f.PC, Fn: f.Func,
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:48:18 UTC 2024 - 1.4K bytes - Viewed (0) -
src/cmd/go/internal/modload/import.go
// that suggests a 'go get' command for root packages that transitively import // packages from modules with missing sums. load.CheckPackageErrors would be // a good place to consolidate errors, but we'll need to attach the import // stack here. type ImportMissingSumError struct { importPath string found bool mods []module.Version
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue May 07 15:21:14 UTC 2024 - 27.7K bytes - Viewed (0) -
src/internal/bisect/bisect.go
return dst } // MatchStack assigns the current call stack a change ID. // If the stack should be printed, MatchStack prints it. // Then MatchStack reports whether a change at the current call stack should be enabled. func (m *Matcher) Stack(w Writer) bool { if m == nil { return true } return m.stack(w) } // stack does the real work for Stack. // This lets stack's body handle m == nil and potentially be inlined.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 03 17:28:43 UTC 2024 - 22.9K bytes - Viewed (0)