Results 111 - 120 of 263 for Atack (0.04 sec)
- src/fmt/scan_test.go
    testScanInts(t, func(r *RecursiveInt, b *bytes.Buffer) (err error) {
        _, err = Fscan(b, r)
        return
    })
  }

  // 800 is small enough to not overflow the stack when using gccgo on a
  // platform that does not support split stack.
  const intCount = 800

  func testScanInts(t *testing.T, scan func(*RecursiveInt, *bytes.Buffer) error) {
    r := new(RecursiveInt)
    ints := makeInts(intCount)
  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue May 23 20:25:13 UTC 2023 - 39.3K bytes - Viewed (0)
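  As background for the snippet above: fmt.Fscan reads space-separated values from an io.Reader into the supplied pointers, returning the count scanned and the first error. A minimal, self-contained sketch (input and variable names are illustrative):

    package main

    import (
        "bytes"
        "fmt"
    )

    func main() {
        buf := bytes.NewBufferString("7 42") // illustrative input
        var a, b int
        // Fscan reads whitespace-separated values into the pointers,
        // returning how many items were scanned and the first error.
        n, err := fmt.Fscan(buf, &a, &b)
        fmt.Println(n, err, a, b) // 2 <nil> 7 42
    }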
- src/cmd/vendor/golang.org/x/sys/unix/zsysnum_netbsd_arm64.go
    SYS_GETSID  = 286 // { pid_t|sys||getsid(pid_t pid); }
    SYS___CLONE = 287 // { pid_t|sys||__clone(int flags, void *stack); }
    SYS_FKTRACE = 288 // { int|sys||fktrace(int fd, int ops, int facs, pid_t pid); }
    SYS_PREADV  = 289 // { ssize_t|sys||preadv(int fd, const struct iovec *iovp, int iovcnt, int PAD, off_t offset); }
  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Oct 19 23:33:33 UTC 2023 - 25.7K bytes - Viewed (0)
- src/crypto/internal/nistec/generate.go
  func (p *{{.P}}Point) ScalarMult(q *{{.P}}Point, scalar []byte) (*{{.P}}Point, error) {
    // Compute a {{.p}}Table for the base point q. The explicit New{{.P}}Point
    // calls get inlined, letting the allocations live on the stack.
    var table = {{.p}}Table{New{{.P}}Point(), New{{.P}}Point(), New{{.P}}Point(),
        New{{.P}}Point(), New{{.P}}Point(), New{{.P}}Point(), New{{.P}}Point(),
        New{{.P}}Point(), New{{.P}}Point(), New{{.P}}Point(), New{{.P}}Point(),
  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 04 17:29:44 UTC 2024 - 19.7K bytes - Viewed (0)
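  The table of New{{.P}}Point values set up here feeds a fixed-window scalar multiplication: precompute small multiples of q once, then consume the scalar a few bits at a time with doublings plus one table add per window. A toy integer analogue of that pattern (plain ints stand in for curve points, and the 4-bit window width is an assumption, not taken from the snippet):

    package main

    import "fmt"

    // scalarMult computes scalar*q with a 15-entry table of 1q..15q and
    // 4-bit windows: four "doublings" then at most one "addition" per window.
    func scalarMult(q, scalar int) int {
        var table [15]int
        table[0] = q
        for i := 1; i < 15; i++ {
            table[i] = table[i-1] + q // table[i] holds (i+1)*q
        }
        p := 0
        for shift := 28; shift >= 0; shift -= 4 {
            p *= 16 // stands in for four point doublings
            if w := (scalar >> shift) & 0xf; w != 0 {
                p += table[w-1] // stands in for one point addition
            }
        }
        return p
    }

    func main() {
        fmt.Println(scalarMult(3, 1000)) // 3000
    }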
- src/runtime/mbarrier.go
  // the stack to the heap, but this requires first having a pointer
  // hidden on the stack. Immediately after a stack is scanned, it only
  // points to shaded objects, so it's not hiding anything, and the
  // shade(*slot) prevents it from hiding any other pointers on its
  // stack.
  //
  // For a detailed description of this barrier and proof of
  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 17:58:53 UTC 2024 - 15.7K bytes - Viewed (0)
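  For orientation, the comment is reasoning about Go's hybrid write barrier, documented elsewhere in the same file as roughly: shade(*slot); if the current stack is grey, shade(ptr); then *slot = ptr. A runnable toy model of that logic (obj, shade, and stackIsGrey are stand-ins, not runtime APIs):

    package main

    import "fmt"

    type obj struct{ shaded bool }

    func shade(o *obj) {
        if o != nil {
            o.shaded = true
        }
    }

    // stackIsGrey pretends the current stack has not been scanned yet.
    func stackIsGrey() bool { return true }

    // writePointer mimics the documented barrier: shade the old pointee,
    // shade the new one while the stack is grey, then perform the write.
    func writePointer(slot **obj, ptr *obj) {
        shade(*slot)
        if stackIsGrey() {
            shade(ptr)
        }
        *slot = ptr
    }

    func main() {
        oldObj, newObj := &obj{}, &obj{}
        slot := oldObj
        writePointer(&slot, newObj)
        fmt.Println(oldObj.shaded, newObj.shaded) // true true
    }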
- src/cmd/go/internal/cache/cache.go
    entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano())
    if verify && allowVerify {
        old, err := c.get(id)
        if err == nil && (old.OutputID != out || old.Size != size) {
            // panic to show stack trace, so we can see what code is generating this cache entry.
            msg := fmt.Sprintf("go: internal cache error: cache verify failed: id=%x changed:<<<\n%s\n>>>\nold: %x %d\nnew: %x %d",
                id, reverseHash(id), out, size, old.OutputID, old.Size)
  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat Mar 09 14:19:39 UTC 2024 - 20.3K bytes - Viewed (0)
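  The entry written here is a fixed-width text record: the literal v1, two hex hashes, then size and wall-clock nanoseconds each padded to 20 columns. A standalone sketch producing a line in that shape (the IDs are made up; the real cache derives them from build inputs and output content):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Hypothetical 32-byte action and output IDs.
        var id, out [32]byte
        id[0], out[0] = 0xab, 0xcd
        size := int64(4096)
        entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano())
        fmt.Print(entry)
    }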
- src/encoding/pem/pem_test.go
    }
        if string(rest) != test.input {
            t.Errorf("unexpected rest: %q; want = %q", rest, test.input)
        }
    }
  }

  func TestCVE202224675(t *testing.T) {
    // Prior to CVE-2022-24675, this input would cause a stack overflow.
    input := []byte(strings.Repeat("-----BEGIN \n", 10000000))
    result, rest := Decode(input)
    if result != nil || !reflect.DeepEqual(rest, input) {
        t.Errorf("Encode of %#v decoded as %#v", input, rest)
    }
  }
  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Sep 29 22:56:00 UTC 2022 - 23.5K bytes - Viewed (0)
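  The test leans on pem.Decode's contract: it returns the first block found plus the unconsumed remainder, or a nil block and the whole input when nothing parses. A minimal usage sketch (block type and payload are illustrative):

    package main

    import (
        "encoding/pem"
        "fmt"
    )

    func main() {
        data := []byte("-----BEGIN NOTE-----\naGVsbG8=\n-----END NOTE-----\n")
        block, rest := pem.Decode(data)
        if block == nil {
            fmt.Println("no PEM block found")
            return
        }
        // block.Bytes holds the base64-decoded payload.
        fmt.Printf("type=%q payload=%q rest=%d bytes\n", block.Type, block.Bytes, len(rest))
    }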
- src/runtime/crash_cgo_test.go
        t.Skip("skipping windows specific test")
    }
    testenv.SkipFlaky(t, 22575)
    o := runTestProg(t, "testprogcgo", "StackMemory")
    stackUsage, err := strconv.Atoi(o)
    if err != nil {
        t.Fatalf("Failed to read stack usage: %v", err)
    }
    if expected, got := 100<<10, stackUsage; got > expected {
        t.Fatalf("expected < %d bytes of memory per thread, got %d", expected, got)
    }
  }

  func TestSigStackSwapping(t *testing.T) {
  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Apr 15 16:44:47 UTC 2024 - 22.2K bytes - Viewed (0)
- src/cmd/go/internal/modload/import.go
  // that suggests a 'go get' command for root packages that transitively import
  // packages from modules with missing sums. load.CheckPackageErrors would be
  // a good place to consolidate errors, but we'll need to attach the import
  // stack here.
  type ImportMissingSumError struct {
    importPath string
    found      bool
    mods       []module.Version
  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue May 07 15:21:14 UTC 2024 - 27.7K bytes - Viewed (0)
- src/cmd/compile/internal/ssagen/abi.go
    fn.SetABIWrapper(true)
    fn.SetDupok(true)

    // ABI0-to-ABIInternal wrappers will be mainly loading params from
    // stack into registers (and/or storing stack locations back to
    // registers after the wrapped call); in most cases they won't
    // need to allocate stack space, so it should be OK to mark them
    // as NOSPLIT in these cases. In addition, my assumption is that
  Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 19:57:43 UTC 2024 - 13.8K bytes - Viewed (0)
- src/internal/bisect/bisect.go
    return dst
  }

  // MatchStack assigns the current call stack a change ID.
  // If the stack should be printed, MatchStack prints it.
  // Then MatchStack reports whether a change at the current call stack should be enabled.
  func (m *Matcher) Stack(w Writer) bool {
    if m == nil {
        return true
    }
    return m.stack(w)
  }

  // stack does the real work for Stack.
  // This lets stack's body handle m == nil and potentially be inlined.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 03 17:28:43 UTC 2024 - 22.9K bytes - Viewed (0)