- Sort: Score
- Results per page: 10
- Languages: All
Results 21 - 30 of 383 for stksize (0.23 sec)
-
src/cmd/compile/internal/ssa/writebarrier.go
var off int64 for ptr.Op == OpOffPtr { off += ptr.AuxInt ptr = ptr.Args[0] } ptrSize := ptr.Block.Func.Config.PtrSize if off%ptrSize != 0 { return true // see issue 61187 } if size%ptrSize != 0 { ptr.Fatalf("unaligned pointer write") } if off < 0 || off+size > 64*ptrSize { // memory range goes off end of tracked offsets return true } z := zeroes[mem.ID] if ptr != z.base {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Sep 08 19:09:14 UTC 2023 - 23.5K bytes - Viewed (0) -
src/cmd/compile/internal/types/size.go
t.setAlg(ACPLX128) case TPTR: w = int64(PtrSize) t.intRegs = 1 CheckSize(t.Elem()) t.ptrBytes = int64(PtrSize) // See PtrDataSize case TUNSAFEPTR: w = int64(PtrSize) t.intRegs = 1 t.ptrBytes = int64(PtrSize) case TINTER: // implemented as 2 pointers w = 2 * int64(PtrSize) t.align = uint8(PtrSize) t.intRegs = 2 expandiface(t)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 15K bytes - Viewed (0) -
src/runtime/os_openbsd_libc.go
writeErrStr(failthreadcreate) exit(1) } // Find out OS stack size for our own stack guard. var stacksize uintptr if pthread_attr_getstacksize(&attr, &stacksize) != 0 { writeErrStr(failthreadcreate) exit(1) } mp.g0.stack.hi = stacksize // for mstart // Tell the pthread library we won't join with this thread. if pthread_attr_setdetachstate(&attr, _PTHREAD_CREATE_DETACHED) != 0 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Nov 10 20:44:45 UTC 2022 - 1.5K bytes - Viewed (0) -
src/runtime/mbitmap.go
tp.mask = readUintptr(addb(tp.typ.GCData, (tp.addr-tp.elem)/goarch.PtrSize/8)) if tp.addr+goarch.PtrSize*ptrBits > limit { bits := (tp.addr + goarch.PtrSize*ptrBits - limit) / goarch.PtrSize tp.mask &^= ((1 << (bits)) - 1) << (ptrBits - bits) } } } // fastForward moves the iterator forward by n bytes. n must be a multiple // of goarch.PtrSize. limit must be the same limit passed to next for this // iterator. //
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 00:18:55 UTC 2024 - 60K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/dec.rules
(Load <typ.Int> (OffPtr <typ.IntPtr> [config.PtrSize] ptr) mem) (Load <typ.Int> (OffPtr <typ.IntPtr> [2*config.PtrSize] ptr) mem)) (Store {t} dst (SliceMake ptr len cap) mem) => (Store {typ.Int} (OffPtr <typ.IntPtr> [2*config.PtrSize] dst) cap (Store {typ.Int} (OffPtr <typ.IntPtr> [config.PtrSize] dst) len (Store {t.Elem().PtrTo()} dst ptr mem)))
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Oct 12 00:48:31 UTC 2023 - 6.9K bytes - Viewed (0) -
src/cmd/compile/internal/riscv64/ggen.go
// Adjust the frame to account for LR. off += base.Ctxt.Arch.FixedFrameSize if cnt < int64(4*types.PtrSize) { for i := int64(0); i < cnt; i += int64(types.PtrSize) { p = pp.Append(p, riscv.AMOV, obj.TYPE_REG, riscv.REG_ZERO, 0, obj.TYPE_MEM, riscv.REG_SP, off+i) } return p } if cnt <= int64(128*types.PtrSize) { p = pp.Append(p, riscv.AADDI, obj.TYPE_CONST, 0, off, obj.TYPE_REG, riscv.REG_X25, 0) p.Reg = riscv.REG_SP
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Apr 19 15:59:22 UTC 2022 - 1.8K bytes - Viewed (0) -
src/runtime/map_faststr.go
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 01:17:26 UTC 2024 - 15.3K bytes - Viewed (0) -
src/unique/clone_test.go
testCloneSeq[testIntArray](t, cSeq()) testCloneSeq[testEface](t, cSeq()) testCloneSeq[testStringArray](t, cSeq(0, 2*goarch.PtrSize, 4*goarch.PtrSize)) testCloneSeq[testStringStruct](t, cSeq(0)) testCloneSeq[testStringStructArrayStruct](t, cSeq(0, 2*goarch.PtrSize)) testCloneSeq[testStruct](t, cSeq(8)) } func cSeq(stringOffsets ...uintptr) cloneSeq { return cloneSeq{stringOffsets: stringOffsets} }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Apr 22 18:14:07 UTC 2024 - 1K bytes - Viewed (0) -
src/cmd/compile/internal/typebits/typebits.go
if off&int64(types.PtrSize-1) != 0 { base.Fatalf("typebits.Set: invalid alignment, %v", t) } bv.Set(int32(off / int64(types.PtrSize))) // pointer case types.TSTRING: // struct { byte *str; intgo len; } if off&int64(types.PtrSize-1) != 0 { base.Fatalf("typebits.Set: invalid alignment, %v", t) } bv.Set(int32(off / int64(types.PtrSize))) //pointer in first slot case types.TINTER:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Aug 22 01:53:41 UTC 2023 - 3.2K bytes - Viewed (0) -
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
cc = append(cc, newComponent(suffix+"_data", asmKind(arch.ptrSize), "interface data", off+arch.ptrSize, arch.ptrSize, suffix)) case asmInterface: cc = append(cc, newComponent(suffix+"_itable", asmKind(arch.ptrSize), "interface itable", off, arch.ptrSize, suffix)) cc = append(cc, newComponent(suffix+"_data", asmKind(arch.ptrSize), "interface data", off+arch.ptrSize, arch.ptrSize, suffix)) case asmSlice:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 03 02:38:00 UTC 2024 - 22.8K bytes - Viewed (0)