- Sort Score
- Result 10 results
- Languages All
Results 1 - 10 of 27 for Slots (0.29 sec)
-
src/cmd/compile/internal/ssa/debug.go
// The location of each known slot, indexed by SlotID. slots []VarLoc // The slots present in each register, indexed by register number. registers [][]SlotID } // reset fills state with the live variables from live. func (state *stateAtPC) reset(live abt.T) { slots, registers := state.slots, state.registers for i := range slots { slots[i] = VarLoc{} } for i := range registers {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Jun 10 19:44:43 UTC 2024 - 58.4K bytes - Viewed (0) -
src/index/suffixarray/sais2.go
// Loop backward through sa, always tracking // the next index to populate from sa[:numLMS]. // When we get to one, populate it. // Zero the rest of the slots; they have dead values in them. x := numLMS - 1 saX := sa[x] c := text[saX] b := bucket[c] - 1 bucket[c] = b for i := len(sa) - 1; i >= 0; i-- { if i != int(b) { sa[i] = 0
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 18 23:57:18 UTC 2024 - 52.3K bytes - Viewed (0) -
src/runtime/malloc.go
// Scan the mspan's free bitmap to find a free slot. // If there is a free slot, allocate it. // This can all be done without acquiring a lock. // // 2. If the mspan has no free slots, obtain a new mspan // from the mcentral's list of mspans of the required size // class that have free space. // Obtaining a whole span amortizes the cost of locking // the mcentral. //
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 17:58:53 UTC 2024 - 59.6K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/transforms/resource_op_lifting.cc
// variables are removed from the loop variables during // canonicalization, we need to create new operand/result slots. The // input operands for these slots are the read values // prior to the op, and all references to these are replaced by the // corresponding slot argument. We need to generate writes following
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 55.1K bytes - Viewed (0) -
src/runtime/mgcpacer.go
// excludes these objects (and hence only goes up between GCs). // // To reduce contention, this is updated only when obtaining a span // from an mcentral and at this point it counts all of the unallocated // slots in that span (which will be allocated before that mcache // obtains another span from that mcentral). Hence, it slightly // overestimates the "true" live heap size. It's better to overestimate
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 55.4K bytes - Viewed (0) -
src/runtime/mbitmap.go
m.mask = 1 } else { m.mask = m.mask << 1 } m.index++ } // clobberdeadPtr is a special value that is used by the compiler to // clobber dead stack slots, when -clobberdead flag is set. const clobberdeadPtr = uintptr(0xdeaddead | 0xdeaddead<<((^uintptr(0)>>63)*32)) // badPointer throws bad pointer in heap panic. func badPointer(s *mspan, p, refBase, refOff uintptr) {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 00:18:55 UTC 2024 - 60K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/regalloc.go
// computeLive also computes the desired register information at the end of each block. // This desired register information is stored in s.desired. // TODO: this could be quadratic if lots of variables are live across lots of // basic blocks. Figure out a way to make this function (or, more precisely, the user // of this function) require only linear size & time. func (s *regAllocState) computeLive() { f := s.f
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Nov 21 17:49:56 UTC 2023 - 87.2K bytes - Viewed (0) -
src/runtime/mgcmark.go
// since we may have stopped while this function was // setting up a call. // // TODO: We could narrow this down if the compiler // produced a single map per function of stack slots // and registers that ever contain a pointer. if frame.varp != 0 { size := frame.varp - frame.sp if size > 0 { scanConservative(frame.sp, size, nil, gcw, state) } }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Apr 18 21:25:11 UTC 2024 - 52.5K bytes - Viewed (0) -
src/runtime/traceback.go
if p == nil { return } liveInfo := funcdata(f, abi.FUNCDATA_ArgLiveInfo) liveIdx := pcdatavalue(f, abi.PCDATA_ArgLiveIndex, pc) startOffset := uint8(0xff) // smallest offset that needs liveness info (slots with a lower offset are always live) if liveInfo != nil { startOffset = *(*uint8)(liveInfo) } isLive := func(off, slotIdx uint8) bool { if liveInfo == nil || liveIdx <= 0 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 16:25:21 UTC 2024 - 55.1K bytes - Viewed (0) -
src/runtime/mheap.go
// any situation where a pointer is suspect. s.state.set(mSpanInUse) } // Publish the span in various locations. // This is safe to call without the lock held because the slots // related to this span will only ever be read or modified by // this thread until pointers into the span are published (and // we execute a publication barrier at the end of this function
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 22:31:00 UTC 2024 - 78K bytes - Viewed (0)