Search Options

Results per page
Sort
Preferred Languages
Advanced

Results 1 - 10 of 10 for slotIDs (0.17 sec)

  1. src/cmd/compile/internal/ssa/debug.go

    }
    
    // partsByVarOffset implements sort.Interface (see Len/Less/Swap) to
    // order slot IDs by the result of varOffset on the slot each ID
    // indexes — presumably the slot's offset within its parent variable.
    type partsByVarOffset struct {
    	slotIDs []SlotID    // indices into slots, the elements being sorted
    	slots   []LocalSlot // backing table; not reordered, only read via slotIDs
    }
    
    // Len reports how many slot IDs are being sorted (sort.Interface).
    func (a partsByVarOffset) Len() int {
    	return len(a.slotIDs)
    }
    // Less orders entries i and j by comparing varOffset of the slots
    // their IDs refer to (sort.Interface).
    func (a partsByVarOffset) Less(i, j int) bool {
    	si := a.slots[a.slotIDs[i]]
    	sj := a.slots[a.slotIDs[j]]
    	return varOffset(si) < varOffset(sj)
    }
    // Swap exchanges the slot IDs at positions i and j (sort.Interface).
    func (a partsByVarOffset) Swap(i, j int) {
    	a.slotIDs[i], a.slotIDs[j] = a.slotIDs[j], a.slotIDs[i]
    }
    
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Mon Jun 10 19:44:43 UTC 2024
    - 58.4K bytes
    - Viewed (0)
  2. src/index/suffixarray/sais2.go

    	// Loop backward through sa, always tracking
    	// the next index to populate from sa[:numLMS].
    	// When we get to one, populate it.
    	// Zero the rest of the slots; they have dead values in them.
    	x := numLMS - 1
    	saX := sa[x]
    	c := text[saX]
    	b := bucket[c] - 1
    	bucket[c] = b
    
    	for i := len(sa) - 1; i >= 0; i-- {
    		if i != int(b) {
    			sa[i] = 0
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Mon Mar 18 23:57:18 UTC 2024
    - 52.3K bytes
    - Viewed (0)
  3. src/runtime/malloc.go

    //	   Scan the mspan's free bitmap to find a free slot.
    //	   If there is a free slot, allocate it.
    //	   This can all be done without acquiring a lock.
    //
    //	2. If the mspan has no free slots, obtain a new mspan
    //	   from the mcentral's list of mspans of the required size
    //	   class that have free space.
    //	   Obtaining a whole span amortizes the cost of locking
    //	   the mcentral.
    //
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 29 17:58:53 UTC 2024
    - 59.6K bytes
    - Viewed (0)
  4. tensorflow/compiler/mlir/tensorflow/transforms/resource_op_lifting.cc

      //        variables are removed from the loop variables during
      //        canonicalization, we need to create new operand/result slots. The
      //        input operands for these slots are the read values
      //        prior to the op, and all references to these are replaced by the
      //        corresponding slot argument. We need to generate writes following
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 55.1K bytes
    - Viewed (0)
  5. src/runtime/mgcpacer.go

    	// excludes these objects (and hence only goes up between GCs).
    	//
    	// To reduce contention, this is updated only when obtaining a span
    	// from an mcentral and at this point it counts all of the unallocated
    	// slots in that span (which will be allocated before that mcache
    	// obtains another span from that mcentral). Hence, it slightly
    	// overestimates the "true" live heap size. It's better to overestimate
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Mon Mar 25 19:53:03 UTC 2024
    - 55.4K bytes
    - Viewed (0)
  6. src/runtime/mbitmap.go

    		m.mask = 1
    	} else {
    		m.mask = m.mask << 1
    	}
    	m.index++
    }
    
    // clobberdeadPtr is a special value that is used by the compiler to
    // clobber dead stack slots, when -clobberdead flag is set.
    const clobberdeadPtr = uintptr(0xdeaddead | 0xdeaddead<<((^uintptr(0)>>63)*32))
    
    // badPointer throws bad pointer in heap panic.
    func badPointer(s *mspan, p, refBase, refOff uintptr) {
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 23 00:18:55 UTC 2024
    - 60K bytes
    - Viewed (0)
  7. src/cmd/compile/internal/ssa/regalloc.go

    					// any safepoint. Just use a type big enough to hold any register.
    					t := LocalSlot{N: e.s.f.NewLocal(c.Pos, types.Int64), Type: types.Int64}
    					// TODO: reuse these slots. They'll need to be erased first.
    					e.set(t, vid, x, false, c.Pos)
    					if e.s.f.pass.debug > regDebug {
    						fmt.Printf("  SPILL %s->%s %s\n", r, t, x.LongString())
    					}
    				}
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Tue Nov 21 17:49:56 UTC 2023
    - 87.2K bytes
    - Viewed (0)
  8. src/runtime/mgcmark.go

    		// since we may have stopped while this function was
    		// setting up a call.
    		//
    		// TODO: We could narrow this down if the compiler
    		// produced a single map per function of stack slots
    		// and registers that ever contain a pointer.
    		if frame.varp != 0 {
    			size := frame.varp - frame.sp
    			if size > 0 {
    				scanConservative(frame.sp, size, nil, gcw, state)
    			}
    		}
    
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu Apr 18 21:25:11 UTC 2024
    - 52.5K bytes
    - Viewed (0)
  9. src/runtime/traceback.go

    	if p == nil {
    		return
    	}
    
    	liveInfo := funcdata(f, abi.FUNCDATA_ArgLiveInfo)
    	liveIdx := pcdatavalue(f, abi.PCDATA_ArgLiveIndex, pc)
    	startOffset := uint8(0xff) // smallest offset that needs liveness info (slots with a lower offset is always live)
    	if liveInfo != nil {
    		startOffset = *(*uint8)(liveInfo)
    	}
    
    	isLive := func(off, slotIdx uint8) bool {
    		if liveInfo == nil || liveIdx <= 0 {
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 29 16:25:21 UTC 2024
    - 55.1K bytes
    - Viewed (0)
  10. src/runtime/mheap.go

    		// any situation where a pointer is suspect.
    		s.state.set(mSpanInUse)
    	}
    
    	// Publish the span in various locations.
    
    	// This is safe to call without the lock held because the slots
    	// related to this span will only ever be read or modified by
    	// this thread until pointers into the span are published (and
    	// we execute a publication barrier at the end of this function
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 22 22:31:00 UTC 2024
    - 78K bytes
    - Viewed (0)
Back to top