Search Options

Results per page
Sort
Preferred Languages
Advanced

Results 11 - 16 of 16 for safe_point (0.14 sec)

  1. src/runtime/runtime2.go

    	freem *m
    
    	gcwaiting  atomic.Bool // gc is waiting to run
    	stopwait   int32
    	stopnote   note
    	sysmonwait atomic.Bool
    	sysmonnote note
    
    	// safePointFn should be called on each P at the next GC
    	// safepoint if p.runSafePointFn is set.
    	safePointFn   func(*p)
    	safePointWait int32
    	safePointNote note
    
    	profilehz int32 // cpu profiling rate
    
    	procresizetime int64 // nanotime() of last change to gomaxprocs
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 30 17:57:37 UTC 2024
    - 47.9K bytes
    - Viewed (0)
  2. src/cmd/compile/internal/ppc64/ssa.go

    		p := s.Prog(storeOp)
    		p.From.Type = obj.TYPE_REG
    		p.From.Reg = v.Args[0].Reg()
    		ssagen.AddrAuto(&p.To, v)
    
    	case ssa.OpArgIntReg, ssa.OpArgFloatReg:
    		// The assembler needs to wrap the entry safepoint/stack growth code with spill/unspill
    		// The loop only runs once.
    		for _, a := range v.Block.Func.RegArgs {
    			// Pass the spill/unspill information along to the assembler, offset by size of
    			// the saved LR slot.
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 22 19:59:38 UTC 2024
    - 55.4K bytes
    - Viewed (0)
  3. src/runtime/malloc.go

    	return assistG
    }
    
    // memclrNoHeapPointersChunked repeatedly calls memclrNoHeapPointers
    // on chunks of the buffer to be zeroed, with opportunities for preemption
    // along the way.  memclrNoHeapPointers contains no safepoints and also
    // cannot be preemptively scheduled, so this provides a still-efficient
    // block copy that can also be preempted on a reasonable granularity.
    //
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 29 17:58:53 UTC 2024
    - 59.6K bytes
    - Viewed (0)
  4. src/cmd/compile/internal/ssa/regalloc.go

    					x := e.p.NewValue1(c.Pos, OpStoreReg, c.Type, c)
    					// Allocate a temp location to spill a register to.
    					// The type of the slot is immaterial - it will not be live across
    					// any safepoint. Just use a type big enough to hold any register.
    					t := LocalSlot{N: e.s.f.NewLocal(c.Pos, types.Int64), Type: types.Int64}
    					// TODO: reuse these slots. They'll need to be erased first.
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Tue Nov 21 17:49:56 UTC 2023
    - 87.2K bytes
    - Viewed (0)
  5. src/runtime/proc.go

    func forEachP(reason waitReason, fn func(*p)) {
    	systemstack(func() {
    		gp := getg().m.curg
    		// Mark the user stack as preemptible so that it may be scanned.
    		// Otherwise, our attempt to force all P's to a safepoint could
    		// result in a deadlock as we attempt to preempt a worker that's
    		// trying to preempt us (e.g. for a stack scan).
    		//
    		// N.B. The execution tracer is not aware of this status
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 29 17:58:53 UTC 2024
    - 207.5K bytes
    - Viewed (0)
  6. src/cmd/compile/internal/ssagen/ssa.go

    	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
    		// First, see if it is already spilled before it may be live. Look for a spill
    		// in the entry block up to the first safepoint.
    		type nameOff struct {
    			n   *ir.Name
    			off int64
    		}
    		partLiveArgsSpilled := make(map[nameOff]bool)
    		for _, v := range f.Entry.Values {
    			if v.Op.IsCall() {
    				break
    			}
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Mon Jun 10 19:44:43 UTC 2024
    - 284.9K bytes
    - Viewed (0)
Back to top