Results 81 - 90 of 132 for preemptee (0.14 sec)

  1. src/cmd/internal/obj/arm64/obj7.go

    		p.From.Offset = 3 * int64(c.ctxt.Arch.PtrSize) // G.stackguard1
    	}
    	p.To.Type = obj.TYPE_REG
    	p.To.Reg = REGRT1
    
    	// Mark the stack bound check and morestack call async nonpreemptible.
    	// If we get preempted here, when resumed the preemption request is
    	// cleared, but we'll still call morestack, which will double the stack
    	// unnecessarily. See issue #35470.
    	p = c.ctxt.StartUnsafePoint(p, c.newprog)
    
    - Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed Nov 08 05:46:32 UTC 2023
    - 28.4K bytes
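    The comment in this excerpt (it recurs in the mips, ppc64, and x86 backends further down this page) concerns Go's asynchronous preemption: the instructions between the stack-bound check and the morestack call are marked as an unsafe point so that a goroutine resumed there does not grow its stack needlessly. As a loose, hypothetical illustration of the mechanism these unsafe points constrain, the sketch below (not taken from the excerpted file) runs a call-free loop that only asynchronous preemption can interrupt: with the default settings the runtime.GC stop-the-world completes quickly, while with GODEBUG=asyncpreemptoff=1 it can stall until the loop finishes.

    // Minimal sketch: a goroutine whose loop body contains no calls, so it has
    // no cooperative preemption points. Assumes GOMAXPROCS > 1 so the spinner
    // and main run concurrently. Not derived from obj7.go.
    package main

    import (
    	"fmt"
    	"runtime"
    	"time"
    )

    func spin(n int64) int64 {
    	var s int64
    	for i := int64(0); i < n; i++ { // no function calls: async preemption only
    		s += i
    	}
    	return s
    }

    func main() {
    	go spin(1 << 32) // long-running, call-free loop

    	time.Sleep(10 * time.Millisecond) // let it start spinning
    	start := time.Now()
    	runtime.GC() // needs a stop-the-world, so the spinner must be preempted
    	fmt.Println("GC finished after", time.Since(start))
    }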
  2. src/runtime/mstats.go

    // valid statistic values.
    //
    // Not safe to call concurrently. The world must be stopped
    // or metricsSema must be held.
    func (m *consistentHeapStats) read(out *heapStatsDelta) {
    	// Getting preempted after this point is not safe because
    	// we read allp. We need to make sure a STW can't happen
    	// so it doesn't change out from under us.
    	mp := acquirem()
    
    	// Get the current generation. We can be confident that this
    - Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Mon Apr 08 21:03:13 UTC 2024
    - 34.2K bytes
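    consistentHeapStats is the internal accumulator behind the public runtime/metrics package; read() produces the consistent snapshot (world stopped or metricsSema held, no preemption while allp is read) that metrics.Read ultimately hands back to callers. Below is a minimal sketch of that public side; the two metric names are assumptions about what the running Go version exposes, so real code should consult metrics.All() first.

    // Minimal sketch of the public runtime/metrics API that the consistent
    // heap statistics above serve. Metric names are assumed to be supported;
    // check metrics.All() in real code.
    package main

    import (
    	"fmt"
    	"runtime/metrics"
    )

    func main() {
    	samples := []metrics.Sample{
    		{Name: "/memory/classes/heap/objects:bytes"},
    		{Name: "/gc/heap/allocs:bytes"},
    	}
    	// Read fills in a consistent snapshot; internally the runtime guards
    	// the read along the lines of the acquirem() in the excerpt above.
    	metrics.Read(samples)

    	for _, s := range samples {
    		if s.Value.Kind() == metrics.KindUint64 {
    			fmt.Printf("%s = %d\n", s.Name, s.Value.Uint64())
    		}
    	}
    }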
  3. src/cmd/internal/obj/mips/obj0.go

    		p.From.Offset = 3 * int64(c.ctxt.Arch.PtrSize) // G.stackguard1
    	}
    	p.To.Type = obj.TYPE_REG
    	p.To.Reg = REG_R1
    
    	// Mark the stack bound check and morestack call async nonpreemptible.
    	// If we get preempted here, when resumed the preemption request is
    	// cleared, but we'll still call morestack, which will double the stack
    	// unnecessarily. See issue #35470.
    	p = c.ctxt.StartUnsafePoint(p, c.newprog)
    
    	var q *obj.Prog
    - Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Fri Apr 21 19:28:53 UTC 2023
    - 30.6K bytes
  4. pkg/scheduler/framework/interface.go

    	// Optionally, a non-nil PostFilterResult may be returned along with a Success status. For example,
    	// a preemption plugin may choose to return a nominatedNodeName, so that the framework can reuse it to update the
    	// preemptor pod's .status.nominatedNodeName field.
    	PostFilter(ctx context.Context, state *CycleState, pod *v1.Pod, filteredNodeStatusMap NodeToStatusMap) (*PostFilterResult, *Status)
    }
    
    - Registered: Sat Jun 15 01:39:40 UTC 2024
    - Last Modified: Fri May 31 15:52:16 UTC 2024
    - 35.4K bytes
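    PostFilter is the extension point where preemption lives in the scheduler framework: it runs only for pods that failed all Filter plugins, and a successful result may carry a nominated node. The sketch below is a hypothetical plugin written against the signature shown in the excerpt; the PostFilterResult and NominatingInfo field layout and the pickVictimNode helper are assumptions for illustration and vary across Kubernetes versions.

    // Hypothetical PostFilter plugin sketch, built only against the interface
    // shown above. Field names on PostFilterResult/NominatingInfo are assumed
    // and may differ between Kubernetes releases.
    package example

    import (
    	"context"

    	v1 "k8s.io/api/core/v1"
    	"k8s.io/kubernetes/pkg/scheduler/framework"
    )

    type NaivePreemption struct{} // hypothetical plugin

    func (pl *NaivePreemption) Name() string { return "NaivePreemption" }

    // PostFilter is called only when the pod failed all Filter plugins.
    // Returning a nominated node lets the framework update the preemptor pod's
    // status.nominatedNodeName, as the interface comment describes.
    func (pl *NaivePreemption) PostFilter(ctx context.Context, state *framework.CycleState,
    	pod *v1.Pod, filteredNodeStatusMap framework.NodeToStatusMap) (*framework.PostFilterResult, *framework.Status) {

    	victim := pickVictimNode(filteredNodeStatusMap) // hypothetical helper
    	if victim == "" {
    		return nil, framework.NewStatus(framework.Unschedulable, "no node can be freed by preemption")
    	}
    	return &framework.PostFilterResult{
    		NominatingInfo: &framework.NominatingInfo{
    			NominatedNodeName: victim,
    			NominatingMode:    framework.ModeOverride,
    		},
    	}, framework.NewStatus(framework.Success)
    }

    // pickVictimNode stands in for real victim selection, which simulates
    // evicting lower-priority pods and re-running Filter.
    func pickVictimNode(m framework.NodeToStatusMap) string {
    	for name := range m {
    		return name
    	}
    	return ""
    }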
  5. src/cmd/compile/internal/types2/call.go

    			check.use(call.ArgList...)
    			x.mode = invalid
    			x.expr = call
    			return statement
    		}
    
    		// If sig is generic and all type arguments are provided, preempt function
    		// argument type inference by explicitly instantiating the signature. This
    		// ensures that we record accurate type information for sig, even if there
    - Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 30 19:19:55 UTC 2024
    - 31.5K bytes
  6. src/go/types/call.go

    			check.use(call.Args...)
    			x.mode = invalid
    			x.expr = call
    			return statement
    		}
    
    		// If sig is generic and all type arguments are provided, preempt function
    		// argument type inference by explicitly instantiating the signature. This
    		// ensures that we record accurate type information for sig, even if there
    - Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 30 19:19:55 UTC 2024
    - 33.5K bytes
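    The same comment appears in both type checkers (types2 above for the compiler, go/types here): when a call site supplies every type argument, the checker instantiates the signature up front instead of running argument-based inference. A small, self-contained illustration of the language-level behaviour being implemented (not taken from call.go):

    // Minimal illustration of explicit instantiation preempting argument type
    // inference.
    package main

    import "fmt"

    func pick[T any](cond bool, a, b T) T {
    	if cond {
    		return a
    	}
    	return b
    }

    func main() {
    	// Inference: T is inferred from the arguments (here, int).
    	fmt.Println(pick(true, 1, 2))

    	// Explicit instantiation: pick[float64] is fully instantiated before
    	// the arguments are considered, so the untyped constants 1 and 2 are
    	// converted to float64 instead of driving inference.
    	fmt.Println(pick[float64](true, 1, 2))
    }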
  7. src/cmd/internal/obj/ppc64/obj9.go

    		p.From.Offset = 3 * int64(c.ctxt.Arch.PtrSize) // G.stackguard1
    	}
    	p.To.Type = obj.TYPE_REG
    	p.To.Reg = REG_R22
    
    	// Mark the stack bound check and morestack call async nonpreemptible.
    	// If we get preempted here, when resumed the preemption request is
    	// cleared, but we'll still call morestack, which will double the stack
    	// unnecessarily. See issue #35470.
    	p = c.ctxt.StartUnsafePoint(p, c.newprog)
    
    - Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 22 18:17:17 UTC 2024
    - 40.8K bytes
  8. src/cmd/internal/obj/wasm/wasmobj.go

    		p.Spadj = tempFrame
    		ctxtp := obj.Addr{
    			Type:   obj.TYPE_MEM,
    			Reg:    REG_SP,
    			Offset: 0,
    		}
    		p = appendp(p, AMOVD, regAddr(REGCTXT), ctxtp)
    
    		// maymorestack must not itself preempt because we
    		// don't have full stack information, so this can be
    		// ACALLNORESUME.
    		p = appendp(p, ACALLNORESUME, constAddr(0))
    		// See ../x86/obj6.go
    		sym := ctxt.LookupABI(ctxt.Flag_maymorestack, s.ABI())
    - Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed Jun 14 00:03:57 UTC 2023
    - 34.6K bytes
  9. src/runtime/arena.go

    	// what that looks like until we actually allocate things into the
    	// arena).
    	deductAssistCredit(userArenaChunkBytes)
    
    	// Set mp.mallocing to keep from being preempted by GC.
    	mp := acquirem()
    	if mp.mallocing != 0 {
    		throw("malloc deadlock")
    	}
    	if mp.gsignal == getg() {
    		throw("malloc during signal")
    	}
    	mp.mallocing = 1
    
    	// Allocate a new user arena.
    - Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 08 17:44:56 UTC 2024
    - 37.9K bytes
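    arena.go is the runtime side of the user arena experiment; the mp.mallocing guard in the excerpt keeps the allocation from being preempted by the GC partway through. The user-facing counterpart is the experimental arena package, sketched below; it builds only with GOEXPERIMENT=arenas, and the API is experimental and may change or be removed.

    // Sketch of the experimental user-facing arena API whose allocations go
    // through the runtime path above. Build with GOEXPERIMENT=arenas.
    package main

    import (
    	"arena"
    	"fmt"
    )

    type point struct{ X, Y int }

    func main() {
    	a := arena.NewArena()

    	p := arena.New[point](a)              // single value backed by arena memory
    	ps := arena.MakeSlice[point](a, 8, 8) // slice backed by arena memory
    	p.X, p.Y = 1, 2
    	ps[0] = *p
    	fmt.Println(ps[0])

    	// Freeing releases the arena's chunks in one step; using p or ps after
    	// this point is invalid.
    	a.Free()
    }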
  10. src/cmd/internal/obj/x86/obj6.go

    		if cursym.CFunc() {
    			p.To.Offset = 3 * int64(ctxt.Arch.PtrSize) // G.stackguard1
    		}
    
    		// Mark the stack bound check and morestack call async nonpreemptible.
    		// If we get preempted here, when resumed the preemption request is
    		// cleared, but we'll still call morestack, which will double the stack
    		// unnecessarily. See issue #35470.
    		p = ctxt.StartUnsafePoint(p, newprog)
    - Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Fri Sep 08 18:36:45 UTC 2023
    - 40.9K bytes