Results 11 - 20 of 24 for FuncPCABIInternal (0.24 sec)
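
Every hit on this page reaches for the same internal helper. As a minimal sketch of what it declares, assuming the doc comments in the internal/abi package (a standard-library-internal package, so nothing outside the Go tree can import it; the signatures below are paraphrased, not copied verbatim):

    // FuncPCABIInternal returns the entry PC of f. When f is a direct
    // reference to a function, that function must be defined with the
    // ABIInternal calling convention, or the call is a compile error.
    // There is no body here: the call is a compiler intrinsic (see the
    // ir/func.go hit below), lowered to the function's entry address.
    func FuncPCABIInternal(f interface{}) uintptr

    // FuncPCABI0 is the counterpart for ABI0 (typically assembly)
    // functions; the preempt.go hit below uses both variants.
    func FuncPCABI0(f interface{}) uintptr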

  1. src/runtime/map.go

    // hold onto it for very long.
    func mapaccess1(t *maptype, h *hmap, key unsafe.Pointer) unsafe.Pointer {
    	if raceenabled && h != nil {
    		callerpc := getcallerpc()
    		pc := abi.FuncPCABIInternal(mapaccess1)
    		racereadpc(unsafe.Pointer(h), callerpc, pc)
    		raceReadObjectPC(t.Key, key, callerpc, pc)
    	}
    	if msanenabled && h != nil {
    		msanread(key, t.Key.Size_)
    	}
    	if asanenabled && h != nil {
  2. src/runtime/signal_unix.go

    			if fwdSig[i] != _SIG_DFL && fwdSig[i] != _SIG_IGN {
    				setsigstack(i)
    			} else if fwdSig[i] == _SIG_IGN {
    				sigInitIgnored(i)
    			}
    			continue
    		}
    
    		handlingSig[i] = 1
    		setsig(i, abi.FuncPCABIInternal(sighandler))
    	}
    }
    
    //go:nosplit
    //go:nowritebarrierrec
    func sigInstallGoHandler(sig uint32) bool {
    	// For some signals, we respect an inherited SIG_IGN handler
  3. src/runtime/mprof.go

    		// "runtime.unlock".
    		skip += 1 // runtime.unlockWithRank.func1
    	}
    	prof.pending = 0
    
    	prof.stack[0] = logicalStackSentinel
    	if debug.runtimeContentionStacks.Load() == 0 {
    		prof.stack[1] = abi.FuncPCABIInternal(_LostContendedRuntimeLock) + sys.PCQuantum
    		prof.stack[2] = 0
    		return
    	}
    
    	var nstk int
    	gp := getg()
    	sp := getcallersp()
    	pc := getcallerpc()
    	systemstack(func() {
    		var u unwinder
  4. src/runtime/iface.go

    // be used as the second word of an interface value.
    func convT(t *_type, v unsafe.Pointer) unsafe.Pointer {
    	if raceenabled {
    		raceReadObjectPC(t, v, getcallerpc(), abi.FuncPCABIInternal(convT))
    	}
    	if msanenabled {
    		msanread(v, t.Size_)
    	}
    	if asanenabled {
    		asanread(v, t.Size_)
    	}
    	x := mallocgc(t.Size_, t, true)
    	typedmemmove(t, x, v)
    	return x
    }
  5. src/runtime/chan.go

    		throw("unreachable")
    	}
    
    	if debugChan {
    		print("chansend: chan=", c, "\n")
    	}
    
    	if raceenabled {
    		racereadpc(c.raceaddr(), callerpc, abi.FuncPCABIInternal(chansend))
    	}
    
    	// Fast path: check for failed non-blocking operation without acquiring the lock.
    	//
    	// After observing that the channel is not closed, we observe that the channel is
  6. src/runtime/preempt.go

    // asyncPreempt call.
    var asyncPreemptStack = ^uintptr(0)
    
    func init() {
    	f := findfunc(abi.FuncPCABI0(asyncPreempt))
    	total := funcMaxSPDelta(f)
    	f = findfunc(abi.FuncPCABIInternal(asyncPreempt2))
    	total += funcMaxSPDelta(f)
    	// Add some overhead for return PCs, etc.
    	asyncPreemptStack = uintptr(total) + 8*goarch.PtrSize
    	if asyncPreemptStack > stackNosplit {
  7. src/runtime/type.go

    }
    
    func (t rtype) textOff(off textOff) unsafe.Pointer {
    	if off == -1 {
    		// -1 is the sentinel value for unreachable code.
    		// See cmd/link/internal/ld/data.go:relocsym.
    		return unsafe.Pointer(abi.FuncPCABIInternal(unreachableMethod))
    	}
    	base := uintptr(unsafe.Pointer(t.Type))
    	var md *moduledata
    	for next := &firstmoduledata; next != nil; next = next.next {
    		if base >= next.types && base < next.etypes {
    			md = next
  8. src/cmd/compile/internal/ir/func.go

    func IsFuncPCIntrinsic(n *CallExpr) bool {
    	if n.Op() != OCALLFUNC || n.Fun.Op() != ONAME {
    		return false
    	}
    	fn := n.Fun.(*Name).Sym()
    	return (fn.Name == "FuncPCABI0" || fn.Name == "FuncPCABIInternal") &&
    		fn.Pkg.Path == "internal/abi"
    }
    
    // IsIfaceOfFunc inspects whether n is an interface conversion from a direct
    // reference of a func. If so, it returns referenced Func; otherwise nil.
    //
  9. src/runtime/panic.go

    // See the doc comment for deferrangefunc for details.
    func deferprocat(fn func(), frame any) {
    	head := frame.(*atomic.Pointer[_defer])
    	if raceenabled {
    		racewritepc(unsafe.Pointer(head), getcallerpc(), abi.FuncPCABIInternal(deferprocat))
    	}
    	d1 := newdefer()
    	d1.fn = fn
    	for {
    		d1.link = head.Load()
    		if d1.link == badDefer() {
    			throw("defer after range func returned")
    		}
    		if head.CompareAndSwap(d1.link, d1) {
  10. src/runtime/pprof/pprof.go

    	}
    
    	stk := make([]uintptr, 32)
    	n := runtime.Callers(skip+1, stk[:])
    	stk = stk[:n]
    	if len(stk) == 0 {
    		// The value for skip is too large, and there's no stack trace to record.
    		stk = []uintptr{abi.FuncPCABIInternal(lostProfileEvent)}
    	}
    
    	p.mu.Lock()
    	defer p.mu.Unlock()
    	if p.m[value] != nil {
    		panic("pprof: Profile.Add of duplicate value")
    	}
    	p.m[value] = stk
    }
    
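Two notes on the pattern the hits above share. In the map, iface, chan and panic excerpts, the runtime passes abi.FuncPCABIInternal(fn) to the race detector alongside getcallerpc(): the former attributes the access to the runtime function performing it, the latter to the user code that called in. The preempt.go excerpt is the only one that pairs the intrinsic with FuncPCABI0, since asyncPreempt is an assembly routine while asyncPreempt2 is ordinary Go. A hedged sketch of that distinction (the compile-error behaviour is taken from the internal/abi doc comments, not re-verified against a current toolchain):

    // asyncPreempt is defined in assembly, so its entry point is ABI0;
    // asyncPreempt2 is a Go function, so it uses ABIInternal.
    pcAsm := abi.FuncPCABI0(asyncPreempt)
    pcGo := abi.FuncPCABIInternal(asyncPreempt2)
    // Swapping them, e.g. abi.FuncPCABIInternal(asyncPreempt), would be
    // rejected at compile time: the intrinsic checks the ABI of any
    // direct function reference it is given.
    _, _ = pcAsm, pcGo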