- Sort by: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 417 for caller2b (0.21 sec)
-
src/runtime/callers_test.go
// open-coded defer processing) want := []string{"runtime.Callers", "runtime_test.TestCallersPanic.func1", "runtime.gopanic", "runtime_test.f3", "runtime_test.f2", "runtime_test.f1", "runtime_test.TestCallersPanic"} defer func() { if r := recover(); r == nil { t.Fatal("did not panic") } pcs := make([]uintptr, 20) pcs = pcs[:runtime.Callers(0, pcs)] testCallers(t, pcs, true)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Aug 28 21:36:31 UTC 2023 - 12.1K bytes - Viewed (0) -
guava/src/com/google/common/util/concurrent/TimeLimiter.java
* exception is propagated to the caller exactly as-is. If, on the other hand, the time limit is * reached, the proxy will attempt to abort the call to the target, and will throw an {@link * UncheckedTimeoutException} to the caller. * * <p>It is important to note that the primary purpose of the proxy object is to return control to
Registered: Wed Jun 12 16:38:11 UTC 2024 - Last Modified: Fri May 12 18:32:03 UTC 2023 - 15.3K bytes - Viewed (0) -
src/runtime/slice.go
if toLen < n { n = toLen } if width == 0 { return n } size := uintptr(n) * width if raceenabled { callerpc := getcallerpc() pc := abi.FuncPCABIInternal(slicecopy) racereadrangepc(fromPtr, size, callerpc, pc) racewriterangepc(toPtr, size, callerpc, pc) } if msanenabled { msanread(fromPtr, size) msanwrite(toPtr, size) } if asanenabled { asanread(fromPtr, size)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 16:25:21 UTC 2024 - 12.2K bytes - Viewed (0) -
src/runtime/stack_test.go
useStackPtrs(n-1, b) } type structWithMethod struct{} func (s structWithMethod) caller() string { _, file, line, ok := Caller(1) if !ok { panic("Caller failed") } return fmt.Sprintf("%s:%d", file, line) } func (s structWithMethod) callers() []uintptr { pc := make([]uintptr, 16) return pc[:Callers(0, pc)] } func (s structWithMethod) stack() string { buf := make([]byte, 4<<10)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Jun 14 00:03:57 UTC 2023 - 23.1K bytes - Viewed (0) -
src/log/slog/logger.go
if !l.Enabled(ctx, level) { return } var pc uintptr if !internal.IgnorePC { var pcs [1]uintptr // skip [runtime.Callers, this function, this function's caller] runtime.Callers(3, pcs[:]) pc = pcs[0] } r := NewRecord(time.Now(), level, msg, pc) r.Add(args...) if ctx == nil { ctx = context.Background() } _ = l.Handler().Handle(ctx, r)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Jan 08 18:26:18 UTC 2024 - 10.2K bytes - Viewed (0) -
src/runtime/chan.go
*/ func chansend(c *hchan, ep unsafe.Pointer, block bool, callerpc uintptr) bool { if c == nil { if !block { return false } gopark(nil, nil, waitReasonChanSendNilChan, traceBlockForever, 2) throw("unreachable") } if debugChan { print("chansend: chan=", c, "\n") } if raceenabled { racereadpc(c.raceaddr(), callerpc, abi.FuncPCABIInternal(chansend)) }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 01:16:50 UTC 2024 - 25.9K bytes - Viewed (0) -
src/runtime/race_ppc64le.s
MOVD $__tsan_go_atomic32_load(SB), R8 ADD $32, R1, R6 // addr of caller's 1st arg BR racecallatomic<>(SB) RET TEXT sync∕atomic·LoadInt64(SB), NOSPLIT, $0-16 GO_ARGS // void __tsan_go_atomic64_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a); MOVD $__tsan_go_atomic64_load(SB), R8 ADD $32, R1, R6 // addr of caller's 1st arg BR racecallatomic<>(SB) RET
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:37:29 UTC 2024 - 17K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/transforms/decompose_optionals.cc
if (!symbol_uses.has_value()) { return failure(); } for (auto use : *symbol_uses) { Operation* caller = use.getUser(); bool changed = false; rewriter.startOpModification(caller); for (auto [result, type] : llvm::zip(caller->getResults(), return_types)) { if (result.getType() != type) { result.setType(type); changed = true; }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Jun 12 21:18:05 UTC 2024 - 9.8K bytes - Viewed (0) -
src/runtime/map_fast32.go
package runtime import ( "internal/abi" "internal/goarch" "unsafe" ) func mapaccess1_fast32(t *maptype, h *hmap, key uint32) unsafe.Pointer { if raceenabled && h != nil { callerpc := getcallerpc() racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess1_fast32)) } if h == nil || h.count == 0 { return unsafe.Pointer(&zeroVal[0]) } if h.flags&hashWriting != 0 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 01:17:26 UTC 2024 - 13.9K bytes - Viewed (0) -
src/runtime/sys_linux_mips64x.s
MOVW n+16(FP), R6 MOVV $SYS_write, R2 SYSCALL BEQ R7, 2(PC) SUBVU R2, R0, R2 // caller expects negative errno MOVW R2, ret+24(FP) RET TEXT runtime·read(SB),NOSPLIT|NOFRAME,$0-28 MOVW fd+0(FP), R4 MOVV p+8(FP), R5 MOVW n+16(FP), R6 MOVV $SYS_read, R2 SYSCALL BEQ R7, 2(PC) SUBVU R2, R0, R2 // caller expects negative errno MOVW R2, ret+24(FP) RET
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Oct 18 20:57:24 UTC 2022 - 12K bytes - Viewed (0)