- Sort Score
- Result 10 results
- Languages All
Results 31 - 40 of 418 for clobber (0.12 sec)
-
src/cmd/compile/internal/amd64/versions_test.go
dst.Chmod(0500) // make executable // Clobber all the non-v1 opcodes. opcodes := map[string]bool{} var features []string for feature, opcodeList := range featureToOpcodes { if runtimeFeatures[feature] { features = append(features, fmt.Sprintf("cpu.%s=off", feature)) } for _, op := range opcodeList { opcodes[op] = true } } clobber(t, os.Args[0], dst, opcodes) if err = dst.Close(); err != nil {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Nov 15 20:19:15 UTC 2022 - 10.9K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/flagalloc.go
// we can leave in the flags register at the end of the block. (There // is no place to put a flag regeneration instruction.) for _, b := range f.Blocks { if b.Kind == BlockDefer { // Defer blocks internally use/clobber the flags value. end[b.ID] = nil continue } for _, v := range b.ControlValues() { if v.Type.IsFlags() && end[b.ID] != v { end[b.ID] = nil } } }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Oct 31 21:41:20 UTC 2022 - 6.7K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/memcombine.go
v = leftShift(loadBlock, pos, v, shift0-(n-1)*size*8) } // Install with (Copy v). root.reset(OpCopy) root.AddArg(v) // Clobber the loads, just to prevent additional work being done on // subtrees (which are now unreachable). for i := int64(0); i < n; i++ { clobber(r[i].load) } return true } func memcombineStores(f *Func) { mark := f.newSparseSet(f.NumValues()) defer f.retSparseSet(mark)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Mar 21 19:45:41 UTC 2024 - 18.4K bytes - Viewed (0) -
src/runtime/signal_386.go
pc := uintptr(c.eip()) sp := uintptr(c.esp()) if shouldPushSigpanic(gp, pc, *(*uintptr)(unsafe.Pointer(sp))) { c.pushCall(abi.FuncPCABIInternal(sigpanic), pc) } else { // Not safe to push the call. Just clobber the frame. c.set_eip(uint32(abi.FuncPCABIInternal(sigpanic))) } } func (c *sigctxt) pushCall(targetPC, resumePC uintptr) { // Make it look like we called target at resumePC. sp := uintptr(c.esp())
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Oct 28 18:17:57 UTC 2021 - 1.7K bytes - Viewed (0) -
src/cmd/link/internal/ld/inittask.go
ctxt.mainInittasks = ctxt.inittaskSym([]string{fmt.Sprintf("%s..inittask", objabi.PathToPrefix(*flagPluginPath))}, "go:plugin.inittasks") // Make symbol local so multiple plugins don't clobber each other's inittask list. ctxt.loader.SetAttrLocal(ctxt.mainInittasks, true) case BuildModeShared: // For a shared library, all packages are roots. var roots []string for _, lib := range ctxt.Library {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Jan 30 20:09:45 UTC 2024 - 6.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewriteAMD64.go
// cond: canMergeLoad(v, l) && clobber(l) // result: (CMPBload {sym} [off] ptr x mem) for { l := v_0 if l.Op != OpAMD64MOVBload { break } off := auxIntToInt32(l.AuxInt) sym := auxToSym(l.Aux) mem := l.Args[1] ptr := l.Args[0] x := v_1 if !(canMergeLoad(v, l) && clobber(l)) { break } v.reset(OpAMD64CMPBload)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Mar 12 19:38:41 UTC 2024 - 712.7K bytes - Viewed (0) -
src/runtime/asan_ppc64le.s
RLDCR $0, R1, $~15, R1 // align SP to 16 bytes MOVD FARG, CTR // address of function to be called MOVD R0, 0(R1) // clear back chain pointer BL (CTR) MOVD $0, R0 // C code can clobber R0 set it back to 0 MOVD R16, R1 // restore R1; MOVD runtime·tls_g(SB), R10 // find correct g MOVD 0(R10), g RET // tls_g, g value for each thread in TLS
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Aug 26 18:13:33 UTC 2022 - 2.8K bytes - Viewed (0) -
src/runtime/signal_amd64.go
// which takes care of that. if shouldPushSigpanic(gp, pc, *(*uintptr)(unsafe.Pointer(sp))) { c.pushCall(abi.FuncPCABI0(sigpanic0), pc) } else { // Not safe to push the call. Just clobber the frame. c.set_rip(uint64(abi.FuncPCABI0(sigpanic0))) } } func (c *sigctxt) pushCall(targetPC, resumePC uintptr) { // Make it look like we called target at resumePC. sp := uintptr(c.rsp())
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat Apr 23 05:38:56 UTC 2022 - 2.7K bytes - Viewed (0) -
src/runtime/race_arm64.s
load_g MOVD g_racectx(g), R0 // goroutine context BL racecall<>(SB) MOVD R21, R9 // restore the original function // Call the atomic function. // racecall will call LLVM race code which might clobber R28 (g) load_g MOVD g_racectx(g), R0 // goroutine context MOVD 16(RSP), R1 // caller pc MOVD R9, R2 // pc ADD $40, RSP, R3 // arguments BL racecall<>(SB) // Call __tsan_go_ignore_sync_end.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:37:29 UTC 2024 - 15.5K bytes - Viewed (0) -
tools/build-base-images.sh
# * export DOCKER_ARCHITECTURES="linux/amd64,linux/arm64" # Note: if you already have a container builder before running the qemu setup you will need to restart them "${ROOT}/tools/docker" --push --no-cache --no-clobber --targets="${DOCKER_TARGETS}" APKO_IMAGES="" for h in ${HUBS}; do for t in ${TAGS:-$TAG}; do APKO_IMAGES+="${h}/iptables:$t " done done
Registered: Fri Jun 14 15:00:06 UTC 2024 - Last Modified: Thu May 02 17:24:41 UTC 2024 - 2.2K bytes - Viewed (0)