- Sort by: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 157 for clobber (0.14 sec)
-
src/cmd/compile/internal/liveness/plive.go
// clobber generates code to clobber pointer slots in all dead variables // (those not marked in live). Clobbering instructions are added to the end // of b.Values. func clobber(lv *liveness, b *ssa.Block, live bitvec.BitVec) { for i, n := range lv.vars { if !live.Get(int32(i)) && !n.Addrtaken() && !n.OpenDeferSlot() && !n.IsOutputParamHeapAddr() { // Don't clobber stack objects (address-taken). They are // tracked dynamically.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Jun 07 15:22:22 UTC 2024 - 45.2K bytes - Viewed (0) -
src/cmd/compile/internal/amd64/versions_test.go
dst.Chmod(0500) // make executable // Clobber all the non-v1 opcodes. opcodes := map[string]bool{} var features []string for feature, opcodeList := range featureToOpcodes { if runtimeFeatures[feature] { features = append(features, fmt.Sprintf("cpu.%s=off", feature)) } for _, op := range opcodeList { opcodes[op] = true } } clobber(t, os.Args[0], dst, opcodes) if err = dst.Close(); err != nil {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Nov 15 20:19:15 UTC 2022 - 10.9K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/memcombine.go
v = leftShift(loadBlock, pos, v, shift0-(n-1)*size*8) } // Install with (Copy v). root.reset(OpCopy) root.AddArg(v) // Clobber the loads, just to prevent additional work being done on // subtrees (which are now unreachable). for i := int64(0); i < n; i++ { clobber(r[i].load) } return true } func memcombineStores(f *Func) { mark := f.newSparseSet(f.NumValues()) defer f.retSparseSet(mark)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Mar 21 19:45:41 UTC 2024 - 18.4K bytes - Viewed (0) -
src/runtime/race_arm64.s
load_g MOVD g_racectx(g), R0 // goroutine context BL racecall<>(SB) MOVD R21, R9 // restore the original function // Call the atomic function. // racecall will call LLVM race code which might clobber R28 (g) load_g MOVD g_racectx(g), R0 // goroutine context MOVD 16(RSP), R1 // caller pc MOVD R9, R2 // pc ADD $40, RSP, R3 // arguments BL racecall<>(SB) // Call __tsan_go_ignore_sync_end.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 18:37:29 UTC 2024 - 15.5K bytes - Viewed (0) -
tools/docker-builder/main.go
rootCmd.Flags().BoolVar(&globalArgs.NoCache, "no-cache", globalArgs.NoCache, "disable caching") rootCmd.Flags().BoolVar(&globalArgs.NoClobber, "no-clobber", globalArgs.NoClobber, "do not allow pushing images that already exist") rootCmd.Flags().StringVar(&globalArgs.Builder, "builder", globalArgs.Builder, "type of builder to use. options are crane or docker")
Registered: Fri Jun 14 15:00:06 UTC 2024 - Last Modified: Wed Jul 26 13:23:41 UTC 2023 - 10K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/RISCV64.rules
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Mar 07 14:57:07 UTC 2024 - 40.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/branchelim.go
cost := phi * 1 if phi > 1 { // If we have more than 1 phi and some values in post have args // in yes or no blocks, we may have to recalculate condition, because // those args may clobber flags. For now assume that all operations clobber flags. cost += other * 1 } return cost < maxcost } } // canSpeculativelyExecute reports whether every value in the block can
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Nov 30 17:46:51 UTC 2022 - 12.7K bytes - Viewed (0) -
src/runtime/asm_arm.s
// to the buffer space in R8. // It clobbers condition codes. // It does not clobber any other general-purpose registers, // but may clobber others (e.g., floating point registers). // The act of CALLing gcWriteBarrier will clobber R14 (LR). TEXT gcWriteBarrier<>(SB),NOSPLIT|NOFRAME,$0 // Save the registers clobbered by the fast path. MOVM.DB.W [R0,R1], (R13) retry: MOVW g_m(g), R0
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Feb 23 21:00:52 UTC 2024 - 32.1K bytes - Viewed (0) -
src/runtime/asm_mips64x.s
// to the buffer space in R25. // It clobbers R23 (the linker temp register). // The act of CALLing gcWriteBarrier will clobber R31 (LR). // It does not clobber any other general-purpose registers, // but may clobber others (e.g., floating point registers). TEXT gcWriteBarrier<>(SB),NOSPLIT,$192 // Save the registers clobbered by the fast path. MOVV R1, 184(R29) MOVV R2, 192(R29) retry:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Nov 06 19:45:59 UTC 2023 - 24.3K bytes - Viewed (0) -
src/runtime/asm_s390x.s
// number of bytes of buffer needed in R9, and returns a pointer // to the buffer space in R9. // It clobbers R10 (the temp register) and R1 (used by PLT stub). // It does not clobber any other general-purpose registers, // but may clobber others (e.g., floating point registers). TEXT gcWriteBarrier<>(SB),NOSPLIT,$96 // Save the registers clobbered by the fast path. MOVD R4, 96(R15) retry: MOVD g_m(g), R1 MOVD m_p(R1), R1
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Jan 25 09:18:28 UTC 2024 - 28.1K bytes - Viewed (0)