- Sort: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 275 for clobber (0.1 sec)
-
src/cmd/compile/internal/liveness/plive.go
// clobber generates code to clobber pointer slots in all dead variables // (those not marked in live). Clobbering instructions are added to the end // of b.Values. func clobber(lv *liveness, b *ssa.Block, live bitvec.BitVec) { for i, n := range lv.vars { if !live.Get(int32(i)) && !n.Addrtaken() && !n.OpenDeferSlot() && !n.IsOutputParamHeapAddr() { // Don't clobber stack objects (address-taken). They are // tracked dynamically.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Jun 07 15:22:22 UTC 2024 - 45.2K bytes - Viewed (0) -
test/codegen/clobberdead.go
package codegen type T [2]*int // contain pointer, not SSA-able (so locals are not registerized) var p1, p2, p3 T func F() { // 3735936685 is 0xdeaddead. On ARM64 R27 is REGTMP. // clobber x, y at entry. not clobber z (stack object). // amd64:`MOVL\t\$3735936685, command-line-arguments\.x`, `MOVL\t\$3735936685, command-line-arguments\.y`, -`MOVL\t\$3735936685, command-line-arguments\.z`
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Oct 19 23:33:25 UTC 2023 - 1.3K bytes - Viewed (0) -
src/cmd/compile/internal/test/testdata/mergelocals/integration.go
np uintptr x [1023]int } var G int //go:noinline func clobber() { G++ } func ABC(i, j int) int { r := 0 // here v2 and v3 can be overlapped. clobber() if i < 101 { var v2 Vanilla v2.x[i] = j r += v2.x[j] } if j != 303 { var v3 Vanilla2 v3.x[i] = j r += v3.x[j] } clobber() // not an overlap candidate (only one var of this size). var s Single
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Apr 09 17:42:19 UTC 2024 - 1.3K bytes - Viewed (0) -
src/cmd/cgo/internal/test/issue9400/asm_mips64x.s
//go:build (mips64 || mips64le) && gc #include "textflag.h" #define SYNC WORD $0xf TEXT ·RewindAndSetgid(SB),NOSPLIT|NOFRAME,$0-0 // Rewind stack pointer so anything that happens on the stack // will clobber the test pattern created by the caller ADDV $(1024*8), R29 // Ask signaller to setgid MOVW $1, R1 SYNC MOVW R1, ·Baton(SB) SYNC // Wait for setgid completion loop: SYNC
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Dec 15 21:57:36 UTC 2023 - 691 bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/AMD64.rules
(MOVBQZX x:(MOVBload [off] {sym} ptr mem)) && x.Uses == 1 && clobber(x) => @x.Block (MOVBload <v.Type> [off] {sym} ptr mem) (MOVBQZX x:(MOVWload [off] {sym} ptr mem)) && x.Uses == 1 && clobber(x) => @x.Block (MOVBload <v.Type> [off] {sym} ptr mem) (MOVBQZX x:(MOVLload [off] {sym} ptr mem)) && x.Uses == 1 && clobber(x) => @x.Block (MOVBload <v.Type> [off] {sym} ptr mem)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Mar 12 19:38:41 UTC 2024 - 93.9K bytes - Viewed (0) -
src/cmd/cgo/internal/test/issue9400/asm_riscv64.s
// license that can be found in the LICENSE file. //go:build riscv64 && gc #include "textflag.h" TEXT ·RewindAndSetgid(SB),NOSPLIT|NOFRAME,$0-0 // Rewind stack pointer so anything that happens on the stack // will clobber the test pattern created by the caller ADD $(1024*8), X2 // Ask signaller to setgid MOV $1, X5 FENCE MOVW X5, ·Baton(SB) FENCE // Wait for setgid completion loop: FENCE
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Dec 15 21:57:36 UTC 2023 - 659 bytes - Viewed (0) -
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go
// Package framepointer defines an Analyzer that reports assembly code // that clobbers the frame pointer before saving it. package framepointer import ( "go/build" "regexp" "strings" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" ) const Doc = "report assembly that clobbers the frame pointer before saving it" var Analyzer = &analysis.Analyzer{
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 03 02:38:00 UTC 2024 - 2.4K bytes - Viewed (0) -
src/cmd/internal/buildid/buildid_test.go
if elf.ProgType(phdr.Type) == elf.PT_NOTE { // Increase the size so we keep // reading notes. order.PutUint64(data[phoff+4*8:], phdr.Filesz+1) // Clobber the Align field to zero. order.PutUint64(data[phoff+6*8:], 0) // Clobber the note type so we // keep reading notes. order.PutUint32(data[phdr.Off+12:], 0) } phoff += phsize }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 04 17:31:28 UTC 2024 - 6.6K bytes - Viewed (0) -
src/runtime/signal_ppc64x.go
// Push the LR to stack, as we'll clobber it in order to // push the call. The function being pushed is responsible // for restoring the LR and setting the SP back. // This extra space is known to gentraceback. sp := c.sp() - sys.MinFrameSize c.set_sp(sp) *(*uint64)(unsafe.Pointer(uintptr(sp))) = c.link() // In PIC mode, we'll set up (i.e. clobber) R2 on function // entry. Save it ahead of time.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Sep 08 15:08:04 UTC 2023 - 3.7K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/regalloc.go
} // Dump any registers which will be clobbered if s.doClobber && v.Op.IsCall() { // clobber registers that are marked as clobber in regmask, but // don't clobber inputs. s.clobberRegs(regspec.clobbers &^ s.tmpused &^ s.nospill) } s.freeRegs(regspec.clobbers) s.tmpused |= regspec.clobbers // Pick registers for outputs. {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Nov 21 17:49:56 UTC 2023 - 87.2K bytes - Viewed (0)