- Sort: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 20 for clobbered (0.45 sec)
-
src/cmd/compile/internal/ssa/_gen/ARM64Ops.go
// R20 changed as side effect // R16 and R17 may be clobbered by linker trampoline. { name: "DUFFZERO", aux: "Int64", argLength: 2, reg: regInfo{ inputs: []regMask{buildReg("R20")}, clobbers: buildReg("R16 R17 R20 R30"), }, faultOnNilArg0: true, unsafePoint: true, // FP maintenance around DUFFZERO can be clobbered by interrupts }, // large zeroing
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 15:49:20 UTC 2024 - 58.8K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/regalloc.go
} // Dump any registers which will be clobbered if s.doClobber && v.Op.IsCall() { // clobber registers that are marked as clobber in regmask, but // don't clobber inputs. s.clobberRegs(regspec.clobbers &^ s.tmpused &^ s.nospill) } s.freeRegs(regspec.clobbers) s.tmpused |= regspec.clobbers // Pick registers for outputs. {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Nov 21 17:49:56 UTC 2023 - 87.2K bytes - Viewed (0) -
src/runtime/asm_amd64.s
// gcWriteBarrier does NOT follow the Go ABI. It accepts the // number of bytes of buffer needed in R11, and returns a pointer // to the buffer space in R11. // It clobbers FLAGS. It does not clobber any general-purpose registers, // but may clobber others (e.g., SSE registers). // Typical use would be, when doing *(CX+88) = AX // CMPL $0, runtime.writeBarrier(SB) // JEQ dowrite
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat May 11 20:38:24 UTC 2024 - 60.4K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewrite.go
return nil // too far away } // clobber invalidates values. Returns true. // clobber is used by rewrite rules to: // // A) make sure the values are really dead and never used again. // B) decrement use counts of the values' args. func clobber(vv ...*Value) bool { for _, v := range vv { v.reset(OpInvalid) // Note: leave v.Block intact. The Block field is used after clobber. } return true }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Jun 07 19:02:52 UTC 2024 - 64.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/debug.go
} // Handle any register clobbering. Call operations, for example, // clobber all registers even though they don't explicitly write to // them. clobbers := uint64(opcodeTable[v.Op].reg.clobbers) for { if clobbers == 0 { break } reg := uint8(bits.TrailingZeros64(clobbers)) clobbers &^= 1 << reg for _, slot := range locs.registers[reg] { if state.loggingLevel > 1 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Jun 10 19:44:43 UTC 2024 - 58.4K bytes - Viewed (0) -
src/crypto/internal/nistec/p256_asm_ppc64le.s
X- ; Y- ; MUL;T3=T // T3 = T3*T2 T2 T3 T4 X=T4; Y=Y1; MUL;T- // T4 = T4*Y1 T3 T4 SUB(T<T3-T) Y3:=T // Y3 = T3-T4 T3 T4 */ // // V27 is clobbered by p256MulInternal so must be // saved in a temp. // // func p256PointAddAffineAsm(res, in1 *P256Point, in2 *p256AffinePoint, sign, sel, zero int) TEXT ·p256PointAddAffineAsm(SB), NOSPLIT, $16-48
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 04 17:29:44 UTC 2024 - 56.5K bytes - Viewed (0) -
src/runtime/mbitmap.go
m.mask = 1 } else { m.mask = m.mask << 1 } m.index++ } // clobberdeadPtr is a special value that is used by the compiler to // clobber dead stack slots, when -clobberdead flag is set. const clobberdeadPtr = uintptr(0xdeaddead | 0xdeaddead<<((^uintptr(0)>>63)*32)) // badPointer throws bad pointer in heap panic. func badPointer(s *mspan, p, refBase, refOff uintptr) {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 00:18:55 UTC 2024 - 60K bytes - Viewed (0) -
src/cmd/compile/internal/ppc64/ssa.go
// Remainder after the loop rem := v.AuxInt % bytesPerLoop dstReg := v.Args[0].Reg() srcReg := v.Args[1].Reg() // The set of registers used here, must match the clobbered reg list // in PPC64Ops.go. offset := int64(0) // top of the loop var top *obj.Prog // Only generate looping code when loop counter is > 1 for >= 64 bytes if ctr > 1 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 19:59:38 UTC 2024 - 55.4K bytes - Viewed (0) -
src/cmd/internal/obj/riscv/obj.go
p.As = ABLTU p.From.Type = obj.TYPE_REG p.From.Reg = REG_X6 p.Reg = REG_X7 p.To.Type = obj.TYPE_BRANCH to_done = p } // Spill the register args that could be clobbered by the // morestack code p = ctxt.EmitEntryStackMap(cursym, p, newprog) p = cursym.Func().SpillRegisterArgs(p, newprog) // CALL runtime.morestack(SB) p = obj.Appendp(p, newprog) p.As = obj.ACALL
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sun Apr 07 03:32:27 UTC 2024 - 77K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/S390XOps.go
// It saves all GP registers if necessary, // but clobbers R14 (LR) because it's a call, // and also clobbers R1 as the PLT stub does. // Returns a pointer to a write barrier buffer in R9. {name: "LoweredWB", argLength: 1, reg: regInfo{clobbers: (callerSave &^ gpg) | buildReg("R14") | r1, outputs: []regMask{r9}}, clobberFlags: true, aux: "Int64"},
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Feb 24 00:21:13 UTC 2023 - 52.5K bytes - Viewed (0)