- Sort Score
- Results per page: 10
- Languages All
Results 21 - 30 of 33 for NewValue0 (0.15 sec)
-
src/cmd/compile/internal/ssa/rewriteAMD64.go
v.Aux = symToAux(dstSym) v0 := b.NewValue0(v_1.Pos, OpAMD64MOVQconst, typ.UInt64) v0.AuxInt = int64ToAuxInt(int64(read64(srcSym, int64(srcOff)+8, config.ctxt.Arch.ByteOrder))) v1 := b.NewValue0(v_1.Pos, OpAMD64MOVQstore, types.TypeMem) v1.AuxInt = int32ToAuxInt(dstOff) v1.Aux = symToAux(dstSym) v2 := b.NewValue0(v_1.Pos, OpAMD64MOVQconst, typ.UInt64)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Mar 12 19:38:41 UTC 2024 - 712.7K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go
c := auxIntToInt64(v.AuxInt) if v_0.Op != OpRISCV64MOVBUreg { break } x := v_0.Args[0] if !(c <= 56) { break } v.reset(OpRISCV64SRLI) v.AuxInt = int64ToAuxInt(56 - c) v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64) v0.AuxInt = int64ToAuxInt(56) v0.AddArg(x) v.AddArg(v0) return true } // match: (SLLI [c] (MOVHUreg x)) // cond: c <= 48
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Jan 19 22:42:34 UTC 2023 - 5.1K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewrite386.go
v.reset(OpMove) v.AuxInt = int64ToAuxInt(s - s%4) v0 := b.NewValue0(v.Pos, Op386ADDLconst, dst.Type) v0.AuxInt = int32ToAuxInt(int32(s % 4)) v0.AddArg(dst) v1 := b.NewValue0(v.Pos, Op386ADDLconst, src.Type) v1.AuxInt = int32ToAuxInt(int32(s % 4)) v1.AddArg(src) v2 := b.NewValue0(v.Pos, Op386MOVLstore, types.TypeMem) v3 := b.NewValue0(v.Pos, Op386MOVLload, typ.UInt32) v3.AddArg2(src, mem)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Apr 21 21:05:46 UTC 2023 - 262.4K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/softfloat.go
v.reset(OpXor32) v.Type = f.Config.Types.UInt32 v.AddArg(arg0) mask := v.Block.NewValue0(v.Pos, OpConst32, v.Type) mask.AuxInt = -0x80000000 v.AddArg(mask) case OpNeg64F: arg0 := v.Args[0] v.reset(OpXor64) v.Type = f.Config.Types.UInt64 v.AddArg(arg0) mask := v.Block.NewValue0(v.Pos, OpConst64, v.Type) mask.AuxInt = -0x8000000000000000 v.AddArg(mask)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Aug 03 16:14:24 UTC 2021 - 2K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewriteARM64latelower.go
// cond: !isARM64addcon(c) // result: (ADDSflags x (MOVDconst [c])) for { c := auxIntToInt64(v.AuxInt) x := v_0 if !(!isARM64addcon(c)) { break } v.reset(OpARM64ADDSflags) v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) v0.AuxInt = int64ToAuxInt(c) v.AddArg2(x, v0) return true } return false } func rewriteValueARM64latelower_OpARM64ADDconst(v *Value) bool { v_0 := v.Args[0]
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Mar 12 19:38:41 UTC 2024 - 19.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewritePPC64latelower.go
for { if auxIntToInt32(v.AuxInt) != 2 { break } cmp := v_0 if !(buildcfg.GOPPC64 <= 9) { break } v.reset(OpPPC64ISELZ) v.AuxInt = int32ToAuxInt(2) v0 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64) v0.AuxInt = int64ToAuxInt(1) v.AddArg2(v0, cmp) return true } // match: (SETBC [0] cmp) // cond: buildcfg.GOPPC64 <= 9 // result: (ISELZ [0] (MOVDconst [1]) cmp)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 19:59:38 UTC 2024 - 16.5K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/fuse_comparisons.go
// significantly so we shouldn't be overly conservative. if !canSpeculativelyExecute(b) { return false } // Logically combine the control values for p and b. v := b.NewValue0(bc.Pos, op, bc.Type) v.AddArg(pc) v.AddArg(bc) // Set the combined control value as the control value for b. b.SetControl(v) // Modify p so that it jumps directly to b. p.removeEdge(i)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Apr 11 16:34:30 UTC 2022 - 4K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/loopreschedchecks.go
tofixBackedges = append(tofixBackedges, edgeMem{e, nil}) } // It's possible that there is no memory state (no global/pointer loads/stores or calls) if lastMems[f.Entry.ID] == nil { lastMems[f.Entry.ID] = f.Entry.NewValue0(f.Entry.Pos, OpInitMem, types.TypeMem) } memDefsAtBlockEnds := f.Cache.allocValueSlice(f.NumBlocks()) // For each block, the mem def seen at its bottom. Could be from earlier block.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Aug 22 21:17:10 UTC 2023 - 16K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/value.go
} } // If/when midstack inlining is enabled (-l=4), the compiler gets both larger and slower. // Not-inlining this method is a help (*Value.reset and *Block.NewValue0 are similar). // //go:noinline func (v *Value) AddArg(w *Value) { if v.Args == nil { v.resetArgs() // use argstorage } v.Args = append(v.Args, w) w.Uses++ } //go:noinline
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 03 16:40:22 UTC 2024 - 16.7K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/writebarrier.go
} args = append(args, mem) // issue call argTypes := make([]*types.Type, nargs, 3) // at most 3 args; allows stack allocation for i := 0; i < nargs; i++ { argTypes[i] = typ } call := b.NewValue0A(pos, OpStaticCall, types.TypeResultMem, StaticAuxCall(fn, b.Func.ABIDefault.ABIAnalyzeTypes(argTypes, nil))) call.AddArgs(args...) call.AuxInt = int64(nargs) * typ.Size()
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Sep 08 19:09:14 UTC 2023 - 23.5K bytes - Viewed (0)