- Sort: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 11 for Less32F (0.13 sec)
-
test/fuse.go
} func fLessNeq(a int32, f float64) bool { return a < 0 && f > Cf2 || a != 0 && f < -Cf2 } func fLessLess(a float32, f float64) bool { return a < 0 && f > Cf2 || a < 0 && f < -Cf2 // ERROR "Redirect Less32F based on Less32F$" } func fLessLeq(a float64, f float64) bool { return a < 0 && f > Cf2 || a <= 0 && f < -Cf2 } func fLeqEq(a float64, f float64) bool { return a <= 0 && f > Cf2 || a == 0 && f < -Cf2 }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Jan 23 00:02:36 UTC 2024 - 5.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/genericOps.go
{name: "Less16", argLength: 2, typ: "Bool"}, {name: "Less16U", argLength: 2, typ: "Bool"}, {name: "Less32", argLength: 2, typ: "Bool"}, {name: "Less32U", argLength: 2, typ: "Bool"}, {name: "Less64", argLength: 2, typ: "Bool"}, {name: "Less64U", argLength: 2, typ: "Bool"}, {name: "Less32F", argLength: 2, typ: "Bool"}, {name: "Less64F", argLength: 2, typ: "Bool"},
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 15:49:20 UTC 2024 - 42.6K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/generic.rules
(Less32 x (Const32 <t> [math.MinInt32+1])) => (Eq32 x (Const32 <t> [math.MinInt32])) (Less16 x (Const16 <t> [math.MinInt16+1])) => (Eq16 x (Const16 <t> [math.MinInt16])) (Less8 x (Const8 <t> [math.MinInt8 +1])) => (Eq8 x (Const8 <t> [math.MinInt8 ])) (Less64 (Const64 <t> [math.MaxInt64-1]) x) => (Eq64 x (Const64 <t> [math.MaxInt64]))
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 16 22:21:05 UTC 2024 - 135.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/ARM64.rules
// and ARM Manual says FCMP instruction sets PSTATE.<N,Z,C,V> // of this case to (0, 0, 1, 1). (Less32F x y) => (LessThanF (FCMPS x y)) (Less64F x y) => (LessThanF (FCMPD x y)) // For an unsigned integer x, the following rules are useful when combining branch // 0 < x => x != 0 // x <= 0 => x == 0 // x < 1 => x == 0 // 1 <= x => x != 0
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 15:49:20 UTC 2024 - 113.1K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewritegeneric.go
v2.AuxInt = int64ToAuxInt(d - c - 1) v.AddArg2(v0, v2) return true } break } // match: (AndB (Less32U (Const32 [c]) x) (Less32U x (Const32 [d]))) // cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c) // result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1])) for { for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Apr 22 18:24:47 UTC 2024 - 812.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewritePPC64.go
} } func rewriteValuePPC64_OpLess32F(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] b := v.Block // match: (Less32F x y) // result: (FLessThan (FCMPU x y)) for { x := v_0 y := v_1 v.reset(OpPPC64FLessThan) v0 := b.NewValue0(v.Pos, OpPPC64FCMPU, types.TypeFlags) v0.AddArg2(x, y) v.AddArg(v0) return true } }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Jun 07 19:02:52 UTC 2024 - 360.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/RISCV64.rules
(RotateLeft32 ...) => (ROLW ...) (RotateLeft64 ...) => (ROL ...) (Less64 ...) => (SLT ...) (Less32 x y) => (SLT (SignExt32to64 x) (SignExt32to64 y)) (Less16 x y) => (SLT (SignExt16to64 x) (SignExt16to64 y)) (Less8 x y) => (SLT (SignExt8to64 x) (SignExt8to64 y)) (Less64U ...) => (SLTU ...) (Less32U x y) => (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y)) (Less16U x y) => (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Mar 07 14:57:07 UTC 2024 - 40.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewriteARM64.go
b := v.Block typ := &b.Func.Config.Types // match: (Less32U zero:(MOVDconst [0]) x) // result: (Neq32 zero x) for { zero := v_0 if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 { break } x := v_1 v.reset(OpNeq32) v.AddArg2(zero, x) return true } // match: (Less32U x (MOVDconst [1])) // result: (Eq32 x (MOVDconst [0])) for {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 15:49:20 UTC 2024 - 608.6K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewriteAMD64.go
} } func rewriteValueAMD64_OpLess32F(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] b := v.Block // match: (Less32F x y) // result: (SETGF (UCOMISS y x)) for { x := v_0 y := v_1 v.reset(OpAMD64SETGF) v0 := b.NewValue0(v.Pos, OpAMD64UCOMISS, types.TypeFlags) v0.AddArg2(y, x) v.AddArg(v0) return true } }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Mar 12 19:38:41 UTC 2024 - 712.7K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewriteRISCV64.go
v_1 := v.Args[1] v_0 := v.Args[0] b := v.Block typ := &b.Func.Config.Types // match: (Leq32U x y) // result: (Not (Less32U y x)) for { x := v_0 y := v_1 v.reset(OpNot) v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool) v0.AddArg2(y, x) v.AddArg(v0) return true } } func rewriteValueRISCV64_OpLeq64(v *Value) bool { v_1 := v.Args[1]
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Mar 07 14:57:07 UTC 2024 - 205.1K bytes - Viewed (0)