- Sort: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 19 for LessEqual (0.19 sec)
-
src/runtime/mranges.go
// If b is strictly contained in a, thus forcing a split, it will throw. func (a addrRange) subtract(b addrRange) addrRange { if b.base.lessEqual(a.base) && a.limit.lessEqual(b.limit) { return addrRange{} } else if a.base.lessThan(b.base) && b.limit.lessThan(a.limit) { throw("bad prune") } else if b.limit.lessThan(a.limit) && a.base.lessThan(b.limit) { a.base = b.limit
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 14.5K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/ARM64latelower.rules
// Boolean-generating instructions (NOTE: NOT all boolean Values) always // zero upper bit of the register; no need to zero-extend (MOVBUreg x:((Equal|NotEqual|LessThan|LessThanU|LessThanF|LessEqual|LessEqualU|LessEqualF|GreaterThan|GreaterThanU|GreaterThanF|GreaterEqual|GreaterEqualU|GreaterEqualF) _)) => x // omit unsigned extension (MOVWUreg x) && zeroUpper32Bits(x, 3) => x // don't extend after proper load
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Mar 12 19:38:41 UTC 2024 - 4.1K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/ARM64.rules
(Less32U x y) => (LessThanU (CMPW x y)) (Less64U x y) => (LessThanU (CMP x y)) (Leq8 x y) => (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) (Leq16 x y) => (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) (Leq32 x y) => (LessEqual (CMPW x y)) (Leq64 x y) => (LessEqual (CMP x y)) // Refer to the comments for op Less64F above. (Leq32F x y) => (LessEqualF (FCMPS x y))
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 15:49:20 UTC 2024 - 113.1K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/PPC64.rules
(Less(32|64)U x y) => (LessThan ((CMPWU|CMPU) x y)) (Leq(8|16) x y) => (LessEqual (CMPW (SignExt(8|16)to32 x) (SignExt(8|16)to32 y))) (Leq(32|64) x y) => (LessEqual ((CMPW|CMP) x y)) (Leq(32|64)F x y) => (FLessEqual (FCMPU x y)) (Leq(8|16)U x y) => (LessEqual (CMPWU (ZeroExt(8|16)to32 x) (ZeroExt(8|16)to32 y))) (Leq(32|64)U x y) => (LessEqual (CMP(WU|U) x y)) // Absorb pseudo-ops into blocks.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Jun 07 19:02:52 UTC 2024 - 53.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/tf2xla/tests/legalize-tf-binary-elementwise.mlir
func.return %0: tensor<2xi1> } // CHECK-LABEL: func @less_equal func.func @less_equal(%arg0: tensor<2xi32>, %arg1: tensor<2xi32>) -> tensor<2xi1> { // CHECK-NEXT: mhlo.compare LE, %arg0, %arg1 %0 = "tf.LessEqual"(%arg0, %arg1) : (tensor<2xi32>, tensor<2xi32>) -> tensor<2xi1> func.return %0: tensor<2xi1>
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Sat Apr 06 15:32:52 UTC 2024 - 18.4K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewriteARM64latelower.go
} // match: (MOVBUreg x:(LessThanF _)) // result: x for { x := v_0 if x.Op != OpARM64LessThanF { break } v.copyOf(x) return true } // match: (MOVBUreg x:(LessEqual _)) // result: x for { x := v_0 if x.Op != OpARM64LessEqual { break } v.copyOf(x) return true } // match: (MOVBUreg x:(LessEqualU _)) // result: x for {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Mar 12 19:38:41 UTC 2024 - 19.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/tests/fuse-tftext.mlir
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu May 02 09:41:17 UTC 2024 - 460.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewritePPC64.go
} func rewriteValuePPC64_OpPPC64LessEqual(v *Value) bool { v_0 := v.Args[0] // match: (LessEqual (FlagEQ)) // result: (MOVDconst [1]) for { if v_0.Op != OpPPC64FlagEQ { break } v.reset(OpPPC64MOVDconst) v.AuxInt = int64ToAuxInt(1) return true } // match: (LessEqual (FlagLT)) // result: (MOVDconst [1]) for { if v_0.Op != OpPPC64FlagLT { break }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Jun 07 19:02:52 UTC 2024 - 360.2K bytes - Viewed (0) -
src/runtime/mpagealloc.go
// that summary. But as soon as we need to iterate beyond that summary // in a level to find a large enough range, we'll stop narrowing. foundFree := func(addr offAddr, size uintptr) { if firstFree.base.lessEqual(addr) && addr.add(size-1).lessEqual(firstFree.bound) { // This range fits within the current firstFree window, so narrow // down the firstFree window to the base and bound of this range. firstFree.base = addr
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 39.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewriteARM64.go
return true } return false } func rewriteValueARM64_OpARM64LessEqual(v *Value) bool { v_0 := v.Args[0] b := v.Block // match: (LessEqual (CMPconst [0] z:(AND x y))) // cond: z.Uses == 1 // result: (LessEqual (TST x y)) for { if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 { break } z := v_0.Args[0] if z.Op != OpARM64AND { break
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 15:49:20 UTC 2024 - 608.6K bytes - Viewed (0)