- Sort by: Score
- Results per page: 10
- Languages: All
Results 31 - 40 of 47 for fadd32 (0.17 sec)
-
tensorflow/compiler/mlir/lite/tests/canonicalize.mlir
%10 = "tf.Cast"(%arg3) {Truncate = false, device = ""} : (tensor<i32>) -> tensor<f32> %11 = "tf.AddV2"(%arg3, %9) {device = ""} : (tensor<i32>, tensor<i32>) -> tensor<i32> %12 = "tf.StackPushV2"(%arg4, %10) {device = "", swap_memory = false} : (tensor<!tf_type.resource>, tensor<f32>) -> tensor<f32> %13 = "tf.AddV2"(%arg1, %9) {device = ""} : (tensor<i32>, tensor<i32>) -> tensor<i32>
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu May 02 09:41:17 UTC 2024 - 20.6K bytes - Viewed (0) -
src/runtime/sys_linux_mips64x.s
SYSCALL MOVW R2, ret+24(FP) RET // int64 futex(int32 *uaddr, int32 op, int32 val, // struct timespec *timeout, int32 *uaddr2, int32 val2); TEXT runtime·futex(SB),NOSPLIT|NOFRAME,$0 MOVV addr+0(FP), R4 MOVW op+8(FP), R5 MOVW val+12(FP), R6 MOVV ts+16(FP), R7 MOVV addr2+24(FP), R8 MOVW val3+32(FP), R9 MOVV $SYS_futex, R2 SYSCALL BEQ R7, 2(PC) SUBVU R2, R0, R2 // caller expects negative errno
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Oct 18 20:57:24 UTC 2022 - 12K bytes - Viewed (0) -
src/cmd/compile/internal/typecheck/builtin.go
{"typeAssert", funcTag, 67}, {"interfaceSwitch", funcTag, 70}, {"ifaceeq", funcTag, 72}, {"efaceeq", funcTag, 72}, {"panicrangestate", funcTag, 73}, {"deferrangefunc", funcTag, 74}, {"rand32", funcTag, 75}, {"makemap64", funcTag, 77}, {"makemap", funcTag, 78}, {"makemap_small", funcTag, 79}, {"mapaccess1", funcTag, 80}, {"mapaccess1_fast32", funcTag, 81}, {"mapaccess1_fast64", funcTag, 82},
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue May 21 21:08:03 UTC 2024 - 16.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/tests/stack_ops_decomposition.mlir
// CHECK-NEXT: %[[CONST1:.*]] = "tf.Const"() <{value = dense<1> : tensor<1xi32>}> : () -> tensor<1xi32> // CHECK-NEXT: %[[NEW_SIZE:.*]] = "tf.AddV2"(%[[READ_SIZE]], %[[CONST1]]) : (tensor<1xi32>, tensor<1xi32>) -> tensor<1xi32> // CHECK-NEXT: "tf.AssignVariableOp"(%[[SIZE]], %[[NEW_SIZE]]) : (tensor<!tf_type.resource<tensor<1xi32>>>, tensor<1xi32>) -> ()
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Oct 30 06:52:55 UTC 2023 - 25.8K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/transforms/canonicalize.td
(TF_AddOp:$src TF_NumberTensor:$arg0, TF_NumberTensor:$arg1), (TF_AddV2Op:$dest $arg0, $arg1), [], [(CopyAttrs $src, $dest)]>; //===----------------------------------------------------------------------===// // AddV2 op patterns. //===----------------------------------------------------------------------===// def AddV2OfNegLeft : Pat< (TF_AddV2Op:$src (TF_NegOp $arg0), $arg1), (TF_SubOp:$dest $arg1, $arg0),
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Dec 06 18:42:28 UTC 2023 - 17K bytes - Viewed (0) -
test/live.go
} func bad40() { t := newT40() _ = t printnl() } func good40() { ret := T40{} // ERROR "stack object ret T40$" ret.m = make(map[int]int) // ERROR "live at call to rand32: .autotmp_[0-9]+$" "stack object .autotmp_[0-9]+ runtime.hmap$" t := &ret printnl() // ERROR "live at call to printnl: ret$" // Note: ret is live at the printnl because the compiler moves &ret
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Dec 05 20:34:30 UTC 2023 - 18K bytes - Viewed (0) -
src/runtime/export_test.go
// Export guts for testing. package runtime import ( "internal/abi" "internal/goarch" "internal/goos" "internal/runtime/atomic" "runtime/internal/sys" "unsafe" ) var Fadd64 = fadd64 var Fsub64 = fsub64 var Fmul64 = fmul64 var Fdiv64 = fdiv64 var F64to32 = f64to32 var F32to64 = f32to64 var Fcmp64 = fcmp64 var Fintto64 = fintto64 var F64toint = f64toint
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 30 17:50:53 UTC 2024 - 46.1K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/memcombine.go
if idx != nil { // We have two or more indexing values. // Pick the first one we found. return BaseAddress{ptr: ptr, idx: idx}, off } idx = ptr.Args[1] if idx.Op == OpAdd32 || idx.Op == OpAdd64 { if idx.Args[0].Op == OpConst32 || idx.Args[0].Op == OpConst64 { off += idx.Args[0].AuxInt idx = idx.Args[1] } else if idx.Args[1].Op == OpConst32 || idx.Args[1].Op == OpConst64 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Mar 21 19:45:41 UTC 2024 - 18.4K bytes - Viewed (0) -
src/runtime/sys_linux_ppc64x.s
MOVW R3, ret+24(FP) RET // int64 futex(int32 *uaddr, int32 op, int32 val, // struct timespec *timeout, int32 *uaddr2, int32 val2); TEXT runtime·futex(SB),NOSPLIT|NOFRAME,$0 MOVD addr+0(FP), R3 MOVW op+8(FP), R4 MOVW val+12(FP), R5 MOVD ts+16(FP), R6 MOVD addr2+24(FP), R7 MOVW val3+32(FP), R8 SYSCALL $SYS_futex BVC 2(PC) NEG R3 // caller expects negative errno MOVW R3, ret+40(FP)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 18:17:17 UTC 2024 - 18.1K bytes - Viewed (0) -
src/cmd/compile/internal/walk/builtin.go
// no buckets need to be allocated in this code path. if n.Esc() == ir.EscNone { // Only need to initialize h.hash0 since // hmap h has been allocated on the stack already. // h.hash0 = rand32() rand := mkcall("rand32", types.Types[types.TUINT32], init) hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, hashsym), rand))
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Mar 08 22:35:22 UTC 2024 - 31.2K bytes - Viewed (0)