Results 71 - 80 of 465 for f32 (0.03 sec)

  1. tensorflow/compiler/mlir/quantization/tensorflow/tests/quantize_composite_functions.mlir

        %1 = "quantfork.dcast"(%0) : (tensor<2x2x3x2x!quant.uniform<i8<-127:127>:f32:3, {4.000000e-03,5.000000e-03}>>) -> tensor<*xf32>
        %2 = "quantfork.qcast"(%arg0) : (tensor<1x2x2x3xf32>) -> tensor<1x2x2x3x!quant.uniform<i8:f32, 8.000000e-03>>
        %3 = "quantfork.dcast"(%2) : (tensor<1x2x2x3x!quant.uniform<i8:f32, 8.000000e-03>>) -> tensor<*xf32>
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Mon Nov 06 01:23:21 UTC 2023
    - 15.2K bytes
    - Viewed (0)
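    Note: a type of the form !quant.uniform<i8:f32, 8.000000e-03> describes per-tensor quantization with scale 0.008 and an implicit zero point of 0, so a stored i8 value q represents roughly 0.008 * q; the type in the first excerpt line, i8<-127:127>:f32:3 with scales {4.000000e-03,5.000000e-03}, is per-axis along dimension 3 with one scale per slice and storage clamped to [-127, 127]. A minimal sketch of the quantize/dequantize round trip these tests exercise, reusing the per-tensor type from %2/%3 above (not taken from the indexed file):

      // Hypothetical round trip: qcast stores round(x / 0.008) as i8, dcast maps it back to ~x.
      %q = "quantfork.qcast"(%x) : (tensor<1x2x2x3xf32>) -> tensor<1x2x2x3x!quant.uniform<i8:f32, 8.000000e-03>>
      %dq = "quantfork.dcast"(%q) : (tensor<1x2x2x3x!quant.uniform<i8:f32, 8.000000e-03>>) -> tensor<1x2x2x3xf32>
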
  2. tensorflow/compiler/mlir/quantization/tensorflow/tests/convert_custom_aggregation_op_to_quant_stats.mlir

      %0:4 = "tf.CustomAggregator"(%arg0) {min = -0.1 : f32, max = 0.2 : f32, id = "0", calibration_method = 1 : i32, num_bins = 0 : i32, max_percentile = 0.000000e+00 : f32, min_percentile = 0.000000e+00 : f32} : (tensor<8x8x8x8xf32>) -> (tensor<8x8x8x8xf32>, tensor<f32>, tensor<f32>, tensor<*xi64>)
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed May 01 06:25:50 UTC 2024
    - 2.2K bytes
    - Viewed (0)
  3. tensorflow/compiler/mlir/lite/tests/prepare-quantize-post-training.mlir

    // CHECK-DAG: %[[input_7:.*]] = "tfl.dequantize"({{.*}}) : (tensor<2x4x!quant.uniform<i8<-127:127>:f32, 0.010094007169167826>>) -> tensor<2x4xf32>
    // CHECK-DAG: %[[input_8:.*]] = "tfl.dequantize"({{.*}}) : (tensor<2x4x!quant.uniform<i8<-127:127>:f32, 0.018637238525030179>>) -> tensor<2x4xf32>
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu May 02 09:41:17 UTC 2024
    - 52.6K bytes
    - Viewed (0)
  4. tensorflow/compiler/mlir/quantization/stablehlo/tests/passes/defer_activation_transpose.mlir

      %0 = stablehlo.constant dense<0xFF800000> : tensor<f32>  // -inf
      %1 = stablehlo.transpose %arg0, dims = [0, 3, 1, 2] : (tensor<1x16x16x4xf32>) -> tensor<1x4x16x16xf32>
      %2 = "stablehlo.reduce_window"(%1, %0) ({
      ^bb0(%arg1: tensor<f32>, %arg2: tensor<f32>):
          %3 = stablehlo.maximum %arg1, %arg2 : tensor<f32>
          stablehlo.return %3 : tensor<f32>
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 18 20:32:46 UTC 2024
    - 14.6K bytes
    - Viewed (0)
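    Note: 0xFF800000 is the IEEE-754 single-precision bit pattern of -infinity (sign 1, exponent all ones, mantissa 0). Since -inf is the identity element of stablehlo.maximum, seeding the reduce_window with it turns the reduction above into a max pool over each window. A minimal sketch of that constant as it appears in such tests:

      // -inf written as a hex float literal; max(-inf, x) == x for every finite x.
      %neg_inf = stablehlo.constant dense<0xFF800000> : tensor<f32>
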
  5. tensorflow/compiler/mlir/tensorflow/tests/compile_mlir_util/stablehlo_add.mlir

      func.func @main(%arg0: tensor<f32>, %arg1: tensor<f32>) -> tensor<f32> {
        %0 = "stablehlo.add"(%arg0, %arg1) : (tensor<f32>, tensor<f32>) -> tensor<f32>
        func.return %0 : tensor<f32>
      }
    }
    
    // CHECK-LABEL: HloModule main
    // CHECK:       ENTRY %main.{{[0-9]+}} ([[ARG0:.*]]: f32[], [[ARG1:.*]]: f32[]) -> (f32[]) {
    // CHECK-NEXT:    %[[ARG0]] = f32[] parameter(0)
    // CHECK-NEXT:    %[[ARG1]] = f32[] parameter(1)
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Nov 17 02:51:12 UTC 2022
    - 892 bytes
    - Viewed (0)
  6. tensorflow/compiler/mlir/tensorflow/tests/xla_broadcast.mlir

      // CHECK-NEXT:   %[[ID:.*]] = "tf_device.launch"() <{device = "TPU_REPLICATED_HOST_0"}> ({
      // CHECK-NEXT:     %[[IDINSIDE:.*]] = "tf.Identity"(%[[REPVAR]]) {ici_weight_distribution_mlir_bridge_marker = true} : (tensor<f32>) -> tensor<f32>
      // CHECK-NEXT:     tf_device.return %[[IDINSIDE]] : tensor<f32>
      // CHECK-NEXT:   }) : () -> tensor<f32>
      // CHECK-NEXT:   "tf_device.cluster"() ({
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Jun 13 18:52:07 UTC 2024
    - 2.9K bytes
    - Viewed (0)
  7. tensorflow/compiler/mlir/lite/tests/mlir2flatbuffer/mul_v2.mlir

      %0 = "tfl.pseudo_qconst"() { qtype = tensor<3x!quant.uniform<i8:f32, 0.1>>, value = dense<2> : tensor<3xi8>} : () -> tensor<3x!quant.uniform<i8:f32, 0.1>>
      %1 = "tfl.mul"(%arg0, %0) {fused_activation_function = "NONE"} : (tensor<3x!quant.uniform<i8:f32, 0.1>>, tensor<3x!quant.uniform<i8:f32, 0.1>>) -> tensor<3x!quant.uniform<i8:f32, 0.1>> loc("mul")
      func.return %1 : tensor<3x!quant.uniform<i8:f32, 0.1>>
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Jul 14 16:41:28 UTC 2022
    - 2.9K bytes
    - Viewed (0)
  8. tensorflow/compiler/mlir/lite/tests/mlir2flatbuffer/mul_v3.mlir

      %0 = "tfl.pseudo_qconst"() { qtype = tensor<3x!quant.uniform<i8:f32, 1.0>>, value = dense<2> : tensor<3xi8>} : () -> tensor<3x!quant.uniform<i8:f32, 1.0>>
      %1 = "tfl.mul"(%arg0, %0) {fused_activation_function = "NONE"} : (tensor<3x!quant.uniform<i8:f32, 1.0>>, tensor<3x!quant.uniform<i8:f32, 1.0>>) -> tensor<3x!quant.uniform<i8:f32, 1.0>> loc("mul")
      func.return %1 : tensor<3x!quant.uniform<i8:f32, 1.0>>
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Jul 14 16:41:28 UTC 2022
    - 2.9K bytes
    - Viewed (0)
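    Note: with symmetric quantization (zero point 0) each operand represents r = scale * q, so for tfl.mul the output satisfies s_out * q_out ≈ (s_a * q_a) * (s_b * q_b), i.e. q_out ≈ (s_a * s_b / s_out) * q_a * q_b. In mul_v2 all three scales are 0.1, giving an effective rescale factor of 0.1; in mul_v3 they are 1.0, giving 1.0. A minimal sketch using the mul_v2 types (not taken from the indexed files):

      // Both operands and the result use scale 0.1; in effect the kernel applies a
      // fixed-point multiplier of s_a * s_b / s_out = 0.1 to the integer product q_a * q_b.
      %p = "tfl.mul"(%a, %b) {fused_activation_function = "NONE"} : (tensor<3x!quant.uniform<i8:f32, 0.1>>, tensor<3x!quant.uniform<i8:f32, 0.1>>) -> tensor<3x!quant.uniform<i8:f32, 0.1>>
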
  9. tensorflow/compiler/mlir/tensorflow/tests/tensor_list_ops_decomposition.mlir

        // CHECK: })
        %push = "tf.TensorListPushBack"(%arg0, %elem) : (tensor<!tf_type.variant<tensor<f32>>>, tensor<f32>) -> tensor<!tf_type.variant<tensor<f32>>>
        "tf.Yield"(%push, %sub) : (tensor<!tf_type.variant<tensor<f32>>>, tensor<i32>) -> ()
      }) : (tensor<!tf_type.variant<tensor<f32>>>, tensor<i32>) -> (tensor<!tf_type.variant<tensor<f32>>>, tensor<i32>)
      // CHECK: "tf.Slice"
      // CHECK-NOT: tf.TensorListPopBack
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Mon Oct 30 06:52:55 UTC 2023
    - 38.6K bytes
    - Viewed (0)
  10. tensorflow/compiler/mlir/lite/experimental/tac/tests/get-alternative-subgraph.mlir

      %5 = "tfl.add"(%4, %arg3) {tac.device = "CPU", tac.inference_type = "QUANTIZED_INT8", fused_activation_function = "NONE"} : (tensor<1x384x128x!quant.uniform<i8:f32, 0.3:3>>, tensor<128x!quant.uniform<i8:f32, 0.2:-4>>) -> tensor<1x384x128x!quant.uniform<i8:f32, 0.3:-3>>
      func.return %5 : tensor<1x384x128x!quant.uniform<i8:f32, 0.3:-3>>
    }
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu May 02 09:41:17 UTC 2024
    - 20.1K bytes
    - Viewed (0)
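    Note: in a type such as !quant.uniform<i8:f32, 0.2:-4> the value after the colon is the zero point, so a stored i8 value q represents roughly 0.2 * (q - (-4)); the stored value -4 encodes the real value 0.0. A minimal sketch of a constant using that type (hypothetical values, not taken from the indexed file):

      // Stored value 6 with scale 0.2 and zero point -4 represents 0.2 * (6 - (-4)) = 2.0.
      %cst = "tfl.pseudo_qconst"() {qtype = tensor<128x!quant.uniform<i8:f32, 0.2:-4>>, value = dense<6> : tensor<128xi8>} : () -> tensor<128x!quant.uniform<i8:f32, 0.2:-4>>
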