- Sort: Score
- Results per page: 10
- Languages: All
Results 21 - 30 of 138 for relu6 (0.05 sec)
-
tensorflow/compiler/mlir/tfr/README.md
attrs=['act: {"", "RELU", "RELU6", "TANH"} = ""'], derived_attrs=['T: {float, int8}'], outputs=['o: T']) def _composite_fully_connected(input_, filter_, bias, act): res = tf.raw_ops.MatMul( a=input_, b=filter_, transpose_a=False, transpose_b=True) res = tf.raw_ops.Add(x=res, y=bias) if act == 'RELU': return tf.raw_ops.Relu(features=res) elif act == 'RELU6': return tf.raw_ops.Relu6(features=res)
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Mar 29 18:32:13 UTC 2022 - 6.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/tests/mlir2flatbuffer/simple.mlir
// CHECK-NEXT: inputs: [ 0, 1 ], // CHECK-NEXT: outputs: [ 2 ], // CHECK-NEXT: builtin_options_type: SubOptions, // CHECK-NEXT: builtin_options: { // CHECK-NEXT: fused_activation_function: RELU6 // CHECK-NEXT: } // CHECK-NEXT: }, { // CHECK-NEXT: opcode_index: 1, // CHECK-NEXT: inputs: [ 3, 2 ], // CHECK-NEXT: outputs: [ 4 ], // CHECK-NEXT: builtin_options_type: AddOptions,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Jul 14 16:41:28 UTC 2022 - 4.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/tests/mlir2flatbuffer/simple_with_unconnected_control_nodes.mlir
// CHECK-NEXT: inputs: [ 0, 1 ], // CHECK-NEXT: outputs: [ 2 ], // CHECK-NEXT: builtin_options_type: SubOptions, // CHECK-NEXT: builtin_options: { // CHECK-NEXT: fused_activation_function: RELU6 // CHECK-NEXT: } // CHECK-NEXT: }, { // CHECK-NEXT: opcode_index: 1, // CHECK-NEXT: inputs: [ 3, 2 ], // CHECK-NEXT: outputs: [ 4 ], // CHECK-NEXT: builtin_options_type: AddOptions,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Aug 17 13:32:53 UTC 2022 - 4.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/python/integration_test/quantize_model_test.py
# Check activation functions are explicitly present. # If present the last op before return should be stablehlo.clamp for relu6 # and stablehlo.maximum for relu. if activation_fn is nn_ops.relu6: self.assertRegex(module_str, r'stablehlo.clamp.*\n.*return') elif activation_fn is nn_ops.relu: self.assertRegex(module_str, r'stablehlo.maximum.*\n.*return') else:
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue May 14 06:31:57 UTC 2024 - 51.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/experimental/tac/tests/raise-target-subgraphs.mlir
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu May 02 09:41:17 UTC 2024 - 74.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/tests/quantize_composite_functions.mlir
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Nov 06 01:23:21 UTC 2023 - 15.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/tests/mlir2flatbuffer/simple_with_connected_control_nodes.mlir
// CHECK-NEXT: inputs: [ 0, 1 ], // CHECK-NEXT: outputs: [ 2 ], // CHECK-NEXT: builtin_options_type: SubOptions, // CHECK-NEXT: builtin_options: { // CHECK-NEXT: fused_activation_function: RELU6 // CHECK-NEXT: } // CHECK-NEXT: }, { // CHECK-NEXT: opcode_index: 1, // CHECK-NEXT: inputs: [ 3, 2 ], // CHECK-NEXT: outputs: [ 4 ], // CHECK-NEXT: builtin_options_type: AddOptions,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Aug 17 13:32:53 UTC 2022 - 4.6K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/passes/quantized_function_library_uniform_quantized.mlir
{"quantized_ops": ["${main_op}", "BiasAdd", "Relu"], "act_func": "internal_requantize_and_relu_fn", "output_type": "!tf_type.qint8"}, {"quantized_ops": ["${main_op}", "BiasAdd", "Relu6"], "act_func": "internal_requantize_and_relu6_fn", "output_type": "!tf_type.qint8"}, ]
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Aug 29 01:13:58 UTC 2023 - 19.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/tfr/passes/decompose_patterns.td
def QuantActRangeRelu6Pattern : Pattern< (TFR_TFRQuantActRangeOp (TFR_ConstOp HasStringAttr<"RELU6">:$act), (ConstantLikeMatcher F32Attr:$scale), (ConstantLikeMatcher I64Attr:$zp)), [(TFR_ConstantTensorOp (Arith_ConstantOp (Quantize<"0.0f"> $scale, $zp))), (TFR_ConstantTensorOp (Arith_ConstantOp (Quantize<"6.0f"> $scale, $zp)))]>; def QuantActRangeReluN1To1Pattern : Pattern<
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Sep 29 21:02:21 UTC 2022 - 2.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/tests/ops.mlir
// CHECK: "RELU" %1 = tfl.add %arg0, %arg1 {fused_activation_function = "RELU"} : tensor<4xi32> // CHECK: "RELU_N1_TO_1" %2 = tfl.add %arg0, %arg1 {fused_activation_function = "RELU_N1_TO_1"} : tensor<4xi32> // CHECK: "RELU6" %3 = tfl.add %arg0, %arg1 {fused_activation_function = "RELU6"} : tensor<4xi32> // CHECK: "TANH"
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Jun 06 19:09:08 UTC 2024 - 189.2K bytes - Viewed (0)