Results 1 - 10 of 13 for RELU_N1_TO_1 (0.17 sec)
tensorflow/compiler/mlir/tfr/passes/decompose_patterns.td
     (TFR_ConstantTensorOp (Arith_ConstantOp (Quantize<"6.0f"> $scale, $zp)))]>;

  def QuantActRangeReluN1To1Pattern : Pattern<
      (TFR_TFRQuantActRangeOp
          (TFR_ConstOp HasStringAttr<"RELU_N1_TO_1">:$act),
          (ConstantLikeMatcher F32Attr:$scale),
          (ConstantLikeMatcher I64Attr:$zp)),
      [(TFR_ConstantTensorOp (Arith_ConstantOp (Quantize<"-1.0f"> $scale, $zp))),
Last Modified: Thu Sep 29 21:02:21 UTC 2022 - 2.4K bytes
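This pattern replaces a `tfr.quant_act_range` call whose activation attribute is `RELU_N1_TO_1` with constant tensors holding the quantized bounds: the RELU6 pattern ending just above it materializes its upper bound with `Quantize<"6.0f">`, and the RELU_N1_TO_1 pattern does the same with -1.0 (presumably followed by the quantized +1.0 bound in the line the snippet cuts off). A minimal sketch of the arithmetic, assuming `Quantize<...>` performs standard affine quantization; the helper name below is ours, not from the source:

```cpp
#include <cmath>
#include <cstdint>

// Sketch of what the Quantize<"..."> helper presumably computes:
// affine quantization of a real-valued activation bound.
// QuantizeBound is a hypothetical name, not from the source.
int64_t QuantizeBound(float value, float scale, int64_t zero_point) {
  return static_cast<int64_t>(std::round(value / scale)) + zero_point;
}

// For RELU_N1_TO_1 the pattern would materialize the pair
//   (QuantizeBound(-1.0f, scale, zp), QuantizeBound(1.0f, scale, zp))
// as constant tensors, mirroring Quantize<"-1.0f"> and Quantize<"1.0f">.
```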
tensorflow/compiler/mlir/lite/tests/flatbuffer2mlir/test_schema.fbs
  LOGISTIC = 14,
  LSH_PROJECTION = 15,
  LSTM = 16,
  MAX_POOL_2D = 17,
  MUL = 18,
  RELU = 19,
  // NOTE(aselle): RELU_N1_TO_1 used to be called RELU1, but it was renamed
  // since different model developers use RELU1 in different ways. Never
  // create another op called RELU1.
  RELU_N1_TO_1 = 20,
  RELU6 = 21,
  RESHAPE = 22,
  RESIZE_BILINEAR = 23,
  RNN = 24,
  SOFTMAX = 25,
  SPACE_TO_DEPTH = 26,
Last Modified: Mon Apr 19 19:46:06 UTC 2021 - 26.1K bytes
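For readers unfamiliar with the op: RELU_N1_TO_1 clamps its input to the interval [-1, 1], just as RELU6 clamps to [0, 6], which is why the rename away from the ambiguous RELU1 mattered. The float-domain semantics in one line:

```cpp
#include <algorithm>

// Float-domain semantics of the RELU_N1_TO_1 activation: clamp to [-1, 1].
float ReluN1To1(float x) { return std::clamp(x, -1.0f, 1.0f); }
```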
tensorflow/compiler/mlir/lite/ir/tfl_op_enums.td
// These should match the ActivationFunctionType enum in TFLite schema.
def TFL_AFEnum_None  : I32EnumAttrCase<"NONE", 0>;
def TFL_AFEnum_Relu  : I32EnumAttrCase<"RELU", 1>;
def TFL_AFEnum_Relu1 : I32EnumAttrCase<"RELU_N1_TO_1", 2>;
def TFL_AFEnum_Relu6 : I32EnumAttrCase<"RELU6", 3>;
def TFL_AFEnum_Tanh  : I32EnumAttrCase<"TANH", 4>;
def TFL_AFEnum_Sign  : I32EnumAttrCase<"SIGN_BIT", 5>;

def TFL_AFAttr : TFL_AnyStrAttrOf<[
Last Modified: Thu Oct 20 00:05:24 UTC 2022 - 6.4K bytes
tensorflow/compiler/mlir/lite/schema/schema_v3b.fbs
  LOGISTIC = 14,
  LSH_PROJECTION = 15,
  LSTM = 16,
  MAX_POOL_2D = 17,
  MUL = 18,
  RELU = 19,
  // NOTE(aselle): RELU_N1_TO_1 used to be called RELU1, but it was renamed
  // since different model developers use RELU1 in different ways. Never
  // create another op called RELU1.
  RELU_N1_TO_1 = 20,
  RELU6 = 21,
  RESHAPE = 22,
  RESIZE_BILINEAR = 23,
  RNN = 24,
  SOFTMAX = 25,
  SPACE_TO_DEPTH = 26,
Last Modified: Tue May 28 14:28:27 UTC 2024 - 30K bytes
tensorflow/compiler/mlir/lite/schema/schema.fbs
  LOGISTIC = 14,
  LSH_PROJECTION = 15,
  LSTM = 16,
  MAX_POOL_2D = 17,
  MUL = 18,
  RELU = 19,
  // NOTE(aselle): RELU_N1_TO_1 used to be called RELU1, but it was renamed
  // since different model developers use RELU1 in different ways. Never
  // create another op called RELU1.
  RELU_N1_TO_1 = 20,
  RELU6 = 21,
  RESHAPE = 22,
  RESIZE_BILINEAR = 23,
  RNN = 24,
  SOFTMAX = 25,
  SPACE_TO_DEPTH = 26,
Last Modified: Fri May 03 18:01:23 UTC 2024 - 41.7K bytes
tensorflow/compiler/mlir/tfr/ir/tfr_ops.td
    range for the fused activation `act` with the quantization defined by the
    `scale` and `zero point`. Currently, the allowed activations are `NONE`,
    `RELU`, `RELU6` and `RELU_N1_TO_1`.

    Example:

    ```mlir
    %3, %4 = tfr.quant_act_range(%2, %1, %0) :
        (tfr.attr, float, i64) -> (tfr.tensor, tfr.tensor)
    ```
  }];

  let arguments = (ins
Last Modified: Mon Apr 22 10:54:29 UTC 2024 - 17.4K bytes
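To make the returned range concrete, here is a worked example. The zero point 42 matches the decompose.mlir test in the next result; the scale of 0.1 is a hypothetical value of ours, since the test's scale constant is cut off by the snippet:

```cpp
#include <cmath>
#include <cstdio>

int main() {
  // Hypothetical scale; the zero point 42 matches decompose.mlir below.
  const float scale = 0.1f;
  const long zp = 42;
  long qmin = std::lround(-1.0f / scale) + zp;  // round(-1/0.1) + 42 = 32
  long qmax = std::lround(1.0f / scale) + zp;   // round( 1/0.1) + 42 = 52
  std::printf("RELU_N1_TO_1 quantized range = [%ld, %ld]\n", qmin, qmax);
}
```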
tensorflow/compiler/mlir/tfr/tests/decompose.mlir
  %zp = arith.constant 42 : i64
  %none_attr = tfr.constant "NONE" -> !tfr.attr
  %relu_attr = tfr.constant "RELU" -> !tfr.attr
  %relu6_attr = tfr.constant "RELU6" -> !tfr.attr
  %reluN1_1_attr = tfr.constant "RELU_N1_TO_1" -> !tfr.attr
  %none:2 = "tfr.quant_act_range"(%none_attr, %scale, %zp) : (!tfr.attr, f32, i64) -> (!tfr.tensor, !tfr.tensor)
Last Modified: Mon Oct 30 06:52:55 UTC 2023 - 16.7K bytes
tensorflow/compiler/mlir/lite/tests/optimize.mlir
  func.return %1 : tensor<2x3xf32>
  // CHECK: %[[relu_n1_to_1:[0-9].*]] = "tfl.relu_n1_to_1"
}

// CHECK-LABEL: fuse_relu_to_add
func.func @fuse_relu_to_add(%arg0: tensor<2x3xf32>, %arg1: tensor<2x3xf32>) -> tensor<2x3xf32> {
  %0 = "tfl.add"(%arg0, %arg1) {fused_activation_function = "NONE"} : (tensor<2x3xf32>, tensor<2x3xf32>) -> tensor<2x3xf32>
  %1 = "tfl.relu_n1_to_1"(%0) : (tensor<2x3xf32>) -> tensor<2x3xf32>
Last Modified: Thu May 16 20:31:41 UTC 2024 - 284.1K bytes
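The fuse_relu_to_add test exercises the optimization that folds a standalone `tfl.relu_n1_to_1` into the `fused_activation_function` attribute of the producing `tfl.add`. A minimal sketch of that decision logic under hypothetical stand-in types; this is not the real MLIR rewrite-pattern API:

```cpp
#include <string>

// Hypothetical stand-in for an op carrying a fused-activation attribute;
// not the real MLIR/TFLite op types.
struct Op {
  std::string name;
  std::string fused_activation_function;
};

// Fold a standalone relu_n1_to_1 into its producer, as the
// fuse_relu_to_add test above expects. Returns true if the consumer's
// effect is now carried by the producer and it can be erased.
bool TryFuseReluN1To1(Op& producer, const Op& consumer) {
  if (consumer.name != "tfl.relu_n1_to_1") return false;
  if (producer.fused_activation_function != "NONE") return false;
  producer.fused_activation_function = "RELU_N1_TO_1";
  return true;
}
```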
tensorflow/compiler/mlir/lite/flatbuffer_operator.cc
  return llvm::StringSwitch<tflite::ActivationFunctionType>(str)
      .Case("NONE", tflite::ActivationFunctionType_NONE)
      .Case("RELU", tflite::ActivationFunctionType_RELU)
      .Case("RELU_N1_TO_1", tflite::ActivationFunctionType_RELU_N1_TO_1)
      .Case("RELU6", tflite::ActivationFunctionType_RELU6)
      .Case("TANH", tflite::ActivationFunctionType_TANH)
Last Modified: Tue May 21 18:21:50 UTC 2024 - 38K bytes
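The snippet is truncated before the end of the chain; an `llvm::StringSwitch` is normally closed with a `.Default(...)` case. A dependency-free sketch of the same string-to-enum mapping; the fallback to NONE for unknown strings is our assumption, not necessarily what the real file does:

```cpp
#include <string>

enum class ActivationFunctionType { NONE, RELU, RELU_N1_TO_1, RELU6, TANH };

// Same mapping as the StringSwitch above, without the LLVM dependency.
// Falling back to NONE for unknown strings is an assumption of this
// sketch; the real converter may handle that case differently.
ActivationFunctionType ParseActivation(const std::string& str) {
  if (str == "RELU") return ActivationFunctionType::RELU;
  if (str == "RELU_N1_TO_1") return ActivationFunctionType::RELU_N1_TO_1;
  if (str == "RELU6") return ActivationFunctionType::RELU6;
  if (str == "TANH") return ActivationFunctionType::TANH;
  return ActivationFunctionType::NONE;
}
```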
tensorflow/compiler/mlir/lite/tests/ops.mlir
  // CHECK: "RELU"
  %1 = tfl.add %arg0, %arg1 {fused_activation_function = "RELU"} : tensor<4xi32>
  // CHECK: "RELU_N1_TO_1"
  %2 = tfl.add %arg0, %arg1 {fused_activation_function = "RELU_N1_TO_1"} : tensor<4xi32>
  // CHECK: "RELU6"
  %3 = tfl.add %arg0, %arg1 {fused_activation_function = "RELU6"} : tensor<4xi32>
  // CHECK: "TANH"
Last Modified: Thu Jun 06 19:09:08 UTC 2024 - 189.2K bytes