Results 1 - 10 of 11 for kRelu1 (0.12 sec)

  1. tensorflow/compiler/mlir/lite/transforms/optimize.cc

    // The actual Optimize Pass.
    namespace {
    #define GEN_PASS_DEF_OPTIMIZEPASS
    #include "tensorflow/compiler/mlir/lite/transforms/passes.h.inc"
    
    constexpr char kRelu[] = "RELU";
    constexpr char kRelu6[] = "RELU6";
    constexpr char kRelu1[] = "RELU_N1_TO_1";
    
    ElementsAttr FlattenTo1D(Attribute a) {
      auto elements = mlir::cast<DenseElementsAttr>(a);
      const std::array<int64_t, 1> flattened_shape = {elements.getNumElements()};
    - Last Modified: Tue Apr 30 00:40:15 UTC 2024
    - 102.3K bytes
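    For context, kRelu, kRelu6, and kRelu1 name TFLite fused activation
    functions; the optimizer uses these strings when folding a standalone
    activation into a preceding op. A minimal sketch of the clamp each name
    implies; the helper below is illustrative, not a function in optimize.cc:

    #include <limits>
    #include <string>
    #include <utility>

    // Illustrative only: the [min, max] clamp implied by a TFLite fused
    // activation function name. "RELU_N1_TO_1" is the string behind kRelu1.
    std::pair<float, float> ClampBoundsFor(const std::string& act) {
      const float inf = std::numeric_limits<float>::infinity();
      if (act == "RELU") return {0.0f, inf};
      if (act == "RELU6") return {0.0f, 6.0f};
      if (act == "RELU_N1_TO_1") return {-1.0f, 1.0f};
      return {-inf, inf};  // "NONE": no clamping.
    }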
  2. tensorflow/compiler/jit/cluster_scoping_pass_test.cc

        //       b
        //       |
        //       v
        // a -> add0 (ClusterX) -> relu0 (ClusterX) -> stage
        //
        //             b
        //             |
        //             v
        // unstage -> add1 (ClusterY) -> relu1 (ClusterY)
        GraphDefBuilder builder(GraphDefBuilder::kFailImmediately);
        Node* a = ops::SourceOp("Const", builder.opts()
                                             .WithName("a")
    - Last Modified: Wed Apr 29 16:20:48 UTC 2020
    - 6.7K bytes
  3. tensorflow/compiler/mlir/lite/quantization/tensorflow/fallback_to_flex_patterns.td

    // make it easier to create Relu1 matching patterns.
    def SwapMaximumOperands : Pat<
      (TF_MaximumOp (TF_ConstOp:$cst $cst_val), $input),
      (TF_MaximumOp $input, $cst)>;
    
    def SwapMinimumOperands : Pat<
      (TF_MinimumOp (TF_ConstOp:$cst $cst_val), $input),
      (TF_MinimumOp $input, $cst)>;
    
    // Relu1 activation is represented as a couple of Max and Min ops. The following
    - Last Modified: Thu Sep 29 21:02:21 UTC 2022
    - 3.2K bytes
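    The two swap patterns canonicalize constants onto the right-hand side, so
    the later Relu1 patterns only need to match one operand order. A sketch of
    the equivalence being exploited, assuming the usual decomposition of Relu1
    into a Minimum/Maximum pair (illustrative C++, not converter code):

    #include <algorithm>

    // Both spellings compute the Relu1 clamp to [-1, 1]; after the swaps,
    // only the constant-on-the-right form has to be pattern-matched.
    float Relu1ConstLeft(float x)  { return std::min(1.0f, std::max(-1.0f, x)); }
    float Relu1ConstRight(float x) { return std::min(std::max(x, -1.0f), 1.0f); }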
  4. tensorflow/compiler/mlir/lite/stablehlo/odml_converter/transforms/outline_composites.cc

    namespace mlir {
    namespace odml {
    namespace {
    
    // TODO - b/330337238: Surface these to other files when needed.
    constexpr llvm::StringLiteral kCompositeNamespace = "odml.internal";
    constexpr llvm::StringLiteral kGelu = "gelu";
    
    std::string MakeCompositeName(llvm::StringRef op_name) {
      return (kCompositeNamespace + "." + op_name).str();
    }
    
    #define GEN_PASS_DEF_OUTLINECOMPOSITESPASS
    - Last Modified: Fri May 17 17:58:54 UTC 2024
    - 9.6K bytes
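    For reference, the helper above simply prefixes the internal namespace:
    MakeCompositeName("gelu") yields "odml.internal.gelu".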
  5. tensorflow/compiler/mlir/lite/tests/flatbuffer2mlir/test_schema.fbs

      LOGISTIC = 14,
      LSH_PROJECTION = 15,
      LSTM = 16,
      MAX_POOL_2D = 17,
      MUL = 18,
      RELU = 19,
      // NOTE(aselle): RELU_N1_TO_1 used to be called RELU1, but it was renamed
      // since different model developers use RELU1 in different ways. Never
      // create another op called RELU1.
      RELU_N1_TO_1 = 20,
      RELU6 = 21,
      RESHAPE = 22,
      RESIZE_BILINEAR = 23,
      RNN = 24,
      SOFTMAX = 25,
      SPACE_TO_DEPTH = 26,
    - Last Modified: Mon Apr 19 19:46:06 UTC 2021
    - 26.1K bytes
  6. tensorflow/compiler/mlir/lite/schema/schema_v3b.fbs

      LOGISTIC = 14,
      LSH_PROJECTION = 15,
      LSTM = 16,
      MAX_POOL_2D = 17,
      MUL = 18,
      RELU = 19,
      // NOTE(aselle): RELU_N1_TO_1 used to be called RELU1, but it was renamed
      // since different model developers use RELU1 in different ways. Never
      // create another op called RELU1.
      RELU_N1_TO_1 = 20,
      RELU6 = 21,
      RESHAPE = 22,
      RESIZE_BILINEAR = 23,
      RNN = 24,
      SOFTMAX = 25,
      SPACE_TO_DEPTH = 26,
    - Last Modified: Tue May 28 14:28:27 UTC 2024
    - 30K bytes
  7. tensorflow/compiler/jit/mark_for_compilation_pass_test.cc

        std::unordered_map<string, string> clusters = GetClusters(*graph);
        EXPECT_EQ(clusters["add0"], clusters["add1"]);
        EXPECT_EQ(clusters["add0"], clusters["relu1"]);
        EXPECT_EQ(clusters["relu0"], clusters["add1"]);
        EXPECT_EQ(clusters["relu0"], clusters["relu1"]);
      }
    
      // By default, ClusterScopingPass is on and different pipeline stages should
      // not be merged.
      {
    - Last Modified: Wed Feb 14 10:11:10 UTC 2024
    - 79.6K bytes
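    With ClusterScopingPass enabled, nodes on opposite sides of the
    Stage/Unstage boundary should land in different clusters, so the checks
    in that branch flip to inequality. A hedged sketch of what the elided
    block plausibly asserts, assuming the same GetClusters helper:

    // Hypothetical continuation: pipeline stages must not be merged, so
    // cross-stage nodes get distinct cluster names.
    std::unordered_map<string, string> clusters = GetClusters(*graph);
    EXPECT_NE(clusters["add0"], clusters["add1"]);
    EXPECT_NE(clusters["relu0"], clusters["relu1"]);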
  8. tensorflow/compiler/mlir/lite/schema/schema.fbs

      LOGISTIC = 14,
      LSH_PROJECTION = 15,
      LSTM = 16,
      MAX_POOL_2D = 17,
      MUL = 18,
      RELU = 19,
      // NOTE(aselle): RELU_N1_TO_1 used to be called RELU1, but it was renamed
      // since different model developers use RELU1 in different ways. Never
      // create another op called RELU1.
      RELU_N1_TO_1 = 20,
      RELU6 = 21,
      RESHAPE = 22,
      RESIZE_BILINEAR = 23,
      RNN = 24,
      SOFTMAX = 25,
      SPACE_TO_DEPTH = 26,
    - Last Modified: Fri May 03 18:01:23 UTC 2024
    - 41.7K bytes
  9. tensorflow/compiler/mlir/lite/transforms/optimize_patterns.td

    def MinimumOfReluAnd6ToRelu6 :
      Pat<(TFL_MinimumOp (TFL_ReluOp $x), (Arith_ConstantOp $y)),
          (TFL_Relu6Op $x),
          [(IsConstantValueOf<6> $y)]>;
    
    // For both relu1 and relu_0_to_1, the min/max operators commute,
    // so there are two possible orderings we need to rewrite.
    // Concretely, `m < n -> max(m, min(n, x)) = min(n, max(m, x))`.
    // Proof:
    // case (x <= m)
    - Last Modified: Thu May 16 20:31:41 UTC 2024
    - 66.4K bytes
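    The snippet cuts off mid-proof, but the identity is quickly checked by
    cases: for x <= m both sides reduce to m, for m < x < n both sides are x,
    and for x >= n both sides are n (using m < n). A standalone numerical
    check of the same identity (illustrative C++, assumes only m < n):

    #include <algorithm>
    #include <cassert>

    // Verifies max(m, min(n, x)) == min(n, max(m, x)) for m < n across
    // values hitting all three cases from the comment.
    int main() {
      const float m = -1.0f, n = 1.0f;
      for (float x : {-2.0f, -1.0f, 0.0f, 1.0f, 2.0f}) {
        assert(std::max(m, std::min(n, x)) == std::min(n, std::max(m, x)));
      }
      return 0;
    }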
  10. tensorflow/compiler/mlir/lite/ir/tfl_ops.td

        PredOpTrait<"x and y must have same element type",
          TFL_TCresVTEtIsSameAsOp<0, 0>>,
        Pure,
        QuantizableResult,
        SameOperandsAndResultShape]> {
      let summary = "Relu1 operator";
    
      let description = [{
        Element-wise Relu1 operator
          x -> max(-1, min(1, x))
      }];
    
      let arguments = (ins TFL_TensorOf<[F32, QUI8, QI8]>:$x);
    
      let results = (outs TFL_TensorOf<[F32, QUI8, QI8]>:$y);
    
    - Last Modified: Thu Jun 06 19:09:08 UTC 2024
    - 186K bytes
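    The description pins down the semantics exactly. A minimal element-wise
    reference implementation of that formula (illustrative, not a TFLite
    kernel):

    #include <algorithm>
    #include <vector>

    // Reference semantics of TFL Relu1: y = max(-1, min(1, x)), applied
    // element-wise; shapes of x and y match, per SameOperandsAndResultShape.
    std::vector<float> Relu1(const std::vector<float>& x) {
      std::vector<float> y(x.size());
      for (size_t i = 0; i < x.size(); ++i) {
        y[i] = std::max(-1.0f, std::min(1.0f, x[i]));
      }
      return y;
    }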