Results 1 - 2 of 2 for kRelu1 (0.07 sec)

  1. tensorflow/compiler/mlir/lite/transforms/optimize.cc

    // The actual Optimize Pass.
    namespace {
    #define GEN_PASS_DEF_OPTIMIZEPASS
    #include "tensorflow/compiler/mlir/lite/transforms/passes.h.inc"
    
    constexpr char kRelu[] = "RELU";
    constexpr char kRelu6[] = "RELU6";
    constexpr char kRelu1[] = "RELU_N1_TO_1";
    
    ElementsAttr FlattenTo1D(Attribute a) {
      auto elements = mlir::cast<DenseElementsAttr>(a);
      const std::array<int64_t, 1> flattened_shape = {elements.getNumElements()};

    - Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Apr 30 00:40:15 UTC 2024
    - 102.3K bytes
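The FlattenTo1D helper in the snippet above is cut off mid-body by the result preview. Purely for orientation, a minimal sketch of how such a flatten-to-1-D helper could finish, assuming standard MLIR APIs (RankedTensorType::get, DenseElementsAttr::reshape); the name FlattenTo1DSketch and the continuation are illustrative, not the actual optimize.cc code.

    #include <array>
    #include <cstdint>

    #include "mlir/IR/BuiltinAttributes.h"  // mlir::DenseElementsAttr
    #include "mlir/IR/BuiltinTypes.h"       // mlir::RankedTensorType
    #include "mlir/Support/LLVM.h"          // mlir::cast

    // Hypothetical sketch only -- the real FlattenTo1D in optimize.cc is
    // truncated in the snippet above; this just illustrates the idea of
    // reinterpreting a dense elements attribute with a rank-1 type.
    mlir::ElementsAttr FlattenTo1DSketch(mlir::Attribute a) {
      auto elements = mlir::cast<mlir::DenseElementsAttr>(a);
      // Collapse all dimensions into a single one of size numElements.
      const std::array<int64_t, 1> flattened_shape = {elements.getNumElements()};
      auto flattened_type = mlir::RankedTensorType::get(
          flattened_shape, elements.getType().getElementType());
      // reshape() keeps the underlying data and reinterprets it with the new type.
      return elements.reshape(flattened_type);
    }
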
  2. tensorflow/compiler/mlir/lite/ir/tfl_ops.td

        PredOpTrait<"x and y must have same element type",
          TFL_TCresVTEtIsSameAsOp<0, 0>>,
        Pure,
        QuantizableResult,
        SameOperandsAndResultShape]> {
      let summary = "Relu1 operator";
    
      let description = [{
        Element-wise Relu1 operator
          x -> max(-1, min(1, x))
      }];
    
      let arguments = (ins TFL_TensorOf<[F32, QUI8, QI8]>:$x);
    
      let results = (outs TFL_TensorOf<[F32, QUI8, QI8]>:$y);
    
    - Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Jun 06 19:09:08 UTC 2024
    - 186K bytes
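The tfl_ops.td entry above describes the Relu1 operator as element-wise x -> max(-1, min(1, x)). As a minimal illustration of those semantics (plain C++, not TFLite code), with a hypothetical Relu1 helper:

    #include <algorithm>
    #include <vector>

    // Hypothetical helper mirroring the Relu1 description in tfl_ops.td:
    // each element is clamped to [-1, 1], i.e. max(-1, min(1, x)).
    std::vector<float> Relu1(const std::vector<float>& x) {
      std::vector<float> y(x.size());
      std::transform(x.begin(), x.end(), y.begin(),
                     [](float v) { return std::max(-1.0f, std::min(1.0f, v)); });
      return y;
    }

    // Example: {-2.5, -0.3, 0.0, 0.7, 3.1} -> {-1.0, -0.3, 0.0, 0.7, 1.0}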