Search Options

Results per page
Sort
Preferred Languages
Advanced

Results 1 - 6 of 6 for LeakyReluGrad (0.37 sec)

  1. tensorflow/cc/gradients/nn_grad.cc

      internal::LeakyReluGrad::Attrs attrs;
      auto dx = internal::LeakyReluGrad(scope, grad_inputs[0], op.input(1),
                                        attrs.Alpha(alpha));
      grad_outputs->push_back(dx);
      grad_outputs->push_back(NoGradient());
      return scope.status();
    }
    REGISTER_GRADIENT_OP("LeakyReluGrad", LeakyReluGradGradHelper);
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Fri May 27 23:34:33 UTC 2022
    - 24.5K bytes
    - Viewed (0)
  2. tensorflow/cc/gradients/nn_grad_test.cc

      Tensor x_init_value = test::AsTensor<float>(
          {-0.9f, -0.7f, -0.5f, -0.3f, -0.1f, 6.1f, 6.3f, 6.5f, 6.7f, 6.9f},
          {5, 2});
      RunTest(x, x_init_value, y, shape);
    }
    
    TEST_F(NNGradTest, LeakyReluGrad) {
      TensorShape shape({5, 2});
      auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(shape));
      auto y = ops::internal::LeakyRelu(scope_, x);
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Mar 22 20:45:22 UTC 2022
    - 15K bytes
    - Viewed (0)
  3. tensorflow/compiler/jit/mark_for_compilation_pass.cc

    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed Feb 21 12:19:41 UTC 2024
    - 85.3K bytes
    - Viewed (0)
  4. tensorflow/compiler/mlir/tf2xla/transforms/legalize_tf.cc

                                              leakyActivationVal);
        return success();
      }
    };
    
    /// Converts a TF::LeakyReluGradOp to HLO.
    /// LeakyReluGrad(gradient, inputs) = gradient if input > 0
    /// else alpha * gradient.
    class ConvertLeakyReluGradOp : public OpRewritePattern<TF::LeakyReluGradOp> {
     public:
      using OpRewritePattern::OpRewritePattern;
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Jun 11 20:00:43 UTC 2024
    - 291.8K bytes
    - Viewed (0)
  5. tensorflow/compiler/mlir/tf2xla/tests/legalize-tf.mlir

        // CHECK-NEXT: %[[RES:.*]] = mhlo.select %[[CMP]], %[[GRADIENT]], %[[LEAKYGRAD]] : tensor<1x4x4xi1>, tensor<1x4x4xf32>
        // CHECK-NEXT: return %[[RES]] : tensor<1x4x4xf32>
        %0 = "tf.LeakyReluGrad"(%arg0, %arg1) {alpha = 2.000000e-01 : f32, device = ""} : (tensor<1x4x4xf32>, tensor<1x4x4xf32>) -> tensor<1x4x4xf32>
        func.return %0 : tensor<1x4x4xf32>
    }
    
    // -----
    
    // CHECK-LABEL: func @softsign
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Mon May 06 18:46:23 UTC 2024
    - 335.5K bytes
    - Viewed (0)
  6. tensorflow/compiler/mlir/tensorflow/ir/tf_generated_ops.td

        static bool isCompatibleReturnTypes(TypeRange inferred, TypeRange actual) {
          return ArraysAreCastCompatible(inferred, actual);
        }
      }];
    }
    
    def TF_LeakyReluGradOp : TF_Op<"LeakyReluGrad", [Pure, TF_SameOperandsAndResultTypeResolveRef]> {
      let summary = [{
    Computes rectified linear gradients for a LeakyRelu operation.
      }];
    
      let arguments = (ins
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Jun 11 23:24:08 UTC 2024
    - 793K bytes
    - Viewed (0)
Back to top