Results 1 - 5 of 5 for da (0.15 sec)

  1. tensorflow/c/experimental/gradients/math_grad_test.cc

      ASSERT_EQ(errors::OK, status_.code()) << status_.message();
    
      bool transpose_a_vals[] = {false, false, true, true};
      bool transpose_b_vals[] = {false, true, false, true};
      float dA_vals[4][9] = {{24, 15, 6, 24, 15, 6, 24, 15, 6},
                             {18, 15, 12, 18, 15, 12, 18, 15, 12},
                             {24, 24, 24, 15, 15, 15, 6, 6, 6},
    C++
    - Registered: Tue Mar 26 12:39:09 GMT 2024
    - Last Modified: Thu Apr 13 17:32:14 GMT 2023
    - 16.3K bytes
    - Viewed (0)
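
    The four transpose_a / transpose_b combinations exercised by this test correspond to the
    standard MatMul gradient formulas. For reference (standard matrix calculus with upstream
    gradient U, not quoted from the file; the concrete dA_vals depend on the specific A, B,
    and U the test constructs, which fall outside the snippet):

      % MatMul gradients for the four transpose combinations
      % (standard matrix-calculus results, not taken from math_grad_test.cc).
      \[
      \begin{aligned}
      C &= A B:                & dA &= U B^{\top},           & dB &= A^{\top} U \\
      C &= A^{\top} B:         & dA &= B U^{\top},           & dB &= A U \\
      C &= A B^{\top}:         & dA &= U B,                  & dB &= U^{\top} A \\
      C &= A^{\top} B^{\top}:  & dA &= B^{\top} U^{\top},    & dB &= U^{\top} A^{\top}
      \end{aligned}
      \]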
  2. tensorflow/c/eager/gradient_checker_test.cc

        ASSERT_EQ(errors::OK, s.code()) << s.message();
        B.reset(B_raw);
      }
    
      float expected_dA[4] = {-.5f, 2.0f, -.5f, 2.0f};
      ASSERT_NO_FATAL_FAILURE(CompareNumericalAndManualGradients(
          MatMulModel, ctx_.get(), {A.get(), B.get()}, 0, expected_dA, 4,
          UseFunction()));
    }
    
    TEST_P(GradientCheckerTest, TestMul) {
      AbstractTensorHandlePtr x;
      {
    C++
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Fri Apr 14 10:03:59 GMT 2023
    - 6.5K bytes
    - Viewed (0)
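
    The CompareNumericalAndManualGradients call above compares a numerically estimated
    gradient against the registered analytic one. Below is a minimal, self-contained sketch
    of the usual approach (a central-difference estimate on plain floats); it illustrates the
    idea only and is not the AbstractTensorHandle-based API used by this test.

      // Central-difference gradient check for a scalar-valued function of a
      // float vector, compared against a known analytic gradient.
      #include <cmath>
      #include <cstdio>
      #include <functional>
      #include <vector>

      // Estimate df/dx[i] as (f(x + eps*e_i) - f(x - eps*e_i)) / (2*eps).
      std::vector<float> NumericalGrad(
          const std::function<float(const std::vector<float>&)>& f,
          std::vector<float> x, float eps = 1e-3f) {
        std::vector<float> grad(x.size());
        for (size_t i = 0; i < x.size(); ++i) {
          const float saved = x[i];
          x[i] = saved + eps;
          const float plus = f(x);
          x[i] = saved - eps;
          const float minus = f(x);
          x[i] = saved;
          grad[i] = (plus - minus) / (2.0f * eps);
        }
        return grad;
      }

      int main() {
        // f(x) = sum(x * x); the analytic gradient is 2*x.
        auto f = [](const std::vector<float>& x) {
          float s = 0.0f;
          for (float v : x) s += v * v;
          return s;
        };
        std::vector<float> x = {-0.25f, 1.0f, -0.25f, 1.0f};
        std::vector<float> numerical = NumericalGrad(f, x);
        for (size_t i = 0; i < x.size(); ++i) {
          const float analytic = 2.0f * x[i];
          if (std::fabs(numerical[i] - analytic) > 1e-2f) {
            std::printf("mismatch at %zu: %f vs %f\n", i, numerical[i], analytic);
            return 1;
          }
        }
        std::printf("numerical and analytic gradients agree\n");
        return 0;
      }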
  3. tensorflow/c/experimental/gradients/nn_grad.cc

                     absl::Span<AbstractTensorHandle*> grad_inputs) override {
        /* Given upstream grad U and a BiasAdd: A + bias, the gradients are:
         *
         *    dA = U
         *    dbias = reduceSum(U, dims = channel_dim)
         */
    
        AbstractTensorHandle* upstream_grad = grad_outputs[0];
        DCHECK(upstream_grad);
    
        // Recover data format from forward pass for gradient.
    C++
    - Registered: Tue Mar 26 12:39:09 GMT 2024
    - Last Modified: Wed Feb 28 13:53:47 GMT 2024
    - 5.7K bytes
    - Viewed (0)
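
    The comment above fully describes the BiasAdd gradient: the upstream gradient passes
    through unchanged to A, and the bias gradient reduces it over every dimension except the
    channel dimension. The following is a minimal sketch under the assumption of a
    channels-last layout with all other dimensions collapsed into one; it does not reproduce
    the data-format handling done in nn_grad.cc.

      // BiasAdd gradient on raw floats: dA = U, dbias = reduceSum(U) over all
      // dimensions except the channel dimension (assumed to be the last one).
      #include <cstdio>

      int main() {
        constexpr int kRows = 3;      // all non-channel dims collapsed into one
        constexpr int kChannels = 2;  // channel_dim is the last dimension
        // Upstream gradient U.
        const float U[kRows][kChannels] = {{1, 2}, {3, 4}, {5, 6}};

        // dA = U (identity pass-through).
        float dA[kRows][kChannels];
        // dbias = reduceSum(U, dims = all but channel_dim).
        float dbias[kChannels] = {0, 0};
        for (int r = 0; r < kRows; ++r) {
          for (int c = 0; c < kChannels; ++c) {
            dA[r][c] = U[r][c];
            dbias[c] += U[r][c];
          }
        }

        std::printf("dbias = [%g, %g]\n", dbias[0], dbias[1]);  // prints [9, 12]
        (void)dA;
        return 0;
      }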
  4. tensorflow/c/experimental/gradients/math_grad.cc

                     absl::Span<AbstractTensorHandle*> grad_inputs) override {
        /* Given upstream grad U and a matmul op A*B, the gradients are:
         *
         *    dA = U * B.T
         *    dB = A.T * U
         *
         *    where A.T means `transpose(A)`
         */
        AbstractTensorHandle* upstream_grad = grad_outputs[0];
    
        // Get transpose attrs
        bool t_a;
    C++
    - Registered: Tue Mar 26 12:39:09 GMT 2024
    - Last Modified: Wed Feb 28 13:53:47 GMT 2024
    - 15.2K bytes
    - Viewed (0)
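
    The comment above gives the MatMul gradient for the non-transposed case: dA = U * B.T and
    dB = A.T * U. Below is a minimal sketch of those two products on small raw float
    matrices; it is illustrative only, since math_grad.cc builds them with MatMul ops on
    AbstractTensorHandles and also honors the transpose attributes.

      // MatMul gradient formulas for C = A * B with upstream gradient U:
      //   dA = U * B^T   (shape [M][K])
      //   dB = A^T * U   (shape [K][N])
      #include <cstdio>

      constexpr int M = 2, K = 3, N = 2;

      int main() {
        const float A[M][K] = {{1, 2, 3}, {4, 5, 6}};
        const float B[K][N] = {{1, 0}, {0, 1}, {1, 1}};
        const float U[M][N] = {{1, 1}, {1, 1}};  // e.g. d(sum(C))/dC

        // dA = U * B^T
        float dA[M][K] = {};
        for (int m = 0; m < M; ++m)
          for (int k = 0; k < K; ++k)
            for (int n = 0; n < N; ++n) dA[m][k] += U[m][n] * B[k][n];

        // dB = A^T * U
        float dB[K][N] = {};
        for (int k = 0; k < K; ++k)
          for (int n = 0; n < N; ++n)
            for (int m = 0; m < M; ++m) dB[k][n] += A[m][k] * U[m][n];

        std::printf("dA[0] = [%g, %g, %g]\n", dA[0][0], dA[0][1], dA[0][2]);
        std::printf("dB[0] = [%g, %g]\n", dB[0][0], dB[0][1]);
        return 0;
      }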
  5. RELEASE.md

    Rawat, Astropeak, Ayush Agrawal, Bairen Yi, Bas Aarts, Bastian Eichenberger, Ben
    Barsdell, Benjamin Peterson, bhack, Bharat Raghunathan, Bhavani Subramanian,
    Bryan Cutler, candy.dc, Cao Zongyan, Captain-Pool, Casper Da Costa-Luis, Chen
    Guoyin, Cheng Chang, chengchingwen, Chong Yan, Choong Yin Thong, Christopher
    Yeh, Clayne Robison, Coady, Patrick, Dan Ganea, David Norman, Denis Khalikov,
    Plain Text
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Mon Apr 29 19:17:57 GMT 2024
    - 727.7K bytes
    - Viewed (8)