Search Options

Results per page
Sort
Preferred Languages
Advanced

Results 1 - 4 of 4 for ReduceSum (0.1 sec)

  1. tensorflow/cc/gradients/math_grad.cc

      auto reduce =
          internal::BroadcastGradientArgs(scope, x_batch_shape, y_batch_shape);
      (*grad_outputs)[0] =
          Reshape(scope, ReduceSum(scope, (*grad_outputs)[0], reduce.r0), sx);
      (*grad_outputs)[1] =
          Reshape(scope, ReduceSum(scope, (*grad_outputs)[1], reduce.r1), sy);
      return scope.status();
    }
    REGISTER_GRADIENT_OP("BatchMatMulV2", BatchMatMulV2Grad);
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Fri Aug 25 18:20:20 UTC 2023
    - 50.7K bytes
    - Viewed (0)
  2. tensorflow/c/experimental/gradients/nn_grad.cc

                     absl::Span<AbstractTensorHandle*> grad_inputs) override {
        /* Given upstream grad U and a BiasAdd: A + bias, the gradients are:
         *
         *    dA = U
         *    dbias = reduceSum(U, dims = channel_dim)
         */
    
        AbstractTensorHandle* upstream_grad = grad_outputs[0];
        DCHECK(upstream_grad);
    
        // Recover data format from forward pass for gradient.
        std::string data_format;
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Apr 09 06:38:45 UTC 2024
    - 5.7K bytes
    - Viewed (0)
  3. tensorflow/cc/gradients/array_grad.cc

      // all the gradients from the shape it fills.
      // We use ReduceSum to implement this, which needs an argument providing
      // the indices of all the dimensions of the incoming gradient.
      // grad(x) = reduce_sum(grad(y), [0..rank(grad(y))])
      auto all_dims = Range(scope, Const(scope, 0), Rank(scope, grad_inputs[0]),
                            Const(scope, 1));
      grad_outputs->push_back(ReduceSum(scope, grad_inputs[0], all_dims));
      return scope.status();
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Oct 10 23:33:32 UTC 2023
    - 31.7K bytes
    - Viewed (0)
  4. tensorflow/cc/gradients/linalg_grad.cc

          scope, Slice1dHelper(scope, x_shape, bx_start, bx_end),
          Slice1dHelper(scope, y_shape, by_start, by_end));
      grad_x = Reshape(
          scope, ReduceSum(scope, grad_x, Add(scope, bx_start, args.r0)), x_shape);
      grad_y = Reshape(
          scope, ReduceSum(scope, grad_y, Add(scope, by_start, args.r1)), y_shape);
      grad_outputs->push_back(grad_x);
      grad_outputs->push_back(grad_y);
      return scope.status();
    }
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Mon Mar 07 23:11:54 UTC 2022
    - 20.4K bytes
    - Viewed (0)
Back to top