Results 1 - 4 of 4 for calculated (0.29 sec)

  1. tensorflow/c/experimental/gradients/nn_grad.cc

                     absl::Span<AbstractTensorHandle*> grad_inputs) override {
        AbstractTensorHandle* upstream_grad = grad_outputs[0];
        AbstractTensorHandle* activations = forward_outputs_[0];
    
        // Calculate Grad
        std::string name = "relu_grad";
        TF_RETURN_IF_ERROR(ReluGrad(ctx, upstream_grad, activations,
                                    &grad_inputs[0], name.c_str()));
        return absl::OkStatus();
      }
    C++
    - Registered: Tue Mar 26 12:39:09 GMT 2024
    - Last Modified: Wed Feb 28 13:53:47 GMT 2024
    - 5.7K bytes
    - Viewed (0)
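    A note on the rule applied above: ReluGrad produces the input gradient by
    letting the upstream gradient through wherever the saved forward activation
    is positive and zeroing it elsewhere. A minimal loop sketch of that rule
    (plain std::vector stand-ins instead of AbstractTensorHandle; ReluGradSketch
    is a hypothetical name, not the TF C API):

        #include <cstddef>
        #include <vector>

        // Sketch of the ReLU gradient rule: the upstream gradient flows through
        // where the forward activation is positive and is zeroed elsewhere.
        std::vector<float> ReluGradSketch(const std::vector<float>& upstream_grad,
                                          const std::vector<float>& activations) {
          std::vector<float> grad_input(upstream_grad.size());
          for (std::size_t i = 0; i < upstream_grad.size(); ++i) {
            grad_input[i] = activations[i] > 0.0f ? upstream_grad[i] : 0.0f;
          }
          return grad_input;
        }
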
  2. tensorflow/c/experimental/filesystem/plugins/gcs/ram_file_block_cache.cc

        // The cache is effectively disabled, so we pass the read through to the
        // fetcher without breaking it up into blocks.
        return block_fetcher_(filename, offset, n, buffer, status);
      }
      // Calculate the block-aligned start and end of the read.
      size_t start = block_size_ * (offset / block_size_);
      size_t finish = block_size_ * ((offset + n) / block_size_);
      if (finish < offset + n) {
        finish += block_size_;
    C++
    - Registered: Tue Apr 23 12:39:09 GMT 2024
    - Last Modified: Thu Jul 16 01:39:09 GMT 2020
    - 11.1K bytes
    - Viewed (0)
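    The block-alignment arithmetic above rounds the read start down to a block
    boundary and the read end up to the next boundary, so the whole request is
    covered by whole blocks. A standalone sketch of that arithmetic (AlignedRange
    is a hypothetical helper, not part of ram_file_block_cache.cc):

        #include <cstddef>
        #include <cstdio>

        // Round `offset` down and `offset + n` up to multiples of `block_size`.
        void AlignedRange(std::size_t block_size, std::size_t offset, std::size_t n,
                          std::size_t* start, std::size_t* finish) {
          *start = block_size * (offset / block_size);         // round down
          *finish = block_size * ((offset + n) / block_size);  // round down...
          if (*finish < offset + n) *finish += block_size;     // ...then cover the tail
        }

        int main() {
          std::size_t start = 0, finish = 0;
          // A 100-byte read at offset 250 with 128-byte blocks covers [128, 384).
          AlignedRange(128, 250, 100, &start, &finish);
          std::printf("start=%zu finish=%zu\n", start, finish);  // start=128 finish=384
        }
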
  3. tensorflow/c/eager/gradient_checker.cc

        TF_RETURN_IF_ERROR(
            ops::Sub(ctx, fPlus.get(), fMinus.get(), f_outputs, "sub_top"));
        AbstractTensorHandlePtr fDiff(f_outputs[0]);
    
        // Calculate using the difference quotient definition:
        // (f(theta + eps) - f(theta - eps)) / (2 * eps).
        TF_RETURN_IF_ERROR(
            ops::Div(ctx, fDiff.get(), two_eps.get(), f_outputs, "diff_quotient"));
    C++
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Thu Feb 15 09:49:45 GMT 2024
    - 7.3K bytes
    - Viewed (0)
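    The "diff_quotient" above is the central difference quotient
    (f(theta + eps) - f(theta - eps)) / (2 * eps), which approximates the
    derivative of f at theta and is what the gradient checker compares analytic
    gradients against. A scalar sketch of the same idea (CentralDifference is a
    hypothetical stand-in for the tensor-valued computation in gradient_checker.cc):

        #include <cstdio>
        #include <functional>

        // Central difference approximation of f'(theta).
        double CentralDifference(const std::function<double(double)>& f,
                                 double theta, double eps) {
          return (f(theta + eps) - f(theta - eps)) / (2.0 * eps);
        }

        int main() {
          // d/dx x^2 at x = 3 is 6; the quotient should be very close to that.
          auto square = [](double x) { return x * x; };
          std::printf("%f\n", CentralDifference(square, 3.0, 1e-4));
        }
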
  4. tensorflow/c/experimental/gradients/math_grad.cc

        AbstractTensorHandlePtr Ones_X(temp_output);
    
        name = "Add_Log1p_Grad_X";
        // Calculate 1 + Conj(X)
        TF_RETURN_IF_ERROR(
            AddV2(ctx, Ones_X.get(), Conj_X.get(), &temp_output, name.c_str()));
    
        AbstractTensorHandlePtr Conj_XP1(temp_output);
    
        name = "Div_Log1p_Grad_X";
        // Calculate U / (1 + Conj(X))
        TF_RETURN_IF_ERROR(
    C++
    - Registered: Tue Mar 26 12:39:09 GMT 2024
    - Last Modified: Wed Feb 28 13:53:47 GMT 2024
    - 15.2K bytes
    - Viewed (0)
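    The two steps above implement the Log1p gradient: since
    d/dx log(1 + x) = 1 / (1 + x), the incoming gradient U is divided by
    1 + Conj(X), with the conjugate covering complex inputs as the snippet shows.
    A scalar sketch of that formula (Log1pGradSketch is a hypothetical name, not
    the TF kernel):

        #include <complex>
        #include <cstdio>

        // Gradient of log1p: upstream / (1 + conj(x)).
        std::complex<double> Log1pGradSketch(std::complex<double> upstream,
                                             std::complex<double> x) {
          return upstream / (1.0 + std::conj(x));
        }

        int main() {
          auto g = Log1pGradSketch({1.0, 0.0}, {0.5, 0.0});
          std::printf("%f\n", g.real());  // prints 0.666667 (= 1 / 1.5)
        }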