Results 1 - 4 of 4 for else_function (0.09 sec)

  1. tensorflow/c/eager/gradient_checker.cc

                                absl::Span<AbstractTensorHandle*> outputs,
                                bool use_function) {
      AbstractTensorHandle* model_outputs[1];
    
      // Run the model.
      TF_RETURN_IF_ERROR(
          RunModel(forward, ctx, inputs, model_outputs, use_function));
      AbstractTensorHandlePtr model_out(model_outputs[0]);
    
      TF_Tensor* model_out_tensor;
    Registered: Tue Nov 05 12:39:12 UTC 2024
    - Last Modified: Sat Oct 12 05:11:17 UTC 2024
    - 7.3K bytes
    - Viewed (0)
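
    The excerpt above only shows the forward pass; the rest of gradient_checker.cc
    estimates dy/dw by finite differences. A minimal standalone sketch of that
    technique (plain C++, deliberately not the TensorFlow C API; the names here
    are illustrative only):

        // Central-difference estimate of df/dx: (f(x+e) - f(x-e)) / (2e).
        #include <functional>
        #include <iostream>

        double NumericalGrad(const std::function<double(double)>& f, double x,
                             double epsilon = 1e-4) {
          return (f(x + epsilon) - f(x - epsilon)) / (2.0 * epsilon);
        }

        int main() {
          auto square = [](double x) { return x * x; };
          // d(x^2)/dx at x = 3 is 6; the estimate should be close to that.
          std::cout << NumericalGrad(square, 3.0) << "\n";
        }
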
  2. tensorflow/c/eager/gradient_checker_test.cc

        absl::Span<AbstractTensorHandle* const> inputs, int input_index,
        float* expected_grad, int num_grad, bool use_function,
        double abs_error = 1e-2) {
      absl::Status s;
      AbstractTensorHandlePtr numerical_grad;
      {
        AbstractTensorHandle* numerical_grad_raw;
        s = CalcNumericalGrad(ctx, model, inputs, input_index, use_function,
                              &numerical_grad_raw);
        ASSERT_EQ(errors::OK, s.code()) << s.message();
    Registered: Tue Nov 05 12:39:12 UTC 2024
    - Last Modified: Sat Oct 12 05:11:17 UTC 2024
    - 6.5K bytes
    - Viewed (0)
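
    The test helper above obtains the numerical gradient and asserts the status
    is OK; presumably it then compares the result against `expected_grad`
    element by element within `abs_error`. A hedged sketch of that comparison
    step (`CompareGrads` and the flat float buffers are illustrative
    assumptions, not helpers from this file):

        #include <gtest/gtest.h>

        // Illustrative only: compare a materialized numerical gradient against
        // the expected values within the given absolute tolerance.
        void CompareGrads(const float* numerical, const float* expected,
                          int num_grad, double abs_error) {
          for (int i = 0; i < num_grad; ++i) {
            EXPECT_NEAR(numerical[i], expected[i], abs_error)
                << "gradient mismatch at index " << i;
          }
        }
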
  3. tensorflow/c/eager/gradient_checker.h

     * parameter specified by `input_index`.
     *
     * I.e. if y = <output of the forward model> and w = inputs[input_index],
     * this will calculate dy/dw numerically.
     *
     * `use_function` indicates whether to use graph mode (true) or eager (false).
     *
     * `numerical_grad` is the pointer to the AbstractTensorHandle* which will
     * hold the numerical gradient data at the end of the function.
     */
    Registered: Tue Nov 05 12:39:12 UTC 2024
    - Last Modified: Sat Oct 12 05:11:17 UTC 2024
    - 1.8K bytes
    - Viewed (0)
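
    The header documents the contract of CalcNumericalGrad; a sketch of a call
    site, based only on the call shape visible in gradient_checker_test.cc
    above (the includes, the `Model` parameter type, and the surrounding setup
    are assumptions, not taken from this page):

        #include "absl/status/status.h"
        #include "absl/types/span.h"
        #include "tensorflow/c/eager/gradient_checker.h"

        // Assumed wrapper: `model` stands for whatever forward-model callable
        // type CalcNumericalGrad expects; ctx and inputs are built elsewhere.
        absl::Status GradOfFirstInput(
            AbstractContext* ctx, Model model,
            absl::Span<AbstractTensorHandle* const> inputs,
            AbstractTensorHandlePtr* numerical_grad) {
          AbstractTensorHandle* numerical_grad_raw = nullptr;
          // dy/dw for w = inputs[0], run in graph mode (use_function = true).
          TF_RETURN_IF_ERROR(CalcNumericalGrad(ctx, model, inputs,
                                               /*input_index=*/0,
                                               /*use_function=*/true,
                                               &numerical_grad_raw));
          numerical_grad->reset(numerical_grad_raw);
          return absl::OkStatus();
        }
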
  4. tensorflow/c/eager/gradients_test.cc

      std::vector<AbstractTensorHandle*> outputs(1);
      absl::Status s = RunModel(RecordOperationWithNullGradientFunctionModel,
                                ctx.get(), {x.get()}, absl::MakeSpan(outputs),
                                /*use_function=*/!std::get<2>(GetParam()));
      ASSERT_EQ(error::INVALID_ARGUMENT, s.code());
      ASSERT_EQ(
          "Provided null gradient_function for 'Neg'.\nIf the intent is to treat "
    Registered: Tue Nov 05 12:39:12 UTC 2024
    - Last Modified: Sat Oct 12 05:11:17 UTC 2024
    - 7K bytes
    - Viewed (0)