Results 1 - 4 of 4 for Model (0.14 sec)

  1. tensorflow/c/experimental/gradients/grad_test_helper.cc

    namespace gradients {
    namespace internal {
    
    void CompareNumericalAndAutodiffGradients(
        Model model, Model grad_model, AbstractContext* ctx,
        absl::Span<AbstractTensorHandle* const> inputs, bool use_function,
        double abs_error) {
      auto num_inputs = inputs.size();
      std::vector<AbstractTensorHandle*> outputs(num_inputs);
      auto s = RunModel(grad_model, ctx, inputs, absl::MakeSpan(outputs),
                        /*use_function=*/use_function);
    C++
    - Registered: Tue Mar 26 12:39:09 GMT 2024
    - Last Modified: Wed Feb 28 13:53:47 GMT 2024
    - 5K bytes
    - Viewed (0)
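
    The helper above compares analytic (autodiff) gradients against numerical estimates and
    requires them to agree within `abs_error`. As a minimal, self-contained sketch of that check
    (plain doubles and central differences only; `NumericalGrad` and the tolerances below are
    assumptions for illustration, not TensorFlow API):

    #include <cassert>
    #include <cmath>
    #include <functional>

    // Central-difference estimate of df/dx at x (illustrative only).
    double NumericalGrad(const std::function<double(double)>& f, double x,
                         double eps = 1e-5) {
      return (f(x + eps) - f(x - eps)) / (2 * eps);
    }

    int main() {
      auto f = [](double x) { return x * x; };       // stand-in "model"
      auto grad_f = [](double x) { return 2 * x; };  // stand-in "grad_model" (analytic gradient)

      const double x = 3.0;
      const double abs_error = 1e-3;
      // Same idea as CompareNumericalAndAutodiffGradients: the numerical and
      // analytic gradients must agree within an absolute tolerance.
      assert(std::fabs(NumericalGrad(f, x) - grad_f(x)) < abs_error);
      return 0;
    }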
  2. tensorflow/c/eager/gradient_checker.cc

          RunModel(forward, ctx, inputs, model_outputs, use_function));
      AbstractTensorHandlePtr model_out(model_outputs[0]);
    
      TF_Tensor* model_out_tensor;
      TF_RETURN_IF_ERROR(GetValue(model_out.get(), &model_out_tensor));
      int num_dims_out = TF_NumDims(model_out_tensor);
      TF_DeleteTensor(model_out_tensor);
    
      // If the output is a scalar, then return the scalar output
      if (num_dims_out == 0) {
        outputs[0] = model_out.release();
    C++
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Thu Feb 15 09:49:45 GMT 2024
    - 7.3K bytes
    - Viewed (0)
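
    The excerpt above leans on the TF_RETURN_IF_ERROR early-return idiom around the C API calls
    (GetValue, TF_NumDims, TF_DeleteTensor). A minimal sketch of that error-propagation pattern,
    using a simplified stand-in Status type and macro (not TensorFlow's real definitions):

    #include <iostream>
    #include <string>

    // Simplified stand-in for a status/error type (assumption for this sketch).
    struct Status {
      bool ok = true;
      std::string message;
    };

    // Same early-return pattern TF_RETURN_IF_ERROR expresses: evaluate the
    // expression once and bail out of the calling function on failure.
    #define RETURN_IF_ERROR(expr)     \
      do {                            \
        Status _s = (expr);           \
        if (!_s.ok) return _s;        \
      } while (0)

    Status Step(bool succeed) {
      return succeed ? Status{} : Status{false, "step failed"};
    }

    Status RunPipeline() {
      RETURN_IF_ERROR(Step(true));
      RETURN_IF_ERROR(Step(false));  // error propagates to the caller here
      return Status{};
    }

    int main() {
      Status s = RunPipeline();
      std::cout << (s.ok ? "ok" : s.message) << "\n";
      return 0;
    }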
  3. tensorflow/c/eager/unified_api_testutil.cc

          TF_RETURN_IF_ERROR(
              CreateParamsForInputs(func_ctx.get(), inputs, &func_inputs));
          std::vector<AbstractTensorHandle*> model_outputs;
          model_outputs.resize(outputs.size());
          TF_RETURN_IF_ERROR(model(func_ctx.get(), absl::MakeSpan(func_inputs),
                                   absl::MakeSpan(model_outputs)));
          for (auto func_input : func_inputs) {
            func_input->Unref();
          }
          AbstractFunction* func = nullptr;
    C++
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Tue Feb 27 13:57:45 GMT 2024
    - 5.7K bytes
    - Viewed (0)
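
    The `func_input->Unref()` loop above releases manually reference-counted handles once the
    traced model call no longer needs them. A minimal sketch of that Ref()/Unref() ownership
    convention with an illustrative class (not the real AbstractTensorHandle):

    #include <vector>

    // Illustrative manually reference-counted object; mirrors the Ref()/Unref()
    // convention but is not TensorFlow code.
    class RefCounted {
     public:
      void Ref() { ++count_; }
      void Unref() {
        if (--count_ == 0) delete this;  // last reference destroys the object
      }

     private:
      ~RefCounted() = default;  // only reachable through Unref()
      int count_ = 1;           // creator starts with one reference
    };

    int main() {
      std::vector<RefCounted*> inputs;
      for (int i = 0; i < 3; ++i) inputs.push_back(new RefCounted());

      // Same cleanup idiom as the loop over func_inputs above: drop this
      // function's reference once the handles are no longer needed.
      for (auto* input : inputs) input->Unref();
      return 0;
    }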
  4. tensorflow/c/experimental/gradients/nn_grad_test.cc

      TF_RETURN_IF_ERROR(ops::SparseSoftmaxCrossEntropyWithLogits(
          ctx, inputs[0], inputs[1], &loss, &backprop,
          "SparseSoftmaxCrossEntropyWithLogits"));
      // `gradient_checker` only works with models that return a single tensor.
      // Although `ops::SparseSoftmaxCrossEntropyWithLogits` returns 2 tensors, the
      // second tensor isn't needed for computing the gradient, so we can safely
      // drop it.
      outputs[0] = loss;
    C++
    - Registered: Tue Mar 26 12:39:09 GMT 2024
    - Last Modified: Wed Feb 28 13:53:47 GMT 2024
    - 8.3K bytes
    - Viewed (0)
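
    As the comment above notes, the op produces two outputs (loss and backprop) but the gradient
    checker expects a single-output model, so the test forwards only the loss. A minimal
    stand-alone sketch of that wrapping, with a hypothetical two-output function standing in for
    the op:

    #include <functional>
    #include <iostream>
    #include <utility>

    // Hypothetical two-output op returning (loss, backprop); illustrative only.
    std::pair<double, double> LossAndBackprop(double logit, double label) {
      double loss = (logit - label) * (logit - label);
      return {loss, 2 * (logit - label)};
    }

    int main() {
      // Single-output wrapper: the checker only sees the loss, and the second
      // output is simply dropped, just as in the test above.
      std::function<double(double, double)> loss_only =
          [](double logit, double label) {
            return LossAndBackprop(logit, label).first;
          };

      std::cout << loss_only(2.0, 1.0) << "\n";  // prints 1
      return 0;
    }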