Results 1 - 5 of 5 for OnesLike (0.21 sec)

  1. tensorflow/c/experimental/gradients/math_grad.cc

        TF_RETURN_IF_ERROR(SafeConj(ctx, X, &temp_output, name.c_str()));
    
        AbstractTensorHandlePtr Conj_X(temp_output);
    
        // Creates Ones
        name = "OnesLike_Log1p_Grad_X";
        TF_RETURN_IF_ERROR(OnesLike(ctx, Conj_X.get(), &temp_output, name.c_str()));
    
        AbstractTensorHandlePtr Ones_X(temp_output);
    
        name = "Add_Log1p_Grad_X";
        // Calculate 1 + Conj(X)
        TF_RETURN_IF_ERROR(
    C++
    - Registered: Tue Mar 26 12:39:09 GMT 2024
    - Last Modified: Wed Feb 28 13:53:47 GMT 2024
    - 15.2K bytes
    - Viewed (0)
  2. tensorflow/c/eager/gradients.cc

      AbstractOperationPtr op(ctx_->CreateOperation());
      TF_RETURN_IF_ERROR(op->Reset("OnesLike", /*raw_device_name=*/nullptr));
      if (isa<tracing::TracingOperation>(op.get())) {
        TF_RETURN_IF_ERROR(dyn_cast<tracing::TracingOperation>(op.get())->SetOpName(
            absl::StrCat("OnesLike", ToId(t.GetHandle())).c_str()));
      }
      TF_RETURN_IF_ERROR(op->AddInput(t.GetHandle()));
      int num_outputs = 1;
    C++
    - Registered: Tue Apr 23 12:39:09 GMT 2024
    - Last Modified: Thu Feb 15 09:49:45 GMT 2024
    - 19.3K bytes
    - Viewed (0)
  3. tensorflow/c/c_api_test.cc

        if (grad_inputs_provided) {
          const float const3_val[] = {1.0, 1.0, 1.0, 1.0};
          const3 = FloatConst2x2(expected_graph_, s_, const3_val, "GradInputs");
        } else {
          const3 = OnesLike(expected_graph_, s_, matmul, "gradients/OnesLike");
        }
    
        TF_Operation* matmul1 = MatMul(expected_graph_, s_, const3, const1,
                                       "gradients/MatMul", false, true);
    C++
    - Registered: Tue Apr 23 12:39:09 GMT 2024
    - Last Modified: Mon Apr 15 03:35:10 GMT 2024
    - 96.9K bytes
    - Viewed (3)
  4. tensorflow/c/c_api.h

    // `dx` are used as initial gradients (which represent the symbolic partial
    // derivatives of some loss function `L` w.r.t. `y`).
    // `dx` must be nullptr or have size `ny`.
    // If `dx` is nullptr, the implementation will use dx of `OnesLike` for all
    // shapes in `y`.
    // The partial derivatives are returned in `dy`. `dy` should be allocated to
    // size `nx`.
    //
    // Gradient nodes are automatically named under the "gradients/" prefix. To
    C
    - Registered: Tue Apr 23 12:39:09 GMT 2024
    - Last Modified: Thu Oct 26 21:08:15 GMT 2023
    - 82.3K bytes
    - Viewed (3)
  5. RELEASE.md

    *   Correctly handle CuDNN RNN weights loaded when nested in `TimeDistributed`.
    *   Adding per-element weight support for `WALSComputePartialLhsAndRhsOp`.
    *   ZerosLike and OnesLike ops treated as constants by Graph Transform Tool.
    *   Gamma distribution and the derived distributions (Beta, Dirichlet, Student's
        t, inverse Gamma) now fully reparameterized.
    Plain Text
    - Registered: Tue Apr 23 12:39:09 GMT 2024
    - Last Modified: Wed Apr 03 20:27:38 GMT 2024
    - 727.4K bytes
    - Viewed (8)
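
A note on result 1: the math_grad.cc excerpt is assembling the denominator of the Log1p gradient, which is why an explicit OnesLike call appears there. The division step falls outside the truncated snippet, so the final form below is inferred from the variable names (Conj_X, Ones_X, Add_Log1p_Grad_X) together with the standard derivative:

    \frac{d}{dX}\,\log(1+X) = \frac{1}{1+X},
    \qquad\text{so}\qquad
    \mathrm{grad}_X = \frac{\mathrm{upstream}}{1+\overline{X}}
    \quad\text{(conjugate for complex-valued } X\text{).}

OnesLike materializes the constant 1 with the same shape and dtype as Conj(X) so it can be fed straight into the following Add.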
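The header comment in result 4 documents the fallback that ties these results together: when no initial gradients are supplied, the C API seeds the backward pass with OnesLike. A minimal sketch of that call, assuming a graph that already contains operations named "loss" and "weights" (both names are hypothetical, chosen only for illustration):

    #include "tensorflow/c/c_api.h"

    // Request d(loss)/d(weights) without supplying initial gradients.
    void AddGradientsSketch(TF_Graph* graph, TF_Status* status) {
      TF_Output y = {TF_GraphOperationByName(graph, "loss"), 0};     // hypothetical op name
      TF_Output x = {TF_GraphOperationByName(graph, "weights"), 0};  // hypothetical op name
      TF_Output dy;  // receives the partial derivative; must have size nx (1 here)

      // dx == nullptr, so the implementation substitutes OnesLike for every
      // shape in y, exactly as the c_api.h comment above describes.
      TF_AddGradients(graph, &y, /*ny=*/1, &x, /*nx=*/1, /*dx=*/nullptr, status, &dy);

      if (TF_GetCode(status) != TF_OK) {
        // Inspect TF_Message(status) and handle the failure.
      }
    }

The gradient nodes created by such a call are the ones placed under the "gradients/" prefix, which is the naming the test in result 3 asserts against ("gradients/OnesLike", "gradients/MatMul").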