Results 1 - 4 of 4 for "origin" (0.29 sec)

  1. tensorflow/c/eager/c_api_experimental.h

      // Method to execute an operation.
      //
      // Arguments provide enough information to reconstruct the original `TFE_Op`,
      // or construct a transformed version, by inspecting the passed `op`.
      //
      // TFE_OpGetDevice(op) records the original placement of the operation. It may
      // be an empty string if no device was explicitly requested, but will
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Wed Feb 21 22:37:46 GMT 2024
    - 39.5K bytes
    - Viewed (0)
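
    The comment above describes the `execute` hook that a custom device registers to run operations. Below is a minimal sketch of such a callback using TFE_OpGetDevice(op) to read the recorded placement; the function name MyDeviceExecute and the placement-handling logic are hypothetical, and the parameter list is assumed to match the `execute` member of TFE_CustomDevice declared in this header.

      #include "tensorflow/c/eager/c_api.h"
      #include "tensorflow/c/eager/c_api_experimental.h"

      /* Hypothetical execute callback for a TFE_CustomDevice. */
      static void MyDeviceExecute(const TFE_Op* op, int* num_outputs,
                                  TFE_TensorHandle** outputs, TF_Status* s,
                                  void* device_info) {
        /* The placement originally requested for the op; may be an empty
         * string if no device was explicitly requested. */
        const char* requested_device = TFE_OpGetDevice(op, s);
        if (TF_GetCode(s) != TF_OK) return;
        if (requested_device[0] == '\0') {
          /* No explicit placement was requested; the custom device may
           * choose one itself. */
        }
        /* ... reconstruct or transform the op, execute it, and populate
         * `outputs` with at most *num_outputs handles ... */
      }
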
  2. tensorflow/c/eager/parallel_device/parallel_device_lib.h

      // its corresponding inputs from the input ParallelTensors. Wraps the
      // resulting per-device and per-output TFE_TensorHandles into one
      // ParallelTensor per output of the original operation.
      //
      // Attributes are forwarded to executed operations unmodified.
      //
      // The returned optional has a value if and only if `status` evaluates to
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Tue Apr 25 15:21:13 GMT 2023
    - 12.9K bytes
    - Viewed (0)
  3. tensorflow/c/c_api_experimental.h

                                                       unsigned char enable);
    
    // Set XLA's internal BuildXlaOpsPassFlags.tf_xla_enable_lazy_compilation to the
    // value of 'enabled'. Also returns the original value of that flag.
    //
    // Use in tests to allow XLA to fallback to TF classic. This has global effect.
    TF_CAPI_EXPORT unsigned char TF_SetXlaEnableLazyCompilation(
        unsigned char enable);
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Thu Apr 27 21:07:00 GMT 2023
    - 15.1K bytes
    - Viewed (0)
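
    Since TF_SetXlaEnableLazyCompilation returns the flag's previous value, a test can flip the global setting and restore it afterwards. A small usage sketch follows; the wrapper function RunWithoutLazyCompilation and the elided test body are hypothetical.

      #include "tensorflow/c/c_api_experimental.h"

      void RunWithoutLazyCompilation(void) {
        /* Disable lazy compilation globally, remembering the old value. */
        unsigned char original = TF_SetXlaEnableLazyCompilation(0);

        /* ... run the code under test; XLA falls back to classic TF ... */

        /* Restore the flag so the global state is unchanged for other tests. */
        TF_SetXlaEnableLazyCompilation(original);
      }
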
  4. tensorflow/c/eager/tape.h

            // operations which supposedly both created the same Tensor. It comes up
            // in recompute_grad, where the gradients have the same value. However,
            // only the original gradient is connected to everything else, so we
            // should still use that.
            vspace_.DeleteGradient(forward_grads[i]);
          } else {
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Tue Apr 02 12:40:29 GMT 2024
    - 47.2K bytes
    - Viewed (1)