Results 1 - 10 of 80 for Move (0.02 sec)
tensorflow/c/eager/parallel_device/parallel_device_lib_test.cc
std::vector<TensorHandlePtr> vector_handles;
vector_handles.reserve(2);
vector_handles.push_back(std::move(two_vector));
vector_handles.push_back(std::move(three_vector));
std::unique_ptr<ParallelTensor> unknown_length_vector =
    ParallelTensor::FromTensorHandles(parallel_device,
                                      std::move(vector_handles), status.get());
ASSERT_TRUE(TF_GetCode(status.get()) == TF_OK) << TF_Message(status.get());
Registered: Tue Nov 05 12:39:12 UTC 2024 - Last Modified: Mon Oct 21 04:14:14 UTC 2024 - 15.6K bytes
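The excerpt above collects move-only handles into a vector with std::move and then moves the whole vector into a factory call. A minimal standalone sketch of the same pattern, using std::unique_ptr<int> in place of TensorFlow's TensorHandlePtr and a hypothetical FromHandles factory (the names Bundle and FromHandles are illustrative, not TensorFlow API):

#include <memory>
#include <utility>
#include <vector>

// Hypothetical factory standing in for ParallelTensor::FromTensorHandles:
// it takes ownership of every handle by accepting the vector by value.
struct Bundle {
  std::vector<std::unique_ptr<int>> handles;
};

Bundle FromHandles(std::vector<std::unique_ptr<int>> handles) {
  return Bundle{std::move(handles)};
}

int main() {
  std::vector<std::unique_ptr<int>> handles;
  handles.reserve(2);
  handles.push_back(std::make_unique<int>(2));  // unique_ptr is move-only
  handles.push_back(std::make_unique<int>(3));
  // Move the whole vector into the factory; `handles` is left empty but valid.
  Bundle bundle = FromHandles(std::move(handles));
  return bundle.handles.size() == 2 ? 0 : 1;
}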
tensorflow/c/eager/parallel_device/parallel_device_lib.cc
for (int j = 0; j < underlying_devices_.size(); ++j) {
  components.push_back(std::move(per_device_output_tensors[j][i]));
}
if (expected_output_shapes[i].IsFullyDefined()) {
  per_device_outputs.push_back(ParallelTensor::FromTensorHandles(
      *this, std::move(components),
      absl::Span<const int64_t>(expected_output_shapes[i].dim_sizes()),
      status));
} else {
Registered: Tue Nov 05 12:39:12 UTC 2024 - Last Modified: Mon Oct 21 04:14:14 UTC 2024 - 25.9K bytes
tensorflow/c/eager/parallel_device/parallel_device.cc
    std::move(maybe_parallel_results.value()));
std::vector<MaybeParallelTensorOwned> result_content;
result_content.reserve(parallel_results.size());
for (std::unique_ptr<ParallelTensor>& parallel_result : parallel_results) {
  result_content.push_back(
      MaybeParallelTensorOwned(std::move(parallel_result)));
}
result.emplace(std::move(result_content));
return result;
}
Registered: Tue Nov 05 12:39:12 UTC 2024 - Last Modified: Mon Oct 21 04:14:14 UTC 2024 - 18.3K bytes
compat/maven-embedder/src/main/java/org/fusesource/jansi/Ansi.java
 * @param x the index (1-based) of the column to move to
 * @return this Ansi instance
 */
public Ansi cursorToColumn(final int x) {
  return appendEscapeSequence('G', Math.max(1, x));
}

/**
 * Moves the cursor up. If the parameter y is negative it moves the cursor down.
 *
 * @param y the number of lines to move up
 * @return this Ansi instance
 */
Registered: Sun Nov 03 03:35:11 UTC 2024 - Last Modified: Fri Oct 25 12:31:46 UTC 2024 - 23.1K bytes
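The jansi methods above wrap standard ANSI/VT100 control sequences: CSI n G moves the cursor to column n (1-based) and CSI n A moves it up n lines. A rough C++ sketch that writes those same escape sequences directly (the helper names here are illustrative and not part of jansi; behavior assumes an ANSI-capable terminal):

#include <cstdio>

// ESC[<x>G: cursor to column x (1-based); ESC[<y>A: cursor up y lines.
void cursor_to_column(int x) { std::printf("\x1b[%dG", x > 1 ? x : 1); }
void cursor_up(int y)        { std::printf("\x1b[%dA", y); }

int main() {
  std::printf("hello world\n");
  cursor_up(1);          // back onto the line just printed
  cursor_to_column(7);   // column 7, i.e. the start of "world"
  std::printf("there\n");
  return 0;
}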
tensorflow/c/checkpoint_reader.cc
Registered: Tue Nov 05 12:39:12 UTC 2024 - Last Modified: Sat Oct 12 16:27:48 UTC 2024 - 5.6K bytes
ci/official/utilities/rename_and_verify_wheels.sh
set -euxo pipefail
cd "$TFCI_OUTPUT_DIR"

# Move extra wheel files somewhere out of the way. This script
# expects just one wheel file to exist.
if [[ "$(ls *.whl | wc -l | tr -d ' ')" != "1" ]]; then
  echo "More than one wheel file is present: moving the oldest to"
  echo "$TFCI_OUTPUT_DIR/extra_wheels."
  # List all .whl files by their modification time (ls -t) and move anything
Registered: Tue Nov 05 12:39:12 UTC 2024 - Last Modified: Wed Oct 02 21:18:17 UTC 2024 - 4.3K bytes
tensorflow/c/c_api_function.cc
  stack_traces[n->name()] = n->GetStackTrace();
}
TF_Function* tf_function = new TF_Function();
tf_function->record = new tensorflow::FunctionRecord(
    std::move(fdef), std::move(stack_traces), false);
return tf_function;
}

TF_Function* TF_GraphToFunction(const TF_Graph* fn_body, const char* fn_name,
                                unsigned char append_hash_to_fn_name,
Registered: Tue Nov 05 12:39:12 UTC 2024 - Last Modified: Sat Oct 12 16:27:48 UTC 2024 - 13.7K bytes
tensorflow/c/eager/parallel_device/parallel_device_lib.h
    : device_(device),
      tensors_(std::move(tensors)),
      shape_(std::vector<int64_t>(shape.begin(), shape.end())),
      dtype_(dtype) {}

ParallelTensor(const ParallelDevice& device,
               std::vector<TensorHandlePtr> tensors, const TF_DataType dtype)
    : device_(device),
      tensors_(std::move(tensors)),
      shape_(absl::nullopt),
      dtype_(dtype) {}
Registered: Tue Nov 05 12:39:12 UTC 2024 - Last Modified: Mon Oct 21 04:14:14 UTC 2024 - 13.1K bytes
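These constructors take the vector of handles by value and move it into the tensors_ member, so an rvalue argument costs one move while an lvalue costs one copy. A minimal illustration of that by-value "sink parameter" idiom, using plain standard-library types rather than TensorFlow's (the Batch class is illustrative only):

#include <cstddef>
#include <string>
#include <utility>
#include <vector>

class Batch {
 public:
  // `names` is a by-value sink parameter; moving it into the member avoids a
  // second copy when the caller passes an rvalue.
  explicit Batch(std::vector<std::string> names) : names_(std::move(names)) {}
  std::size_t size() const { return names_.size(); }

 private:
  std::vector<std::string> names_;
};

int main() {
  std::vector<std::string> v = {"a", "b"};
  Batch copied(v);            // v copied into the parameter, then moved
  Batch moved(std::move(v));  // v moved; it is left empty but valid
  return copied.size() + moved.size() == 4 ? 0 : 1;
}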
tensorflow/c/eager/graph_function.cc
namespace tensorflow {
namespace tracing {
namespace graph {

GraphFunction::GraphFunction(FunctionDef fdef)
    : AbstractFunction(kGraph),
      func_record_(new FunctionRecord(std::move(fdef), {}, true)) {}

GraphFunction::~GraphFunction() {}

absl::Status GraphFunction::GetFunctionDef(const FunctionDef **fdef) {
  *fdef = &(func_record_->fdef());
  return absl::OkStatus();
}

}  // namespace graph
Registered: Tue Nov 05 12:39:12 UTC 2024 - Last Modified: Sat Oct 12 05:11:17 UTC 2024 - 1.3K bytes
ci/official/pycpp.sh
  PROFILE_JSON_PATH="$PROFILE_JSON_PATH/profile.json.gz"
else
  PROFILE_JSON_PATH="$TFCI_OUTPUT_DIR/profile.json.gz"
fi

# TODO(b/361369076) Remove the following block after TF NumPy 1 is dropped
# Move hermetic requirement lock files for NumPy 1 to the root
if [[ "$TFCI_WHL_NUMPY_VERSION" == 1 ]]; then
  cp ./ci/official/requirements_updater/numpy1_requirements/*.txt .
fi

if [[ $TFCI_PYCPP_SWAP_TO_BUILD_ENABLE == 1 ]]; then
Registered: Tue Nov 05 12:39:12 UTC 2024 - Last Modified: Wed Oct 23 18:48:35 UTC 2024 - 1.9K bytes