- Sort: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 83 for debug_info (0.76 sec)
-
tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/debug_info.py
# See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== # RUN: %p/debug_info | FileCheck %s # pylint: disable=missing-docstring,line-too-long import tensorflow.compat.v2 as tf from tensorflow.compiler.mlir.tensorflow.tests.tf_saved_model import common class TestModule(tf.Module):
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Sat Dec 03 00:08:31 UTC 2022 - 1.5K bytes - Viewed (0) -
tensorflow/c/eager/c_api_debug_test.cc
TFE_TensorDebugInfo* debug_info = TFE_TensorHandleTensorDebugInfo(h, status); CHECK_EQ(TF_OK, TF_GetCode(status)) << TF_Message(status); ASSERT_EQ(2, TFE_TensorDebugInfoOnDeviceNumDims(debug_info)); // Shape is the same for CPU tensors. EXPECT_EQ(3, TFE_TensorDebugInfoOnDeviceDim(debug_info, 0)); EXPECT_EQ(2, TFE_TensorDebugInfoOnDeviceDim(debug_info, 1)); TFE_DeleteTensorDebugInfo(debug_info);
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Apr 06 22:10:09 UTC 2020 - 2.3K bytes - Viewed (0) -
tensorflow/c/eager/c_api_debug.cc
TF_CAPI_EXPORT extern void TFE_DeleteTensorDebugInfo( TFE_TensorDebugInfo* debug_info) { delete debug_info; } TF_CAPI_EXPORT extern int TFE_TensorDebugInfoOnDeviceNumDims( TFE_TensorDebugInfo* debug_info) { return debug_info->dev_dims.size(); } TF_CAPI_EXPORT extern int64_t TFE_TensorDebugInfoOnDeviceDim( TFE_TensorDebugInfo* debug_info, int dim_index) { return debug_info->dev_dims[dim_index]; }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Aug 11 01:20:50 UTC 2021 - 2.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/translate/import_model.h
const GraphDebugInfo& debug_info) : meta_graph_def_(meta_graph_def), debug_info_(debug_info) { DCHECK(meta_graph_def); } virtual ~SavedModelMLIRImportInput(); // The original MetaGraphDef of the savedmodel. const MetaGraphDef& meta_graph_def() const { return *meta_graph_def_; } const GraphDebugInfo& debug_info() const { return debug_info_; }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed May 01 11:17:36 UTC 2024 - 6.8K bytes - Viewed (0) -
tensorflow/cc/saved_model/reader.cc
internal::FileExists(Env::Default(), debug_info_pb_path)); if (debug_info_pb_exists) { GraphDebugInfo debug_info; TF_RETURN_IF_ERROR( ReadBinaryProto(Env::Default(), debug_info_pb_path, &debug_info)); *debug_info_proto = std::make_unique<GraphDebugInfo>(std::move(debug_info)); } return absl::OkStatus(); }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Mar 05 00:19:29 UTC 2024 - 6.1K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/BUILD
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Oct 17 20:57:18 UTC 2023 - 6.2K bytes - Viewed (0) -
tensorflow/compiler/aot/tfcompile.bzl
mlir_flags = ["--mlir_components=" + mlir_components] srcs = [tfcompile_graph, config] debug_info_flags = [] if debug_info: srcs.append(debug_info) debug_info_flags = ["--debug_info=$(location " + debug_info + ")"] tfcompile_gen = "gen_" + name _tfcompile_model_library( name = tfcompile_gen, model_name = name, srcs = srcs,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu May 02 19:18:08 UTC 2024 - 21.8K bytes - Viewed (0) -
tensorflow/compiler/mlir/python/mlir.cc
const std::string& pass_pipeline, bool show_debug_info, TF_Status* status) { GraphDebugInfo debug_info; GraphImportConfig specs; return ImportGraphDefImpl(proto, pass_pipeline, show_debug_info, debug_info, specs, status); } std::string ImportGraphDef(const std::string& proto,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 03 18:16:49 UTC 2024 - 19.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/translate/tf_mlir_translate.cc
if (!port::kLittleEndian) TF_RETURN_IF_ERROR(ByteSwapTensorContentInGraphDef(&graphdef)); GraphDebugInfo debug_info; if (!import_options.debug_info_file.empty()) { TF_RETURN_IF_ERROR( LoadProtoFromFile(import_options.debug_info_file, &debug_info)); } GraphImportConfig specs; specs.prune_unused_nodes = import_options.prune_unused_nodes;
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue May 07 11:51:44 UTC 2024 - 14.1K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/python/graphdef_to_tfl_flatbuffer.cc
#include "tsl/platform/statusor.h" namespace tensorflow { absl::Status ConvertGraphDefToTFLiteFlatBuffer( const toco::ModelFlags& model_flags, toco::TocoFlags& toco_flags, const GraphDebugInfo& debug_info, const GraphDef& input, std::string* result) { using ::tflite::optimize::ReducedPrecisionSupport; mlir::MLIRContext context; GraphImportConfig specs; mlir::quant::QuantizationSpecs quant_specs;
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Mar 11 19:29:56 UTC 2024 - 5.1K bytes - Viewed (0)