- Sort: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 14 for save_model_path (0.27 sec)
-
tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
if FLAGS.save_model_path: save_model_path = FLAGS.save_model_path else: save_model_path = tempfile.mkdtemp(suffix='.saved_model') save_options = tf.saved_model.SaveOptions(save_debug_info=show_debug_info) tf.saved_model.save( create_module_fn(), save_model_path, options=save_options ) logging.info('Saved model to: %s', save_model_path)
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Mar 02 23:49:27 UTC 2023 - 4K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common_v1.py
if FLAGS.save_model_path: save_model_path = FLAGS.save_model_path else: save_model_path = tempfile.mktemp(suffix='.saved_model') signature_def_map, init_op, assets_collection = create_signature() sess = tf.Session() sess.run(tf.initializers.global_variables()) builder = tf.saved_model.builder.SavedModelBuilder(save_model_path) builder.add_meta_graph_and_variables(
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Mar 20 13:19:26 UTC 2023 - 5.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/README.md
``` bazel run :foo.py.test ``` Run just the Python file and look at the output: ``` bazel run :foo ``` Generate saved model to inspect proto: ``` bazel run :foo -- --save_model_path=/tmp/my.saved_model # Inspect /tmp/my.saved_model/saved_model.pb ``` ## Rationale for Python-based tests For a SavedModel importer, the natural place to start is to feed in the
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Oct 02 03:37:19 UTC 2019 - 1.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/python/mlir.h
// // Args: // saved_model_path: File path from which to load the SavedModel. // exported_names_str: Comma-separated list of names to export. // Empty means "export all". // // Returns: // A string of textual MLIR representing the raw imported SavedModel. std::string ExperimentalConvertSavedModelToMlir( const std::string &saved_model_path, const std::string &exported_names_str,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri Apr 14 23:44:01 UTC 2023 - 5.6K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/cc/saved_model_import.h
absl::string_view saved_model_path, const std::unordered_set<std::string>& tags, const std::vector<std::string>& signature_keys, MLIRContext& ctx ABSL_ATTRIBUTE_LIFETIME_BOUND); // Gets the function aliases from the SavedModel. absl::StatusOr<absl::flat_hash_map<FunctionName, FunctionAlias>> GetFunctionAliases(absl::string_view saved_model_path,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Apr 24 12:49:45 UTC 2024 - 4.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/python/quantize_model.h
absl::StatusOr<ExportedModel> QuantizeQatModel( absl::string_view saved_model_path, const std::vector<std::string>& signature_keys, const std::unordered_set<std::string>& tags, const QuantizationOptions& quantization_options); // Applies post-training dynamic-range quantization to the model. absl::StatusOr<ExportedModel> QuantizeDynamicRangePtq( absl::string_view saved_model_path, const std::vector<std::string>& signature_keys,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Mar 28 15:31:08 UTC 2024 - 3.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/cc/saved_model_import.cc
} absl::StatusOr<absl::flat_hash_map<FunctionName, FunctionAlias>> GetFunctionAliases(absl::string_view saved_model_path, const std::unordered_set<std::string>& tags) { tensorflow::MetaGraphDef meta_graph; TF_RETURN_IF_ERROR(tensorflow::ReadMetaGraphDefFromSavedModel( saved_model_path, tags, &meta_graph)); absl::flat_hash_map<FunctionName, FunctionAlias> function_aliases(
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Apr 24 12:49:45 UTC 2024 - 6.4K bytes - Viewed (0) -
tensorflow/cc/saved_model/experimental/public/saved_model_api.h
}; inline std::unique_ptr<SavedModelAPI> SavedModelAPI::Load( const std::string& saved_model_path, const Runtime& runtime, Status* status, const std::unordered_set<std::string>* tags) { TF_SavedModel* saved_model = nullptr; if (tags == nullptr) { saved_model = TF_LoadSavedModel(saved_model_path.c_str(), runtime.GetTFEContext(), status->GetTFStatus()); } else {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Nov 04 00:45:47 UTC 2020 - 6.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/tests/debuginfo/saved_model_error.py
# saved the model test_model = TestModule() saved_model_path = '/tmp/test.saved_model' save_options = tf.saved_model.SaveOptions(save_debug_info=True) tf.saved_model.save(test_model, saved_model_path, options=save_options) # load the model and convert converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_path) converter.convert() # pylint: disable=line-too-long
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Sep 28 21:37:05 UTC 2021 - 2.8K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/python/pywrap_function_lib.cc
exported_model, src_saved_model_path, tags, signature_def_map); } std::optional<bool> RunCalibration( const absl::string_view saved_model_path, const std::vector<std::string>& signature_keys, const std::unordered_set<std::string>& tags, const bool force_graph_mode_calibration,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Apr 09 06:33:29 UTC 2024 - 5K bytes - Viewed (0)