- Sort: Score
- Results per page: 10
- Languages: All
Results 1 - 9 of 9 for SavedModelBundle (0.33 sec)
-
tensorflow/compiler/mlir/tensorflow/translate/tf_mlir_translate.cc
MLIRImportOptions options, std::unique_ptr<tensorflow::SavedModelBundle>* saved_model_bundle) { // Create local bundle if no one is provided to use. std::unique_ptr<tensorflow::SavedModelBundle> bundle; if (saved_model_bundle == nullptr) { bundle = std::make_unique<tensorflow::SavedModelBundle>(); } else if (*saved_model_bundle == nullptr) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue May 07 11:51:44 UTC 2024 - 14.1K bytes - Viewed (0) -
tensorflow/cc/saved_model/loader.cc
const RunOptions& run_options, const string& export_dir, const std::unordered_set<string>& tags, SavedModelBundle* const bundle) { return LoadSavedModelGeneric<SavedModelBundle>(session_options, run_options, export_dir, tags, bundle); } Status RestoreSession(const RunOptions& run_options,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Apr 02 04:36:00 UTC 2024 - 23K bytes - Viewed (0) -
tensorflow/compiler/mlir/python/mlir.cc
bool include_variables_in_initializers, bool upgrade_legacy, bool show_debug_info, TF_Status* status) { // Load the saved model into a SavedModelBundle. std::unordered_set<string> tag_set = absl::StrSplit(tags, ',', absl::SkipEmpty()); tensorflow::SavedModelBundle bundle; auto load_status = tensorflow::LoadSavedModel({}, {}, saved_model_path, tag_set, &bundle); if (!load_status.ok()) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 03 18:16:49 UTC 2024 - 19.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/stablehlo/odml_to_stablehlo.cc
namespace odml { absl::StatusOr<OwningOpRef<mlir::ModuleOp>> ImportSavedModelOrMLIR( const std::string& input_path, MLIRContext* context, llvm::SourceMgr* source_mgr, std::unique_ptr<tensorflow::SavedModelBundle>* saved_model_bundle) { if (absl::EndsWith(input_path, ".mlir")) { auto file_or_err = llvm::MemoryBuffer::getFileOrSTDIN(input_path.c_str()); if (std::error_code error = file_or_err.getError()) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 03 18:16:49 UTC 2024 - 14.1K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/tf_to_tfl_flatbuffer.cc
const absl::string_view saved_model_dir, const std::unordered_set<std::string>& saved_model_tags, QuantizationConfig* quantization_config, const PyFunctionLibrary* quantization_py_function_lib, const SavedModelBundle* saved_model_bundle, mlir::PassManager& pass_manager, mlir::StatusScopedDiagnosticHandler& status_handler, ModuleOp& module) { // TODO: b/194747383 - We need to valid that indeed the "main" func is // presented.
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 03 18:01:23 UTC 2024 - 23.8K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/python/quantize_model.cc
absl::flat_hash_map<std::string, std::string> &function_aliases) { // Convert the SavedModelBundle to an MLIR module. MLIRImportOptions import_options; import_options.upgrade_legacy = true; import_options.lift_variables = false; import_options.include_variables_in_initializers = true; auto bundle = std::make_unique<SavedModelBundle>(); // TODO: b/213406917 - Add support for the object graph based saved model.
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 17 03:36:50 UTC 2024 - 23.8K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/python/saved_model_to_tfl_flatbuffer.cc
std::vector<std::string> custom_opdefs(toco_flags.custom_opdefs().begin(), toco_flags.custom_opdefs().end()); auto bundle = std::make_unique<tensorflow::SavedModelBundle>(); TF_ASSIGN_OR_RETURN( auto module, ImportSavedModel( model_flags.saved_model_dir(), model_flags.saved_model_version(), tags, absl::MakeSpan(custom_opdefs), exported_names, specs,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Sun May 12 12:39:37 UTC 2024 - 11K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/tf_tfl_translate.cc
llvm::errs() << "You must specify `emit-select-tf-ops=true` when passing " "`select-user-tf-ops` flag."; return kTrFailure; } std::unique_ptr<tensorflow::SavedModelBundle> bundle; // TODO(b/147435528): We need to test the e2e behavior once the graph freezing // inside mlir is done. if ((import_saved_model_object_graph || import_saved_model_signature_defs) &&
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 03 18:01:23 UTC 2024 - 14K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/python/tf_tfl_flatbuffer_helpers.cc
mlir::OwningOpRef<mlir::ModuleOp> module, const mlir::TFL::PassConfig& pass_config, const std::unordered_set<std::string>& saved_model_tags, std::string* result, SavedModelBundle* saved_model_bundle, const PyFunctionLibrary* quantization_py_function_lib) { if (toco_flags.has_dump_graphviz_dir()) { TF_RETURN_IF_ERROR(DumpOpGraphToFile( module.get(),
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Sun May 12 12:39:37 UTC 2024 - 17.3K bytes - Viewed (0)