Results 1 - 10 of 10 for FlatBufferToMlir (0.47 sec)

  1. tensorflow/compiler/mlir/lite/python/_pywrap_converter_api.pyi

    def ExperimentalMlirSparsifyModel(input_contents_txt_raw: object) -> object: ...
    def FlatBufferToMlir(arg0: str, arg1: bool) -> str: ...
    def RegisterCustomOpdefs(custom_opdefs_txt_raw: object) -> object: ...
  2. tensorflow/compiler/mlir/lite/python/wrap_converter.py

      return _pywrap_converter_api.RetrieveCollectedErrors()
    
    
    def wrapped_flat_buffer_file_to_mlir(model, input_is_filepath):
      """Wraps FlatBufferFileToMlir with lazy loader."""
  3. tensorflow/compiler/mlir/lite/python/flatbuffer_to_mlir.cc

      std::string error;
      auto loc =
          mlir::FileLineColLoc::get(context, input->getBufferIdentifier(), 0, 0);
      std::vector<std::string> inputs;
      std::vector<std::string> outputs;
      return tflite::FlatBufferToMlir(
          absl::string_view(input->getBufferStart(), input->getBufferSize()),
          context, loc, false, inputs, outputs, false);
    }
    
    }  // namespace
    
  4. tensorflow/compiler/mlir/lite/experimental/tac/utils/utils.cc

          mlir::FileLineColLoc::get(context, input_filename, /*line=*/0,
                                    /*column=*/0);
      std::vector<std::string> inputs;
      std::vector<std::string> outputs;
      return tflite::FlatBufferToMlir(
          absl::string_view(buffer->getBufferStart(), buffer->getBufferSize()),
          context, loc, /*use_external_constant=*/false, inputs, outputs,
          experimental_prune_unreachable_nodes_unconditionally);
    }
    
  5. tensorflow/compiler/mlir/lite/sparsity/sparsify_model.cc

      std::string serialized_model(
          reinterpret_cast<const char*>(input_builder.GetBufferPointer()),
          input_builder.GetSize());
    
      OwningOpRef<mlir::ModuleOp> module = tflite::FlatBufferToMlir(
          serialized_model, &context, UnknownLoc::get(&context));
      if (!module) {
        LOG(ERROR) << "Couldn't import flatbuffer to MLIR.";
        return absl::InternalError("Couldn't import flatbuffer to MLIR.");
      }
    
  6. tensorflow/compiler/mlir/lite/python/converter_python_api_wrapper.cc

            }
            return serialized_message_list;
          },
          R"pbdoc(
          Returns and clears the list of collected errors in ErrorCollector.
        )pbdoc");
      m.def(
          "FlatBufferToMlir",
          [](const std::string& model, bool input_is_filepath) {
            return tflite::FlatBufferFileToMlir(model, input_is_filepath);
          },
          R"pbdoc(
          Returns MLIR dump of the given TFLite model.
  7. tensorflow/compiler/mlir/lite/quantization/lite/quantize_model.cc

      MLIRContext context(registry);
      StatusScopedDiagnosticHandler statusHandler(&context,
                                                  /*propagate=*/true);
    
      OwningOpRef<mlir::ModuleOp> module = tflite::FlatBufferToMlir(
          model_buffer, &context, UnknownLoc::get(&context));
      if (!module) {
        LOG(ERROR) << "Couldn't import flatbuffer to MLIR.";
        return kTfLiteError;
      }
    
      // Apply quantization passes.
  8. tensorflow/compiler/mlir/lite/quantization/lite/quantize_weights.cc

      std::string serialized_model(
          reinterpret_cast<const char*>(input_builder.GetBufferPointer()),
          input_builder.GetSize());
    
      OwningOpRef<mlir::ModuleOp> module = tflite::FlatBufferToMlir(
          serialized_model, &context, UnknownLoc::get(&context));
    
      // Apply quantization passes.
      PassManager pm((*module)->getName(), OpPassManager::Nesting::Implicit);
      quant::QuantizationSpecs quant_specs;
  9. tensorflow/compiler/mlir/python/mlir.cc

                       ("Unable to load input file " + error).c_str());
          return;
        }
    
        auto buffer_view =
            std::string_view(buffer->getBufferStart(), buffer->getBufferSize());
        module = tflite::FlatBufferToMlir(
            buffer_view, &context, loc, use_external_constant, ordered_input_arrays,
            ordered_output_arrays);
        mlir::PassManager pm(&context, module.get()->getName().getStringRef(),
  10. tensorflow/compiler/mlir/lite/flatbuffer_import.cc

        to_delete_funcs.push_back(cond);
      });
      for (auto& func : to_delete_funcs) {
        func.erase();
      }
    }
    }  // namespace
    
    OwningOpRef<mlir::ModuleOp> tflite::FlatBufferToMlir(
        absl::string_view buffer, MLIRContext* context, Location base_loc,
        bool use_external_constant,
        const std::vector<std::string>& ordered_input_arrays,
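
Taken together, the hits above show one calling pattern for tflite::FlatBufferToMlir (definition in result 10, call sites in results 3, 4, and 9): read the FlatBuffer into a memory buffer, create an MLIRContext and a Location, then hand the raw bytes to the importer. The sketch below reproduces that pattern; the helper name ImportTfliteFile, the header path, and the error handling are illustrative assumptions, not taken from the results.

    // Minimal sketch of the calling pattern seen in the results above.
    // Assumptions: the declaration lives in flatbuffer_import.h (result 10 is
    // flatbuffer_import.cc), and failure is signaled by returning a null ref.
    #include <memory>
    #include <string>
    #include <vector>

    #include "absl/strings/string_view.h"
    #include "llvm/Support/MemoryBuffer.h"
    #include "mlir/IR/BuiltinOps.h"
    #include "mlir/IR/Location.h"
    #include "mlir/IR/MLIRContext.h"
    #include "mlir/IR/OwningOpRef.h"
    #include "tensorflow/compiler/mlir/lite/flatbuffer_import.h"  // assumed header

    mlir::OwningOpRef<mlir::ModuleOp> ImportTfliteFile(const std::string& path,
                                                       mlir::MLIRContext* context) {
      // Load the .tflite file into a memory buffer, as in results 3 and 9.
      auto file_or_err = llvm::MemoryBuffer::getFile(path);
      if (!file_or_err) return nullptr;
      std::unique_ptr<llvm::MemoryBuffer> buffer = std::move(*file_or_err);

      // Anchor imported ops to the file name, as in results 3 and 4.
      auto loc = mlir::FileLineColLoc::get(context, path, /*line=*/0, /*column=*/0);
      std::vector<std::string> inputs;   // empty: keep the model's own inputs
      std::vector<std::string> outputs;  // empty: keep the model's own outputs
      return tflite::FlatBufferToMlir(
          absl::string_view(buffer->getBufferStart(), buffer->getBufferSize()),
          context, loc, /*use_external_constant=*/false, inputs, outputs,
          /*experimental_prune_unreachable_nodes_unconditionally=*/false);
    }

Results 5, 7, and 8 call the same function with only the serialized buffer, a context pointer, and UnknownLoc::get(&context), which suggests the trailing parameters have defaults; result 9 likewise omits the final pruning flag.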
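Results 1, 2, and 6 expose a file-path variant to Python: the FlatBufferToMlir binding forwards to tflite::FlatBufferFileToMlir(model, input_is_filepath) and returns the textual MLIR dump. The C++ sketch below calls that helper directly; the header path and the std::string return type are inferred from the pybind11 lambda in result 6 and the (str, bool) -> str stub in result 1, so treat both as assumptions.

    // Sketch only: signature inferred from the pybind11 lambda in result 6.
    #include <iostream>
    #include <string>

    // Assumed header; result 3 places the implementation in
    // tensorflow/compiler/mlir/lite/python/flatbuffer_to_mlir.cc.
    #include "tensorflow/compiler/mlir/lite/python/flatbuffer_to_mlir.h"

    int main() {
      // Pass a file path and let the converter read it, matching the
      // input_is_filepath=True mode of the Python binding.
      std::string mlir_text =
          tflite::FlatBufferFileToMlir("/tmp/model.tflite",
                                       /*input_is_filepath=*/true);
      std::cout << mlir_text << std::endl;
      return 0;
    }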