- Sort: Score
- Results per page: 10
- Languages: All
Results 101 - 110 of 435 for ModuleOp (0.14 sec)
-
tensorflow/compiler/mlir/tensorflow/transforms/host_runtime/tpu_metadata_utils_test.cc
mlir::parseSourceFile<mlir::ModuleOp>(mlir_module_path, &context_); if (!mlir_module_) { return absl::Status( absl::StatusCode::kNotFound, absl::StrCat("Could not find MLIR module at ", mlir_module_path)); } return absl::OkStatus(); } DialectRegistry registry_; MLIRContext context_; OwningOpRef<mlir::ModuleOp> mlir_module_; };
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Dec 12 04:22:33 UTC 2023 - 6.1K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/stablehlo/odml_converter/transforms/shlo_simplify.cc
void runOnOperation() override { ModuleOp module = getOperation(); RewritePatternSet patterns(&getContext()); populateWithGenerated(patterns); PopulateFolderPatterns(patterns); if (failed(applyPatternsAndFoldGreedily(module, std::move(patterns)))) { signalPassFailure(); } } }; } // namespace std::unique_ptr<OperationPass<ModuleOp>> CreateSHLOSimplifyPass() {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 10 03:05:20 UTC 2024 - 2.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow_to_stablehlo/python/pywrap_tensorflow_to_stablehlo_lib.cc
absl::StatusOr<std::string> ModuleToBytecode(ModuleOp module) { std::string bytecode; llvm::raw_string_ostream os(bytecode); mlir::BytecodeWriterConfig config; if (mlir::failed(mlir::writeBytecodeToFile(module, os, config))) { return absl::InvalidArgumentError("mlir::writeBytecodeToFile failed"); } return bytecode; } absl::StatusOr<std::string> ExportModule(ModuleOp module) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue May 21 22:58:42 UTC 2024 - 5K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/passes/remove_var_init_by_const.cc
// instead). class RemoveVariableInitializationByConstPass : public PassWrapper<RemoveVariableInitializationByConstPass, OperationPass<ModuleOp>> { public: MLIR_DEFINE_EXPLICIT_INTERNAL_INLINE_TYPE_ID( RemoveVariableInitializationByConstPass) StringRef getArgument() const final { return "quant-remove-var-init-by-const"; }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri Feb 03 12:04:03 UTC 2023 - 4.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/flatbuffer_translate.h
// convert location of the op to name in flatbuffer. Returns true if translation // fails, otherwise returns false. bool MlirToFlatBufferTranslateFunction(mlir::ModuleOp module, std::string* serialized_flatbuffer, bool emit_builtin_tflite_ops, bool emit_select_tf_ops,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Mar 22 14:25:57 UTC 2022 - 2K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/cc/pre_calibration.cc
using ::tensorflow::quantization::RunPasses; PreCalibrationComponent::PreCalibrationComponent( absl::Nonnull<MLIRContext*> ctx) : ctx_(ABSL_DIE_IF_NULL(ctx)) {} // Crash OK absl::StatusOr<ModuleOp> PreCalibrationComponent::Run( ModuleOp module_op, const QuantizationConfig& config) { TF_RETURN_IF_ERROR(RunPasses( kName, /*add_passes_func=*/ [&config](PassManager& pm) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Apr 17 09:28:53 UTC 2024 - 2K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/experimental/tac/utils/utils.h
#include "mlir/Parser/Parser.h" // from @llvm-project namespace mlir { namespace TFL { namespace tac { // Import the file as mlir module, the input maybe flatbuffer or mlir file. absl::StatusOr<mlir::OwningOpRef<mlir::ModuleOp>> ImportFlatbufferOrMlir( const std::string& input_filename, bool input_mlir, bool experimental_prune_unreachable_nodes_unconditionally, llvm::SourceMgr* source_mgr, mlir::MLIRContext* context);
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Jun 03 03:47:03 UTC 2024 - 1.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/experimental/tac/transforms/get_alternative_subgraph.cc
// maybe tensor layout transformation, device specific fusion, etc. class AlternativeSubgraphPass : public mlir::PassWrapper<AlternativeSubgraphPass, mlir::OperationPass<ModuleOp>> { public: MLIR_DEFINE_EXPLICIT_INTERNAL_INLINE_TYPE_ID(AlternativeSubgraphPass) llvm::StringRef getArgument() const final { return "tfl-get-alternative-subgraph"; }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Jun 06 03:08:33 UTC 2023 - 12.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/passes/preprocess_op.cc
weight_op->getLoc(), new_shape, weight_op->getResult(0), new_shape_const); op->setOperand(weight_operand_idx, reshape_op); // Create a new function with preprocessed types. ModuleOp module = op->getParentOfType<ModuleOp>(); SymbolTable symbol_table(module); func::FuncOp float_func = dyn_cast<func::FuncOp>(symbol_table.lookup(function_name)); OperandRange func_args = op.getArgs();
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 11.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/cc/constant_fold_test.cc
%mul = "tf.Mul"(%cast, %scale) : (tensor<1024x24x24x3xf32>, tensor<f32>) -> tensor<1024x24x24x3xf32> func.return %mul : tensor<1024x24x24x3xf32> } } )mlir"; OwningOpRef<ModuleOp> module_op_ref = ParseModuleOpString(kModuleCode); const auto test_func = module_op_ref->lookupSymbol<func::FuncOp>("test_fold_constant"); ASSERT_THAT(test_func, NotNull());
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 04 07:19:09 UTC 2024 - 10.1K bytes - Viewed (0)