- Sort Score
- Results per page: 10
- Languages All
Results 71 - 80 of 295 for ModuleOp (0.3 sec)
-
tensorflow/compiler/mlir/tensorflow/utils/tf_xla_mlir_translate.cc
mlir::OwningOpRef<mlir::ModuleOp> module_ref; auto status = DeserializeMlirModule(str_attr.getValue().str(), context, &module_ref); if (!status.ok()) { LOG(ERROR) << status; return nullptr; } return module_ref; } static mlir::LogicalResult MlirModuleToSerializedMlirStringAttrTranslate( mlir::ModuleOp module_op, llvm::raw_ostream& output) { output << "\"";
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 18.8K bytes - Viewed (0) -
tensorflow/compiler/mlir/tf2xla/transforms/test_utils.h
#include "tsl/platform/statusor.h" namespace mlir { namespace mhlo { namespace test { // Given a raw string, return a ModuleOp that can be used with the given // MLIRContext. absl::StatusOr<OwningOpRef<ModuleOp>> GetMlirModuleFromString( absl::string_view module_string, MLIRContext* mlir_context); } // namespace test } // namespace mhlo } // namespace mlir
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu May 02 09:16:07 UTC 2024 - 1.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/tf2xla/internal/mlir_bridge_pass_util.h
// _xla_compile_device_type=TPU. bool IsSupportedByReplicatedBridge(mlir::ModuleOp module); // Check if an MLIR module contains TPUPartitionedCall op. If so, we define // such graph as an inference graph. Otherwise, it is non inference graph. bool HasTPUPartitionedCallOpInModule(mlir::ModuleOp module); // Check if a graph contains TPUPartitionedCall op, including its reachable
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Mar 13 16:33:22 UTC 2024 - 2.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/experimental/tac/transforms/tac_filter.cc
TacFilter; using ::third_party::tensorflow::compiler::mlir::lite::experimental::tac:: TacFilters; class TacFilterPass : public PassWrapper<TacFilterPass, OperationPass<ModuleOp>> { public: MLIR_DEFINE_EXPLICIT_INTERNAL_INLINE_TYPE_ID(TacFilterPass) TacFilterPass() = default; TacFilterPass(const TacFilterPass& other) { this->tac_filters_ = other.tac_filters_; }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 8.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/stablehlo/odml_converter/transforms/shlo_simplify.cc
void runOnOperation() override { ModuleOp module = getOperation(); RewritePatternSet patterns(&getContext()); populateWithGenerated(patterns); PopulateFolderPatterns(patterns); if (failed(applyPatternsAndFoldGreedily(module, std::move(patterns)))) { signalPassFailure(); } } }; } // namespace std::unique_ptr<OperationPass<ModuleOp>> CreateSHLOSimplifyPass() {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 10 03:05:20 UTC 2024 - 2.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow_to_stablehlo/python/pywrap_tensorflow_to_stablehlo_lib.cc
absl::StatusOr<std::string> ModuleToBytecode(ModuleOp module) { std::string bytecode; llvm::raw_string_ostream os(bytecode); mlir::BytecodeWriterConfig config; if (mlir::failed(mlir::writeBytecodeToFile(module, os, config))) { return absl::InvalidArgumentError("mlir::writeBytecodeToFile failed"); } return bytecode; } absl::StatusOr<std::string> ExportModule(ModuleOp module) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue May 21 22:58:42 UTC 2024 - 5K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/cc/pre_calibration.cc
using ::tensorflow::quantization::RunPasses; PreCalibrationComponent::PreCalibrationComponent( absl::Nonnull<MLIRContext*> ctx) : ctx_(ABSL_DIE_IF_NULL(ctx)) {} // Crash OK absl::StatusOr<ModuleOp> PreCalibrationComponent::Run( ModuleOp module_op, const QuantizationConfig& config) { TF_RETURN_IF_ERROR(RunPasses( kName, /*add_passes_func=*/ [&config](PassManager& pm) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Apr 17 09:28:53 UTC 2024 - 2K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/experimental/tac/utils/utils.h
#include "mlir/Parser/Parser.h" // from @llvm-project namespace mlir { namespace TFL { namespace tac { // Import the file as mlir module, the input maybe flatbuffer or mlir file. absl::StatusOr<mlir::OwningOpRef<mlir::ModuleOp>> ImportFlatbufferOrMlir( const std::string& input_filename, bool input_mlir, bool experimental_prune_unreachable_nodes_unconditionally, llvm::SourceMgr* source_mgr, mlir::MLIRContext* context);
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Jun 03 03:47:03 UTC 2024 - 1.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/passes/preprocess_op.cc
weight_op->getLoc(), new_shape, weight_op->getResult(0), new_shape_const); op->setOperand(weight_operand_idx, reshape_op); // Create a new function with preprocessed types. ModuleOp module = op->getParentOfType<ModuleOp>(); SymbolTable symbol_table(module); func::FuncOp float_func = dyn_cast<func::FuncOp>(symbol_table.lookup(function_name)); OperandRange func_args = op.getArgs();
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 11.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/cc/constant_fold_test.cc
%mul = "tf.Mul"(%cast, %scale) : (tensor<1024x24x24x3xf32>, tensor<f32>) -> tensor<1024x24x24x3xf32> func.return %mul : tensor<1024x24x24x3xf32> } } )mlir"; OwningOpRef<ModuleOp> module_op_ref = ParseModuleOpString(kModuleCode); const auto test_func = module_op_ref->lookupSymbol<func::FuncOp>("test_fold_constant"); ASSERT_THAT(test_func, NotNull());
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 04 07:19:09 UTC 2024 - 10.1K bytes - Viewed (0)