- Sort Score
- Result 10 results
- Languages All
Results 1 - 3 of 3 for HasTPUPartitionedCallOpInModule (0.25 sec)
-
tensorflow/compiler/mlir/tf2xla/internal/mlir_bridge_pass_util_test.cc
mlir::OwningOpRef<mlir::ModuleOp> module = mlir::parseSourceString<mlir::ModuleOp>(code, &context); ASSERT_TRUE(module); EXPECT_TRUE(HasTPUPartitionedCallOpInModule(*module)); } TEST(HasTPUPartitionedCallOpInModule, HasNotTPUPartitionedCallModule) { const char* const code = R"mlir( module attributes {tf.versions = {bad_consumers = [], min_consumer = 0 : i32, producer = 268 : i32}} {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Apr 17 19:51:50 UTC 2024 - 10.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/tf2xla/internal/mlir_bridge_pass_util.h
bool IsSupportedByReplicatedBridge(mlir::ModuleOp module); // Check if an MLIR module contains TPUPartitionedCall op. If so, we define // such a graph as an inference graph. Otherwise, it is a non-inference graph. bool HasTPUPartitionedCallOpInModule(mlir::ModuleOp module); // Check if a graph contains TPUPartitionedCall op, including its reachable // functions. The function library is used to store the functions that are // defined in a TensorFlow program
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Mar 13 16:33:22 UTC 2024 - 2.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/tf2xla/internal/mlir_bridge_pass_util.cc
return IsReplicatedGraph(graph, function_library); } bool IsSupportedByReplicatedBridge(mlir::ModuleOp module) { return IsReplicatedGraph(module); } bool HasTPUPartitionedCallOpInModule(mlir::ModuleOp module) { bool has_tpu_partitioned_call = false; for (auto func_op : module.getOps<mlir::func::FuncOp>()) { func_op->walk([&](mlir::TF::TPUPartitionedCallOp op) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue May 07 12:22:33 UTC 2024 - 8.9K bytes - Viewed (0)