- Sort by: Score
- Results per page: 10
- Languages: All
Results 1–10 of 28 for "getFunctionType" (0.73 sec)
-
tensorflow/compiler/mlir/lite/utils/nms_utils.cc
Value max_output_size = func_.getArgument(2); Value iou_threshold = func_.getArgument(3); Value score_threshold = func_.getArgument(4); auto output_type0 = func_.getFunctionType().getResult(0); auto output_type1 = func_.getFunctionType().getResult(1); OpBuilder builder(func_.getBody()); auto op = builder.create<mlir::TFL::NonMaxSuppressionV4Op>( func_.getLoc(), output_type0, output_type1, boxes, scores,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 8.1K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/utils/perception_ops_utils.cc
<< func_.getNumArguments(); } if (func_.getFunctionType().getNumResults() != 1) { return func_.emitWarning() << "Invalid number of results from " << kImageWarping << ": " << func_.getFunctionType().getNumResults(); } // Check types and shapes. auto image_type = mlir::dyn_cast_or_null<RankedTensorType>( func_.getFunctionType().getInput(0));
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 17 17:58:54 UTC 2024 - 8.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/ir/tf_remaining_ops.cc
if (op->getNumOperands() != func.getFunctionType().getNumInputs()) return op.emitError() << "'host_func' has " << func.getFunctionType().getNumInputs() << " inputs and '_XlaHostComputeMlir' has " << op->getNumOperands() << " operands. Number of operands/inputs should be the same."; if (op->getNumResults() != func.getFunctionType().getNumResults())
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri Apr 14 20:05:58 UTC 2023 - 7.6K bytes - Viewed (0) -
tensorflow/compiler/mlir/python/mlir_wrapper/builders.cc
void init_builders(py::module& m) { py::class_<mlir::Builder>(m, "Builder") .def(py::init<mlir::MLIRContext*>()) .def("getFunctionType", [](mlir::Builder& b, std::vector<mlir::Type> inputs, std::vector<mlir::Type> outputs) { return b.getFunctionType(llvm::ArrayRef<mlir::Type>(inputs), llvm::ArrayRef<mlir::Type>(outputs)); });
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Mar 28 08:44:55 UTC 2022 - 2.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/transforms/materialize_mlir_passthrough_op.cc
return; } if (main.getFunctionType().getNumResults() != op->getNumResults()) { op->emitError() << "mismatch between MLIR Opaque Op number of results (" << op->getNumResults() << ") and main() entry point in the module (" << main.getFunctionType().getNumResults() << " results)\n"; return; }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Nov 03 12:35:38 UTC 2022 - 4.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/transforms/tf_functional_to_executor.cc
OpBuilder builder(&body, body.begin()); auto graph_op = builder.create<tf_executor::GraphOp>( loc, func.getFunctionType().getResults()); graph_op.getBody().push_back(new Block); builder.setInsertionPointToEnd(&graph_op.GetBody()); auto island = builder.create<tf_executor::IslandOp>( loc, func.getFunctionType().getResults(), tf_executor::ControlType::get(&getContext()), ArrayRef<Value>()); // Create Fetch.
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri Aug 11 20:52:36 UTC 2023 - 4.1K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/transforms/cluster_outlining.cc
llvm::SmallVector<Type, 4> operand_types; operand_types.reserve(live_ins.size()); for (Value v : live_ins) operand_types.emplace_back(v.getType()); auto func_type = builder->getFunctionType(operand_types, op.getResultTypes()); std::string func_name; if (auto outlined_func_name = op->template getAttrOfType<StringAttr>( TF::kClusterOutlinedFunctionNameAttr)) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Apr 30 21:25:12 UTC 2024 - 7.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/tf2xla/internal/utils/test_metadata_config.cc
auto main_fn = module.lookupSymbol<mlir::func::FuncOp>(kEntryFuncName); if (!main_fn) { return absl::InternalError("Could not find main function in MLIR Module."); } mlir::FunctionType func_type = main_fn.getFunctionType(); for (auto input_type : func_type.getInputs()) { tensorflow::TensorShape tensor_shape; xla::Shape xla_shape = xla::TypeToShape(input_type); TF_RETURN_IF_ERROR(tensorflow::TensorShape::BuildTensorShape(
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Jun 13 23:59:33 UTC 2024 - 3.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/mlir_tflite_runner.cc
if (!module) return 1; // TODO(jpienaar): Expand to support inputs. mlir::func::FuncOp main = module->lookupSymbol<mlir::func::FuncOp>("main"); QCHECK(main) << "No 'main' function specified."; if (main.getFunctionType().getNumInputs() != 0) LOG(QFATAL) << "NYI: Only nullary functions supported."; // Convert to flatbuffer. std::string serialized_flatbuffer; tflite::FlatbufferExportOptions options;
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Sat Jun 03 00:14:05 UTC 2023 - 6.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/transforms/group_by_dialect.cc
builder.setInsertionPointToEnd(first_block); auto func = builder.create<mlir::func::FuncOp>( ops[0]->getLoc(), dialect.str() + std::to_string(function_id), builder.getFunctionType(input_types, output_types)); func->setAttr("dialect", builder.getStringAttr(dialect)); auto block = func.addEntryBlock(); llvm::DenseSet<Operation*> all_operations; for (Operation* outer : ops) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri Mar 17 07:31:01 UTC 2023 - 8K bytes - Viewed (0)