- Sort Score
- Results per page: 10
- Languages All
Results 11 - 20 of 145 for registry_ (0.15 sec)
-
tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc
<< ", FallbackEnabled: " << num_passes_fallback_enabled << ", Total: " << registry_->passes().size(); } GraphDebugInfo debug_info; mlir::DialectRegistry registry; RegisterDialects(registry); mlir::MLIRContext context(registry); GraphImportConfig import_config; import_config.graph_as_function = true; import_config.control_outputs = *control_ret_node_names;
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Jun 12 22:19:26 UTC 2024 - 18.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/transforms/host_runtime/lower_cluster_to_runtime_ops_test.cc
class LowerClusterToRuntimeOpsTest : public ::testing::Test { public: LowerClusterToRuntimeOpsTest() { mlir::RegisterCommonToolingDialects(registry_); context_.appendDialectRegistry(registry_); context_.loadAllAvailableDialects(); env_ = Env::Default(); test_group_name_ = "TestGroup"; test_dir_ = testing::TmpDir();
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue May 28 21:44:37 UTC 2024 - 6.5K bytes - Viewed (0) -
tensorflow/c/experimental/gradients/array_grad_test.cc
ASSERT_EQ(errors::OK, status_.code()) << status_.message(); x2.reset(x2_raw); } status_ = registry_.Register("IdentityN", IdentityNRegisterer); ASSERT_EQ(errors::OK, status_.code()) << status_.message(); auto IdentityNGradModel = BuildGradModel(IdentityNModel, registry_); std::vector<AbstractTensorHandle*> outputs(2); status_ = RunModel(IdentityNGradModel, immediate_execution_ctx_.get(),
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Feb 28 13:53:47 UTC 2024 - 5K bytes - Viewed (0) -
tensorflow/cc/framework/gradients.cc
}; SymbolicGradientBuilder::SymbolicGradientBuilder( const Scope& scope, const ops::GradOpRegistry* registry, const std::vector<Output>& outputs, const std::vector<Output>& inputs, const std::vector<Output>& grad_inputs, std::vector<Output>* grad_outputs) : scope_(scope), registry_(registry), outputs_(outputs), inputs_(inputs), grad_inputs_(grad_inputs),
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Sat Apr 13 05:57:22 UTC 2024 - 22K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/experimental/tac/tac_module.cc
context->appendDialectRegistry(registry_); context->loadAllAvailableDialects(); // Run TAC passes. auto status = RunTacPasses(&module, options_.debug_mode); if (!status.ok()) { return status; } return exporter_->Export(module); } void TacModule::RegisterExtraDialects(mlir::DialectRegistry& registry) { registry.appendTo(registry_); } } // namespace tac
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Jun 08 01:19:25 UTC 2023 - 5.6K bytes - Viewed (0) -
tensorflow/c/eager/gradients.cc
Status GradientRegistry::Register( const string& op_name, GradientFunctionFactory gradient_function_factory) { auto iter = registry_.find(op_name); if (iter != registry_.end()) { const string error_msg = "Gradient already exists for op: " + op_name + "."; return errors::AlreadyExists(error_msg); } registry_.insert({op_name, gradient_function_factory}); return absl::OkStatus(); } Status GradientRegistry::Lookup(
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Feb 15 09:49:45 UTC 2024 - 19.3K bytes - Viewed (0) -
tensorflow/cc/framework/grad_op_registry.h
/// the gradient function for 'op' exists in the registry. /// Note that 'func' can be null for ops that have registered no-gradient with /// the registry. /// Returns error status otherwise. Status Lookup(const string& op, GradFunc* func) const; /// Returns a pointer to the global gradient function registry. static GradOpRegistry* Global(); private:
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Oct 05 15:33:58 UTC 2022 - 2.9K bytes - Viewed (0) -
tensorflow/c/experimental/gradients/tape/tape_operation.cc
namespace tensorflow { namespace gradients { TapeOperation::TapeOperation(AbstractOperation* parent_op, Tape* tape, const GradientRegistry& registry) : AbstractOperation(kTape), parent_op_(parent_op), tape_(tape), registry_(registry) { // TODO(b/172003047): Consider making AbstractOperation RefCounted. // parent_op_->Ref(); } void TapeOperation::Release() {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Feb 28 06:16:45 UTC 2024 - 9K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/experimental/tac/tac_module.h
const std::vector<const tac::TargetHardware*>& GetAvailableHardwares() const { return const_backends_; } // Registers all dialects in 'registry' with the module. // This to allow clients to register extra dialects required. void RegisterExtraDialects(mlir::DialectRegistry& registry); protected: // Adds TAC passes to the 'pass_manager'. virtual void AddTACPass(mlir::OpPassManager* pass_manager,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Jun 08 01:19:25 UTC 2023 - 4.3K bytes - Viewed (0) -
tensorflow/c/eager/gradients.h
// // More complex gradient functions can use inputs/attrs etc. from the // // forward `op`. // return new AddGradientFunction; // } // // Status RegisterGradients(GradientRegistry* registry) { // return registry->Register("Add", AddRegisterer); // } class GradientFunction { public: virtual Status Compute(AbstractContext* ctx, absl::Span<AbstractTensorHandle* const> grad_outputs,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Sep 26 10:27:05 UTC 2022 - 6.9K bytes - Viewed (0)