Results 11 - 20 of 1,429 for registry1 (0.21 sec)

  1. tensorflow/compiler/mlir/mlir_graph_optimization_pass.h

      explicit MlirV1CompatGraphOptimizationPass(
          const MlirV1CompatOptimizationPassRegistry* registry =
              &MlirV1CompatOptimizationPassRegistry::Global())
          : registry_(registry) {}
    
      Status Run(const GraphOptimizationPassOptions& options) override;
    
     private:
      const MlirV1CompatOptimizationPassRegistry* registry_;
    };
    
    // -------------------------------------------------------------------------- //
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Oct 03 22:53:34 UTC 2023
    - 8.4K bytes
    - Viewed (0)
  2. tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc

                             << ", FallbackEnabled: " << num_passes_fallback_enabled
                             << ", Total: " << registry_->passes().size();
      }
    
      GraphDebugInfo debug_info;
      mlir::DialectRegistry registry;
      RegisterDialects(registry);
      mlir::MLIRContext context(registry);
      GraphImportConfig import_config;
      import_config.graph_as_function = true;
      import_config.control_outputs = *control_ret_node_names;
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed Jun 12 22:19:26 UTC 2024
    - 18.5K bytes
    - Viewed (0)
  3. tensorflow/cc/framework/grad_op_registry.h

      /// the gradient function for 'op' exists in the registry.
      /// Note that 'func' can be null for ops that have registered no-gradient with
      /// the registry.
      /// Returns error status otherwise.
      Status Lookup(const string& op, GradFunc* func) const;
    
      /// Returns a pointer to the global gradient function registry.
      static GradOpRegistry* Global();
    
     private:
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed Oct 05 15:33:58 UTC 2022
    - 2.9K bytes
    - Viewed (0)
  4. platforms/documentation/docs/src/snippets/native-binaries/cunit/groovy/libs/cunit/2.1-2/include/CUnit/TestDB.h

    #else
      unsigned int uiNumberOfSuites;    /**< Number of registered suites in the registry. */
      unsigned int uiNumberOfTests;     /**< Total number of registered tests in the registry. */
      CU_pSuite    pSuite;              /**< Pointer to the 1st suite in the test registry. */
    #endif
    } CU_TestRegistry;
    typedef CU_TestRegistry* CU_pTestRegistry;  /**< Pointer to a CUnit test registry. */
    
    /*=================================================================
    Registered: Wed Jun 12 18:38:38 UTC 2024
    - Last Modified: Mon Nov 27 17:53:42 UTC 2023
    - 40.4K bytes
    - Viewed (0)
  5. tensorflow/compiler/mlir/lite/experimental/tac/tac_module.cc

      context->appendDialectRegistry(registry_);
      context->loadAllAvailableDialects();
    
      // Run TAC passes.
      auto status = RunTacPasses(&module, options_.debug_mode);
    
      if (!status.ok()) {
        return status;
      }
    
      return exporter_->Export(module);
    }
    
    void TacModule::RegisterExtraDialects(mlir::DialectRegistry& registry) {
      registry.appendTo(registry_);
    }
    }  // namespace tac
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Jun 08 01:19:25 UTC 2023
    - 5.6K bytes
    - Viewed (0)
  6. tensorflow/c/experimental/gradients/tape/tape_operation.cc

    namespace tensorflow {
    namespace gradients {
    TapeOperation::TapeOperation(AbstractOperation* parent_op, Tape* tape,
                                 const GradientRegistry& registry)
        : AbstractOperation(kTape),
          parent_op_(parent_op),
          tape_(tape),
          registry_(registry) {
      // TODO(b/172003047): Consider making AbstractOperation RefCounted.
      // parent_op_->Ref();
    }
    void TapeOperation::Release() {
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed Feb 28 06:16:45 UTC 2024
    - 9K bytes
    - Viewed (0)
  7. tensorflow/compiler/mlir/lite/experimental/tac/tac_module.h

      const std::vector<const tac::TargetHardware*>& GetAvailableHardwares() const {
        return const_backends_;
      }
    
      // Registers all dialects in 'registry' with the module.
      // This to allow clients to register extra dialects required.
      void RegisterExtraDialects(mlir::DialectRegistry& registry);
    
     protected:
      // Adds TAC passes to the 'pass_manager'.
      virtual void AddTACPass(mlir::OpPassManager* pass_manager,
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Jun 08 01:19:25 UTC 2023
    - 4.3K bytes
    - Viewed (0)
  8. tensorflow/cc/framework/gradients.cc

    };
    
    SymbolicGradientBuilder::SymbolicGradientBuilder(
        const Scope& scope, const ops::GradOpRegistry* registry,
        const std::vector<Output>& outputs, const std::vector<Output>& inputs,
        const std::vector<Output>& grad_inputs, std::vector<Output>* grad_outputs)
        : scope_(scope),
          registry_(registry),
          outputs_(outputs),
          inputs_(inputs),
          grad_inputs_(grad_inputs),
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Sat Apr 13 05:57:22 UTC 2024
    - 22K bytes
    - Viewed (0)
  9. tensorflow/c/eager/gradients.cc

    Status GradientRegistry::Register(
        const string& op_name, GradientFunctionFactory gradient_function_factory) {
      auto iter = registry_.find(op_name);
      if (iter != registry_.end()) {
        const string error_msg = "Gradient already exists for op: " + op_name + ".";
        return errors::AlreadyExists(error_msg);
      }
      registry_.insert({op_name, gradient_function_factory});
      return absl::OkStatus();
    }
    Status GradientRegistry::Lookup(
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Feb 15 09:49:45 UTC 2024
    - 19.3K bytes
    - Viewed (0)
  10. tensorflow/c/eager/gradients.h

    //   // More complex gradient functions can use inputs/attrs etc. from the
    //   // forward `op`.
    //   return new AddGradientFunction;
    // }
    //
    // Status RegisterGradients(GradientRegistry* registry) {
    //   return registry->Register("Add", AddRegisterer);
    // }
    class GradientFunction {
     public:
      virtual Status Compute(AbstractContext* ctx,
                             absl::Span<AbstractTensorHandle* const> grad_outputs,
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Mon Sep 26 10:27:05 UTC 2022
    - 6.9K bytes
    - Viewed (0)
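Results 2, 5, and 7 above share one MLIR pattern: the dialects a pipeline needs are collected in an mlir::DialectRegistry, the registry is attached to an MLIRContext (passed to the constructor in result 2, appended via appendDialectRegistry in result 5), and the dialects are loaded before any IR is built. Below is a minimal standalone sketch of that pattern, assuming upstream MLIR headers; the func dialect stands in for whatever set RegisterDialects / RegisterExtraDialects would install, and header paths differ slightly between MLIR versions.

    // Sketch of the DialectRegistry -> MLIRContext pattern from results 2, 5, 7.
    // Assumes upstream MLIR headers; the func dialect is an arbitrary example.
    #include "mlir/Dialect/Func/IR/FuncOps.h"
    #include "mlir/IR/DialectRegistry.h"
    #include "mlir/IR/MLIRContext.h"
    
    void ConfigureContext(mlir::MLIRContext& context) {
      mlir::DialectRegistry registry;
      // Collect the dialects the pipeline needs; TensorFlow's RegisterDialects /
      // RegisterExtraDialects fill a registry the same way.
      registry.insert<mlir::func::FuncDialect>();
    
      // Attach and load, mirroring result 5 (result 2 instead hands the registry
      // to the MLIRContext constructor).
      context.appendDialectRegistry(registry);
      context.loadAllAvailableDialects();
    }
    
    int main() {
      mlir::MLIRContext context;
      ConfigureContext(context);
      return 0;
    }

Keeping the registry separate from the context is what lets TacModule::RegisterExtraDialects in result 5 merge caller-supplied dialects into its own registry before the context is configured.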
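Result 3 documents the symbolic-gradient registry used by the C++ API: Lookup fills in the GradFunc registered for an op (the function may be null when the op was registered as no-gradient) and Global returns the process-wide instance. A small lookup sketch, assuming the grad_op_registry.h header and its GradFunc typedef are available:

    // Sketch of querying the global gradient-op registry from result 3.
    // Assumes tensorflow/cc/framework/grad_op_registry.h is on the include path.
    #include <string>
    #include "tensorflow/cc/framework/grad_op_registry.h"
    
    bool HasGradient(const std::string& op_name) {
      tensorflow::ops::GradFunc func = nullptr;
      tensorflow::Status s =
          tensorflow::ops::GradOpRegistry::Global()->Lookup(op_name, &func);
      // Per the header comment, Lookup only fails for unknown ops; `func` can
      // still be null here when the op was registered as no-gradient.
      return s.ok() && func != nullptr;
    }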
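Result 4 is the one non-TensorFlow hit: the CU_TestRegistry struct from the CUnit copy bundled with a Gradle native-binaries sample, which tracks how many suites and tests are registered and points at the first suite. A minimal sketch of populating that registry through CUnit's public API (the calls are standard CUnit, not taken from the excerpt):

    // Sketch of filling the CU_TestRegistry from result 4 via CUnit's public API.
    #include <CUnit/CUnit.h>
    #include <CUnit/Basic.h>
    
    static void test_addition(void) { CU_ASSERT_EQUAL(2 + 2, 4); }
    
    int main(void) {
      if (CU_initialize_registry() != CUE_SUCCESS) return CU_get_error();
    
      // Each added suite/test bumps uiNumberOfSuites / uiNumberOfTests in the
      // registry struct shown above.
      CU_pSuite suite = CU_add_suite("arithmetic", NULL, NULL);
      if (suite == NULL || CU_add_test(suite, "addition", test_addition) == NULL) {
        CU_cleanup_registry();
        return CU_get_error();
      }
    
      CU_basic_run_tests();
      CU_cleanup_registry();
      return CU_get_error();
    }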
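Results 9 and 10 describe the eager-mode GradientRegistry: Register rejects a duplicate op name with an AlreadyExists error, and the header comment in result 10 shows how a gradient function factory is wired in. The sketch below restates that pattern; AddGradientFunction and AddRegisterer are the placeholder names from the header comment, and the factory body is stubbed out.

    // Sketch of the registration pattern from results 9 and 10. Assumes
    // tensorflow/c/eager/gradients.h is available; AddRegisterer and
    // AddGradientFunction are the placeholder names from that header's comment.
    #include "tensorflow/c/eager/gradients.h"
    
    namespace example {
    
    using tensorflow::Status;
    using tensorflow::gradients::ForwardOperation;
    using tensorflow::gradients::GradientFunction;
    using tensorflow::gradients::GradientRegistry;
    
    // Factory stored in the registry; invoked per forward op instance. A real
    // registerer would `return new AddGradientFunction(...)` here.
    GradientFunction* AddRegisterer(const ForwardOperation& op) {
      return nullptr;  // stub
    }
    
    // Registration fails with AlreadyExists if "Add" already has a gradient,
    // per GradientRegistry::Register in result 9.
    Status RegisterGradients(GradientRegistry* registry) {
      return registry->Register("Add", AddRegisterer);
    }
    
    }  // namespace example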