Results 181 - 190 of 435 for ModuleOp (0.21 sec)

  1. tensorflow/compiler/mlir/quantization/stablehlo/cc/calibration/statistics.cc

        statistics_map.insert(single_map.statistics().begin(),
                              single_map.statistics().end());
      }
      return statistics_map;
    }
    
    absl::Status AddCalibrationStatistics(
        mlir::ModuleOp module_op, absl::string_view calibration_data_dir,
        const CalibrationOptions& calibration_options,
        const PyFunctionLibrary& py_function_library) {
      TF_ASSIGN_OR_RETURN(const CalibrationStatisticsFlatMap statistics_map,
    - Last Modified: Tue May 14 06:31:57 UTC 2024
    - 4.6K bytes
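
    The snippet above uses the usual absl::Status / TF_ASSIGN_OR_RETURN
    error-handling idiom: the macro unwraps an absl::StatusOr value on success
    and returns its error status early otherwise. A minimal sketch of that
    idiom, with hypothetical function names, is:

    // Sketch only: LoadRecordCount and ProcessRecords are hypothetical names,
    // not part of the file above. TF_ASSIGN_OR_RETURN comes from TensorFlow's
    // statusor header (exact path may vary across TF versions).
    #include "absl/status/status.h"
    #include "absl/status/statusor.h"
    #include "tensorflow/core/platform/statusor.h"  // TF_ASSIGN_OR_RETURN (assumed path)

    absl::StatusOr<int> LoadRecordCount();  // hypothetical producer

    absl::Status ProcessRecords() {
      // On success `count` holds the unwrapped value; on failure the error
      // status is returned from ProcessRecords immediately.
      TF_ASSIGN_OR_RETURN(const int count, LoadRecordCount());
      return count > 0 ? absl::OkStatus()
                       : absl::InvalidArgumentError("no records");
    }
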
  2. tensorflow/compiler/mlir/quantization/stablehlo/passes/insert_calibration_statistics_saver.cc

      for (Region& region : op->getRegions()) {
        if (!region.getOps<TF::CalibrationStatisticsSaverOp>().empty()) {
          return true;
        }
      }
    
      SymbolTable symbol_table(op->getParentOfType<ModuleOp>());
      // Check the functions associated with CaseOp, IfOp, and WhileOp.
      for (const NamedAttribute& attr : op->getAttrs()) {
        FlatSymbolRefAttr symbol_attr =
            dyn_cast_or_null<FlatSymbolRefAttr>(attr.getValue());
    - Last Modified: Tue May 14 06:31:57 UTC 2024
    - 7.2K bytes
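
    The snippet resolves function references held as op attributes (e.g. the
    branch and body functions of CaseOp, IfOp and WhileOp) through the
    enclosing module's SymbolTable. A minimal sketch of that lookup pattern,
    with a hypothetical helper name, could look like:

    // Sketch only: GetReferencedFuncs is a hypothetical helper illustrating
    // the SymbolTable lookup pattern shown above, not code from the file.
    #include "llvm/ADT/SmallVector.h"
    #include "mlir/Dialect/Func/IR/FuncOps.h"
    #include "mlir/IR/BuiltinOps.h"   // mlir::ModuleOp
    #include "mlir/IR/SymbolTable.h"

    // Collects the func.func ops referenced by `op` via FlatSymbolRefAttr
    // attributes, using the SymbolTable of the surrounding ModuleOp.
    llvm::SmallVector<mlir::func::FuncOp> GetReferencedFuncs(mlir::Operation* op) {
      llvm::SmallVector<mlir::func::FuncOp> funcs;
      mlir::SymbolTable symbol_table(op->getParentOfType<mlir::ModuleOp>());
      for (const mlir::NamedAttribute& attr : op->getAttrs()) {
        auto symbol_attr =
            mlir::dyn_cast_or_null<mlir::FlatSymbolRefAttr>(attr.getValue());
        if (!symbol_attr) continue;
        if (auto func = symbol_table.lookup<mlir::func::FuncOp>(
                symbol_attr.getValue())) {
          funcs.push_back(func);
        }
      }
      return funcs;
    }
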
  3. tensorflow/compiler/mlir/quantization/tensorflow/passes/lift_quantizable_spots_as_functions.cc

      }
    
      void removeAttrMapAttribute(TF::PartitionedCallOp call_op,
                                  StringRef function_name,
                                  StringRef error_message) const {
        ModuleOp module = call_op->getParentOfType<ModuleOp>();
        SymbolTable symbol_table(module);
        mlir::func::FuncOp composite_func =
            dyn_cast<func::FuncOp>(symbol_table.lookup(function_name));
        if (!composite_func) return;
    
    - Last Modified: Fri May 10 04:07:09 UTC 2024
    - 16.4K bytes
  4. tensorflow/compiler/mlir/lite/transforms/legalize_hashtables.cc

        return success();
      }
    };
    
    template <typename T>
    std::vector<T> GetAllOps(mlir::ModuleOp* module) {
      std::vector<T> ops;
      module->walk([&](T op) { ops.emplace_back(op); });
      return ops;
    }
    
    bool checkWhetherGraphHasValidStaticLookupTables(ModuleOp module) {
      auto hashtables = GetAllOps<TF::HashTableV2Op>(&module);
      // No need to run the legalization patterns.
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 7.6K bytes
  5. tensorflow/compiler/mlir/lite/tf_tfl_passes.h

    // Adds the first portion of StableHLO->TF passes happening before quantization.
    // The `pass_manager` that runs on a `mlir::ModuleOp` expects a graph containing
    // a `mlir::TF::XlaCallModuleOp` with a serialized StableHLO module. The resulting
    // `mlir::ModuleOp` after running these passes will be an MHLO module, or a
    // StableHLO module if `pass_config.enable_stablehlo_quantizer` is `true`. This
    - Last Modified: Thu Feb 01 06:14:07 UTC 2024
    - 4.1K bytes
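
    As a rough illustration of how a pipeline builder like the one described
    above is typically consumed (not the actual API of tf_tfl_passes.h), the
    caller assembles a mlir::PassManager anchored on mlir::ModuleOp and runs
    it on the parsed module:

    // Sketch only: RunModulePipeline is hypothetical; the canonicalizer pass
    // stands in for the StableHLO->TF passes described in the header comment.
    #include "mlir/IR/BuiltinOps.h"      // mlir::ModuleOp
    #include "mlir/Pass/PassManager.h"
    #include "mlir/Transforms/Passes.h"  // mlir::createCanonicalizerPass

    mlir::LogicalResult RunModulePipeline(mlir::ModuleOp module) {
      // The pass manager is anchored on ModuleOp, matching the ops these
      // passes are declared to run on.
      mlir::PassManager pass_manager(module.getContext());
      pass_manager.addPass(mlir::createCanonicalizerPass());
      return pass_manager.run(module);
    }
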
  6. tensorflow/compiler/mlir/quantization/tensorflow/passes/cast_bf16_ops_to_f32.cc

    #include "tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h"
    
    namespace mlir {
    namespace quant {
    namespace {
    
    class CastBf16OpsToF32Pass
        : public PassWrapper<CastBf16OpsToF32Pass, OperationPass<ModuleOp>> {
     public:
      MLIR_DEFINE_EXPLICIT_INTERNAL_INLINE_TYPE_ID(CastBf16OpsToF32Pass)
      explicit CastBf16OpsToF32Pass() = default;
    
      StringRef getArgument() const final {
    - Last Modified: Sun Dec 10 05:52:02 UTC 2023
    - 4.5K bytes
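
    A module-level pass declared this way typically also overrides
    getDescription() and runOnOperation(). A minimal, self-contained skeleton
    in the same style, with a hypothetical pass name and a no-op body, might
    look like:

    // Sketch only: ExampleModulePass is hypothetical and does nothing useful;
    // it just shows the PassWrapper<..., OperationPass<ModuleOp>> shape used
    // by CastBf16OpsToF32Pass above.
    #include "mlir/IR/BuiltinOps.h"  // mlir::ModuleOp
    #include "mlir/Pass/Pass.h"

    namespace {

    class ExampleModulePass
        : public mlir::PassWrapper<ExampleModulePass,
                                   mlir::OperationPass<mlir::ModuleOp>> {
     public:
      MLIR_DEFINE_EXPLICIT_INTERNAL_INLINE_TYPE_ID(ExampleModulePass)

      llvm::StringRef getArgument() const final { return "example-module-pass"; }
      llvm::StringRef getDescription() const final {
        return "Illustrative no-op pass anchored on ModuleOp.";
      }

      // Runs once per module; a real pass would rewrite ops here.
      void runOnOperation() override {
        mlir::ModuleOp module = getOperation();
        module.walk([](mlir::Operation* op) { (void)op; });
      }
    };

    }  // namespace
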
  7. tensorflow/compiler/mlir/tensorflow/transforms/sparsecore/sparsecore_passes.td

    limitations under the License.
    ==============================================================================*/
    
    include "mlir/Pass/PassBase.td"
    
    def EmbeddingPipeliningPass : Pass<"tf-embedding-pipelining", "mlir::ModuleOp"> {
      let summary = "Rewrite graph for embedding pipelining";
      let constructor = "TFDevice::CreateEmbeddingPipeliningPass()";
        let description = [{
    - Last Modified: Thu Mar 28 23:42:09 UTC 2024
    - 3.9K bytes
  8. tensorflow/compiler/mlir/quantization/stablehlo/passes/replace_stablehlo_ops_in_main_function_with_xla_call_module_ops.cc

                               const TypeRange result_types,
                               const SetVector<Operation*>& reverse_subgraph,
                               const func::FuncOp stablehlo_func_op,
                               ModuleOp module_op) {
      MLIRContext* ctx = module_op.getContext();
      OpBuilder builder(ctx);
      Operation* last_subgraph_op = reverse_subgraph.front();
      builder.setInsertionPointAfter(last_subgraph_op);
    
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 21K bytes
  9. tensorflow/compiler/mlir/quantization/tensorflow/passes/insert_restore_op.cc

    // tf.AssignVariableOp(tf.VarHandleOp, tf.Const) patterns in the initializer
    // function and replaces tf.Consts with the results of RestoreV2.
    class InsertRestoreOpPass
        : public PassWrapper<InsertRestoreOpPass, OperationPass<ModuleOp>> {
     public:
      MLIR_DEFINE_EXPLICIT_INTERNAL_INLINE_TYPE_ID(InsertRestoreOpPass)
    
      explicit InsertRestoreOpPass() = default;
    
      // The argument used to refer to the pass in the textual format (e.g. on the
    - Last Modified: Sun Mar 12 06:02:20 UTC 2023
    - 9K bytes
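
    The comment above describes matching tf.AssignVariableOp(tf.VarHandleOp,
    tf.Const) patterns in the initializer function. A rough, hypothetical
    sketch of such a match, assuming the usual operand order of resource
    first and value second, could be:

    // Sketch only: FindVarInitPatterns is a hypothetical helper, not code
    // from insert_restore_op.cc. It assumes AssignVariableOp's operands are
    // (resource, value), in that order.
    #include "llvm/ADT/SmallVector.h"
    #include "mlir/Dialect/Func/IR/FuncOps.h"
    #include "tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h"

    llvm::SmallVector<mlir::TF::AssignVariableOp> FindVarInitPatterns(
        mlir::func::FuncOp func) {
      llvm::SmallVector<mlir::TF::AssignVariableOp> matches;
      func.walk([&](mlir::TF::AssignVariableOp assign) {
        mlir::Value resource = assign->getOperand(0);  // assumed: tf.VarHandleOp
        mlir::Value value = assign->getOperand(1);     // assumed: tf.Const
        if (resource.getDefiningOp<mlir::TF::VarHandleOp>() &&
            value.getDefiningOp<mlir::TF::ConstOp>()) {
          matches.push_back(assign);
        }
      });
      return matches;
    }
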
  10. tensorflow/compiler/mlir/tensorflow/transforms/annotate_parameter_replication.cc

      while (auto op = v.getDefiningOp()) {
        if (!isa<TF::IdentityOp, TF::ReadVariableOp>(op)) break;
        v = op->getOperand(0);
      }
      return v;
    }
    
    void AnnotateParameterReplicationPass::runOnOperation() {
      ModuleOp m = getOperation();
      OpBuilder builder(m.getContext());
      m.walk([&](tf_device::ClusterFuncOp cluster_func) {
        auto replicate = cluster_func->getParentOfType<tf_device::ReplicateOp>();
        if (!replicate) return;
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 4.1K bytes