Results 41 - 50 of 166 for setAttr (0.23 sec)

  1. tensorflow/c/python_api.h

    namespace tensorflow {
    
    void AddControlInput(TF_Graph* graph, TF_Operation* op, TF_Operation* input);
    
    // Changes an attr value in the node_def Protocol Buffer and sets a status upon
    // completion.
    void SetAttr(TF_Graph* graph, TF_Operation* op, const char* attr_name,
                 TF_Buffer* attr_value_proto, TF_Status* status);
    
    // Clears the attr in the node_def Protocol Buffer and sets a status upon
    // completion.
    Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Jul 12 18:48:56 UTC 2023 - 3.5K bytes - Viewed (0)
  2. tensorflow/compiler/mlir/quantization/tensorflow/passes/mark_functions_noinline.cc

      Builder builder(&getContext());
    
      // Adds the `tf._noinline = true` attribute to the function if the name
      // matches.
      if (noinline_functions.contains(func_op.getSymName())) {
        func_op->setAttr(kTfNoinlineAttr, builder.getBoolAttr(true));
        LLVM_DEBUG(llvm::dbgs()
                   << "Marked tf._noinline = true: " << func_op.getSymName());
      }
    }
    
    static PassRegistration<MarkFunctionsNoinlinePass> pass{};
    Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Jan 18 02:52:57 UTC 2023 - 4.5K bytes - Viewed (0)
  3. tensorflow/compiler/mlir/lite/transforms/analyze_variables.cc

                  getElementTypeOrSelf(operand.getType()))) {
            legalize_to_tfl = false;
            return WalkResult::interrupt();
          }
        }
        return WalkResult::advance();
      });
      module->setAttr(kLegalizeTflVariables,
                      BoolAttr::get(context, legalize_to_tfl));
    }
    
    }  // namespace
    
    std::unique_ptr<OperationPass<ModuleOp>> CreateAnalyzeVariablesPass() {
    Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 4.3K bytes - Viewed (0)
  4. tensorflow/compiler/mlir/tensorflow/transforms/add_functions_for_exported_names.cc

    // Set the (array of) exported name(s) of a (public) function to just
    // contain the given string.
    void SetExportedName(func::FuncOp f, StringRef name) {
      OpBuilder b(f);
      f->removeAttr(kTfSavedModelExportedNamesAttr);
      f->setAttr(kTfSavedModelExportedNamesAttr, b.getStrArrayAttr({name}));
    }
    
    // Convert a savedmodel public function into a private function.
    // This means we need to remove any attributes that are only allowed
    Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Dec 19 08:06:04 UTC 2023 - 4.5K bytes - Viewed (0)
  5. tensorflow/compiler/mlir/tensorflow/transforms/tpu_host_computation_expansion.cc

            should_expand_op_to_host_computation = false;
            continue;
          }
        }
    
        if (should_expand_op_to_host_computation)
          head_outside_compiled_op->setAttr(kXlaOutsideCompilationAttr,
                                            builder->getStringAttr(""));
      }
    }
    
    #define GEN_PASS_DEF_TPUHOSTCOMPUTATIONEXPANSIONPASS
    Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 5.1K bytes - Viewed (0)
  6. tensorflow/compiler/mlir/tensorflow/transforms/colocate_tpu_copy_with_dynamic_shape.cc

          state = solver.lookupState<Device>(result);
          if (state) break;
        }
        if (!state || !state->hasDevice()) {
          return WalkResult::advance();
        }
        op->setAttr(kDevice, state->getDevice());
        return WalkResult::advance();
      });
    }
    
    }  // namespace
    
    std::unique_ptr<OperationPass<ModuleOp>>
    CreateColocateTPUCopyWithDynamicShapePass() {
    Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Aug 23 00:30:27 UTC 2023 - 5.2K bytes - Viewed (0)
  7. tensorflow/compiler/mlir/tensorflow/utils/device_util.cc

          devices.push_back(builder.getNamedAttr(name, metadata));
        }
      }
    
      op->setAttr(kDevicesAttr, builder.getDictionaryAttr(devices));
    }
    
    mlir::LogicalResult GetDevicesFromOp(mlir::Operation* op,
                                         mlir::TF::RuntimeDevices* devices) {
      auto devices_attr = op->getAttr(kDevicesAttr);
      if (!devices_attr) return mlir::success();
    
    Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 6.4K bytes - Viewed (0)
  8. tensorflow/compiler/mlir/tensorflow/transforms/tpu_annotate_dynamic_shape_inputs.cc

                    std::get<1>(result).getDefiningOp())) {
              dynamic_shape_arg_index.push_back(cluster_func_operand.index());
            }
          }
        }
    
        cluster_func_op->setAttr(TF::kDynamicArgIndexAttr,
                                 builder.getI32ArrayAttr(dynamic_shape_arg_index));
    
        FlatSymbolRefAttr func_attr = cluster_func_op.getFuncAttr();
        func::FuncOp func =
    Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 6.2K bytes - Viewed (0)
  9. tensorflow/compiler/mlir/lite/experimental/tac/transforms/target_annotation.cc

                       OpBuilder* builder) {
      // TODO(karimnosseir): Maybe set device capabilities to allow us to have
      // more flexbility when raise the subgraphs.
      auto default_target = builder->getStringAttr(annotation);
      op->setAttr(attribute, default_target);
    }
    
    void TargetAnnotationPass::SetTargetAnnotation(
        Operation* op, llvm::ArrayRef<std::string> device_specs,
        OpBuilder* builder) {
      if (op->hasAttr(kSkipTargetAnnotation)) {
    Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 19 19:32:06 UTC 2023 - 5.9K bytes - Viewed (0)
  10. tensorflow/compiler/mlir/tensorflow/transforms/gpu_fusion.cc

        if (!add_op || add_op.getZ().hasOneUse()) {
          // We fuse the Relu only if the add has a single use, otherwise we only
          // fuse the add itself.
          op->setAttr("activation_mode", rewriter.getStringAttr("Relu"));
          rewriter.replaceOp(relu_op, op->getResult(0));
        }
        if (add_op) {
          rewriter.replaceOp(add_op, op->getResult(0));
        }
    
        return success();
    Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Nov 03 12:35:38 UTC 2022 - 5.2K bytes - Viewed (0)
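
The first hit above declares the C-layer helper tensorflow::SetAttr from tensorflow/c/python_api.h, whose comment says it changes an attr value in the node_def protocol buffer. Below is a minimal sketch of how such a helper might be called, assuming the attribute is supplied as a serialized tensorflow.AttrValue proto wrapped in a TF_Buffer and that a TF_Graph* and TF_Operation* already exist. The attr name "N" and the surrounding function are placeholders for illustration, not documented usage.

    // Sketch only: assumes the attr value is passed as a serialized
    // tensorflow.AttrValue proto, and that graph/op handles already exist.
    #include <string>

    #include "tensorflow/c/c_api.h"
    #include "tensorflow/c/python_api.h"
    #include "tensorflow/core/framework/attr_value.pb.h"

    void SetIntAttrExample(TF_Graph* graph, TF_Operation* op) {
      tensorflow::AttrValue value;
      value.set_i(42);  // hypothetical integer attr value
      const std::string serialized = value.SerializeAsString();

      // Wrap the serialized proto in a TF_Buffer, as SetAttr takes one.
      TF_Buffer* attr_value_proto =
          TF_NewBufferFromString(serialized.data(), serialized.size());
      TF_Status* status = TF_NewStatus();

      // "N" is a placeholder attr name; SetAttr reports the outcome in status.
      tensorflow::SetAttr(graph, op, "N", attr_value_proto, status);
      if (TF_GetCode(status) != TF_OK) {
        // Handle the error, e.g. log TF_Message(status).
      }

      TF_DeleteStatus(status);
      TF_DeleteBuffer(attr_value_proto);
    }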
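
Most of the remaining hits share one MLIR pattern: build an attribute with a Builder or OpBuilder and attach it to an operation through Operation::setAttr. The generic sketch below mirrors the calls seen in the snippets (getBoolAttr, getStringAttr, getI32ArrayAttr, removeAttr); the attribute names it uses are placeholders, not attributes taken from any of the files listed.

    // Generic sketch of the Operation::setAttr pattern from the results above.
    // Attribute names ("tf._example", "device", "indices") are placeholders.
    #include "mlir/IR/Builders.h"
    #include "mlir/IR/BuiltinAttributes.h"
    #include "mlir/IR/Operation.h"

    void AnnotateOp(mlir::Operation* op) {
      mlir::Builder builder(op->getContext());

      // Boolean flag, as in mark_functions_noinline.cc (tf._noinline = true).
      op->setAttr("tf._example", builder.getBoolAttr(true));

      // String attribute, as in target_annotation.cc and
      // tpu_host_computation_expansion.cc.
      op->setAttr("device", builder.getStringAttr("CPU"));

      // Integer-array attribute, as in tpu_annotate_dynamic_shape_inputs.cc.
      op->setAttr("indices", builder.getI32ArrayAttr({0, 2}));

      // setAttr overwrites an existing attribute of the same name;
      // removeAttr drops it, as add_functions_for_exported_names.cc does.
      op->removeAttr("tf._example");
    }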