- Sort by: Score
- Results per page: 10
- Languages: All
Results 31 - 40 of 63 for get_attr (0.12 sec)
-
tensorflow/compiler/mlir/quantization/common/attrs_and_constraints.h
} // Returns the function attribute for the given call op which is lifted for // quantization. inline FlatSymbolRefAttr GetFuncAttr(TF::PartitionedCallOp call_op) { return mlir::dyn_cast<FlatSymbolRefAttr>(call_op.getFAttr()); } inline FlatSymbolRefAttr GetFuncAttr(TF::XlaCallModuleOp call_op) { return call_op->getAttrOfType<FlatSymbolRefAttr>( TF::kStablehloEntryFunctionAttrName); }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 9.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/tf2xla/internal/passes/extract_head_tail_outside_compilation.cc
auto new_cluster = builder->create<mlir::tf_device::ClusterOp>( cluster.getLoc(), new_cluster_result_types, /*operands=*/llvm::ArrayRef<Value>{}, cluster->getAttrs()); new_cluster.getBody().takeBody(cluster.getBody()); auto operand_not_in_cluster = [&](mlir::OpOperand& operand) { return !new_cluster.getOperation()->isProperAncestor(operand.getOwner()); };
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 20.6K bytes - Viewed (0) -
fastapi/encoders.py
exclude = set(exclude) if isinstance(obj, BaseModel): # TODO: remove when deprecating Pydantic v1 encoders: Dict[Any, Any] = {} if not PYDANTIC_V2: encoders = getattr(obj.__config__, "json_encoders", {}) # type: ignore[attr-defined] if custom_encoder: encoders.update(custom_encoder) obj_dict = _model_dump( obj, mode="json",
Registered: Mon Jun 17 08:32:26 UTC 2024 - Last Modified: Thu Apr 18 21:56:59 UTC 2024 - 10.8K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/transforms/resource_op_lifting_cleanup.cc
if (cloned == func) continue; // Patch up the op attribute to point to the new function. for (NamedAttribute attr : op->getAttrs()) { auto symref = mlir::dyn_cast<FlatSymbolRefAttr>(attr.getValue()); if (!symref) continue; if (symref.getValue() != func.getName()) continue; op->setAttr(attr.getName(), FlatSymbolRefAttr::get(op->getContext(), cloned.getName())); break; } }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 19.7K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/passes/quantization_patterns.h
OperationState new_state(candidate_op->getLoc(), candidate_op->getName().getStringRef(), inputs, output_types, candidate_op->getAttrs()); for (int i = 0; i < candidate_op->getNumRegions(); ++i) { new_state.addRegion(); } Operation* quantized_op = rewriter.create(new_state); if (candidate_op->getNumRegions() != 0) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 10.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/passes/quantization_patterns.cc
ModuleOp module_op = op->getParentOfType<ModuleOp>(); // Ignore ops without quantization method. // Consider adding checks for individual methods. if (!op->getAttr(kQuantizationMethodAttr)) return failure(); // Ignore unquantized ops. if (!IsQuantizedXlaCallModuleOp(op)) return failure(); // For weight-only quantization, op should be hybrid quantized.
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 03 06:04:36 UTC 2024 - 41.7K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/passes/lift_quantizable_spots_as_functions.cc
private: LogicalResult matchAndRewrite(TF::PartitionedCallOp call_op, PatternRewriter& rewriter) const override { StringRef function_name = mlir::cast<FlatSymbolRefAttr>(call_op.getFAttr()).getValue(); if (!function_name.starts_with("composite_") || !call_op->hasAttr(kQuantTraitAttrName)) { return failure(); } absl::Status check_status;
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 10 04:07:09 UTC 2024 - 16.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/transforms/reduce_while_operands.cc
new_result_types.push_back(while_op.getResultTypes()[i]); } } auto new_while_op = OpBuilder(while_op).create<WhileOp>( while_op.getLoc(), new_result_types, new_operands, while_op->getAttrs()); new_while_op.getCond().takeBody(while_op.getCond()); new_while_op.getBody().takeBody(while_op.getBody()); for (auto i = 0; i < n; ++i) { if (!while_op.getResult(i).use_empty()) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 10.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/passes/add_dump_tensor_op.cc
new_call_op->setAttr(kEntryFuncAttrName, rewriter.getStringAttr(new_ref_func_name.getValue())); new_call_op->setAttrs(call_op->getAttrs()); new_call_op->removeAttr(rewriter.getStringAttr(kQuantTraitAttrName)); FlatSymbolRefAttr new_func_name_attr = FlatSymbolRefAttr::get(rewriter.getContext(), new_ref_func_name); new_call_op->setAttr(kEntryFuncAttrName, new_func_name_attr);
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri Mar 22 22:55:22 UTC 2024 - 13K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/transforms/post_quantize.cc
PatternRewriter& rewriter) const override { auto input_op = op.getInput().getDefiningOp(); if (auto q = llvm::dyn_cast_or_null<QuantizeOp>(input_op)) { if (!q->getAttr(mlir::quant::kVolatileOpAttrName)) return failure(); if (remove_volatile_ops_type == kPreserveInputsAndOutputs) { // Don't remove leading and trailing QDQ for PTQ workflow, so the io
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 17.1K bytes - Viewed (0)