- Sort Score
- Result 10 results
- Languages All
Results 51 - 60 of 149 for getDefiningOp (0.64 sec)
-
tensorflow/compiler/mlir/quantization/tensorflow/passes/prepare_lifting.td
def FuseAffineOpAndMul : Pat< (TF_MulOp (SupportedAffineOpMatcher $conv_out, $input, $weight), (TF_ConstOp:$mul_rhs IsFloatElementsAttr:$mul_rhs_value)), (CloneOpWithReplacedOperands (GetDefiningOp $conv_out), $input, (MultiplyFakeQuantValue $weight, (MakeOneDimValueBroadcastable $mul_rhs, $weight))), [(HasOneUse $conv_out), (HasRankOf<1> $mul_rhs_value),
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Feb 14 03:24:59 UTC 2024 - 8.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/passes/insert_save_op.cc
auto var_handle_op = dyn_cast<TF::VarHandleOp>(resource_operand.getDefiningOp()); if (!var_handle_op) continue; Value assigned_value_operand = assign_variable_op.getOperand(1); auto const_op = dyn_cast<TF::ConstOp>(assigned_value_operand.getDefiningOp()); if (!const_op) continue; var_handle_ops.emplace_back(var_handle_op); }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri Mar 22 05:52:39 UTC 2024 - 9.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/transforms/dilated_conv.h
} // Check if the ConvOp's input is defined by `Expand` op, and the output used // by `Squeeze` op. Operation* producer_op = op.getOperand(0).getDefiningOp(); if (!producer_op || producer_op->getNumResults() != 1) { return rewriter.notifyMatchFailure( op, "op doesn't have a producer node that has a single result"); } if (!producer_op->hasOneUse() ||
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 20K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/passes/lift_quantizable_spots_as_functions.cc
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 10 04:07:09 UTC 2024 - 16.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/ops/tf_quantize_op.cc
ShapedType result_type, StringRef func_name, Value& func_input_arg) { Operation* input_op = input_val.getDefiningOp(); Operation* insertion_point = input_op->getParentOfType<func::FuncOp>(); if (!insertion_point) insertion_point = input_op->getParentOfType<ModuleOp>(); rewriter.setInsertionPointAfter(insertion_point);
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 11K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/stablehlo/transforms/uniform_quantized_stablehlo_to_tfl_pass.cc
add_op != nullptr && !isa<stablehlo::ConstantOp>(add_op->getOperand(1).getDefiningOp())) { LLVM_DEBUG(llvm::dbgs() << "Expected a `stablehlo.constant` as the " << "rhs of `stablehlo.add`.\n"); } // Make sure the filter is a constant or a constant transpose. Operation* filter_op = filter.getDefiningOp(); const bool is_constant = isa_and_nonnull<stablehlo::ConstantOp>(filter_op);
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Apr 22 09:00:19 UTC 2024 - 99.8K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/passes/quantization_patterns.h
// The input of the quantize op has already been quantized, i.e. // rescale. return failure(); } Operation* operand_op = operand.getDefiningOp(); if (operand_op == nullptr) { // When `QuantizeOpT`'s operand does not have a defining op, it means it // is a `BlockArgument`. The pattern does not match if there is no op to // quantize.
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 10.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/stablehlo/transforms/legalize_stablehlo_to_vhlo.cc
auto new_value = converter.materializeArgumentConversion( rewriter, result.getLoc(), type, {result}); rewriter.replaceAllUsesExcept(result, new_value, new_value.getDefiningOp()); } // Wrap operands in an unrealized cast to create a cast to buffer any type // changes to the operand, and apply type converter to operands: // V0 = op(operand) // ==>
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed May 15 19:48:51 UTC 2024 - 12.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/transforms/default_quant_params.cc
// This value isn't an expressed type (float), skip. if (!new_type) return; Block &block = value.getParentRegion()->front(); Operation *op = value.getDefiningOp(); if (op) { builder.setInsertionPoint(&block, ++Block::iterator(op)); } else { builder.setInsertionPointToStart(&block); } TypeAttr type_attr = TypeAttr::get(new_type);
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 9.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/transforms/remove_vars_in_session_initializer.cc
} } erase_list.push_back(op); for (auto& use : op->getOpOperands()) { if (auto op_result = mlir::dyn_cast<mlir::OpResult>(use.get())) { Operation* def = op_result.getDefiningOp(); if (!dead_ops.insert(def).second) continue; RecursiveRemove(def, erase_list, dead_ops); } } } void RemoveVariables(llvm::ArrayRef<VarHandleOp> vars) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 3.6K bytes - Viewed (0)