- Sort Score
- Results per page: 10
- Languages All
Results 1 - 10 of 26 for callFoo (0.33 sec)
-
analysis/analysis-api/testData/components/compilerFacility/compilation/codeFragments/capturing/nestedOuterClass.kt
val x = 0 } } } fun callFoo(foo: Foo): Int { return 0 } fun callString(string: String): Int { return 1 } // MODULE: main // MODULE_KIND: CodeFragment // CONTEXT_MODULE: context // FILE: fragment.kt // CODE_FRAGMENT_KIND: EXPRESSION
Registered: Wed Jun 12 09:53:16 UTC 2024 - Last Modified: Tue Jan 30 11:41:26 UTC 2024 - 502 bytes - Viewed (0) -
analysis/analysis-api/testData/components/compilerFacility/compilation/codeFragments/capturing/nestedOuterClass.ir.txt
$this: CALL 'public final fun plus (other: kotlin.Int): kotlin.Int [operator] declared in kotlin.Int' type=kotlin.Int origin=PLUS $this: CALL 'public final fun callFoo (foo: <root>.Foo): kotlin.Int declared in <root>.ContextKt' type=kotlin.Int origin=null foo: GET_VAR 'p0: <root>.Foo declared in <root>.CodeFragment.run' type=<root>.Foo origin=null
Registered: Wed Jun 12 09:53:16 UTC 2024 - Last Modified: Tue Jan 30 11:41:26 UTC 2024 - 2K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/passes/merge_fusion_with_dequantize.cc
: public OpRewritePattern<func::CallOp> { public: explicit MergeFusionWithUniformDequantizePattern(MLIRContext* context) : OpRewritePattern<func::CallOp>(context) {} LogicalResult matchAndRewrite(func::CallOp call_op, PatternRewriter& rewriter) const override { if (call_op.getNumResults() != 1) return failure(); auto users = call_op->getUsers(); for (auto user : users) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 5.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/cc/report.cc
std::optional<QuantizationResult> GetQuantizationResult(func::CallOp call_op) { const StringRef callee_name = call_op.getCalleeAttr().getValue(); if (!callee_name.starts_with(kQuantizedFuncPrefix)) { return std::nullopt; // `call_op` is not a quantized function call. } absl::StatusOr<Method> method = GetQuantizationMethod(call_op); if (!method.ok()) { call_op->emitError() << "Failed to get quantization method: "
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 6.8K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/experimental/common/outline_operations.cc
// for the call_op. Operation* last_output = subgraph.partition_ops_.back(); builder.setInsertionPoint(last_output); auto call_op = builder.create<func::CallOp>(last_output->getLoc(), func, subgraph.FuncArguments()); ops_added.call_op = call_op; // FuncOutputs refer to the original `Values` in input module which are now
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 8.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/passes/unwrap_xla_call_module_op.cc
}; void UnwrapXlaCallModuleOp(TF::XlaCallModuleOp call_op, SymbolTable& symbol_table) { // Do not inline lifted quantized functions used for fusing patterns. // TODO - b/310539922: Remove reference to TF/TFL utils. if (call_op->hasAttr(kQuantTraitAttrName)) { return; } auto function_name = call_op->getAttrOfType<FlatSymbolRefAttr>(
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Mar 05 07:39:40 UTC 2024 - 4.8K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/passes/restore_function_name.cc
void RestoreFunctionNameFromXlaCallModuleOp(TF::XlaCallModuleOp& call_op, SymbolTable& symbol_table) { if (!call_op->hasAttr(kOriginalStablehloEntryFunctionAttrName)) { return; } const auto original_function_name = call_op->getAttrOfType<StringAttr>( kOriginalStablehloEntryFunctionAttrName); const auto current_function_name = call_op->getAttrOfType<FlatSymbolRefAttr>(
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Mar 05 08:32:43 UTC 2024 - 3.6K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/passes/convert_tpu_model_to_cpu.cc
using OpRewritePattern<TF::TPUPartitionedCallOp>::OpRewritePattern; private: LogicalResult matchAndRewrite(TF::TPUPartitionedCallOp call_op, PatternRewriter& rewriter) const override { auto f_attr = mlir::dyn_cast<FlatSymbolRefAttr>(call_op.getFAttr()); auto module_op = call_op->getParentOfType<ModuleOp>(); SymbolTable symbol_table(module_op); auto f_name = f_attr.getValue();
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 5.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/passes/lift_quantizable_spots_as_functions_drq.cc
int current_num_elements = mlir::cast<ShapedType>(call_op.getOperand(idx).getType()) .getNumElements(); if (current_num_elements < min_num_elements_for_weights_) { call_op.emitRemark("Quantization is skipped for ") << call_op->getName().getStringRef().str() << " because it has " << current_num_elements
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 8.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/experimental/tac/transforms/cost_model.cc
constexpr float kRequantCost = 2.0; // TODO(renjieliu): Ideally this should consider different kinds of SOCs as // well. // Get total bytes transferred. int64_t GetTransferredTensorBytes(func::CallOp from_graph, func::CallOp to_graph) { int64_t total_size_transferred = 0; for (auto input : to_graph.getOperands()) { Operation* input_op = input.getDefiningOp();
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 7.3K bytes - Viewed (0)