- Sort by: Score
- Results per page: 10
- Languages All
Results 1 - 2 of 2 for GetInferenceTypeAnnotation (0.16 sec)
-
tensorflow/compiler/mlir/lite/experimental/tac/common/targets.h
return GetCanonicalHardwareName(device.getValue().str()); } // Get inference type attribute from the operation if available. inline std::optional<InferenceType> GetInferenceTypeAnnotation(Operation* op) { auto inference_type = op->getAttrOfType<StringAttr>(kInferenceType); if (inference_type == nullptr) return std::nullopt; llvm::StringRef device_name_str = inference_type.getValue();
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Jun 06 03:08:33 UTC 2023 - 4.7K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/experimental/tac/transforms/get_alternative_subgraph.cc
func.emitError( "cannot find target annotation or unknown device specified for current " "function"); return; } auto current_inference_type = GetInferenceTypeAnnotation(func); if (!current_inference_type.has_value() || current_inference_type == UNKNOWN) { func.emitError( "cannot find inference type annotation or unknown inference type "
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Jun 06 03:08:33 UTC 2023 - 12.3K bytes - Viewed (0)