- Sort: Score
- Results per page: 10
- Languages: All
Results 31 - 32 of 32 for Selu (0.02 sec)
-
tensorflow/compiler/mlir/lite/flatbuffer_operator.cc
llvm::StringRef str, flatbuffers::FlatBufferBuilder* builder) { return llvm::StringSwitch<tflite::ActivationFunctionType>(str) .Case("NONE", tflite::ActivationFunctionType_NONE) .Case("RELU", tflite::ActivationFunctionType_RELU) .Case("RELU_N1_TO_1", tflite::ActivationFunctionType_RELU_N1_TO_1) .Case("RELU6", tflite::ActivationFunctionType_RELU6) .Case("TANH", tflite::ActivationFunctionType_TANH)
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue May 21 18:21:50 UTC 2024 - 38K bytes - Viewed (0) -
tensorflow/compiler/mlir/tfr/python/tfr_gen_test.py
y = _tfr_quant_raw_data(x) s, z = _tfr_quant_qparam(x) s = _tfr_quant_scale_factor(1.0, [s, s]) s = _tfr_quant_scale_factor(1.0, [s]) y = math_ops.Sub(y, z) qmin, qmax = _tfr_quant_act_range('RELU', 1.0, 0) (qmin, qmax) # pylint: disable=pointless-statement d = _tfr_quant_rescale(y, s, 0) e = math_ops.Cast(x=d, DstT=dtypes.int16) f = math_ops.Cast(x=e, DstT=dtypes.int8) return f
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Oct 13 16:33:28 UTC 2021 - 28.8K bytes - Viewed (0)