- Sort by: Score
- Results per page: 10
- Languages: All
Results 1 - 6 of 6 for activation_fn (0.18 sec)
-
tensorflow/compiler/mlir/quantization/tensorflow/python/integration_test/quantize_model_test.py
'activation_fn': nn_ops.relu, 'has_bias': True, }, ) def test_conv2d_ptq_model_whole_model_verify(self, activation_fn, has_bias): input_shape = [None, None, None, 3] filter_shape = [2, 3, 3, 2] model = self._create_conv2d_model( input_shape, filter_shape, activation_fn=activation_fn, has_bias=has_bias, )
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 17 03:36:50 UTC 2024 - 235.6K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/python/integration_test/quantize_model_test.py
# If present the last op before return should be stablehlo.clamp for relu6 # and stablehlo.maximum for relu. if activation_fn is nn_ops.relu6: self.assertRegex(module_str, r'stablehlo.clamp.*\n.*return') elif activation_fn is nn_ops.relu: self.assertRegex(module_str, r'stablehlo.maximum.*\n.*return') else: # Check activation functions are implicit.
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue May 14 06:31:57 UTC 2024 - 51.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/python/integration_test/quantize_model_test_base.py
""" out = math_ops.matmul(input_tensor, self.filters, name='sample/matmul') if bias_fn is not None: out = bias_fn(out, self.bias) if activation_fn is not None: out = activation_fn(out) return {'output': out} model = MatmulModel(weight_shape) saved_model_save.save( model, saved_model_path,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue May 14 06:31:57 UTC 2024 - 18.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/python/integration_test/quantize_model_test_base.py
) if activation_fn is not None: out = activation_fn(out) return {'output': out} return DepthwiseConvModel() def _create_conv2d_model( self, input_shape: Sequence[int], filter_shape: Sequence[int], has_bias: bool = False, has_batch_norm: bool = False, activation_fn: Optional[ops.Operation] = None,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Mar 21 08:51:46 UTC 2024 - 51.2K bytes - Viewed (0) -
tensorflow/compiler/jit/tests/keras_imagenet_main_graph_mode.pbtxt
key: "use_nesterov" value { b: false } } } node { name: "training/SGD/gradients/activation_50/Relu_grad/ReluGrad" op: "ReluGrad" input: "training/SGD/gradients/max_pooling2d_1/MaxPool_grad/MaxPoolGrad" input: "activation_50/Relu" device: "/job:localhost/replica:0/task:0/device:GPU:0" attr { key: "T" value { type: DT_FLOAT
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu May 30 02:52:54 UTC 2019 - 1.1M bytes - Viewed (0) -
tensorflow/compiler/jit/tests/keras_imagenet_main.pbtxt
value { b: false } } } node { name: "training/LossScaleOptimizer/gradients/activation_50/Relu_grad/ReluGrad" op: "ReluGrad" input: "training/LossScaleOptimizer/gradients/pool1_pad_1/Pad_grad/Slice_1" input: "activation_50/Relu" device: "/job:localhost/replica:0/task:0/device:GPU:0" attr { key: "T" value { type: DT_HALF
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu May 30 02:52:54 UTC 2019 - 1.3M bytes - Viewed (0)