- Sort by: Score
- Results per page: 10
- Languages: All
Results 31 - 40 of 111 for RELU (0.07 sec)
-
tensorflow/compiler/mlir/quantization/tensorflow/passes/quantized_function_library_uniform_quantized.mlir
parameters[ {"quantized_ops": ["${main_op}", "BiasAdd"], "act_func": "internal_requantize_no_activation_fn", "output_type": "!tf_type.qint8"}, {"quantized_ops": ["${main_op}", "BiasAdd", "Relu"], "act_func": "internal_requantize_and_relu_fn", "output_type": "!tf_type.qint8"}, {"quantized_ops": ["${main_op}", "BiasAdd", "Relu6"], "act_func": "internal_requantize_and_relu6_fn", "output_type": "!tf_type.qint8"}, ]
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Aug 29 01:13:58 UTC 2023 - 19.3K bytes - Viewed (0) -
tensorflow/compiler/jit/mark_for_compilation_pass_test.cc
Node* b = ops::UnaryOp("Relu", a, builder.opts().WithName("B")); Node* c = ops::UnaryOp("Relu", b, builder.opts().WithName("C")); Node* d = ops::UnaryOp("UncompilableUnary", c, builder.opts().WithName("D")); Node* e = ops::UnaryOp("Relu", d, builder.opts().WithName("E")); ops::UnaryOp("Relu", e, builder.opts().WithName("F"));
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Feb 14 10:11:10 UTC 2024 - 79.6K bytes - Viewed (0) -
tensorflow/compiler/mlir/tfr/python/op_reg_gen_test.py
@composite.Composite( 'TestNoOp', derived_attrs=['T: numbertype'], outputs=['o1: T']) def _composite_no_op(): pass @Composite( 'TestCompositeOp', inputs=['x: T', 'y: T'], attrs=['act: {"", "relu"}', 'trans: bool = true'], derived_attrs=['T: numbertype'], outputs=['o1: T', 'o2: T']) def _composite_op(x, y, act, trans): return x + act, y + trans class TFRGenTensorTest(test.TestCase):
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Sep 28 21:37:05 UTC 2021 - 2.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/tests/layout_optimization_move_transposes_end.mlir
// CHECK: %[[TANH:[0-9]*]] = "tf.Tanh"(%arg0) {{.*}} tensor<1x4x4x8xf32> // CHECK: %[[RELU:[0-9]*]] = "tf.Relu"(%[[TANH]]) {{.*}} tensor<1x4x4x8xf32> // CHECK: %[[RES_TRANSPOSE:[0-9]*]] = "tf.Transpose"(%[[RELU]], %[[RES_PERM]]) // CHECK: return %[[RES_TRANSPOSE]] %0 = "tf.Const"() {value = dense<[0, 3, 1, 2]> : tensor<4xi32>} : () -> tensor<4xi32>
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Oct 30 06:52:55 UTC 2023 - 9.5K bytes - Viewed (0) -
tensorflow/compiler/jit/tests/keras_imagenet_main_graph_mode.pbtxt
key: "_class" value { list { s: "loc:@bn_conv1_1/AssignMovingAvg_1/AssignSubVariableOp" } } } } node { name: "activation_50/Relu" op: "Relu" input: "bn_conv1_1/FusedBatchNorm" device: "/job:localhost/replica:0/task:0/device:GPU:0" attr { key: "T" value { type: DT_FLOAT } } }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu May 30 02:52:54 UTC 2019 - 1.1M bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/tests/mlir2flatbuffer/svdf_v2.mlir
// CHECK-NEXT: outputs: [ 5 ], // CHECK-NEXT: builtin_options_type: SVDFOptions, // CHECK-NEXT: builtin_options: { // CHECK-NEXT: rank: 2, // CHECK-NEXT: fused_activation_function: RELU // CHECK-NEXT: } // CHECK-NEXT: } ], // CHECK-NEXT: name: "main" // CHECK-NEXT: } ], // CHECK-NEXT: description: "MLIR Converted.", // CHECK-NEXT: buffers: [ { // CHECK-EMPTY:
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Jul 14 16:41:28 UTC 2022 - 3.7K bytes - Viewed (0) -
tensorflow/compiler/mlir/tfr/examples/mnist/mnist_train.py
# output shape: [-1, 28, 28, 32] conv1 = gen_mnist_ops.new_conv2d(x, self.weights['f1'], self.biases['b1'], 1, 1, 1, 1, 'SAME', 'RELU') # Max pooling. The kernel size spec {ksize} also follows the layout of # the data. Here we have a pooling window of 2, and a stride of 2. # output shape: [-1, 14, 14, 32]
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Oct 20 03:05:18 UTC 2021 - 6.5K bytes - Viewed (0) -
tensorflow/cc/gradients/nn_grad_test.cc
RunTest(x, x_init_value, y, shape); } TEST_F(NNGradTest, ReluGrad) { TensorShape shape({5, 2}); auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(shape)); auto y = Relu(scope_, x); // Avoid input values where ReLU gradient is not well defined (around zero). Tensor x_init_value = test::AsTensor<float>( {-0.9f, -0.7f, -0.5f, -0.3f, -0.1f, 0.1f, 0.3f, 0.5f, 0.7f, 0.9f}, {5, 2});
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Mar 22 20:45:22 UTC 2022 - 15K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/tests/mlir2flatbuffer/svdf.mlir
// CHECK-NEXT: outputs: [ 5 ], // CHECK-NEXT: builtin_options_type: SVDFOptions, // CHECK-NEXT: builtin_options: { // CHECK-NEXT: rank: 2, // CHECK-NEXT: fused_activation_function: RELU // CHECK-NEXT: } // CHECK-NEXT: } ], // CHECK-NEXT: name: "main" // CHECK-NEXT: } ], // CHECK-NEXT: description: "MLIR Converted.", // CHECK-NEXT: buffers: [ { // CHECK-EMPTY:
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Jul 14 16:41:28 UTC 2022 - 3.6K bytes - Viewed (0) -
tensorflow/compiler/mlir/tfr/examples/mnist/mnist_ops_test.py
'input_': input_, 'filter_': filter_, 'bias': bias, 'stride_w': 2, 'stride_h': 2, 'dilation_w': 1, 'dilation_h': 1, 'padding': 'SAME', 'act': 'RELU' } self._assertOpAndComposite([input_, filter_, bias], tf.function(gen_mnist_ops.new_conv2d), ops_defs._composite_conv_add_relu, kwargs)
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Sep 28 21:37:05 UTC 2021 - 4K bytes - Viewed (0)