- Sort: Score
- Results per page: 10
- Languages: All
Results 91 - 100 of 138 for relu6 (0.08 sec)
-
tensorflow/c/experimental/ops/nn_ops.cc
return op_ptr->Execute(absl::MakeSpan(backprops, 1), &num_retvals); } // Op: Relu() // Summary: Computes rectified linear: `max(features, 0)`. // // Description: // See: https://en.wikipedia.org/wiki/Rectifier_(neural_networks) // Example usage: // >>> tf.nn.relu([-2., 0., 3.]).numpy() // array([0., 0., 3.], dtype=float32) Status Relu(AbstractContext* ctx, AbstractTensorHandle* const features,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue May 10 19:11:36 UTC 2022 - 5.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/tests/end2end/conv_2d.pbtxt
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri Jun 28 06:29:38 UTC 2019 - 3.7K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/tests/layout_optimization_move_transposes_begin.mlir
// CHECK: %[[TANH:[0-9]*]] = "tf.Tanh"(%[[ARG_TRANSPOSE]]) {{.*}} tensor<1x8x4x4xf32> // CHECK: %[[RELU:[0-9]*]] = "tf.Relu"(%[[TANH]]) {{.*}} tensor<1x8x4x4xf32> // CHECK: return %[[RELU]] %0 = "tf.Tanh"(%arg0) : (tensor<1x4x4x8xf32>) -> tensor<1x4x4x8xf32> %1 = "tf.Relu"(%0) : (tensor<1x4x4x8xf32>) -> tensor<1x4x4x8xf32> %2 = "tf.Const"() {value = dense<[0, 3, 1, 2]> : tensor<4xi32>} : () -> tensor<4xi32>
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Oct 30 06:52:55 UTC 2023 - 6.3K bytes - Viewed (0) -
tensorflow/c/experimental/ops/nn_ops.h
// Computes rectified linear gradients for a Relu operation. Status ReluGrad(AbstractContext* ctx, AbstractTensorHandle* const gradients, AbstractTensorHandle* const features, AbstractTensorHandle** backprops, const char* name = nullptr, const char* raw_device_name = nullptr); // Computes rectified linear: `max(features, 0)`. Status Relu(AbstractContext* ctx, AbstractTensorHandle* const features,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue May 10 19:11:36 UTC 2022 - 2.6K bytes - Viewed (0) -
tensorflow/c/experimental/gradients/nn_grad_test.cc
using tensorflow::TF_StatusPtr; Status ReluModel(AbstractContext* ctx, absl::Span<AbstractTensorHandle* const> inputs, absl::Span<AbstractTensorHandle*> outputs) { return ops::Relu(ctx, inputs[0], &outputs[0], "Relu"); } Status SparseSoftmaxCrossEntropyWithLogitsModel( AbstractContext* ctx, absl::Span<AbstractTensorHandle* const> inputs, absl::Span<AbstractTensorHandle*> outputs) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Feb 28 13:53:47 UTC 2024 - 8.3K bytes - Viewed (0) -
tensorflow/compiler/jit/tests/keras_imagenet_main.pbtxt
tensor_content: "\001" } } } experimental_debug_info { original_node_names: "bn2a_branch1/cond_1" } } node { name: "activation_50/Relu" op: "Relu" input: "bn_conv1_1/FusedBatchNormV2" device: "/job:localhost/replica:0/task:0/device:GPU:0" attr { key: "T" value { type: DT_HALF } } }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu May 30 02:52:54 UTC 2019 - 1.3M bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/tests/end2end/back2back_fake_quant.pbtxt
key: "T" value { type: DT_FLOAT } } attr { key: "data_format" value { s: "NHWC" } } } node { name: "sequential/quant_dense/Relu" op: "Relu" input: "sequential/quant_dense/BiasAdd" attr { key: "T" value { type: DT_FLOAT } } } node {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Nov 15 19:42:47 UTC 2021 - 25.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/tfr/python/op_reg_gen_test.py
@composite.Composite( 'TestNoOp', derived_attrs=['T: numbertype'], outputs=['o1: T']) def _composite_no_op(): pass @Composite( 'TestCompositeOp', inputs=['x: T', 'y: T'], attrs=['act: {"", "relu"}', 'trans: bool = true'], derived_attrs=['T: numbertype'], outputs=['o1: T', 'o2: T']) def _composite_op(x, y, act, trans): return x + act, y + trans class TFRGenTensorTest(test.TestCase):
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Sep 28 21:37:05 UTC 2021 - 2.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/tests/layout_optimization_move_transposes_end.mlir
// CHECK: %[[TANH:[0-9]*]] = "tf.Tanh"(%arg0) {{.*}} tensor<1x4x4x8xf32> // CHECK: %[[RELU:[0-9]*]] = "tf.Relu"(%[[TANH]]) {{.*}} tensor<1x4x4x8xf32> // CHECK: %[[RES_TRANSPOSE:[0-9]*]] = "tf.Transpose"(%[[RELU]], %[[RES_PERM]]) // CHECK: return %[[RES_TRANSPOSE]] %0 = "tf.Const"() {value = dense<[0, 3, 1, 2]> : tensor<4xi32>} : () -> tensor<4xi32>
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Oct 30 06:52:55 UTC 2023 - 9.5K bytes - Viewed (0) -
tensorflow/compiler/jit/tests/keras_imagenet_main_graph_mode.pbtxt
key: "_class" value { list { s: "loc:@bn_conv1_1/AssignMovingAvg_1/AssignSubVariableOp" } } } } node { name: "activation_50/Relu" op: "Relu" input: "bn_conv1_1/FusedBatchNorm" device: "/job:localhost/replica:0/task:0/device:GPU:0" attr { key: "T" value { type: DT_FLOAT } } }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu May 30 02:52:54 UTC 2019 - 1.1M bytes - Viewed (0)