Results 21 - 30 of 45 for _backprop (0.42 sec)

  1. tensorflow/c/eager/tape.h

    // any gradients to be computed).
    //
    // Finally, we start a backprop stack with a set of tape entries for which we
    // have all gradients available. This set usually is a subset of the set of
    // targets (not all since targets which have outputs in the tape will not have
    // gradients available initially).
    //
    // Then we repeatedly pop an entry from the stack, run its backprop, and update
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Apr 02 12:40:29 UTC 2024
    - 47.2K bytes
    - Viewed (0)
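The tape.h comment above describes the worklist scheme in prose. As a rough illustration only, here is a minimal standalone C++ sketch of that stack-driven loop; the TapeEntry/RunBackprop names and the scalar-gradient simplification are mine, not the real tape.h API, and the real tape additionally tracks how many pending uses each tensor has before declaring an entry ready.

    #include <cstdint>
    #include <functional>
    #include <stack>
    #include <unordered_map>
    #include <vector>

    struct TapeEntry {
      std::vector<int64_t> inputs;   // tensor ids consumed by the forward op
      std::vector<int64_t> outputs;  // tensor ids produced by the forward op
      // Maps gradients w.r.t. the outputs to gradients w.r.t. the inputs.
      std::function<std::vector<float>(const std::vector<float>&)> backward;
    };

    std::unordered_map<int64_t, float> RunBackprop(
        const std::vector<TapeEntry>& tape,
        std::unordered_map<int64_t, float> grads) {  // seeded with target grads
      // Remember which entry produced each tensor.
      std::unordered_map<int64_t, std::size_t> producer;
      for (std::size_t i = 0; i < tape.size(); ++i)
        for (int64_t t : tape[i].outputs) producer[t] = i;

      // Count how many outputs of each entry still lack a gradient; entries
      // with none missing form the initial stack, as the comment describes.
      std::vector<std::size_t> missing(tape.size(), 0);
      std::stack<std::size_t> ready;
      for (std::size_t i = 0; i < tape.size(); ++i) {
        for (int64_t t : tape[i].outputs)
          if (!grads.count(t)) ++missing[i];
        if (missing[i] == 0) ready.push(i);
      }

      // Repeatedly pop an entry, run its backprop, and update readiness.
      while (!ready.empty()) {
        const TapeEntry& e = tape[ready.top()];
        ready.pop();
        std::vector<float> out_grads;
        for (int64_t t : e.outputs) out_grads.push_back(grads[t]);
        const std::vector<float> in_grads = e.backward(out_grads);
        for (std::size_t k = 0; k < e.inputs.size(); ++k) {
          const int64_t t = e.inputs[k];
          const bool first_grad = grads.count(t) == 0;
          grads[t] += in_grads[k];  // accumulate; starts at 0 on first use
          auto it = producer.find(t);
          if (first_grad && it != producer.end() && --missing[it->second] == 0)
            ready.push(it->second);
        }
      }
      return grads;
    }
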
  2. tensorflow/compiler/mlir/tf2xla/tests/legalize-tf-with-tf2xla-hlo-importer.mlir

        // CHECK: %[[offset_backprop:.*]] = mhlo.convert %[[red2]] : tensor<8xf32>
    
        // CHECK: %[[x_backprop:.*]] = mhlo.convert %[[mul3]] : tensor<8x8x8x8xf32>
        // CHECK: return %[[x_backprop]] : tensor<8x8x8x8xf32>
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Sat Apr 06 15:32:52 UTC 2024
    - 38.6K bytes
    - Viewed (0)
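These CHECK lines appear to come from a FusedBatchNormGrad lowering, where offset_backprop is simply the incoming gradient reduced over every non-feature dimension, which is why a tensor<8x8x8x8xf32> gradient collapses to a tensor<8xf32>. Below is a small self-contained sketch of that reduction, assuming NHWC layout with the feature (channel) dimension last; the OffsetBackprop helper name is hypothetical, not part of the test.

    #include <array>
    #include <cstddef>
    #include <vector>

    // y_backprop is a flattened NHWC tensor of size N*H*W*C.
    std::vector<float> OffsetBackprop(const std::vector<float>& y_backprop,
                                      std::array<std::size_t, 4> shape /* {N,H,W,C} */) {
      const std::size_t c = shape[3];
      std::vector<float> offset_backprop(c, 0.0f);
      // In row-major NHWC, index % C is the channel, so this sums over N, H, W.
      for (std::size_t i = 0; i < y_backprop.size(); ++i)
        offset_backprop[i % c] += y_backprop[i];
      return offset_backprop;
    }
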
  3. samples/bookinfo/src/productpage/static/tailwind/tailwind.css

    s":" ","--tw-backdrop-contrast":" ","--tw-backdrop-grayscale":" ","--tw-backdrop-hue-rotate":" ","--tw-backdrop-invert":" ","--tw-backdrop-opacity":" ","--tw-backdrop-saturate":" ","--tw-backdrop-sepia":" "}),e({".backdrop-filter":{"@defaults backdrop-filter":{},"backdrop-filter":Fe},".backdrop-filter-none":{"backdrop-filter":"none"}})},transitionProperty:({matchUtilities:i,theme:e})=>{let t=e("transitionTimingFunction.DEFAULT"),r=e("transitionDuration.DEFAULT");i({transition:n=>({"transition-pr...
    Registered: Fri Jun 14 15:00:06 UTC 2024
    - Last Modified: Tue May 28 14:48:01 UTC 2024
    - 357.1K bytes
    - Viewed (1)
  4. tensorflow/cc/gradients/nn_grad_test.cc

      auto y =
          tensorflow::ops::SoftmaxCrossEntropyWithLogits(scope_, logits, labels);
      // Note the reversal of the backprop and loss orders. Issue #18734 has been
      // opened for this.
      RunTest({logits, labels}, {logits_shape, logits_shape}, {y.backprop, y.loss},
              {logits_shape, loss_shape});
    }
    
    TEST_F(NNGradTest, LogSoftmaxGrad) {
      TensorShape shape({5, 3});
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Mar 22 20:45:22 UTC 2022
    - 15K bytes
    - Viewed (0)
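The test above differentiates SoftmaxCrossEntropyWithLogits, whose backprop output carries the per-example gradient of the loss with respect to the logits. As a reminder of what that gradient looks like, here is a standalone numeric sketch (mine, not part of the test) of the usual closed form, softmax(logits) - labels, for a single row.

    #include <algorithm>
    #include <cmath>
    #include <cstddef>
    #include <vector>

    // `labels` is a probability distribution over classes for one example.
    std::vector<float> SoftmaxCrossEntropyBackprop(const std::vector<float>& logits,
                                                   const std::vector<float>& labels) {
      float max_logit = logits[0];
      for (float v : logits) max_logit = std::max(max_logit, v);
      std::vector<float> grad(logits.size());
      float sum = 0.0f;
      for (std::size_t i = 0; i < logits.size(); ++i) {
        grad[i] = std::exp(logits[i] - max_logit);  // unnormalized softmax
        sum += grad[i];
      }
      for (std::size_t i = 0; i < logits.size(); ++i)
        grad[i] = grad[i] / sum - labels[i];        // softmax(logits) - labels
      return grad;
    }
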
  5. tensorflow/compiler/mlir/lite/stablehlo/transforms/fuse_convolution_pass.cc

          });
        }
        filter_value = filter.getValue();
        mul_value = multiplier.getValue();
        // In MHLO, Conv filter is in HWIO format, Depthwise conv filter is in HW1O
        // format and backprop input conv filter is in HWOI format.
        // Only fuses multiplier if all dimensions other than the out channel
        // dimension are equal to 1.
        if (!TFL::IsDimensionsDegenerateExceptLastOne(
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Feb 22 22:21:19 UTC 2024
    - 8.3K bytes
    - Viewed (0)
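The comment above spells out the fusion precondition: a per-channel multiplier can only be folded into the convolution filter when every dimension other than the out-channel one equals 1. Here is a minimal sketch of that kind of check on a plain shape vector; this is only an illustration, not the real TFL::IsDimensionsDegenerateExceptLastOne helper.

    #include <cstdint>
    #include <vector>

    bool IsDegenerateExceptLastDim(const std::vector<int64_t>& shape) {
      if (shape.empty()) return false;
      for (std::size_t i = 0; i + 1 < shape.size(); ++i)
        if (shape[i] != 1) return false;  // a non-degenerate dim blocks the fusion
      return true;
    }

    // e.g. {1, 1, 1, 64} -> true (fusable), {1, 3, 1, 64} -> false.
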
  6. tensorflow/compiler/mlir/tensorflow/tests/tpu_space_to_depth_pass.mlir

        // CHECK: %[[BACKPROP:.*]] = "tf.Conv2DBackpropFilter"
        // CHECK-SAME: strides = [1, 1, 1, 1]
        // CHECK-SAME: (tensor<2x115x115x12xf32>, tensor<4xi32>, tensor<2x112x112x64xf32>) -> tensor<4x4x12x64xf32>
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Mon Oct 30 06:52:55 UTC 2023
    - 37.4K bytes
    - Viewed (0)
  7. tensorflow/compiler/mlir/tf2xla/tests/legalize-tf.mlir

      // CHECK-NEXT: %[[offset_backprop:.*]] = mhlo.convert %[[red2]] : tensor<8xf32>
    
      // CHECK-NEXT: %[[x_backprop:.*]] = mhlo.convert %[[mul3]] : tensor<8x8x8x8xf32>
      // CHECK-NEXT: return %[[x_backprop]] : tensor<8x8x8x8xf32>
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Mon May 06 18:46:23 UTC 2024
    - 335.5K bytes
    - Viewed (0)
  8. tensorflow/cc/framework/gradients_test.cc

                                            {dx, dy, dz}, &grad_outputs));
        }
      }
      CompareTestAndExpectedGraphs();
    }
    
    TEST_F(GradientsTest, StackUnstack_StopBackprop) {
      // Tests that backprop stops before calculating gradients for Stack (because
      // only gradients w.r.t the output of Stack are requested).
      for (const bool expected : {false, true}) {
        const Scope& scope = expected ? scope_expected_ : scope_test_;
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Sat Apr 15 15:13:38 UTC 2023
    - 25K bytes
    - Viewed (0)
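The test comment explains why the Stack op gets no gradient nodes: backprop only has to visit ops lying between the requested gradient outputs and the tensors whose gradients were requested. Below is a toy sketch of that pruning idea with my own graph representation; it is not the gradients_test.cc machinery.

    #include <cstddef>
    #include <set>
    #include <vector>

    struct Op { std::vector<std::size_t> inputs; };  // edges point to producing ops

    // Walk backwards from the output ops, stopping at ops whose outputs were
    // requested as gradient targets; anything not visited (e.g. the Stack that
    // produces a requested tensor) never gets a backprop step.
    std::set<std::size_t> OpsNeedingBackprop(const std::vector<Op>& graph,
                                             const std::vector<std::size_t>& output_ops,
                                             const std::set<std::size_t>& requested_ops) {
      std::set<std::size_t> needed;
      std::vector<std::size_t> stack(output_ops.begin(), output_ops.end());
      while (!stack.empty()) {
        const std::size_t op = stack.back();
        stack.pop_back();
        if (requested_ops.count(op)) continue;     // gradient w.r.t. this op's
                                                   // output was requested: stop here
        if (!needed.insert(op).second) continue;   // already visited
        for (std::size_t in : graph[op].inputs) stack.push_back(in);
      }
      return needed;
    }
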
  9. tensorflow/c/while_loop_test.cc

          Add(params_->body_inputs[0], {one, 0}, params_->body_graph, s_);
      ASSERT_EQ(TF_OK, TF_GetCode(s_)) << TF_Message(s_);
      params_->body_outputs[0] = {add, 0};
    
      ExpectOK();
    
      // Create backprop graph
      TF_Output grad_output;
      TF_AddGradients(graph_, outputs_.data(), outputs_.size(), inputs_.data(), 1,
                      nullptr, s_, &grad_output);
      ASSERT_EQ(TF_OK, TF_GetCode(s_)) << TF_Message(s_);
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 11 06:05:56 UTC 2024
    - 15.3K bytes
    - Viewed (0)
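The snippet shows the C API call the test uses to add its backprop graph. For context only, here is a hypothetical self-contained sketch of the same TF_AddGradients call on a tiny graph (y = x * x); the graph construction and names are mine, not taken from while_loop_test.cc.

    #include <cstdio>

    #include "tensorflow/c/c_api.h"

    int main() {
      TF_Status* s = TF_NewStatus();
      TF_Graph* graph = TF_NewGraph();

      // x: float placeholder.
      TF_OperationDescription* xd = TF_NewOperation(graph, "Placeholder", "x");
      TF_SetAttrType(xd, "dtype", TF_FLOAT);
      TF_Operation* x = TF_FinishOperation(xd, s);
      if (TF_GetCode(s) != TF_OK) { std::fprintf(stderr, "%s\n", TF_Message(s)); return 1; }

      // y = x * x
      TF_OperationDescription* yd = TF_NewOperation(graph, "Mul", "y");
      TF_Output x_out{x, 0};
      TF_AddInput(yd, x_out);
      TF_AddInput(yd, x_out);
      TF_Operation* y = TF_FinishOperation(yd, s);
      if (TF_GetCode(s) != TF_OK) { std::fprintf(stderr, "%s\n", TF_Message(s)); return 1; }

      // Add the backprop graph for dy/dx; passing nullptr uses default
      // initial gradients of ones, as in the test.
      TF_Output y_out{y, 0};
      TF_Output grad_output;
      TF_AddGradients(graph, &y_out, 1, &x_out, 1, /*dx=*/nullptr, s, &grad_output);
      if (TF_GetCode(s) != TF_OK) { std::fprintf(stderr, "%s\n", TF_Message(s)); return 1; }

      TF_DeleteGraph(graph);
      TF_DeleteStatus(s);
      return 0;
    }
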
  10. tensorflow/compiler/mlir/tf2xla/transforms/legalize_tf.cc

          Value scratch2 =
              ApplyReduction(loc, weighted_grad, reduce_dims, &rewriter);
    
          // x_backprop = y_backprop * (scale * scratch1)
          auto scaled_grad =
              rewriter.create<mhlo::MulOp>(loc, op.getScale(), scratch1);
          x_backprop = rewriter.create<mhlo::MulOp>(
              loc, grad,
              Broadcast1DToFeatureDim(loc, act, scaled_grad, feature_dim,
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Jun 11 20:00:43 UTC 2024
    - 291.8K bytes
    - Viewed (0)
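The comment in this snippet gives the formula being lowered: with scratch1 = rsqrt(variance + epsilon), x_backprop = y_backprop * (scale * scratch1). Here is a scalar sketch of that arithmetic, assuming this is the non-training FusedBatchNormGrad path and applied per feature channel; the function name is mine.

    #include <cmath>

    float XBackprop(float y_backprop, float scale, float variance, float epsilon) {
      const float scratch1 = 1.0f / std::sqrt(variance + epsilon);  // rsqrt(var + eps)
      return y_backprop * (scale * scratch1);
    }
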