Search Options

Results per page
Sort
Preferred Languages
Advanced

Results 1 - 10 of 203 for activations (0.18 sec)

  1. maven-core/src/main/java/org/apache/maven/execution/ProjectActivation.java

            final ActivationSettings settings = ActivationSettings.of(active, optional);
            this.activations.add(new ProjectActivationSettings(selector, settings));
        }
    
        private Stream<ProjectActivationSettings> getProjects(final Predicate<ActivationSettings> predicate) {
            return this.activations.stream().filter(activation -> predicate.test(activation.activationSettings));
        }
    
    Registered: Wed Jun 12 09:55:16 UTC 2024
    - Last Modified: Mon Dec 26 15:12:32 UTC 2022
    - 7K bytes
    - Viewed (0)
  2. maven-core/src/main/java/org/apache/maven/execution/ProfileActivation.java

            getInactiveProfiles().forEach(this.activations::remove);
            inactiveProfileIds.forEach(this::deactivateOptionalProfile);
        }
    
        /**
         * Mark a profile as required and activated.
         * @param id The identifier of the profile.
         */
        public void activateRequiredProfile(String id) {
            this.activations.put(id, ActivationSettings.ACTIVATION_REQUIRED);
        }
    
        /**
    Registered: Wed Jun 12 09:55:16 UTC 2024
    - Last Modified: Mon Dec 26 15:12:32 UTC 2022
    - 5.6K bytes
    - Viewed (0)
  3. tensorflow/compiler/mlir/tfr/resources/decomposition_lib.mlir

    //
    // REGISTER_OP("Relu")
    //     .Input("features: T")
    //     .Output("activations: T")
    //     .Attr("T: {realnumbertype, qint8}")
    // T is a derived attribute.
    tfr.func @tf__relu_(!tfr.tensor<T>) -> !tfr.tensor<T> attributes{T}
    
    
    // Translated from:
    //
    // REGISTER_OP("Relu6")
    //     .Input("features: T")
    //     .Output("activations: T")
    //     .Attr("T: {realnumbertype}")
    // T is a derived attribute.
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed Oct 13 16:33:28 UTC 2021
    - 4.2K bytes
    - Viewed (0)
  4. tensorflow/compiler/mlir/tensorflow/tests/rewrite_tpu_embedding_ops.mlir

    }
    
    // CHECK-LABEL: func @recv_send_ops
    func.func @recv_send_ops() -> () {
      // CHECK: %[[DATA:.*]] = "tf.XlaRecvTPUEmbeddingDeduplicationData"()
      // CHECK: %[[ACTIVATIONS:.*]] = "tf.XlaRecvTPUEmbeddingActivations"(%[[DATA]])
      // CHECK: "tf.XlaSendTPUEmbeddingGradients"(%[[ACTIVATIONS]], %[[DATA]])
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Mon Oct 30 06:52:55 UTC 2023
    - 4.2K bytes
    - Viewed (0)
  5. tensorflow/c/experimental/gradients/nn_grad.cc

        AbstractTensorHandle* upstream_grad = grad_outputs[0];
        AbstractTensorHandle* activations = forward_outputs_[0];
    
        // Calculate Grad
        std::string name = "relu_grad";
        TF_RETURN_IF_ERROR(ReluGrad(ctx, upstream_grad, activations,
                                    &grad_inputs[0], name.c_str()));
        return absl::OkStatus();
      }
      ~ReluGradientFunction() override {
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Apr 09 06:38:45 UTC 2024
    - 5.7K bytes
    - Viewed (0)
  6. tensorflow/c/experimental/ops/nn_ops.cc

    //   Example usage:
    //   >>> tf.nn.relu([-2., 0., 3.]).numpy()
    //   array([0., 0., 3.], dtype=float32)
    Status Relu(AbstractContext* ctx, AbstractTensorHandle* const features,
                AbstractTensorHandle** activations, const char* name,
                const char* raw_device_name) {
      AbstractOperationPtr op_ptr(ctx->CreateOperation());
      TF_RETURN_IF_ERROR(op_ptr->Reset("Relu", raw_device_name));
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue May 10 19:11:36 UTC 2022
    - 5.9K bytes
    - Viewed (0)
  7. maven-model-builder/src/main/java/org/apache/maven/model/building/DefaultModelBuilder.java

                    activation = activation.clone();
                }
    
                activations.put(profile.getId(), activation);
            }
    
            return activations;
        }
    
        private void injectProfileActivations(Model model, Map<String, Activation> activations) {
            for (Profile profile : model.getProfiles()) {
                Activation activation = profile.getActivation();
    
                if (activation == null) {
    Registered: Wed Jun 12 09:55:16 UTC 2024
    - Last Modified: Tue May 21 09:54:32 UTC 2024
    - 82.9K bytes
    - Viewed (0)
  8. tensorflow/c/experimental/ops/nn_ops.h

                    const char* raw_device_name = nullptr);
    
    // Computes rectified linear: `max(features, 0)`.
    Status Relu(AbstractContext* ctx, AbstractTensorHandle* const features,
                AbstractTensorHandle** activations, const char* name = nullptr,
                const char* raw_device_name = nullptr);
    
    // Adds `bias` to `value`.
    Status BiasAdd(AbstractContext* ctx, AbstractTensorHandle* const value,
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue May 10 19:11:36 UTC 2022
    - 2.6K bytes
    - Viewed (0)
  9. tensorflow/compiler/mlir/lite/transforms/prepare_quantize_dynamic_range.cc

    namespace mlir {
    namespace TFL {
    
    namespace {
    #define GEN_PASS_DEF_PREPAREDYNAMICRANGEQUANTIZEPASS
    #include "tensorflow/compiler/mlir/lite/transforms/passes.h.inc"
    
    // A boolean attribute used to describe whether input activations need to be
    // asymmetrically quantized.
    constexpr char kAsymmetricQuantizeInputsAttr[] = "asymmetric_quantize_inputs";
    
    using QuantizationUnits = llvm::SetVector<std::pair<Operation*, int>>;
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 20.8K bytes
    - Viewed (0)
  10. tensorflow/compiler/mlir/lite/transforms/default_quant_params.cc

    #include "tensorflow/compiler/mlir/quantization/common/quantization_lib/quantization_utils.h"
    
    //===----------------------------------------------------------------------===//
    // The Pass to add default quantization parameters for the activations which
    // don't have quantization information. These default parameters are usually
    // not from real measurement, so this pass is only for test purpose.
    
    namespace mlir {
    namespace TFL {
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 9.4K bytes
    - Viewed (0)
Back to top