Results 1 - 10 of 72 for getIPs (0.12 sec)

  1. pkg/dns/proto/nds.pb.go

    // Deprecated: Use NameTable_NameInfo.ProtoReflect.Descriptor instead.
    func (*NameTable_NameInfo) Descriptor() ([]byte, []int) {
    	return file_dns_proto_nds_proto_rawDescGZIP(), []int{0, 0}
    }
    
    func (x *NameTable_NameInfo) GetIps() []string {
    	if x != nil {
    		return x.Ips
    	}
    	return nil
    }
    
    func (x *NameTable_NameInfo) GetRegistry() string {
    	if x != nil {
    		return x.Registry
    	}
    	return ""
    }
    
    Registered: Fri Jun 14 15:00:06 UTC 2024
    - Last Modified: Fri May 31 02:27:10 UTC 2024
    - 10.2K bytes
    - Viewed (0)
  2. tensorflow/compiler/mlir/lite/stablehlo/transforms/optimize.cc

        if (!dot.getLhs().getType().hasStaticShape() ||
            !dot.getRhs().getType().hasStaticShape())
          return rewriter.notifyMatchFailure(
              dot, "all dot_general operands must be statically shaped");
        if (dot.getLhs().getType().getElementType() !=
                first_dot.getLhs().getType().getElementType() ||
            dot.getRhs().getType().getElementType() !=
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 26.9K bytes
    - Viewed (0)
  3. tensorflow/compiler/mlir/lite/stablehlo/transforms/fold_broadcast_pass.cc

    }
    
    template <typename Op, typename ElementType = Type, typename ValType,
              typename Convert>
    static Attribute BinaryFolder(Op *op) {
      auto lhs_op = op->getLhs().template getDefiningOp<mhlo::ConstantOp>();
      auto rhs_op = op->getRhs().template getDefiningOp<mhlo::ConstantOp>();
      if (!lhs_op || !rhs_op) return {};
    
      auto lhs = dyn_cast_or_null<DenseElementsAttr>(lhs_op.getValue());
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 10.5K bytes
    - Viewed (0)
  4. tensorflow/compiler/mlir/lite/stablehlo/transforms/legalize_hlo_conversions/dot_general.cc

      return ConvertDot(
          rewriter, dot_op.getLhs(), dot_op.getRhs(), dot_dimension_numbers,
          mlir::cast<ShapedType>(dot_op.getResult().getType()), dot_op.getLoc());
    }
    
    Value ConvertDotGeneralOp(PatternRewriter& rewriter, Operation* old_op) {
      auto dot_general_op = cast<mhlo::DotGeneralOp>(old_op);
      return ConvertDot(
          rewriter, dot_general_op.getLhs(), dot_general_op.getRhs(),
          dot_general_op.getDotDimensionNumbers(),
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 19.2K bytes
    - Viewed (0)
  5. platforms/core-execution/persistent-cache/src/main/java/org/gradle/cache/internal/btree/BTreePersistentIndexedCache.java

                    return block.getPos().compareTo(block1.getPos());
                }
            });
    
            for (int i = 0; i < blocks.size() - 1; i++) {
                Block b1 = blocks.get(i).getBlock();
                Block b2 = blocks.get(i + 1).getBlock();
                if (b1.getPos().getPos() + b1.getSize() > b2.getPos().getPos()) {
    Registered: Wed Jun 12 18:38:38 UTC 2024
    - Last Modified: Fri Sep 22 09:08:47 UTC 2023
    - 26.5K bytes
    - Viewed (0)
  6. tensorflow/compiler/mlir/quantization/stablehlo/passes/defer_activation_transpose.cc

        auto max_op = dyn_cast_or_null<MaxOp>(
            return_op.getOperands().front().getDefiningOp());
        if (!max_op) return false;
    
        return (max_op.getLhs() == block.getArgument(0)) &&
               (max_op.getRhs() == block.getArgument(1));
      }
    
      // Whether `op` has the `padding` attribute (which is optional).
      bool HasPadding(mlir::stablehlo::ReduceWindowOp op) const {
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 11.5K bytes
    - Viewed (0)
  7. tensorflow/compiler/mlir/quantization/common/lift_as_function_call_test.cc

      ASSERT_TRUE(module_op);
    
      func::FuncOp main_fn = FindMainFuncOp(*module_op);
      ASSERT_THAT(main_fn, NotNull());
    
      auto xla_call_module_ops = main_fn.getOps<TF::XlaCallModuleOp>();
      ASSERT_FALSE(xla_call_module_ops.empty());
    
      // Test that `GetQuantizationMethod` returns a valid `Method` corresponding to
      // `"no_quantization {}"`.
      const absl::StatusOr<Method> method =
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Fri May 10 04:07:09 UTC 2024
    - 26.2K bytes
    - Viewed (0)
  8. istioctl/pkg/precheck/precheck.go

    		verificationImpacted = verificationImpacted || checkVerify(dr.Spec.GetTrafficPolicy().GetTls())
    		sniImpacted = sniImpacted || checkSNI(dr.Spec.GetTrafficPolicy().GetTls())
    		for _, pl := range dr.Spec.GetTrafficPolicy().GetPortLevelSettings() {
    			verificationImpacted = verificationImpacted || checkVerify(pl.GetTls())
    			sniImpacted = sniImpacted || checkSNI(pl.GetTls())
    		}
    		for _, ss := range dr.Spec.Subsets {
    Registered: Fri Jun 14 15:00:06 UTC 2024
    - Last Modified: Fri Apr 12 02:57:30 UTC 2024
    - 19.3K bytes
    - Viewed (0)
  9. tensorflow/compiler/mlir/quantization/stablehlo/passes/bridge/convert_tf_quant_ops_to_mhlo.cc

      StringAttr conv_padding = op.getPaddingAttr();
      SmallVector<int64_t> padding_nums;
      ShapedType lhs_shape = mlir::cast<ShapedType>(op.getLhs().getType());
      ShapedType rhs_shape = mlir::cast<ShapedType>(op.getRhs().getType());
    
      // Handle only static shape cases.
      // TODO(b/260284866): Handle dynamic shape cases.
      if (!lhs_shape.hasStaticShape()) {
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Fri May 17 17:58:54 UTC 2024
    - 30.9K bytes
    - Viewed (0)
  10. tensorflow/compiler/mlir/quantization/common/attrs_and_constraints_test.cc

          ParseModuleOpString(kModuleDotGeneralFullyConnected);
      ASSERT_TRUE(module_op);
    
      func::FuncOp main_fn = FindMainFuncOp(*module_op);
      ASSERT_THAT(main_fn, NotNull());
    
      auto dot_general_op = *main_fn.getOps<DotGeneralOp>().begin();
      EXPECT_THAT(IsDotGeneralFullyConnected(dot_general_op), true);
    }
    
    TEST_F(AttrsAndConstraintsTest, IsDotGeneralFullyConnectedReturnsFalse) {
      OwningOpRef<ModuleOp> module_op =
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue May 14 17:10:32 UTC 2024
    - 22.9K bytes
    - Viewed (0)