Results 1 - 10 of 26 for getFAttr (0.13 sec)

  1. tensorflow/compiler/mlir/quantization/tensorflow/passes/convert_tpu_model_to_cpu.cc

     private:
      LogicalResult matchAndRewrite(TF::TPUPartitionedCallOp call_op,
                                    PatternRewriter& rewriter) const override {
        auto f_attr = mlir::dyn_cast<FlatSymbolRefAttr>(call_op.getFAttr());
        auto module_op = call_op->getParentOfType<ModuleOp>();
        SymbolTable symbol_table(module_op);
    
        auto f_name = f_attr.getValue();
        func::FuncOp float_func =
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 5.5K bytes
    - Viewed (0)
  2. tensorflow/compiler/mlir/quantization/tensorflow/passes/propagate_quantize_type.cc

      }
    
      LogicalResult matchAndRewrite(TF::PartitionedCallOp op,
                                    PatternRewriter& rewriter) const override {
        const auto f_attr = mlir::dyn_cast<FlatSymbolRefAttr>(op.getFAttr());
        StringRef function_name = f_attr.getValue();
        if (!function_name.starts_with(kDequantizeFunctionName)) return failure();
    
        llvm::SmallVector<Operation*> users(op->getUsers().begin(),
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 7K bytes
    - Viewed (0)
  3. tensorflow/compiler/mlir/quantization/tensorflow/ops/tf_op_quant_spec.cc

      auto spec = std::make_unique<OpQuantSpec>();
      if (auto call_op = dyn_cast<TF::PartitionedCallOp>(op)) {
        StringRef function_name =
            mlir::cast<FlatSymbolRefAttr>(call_op.getFAttr()).getValue();
        if (!function_name.starts_with("composite_")) {
          return spec;
        }
        if (function_name.contains("depthwise_conv2d")) {
          spec->coeff_op_quant_dim[1] = 3;
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 6.3K bytes
    - Viewed (0)
  4. tensorflow/compiler/mlir/quantization/tensorflow/passes/lift_quantizable_spots_as_functions_drq.cc

                << min_num_elements_for_weights_ << " elements).";
            call_op->removeAttr(kQuantTraitAttrName);
          }
        }
    
        StringRef function_name =
            mlir::cast<FlatSymbolRefAttr>(call_op.getFAttr()).getValue();
        if ((quantization_method_ == tensorflow::quantization::QuantizationMethod::
                                         METHOD_DYNAMIC_RANGE_INT8) &&
            (function_name.contains("batch_matmul") ||
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 8.5K bytes
    - Viewed (0)
  5. tensorflow/compiler/mlir/tfr/examples/mnist/ops_defs.py

          grad, axis=reduction_axes, keepdims=True)
      bias_grad = tf.reshape(updates_grad_reshaped, input_value_shape)
    
      dilations = [1, op.get_attr('dilation_w'), op.get_attr('dilation_h'), 1]
      strides = [1, op.get_attr('stride_w'), op.get_attr('stride_h'), 1]
      padding = op.get_attr('padding')
      shape_0, shape_1 = tf.shape_n([op.inputs[0], op.inputs[1]])
      return [
          tf.compat.v1.nn.conv2d_backprop_input(
              shape_0,
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Aug 31 20:23:51 UTC 2023
    - 6.8K bytes
    - Viewed (0)
  6. tensorflow/compiler/mlir/quantization/tensorflow/calibrator/custom_aggregator_op.cc

        float max_percentile;
        OP_REQUIRES_OK(context, context->GetAttr("calibration_method",
                                                 &calibration_method_value));
        OP_REQUIRES_OK(context, context->GetAttr("num_bins", &num_bins));
        OP_REQUIRES_OK(context,
                       context->GetAttr("min_percentile", &min_percentile));
        OP_REQUIRES_OK(context,
                       context->GetAttr("max_percentile", &max_percentile));
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 01:09:50 UTC 2024
    - 6.2K bytes
    - Viewed (0)
  7. tensorflow/compiler/mlir/quantization/tensorflow/debugging/dump_tensor_op.cc

        string file_name;
        string func_name;
        string node_name;
        OP_REQUIRES_OK(ctx, ctx->GetAttr("log_dir_path", &log_dir_path));
        OP_REQUIRES_OK(ctx, ctx->GetAttr("enabled", &enabled_));
        OP_REQUIRES_OK(ctx, ctx->GetAttr("file_name", &file_name));
        OP_REQUIRES_OK(ctx, ctx->GetAttr("func_name", &func_name));
        OP_REQUIRES_OK(ctx, ctx->GetAttr("node_name", &node_name));
        OP_REQUIRES_OK(ctx, ctx->env()->RecursivelyCreateDir(log_dir_path));
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Feb 22 03:12:17 UTC 2024
    - 4.8K bytes
    - Viewed (0)
  8. tensorflow/compiler/aot/aot_only_var_handle_op.cc

        .SetIsStateful()
        .SetShapeFn([](shape_inference::InferenceContext* c) {
          c->set_output(0, c->Scalar());
          DataType t;
          TF_RETURN_IF_ERROR(c->GetAttr("dtype", &t));
          PartialTensorShape p;
          TF_RETURN_IF_ERROR(c->GetAttr("shape", &p));
          shape_inference::ShapeHandle s;
          TF_RETURN_IF_ERROR(c->MakeShapeFromPartialTensorShape(p, &s));
          c->set_output_handle_shapes_and_types(
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed Feb 28 09:57:04 UTC 2024
    - 3K bytes
    - Viewed (0)
  9. docs/debugging/xattr/main.go

    	"errors"
    	"flag"
    	"fmt"
    	"log"
    	"os"
    
    	"github.com/olekukonko/tablewriter"
    	"github.com/pkg/xattr"
    )
    
    var (
    	path, name string
    	value      uint64
    	set, list  bool
    )
    
    func getxattr(path, name string) (uint64, error) {
    	buf, err := xattr.LGet(path, name)
    	if err != nil {
    		return 0, err
    	}
    
    	return binary.LittleEndian.Uint64(buf[:8]), nil
    }
    
    Registered: Sun Jun 16 00:44:34 UTC 2024
    - Last Modified: Fri Dec 29 23:52:41 UTC 2023
    - 3.2K bytes
    - Viewed (0)
  10. fastapi/types.py

    import types
    from enum import Enum
    from typing import Any, Callable, Dict, Set, Type, TypeVar, Union
    
    from pydantic import BaseModel
    
    DecoratedCallable = TypeVar("DecoratedCallable", bound=Callable[..., Any])
    UnionType = getattr(types, "UnionType", Union)
    ModelNameMap = Dict[Union[Type[BaseModel], Type[Enum]], str]
    Registered: Mon Jun 17 08:32:26 UTC 2024
    - Last Modified: Tue Dec 12 00:29:03 UTC 2023
    - 383 bytes
    - Viewed (0)
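Taken together, results 1-4 show the same idiom: read the `f` attribute of a `TF::PartitionedCallOp` (or `TPUPartitionedCallOp`) via `getFAttr()`, cast it to a `FlatSymbolRefAttr`, and resolve the referenced function through a `SymbolTable`. Below is a minimal sketch of that idiom, assuming recent MLIR headers and the TensorFlow dialect; `LookupCallee` is a hypothetical helper name, not taken from any of the files above.

    // Illustrative sketch only -- the recurring pattern from results 1-4.
    #include "mlir/Dialect/Func/IR/FuncOps.h"
    #include "mlir/IR/BuiltinAttributes.h"
    #include "mlir/IR/BuiltinOps.h"
    #include "mlir/IR/SymbolTable.h"
    #include "tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h"

    // Hypothetical helper: resolve the callee referenced by a PartitionedCallOp's
    // `f` attribute, or return null if the attribute is not a flat symbol ref.
    mlir::func::FuncOp LookupCallee(mlir::TF::PartitionedCallOp call_op) {
      auto f_attr = mlir::dyn_cast<mlir::FlatSymbolRefAttr>(call_op.getFAttr());
      if (!f_attr) return nullptr;

      // Resolve the symbol name against the enclosing module.
      auto module_op = call_op->getParentOfType<mlir::ModuleOp>();
      mlir::SymbolTable symbol_table(module_op);
      return symbol_table.lookup<mlir::func::FuncOp>(f_attr.getValue());
    }

Resolving the callee through a `SymbolTable` (rather than walking the module by hand) keeps a rewrite pattern independent of where the referenced function is declared, which is why the quantization passes above all follow it.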