Results 11 - 17 of 17 for _replication_info (0.38 sec)

  1. tensorflow/compiler/mlir/tensorflow/transforms/tf_passes.td

        ... will be replaced by `_replication_info="cluster"` and `_xla_compile_device_type="TPU"`.
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed Jun 12 21:18:05 UTC 2024
    - 99.6K bytes
    - Viewed (0)
  2. tensorflow/compiler/mlir/tensorflow/tests/decompose_resource_ops.mlir

      "tf.ResourceApplyFtrl"(%var, %accum, %linear, %grad, %lr, %l1, %l2, %lr_power) {_xla_compile_device_type = "TPU", _replication_info = "cluster_train_function", device = "", multiply_linear_by_lr = false, use_locking = true} : (tensor<*x!tf_type.resource>, tensor<*x!tf_type.resource>, tensor<*x!tf_type.resource>, tensor<*xf32>, tensor<*xf32>, tensor<*xf32>, tensor<*xf32>,...
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed May 22 19:47:48 UTC 2024
    - 51.3K bytes
    - Viewed (0)
  3. tensorflow/compiler/mlir/tensorflow/transforms/passes.h

    std::unique_ptr<OperationPass<func::FuncOp>>
    CreateTPUPartitionedOpConversionPass();
    
    std::unique_ptr<OperationPass<ModuleOp>> CreateTPUValidateInputsPass();
    
    // Creates a pass that cleans up `_replication_info` attribute on operations
    // that are inside a cluster.
    std::unique_ptr<OperationPass<ModuleOp>>
    CreateTPUClusterCleanupAttributesPass();
    
    // Creates a pass that removes Identity/IdentityN ops from a cluster.
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed Jun 12 21:18:05 UTC 2024
    - 31.8K bytes
    - Viewed (0)
  4. cmd/data-usage.go

    			cfg, _ := getReplicationConfig(GlobalContext, bucket)
    			if cfg != nil && cfg.RoleArn != "" {
    				if dataUsageInfo.ReplicationInfo == nil {
    					dataUsageInfo.ReplicationInfo = make(map[string]BucketTargetUsageInfo)
    				}
    				dataUsageInfo.ReplicationInfo[cfg.RoleArn] = BucketTargetUsageInfo{
    					ReplicationFailedSize:   bui.ReplicationFailedSizeV1,
    					ReplicationFailedCount:  bui.ReplicationFailedCountV1,
    Registered: Sun Jun 16 00:44:34 UTC 2024
    - Last Modified: Thu May 09 00:51:34 UTC 2024
    - 5.6K bytes
    - Viewed (0)
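
The guard in the `cmd/data-usage.go` snippet above exists because the `ReplicationInfo` map is allocated only once a bucket's replication config with a non-empty `RoleArn` is actually seen. A minimal standalone sketch of that lazy-initialization pattern, using trimmed stand-in types rather than MinIO's real `DataUsageInfo` and `BucketTargetUsageInfo`:

```go
package main

import "fmt"

// Simplified stand-ins for MinIO's DataUsageInfo / BucketTargetUsageInfo;
// only the fields visible in the snippet are kept, for illustration.
type bucketTargetUsage struct {
	ReplicationFailedSize  uint64
	ReplicationFailedCount uint64
}

type dataUsage struct {
	ReplicationInfo map[string]bucketTargetUsage // keyed by replication target ARN
}

func main() {
	var du dataUsage // ReplicationInfo starts out nil

	roleArn := "arn:minio:replication::us-east-1:bucket" // example ARN, not a real config value

	// Writing to a nil map panics, so the map is allocated lazily before the
	// first per-ARN entry, mirroring the guard in the snippet above.
	if du.ReplicationInfo == nil {
		du.ReplicationInfo = make(map[string]bucketTargetUsage)
	}
	du.ReplicationInfo[roleArn] = bucketTargetUsage{
		ReplicationFailedSize:  1 << 20,
		ReplicationFailedCount: 2,
	}

	fmt.Printf("%+v\n", du.ReplicationInfo)
}
```
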
  5. cmd/data-usage-utils.go

    	ReplicaSize             uint64                           `json:"objectReplicaTotalSize"`
    	ReplicaCount            uint64                           `json:"objectReplicaCount"`
    	ReplicationInfo         map[string]BucketTargetUsageInfo `json:"objectsReplicationInfo"`
    }
    
    // DataUsageInfo represents data usage stats of the underlying Object API
    type DataUsageInfo struct {
    Registered: Sun Jun 16 00:44:34 UTC 2024
    - Last Modified: Sun Mar 10 09:15:15 UTC 2024
    - 6.4K bytes
    - Viewed (0)
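
The JSON tags in the `cmd/data-usage-utils.go` snippet determine how these replication totals appear in data-usage responses. A hedged sketch of the resulting wire shape, with stand-in structs carrying only the three fields shown above (the real `BucketTargetUsageInfo` has more fields and its own tags, which are not reproduced here):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// targetUsage is a stand-in for BucketTargetUsageInfo; its real fields and
// JSON tags are not shown in the snippet, so one illustrative field is used.
type targetUsage struct {
	ReplicatedSize uint64
}

// usageSummary mirrors the three fields and JSON tags visible in the snippet.
type usageSummary struct {
	ReplicaSize     uint64                 `json:"objectReplicaTotalSize"`
	ReplicaCount    uint64                 `json:"objectReplicaCount"`
	ReplicationInfo map[string]targetUsage `json:"objectsReplicationInfo"`
}

func main() {
	s := usageSummary{
		ReplicaSize:  4096,
		ReplicaCount: 3,
		ReplicationInfo: map[string]targetUsage{
			// Keyed by target ARN, so each replication target gets its own entry.
			"arn:minio:replication::us-east-1:bucket": {ReplicatedSize: 4096},
		},
	}
	out, err := json.MarshalIndent(s, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```
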
  6. tensorflow/compiler/mlir/tf2xla/internal/mlir_bridge_pass_util.cc

      auto predicate = [](const Graph& graph) {
        for (const Node* node : graph.nodes()) {
          // _tpu_replicate is used in replicated TPU graphs. It will be converted
          // to _replication_info and _xla_compile_device_type in phase 1 pipelines.
          if (node->attrs().FindByString(std::string(kTpuReplicateAttr))) {
            return true;
          }
        }
        return false;
      };
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue May 07 12:22:33 UTC 2024
    - 8.9K bytes
    - Viewed (0)
  7. cmd/data-usage-cache.go

    			bui.ReplicaSize = flat.ReplicationStats.ReplicaSize
    			bui.ReplicaCount = flat.ReplicationStats.ReplicaCount
    
    			bui.ReplicationInfo = make(map[string]BucketTargetUsageInfo, len(flat.ReplicationStats.Targets))
    			for arn, stat := range flat.ReplicationStats.Targets {
    				bui.ReplicationInfo[arn] = BucketTargetUsageInfo{
    					ReplicationPendingSize:  stat.PendingSize,
    					ReplicatedSize:          stat.ReplicatedSize,
    Registered: Sun Jun 16 00:44:34 UTC 2024
    - Last Modified: Fri May 10 14:49:50 UTC 2024
    - 42.8K bytes
    - Viewed (0)
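
The `cmd/data-usage-cache.go` snippet builds the same ARN-keyed map by flattening the scanner's per-target replication stats. A minimal sketch of that conversion loop, with hypothetical trimmed types standing in for `ReplicationStats.Targets` entries and `BucketTargetUsageInfo`:

```go
package main

import "fmt"

// Hypothetical trimmed stand-ins for the scanner-side stat and the API-side
// usage type; only the two fields visible in the snippet are carried over.
type targetStat struct {
	PendingSize    uint64
	ReplicatedSize uint64
}

type targetUsage struct {
	ReplicationPendingSize uint64
	ReplicatedSize         uint64
}

// toUsage converts per-target stats into the ARN-keyed usage map, pre-sizing
// the map to the number of targets as the snippet does.
func toUsage(targets map[string]targetStat) map[string]targetUsage {
	out := make(map[string]targetUsage, len(targets))
	for arn, stat := range targets {
		out[arn] = targetUsage{
			ReplicationPendingSize: stat.PendingSize,
			ReplicatedSize:         stat.ReplicatedSize,
		}
	}
	return out
}

func main() {
	stats := map[string]targetStat{
		"arn:minio:replication::us-east-1:bucket": {PendingSize: 512, ReplicatedSize: 8192},
	}
	fmt.Printf("%+v\n", toUsage(stats))
}
```
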