- Sort Score
- Results per page: 10
- Languages All
Results 1 - 10 of 76 for conservatively (0.2 sec)
-
tensorflow/compiler/mlir/tensorflow/transforms/host_runtime/tpu_merge_variables_with_execute.cc
const llvm::SmallDenseSet<int64_t>& resource_ids, bool previous_unknown_resource_access) { // If we had any unknown resource access before, then we conservatively assume // that `resource` has been accessed before. // If `resource` is an unknown resource, then we conservatively assume that // the same resource has been accessed before. if (previous_unknown_resource_access || resource_analysis_info.IsUnknownResource(resource))
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Feb 29 17:52:11 UTC 2024 - 27K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/analysis/side_effect_analysis.h
// Converts from read/write state that relates ops with the same parallel id // to a set of last accesses for use with other parallel ids. Reads/writes // between parallel ids are conservatively approximated as writes. absl::flat_hash_set<Operation*> GetLastWrites(ResourceId resource_id); // Sets the read/write state for ops within the same parallel id. void SetLastWrites(ResourceId resource_id,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed May 15 09:04:13 UTC 2024 - 14.8K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/analysis/side_effect_analysis.cc
// only self-dependent conservatively, i.e., we do add dependencies // to/from unknown resource types. Currently, we don't have such cases and // there is no indication that we will need to support them in the future. LOG(WARNING) << "Self-dependent-only resource types are treated " "conservatively for value-based side effects."; }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed May 15 09:04:13 UTC 2024 - 41.2K bytes - Viewed (0) -
src/runtime/preempt.go
} //go:generate go run mkpreempt.go // asyncPreempt saves all user registers and calls asyncPreempt2. // // When stack scanning encounters an asyncPreempt frame, it scans that // frame and its parent frame conservatively. // // asyncPreempt is implemented in assembly. func asyncPreempt() //go:nosplit func asyncPreempt2() { gp := getg() gp.asyncSafePoint = true if gp.preemptStop { mcall(preemptPark)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 17 15:41:45 UTC 2024 - 15.1K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/transforms/lift_tflite_flex_ops.cc
// TFLite flatbuffer schema doesn't distinguish scalar tensor shapes // and unranked tensor shapes (i.e. they are both represented as an empty // INT32 list), see b/138865275. MLIR importer conservatively treats them as // unranked tensor types. Here we set them to scalar tensor types when it is // safe. if (auto tensor_array_v3_op = dyn_cast<TF::TensorArrayV3Op>(tf_op)) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 10.9K bytes - Viewed (0) -
src/runtime/mgcstack.go
// of a goroutine. type stackScanState struct { // stack limits stack stack // conservative indicates that the next frame must be scanned conservatively. // This applies only to the innermost frame at an async safe-point. conservative bool // buf contains the set of possible pointers to stack objects. // Organized as a LIFO linked list of buffers.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Aug 21 21:06:52 UTC 2023 - 10.6K bytes - Viewed (0) -
src/cmd/compile/internal/walk/assign.go
// Check for reasons why we may need to compute later expressions // before this assignment happens. if name == nil { // Not a direct assignment to a declared variable. // Conservatively assume any memory access might alias. memWrite = true continue } if name.Class == ir.PPARAMOUT && ir.CurFunc.HasDefer() { // Assignments to a result parameter in a function with defers
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 08 17:09:06 UTC 2024 - 20.3K bytes - Viewed (0) -
tensorflow/compiler/jit/resource_operation_safety_analysis.cc
} const XlaResourceOpInfo* op_info = GetResourceOpInfoForOp(n.type_string()); if (op_info) { *out_resource_op_kind = op_info->kind(); return absl::OkStatus(); } // We conservatively assume that functions will both read and write resource // variables. In the future we may consider doing some form of // inter-procedural analysis. if (MayCallFunction(n, flib_def)) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri Feb 09 11:36:41 UTC 2024 - 11.2K bytes - Viewed (0) -
src/cmd/go/internal/imports/build.go
// // If tags["*"] is true, then ShouldBuild will consider every // build tag except "ignore" to be both true and false for // the purpose of satisfying build tags, in order to estimate // (conservatively) whether a file could ever possibly be used // in any build. func ShouldBuild(content []byte, tags map[string]bool) bool { // Identify leading run of // comments and blank lines, // which must be followed by a blank line.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Mar 30 18:50:57 UTC 2023 - 10.4K bytes - Viewed (0) -
src/runtime/mcache.go
// We assumed earlier that the full span gets allocated. gcController.totalAlloc.Add(slotsUsed * int64(s.elemsize)) if s.sweepgen != sg+1 { // refill conservatively counted unallocated slots in gcController.heapLive. // Undo this. // // If this span was cached before sweep, then gcController.heapLive was totally
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 10K bytes - Viewed (0)