- Sort Score
- Results per page: 10
- Languages All
Results 31 - 40 of 157 for conservatively (0.3 sec)
-
tensorflow/compiler/mlir/lite/transforms/legalize_tf.cc
!padding_attr.isSplat() || !padding_attr.getSplatValue<APInt>().isZero()) { return false; } } else { // If the padding value is neither float nor int, conservatively assume it // contains nonzeros. return false; } rewriter->replaceOpWithNewOp<MatrixDiagOp>(op, output_type, input); return true; }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon May 20 20:06:54 UTC 2024 - 45.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/ir/tf_ops_n_z.cc
&effects) { effects.reserve(2 * getArgs().size() + 1); effects.emplace_back(MemoryEffects::Write::get(), ResourceEffects::TPUExecute::get()); // Conservatively mark resource handles as read and write, as without // analyzing TPUCompile, there is not sufficient information to determine // effects on resources. For the MLIR bridge, this op will never be
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu May 09 22:07:10 UTC 2024 - 170.8K bytes - Viewed (0) -
src/net/http/transport.go
// ForceAttemptHTTP2 controls whether HTTP/2 is enabled when a non-zero // Dial, DialTLS, or DialContext func or TLSClientConfig is provided. // By default, use of any of those fields conservatively disables HTTP/2. // To use a custom dialer or TLS config and still attempt HTTP/2 // upgrades, set this to true. ForceAttemptHTTP2 bool } func (t *Transport) writeBufferSize() int {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Jun 06 21:59:21 UTC 2024 - 91K bytes - Viewed (0) -
tensorflow/compiler/jit/deadness_analysis.cc
// symbolic predicate to each merge on the first iteration. We still use // symbolic predicates for merges for which we can't pattern match on the // backedge predicate. This is conservatively correct. namespace tensorflow { namespace { using tsl::StatusOr; // Represents a logical predicate, used as described in the algorithm overview // above. class Predicate { public:
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Mar 12 06:33:33 UTC 2024 - 60.4K bytes - Viewed (0) -
src/runtime/malloc.go
// freeIndexForScan now so x is seen by the GC // (including conservative scan) as an allocated object. // While this pointer can't escape into user code as a // _live_ pointer until we return, conservative scanning // may find a dead pointer that happens to point into this // object. Delaying this update until now ensures that // conservative scanning considers this pointer dead until // this point.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 17:58:53 UTC 2024 - 59.6K bytes - Viewed (0) -
src/cmd/link/internal/arm64/asm.go
fallthrough case objabi.R_CALLARM64: var t int64 // ldr.SymValue(rs) == 0 indicates a cross-package jump to a function that is not yet // laid out. Conservatively use a trampoline. This should be rare, as we lay out packages // in dependency order. if ldr.SymValue(rs) != 0 { t = ldr.SymValue(rs) + r.Add() - (ldr.SymValue(s) + int64(r.Off())) }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Jan 30 20:09:45 UTC 2024 - 47K bytes - Viewed (0) -
src/runtime/mgcscavenge.go
// due to clock bugs. // // In this case, just assume scavenging takes 10 µs per regular physical page // (determined empirically), and conservatively ignore the impact of huge pages // on timing. const approxWorkedNSPerPhysicalPage = 10e3 if duration == 0 { worked += approxWorkedNSPerPhysicalPage * float64(r/physPageSize) } else {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 08 17:48:45 UTC 2024 - 52.3K bytes - Viewed (0) -
src/runtime/mgc.go
// ensures all spans are swept while the world is stopped. concurrentSweep = true // debugScanConservative enables debug logging for stack // frames that are scanned conservatively. debugScanConservative = false // sweepMinHeapDistance is a lower bound on the heap distance // (in bytes) reserved for concurrent sweeping between GC // cycles. sweepMinHeapDistance = 1024 * 1024 )
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 16:25:21 UTC 2024 - 62K bytes - Viewed (0) -
src/runtime/mgcpacer.go
// goroutine stack space (i.e. what is actually scanned) because used // goroutine stack space is much harder to measure cheaply. By using // allocated space, we make an overestimate; this is OK, it's better // to conservatively overcount than undercount. maxStackScan atomic.Uint64 // globalsScan is the total amount of global variable space // that is scannable. globalsScan atomic.Uint64
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 55.4K bytes - Viewed (0) -
src/runtime/asm_arm64.s
TEXT runtime·debugCallV2<ABIInternal>(SB),NOSPLIT|NOFRAME,$0-0 STP (R29, R30), -280(RSP) SUB $272, RSP, RSP SUB $8, RSP, R29 // Save all registers that may contain pointers so they can be // conservatively scanned. // // We can't do anything that might clobber any of these // registers before this. STP (R27, g), (30*8)(RSP) STP (R25, R26), (28*8)(RSP) STP (R23, R24), (26*8)(RSP)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat May 11 20:38:24 UTC 2024 - 43.4K bytes - Viewed (0)