- Sort Score
- Results per page: 10
- Languages All
Results 91 - 100 of 126 for getc (0.12 sec)
-
src/cmd/compile/internal/typecheck/func.go
} if t.NumResults() == 1 { n.SetType(l.Type().Result(0).Type) if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME { if sym := n.Fun.(*ir.Name).Sym(); types.RuntimeSymName(sym) == "getg" { // Emit code for runtime.getg() directly instead of calling function. // Most such rewrites (for example the similar one for math.Sqrt) should be done in walk,
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Mar 06 15:23:18 UTC 2024 - 21.1K bytes - Viewed (0) -
src/test/java/jcifs/tests/DfsTest.java
private String getTestDfsTargetServer () { String dfsTargetServer = getProperties().get("test.server.dfs"); if ( dfsTargetServer != null ) { return dfsTargetServer; } return getTestServer(); } private String getTestDC () { String dfsTargetServer = getProperties().get("test.domain.dc"); if ( dfsTargetServer != null ) { return dfsTargetServer;
Registered: Wed Jun 12 15:45:55 UTC 2024 - Last Modified: Sun Mar 01 09:46:04 UTC 2020 - 13.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/transforms/tpu_resource_partitioning.cc
continue; const auto inputs = partitioned_input.getInputs(); const bool packed_input = partitioned_input.getIsPacked(); int num_cores_per_replica = partitioned_input.getN(); if (num_cores_per_replica_attr) { num_cores_per_replica = num_cores_per_replica_attr.getInt(); } else if (packed_input) { return partitioned_input->emitOpError()
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Sep 06 19:12:29 UTC 2023 - 11.8K bytes - Viewed (0) -
src/main/java/org/codelibs/fess/app/web/api/admin/stats/ApiAdminStatsAction.java
jvmPoolObj.capacity = p.getTotalCapacity().getBytes(); return jvmPoolObj; }).toArray(n -> new JvmPoolObj[n]); final GarbageCollectors gc = jvmStats.getGc(); jvmObj.gc = Arrays.stream(gc.getCollectors()).map(c -> { final JvmGcObj jvmGcObj = new JvmGcObj(); jvmGcObj.key = c.getName(); jvmGcObj.count = c.getCollectionCount();
Registered: Wed Jun 12 13:08:18 UTC 2024 - Last Modified: Thu Feb 22 01:37:57 UTC 2024 - 12.1K bytes - Viewed (0) -
src/runtime/cgocheck.go
} } s := spanOfUnchecked(uintptr(src)) if s.state.get() == mSpanManual { // There are no heap bits for value stored on the stack. // For a channel receive src might be on the stack of some // other goroutine, so we can't unwind the stack even if // we wanted to. // We can't expand the GC program without extra storage // space we can't easily get. // Fortunately we have the type information.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Apr 09 04:07:57 UTC 2024 - 7.6K bytes - Viewed (0) -
platforms/software/dependency-management/src/test/groovy/org/gradle/api/internal/catalog/LibrariesSourceGeneratorTest.groovy
bundle('myBundle', ['foo', 'bar']) plugin('pl', 'org.plugin') version('1.2') } then: def libs = sources.compile() def foo = libs.foo.get() def bar = libs.bar.get() assert foo.module.group == 'g' assert foo.module.name == 'a' assert foo.versionConstraint.requiredVersion == 'v' assert bar.module.group == 'g2'
Registered: Wed Jun 12 18:38:38 UTC 2024 - Last Modified: Thu Apr 18 08:26:24 UTC 2024 - 19.2K bytes - Viewed (0) -
src/cmd/link/internal/ld/lib.go
pkg := objabi.PathToPrefix(lib.Pkg) eof := f.Offset() + length start := f.Offset() c1 := bgetc(f) c2 := bgetc(f) c3 := bgetc(f) c4 := bgetc(f) f.MustSeek(start, 0) unit := &sym.CompilationUnit{Lib: lib} lib.Units = append(lib.Units, unit) magic := uint32(c1)<<24 | uint32(c2)<<16 | uint32(c3)<<8 | uint32(c4)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue May 21 18:45:27 UTC 2024 - 88.6K bytes - Viewed (0) -
src/runtime/malloc.go
} else if size&1 == 0 { align = 2 } else { align = 1 } } return persistentalloc(size, align, &memstats.other_sys) } if inittrace.active && inittrace.id == getg().goid { // Init functions are executed sequentially in a single goroutine. inittrace.allocs += 1 } } // assistG is the G to charge for this allocation, or nil if
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 17:58:53 UTC 2024 - 59.6K bytes - Viewed (0) -
src/runtime/traceruntime.go
// buffer flushes are rare. Record the lock edge even if it doesn't happen // this time. lockRankMayTraceFlush() // Check if we're already locked. If so, return an invalid traceLocker. if getg().m.trace.seqlock.Load()%2 == 1 { return traceLocker{} } return traceAcquireEnabled() } // ok returns true if the traceLocker is valid (i.e. tracing is enabled). //
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 22:31:00 UTC 2024 - 25.7K bytes - Viewed (0) -
src/runtime/runtime.go
// from nanotime that we can use (some platforms have a really coarse system time granularity). // We require some amount of time to pass to ensure that the conversion rate is fairly accurate // in aggregate. But because we compute this rate lazily, there's a pretty good chance a decent // amount of time has passed by the time we get here. //
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 01:16:47 UTC 2024 - 9.9K bytes - Viewed (0)