- Sort Score
- Results per page: 10
- Languages All
Results 41 - 49 of 49 for locals (0.12 sec)
-
testing/architecture-test/src/changes/archunit-store/public-api-mutable-properties.txt
Method <org.gradle.caching.local.DirectoryBuildCache.getDirectory()> does not have raw return type assignable to org.gradle.api.provider.Property in (DirectoryBuildCache.java:0)
Registered: Wed Jun 12 18:38:38 UTC 2024 - Last Modified: Fri Jun 07 22:42:49 UTC 2024 - 160.5K bytes - Viewed (0) -
src/cmd/internal/obj/s390x/asmz.go
// must have a symbol break } c.instoffset = a.Offset if a.Sym.Type == objabi.STLSBSS { if c.ctxt.Flag_shared { return C_TLS_IE // initial exec model } return C_TLS_LE // local exec model } return C_ADDR case obj.NAME_GOTREF: return C_GOTADDR case obj.NAME_AUTO: if a.Reg == REGSP { // unset base register for better printing, since
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Apr 16 17:46:09 UTC 2024 - 176.7K bytes - Viewed (0) -
cmd/bucket-replication.go
} return sameTarget, toAPIError(ctx, nil) } // performs a http request to remote endpoint to check if deployment id of remote endpoint is same as // local cluster deployment id. This is to prevent replication to self, especially in case of a loadbalancer // in front of MinIO. func checkRemoteEndpoint(ctx context.Context, epURL *url.URL) error { reqURL := &url.URL{
Registered: Sun Jun 16 00:44:34 UTC 2024 - Last Modified: Thu Jun 13 06:56:12 UTC 2024 - 114.4K bytes - Viewed (0) -
cmd/object-handlers.go
} writeErrorResponse(ctx, w, toAPIError(ctx, err), r.URL) return } } defer gr.Close() objInfo := gr.ObjInfo if !proxy.Proxy { // apply lifecycle rules only for local requests // Automatically remove the object/version if an expiry lifecycle rule can be applied if lc, err := globalLifecycleSys.Get(bucket); err == nil { rcfg, err := globalBucketObjectLockSys.Get(bucket)
Registered: Sun Jun 16 00:44:34 UTC 2024 - Last Modified: Fri Jun 14 13:28:35 UTC 2024 - 124.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/transforms/shape_inference.cc
} std::vector<std::string> platforms; for (auto attr : op.getPlatforms().getAsRange<StringAttr>()) { platforms.push_back(attr.getValue().str()); } // It is a terrible idea to have local MLIR contexts so we need to // register extensions here, again. mlir::DialectRegistry registry; registry.insert<mlir::func::FuncDialect>(); mlir::func::registerAllExtensions(registry);
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Sat Jun 08 07:28:49 UTC 2024 - 134.1K bytes - Viewed (0) -
src/reflect/value.go
escapes(*(*any)(x)) // the dereference may not always be safe, but never executed } } // This is just a wrapper around abi.NoEscape. The inlining heuristics are // finnicky and for whatever reason treat the local call to noescape as much // lower cost with respect to the inliner budget. (That is, replacing calls to // noescape with abi.NoEscape will cause inlining tests to fail.) // //go:nosplit
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 21:17:41 UTC 2024 - 119.9K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/ir/tfl_ops.td
} def TFL_LocalResponseNormalizationOp : TFL_Op<"local_response_normalization", [ TFL_OperandHasRank<0, 4>, TF_SameOperandsAndResultTypeResolveRef, Pure]> { let summary = "Local Response Normalization."; let description = [{ The 4-D `input` tensor is treated as a 3-D array of 1-D vectors (along the last dimension), and each vector is normalized independently. Within a given vector,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Jun 06 19:09:08 UTC 2024 - 186K bytes - Viewed (0) -
src/cmd/internal/obj/ppc64/asm9.go
} else if c.opform(inst) == DS_FORM && v&0x3 != 0 { log.Fatalf("invalid offset for DS form load/store %v", p) } case 75: // 32 bit offset symbol loads (got/toc/addr) var rel *obj.Reloc v := p.From.Offset // Offsets in DS form loads must be a multiple of 4 inst := c.opload(p.As) switch p.From.Name { case obj.NAME_GOTREF, obj.NAME_TOCREF: if v != 0 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 13:55:28 UTC 2024 - 156.1K bytes - Viewed (0) -
src/reflect/all_test.go
for iter.Next() { k.SetIterKey(iter) e.SetIterValue(iter) } })) // Calling MapRange should not allocate even though it returns a *MapIter. // The function is inlineable, so if the local usage does not escape // the *MapIter, it can remain stack allocated. want := 0 if got != want { t.Errorf("wanted %d alloc, got %d", want, got) } } func TestCanIntUintFloatComplex(t *testing.T) {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 01:00:11 UTC 2024 - 218.8K bytes - Viewed (0)