- Sort Score
- Results 10 results
- Languages All
Results 21 - 30 of 1,554 for offset (0.59 sec)
-
src/runtime/type.go
// and treat the offset as an identifier. We use negative offsets that // do not overlap with any compile-time module offsets. // // Entries are created by reflect.addReflectOff. var reflectOffs struct { lock mutex next int32 m map[int32]unsafe.Pointer minv map[unsafe.Pointer]int32 } func reflectOffsLock() { lock(&reflectOffs.lock) if raceenabled {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 01:17:26 UTC 2024 - 12.7K bytes - Viewed (0) -
cmd/erasure-decode_test.go
} // To generate random offset/length. r := rand.New(rand.NewSource(UTCNow().UnixNano())) buf := &bytes.Buffer{} // Verify erasure.Decode() for random offsets and lengths. for i := 0; i < iterations; i++ { offset := r.Int63n(length) readLen := r.Int63n(length - offset) expected := data[offset : offset+readLen] // Get the checksums of the current part.
Registered: Sun Jun 16 00:44:34 UTC 2024 - Last Modified: Tue Jan 30 20:43:25 UTC 2024 - 21.1K bytes - Viewed (0) -
src/log/slog/level.go
defer func() { if err != nil { err = fmt.Errorf("slog: level string %q: %w", s, err) } }() name := s offset := 0 if i := strings.IndexAny(s, "+-"); i >= 0 { name = s[:i] offset, err = strconv.Atoi(s[i:]) if err != nil { return err } } switch strings.ToUpper(name) { case "DEBUG": *l = LevelDebug case "INFO": *l = LevelInfo case "WARN":
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Oct 30 17:34:43 UTC 2023 - 5.6K bytes - Viewed (0) -
src/cmd/link/internal/ld/xcoff.go
Lstlen uint32 // Length of string table Limpoff uint64 // Offset to start of import file IDs Lstoff uint64 // Offset to start of string table Lsymoff uint64 // Offset to start of symbol table Lrldoff uint64 // Offset to start of relocation entries } // Loader Symbol type XcoffLdSym64 struct { Lvalue uint64 // Address field Loffset uint32 // Byte offset into string table of symbol name
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Dec 01 19:58:23 UTC 2023 - 51.8K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/utils/string_utils.h
int WriteToBuffer(char** buffer); private: // Data buffer to store contents of strings, not including headers. std::vector<char> data_; // Offset of the starting index of each string in data buffer. std::vector<size_t> offset_; // Max length in number of characters that we permit the total // buffer containing the concatenation of all added strings to be.
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Jun 12 21:41:49 UTC 2024 - 2.2K bytes - Viewed (0) -
src/internal/zstd/fse_test.go
4, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 1, 1, 1, 1, -1, -1, -1, -1, } // offsetPredefinedDistribution is the predefined distribution table // for offsets. RFC 3.1.1.3.2.2.3. var offsetPredefinedDistribution = []int16{ 1, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, -1, -1, -1, -1, -1, }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Apr 18 20:34:13 UTC 2023 - 2.6K bytes - Viewed (0) -
src/runtime/cgo/gcc_android.c
if (*tlsg != off) { fatalf("tlsg offset wrong, got %ld want %ld\n", *tlsg, off); } return; } err = pthread_key_create(&k, nil); if(err != 0) { fatalf("pthread_key_create failed: %d", err); } pthread_setspecific(k, (void*)magic1); // If thread local slots are laid out as we expect, our magic word will // be located at some low offset from tlsbase. However, just in case something went
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat May 09 23:17:17 UTC 2020 - 2.6K bytes - Viewed (0) -
src/compress/flate/deflatefast.go
prevHash := hash(uint32(x)) e.table[prevHash&tableMask] = tableEntry{offset: e.cur + s - 1, val: uint32(x)} x >>= 8 currHash := hash(uint32(x)) candidate = e.table[currHash&tableMask] e.table[currHash&tableMask] = tableEntry{offset: e.cur + s, val: uint32(x)} offset := s - (candidate.offset - e.cur) if offset > maxMatchOffset || uint32(x) != candidate.val { cv = uint32(x >> 8)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Oct 19 18:48:17 UTC 2020 - 9.4K bytes - Viewed (0) -
src/internal/bytealg/compare_ppc64x.s
ADD $-16,R9,R9 MOVD $32,R11 // set offsets to load into vector MOVD $48,R12 // set offsets to load into vector PCALIGN $16 cmp64_loop: LXVD2X (R5)(R0),V3 // load bytes of A at offset 0 into vector LXVD2X (R6)(R0),V4 // load bytes of B at offset 0 into vector VCMPEQUDCC V3,V4,V1 BGE CR6,different // jump out if its different LXVD2X (R5)(R10),V3 // load bytes of A at offset 16 into vector
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Aug 28 17:33:20 UTC 2023 - 6.7K bytes - Viewed (0) -
src/cmd/compile/internal/types/size.go
} for i, m := range methods { m.Offset = int64(i) * int64(PtrSize) } t.SetAllMethods(methods) } // calcStructOffset computes the offsets of a sequence of fields, // starting at the given offset. It returns the resulting offset and // maximum field alignment. func calcStructOffset(t *Type, fields []*Field, offset int64) int64 { for _, f := range fields { CalcSize(f.Type)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 15K bytes - Viewed (0)