- Sort Score
- Results per page: 10
- Languages All
Results 31 - 40 of 91 for offsets (1 sec)
-
cmd/erasure-decode_test.go
} // To generate random offset/length. r := rand.New(rand.NewSource(UTCNow().UnixNano())) buf := &bytes.Buffer{} // Verify erasure.Decode() for random offsets and lengths. for i := 0; i < iterations; i++ { offset := r.Int63n(length) readLen := r.Int63n(length - offset) expected := data[offset : offset+readLen] // Get the checksums of the current part.
Registered: Sun Jun 16 00:44:34 UTC 2024 - Last Modified: Tue Jan 30 20:43:25 UTC 2024 - 21.1K bytes - Viewed (0) -
platforms/jvm/language-java/src/main/java/org/gradle/api/internal/tasks/compile/DiagnosticToProblemListener.java
// ... otherwise we can still report the line spec.lineInFileLocation(resourceName, line); } } // If we know the offsets ... // (offset doesn't require line and column to be set, hence the separate check) // (documentation says that getEndPosition() will be NOPOS iff getPosition() is NOPOS) if (0 < position) {
Registered: Wed Jun 12 18:38:38 UTC 2024 - Last Modified: Thu Jun 06 06:17:43 UTC 2024 - 11.5K bytes - Viewed (0) -
internal/s3select/select.go
rsc.offset = rsc.size + offset } if rsc.offset < 0 { return rsc.offset, errors.New("seek to invalid negative offset") } if rsc.offset >= rsc.size { return rsc.offset, errors.New("seek past end of object") } if rsc.reader != nil { _ = rsc.reader.Close() rsc.reader = nil } return rsc.offset, nil } // Read call to implement io.Reader
Registered: Sun Jun 16 00:44:34 UTC 2024 - Last Modified: Fri May 24 23:05:23 UTC 2024 - 21K bytes - Viewed (0) -
src/internal/zstd/block.go
r.repeatedOffset2 = r.repeatedOffset1 r.repeatedOffset1 = offset } else { if literal == 0 { offset++ } switch offset { case 1: offset = r.repeatedOffset1 case 2: offset = r.repeatedOffset2 r.repeatedOffset2 = r.repeatedOffset1 r.repeatedOffset1 = offset case 3: offset = r.repeatedOffset3 r.repeatedOffset3 = r.repeatedOffset2
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Sep 28 17:57:43 UTC 2023 - 10.2K bytes - Viewed (0) -
src/cmd/vendor/github.com/google/pprof/profile/profile.go
// and if the offsets match, if they are available. func adjacent(m1, m2 *Mapping) bool { if m1.File != "" && m2.File != "" { if m1.File != m2.File { return false } } if m1.BuildID != "" && m2.BuildID != "" { if m1.BuildID != m2.BuildID { return false } } if m1.Limit != m2.Start { return false } if m1.Offset != 0 && m2.Offset != 0 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 31 19:48:28 UTC 2024 - 22.3K bytes - Viewed (0) -
src/runtime/profbuf.go
// for the stack. // // The current effective offsets into the tags and data circular buffers // for reading and writing are stored in the high 30 and low 32 bits of r and w. // The bottom bits of the high 32 are additional flag bits in w, unused in r. // "Effective" offsets means the total number of reads or writes, mod 2^length. // The offset in the buffer is the effective offset mod the length of the buffer.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 18.2K bytes - Viewed (0) -
cmd/object-api-utils.go
} return &HTTPRangeSpec{Start: start, End: end} } // Returns the compressed offset which should be skipped. // If encrypted offsets are adjusted for encrypted block headers/trailers. // Since de-compression is after decryption encryption overhead is only added to compressedOffset.
Registered: Sun Jun 16 00:44:34 UTC 2024 - Last Modified: Tue Jun 11 03:13:30 UTC 2024 - 36.3K bytes - Viewed (0) -
tensorflow/cc/gradients/array_grad.cc
} if (grad_inputs.size() != 1) { return errors::InvalidArgument("Concat grad should have 1 input"); } // For each dx[i], we take a slice of dy. The offset and size of the // slice is given by offset[i] and shape[i]. const Output& dy = grad_inputs[0]; for (int i = 0; i < inputs.size(); ++i) { grad_outputs->push_back(
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Oct 10 23:33:32 UTC 2023 - 31.7K bytes - Viewed (0) -
src/crypto/aes/asm_ppc64x.s
VCIPHER V0, V1, V0 VCIPHER V0, V2, V0 // Load xk[20:27] and cipher LXVD2X (R10+R5), V1 LXVD2X (R11+R5), V2 VCIPHER V0, V1, V0 VCIPHER V0, V2, V0 // Increment xk pointer to reuse constant offsets in R6-R12. ADD $112, R5 // Load xk[28:35] and cipher LXVD2X (R0+R5), V1 LXVD2X (R6+R5), V2 VCIPHER V0, V1, V0 VCIPHER V0, V2, V0 // Load xk[36:43] and cipher LXVD2X (R7+R5), V1
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon May 20 18:05:32 UTC 2024 - 18.6K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/writebarrier.go
} if size%ptrSize != 0 { ptr.Fatalf("unaligned pointer write") } if off < 0 || off+size > 64*ptrSize { // memory range goes off end of tracked offsets return true } z := zeroes[mem.ID] if ptr != z.base { // This isn't the object we know about at this memory state. return true } // Mask of bits we're asking about m := (uint64(1)<<(size/ptrSize) - 1) << (off / ptrSize) if z.mask&m == m {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Sep 08 19:09:14 UTC 2023 - 23.5K bytes - Viewed (0)