Results 21 - 30 of 217 for clobbered (0.14 sec)
src/cmd/compile/internal/test/testdata/phi_test.go
// Some will be stack phis. For those stack phis, make sure the spill
// of the second argument uses the phi's width (4 bytes), not its width
// (8 bytes). Otherwise, a random stack slot gets clobbered.
runtime.Gosched()
return a + b + c + d + e + f + g + h + i + j + k + l + m + n + o + p + q + r + s + t + u + v + w + x + y + z
}

func TestPhi(t *testing.T) {
	want := int32(0)
	got := foo()
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Dec 23 06:40:04 UTC 2020 - 2.2K bytes - Viewed (0)
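A rough sketch (my illustration, not the test itself) of the shape of code that produces a phi whose arguments come from values of different widths: the merged variable is 4 bytes wide even though the value feeding it on one branch started life as an 8-byte int64.

package main

import "fmt"

// pick merges two assignments into one int32 variable. In SSA form the merge
// point is a 4-byte phi, even though the else-branch value was truncated from
// an 8-byte int64; any spill of that argument must use the phi's 4-byte width.
func pick(c bool, a int32, b int64) int32 {
	var v int32
	if c {
		v = a
	} else {
		v = int32(b)
	}
	return v
}

func main() {
	fmt.Println(pick(true, 7, 1<<40), pick(false, 7, 1<<40)) // 7 0
}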
src/runtime/cgo/gcc_aix_ppc64.S
bl saveregs
stdu 1, -296(1)
// Set up Go ABI constant registers
// Must match _cgo_reginit in runtime package.
xor 0, 0, 0
// Restore g pointer (r30 in Go ABI, which may have been clobbered by C)
mr 30, 4
// Call fn
mr 12, 3
mtctr 12
bctrl
addi 1, 1, 296
bl restoreregs
ld 2, 40(1)
ld 0, 16(1)
mtlr 0
blr

saveregs:
	// Save callee-save registers
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Jan 24 22:38:02 UTC 2023 - 2.7K bytes - Viewed (0)
src/crypto/internal/edwards25519/field/fe_alias_test.go
// leading to incorrect results. That is, it ensures that it's safe to write
//
//	v.Invert(v)
//
// or
//
//	v.Add(v, v)
//
// without any of the inputs getting clobbered by the output being written.
func TestAliasing(t *testing.T) {
	type target struct {
		name     string
		oneArgF  func(v, x *Element) *Element
		twoArgsF func(v, x, y *Element) *Element
	}
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Aug 28 17:26:17 UTC 2023 - 3.4K bytes - Viewed (0)
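A minimal sketch of the aliasing-safe pattern this test enforces (a toy Element of my own, not the edwards25519 field type): read every input into locals before writing anything through the receiver, so calls like v.Add(v, v) can never observe a half-written result.

package main

import "fmt"

type Element struct{ lo, hi uint64 }

// Add sets v = x + y and returns v. It stays correct even when v aliases
// x, y, or both, because all inputs are read before v is written.
func (v *Element) Add(x, y *Element) *Element {
	lo := x.lo + y.lo
	hi := x.hi + y.hi
	v.lo, v.hi = lo, hi
	return v
}

func main() {
	v := &Element{1, 2}
	v.Add(v, v)     // the aliased call the test above exercises
	fmt.Println(*v) // {2 4}
}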
src/slices/iter_test.go
}
if len(chunks) == 0 {
	return
}
// Verify that appending to the end of the first chunk does not
// clobber the beginning of the next chunk.
s := Clone(tc.s)
chunks[0] = append(chunks[0], -1)
if !Equal(s, tc.s) {
	t.Errorf("slice was clobbered: %v, want %v", s, tc.s)
}
})
}
}

func TestChunkPanics(t *testing.T) {
	for _, test := range []struct {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 10 17:28:50 UTC 2024 - 5.7K bytes - Viewed (0)
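The property being checked relies on each chunk having its capacity clipped to its length, so an append on one chunk must allocate rather than write into the next chunk's elements. A small sketch of that mechanism using plain full slice expressions (independent of the iterator API):

package main

import "fmt"

func main() {
	s := []int{1, 2, 3, 4}

	loose := s[0:2]     // capacity still reaches the end of s
	clipped := s[0:2:2] // full slice expression: cap == len

	_ = append(loose, -1)   // writes into s[2], clobbering the "next chunk"
	_ = append(clipped, -1) // cap is exhausted, so append allocates a fresh array

	fmt.Println(s) // [1 2 -1 4]: only the unclipped append modified s
}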
src/cmd/internal/buildid/buildid_test.go
if elf.ProgType(phdr.Type) == elf.PT_NOTE {
	// Increase the size so we keep
	// reading notes.
	order.PutUint64(data[phoff+4*8:], phdr.Filesz+1)
	// Clobber the Align field to zero.
	order.PutUint64(data[phoff+6*8:], 0)
	// Clobber the note type so we
	// keep reading notes.
	order.PutUint32(data[phdr.Off+12:], 0)
}
phoff += phsize
}
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 04 17:31:28 UTC 2024 - 6.6K bytes - Viewed (0)
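The raw offsets patched above follow the 64-bit ELF program header layout: two uint32 fields followed by six uint64s put Filesz at byte 4*8 and Align at 6*8. A quick way to confirm that from debug/elf's Prog64 type (my illustration, not part of the test):

package main

import (
	"debug/elf"
	"fmt"
	"unsafe"
)

func main() {
	var p elf.Prog64
	fmt.Println(unsafe.Offsetof(p.Filesz)) // 32 == 4*8: the field the test grows by one
	fmt.Println(unsafe.Offsetof(p.Align))  // 48 == 6*8: the field the test zeroes
}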
src/cmd/compile/internal/ssa/flagalloc.go
// This case handles the possibility where a flag value is generated but never used.
// In that case, there's no corresponding Select to overwrite the flags value,
// so we must consider flags clobbered by the tuple-generating instruction.
return true
}
return false
}

// copyFlags copies v (flag generator) into b, returns the copy.
// If v's arg is also flags, copy recursively.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Oct 31 21:41:20 UTC 2022 - 6.7K bytes - Viewed (0)
src/cmd/compile/internal/ssa/schedule_test.go
// one is A1 -> A1carry -> A1Carryvalue, the other is A2 -> A2carry -> A2Carryvalue. If they
// are not scheduled properly, the carry will be clobbered, causing the carry to be regenerated.
c := testConfigARM64(t)
fun := c.Fun("entry",
	Bloc("entry",
		Valu("mem0", OpInitMem, types.TypeMem, 0, nil),
		Valu("x", OpARM64MOVDconst, c.config.Types.UInt64, 5, nil),
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sat Oct 08 01:46:00 UTC 2022 - 5.6K bytes - Viewed (0)
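A sketch of the kind of Go code that creates such paired carry chains (my example, not the SSA test): two-word addition via math/bits, where each 128-bit add lowers on arm64 to an instruction pair that first sets and then consumes the carry flag, so the scheduler must not interleave anything that clobbers the flag in between.

package main

import (
	"fmt"
	"math/bits"
)

// add128 adds two 128-bit numbers given as (lo, hi) word pairs.
func add128(xlo, xhi, ylo, yhi uint64) (lo, hi uint64) {
	lo, carry := bits.Add64(xlo, ylo, 0) // sets the carry
	hi, _ = bits.Add64(xhi, yhi, carry)  // consumes it; nothing may clobber it in between
	return
}

func main() {
	fmt.Println(add128(^uint64(0), 1, 1, 2)) // 0 4
}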
src/runtime/asm_mips64x.s
// to the buffer space in R25.
// It clobbers R23 (the linker temp register).
// The act of CALLing gcWriteBarrier will clobber R31 (LR).
// It does not clobber any other general-purpose registers,
// but may clobber others (e.g., floating point registers).
TEXT gcWriteBarrier<>(SB),NOSPLIT,$192
	// Save the registers clobbered by the fast path.
	MOVV R1, 184(R29)
	MOVV R2, 192(R29)
retry:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Nov 06 19:45:59 UTC 2023 - 24.3K bytes - Viewed (0)
src/runtime/asm_riscv64.s
// to the buffer space in X24.
// It clobbers X31 aka T6 (the linker temp register - REG_TMP).
// The act of CALLing gcWriteBarrier will clobber RA (LR).
// It does not clobber any other general-purpose registers,
// but may clobber others (e.g., floating point registers).
TEXT gcWriteBarrier<>(SB),NOSPLIT,$208
	// Save the registers clobbered by the fast path.
	MOV A0, 24*8(X2)
	MOV A1, 25*8(X2)
retry:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Nov 09 13:57:06 UTC 2023 - 27K bytes - Viewed (0)
src/crypto/md5/md5block_ppc64x.s
#include "textflag.h" // ENDIAN_MOVE generates the appropriate // 4 byte load for big or little endian. // The 4 bytes at ptr+off is loaded into dst. // The idx reg is only needed for big endian // and is clobbered when used. #ifdef GOARCH_ppc64le #define ENDIAN_MOVE(off, ptr, dst, idx) \ MOVWZ off(ptr),dst #else #define ENDIAN_MOVE(off, ptr, dst, idx) \ MOVD $off,idx; \ MOVWBR (idx)(ptr), dst #endif
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon May 20 18:05:32 UTC 2024 - 5.3K bytes - Viewed (0)
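A rough Go-level analogue of what the macro accomplishes (my illustration, not code from the md5 package): MD5 consumes the message as little-endian 32-bit words, so on big-endian ppc64 the load must byte-reverse (MOVWBR) while on ppc64le a plain load (MOVWZ) suffices; encoding/binary expresses the same intent portably.

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	block := []byte{0x01, 0x02, 0x03, 0x04}
	// Load block[0:4] as a little-endian word regardless of host byte order,
	// the equivalent of ENDIAN_MOVE(0, ptr, dst, idx).
	word := binary.LittleEndian.Uint32(block)
	fmt.Printf("%08x\n", word) // 04030201
}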