- Sort: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 15 for V18 (0.06 sec)
-
src/crypto/sha512/sha512block_ppc64x.s
LXVD2X (INP)(R_x040), VS50 // load v18 (=vs50) in advance VSLDOI $8, V16, V16, V17 SHA512ROUND0(V7, V0, V1, V2, V3, V4, V5, V6, V17, R_x0b0) VPERMLE(V18,V18,LEMASK,V18) SHA512ROUND0(V6, V7, V0, V1, V2, V3, V4, V5, V18, R_x0c0) LXVD2X (INP)(R_x050), VS52 // load v20 (=vs52) in advance VSLDOI $8, V18, V18, V19 SHA512ROUND0(V5, V6, V7, V0, V1, V2, V3, V4, V19, R_x0d0)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 04 17:29:44 UTC 2024 - 15.8K bytes - Viewed (0) -
src/crypto/aes/gcm_ppc64x.s
XXLOR key2, key2, V26; \ VCIPHER vin, V25, vin; \ VCIPHER vin, V26, vin // Encrypt 4 values in V15 - V18 // with the specified key from // VS1 - VS9. #define VCIPHER_4X1_KEY(key) \ XXLOR key, key, V23; \ VCIPHER V15, V23, V15; \ VCIPHER V16, V23, V16; \ VCIPHER V17, V23, V17; \ VCIPHER V18, V23, V18 // Encrypt 8 values in V15 - V22 // with the specified key, // assuming it is a VSreg
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 04 17:29:44 UTC 2024 - 27.1K bytes - Viewed (0) -
src/hash/crc32/crc32_ppc64le.s
LVX (R4),V16 // load from buffer OR $0,R2,R2 VPMSUMD V17,const1,V9 // vpmsumd with constants LVX (R4+off16),V17 // load next from buffer OR $0,R2,R2 VPMSUMD V18,const1,V10 // vpmsumd with constants LVX (R4+off32),V18 // load next from buffer OR $0,R2,R2 VPMSUMD V19,const1,V11 // vpmsumd with constants LVX (R4+off48),V19 // load next from buffer OR $0,R2,R2
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon May 06 12:09:50 UTC 2024 - 13.1K bytes - Viewed (0) -
src/internal/bytealg/index_ppc64x.s
VSEL V6, V5, V31, V7 // merge even and odd indices VCLZD V7, V18 // find index of first match MFVSRD V18, R25 // get first value CMP R25, $64 // Found if < 64 BLT foundR25 // Return byte index where found VSLDOI $8, V18, V18, V18 // Adjust 2nd value MFVSRD V18, R25 // get second value CMP R25, $64 // Found if < 64
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Apr 21 16:47:45 UTC 2023 - 31.6K bytes - Viewed (0) -
src/cmd/internal/notsha256/sha256block_ppc64x.s
SHA256ROUND0(V0, V1, V2, V3, V4, V5, V6, V7, V16, R_x0a0) VSLDOI $4, V16, V16, V17 SHA256ROUND0(V7, V0, V1, V2, V3, V4, V5, V6, V17, R_x0b0) VSLDOI $4, V17, V17, V18 SHA256ROUND0(V6, V7, V0, V1, V2, V3, V4, V5, V18, R_x0c0) VSLDOI $4, V18, V18, V19 LXVD2X (INP)(R_x030), V20 // load v20 in advance SHA256ROUND0(V5, V6, V7, V0, V1, V2, V3, V4, V19, R_x0d0) VPERMLE(V20, V20, LEMASK, V20)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Oct 19 23:33:27 UTC 2023 - 14.5K bytes - Viewed (0) -
src/crypto/sha256/sha256block_ppc64x.s
SHA256ROUND0(V0, V1, V2, V3, V4, V5, V6, V7, V16, R_x0a0) VSLDOI $4, V16, V16, V17 SHA256ROUND0(V7, V0, V1, V2, V3, V4, V5, V6, V17, R_x0b0) VSLDOI $4, V17, V17, V18 SHA256ROUND0(V6, V7, V0, V1, V2, V3, V4, V5, V18, R_x0c0) VSLDOI $4, V18, V18, V19 LXVD2X (INP)(R_x030), V20 // load v20 in advance SHA256ROUND0(V5, V6, V7, V0, V1, V2, V3, V4, V19, R_x0d0) VPERMLE(V20, V20, LEMASK, V20)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 04 17:29:44 UTC 2024 - 14.4K bytes - Viewed (0) -
src/math/big/arith_s390x.s
VACCCQ V7, V15, V30, V31 VACQ V7, V15, V30, V23 VACCCQ V8, V16, V31, V0 // V0 has carry-over VACQ V8, V16, V31, V24 VPDI $0x4, V17, V17, V17 // flip the doublewords to big-endian order VPDI $0x4, V18, V18, V18 // flip the doublewords to big-endian order VPDI $0x4, V19, V19, V19 // flip the doublewords to big-endian order VPDI $0x4, V20, V20, V20 // flip the doublewords to big-endian order
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Oct 19 23:33:27 UTC 2023 - 20.3K bytes - Viewed (0) -
src/cmd/asm/internal/asm/testdata/arm64enc.s
VCNT V13.B8, V11.B8 // ab59200e VMOV V31.B[15], V18 // f2071f5e VDUP V31.B[15], V18 // f2071f5e VDUP V31.B[13], V20.B16 // f4071b4e VEOR V4.B8, V18.B8, V7.B8 // 471e242e
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Jul 24 01:11:41 UTC 2023 - 43.9K bytes - Viewed (0) -
src/crypto/aes/asm_ppc64x.s
VXOR V12, V12, V12 \ VXOR V13, V13, V13 \ VXOR V14, V14, V14 \ VXOR V15, V15, V15 \ VXOR V16, V16, V16 \ VXOR V17, V17, V17 \ VXOR V18, V18, V18 \ VXOR V19, V19, V19 \ VXOR V20, V20, V20 //func cryptBlocksChain(src, dst *byte, length int, key *uint32, iv *byte, enc int, nr int) TEXT ·cryptBlocksChain(SB), NOSPLIT|NOFRAME, $0 MOVD src+0(FP), INP MOVD dst+8(FP), OUTP MOVD length+16(FP), LEN
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon May 20 18:05:32 UTC 2024 - 18.6K bytes - Viewed (0) -
src/cmd/asm/internal/asm/operand_test.go
{"V7", "V7"}, {"V8", "V8"}, {"V9", "V9"}, {"V10", "V10"}, {"V11", "V11"}, {"V12", "V12"}, {"V13", "V13"}, {"V14", "V14"}, {"V15", "V15"}, {"V16", "V16"}, {"V17", "V17"}, {"V18", "V18"}, {"V19", "V19"}, {"V20", "V20"}, {"V21", "V21"}, {"V22", "V22"}, {"V23", "V23"}, {"V24", "V24"}, {"V25", "V25"}, {"V26", "V26"}, {"V27", "V27"}, {"V28", "V28"},
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Aug 29 18:31:05 UTC 2023 - 23.9K bytes - Viewed (0)