Results 1 - 10 of 11 for stp (0.08 sec)

  1. src/crypto/internal/edwards25519/field/fe_arm64.s

    	ADD R0>>51, R11, R11
    	ADD R1>>51, R12, R12
    	ADD R2>>51, R13, R13
    	ADD R3>>51, R14, R14
    	// R4>>51 * 19 + R10 -> R10
    	LSR $51, R4, R21
    	MOVD $19, R22
    	MADD R22, R10, R21, R10
    
    	STP (R10, R11), 0(R20)
    	STP (R12, R13), 16(R20)
    	MOVD R14, 32(R20)
    
    - Last Modified: Mon Mar 04 17:29:44 UTC 2024
    - 1K bytes
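
    Note: STP stores a register pair into two consecutive 8-byte slots, which is
    why the excerpt writes the five reduced limbs with two STPs plus one MOVD.
    A minimal stand-alone sketch of the same store form (storePair is a
    hypothetical helper, not part of the package above):

    	#include "textflag.h"

    	// func storePair(dst *[2]uint64, a, b uint64)
    	TEXT ·storePair(SB), NOSPLIT, $0-24
    		MOVD	dst+0(FP), R0
    		MOVD	a+8(FP), R1
    		MOVD	b+16(FP), R2
    		STP	(R1, R2), 0(R0)	// a -> dst[0], b -> dst[1] in one instruction
    		RET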
  2. src/crypto/internal/nistec/p256_asm_arm64.s

    		LDP.P	16(b_ptr), (acc2, acc3)
    		CSEL	EQ, acc2, t2, t2
    		CSEL	EQ, acc3, t3, t3
    
    		CMP	$16, const1
    		BNE	loop_select
    
    	STP	(x0, x1), 0*16(res_ptr)
    	STP	(x2, x3), 1*16(res_ptr)
    	STP	(y0, y1), 2*16(res_ptr)
    	STP	(y2, y3), 3*16(res_ptr)
    	STP	(t0, t1), 4*16(res_ptr)
    	STP	(t2, t3), 5*16(res_ptr)
    	RET
    /* ---------------------------------------*/
    - Last Modified: Mon Mar 04 17:29:44 UTC 2024
    - 29.7K bytes
  3. src/runtime/asm_arm64.s

    	// R0 and R1 already saved
    	STP	(R2, R3), 1*8(RSP)
    	STP	(R4, R5), 3*8(RSP)
    	STP	(R6, R7), 5*8(RSP)
    	STP	(R8, R9), 7*8(RSP)
    	STP	(R10, R11), 9*8(RSP)
    	STP	(R12, R13), 11*8(RSP)
    	STP	(R14, R15), 13*8(RSP)
    	// R16, R17 may be clobbered by linker trampoline
    	// R18 is unused.
    	STP	(R19, R20), 15*8(RSP)
    	STP	(R21, R22), 17*8(RSP)
    	STP	(R23, R24), 19*8(RSP)
    	STP	(R25, R26), 21*8(RSP)
    - Last Modified: Sat May 11 20:38:24 UTC 2024
    - 43.4K bytes
  4. test/codegen/memcombine.go

    	_, _ = w1[3], w2[3]
    	w1[0], w1[1], w1[2], w1[3] = 0, 0, 0, 0 // arm64:"STP",-"MOVB",-"MOVH"
    	w2[2], w2[3], w2[1], w2[0] = 0, 0, 0, 0 // arm64:"STP",-"MOVB",-"MOVH"
    }
    
    func zero_uint64_2(d1, d2 []uint64) {
    	_, _ = d1[1], d2[1]
    	d1[0], d1[1] = 0, 0 // arm64:"STP",-"MOVB",-"MOVH"
    	d2[1], d2[0] = 0, 0 // arm64:"STP",-"MOVB",-"MOVH"
    }
    
    func loadstore(p, q *[4]uint8) {
    	// amd64:"MOVL",-"MOVB"
    - Last Modified: Thu Mar 21 19:45:41 UTC 2024
    - 29.7K bytes
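
    Note: the arm64 annotations above assert that adjacent 8-byte zero stores are
    merged into a single STP and that no byte/halfword stores remain. A hedged
    sketch of a similar case in the same style (zero_pair is hypothetical, not
    part of the test file):

    	func zero_pair(p *[2]uint64) {
    		// Two adjacent 8-byte stores that the arm64 backend is expected
    		// to combine into one STP, as in the tests above.
    		p[0] = 0
    		p[1] = 0
    	}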
  5. src/cmd/compile/internal/ssa/_gen/ARM64.rules

    		(MOVDstore ptr (MOVDconst [0]) mem))
    (Zero [16] ptr mem) =>
    	(STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)
    
    (Zero [32] ptr mem) =>
    	(STP [16] ptr (MOVDconst [0]) (MOVDconst [0])
    		(STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))
    
    (Zero [48] ptr mem) =>
    	(STP [32] ptr (MOVDconst [0]) (MOVDconst [0])
    		(STP [16] ptr (MOVDconst [0]) (MOVDconst [0])
    			(STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)))
    
    - Last Modified: Thu May 23 15:49:20 UTC 2024
    - 113.1K bytes
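
    Note: these rules lower small Zero operations into chains of 16-byte STP
    stores of zero; per the Zero [32] rule above, clearing a 32-byte value
    should become two STPs at offsets 16 and 0. A hedged Go-level sketch of
    code expected to produce a Zero [32] (clear32 is hypothetical):

    	// Clearing a 32-byte array yields a Zero [32] SSA op on arm64.
    	func clear32(p *[4]uint64) {
    		*p = [4]uint64{}
    	}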
  6. src/cmd/compile/internal/ssa/_gen/ARM64Ops.go

    		{name: "MOVDstore", argLength: 3, reg: gpstore, aux: "SymOff", asm: "MOVD", typ: "Mem", faultOnNilArg0: true, symEffect: "Write"},   // store 8 bytes of arg1 to arg0 + auxInt + aux.  arg2=mem.
    		{name: "STP", argLength: 4, reg: gpstore2, aux: "SymOff", asm: "STP", typ: "Mem", faultOnNilArg0: true, symEffect: "Write"},         // store 16 bytes of arg1 and arg2 to arg0 + auxInt + aux.  arg3=mem.
    - Last Modified: Thu May 23 15:49:20 UTC 2024
    - 58.8K bytes
  7. src/cmd/internal/obj/arm64/asm7.go

    		}
    		v := c.regoff(&p.From)
    		o1 = c.opldpstp(p, o, v, rf, rt1, rt2, 1)
    
    	case 67: /* stp (r1, r2), O(R)!; stp (r1, r2), (R)O! */
    		rt, rf1, rf2 := p.To.Reg, p.From.Reg, int16(p.From.Offset)
    		if rt == obj.REG_NONE {
    			rt = o.param
    		}
    		if rt == obj.REG_NONE {
    			c.ctxt.Diag("invalid stp destination: %v\n", p)
    		}
    		v := c.regoff(&p.To)
    		o1 = c.opldpstp(p, o, v, rt, rf1, rf2, 0)
    
    - Last Modified: Wed May 15 15:44:14 UTC 2024
    - 201.1K bytes
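
    Note: case 67 assembles the writeback (pre/post-indexed) forms of STP, shown
    in the comment as "O(R)!" and "(R)O!". In Go's arm64 syntax these are written
    with the .W (pre-index) and .P (post-index) suffixes; a short sketch, not
    taken from the file above:

    	STP.W	(R1, R2), -16(R3)	// pre-index: R3 -= 16, then store the pair at 0(R3)
    	STP.P	(R1, R2), 16(R3)	// post-index: store the pair at 0(R3), then R3 += 16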
  8. src/cmd/compile/internal/ssa/rewriteARM64.go

    		return true
    	}
    	// match: (Move [64] dst src mem)
    	// result: (STP [48] dst (Select0 <typ.UInt64> (LDP [48] src mem)) (Select1 <typ.UInt64> (LDP [48] src mem)) (STP [32] dst (Select0 <typ.UInt64> (LDP [32] src mem)) (Select1 <typ.UInt64> (LDP [32] src mem)) (STP [16] dst (Select0 <typ.UInt64> (LDP [16] src mem)) (Select1 <typ.UInt64> (LDP [16] src mem)) (STP dst (Select0 <typ.UInt64> (LDP src mem)) (Select1 <typ.UInt64> (LDP src mem)) mem))))
    	for {
    - Last Modified: Thu May 23 15:49:20 UTC 2024
    - 608.6K bytes
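
    Note: the Move [64] rule above expands a 64-byte copy into four LDP loads
    feeding four STP stores at offsets 48, 32, 16 and 0. A hedged sketch of Go
    code expected to hit this rule (copy64 is hypothetical):

    	// Copying a 64-byte value produces a Move [64] SSA op on arm64.
    	func copy64(dst, src *[8]uint64) {
    		*dst = *src
    	}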
  9. src/cmd/asm/internal/asm/parse.go

    		// TODO: Consistency in the encoding would be nice here.
    		if p.arch.InFamily(sys.ARM, sys.ARM64) {
    			// Special form
    			// ARM: destination register pair (R1, R2).
    			// ARM64: register pair (R1, R2) for LDP/STP.
    			if prefix != 0 || scale != 0 {
    				p.errorf("illegal address mode for register pair")
    				return
    			}
    			a.Type = obj.TYPE_REGREG
    			a.Offset = int64(r2)
    			// Nothing may follow
    			return
    		}
    - Last Modified: Wed Feb 21 14:34:57 UTC 2024
    - 36.9K bytes
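
    Note: a pair operand such as "(R1, R2)" is parsed into a single obj.Addr with
    Type TYPE_REGREG, the first register in Reg and the second in Offset, as the
    branch above shows. A rough illustration of that value (fragment only;
    cmd/internal/obj and cmd/internal/obj/arm64 are importable only inside the
    toolchain source tree):

    	// Shape of the operand built for "(R1, R2)" in an LDP/STP instruction.
    	pair := obj.Addr{
    		Type:   obj.TYPE_REGREG,
    		Reg:    arm64.REG_R1,        // first register of the pair
    		Offset: int64(arm64.REG_R2), // second register, carried in Offset
    	}
    	_ = pair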
  10. src/cmd/link/internal/arm64/asm.go

    	}
    	return loader.ExtReloc{}, false
    }
    
    func elfsetupplt(ctxt *ld.Link, ldr *loader.Loader, plt, gotplt *loader.SymbolBuilder, dynamic loader.Sym) {
    	if plt.Size() == 0 {
    		// stp     x16, x30, [sp, #-16]!
    		// identifying information
    		plt.AddUint32(ctxt.Arch, 0xa9bf7bf0)
    
    		// the following two instructions (adrp + ldr) load *got[2] into x17
    		// adrp    x16, &got[0]
    - Last Modified: Tue Jan 30 20:09:45 UTC 2024
    - 47K bytes
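
    Note: 0xa9bf7bf0 is the A64 encoding of "stp x16, x30, [sp, #-16]!" from the
    comment above. A small runnable sketch that rebuilds the constant from its
    fields (encodeSTPPre is a hypothetical helper, not part of the linker):

    	package main

    	import "fmt"

    	// encodeSTPPre encodes "stp xt, xt2, [xn, #imm]!" (64-bit, pre-indexed).
    	// imm must be a multiple of 8 in [-512, 504].
    	func encodeSTPPre(rt, rt2, rn, imm int) uint32 {
    		imm7 := uint32(imm/8) & 0x7f
    		return 0xa9800000 | // base opcode: 64-bit STP, pre-index writeback
    			imm7<<15 |
    			uint32(rt2)<<10 |
    			uint32(rn)<<5 |
    			uint32(rt)
    	}

    	func main() {
    		// x16, x30, sp (encoded as register 31), offset -16.
    		fmt.Printf("%#x\n", encodeSTPPre(16, 30, 31, -16)) // prints 0xa9bf7bf0
    	}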