- Sort Score
- Results per page: 10
- Languages All
Results 21 - 30 of 37 for SLL (0.09 sec)
-
src/cmd/compile/internal/ssa/_gen/MIPSOps.go
{name: "SQRTF", argLength: 1, reg: fp11, asm: "SQRTF"}, // sqrt(arg0), float32 // shifts {name: "SLL", argLength: 2, reg: gp21, asm: "SLL"}, // arg0 << arg1, shift amount is mod 32 {name: "SLLconst", argLength: 1, reg: gp11, asm: "SLL", aux: "Int32"}, // arg0 << auxInt, shift amount must be 0 through 31 inclusive
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 24 14:43:03 UTC 2023 - 24K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/ARM64.rules
// can do the shift directly. // left shift (Lsh(64|32|16|8)x64 <t> x y) && shiftIsBounded(v) => (SLL <t> x y) (Lsh(64|32|16|8)x32 <t> x y) && shiftIsBounded(v) => (SLL <t> x y) (Lsh(64|32|16|8)x16 <t> x y) && shiftIsBounded(v) => (SLL <t> x y) (Lsh(64|32|16|8)x8 <t> x y) && shiftIsBounded(v) => (SLL <t> x y) // signed right shift (Rsh64x(64|32|16|8) <t> x y) && shiftIsBounded(v) => (SRA <t> x y)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 15:49:20 UTC 2024 - 113.1K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewriteARM.go
v0.AddArg2(x, y) v.AddArg(v0) return true } // match: (CMP x (SLL y z)) // result: (CMPshiftLLreg x y z) for { x := v_0 if v_1.Op != OpARMSLL { break } z := v_1.Args[1] y := v_1.Args[0] v.reset(OpARMCMPshiftLLreg) v.AddArg3(x, y, z) return true } // match: (CMP (SLL y z) x) // result: (InvertFlags (CMPshiftLLreg x y z)) for {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Nov 20 17:19:36 UTC 2023 - 486.8K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewriteMIPS.go
// match: (AtomicAnd8 ptr val mem) // cond: !config.BigEndian // result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))))) mem) for { ptr := v_0 val := v_1 mem := v_2
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 24 14:43:03 UTC 2023 - 176.6K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/ARMOps.go
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Feb 24 00:21:13 UTC 2023 - 41K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/RISCV64Ops.go
{name: "MOVDnop", argLength: 1, reg: regInfo{inputs: []regMask{gpMask}, outputs: []regMask{gpMask}}, resultInArg0: true}, // nop, return arg0 in same register // Shift ops {name: "SLL", argLength: 2, reg: gp21, asm: "SLL"}, // arg0 << (aux1 & 63), logical left shift {name: "SLLW", argLength: 2, reg: gp21, asm: "SLLW"}, // arg0 << (aux1 & 31), logical left shift of 32 bit value, sign extended to 64 bits
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Mar 07 14:57:07 UTC 2024 - 30.7K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewriteCond_test.go
one64 int64 = 1 one32 int32 = 1 v64 int64 = 11 // ensure it's not 2**n +/- 1 v64_n int64 = -11 v32 int32 = 11 v32_n int32 = -11 uv32 uint32 = 19 uz uint8 = 1 // for lowering to SLL/SRL/SRA ) var crTests = []struct { name string tf func(t *testing.T) }{ {"AddConst64", testAddConst64}, {"AddConst32", testAddConst32}, {"AddVar64", testAddVar64},
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Mar 24 01:19:09 UTC 2023 - 11.1K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewriteARM64.go
v_0 := v.Args[0] // match: (SLL x (MOVDconst [c])) // result: (SLLconst x [c&63]) for { x := v_0 if v_1.Op != OpARM64MOVDconst { break } c := auxIntToInt64(v_1.AuxInt) v.reset(OpARM64SLLconst) v.AuxInt = int64ToAuxInt(c & 63) v.AddArg(x) return true } // match: (SLL x (ANDconst [63] y)) // result: (SLL x y) for { x := v_0
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 15:49:20 UTC 2024 - 608.6K bytes - Viewed (0) -
src/cmd/asm/internal/asm/testdata/armerror.s
CMP.S $29, g // ERROR "invalid .S suffix" ADD.W R1<<R2, R3 // ERROR "invalid .W suffix" SUB.U R1<<R2, R3, R9 // ERROR "invalid .U suffix" CMN.S R5->R2, R1 // ERROR "invalid .S suffix" SLL.P R1, R2, R3 // ERROR "invalid .P suffix" SRA.U R2, R8 // ERROR "invalid .U suffix" SWI.S // ERROR "invalid .S suffix" SWI.P $0 // ERROR "invalid .P suffix"
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Nov 03 14:06:21 UTC 2017 - 14.4K bytes - Viewed (0) -
test/codegen/mathbits.go
// s390x:"MOVWBR" // arm64:"REVW" // ppc64x/power10: "BRW" return bits.ReverseBytes32(n) } func ReverseBytes16(n uint16) uint16 { // amd64:"ROLW" // arm64:"REV16W",-"UBFX",-"ORR" // arm/5:"SLL","SRL","ORR" // arm/6:"REV16" // arm/7:"REV16" // ppc64x/power10: "BRH" return bits.ReverseBytes16(n) } // --------------------- // // bits.RotateLeft // // --------------------- //
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 18:51:17 UTC 2024 - 19.6K bytes - Viewed (0)