- Sort Score
- Results per page: 10
- Languages All
Results 11 - 20 of 554 for opload (0.12 sec)
-
src/sync/map_test.go
) var mapOps = [...]mapOp{ opLoad, opStore, opLoadOrStore, opLoadAndDelete, opDelete, opSwap, opCompareAndSwap, opCompareAndDelete, opClear, } // mapCall is a quick.Generator for calls on mapInterface. type mapCall struct { op mapOp k, v any } func (c mapCall) apply(m mapInterface) (any, bool) { switch c.op { case opLoad: return m.Load(c.k) case opStore:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Feb 01 15:34:22 UTC 2024 - 8.1K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/memcombine.go
} } if v.Op != extOp { return false } load := v.Args[0] if load.Op != OpLoad { return false } if load.Uses != 1 { return false } if load.Args[1] != mem { return false } p, off := splitPtr(load.Args[0]) if p != base { return false } r[i] = LoadRecord{load: load, offset: off, shift: shift} } // Sort in memory address order.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Mar 21 19:45:41 UTC 2024 - 18.4K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/expand_calls.go
return m0 } if a.Op == OpDereference { a.Op = OpLoad // For purposes of parameter passing expansion, a Dereference is a Load. } if !rc.hasRegs() && !CanSSA(at) { dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at)) if x.debug > 1 { x.Printf("...recur store %s at %s\n", a.LongString(), dst.LongString()) } if a.Op == OpLoad {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Nov 28 05:13:40 UTC 2023 - 31.9K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/writebarrier.go
bEnd.Succs = append(bEnd.Succs, e) e.b.Preds[e.i].b = bEnd } // set up control flow for write barrier test // load word, test word, avoiding partial register write from load byte. cfgtypes := &f.Config.Types flag := b.NewValue2(pos, OpLoad, cfgtypes.UInt32, wbaddr, mem) flag = b.NewValue2(pos, OpNeq32, cfgtypes.Bool, flag, const0) b.Kind = BlockIf b.SetControl(flag)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Sep 08 19:09:14 UTC 2023 - 23.5K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewritedec64.go
v0.AddArg2(ptr, mem) v1 := b.NewValue0(v.Pos, OpLoad, typ.UInt32) v2 := b.NewValue0(v.Pos, OpOffPtr, typ.UInt32Ptr) v2.AuxInt = int64ToAuxInt(4) v2.AddArg(ptr) v1.AddArg2(v2, mem) v.AddArg2(v0, v1) return true } // match: (Load <t> ptr mem) // cond: is64BitInt(t) && config.BigEndian && !t.IsSigned()
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Jan 19 22:42:34 UTC 2023 - 65.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/branchelim.go
default: return } // Find all the values used in computing the address of any load. // Typically these values have operations like AddPtr, Lsh64x64, etc. loadAddr := f.newSparseSet(f.NumValues()) defer f.retSparseSet(loadAddr) for _, b := range f.Blocks { for _, v := range b.Values { switch v.Op { case OpLoad, OpAtomicLoad8, OpAtomicLoad32, OpAtomicLoad64, OpAtomicLoadPtr, OpAtomicLoadAcq32, OpAtomicLoadAcq64:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Nov 30 17:46:51 UTC 2022 - 12.7K bytes - Viewed (0) -
src/cmd/internal/obj/ppc64/asm9.go
if eh > 1 { c.ctxt.Diag("illegal EH field\n%v", p) } o1 = AOP_RRRI(c.oploadx(p.As), uint32(p.To.Reg), uint32(p.From.Index), uint32(p.From.Reg), uint32(eh)) } else { o1 = AOP_RRR(c.oploadx(p.As), uint32(p.To.Reg), uint32(p.From.Index), uint32(p.From.Reg)) } default: o1 = AOP_RRR(c.oploadx(p.As), uint32(p.To.Reg), uint32(p.From.Index), uint32(p.From.Reg)) } case 46: /* plain op */
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 13:55:28 UTC 2024 - 156.1K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/loopreschedchecks.go
sp := test.NewValue0(bb.Pos, OpSP, pt) cmpOp := OpLess64U if pt.Size() == 4 { cmpOp = OpLess32U } limaddr := test.NewValue1I(bb.Pos, OpOffPtr, pt, 2*pt.Size(), g) lim := test.NewValue2(bb.Pos, OpLoad, pt, limaddr, mem0) cmp := test.NewValue2(bb.Pos, cmpOp, cfgtypes.Bool, sp, lim) test.SetControl(cmp) // if true, goto sched test.AddEdgeTo(sched) // if false, rewrite edge to header.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Aug 22 21:17:10 UTC 2023 - 16K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/check.go
v.Op, v.Type.String()) } case OpStringLen: if v.Type != c.Types.Int { f.Fatalf("bad %s type: want int, have %s", v.Op, v.Type.String()) } case OpLoad: if !v.Args[1].Type.IsMemory() { f.Fatalf("bad arg 1 type to %s: want mem, have %s", v.Op, v.Args[1].Type.String()) } case OpStore: if !v.Type.IsMemory() {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Apr 09 16:41:23 UTC 2024 - 17.6K bytes - Viewed (0) -
src/cmd/compile/internal/ssagen/ssa.go
b.AddEdgeTo(cacheMiss) b.AddEdgeTo(loopHead) // On a hit, load the data fields of the cache entry. // Case = e.Case // Itab = e.Itab s.startBlock(cacheHit) eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem()) eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Jun 10 19:44:43 UTC 2024 - 284.9K bytes - Viewed (0)