- Sort by: Score
- Results per page: 10
- Languages: All
Results 1 - 10 of 18 for opcodeTable (0.14 sec)
-
src/cmd/compile/internal/ssa/value.go
} return fmt.Sprintf("v%d", v.ID) } func (v *Value) AuxInt8() int8 { if opcodeTable[v.Op].auxType != auxInt8 && opcodeTable[v.Op].auxType != auxNameOffsetInt8 { v.Fatalf("op %s doesn't have an int8 aux field", v.Op) } return int8(v.AuxInt) } func (v *Value) AuxUInt8() uint8 { if opcodeTable[v.Op].auxType != auxUInt8 { v.Fatalf("op %s doesn't have a uint8 aux field", v.Op) }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 03 16:40:22 UTC 2024 - 16.7K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/_gen/main.go
fmt.Fprintln(w, "}") fmt.Fprintln(w, "func (o Op) Asm() obj.As {return opcodeTable[o].asm}") fmt.Fprintln(w, "func (o Op) Scale() int16 {return int16(opcodeTable[o].scale)}") // generate op string method fmt.Fprintln(w, "func (o Op) String() string {return opcodeTable[o].name }") fmt.Fprintln(w, "func (o Op) SymEffect() SymEffect { return opcodeTable[o].symEffect }")
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Jan 19 22:42:34 UTC 2023 - 16.9K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/nilcheck.go
// Find any pointers that this op is guaranteed to fault on if nil. var ptrstore [2]*Value ptrs := ptrstore[:0] if opcodeTable[v.Op].faultOnNilArg0 && (faultOnLoad || v.Type.IsMemory()) { // On AIX, only writing will fault. ptrs = append(ptrs, v.Args[0]) } if opcodeTable[v.Op].faultOnNilArg1 && (faultOnLoad || (v.Type.IsMemory() && v.Op != OpPPC64LoweredMove)) { // On AIX, only writing will fault.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Oct 31 20:45:54 UTC 2023 - 11.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/lower.go
// lowering and a subsequent dead code elimination (because lowering // rules may leave dead generic ops behind). for _, b := range f.Blocks { for _, v := range b.Values { if !opcodeTable[v.Op].generic { continue // lowered } switch v.Op {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Feb 16 00:16:13 UTC 2023 - 1.7K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/deadcode.go
q = append(q, v) if v.Pos.IsStmt() != src.PosNotStmt { liveOrderStmts = append(liveOrderStmts, v) } } } for _, v := range b.Values { if (opcodeTable[v.Op].call || opcodeTable[v.Op].hasSideEffects || opcodeTable[v.Op].nilCheck) && !live[v.ID] { live[v.ID] = true q = append(q, v) if v.Pos.IsStmt() != src.PosNotStmt { liveOrderStmts = append(liveOrderStmts, v) } }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Dec 08 00:29:01 UTC 2023 - 9.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/regalloc.go
} if op.IsCall() { if ac, ok := v.Aux.(*AuxCall); ok && ac.reg != nil { return *ac.Reg(&opcodeTable[op].reg, s.f.Config) } } if op == OpMakeResult && s.f.OwnAux.reg != nil { return *s.f.OwnAux.ResultReg(s.f.Config) } return opcodeTable[op].reg } func (s *regAllocState) isGReg(r register) bool { return s.f.Config.hasGReg && s.GReg == r }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Nov 21 17:49:56 UTC 2023 - 87.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/schedule.go
for _, b := range f.Blocks { for _, v := range b.Values { for i, a := range v.Args { if a.Op == OpSPanchored || opcodeTable[a.Op].nilCheck { v.SetArg(i, a.Args[0]) } } } for i, c := range b.ControlValues() { if c.Op == OpSPanchored || opcodeTable[c.Op].nilCheck { b.ReplaceControl(i, c.Args[0]) } } } for _, b := range f.Blocks { i := 0
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Apr 08 15:53:17 UTC 2024 - 16.4K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/addressingmodes.go
// has the pointer in arg[1] and the index in arg[2]. ptrIndex := 0 if opcodeTable[v.Op].resultInArg0 { ptrIndex = 1 } p := v.Args[ptrIndex] c, ok := combine[[2]Op{v.Op, p.Op}] if !ok { continue } // See if we can combine the Aux/AuxInt values. switch [2]auxType{opcodeTable[v.Op].auxType, opcodeTable[p.Op].auxType} { case [2]auxType{auxSymOff, auxInt32}: // TODO: introduce auxSymOff32
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Jul 26 17:19:57 UTC 2023 - 24.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/likelyadjust.go
} inner.outer = outer outer.isInner = false } func checkContainsCall(bb *Block) bool { if bb.Kind == BlockDefer { return true } for _, v := range bb.Values { if opcodeTable[v.Op].call { return true } } return false } type loopnest struct { f *Func b2l []*loop po []*Block sdom SparseTree
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Oct 31 21:41:20 UTC 2022 - 15.4K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/check.go
} for _, v := range b.Values { // Check to make sure argument count makes sense (argLen of -1 indicates // variable length args) nArgs := opcodeTable[v.Op].argLen if nArgs != -1 && int32(len(v.Args)) != nArgs { f.Fatalf("value %s has %d args, expected %d", v.LongString(), len(v.Args), nArgs) } // Check to make sure aux values make sense.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Apr 09 16:41:23 UTC 2024 - 17.6K bytes - Viewed (0)