Results 1 - 10 of 12 for opcodeTable (0.16 sec)
src/cmd/compile/internal/ssa/value.go
	}
	return fmt.Sprintf("v%d", v.ID)
}

func (v *Value) AuxInt8() int8 {
	if opcodeTable[v.Op].auxType != auxInt8 && opcodeTable[v.Op].auxType != auxNameOffsetInt8 {
		v.Fatalf("op %s doesn't have an int8 aux field", v.Op)
	}
	return int8(v.AuxInt)
}

func (v *Value) AuxUInt8() uint8 {
	if opcodeTable[v.Op].auxType != auxUInt8 {
		v.Fatalf("op %s doesn't have a uint8 aux field", v.Op)
	}
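Each typed accessor follows the same pattern: look up the op's aux encoding in opcodeTable, fail loudly if it doesn't match, and only then reinterpret the shared AuxInt field. A minimal, self-contained sketch of that pattern; the Op, auxType, opInfo, opcodeTable, and Value declarations below are simplified stand-ins, not the real ssa types.

package main

import "fmt"

// Op, auxType, opInfo, opcodeTable and Value are simplified stand-ins
// for the real cmd/compile/internal/ssa declarations.
type Op int
type auxType int

const (
	auxNone auxType = iota
	auxInt8
	auxUInt8
)

type opInfo struct{ aux auxType }

var opcodeTable = map[Op]opInfo{
	0: {aux: auxInt8},
	1: {aux: auxUInt8},
}

type Value struct {
	Op     Op
	AuxInt int64 // one wide field, reinterpreted per op
}

// AuxInt8 checks the op's declared aux encoding before narrowing AuxInt,
// mirroring the accessors in the excerpt above.
func (v *Value) AuxInt8() int8 {
	if opcodeTable[v.Op].aux != auxInt8 {
		panic(fmt.Sprintf("op %d doesn't have an int8 aux field", v.Op))
	}
	return int8(v.AuxInt)
}

func main() {
	v := &Value{Op: 0, AuxInt: -5}
	fmt.Println(v.AuxInt8()) // -5
}
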
src/cmd/compile/internal/ssa/nilcheck.go
	// Find any pointers that this op is guaranteed to fault on if nil.
	var ptrstore [2]*Value
	ptrs := ptrstore[:0]
	if opcodeTable[v.Op].faultOnNilArg0 && (faultOnLoad || v.Type.IsMemory()) { // On AIX, only writing will fault.
		ptrs = append(ptrs, v.Args[0])
	}
	if opcodeTable[v.Op].faultOnNilArg1 && (faultOnLoad || (v.Type.IsMemory() && v.Op != OpPPC64LoweredMove)) { // On AIX, only writing will fault.
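A simplified sketch of this collection step, ignoring the AIX-specific faultOnLoad condition; the two boolean fields stand in for the faultOnNilArg0/faultOnNilArg1 bits the real code reads from opcodeTable.

package main

import "fmt"

// Value is a stand-in for ssa.Value; the two booleans stand in for the
// faultOnNilArg0/faultOnNilArg1 flags from opcodeTable.
type Value struct {
	Args                           []string
	FaultOnNilArg0, FaultOnNilArg1 bool
}

// nilFaultPtrs collects the pointer arguments the op is guaranteed to
// dereference, i.e. the ones a preceding nil check can be folded into.
func nilFaultPtrs(v *Value) []string {
	var ptrs []string
	if v.FaultOnNilArg0 {
		ptrs = append(ptrs, v.Args[0])
	}
	if v.FaultOnNilArg1 {
		ptrs = append(ptrs, v.Args[1])
	}
	return ptrs
}

func main() {
	move := &Value{Args: []string{"dst", "src", "mem"}, FaultOnNilArg0: true, FaultOnNilArg1: true}
	fmt.Println(nilFaultPtrs(move)) // [dst src]
}
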
src/cmd/compile/internal/ssa/deadcode.go
			q = append(q, v)
			if v.Pos.IsStmt() != src.PosNotStmt {
				liveOrderStmts = append(liveOrderStmts, v)
			}
		}
	}
	for _, v := range b.Values {
		if (opcodeTable[v.Op].call || opcodeTable[v.Op].hasSideEffects || opcodeTable[v.Op].nilCheck) && !live[v.ID] {
			live[v.ID] = true
			q = append(q, v)
			if v.Pos.IsStmt() != src.PosNotStmt {
				liveOrderStmts = append(liveOrderStmts, v)
			}
		}
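The flags read from opcodeTable here seed dead-code elimination: calls, side-effecting ops, and nil checks are live even if nothing uses their result, and liveness then flows to their arguments through a worklist. A compressed, runnable sketch of that scheme with stand-in types (not the real ssa ones):

package main

import "fmt"

// Value is a stand-in for ssa.Value; the three booleans stand in for the
// call/hasSideEffects/nilCheck flags read from opcodeTable.
type Value struct {
	ID                          int
	Args                        []*Value
	Call, SideEffects, NilCheck bool
}

// findLive seeds liveness with values that must run regardless of use,
// then propagates it to everything they consume.
func findLive(values []*Value) map[int]bool {
	live := map[int]bool{}
	var q []*Value
	for _, v := range values {
		if (v.Call || v.SideEffects || v.NilCheck) && !live[v.ID] {
			live[v.ID] = true
			q = append(q, v)
		}
	}
	for len(q) > 0 {
		v := q[len(q)-1]
		q = q[:len(q)-1]
		for _, a := range v.Args {
			if !live[a.ID] {
				live[a.ID] = true
				q = append(q, a)
			}
		}
	}
	return live
}

func main() {
	x := &Value{ID: 1}
	call := &Value{ID: 2, Args: []*Value{x}, Call: true}
	dead := &Value{ID: 3}
	fmt.Println(findLive([]*Value{x, call, dead})) // map[1:true 2:true]
}
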
src/cmd/compile/internal/ssa/regalloc.go
	}
	if op.IsCall() {
		if ac, ok := v.Aux.(*AuxCall); ok && ac.reg != nil {
			return *ac.Reg(&opcodeTable[op].reg, s.f.Config)
		}
	}
	if op == OpMakeResult && s.f.OwnAux.reg != nil {
		return *s.f.OwnAux.ResultReg(s.f.Config)
	}
	return opcodeTable[op].reg
}

func (s *regAllocState) isGReg(r register) bool {
	return s.f.Config.hasGReg && s.GReg == r
}
src/cmd/compile/internal/ssa/schedule.go
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			for i, a := range v.Args {
				if a.Op == OpSPanchored || opcodeTable[a.Op].nilCheck {
					v.SetArg(i, a.Args[0])
				}
			}
		}
		for i, c := range b.ControlValues() {
			if c.Op == OpSPanchored || opcodeTable[c.Op].nilCheck {
				b.ReplaceControl(i, c.Args[0])
			}
		}
	}
	for _, b := range f.Blocks {
		i := 0
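The rewrite here makes scheduling ignore pass-through wrappers: any argument whose op is OpSPanchored or is marked nilCheck in opcodeTable is bypassed in favour of its own first argument. A stand-alone sketch of the same "look through the wrapper" move, with made-up types:

package main

import "fmt"

// Value is a stand-in for ssa.Value; Wrapper stands in for
// "Op == OpSPanchored || opcodeTable[Op].nilCheck".
type Value struct {
	Name    string
	Args    []*Value
	Wrapper bool
}

// unwrapArgs bypasses wrapper arguments by pointing each use at the
// wrapper's own first argument, like the SetArg calls in the excerpt.
func unwrapArgs(v *Value) {
	for i, a := range v.Args {
		if a.Wrapper {
			v.Args[i] = a.Args[0]
		}
	}
}

func main() {
	ptr := &Value{Name: "ptr"}
	check := &Value{Name: "NilCheck", Args: []*Value{ptr}, Wrapper: true}
	load := &Value{Name: "Load", Args: []*Value{check}}
	unwrapArgs(load)
	fmt.Println(load.Args[0].Name) // ptr
}
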
src/cmd/compile/internal/ssa/addressingmodes.go
		// has the pointer in arg[1] and the index in arg[2].
		ptrIndex := 0
		if opcodeTable[v.Op].resultInArg0 {
			ptrIndex = 1
		}
		p := v.Args[ptrIndex]
		c, ok := combine[[2]Op{v.Op, p.Op}]
		if !ok {
			continue
		}
		// See if we can combine the Aux/AuxInt values.
		switch [2]auxType{opcodeTable[v.Op].auxType, opcodeTable[p.Op].auxType} {
		case [2]auxType{auxSymOff, auxInt32}:
			// TODO: introduce auxSymOff32
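The key idea is the pair-keyed lookup: the combine table is indexed by the (consumer op, producer op) pair, and a hit names the fused addressing-mode op. A tiny sketch of that lookup; the Op type and the op names below are illustrative stand-ins, not necessarily entries of the real table.

package main

import "fmt"

type Op string

// combine maps a (memory op, address-computation op) pair to the fused op
// that folds the addressing mode in. Entries here are illustrative only.
var combine = map[[2]Op]Op{
	{"MOVQload", "LEAQ1"}: "MOVQloadidx1",
}

func main() {
	v, p := Op("MOVQload"), Op("LEAQ1")
	if fused, ok := combine[[2]Op{v, p}]; ok {
		fmt.Println("rewrite", v, "of", p, "to", fused) // MOVQloadidx1
	}
}
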
src/cmd/compile/internal/ssa/check.go
		}
		for _, v := range b.Values {
			// Check to make sure argument count makes sense (argLen of -1 indicates
			// variable length args)
			nArgs := opcodeTable[v.Op].argLen
			if nArgs != -1 && int32(len(v.Args)) != nArgs {
				f.Fatalf("value %s has %d args, expected %d", v.LongString(),
					len(v.Args), nArgs)
			}

			// Check to make sure aux values make sense.
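The arity rule encoded in opcodeTable is simple: argLen == -1 means the op takes a variable number of arguments, any other value must match the argument count exactly. A small, runnable restatement of that check (checkArity and the op names are hypothetical):

package main

import "fmt"

// checkArity mirrors the argument-count invariant: argLen == -1 marks a
// variable-length op, anything else must match exactly.
func checkArity(op string, argLen int32, got int) error {
	if argLen != -1 && int32(got) != argLen {
		return fmt.Errorf("op %s has %d args, expected %d", op, got, argLen)
	}
	return nil
}

func main() {
	fmt.Println(checkArity("Add64", 2, 2)) // <nil>
	fmt.Println(checkArity("Add64", 2, 3)) // error
	fmt.Println(checkArity("Phi", -1, 5))  // <nil>, variadic
}
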
src/cmd/compile/internal/ssa/fuse.go
// There may be false positives.
func isEmpty(b *Block) bool {
	for _, v := range b.Values {
		if v.Uses > 0 || v.Op.IsCall() || v.Op.HasSideEffects() || v.Type.IsVoid() || opcodeTable[v.Op].nilCheck {
			return false
		}
	}
	return true
}

// fuseBlockPlain handles a run of blocks with length >= 2,
// whose interior has single predecessors and successors,
src/cmd/compile/internal/ssa/config.go
			c.useSSE = false
		}
	}
	if ctxt.Flag_shared {
		// LoweredWB is secretly a CALL and CALLs on 386 in
		// shared mode get rewritten by obj6.go to go through
		// the GOT, which clobbers BX.
		opcodeTable[Op386LoweredWB].reg.clobbers |= 1 << 3 // BX
	}

	// Create the GC register map index.
	// TODO: This is only used for debug printing. Maybe export config.registers?
	gcRegMapSize := int16(0)
src/cmd/compile/internal/ssa/debug.go
		state.currentState.slots[slot] = loc
	}

	// Handle any register clobbering. Call operations, for example,
	// clobber all registers even though they don't explicitly write to
	// them.
	clobbers := uint64(opcodeTable[v.Op].reg.clobbers)
	for {
		if clobbers == 0 {
			break
		}
		reg := uint8(bits.TrailingZeros64(clobbers))
		clobbers &^= 1 << reg
		for _, slot := range locs.registers[reg] {
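The loop walks the clobber mask one set bit at a time using math/bits. The same idiom in isolation, with an arbitrary example mask:

package main

import (
	"fmt"
	"math/bits"
)

func main() {
	// Example mask with bits 0, 3, and 11 set; each set bit is a
	// clobbered register number.
	clobbers := uint64(1<<0 | 1<<3 | 1<<11)
	for clobbers != 0 {
		reg := uint8(bits.TrailingZeros64(clobbers)) // index of lowest set bit
		clobbers &^= 1 << reg                        // clear it and keep going
		fmt.Println("clobbered register", reg)       // prints 0, 3, 11
	}
}
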