- Sort Score
- Results per page: 10
- Languages All
Results 21 - 30 of 1,032 for spill (0.16 sec)
-
src/cmd/compile/internal/ssa/func.go
// RegArgs is a slice of register-memory pairs that must be spilled and unspilled in the uncommon path of function entry. RegArgs []Spill // OwnAux describes parameters and results for this function. OwnAux *AuxCall // CloSlot holds the compiler-synthesized name (".closureptr") // where we spill the closure pointer for range func bodies. CloSlot *ir.Name
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Jun 10 19:44:43 UTC 2024 - 25.8K bytes - Viewed (0) -
docs/bigdata/README.md
mapreduce.reduce.shuffle.merge.percent=0.9 # Minimum % merges in RAM mapreduce.reduce.speculative=false # Disable speculation for reducing mapreduce.task.io.sort.factor=999 # Threshold before writing to disk mapreduce.task.sort.spill.percent=0.9 # Minimum % before spilling to disk ```
Registered: Sun Jun 16 00:44:34 UTC 2024 - Last Modified: Thu Sep 29 04:28:45 UTC 2022 - 14.7K bytes - Viewed (0) -
test/codegen/comparisons.go
} func cmpstring2(x, y string) int { // We want to fail if there are two calls to cmpstring. // They will both have the same line number, so a test // like in cmpstring1 will not work. Instead, we // look for spill/restore instructions, which only // need to exist if there are 2 calls. //amd64:-`MOVQ\t.*\(SP\)` return cmp.Compare(x, y)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Apr 19 16:31:02 UTC 2024 - 15.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/schedule.go
if v.Op == OpSPanchored { // Free this value if v.Uses != 0 { base.Fatalf("SPAnchored still has %d uses", v.Uses) } v.resetArgs() f.freeValue(v) } else { if opcodeTable[v.Op].nilCheck { if v.Uses != 0 { base.Fatalf("nilcheck still has %d uses", v.Uses) } // We can't delete the nil check, but we mark // it as having void type so regalloc won't
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Apr 08 15:53:17 UTC 2024 - 16.4K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/op.go
return c.registers[m].objNum } // ArgWidth returns the amount of stack needed for all the inputs // and outputs of a function or method, including ABI-defined parameter // slots and ABI-defined spill slots for register-resident parameters. // // The name is taken from the types package's ArgWidth(<function type>), // which predated changes to the ABI; this version handles those changes. func (a *AuxCall) ArgWidth() int64 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Apr 22 15:29:10 UTC 2024 - 18.7K bytes - Viewed (0) -
src/runtime/sys_openbsd_amd64.s
// Transition from C ABI to Go ABI. PUSH_REGS_HOST_TO_ABI0() // Set up ABIInternal environment: g in R14, cleared X15. get_tls(R12) MOVQ g(R12), R14 PXOR X15, X15 // Reserve space for spill slots. NOP SP // disable vet stack checking ADJSP $24 // Call into the Go signal handler MOVQ DI, AX // sig MOVQ SI, BX // info MOVQ DX, CX // ctx CALL ·sigtrampgo<ABIInternal>(SB) ADJSP $-24
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Jun 06 18:49:01 UTC 2023 - 15.5K bytes - Viewed (0) -
src/runtime/stubs.go
// therefore be >= stackArgsSize. It must include additional space for spilling // register arguments for stack growth and preemption. // // TODO(mknyszek): Once we don't need the additional spill space, remove frameSize, // since frameSize will be redundant with stackArgsSize. // // Arguments passed in registers must be laid out in regArgs according to the ABI.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 29 17:58:53 UTC 2024 - 20.2K bytes - Viewed (0) -
src/runtime/sys_darwin_amd64.s
// Transition from C ABI to Go ABI. PUSH_REGS_HOST_TO_ABI0() // Set up ABIInternal environment: g in R14, cleared X15. get_tls(R12) MOVQ g(R12), R14 PXOR X15, X15 // Reserve space for spill slots. NOP SP // disable vet stack checking ADJSP $24 // Call into the Go signal handler MOVQ DI, AX // sig MOVQ SI, BX // info MOVQ DX, CX // ctx CALL ·sigtrampgo<ABIInternal>(SB)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Aug 03 16:07:59 UTC 2023 - 19.7K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/writebarrier.go
// any pointers we must get into the write barrier buffer still make it, // possibly in a different order and possibly a different (but definitely // more than 0) number of times. // In light of that, we process all the OpStoreWBs first. This minimizes // the amount of spill/restore code we need around the Zero/Move calls.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Sep 08 19:09:14 UTC 2023 - 23.5K bytes - Viewed (0) -
src/cmd/compile/internal/walk/assign.go
nodes.Append(nif) // Index to start copying into s. // idx = newLen - len(l2) // We use this expression instead of oldLen because it avoids // a spill/restore of oldLen. // Note: this doesn't work optimally currently because // the compiler optimizer undoes this arithmetic. idx := ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewUnaryExpr(base.Pos, ir.OLEN, l2))
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 08 17:09:06 UTC 2024 - 20.3K bytes - Viewed (0)