Results 51 - 60 of 275 for clobber (0.13 sec)

  1. src/cmd/compile/internal/ssa/_gen/LOONG64Ops.go

    		// function calls
    		{name: "CALLstatic", argLength: -1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true},                                               // call static function aux.(*obj.LSym).  last arg=mem, auxint=argsize, returns mem
    - Last Modified: Tue Nov 21 19:04:19 UTC 2023
    - 25.2K bytes
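
    The clobbers field of regInfo is a register mask: it tells the register
    allocator which registers the op overwrites, so values that are live
    across the op cannot be kept in them; for calls the mask is callerSave.
    A minimal, self-contained sketch of the idea follows; the regMask/regInfo
    stand-ins and the mask values are invented for illustration and are not
    the real LOONG64 definitions.

        package main

        import "fmt"

        // Trimmed-down stand-ins for the op generator's types; the real
        // regMask and regInfo live in cmd/compile/internal/ssa/_gen.
        type regMask uint64

        type regInfo struct {
        	clobbers regMask
        }

        // Invented mask values, purely for illustration.
        const (
        	gp regMask = 0x00000000ffffffff // general-purpose registers
        	fp regMask = 0xffffffff00000000 // floating-point registers
        )

        // callerSave stands for every register a call may destroy; an op
        // whose regInfo sets clobbers to this mask forces the register
        // allocator to treat all caller-saved registers as dead after it.
        var callerSave = gp | fp

        func main() {
        	call := regInfo{clobbers: callerSave}
        	fmt.Printf("call clobbers mask: %#x\n", call.clobbers)
        }
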
  2. src/cmd/compile/internal/ssa/rewrite.go

    	return nil // too far away
    }
    
    // clobber invalidates values. Returns true.
    // clobber is used by rewrite rules to:
    //
    //	A) make sure the values are really dead and never used again.
    //	B) decrement use counts of the values' args.
    func clobber(vv ...*Value) bool {
    	for _, v := range vv {
    		v.reset(OpInvalid)
    		// Note: leave v.Block intact.  The Block field is used after clobber.
    	}
    	return true
    }
    
    - Last Modified: Fri Jun 07 19:02:52 UTC 2024
    - 64.2K bytes
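
    Because clobber always returns true, the generated rewrite rules can fold
    it into the boolean condition guarding a rewrite, and the short-circuit
    evaluation of && guarantees the old value is only invalidated once every
    earlier check has passed (the rewriteRISCV64.go hit further down shows the
    real pattern). A minimal runnable sketch of that calling convention, using
    simplified stand-ins rather than the real ssa.Value:

        package main

        import "fmt"

        // Simplified stand-in for ssa.Value; illustration only.
        type Value struct {
        	Op   string
        	Uses int
        }

        // clobber mirrors the shape of the real helper: it invalidates its
        // arguments and always returns true, so it can sit at the end of a
        // rule's condition.
        func clobber(vv ...*Value) bool {
        	for _, v := range vv {
        		v.Op = "Invalid"
        	}
        	return true
        }

        func main() {
        	x := &Value{Op: "MOVBload", Uses: 2}
        	// x.Uses == 1 fails here, so && short-circuits and clobber never
        	// runs: the value is left intact when the rewrite does not fire.
        	if x.Uses == 1 && clobber(x) {
        		fmt.Println("rewrote; x is now", x.Op)
        	} else {
        		fmt.Println("no rewrite; x is still", x.Op)
        	}
        }
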
  3. src/cmd/compile/internal/ssa/rewritegeneric.go

    			break
    		}
    		v.reset(OpMove)
    		v.AuxInt = int64ToAuxInt(n)
    		v.Aux = typeToAux(t1)
    		v.AddArg3(dst1, src1, mem)
    		return true
    	}
    	// match: (Move {t} [n] dst1 src1 move:(Move {t} [n] dst2 _ mem))
    	// cond: move.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move)
    	// result: (Move {t} [n] dst1 src1 mem)
    - Last Modified: Mon Apr 22 18:24:47 UTC 2024
    - 812.2K bytes
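
    The match/cond/result comments above are the generated form of a single
    rule in cmd/compile/internal/ssa/_gen/generic.rules. Reassembled from
    those comments (a reconstruction, not a quote of the .rules file), the
    rule reads roughly:

        (Move {t} [n] dst1 src1 move:(Move {t} [n] dst2 _ mem))
        	&& move.Uses == 1
        	&& isSamePtr(dst1, dst2)
        	&& disjoint(src1, n, dst2, n)
        	&& clobber(move)
        	=> (Move {t} [n] dst1 src1 mem)

    It drops an n-byte Move whose destination is immediately overwritten in
    full by the outer Move (the disjoint check ensures the dropped store
    cannot feed the outer copy's source), and clobber(move) erases the dead
    inner Move once the rule is known to apply.
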
  4. test/live.go

    	f43([]*int{&p, &r, &q})
    	f43([]*int{&q, &p, &r})
    }
    
    //go:noescape
    func f43(a []*int)
    
    // Assigning to a sub-element that makes up an entire local variable
    // should clobber that variable.
    func f44(f func() [2]*int) interface{} { // ERROR "live at entry to f44: f"
    	type T struct {
    		s [1][2]*int
    	}
    	ret := T{} // ERROR "stack object ret T"
    	ret.s[0] = f()
    	return ret
    - Last Modified: Tue Dec 05 20:34:30 UTC 2023
    - 18K bytes
  5. src/cmd/compile/internal/ssa/rewriteRISCV64.go

    	// cond: x.Uses == 1 && clobber(x)
    	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
    	for {
    		t := v.Type
    		x := v_0
    		if x.Op != OpRISCV64MOVBload {
    			break
    		}
    		off := auxIntToInt32(x.AuxInt)
    		sym := auxToSym(x.Aux)
    		mem := x.Args[1]
    		ptr := x.Args[0]
    		if !(x.Uses == 1 && clobber(x)) {
    			break
    		}
    		b = x.Block
    - Last Modified: Thu Mar 07 14:57:07 UTC 2024
    - 205.1K bytes
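
    The "@x.Block" prefix in the result comment means the replacement value is
    materialized in x's block rather than v's, which is why the excerpt ends by
    setting b = x.Block. Generated code for such rules typically continues by
    building the new value in that block and aliasing v to it; a sketch of the
    usual shape (based on the common pattern in the generated rewrite files,
    not quoted from rewriteRISCV64.go):

        	v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t) // build the unsigned load in x's block
        	v.copyOf(v0)                                    // make v an alias of the new value
        	v0.AuxInt = int32ToAuxInt(off)
        	v0.Aux = symToAux(sym)
        	v0.AddArg2(ptr, mem)
        	return true
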
  6. src/cmd/internal/obj/x86/obj6.go

    	} else {
    		lea = ALEAL
    		mov = AMOVL
    		reg = REG_CX
    		if p.As == ALEAL && p.To.Reg != p.From.Reg && p.To.Reg != p.From.Index {
    			// Special case: clobber the destination register with
    			// the PC so we don't have to clobber CX.
    			// The SSA backend depends on CX not being clobbered across LEAL.
    			// See cmd/compile/internal/ssa/gen/386.rules (search for Flag_shared).
    			reg = p.To.Reg
    		}
    	}
    
    - Last Modified: Fri Sep 08 18:36:45 UTC 2023
    - 40.9K bytes
  7. src/runtime/stack.go

    	if n&(n-1) != 0 {
    		throw("stack not a power of 2")
    	}
    	if stk.lo+n < stk.hi {
    		throw("bad stack size")
    	}
    	if stackDebug >= 1 {
    		println("stackfree", v, n)
    		memclrNoHeapPointers(v, n) // for testing, clobber stack data
    	}
    	if debug.efence != 0 || stackFromSystem != 0 {
    		if debug.efence != 0 || stackFaultOnFree != 0 {
    			sysFault(v, n)
    		} else {
    			sysFree(v, n, &memstats.stacks_sys)
    		}
    		return
    	}
    - Last Modified: Wed May 22 22:31:00 UTC 2024
    - 41.1K bytes
  8. src/cmd/compile/internal/ssa/_gen/RISCV64Ops.go

    		fp2gp   = regInfo{inputs: []regMask{fpMask, fpMask}, outputs: []regMask{gpMask}}
    
    		call        = regInfo{clobbers: callerSave}
    		callClosure = regInfo{inputs: []regMask{gpspMask, regCtxt, 0}, clobbers: callerSave}
    		callInter   = regInfo{inputs: []regMask{gpMask}, clobbers: callerSave}
    	)
    
    	RISCV64ops := []opData{
    		{name: "ADD", argLength: 2, reg: gp21, asm: "ADD", commutative: true}, // arg0 + arg1
    - Last Modified: Thu Mar 07 14:57:07 UTC 2024
    - 30.7K bytes
  9. src/runtime/asm_386.s

    // gcWriteBarrier does NOT follow the Go ABI. It accepts the
    // number of bytes of buffer needed in DI, and returns a pointer
    // to the buffer space in DI.
    // It clobbers FLAGS. It does not clobber any general-purpose registers,
    // but may clobber others (e.g., SSE registers).
    // Typical use would be, when doing *(CX+88) = AX
    //     CMPL    $0, runtime.writeBarrier(SB)
    //     JEQ     dowrite
    - Last Modified: Fri Mar 15 15:45:13 UTC 2024
    - 43.1K bytes
  10. src/runtime/race_ppc64le.s

    	BL	racecall<>(SB)
    	MOVD	R15, R8	// restore the original function
    	MOVD	R17, R6 // restore arg list addr
    	// Call the atomic function.
    	// racecall will call LLVM race code which might clobber r30 (g)
    MOVD	runtime·tls_g(SB), R10
    	MOVD	0(R10), g
    
    	MOVD	g_racectx(g), R3
    	MOVD	R8, R4		// pc being called same TODO as above
    	MOVD	(R1), R5	// caller pc from latest LR
    	BL	racecall<>(SB)
    - Last Modified: Fri May 17 18:37:29 UTC 2024
    - 17K bytes