Results 21 - 30 of 122 for clobber (0.12 sec)

  1. pkg/registry/policy/poddisruptionbudget/strategy_test.go

    		t.Errorf("PodDisruptionBudget status updates should allow change of CurrentHealthy: %v", newPdb.Status.CurrentHealthy)
    	}
    	if newPdb.Spec.MinAvailable.IntValue() != 3 {
    		t.Errorf("PodDisruptionBudget status updates should not clobber spec: %v", newPdb.Spec)
    	}
    	errs := StatusStrategy.ValidateUpdate(ctx, newPdb, oldPdb)
    	if len(errs) != 0 {
    		t.Errorf("Unexpected error %v", errs)
    	}
    }
    
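    The pattern this test (and the StatefulSet test in result 3) exercises is that a status
    subresource update must not clobber the object's spec; the strategy typically enforces
    this in its PrepareForUpdate step by copying the spec back from the stored object. A
    minimal sketch of that idea, using simplified stand-in types rather than the real
    Kubernetes API machinery:

    package main

    import "fmt"

    // Simplified stand-ins for the real API types.
    type Spec struct{ MinAvailable int }
    type Status struct{ CurrentHealthy int }
    type PodDisruptionBudget struct {
        Spec   Spec
        Status Status
    }

    // prepareStatusUpdate mirrors what a status strategy's PrepareForUpdate does:
    // keep the incoming Status, but restore Spec from the old object so a
    // status-only client can never clobber the spec.
    func prepareStatusUpdate(newObj, oldObj *PodDisruptionBudget) {
        newObj.Spec = oldObj.Spec
    }

    func main() {
        oldPdb := &PodDisruptionBudget{Spec: Spec{MinAvailable: 3}}
        newPdb := &PodDisruptionBudget{
            Spec:   Spec{MinAvailable: 7}, // attempted spec change via a status update
            Status: Status{CurrentHealthy: 2},
        }
        prepareStatusUpdate(newPdb, oldPdb)
        fmt.Println(newPdb.Spec.MinAvailable, newPdb.Status.CurrentHealthy) // 3 2
    }
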
  2. src/runtime/mkpreempt.go

    		reg := fmt.Sprintf("F%d", i)
    		l.add("FMOVD", reg, 8)
    	}
    
    	// allocate frame, save PC of interrupted instruction (in LR) and flags (condition code)
    	p("IPM R10") // save flags upfront, as ADD will clobber flags
    	p("MOVD R14, -%d(R15)", l.stack)
    	p("ADD $-%d, R15", l.stack)
    	p("MOVW R10, 8(R15)") // save flags
    
    	l.save()
    	p("CALL ·asyncPreempt2(SB)")
    	l.restore()
    
  3. pkg/registry/apps/statefulset/strategy_test.go

    	if newPS.Status.Replicas != 2 {
    		t.Errorf("StatefulSet status updates should allow change of pods: %v", newPS.Status.Replicas)
    	}
    	if newPS.Spec.Replicas != 3 {
    		t.Errorf("StatefulSet status updates should not clobber spec: %v", newPS.Spec)
    	}
    	errs := StatusStrategy.ValidateUpdate(ctx, newPS, oldPS)
    	if len(errs) != 0 {
    		t.Errorf("unexpected error %v", errs)
    	}
    }
    
  4. src/slices/slices.go

    		//            i  i+m      n  n+m
    		// That's the result we want.
    		return s
    	}
    
    	// The hard case - v overlaps c or d. We can't just shift up
    	// the data because we'd move or clobber the values we're trying
    	// to insert.
    	// So instead, write v on top of d, then rotate.
    	copy(s[n:], v)
    
    	// Now we have
    	// s: aaaaaaaabbbbccccccccvvvv
    	//            ^   ^       ^   ^
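    The overlap this comment guards against is easy to reproduce: when the values being
    inserted alias the slice's own tail, a naive "shift the tail up, then copy the values
    in" reads them only after the shift has already moved them. A small sketch of the
    hazard and of slices.Insert avoiding it (the element values are arbitrary):

    package main

    import (
        "fmt"
        "slices"
    )

    func main() {
        s := make([]int, 6, 10)
        copy(s, []int{10, 20, 30, 40, 50, 60})
        v := s[4:6] // v aliases s's tail: {50, 60}

        // Naive insert of v at index 1, done on a separate copy of s:
        // grow, shift the tail up to make room, then copy v into the gap.
        n := make([]int, 6, 10)
        copy(n, s)
        nv := n[4:6]
        n = n[:8]
        copy(n[3:], n[1:6]) // the shift overwrites the elements nv points at
        copy(n[1:3], nv)    // nv now reads the shifted data, not {50, 60}
        fmt.Println(n) // [10 30 40 20 30 40 50 60]: the inserted values were clobbered

        // slices.Insert writes v over the tail first and then rotates, so the
        // aliased values survive.
        fmt.Println(slices.Insert(s, 1, v...)) // [10 50 60 20 30 40 50 60]
    }
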
  5. src/cmd/compile/internal/ssa/_gen/LOONG64Ops.go

    		// function calls
    		{name: "CALLstatic", argLength: -1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true},                                               // call static function aux.(*obj.LSym).  last arg=mem, auxint=argsize, returns mem
  6. test/live.go

    	f43([]*int{&p, &r, &q})
    	f43([]*int{&q, &p, &r})
    }
    
    //go:noescape
    func f43(a []*int)
    
    // Assigning to a sub-element that makes up an entire local variable
    // should clobber that variable.
    func f44(f func() [2]*int) interface{} { // ERROR "live at entry to f44: f"
    	type T struct {
    		s [1][2]*int
    	}
    	ret := T{} // ERROR "stack object ret T"
    	ret.s[0] = f()
    	return ret
  7. src/cmd/internal/obj/x86/obj6.go

    	} else {
    		lea = ALEAL
    		mov = AMOVL
    		reg = REG_CX
    		if p.As == ALEAL && p.To.Reg != p.From.Reg && p.To.Reg != p.From.Index {
    			// Special case: clobber the destination register with
    			// the PC so we don't have to clobber CX.
    			// The SSA backend depends on CX not being clobbered across LEAL.
    			// See cmd/compile/internal/ssa/gen/386.rules (search for Flag_shared).
    			reg = p.To.Reg
    		}
    	}
    
  8. src/runtime/stack.go

    	if n&(n-1) != 0 {
    		throw("stack not a power of 2")
    	}
    	if stk.lo+n < stk.hi {
    		throw("bad stack size")
    	}
    	if stackDebug >= 1 {
    		println("stackfree", v, n)
    		memclrNoHeapPointers(v, n) // for testing, clobber stack data
    	}
    	if debug.efence != 0 || stackFromSystem != 0 {
    		if debug.efence != 0 || stackFaultOnFree != 0 {
    			sysFault(v, n)
    		} else {
    			sysFree(v, n, &memstats.stacks_sys)
    		}
    		return
    	}
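    The one-line comment marks a general debugging technique: when memory is freed,
    deliberately clobber it (here by zeroing the dead stack) so that any later use
    through a dangling reference fails loudly instead of silently reading stale but
    plausible-looking data. A hedged sketch of the same idea for an ordinary buffer
    pool; the pool type and poison value are illustrative, not taken from the runtime:

    package main

    import "fmt"

    const poison = 0xDB // illustrative sentinel value

    // pool is a toy free list that, in debug mode, clobbers buffers on free so a
    // use-after-free shows up as obviously bogus data rather than stale values.
    type pool struct {
        free  [][]byte
        debug bool
    }

    func (p *pool) get(n int) []byte {
        if k := len(p.free); k > 0 && len(p.free[k-1]) >= n {
            b := p.free[k-1][:n]
            p.free = p.free[:k-1]
            return b
        }
        return make([]byte, n)
    }

    func (p *pool) put(b []byte) {
        if p.debug {
            for i := range b {
                b[i] = poison // clobber freed data
            }
        }
        p.free = append(p.free, b)
    }

    func main() {
        p := &pool{debug: true}
        b := p.get(4)
        copy(b, []byte("data"))
        p.put(b)
        fmt.Printf("% x\n", b) // db db db db: the dangling slice sees only poison
    }
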
  9. src/cmd/compile/internal/ssa/_gen/RISCV64Ops.go

    		fp2gp   = regInfo{inputs: []regMask{fpMask, fpMask}, outputs: []regMask{gpMask}}
    
    		call        = regInfo{clobbers: callerSave}
    		callClosure = regInfo{inputs: []regMask{gpspMask, regCtxt, 0}, clobbers: callerSave}
    		callInter   = regInfo{inputs: []regMask{gpMask}, clobbers: callerSave}
    	)
    
    	RISCV64ops := []opData{
    		{name: "ADD", argLength: 2, reg: gp21, asm: "ADD", commutative: true}, // arg0 + arg1
  10. src/runtime/race_ppc64le.s

    	BL	racecall<>(SB)
    	MOVD	R15, R8	// restore the original function
    	MOVD	R17, R6 // restore arg list addr
    	// Call the atomic function.
    	// racecall will call LLVM race code which might clobber r30 (g)
    	MOVD	runtime·tls_g(SB), R10
    	MOVD	0(R10), g
    
    	MOVD	g_racectx(g), R3
    	MOVD	R8, R4		// pc being called same TODO as above
    	MOVD	(R1), R5	// caller pc from latest LR
    	BL	racecall<>(SB)