Results 41 - 50 of 54 for movbeq (0.09 sec)

  1. src/runtime/mgc.go

    // gcTestMoveStackOnNextCall causes the stack to be moved on a call
    // immediately following the call to this. It may not work correctly
    // if any other work appears after this call (such as returning).
    // Typically the following call should be marked go:noinline so it
    // performs a stack check.
    //
    // In rare cases this may not cause the stack to move, specifically if
    // there's a preemption between this call and the next.
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 29 16:25:21 UTC 2024
    - 62K bytes
    - Viewed (0)
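
    For context, a minimal sketch of the calling pattern this comment describes, assuming the code lives inside package runtime (gcTestMoveStackOnNextCall is unexported); the names stackCheckedWork and moveStackExample are hypothetical.

    // Hypothetical sketch, inside package runtime: arm a stack move, then make
    // a non-inlined call so its prologue stack check actually moves the stack.
    //
    //go:noinline
    func stackCheckedWork() {
    	var buf [128]byte // a local frame large enough to need a stack check
    	_ = buf
    }

    func moveStackExample() {
    	gcTestMoveStackOnNextCall() // arm the move
    	stackCheckedWork()          // the very next call performs the stack check
    }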
  2. src/cmd/internal/obj/s390x/asmz.go

    	op_MVI     uint32 = 0x9200 // FORMAT_SI         MOVE (immediate)
    	op_MVIY    uint32 = 0xEB52 // FORMAT_SIY        MOVE (immediate)
    	op_MVN     uint32 = 0xD100 // FORMAT_SS1        MOVE NUMERICS
    	op_MVO     uint32 = 0xF100 // FORMAT_SS2        MOVE WITH OFFSET
    	op_MVPG    uint32 = 0xB254 // FORMAT_RRE        MOVE PAGE
    	op_MVST    uint32 = 0xB255 // FORMAT_RRE        MOVE STRING
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Tue Apr 16 17:46:09 UTC 2024
    - 176.7K bytes
    - Viewed (0)
  3. staging/src/k8s.io/apiserver/pkg/server/config.go

    		// the hash used for the identity should include both the hostname and the identity value.
    		// TODO: receive the identity value as a parameter once the apiserver identity lease controller
    		// post start hook is moved to generic apiserver.
    		b := cryptobyte.NewBuilder(nil)
    		b.AddUint16LengthPrefixed(func(b *cryptobyte.Builder) {
    			b.AddBytes([]byte(hostname))
    		})
    		b.AddUint16LengthPrefixed(func(b *cryptobyte.Builder) {
    Registered: Sat Jun 15 01:39:40 UTC 2024
    - Last Modified: Tue May 28 08:48:22 UTC 2024
    - 47.7K bytes
    - Viewed (0)
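
    The snippet above uses x/crypto/cryptobyte to build a length-prefixed encoding before hashing. A self-contained sketch of that pattern follows; the helper name hashHostnameAndIdentity and the choice of SHA-256 are illustrative assumptions, not the apiserver's actual code.

    package main

    import (
    	"crypto/sha256"
    	"fmt"

    	"golang.org/x/crypto/cryptobyte"
    )

    // hashHostnameAndIdentity is a hypothetical helper showing the pattern above:
    // each field gets its own uint16 length prefix, so ("ab","c") and ("a","bc")
    // produce different encodings and therefore different hashes.
    func hashHostnameAndIdentity(hostname, identity string) [32]byte {
    	b := cryptobyte.NewBuilder(nil)
    	b.AddUint16LengthPrefixed(func(b *cryptobyte.Builder) {
    		b.AddBytes([]byte(hostname))
    	})
    	b.AddUint16LengthPrefixed(func(b *cryptobyte.Builder) {
    		b.AddBytes([]byte(identity))
    	})
    	return sha256.Sum256(b.BytesOrPanic())
    }

    func main() {
    	fmt.Printf("%x\n", hashHostnameAndIdentity("node-1", "kube-apiserver-xyz"))
    }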
  4. src/cmd/compile/internal/ssa/_gen/genericOps.go

    	// Normally we require that the source and destination of Move do not overlap.
    	// There is an exception when we know all the loads will happen before all
    	// the stores. In that case, overlap is ok. See
    	// memmove inlining in generic.rules. When inlineablememmovesize (in ../rewrite.go)
    	// returns true, we must do all loads before all stores, when lowering Move.
    	// The type of Move is used for the write barrier pass to insert write barriers
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 23 15:49:20 UTC 2024
    - 42.6K bytes
    - Viewed (0)
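
    A small, hypothetical Go illustration of the property the comment relies on: if every load from the source happens before any store to the destination, an overlapping move still produces the right bytes.

    package main

    import "fmt"

    // moveLoadsFirst copies n bytes but reads all of src before writing any of
    // dst, so overlapping slices are safe -- the same guarantee the inlined
    // memmove lowering must preserve. Illustrative only, not the compiler's code.
    func moveLoadsFirst(dst, src []byte, n int) {
    	tmp := make([]byte, n)
    	copy(tmp, src[:n]) // all loads...
    	copy(dst[:n], tmp) // ...before all stores
    }

    func main() {
    	buf := []byte("abcdefgh")
    	// Overlapping move: copy buf[0:4] over buf[2:6].
    	moveLoadsFirst(buf[2:], buf[:], 4)
    	fmt.Println(string(buf)) // ababcdgh
    }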
  5. src/cmd/compile/internal/ssa/rewriteARM64.go

    		v7.AddArg4(dst, v8, v10, v11)
    		v3.AddArg4(dst, v4, v6, v7)
    		v.AddArg4(dst, v0, v2, v3)
    		return true
    	}
    	// match: (Move [s] dst src mem)
    	// cond: s%16 != 0 && s%16 <= 8 && s > 16
    	// result: (Move [8] (OffPtr <dst.Type> dst [s-8]) (OffPtr <src.Type> src [s-8]) (Move [s-s%16] dst src mem))
    	for {
    		s := auxIntToInt64(v.AuxInt)
    		dst := v_0
    		src := v_1
    		mem := v_2
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 23 15:49:20 UTC 2024
    - 608.6K bytes
    - Viewed (0)
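
    A worked instance of the rule above, under the assumption s = 21: the condition holds (21%16 = 5, which is nonzero, at most 8, and 21 > 16), so the move is split into an 8-byte tail at offset 13 layered over a 16-byte bulk move.

    package main

    import "fmt"

    func main() {
    	// Hypothetical walk-through of the rewrite for s = 21. The two pieces
    	// overlap by 3 bytes of the destination; re-storing those bytes with
    	// the same source data is harmless.
    	s := int64(21)
    	fmt.Println(s%16 != 0 && s%16 <= 8 && s > 16) // cond: true
    	fmt.Println("tail: Move [8] at offset", s-8)  // offset 13
    	fmt.Println("bulk: Move of", s-s%16, "bytes") // 16 bytes
    }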
  6. CHANGELOG/CHANGELOG-1.29.md

    - `kube-apiserver` updated:
    Registered: Sat Jun 15 01:39:40 UTC 2024
    - Last Modified: Wed Jun 12 03:42:38 UTC 2024
    - 324.5K bytes
    - Viewed (0)
  7. src/cmd/go/internal/modget/get.go

    			// (it is always relative to the initial build list, before applying
    			// queries). So the only way that the result of an "all" query can change
    			// is if some matching package moves from one module in the build list
    			// to another, which should not happen very often.
    			continue
    		}
    
    		// When we load imports, we detect the following conditions:
    		//
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Fri Jun 07 18:26:32 UTC 2024
    - 66.5K bytes
    - Viewed (0)
  8. tensorflow/compiler/mlir/tensorflow/transforms/sparsecore/embedding_pipelining.cc

        const llvm::SetVector<Value>& inputs, const llvm::SetVector<Value>& outputs,
        func::FuncOp parent_func, ModuleOp module, const std::string& name,
        bool flag_for_inlining) {
      // Moves all of the Operations in 'ops' into a newly created func.FuncOp
      // function named 'name' and replaces the original ops with a call to the
      // newly created function using a tf.StatefulPartitionedCall. Here,
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 92.9K bytes
    - Viewed (0)
  9. src/cmd/link/internal/ld/lib.go

    	// be populated, so that it can walk the functions in each unit.
    	// Call into the loader to do this (requires that we collect the
    	// set of internal libraries first). NB: might be simpler if we
    	// moved isRuntimeDepPkg to cmd/internal and then did the test in
    	// loader.AssignTextSymbolOrder.
    	ctxt.Library = postorder(ctxt.Library)
    	intlibs := []bool{}
    	for _, lib := range ctxt.Library {
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Tue May 21 18:45:27 UTC 2024
    - 88.6K bytes
    - Viewed (0)
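
    The postorder call above orders ctxt.Library so that a library's dependencies always precede it. A hypothetical, self-contained sketch of such an ordering follows; the names postorderSketch, deps, and roots are assumptions, not the linker's API.

    package main

    import "fmt"

    // postorderSketch visits dependencies before dependents, mirroring the
    // ordering the linker relies on when it walks ctxt.Library and records
    // which entries are internal (runtime-dependency) libraries.
    func postorderSketch(deps map[string][]string, roots []string) []string {
    	seen := map[string]bool{}
    	var order []string
    	var visit func(p string)
    	visit = func(p string) {
    		if seen[p] {
    			return
    		}
    		seen[p] = true
    		for _, d := range deps[p] {
    			visit(d)
    		}
    		order = append(order, p)
    	}
    	for _, r := range roots {
    		visit(r)
    	}
    	return order
    }

    func main() {
    	deps := map[string][]string{
    		"main":    {"fmt", "runtime"},
    		"fmt":     {"runtime"},
    		"runtime": {},
    	}
    	fmt.Println(postorderSketch(deps, []string{"main"})) // [runtime fmt main]
    }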
  10. src/cmd/internal/obj/arm64/asm7.go

    	case REG_UXTH <= r && r < REG_UXTW:
    		return roff(rm, 1, num)
    	case REG_UXTW <= r && r < REG_UXTX:
    		if a.Type == obj.TYPE_MEM {
    			if num == 0 {
    				// According to the arm64 specification, for instructions MOVB, MOVBU and FMOVB,
    				// the extension amount must be 0, encoded in "S" as 0 if omitted, or as 1 if present.
    				// But in Go, we don't distinguish between Rn.UXTW and Rn.UXTW<<0, so we encode it as
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 15 15:44:14 UTC 2024
    - 201.1K bytes
    - Viewed (0)