Results 1 - 10 of 10 for isInlinableMemmove (0.4 sec)

  1. src/cmd/compile/internal/ssa/rewrite.go

    		return sz <= 8 || disjoint(dst, sz, src, sz)
    	case "arm", "loong64", "mips", "mips64", "mipsle", "mips64le":
    		return sz <= 4
    	}
    	return false
    }
    func IsInlinableMemmove(dst, src *Value, sz int64, c *Config) bool {
    	return isInlinableMemmove(dst, src, sz, c)
    }
    
    // logLargeCopy logs the occurrence of a large copy.
    // The best place to do this is in the rewrite rules where the size of the move is easy to find.
    - Last Modified: Fri Jun 07 19:02:52 UTC 2024
    - 64.2K bytes
  2. src/cmd/compile/internal/ssa/_gen/generic.rules

    (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const(64|32) [sz]) s2:(Store  _ src s3:(Store {t} _ dst mem)))))
    	&& sz >= 0
    	&& isSameCall(sym, "runtime.memmove")
    	&& s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1
    	&& isInlinableMemmove(dst, src, int64(sz), config)
    	&& clobber(s1, s2, s3, call)
    	=> (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
    
    // Match post-expansion calls, register version.
    - Last Modified: Thu May 16 22:21:05 UTC 2024
    - 135.3K bytes
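
    A minimal, hypothetical sketch of the kind of Go code this generic rule targets: a copy whose
    length is a compile-time constant. The copy builtin is normally lowered to a runtime.memmove
    call, and the rule above can then fold that call into a single SSA Move op, but only when
    isInlinableMemmove accepts the size for the target architecture. The function names below are
    illustrative, not taken from the search results.

    package main

    import "fmt"

    // copyHeader copies a small, fixed-size block. Because the length is a constant,
    // the resulting runtime.memmove call is a candidate for the memmove-to-Move rewrite.
    func copyHeader(dst, src *[8]byte) {
    	copy(dst[:], src[:])
    }

    func main() {
    	src := [8]byte{1, 2, 3, 4, 5, 6, 7, 8}
    	var dst [8]byte
    	copyHeader(&dst, &src)
    	fmt.Println(dst) // [1 2 3 4 5 6 7 8]
    }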
  3. src/cmd/compile/internal/ssa/_gen/PPC64.rules

            && isInlinableMemmove(dst, src, sz, config)
            && clobber(s1, s2, s3, call)
            => (Move [sz] dst src mem)
    
    // Match post-lowering calls, register version.
    (SelectN [0] call:(CALLstatic {sym} dst src (MOVDconst [sz]) mem))
            && sz >= 0
            && isSameCall(sym, "runtime.memmove")
            && call.Uses == 1
            && isInlinableMemmove(dst, src, sz, config)
    - Last Modified: Fri Jun 07 19:02:52 UTC 2024
    - 53.2K bytes
  4. src/cmd/compile/internal/ssa/_gen/AMD64.rules

    	&& s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1
    	&& isInlinableMemmove(dst, src, sc.Val64(), config)
    	&& clobber(s1, s2, s3, call)
    	=> (Move [sc.Val64()] dst src mem)
    
    // Match post-lowering calls, register version.
    (SelectN [0] call:(CALLstatic {sym} dst src (MOVQconst [sz]) mem))
    	&& sz >= 0
    	&& isSameCall(sym, "runtime.memmove")
    	&& call.Uses == 1
    	&& isInlinableMemmove(dst, src, sz, config)
    	&& clobber(call)
    - Last Modified: Tue Mar 12 19:38:41 UTC 2024
    - 93.9K bytes
  5. src/cmd/compile/internal/ssa/_gen/ARM64.rules

    	&& s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1
    	&& isInlinableMemmove(dst, src, sz, config)
    	&& clobber(s1, s2, s3, call)
    	=> (Move [sz] dst src mem)
    
    // Match post-lowering calls, register version.
    (SelectN [0] call:(CALLstatic {sym} dst src (MOVDconst [sz]) mem))
    	&& sz >= 0
    	&& isSameCall(sym, "runtime.memmove")
    	&& call.Uses == 1
    	&& isInlinableMemmove(dst, src, sz, config)
    	&& clobber(call)
    - Last Modified: Thu May 23 15:49:20 UTC 2024
    - 113.1K bytes
  6. src/cmd/compile/internal/ssa/rewritegeneric.go

    	// cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
    	// result: (Move {t1} [s] dst src midmem)
    	for {
    		s := auxIntToInt64(v.AuxInt)
    		t1 := auxToType(v.Aux)
    		dst := v_0
    		tmp1 := v_1
    		midmem := v_2
    - Last Modified: Mon Apr 22 18:24:47 UTC 2024
    - 812.2K bytes
  7. src/cmd/compile/internal/ssa/rewritePPC64.go

    	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(s1, s2, s3, call)
    	// result: (Move [sz] dst src mem)
    	for {
    		if auxIntToInt64(v.AuxInt) != 0 {
    			break
    		}
    		call := v_0
    - Last Modified: Fri Jun 07 19:02:52 UTC 2024
    - 360.2K bytes
  8. src/cmd/compile/internal/ssa/rewriteARM64.go

    	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(s1, s2, s3, call)
    	// result: (Move [sz] dst src mem)
    	for {
    		if auxIntToInt64(v.AuxInt) != 0 {
    			break
    		}
    		call := v_0
    - Last Modified: Thu May 23 15:49:20 UTC 2024
    - 608.6K bytes
  9. src/cmd/compile/internal/ssa/rewriteAMD64.go

    	// cond: sc.Val64() >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, sc.Val64(), config) && clobber(s1, s2, s3, call)
    	// result: (Move [sc.Val64()] dst src mem)
    	for {
    		if auxIntToInt64(v.AuxInt) != 0 {
    			break
    		}
    		call := v_0
    - Last Modified: Tue Mar 12 19:38:41 UTC 2024
    - 712.7K bytes
  10. src/cmd/compile/internal/ssagen/ssa.go

    }
    func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
    	s.instrumentMove(t, dst, src)
    	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
    		// Normally, when moving Go values of type T from one location to another,
    		// we don't need to worry about partial overlaps. The two Ts must either be
    - Last Modified: Mon Jun 10 19:44:43 UTC 2024
    - 284.9K bytes
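
    To see why moveWhichMayOverlap needs the IsInlinableMemmove check at all, here is a small
    self-contained example (not from the search results) of a partially overlapping array move,
    which Go permits through slice-to-array-pointer conversions (Go 1.17+). For such a move the
    compiler must preserve memmove's overlap-safe semantics unless a plain Move is provably safe.

    package main

    import "fmt"

    func main() {
    	a := [4]int{1, 2, 3, 4}
    	p := (*[3]int)(a[0:3]) // *p aliases a[0], a[1], a[2]
    	q := (*[3]int)(a[1:4]) // *q aliases a[1], a[2], a[3]
    	*p = *q                // partially overlapping array move
    	fmt.Println(a)         // [2 3 4 4]
    }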