- Sort Score
- Results per page: 10
- Languages All
Results 1 - 10 of 11 for AddArgs (0.1 sec)
-
src/cmd/compile/internal/ssa/value.go
func (v *Value) AddArg6(w1, w2, w3, w4, w5, w6 *Value) { v.Args = append(v.Args, w1, w2, w3, w4, w5, w6) w1.Uses++ w2.Uses++ w3.Uses++ w4.Uses++ w5.Uses++ w6.Uses++ } func (v *Value) AddArgs(a ...*Value) { if v.Args == nil { v.resetArgs() // use argstorage } v.Args = append(v.Args, a...) for _, x := range a { x.Uses++ } } func (v *Value) SetArg(i int, w *Value) {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 03 16:40:22 UTC 2024 - 16.7K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/trim.go
// the current phi is // v = φ(v0, v1, ..., u, ..., vk) // then the merged phi is // v = φ(v0, v1, ..., u0, ..., vk, u1, ..., un) v.SetArg(i, u.Args[0]) v.AddArgs(u.Args[1:]...) } else { // If the original block contained u = φ(u0, u1, ..., un) and // the current phi is // v = φ(v0, v1, ..., vi, ..., vk) // i.e. it does not use a value from the predecessor block,
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Nov 18 17:59:44 UTC 2022 - 4.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/expand_calls.go
mem = x.decomposeAsNecessary(v.Pos, v.Block, a, mem, rc) } var preArgStore [2]*Value preArgs := append(preArgStore[:0], v.Args[0:firstArg]...) v.resetArgs() v.AddArgs(preArgs...) v.AddArgs(allResults...) v.AddArg(mem) for _, a := range oldArgs { if a.Uses == 0 { x.invalidateRecursively(a) } } return }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Nov 28 05:13:40 UTC 2023 - 31.9K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/decompose.go
} for _, a := range v.Args { for i := 0; i < n; i++ { fields[i].AddArg(a.Block.NewValue1I(v.Pos, OpStructSelect, t.FieldType(i), int64(i), a)) } } v.reset(StructMakeOp(n)) v.AddArgs(fields[:n]...) // Recursively decompose phis for each field. for _, f := range fields[:n] { decomposeUserPhi(f) } } // decomposeArrayPhi replaces phi-of-array with arraymake(phi-of-array-element),
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Aug 23 21:22:15 UTC 2022 - 13.4K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/addressingmodes.go
} // Combine the operations. tmp = append(tmp[:0], v.Args[:ptrIndex]...) tmp = append(tmp, p.Args...) tmp = append(tmp, v.Args[ptrIndex+1:]...) v.resetArgs() v.Op = c v.AddArgs(tmp...) if needSplit[c] { // It turns out that some of the combined instructions have faster two-instruction equivalents, // but not the two instructions that led to them being combined here. For example
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Jul 26 17:19:57 UTC 2023 - 24.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/sccp.go
// change its value immediately after visiting Phi, because some of its input // edges may still not be visited at this moment. constValue := f.newValue(val.Op, val.Type, f.Entry, val.Pos) constValue.AddArgs(args...) matched := rewriteValuegeneric(constValue) if matched { if isConst(constValue) { return lattice{constant, constValue} } }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Jan 22 16:54:50 UTC 2024 - 17.6K bytes - Viewed (0) -
src/cmd/compile/internal/ssagen/phi.go
if a == v { continue // self-reference } if a == w { continue // already have this witness } if w != nil { // two witnesses, need a phi value v.Op = ssa.OpPhi v.AddArgs(args...) v.Aux = nil continue loop } w = a // save witness } if w == nil { s.s.Fatalf("no witness for reachable phi %s", v) } // One witness. Make v a copy of w.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Nov 18 17:59:44 UTC 2022 - 15.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/writebarrier.go
for i := 0; i < nargs; i++ { argTypes[i] = typ } call := b.NewValue0A(pos, OpStaticCall, types.TypeResultMem, StaticAuxCall(fn, b.Func.ABIDefault.ABIAnalyzeTypes(argTypes, nil))) call.AddArgs(args...) call.AuxInt = int64(nargs) * typ.Size() return b.NewValue1I(pos, OpSelectN, types.TypeMem, 0, call) } // round to a multiple of r, r is a power of 2. func round(o int64, r int64) int64 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Sep 08 19:09:14 UTC 2023 - 23.5K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/rewrite.go
OpPPC64XOR: OpPPC64XORCC, } b := op.Block opCC := b.NewValue0I(op.Pos, ccOpMap[op.Op], types.NewTuple(op.Type, types.TypeFlags), op.AuxInt) opCC.AddArgs(op.Args...) op.reset(OpSelect0) op.AddArgs(opCC) return op } // Try converting a RLDICL to ANDCC. If successful, return the mask otherwise 0. func convertPPC64RldiclAndccconst(sauxint int64) int64 {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Jun 07 19:02:52 UTC 2024 - 64.2K bytes - Viewed (0) -
tensorflow/compiler/jit/encapsulate_subgraphs_pass.cc
// original graph that has an output edge in the subgraph, and whose second // element is the arg node in the subgraph that it sends to. The vector will // be filled in below in AddArgs. std::vector<std::pair<const Node*, Node*>> src_arg_pairs; TF_RETURN_IF_ERROR(CopySubgraphNodes(&node_images)); TF_RETURN_IF_ERROR(CopySubgraphEdges(node_images, &src_arg_pairs));
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Feb 22 08:47:20 UTC 2024 - 51K bytes - Viewed (0)