- Sort Score
- Result 10 results
- Languages All
Results 21 - 30 of 264 for walkFn (0.14 sec)
-
tensorflow/compiler/mlir/tensorflow/transforms/sparsecore/embedding_sequencing.cc
// Walk the input and output dependencies of the Ops in `operations` to form // the closure of Ops needed to evaluate 'operations'. Input dependencies are // walked if 'predecessors' is true and output dependencies are walked if // 'successors' is true. In either case, if a discovered Op is in the // 'ops_to_avoid' set, then the dependency walking is terminated.
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Apr 25 16:01:03 UTC 2024 - 39.4K bytes - Viewed (0) -
src/text/template/exec.go
truth, ok := isTrue(indirectInterface(val)) if !ok { s.errorf("if/with can't use %v", val) } if truth { if typ == parse.NodeWith { s.walk(val, list) } else { s.walk(dot, list) } } else if elseList != nil { s.walk(dot, elseList) } } // IsTrue reports whether the value is 'true', in the sense of not the zero of its type,
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 24 21:22:24 UTC 2024 - 32K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/deadstore.go
} // Walk backwards looking for dead stores. Keep track of shadowed addresses. // A "shadowed address" is a pointer, offset, and size describing a memory region that // is known to be written. We keep track of shadowed addresses in the shadowed map, // mapping the ID of the address to a shadowRange where future writes will happen. // Since we're walking backwards, writes to a shadowed region are useless,
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Apr 25 20:07:26 UTC 2024 - 11K bytes - Viewed (0) -
src/cmd/compile/internal/walk/assign.go
if as.X != nil && as.Y != nil { return convas(as, init) } return as } // walkAssignDotType walks an OAS2DOTTYPE node. func walkAssignDotType(n *ir.AssignListStmt, init *ir.Nodes) ir.Node { walkExprListSafe(n.Lhs, init) n.Rhs[0] = walkExpr(n.Rhs[0], init) return n } // walkAssignFunc walks an OAS2FUNC node. func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node { init.Append(ir.TakeInit(n)...)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 08 17:09:06 UTC 2024 - 20.3K bytes - Viewed (0) -
src/cmd/compile/internal/ssagen/phi.go
// resolveFwdRefs links all FwdRef uses up to their nearest dominating definition. func (s *phiState) resolveFwdRefs() { // Do a depth-first walk of the dominator tree, keeping track // of the most-recently-seen value for each variable. // Map from variable ID to SSA value at the current point of the walk. values := make([]*ssa.Value, len(s.varnum)) for i := range values { values[i] = s.placeholder } // Stack of work to do.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Nov 18 17:59:44 UTC 2022 - 15.2K bytes - Viewed (0) -
src/cmd/compile/internal/walk/builtin.go
r := ir.NewBlockStmt(base.Pos, nil) r.List = calls return walkStmt(typecheck.Stmt(r)) } // walkRecoverFP walks an ORECOVERFP node. func walkRecoverFP(nn *ir.CallExpr, init *ir.Nodes) ir.Node { return mkcall("gorecover", nn.Type(), init, walkExpr(nn.Args[0], init)) } // walkUnsafeData walks an OUNSAFESLICEDATA or OUNSAFESTRINGDATA expression. func walkUnsafeData(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Mar 08 22:35:22 UTC 2024 - 31.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/tensorflow/analysis/resource_alias_analysis.cc
// for such regions to a new unique-id. This is required because walk() walks // the attached regions first before visiting the op, so there is no // opportunity during the walk to seed region arguments. Also note that walk // eventually also visits the Op on which the walk() is called, so make sure // we do not overwrite the function argument mapping here. func_op.walk([&](Operation* op) { if (op == func_op) return;
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed May 15 09:04:13 UTC 2024 - 28.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/nilcheck.go
op walkState } work := make([]bp, 0, 256) work = append(work, bp{block: f.Entry}) // map from value ID to known non-nil version of that value ID // (in the current dominator path being walked). This slice is updated by // walkStates to maintain the known non-nil values. // If there is extrinsic information about non-nil-ness, this map // points a value to itself. If a value is known non-nil because we
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Oct 31 20:45:54 UTC 2023 - 11.3K bytes - Viewed (0) -
src/cmd/cover/cover.go
} return f } } f.addCounters(n.Lbrace, n.Lbrace+1, n.Rbrace+1, n.List, true) // +1 to step past closing brace. case *ast.IfStmt: if n.Init != nil { ast.Walk(f, n.Init) } ast.Walk(f, n.Cond) ast.Walk(f, n.Body) if n.Else == nil { return nil } // The elses are special, because if we have // if x { // } else if y { // }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue May 14 19:41:17 UTC 2024 - 34.5K bytes - Viewed (0) -
pkg/volume/util/atomic_writer.go
// 1. The payload is validated; if the payload is invalid, the function returns // // 2. The current timestamped directory is detected by reading the data directory // symlink // // 3. The old version of the volume is walked to determine whether any // portion of the payload was deleted and is still present on disk. // // 4. The data in the current timestamped directory is compared to the projected
Registered: Sat Jun 15 01:39:40 UTC 2024 - Last Modified: Fri May 31 12:32:15 UTC 2024 - 16.6K bytes - Viewed (0)