Results 1 - 10 of 10 for BackEdge (0.21 sec)

  1. tensorflow/compiler/jit/shape_inference_helpers.cc

            if (e->src()->IsNextIteration()) {
              back_edges_.push_back(
                  BackEdge{e, e->src(), e->src_output(), e->dst(), e->dst_input()});
            }
          }
        }
      }
      for (const BackEdge& be : back_edges_) {
        graph_->RemoveEdge(be.edge);
      }
      return absl::OkStatus();
    }
    
    const std::vector<BackEdgeHelper::BackEdge>& BackEdgeHelper::RemovedEdges()
        const {
      return back_edges_;
    }
    
    - Last Modified: Fri Feb 09 11:36:41 UTC 2024
    - 1.9K bytes
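
    A minimal standalone sketch of the pattern this snippet shows: record every edge leaving a NextIteration-style node, detach it, and keep the copies so the edges can be restored later. The ToyNode/ToyEdge/ToyBackEdgeHelper types below are invented for illustration and are not TensorFlow's Graph/Node/BackEdgeHelper API.

        #include <iostream>
        #include <string>
        #include <vector>

        struct ToyNode {
          std::string name;
          bool is_next_iteration = false;  // stands in for Node::IsNextIteration()
        };

        struct ToyEdge {
          ToyNode* src;
          int src_output;
          ToyNode* dst;
          int dst_input;
        };

        struct ToyBackEdgeHelper {
          // Copies of the removed edges, in the spirit of BackEdgeHelper::BackEdge.
          std::vector<ToyEdge> removed;

          // Detaches every edge whose source is a NextIteration-style node, so the
          // remaining graph is acyclic, while remembering how to restore it later.
          void Remove(std::vector<ToyEdge>& edges) {
            std::vector<ToyEdge> kept;
            for (const ToyEdge& e : edges) {
              if (e.src->is_next_iteration) {
                removed.push_back(e);
              } else {
                kept.push_back(e);
              }
            }
            edges.swap(kept);
          }
        };

        int main() {
          ToyNode merge{"while/Merge"};
          ToyNode next{"while/NextIteration", true};
          ToyNode add{"while/Add"};
          std::vector<ToyEdge> edges = {{&add, 0, &next, 0}, {&next, 0, &merge, 1}};

          ToyBackEdgeHelper helper;
          helper.Remove(edges);  // back edge recorded, cycle removed from the graph

          std::cout << "removed " << helper.removed.size() << " back edge(s), "
                    << edges.size() << " forward edge(s) remain\n";
          return 0;
        }
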
  2. tensorflow/compiler/jit/deadness_analysis.cc

    }
    
    namespace {
    Status CreateMultipleNextIterationInputsError(Node* merge) {
      std::vector<string> backedges;
      for (const Edge* backedge : merge->in_edges()) {
        if (backedge->src()->IsNextIteration()) {
          backedges.push_back(absl::StrCat("  ", SummarizeNode(*backedge->src())));
        }
      }
      return errors::InvalidArgument(
          "Multiple NextIteration inputs to merge node ",
    - Last Modified: Tue Mar 12 06:33:33 UTC 2024
    - 60.4K bytes
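
    A standalone sketch of the check this snippet performs: a merge-style node fed by more than one NextIteration input is rejected with a message listing the offending back edges. ToyNode and CheckMergeBackedges are invented stand-ins, not the real deadness_analysis.cc helpers.

        #include <iostream>
        #include <string>
        #include <vector>

        struct ToyNode {
          std::string name;
          bool is_next_iteration = false;
          std::vector<ToyNode*> inputs;  // stands in for merge->in_edges()
        };

        // Returns "ok", or an error text listing every NextIteration input when the
        // merge-style node has more than one loop back edge feeding it.
        std::string CheckMergeBackedges(const ToyNode& merge) {
          std::vector<std::string> backedges;
          for (const ToyNode* in : merge.inputs) {
            if (in->is_next_iteration) backedges.push_back("  " + in->name);
          }
          if (backedges.size() <= 1) return "ok\n";
          std::string msg =
              "Multiple NextIteration inputs to merge node " + merge.name + ":\n";
          for (const std::string& line : backedges) msg += line + "\n";
          return msg;
        }

        int main() {
          ToyNode n0{"while/NextIteration", true, {}};
          ToyNode n1{"while/NextIteration_1", true, {}};
          ToyNode merge{"while/Merge", false, {&n0, &n1}};
          std::cout << CheckMergeBackedges(merge);
          return 0;
        }
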
  3. src/cmd/compile/internal/ssa/loopreschedchecks.go

    // license that can be found in the LICENSE file.
    
    package ssa
    
    import (
    	"cmd/compile/internal/types"
    	"fmt"
    )
    
    // an edgeMem records a backedge, together with the memory
    // phi functions at the target of the backedge that must
    // be updated when a rescheduling check replaces the backedge.
    type edgeMem struct {
    	e Edge
    	m *Value // phi for memory at dest of e
    }
    
    // a rewriteTarget is a value-argindex pair indicating
    - Last Modified: Tue Aug 22 21:17:10 UTC 2023
    - 16K bytes
  4. tensorflow/compiler/jit/shape_inference_test.cc

        auto next_iteration =
            ops::NextIteration(scope.WithOpName("while/NextIteration"), add);
    
        auto sink = ops::Identity(scope.WithOpName("sink"), exit);
    
        // Remove the dummy node and add the loop backedge.
        scope.graph()->RemoveNode(dummy.node());
        scope.graph()->AddEdge(next_iteration.node(), 0, merge.output.node(), 1);
    
        TF_EXPECT_OK(scope.ToGraph(&graph));
      }
    
      GraphShapeInfo shape_info;
    - Last Modified: Fri May 31 00:41:19 UTC 2024
    - 10.3K bytes
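
    A toy sketch of the construction trick used in this test: wire the Merge against a dummy source while the loop body is being built, then swap that dummy for the real NextIteration output once it exists. ToyGraph, ToyNode and ReplaceSrc are invented for illustration and do not mirror the tensorflow::Scope or Graph API.

        #include <cassert>
        #include <string>
        #include <utility>
        #include <vector>

        struct ToyNode { std::string name; };

        struct ToyGraph {
          std::vector<std::pair<ToyNode*, ToyNode*>> edges;  // (src, dst) pairs

          void AddEdge(ToyNode* src, ToyNode* dst) { edges.push_back({src, dst}); }

          // Reroutes every edge that currently starts at `old_src` to start at
          // `new_src` instead, mimicking "remove the dummy, add the real back edge".
          void ReplaceSrc(ToyNode* old_src, ToyNode* new_src) {
            for (auto& e : edges) {
              if (e.first == old_src) e.first = new_src;
            }
          }
        };

        int main() {
          ToyNode dummy{"dummy"};
          ToyNode merge{"while/Merge"};
          ToyNode next_iteration{"while/NextIteration"};

          ToyGraph g;
          g.AddEdge(&dummy, &merge);              // placeholder while the body is built
          g.ReplaceSrc(&dummy, &next_iteration);  // the real loop back edge, wired last

          assert(g.edges.size() == 1 && g.edges[0].first == &next_iteration);
          return 0;
        }
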
  5. src/cmd/compile/internal/ssa/likelyadjust.go

    			} else if len(b.Succs) == 2 {
    				// If successor is an unvisited backedge, it's in loop and we don't care.
    				// Its default unlikely is also zero which is consistent with favoring loop edges.
    				// Notice that this can act like a "reset" on unlikeliness at loops; the
    				// default "everything returns" unlikeliness is erased by min with the
    				// backedge likeliness; however a loop with calls on every path will be
    - Last Modified: Mon Oct 31 21:41:20 UTC 2022
    - 15.4K bytes
  6. tensorflow/cc/ops/while_loop.cc

        result[i] = outputs[i].node();
      }
      return result;
    }
    
    // Manually generates the name of the `loop_var_idx`-th NextIteration node of a
    // loop being constructed with `scope`. This is used to define the backedge
    // before the NextIteration node is created.
    string NextIterationName(const Scope& scope, int loop_var_idx) {
      string result;
      const string& prefix = scope.impl()->name();
    - Last Modified: Mon Feb 26 01:01:21 UTC 2024
    - 9.5K bytes
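
    A hypothetical sketch of the naming scheme the comment describes: the back edge must refer to the NextIteration node by name before that node is created, so the name is derived up front from the scope prefix. GuessNextIterationName and its "_<idx>" suffix rule are assumptions based on TensorFlow's usual de-duplicated op naming, not a copy of the elided function body.

        #include <iostream>
        #include <string>

        // Hypothetical helper: the first loop variable gets "<prefix>/NextIteration",
        // later ones an assumed "_<idx>" suffix.
        std::string GuessNextIterationName(const std::string& scope_prefix,
                                           int loop_var_idx) {
          std::string name = scope_prefix + "/NextIteration";
          if (loop_var_idx > 0) name += "_" + std::to_string(loop_var_idx);
          return name;
        }

        int main() {
          std::cout << GuessNextIterationName("while", 0) << "\n";  // while/NextIteration
          std::cout << GuessNextIterationName("while", 2) << "\n";  // while/NextIteration_2
          return 0;
        }
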
  7. tensorflow/compiler/jit/shape_inference.cc

    }
    
    Status PropagateShapes(Graph* graph,
                           const std::map<int, InferredShape>& arg_shapes,
                           const std::vector<BackEdgeHelper::BackEdge>& back_edges,
                           ShapeRefiner* shape_refiner) {
      std::map<const Node*, const Node*> merge_to_next_iteration;
      for (const auto& e : back_edges) {
        if (e.src->IsNextIteration() && e.dst->IsMerge()) {
    - Last Modified: Fri May 31 00:41:19 UTC 2024
    - 13K bytes
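
    A standalone sketch of the map being built in this snippet: for every recorded back edge whose source is a NextIteration node and whose destination is a Merge node, remember which NextIteration feeds which Merge. ToyNode and ToyBackEdge are invented stand-ins for the real Node and BackEdgeHelper::BackEdge types.

        #include <iostream>
        #include <map>
        #include <string>
        #include <vector>

        struct ToyNode {
          std::string name;
          bool is_next_iteration = false;
          bool is_merge = false;
        };

        struct ToyBackEdge {
          const ToyNode* src;
          const ToyNode* dst;
        };

        int main() {
          ToyNode next{"while/NextIteration", true, false};
          ToyNode merge{"while/Merge", false, true};
          std::vector<ToyBackEdge> back_edges = {{&next, &merge}};

          // For every recorded back edge from a NextIteration node into a Merge node,
          // remember which NextIteration feeds which Merge, so shape information can
          // later be propagated across the edge that was removed from the graph.
          std::map<const ToyNode*, const ToyNode*> merge_to_next_iteration;
          for (const ToyBackEdge& e : back_edges) {
            if (e.src->is_next_iteration && e.dst->is_merge) {
              merge_to_next_iteration[e.dst] = e.src;
            }
          }

          std::cout << merge_to_next_iteration.size() << " merge/back-edge pair(s)\n";
          return 0;
        }
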
  8. tensorflow/compiler/mlir/tensorflow/translate/import_model.cc

      // input graph to infer shapes and construct a Function. For each
      // "NextIteration" node, two operations, "NextIteration.source" and
      // "NextIteration.sink", are added to the MLIR module.
      using BackEdge = BackEdgeHelper::BackEdge;

      // Removes backedges from the input graph. The removed edges are added back
      // to the OpBuilder after the remaining graph is converted to the Function.
      Status RemoveBackedges();
    
    - Last Modified: Wed May 01 11:17:36 UTC 2024
    - 183.2K bytes
  9. tensorflow/compiler/jit/deadness_analysis_test.cc

      }
    }
    
    TEST(DeadnessAnalysisTest, LoopInvariantPredicateOnBackedge) {
      // Create a merge that "looks like" a loop but isn't really.  It has a value
      // that does not depend on the merge on its backedge.
      Scope root = Scope::NewRootScope().ExitOnError();
      InductionVarInfo iv = CreateInductionVariable(root, "iv0", "frame", 0);
      DependentInductionVar dependent_iv =
    - Last Modified: Thu Feb 22 06:59:07 UTC 2024
    - 51.6K bytes
  10. src/cmd/compile/internal/ssa/debug.go

    		reset(p0)
    		return p0, blockChanged
    	}
    
    	// More than one predecessor
    
    	if updating {
    		// After the first approximation, i.e., when updating, results
    		// can only get smaller, because initially backedge
    		// predecessors do not participate in the intersection.  This
    		// means that for the update, given the prior approximation of
    		// startState, there is no need to re-intersect with unchanged
    - Last Modified: Mon Jun 10 19:44:43 UTC 2024
    - 58.4K bytes
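
    A standalone sketch (in C++, not the Go compiler's code) of the monotone update the comment describes: on the first approximation, back-edge predecessors have no state yet and are skipped; once they participate in a later update, the intersection at a block can only shrink or stay the same.

        #include <iostream>
        #include <optional>
        #include <set>
        #include <vector>

        using State = std::set<int>;  // e.g. the variables known live at a block entry

        // Intersects the states of all predecessors that have one. Unvisited
        // back-edge predecessors (nullopt) are skipped on the first pass; when they
        // join a later update, the result can only get smaller.
        State IntersectPredecessors(const std::vector<std::optional<State>>& preds) {
          std::optional<State> result;
          for (const auto& s : preds) {
            if (!s) continue;  // unvisited back-edge predecessor, ignored for now
            if (!result) {
              result = *s;
              continue;
            }
            State narrowed;
            for (int v : *result) {
              if (s->count(v)) narrowed.insert(v);
            }
            *result = narrowed;
          }
          return result.value_or(State{});
        }

        int main() {
          // First pass: the back-edge predecessor contributes nothing yet.
          State first = IntersectPredecessors({State{1, 2, 3}, std::nullopt});
          // Update pass: it now has a state, and the result only gets smaller.
          State updated = IntersectPredecessors({State{1, 2, 3}, State{2, 3}});
          std::cout << first.size() << " then " << updated.size() << "\n";  // 3 then 2
          return 0;
        }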