Results 1 - 10 of 309 for backedge (0.12 sec)

  1. tensorflow/compiler/jit/shape_inference_helpers.cc

            if (e->src()->IsNextIteration()) {
              back_edges_.push_back(
                  BackEdge{e, e->src(), e->src_output(), e->dst(), e->dst_input()});
            }
          }
        }
      }
      for (const BackEdge& be : back_edges_) {
        graph_->RemoveEdge(be.edge);
      }
      return absl::OkStatus();
    }
    
    const std::vector<BackEdgeHelper::BackEdge>& BackEdgeHelper::RemovedEdges()
        const {
      return back_edges_;
    }
    
    - Last Modified: Fri Feb 09 11:36:41 UTC 2024
    - 1.9K bytes
  2. tensorflow/compiler/jit/shape_inference_helpers.h

      Status Remove(Graph* graph);
    
      // Gets the list of removed edges.
      const std::vector<BackEdge>& RemovedEdges() const;
    
      // Replaces the back edges removed by a prior call to Remove.
      Status Replace();
    
     private:
      Graph* graph_ = nullptr;  // not owned
      std::vector<BackEdge> back_edges_;
      // Set once Replace has been called.
      bool replaced_ = false;
    };
    
    }  // namespace tensorflow
    
    - Last Modified: Thu Apr 12 18:06:51 UTC 2018
    - 2.2K bytes
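
The header above declares the full lifecycle used by callers: detach the NextIteration back edges, run an analysis that needs an acyclic graph, then put the edges back (result 7 below shows the real call sites in shape_inference.cc). Here is a minimal sketch of that pattern, assuming the TensorFlow headers are available; AnalyzeWithoutBackEdges is a hypothetical wrapper, not part of the library:

    #include "tensorflow/compiler/jit/shape_inference_helpers.h"
    #include "tensorflow/core/graph/graph.h"
    #include "tensorflow/core/platform/errors.h"

    namespace tensorflow {

    // Hypothetical helper: runs some acyclic-only analysis on `graph`.
    Status AnalyzeWithoutBackEdges(Graph* graph) {
      BackEdgeHelper back_edge_helper;
      // Detach every NextIteration -> Merge back edge so the graph is a DAG.
      TF_RETURN_IF_ERROR(back_edge_helper.Remove(graph));

      // The removed edges stay available for passes that still need to know
      // where the loop-carried values flowed (compare PropagateShapes in result 7).
      for (const BackEdgeHelper::BackEdge& be : back_edge_helper.RemovedEdges()) {
        (void)be;  // ... inspect be.src / be.dst_input as needed ...
      }

      // Restore the cycle once the acyclic analysis is done.
      return back_edge_helper.Replace();
    }

    }  // namespace tensorflow
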
  3. tensorflow/compiler/jit/deadness_analysis.cc

      std::vector<string> backedges;
      for (const Edge* backedge : merge->in_edges()) {
        if (backedge->src()->IsNextIteration()) {
          backedges.push_back(absl::StrCat("  ", SummarizeNode(*backedge->src())));
        }
      }
      return errors::InvalidArgument(
          "Multiple NextIteration inputs to merge node ",
          FormatNodeForError(*merge), ": \n", absl::StrJoin(backedges, "\n"),
    - Last Modified: Tue Mar 12 06:33:33 UTC 2024
    - 60.4K bytes
  4. src/cmd/compile/internal/ssa/loopreschedchecks.go

    	// 5. Rewrite backedges to include reschedule check,
    	//    and modify destination phi function appropriately with new
    	//    definitions for mem.
    
    	if f.NoSplit { // nosplit functions don't reschedule.
    		return
    	}
    
    	backedges := backedges(f)
    	if len(backedges) == 0 { // no backedges means no rescheduling checks.
    		return
    	}
    
    	lastMems := findLastMems(f)
    
    	idom := f.Idom()
    	po := f.postorder()
    - Last Modified: Tue Aug 22 21:17:10 UTC 2023
    - 16K bytes
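
For context on the Go snippet above: a back edge is, in the standard sense, a control-flow edge whose destination is an ancestor of its source in a depth-first traversal, which is what makes it a loop-closing edge worth instrumenting with a reschedule check. The following self-contained sketch illustrates that definition on a toy adjacency-list CFG; it is only an illustration of the concept, not the compiler's backedges(f) implementation:

    #include <cstdio>
    #include <utility>
    #include <vector>

    enum class Mark { kUnvisited, kOnStack, kDone };

    // Collects edges u -> v where v is still on the DFS stack, i.e. v is an
    // ancestor of u, so the edge closes a loop.
    void FindBackEdges(int u, const std::vector<std::vector<int>>& succs,
                       std::vector<Mark>& mark,
                       std::vector<std::pair<int, int>>& back_edges) {
      mark[u] = Mark::kOnStack;
      for (int v : succs[u]) {
        if (mark[v] == Mark::kOnStack) {
          back_edges.push_back({u, v});  // loop-closing edge
        } else if (mark[v] == Mark::kUnvisited) {
          FindBackEdges(v, succs, mark, back_edges);
        }
      }
      mark[u] = Mark::kDone;
    }

    int main() {
      // Toy CFG with one loop: 0 -> 1 -> 2 -> 1 and 2 -> 3; expect back edge 2 -> 1.
      std::vector<std::vector<int>> succs = {{1}, {2}, {1, 3}, {}};
      std::vector<Mark> mark(succs.size(), Mark::kUnvisited);
      std::vector<std::pair<int, int>> back_edges;
      FindBackEdges(0, succs, mark, back_edges);
      for (const auto& e : back_edges) {
        std::printf("back edge %d -> %d\n", e.first, e.second);
      }
    }
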
  5. tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-while-loop.pbtxt

    # RUN: tf-mlir-translate -graphdef-to-mlir -tf-enable-shape-inference-on-import=false -mlir-print-debuginfo %s -o - | FileCheck %s
    
    # Verify that importing a Graph with a backedge leads to two NextIteration nodes
    # to break the cycle.
    
    # CHECK-LABEL: func @main()
    # CHECK:    %[[NEXTITERATION:[a-z0-9]+]], %[[NEXTITERATION_token:[a-z0-9]+]], {{.*}} = tf_executor.NextIteration.Source
    # CHECK:    tf_executor.Merge {{.*}} %[[NEXTITERATION]]
    
    - Last Modified: Thu Aug 27 18:14:13 UTC 2020
    - 3K bytes
  6. tensorflow/compiler/jit/shape_inference_test.cc

        auto next_iteration =
            ops::NextIteration(scope.WithOpName("while/NextIteration"), add);
    
        auto sink = ops::Identity(scope.WithOpName("sink"), exit);
    
        // Remove the dummy node and add the loop backedge.
        scope.graph()->RemoveNode(dummy.node());
        scope.graph()->AddEdge(next_iteration.node(), 0, merge.output.node(), 1);
    
        TF_EXPECT_OK(scope.ToGraph(&graph));
      }
    
      GraphShapeInfo shape_info;
    - Last Modified: Fri May 31 00:41:19 UTC 2024
    - 10.3K bytes
  7. tensorflow/compiler/jit/shape_inference.cc

      // loops, we temporarily remove loop backedges and add them back again after
      // the shape inference is complete.
      BackEdgeHelper back_edge;
      TF_RETURN_IF_ERROR(back_edge.Remove(graph));
      TF_RETURN_IF_ERROR(PropagateShapes(graph, arg_shapes,
                                         back_edge.RemovedEdges(), &shape_refiner));
      TF_RETURN_IF_ERROR(back_edge.Replace());
    
    - Last Modified: Fri May 31 00:41:19 UTC 2024
    - 13K bytes
  8. tensorflow/cc/ops/while_loop.cc

        result[i] = outputs[i].node();
      }
      return result;
    }
    
    // Manually generates the name of the `loop_var_idx`-th NextIteration node of a
    // loop being constructed with `scope`. This is used to define the backedge
    // before the NextIteration node is created.
    string NextIterationName(const Scope& scope, int loop_var_idx) {
      string result;
      const string& prefix = scope.impl()->name();
    - Last Modified: Mon Feb 26 01:01:21 UTC 2024
    - 9.5K bytes
  9. src/cmd/compile/internal/ssa/likelyadjust.go

    			} else if len(b.Succs) == 2 {
    				// If successor is an unvisited backedge, it's in loop and we don't care.
    				// Its default unlikely is also zero which is consistent with favoring loop edges.
    				// Notice that this can act like a "reset" on unlikeliness at loops; the
    				// default "everything returns" unlikeliness is erased by min with the
    				// backedge likeliness; however a loop with calls on every path will be
    - Last Modified: Mon Oct 31 21:41:20 UTC 2022
    - 15.4K bytes
  10. tensorflow/compiler/mlir/tensorflow/translate/import_model.cc

          std::unordered_set<const Node*>* nodes);
    
      // The input graph with backedges removed. The removed backedges are stored
      // in the back_edge_helper.
      BackEdgeHelper back_edge_helper_;
      // A map between node and output index, for each backedge.
      absl::flat_hash_map<const Node*, int> back_edge_node_output_;
      absl::flat_hash_map<const Node*, BackEdge> back_edge_dst_inputs_;
      // A map between sink and source operation of NextIteration
    - Last Modified: Wed May 01 11:17:36 UTC 2024
    - 183.2K bytes