Search Options

Results per page
Sort
Preferred Languages
Advanced

Results 1 - 7 of 7 for SymbolicGradient (2.05 sec)

  1. tensorflow/compiler/mlir/tensorflow/tests/graphdef2mlir/graph-gradient-def.pbtxt

        }
      }
      attr {
        key: "index_type"
        value {
          type: DT_INT32
        }
      }
      experimental_debug_info {
      }
    }
    node {
      name: "gradients/foo_grad/SymbolicGradient"
      op: "SymbolicGradient"
      input: "Const"
      input: "gradients/Fill"
      attr {
        key: "Tin"
        value {
          list {
            type: DT_FLOAT
            type: DT_FLOAT
          }
        }
      }
      attr {
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed Nov 11 19:14:04 UTC 2020
    - 4.3K bytes
    - Viewed (0)
  2. tensorflow/cc/gradients/functional_grad.cc

        input_dtypes.push_back(op.input_type(i));
      }
    
      func_inputs.insert(std::end(func_inputs), std::begin(grad_inputs),
                         std::end(grad_inputs));
    
      auto grad = SymbolicGradient(scope, func_inputs, input_dtypes, f);
      for (int i = 0; i < num_inputs; i++) {
        grad_outputs->push_back(grad[i]);
      }
    
      return scope.status();
    }
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Fri Oct 15 20:09:06 UTC 2021
    - 2.1K bytes
    - Viewed (0)
  3. tensorflow/compiler/jit/resource_operation_safety_analysis_test.cc

      TF_ASSERT_OK(root.graph()->AddFunctionLibrary(flib_def));
    
      Node* read = MakeRead(root, "R");
      NameAttrList fn;
      fn.set_name("Const_func");
      Node* symbolic_gradient =
          ops::SymbolicGradient(root, /*input=*/{ops::Const(root, 1.0f)},
                                /*Tout=*/{DT_FLOAT}, fn)
              .output[0]
              .node();
    
      root.graph()->AddControlEdge(symbolic_gradient, read);
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Apr 28 16:53:59 UTC 2020
    - 18.7K bytes
    - Viewed (0)
  4. tensorflow/compiler/jit/compilability_check_util.cc

        const Node& node, string* uncompilable_reason) const {
      // There is a SymbolicGradient kernel on the XLA_JIT device, but the gradient
      // is really a kind of function call and will be handled by
      // IsCompilableCall().
      if (node.type_string() == "SymbolicGradient") {
        *uncompilable_reason =
            "SymbolicGradient should be handled by IsCompilableCall().";
        return false;
      }
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Mar 12 06:33:33 UTC 2024
    - 30.3K bytes
    - Viewed (0)
  5. tensorflow/compiler/jit/mark_for_compilation_pass_test.cc

        Node* a =
            ops::SourceOp("UncompilableNullary", builder.opts().WithName("A"));
    
        // Builds a Symbolic gradient for Supported
        NodeBuilder b_builder("B", "SymbolicGradient",
                              builder.opts().op_registry());
        NameAttrList b_name_attr;
        b_name_attr.set_name("Supported");
        b_builder.Attr("f", b_name_attr);
        b_builder.Attr("Tin", {DT_FLOAT, DT_FLOAT});
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed Feb 14 10:11:10 UTC 2024
    - 79.6K bytes
    - Viewed (0)
  6. tensorflow/compiler/jit/mark_for_compilation_pass.cc

    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed Feb 21 12:19:41 UTC 2024
    - 85.3K bytes
    - Viewed (0)
  7. tensorflow/compiler/mlir/tensorflow/ir/tf_generated_ops.td

    Undefined if `compute_uv` is false.}]>:$v
      );
    
      TF_DerivedOperandTypeAttr T = TF_DerivedOperandTypeAttr<0>;
    }
    
    def TF_SymbolicGradientOp : TF_Op<"SymbolicGradient", [Pure]> {
      let summary = [{
    Computes the gradient function for function f via backpropagation.
      }];
    
      let arguments = (ins
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Jun 11 23:24:08 UTC 2024
    - 793K bytes
    - Viewed (0)
Back to top