Results 1 - 10 of 31 for Brad (0.04 sec)

  1. src/strings/replace_test.go

    	testCases = append(testCases,
    	testCase{capitalLetters, "brad", "BRAD"},
    		testCase{capitalLetters, Repeat("a", (32<<10)+123), Repeat("A", (32<<10)+123)},
    		testCase{capitalLetters, "", ""},
    
    		testCase{inc, "brad", "csbe"},
    		testCase{inc, "\x00\xff", "\x01\x00"},
    		testCase{inc, "", ""},
    
    		testCase{NewReplacer("a", "1", "a", "2"), "brad", "br1d"},
    	)
    
    	// repeat maps "a"->"a", "b"->"bb", "c"->"ccc", ...
    - Last Modified: Fri Feb 24 22:53:05 UTC 2017
    - 14.1K bytes
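
    The cases above pin down two strings.Replacer behaviors: a replacer built from
    single-byte pairs rewrites each matching byte independently, and when the same
    pattern appears twice in the argument list the earlier replacement wins. A
    minimal standalone sketch of both:

        package main

        import (
        	"fmt"
        	"strings"
        )

        func main() {
        	// When two pairs share a pattern, strings.Replacer uses the one
        	// that appears first in the argument list: "a" maps to "1", not "2".
        	r := strings.NewReplacer("a", "1", "a", "2")
        	fmt.Println(r.Replace("brad")) // br1d

        	// A replacer of single-byte pairs (like capitalLetters in the test
        	// above) rewrites every matching byte independently.
        	upper := strings.NewReplacer("a", "A", "b", "B", "d", "D", "r", "R")
        	fmt.Println(upper.Replace("brad")) // BRAD
        }
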
  2. api/go1.4.txt

    # CL 134210043 archive/zip: add Writer.Flush, Brad Fitzpatrick <******@****.***>
    pkg archive/zip, method (*Writer) Flush() error
    
    # CL 97140043 compress/flate: add Reset() to allow reusing large buffers to compress multiple buffers, James Robinson <******@****.***>
    pkg compress/flate, type Resetter interface { Reset }
    pkg compress/flate, type Resetter interface, Reset(io.Reader, []uint8) error
    pkg compress/zlib, type Resetter interface { Reset }
    - Last Modified: Fri Dec 12 03:01:01 UTC 2014
    - 34K bytes
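
    The Resetter entries above record the Go 1.4 API addition that lets one flate
    reader decompress many streams without reallocating its internal buffers. A
    small sketch of that reuse pattern (my own example, not taken from the file):

        package main

        import (
        	"bytes"
        	"compress/flate"
        	"fmt"
        	"io"
        	"log"
        )

        // compress deflates s into a fresh buffer.
        func compress(s string) *bytes.Buffer {
        	var buf bytes.Buffer
        	w, _ := flate.NewWriter(&buf, flate.DefaultCompression)
        	w.Write([]byte(s))
        	w.Close()
        	return &buf
        }

        func main() {
        	a, b := compress("first stream"), compress("second stream")

        	// Decompress both streams with one reader: Reset (the method the
        	// Resetter entries above describe) re-points the reader at a new
        	// source, reusing its buffers; nil means no preset dictionary.
        	r := flate.NewReader(a)
        	out, _ := io.ReadAll(r)
        	fmt.Printf("%s\n", out)

        	if err := r.(flate.Resetter).Reset(b, nil); err != nil {
        		log.Fatal(err)
        	}
        	out, _ = io.ReadAll(r)
        	fmt.Printf("%s\n", out)
        }
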
  3. tensorflow/cc/gradients/nn_grad.cc

        auto multiply_result = Multiply(scope, subtraction_result, logits_softmax);
        grad = Add(scope, grad, multiply_result);
      }
      auto minus_log_softmax = Multiply(scope, LogSoftmax(scope, logits), -1.0f);
      grad_outputs->push_back(grad);
      grad_outputs->push_back(BroadcastMul(scope, grad_loss, minus_log_softmax));
      return scope.status();
    }
    - Last Modified: Fri May 27 23:34:33 UTC 2022
    - 24.5K bytes
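
    The nn_grad.cc excerpt assembles the softmax cross-entropy gradient from
    LogSoftmax and BroadcastMul pieces. For loss = -sum(y * log softmax(x)), the
    gradient with respect to the logits reduces to the well-known closed form
    softmax(x) - y, scaled by the upstream loss gradient. A plain-Go numeric
    sketch, independent of TensorFlow, checking that form against a finite
    difference:

        package main

        import (
        	"fmt"
        	"math"
        )

        // softmax with the usual max-subtraction for numerical stability.
        func softmax(x []float64) []float64 {
        	max := x[0]
        	for _, v := range x {
        		if v > max {
        			max = v
        		}
        	}
        	out := make([]float64, len(x))
        	var sum float64
        	for i, v := range x {
        		out[i] = math.Exp(v - max)
        		sum += out[i]
        	}
        	for i := range out {
        		out[i] /= sum
        	}
        	return out
        }

        // loss = -sum(y * log softmax(x)): the forward op whose gradient the
        // snippet above assembles.
        func loss(x, y []float64) float64 {
        	s := softmax(x)
        	var l float64
        	for i := range s {
        		l -= y[i] * math.Log(s[i])
        	}
        	return l
        }

        func main() {
        	x := []float64{1.0, 2.0, 0.5}
        	y := []float64{0, 1, 0} // one-hot label
        	s := softmax(x)
        	const h = 1e-6
        	for i := range x {
        		analytic := s[i] - y[i] // closed-form gradient wrt logits
        		xp := append([]float64(nil), x...)
        		xp[i] += h
        		numeric := (loss(xp, y) - loss(x, y)) / h // finite difference
        		fmt.Printf("dL/dx[%d]: analytic=%.6f numeric=%.6f\n", i, analytic, numeric)
        	}
        }
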
  4. tensorflow/compiler/mlir/tensorflow/transforms/decompose_resource_ops.td

         (CreateTFReadVariableOp $src_op, $grad, $ms_resource),
         (TF_AddV2Op:$ms_new
           (TF_MulOp
             (TF_MulOp $grad, $grad),
             (TF_SubOp $one, $rho)
           ),
           (TF_MulOp
              (CreateTFReadVariableOp $src_op, $grad, $ms_resource),
              $rho
           )
         ),
         (TF_AssignVariableOp $ms_resource, $ms_new, (CreateConstBoolAttrFalse)),
         // mg = grad * (one - rho) + mg * rho;
    - Last Modified: Wed May 22 19:47:48 UTC 2024
    - 20.7K bytes
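
    This pattern decomposes TF's centered-RMSProp resource op into primitive ops:
    the block above builds ms_new = grad * grad * (one - rho) + ms * rho, and the
    trailing comment gives the matching mg update. A scalar Go sketch of those
    accumulator updates (lr and eps are assumed hyperparameters, not taken from
    the pattern):

        package main

        import (
        	"fmt"
        	"math"
        )

        // step applies one centered-RMSProp update to a scalar, mirroring the
        // decomposition above: ms, mg and rho match the pattern's variables.
        func step(v, ms, mg, grad, rho, lr, eps float64) (float64, float64, float64) {
        	ms = grad*grad*(1-rho) + ms*rho // ms_new built by the TF_AddV2Op above
        	mg = grad*(1-rho) + mg*rho      // mg = grad * (one - rho) + mg * rho
        	v -= lr * grad / math.Sqrt(ms-mg*mg+eps)
        	return v, ms, mg
        }

        func main() {
        	v, ms, mg := 1.0, 0.0, 0.0
        	for i := 0; i < 3; i++ {
        		grad := 2 * v // gradient of v*v
        		v, ms, mg = step(v, ms, mg, grad, 0.9, 0.1, 1e-8)
        		fmt.Printf("step %d: v=%.4f ms=%.4f mg=%.4f\n", i, v, ms, mg)
        	}
        }
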
  5. tensorflow/c/experimental/gradients/math_grad.cc

        /* Given upstream grad U and a Sub op A-B, the gradients are:
         *
         *    dA =  U
         *    dB = -U
         *
         */
    
        // Grad for A
        DCHECK(grad_outputs[0]);
        grad_inputs[0] = grad_outputs[0];
        grad_inputs[0]->Ref();
    
        // Grad for B
        // negate the upstream grad
        std::string name = "Neg_Sub_Grad_B";
        TF_RETURN_IF_ERROR(
    - Last Modified: Wed Feb 28 13:53:47 UTC 2024
    - 15.2K bytes
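
    The comment block spells out the Sub gradient rule the code then implements:
    pass the upstream gradient through for A and negate it for B. The same rule
    in a few lines of standalone Go:

        package main

        import "fmt"

        // subGrad mirrors the rule derived in the comment above: for C = A - B
        // with upstream gradient U, dA = U and dB = -U (elementwise).
        func subGrad(upstream []float64) (dA, dB []float64) {
        	dA = make([]float64, len(upstream))
        	dB = make([]float64, len(upstream))
        	for i, u := range upstream {
        		dA[i] = u  // grad for A: upstream passes through
        		dB[i] = -u // grad for B: negated, as Neg_Sub_Grad_B does
        	}
        	return dA, dB
        }

        func main() {
        	dA, dB := subGrad([]float64{1, 2, 3})
        	fmt.Println(dA, dB) // [1 2 3] [-1 -2 -3]
        }
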
  6. tensorflow/compiler/mlir/tensorflow/transforms/tensor_array_ops_decomposition.cc

    LogicalResult HandleTensorArrayGradV3Op(
        TF::TensorArrayGradV3Op grad,
        llvm::SmallDenseMap<Value, TensorArrayStats>* stats) {
      auto local_var = grad.getHandle();
      OpBuilder builder(grad);
      Value grad_var;
      auto sit = stats->find(local_var);
      if (sit == stats->end()) return grad.emitOpError("unknown tensor array");
      auto emplace_res =
          sit->getSecond().grads.try_emplace(grad.getSource().str(), Value());
      if (!emplace_res.second) {
    - Last Modified: Thu Mar 02 20:41:19 UTC 2023
    - 40.2K bytes
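
    HandleTensorArrayGradV3Op creates at most one gradient accumulator per
    (tensor array, source) pair: try_emplace inserts only when the source key is
    absent, and a failed insert means an existing accumulator is reused. A loose
    Go analogue of that once-per-source pattern (names are illustrative):

        package main

        import "fmt"

        // gradFor returns the per-source gradient accumulator, creating it on
        // first request only, like grads.try_emplace(source, ...) above. The
        // []float64 slice stands in for the pass's grad_var value.
        func gradFor(grads map[string]*[]float64, source string, size int) *[]float64 {
        	if acc, ok := grads[source]; ok {
        		return acc // a repeat lookup with the same source reuses it
        	}
        	acc := make([]float64, size)
        	grads[source] = &acc
        	return &acc
        }

        func main() {
        	grads := map[string]*[]float64{}
        	a := gradFor(grads, "src0", 4)
        	b := gradFor(grads, "src0", 4)
        	fmt.Println(a == b) // true: one accumulator per source
        }
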
  7. tensorflow/cc/gradients/linalg_grad.cc

      tensorflow::Output y = op.input(1);
      if (DataTypeIsComplex(grad.type())) {
        x = Conj(scope, x);
        y = Conj(scope, y);
      }
    
      const auto x_shape = Shape(scope, x);
      const auto y_shape = Shape(scope, y);
      Output grad_x =
          EinsumGradWrt(scope, grad, y, x_shape, x_subs, y_subs, output_subs);
      Output grad_y =
          EinsumGradWrt(scope, grad, x, y_shape, y_subs, x_subs, output_subs);
    
    - Last Modified: Mon Mar 07 23:11:54 UTC 2022
    - 20.4K bytes
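
    EinsumGradWrt computes the gradient with respect to one einsum operand by
    contracting the upstream gradient against the other operand with swapped
    subscripts, hence the symmetric grad_x/grad_y calls above. For the matmul
    equation ij,jk->ik this reduces to dX = dZ * Y-transpose and dY =
    X-transpose * dZ, which a small Go sketch can show numerically:

        package main

        import "fmt"

        func matmul(a, b [][]float64) [][]float64 {
        	n, k, m := len(a), len(b), len(b[0])
        	out := make([][]float64, n)
        	for i := range out {
        		out[i] = make([]float64, m)
        		for j := 0; j < m; j++ {
        			for l := 0; l < k; l++ {
        				out[i][j] += a[i][l] * b[l][j]
        			}
        		}
        	}
        	return out
        }

        func transpose(a [][]float64) [][]float64 {
        	out := make([][]float64, len(a[0]))
        	for i := range out {
        		out[i] = make([]float64, len(a))
        		for j := range a {
        			out[i][j] = a[j][i]
        		}
        	}
        	return out
        }

        func main() {
        	x := [][]float64{{1, 2}, {3, 4}}
        	y := [][]float64{{5, 6}, {7, 8}}
        	dz := [][]float64{{1, 1}, {1, 1}} // upstream grad of z = einsum("ij,jk->ik", x, y)

        	// Grad wrt each operand contracts dz with the *other* operand.
        	fmt.Println(matmul(dz, transpose(y))) // dX
        	fmt.Println(matmul(transpose(x), dz)) // dY
        }
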
  8. tensorflow/c/eager/tape.h

            return s;
          }
        } else {
          if (!persistent_) {
            trace.backward_function_deleter(trace.backward_function);
          }
          for (Gradient* grad : out_gradients) {
            if (grad != nullptr) {
              vspace.DeleteGradient(grad);
            }
          }
        }
        for (int i = 0, end = in_gradients.size(); i < end; ++i) {
          const int64_t id = trace.input_tensor_id[i];
    - Last Modified: Tue Apr 02 12:40:29 UTC 2024
    - 47.2K bytes
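
    The tape.h branch above is cleanup for non-persistent tapes: once a trace
    entry has been processed, its backward function is deleted and any non-nil
    output gradients are released. A toy Go mirror of that ownership rule (types
    are illustrative, not the TensorFlow API):

        package main

        import "fmt"

        // traceEntry stands in for the C++ trace: when the tape is not
        // persistent, the backward closure and remaining output gradients are
        // released as soon as the entry has been processed.
        type traceEntry struct {
        	backwardFunctionDeleter func()
        }

        func release(t traceEntry, persistent bool, outGradients []*float64) {
        	if persistent {
        		return // persistent tapes keep everything for reuse
        	}
        	t.backwardFunctionDeleter() // trace.backward_function_deleter(...)
        	for _, grad := range outGradients {
        		if grad != nil {
        			fmt.Println("deleting gradient", *grad) // vspace.DeleteGradient(grad)
        		}
        	}
        }

        func main() {
        	g := 0.5
        	release(
        		traceEntry{backwardFunctionDeleter: func() { fmt.Println("backward fn freed") }},
        		false,
        		[]*float64{&g, nil},
        	)
        }
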
  9. tensorflow/c/c_api_function.cc

      if (TF_GetCode(status) != TF_OK) return;
      if (!grad) return;
    
      status->status = g->graph.AddFunctionDef(grad->record->fdef(),
                                               grad->record->stack_traces());
      if (TF_GetCode(status) != TF_OK) return;
    
      tensorflow::GradientDef gdef;
      gdef.set_function_name(func->record->fdef().signature().name());
      gdef.set_gradient_func(grad->record->fdef().signature().name());
    - Last Modified: Mon Apr 15 03:35:10 UTC 2024
    - 13.6K bytes
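
    The C API excerpt first adds grad's FunctionDef to the graph, then records a
    GradientDef pairing the two functions, so gradient lookup is purely
    name-keyed. A rough Go sketch of such a name-keyed registry (hypothetical
    types, not the TensorFlow API):

        package main

        import (
        	"errors"
        	"fmt"
        )

        // registry pairs a function name with the name of its gradient
        // function, like the GradientDef built in the excerpt above.
        type registry struct {
        	funcs     map[string]bool
        	gradients map[string]string // function name -> gradient function name
        }

        func (r *registry) setGradient(fn, grad string) error {
        	if !r.funcs[fn] || !r.funcs[grad] {
        		return errors.New("both functions must be added to the graph first")
        	}
        	r.gradients[fn] = grad
        	return nil
        }

        func main() {
        	r := &registry{
        		funcs:     map[string]bool{"Square": true, "SquareGrad": true},
        		gradients: map[string]string{},
        	}
        	if err := r.setGradient("Square", "SquareGrad"); err != nil {
        		fmt.Println(err)
        	}
        	fmt.Println(r.gradients["Square"]) // SquareGrad
        }
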
  10. tensorflow/compiler/mlir/tf2xla/tests/legalize-tf-with-tf2xla-hlo-importer.mlir

      func.func @max_pool_grad_valid(%orig_input: tensor<10x24x24x64xf32>, %orig_output: tensor<10x12x12x64xf32>, %grad: tensor<10x12x12x64xf32>) -> tensor<10x24x24x64xf32> {
        // CHECK: %[[ZERO:.*]] = mhlo.constant dense<0.000000e+00> : tensor<f32>
        // CHECK: %[[RESULT:.*]] = "mhlo.select_and_scatter"(%[[INPUT]], %[[GRAD]], %[[ZERO]]) <{
    - Last Modified: Sat Apr 06 15:32:52 UTC 2024
    - 38.6K bytes
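
    The CHECK lines expect MaxPoolGrad to lower to mhlo.select_and_scatter,
    which routes each upstream gradient element back to the position that won
    the max in its pooling window. A plain-Go numeric sketch of that routing for
    1D, non-overlapping VALID windows:

        package main

        import "fmt"

        // maxPoolGrad1D sends each upstream gradient element to the argmax
        // position of its pooling window -- the same routing that
        // select_and_scatter performs in the lowering above.
        func maxPoolGrad1D(input, grad []float64, window int) []float64 {
        	out := make([]float64, len(input))
        	for w := 0; w*window+window <= len(input); w++ {
        		argmax := w * window
        		for i := w * window; i < w*window+window; i++ {
        			if input[i] > input[argmax] {
        				argmax = i
        			}
        		}
        		out[argmax] += grad[w]
        	}
        	return out
        }

        func main() {
        	input := []float64{3, 1, 4, 1, 5, 9}
        	grad := []float64{10, 20, 30} // one upstream value per window of size 2
        	fmt.Println(maxPoolGrad1D(input, grad, 2)) // [10 0 20 0 0 30]
        }
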