Search Options

Results per page
Sort
Preferred Languages
Advanced

Results 1 - 10 of 79 for Brad (0.66 sec)

  1. src/strings/replace_test.go

    	testCases = append(testCases,
    		testCase{capitalLetters, "brad", "BRAD"},
    		testCase{capitalLetters, Repeat("a", (32<<10)+123), Repeat("A", (32<<10)+123)},
    		testCase{capitalLetters, "", ""},
    
    		testCase{inc, "brad", "csbe"},
    		testCase{inc, "\x00\xff", "\x01\x00"},
    		testCase{inc, "", ""},
    
    		testCase{NewReplacer("a", "1", "a", "2"), "brad", "br1d"},
    	)
    
    	// repeat maps "a"->"a", "b"->"bb", "c"->"ccc", ...
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Fri Feb 24 22:53:05 UTC 2017
    - 14.1K bytes
    - Viewed (0)
  2. api/go1.4.txt

    # CL 134210043 archive/zip: add Writer.Flush, Brad Fitzpatrick <******@****.***>
    pkg archive/zip, method (*Writer) Flush() error
    
    # CL 97140043 compress/flate: add Reset() to allow reusing large buffers to compress multiple buffers, James Robinson <******@****.***>
    pkg compress/flate, type Resetter interface { Reset }
    pkg compress/flate, type Resetter interface, Reset(io.Reader, []uint8) error
    pkg compress/zlib, type Resetter interface { Reset }
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Fri Dec 12 03:01:01 UTC 2014
    - 34K bytes
    - Viewed (0)
  3. src/database/sql/sql_test.go

    		wantErr string
    	}
    	execTests := []execTest{
    		// Okay:
    		{[]any{"Brad", 31}, ""},
    		{[]any{"Brad", int64(31)}, ""},
    		{[]any{"Bob", "32"}, ""},
    		{[]any{7, 9}, ""},
    
    		// Invalid conversions:
    		{[]any{"Brad", int64(0xFFFFFFFF)}, "sql: converting argument $2 type: sql/driver: value 4294967295 overflows int32"},
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 23 18:42:28 UTC 2024
    - 111.6K bytes
    - Viewed (0)
  4. tensorflow/compiler/mlir/tfr/examples/mnist/ops_defs.py

    
    @tf.RegisterGradient('NewConv2D')
    def _conv_add_relu_grad(op: ops.Operation, grad):
      act = op.get_attr('act')
      y = op.outputs[0]
      if act == 'RELU':
        grad = gen_nn_ops.relu_grad(grad, y)
      elif act == 'RELU6':
        grad = gen_nn_ops.relu6_grad(grad, y)
      elif act == 'TANH':
        y = math_ops.conj(y)
        grad = gen_math_ops.tanh_grad(y, grad)
    
      broadcast_shape = tf.shape(y)
      input_value_shape = tf.shape(op.inputs[2])
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Aug 31 20:23:51 UTC 2023
    - 6.8K bytes
    - Viewed (0)
  5. tensorflow/cc/gradients/math_grad.cc

                   std::vector<Output>* grad_outputs) {
      auto grad = grad_inputs[0];
      auto two_over_root_pi =
          Cast(scope, Const(scope, 2 / std::sqrt(M_PI)), grad.type());
      Scope grad_scope = scope.WithControlDependencies(grad);
      auto x = ConjugateHelper(grad_scope, op.input(0));
      // grad * 2/sqrt(pi) * exp(-x**2)
      auto dx = Mul(grad_scope, Mul(grad_scope, grad, two_over_root_pi),
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Fri Aug 25 18:20:20 UTC 2023
    - 50.7K bytes
    - Viewed (0)
  6. tensorflow/cc/gradients/nn_grad.cc

        auto multiply_result = Multiply(scope, subtraction_result, logits_softmax);
        grad = Add(scope, grad, multiply_result);
      }
      auto minus_log_softmax = Multiply(scope, LogSoftmax(scope, logits), -1.0f);
      grad_outputs->push_back(grad);
      grad_outputs->push_back(BroadcastMul(scope, grad_loss, minus_log_softmax));
      return scope.status();
    }
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Fri May 27 23:34:33 UTC 2022
    - 24.5K bytes
    - Viewed (0)
  7. tensorflow/compiler/mlir/tensorflow/tests/decompose_resource_ops.mlir

        // CHECK: [[GRAD_SQUARE:%.*]] = "tf.Mul"([[GRAD]], [[GRAD]]) : (tensor<f32>, tensor<f32>) -> tensor<f32>
        // CHECK: [[NEW_ACC:%.*]] = "tf.AddV2"([[OLD_ACC]], [[GRAD_SQUARE]]) : (tensor<*xf32>, tensor<f32>) -> tensor<*xf32>
        // CHECK: [[LR_MULTIPLY:%.*]] = "tf.Mul"([[LR]], [[GRAD]]) : (tensor<f32>, tensor<f32>) -> tensor<f32>
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed May 22 19:47:48 UTC 2024
    - 51.3K bytes
    - Viewed (0)
  8. tensorflow/compiler/mlir/tensorflow/transforms/decompose_resource_ops.td

         (CreateTFReadVariableOp $src_op, $grad, $ms_resource),
         (TF_AddV2Op:$ms_new
           (TF_MulOp
             (TF_MulOp $grad, $grad),
             (TF_SubOp $one, $rho)
           ),
           (TF_MulOp
              (CreateTFReadVariableOp $src_op, $grad, $ms_resource),
              $rho
           )
         ),
         (TF_AssignVariableOp $ms_resource, $ms_new, (CreateConstBoolAttrFalse)),
         // mg = grad * (one - rho) + mg * rho;
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed May 22 19:47:48 UTC 2024
    - 20.7K bytes
    - Viewed (0)
  9. tensorflow/c/experimental/gradients/math_grad.cc

        /* Given upstream grad U and a Sub op A-B, the gradients are:
         *
         *    dA =  U
         *    dB = -U
         *
         */
    
        // Grad for A
        DCHECK(grad_outputs[0]);
        grad_inputs[0] = grad_outputs[0];
        grad_inputs[0]->Ref();
    
        // Grad for B
        // negate the upstream grad
        std::string name = "Neg_Sub_Grad_B";
        TF_RETURN_IF_ERROR(
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Wed Feb 28 13:53:47 UTC 2024
    - 15.2K bytes
    - Viewed (0)
  10. tensorflow/c/experimental/gradients/nn_grad.cc

                     absl::Span<AbstractTensorHandle*> grad_inputs) override {
        // Grad for Softmax Input
        TF_RETURN_IF_ERROR(BroadcastMul(
            ctx, grad_outputs[0], forward_outputs_[1],
            grad_inputs.subspan(0, 1)));  // upstream_grad * local softmax grad
    
        // Grad for labels is null
        grad_inputs[1] = nullptr;
        return absl::OkStatus();
      }
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Tue Apr 09 06:38:45 UTC 2024
    - 5.7K bytes
    - Viewed (0)
Back to top