- Sort Score
- Results per page: 10
- Languages All
Results 31 - 40 of 108 for elim (0.72 sec)
-
tensorflow/compiler/mlir/quantization/tensorflow/python/quantize_model.py
output_directory, quantization_options, representative_dataset, ) elif method.preset_method == _PresetMethod.METHOD_DYNAMIC_RANGE_INT8: return _dynamic_range_quantize( saved_model_path, output_directory, quantization_options, ) elif ( method.preset_method == _PresetMethod.METHOD_STATIC_RANGE_WEIGHT_ONLY_INT8 ):
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 17 03:36:50 UTC 2024 - 34.2K bytes - Viewed (0) -
cluster/log-dump/log-dump.sh
local gke_zone="${ZONE:-}" source "${KUBE_ROOT}/cluster/gce/util.sh" ZONE="${gke_zone}" elif [[ -z "${LOG_DUMP_SSH_KEY:-}" ]]; then echo 'LOG_DUMP_SSH_KEY not set, but required when using log_dump_custom_get_instances' exit 1 elif [[ -z "${LOG_DUMP_SSH_USER:-}" ]]; then echo 'LOG_DUMP_SSH_USER not set, but required when using log_dump_custom_get_instances' exit 1 fi
Registered: Sat Jun 15 01:39:40 UTC 2024 - Last Modified: Tue May 21 21:15:57 UTC 2024 - 28.9K bytes - Viewed (0) -
src/runtime/cgocall.go
at := (*arraytype)(unsafe.Pointer(t)) if !indir { if at.Len != 1 { throw("can't happen") } cgoCheckArg(at.Elem, p, at.Elem.Kind_&abi.KindDirectIface == 0, top, msg) return } for i := uintptr(0); i < at.Len; i++ { cgoCheckArg(at.Elem, p, true, top, msg) p = add(p, at.Elem.Size_) } case abi.Chan, abi.Map: // These types contain internal pointers that will
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 01:16:47 UTC 2024 - 24.2K bytes - Viewed (0) -
src/encoding/json/encode.go
} se.arrayEnc(e, v, opts) e.ptrLevel-- } func newSliceEncoder(t reflect.Type) encoderFunc { // Byte slices get special treatment; arrays don't. if t.Elem().Kind() == reflect.Uint8 { p := reflect.PointerTo(t.Elem()) if !p.Implements(marshalerType) && !p.Implements(textMarshalerType) { return encodeByteSlice } } enc := sliceEncoder{newArrayEncoder(t)} return enc.encode }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 00:18:55 UTC 2024 - 36.2K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/stablehlo/passes/bridge/convert_tf_quant_ops_to_mhlo.cc
storage_type_max); } else { SmallVector<double> scales_vec; SmallVector<int64_t> zero_points_vec; for (auto elem : scales.getValues<float>()) scales_vec.push_back(elem); for (auto elem : zero_points.getValues<int32_t>()) zero_points_vec.push_back(elem); elem_ty = quant::UniformQuantizedPerAxisType::get( flags, storage_type, expressed_type, scales_vec, zero_points_vec,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 17 17:58:54 UTC 2024 - 30.9K bytes - Viewed (0) -
src/cmd/compile/internal/types2/stmt.go
// declare a variable inside a function body if the variable is never used." check.usage(sig.scope) } func (check *Checker) usage(scope *Scope) { var unused []*Var for name, elem := range scope.elems { elem = resolve(name, elem) if v, _ := elem.(*Var); v != nil && !v.used { unused = append(unused, v) } } sort.Slice(unused, func(i, j int) bool { return cmpPos(unused[i].pos, unused[j].pos) < 0 })
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 30 19:19:55 UTC 2024 - 30.7K bytes - Viewed (0) -
staging/src/k8s.io/apiextensions-apiserver/pkg/apiserver/schema/validation_test.go
} } // check that anything other than type and properties in metadata is forbidden tt = reflect.TypeOf(Structural{}) for i := 0; i < tt.NumField(); i++ { s := Structural{} x := reflect.ValueOf(&s).Elem() fuzzer.Fuzz(x.Field(i).Addr().Interface()) s.Type = "object" s.Properties = map[string]Structural{ "name": {}, "generateName": {}, }
Registered: Sat Jun 15 01:39:40 UTC 2024 - Last Modified: Fri May 31 18:20:00 UTC 2024 - 11.6K bytes - Viewed (0) -
src/go/types/stmt.go
// declare a variable inside a function body if the variable is never used." check.usage(sig.scope) } func (check *Checker) usage(scope *Scope) { var unused []*Var for name, elem := range scope.elems { elem = resolve(name, elem) if v, _ := elem.(*Var); v != nil && !v.used { unused = append(unused, v) } } sort.Slice(unused, func(i, j int) bool { return cmpPos(unused[i].pos, unused[j].pos) < 0 })
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 30 19:19:55 UTC 2024 - 30.6K bytes - Viewed (0) -
src/go/types/unify.go
// assume they are the same to avoid spurious follow-on errors. return (x.len < 0 || y.len < 0 || x.len == y.len) && u.nify(x.elem, y.elem, emode, p) } case *Slice: // Two slice types unify if their element types unify. if y, ok := y.(*Slice); ok { return u.nify(x.elem, y.elem, emode, p) } case *Struct: // Two struct types unify if they have the same sequence of fields,
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Jun 11 16:24:39 UTC 2024 - 27.9K bytes - Viewed (0) -
src/cmd/compile/internal/types2/infer.go
}() switch t := typ.(type) { case *Basic: // nothing to do case *Alias: return w.isParameterized(Unalias(t)) case *Array: return w.isParameterized(t.elem) case *Slice: return w.isParameterized(t.elem) case *Struct: return w.varList(t.fields) case *Pointer: return w.isParameterized(t.base) case *Tuple:
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 24 13:54:20 UTC 2024 - 26.4K bytes - Viewed (0)