- Sort Score
- Results per page: 10
- Languages All
Results 11 - 20 of 20 for CalcSize (0.18 sec)
-
src/cmd/compile/internal/walk/complit.go
te := types.NewArray(n.Type().Elem(), int64(len(entries))) // TODO(#47904): mark tk and te NoAlg here once the // compiler/linker can handle NoAlg types correctly. types.CalcSize(tk) types.CalcSize(te) // make and initialize static arrays vstatk := readonlystaticname(tk) vstate := readonlystaticname(te) datak := ir.NewCompLitExpr(base.Pos, ir.OARRAYLIT, nil, nil)
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Sep 08 19:03:54 UTC 2023 - 19.5K bytes - Viewed (0) -
src/regexp/syntax/parse.go
case OpCapture, OpStar: // star can be 1+ or 2+; assume 2 pessimistically size = 2 + p.calcSize(re.Sub[0], false) case OpPlus, OpQuest: size = 1 + p.calcSize(re.Sub[0], false) case OpConcat: for _, sub := range re.Sub { size += p.calcSize(sub, false) } case OpAlternate: for _, sub := range re.Sub { size += p.calcSize(sub, false) } if len(re.Sub) > 1 { size += int64(len(re.Sub)) - 1 }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Apr 02 13:59:01 UTC 2024 - 52.1K bytes - Viewed (0) -
src/cmd/compile/internal/types/type.go
} func (t *Type) Size() int64 { if t.kind == TSSA { if t == TypeInt128 { return 16 } return 0 } CalcSize(t) return t.width } func (t *Type) Alignment() int64 { CalcSize(t) return int64(t.align) } func (t *Type) SimpleString() string { return t.kind.String() } // Cmp is a comparison between values a and b.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Apr 04 14:29:45 UTC 2024 - 49.5K bytes - Viewed (0) -
src/cmd/compile/internal/ssagen/pgen.go
continue } if mls != nil && mls.Subsumed(n) { continue } if !n.Used() { fn.DebugInfo.(*ssa.FuncDebug).OptDcl = fn.Dcl[i:] fn.Dcl = fn.Dcl[:i] break } types.CalcSize(n.Type()) w := n.Type().Size() if w >= types.MaxWidth || w < 0 { base.Fatalf("bad width") } if w == 0 && lastHasPtr { // Pad between a pointer-containing object and a zero-sized object.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 15 15:44:14 UTC 2024 - 13.1K bytes - Viewed (0) -
src/cmd/compile/internal/typecheck/typecheck.go
// exactly. If dostrcmp is 2, it matches names with case folding. func Lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field { s := n.Sel types.CalcSize(t) var f1 *types.Field if t.IsStruct() { f1 = Lookdot1(n, s, t, t.Fields(), dostrcmp) } else if t.IsInterface() { f1 = Lookdot1(n, s, t, t.AllMethods(), dostrcmp) } var f2 *types.Field
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed Mar 20 19:08:34 UTC 2024 - 30.5K bytes - Viewed (0) -
src/cmd/compile/internal/ssa/value.go
nameOff := v.Aux.(*AuxNameOffset) return nameOff.Name, nameOff.Offset } // CanSSA reports whether values of type t can be represented as a Value. func CanSSA(t *types.Type) bool { types.CalcSize(t) if t.Size() > int64(4*types.PtrSize) { // 4*Widthptr is an arbitrary constant. We want it // to be at least 3*Widthptr so slices can be registerized. // Too big and we'll introduce too much register pressure.
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri May 03 16:40:22 UTC 2024 - 16.7K bytes - Viewed (0) -
src/cmd/compile/internal/walk/convert.go
t := types.NewArray(types.Types[types.TUINT8], int64(len(sc))) var a ir.Node if n.Esc() == ir.EscNone && len(sc) <= int(ir.MaxImplicitStackVarSize) { a = stackBufAddr(t.NumElem(), t.Elem()) } else { types.CalcSize(t) a = ir.NewUnaryExpr(base.Pos, ir.ONEW, nil) a.SetType(types.NewPtr(t)) a.SetTypecheck(1) a.MarkNonNil() } p := typecheck.TempAt(base.Pos, ir.CurFunc, t.PtrTo()) // *[n]byte
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Oct 09 17:28:22 UTC 2023 - 18.2K bytes - Viewed (0) -
src/cmd/compile/internal/walk/builtin.go
} if n.Esc() == ir.EscNone { if t.Size() > ir.MaxImplicitStackVarSize { base.Fatalf("large ONEW with EscNone: %v", n) } return stackTempAddr(init, t) } types.CalcSize(t) n.MarkNonNil() return n } func walkMinMax(n *ir.CallExpr, init *ir.Nodes) ir.Node { init.Append(ir.TakeInit(n)...) walkExprList(n.Args, init) return n }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Mar 08 22:35:22 UTC 2024 - 31.2K bytes - Viewed (0) -
src/cmd/compile/internal/ssagen/ssa.go
} return v } // map <--> *hmap if to.Kind() == types.TMAP && from == types.NewPtr(reflectdata.MapType()) { return v } types.CalcSize(from) types.CalcSize(to) if from.Size() != to.Size() { s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size()) return nil } if etypesign(from.Kind()) != etypesign(to.Kind()) {
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Jun 10 19:44:43 UTC 2024 - 284.9K bytes - Viewed (0) -
src/cmd/compile/internal/walk/order.go
func (o *orderState) addrTemp(n ir.Node) ir.Node { if n.Op() == ir.OLITERAL || n.Op() == ir.ONIL { // TODO: expand this to all static composite literal nodes? n = typecheck.DefaultLit(n, nil) types.CalcSize(n.Type()) vstat := readonlystaticname(n.Type()) var s staticinit.Schedule s.StaticAssign(vstat, 0, n, n.Type()) if s.Out != nil { base.Fatalf("staticassign of const generated code: %+v", n) }
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Mar 08 02:00:33 UTC 2024 - 42.7K bytes - Viewed (0)