Results 1 - 10 of 73 for sizeclass (0.15 sec)

  1. src/runtime/mcentral.go

    	}
    }
    
    // grow allocates a new empty span from the heap and initializes it for c's size class.
    func (c *mcentral) grow() *mspan {
    	npages := uintptr(class_to_allocnpages[c.spanclass.sizeclass()])
    	size := uintptr(class_to_size[c.spanclass.sizeclass()])
    
    	s := mheap_.alloc(npages, c.spanclass)
    	if s == nil {
    		return nil
    	}
    
    	// Use division by multiplication and shifts to quickly compute:
    - Last Modified: Mon Mar 25 19:53:03 UTC 2024
    - 8.1K bytes
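
    A minimal, self-checking sketch of the trick the truncated comment above refers to: dividing by the object size with a precomputed reciprocal, a multiply, and a 32-bit shift instead of a hardware divide. The 320-byte object size and 8 KiB span below are assumptions for illustration, not values from the excerpt.

    package main

    import "fmt"

    func main() {
    	// magic ~= ceil(2^32 / size), so (offset*magic)>>32 == offset/size
    	// for every offset inside the span.
    	const size = 320 // assumed object size (size class 19)
    	magic := ^uint32(0)/size + 1
    	for offset := uint32(0); offset < 8192; offset++ {
    		fast := uint32((uint64(offset) * uint64(magic)) >> 32)
    		if fast != offset/size {
    			fmt.Println("mismatch at offset", offset)
    			return
    		}
    	}
    	fmt.Println("multiply-and-shift matches offset/size across the whole span")
    }
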
  2. src/runtime/mfinal_test.go

    	b.RunParallel(func(pb *testing.PB) {
    		for pb.Next() {
    			v := new(int)
    			runtime.SetFinalizer(v, fin)
    		}
    	})
    }
    
    // One chunk must be exactly one sizeclass in size.
    // It should be a sizeclass not used much by others, so we
    // have a greater chance of finding adjacent ones.
    // size class 19: 320 byte objects, 25 per page, 1 page alloc at a time
    const objsize = 320
    
    - Last Modified: Thu Jan 19 20:45:58 UTC 2023
    - 5.6K bytes
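
    A quick check of the arithmetic in the comment above, assuming the runtime's usual 8 KiB page:

    package main

    import "fmt"

    func main() {
    	// size class 19: 320-byte objects packed into one 8192-byte page.
    	const pageSize, objsize = 8192, 320
    	fmt.Println("objects per page:", pageSize/objsize)    // 25
    	fmt.Println("tail waste:", pageSize%objsize, "bytes") // 192
    }
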
  3. src/runtime/mcache.go

    		// Count up how many slots were used and record it.
    		stats := memstats.heapStats.acquire()
    		slotsUsed := int64(s.allocCount) - int64(s.allocCountBeforeCache)
    		atomic.Xadd64(&stats.smallAllocCount[spc.sizeclass()], slotsUsed)
    
    		// Flush tinyAllocs.
    		if spc == tinySpanClass {
    			atomic.Xadd64(&stats.tinyAllocCount, int64(c.tinyAllocs))
    			c.tinyAllocs = 0
    		}
    		memstats.heapStats.release()
    
    - Last Modified: Mon Mar 25 19:53:03 UTC 2024
    - 10K bytes
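
    The flush above publishes only the slots consumed while the span sat in the cache (allocCount minus allocCountBeforeCache). A stripped-down sketch of that delta-accounting pattern, using plain stand-in types rather than the runtime's mspan and heapStats:

    package main

    import (
    	"fmt"
    	"sync/atomic"
    )

    // span stands in for just the two fields the flush reads.
    type span struct {
    	allocCount            int64
    	allocCountBeforeCache int64
    }

    var smallAllocCount [68]atomic.Int64 // hypothetical per-size-class counters

    // flush adds only the allocations made since the span was cached, then
    // resets the baseline so the same slots are never counted twice.
    func flush(s *span, sizeclass int) {
    	used := s.allocCount - s.allocCountBeforeCache
    	smallAllocCount[sizeclass].Add(used)
    	s.allocCountBeforeCache = s.allocCount
    }

    func main() {
    	s := &span{allocCountBeforeCache: 3, allocCount: 10}
    	flush(s, 19)
    	fmt.Println(smallAllocCount[19].Load()) // 7
    }
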
  4. src/runtime/mheap.go

    		// since we're not holding the heap lock.
    		s.spanclass = spanclass
    		if sizeclass := spanclass.sizeclass(); sizeclass == 0 {
    			s.elemsize = nbytes
    			s.nelems = 1
    			s.divMul = 0
    		} else {
    			s.elemsize = uintptr(class_to_size[sizeclass])
    			if !s.spanclass.noscan() && heapBitsInSpan(s.elemsize) {
    				// Reserve space for the pointer/scan bitmap at the end.
    - Last Modified: Wed May 22 22:31:00 UTC 2024
    - 78K bytes
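
    Size class 0 is reserved for large, single-object spans, which is why elemsize comes from the requested byte count there and from class_to_size everywhere else. A small sketch of that branch; the one-entry map is an illustrative stand-in, not the generated table:

    package main

    import "fmt"

    const pageSize = 8192

    // classToSize is a stand-in with a single illustrative entry.
    var classToSize = map[uint8]uintptr{19: 320}

    // spanLayout mirrors the branch above: class 0 holds exactly one object of
    // the requested size; other classes take their element size from the table.
    func spanLayout(sizeclass uint8, nbytes, npages uintptr) (elemsize, nelems uintptr) {
    	if sizeclass == 0 {
    		return nbytes, 1
    	}
    	elemsize = classToSize[sizeclass]
    	return elemsize, npages * pageSize / elemsize
    }

    func main() {
    	fmt.Println(spanLayout(0, 1<<20, 128)) // large span: 1048576 1
    	fmt.Println(spanLayout(19, 0, 1))      // small span: 320 25
    }
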
  5. src/runtime/mksizeclasses.go

    		// so wasted space is at most 12.5%.
    		allocsize := pageSize
    		for allocsize%size > allocsize/8 {
    			allocsize += pageSize
    		}
    		npages := allocsize / pageSize
    
    		// If the previous sizeclass chose the same
    		// allocation size and fit the same number of
    		// objects into the page, we might as well
    		// use just this size instead of having two
    		// different sizes.
    - Last Modified: Wed May 22 20:31:27 UTC 2024
    - 9.6K bytes
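
    The loop above adds whole pages until the unused tail is at most allocsize/8, i.e. 12.5% of the allocation. Re-running the same loop on a few illustrative object sizes (not necessarily real size classes):

    package main

    import "fmt"

    const pageSize = 8192

    // pagesFor grows the allocation a page at a time until tail waste <= 12.5%.
    func pagesFor(size int) int {
    	allocsize := pageSize
    	for allocsize%size > allocsize/8 {
    		allocsize += pageSize
    	}
    	return allocsize / pageSize
    }

    func main() {
    	for _, size := range []int{320, 4864, 5632} {
    		np := pagesFor(size)
    		fmt.Printf("size %4d -> %d page(s), tail waste %d bytes\n",
    			size, np, np*pageSize%size)
    	}
    }
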
  6. src/runtime/export_test.go

    				continue
    			}
    			if s.isUnusedUserArenaChunk() {
    				continue
    			}
    			if sizeclass := s.spanclass.sizeclass(); sizeclass == 0 {
    				slow.Mallocs++
    				slow.Alloc += uint64(s.elemsize)
    			} else {
    				slow.Mallocs += uint64(s.allocCount)
    				slow.Alloc += uint64(s.allocCount) * uint64(s.elemsize)
    				bySize[sizeclass].Mallocs += uint64(s.allocCount)
    			}
    		}
    
    - Last Modified: Thu May 30 17:50:53 UTC 2024
    - 46.1K bytes
  7. src/runtime/malloc.go

    			if hasHeader {
    				size += mallocHeaderSize
    			}
    			var sizeclass uint8
    			if size <= smallSizeMax-8 {
    				sizeclass = size_to_class8[divRoundUp(size, smallSizeDiv)]
    			} else {
    				sizeclass = size_to_class128[divRoundUp(size-smallSizeMax, largeSizeDiv)]
    			}
    			size = uintptr(class_to_size[sizeclass])
    			spc := makeSpanClass(sizeclass, noscan)
    			span = c.alloc[spc]
    			v := nextFreeFast(span)
    - Last Modified: Wed May 29 17:58:53 UTC 2024
    - 59.6K bytes
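
    Small requests are bucketed in 8-byte steps (size_to_class8) up to smallSizeMax-8 and in 128-byte steps (size_to_class128) above that. The sketch below only shows which table and index a given size maps to; the table contents themselves are generated by mksizeclasses.go and are not reproduced here, and the constant values are taken to be the ones defined in malloc.go:

    package main

    import "fmt"

    const (
    	smallSizeDiv = 8
    	smallSizeMax = 1024
    	largeSizeDiv = 128
    )

    func divRoundUp(n, a uintptr) uintptr { return (n + a - 1) / a }

    // bucketIndex reports which lookup table and index the excerpt above would consult.
    func bucketIndex(size uintptr) (table string, index uintptr) {
    	if size <= smallSizeMax-8 {
    		return "size_to_class8", divRoundUp(size, smallSizeDiv)
    	}
    	return "size_to_class128", divRoundUp(size-smallSizeMax, largeSizeDiv)
    }

    func main() {
    	for _, size := range []uintptr{24, 320, 1500} {
    		table, i := bucketIndex(size)
    		fmt.Printf("size %4d -> %s[%d]\n", size, table, i)
    	}
    }
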
  8. src/runtime/mgcsweep.go

    			}
    			lock(&mheap_.lock)
    			mheap_.userArena.quarantineList.remove(s)
    			mheap_.userArena.readyList.insert(s)
    			unlock(&mheap_.lock)
    		})
    		return false
    	}
    
    	if spc.sizeclass() != 0 {
    		// Handle spans for small objects.
    		if nfreed > 0 {
    			// Only mark the span as needing zeroing if we've freed any
    			// objects, because a fresh span that had been allocated into,
    - Last Modified: Wed May 08 17:52:18 UTC 2024
    - 32.9K bytes
  9. src/runtime/mfinal.go

    		}
    		throw("runtime.SetFinalizer: pointer not in allocated block")
    	}
    
    	// Move base forward if we've got an allocation header.
    	if !span.spanclass.noscan() && !heapBitsInSpan(span.elemsize) && span.spanclass.sizeclass() != 0 {
    		base += mallocHeaderSize
    	}
    
    	if uintptr(e.data) != base {
    		// As an implementation detail we allow to set finalizers for an inner byte
    - Last Modified: Fri Jun 07 01:56:56 UTC 2024
    - 19K bytes
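
    Only small objects in pointer-carrying spans whose heap bits do not fit inside the span carry a one-word malloc header, so the address a caller sees starts mallocHeaderSize past the object's true start. A plain-function sketch of that adjustment; the 8-byte header size assumes a 64-bit platform:

    package main

    import "fmt"

    const mallocHeaderSize = 8 // assumption: one pointer-sized word on 64-bit

    // userBase mirrors the condition in the excerpt: skip the header only for
    // small (sizeclass != 0), pointer-carrying (scan) objects whose heap bits
    // are not stored inside the span.
    func userBase(objStart uintptr, noscan, bitsInSpan bool, sizeclass uint8) uintptr {
    	if !noscan && !bitsInSpan && sizeclass != 0 {
    		return objStart + mallocHeaderSize
    	}
    	return objStart
    }

    func main() {
    	fmt.Printf("%#x\n", userBase(0x1000, false, false, 30)) // header present: base moves forward
    	fmt.Printf("%#x\n", userBase(0x1000, true, false, 30))  // noscan: no header
    }
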
  10. src/runtime/mbitmap.go

    		// Handle header-less objects.
    		return typePointers{elem: addr, addr: addr, mask: span.heapBitsSmallForAddr(addr)}
    	}
    
    	// All of these objects have a header.
    	var typ *_type
    	if spc.sizeclass() != 0 {
    		// Pull the allocation header from the first word of the object.
    		typ = *(**_type)(unsafe.Pointer(addr))
    		addr += mallocHeaderSize
    	} else {
    		typ = span.largeType
    		if typ == nil {
    - Last Modified: Thu May 23 00:18:55 UTC 2024
    - 60K bytes