Results 1 - 10 of 193 for mspan (0.04 sec)

  1. src/runtime/traceallocfree.go

    }
    
    // SpanAlloc records an event indicating that the span has just been allocated.
    func (tl traceLocker) SpanAlloc(s *mspan) {
    	tl.eventWriter(traceGoRunning, traceProcRunning).commit(traceEvSpanAlloc, traceSpanID(s), traceArg(s.npages), traceSpanTypeAndClass(s))
    }
    
    // SpanFree records an event indicating that the span is about to be freed.
    func (tl traceLocker) SpanFree(s *mspan) {
    - Last Modified: Wed May 22 20:32:51 UTC 2024
    - 5.9K bytes
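
The SpanAlloc/SpanFree hooks in the traceallocfree.go excerpt are internal to the runtime's execution tracer and cannot be called from user code; a program only observes their effect by recording a trace with runtime/trace. A minimal capture sketch follows; the output path is arbitrary, and the assumption that span allocation/free events are only emitted when opted in via GODEBUG=traceallocfree=1 on recent Go releases should be checked against your Go version.

    // Sketch: record an execution trace with runtime/trace. The span
    // alloc/free events above are tracer-internal; they appear in the
    // trace output only when allocation/free tracing is enabled
    // (assumed: GODEBUG=traceallocfree=1 on recent Go releases).
    package main
    
    import (
    	"log"
    	"os"
    	"runtime/trace"
    )
    
    func main() {
    	f, err := os.Create("trace.out") // arbitrary output path
    	if err != nil {
    		log.Fatal(err)
    	}
    	defer f.Close()
    
    	if err := trace.Start(f); err != nil {
    		log.Fatal(err)
    	}
    	defer trace.Stop()
    
    	// Allocate something so the trace has span activity to record.
    	_ = make([]byte, 1<<20)
    }
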
  2. src/runtime/mheap.go

    	if span.list != list {
    		print("runtime: failed mSpanList.remove span.npages=", span.npages,
    			" span=", span, " prev=", span.prev, " span.list=", span.list, " list=", list, "\n")
    		throw("mSpanList.remove")
    	}
    	if list.first == span {
    		list.first = span.next
    	} else {
    		span.prev.next = span.next
    	}
    	if list.last == span {
    		list.last = span.prev
    	} else {
    		span.next.prev = span.prev
    - Last Modified: Wed May 22 22:31:00 UTC 2024
    - 78K bytes
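
The mheap.go excerpt is a plain doubly linked list unlink: patch the list head or the predecessor's next pointer, then the list tail or the successor's prev pointer. A standalone sketch of the same pattern, with hypothetical node and list types rather than the runtime's own:

    // Minimal doubly linked list mirroring the unlink pattern in
    // mSpanList.remove above. The types here are made up for
    // illustration, not the runtime's.
    package main
    
    import "fmt"
    
    type node struct {
    	prev, next *node
    	val        int
    }
    
    type list struct {
    	first, last *node
    }
    
    func (l *list) push(n *node) {
    	n.prev = l.last
    	if l.last != nil {
    		l.last.next = n
    	} else {
    		l.first = n
    	}
    	l.last = n
    }
    
    func (l *list) remove(n *node) {
    	// Same shape as mSpanList.remove: fix the head or the
    	// predecessor, then the tail or the successor.
    	if l.first == n {
    		l.first = n.next
    	} else {
    		n.prev.next = n.next
    	}
    	if l.last == n {
    		l.last = n.prev
    	} else {
    		n.next.prev = n.prev
    	}
    	// Clear the removed node's links so stale pointers are not reused.
    	n.prev, n.next = nil, nil
    }
    
    func main() {
    	var l list
    	a, b, c := &node{val: 1}, &node{val: 2}, &node{val: 3}
    	l.push(a)
    	l.push(b)
    	l.push(c)
    	l.remove(b) // unlink the middle node
    	for n := l.first; n != nil; n = n.next {
    		fmt.Println(n.val) // prints 1 then 3
    	}
    }
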
  3. src/runtime/mgcsweep.go

    		throw("mspan.sweep: m is not locked")
    	}
    
    	s := sl.mspan
    	if !preserve {
    		// We'll release ownership of this span. Nil it out to
    		// prevent the caller from accidentally using it.
    		sl.mspan = nil
    	}
    
    	sweepgen := mheap_.sweepgen
    	if state := s.state.get(); state != mSpanInUse || s.sweepgen != sweepgen-1 {
    - Last Modified: Wed May 08 17:52:18 UTC 2024
    - 32.9K bytes
  4. src/runtime/arena.go

    		rzSize := computeRZlog(span.elemsize)
    		span.elemsize -= rzSize
    		span.largeType.Size_ = span.elemsize
    		rzStart := span.base() + span.elemsize
    		span.userArenaChunkFree = makeAddrRange(span.base(), rzStart)
    		asanpoison(unsafe.Pointer(rzStart), span.limit-rzStart)
    		asanunpoison(unsafe.Pointer(span.base()), span.elemsize)
    	}
    
    	if rate := MemProfileRate; rate > 0 {
    		c := getMCache(mp)
    		if c == nil {
    - Last Modified: Wed May 08 17:44:56 UTC 2024
    - 37.9K bytes
  5. src/runtime/mbitmap.go

    	}
    	return tp
    }
    
    // objBase returns the base pointer for the object containing addr in span.
    //
    // Assumes that addr points into a valid part of span (span.base() <= addr < span.limit).
    //
    //go:nosplit
    func (span *mspan) objBase(addr uintptr) uintptr {
    	return span.base() + span.objIndex(addr)*span.elemsize
    }
    
    // bulkBarrierPreWrite executes a write barrier
    - Last Modified: Thu May 23 00:18:55 UTC 2024
    - 60K bytes
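
objBase in the mbitmap.go excerpt rounds an interior pointer down to the start of the object that contains it: compute the object's index within the span, then scale back up by the element size, which in effect is (addr - base) / elemsize followed by base + index*elemsize. A standalone sketch with made-up base and elemsize values:

    // Standalone sketch of the objBase arithmetic above. base and
    // elemsize are hypothetical example values, not real runtime state;
    // objIndex here is plain division, which is what span.objIndex
    // computes in effect.
    package main
    
    import "fmt"
    
    const (
    	base     uintptr = 0x1000 // hypothetical span start
    	elemsize uintptr = 48     // hypothetical object size in this span
    )
    
    func objIndex(addr uintptr) uintptr { return (addr - base) / elemsize }
    
    func objBase(addr uintptr) uintptr { return base + objIndex(addr)*elemsize }
    
    func main() {
    	addr := base + 2*elemsize + 13 // interior pointer into object #2
    	fmt.Printf("index=%d base=%#x\n", objIndex(addr), objBase(addr))
    	// index=2 base=0x1060  (0x1000 + 2*48)
    }
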
  6. src/cmd/compile/internal/test/inl_test.go

    			"spanOfUnchecked",
    			"typePointers.nextFast",
    			"(*gcWork).putFast",
    			"(*gcWork).tryGetFast",
    			"(*guintptr).set",
    			"(*markBits).advance",
    			"(*mspan).allocBitsForIndex",
    			"(*mspan).base",
    			"(*mspan).markBitsForBase",
    			"(*mspan).markBitsForIndex",
    			"(*mspan).writeUserArenaHeapBits",
    			"(*muintptr).set",
    			"(*puintptr).set",
    			"(*wbBuf).get1",
    			"(*wbBuf).get2",
    
    			// Trace-related ones.
    - Last Modified: Tue Apr 09 04:07:57 UTC 2024
    - 10.7K bytes
  7. src/runtime/malloc.go

    		span = c.allocLarge(size, noscan)
    		span.freeindex = 1
    		span.allocCount = 1
    		size = span.elemsize
    		x = unsafe.Pointer(span.base())
    		if needzero && span.needzero != 0 {
    			delayedZeroing = true
    		}
    		if !noscan {
    			// Tell the GC not to look at this yet.
    			span.largeType = nil
    			header = &span.largeType
    		}
    	}
    - Last Modified: Wed May 29 17:58:53 UTC 2024
    - 59.6K bytes
  8. src/runtime/export_test.go

    	}
    	return false
    }
    
    // mspan wrapper for testing.
    type MSpan mspan
    
    // Allocate an mspan for testing.
    func AllocMSpan() *MSpan {
    	var s *mspan
    	systemstack(func() {
    		lock(&mheap_.lock)
    		s = (*mspan)(mheap_.spanalloc.alloc())
    		unlock(&mheap_.lock)
    	})
    	return (*MSpan)(s)
    }
    
    // Free an allocated mspan.
    func FreeMSpan(s *MSpan) {
    	systemstack(func() {
    - Last Modified: Thu May 30 17:50:53 UTC 2024
    - 46.1K bytes
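
The MSpan wrappers in the export_test.go excerpt exist only for the runtime's own tests: export_test.go compiles into package runtime when that package is under test, so test files in the Go tree's runtime_test package can reach them as runtime.AllocMSpan and runtime.FreeMSpan. A hypothetical sketch of that usage (not a test quoted from the tree, and it will not build outside the Go source tree):

    // Hypothetical runtime_test file inside the Go source tree; the
    // wrappers are not part of the public runtime API.
    package runtime_test
    
    import (
    	"runtime"
    	"testing"
    )
    
    func TestWithRealMSpan(t *testing.T) {
    	s := runtime.AllocMSpan() // allocate a real mspan via mheap_.spanalloc
    	defer runtime.FreeMSpan(s)
    
    	// ... exercise whatever needs a live *MSpan here ...
    	_ = s
    }
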
  9. src/runtime/metrics/doc.go

    		Memory that is occupied by runtime mcache structures that are
    		currently being used.
    
    	/memory/classes/metadata/mspan/free:bytes
    		Memory that is reserved for runtime mspan structures, but not
    		in-use.
    
    	/memory/classes/metadata/mspan/inuse:bytes
    		Memory that is occupied by runtime mspan structures that are
    		currently being used.
    
    	/memory/classes/metadata/other:bytes
    - Last Modified: Wed May 22 22:58:43 UTC 2024
    - 20K bytes
  10. src/runtime/metrics.go

    				out.scalar = in.sysStats.mCacheInUse
    			},
    		},
    		"/memory/classes/metadata/mspan/free:bytes": {
    			deps: makeStatDepSet(sysStatsDep),
    			compute: func(in *statAggregate, out *metricValue) {
    				out.kind = metricKindUint64
    				out.scalar = in.sysStats.mSpanSys - in.sysStats.mSpanInUse
    			},
    		},
    		"/memory/classes/metadata/mspan/inuse:bytes": {
    			deps: makeStatDepSet(sysStatsDep),
    - Last Modified: Mon Apr 08 21:03:13 UTC 2024
    - 26K bytes
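
Results 9 and 10 pair up: metrics/doc.go documents the two mspan metadata memory classes, and metrics.go shows that the free value is derived as total mspan memory obtained from the system minus the portion in use (mSpanSys - mSpanInUse). Any program can read both values through the runtime/metrics package; a minimal sketch:

    // Read the two mspan metadata metrics described in doc.go above.
    package main
    
    import (
    	"fmt"
    	"runtime/metrics"
    )
    
    func main() {
    	samples := []metrics.Sample{
    		{Name: "/memory/classes/metadata/mspan/inuse:bytes"},
    		{Name: "/memory/classes/metadata/mspan/free:bytes"},
    	}
    	metrics.Read(samples)
    	for _, s := range samples {
    		if s.Value.Kind() == metrics.KindUint64 {
    			fmt.Printf("%s = %d\n", s.Name, s.Value.Uint64())
    		}
    	}
    }
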