Search Options

Results per page
Sort
Preferred Languages
Advanced

Results 1 - 9 of 9 for allocCount (0.14 sec)

  1. src/runtime/mcache.go

    		gcController.totalAlloc.Add(bytesAllocated)
    
    		// Clear the second allocCount just to be safe.
    		s.allocCountBeforeCache = 0
    	}
    
    	// Get a new cached span from the central lists.
    	s = mheap_.central[spc].mcentral.cacheSpan()
    	if s == nil {
    		throw("out of memory")
    	}
    
    	if s.allocCount == s.nelems {
    		throw("span has no free space")
    	}
    
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Mon Mar 25 19:53:03 UTC 2024
    - 10K bytes
    - Viewed (0)
  2. src/runtime/mcentral.go

    havespan:
    	if !traceDone {
    		trace := traceAcquire()
    		if trace.ok() {
    			trace.GCSweepDone()
    			traceRelease(trace)
    		}
    	}
    	n := int(s.nelems) - int(s.allocCount)
    	if n == 0 || s.freeindex == s.nelems || s.allocCount == s.nelems {
    		throw("span has no free objects")
    	}
    	freeByteBase := s.freeindex &^ (64 - 1)
    	whichByte := freeByteBase / 8
    	// Init alloc bits cache.
    	s.refillAllocCache(whichByte)
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Mon Mar 25 19:53:03 UTC 2024
    - 8.1K bytes
    - Viewed (0)
  3. src/runtime/malloc.go

    		throw("freeIndex is not valid")
    	}
    
    	v = gclinkptr(uintptr(freeIndex)*s.elemsize + s.base())
    	s.allocCount++
    	if s.allocCount > s.nelems {
    		println("s.allocCount=", s.allocCount, "s.nelems=", s.nelems)
    		throw("s.allocCount > s.nelems")
    	}
    	return
    }
    
    // Allocate an object of size bytes.
    // Small objects are allocated from the per-P cache's free lists.
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 29 17:58:53 UTC 2024
    - 59.6K bytes
    - Viewed (0)
  4. src/runtime/stack.go

    	if s == nil {
    		// no free stacks. Allocate another span worth.
    		s = mheap_.allocManual(_StackCacheSize>>_PageShift, spanAllocStack)
    		if s == nil {
    			throw("out of memory")
    		}
    		if s.allocCount != 0 {
    			throw("bad allocCount")
    		}
    		if s.manualFreeList.ptr() != nil {
    			throw("bad manualFreeList")
    		}
    		osStackAlloc(s)
    		s.elemsize = fixedStack << order
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 22 22:31:00 UTC 2024
    - 41.1K bytes
    - Viewed (0)
  5. src/runtime/mgcsweep.go

    	nalloc := uint16(s.countAlloc())
    	nfreed := s.allocCount - nalloc
    	if nalloc > s.allocCount {
    		// The zombie check above should have caught this in
    		// more detail.
    		print("runtime: nelems=", s.nelems, " nalloc=", nalloc, " previous allocCount=", s.allocCount, " nfreed=", nfreed, "\n")
    		throw("sweep increased allocation count")
    	}
    
    	s.allocCount = nalloc
    	s.freeindex = 0 // reset allocation index to start of span.
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 08 17:52:18 UTC 2024
    - 32.9K bytes
    - Viewed (0)
  6. src/runtime/mheap.go

    	case mSpanManual:
    		if s.allocCount != 0 {
    			throw("mheap.freeSpanLocked - invalid stack free")
    		}
    	case mSpanInUse:
    		if s.isUserArenaChunk {
    			throw("mheap.freeSpanLocked - invalid free of user arena chunk")
    		}
    		if s.allocCount != 0 || s.sweepgen != h.sweepgen {
    			print("mheap.freeSpanLocked - span ", s, " ptr ", hex(s.base()), " allocCount ", s.allocCount, " sweepgen ", s.sweepgen, "/", h.sweepgen, "\n")
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 22 22:31:00 UTC 2024
    - 78K bytes
    - Viewed (0)
  7. src/runtime/export_test.go

    			if sizeclass := s.spanclass.sizeclass(); sizeclass == 0 {
    				slow.Mallocs++
    				slow.Alloc += uint64(s.elemsize)
    			} else {
    				slow.Mallocs += uint64(s.allocCount)
    				slow.Alloc += uint64(s.allocCount) * uint64(s.elemsize)
    				bySize[sizeclass].Mallocs += uint64(s.allocCount)
    			}
    		}
    
    		// Add in frees by just reading the stats for those directly.
    		var m heapStatsDelta
    		memstats.heapStats.unsafeRead(&m)
    
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 30 17:50:53 UTC 2024
    - 46.1K bytes
    - Viewed (0)
  8. src/runtime/arena.go

    	h.initSpan(s, spanAllocHeap, spc, base, userArenaChunkPages)
    	s.isUserArenaChunk = true
    	s.elemsize -= userArenaChunkReserveBytes()
    	s.limit = s.base() + s.elemsize
    	s.freeindex = 1
    	s.allocCount = 1
    
    	// Account for this new arena chunk memory.
    	gcController.heapInUse.add(int64(userArenaChunkBytes))
    	gcController.heapReleased.add(-int64(userArenaChunkBytes))
    
    	stats := memstats.heapStats.acquire()
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 08 17:44:56 UTC 2024
    - 37.9K bytes
    - Viewed (0)
  9. tensorflow/cc/experimental/libtf/tests/value_test.cc

      ASSERT_EQ(c, TaggedValue(3.f));
    }
    
    namespace {
    int alloc_count = 0;
    class Cool {
     public:
      Cool() { alloc_count++; }
      ~Cool() { alloc_count--; }
    };
    }  // namespace
    
    TEST(Test1, TestCapsule) {
      TaggedValue test_moved, test_copy;
      ASSERT_EQ(alloc_count, 0);
      void* ptr_value = new Cool();
      {
        TaggedValue capsule =
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Sep 01 11:18:25 UTC 2022
    - 3.4K bytes
    - Viewed (0)
Back to top