Results 1 - 6 of 6 for racemalloc (0.15 sec)

  1. src/runtime/race0.go

    func racereleasemergeg(gp *g, addr unsafe.Pointer)                          { throw("race") }
    func racefingo()                                                            { throw("race") }
    func racemalloc(p unsafe.Pointer, sz uintptr)                               { throw("race") }
    func racefree(p unsafe.Pointer, sz uintptr)                                 { throw("race") }
    Last Modified: Thu Oct 28 18:17:57 UTC 2021 - 2.8K bytes
  2. src/runtime/race.go

    		racearenastart = uintptr(addr)
    	}
    	if racearenaend < uintptr(addr)+size {
    		racearenaend = uintptr(addr) + size
    	}
    	racecall(&__tsan_map_shadow, uintptr(addr), size, 0, 0)
    }
    
    //go:nosplit
    func racemalloc(p unsafe.Pointer, sz uintptr) {
    	racecall(&__tsan_malloc, 0, 0, uintptr(p), sz)
    }
    
    //go:nosplit
    func racefree(p unsafe.Pointer, sz uintptr) {
    	racecall(&__tsan_free, uintptr(p), sz, 0, 0)
    }
    
    Last Modified: Fri May 17 18:37:29 UTC 2024 - 20.4K bytes
  3. src/runtime/stack.go

    	}
    
    	if traceAllocFreeEnabled() {
    		trace := traceTryAcquire()
    		if trace.ok() {
    			trace.GoroutineStackAlloc(uintptr(v), uintptr(n))
    			traceRelease(trace)
    		}
    	}
    	if raceenabled {
    		racemalloc(v, uintptr(n))
    	}
    	if msanenabled {
    		msanmalloc(v, uintptr(n))
    	}
    	if asanenabled {
    		asanunpoison(v, uintptr(n))
    	}
    	if stackDebug >= 1 {
    		print("  allocated ", v, "\n")
    	}
    Last Modified: Wed May 22 22:31:00 UTC 2024 - 41.1K bytes
  4. src/runtime/arena.go

    	// a race marking the bit.
    	if gcphase != _GCoff {
    		gcmarknewobject(span, span.base())
    	}
    
    	if raceenabled {
    		// TODO(mknyszek): Track individual objects.
    		racemalloc(unsafe.Pointer(span.base()), span.elemsize)
    	}
    
    	if msanenabled {
    		// TODO(mknyszek): Track individual objects.
    		msanmalloc(unsafe.Pointer(span.base()), span.elemsize)
    	}
    
    	if asanenabled {
    Last Modified: Wed May 08 17:44:56 UTC 2024 - 37.9K bytes
  5. src/runtime/malloc.go

    	// This may be racing with GC so do it atomically if there can be
    	// a race marking the bit.
    	if gcphase != _GCoff {
    		gcmarknewobject(span, uintptr(x))
    	}
    
    	if raceenabled {
    		racemalloc(x, size)
    	}
    
    	if msanenabled {
    		msanmalloc(x, size)
    	}
    
    	if asanenabled {
    		// We should only read/write the memory with the size asked by the user.
    Last Modified: Wed May 29 17:58:53 UTC 2024 - 59.6K bytes
  6. src/runtime/proc.go

    		systemstack(func() {
    			gp.stack = stackalloc(startingStackSize)
    		})
    		gp.stackguard0 = gp.stack.lo + stackGuard
    	} else {
    		if raceenabled {
    			racemalloc(unsafe.Pointer(gp.stack.lo), gp.stack.hi-gp.stack.lo)
    		}
    		if msanenabled {
    			msanmalloc(unsafe.Pointer(gp.stack.lo), gp.stack.hi-gp.stack.lo)
    		}
    		if asanenabled {
    Last Modified: Wed May 29 17:58:53 UTC 2024 - 207.5K bytes
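
The hits above are the allocation-side race-detector hooks: race0.go holds the stubs used when the detector is disabled, race.go forwards racemalloc to ThreadSanitizer via racecall(&__tsan_malloc, ...), and stack.go, arena.go, malloc.go, and proc.go call racemalloc so that stack, arena, and heap allocations are registered with the detector. As illustrative context only (this program is not part of the indexed sources), a minimal sketch of user code whose bug these hooks help report when built with the standard -race flag; the shared counter typically escapes to the heap, so its allocation passes through mallocgc and hence racemalloc:

    // Illustrative sketch, not from the indexed Go runtime sources.
    package main

    import "sync"

    func main() {
    	counter := 0 // shared by the goroutines below without synchronization
    	var wg sync.WaitGroup
    	for i := 0; i < 2; i++ {
    		wg.Add(1)
    		go func() {
    			defer wg.Done()
    			counter++ // concurrent unsynchronized write: a deliberate data race
    		}()
    	}
    	wg.Wait()
    	println(counter)
    }

Running it with go run -race main.go makes the detector report the conflicting writes to counter.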