Results 21 - 30 of 36 for stack (0.06 sec)

  1. src/runtime/mheap.go

    // whether the span has been zeroed. Note that it may not be.
    func (h *mheap) alloc(npages uintptr, spanclass spanClass) *mspan {
    	// Don't do any operations that lock the heap on the G stack.
    	// It might trigger stack growth, and the stack growth code needs
    	// to be able to allocate heap.
    	var s *mspan
    	systemstack(func() {
    		// To prevent excessive heap growth, before allocating n pages

    Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Wed May 22 22:31:00 UTC 2024 - 78K bytes - Viewed (0)
  2. src/cmd/compile/internal/ssa/prove.go

    // Called when backing up on a branch.
    func (ft *factsTable) restore() {
    	if ft.unsatDepth > 0 {
    		ft.unsatDepth--
    	} else {
    		ft.unsat = false
    	}
    	for {
    		old := ft.stack[len(ft.stack)-1]
    		ft.stack = ft.stack[:len(ft.stack)-1]
    		if old == checkpointFact {
    			break
    		}
    		if old.r == lt|eq|gt {
    			delete(ft.facts, old.p)
    		} else {
    			ft.facts[old.p] = old.r
    		}
    	}
    	for {

    Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 04 17:30:21 UTC 2024 - 48.9K bytes - Viewed (0)
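The restore loop in the prove.go excerpt above pops an undo log back to a checkpoint sentinel, reverting every fact recorded since the last checkpoint. Below is a minimal, self-contained sketch of that checkpoint/undo-log pattern; the type and method names are hypothetical and are not the compiler's factsTable API.

    package main

    import "fmt"

    // entry records one overwritten binding; sentinel entries mark checkpoints.
    type entry struct {
    	key      string
    	old      int
    	hadOld   bool
    	sentinel bool
    }

    type table struct {
    	facts map[string]int
    	undo  []entry
    }

    func newTable() *table { return &table{facts: map[string]int{}} }

    // set saves the previous binding on the undo log before overwriting it.
    func (t *table) set(k string, v int) {
    	old, ok := t.facts[k]
    	t.undo = append(t.undo, entry{key: k, old: old, hadOld: ok})
    	t.facts[k] = v
    }

    // checkpoint pushes a sentinel that restore can rewind to.
    func (t *table) checkpoint() {
    	t.undo = append(t.undo, entry{sentinel: true})
    }

    // restore pops entries until it reaches the most recent sentinel,
    // putting each old value back (or deleting keys that had none).
    func (t *table) restore() {
    	for {
    		e := t.undo[len(t.undo)-1]
    		t.undo = t.undo[:len(t.undo)-1]
    		if e.sentinel {
    			break
    		}
    		if e.hadOld {
    			t.facts[e.key] = e.old
    		} else {
    			delete(t.facts, e.key)
    		}
    	}
    }

    func main() {
    	t := newTable()
    	t.set("x", 1)
    	t.checkpoint()
    	t.set("x", 2)
    	t.set("y", 3)
    	t.restore()
    	fmt.Println(t.facts) // map[x:1]
    }
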
  3. src/cmd/dist/build.go

    		// Do not include local development, so that people working in the
    		// main branch for day-to-day work on the Go toolchain itself can
    		// still have full paths for stack traces for compiler crashes and the like.
    		env = append(env, "GOFLAGS=-trimpath -ldflags=-w -gcflags=cmd/...=-dwarf=false")
    	}
    	return env
    }
    
    Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon May 20 18:34:40 UTC 2024 - 54K bytes - Viewed (0)
  4. src/index/suffixarray/sais2.go

    	// max(maxID, numLMS/2). This level of the recursion needs maxID,
    	// and all deeper levels of the recursion will need no more than numLMS/2,
    	// so this one allocation is guaranteed to suffice for the entire stack
    	// of recursive calls.
    	tmp := oldTmp
    	if len(tmp) < len(saTmp) {
    		tmp = saTmp
    	}
    	if len(tmp) < numLMS {
    		// TestSAIS/forcealloc reaches this code.
    		n := maxID
    		if n < numLMS/2 {

    Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 18 23:57:18 UTC 2024 - 52.3K bytes - Viewed (0)
  5. src/cmd/vendor/golang.org/x/sys/unix/syscall_linux.go

    // minIovec is the size of the small initial allocation used by
    // Readv, Writev, etc.
    //
    // This small allocation gets stack allocated, which lets the
    // common use case of len(iovs) <= minIovs avoid more expensive
    // heap allocations.
    const minIovec = 8
    
    // appendBytes converts bs to Iovecs and appends them to vecs.

    Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu Mar 07 05:26:45 UTC 2024 - 77.5K bytes - Viewed (0)
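The comment in the syscall_linux.go excerpt explains the small-initial-allocation trick: give append a small constant capacity so the common case never touches the heap. Here is a minimal sketch of that pattern in isolation; the names (smallCap, totalLen) are made up for illustration and are not part of x/sys/unix.

    package main

    import "fmt"

    // smallCap plays the role of minIovec: a small constant capacity hint.
    const smallCap = 8

    // totalLen builds a temporary slice of lengths with a smallCap capacity
    // hint. Because the slice never leaves the function, escape analysis can
    // keep its backing array on the stack whenever it stays within smallCap,
    // avoiding a heap allocation in the common case.
    func totalLen(chunks [][]byte) int {
    	lens := make([]int, 0, smallCap)
    	for _, c := range chunks {
    		lens = append(lens, len(c))
    	}
    	sum := 0
    	for _, n := range lens {
    		sum += n
    	}
    	return sum
    }

    func main() {
    	fmt.Println(totalLen([][]byte{[]byte("go"), []byte("stack")})) // 7
    }
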
  6. src/cmd/link/internal/ppc64/asm.go

    //
    // There are 3 cases today (as paraphrased from the ELFv2 document):
    //
    //  1. R2 holds the TOC pointer on entry. The call stub must save R2 into the ELFv2 TOC stack save slot.
    //
    //  2. R2 holds the TOC pointer on entry. The caller has already saved R2 to the TOC stack save slot.
    //
    //  3. R2 does not hold the TOC pointer on entry. The caller has no expectations of R2.
    //

    Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Mar 19 20:54:08 UTC 2024 - 63.7K bytes - Viewed (0)
  7. src/regexp/syntax/parse.go

    		re1 := p.stack[n-1]
    		re3 := p.stack[n-3]
    		// Make re3 the more complex of the two.
    		if re1.Op > re3.Op {
    			re1, re3 = re3, re1
    			p.stack[n-3] = re3
    		}
    		mergeCharClass(re3, re1)
    		p.reuse(re1)
    		p.stack = p.stack[:n-1]
    		return true
    	}
    
    	if n >= 2 {
    		re1 := p.stack[n-1]
    		re2 := p.stack[n-2]
    		if re2.Op == opVerticalBar {
    			if n >= 3 {

    Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Tue Apr 02 13:59:01 UTC 2024 - 52.1K bytes - Viewed (0)
  8. src/cmd/cgo/out.go

    		// assignments, so it won't use much stack space, so
    		// it's OK to not split the stack. Splitting the stack
    		// can run into a bug in clang (as of 2018-11-09):
    		// this is a leaf function, and when clang sees a leaf
    		// function it won't emit the split stack prologue for
    		// the function. However, if this function refers to a
    		// non-split-stack function, which will happen if the

    Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Fri Mar 29 16:41:10 UTC 2024 - 59.6K bytes - Viewed (0)
  9. src/runtime/mgcpacer.go

    	// lastStackScan is the number of bytes of stack that were scanned
    	// last GC cycle.
    	lastStackScan atomic.Uint64
    
    	// maxStackScan is the amount of allocated goroutine stack space in
    	// use by goroutines.
    	//
    	// This number tracks allocated goroutine stack space rather than used
    	// goroutine stack space (i.e. what is actually scanned) because used
    	// goroutine stack space is much harder to measure cheaply. By using

    Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Mon Mar 25 19:53:03 UTC 2024 - 55.4K bytes - Viewed (0)
  10. src/cmd/internal/obj/riscv/obj.go

    			alignedValue := p.From.Offset
    			v := pcAlignPadLength(pc, alignedValue)
    			pc += int64(v)
    		}
    	}
    	return pc
    }
    
    // stackOffset updates Addr offsets based on the current stack size.
    //
    // The stack looks like:
    // -------------------
    // |                 |
    // |      PARAMs     |
    // |                 |
    // |                 |
    // -------------------
    // |    Parent RA    |   SP on function entry

    Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Sun Apr 07 03:32:27 UTC 2024 - 77K bytes - Viewed (0)
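The obj/riscv excerpt above computes alignment padding with pcAlignPadLength before describing the stack layout that stackOffset rewrites offsets against. As a standalone illustration of the padding arithmetic only, the hypothetical helper below returns how many bytes advance pc to the next multiple of a power-of-two alignment; it is not the actual pcAlignPadLength implementation.

    package main

    import "fmt"

    // padLength returns the number of padding bytes needed to round pc up
    // to the next multiple of align, where align is a power of two.
    func padLength(pc, align int64) int64 {
    	return -pc & (align - 1)
    }

    func main() {
    	fmt.Println(padLength(6, 8))  // 2
    	fmt.Println(padLength(16, 8)) // 0
    }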