Results 1 - 6 of 6 for checkindex (0.14 sec)

  1. src/cmd/internal/obj/arm64/asm7.go

    		case ARNG_B:
    			c.checkindex(p, index1, 15)
    			c.checkindex(p, index2, 15)
    			imm5 |= 1
    			imm5 |= index1 << 1
    			imm4 |= index2
    		case ARNG_H:
    			c.checkindex(p, index1, 7)
    			c.checkindex(p, index2, 7)
    			imm5 |= 2
    			imm5 |= index1 << 2
    			imm4 |= index2 << 1
    		case ARNG_S:
    			c.checkindex(p, index1, 3)
    			c.checkindex(p, index2, 3)
    			imm5 |= 4
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 15 15:44:14 UTC 2024
    - 201.1K bytes
    - Viewed (0)
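    The snippet packs two vector element indices into the imm5/imm4 fields of an ARM64 SIMD instruction, bounds-checking each index against the lane count of its arrangement first (B: 0-15, H: 0-7, S: 0-3). A minimal sketch of that kind of bounds check follows; checkIndexSketch is a hypothetical stand-in, since the assembler's real checkindex reports the error through its Prog/ctxt machinery rather than returning one.

        package main

        import "fmt"

        // checkIndexSketch rejects a vector element index that does not fit the
        // arrangement's lane count (e.g. 0-15 for a byte arrangement).
        func checkIndexSketch(index, maxIndex int16) error {
            if index < 0 || index > maxIndex {
                return fmt.Errorf("register element index out of range 0 to %d: %d", maxIndex, index)
            }
            return nil
        }

        func main() {
            fmt.Println(checkIndexSketch(9, 7)) // an H-arrangement index must be 0-7
        }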
  2. tests/migrate_test.go

    			checkColumnType(t, "name", false)
    			checkIndex(t, nil)
    		}
    		checkUnique = func(t *testing.T) {
    			checkColumnType(t, "name", true)
    			checkIndex(t, []gorm.Index{uniqueConstraintIndex})
    		}
    		checkUniqueIndex = func(t *testing.T) {
    			checkColumnType(t, "name", true)
    			checkIndex(t, []gorm.Index{uniqueIndex})
    		}
    		checkMyIndex = func(t *testing.T) {
    			checkColumnType(t, "name", true)
    Registered: Wed Jun 12 16:27:09 UTC 2024
    - Last Modified: Mon Mar 18 11:24:16 UTC 2024
    - 56.2K bytes
    - Viewed (0)
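    The test toggles between helpers that assert which indexes exist after a migration. Below is a hedged sketch of what a checkIndex-style helper could look like against GORM's migrator; it assumes Migrator().GetIndexes is available in the GORM version in use, uses a hypothetical User model, and compares only index names, which is simpler than the test file's real helper.

        package tests

        import (
            "testing"

            "gorm.io/gorm"
        )

        // User is a hypothetical model used only for this sketch.
        type User struct {
            ID   uint
            Name string `gorm:"index"`
        }

        // checkIndexSketch asserts that the indexes GORM reports for the model
        // match the expected ones by name. It is an illustration, not the test
        // file's actual helper; it also assumes the reported order is stable.
        func checkIndexSketch(t *testing.T, db *gorm.DB, want []gorm.Index) {
            t.Helper()
            got, err := db.Migrator().GetIndexes(&User{})
            if err != nil {
                t.Fatalf("GetIndexes: %v", err)
            }
            if len(got) != len(want) {
                t.Fatalf("expected %d indexes, got %d", len(want), len(got))
            }
            for i, idx := range want {
                if got[i].Name() != idx.Name() {
                    t.Errorf("index %d: got %q, want %q", i, got[i].Name(), idx.Name())
                }
            }
        }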
  3. src/cmd/cgo/gcc.go

    	// Using a function literal like this lets us evaluate the
    	// function arguments only once while doing pointer checks.
    	// This is particularly useful when passing additional arguments
    	// to _cgoCheckPointer, as done in checkIndex and checkAddr.
    	//
    	// When the function argument is a conversion to unsafe.Pointer,
    	// we unwrap the conversion before checking the pointer,
    	// and then wrap again when calling C.f. This lets us check
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Mon May 20 15:50:06 UTC 2024
    - 97K bytes
    - Viewed (0)
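    The comment describes cgo's rewrite of a C call whose arguments need pointer checks: the call is wrapped in a function literal so each argument expression is evaluated exactly once, checked, and then passed on. The plain-Go sketch below illustrates that evaluate-once wrapping pattern; checkArg stands in for _cgoCheckPointer and all names are invented, so this shows the shape of the idea, not cgo's generated output.

        package main

        import "fmt"

        // checkArg stands in for _cgoCheckPointer: in cgo's generated code the
        // real check validates that the pointer doesn't expose Go pointers to C.
        func checkArg(p *int) *int {
            fmt.Println("checking", p)
            return p
        }

        // f stands in for the C function being called.
        func f(p *int) { fmt.Println("calling f with", p) }

        func main() {
            xs := []int{1, 2, 3}
            i := 0
            next := func() int { i++; return i - 1 } // argument with a side effect

            // The wrapping the comment describes: the argument expression is
            // evaluated once, bound to the literal's parameter, checked, and
            // only then used in the call.
            func(_arg0 *int) {
                f(checkArg(_arg0))
            }(&xs[next()])
        }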
  4. src/runtime/mpagealloc.go

    		// Fast path: we're clearing a single bit, and we know exactly
    		// where it is, so mark it directly.
    		i := chunkIndex(base)
    		pi := chunkPageIndex(base)
    		p.chunkOf(i).free1(pi)
    		p.scav.index.free(i, pi, 1)
    	} else {
    		// Slow path: we're clearing more bits so we may need to iterate.
    		sc, ec := chunkIndex(base), chunkIndex(limit)
    		si, ei := chunkPageIndex(base), chunkPageIndex(limit)
    
    		if sc == ec {
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Mon Mar 25 19:53:03 UTC 2024
    - 39.2K bytes
    - Viewed (0)
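    The fast path frees a single page by splitting its address into a chunk index (which chunk of the heap the page lives in) and a page index within that chunk. A simplified sketch of that split follows; the constants are illustrative, and the real runtime also offsets addresses by an arena base before dividing.

        package main

        import "fmt"

        const (
            pageSizeSketch   = 8 << 10 // assumed 8 KiB pages
            chunkBytesSketch = 4 << 20 // assumed 4 MiB chunks
        )

        // chunkIndexSketch returns which chunk an address falls in.
        func chunkIndexSketch(addr uintptr) uint { return uint(addr / chunkBytesSketch) }

        // chunkPageIndexSketch returns the page's position within its chunk.
        func chunkPageIndexSketch(addr uintptr) uint {
            return uint(addr % chunkBytesSketch / pageSizeSketch)
        }

        func main() {
            base := uintptr(0xc000412000)
            fmt.Println(chunkIndexSketch(base), chunkPageIndexSketch(base))
        }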
  5. src/runtime/export_test.go

    }
    
    func (s *ScavengeIndex) Find(force bool) (ChunkIdx, uint) {
    	ci, off := s.i.find(force)
    	return ChunkIdx(ci), off
    }
    
    func (s *ScavengeIndex) AllocRange(base, limit uintptr) {
    	sc, ec := chunkIndex(base), chunkIndex(limit-1)
    	si, ei := chunkPageIndex(base), chunkPageIndex(limit-1)
    
    	if sc == ec {
    		// The range doesn't cross any chunk boundaries.
    		s.i.alloc(sc, ei+1-si)
    	} else {
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 30 17:50:53 UTC 2024
    - 46.1K bytes
    - Viewed (0)
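    AllocRange converts [base, limit) into chunk and page indexes and takes a fast path when both ends land in the same chunk (sc == ec). The snippet cuts off before the multi-chunk branch; the sketch below shows the usual shape of that split, with alloc and pagesPerChunk as stand-ins rather than the runtime's own fields.

        package main

        // allocRangeSketch marks a range of pages as allocated across one or more
        // chunks: the tail of the first chunk, every whole chunk in between, and
        // the head of the last chunk.
        func allocRangeSketch(alloc func(chunk, npages uint), sc, ec, si, ei, pagesPerChunk uint) {
            if sc == ec {
                // The range doesn't cross any chunk boundaries.
                alloc(sc, ei+1-si)
                return
            }
            alloc(sc, pagesPerChunk-si) // remainder of the first chunk
            for c := sc + 1; c < ec; c++ {
                alloc(c, pagesPerChunk) // whole chunks in the middle
            }
            alloc(ec, ei+1) // leading pages of the last chunk
        }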
  6. src/runtime/mgcscavenge.go

    	// Update minHeapIdx. Note that even if there's no mapping work to do,
    	// we may still have a new, lower minimum heap address.
    	minHeapIdx := s.minHeapIdx.Load()
    	if baseIdx := uintptr(chunkIndex(base)); minHeapIdx == 0 || baseIdx < minHeapIdx {
    		s.minHeapIdx.Store(baseIdx)
    	}
    	return s.sysGrow(base, limit, sysStat)
    }
    
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Wed May 08 17:48:45 UTC 2024
    - 52.3K bytes
    - Viewed (0)
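    minHeapIdx records the lowest chunk index the heap has mapped, with zero meaning "not set yet". A minimal sketch of that load-compare-store minimum pattern follows; it assumes, as the runtime does at this point, that callers already serialize growth, so a plain store (rather than a compare-and-swap loop) is enough.

        package main

        import (
            "fmt"
            "sync/atomic"
        )

        // minIdx tracks the lowest index seen so far; zero means "unset".
        var minIdx atomic.Uintptr

        // noteIndex stores candidate as the new minimum if it is lower than the
        // current value, or if no minimum has been recorded yet.
        func noteIndex(candidate uintptr) {
            if cur := minIdx.Load(); cur == 0 || candidate < cur {
                minIdx.Store(candidate)
            }
        }

        func main() {
            noteIndex(42)
            noteIndex(17)
            fmt.Println(minIdx.Load()) // 17
        }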