Results 31 - 40 of 90 for getcallersp (0.17 sec)

  1. src/cmd/compile/internal/ssa/_gen/AMD64.rules

    (GetG mem) && v.Block.Func.OwnAux.Fn.ABI() != obj.ABIInternal => (LoweredGetG mem) // only lower in old ABI. in new ABI we have a G register.
    (GetClosurePtr ...) => (LoweredGetClosurePtr ...)
    (GetCallerPC ...) => (LoweredGetCallerPC ...)
    (GetCallerSP ...) => (LoweredGetCallerSP ...)
    
    (HasCPUFeature {s}) => (SETNE (CMPLconst [0] (LoweredHasCPUFeature {s})))
    (Addr {sym} base) => (LEAQ {sym} base)
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Tue Mar 12 19:38:41 UTC 2024
    - 93.9K bytes
    - Viewed (0)
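
    These rewrite rules lower the generic SSA ops behind the runtime's getcallerpc/getcallersp intrinsics (GetCallerPC, GetCallerSP) to amd64 pseudo-ops. The intrinsics themselves are reserved for package runtime; as a rough, illustrative stand-in for code outside the runtime, the public runtime.Caller API reports the immediate caller:

    package main

    import (
    	"fmt"
    	"runtime"
    )

    // whoCalledMe reports the function, file, and line of its immediate caller
    // using the public runtime.Caller API, not the runtime-internal
    // getcallerpc/getcallersp intrinsics that these rules lower.
    func whoCalledMe() string {
    	pc, file, line, ok := runtime.Caller(1) // skip=1: the caller's frame
    	if !ok {
    		return "unknown caller"
    	}
    	return fmt.Sprintf("%s at %s:%d", runtime.FuncForPC(pc).Name(), file, line)
    }

    func main() {
    	fmt.Println(whoCalledMe()) // prints main.main and its call site
    }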
  2. tensorflow/compiler/mlir/quantization/tensorflow/cc/quantization_unit_loc.cc

      if (!llvm::isa<CallSiteLoc>(attr)) return false;
      auto callsite_loc = llvm::dyn_cast<CallSiteLoc>(attr);
    
      if (!mlir::isa<NameLoc>(callsite_loc.getCaller())) return false;
      StringRef caller_name =
          mlir::cast<NameLoc>(callsite_loc.getCaller()).getName().strref();
      return caller_name.starts_with(kQuantizationUnitPrefix) &&
             caller_name.ends_with(kQuantizationUnitSuffix);
    }
    
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 4.2K bytes
    - Viewed (0)
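
    The snippet tests whether a CallSiteLoc's caller name is wrapped in the quantization-unit prefix and suffix. A minimal Go mirror of that string check, with made-up prefix/suffix values standing in for kQuantizationUnitPrefix/kQuantizationUnitSuffix (the real values live in the TensorFlow sources):

    package main

    import (
    	"fmt"
    	"strings"
    )

    // Hypothetical stand-ins for kQuantizationUnitPrefix / kQuantizationUnitSuffix.
    const (
    	unitPrefix = "quant_unit["
    	unitSuffix = "]"
    )

    // isQuantizationUnitName mirrors the caller-name test in the C++ snippet:
    // the name must start with the prefix and end with the suffix.
    func isQuantizationUnitName(name string) bool {
    	return strings.HasPrefix(name, unitPrefix) && strings.HasSuffix(name, unitSuffix)
    }

    func main() {
    	fmt.Println(isQuantizationUnitName("quant_unit[Conv2D]")) // true
    	fmt.Println(isQuantizationUnitName("plain_location"))     // false
    }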
  3. src/cmd/compile/internal/ssa/_gen/ARM64.rules

    (IsInBounds      idx len) => (LessThanU  (CMP idx len))
    (IsSliceInBounds idx len) => (LessEqualU (CMP idx len))
    
    // pseudo-ops
    (GetClosurePtr ...) => (LoweredGetClosurePtr ...)
    (GetCallerSP   ...) => (LoweredGetCallerSP   ...)
    (GetCallerPC   ...) => (LoweredGetCallerPC   ...)
    
    // Absorb pseudo-ops into blocks.
    (If (Equal         cc) yes no) => (EQ cc yes no)
    (If (NotEqual      cc) yes no) => (NE cc yes no)
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 23 15:49:20 UTC 2024
    - 113.1K bytes
    - Viewed (0)
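
    The IsInBounds/IsSliceInBounds rules turn the compiler's implicit bounds checks into unsigned compares, and the pseudo-op block is where GetCallerSP/GetCallerPC are mapped on arm64. A small sketch of ordinary Go whose index expression is what produces an IsInBounds op (whether the check survives depends on the compiler proving it redundant):

    package main

    import "fmt"

    // elem carries an implicit bounds check (0 <= i < len(s)); in generic SSA
    // that check is the IsInBounds op, which the rule above lowers on arm64 to
    // an unsigned compare, LessThanU (CMP idx len).
    func elem(s []int, i int) int {
    	return s[i]
    }

    func main() {
    	fmt.Println(elem([]int{10, 20, 30}, 1)) // 20
    }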
  4. tensorflow/compiler/mlir/tensorflow/utils/call_graph_util.h

    // Get all the entry functions in an MLIR module.
    llvm::SmallVector<func::FuncOp> GetEntryFunctions(ModuleOp module);
    
    // Get all the functions referenced in a symbol user op and save them in
    // `callees`.
    LogicalResult GetCallees(SymbolUserOpInterface op, SymbolTable &symtab,
                             llvm::SmallVector<func::FuncOp> &callees);
    
    // Find the first op with any of the specified types on each path rooted at the
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Sat Dec 16 06:18:49 UTC 2023
    - 4.3K bytes
    - Viewed (0)
  5. src/cmd/compile/internal/ssagen/ssa.go

    		},
    		all...)
    
    	add("runtime", "getcallerpc",
    		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
    			return s.newValue0(ssa.OpGetCallerPC, s.f.Config.Types.Uintptr)
    		},
    		all...)
    
    	add("runtime", "getcallersp",
    		func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
    			return s.newValue1(ssa.OpGetCallerSP, s.f.Config.Types.Uintptr, s.mem())
    		},
    		all...)
    
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Mon Jun 10 19:44:43 UTC 2024
    - 284.9K bytes
    - Viewed (0)
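
    Here the compiler registers runtime.getcallerpc and runtime.getcallersp as intrinsics: a call to either is replaced by a single OpGetCallerPC/OpGetCallerSP SSA value rather than compiled as a real call, which is why those functions need no Go body. Outside the runtime these names are unavailable; a hedged sketch of recovering a caller chain with the public API instead (runtime.Callers plus CallersFrames):

    package main

    import (
    	"fmt"
    	"runtime"
    )

    // logCallers resolves the calling frames with the public API; inside the
    // runtime package the intrinsics registered above serve the same low-level
    // purpose without a real call.
    func logCallers() {
    	pcs := make([]uintptr, 8)
    	n := runtime.Callers(2, pcs) // skip runtime.Callers and logCallers itself
    	if n == 0 {
    		return
    	}
    	frames := runtime.CallersFrames(pcs[:n])
    	for {
    		frame, more := frames.Next()
    		fmt.Printf("%s (%s:%d)\n", frame.Function, frame.File, frame.Line)
    		if !more {
    			break
    		}
    	}
    }

    func main() {
    	logCallers() // prints main.main and the frames above it
    }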
  6. src/runtime/map_fast64.go

    package runtime
    
    import (
    	"internal/abi"
    	"internal/goarch"
    	"unsafe"
    )
    
    func mapaccess1_fast64(t *maptype, h *hmap, key uint64) unsafe.Pointer {
    	if raceenabled && h != nil {
    		callerpc := getcallerpc()
    		racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess1_fast64))
    	}
    	if h == nil || h.count == 0 {
    		return unsafe.Pointer(&zeroVal[0])
    	}
    	if h.flags&hashWriting != 0 {
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 23 01:17:26 UTC 2024
    - 14.1K bytes
    - Viewed (0)
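
    The truncated condition h.flags&hashWriting != 0 is the runtime's always-on guard against unsynchronized map access: when a reader observes the flag set by a concurrent writer, the program aborts with a "concurrent map read and map write" fatal error. A deliberately racy sketch that is likely (not guaranteed) to trip that check; the supported fix is a mutex or sync.Map around the shared map:

    package main

    // Deliberately unsynchronized map access, for illustration only. Run as-is
    // this is likely (not guaranteed) to abort via the hashWriting branch shown
    // above.
    func main() {
    	m := make(map[uint64]int)
    	done := make(chan struct{})
    	go func() {
    		for i := uint64(0); i < 1_000_000; i++ {
    			m[i] = int(i) // writer sets hashWriting while each store runs
    		}
    		close(done)
    	}()
    	var sink int
    	for {
    		select {
    		case <-done:
    			_ = sink
    			return
    		default:
    			sink += m[42] // reader may observe hashWriting and abort
    		}
    	}
    }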
  7. src/runtime/map_fast32.go

    package runtime
    
    import (
    	"internal/abi"
    	"internal/goarch"
    	"unsafe"
    )
    
    func mapaccess1_fast32(t *maptype, h *hmap, key uint32) unsafe.Pointer {
    	if raceenabled && h != nil {
    		callerpc := getcallerpc()
    		racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess1_fast32))
    	}
    	if h == nil || h.count == 0 {
    		return unsafe.Pointer(&zeroVal[0])
    	}
    	if h.flags&hashWriting != 0 {
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 23 01:17:26 UTC 2024
    - 13.9K bytes
    - Viewed (0)
  8. tensorflow/compiler/mlir/tensorflow/utils/call_graph_util.cc

        // have `tf_saved_model.initializer_type` instead.
        if (IsEntryFunction(func)) {
          entry_funcs.push_back(func);
        }
      });
      return entry_funcs;
    }
    
    LogicalResult GetCallees(SymbolUserOpInterface op, SymbolTable &symtab,
                             llvm::SmallVector<func::FuncOp> &callees) {
      for (auto attr : op->getAttrs()) {
        auto sym = mlir::dyn_cast<SymbolRefAttr>(attr.getValue());
    Registered: Sun Jun 16 05:45:23 UTC 2024
    - Last Modified: Thu Apr 25 16:01:03 UTC 2024
    - 2.6K bytes
    - Viewed (0)
  9. src/runtime/map_faststr.go

    package runtime
    
    import (
    	"internal/abi"
    	"internal/goarch"
    	"unsafe"
    )
    
    func mapaccess1_faststr(t *maptype, h *hmap, ky string) unsafe.Pointer {
    	if raceenabled && h != nil {
    		callerpc := getcallerpc()
    		racereadpc(unsafe.Pointer(h), callerpc, abi.FuncPCABIInternal(mapaccess1_faststr))
    	}
    	if h == nil || h.count == 0 {
    		return unsafe.Pointer(&zeroVal[0])
    	}
    	if h.flags&hashWriting != 0 {
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu May 23 01:17:26 UTC 2024
    - 15.3K bytes
    - Viewed (0)
  10. src/runtime/race_amd64.s

    #define RARG2 DX
    #define RARG3 CX
    #endif
    
    // func runtime·raceread(addr uintptr)
    // Called from instrumented code.
    // Defined as ABIInternal so as to avoid introducing a wrapper,
    // which would render runtime.getcallerpc ineffective.
    TEXT	runtime·raceread<ABIInternal>(SB), NOSPLIT, $0-8
    	MOVQ	AX, RARG1
    	MOVQ	(SP), RARG2
    	// void __tsan_read(ThreadState *thr, void *addr, void *pc);
    	MOVQ	$__tsan_read(SB), AX
    	JMP	racecalladdr<>(SB)
    
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Fri May 17 18:37:29 UTC 2024
    - 15.1K bytes
    - Viewed (0)
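
    raceread is the amd64 entry point that instrumented code calls for each monitored load when a program is built with -race; it forwards the caller's PC (taken straight from the stack, as the comment about getcallerpc explains) to TSan's __tsan_read. A minimal program whose unsynchronized read/write pair "go run -race" reports through this kind of entry point:

    package main

    import (
    	"fmt"
    	"sync"
    )

    // A textbook data race: under -race, the write in the goroutine and the
    // read in main are flagged by the race detector.
    func main() {
    	var wg sync.WaitGroup
    	x := 0
    	wg.Add(1)
    	go func() {
    		defer wg.Done()
    		x = 1 // unsynchronized write
    	}()
    	fmt.Println(x) // unsynchronized read, reported under -race
    	wg.Wait()
    }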