Results 1 - 3 of 3 for with_xla_support (0.14 sec)

  1. configure.py

            write_to_bazelrc(
                'build --copt="-DEIGEN_ALTIVEC_ENABLE_MMA_DYNAMIC_DISPATCH=1"')
    
      with_xla_support = environ_cp.get('TF_ENABLE_XLA', None)
      if with_xla_support is not None:
        write_to_bazelrc('build --define=with_xla_support=%s' %
                         ('true' if int(with_xla_support) else 'false'))
    
      set_action_env_var(
          environ_cp, 'TF_NEED_ROCM', 'ROCm', False, bazel_config_name='rocm')
    - Last Modified: Wed Oct 02 22:16:02 UTC 2024
    - 48.2K bytes
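    This configure.py fragment maps the TF_ENABLE_XLA environment variable to a
    bazel define: any non-zero value becomes with_xla_support=true, zero becomes
    false, and an unset variable writes nothing. As a hedged illustration (the
    example value TF_ENABLE_XLA=1 and the generated-bazelrc destination are
    assumptions, not shown in the excerpt), the line written would be:

        build --define=with_xla_support=true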
  2. .bazelrc

    # Enable all targets in XLA
    build:cpu_cross --define=with_cross_compiler_support=true
    
    # Disable XLA on mobile.
    build:xla     --define=with_xla_support=true # TODO: remove, it's on by default.
    build:android --define=with_xla_support=false
    build:ios     --define=with_xla_support=false
    
    # BEGIN TF REMOTE BUILD EXECUTION OPTIONS
    # Options when using remote execution
    - Last Modified: Mon Oct 28 22:02:31 UTC 2024
    - 51.3K bytes
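    The build:xla, build:android, and build:ios entries are named config groups:
    bazel expands --config=NAME into the flags listed under build:NAME. As an
    illustrative invocation (the //tensorflow/... target pattern is an example,
    not taken from this page):

        bazel build --config=xla //tensorflow/...

    which is equivalent to passing --define=with_xla_support=true directly, the
    same define that the config_setting in tensorflow/BUILD below keys on.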
  3. tensorflow/BUILD

        visibility = ["//visibility:public"],
    )
    
    config_setting(
        name = "with_xla_support",
        define_values = {"with_xla_support": "true"},
        visibility = ["//visibility:public"],
    )
    
    # By default, XLA GPU is compiled into tensorflow when building with
    # --config=cuda even when `with_xla_support` is false. The config setting
    # here allows us to override the behavior if needed.
    config_setting(
    - Last Modified: Wed Oct 16 05:28:35 UTC 2024
    - 53.5K bytes
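    The with_xla_support config_setting matches whenever the build sets
    --define=with_xla_support=true, for example via --config=xla above. A
    minimal sketch of how such a setting is typically consumed in a BUILD file
    (the target, file, and macro names here are hypothetical, not taken from
    tensorflow/BUILD):

        cc_library(
            name = "xla_aware_lib",  # hypothetical target, for illustration only
            srcs = ["xla_aware_lib.cc"],  # hypothetical source file
            defines = select({
                # Branch taken when --define=with_xla_support=true is in effect.
                "//tensorflow:with_xla_support": ["TF_XLA_ENABLED=1"],
                # Fallback when the define is absent or false.
                "//conditions:default": [],
            }),
        )

    select() picks the branch whose config_setting matches the current build
    configuration, so XLA-specific defines (or deps) are added only when the
    flag is on.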