Results 1 - 10 of 19 for config (0.17 sec)

  1. ci/official/requirements_updater/BUILD.bazel

    load("@python//3.12:defs.bzl", compile_pip_requirements_3_12 = "compile_pip_requirements")
    load("@python//3.9:defs.bzl", compile_pip_requirements_3_9 = "compile_pip_requirements")
    load("@updater_config_repository//:updater_config_repository.bzl", "REQUIREMENTS_FILE_NAME")
    
    compile_pip_requirements_3_9(
        name = "requirements_3_9",
        extra_args = ["--allow-unsafe"],
        requirements_in = REQUIREMENTS_FILE_NAME,
    Plain Text
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Wed Mar 27 18:00:18 GMT 2024
    - 3K bytes
    - Viewed (2)
  2. ci/official/requirements_updater/WORKSPACE

    load("@rules_python//python/pip_install:repositories.bzl", "pip_install_dependencies")
    
    default_python_version = "3.10"
    
    load(
        "//:updater_config_repository.bzl",
        "updater_config_repository",  # @unused
    )
    
    updater_config_repository(name = "updater_config_repository")
    
    python_register_multi_toolchains(
        name = "python",
        default_version = default_python_version,
        ignore_root_user_error = True,
    Plain Text
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Fri Apr 05 22:12:56 GMT 2024
    - 1.6K bytes
    - Viewed (1)
  3. tensorflow/BUILD

        "@llvm_terminfo//:__subpackages__",
        "@llvm_zlib//:__subpackages__",
        "@local_config_cuda//:__subpackages__",
        "@local_config_git//:__subpackages__",
        "@local_config_nccl//:__subpackages__",
        "@local_config_rocm//:__subpackages__",
        "@local_config_tensorrt//:__subpackages__",
        "@local_execution_config_platform//:__subpackages__",
        "@mkl_dnn_acl_compatible//:__subpackages__",
    Plain Text
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Tue Apr 09 18:15:11 GMT 2024
    - 53.4K bytes
    - Viewed (8)
  4. CONTRIBUTING.md

        ```bash
        export flags="--config=opt -k"
        ```
    
        If the tests are to be run on the GPU, add CUDA paths to LD_LIBRARY_PATH and
        add the `cuda` option flag
    
        ```bash
        export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/local/cuda/lib64:/usr/local/cuda/extras/CUPTI/lib64:$LD_LIBRARY_PATH"
        export flags="--config=opt --config=cuda -k"
        ```
    
    Plain Text
    - Registered: Tue May 07 12:40:20 GMT 2024
    - Last Modified: Thu Mar 21 11:45:51 GMT 2024
    - 15.6K bytes
    - Viewed (0)
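    Note on the CONTRIBUTING.md excerpt above: the exported `flags` variable is only defined here; it is meant to be expanded in a later `bazel test` invocation. A minimal sketch of that usage with the CPU-only flags from the excerpt; the `//tensorflow/python/...` target pattern is illustrative, not taken from the excerpt:

        # Reuse the flags exported above across test invocations.
        export flags="--config=opt -k"
        # Illustrative target pattern; substitute the package under test.
        bazel test ${flags} //tensorflow/python/...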
  5. ci/official/wheel.sh

      gsutil cp -n "$TFCI_OUTPUT_DIR"/*.whl "$TFCI_ARTIFACT_STAGING_GCS_URI"
    fi
    
    if [[ "$TFCI_WHL_BAZEL_TEST_ENABLE" == 1 ]]; then
      tfrun bazel test $TFCI_BAZEL_COMMON_ARGS --config="${TFCI_BAZEL_TARGET_SELECTING_CONFIG_PREFIX}_wheel_test"
    Shell Script
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Wed Mar 06 21:54:13 GMT 2024
    - 1.8K bytes
    - Viewed (0)
  6. .bazelrc

    build:rbe_linux_cuda --repo_env=TF_CUDA_CONFIG_REPO="@sigbuild-r2.17-clang_config_cuda"
    build:rbe_linux_cuda --repo_env=TF_TENSORRT_CONFIG_REPO="@sigbuild-r2.17-clang_config_tensorrt"
    build:rbe_linux_cuda --repo_env=TF_NCCL_CONFIG_REPO="@sigbuild-r2.17-clang_config_nccl"
    test:rbe_linux_cuda --test_env=LD_LIBRARY_PATH="/usr/local/cuda/lib64:/usr/local/cuda/extras/CUPTI/lib64"
    
    build:rbe_linux_cuda_nvcc --config=rbe_linux_cuda
    Plain Text
    - Registered: Tue May 07 12:40:20 GMT 2024
    - Last Modified: Thu May 02 19:34:20 GMT 2024
    - 52.8K bytes
    - Viewed (2)
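    Note on the .bazelrc excerpt above: named configs compose, so `build:rbe_linux_cuda_nvcc --config=rbe_linux_cuda` makes the nvcc config inherit every `rbe_linux_cuda` setting shown. A hedged sketch of selecting such a config on the command line; the wheel target is borrowed from the code_check_full.bats result below and is only illustrative here:

        # --config=rbe_linux_cuda_nvcc expands the build:rbe_linux_cuda_nvcc lines,
        # which in turn pull in --config=rbe_linux_cuda and its TF_*_CONFIG_REPO settings.
        bazel build --config=rbe_linux_cuda_nvcc //tensorflow/tools/pip_package:wheel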
  7. configure.py

      print('    %s' % config['cudnn_library_dir'])
      print('    %s' % config['cudnn_include_dir'])
    
      if 'tensorrt_version' in config:
        print('Found TensorRT %s in:' % config['tensorrt_version'])
        print('    %s' % config['tensorrt_library_dir'])
        print('    %s' % config['tensorrt_include_dir'])
    
      if config.get('nccl_version', None):
        print('Found NCCL %s in:' % config['nccl_version'])
    Python
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Mon Apr 15 18:25:36 GMT 2024
    - 53.8K bytes
    - Viewed (1)
  8. ci/official/utilities/code_check_full.bats

        --@local_config_cuda//:enable_cuda \
        "somepath(//tensorflow/tools/pip_package:wheel, " \
        "@local_config_cuda//cuda:cudart + "\
        "@local_config_cuda//cuda:cudart + "\
        "@local_config_cuda//cuda:cuda_driver + "\
        "@local_config_cuda//cuda:cudnn + "\
        "@local_config_cuda//cuda:curand + "\
        "@local_config_cuda//cuda:cusolver + "\
    Plain Text
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Wed Mar 06 21:54:13 GMT 2024
    - 13.2K bytes
    - Viewed (0)
  9. .github/workflows/update-rbe.yml

              sed -i"" "/\"$1\"/ s/sha256:[[:alnum:]]*/$digest/g" tensorflow/tools/toolchains/remote_config/configs.bzl
              echo "success."
            }
            # See https://github.com/tensorflow/tensorflow/blob/master/tensorflow/tools/toolchains/remote_config/configs.bzl
            # This is a mapping of name_container_map keys under sigbuild_tf_configs
            # to tag names on gcr.io/tensorflow-sigs/build.
            # TF 2.9
    Others
    - Registered: Tue May 07 12:40:20 GMT 2024
    - Last Modified: Wed Apr 10 15:40:34 GMT 2024
    - 7.2K bytes
    - Viewed (0)
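    Note on the workflow excerpt above: the sed call rewrites the sha256 digest on whichever configs.bzl line matches the given key. A standalone sketch of that substitution; the key and digests below are hypothetical and only demonstrate the pattern:

        # Hypothetical key and digests, used only to exercise the sed pattern
        # from update-rbe.yml against a throwaway sample file.
        digest="sha256:0123456789abcdef"
        echo '    "sigbuild-sample": "docker://gcr.io/tensorflow-sigs/build@sha256:deadbeef",' > configs_sample.bzl
        sed -i"" "/\"sigbuild-sample\"/ s/sha256:[[:alnum:]]*/$digest/g" configs_sample.bzl
        cat configs_sample.bzl   # the line now ends with @sha256:0123456789abcdef",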
  10. ci/official/envs/linux_x86_tpu

    # ==============================================================================
    source ci/official/envs/linux_x86
    TFCI_BAZEL_COMMON_ARGS="--repo_env=TF_PYTHON_VERSION=$TFCI_PYTHON_VERSION --config release_cpu_linux --config=tpu"
    TFCI_BAZEL_TARGET_SELECTING_CONFIG_PREFIX=linux_tpu
    TFCI_BUILD_PIP_PACKAGE_ARGS="--repo_env=WHEEL_NAME=tensorflow_tpu"
    TFCI_LIB_SUFFIX="-tpu-linux-x86_64"
    TFCI_WHL_BAZEL_TEST_ENABLE=0
    TFCI_WHL_IMPORT_TEST_ENABLE=0
    Plain Text
    - Registered: Tue Apr 30 12:39:09 GMT 2024
    - Last Modified: Wed Mar 27 21:16:27 GMT 2024
    - 1.2K bytes
    - Viewed (0)
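    Note on the env file above: it only assigns TFCI_* variables; CI scripts source it and then read those variables, as the wheel.sh excerpt in result 5 shows. A minimal sketch of that flow, assuming the repository root as the working directory and an arbitrary Python version:

        # Arbitrary Python version, for illustration; how TFCI_PYTHON_VERSION is
        # normally provided is outside this excerpt.
        export TFCI_PYTHON_VERSION=3.11
        # Sourcing the env file (which itself sources linux_x86) populates the TFCI_* variables.
        source ci/official/envs/linux_x86_tpu
        # CI scripts then pass them straight to bazel, as in the wheel.sh excerpt (result 5).
        echo "common args: $TFCI_BAZEL_COMMON_ARGS"
        echo "test config: ${TFCI_BAZEL_TARGET_SELECTING_CONFIG_PREFIX}_wheel_test"
        echo "wheel tests enabled? $TFCI_WHL_BAZEL_TEST_ENABLE"   # 0 for this TPU env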