Skip to content

Commit

Permalink
Force clean+fetch when re-running configure with different settings. (t…
Browse files Browse the repository at this point in the history
…ensorflow#4285)

* Run bazel clean and bazel fetch in the configure script even when building
  without GPU support to force clean+fetch if the user re-runs ./configure
  with a different setting.
* Print a more actionable error message if the user attempts to build with
  --config=cuda but did not configure TensorFlow to build with GPU support.
* Update the BUILD file in @local_config_cuda to use repository-local labels.

Fixes tensorflow#4105
  • Loading branch information
davidzchen authored and martinwicke committed Sep 21, 2016
1 parent 754048a commit 4316aeb
Show file tree
Hide file tree
Showing 4 changed files with 47 additions and 7 deletions.
9 changes: 7 additions & 2 deletions configure
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,11 @@ pushd `dirname $0` #> /dev/null
SOURCE_BASE_DIR=`pwd -P`
popd > /dev/null

# Wipe all Bazel outputs and cached external repositories, then re-fetch the
# dependencies of //tensorflow/... . Running this on every ./configure pass
# (with or without GPU support) forces a clean+fetch when the user re-runs
# ./configure with different settings.
function bazel_clean_and_fetch() {
bazel clean --expunge
bazel fetch //tensorflow/...
}

## Set up python-related environment settings
while true; do
fromuser=""
Expand Down Expand Up @@ -114,6 +119,7 @@ done
export TF_NEED_CUDA
if [ "$TF_NEED_CUDA" == "0" ]; then
echo "Configuration finished"
bazel_clean_and_fetch
exit
fi

Expand Down Expand Up @@ -300,7 +306,6 @@ EOF
TF_CUDA_COMPUTE_CAPABILITIES=""
done

bazel clean --expunge
bazel fetch //...
bazel_clean_and_fetch

echo "Configuration finished"
10 changes: 6 additions & 4 deletions third_party/gpus/crosstool/BUILD.tpl
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,12 @@ licenses(["restricted"])

package(default_visibility = ["//visibility:public"])

filegroup(
name = "crosstool",
srcs = ["CROSSTOOL"],
output_licenses = ["unencumbered"],
cc_toolchain_suite(
name = "toolchain",
toolchains = {
"local|compiler": ":cc-compiler-local",
"darwin|compiler": ":cc-compiler-darwin",
},
)

cc_toolchain(
Expand Down
33 changes: 33 additions & 0 deletions third_party/gpus/cuda_configure.bzl
Original file line number Diff line number Diff line change
Expand Up @@ -331,6 +331,33 @@ def _file(repository_ctx, label):
{})


_DUMMY_CROSSTOOL_BZL_FILE = """
def error_gpu_disabled():
fail("ERROR: Building with --config=cuda but TensorFlow is not configured " +
"to build with GPU support. Please re-run ./configure and enter 'Y' " +
"at the prompt to build with GPU support.")
native.genrule(
name = "error_gen_crosstool",
outs = ["CROSSTOOL"],
cmd = "echo 'Should not be run.' && exit 1",
)
native.filegroup(
name = "crosstool",
srcs = [":CROSSTOOL"],
output_licenses = ["unencumbered"],
)
"""


# BUILD file content for the dummy crosstool package: it loads and invokes
# error_gpu_disabled() from //crosstool:error_gpu_disabled.bzl so any attempt
# to use the crosstool targets fails with the actionable error message.
_DUMMY_CROSSTOOL_BUILD_FILE = """
load("//crosstool:error_gpu_disabled.bzl", "error_gpu_disabled")
error_gpu_disabled()
"""


def _create_dummy_repository(repository_ctx):
cpu_value = _cpu_value(repository_ctx)
symlink_files = _cuda_symlink_files(cpu_value, _DEFAULT_CUDA_VERSION,
Expand Down Expand Up @@ -371,6 +398,12 @@ def _create_dummy_repository(repository_ctx):
for c in _DEFAULT_CUDA_COMPUTE_CAPABILITIES]),
})

# If cuda_configure is not configured to build with GPU support, and the user
# attempts to build with --config=cuda, add a dummy build rule to intercept
# this and fail with an actionable error message.
repository_ctx.file("crosstool/error_gpu_disabled.bzl",
_DUMMY_CROSSTOOL_BZL_FILE)
repository_ctx.file("crosstool/BUILD", _DUMMY_CROSSTOOL_BUILD_FILE)

def _symlink_dir(repository_ctx, src_dir, dest_dir):
"""Symlinks all the files in a directory.
Expand Down
2 changes: 1 addition & 1 deletion tools/bazel.rc.template
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
build:cuda --crosstool_top=@local_config_cuda//crosstool
build:cuda --crosstool_top=@local_config_cuda//crosstool:toolchain
build:cuda --define=using_cuda=true --define=using_cuda_nvcc=true

build --force_python=py$PYTHON_MAJOR_VERSION
Expand Down

0 comments on commit 4316aeb

Please sign in to comment.