defs.bzl
"""
Macros for selecting with / without various GPU libraries. Most of these are meant to be used
directly by tensorflow in place of their build's own configure.py + bazel-gen system.
"""
load("@bazel_skylib//lib:selects.bzl", "selects")

def if_cuda(if_true, if_false = []):
    """Helper for selecting based on whether CUDA is configured."""
    return selects.with_or({
        "@//tools/config:cuda_enabled_and_capable": if_true,
        "//conditions:default": if_false,
    })
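
# Illustrative usage sketch (not part of the original file): a BUILD rule could use
# if_cuda() to splice in CUDA-only sources and flags. The target and file names
# below are hypothetical placeholders.
#
#     cc_library(
#         name = "gpu_ops",
#         srcs = ["ops_cpu.cpp"] + if_cuda(["ops_cuda.cpp"]),
#         copts = cuda_default_copts(),  # adds -DGOOGLE_CUDA=1 when CUDA is enabled
#     )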

def if_tensorrt(if_true, if_false = []):
    """Helper for selecting based on whether TensorRT is configured."""
    return select({
        "//conditions:default": if_false,
    })

def if_rocm(if_true, if_false = []):
    """Helper for selecting based on whether ROCM is configured."""
    return select({
        "//conditions:default": if_false,
    })

def if_sycl(if_true, if_false = []):
    """Helper for selecting based on whether SYCL/ComputeCPP is configured."""

    # NOTE: Tensorflow expects some strange behavior (see their if_sycl) if we
    # actually plan on supporting this at some point.
    return select({
        "//conditions:default": if_false,
    })

def if_ccpp(if_true, if_false = []):
    """Helper for selecting based on whether ComputeCPP is configured."""
    return select({
        "//conditions:default": if_false,
    })

def cuda_default_copts():
    return if_cuda(["-DGOOGLE_CUDA=1"])

def cuda_default_features():
    return if_cuda(["-per_object_debug_info", "-use_header_modules", "cuda_clang"])

def rocm_default_copts():
    return if_rocm(["-x", "rocm"])

def rocm_copts(opts = []):
    return rocm_default_copts() + if_rocm(opts)

def cuda_is_configured():
    # FIXME(dcollins): currently only used by tensorflow's xla stuff, which we aren't
    # building. However, bazel query hits it so this needs to be defined. Because bazel
    # doesn't actually resolve config at macro expansion time, `select` can't be used
    # here (since xla expects lists of strings and not lists of select objects).
    # Instead, the xla build rules must be rewritten to use `if_cuda_is_configured`.
    return False

def if_cuda_is_configured(x):
    return if_cuda(x, [])

def if_rocm_is_configured(x):
    return if_rocm(x, [])
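
# Illustrative sketch (not part of the original file): because cuda_is_configured()
# resolves at macro-expansion time and always returns False in this build, rules that
# need a config-dependent list of strings should call if_cuda_is_configured() /
# if_rocm_is_configured(), which defer the decision to a select(). The target, file,
# and define names below are hypothetical placeholders.
#
#     cc_library(
#         name = "backend",
#         srcs = ["backend_common.cpp"] + if_cuda_is_configured(["backend_cuda.cpp"]),
#         copts = rocm_copts() + if_rocm_is_configured(["-DUSE_ROCM"]),
#     )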