Skip to content

Commit 1df6acb

Browse files
committed
infra: CI updates
1 parent 98520db commit 1df6acb

File tree

5 files changed

+78
-79
lines changed

5 files changed

+78
-79
lines changed

.gitignore

+2-1
Original file line number | Diff line number | Diff line change
@@ -74,4 +74,5 @@ tests/py/dynamo/models/*.ts
7474
tests/py/dynamo/models/*.ep
7575
*.deb
7676
*.tar.xz
77-
MODULE.bazel.lock
77+
MODULE.bazel.lock
78+
*.whl

MODULE.bazel

+7-8
Original file line number | Diff line number | Diff line change
@@ -69,7 +69,6 @@ http_archive(
6969
urls = ["https://download.pytorch.org/libtorch/nightly/cu128/libtorch-cxx11-abi-shared-with-deps-latest.zip"],
7070
)
7171

72-
7372
http_archive(
7473
name = "libtorch_win",
7574
build_file = "@//third_party/libtorch:BUILD",
@@ -81,13 +80,13 @@ http_archive(
8180
# It is possible to specify a wheel file to use as the libtorch source by providing the URL below and
8281
# using the build flag `--//toolchains/dep_src:torch="whl"`
8382

84-
http_archive(
85-
name = "torch_whl",
86-
build_file = "@//third_party/libtorch:BUILD",
87-
strip_prefix = "torch",
88-
type = "zip",
89-
urls = ["https://pypi.jetson-ai-lab.dev/jp6/cu126/+f/52c/2cbdd62b78f32/torch-2.7.0-cp310-cp310-linux_aarch64.whl#sha256=52c2cbdd62b78f32c51fa178212e4721241a2ba9e0c4d7d690dd808bd890d51b"],
90-
)
83+
# http_archive(
84+
# name = "torch_whl",
85+
# build_file = "@//third_party/libtorch:BUILD",
86+
# strip_prefix = "torch",
87+
# type = "zip",
88+
# urls = ["https://pypi.jetson-ai-lab.dev/jp6/cu126/+f/52c/2cbdd62b78f32/torch-2.7.0-cp310-cp310-linux_aarch64.whl#sha256=52c2cbdd62b78f32c51fa178212e4721241a2ba9e0c4d7d690dd808bd890d51b"],
89+
# )
9190

9291
# Download these tarballs manually from the NVIDIA website
9392
# Either place them in the distdir directory in third_party and use the --distdir flag

pyproject.toml

+4-4
Original file line number | Diff line number | Diff line change
@@ -8,7 +8,7 @@ requires = [
88
"cffi>=1.15.1",
99
"typing-extensions>=4.7.0",
1010
"future>=0.18.3",
11-
"tensorrt->=10.9.0,<10.10.0",
11+
"tensorrt-cu12>=10.9.0,<10.10.0",
1212
"torch>=2.8.0.dev,<2.9.0",
1313
"pybind11==2.6.2",
1414
"numpy",
@@ -57,9 +57,9 @@ keywords = [
5757
dependencies = [
5858
"torch>=2.8.0.dev,<2.9.0",
5959
"tensorrt>=10.9.0,<10.10.0",
60-
"tensorrt->=10.9.0,<10.10.0",
61-
"tensorrt--bindings>=10.9.0,<10.10.0",
62-
"tensorrt--libs>=10.9.0,<10.10.0",
60+
"tensorrt-cu12>=10.9.0,<10.10.0",
61+
"tensorrt-cu12-bindings>=10.9.0,<10.10.0",
62+
"tensorrt-cu12-libs>=10.9.0,<10.10.0",
6363
"packaging>=23",
6464
"numpy",
6565
"typing-extensions>=4.7.0",

setup.py

+6-53
Original file line number | Diff line number | Diff line change
@@ -79,7 +79,7 @@ def load_dep_info():
7979
dir_path = os.path.join(str(get_root_dir()), "py")
8080

8181
PRE_CXX11_ABI = False
82-
JETPACK_VERSION = None
82+
IS_JETPACK = False
8383
PY_ONLY = False
8484
NO_TS = False
8585
LEGACY = False
@@ -136,40 +136,10 @@ def load_dep_info():
136136
if ci_env_var == "1":
137137
CI_BUILD = True
138138

139-
if "--use-pre-cxx11-abi" in sys.argv:
140-
sys.argv.remove("--use-pre-cxx11-abi")
141-
PRE_CXX11_ABI = True
142-
143-
if (pre_cxx11_abi_env_var := os.environ.get("USE_PRE_CXX11_ABI")) is not None:
144-
if pre_cxx11_abi_env_var == "1":
145-
PRE_CXX11_ABI = True
146-
147139
if platform.uname().processor == "aarch64":
148-
if "--jetpack-version" in sys.argv:
149-
version_idx = sys.argv.index("--jetpack-version") + 1
150-
version = sys.argv[version_idx]
151-
sys.argv.remove(version)
152-
sys.argv.remove("--jetpack-version")
153-
if version == "4.5":
154-
JETPACK_VERSION = "4.5"
155-
elif version == "4.6":
156-
JETPACK_VERSION = "4.6"
157-
elif version == "5.0":
158-
JETPACK_VERSION = "5.0"
159-
elif version == "6.1":
160-
JETPACK_VERSION = "6.1"
161-
162-
if not JETPACK_VERSION:
163-
warnings.warn(
164-
"Assuming jetpack version to be 6.1, if not use the --jetpack-version option"
165-
)
166-
JETPACK_VERSION = "6.1"
167-
168-
if PRE_CXX11_ABI:
169-
warnings.warn(
170-
"Jetson platform detected. Please remove --use-pre-cxx11-abi flag if you are using it."
171-
)
172-
140+
if "--jetpack" in sys.argv:
141+
sys.argv.remove("--jetpack")
142+
IS_JETPACK = True
173143

174144
BAZEL_EXE = None
175145
if not PY_ONLY:
@@ -204,30 +174,13 @@ def build_libtorchtrt_cxx11_abi(
204174
if target_python:
205175
cmd.append("--config=python")
206176

207-
if pre_cxx11_abi:
208-
cmd.append("--config=pre_cxx11_abi")
209-
print("using PRE CXX11 ABI build")
210-
else:
211-
cmd.append("--config=cxx11_abi")
212-
print("using CXX11 ABI build")
213-
214177
if IS_WINDOWS:
215178
cmd.append("--config=windows")
216179
else:
217180
cmd.append("--config=linux")
218181

219-
if JETPACK_VERSION == "4.5":
220-
cmd.append("--platforms=//toolchains:jetpack_4.5")
221-
print("Jetpack version: 4.5")
222-
elif JETPACK_VERSION == "4.6":
223-
cmd.append("--platforms=//toolchains:jetpack_4.6")
224-
print("Jetpack version: 4.6")
225-
elif JETPACK_VERSION == "5.0":
226-
cmd.append("--platforms=//toolchains:jetpack_5.0")
227-
print("Jetpack version: 5.0")
228-
elif JETPACK_VERSION == "6.1":
229-
cmd.append("--platforms=//toolchains:jetpack_6.1")
230-
print("Jetpack version: 6.1")
182+
if IS_JETPACK:
183+
cmd.append("--config=jetpack")
231184

232185
if CI_BUILD:
233186
cmd.append("--platforms=//toolchains:ci_rhel_x86_64_linux")

toolchains/ci_workspaces/MODULE.bazel.tmpl

+59-13
Original file line number | Diff line number | Diff line change
@@ -4,10 +4,10 @@ module(
44
version = "${BUILD_VERSION}"
55
)
66

7-
bazel_dep(name = "googletest", version = "1.14.0")
8-
bazel_dep(name = "platforms", version = "0.0.10")
9-
bazel_dep(name = "rules_cc", version = "0.0.9")
10-
bazel_dep(name = "rules_python", version = "0.34.0")
7+
bazel_dep(name = "googletest", version = "1.16.0")
8+
bazel_dep(name = "platforms", version = "0.0.11")
9+
bazel_dep(name = "rules_cc", version = "0.1.1")
10+
bazel_dep(name = "rules_python", version = "1.3.0")
1111

1212
python = use_extension("@rules_python//python/extensions:python.bzl", "python")
1313
python.toolchain(
@@ -27,7 +27,7 @@ local_repository = use_repo_rule("@bazel_tools//tools/build_defs/repo:local.bzl"
2727
# External dependency for torch_tensorrt if you already have precompiled binaries.
2828
local_repository(
2929
name = "torch_tensorrt",
30-
path = "/opt/conda/lib/python3.8/site-packages/torch_tensorrt",
30+
path = "/opt/conda/lib/python3.10/site-packages/torch_tensorrt",
3131
)
3232

3333

@@ -40,6 +40,15 @@ new_local_repository(
4040
path = "${CUDA_HOME}",
4141
)
4242

43+
# Server Arm (SBSA) and Jetson Jetpack (L4T) use different versions of CUDA and TensorRT
44+
# These versions can be selected using the flag `--//toolchains/dep_collection:compute_libs="jetpack"`
45+
46+
new_local_repository(
47+
name = "cuda_l4t",
48+
build_file = "@//third_party/cuda:BUILD",
49+
path = "/usr/local/cuda-12.6",
50+
)
51+
4352
new_local_repository(
4453
name = "cuda_win",
4554
build_file = "@//third_party/cuda:BUILD",
@@ -53,12 +62,31 @@ http_archive = use_repo_rule("@bazel_tools//tools/build_defs/repo:http.bzl", "ht
5362
# Tarballs and fetched dependencies (default - use in cases when building from precompiled bin and tarballs)
5463
#############################################################################################################
5564

56-
http_archive(
57-
name = "libtorch",
58-
build_file = "@//third_party/libtorch:BUILD",
59-
strip_prefix = "libtorch",
60-
urls = ["https://download.pytorch.org/libtorch/${CHANNEL}/${CU_VERSION}/libtorch-cxx11-abi-shared-with-deps-latest.zip"],
61-
)
65+
# http_archive(
66+
# name = "libtorch",
67+
# build_file = "@//third_party/libtorch:BUILD",
68+
# strip_prefix = "libtorch",
69+
# urls = ["https://download.pytorch.org/libtorch/${CHANNEL}/${CU_VERSION}/libtorch-cxx11-abi-shared-with-deps-latest.zip"],
70+
# )
71+
72+
# http_archive(
73+
# name = "libtorch_win",
74+
# build_file = "@//third_party/libtorch:BUILD",
75+
# strip_prefix = "libtorch",
76+
# urls = ["https://download.pytorch.org/libtorch//${CHANNEL}/${CU_VERSION}/libtorch-win-shared-with-deps-latest.zip"],
77+
# )
78+
79+
80+
# It is possible to specify a wheel file to use as the libtorch source by providing the URL below and
81+
# using the build flag `--//toolchains/dep_src:torch="whl"`
82+
83+
# http_archive(
84+
# name = "torch_whl",
85+
# build_file = "@//third_party/libtorch:BUILD",
86+
# strip_prefix = "torch",
87+
# type = "zip",
88+
# urls = ["file:///${TORCH_WHL_PATH}"],
89+
# )
6290

6391
# Download these tarballs manually from the NVIDIA website
6492
# Either place them in the distdir directory in third_party and use the --distdir flag
@@ -73,6 +101,24 @@ http_archive(
73101
],
74102
)
75103

104+
http_archive(
105+
name = "tensorrt_sbsa",
106+
build_file = "@//third_party/tensorrt/archive:BUILD",
107+
strip_prefix = "TensorRT-10.9.0.34",
108+
urls = [
109+
"https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.9.0/tars/TensorRT-10.9.0.34.Linux.aarch64-gnu.cuda-12.8.tar.gz",
110+
],
111+
)
112+
113+
http_archive(
114+
name = "tensorrt_l4t",
115+
build_file = "@//third_party/tensorrt/archive:BUILD",
116+
strip_prefix = "TensorRT-10.3.0.26",
117+
urls = [
118+
"https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.3.0/tars/TensorRT-10.3.0.26.l4t.aarch64-gnu.cuda-12.6.tar.gz",
119+
],
120+
)
121+
76122
http_archive(
77123
name = "tensorrt_win",
78124
build_file = "@//third_party/tensorrt/archive:BUILD",
@@ -95,13 +141,13 @@ http_archive(
95141
# for both versions here and do not use --config=pre-cxx11-abi
96142

97143
new_local_repository(
98-
name = "libtorch_win",
144+
name = "libtorch",
99145
path = "${TORCH_INSTALL_PATH}",
100146
build_file = "third_party/libtorch/BUILD"
101147
)
102148

103149
new_local_repository(
104-
name = "libtorch_pre_cxx11_abi",
150+
name = "libtorch_win",
105151
path = "${TORCH_INSTALL_PATH}",
106152
build_file = "third_party/libtorch/BUILD"
107153
)

0 commit comments

Comments (0)