    runs-on: ${{inputs.runner_name}}
    container:
      image: ${{ inputs.docker_image }}
+     # TODO: options may differ for other GPU cards
      options: -u 1001 --device=/dev/dri -v /dev/dri/by-path:/dev/dri/by-path --privileged --cap-add SYS_ADMIN

    steps:
@@ -105,16 +106,17 @@ jobs:
      - name: Checkout LLVM
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1

-     - name: Get information about platform
-       if: ${{ always() }}
-       run: ${{ env.CURRENT_DIR }}/unified-runtime/.github/scripts/get_system_info.sh
-
+     # TODO: how do other workflows (using dockers) deal with the path change?
+     # Also, note: I don't think it's needed anywhere now - I guess we can just use relative paths...?
      - name: Set working directory as environment variable
-       run: echo "CURRENT_DIR=$(pwd)" >> $GITHUB_ENV
+       run: |
+         pwd
+         echo "${{github.workspace}}"

+     # TODO: move installation of python3-venv to docker
      # Latest distros do not allow global pip installation
      - name: Install UR python dependencies in venv
-       working-directory: ${{ env.CURRENT_DIR }}/unified-runtime
+       working-directory: ./unified-runtime
        run: |
          sudo apt update
          sudo apt install -y python3-venv
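
The hunk ends before the virtual environment is actually created, so those lines are not shown. For context, a minimal sketch of the pattern such a step typically continues with (the venv path and requirements file are illustrative assumptions, not the PR's actual lines):

          # assumed continuation: create a venv and install UR's python deps into it
          python3 -m venv .venv
          . .venv/bin/activate
          pip install -r third_party/requirements.txt  # hypothetical path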
@@ -125,64 +127,36 @@ jobs:

      - name: Download DPC++
        run: |
-         wget -O ${{ env.CURRENT_DIR }}/dpcpp_compiler.tar.gz https://github.com/intel/llvm/releases/download/nightly-2024-12-12/sycl_linux.tar.gz
-         mkdir -p ${{ env.CURRENT_DIR }}/dpcpp_compiler
-         tar -xvf ${{ env.CURRENT_DIR }}/dpcpp_compiler.tar.gz -C dpcpp_compiler
-
-     - name: Install Intel Level Zero loader
-       working-directory: ${{ env.CURRENT_DIR }}
-       run: |
-         curl -L https://github.com/oneapi-src/level-zero/archive/refs/heads/master.tar.gz -o level-zero.tar.gz
-         tar -xzf level-zero.tar.gz
-         cd level-zero-master
-
-         mkdir build && cd build
-         cmake .. -D CMAKE_BUILD_TYPE=Release -D CMAKE_INSTALL_PREFIX=usr/local/install
-         cmake --build . --target package
-         cmake --build . --target install
+         wget -O dpcpp_compiler.tar.gz https://github.com/intel/llvm/releases/download/nightly-2024-12-12/sycl_linux.tar.gz
+         mkdir -p dpcpp_compiler
+         tar -xvf dpcpp_compiler.tar.gz -C dpcpp_compiler

+     # TODO_1: move this installation to docker? Perhaps there's another docker image that already has these packages? We aren't the only ones using L0 ;-)
+     # TODO_2: if needed here, only install it for Level Zero jobs; and install the required stuff for other adapters under other 'if's
+     #
+     # It seems there's an image for Intel drivers (L0):
+     #   ghcr.io/intel/llvm/ubuntu2404_intel_drivers:alldeps
+     # and for other adapters (HIP, CUDA):
+     #   ghcr.io/intel/llvm/ubuntu2204_build:latest
+     # or
+     #   ghcr.io/intel/llvm/ubuntu2404_build:latest
+     # Not sure about OpenCL and NativeCPU - to be checked.
      - name: Install Intel Level Zero GPU
        run: |
+         wget -qO - https://repositories.intel.com/gpu/intel-graphics.key | \
+           sudo gpg --yes --dearmor --output /usr/share/keyrings/intel-graphics.gpg
+         echo "deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/gpu/ubuntu jammy unified" | \
+           sudo tee /etc/apt/sources.list.d/intel-gpu-jammy.list
          sudo apt-get update
-         sudo apt-get install -y gnupg2 gpg-agent curl
-         curl -fsSL https://repositories.intel.com/gpu/intel-graphics.key | sudo gpg --dearmor -o /usr/share/keyrings/intel-graphics.gpg
-         echo "deb [arch=amd64 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/gpu/ubuntu jammy/lts/2350 unified" | sudo tee /etc/apt/sources.list.d/intel-gpu-jammy.list
-         sudo apt update
-         sudo apt install -y \
-           intel-opencl-icd intel-level-zero-gpu \
-           intel-media-va-driver-non-free libmfx1 libmfxgen1 libvpl2 \
-           libegl-mesa0 libegl1-mesa libegl1-mesa-dev libgbm1 libgl1-mesa-dev libgl1-mesa-dri \
-           libglapi-mesa libgles2-mesa-dev libglx-mesa0 libigdgmm12 libxatracker2 mesa-va-drivers \
-           mesa-vdpau-drivers mesa-vulkan-drivers va-driver-all vainfo hwinfo clinfo
-
-     - name: Add L0 to PATH
-       run: |
-         export PATH=/__w/llvm/llvm/level-zero-master/build/usr/local/install/bin:$PATH
-         export LD_LIBRARY_PATH=/__w/llvm/llvm/level-zero-master/build/usr/local/install/lib:$LD_LIBRARY_PATH
-         export CPATH=/__w/llvm/llvm/level-zero-master/build/usr/local/install/include:$CPATH
-         export LIBRARY_PATH=/__w/llvm/llvm/level-zero-master/build/usr/local/install/lib:$LIBRARY_PATH
-
-         echo 'export PATH=/__w/llvm/llvm/level-zero-master/build/usr/local/install/bin:$PATH' >> ~/.bashrc
-         echo 'export LD_LIBRARY_PATH=/__w/llvm/llvm/level-zero-master/build/usr/local/install/lib:$LD_LIBRARY_PATH' >> ~/.bashrc
-         echo 'export CPATH=/__w/llvm/llvm/level-zero-master/build/usr/local/install/include:$CPATH' >> ~/.bashrc
-         echo 'export LIBRARY_PATH=/__w/llvm/llvm/level-zero-master/build/usr/local/install/lib:$LIBRARY_PATH' >> ~/.bashrc
-         . ~/.bashrc
-       shell: bash
-
-     - name: Check PATH
-       run: |
-         echo $PATH
-         echo $LD_LIBRARY_PATH
-         echo $CPATH
-         echo $LIBRARY_PATH
-         ls -l /__w/llvm/llvm/level-zero-master/build/usr/local/install/lib
+         sudo apt-get install -y libze-intel-gpu1 libze1 libze-dev intel-opencl-icd
+         # libstdc++-12-dev
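
The PR also deletes a commented-out GPU availability check further down; if the slimmed-down package list above ever needs debugging, a throwaway step reusing those removed commands would confirm the loader and device are visible:

      - name: Check Level Zero installation  # hypothetical debug step, not part of the PR
        run: |
          dpkg -l | grep -E 'libze|intel-opencl-icd'  # packages installed above
          ldconfig -p | grep libze                    # loader libraries visible to the dynamic linker
          ls -la /dev/dri/                            # device nodes passed in via the container options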

      - name: Configure Unified Runtime project
-       working-directory: ${{ env.CURRENT_DIR }}/unified-runtime
        # ">" is used to avoid adding "\" at the end of each line; this command is quite long
        run: >
          cmake
-         -B${{ env.CURRENT_DIR }}/build
+         -S unified-runtime
+         -B build
          -DCMAKE_C_COMPILER=${{matrix.compiler.c}}
          -DCMAKE_CXX_COMPILER=${{matrix.compiler.cxx}}
          -DCMAKE_BUILD_TYPE=${{matrix.build_type}}
@@ -194,53 +168,34 @@ jobs:
          ${{ matrix.adapter.other_name != '' && format('-DUR_BUILD_ADAPTER_{0}=ON', matrix.adapter.other_name) || '' }}
          -DUR_STATIC_LOADER=${{matrix.adapter.static_Loader}}
          -DUR_STATIC_ADAPTER_${{matrix.adapter.name}}=${{matrix.adapter.static_adapter}}
-         -DUR_DPCXX=${{ env.CURRENT_DIR }}/dpcpp_compiler/bin/clang++
-         -DUR_SYCL_LIBRARY_DIR=${{ env.CURRENT_DIR }}/dpcpp_compiler/lib
-         -DCMAKE_INSTALL_PREFIX=${{ env.CURRENT_DIR }}/install
+         -DUR_DPCXX=./dpcpp_compiler/bin/clang++
+         -DUR_SYCL_LIBRARY_DIR=./dpcpp_compiler/lib
+         -DCMAKE_INSTALL_PREFIX=./install
          ${{ matrix.adapter.name == 'HIP' && '-DUR_CONFORMANCE_AMD_ARCH=gfx1030' || '' }}
          ${{ matrix.adapter.name == 'HIP' && '-DUR_HIP_PLATFORM=AMD' || '' }}
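
As the comment above says, the ">" folded scalar lets the long cmake invocation span many lines without trailing backslashes: YAML joins the indented lines with single spaces before the shell ever sees them. A minimal illustration:

        run: >
          echo
          one two
          three
        # the shell receives a single command line: echo one two three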

      - name: Build
        # This is so that device binaries can find the sycl runtime library
-       run: cmake --build ${{ env.CURRENT_DIR }}/build -j $(nproc)
+       run: cmake --build build -j $(nproc)

      - name: Install
        # This is to check that install command does not fail
-       run: cmake --install ${{ env.CURRENT_DIR }}/build
-
-     # - name: Run ldd on libur_* libraries
-     #   run: |
-     #     cd ${{ env.CURRENT_DIR }}
-     #     pwd
-     #     ls
-     #     cd build
-     #     pwd
-     #     ls
-     #     ldd lib/libur_*
-
-     # - name: Check if GPU is avaliable
-     #   run: |
-     #     ls -la /dev/dri/
-     #     id
-     #     dpkg -l | grep level-zero
-     #     ldconfig -p | grep libze
-     #     ls -la /dev/dri/
+       run: cmake --install build

      - name: Test adapter specific
        env:
          ZE_ENABLE_LOADER_DEBUG_TRACE: 1
          ZE_DEBUG: 1
-       # run: ctest -C ${{matrix.build_type}} --test-dir ${{ env.CURRENT_DIR }}/build --output-on-failure -L "adapter-specific" -E "memcheck" --timeout 600 -VV
-       run: ctest -C ${{ matrix.build_type }} --test-dir ${{ env.CURRENT_DIR }}/build --output-on-failure -L "adapter-specific" -R "test-adapter-level_zero$" --timeout 600 -VV
+       run: ctest -C ${{matrix.build_type}} --test-dir build --output-on-failure -L "adapter-specific" -E "memcheck" --timeout 600
        # Don't run adapter specific tests when building multiple adapters
        if: ${{ matrix.adapter.other_name == '' }}
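
For reference, the ctest selection flags used in the test steps: -L keeps tests whose labels match a regex, -E excludes tests whose names match, and -R (seen in the removed line) includes tests by name. A quick sketch:

        # run everything labeled "adapter-specific", skipping memcheck variants
        ctest --test-dir build -L "adapter-specific" -E "memcheck"
        # run a single test by (anchored) name - what the removed -R line did
        ctest --test-dir build -R "test-adapter-level_zero$"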

      - name: Test adapters
        env:
          ZE_ENABLE_LOADER_DEBUG_TRACE: 1
          ZE_DEBUG: 1
-       run: env UR_CTS_ADAPTER_PLATFORM="${{matrix.adapter.platform}}" ctest -C ${{matrix.build_type}} --test-dir ${{ env.CURRENT_DIR }}/build --output-on-failure -L "conformance" --timeout 600 -VV
+       run: env UR_CTS_ADAPTER_PLATFORM="${{matrix.adapter.platform}}" ctest -C ${{matrix.build_type}} --test-dir build --output-on-failure -L "conformance" --timeout 600

      - name: Get information about platform
        if: ${{ always() }}
-       run: ${{ env.CURRENT_DIR }}/unified-runtime/.github/scripts/get_system_info.sh
+       run: ./unified-runtime/.github/scripts/get_system_info.sh
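
Since the job reads inputs.runner_name and inputs.docker_image, it is presumably invoked via workflow_call; a minimal sketch of the caller side (workflow path, runner label, and image tag are assumptions for illustration):

jobs:
  build-hw:
    uses: ./.github/workflows/build-hw-reusable.yml  # hypothetical path to this workflow file
    with:
      runner_name: LEVEL_ZERO                        # hypothetical runner label
      docker_image: ghcr.io/intel/llvm/ubuntu2404_intel_drivers:alldeps  # one of the images from the TODO above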