# Build the demo image from the default Dockerfile, then open an interactive shell in it.
docker build -t algo-rust-bert-demo:1 .
docker run -it algo-rust-bert-demo:1 /bin/sh
# Point LIBTORCH at the libtorch bundled inside the pip-installed torch package.
export LIBTORCH=$(python -c 'import torch; from pathlib import Path; print(Path(torch.__file__).parent)')
# macOS dynamic-loader search path (ignored on Linux).
export DYLD_LIBRARY_PATH=${LIBTORCH}/lib
# Linux dynamic-loader search path.
export LD_LIBRARY_PATH=${LIBTORCH}/lib:$LD_LIBRARY_PATH
# NOTE(review): presumably set because the pip libtorch wheels use the pre-C++11 ABI — confirm for your wheel.
export LIBTORCH_CXX11_ABI=0
# Architecture-specific image builds (x86_64 vs arm64 Dockerfiles).
docker build -t algo-rust-bert-demo:1 -f ./Dockerfile.amd .
docker build -t algo-rust-bert-demo:1 -f ./Dockerfile.arm .
# Create the micromamba Python env for Apple silicon.
# (Fixed: '//' is not a shell comment — bash would try to execute it; also typo "pythin".)
micromamba env create -f environment.yml --platform osx-arm64
# Install torch 2.1.0 (plus matching torchvision/torchaudio).
# NOTE(review): the old comment said "v0.13.1" — presumably the tch-rs crate version
# that pairs with libtorch 2.1.0; confirm against the crate's compatibility table.
pip install torch==2.1.0 torchvision==0.16.0 torchaudio==2.1.0
# Point the build at the pip-installed libtorch and set loader paths.
export LIBTORCH=$(python3 -c 'import torch; from pathlib import Path; print(Path(torch.__file__).parent)')
export DYLD_LIBRARY_PATH=${LIBTORCH}/lib
export LD_LIBRARY_PATH=${LIBTORCH}/lib:$LD_LIBRARY_PATH
export LIBTORCH_CXX11_ABI=0
# Verify the Python interpreter's platform (should report an arm64 target on Apple silicon).
# (Fixed: '//' is not a shell comment.)
python -c "import sysconfig;print(sysconfig.get_platform())"
# Verify the machine architecture.
uname -m
# Verify the torch package imports and print its version.
python -c "import torch;print(torch.__version__);"
# Model cache location — presumably where rust-bert stores downloaded model files on macOS.
# (Fixed: the bare line "model address" was not valid shell.)
ls $HOME/Library/Caches/.rustbert
# Push large files to GitHub via Git LFS.
# (Fixed: the bare line "git push large data to github" was not valid shell.)
git lfs install
git lfs track "./resources/all-MiniLM-L12-v2/rust_model.ot"
# Rewrite history to drop the large file from earlier commits so the push is accepted.
# NOTE(review): git filter-branch is deprecated upstream — prefer git filter-repo.
git filter-branch -f --index-filter 'git rm --cached --ignore-unmatch ./resources/all-MiniLM-L12-v2/rust_model.ot'
git add .gitattributes
git commit -m "extend lfs conf"
git push origin master
# Install micromamba with Homebrew.
# (Fixed: '//' is not a shell comment — bash would try to execute it.)
brew install micromamba
# Create a new conda environment from the spec file, targeting Apple silicon.
micromamba env create -f environment.yml --platform osx-arm64
# Activate the new environment.
micromamba activate tch-rs-demo
import torch

# Print the installed torch version.
print('--- torch版本 ---')
print(torch.__version__)
# CUDA version torch was built against (None for CPU-only builds).
print('--- cuda版本 ---')
print(torch.version.cuda)
# Whether a CUDA GPU is usable.
print('--- GPU是否可用 ---')
print(torch.cuda.is_available())
# Number of visible GPUs.
print('--- GPU数量 ---')
print(torch.cuda.device_count())
# GPU names; device indices start at 0.
print('--- GPU名称 ---')
for device_idx in range(torch.cuda.device_count()):
    print(torch.cuda.get_device_name(device_idx))
# CPU smoke test.
print('--- PyTorch代码测试 ---')
print(torch.rand(3, 3))
# Only run the GPU smoke test when CUDA is available — the unguarded
# .cuda() call crashed on CPU-only machines (e.g. Apple silicon, the
# very platform these notes target).
if torch.cuda.is_available():
    print('--- PyTorch代码测试(在GPU上测试PyTorch代码) ---')
    print(torch.rand(3, 3).cuda())
# Download and unpack the prebuilt ONNX Runtime library for macOS arm64.
wget -O onnxruntime-arm.tgz https://github.com/microsoft/onnxruntime/releases/download/v1.18.1/onnxruntime-osx-arm64-1.18.1.tgz
tar -zxvf onnxruntime-arm.tgz
# Point the ort bindings at the dylib.
# (Fixed: the '........' placeholder — replace /path/to with the actual extraction directory.)
export ORT_DYLIB_PATH=/path/to/onnxruntime-osx-arm64-1.18.1/lib/libonnxruntime.dylib
References
- pytorch locally
- tch-rs does not run on m1 mac
- Docker最佳实践
- Rust cargo book
- Failed to run custom build command for openssl-sys v0.9.80
- Rust编译Linux通用可执行文件
- macos-cross-toolchains
- rust-bert
- Writing dockerfile in rust project
- medium-rust-dockerize
- Create an Optimized Rust Alpine Docker Image
- Demo of tch-rs on M1
- Mac Apple-silicon (M-series) configuration
- AMD64 (x86_64) OS
- Compile for arm64 raspberry pi
- Fixing the “GH001: Large files detected. You may want to try Git Large File Storage.”
- ONNX Runtime reference