Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 16 additions & 19 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 3 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -57,17 +57,17 @@ tracing-subscriber = "0.3.22"
opentelemetry = { version = "0.31.0", features = ["metrics"] }

## External Crate
burn-central-client = { version = "0.5.0", path = "../burn-central-client/burn-central-client" }
burn-central-client = { version = "0.6.0" }
# burn-central-client = "0.5.0"

burn-central-registry = { path = "crates/burn-central-registry", version = "0.5.0" }

## Crate
burn-central-core = { path = "crates/burn-central-core", version = "0.5.0" }
burn-central-runtime = { path = "crates/burn-central-runtime", version = "0.5.0" }
burn-central-macros = { path = "crates/burn-central-macros", version = "0.5.0" }
burn-central-inference = { path = "crates/burn-central-inference", version = "0.5.0" }
burn-central-fleet = { path = "crates/burn-central-fleet", version = "0.5.0" }
burn-central-artifact = { path = "crates/burn-central-artifact", version = "0.5.0" }

### For xtask crate ###
tracel-xtask = "4.5.0"
Expand All @@ -76,4 +76,4 @@ tracel-xtask = "4.5.0"
debug = 0 # Speed up compilation time and not necessary.

[patch.crates-io]
burn-central-client = { path = "../burn-central-client/burn-central-client" }
burn-central-client = { git = "https://github.com/tracel-ai/burn-central-client.git", branch = "feat/fleets" }
20 changes: 20 additions & 0 deletions crates/burn-central-artifact/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
[package]
name = "burn-central-artifact"
edition.workspace = true
version.workspace = true
readme.workspace = true
license.workspace = true
rust-version.workspace = true
authors.workspace = true
repository.workspace = true
keywords.workspace = true
categories.workspace = true

[dependencies]
crossbeam = { workspace = true }
reqwest = { version = "0.13.2", features = ["blocking"] }
serde = { workspace = true, features = ["derive"] }
sha2 = { workspace = true }
serde_json = { workspace = true }
burn-central-core = { workspace = true }
thiserror = { workspace = true }
80 changes: 80 additions & 0 deletions crates/burn-central-artifact/src/artifact_download.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
use std::fs::{self, File};
use std::io::BufWriter;
use std::path::{Path, PathBuf};

use burn_central_core::bundle::normalize_bundle_path;

use crate::download::{DownloadError, DownloadTask, download_tasks};
use crate::tools::path::safe_join;

/// Generic download descriptor for any model artifact file.
#[derive(Debug, Clone)]
pub struct ArtifactDownloadFile {
    /// Destination path relative to the download root (normalized before use).
    pub rel_path: String,
    /// URL the file contents are fetched from.
    pub url: String,
    /// Expected size of the file in bytes; the download fails on mismatch.
    pub size_bytes: u64,
    /// Expected checksum of the file contents (normalized before comparison).
    pub checksum: String,
}

/// Download artifact files into a destination directory, validating size and checksum.
///
/// Each file is streamed into a hidden `.<name>.partial` temporary file next to
/// its final destination. Only after the whole batch succeeds are the
/// temporaries renamed into place, so a failed or interrupted run never leaves
/// a truncated artifact at a final path. On error, partial files are removed
/// (best effort) and the first download error is returned.
pub fn download_artifacts_to_dir(
    dest_root: &Path,
    files: &[ArtifactDownloadFile],
) -> Result<(), DownloadError> {
    fs::create_dir_all(dest_root)?;

    if files.is_empty() {
        return Ok(());
    }

    let mut tmps = Vec::with_capacity(files.len());
    let mut tasks = Vec::with_capacity(files.len());
    for file in files {
        let rel_path = normalize_bundle_path(&file.rel_path);
        let dest = safe_join(dest_root, &rel_path)
            .map_err(|e| DownloadError::InvalidPath(e.to_string()))?;

        if let Some(parent) = dest.parent() {
            fs::create_dir_all(parent)?;
        }
        let tmp = temp_path(&dest)?;

        // BUGFIX: stream the download into the temporary path, NOT the final
        // destination. The previous code wrote to `dest` directly, then the
        // promote loop deleted `dest` and skipped the rename (the tmp file
        // never existed) — destroying every successfully downloaded file.
        let tmp_file = File::create(&tmp)?;
        let writer = BufWriter::new(tmp_file);
        tmps.push((dest, tmp));

        tasks.push(DownloadTask {
            rel_path: rel_path.clone(),
            url: file.url.clone(),
            writer,
            expected_size: file.size_bytes,
            expected_checksum: file.checksum.clone(),
        });
    }

    let parallelism = std::thread::available_parallelism()
        .map(|n| n.get())
        .unwrap_or(4);
    let http = reqwest::blocking::Client::new();
    let res = download_tasks(&http, tasks, parallelism);

    match res {
        Ok(()) => {
            // All downloads validated: promote each temporary into place.
            for (dest, tmp) in tmps {
                if dest.exists() {
                    fs::remove_file(&dest)?;
                }
                fs::rename(tmp, dest)?;
            }
            Ok(())
        }
        Err(err) => {
            // Best-effort cleanup of partial files; the download error wins
            // over any cleanup failure.
            for (_, tmp) in tmps {
                let _ = fs::remove_file(tmp);
            }
            Err(err)
        }
    }
}

/// Build the hidden sibling path used while a download is in flight:
/// `dir/file.ext` becomes `dir/.file.ext.partial`.
fn temp_path(dest: &Path) -> Result<PathBuf, DownloadError> {
    match dest.file_name() {
        Some(name) => {
            let hidden = format!(".{}.partial", name.to_string_lossy());
            Ok(dest.with_file_name(hidden))
        }
        None => Err(DownloadError::InvalidPath("missing file name".to_string())),
    }
}
165 changes: 165 additions & 0 deletions crates/burn-central-artifact/src/download.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
use std::io::{Read, Write};

use crossbeam::channel;
use reqwest::blocking::Client as HttpClient;
use sha2::Digest;

use crate::tools::validation::normalize_checksum;

/// Errors produced while downloading and validating artifact files.
#[derive(Debug, thiserror::Error)]
pub enum DownloadError {
    /// The HTTP request failed or the server returned a non-success status.
    #[error("failed to download {path}: {details}")]
    DownloadFailed { path: String, details: String },
    /// The number of bytes received differs from the advertised size.
    #[error("size mismatch for {path}: expected {expected} bytes, got {actual} bytes")]
    SizeMismatch {
        path: String,
        expected: u64,
        actual: u64,
    },
    /// The SHA-256 digest of the received bytes differs from the expected one.
    #[error("checksum mismatch for {path}: expected {expected}, got {actual}")]
    ChecksumMismatch {
        path: String,
        expected: String,
        actual: String,
    },
    /// The expected checksum string could not be normalized/parsed.
    #[error("invalid checksum: {0}")]
    InvalidChecksum(String),
    /// An I/O error from the destination writer or the filesystem.
    #[error("writer error: {0}")]
    WriterError(#[from] std::io::Error),
    /// A relative path was malformed or unsafe (e.g. traversal attempt).
    #[error("invalid path: {0}")]
    InvalidPath(String),
}

/// A single file download task.
#[derive(Clone)]
pub struct DownloadTask<W> {
    /// Path relative to the destination root; used only for error messages.
    pub rel_path: String,
    /// Source URL to fetch.
    pub url: String,
    /// Sink the downloaded bytes are streamed into.
    pub writer: W,
    /// Expected payload size in bytes; a mismatch fails the task.
    pub expected_size: u64,
    /// Expected checksum, normalized before comparison against the computed digest.
    pub expected_checksum: String,
}

/// Download multiple files in parallel.
///
/// Tasks are fanned out over at most `max_parallel` worker threads fed from a
/// shared channel; the first error encountered by any worker is returned.
/// With a single task or `max_parallel <= 1`, downloads run sequentially on
/// the caller's thread with no thread overhead.
pub fn download_tasks<W: Write + Send>(
    http: &HttpClient,
    tasks: Vec<DownloadTask<W>>,
    max_parallel: usize,
) -> Result<(), DownloadError> {
    if tasks.is_empty() {
        return Ok(());
    }

    if max_parallel <= 1 || tasks.len() == 1 {
        for mut task in tasks {
            download_one(http, &mut task)?;
        }
        return Ok(());
    }

    let (tx, rx) = channel::unbounded::<DownloadTask<W>>();
    for task in tasks {
        tx.send(task).expect("channel open");
    }
    // Close the sending side so workers exit once the queue drains.
    drop(tx);

    // std::thread::scope (stable since Rust 1.63) replaces the deprecated
    // crossbeam::scope: workers can borrow `rx`/`http` without 'static, and
    // there is no outer `.expect("scope failed")` double-unwrap.
    std::thread::scope(|scope| {
        // Never spawn more workers than there are queued tasks.
        let worker_count = max_parallel.min(rx.len().max(1));
        let mut handles = Vec::with_capacity(worker_count);
        for _ in 0..worker_count {
            let rx = rx.clone();
            let http = http.clone();
            handles.push(scope.spawn(move || {
                for mut task in rx.iter() {
                    download_one(&http, &mut task)?;
                }
                Ok::<(), DownloadError>(())
            }));
        }

        for handle in handles {
            handle.join().expect("download worker panicked")?;
        }

        Ok(())
    })
}

/// Download a single file, streaming it into the task's writer while
/// verifying both the byte count and the SHA-256 checksum advertised for it.
fn download_one<W: Write>(
    http: &HttpClient,
    task: &mut DownloadTask<W>,
) -> Result<(), DownloadError> {
    let mut resp = http
        .get(&task.url)
        .send()
        .map_err(|e| DownloadError::DownloadFailed {
            path: task.rel_path.clone(),
            details: e.to_string(),
        })?;

    if !resp.status().is_success() {
        return Err(DownloadError::DownloadFailed {
            path: task.rel_path.clone(),
            details: format!("HTTP {}", resp.status()),
        });
    }

    // Hash while streaming so the payload is only read once.
    let sink = &mut task.writer;
    let mut hasher = sha2::Sha256::new();
    let mut buf = [0u8; 1024 * 64];
    let mut total = 0u64;

    loop {
        let read = resp.read(&mut buf)?;
        if read == 0 {
            break;
        }
        sink.write_all(&buf[..read])?;
        hasher.update(&buf[..read]);
        total += read as u64;
    }
    // Flush explicitly: a buffered writer's Drop flushes but silently swallows
    // errors, which would let a failed final write pass validation.
    sink.flush()?;

    let digest = format!("{:x}", hasher.finalize());
    let expected_checksum =
        normalize_checksum(&task.expected_checksum).map_err(DownloadError::InvalidChecksum)?;

    if total != task.expected_size {
        return Err(DownloadError::SizeMismatch {
            path: task.rel_path.clone(),
            expected: task.expected_size,
            actual: total,
        });
    }
    if digest != expected_checksum {
        return Err(DownloadError::ChecksumMismatch {
            path: task.rel_path.clone(),
            expected: expected_checksum,
            actual: digest,
        });
    }

    Ok(())
}
8 changes: 8 additions & 0 deletions crates/burn-central-artifact/src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
//! This crate centralizes traits, structures and utilities for handling artifacts and models in Burn Central.

// Internal modules; only the curated items below are re-exported.
mod artifact_download;
mod download;
mod tools;

// Public API surface of the crate.
pub use artifact_download::{ArtifactDownloadFile, download_artifacts_to_dir};
pub use download::DownloadError;
2 changes: 2 additions & 0 deletions crates/burn-central-artifact/src/tools/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
//! Internal helper utilities shared across the artifact crate.

pub mod path;
pub mod validation;
28 changes: 28 additions & 0 deletions crates/burn-central-artifact/src/tools/path.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
use std::path::{Path, PathBuf};

use burn_central_core::bundle::normalize_bundle_path;

/// Sanitize a relative path to prevent directory traversal attacks.
///
/// The input is first normalized via `normalize_bundle_path`; the result is
/// accepted only when every component is a plain name — `..`, `.`, a root
/// separator, or a Windows prefix all cause rejection.
pub fn sanitize_rel_path(path: &str) -> Result<PathBuf, String> {
    use std::path::Component;

    let normalized = normalize_bundle_path(path);
    let all_plain = Path::new(&normalized)
        .components()
        .all(|component| matches!(component, Component::Normal(_)));

    if all_plain {
        Ok(PathBuf::from(normalized))
    } else {
        Err(format!("invalid path component: {path}"))
    }
}

/// Safely join a root path with a relative path, rejecting traversal attempts.
pub fn safe_join(root: &Path, rel: &str) -> Result<PathBuf, String> {
    sanitize_rel_path(rel).map(|clean| root.join(clean))
}
Loading
Loading