Skip to content
Open
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion nexus/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ internal-dns-resolver.workspace = true
internal-dns-types.workspace = true
ipnetwork.workspace = true
itertools.workspace = true
jiff.workspace = true
lldpd_client.workspace = true
macaddr.workspace = true
maplit.workspace = true
Expand Down Expand Up @@ -144,7 +145,7 @@ update-common.workspace = true
update-engine.workspace = true
omicron-workspace-hack.workspace = true
omicron-uuid-kinds.workspace = true
zip.workspace = true
zip = { workspace = true, features = ["jiff-02"] }

[dev-dependencies]
async-bb8-diesel.workspace = true
Expand Down
8 changes: 7 additions & 1 deletion nexus/src/app/background/tasks/support_bundle_collector.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1241,10 +1241,16 @@ fn recursively_add_directory_to_zipfile(

let file_type = entry.file_type()?;
if file_type.is_file() {
let src = entry.path();

let system_mtime = entry.metadata().and_then(|m| m.modified())?;
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Two questions about this...

Should we ignore errors here so that we still include files whose mtime we fail to read, and continue collecting the bundle?

Since we are getting the logs in a zip file and then unpacking them on a sled where a bundle is being collected, are we sure that the unzip process is preserving the original modification times? I believe it does — hence the entire point of this PR — but it would be good to verify that the zip crate is actually doing this.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we ignore errors here so that we still include files that we fail to read an mtime from and continue collecting the bundle?

Makes sense, since mtime is just a nice to have. Updated in 72d5f84

Since we are getting the logs in a zip file and then unpacking them on a sled where a bundle is being collected, are we sure that the unzip process is preserving the original modification times?

Yes, but it's good to be sure. I modified app::background::tasks::support_bundle_collector::test::test_collect_chunked to write out the bundle:

diff --git a/nexus/src/app/background/tasks/support_bundle_collector.rs b/nexus/src/app/background/tasks/support_bundle_collector.rs
index 6c22e9f38..5a661c6ba 100644
--- a/nexus/src/app/background/tasks/support_bundle_collector.rs
+++ b/nexus/src/app/background/tasks/support_bundle_collector.rs
@@ -2149,9 +2149,7 @@ mod test {
             .support_bundle_download(
                 &opctx,
                 observed_bundle.id.into(),
-                SupportBundleQueryType::Path {
-                    file_path: "bundle_id.txt".to_string(),
-                },
+                SupportBundleQueryType::Whole,
                 head,
                 range,
             )
@@ -2161,6 +2159,7 @@ mod test {
         // Read the body to bytes, then convert to string
         let body_bytes =
             response.into_body().collect().await.unwrap().to_bytes();
+        std::fs::write("/tmp/test_bundle.zip", &body_bytes).unwrap();
         let body_string = String::from_utf8(body_bytes.to_vec()).unwrap();
 
         // Verify the content matches the bundle ID

The mtime of files in the zip are set as expected:

wfc@illumos:~$ unzip -l /tmp/test_bundle.zip 
Archive:  /tmp/test_bundle.zip
  Length      Date    Time    Name
---------  ---------- -----   ----
       36  10-21-2025 01:58   bundle_id.txt
        0  01-01-1980 00:00   rack/
        0  01-01-1980 00:00   rack/c19a698f-c6f9-4a17-ae30-20d711b8f7dc/
        0  01-01-1980 00:00   rack/c19a698f-c6f9-4a17-ae30-20d711b8f7dc/sled/
        0  01-01-1980 00:00   rack/c19a698f-c6f9-4a17-ae30-20d711b8f7dc/sled/b6d65341-167c-41df-9b5c-41cded99c229/
      729  10-21-2025 01:58   rack/c19a698f-c6f9-4a17-ae30-20d711b8f7dc/sled/b6d65341-167c-41df-9b5c-41cded99c229/sled.txt
   200694  10-21-2025 01:58   reconfigurator_state.json
      239  10-21-2025 01:58   sled_info.json
        0  01-01-1980 00:00   sp_task_dumps/
        0  01-01-1980 00:00   sp_task_dumps/sled_0/
      590  10-21-2025 01:58   sp_task_dumps/sled_0/dump-0.zip
        0  01-01-1980 00:00   sp_task_dumps/sled_1/
      590  10-21-2025 01:58   sp_task_dumps/sled_1/dump-0.zip
        0  01-01-1980 00:00   sp_task_dumps/switch_0/
      588  10-21-2025 01:58   sp_task_dumps/switch_0/dump-0.zip
        0  01-01-1980 00:00   sp_task_dumps/switch_1/
      588  10-21-2025 01:58   sp_task_dumps/switch_1/dump-0.zip
---------                     -------
   204054                     17 files

let zoned = jiff::Zoned::try_from(system_mtime)?;
let zip_time = zip::DateTime::try_from(zoned.datetime())?;

let opts = FullFileOptions::default()
.last_modified_time(zip_time)
.compression_method(zip::CompressionMethod::Deflated)
.large_file(true);
let src = entry.path();

zip.start_file_from_path(dst, opts)?;
let mut file = std::fs::File::open(&src)?;
Expand Down
4 changes: 3 additions & 1 deletion sled-diagnostics/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,11 @@ workspace = true
anyhow.workspace = true
camino.workspace = true
cfg-if.workspace = true
chrono.workspace = true
fs-err = { workspace = true, features = ["tokio"] }
futures.workspace = true
illumos-utils.workspace = true
jiff.workspace = true
libc.workspace = true
omicron-workspace-hack.workspace = true
once_cell.workspace = true
Expand All @@ -26,7 +28,7 @@ slog.workspace = true
thiserror.workspace = true
tokio = { workspace = true, features = ["full"] }
zfs-test-harness.workspace = true
zip = { workspace = true, features = ["zstd"] }
zip = { workspace = true, features = ["jiff-02","zstd"] }

[dev-dependencies]
omicron-common.workspace = true
Expand Down
122 changes: 71 additions & 51 deletions sled-diagnostics/src/logs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -451,11 +451,11 @@ impl LogsHandle {
service: &str,
zip: &mut zip::ZipWriter<W>,
log_snapshots: &mut LogSnapshots,
logfile: &Utf8Path,
logfile: &LogFile,
logtype: LogType,
) -> Result<(), LogError> {
let snapshot_logfile =
self.find_log_in_snapshot(log_snapshots, logfile).await?;
self.find_log_in_snapshot(log_snapshots, &logfile.path).await?;

if logtype == LogType::Current {
// Since we are processing the current log files in a zone we need
Expand Down Expand Up @@ -501,13 +501,24 @@ impl LogsHandle {
.filter(|f| is_log_file(f.path(), filename))
{
let logfile = f.path();
let system_mtime =
f.metadata().and_then(|m| m.modified()).inspect_err(|e| {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

metadata() here doesn't follow symlinks...which I believe is okay since we are operating from the gz context.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Right, this is fine today. On the one hand I don't foresee us copying over symlinks from a sled zip, but there's no harm in handling them just in case. Switched to Utf8Path::metadata which will traverse symlinks in fa07354.

warn!(&self.log, "sled-diagnostic failed to get mtime of logfile";
"error" => %e,
"logfile" => %logfile,
);
}).ok();
let mtime = system_mtime
.and_then(|m| jiff::Timestamp::try_from(m).ok());

if logfile.is_file() {
write_log_to_zip(
&self.log,
service,
zip,
LogType::Current,
logfile,
mtime,
)?;
}
}
Expand All @@ -522,6 +533,7 @@ impl LogsHandle {
zip,
logtype,
&snapshot_logfile,
logfile.modified,
)?;
}
false => {
Expand Down Expand Up @@ -612,7 +624,7 @@ impl LogsHandle {
&service,
&mut zip,
&mut log_snapshots,
&current.path,
&current,
LogType::Current,
)
.await?;
Expand All @@ -628,13 +640,13 @@ impl LogsHandle {
.archived
.into_iter()
.filter(|log| log.path.as_str().contains("crypt/debug"))
.map(|log| log.path)
.collect();

// Since these logs can be spread out across multiple U.2 devices
// we need to sort them by timestamp.
archived.sort_by_key(|log| {
log.as_str()
log.path
.as_str()
.rsplit_once(".")
.and_then(|(_, date)| date.parse::<u64>().ok())
.unwrap_or(0)
Expand Down Expand Up @@ -703,6 +715,7 @@ fn write_log_to_zip<W: Write + Seek>(
zip: &mut zip::ZipWriter<W>,
logtype: LogType,
snapshot_logfile: &Utf8Path,
mtime: Option<jiff::Timestamp>,
) -> Result<(), LogError> {
let Some(log_name) = snapshot_logfile.file_name() else {
warn!(
Expand All @@ -715,10 +728,19 @@ fn write_log_to_zip<W: Write + Seek>(
};

let mut src = File::open(&snapshot_logfile)?;

let zip_mtime = mtime
.and_then(|ts| {
let zoned = ts.in_tz("UTC").ok()?;
zip::DateTime::try_from(zoned.datetime()).ok()
})
.unwrap_or_else(zip::DateTime::default);

let zip_path = format!("{service}/{logtype}/{log_name}");
zip.start_file_from_path(
zip_path,
FullFileOptions::default()
.last_modified_time(zip_mtime)
.compression_method(zip::CompressionMethod::Zstd)
.compression_level(Some(3))
// NB: From the docs
Expand Down Expand Up @@ -757,8 +779,8 @@ enum ExtraLogKind<'a> {

#[derive(Debug, Default, PartialEq)]
struct ExtraLogs<'a> {
current: Option<&'a Utf8Path>,
rotated: Vec<&'a Utf8Path>,
current: Option<&'a LogFile>,
rotated: Vec<&'a LogFile>,
}

fn sort_extra_logs<'a>(
Expand All @@ -780,15 +802,15 @@ fn sort_extra_logs<'a>(
warn!(
logger,
"found multiple current log files for {name}";
"old" => %old_path,
"old" => %old_path.path,
"new" => %log.path,
);
}
entry.current = Some(&log.path);
entry.current = Some(&log);
}
ExtraLogKind::Rotated { name, log } => {
let entry = res.entry(name).or_default();
entry.rotated.push(&log.path);
entry.rotated.push(&log);
}
}
}
Expand Down Expand Up @@ -838,9 +860,9 @@ fn sort_cockroach_extra_logs(logs: &[LogFile]) -> HashMap<&str, ExtraLogs<'_>> {
let entry = interested.entry(prefix).or_default();

if file_name == format!("{prefix}.log") {
entry.current = Some(log.path.as_path());
entry.current = Some(log);
} else {
entry.rotated.push(log.path.as_path());
entry.rotated.push(log);
}
}
}
Expand Down Expand Up @@ -917,52 +939,30 @@ mod test {
"bogus.log",
"some/dir"
].into_iter().map(|l| {
oxlog::LogFile { path: Utf8PathBuf::from(l), size: None, modified: None }
}).collect();
oxlog::LogFile { path: Utf8PathBuf::from(l), size: None, modified: None }
}).collect();
let logs_map: HashMap<_, _> =
logs.iter().map(|l| (l.path.as_str(), l)).collect();

let mut expected: HashMap<&str, ExtraLogs<'_>> = HashMap::new();

// cockroach
expected.entry("cockroach").or_default().current =
Some(Utf8Path::new("cockroach.log"));
expected
.entry("cockroach")
.or_default()
.rotated
.push(Utf8Path::new("cockroach.oxzcockroachdba3628a56-6f85-43b5-be50-71d8f0e04877.root.2025-01-31T17_11_45Z.011435.log"));
expected
.entry("cockroach")
.or_default()
.rotated
.push(Utf8Path::new("cockroach.oxzcockroachdba3628a56-6f85-43b5-be50-71d8f0e04877.root.2025-02-01T01_51_51Z.011486.log"));
Some(&logs_map["cockroach.log"]);
expected.entry("cockroach").or_default().rotated.push(&logs_map["cockroach.oxzcockroachdba3628a56-6f85-43b5-be50-71d8f0e04877.root.2025-01-31T17_11_45Z.011435.log"]);
expected.entry("cockroach").or_default().rotated.push(&logs_map["cockroach.oxzcockroachdba3628a56-6f85-43b5-be50-71d8f0e04877.root.2025-02-01T01_51_51Z.011486.log"]);

// cockroach-health
expected.entry("cockroach-health").or_default().current =
Some(Utf8Path::new("cockroach-health.log"));
expected
.entry("cockroach-health")
.or_default()
.rotated
.push(Utf8Path::new("cockroach-health.oxzcockroachdba3628a56-6f85-43b5-be50-71d8f0e04877.root.2025-01-31T21_43_26Z.011435.log"));
expected
.entry("cockroach-health")
.or_default()
.rotated
.push(Utf8Path::new("cockroach-health.oxzcockroachdba3628a56-6f85-43b5-be50-71d8f0e04877.root.2025-02-01T01_51_53Z.011486.log"));
Some(&logs_map["cockroach-health.log"]);
expected.entry("cockroach-health").or_default().rotated.push(&logs_map["cockroach-health.oxzcockroachdba3628a56-6f85-43b5-be50-71d8f0e04877.root.2025-01-31T21_43_26Z.011435.log"]);
expected.entry("cockroach-health").or_default().rotated.push(&logs_map["cockroach-health.oxzcockroachdba3628a56-6f85-43b5-be50-71d8f0e04877.root.2025-02-01T01_51_53Z.011486.log"]);

// cockroach-stderr
expected.entry("cockroach-stderr").or_default().current =
Some(Utf8Path::new("cockroach-stderr.log"));
expected
.entry("cockroach-stderr")
.or_default()
.rotated
.push(Utf8Path::new("cockroach-stderr.oxzcockroachdba3628a56-6f85-43b5-be50-71d8f0e04877.root.2023-08-30T18_56_19Z.011950.log"));
expected
.entry("cockroach-stderr")
.or_default()
.rotated
.push(Utf8Path::new("cockroach-stderr.oxzcockroachdba3628a56-6f85-43b5-be50-71d8f0e04877.root.2023-08-31T02_59_24Z.010479.log"));
Some(&logs_map["cockroach-stderr.log"]);
expected.entry("cockroach-stderr").or_default().rotated.push(&logs_map["cockroach-stderr.oxzcockroachdba3628a56-6f85-43b5-be50-71d8f0e04877.root.2023-08-30T18_56_19Z.011950.log"]);
expected.entry("cockroach-stderr").or_default().rotated.push(&logs_map["cockroach-stderr.oxzcockroachdba3628a56-6f85-43b5-be50-71d8f0e04877.root.2023-08-31T02_59_24Z.010479.log"]);

let extra = sort_cockroach_extra_logs(logs.as_slice());
assert_eq!(
Expand Down Expand Up @@ -1177,6 +1177,12 @@ mod illumos_tests {
let mut logfile_handle =
fs_err::tokio::File::create_new(&logfile).await.unwrap();
logfile_handle.write_all(data.as_bytes()).await.unwrap();

// 2025-10-15T00:00:00
let mtime = std::time::SystemTime::UNIX_EPOCH
+ std::time::Duration::from_secs(1760486400);
let std_file = logfile_handle.into_std().await.into_file();
std_file.set_modified(mtime).unwrap();
}

// Populate some file with similar names that should be skipped over
Expand All @@ -1197,27 +1203,40 @@ mod illumos_tests {
let zipfile_path = mountpoint.join("test.zip");
let zipfile = File::create_new(&zipfile_path).unwrap();
let mut zip = ZipWriter::new(zipfile);
let log = LogFile {
path: mountpoint
.join(format!("var/svc/log/{}", logfile_to_data[0].0)),
size: None,
modified: None,
};

loghandle
.process_logs(
"mg-ddm",
&mut zip,
&mut log_snapshots,
&mountpoint
.join(format!("var/svc/log/{}", logfile_to_data[0].0)),
&log,
LogType::Current,
)
.await
.unwrap();

zip.finish().unwrap();

// Confirm the zip has our file and data
let expected_zip_mtime =
zip::DateTime::from_date_and_time(2025, 10, 15, 0, 0, 0)
.unwrap();

// Confirm the zip has our file and data with the right mtime
let mut archive =
ZipArchive::new(File::open(zipfile_path).unwrap()).unwrap();
for (name, data) in logfile_to_data {
let mut file_in_zip =
archive.by_name(&format!("mg-ddm/current/{name}")).unwrap();

let mtime = file_in_zip.last_modified().unwrap();
assert_eq!(mtime, expected_zip_mtime, "file mtime matches");

let mut contents = String::new();
file_in_zip.read_to_string(&mut contents).unwrap();

Expand Down Expand Up @@ -1283,13 +1302,14 @@ mod illumos_tests {
let zipfile_path = mountpoint.join("test.zip");
let zipfile = File::create_new(&zipfile_path).unwrap();
let mut zip = ZipWriter::new(zipfile);
let log = LogFile { path: logfile, size: None, modified: None };

loghandle
.process_logs(
"mg-ddm",
&mut zip,
&mut log_snapshots,
&logfile,
&log,
LogType::Current,
)
.await
Expand Down
4 changes: 2 additions & 2 deletions workspace-hack/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ x509-cert = { version = "0.2.5" }
zerocopy-c38e5c1d305a1b54 = { package = "zerocopy", version = "0.8.27", default-features = false, features = ["derive", "simd"] }
zerocopy-ca01ad9e24f5d932 = { package = "zerocopy", version = "0.7.35", features = ["derive", "simd"] }
zeroize = { version = "1.8.1", features = ["std", "zeroize_derive"] }
zip-164d15cefe24d7eb = { package = "zip", version = "4.2.0", default-features = false, features = ["bzip2", "deflate", "zstd"] }
zip-164d15cefe24d7eb = { package = "zip", version = "4.2.0", default-features = false, features = ["bzip2", "deflate", "jiff-02", "zstd"] }
zip-3b31131e45eafb45 = { package = "zip", version = "0.6.6", default-features = false, features = ["bzip2", "deflate"] }

[build-dependencies]
Expand Down Expand Up @@ -291,7 +291,7 @@ x509-cert = { version = "0.2.5" }
zerocopy-c38e5c1d305a1b54 = { package = "zerocopy", version = "0.8.27", default-features = false, features = ["derive", "simd"] }
zerocopy-ca01ad9e24f5d932 = { package = "zerocopy", version = "0.7.35", features = ["derive", "simd"] }
zeroize = { version = "1.8.1", features = ["std", "zeroize_derive"] }
zip-164d15cefe24d7eb = { package = "zip", version = "4.2.0", default-features = false, features = ["bzip2", "deflate", "zstd"] }
zip-164d15cefe24d7eb = { package = "zip", version = "4.2.0", default-features = false, features = ["bzip2", "deflate", "jiff-02", "zstd"] }
zip-3b31131e45eafb45 = { package = "zip", version = "0.6.6", default-features = false, features = ["bzip2", "deflate"] }

[target.x86_64-unknown-linux-gnu.dependencies]
Expand Down
Loading