diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..ddcf6756 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,32 @@ +name: CI + +on: + pull_request: + +env: + CARGO_TERM_COLOR: always + +jobs: + check: + name: fmt + clippy + test + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: dtolnay/rust-toolchain@stable + with: + components: rustfmt, clippy + + - uses: Swatinem/rust-cache@v2 + + - name: Install system dependencies + run: sudo apt-get update && sudo apt-get install -y libdbus-1-dev pkg-config + + - name: Check formatting + run: cargo fmt -- --check + + - name: Clippy + run: cargo clippy --all-targets --keep-going -- -D warnings + + - name: Run tests + run: cargo test --workspace --no-fail-fast diff --git a/Cargo.toml b/Cargo.toml index 6a01eef2..3ef5505c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -59,6 +59,13 @@ keyring = { version = "3.0" } inherits = "release" lto = "thin" +[[bin]] +name = "mock-probe-verus" +path = "tests/helpers/mock_probe_verus.rs" +test = false +doc = false +bench = false + [dev-dependencies] tempfile = "3.24.0" diff --git a/docs/design.md b/docs/design.md new file mode 100644 index 00000000..2e919690 --- /dev/null +++ b/docs/design.md @@ -0,0 +1,383 @@ +# Structure for Verification Projects + +**Author:** Shaowei Lin + +## 1. Introduction + +### 1.1 Blueprint + +Similar to Blueprint for theorem-proving projects, we have Structure for verification +projects. Also known as VeriLib Structure or the molecular structure of the project. + +A good example is Terence Tao's Equational Theories project. You can see in the +[GitHub repo](https://github.com/teorth/equational_theories/tree/main/blueprint/src/chapter) +that the structure of the project is broken up into chapters which are saved in the +`/blueprint` folder. 
+ +Here is the [Blueprint website](https://teorth.github.io/equational_theories/blueprint/) +or visualizer that was created from the Blueprint files. + +Here is a [visualization of the dependency graph](https://teorth.github.io/equational_theories/blueprint/dep_graph_document.html) +for the project. You can see the atomic dependencies, but not the molecular/chapter +structure, which makes it hard to sort through all the atoms. + +The following repo is a fork of the original Equational Theories project. It contains a +`.verilib` folder with artifacts which represent changes to the database made by the +VeriLib CLI tool: + +- `verilib create` — `blueprint.json`, `config.json`, `structure_files.json` +- `verilib atomize` — `structure_meta.json` +- `verilib specify` — `/certs/specify/veri*.json` +- `verilib verify` — `/certs/verify/veri*.json` + +### 1.2 Structure + +We want to create a collection of Structure files for formal verification, similar to +the Blueprint files for math theorems. The files will contain the molecular and atomic +structure of the project. The molecular structure can then be visualized and explored on +the VeriLib website. + +Blueprint requires manual updating of the statuses of the theorems and proofs, but VeriLib +can automate this update through atomization, specification and verification checks. + +The following is a branch of the Dalek-Lite project. It contains a `.verilib` folder with +artifacts which represent changes to the database made by the VeriLib CLI tool: + +- `verilib create` — `tracked_functions.csv`, `config.json`, `structure_files.json`, `/curve25519-dalek/src/*/*.md` +- `verilib atomize` — `atoms.json`, `structure_meta.json` +- `verilib specify` — `specification.json`, `/certs/specify/veri*.json` +- `verilib verify` — `verification.json`, `/certs/verify/veri*.json` + +## 2. 
Requirements + +### 2.1 Conceptual Overview of VeriLib Structure + +- Structure == Cognitive Structure (will not call it Blueprint from now on) +- Cognitive structure of project defined by topics and stubs +- Code hierarchy of project defined by folders/modules and impls/specs/proofs (impl = implementation) +- Cognitive structure can be different from code hierarchy +- Different kinds of atoms: stubs, impls, specs, proofs +- Different kinds of molecules: topics, folders, modules +- Different kinds of dependencies: stub deps, impl deps, spec deps, proof deps +- Different kinds of viewers: stub viewer, impl viewer, spec viewer, proof viewer, multimodal viewer +- Cognitive structure can overlap with code hierarchy + - Some stubs are IDENTIFIED with impls + - Some impls are not stubs (e.g. test functions) + - Some stubs are not impls (e.g. natural language stubs) +- In the absence of structure files + - Default to code hierarchy and generate appropriate structure files + - Generated structure files need not be committed to GitHub + - But users can choose to commit these structure files (preferably generated locally, not downloaded from VeriLib) +- Besides `.md` files, there could be other formats for structure files in future, e.g. LaTeX + +### 2.2 MVP Overview + +- MVP == Stub Viewer +- For this MVP, the aim is to visualize the cognitive structure of the project +- Stub dependencies can only be code dependencies (comes from atomization) for now +- Arbitrary stub dependencies not allowed for now, but will be needed in future (e.g. "this theorem stub will depend on the following lemma stubs") + +### 2.3 Workflow + +- **Initialization** + - The user creates structure files. + - Files are committed to repo. + - The user calls `verilib create` which: + - Asks questions about implementation language, proof language, etc. + - Creates a remote VeriLib repo. + - Creates a local config file. + - Performs structure checks. 
+- **Changes** + - The user makes changes to the structure files (manually). + - The user makes changes to the source code. + - The user (or the CI workflow) calls `verilib atomize` to perform structure checks. + - The user calls `verilib specify` to validate and sign the specifications if necessary. + - The user calls `verilib verify` to verify and sign the proofs if necessary. + - The user commits the changes to the GitHub repo. +- **VeriLib web** + - Automatically checks the repo branch to see if there is a need to reclone. + - Performs atomization checks. + - Performs specification checks. + - Performs verification checks. + - In the future, we should be able to view other commits and other branches. + +### 2.4 Config File + +A `config.json` file in the `.verilib` folder with: + +- The VeriLib repo ID +- The VeriLib URL +- The implementation language, e.g. Rust +- The proof language, e.g. Verus, Lean +- The structure format, e.g. Structure, Blueprint +- The structure root path, e.g. `/structure` + +### 2.5 Atom Names + +- The unique identifier for functions/definitions/theorems is called `code-name` rather + than `scip-name` because for Lean, we could be using Pantograph for atomization, + instead of a SCIP-based atomizer. +- The `code-name` will start with the scheme, e.g. `scip:...` or `panto:...` to + distinguish between the different naming formats. + +### 2.6 Structure Files + +- The user selects a folder (e.g. the root `/` of the repo, or the `/structure` folder), + called the **structure root**, where the molecular structure will be stored. + - The folders in the structure root will be the molecules. + - The atoms will be `.md` files nested in the structure root. +- Each `.md` atom file contains an informal spec, code and/or proof of the atom. 
+- When the user has created source code that contains the formal spec, code, and/or + proof of the atom and wants to connect it to the atom, they can add its `code-path`, + `code-line` as YAML frontmatter in the `.md` atom file. Or they can provide the + `code-name` of the function if known. We can use verilib-cli to autofill the + `code-name` from the code path and line. + + Example frontmatter: + + ```yaml + --- + code-line: 497 + code-path: /curve25519-dalek/src/backend/serial/u64/scalar.rs + code-name: curve25519-dalek/4.1.3/scalar/u64/serial/backend/Scalar52<&Scalar52>#add() + veri-name: scalar/main-theorem + --- + ``` + +- Optionally, the user can provide a `veri-name` for the atom. +- The user can also write down some informal dependencies as a YAML list in the + frontmatter. Atoms in the dependencies can be identified by either their `code-name`s + or their `veri-name`s (e.g. `veri:scalar/main-theorem`). Molecular paths are not used + because they break when atoms are moved around. +- Specification certs and verification certs are stored in the `.verilib` metadata folder, + in a folder where the certs are stored as separate files named after their `code-name` + or `veri-name`. +- The formal dependencies, code-line-endings, code-text can all be extracted from the + source code in the repo, and should therefore not be committed to the repo. +- The `code-name`s should be used as the official identifiers of atoms used throughout + the verification project, to avoid referring to atoms by their code paths and lines or + molecular paths, which are all brittle. + +### 2.7 Structure Checks + +- The goal is to check the structure stub files for coherence. +- Can be triggered independently with command `verilib atomize`. +- Replaces `code-path`, `code-line` in `.md` atom files with `code-name`. +- Check that `veri-name`s are unique. +- Check that dependencies are `code-name`s or `veri-name`s. 
+ +### 2.8 Atomization Checks + +- The goal is to compute the dependencies and the code content of each atom. +- There are many kinds of atoms: + - Structure stubs (`.md` files which are placeholders for future functions or theorems) + - Code functions (implementation is transparent to other atoms) + - Spec definitions (term is transparent to other atoms) + - Spec theorems (proof is opaque to other atoms) +- There are many kinds of dependencies: + - Stub dependencies (informal relationships between stubs) + - Type/spec dependencies (atoms used by the type/spec) + - Term/proof dependencies (atoms used by the term/proof) + - Transpilation dependencies (e.g. this Lean function is a transpilation of this Rust function) +- There are many kinds of statuses: + - Type statuses are also called specification statuses + - Term statuses are also called verification statuses + +### 2.9 About Overwriting .md Stub Files + +- On verilib server, if ALL the `.md` files are missing, generate `.md` files from code + hierarchy. +- Locally, user can choose to generate ANY missing `.md` files using `verilib create` + and commit them, but this is not necessary since VeriLib can handle repos without + `.md` files. +- User should not be downloading `.md` files from VeriLib. +- For `.md` files with only `code-path` and `code-line`, user can choose to fill in + `code-name` by running `verilib atomize` locally because the `code-name` is more + stable. After filling in the stub files, the user can commit the changes to the repo. + **When a `.md` file has conflicting `code-line`/`code-path` and `code-name` info, the + `code-name` will always take precedence.** The user can even choose to delete the + `code-line`s/`code-path`s from the stub files and commit the changes if they want. +- **`.md` files will not be overwritten during specification/verification**, since spec + and verification certs are stored separately. 
+ +### 2.10 Specification Checks + +- The goal is to update the specification statuses of the atoms. +- There are many kinds of specification statuses: + - No spec + - Only informal spec written + - Formal spec written + - Formal spec validated +- **Specification certs workflow:** + - The user runs `verilib specify` locally prior to making a commit to the repo. + - The CLI tool first checks if the user has permissions to validate the spec, and if + there is a link to the private key of the user. + - It will then check the existing specification certs (which contain a checksum of the + spec previously validated) against the current list of specs (those that contain + `ensures` or `requires`), and show the user a menu of these new/changed specs. + - The user selects a spec, and the CLI tool will show the diff in the specs. + - If the diff looks good, the user chooses accept. + - The new spec will be signed with the user's private key and then stored in the repo. + - The user then commits the changes. +- Specification certs will be stored on VeriLib and in the GitHub repo. + - A cert is a single file with metadata and cryptographic hashes: + - The `code-name` of the function that was specified + - The hash of the spec of the function + - (In future, the hash of spec dependencies) + - The name of the person who validated the spec + - The public key of the person who validated the spec + - The timestamp when the spec was validated + - The specification hash of the above information + - **For the MVP, the spec cert will just contain a timestamp.** + +### 2.11 Verification Checks + +- The goal is to update the verification statuses of the atoms. 
+- There are many kinds of verification statuses: + - No proof + - Only informal proof written + - Incomplete formal proof written (with sorry's) + - Complete formal proof written + - Complete formal proof verified, but dependencies may not be verified + - Complete formal proof verified, and all dependencies verified +- **Verification certs:** + - In MVP, all verification statuses MUST come from compilation, not read from a cert. + - So for MVP, these compilation results can be stored in the VeriLib DB but should not + be committed in the git repo. + - In future, we can store cryptographically-signed verification certs in the git repo, to + avoid recompiling everything. + - The cryptographic signing will prevent users from changing the verification status + without calling verilib CLI tool. +- Verification certs will be stored on VeriLib and in the GitHub repo. + - For MVP, a single cert is generated remotely by the VeriLib server for the whole + package. The cert can be committed to the git repo if desired. + - In future, a cert for individual functions is a file with metadata and cryptographic + hashes: + - The `code-name` of the function that was verified + - The specification hash of the spec of the function + - The hash of the proof of the function + - (In future, the hash of proof dependencies) + - The name and version of the agent that verified the proof (e.g. 
VeriLib) + - The public key of the agent who verified the proof + - The name and version of the proof checker + - The hash of the proof checker code + - The timestamp when the proof was verified + - The verification hash of the above information + - In future, running `verilib verify` will: + - Get existing certs for current atoms from the VeriLib server + - Make a list of current atoms with proofs but without certs and verify them + - Make a list of current atoms whose certs have expired because of edits to the atom + +### 2.12 Blueprint Files + +- We want to provide similar support for existing math-theorem-proving projects with + Blueprint files. +- Shaowei has a script that can convert the Blueprint files into Structure files, so we can + get limited support this way. It will be a quick way to get buy-in for VeriLib from + mathematicians doing FV. + +## 3. Implementation + +### 3.1 Implementation Stages + +- Stage 1: Only reclone and topological layouts. No web edits. +- Stage 2: Viewing different branches and commits in a VeriLib repo. +- Stage 3: Layouts can be pulled into local machines to be committed. +- Stage 4: VeriLib deployment after atomization on local machine. + +### 3.2 Identifiers + +- Identifiers for atoms should be used in atomization, specification and verification. +- The atoms can be functions from the implementation source code, theorems and + definitions from the proof source code, or nodes in the informal project structure. +- The identifiers cannot be database IDs because GitHub users would not know them. +- The identifiers cannot be the molecular paths of the atoms, because these will change. +- The identifiers for functions should be computable from the implementation source code. + +### 3.3 Atomization + +- Compute **atoms dictionary**: + - The keys are `code-name`s. + - The values are `code-module`, `code-path`, `code-lines` (start and end), and `dependencies`. 
+- Compute **lines dictionary**: + - The keys are code-lines intervals (with start and end). + - The values are `code-name`s. + - Can do a O(log N) lookup using an interval tree. + +### 3.4 Verification + +- Either verify all modules in the repo, or verify a single module. +- The output is a list of `(code-path, code-line, error-msg)` of the failed functions. +- Get the `code-name` from the `code-path` and `code-line` from the code-lines dictionary. +- We don't want to recompute the atoms dictionary with every verification. + +### 3.5 Schema of probe outputs + +**stubs.json** (output of `probe-verus stubify`): + +```json +{ + "curve25519-dalek/src/montgomery.rs/MontgomeryPoint.ct_eq(&MontgomeryPoint).md": { + "code-line": 111, + "code-path": "curve25519-dalek/src/montgomery.rs", + "code-name": "probe:curve25519-dalek/4.1.3/montgomery/MontgomeryPoint#ConstantTimeEq<&MontgomeryPoint>#ct_eq()" + } +} +``` + +**atoms.json** (output of `probe-verus atomize`): + +```json +{ + "probe:curve25519-dalek/4.1.3/field/u64/serial/backend/FieldElement51#ConditionallySelectable<&FieldElement51>#conditional_swap()": { + "display-name": "conditional_swap", + "dependencies": [ + "scip:curve25519-dalek/4.1.3/subtle_assumes/u64/serial/backend/choice_is_true()", + "scip:curve25519-dalek/4.1.3/subtle_assumes/u64/serial/backend/conditional_swap_u64()" + ], + "code-module": "field/u64/serial/backend", + "code-path": "curve25519-dalek/src/backend/serial/u64/field.rs", + "code-text": { + "lines-start": 687, + "lines-end": 732 + } + } +} +``` + +**specs.json** (output of `probe-verus specify`): + +```json +{ + "probe:curve25519-dalek/4.1.3/pow2k_lemmas/field_lemmas/lemmas/c2_val()": { + "code-path": "src/lemmas/field_lemmas/pow2k_lemmas.rs", + "spec-text": { + "lines-start": 687, + "lines-end": 732 + }, + "specified": true + } +} +``` + +**proofs.json** (output of `probe-verus verify`): + +```json +{ + "probe:curve25519-dalek/4.1.3/pow2k_lemmas/field_lemmas/lemmas/c2_val()": { + "code-path": 
"src/lemmas/field_lemmas/pow2k_lemmas.rs", + "code-line": 456, + "verified": true, + "status": "success" + } +} +``` + +### 3.6 Certificates + +- Specification and verification certificates of an atom need to be stored separately from + the other metadata of the atom or from certs of other atoms, so that they can be copied + and transported. +- When the latest changes to a repo are pulled onto the VeriLib server, previously-unseen + certs need to be verified (hashes recomputed, proofs rechecked) before they are added + to the database. diff --git a/src/commands/api.rs b/src/commands/api.rs index dc7d691f..13f0dea5 100644 --- a/src/commands/api.rs +++ b/src/commands/api.rs @@ -1,8 +1,10 @@ +#![allow(dead_code)] // WIP: not yet wired into CLI — see https://github.com/Beneficial-AI-Foundation/verilib-cli/issues/36 + use anyhow::{Context, Result}; use serde::{Deserialize, Serialize}; use std::fs; -use std::path::PathBuf; -use std::io::{self, Read, IsTerminal}; +use std::io::{self, IsTerminal, Read}; +use std::path::{Path, PathBuf}; #[derive(Debug, Clone)] pub enum ApiSubcommand { @@ -105,11 +107,7 @@ struct BatchResult { error: Option, } -pub async fn handle_api( - subcommand: ApiSubcommand, - json_output: bool, - dry_run: bool, -) -> Result<()> { +pub async fn handle_api(subcommand: ApiSubcommand, json_output: bool, dry_run: bool) -> Result<()> { match subcommand { ApiSubcommand::Get { file } => handle_get(file, json_output).await, ApiSubcommand::List { filter } => handle_list(filter, json_output).await, @@ -129,10 +127,25 @@ pub async fn handle_api( status_id, statement_type, code_name, - } => handle_create_file(path, content, from_file, disabled, specified, status_id, statement_type, code_name, json_output, dry_run).await, + } => { + handle_create_file( + path, + content, + from_file, + disabled, + specified, + status_id, + statement_type, + code_name, + json_output, + dry_run, + ) + .await + } } } +#[allow(clippy::too_many_arguments)] async fn handle_create_file( 
path: PathBuf, content: Option, @@ -153,21 +166,23 @@ async fn handle_create_file( (content, format!("file {:?}", p)) } else if !io::stdin().is_terminal() { let mut content = String::new(); - io::stdin().read_to_string(&mut content) + io::stdin() + .read_to_string(&mut content) .context("Failed to read from stdin")?; (content, "stdin".to_string()) } else { anyhow::bail!("No content provided. Use --content, --from-file, or pipe content to stdin."); }; - let identifier = path.file_name() + let identifier = path + .file_name() .ok_or_else(|| anyhow::anyhow!("Invalid path: no filename"))? .to_string_lossy() .to_string(); let logical_parent = path.parent().unwrap_or_else(|| std::path::Path::new("")); let verilib_root = PathBuf::from(".verilib"); - + let physical_parent = if logical_parent.starts_with(&verilib_root) { logical_parent.to_path_buf() } else { @@ -175,8 +190,12 @@ async fn handle_create_file( }; if !dry_run { - fs::create_dir_all(&physical_parent) - .with_context(|| format!("Failed to create parent directories for {:?}", physical_parent))?; + fs::create_dir_all(&physical_parent).with_context(|| { + format!( + "Failed to create parent directories for {:?}", + physical_parent + ) + })?; } let mut next_index = 0; @@ -201,7 +220,8 @@ async fn handle_create_file( let meta_path = physical_parent.join(&meta_filename); let final_code_name = code_name.unwrap_or_else(|| { - physical_parent.file_name() + physical_parent + .file_name() .map(|n| n.to_string_lossy().to_string()) .unwrap_or_else(|| "unknown".to_string()) }); @@ -214,7 +234,7 @@ async fn handle_create_file( } else { &path_str }; - + let json_path = if clean_path.starts_with('/') { clean_path.to_string() } else { @@ -253,14 +273,17 @@ async fn handle_create_file( println!("Would create atom file: {:?}", atom_path); println!("Would create meta file: {:?}", meta_path); println!("Source: {}", source_desc); - println!("Meta content:\n{}", serde_json::to_string_pretty(&meta_json)?); + println!( + "Meta 
content:\n{}", + serde_json::to_string_pretty(&meta_json)? + ); } return Ok(()); } fs::write(&atom_path, &final_content) .with_context(|| format!("Failed to write atom file: {:?}", atom_path))?; - + let meta_content_str = serde_json::to_string_pretty(&meta_json)?; fs::write(&meta_path, &meta_content_str) .with_context(|| format!("Failed to write meta file: {:?}", meta_path))?; @@ -284,13 +307,12 @@ async fn handle_create_file( async fn handle_get(file: PathBuf, json_output: bool) -> Result<()> { let resolved_path = resolve_file_path(&file)?; validate_meta_file(&resolved_path)?; - + let content = fs::read_to_string(&resolved_path) .with_context(|| format!("Failed to read file: {:?}", resolved_path))?; - - let meta: MetaFile = serde_json::from_str(&content) - .context("Failed to parse meta file")?; - + + let meta: MetaFile = serde_json::from_str(&content).context("Failed to parse meta file")?; + let output = GetOutput { file: resolved_path.to_string_lossy().to_string(), specified: meta.specified, @@ -298,7 +320,7 @@ async fn handle_get(file: PathBuf, json_output: bool) -> Result<()> { verified: meta.status_id == 2, status_id: meta.status_id, }; - + if json_output { println!("{}", serde_json::to_string_pretty(&output)?); } else { @@ -308,25 +330,25 @@ async fn handle_get(file: PathBuf, json_output: bool) -> Result<()> { println!(" Verified: {}", output.verified); println!(" Status ID: {}", output.status_id); } - + Ok(()) } async fn handle_list(filter: Option, json_output: bool) -> Result<()> { let verilib_dir = PathBuf::from(".verilib"); - + if !verilib_dir.exists() { anyhow::bail!("No .verilib directory found. 
Please run 'init' first."); } - + let mut files = Vec::new(); - + for entry in walkdir::WalkDir::new(&verilib_dir) .into_iter() .filter_map(|e| e.ok()) { let path = entry.path(); - if path.is_file() && path.extension().map_or(false, |ext| ext == "verilib") { + if path.is_file() && path.extension().is_some_and(|ext| ext == "verilib") { let file_name = path.file_name().unwrap_or_default().to_string_lossy(); if file_name.contains(".meta.") { if let Ok(content) = fs::read_to_string(path) { @@ -337,7 +359,7 @@ async fn handle_list(filter: Option, json_output: bool) -> Result< Some(StatusFilter::Ignored) => meta.disabled, Some(StatusFilter::Verified) => meta.status_id == 2, }; - + if matches_filter { files.push(FileInfo { path: path.to_string_lossy().to_string(), @@ -351,18 +373,20 @@ async fn handle_list(filter: Option, json_output: bool) -> Result< } } } - + if json_output { let output = ListOutput { files }; println!("{}", serde_json::to_string_pretty(&output)?); } else { println!("Found {} files", files.len()); for file in files { - println!(" {} [Spec: {} | Ign: {} | Ver: {}]", - file.path, file.specified, file.ignored, file.verified); + println!( + " {} [Spec: {} | Ign: {} | Ver: {}]", + file.path, file.specified, file.ignored, file.verified + ); } } - + Ok(()) } @@ -376,33 +400,32 @@ async fn handle_set( ) -> Result<()> { let resolved_path = resolve_file_path(&file)?; validate_meta_file(&resolved_path)?; - + if verified.is_some() { check_admin_status()?; } - + let content = fs::read_to_string(&resolved_path) .with_context(|| format!("Failed to read file: {:?}", resolved_path))?; - - let mut meta: MetaFile = serde_json::from_str(&content) - .context("Failed to parse meta file")?; - + + let mut meta: MetaFile = serde_json::from_str(&content).context("Failed to parse meta file")?; + let mut changes = Vec::new(); - + if let Some(val) = specified { if meta.specified != val { changes.push(format!("specified: {} -> {}", meta.specified, val)); meta.specified = val; } } - 
+ if let Some(val) = ignored { if meta.disabled != val { changes.push(format!("ignored: {} -> {}", meta.disabled, val)); meta.disabled = val; } } - + if let Some(val) = verified { let new_status = if val { 2 } else { 0 }; if meta.status_id != new_status { @@ -410,56 +433,62 @@ async fn handle_set( meta.status_id = new_status; } } - + if changes.is_empty() { if !json_output { println!("No changes needed for: {}", resolved_path.display()); } return Ok(()); } - + if dry_run { if json_output { println!("{{\"dry_run\": true, \"changes\": {:?}}}", changes); } else { - println!("DRY RUN - Would make the following changes to {}:", resolved_path.display()); + println!( + "DRY RUN - Would make the following changes to {}:", + resolved_path.display() + ); for change in changes { println!(" - {}", change); } } return Ok(()); } - - let new_content = serde_json::to_string_pretty(&meta) - .context("Failed to serialize meta file")?; - + + let new_content = + serde_json::to_string_pretty(&meta).context("Failed to serialize meta file")?; + fs::write(&resolved_path, new_content) .with_context(|| format!("Failed to write file: {:?}", resolved_path))?; - + if json_output { - println!("{{\"success\": true, \"file\": \"{}\", \"changes\": {}}}", - resolved_path.display(), changes.len()); + println!( + "{{\"success\": true, \"file\": \"{}\", \"changes\": {}}}", + resolved_path.display(), + changes.len() + ); } else { println!("Successfully updated: {}", resolved_path.display()); for change in changes { println!(" - {}", change); } } - + Ok(()) } async fn handle_batch(input: PathBuf, json_output: bool, dry_run: bool) -> Result<()> { let content = fs::read_to_string(&input) .with_context(|| format!("Failed to read batch input file: {:?}", input))?; - - let batch: BatchInput = serde_json::from_str(&content) - .context("Failed to parse batch input JSON")?; - + + let batch: BatchInput = + serde_json::from_str(&content).context("Failed to parse batch input JSON")?; + let mut results = 
Vec::new(); let mut success_count = 0; let mut error_count = 0; - + for op in batch.operations { let file_path = PathBuf::from(&op.file); let result = handle_set( @@ -467,11 +496,11 @@ async fn handle_batch(input: PathBuf, json_output: bool, dry_run: bool) -> Resul op.specified, op.ignored, op.verified, - false, + false, dry_run, ) .await; - + match result { Ok(_) => { success_count += 1; @@ -491,7 +520,7 @@ async fn handle_batch(input: PathBuf, json_output: bool, dry_run: bool) -> Resul } } } - + if json_output { let output = BatchOutput { success_count, @@ -511,11 +540,11 @@ async fn handle_batch(input: PathBuf, json_output: bool, dry_run: bool) -> Resul } } } - + if error_count > 0 { std::process::exit(1); } - + Ok(()) } @@ -523,16 +552,16 @@ fn validate_meta_file(file: &PathBuf) -> Result<()> { if !file.exists() { anyhow::bail!("File not found: {:?}", file); } - + if !file.is_file() { anyhow::bail!("Path is not a file: {:?}", file); } - + let file_name = file.file_name().unwrap_or_default().to_string_lossy(); if !file_name.contains(".meta.verilib") { anyhow::bail!("File is not a .meta.verilib file: {:?}", file); } - + Ok(()) } @@ -549,26 +578,25 @@ fn check_admin_status() -> Result<()> { Ok(()) } - -fn resolve_file_path(input: &PathBuf) -> Result { +fn resolve_file_path(input: &Path) -> Result { use regex::Regex; - + let input_str = input.to_string_lossy().to_string(); let mut path = input_str.clone(); - + if path.starts_with(".verilib/") { path = path.strip_prefix(".verilib/").unwrap().to_string(); } else if path.starts_with(".verilib\\") { path = path.strip_prefix(".verilib\\").unwrap().to_string(); } - + let path_buf = PathBuf::from(&path); let parent = path_buf.parent(); let filename = path_buf.file_name().unwrap_or_default().to_string_lossy(); - + let re = Regex::new(r"^\[\d+\]\s*-\s*").unwrap(); let clean_filename = re.replace(&filename, "").to_string(); - + let final_filename = if clean_filename.ends_with(".meta.verilib") { clean_filename } else if 
clean_filename.ends_with(".verilib") { @@ -576,7 +604,7 @@ fn resolve_file_path(input: &PathBuf) -> Result { } else { format!("{}.meta.verilib", clean_filename) }; - + let resolved = if let Some(parent_path) = parent { PathBuf::from(".verilib") .join(parent_path) @@ -584,7 +612,7 @@ fn resolve_file_path(input: &PathBuf) -> Result { } else { PathBuf::from(".verilib").join(&final_filename) }; - + if let Some(parent_dir) = resolved.parent() { if parent_dir.exists() { if let Ok(entries) = fs::read_dir(parent_dir) { @@ -598,6 +626,6 @@ fn resolve_file_path(input: &PathBuf) -> Result { } } } - + Ok(resolved) } diff --git a/src/commands/atomize.rs b/src/commands/atomize.rs index 2fa62ef4..09dfcbca 100644 --- a/src/commands/atomize.rs +++ b/src/commands/atomize.rs @@ -4,8 +4,8 @@ use crate::config::ProjectConfig; use crate::structure::{ - cleanup_intermediate_files, parse_frontmatter, run_command, - write_frontmatter, CommandConfig, ExternalTool, ATOMIZE_INTERMEDIATE_FILES, + cleanup_intermediate_files, parse_frontmatter, run_command, write_frontmatter, CommandConfig, + ExternalTool, ATOMIZE_INTERMEDIATE_FILES, }; use anyhow::{bail, Context, Result}; use intervaltree::IntervalTree; @@ -41,7 +41,11 @@ pub async fn handle_atomize( true } else { ProjectConfig::init(&project_root)?; - if ProjectConfig::global().unwrap().structure_root_path().is_err() { + if ProjectConfig::global() + .unwrap() + .structure_root_path() + .is_err() + { bail!( "Verus project detected but no .verilib/config.json found. \ Run 'verilib-cli create' first." 
@@ -63,25 +67,19 @@ pub async fn handle_atomize( let atoms_path = config.atoms_path(); let cmd_config = config.command_config(); - // Step 1: Generate stubs.json from .md files using probe-verus stubify - let stubs = generate_stubs( - &project_root, - &structure_root, - &stubs_path, - &cmd_config, - )?; - println!("Loaded {} stubs from structure files", stubs.len()); + // Step 1: Generate stubs from .md files + let stubs = if no_probe { + load_stubs_from_md_files(&structure_root)? + } else { + generate_stubs(&project_root, &structure_root, &stubs_path, &cmd_config)? + }; + println!("Loaded {} stubs", stubs.len()); // Step 2: Generate or load atoms.json let probe_atoms = if no_probe { load_atoms_from_file(&atoms_path)? } else { - generate_probe_atoms( - &project_root, - &atoms_path, - &cmd_config, - use_rust_analyzer, - )? + generate_probe_atoms(&project_root, &atoms_path, &cmd_config, use_rust_analyzer)? }; println!("Loaded {} atoms", probe_atoms.len()); @@ -99,10 +97,7 @@ pub async fn handle_atomize( } // Step 5: Save enriched stubs.json - println!( - "Saving enriched stubs to {}...", - stubs_path.display() - ); + println!("Saving enriched stubs to {}...", stubs_path.display()); let content = serde_json::to_string_pretty(&enriched)?; std::fs::write(&stubs_path, content)?; @@ -261,6 +256,48 @@ fn generate_stubs( Ok(stubs) } +/// Walk the structure directory and parse .md frontmatter to build stubs +/// without requiring probe-verus. This mirrors what `probe-verus stubify` does. +fn load_stubs_from_md_files(structure_root: &Path) -> Result> { + if !structure_root.exists() { + bail!( + "Structure directory not found at {}. 
Run 'verilib-cli create' first.", + structure_root.display() + ); + } + + println!( + "Loading stubs from .md files in {}...", + structure_root.display() + ); + + let mut stubs: HashMap = HashMap::new(); + for entry in WalkDir::new(structure_root) + .into_iter() + .filter_map(|e| e.ok()) + { + let path = entry.path(); + if path.extension().and_then(|e| e.to_str()) != Some("md") { + continue; + } + let rel_path = path + .strip_prefix(structure_root) + .unwrap_or(path) + .to_string_lossy() + .to_string(); + match parse_frontmatter(path) { + Ok(fm) => { + stubs.insert(rel_path, serde_json::to_value(fm)?); + } + Err(e) => { + eprintln!("Warning: skipping {}: {}", rel_path, e); + } + } + } + + Ok(stubs) +} + /// Load atoms from an existing atoms.json file. fn load_atoms_from_file(atoms_path: &Path) -> Result> { if !atoms_path.exists() { @@ -450,15 +487,14 @@ impl ProbeIndex { let mut skipped_count = 0; for (file_path, entry) in stubs { - let (code_name, atom) = - match self.resolve_code_name_and_atom(entry, file_path, atoms) { - Some(r) => r, - None => { - skipped_count += 1; - result.insert(file_path.clone(), entry.clone()); - continue; - } - }; + let (code_name, atom) = match self.resolve_code_name_and_atom(entry, file_path, atoms) { + Some(r) => r, + None => { + skipped_count += 1; + result.insert(file_path.clone(), entry.clone()); + continue; + } + }; let enriched_entry = build_enriched_entry(&code_name, atom); result.insert(file_path.clone(), enriched_entry); diff --git a/src/commands/auth.rs b/src/commands/auth.rs index cca5d878..77b4c837 100644 --- a/src/commands/auth.rs +++ b/src/commands/auth.rs @@ -5,9 +5,8 @@ use crate::storage::{get_credential_storage, print_platform_help}; pub async fn handle_auth() -> Result<()> { println!("Please enter your Verilib API key:"); - - let key = prompt_password("API Key: ") - .context("Failed to read API key from input")?; + + let key = prompt_password("API Key: ").context("Failed to read API key from input")?; if 
key.trim().is_empty() { anyhow::bail!("API key cannot be empty"); @@ -16,12 +15,12 @@ pub async fn handle_auth() -> Result<()> { println!("Attempting to store API key..."); let entry = get_credential_storage()?; - - match entry.set_password(&key.trim()) { + + match entry.set_password(key.trim()) { Ok(()) => { println!("API key successfully stored."); println!("Your API key is securely stored in the system keyring."); - + match entry.get_password() { Ok(stored_key) => { if stored_key == key.trim() { @@ -42,6 +41,6 @@ pub async fn handle_auth() -> Result<()> { anyhow::bail!("Failed to store API key"); } } - + Ok(()) } diff --git a/src/commands/create.rs b/src/commands/create.rs index 8dab169a..56ad6d6f 100644 --- a/src/commands/create.rs +++ b/src/commands/create.rs @@ -2,10 +2,8 @@ //! //! Initialize structure files from source analysis using probe-verus. -use crate::structure::{ - run_command, write_frontmatter, CommandConfig, ExternalTool, -}; use crate::config::ProjectConfig; +use crate::structure::{run_command, write_frontmatter, CommandConfig, ExternalTool}; use anyhow::{bail, Context, Result}; use serde_json::{json, Value}; use std::collections::{HashMap, HashSet}; @@ -100,8 +98,12 @@ struct TrackedFunction { /// Read tracked functions CSV and return a HashMap. 
fn read_tracked_csv(csv_path: &Path) -> Result> { let mut results = HashMap::new(); - let mut reader = csv::Reader::from_path(csv_path) - .with_context(|| format!("Failed to read tracked functions from {}", csv_path.display()))?; + let mut reader = csv::Reader::from_path(csv_path).with_context(|| { + format!( + "Failed to read tracked functions from {}", + csv_path.display() + ) + })?; for result in reader.records() { let record = result?; @@ -286,7 +288,6 @@ mod tests { ); } - #[test] fn test_parse_empty_link() { assert_eq!(parse_tracked_link(""), None); diff --git a/src/commands/deploy.rs b/src/commands/deploy.rs index 20ed7d60..4a7c3233 100644 --- a/src/commands/deploy.rs +++ b/src/commands/deploy.rs @@ -1,20 +1,22 @@ +#![allow(dead_code)] // WIP: not yet wired into CLI — see https://github.com/Beneficial-AI-Foundation/verilib-cli/issues/36 + use anyhow::{Context, Result}; use dialoguer::Select; use regex::Regex; use reqwest::Client; use serde_json::Value; -use sha2::{Sha256, Digest}; +use sha2::{Digest, Sha256}; use std::collections::HashMap; use std::ffi::OsStr; use std::fs; use std::io::{self, Write}; use std::path::{Path, PathBuf}; -use crate::constants::{auth_required_msg, DEFAULT_BASE_URL}; +use super::types::{DeployNode, DeployResponse, VerifierVersionsResponse, LANGUAGES, TYPES}; use crate::commands::status::get_stored_api_key; -use crate::download::handle_api_error; use crate::config::{ProjectConfig, RepoConfig}; -use super::types::{DeployNode, DeployResponse, VerifierVersionsResponse, LANGUAGES, TYPES}; +use crate::constants::{auth_required_msg, DEFAULT_BASE_URL}; +use crate::download::handle_api_error; #[derive(Debug, Clone, Copy)] enum ChangeDecision { @@ -29,23 +31,25 @@ pub async fn handle_deploy(url: Option, debug: bool) -> Result<()> { println!("Debug mode: {}", debug); } - let api_key = get_stored_api_key() - .context(auth_required_msg())?; + let api_key = get_stored_api_key().context(auth_required_msg())?; let url_base = url.unwrap_or_else(|| 
DEFAULT_BASE_URL.to_string()); let repo_id = read_repo_id_from_config()?; - - let deploy_info = if repo_id.is_none() { - println!("New repository - collecting deployment information..."); - Some(collect_deploy_info(&url_base, &api_key, debug).await?) - } else { - println!("Updating existing repository (ID: {})...", repo_id.as_ref().unwrap()); - None + + let deploy_info = match &repo_id { + None => { + println!("New repository - collecting deployment information..."); + Some(collect_deploy_info(&url_base, &api_key, debug).await?) + } + Some(id) => { + println!("Updating existing repository (ID: {})...", id); + None + } }; println!("\nScanning .verilib directory..."); - + let verilib_path = PathBuf::from(".verilib"); if !verilib_path.exists() { anyhow::bail!("No .verilib directory found. Please run 'init' first."); @@ -53,16 +57,21 @@ pub async fn handle_deploy(url: Option, debug: bool) -> Result<()> { let mut decision = ChangeDecision::Ask; let mut has_changes = false; - let tree = build_tree(&verilib_path, &verilib_path, &mut decision, &mut has_changes)?; + let tree = build_tree( + &verilib_path, + &verilib_path, + &mut decision, + &mut has_changes, + )?; let layouts = build_layouts(&verilib_path, &verilib_path)?; - + if debug { let tree_json = serde_json::to_string_pretty(&tree) .context("Failed to serialize tree for debugging")?; fs::write(".verilib/debug_deploy_tree.json", &tree_json) .context("Failed to write debug tree file")?; println!("Debug: Tree saved to .verilib/debug_deploy_tree.json"); - + let layouts_json = serde_json::to_string_pretty(&layouts) .context("Failed to serialize layouts for debugging")?; fs::write(".verilib/debug_deploy_layouts.json", &layouts_json) @@ -74,21 +83,23 @@ pub async fn handle_deploy(url: Option, debug: bool) -> Result<()> { "tree": tree, "layouts": layouts, }); - + if has_changes { payload["has_changes"] = Value::Bool(true); } - if let Some((language_id, proof_id, verifierversion_id, summary, description, type_id)) = 
deploy_info { + if let Some((language_id, proof_id, verifierversion_id, summary, description, type_id)) = + deploy_info + { payload["language_id"] = Value::Number(language_id.into()); payload["proof_id"] = Value::Number(proof_id.into()); payload["summary"] = Value::String(summary); payload["type_id"] = Value::Number(type_id.into()); - + if let Some(desc) = description { payload["description"] = Value::String(desc); } - + if let Some(version_id) = verifierversion_id { payload["verifierversion_id"] = Value::Number(version_id.into()); } @@ -114,12 +125,12 @@ pub async fn handle_deploy(url: Option, debug: bool) -> Result<()> { .context("Failed to send deploy request")?; let status = response.status(); - + if !status.is_success() { let error_msg = handle_api_error(response).await?; anyhow::bail!(error_msg); } - + let response_text = response .text() .await @@ -128,15 +139,14 @@ pub async fn handle_deploy(url: Option, debug: bool) -> Result<()> { if debug { println!("Debug: API response: {}", response_text); } - - let deploy_response: DeployResponse = serde_json::from_str(&response_text) - .context("Failed to parse deploy response")?; - - save_config_from_response(&deploy_response, &url_base) - .context("Failed to save config file")?; + + let deploy_response: DeployResponse = + serde_json::from_str(&response_text).context("Failed to parse deploy response")?; + + save_config_from_response(&deploy_response, &url_base).context("Failed to save config file")?; println!("Deployment successful!"); - + Ok(()) } @@ -170,15 +180,20 @@ fn save_config_from_response(response_data: &DeployResponse, base_url: &str) -> } fn detect_language_in_path(search_path: &PathBuf, debug: bool) -> Option { - let full_path = std::fs::canonicalize(search_path) - .unwrap_or_else(|_| search_path.clone()); + let full_path = std::fs::canonicalize(search_path).unwrap_or_else(|_| search_path.clone()); if debug { - println!("Debug: Scanning for language detection in directory: {}", full_path.display()); + 
println!( + "Debug: Scanning for language detection in directory: {}", + full_path.display() + ); } - + for language in LANGUAGES { if debug { - println!("Debug: Checking for {} with extensions: {:?}", language.name, language.extensions); + println!( + "Debug: Checking for {} with extensions: {:?}", + language.name, language.extensions + ); } for ext in language.extensions { if find_files_with_extension(search_path, ext, debug) { @@ -189,7 +204,7 @@ fn detect_language_in_path(search_path: &PathBuf, debug: bool) -> Option { } } } - + if debug { println!("Debug: No matching language detected"); } @@ -201,23 +216,27 @@ fn find_files_with_extension(dir: &Path, extension: &str, debug: bool) -> bool { for entry in entries.flatten() { let path = entry.path(); let file_name = path.file_name().unwrap_or_default().to_string_lossy(); - + if file_name == "config.json" || file_name == "debug_response.json" { continue; } - + if path.is_dir() { let dir_name = file_name.to_string(); let ext_without_dot = extension.trim_start_matches('.'); - - if dir_name == format!("mod{}", extension) || - dir_name.ends_with(&format!(".{}", ext_without_dot)) { + + if dir_name == format!("mod{}", extension) + || dir_name.ends_with(&format!(".{}", ext_without_dot)) + { if debug { - println!("Debug: Found matching directory: {} with extension {}", dir_name, extension); + println!( + "Debug: Found matching directory: {} with extension {}", + dir_name, extension + ); } return true; } - + if find_files_with_extension(&path, extension, debug) { return true; } @@ -227,7 +246,10 @@ fn find_files_with_extension(dir: &Path, extension: &str, debug: bool) -> bool { let ext_without_dot = extension.trim_start_matches('.'); if file_ext_str == ext_without_dot { if debug { - println!("Debug: Found matching file: {} with extension {}", file_name, extension); + println!( + "Debug: Found matching file: {} with extension {}", + file_name, extension + ); } return true; } @@ -235,12 +257,13 @@ fn 
find_files_with_extension(dir: &Path, extension: &str, debug: bool) -> bool { } } } - + false } fn prompt_language(default_id: Option, prompt_text: &str) -> Result { - let items: Vec = LANGUAGES.iter() + let items: Vec = LANGUAGES + .iter() .map(|l| { if Some(l.id) == default_id { format!("{} (detected)", l.name) @@ -249,30 +272,35 @@ fn prompt_language(default_id: Option, prompt_text: &str) -> Result { } }) .collect(); - + let default_idx = if let Some(id) = default_id { LANGUAGES.iter().position(|l| l.id == id).unwrap_or(0) } else { 0 }; - + let selection = Select::new() .with_prompt(prompt_text) .items(&items) .default(default_idx) .interact() .context("Failed to get language selection")?; - + Ok(LANGUAGES[selection].id) } -async fn fetch_verifier_versions(proof_id: u32, base_url: &str, api_key: &str, debug: bool) -> Result> { +async fn fetch_verifier_versions( + proof_id: u32, + base_url: &str, + api_key: &str, + debug: bool, +) -> Result> { let endpoint = format!("{}/v2/verifier/versions/{}", base_url, proof_id); - + if debug { println!("Debug: Fetching verifier versions from: {}", endpoint); } - + let client = Client::new(); let response = client .get(&endpoint) @@ -281,11 +309,11 @@ async fn fetch_verifier_versions(proof_id: u32, base_url: &str, api_key: &str, d .send() .await .context("Failed to fetch verifier versions")?; - + if debug { println!("Debug: Response status: {}", response.status()); } - + if !response.status().is_success() { let error_msg = handle_api_error(response).await?; if debug { @@ -293,54 +321,56 @@ async fn fetch_verifier_versions(proof_id: u32, base_url: &str, api_key: &str, d } return Ok(None); } - + let response_text = response .text() .await .context("Failed to read response body")?; - + if debug { println!("Debug: Response body: {}", response_text); } - + let versions_response: VerifierVersionsResponse = serde_json::from_str(&response_text) .context("Failed to parse verifier versions response")?; - + if debug { println!("Debug: 
Found {} versions", versions_response.data.len()); } - + if versions_response.data.is_empty() { if debug { println!("Debug: No versions available"); } return Ok(None); } - - let items: Vec = versions_response.data.iter() + + let items: Vec = versions_response + .data + .iter() .map(|v| v.version.clone()) .collect(); - + let selection = Select::new() .with_prompt("Select Verifier Version") .items(&items) .default(0) .interact() .context("Failed to get version selection")?; - + Ok(Some(versions_response.data[selection].id)) } fn prompt_type() -> Result { let items: Vec<&str> = TYPES.iter().map(|(_, name)| *name).collect(); - + let selection = Select::new() .with_prompt("Select Type") .items(&items) .default(0) .interact() .context("Failed to get type selection")?; - + Ok(TYPES[selection].0) } @@ -349,26 +379,29 @@ fn prompt_summary() -> Result { println!("\nEnter summary (max 128 characters, required):"); print!("> "); io::stdout().flush()?; - + let mut input = String::new(); io::stdin().read_line(&mut input)?; let input = input.trim().to_string(); - + if input.is_empty() { println!("Summary cannot be empty. Please try again."); continue; } - + if input.chars().all(|c| c.is_whitespace()) { println!("Summary cannot contain only whitespace. Please try again."); continue; } - + if input.len() > 128 { - println!("Summary must be 128 characters or less (current: {}). Please try again.", input.len()); + println!( + "Summary must be 128 characters or less (current: {}). 
Please try again.", + input.len() + ); continue; } - + return Ok(input); } } @@ -377,11 +410,11 @@ fn prompt_description() -> Result> { println!("\nEnter description (optional, press Enter to skip):"); print!("> "); io::stdout().flush()?; - + let mut input = String::new(); io::stdin().read_line(&mut input)?; let input = input.trim().to_string(); - + if input.is_empty() { Ok(None) } else { @@ -389,31 +422,52 @@ fn prompt_description() -> Result> { } } -pub async fn collect_deploy_info(base_url: &str, api_key: &str, debug: bool) -> Result<(u32, u32, Option, String, Option, u32)> { +pub async fn collect_deploy_info( + base_url: &str, + api_key: &str, + debug: bool, +) -> Result<(u32, u32, Option, String, Option, u32)> { collect_deploy_info_with_path(base_url, api_key, &PathBuf::from(".verilib"), debug).await } -pub async fn collect_deploy_info_with_path(base_url: &str, api_key: &str, search_path: &PathBuf, debug: bool) -> Result<(u32, u32, Option, String, Option, u32)> { +pub async fn collect_deploy_info_with_path( + base_url: &str, + api_key: &str, + search_path: &PathBuf, + debug: bool, +) -> Result<(u32, u32, Option, String, Option, u32)> { let detected_language = detect_language_in_path(search_path, debug); - + let language_id = prompt_language(detected_language, "Select Language:")?; let proof_id = prompt_language(Some(language_id), "Select Proof Language:")?; - + let verifierversion_id = fetch_verifier_versions(proof_id, base_url, api_key, debug).await?; - + let summary = prompt_summary()?; let description = prompt_description()?; let type_id = prompt_type()?; - - Ok((language_id, proof_id, verifierversion_id, summary, description, type_id)) + + Ok(( + language_id, + proof_id, + verifierversion_id, + summary, + description, + type_id, + )) } -fn build_tree(base_path: &Path, current_path: &Path, decision: &mut ChangeDecision, has_changes: &mut bool) -> Result> { +fn build_tree( + base_path: &Path, + current_path: &Path, + decision: &mut ChangeDecision, + 
has_changes: &mut bool, +) -> Result> { let mut nodes = Vec::new(); - + let entries = fs::read_dir(current_path) .with_context(|| format!("Failed to read directory: {:?}", current_path))?; - + for entry in entries { let entry = entry?; let path = entry.path(); @@ -424,15 +478,16 @@ fn build_tree(base_path: &Path, current_path: &Path, decision: &mut ChangeDecisi if extension == Some(OsStr::new("json")) { continue; } - + if path.is_dir() { - let relative_path = path.strip_prefix(base_path) + let relative_path = path + .strip_prefix(base_path) .unwrap() .to_string_lossy() .to_string(); - + let children = build_tree(base_path, &path, decision, has_changes)?; - + nodes.push(DeployNode { identifier: relative_path, content: String::new(), @@ -450,49 +505,79 @@ fn build_tree(base_path: &Path, current_path: &Path, decision: &mut ChangeDecisi .with_context(|| format!("Failed to read file: {:?}", path))?; let regex_pattern = r"\[\d*\]\s-\s"; let re = Regex::new(regex_pattern).unwrap(); - let identifier_base = path.strip_prefix(base_path) + let identifier_base = path + .strip_prefix(base_path) .unwrap() .to_string_lossy() .to_string() .trim_end_matches(".atom.verilib") .to_string(); let identifier = re.replace(&identifier_base, "").to_string(); - - let meta_file_name = file_name_str.trim_end_matches(".atom.verilib").to_string() + ".meta.verilib"; + + let meta_file_name = + file_name_str.trim_end_matches(".atom.verilib").to_string() + ".meta.verilib"; let meta_path = path.parent().unwrap().join(meta_file_name); - - let (dependencies, code_name, status_id, stored_fingerprint, snippets_value, specified, disabled) = if meta_path.exists() { + + let ( + dependencies, + code_name, + status_id, + stored_fingerprint, + snippets_value, + specified, + disabled, + ) = if meta_path.exists() { let meta_content = fs::read_to_string(&meta_path)?; let meta_value: Value = serde_json::from_str(&meta_content)?; - + let deps = if let Some(deps) = meta_value.get("dependencies") { 
serde_json::from_value(deps.clone()).unwrap_or_default() } else { Vec::new() }; - + let name = if let Some(name) = meta_value.get("code_name") { name.as_str().unwrap_or_default().to_string() } else { String::new() }; - - let status = meta_value.get("status_id").and_then(|v| v.as_u64()).map(|v| v as u32); - let fingerprint = meta_value.get("fingerprint").and_then(|v| v.as_str()).map(|s| s.to_string()); + + let status = meta_value + .get("status_id") + .and_then(|v| v.as_u64()) + .map(|v| v as u32); + let fingerprint = meta_value + .get("fingerprint") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); let snippets = meta_value.get("snippets").cloned(); - let specified = meta_value.get("specified").and_then(|v| v.as_bool()).unwrap_or_default(); - let disabled = meta_value.get("disabled").and_then(|v| v.as_bool()).unwrap_or_default(); - - (deps, name, status, fingerprint, snippets, specified, disabled) + let specified = meta_value + .get("specified") + .and_then(|v| v.as_bool()) + .unwrap_or_default(); + let disabled = meta_value + .get("disabled") + .and_then(|v| v.as_bool()) + .unwrap_or_default(); + + ( + deps, + name, + status, + fingerprint, + snippets, + specified, + disabled, + ) } else { (Vec::new(), String::new(), None, None, None, false, false) }; - + let mut hasher = Sha256::new(); hasher.update(&content); let hash_result = hasher.finalize(); let current_fingerprint = format!("{:x}", hash_result); - + let (final_content, snippets) = if let Some(stored_fp) = stored_fingerprint { if stored_fp != current_fingerprint { let use_new_content = match *decision { @@ -501,19 +586,19 @@ fn build_tree(base_path: &Path, current_path: &Path, decision: &mut ChangeDecisi ChangeDecision::Ask => { println!("\nFile has been modified: {}", identifier); println!(" Current file differs from the stored version."); - + let options = vec![ "Yes - Deploy edited content (triggers re-snippetization for entire repository)", "No - Keep original snippets for this file", "No to all - 
Skip all edited files" ]; - + let selection = Select::new() .with_prompt("Would you like to deploy the edited content?") .items(&options) .default(0) .interact()?; - + match selection { 0 => { *decision = ChangeDecision::YesToAll; @@ -531,15 +616,13 @@ fn build_tree(base_path: &Path, current_path: &Path, decision: &mut ChangeDecisi if use_new_content { *has_changes = true; } - } else { - } - (content.clone(), snippets_value) + (content.clone(), snippets_value) } else { *has_changes = true; (content.clone(), None) }; - + nodes.push(DeployNode { identifier, content: final_content, @@ -548,45 +631,46 @@ fn build_tree(base_path: &Path, current_path: &Path, decision: &mut ChangeDecisi file_type: "file".to_string(), children: Vec::new(), status_id, - snippets, + snippets, specified, - disabled + disabled, }); } } - + Ok(nodes) } fn build_layouts(base_path: &Path, current_path: &Path) -> Result> { let mut layouts = HashMap::new(); - + let entries = fs::read_dir(current_path) .with_context(|| format!("Failed to read directory: {:?}", current_path))?; - + for entry in entries { let entry = entry?; let path = entry.path(); - + if path.is_dir() { let layout_file = path.join("layout.verilib"); - + if layout_file.exists() { let layout_content = fs::read_to_string(&layout_file)?; let layout_value: Value = serde_json::from_str(&layout_content)?; - - let relative_path = path.strip_prefix(base_path) + + let relative_path = path + .strip_prefix(base_path) .unwrap() .to_string_lossy() .to_string(); - + layouts.insert(relative_path, layout_value); } - + let child_layouts = build_layouts(base_path, &path)?; layouts.extend(child_layouts); } } - + Ok(layouts) } diff --git a/src/commands/init.rs b/src/commands/init.rs index 3566c106..d371e1eb 100644 --- a/src/commands/init.rs +++ b/src/commands/init.rs @@ -9,7 +9,7 @@ use std::process::Command; use crate::commands::deploy::collect_deploy_info_with_path; use crate::commands::status::get_stored_api_key; use 
crate::constants::{auth_required_msg, DEFAULT_BASE_URL}; -use crate::download::{handle_api_error}; +use crate::download::handle_api_error; use crate::structure::{create_gitignore, ExecutionMode}; #[derive(serde::Deserialize, Debug)] @@ -23,34 +23,32 @@ struct CreateRepoData { } pub async fn handle_init(id: Option, url: Option, debug: bool) -> Result<()> { - let api_key = get_stored_api_key() - .context(auth_required_msg())?; - + let api_key = get_stored_api_key().context(auth_required_msg())?; + let url_base = url.unwrap_or_else(|| DEFAULT_BASE_URL.to_string()); - + let repo_id = if let Some(repo_id) = id { println!("Initializing project with repository ID: {}", repo_id); repo_id } else { let git_url = prompt_git_url()?; - + println!("Creating new repository from git URL: {}", git_url); - + let repo_id = create_repo_from_git_url(&git_url, &url_base, &api_key, debug).await?; - + println!("Repository created successfully!"); println!("Repository ID: {}", repo_id); - + repo_id }; - + let execution_mode = prompt_execution_mode()?; - fs::create_dir_all(".verilib") - .context("Failed to create .verilib directory")?; - + fs::create_dir_all(".verilib").context("Failed to create .verilib directory")?; + save_config(&repo_id, &url_base, true, execution_mode)?; - + Ok(()) } @@ -72,10 +70,10 @@ fn prompt_execution_mode() -> Result { fn detect_git_url() -> Option { let output = Command::new("git") - .args(&["config", "--get", "remote.origin.url"]) + .args(["config", "--get", "remote.origin.url"]) .output() .ok()?; - + if output.status.success() { let url = String::from_utf8(output.stdout).ok()?; let url = url.trim().to_string(); @@ -84,7 +82,7 @@ fn detect_git_url() -> Option { // Get current branch if let Ok(branch_output) = Command::new("git") - .args(&["rev-parse", "--abbrev-ref", "HEAD"]) + .args(["rev-parse", "--abbrev-ref", "HEAD"]) .output() { if branch_output.status.success() { @@ -101,7 +99,7 @@ fn detect_git_url() -> Option { return Some(normalized); } } - + None } @@ 
-121,12 +119,12 @@ fn normalize_git_url(url: &str) -> String { } } } - + // If already HTTPS, just remove .git suffix if present if url.starts_with("https://") || url.starts_with("http://") { return url.strip_suffix(".git").unwrap_or(url).to_string(); } - + // Return as-is if we can't parse it url.to_string() } @@ -136,11 +134,13 @@ fn prompt_git_url() -> Result { println!("• Full repository: https://github.com/user/repo"); println!("• Specific branch: https://github.com/user/repo@branch-name"); println!("• Folder only: https://github.com/user/repo/tree/main/folder-name"); - println!("• Folder from branch: https://github.com/user/repo/tree/main/folder-name@branch-name"); + println!( + "• Folder from branch: https://github.com/user/repo/tree/main/folder-name@branch-name" + ); println!(); - + let detected_url = detect_git_url(); - + let git_url = if let Some(default_url) = detected_url { Input::::new() .with_prompt("Enter repository URL") @@ -153,22 +153,27 @@ fn prompt_git_url() -> Result { .interact_text() .context("Failed to get git URL input")? 
}; - + let git_url = git_url.trim().to_string(); - + if git_url.is_empty() { anyhow::bail!("Repository URL cannot be empty"); } - + Ok(git_url) } -async fn create_repo_from_git_url(git_url: &str, base_url: &str, api_key: &str, debug: bool) -> Result { +async fn create_repo_from_git_url( + git_url: &str, + base_url: &str, + api_key: &str, + debug: bool, +) -> Result { println!("\nCollecting repository information..."); - - let (language_id, proof_id, verifierversion_id, summary, description, type_id) = + + let (language_id, proof_id, verifierversion_id, summary, description, type_id) = collect_deploy_info_with_path(base_url, api_key, &PathBuf::from("."), debug).await?; - + let mut payload = serde_json::json!({ "url": git_url, "language_id": language_id, @@ -176,17 +181,17 @@ async fn create_repo_from_git_url(git_url: &str, base_url: &str, api_key: &str, "summary": summary, "type_id": type_id, }); - + if let Some(desc) = description { payload["description"] = Value::String(desc); } - + if let Some(version_id) = verifierversion_id { payload["verifierversion_id"] = Value::Number(version_id.into()); } - + let endpoint = format!("{}/v2/repo/create", base_url); - + let client = Client::new(); let response = client .post(&endpoint) @@ -196,22 +201,22 @@ async fn create_repo_from_git_url(git_url: &str, base_url: &str, api_key: &str, .send() .await .context("Failed to send create repository request")?; - + let status = response.status(); - + if !status.is_success() { let error_msg = handle_api_error(response).await?; anyhow::bail!(error_msg); } - + let response_text = response .text() .await .context("Failed to read response body")?; - + let create_response: CreateRepoResponse = serde_json::from_str(&response_text) .context("Failed to parse create repository response")?; - + Ok(create_response.data.id.to_string()) } diff --git a/src/commands/mod.rs b/src/commands/mod.rs index a6ff5775..48f34e52 100644 --- a/src/commands/mod.rs +++ b/src/commands/mod.rs @@ -1,20 +1,20 @@ pub 
mod api; pub mod atomize; pub mod auth; +pub mod create; pub mod deploy; pub mod init; pub mod reclone; pub mod specify; pub mod status; -pub mod create; -pub mod verify; pub mod types; +pub mod verify; pub use atomize::handle_atomize; pub use auth::handle_auth; +pub use create::handle_create; pub use init::handle_init; pub use reclone::handle_reclone; pub use specify::handle_specify; pub use status::handle_status; -pub use create::handle_create; pub use verify::handle_verify; diff --git a/src/commands/reclone.rs b/src/commands/reclone.rs index a3793591..72a762e6 100644 --- a/src/commands/reclone.rs +++ b/src/commands/reclone.rs @@ -4,9 +4,9 @@ use serde_json::Value; use std::path::PathBuf; use std::process::Command; -use crate::constants::{auth_required_msg, init_required_msg}; use crate::commands::status::get_stored_api_key; use crate::config::ProjectConfig; +use crate::constants::{auth_required_msg, init_required_msg}; use crate::download::handle_api_error; pub async fn handle_reclone(debug: bool) -> Result<()> { @@ -15,29 +15,30 @@ pub async fn handle_reclone(debug: bool) -> Result<()> { } else { println!("Starting reclone process..."); } - + // Check if authentication exists - get_stored_api_key() - .context(auth_required_msg())?; - + get_stored_api_key().context(auth_required_msg())?; + let project_root = PathBuf::from("."); let config = ProjectConfig::load(&project_root)?; - let repo = config.repo.ok_or_else(|| anyhow::anyhow!(init_required_msg()))?; + let repo = config + .repo + .ok_or_else(|| anyhow::anyhow!(init_required_msg()))?; let repo_id = repo.id; let url_base = repo.url; - + println!("Found repository ID: {}", repo_id); if debug { println!("Debug: Using URL: {}", url_base); } - + // Check if git is available if !is_git_available() { anyhow::bail!("Git is not found. Please install Git to use this command"); } - + // Check for uncommitted changes if has_uncommitted_changes()? 
{ println!("Warning: You have uncommitted changes in your git repository."); @@ -51,13 +52,13 @@ pub async fn handle_reclone(debug: bool) -> Result<()> { println!("Please push your changes before running reclone."); anyhow::bail!("Unpushed commits detected"); } - + // Perform the reclone API call let api_key = get_stored_api_key()?; let endpoint = format!("{}/v2/repo/reclone/{}", url_base, repo_id); - + println!("Calling reclone endpoint: {}", endpoint); - + let client = Client::new(); let response = client .post(&endpoint) @@ -66,51 +67,52 @@ pub async fn handle_reclone(debug: bool) -> Result<()> { .send() .await .context("Failed to send reclone request")?; - + let status = response.status(); - + if debug { println!("Debug: Response status: {}", status); } - + if !status.is_success() { let error_msg = handle_api_error(response).await?; anyhow::bail!(error_msg); } - + let response_text = response .text() .await .context("Failed to read response body")?; - + if debug { println!("Debug: Raw response body:"); println!("{}", response_text); } - - let json_response: Value = serde_json::from_str(&response_text) - .context("Failed to parse JSON response")?; - + + let json_response: Value = + serde_json::from_str(&response_text).context("Failed to parse JSON response")?; + if debug { println!("Debug: Parsed JSON response:"); - println!("{}", serde_json::to_string_pretty(&json_response).unwrap_or_else(|_| "Failed to pretty print".to_string())); + println!( + "{}", + serde_json::to_string_pretty(&json_response) + .unwrap_or_else(|_| "Failed to pretty print".to_string()) + ); } - + if let Some(status) = json_response.get("status") { if status == "success" { println!("Repository successfully updated!"); return Ok(()); } } - + anyhow::bail!("Unexpected response format from reclone API"); } fn is_git_available() -> bool { - Command::new("git") - .arg("--version") - .output() - .is_ok() + Command::new("git").arg("--version").output().is_ok() } fn has_uncommitted_changes() -> 
Result { @@ -118,11 +120,11 @@ fn has_uncommitted_changes() -> Result { .args(["status", "--porcelain"]) .output() .context("Failed to run git status")?; - + if !output.status.success() { anyhow::bail!("Git status command failed. Make sure you're in a git repository"); } - + // If git status --porcelain returns any output, there are uncommitted changes Ok(!output.stdout.is_empty()) } @@ -132,26 +134,26 @@ fn has_unpushed_commits() -> Result { .args(["rev-list", "--count", "@{u}..HEAD"]) .output() .context("Failed to run git rev-list to check pushed status")?; - + if !output.status.success() { let stderr = String::from_utf8_lossy(&output.stderr); if stderr.contains("no upstream configured") { - // Fallback: Check if the current HEAD commit exists on any remote branch - let branch_output = Command::new("git") - .args(["branch", "-r", "--contains", "HEAD"]) - .output() - .context("Failed to check remote branches")?; - - // If output has content, commit is on some remote (safe) - return Ok(branch_output.stdout.is_empty()); + // Fallback: Check if the current HEAD commit exists on any remote branch + let branch_output = Command::new("git") + .args(["branch", "-r", "--contains", "HEAD"]) + .output() + .context("Failed to check remote branches")?; + + // If output has content, commit is on some remote (safe) + return Ok(branch_output.stdout.is_empty()); } anyhow::bail!("Git command failed: {}", stderr.trim()); } - + let count = String::from_utf8_lossy(&output.stdout) .trim() .parse::() .unwrap_or(0); - + Ok(count > 0) } diff --git a/src/commands/specify.rs b/src/commands/specify.rs index 991e69eb..0baa3185 100644 --- a/src/commands/specify.rs +++ b/src/commands/specify.rs @@ -4,8 +4,8 @@ use crate::config::ProjectConfig; use crate::structure::{ - create_cert, display_menu, get_existing_certs, run_command, - CommandConfig, ExternalTool, ATOMIZE_INTERMEDIATE_FILES, cleanup_intermediate_files, + cleanup_intermediate_files, create_cert, display_menu, get_existing_certs, 
run_command, + CommandConfig, ExternalTool, ATOMIZE_INTERMEDIATE_FILES, }; use anyhow::{bail, Context, Result}; use serde_json::Value; @@ -43,12 +43,7 @@ pub async fn handle_specify(project_root: PathBuf, no_probe: bool, check_only: b let specs_data = if no_probe { load_specs_from_file(&specs_path)? } else { - run_probe_specify( - &project_root, - &specs_path, - &atoms_path, - &cmd_config, - )? + run_probe_specify(&project_root, &specs_path, &atoms_path, &cmd_config)? }; // Enrich stubs with spec-text (only for functions where specified=true) @@ -65,11 +60,7 @@ pub async fn handle_specify(project_root: PathBuf, no_probe: bool, check_only: b } // Display menu and create certs for selected functions - let newly_certified = collect_certifications( - &uncertified, - &certs_dir, - auto_validate - )?; + let newly_certified = collect_certifications(&uncertified, &certs_dir, auto_validate)?; // Update specified status based on all certified functions let all_certified: HashSet = existing_certs.union(&newly_certified).cloned().collect(); @@ -133,10 +124,7 @@ fn find_uncertified_functions( let uncertified: HashMap = stubs_with_specs .into_iter() .filter(|(_, stub)| { - let code_name = stub - .get("code-name") - .and_then(|v| v.as_str()) - .unwrap_or(""); + let code_name = stub.get("code-name").and_then(|v| v.as_str()).unwrap_or(""); !existing_certs.contains(code_name) }) .collect(); @@ -221,10 +209,7 @@ fn collect_certifications( for idx in &selected_indices { let (_stub_path, stub) = &uncertified_list[*idx]; - let code_name = stub - .get("code-name") - .and_then(|v| v.as_str()) - .unwrap_or(""); + let code_name = stub.get("code-name").and_then(|v| v.as_str()).unwrap_or(""); newly_certified.insert(code_name.to_string()); let cert_path = create_cert(certs_dir, code_name)?; println!( @@ -367,10 +352,7 @@ fn incorporate_spec_text( let mut count = 0; for stub in stubs_data.values_mut() { if let Some(obj) = stub.as_object_mut() { - let code_name = obj - .get("code-name") - 
.and_then(|v| v.as_str()) - .unwrap_or(""); + let code_name = obj.get("code-name").and_then(|v| v.as_str()).unwrap_or(""); if let Some(spec_info) = specs_data.get(code_name) { // Only add spec-text if specified is true diff --git a/src/commands/status.rs b/src/commands/status.rs index 253b51ef..13b9d0de 100644 --- a/src/commands/status.rs +++ b/src/commands/status.rs @@ -4,7 +4,7 @@ use crate::storage::{get_credential_storage, get_platform_info}; pub async fn handle_status() -> Result<()> { let platform_info = get_platform_info(); - + match get_stored_api_key() { Ok(key) => { let masked_key = format!("{}***", if key.len() > 4 { &key[..4] } else { &key }); @@ -25,7 +25,8 @@ pub async fn handle_status() -> Result<()> { pub fn get_stored_api_key() -> Result { let entry = get_credential_storage()?; - - entry.get_password() + + entry + .get_password() .context("Failed to retrieve API key from storage") } diff --git a/src/commands/types.rs b/src/commands/types.rs index 9967478f..bc515483 100644 --- a/src/commands/types.rs +++ b/src/commands/types.rs @@ -1,3 +1,5 @@ +#![allow(dead_code)] // WIP: deploy types not yet wired into CLI — see https://github.com/Beneficial-AI-Foundation/verilib-cli/issues/36 + use serde::{Deserialize, Serialize}; #[derive(Debug)] @@ -8,16 +10,56 @@ pub struct Language { } pub const LANGUAGES: &[Language] = &[ - Language { id: 1, name: "Dafny", extensions: &[".dfy"] }, - Language { id: 2, name: "Lean", extensions: &[".lean"] }, - Language { id: 3, name: "Rocq", extensions: &[".v"] }, - Language { id: 4, name: "Isabelle", extensions: &[".thy"] }, - Language { id: 5, name: "Metamath", extensions: &[".mm"] }, - Language { id: 6, name: "Rust", extensions: &[".rs"] }, - Language { id: 7, name: "RefinedC", extensions: &[".c"] }, - Language { id: 8, name: "Python", extensions: &[".py"] }, - Language { id: 9, name: "Kani", extensions: &[".rs"] }, - Language { id: 10, name: "Verus", extensions: &[".rs"] }, + Language { + id: 1, + name: "Dafny", + 
extensions: &[".dfy"], + }, + Language { + id: 2, + name: "Lean", + extensions: &[".lean"], + }, + Language { + id: 3, + name: "Rocq", + extensions: &[".v"], + }, + Language { + id: 4, + name: "Isabelle", + extensions: &[".thy"], + }, + Language { + id: 5, + name: "Metamath", + extensions: &[".mm"], + }, + Language { + id: 6, + name: "Rust", + extensions: &[".rs"], + }, + Language { + id: 7, + name: "RefinedC", + extensions: &[".c"], + }, + Language { + id: 8, + name: "Python", + extensions: &[".py"], + }, + Language { + id: 9, + name: "Kani", + extensions: &[".rs"], + }, + Language { + id: 10, + name: "Verus", + extensions: &[".rs"], + }, ]; pub const TYPES: &[(u32, &str)] = &[ diff --git a/src/commands/verify.rs b/src/commands/verify.rs index 25d17d60..831ee64a 100644 --- a/src/commands/verify.rs +++ b/src/commands/verify.rs @@ -4,8 +4,8 @@ use crate::config::ProjectConfig; use crate::structure::{ - cleanup_intermediate_files, get_display_name, run_command, - CommandConfig, ExternalTool, VERIFY_INTERMEDIATE_FILES, + cleanup_intermediate_files, get_display_name, run_command, CommandConfig, ExternalTool, + VERIFY_INTERMEDIATE_FILES, }; use anyhow::{bail, Context, Result}; use serde_json::Value; diff --git a/src/config.rs b/src/config.rs index 4a2c6a0f..5bff05f3 100644 --- a/src/config.rs +++ b/src/config.rs @@ -25,16 +25,16 @@ pub struct ProjectConfig { #[serde(skip_serializing_if = "Option::is_none")] pub repo: Option, - + #[serde(rename = "structure-root", skip_serializing_if = "Option::is_none")] pub structure_root: Option, - + #[serde(default, rename = "execution-mode")] pub execution_mode: ExecutionMode, - + #[serde(default = "default_docker_image", rename = "docker-image")] pub docker_image: String, - + #[serde(default, rename = "auto-validate-specs")] pub auto_validate_specs: bool, } @@ -111,9 +111,7 @@ impl ProjectConfig { pub fn structure_root_path(&self) -> Result { let root = self.structure_root.as_deref().ok_or_else(|| { - anyhow::anyhow!( - "No 
'structure-root' in config.json. Run 'verilib-cli create' first." - ) + anyhow::anyhow!("No 'structure-root' in config.json. Run 'verilib-cli create' first.") })?; Ok(self.project_root.join(root)) } @@ -125,27 +123,23 @@ impl ProjectConfig { return Ok(Self::default()); } - let content = std::fs::read_to_string(&config_path) - .context("Failed to read config.json")?; + let content = + std::fs::read_to_string(&config_path).context("Failed to read config.json")?; - let config: Self = serde_json::from_str(&content) - .context("Failed to parse config.json")?; + let config: Self = serde_json::from_str(&content).context("Failed to parse config.json")?; Ok(config) } pub fn save(&self, project_root: &Path) -> Result { let verilib_path = project_root.join(".verilib"); - std::fs::create_dir_all(&verilib_path) - .context("Failed to create .verilib directory")?; + std::fs::create_dir_all(&verilib_path).context("Failed to create .verilib directory")?; let config_path = verilib_path.join("config.json"); - let content = serde_json::to_string_pretty(self) - .context("Failed to serialize config")?; - - std::fs::write(&config_path, content) - .context("Failed to write config.json")?; + let content = serde_json::to_string_pretty(self).context("Failed to serialize config")?; + + std::fs::write(&config_path, content).context("Failed to write config.json")?; Ok(config_path) } diff --git a/src/constants.rs b/src/constants.rs index dfccc2c5..59cf6536 100644 --- a/src/constants.rs +++ b/src/constants.rs @@ -9,7 +9,10 @@ pub fn auth_required_msg() -> String { } pub fn init_required_msg() -> String { - format!("Project not initialized. Please run '{} init ' first", CLI_NAME) + format!( + "Project not initialized. 
Please run '{} init ' first", + CLI_NAME + ) } pub const DEFAULT_DOCKER_IMAGE: &str = "ghcr.io/beneficial-ai-foundation/verilib-cli:latest"; @@ -22,6 +25,7 @@ pub const PROBE_VERUS_TESTED_MAX_VERSION: &str = "<2.0.0"; pub const ATOMIZE_INTERMEDIATE_FILES: &[&str] = &["data/index.scip", "data/index.scip.json"]; /// Common intermediate files generated by the verify command. -pub const VERIFY_INTERMEDIATE_FILES: &[&str] = - &["data/verification_config.json", "data/verification_output.txt"]; - +pub const VERIFY_INTERMEDIATE_FILES: &[&str] = &[ + "data/verification_config.json", + "data/verification_output.txt", +]; diff --git a/src/download/client.rs b/src/download/client.rs index 4b7dfe8b..a2d3606f 100644 --- a/src/download/client.rs +++ b/src/download/client.rs @@ -1,3 +1,5 @@ +#![allow(dead_code)] // WIP: not yet wired into CLI — see https://github.com/Beneficial-AI-Foundation/verilib-cli/issues/36 + use anyhow::{Context, Result}; use reqwest::Client; use std::fs; @@ -5,8 +7,8 @@ use std::io::{self, Write}; use std::time::Duration; use tokio::time::sleep; -use super::types::{DownloadResponse, AtomizationStatusResponse}; use super::error::handle_api_error; +use super::types::{AtomizationStatusResponse, DownloadResponse}; pub async fn download_repo( repo_id: &str, @@ -15,7 +17,7 @@ pub async fn download_repo( debug: bool, ) -> Result { let endpoint = format!("{}/v2/repo/download/{}", base_url, repo_id); - + let client = Client::new(); let response = client .get(&endpoint) @@ -24,17 +26,17 @@ pub async fn download_repo( .send() .await .context("Failed to send request to API")?; - + if !response.status().is_success() { let error_msg = handle_api_error(response).await?; anyhow::bail!(error_msg); } - + let response_text = response .text() .await .context("Failed to read response body")?; - + if debug { fs::create_dir_all(".verilib") .context("Failed to create .verilib directory for debug output")?; @@ -42,30 +44,26 @@ pub async fn download_repo( .context("Failed to write 
debug response file")?; println!("Debug: API response saved to .verilib/debug_response.json"); } - - let download_data: DownloadResponse = serde_json::from_str(&response_text) - .context("Failed to parse JSON response")?; - + + let download_data: DownloadResponse = + serde_json::from_str(&response_text).context("Failed to parse JSON response")?; + Ok(download_data) } -pub async fn wait_for_atomization( - repo_id: &str, - base_url: &str, - api_key: &str, -) -> Result<()> { +pub async fn wait_for_atomization(repo_id: &str, base_url: &str, api_key: &str) -> Result<()> { let endpoint = format!("{}/api/atomization-status?id={}", base_url, repo_id); let client = Client::new(); - + print!("Waiting for atomization"); io::stdout().flush().unwrap(); - + loop { sleep(Duration::from_secs(2)).await; - + print!("."); io::stdout().flush().unwrap(); - + let response = match client .get(&endpoint) .header("Authorization", format!("ApiKey {}", api_key)) @@ -78,30 +76,31 @@ pub async fn wait_for_atomization( continue; } }; - + if !response.status().is_success() { continue; } - + let response_text = match response.text().await { Ok(text) => text, Err(_) => { continue; } }; - - let status_response: AtomizationStatusResponse = match serde_json::from_str(&response_text) { + + let status_response: AtomizationStatusResponse = match serde_json::from_str(&response_text) + { Ok(data) => data, Err(_) => { continue; } }; - + if status_response.status_id == "2" { println!(); break; } } - + Ok(()) } diff --git a/src/download/error.rs b/src/download/error.rs index 73d8d115..2a20d56b 100644 --- a/src/download/error.rs +++ b/src/download/error.rs @@ -16,25 +16,30 @@ struct ApiErrorData { pub async fn handle_api_error(response: Response) -> Result { let status = response.status(); - + let response_text = match response.text().await { Ok(text) => text, Err(_) => return Ok(format!("API request failed with status: {}", status)), }; - + if let Ok(error_response) = serde_json::from_str::(&response_text) { 
if error_response.error { return Ok(format!( "API error ({}): {}", - error_response.data.code, - error_response.data.message + error_response.data.code, error_response.data.message )); } } - + if !response_text.is_empty() { - Ok(format!("API request failed with status: {} - {}", status, response_text)) + Ok(format!( + "API request failed with status: {} - {}", + status, response_text + )) } else { - Ok(format!("API request failed with status: {} - Unable to read error response", status)) + Ok(format!( + "API request failed with status: {} - Unable to read error response", + status + )) } } diff --git a/src/download/mod.rs b/src/download/mod.rs index 1ee1850a..8fb99477 100644 --- a/src/download/mod.rs +++ b/src/download/mod.rs @@ -1,5 +1,5 @@ -mod types; mod client; mod error; +mod types; pub use error::handle_api_error; diff --git a/src/download/types.rs b/src/download/types.rs index f74018a6..46cd1366 100644 --- a/src/download/types.rs +++ b/src/download/types.rs @@ -1,3 +1,5 @@ +#![allow(dead_code)] // WIP: not yet wired into CLI — see https://github.com/Beneficial-AI-Foundation/verilib-cli/issues/36 + use serde::{Deserialize, Deserializer, Serialize}; use std::collections::HashMap; @@ -59,13 +61,15 @@ where { use serde::de::Error; use serde_json::Value; - + let value = Value::deserialize(deserializer)?; - + match value { Value::Array(arr) if arr.is_empty() => Ok(HashMap::new()), Value::Object(_) => serde_json::from_value(value).map_err(Error::custom), - _ => Err(Error::custom("expected an object or empty array for layouts")), + _ => Err(Error::custom( + "expected an object or empty array for layouts", + )), } } diff --git a/src/executor.rs b/src/executor.rs index 4cd25e53..76f9cc7c 100644 --- a/src/executor.rs +++ b/src/executor.rs @@ -1,5 +1,7 @@ +use crate::constants::{ + DEFAULT_DOCKER_IMAGE, PROBE_VERUS_MIN_VERSION, PROBE_VERUS_TESTED_MAX_VERSION, +}; use anyhow::{bail, Context, Result}; -use crate::constants::{DEFAULT_DOCKER_IMAGE, PROBE_VERUS_MIN_VERSION, 
PROBE_VERUS_TESTED_MAX_VERSION}; use semver::{Version, VersionReq}; use serde::{Deserialize, Serialize}; use std::path::Path; @@ -7,7 +9,6 @@ use std::process::{Command, Output}; pub const PROBE_REPO_URL: &str = "https://github.com/Beneficial-AI-Foundation/probe-verus"; - #[derive(Debug, Clone, PartialEq, Eq)] pub enum ExternalTool { /// The `probe-verus` CLI tool. @@ -22,19 +23,14 @@ impl ExternalTool { } } -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)] #[serde(rename_all = "lowercase")] pub enum ExecutionMode { + #[default] Local, Docker, } -impl Default for ExecutionMode { - fn default() -> Self { - Self::Local - } -} - #[derive(Debug, Clone, Serialize, Deserialize)] pub struct CommandConfig { #[serde(default)] @@ -114,7 +110,10 @@ fn check_probe_verus_version() -> Result<()> { .expect("PROBE_VERUS_TESTED_MAX_VERSION is a valid semver requirement"); if !min_req.matches(&version) { - eprintln!("Error: probe-verus {} is too old for this version of verilib-cli.", version); + eprintln!( + "Error: probe-verus {} is too old for this version of verilib-cli.", + version + ); eprintln!(" Minimum required: {}", PROBE_VERUS_MIN_VERSION); eprintln!(" Installed: {}", version); eprintln!(); @@ -122,7 +121,11 @@ fn check_probe_verus_version() -> Result<()> { eprintln!(" git clone {}", PROBE_REPO_URL); eprintln!(" cd probe-verus"); eprintln!(" cargo install --path ."); - bail!("probe-verus {} is below the minimum required version ({})", version, PROBE_VERUS_MIN_VERSION); + bail!( + "probe-verus {} is below the minimum required version ({})", + version, + PROBE_VERUS_MIN_VERSION + ); } if !tested_max_req.matches(&version) { @@ -131,7 +134,10 @@ fn check_probe_verus_version() -> Result<()> { version, PROBE_VERUS_TESTED_MAX_VERSION ); eprintln!(" It may work, but you could encounter unexpected behaviour."); - eprintln!(" Consider filing an issue at {} if you hit problems.", PROBE_REPO_URL); + 
eprintln!( + " Consider filing an issue at {} if you hit problems.", + PROBE_REPO_URL + ); } Ok(()) @@ -167,7 +173,7 @@ fn run_local(program: &str, args: &[&str], cwd: Option<&Path>) -> Result fn ensure_image_pulled(image: &str) -> Result<()> { let status = Command::new("docker") - .args(&["image", "inspect", image]) + .args(["image", "inspect", image]) .stdout(std::process::Stdio::null()) .stderr(std::process::Stdio::null()) .status(); @@ -181,7 +187,7 @@ fn ensure_image_pulled(image: &str) -> Result<()> { println!("Docker image {} not found locally. Pulling...", image); let status = Command::new("docker") - .args(&["pull", "--platform", "linux/amd64", image]) + .args(["pull", "--platform", "linux/amd64", image]) .status() .context(format!("Failed to pull docker image {}", image))?; @@ -192,17 +198,12 @@ fn ensure_image_pulled(image: &str) -> Result<()> { Ok(()) } -fn run_docker( - program: &str, - args: &[&str], - cwd: Option<&Path>, - image: &str, -) -> Result { +fn run_docker(program: &str, args: &[&str], cwd: Option<&Path>, image: &str) -> Result { ensure_image_pulled(image)?; - let host_cwd = cwd - .map(|p| p.to_path_buf()) - .unwrap_or_else(|| std::env::current_dir().unwrap_or_else(|_| Path::new(".").to_path_buf())); + let host_cwd = cwd.map(|p| p.to_path_buf()).unwrap_or_else(|| { + std::env::current_dir().unwrap_or_else(|_| Path::new(".").to_path_buf()) + }); let host_cwd_str = host_cwd.to_string_lossy(); @@ -219,9 +220,12 @@ fn run_docker( let mut docker_args = vec![ "run", "--rm", - "--platform", "linux/amd64", - "--entrypoint", program, - "-u", &user_arg, + "--platform", + "linux/amd64", + "--entrypoint", + program, + "-u", + &user_arg, "-v", ]; @@ -229,10 +233,13 @@ fn run_docker( docker_args.push(&mount_arg); docker_args.extend_from_slice(&[ - "--tmpfs", "/tmp", - "--tmpfs", "/home/tooluser/.cache", + "--tmpfs", + "/tmp", + "--tmpfs", + "/home/tooluser/.cache", "--security-opt=no-new-privileges", - "-w", "/workspace", + "-w", + "/workspace", image, 
]); diff --git a/src/main.rs b/src/main.rs index 6fa06d78..51af3e0a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -69,7 +69,14 @@ async fn main() -> Result<()> { no_probe, check_only, } => { - handle_verify(project_root, package, verify_only_module, no_probe, check_only).await?; + handle_verify( + project_root, + package, + verify_only_module, + no_probe, + check_only, + ) + .await?; } } diff --git a/src/storage/factory.rs b/src/storage/factory.rs index 1ca80423..77471f73 100644 --- a/src/storage/factory.rs +++ b/src/storage/factory.rs @@ -1,6 +1,6 @@ -use anyhow::Result; -use crate::storage::types::{CredentialStorage, StorageType}; use crate::storage::file::FileStorage; +use crate::storage::types::{CredentialStorage, StorageType}; +use anyhow::Result; #[cfg(not(target_os = "linux"))] use crate::storage::keyring::KeyringStorage; @@ -20,7 +20,7 @@ impl CredentialStorageFactory { { Ok(Box::new(KeyringStorage::new()?)) } - + #[cfg(target_os = "linux")] { anyhow::bail!( diff --git a/src/storage/file.rs b/src/storage/file.rs index 268d38ba..35a360cd 100644 --- a/src/storage/file.rs +++ b/src/storage/file.rs @@ -16,22 +16,19 @@ pub struct FileStorage { impl FileStorage { pub fn new() -> Result { - let home_dir = dirs::home_dir() - .context("Failed to get home directory")?; + let home_dir = dirs::home_dir().context("Failed to get home directory")?; let file_path = home_dir.join(FILE_NAME); Ok(Self { file_path }) } fn ensure_secure_file(&self) -> Result<()> { if !self.file_path.exists() { - File::create(&self.file_path) - .context("Failed to create credentials file")?; + File::create(&self.file_path).context("Failed to create credentials file")?; } #[cfg(unix)] { - let metadata = fs::metadata(&self.file_path) - .context("Failed to read file metadata")?; + let metadata = fs::metadata(&self.file_path).context("Failed to read file metadata")?; let mut permissions = metadata.permissions(); permissions.set_mode(0o600); fs::set_permissions(&self.file_path, permissions) @@ -63,8 
+60,7 @@ impl CredentialStorage for FileStorage { anyhow::bail!("No credentials file found"); } - let mut file = File::open(&self.file_path) - .context("Failed to open credentials file")?; + let mut file = File::open(&self.file_path).context("Failed to open credentials file")?; let mut password = String::new(); file.read_to_string(&mut password) @@ -79,8 +75,7 @@ impl CredentialStorage for FileStorage { fn delete_password(&self) -> Result<()> { if self.file_path.exists() { - fs::remove_file(&self.file_path) - .context("Failed to delete credentials file")?; + fs::remove_file(&self.file_path).context("Failed to delete credentials file")?; } Ok(()) } diff --git a/src/storage/keyring.rs b/src/storage/keyring.rs index 602f0fd3..8ddeb126 100644 --- a/src/storage/keyring.rs +++ b/src/storage/keyring.rs @@ -1,5 +1,5 @@ -use anyhow::{Context, Result}; use crate::storage::types::CredentialStorage; +use anyhow::{Context, Result}; const SERVICE_NAME: &str = "verilib"; @@ -12,8 +12,8 @@ pub struct KeyringStorage { impl KeyringStorage { pub fn new() -> Result { let user = whoami::username(); - let entry = keyring::Entry::new(SERVICE_NAME, &user) - .context("Failed to create keyring entry")?; + let entry = + keyring::Entry::new(SERVICE_NAME, &user).context("Failed to create keyring entry")?; Ok(Self { entry }) } } diff --git a/src/storage/mod.rs b/src/storage/mod.rs index 6cb0b4ea..c6b6521a 100644 --- a/src/storage/mod.rs +++ b/src/storage/mod.rs @@ -1,12 +1,12 @@ -mod types; -mod file; mod factory; +mod file; +mod types; #[cfg(not(target_os = "linux"))] mod keyring; -pub use types::{CredentialStorage, StorageType}; pub use factory::CredentialStorageFactory; +pub use types::{CredentialStorage, StorageType}; use anyhow::Result; @@ -16,22 +16,22 @@ pub fn get_credential_storage() -> Result> { pub fn get_platform_info() -> String { let storage_type = StorageType::from_env(); - + let base_info = if storage_type.should_use_file_storage() { "Secure file storage 
(~/.verilib_credentials)" } else { #[cfg(target_os = "macos")] let platform = "macOS Keychain (apple-native)"; - + #[cfg(target_os = "windows")] let platform = "Windows Credential Manager (windows-native)"; - + #[cfg(not(any(target_os = "macos", target_os = "windows")))] let platform = "Generic keyring backend"; - + platform }; - + match storage_type { StorageType::Auto => base_info.to_string(), StorageType::File => format!("{} (forced via VERILIB_STORAGE=file)", base_info), @@ -41,11 +41,11 @@ pub fn get_platform_info() -> String { pub fn print_platform_help() { let storage_type = StorageType::from_env(); - + eprintln!("Storage configuration:"); eprintln!(" • Current: {}", get_platform_info()); eprintln!(); - + if storage_type.should_use_file_storage() { eprintln!("File storage tips:"); eprintln!(" • Credentials are stored in a secure file: ~/.verilib_credentials"); @@ -58,7 +58,7 @@ pub fn print_platform_help() { eprintln!(" • Make sure you allow access to the keychain when prompted"); eprintln!(" • You may need to unlock your keychain"); } - + #[cfg(target_os = "windows")] { eprintln!("Credential Manager tips:"); @@ -66,7 +66,7 @@ pub fn print_platform_help() { eprintln!(" • You may need administrator privileges"); } } - + eprintln!(); eprintln!("Environment variable options:"); eprintln!(" • VERILIB_STORAGE=auto (default, platform-specific)"); diff --git a/src/storage/types.rs b/src/storage/types.rs index 9cd5358f..93d831b7 100644 --- a/src/storage/types.rs +++ b/src/storage/types.rs @@ -32,7 +32,7 @@ impl StorageType { pub trait CredentialStorage { fn set_password(&self, password: &str) -> Result<()>; fn get_password(&self) -> Result; - + #[allow(dead_code)] fn delete_password(&self) -> Result<()>; } diff --git a/src/structure/certs.rs b/src/structure/certs.rs index 009811c9..6bf7c9e0 100644 --- a/src/structure/certs.rs +++ b/src/structure/certs.rs @@ -24,9 +24,7 @@ pub fn encode_name(name: &str) -> String { /// Decode a filename back to an identifier. 
pub fn decode_name(encoded: &str) -> String { - percent_decode_str(encoded) - .decode_utf8_lossy() - .to_string() + percent_decode_str(encoded).decode_utf8_lossy().to_string() } /// Get the set of identifiers that already have certs. @@ -40,7 +38,7 @@ pub fn get_existing_certs(certs_dir: &Path) -> Result> { for entry in std::fs::read_dir(certs_dir)? { let entry = entry?; let path = entry.path(); - if path.extension().map_or(false, |ext| ext == "json") { + if path.extension().is_some_and(|ext| ext == "json") { if let Some(stem) = path.file_stem() { let encoded_name = stem.to_string_lossy(); let name = decode_name(&encoded_name); diff --git a/src/structure/mod.rs b/src/structure/mod.rs index dc19f5c2..e4bb7668 100644 --- a/src/structure/mod.rs +++ b/src/structure/mod.rs @@ -7,9 +7,9 @@ pub mod certs; pub mod frontmatter; pub mod utils; -pub use certs::{create_cert, get_existing_certs}; -pub use utils::create_gitignore; +pub use crate::constants::{ATOMIZE_INTERMEDIATE_FILES, VERIFY_INTERMEDIATE_FILES}; pub use crate::executor::{CommandConfig, ExecutionMode, ExternalTool}; +pub use certs::{create_cert, get_existing_certs}; pub use frontmatter::{parse as parse_frontmatter, write as write_frontmatter}; -pub use crate::constants::{ATOMIZE_INTERMEDIATE_FILES, VERIFY_INTERMEDIATE_FILES}; +pub use utils::create_gitignore; pub use utils::{cleanup_intermediate_files, display_menu, get_display_name, run_command}; diff --git a/src/structure/utils.rs b/src/structure/utils.rs index d0418bda..c66bf2e0 100644 --- a/src/structure/utils.rs +++ b/src/structure/utils.rs @@ -119,15 +119,13 @@ pub fn get_display_name(name: &str) -> String { } } - /// Create .gitignore for generated files in .verilib directory. 
pub fn create_gitignore(verilib_path: &Path) -> Result<()> { let gitignore_path = verilib_path.join(".gitignore"); if !gitignore_path.exists() { let gitignore_content = "# Generated by VeriLib (not tracked)\natoms.json\nspecs.json\nstubs.json\nproofs.json\n"; - std::fs::write(&gitignore_path, gitignore_content) - .context("Failed to write .gitignore")?; + std::fs::write(&gitignore_path, gitignore_content).context("Failed to write .gitignore")?; println!("Created .verilib/.gitignore"); } Ok(()) diff --git a/tests/fixtures/config_auto_validate.json b/tests/fixtures/config_auto_validate.json new file mode 100644 index 00000000..90757041 --- /dev/null +++ b/tests/fixtures/config_auto_validate.json @@ -0,0 +1,4 @@ +{ + "structure-root": ".verilib/structure", + "auto-validate-specs": true +} diff --git a/tests/fixtures/tracked_functions.csv b/tests/fixtures/tracked_functions.csv new file mode 100644 index 00000000..531ca1cb --- /dev/null +++ b/tests/fixtures/tracked_functions.csv @@ -0,0 +1,4 @@ +function,module,link +func_a,module,src/module.rs#L10 +func_b,module,src/module.rs#L25 +func_c,other,src/other.rs#L5 diff --git a/tests/helpers/mock_probe_verus.rs b/tests/helpers/mock_probe_verus.rs new file mode 100644 index 00000000..150df8b3 --- /dev/null +++ b/tests/helpers/mock_probe_verus.rs @@ -0,0 +1,49 @@ +use std::{env, fs, process}; + +fn main() { + let args: Vec = env::args().collect(); + let fixtures = env::var("MOCK_FIXTURES_DIR").unwrap_or_else(|_| { + eprintln!("MOCK_FIXTURES_DIR not set"); + process::exit(1); + }); + + let subcommand = args.get(1).map(|s| s.as_str()).unwrap_or(""); + + if subcommand == "--version" { + println!("probe-verus 1.1.0"); + return; + } + + let output_path = args + .windows(2) + .find(|w| w[0] == "-o" || w[0] == "--output") + .map(|w| &w[1]); + + let fixture_file = match subcommand { + "tracked-csv" => "tracked_functions.csv", + "stubify" => "stubs.json", + "atomize" => "atoms.json", + "specify" => "specs.json", + "verify" => 
"proofs.json", + _ => { + eprintln!("mock-probe-verus: unknown subcommand '{}'", subcommand); + process::exit(1); + } + }; + + let src = std::path::PathBuf::from(&fixtures).join(fixture_file); + if let Some(dest) = output_path { + if let Some(parent) = std::path::Path::new(dest.as_str()).parent() { + let _ = fs::create_dir_all(parent); + } + fs::copy(&src, dest).unwrap_or_else(|e| { + eprintln!( + "mock-probe-verus: failed to copy {} -> {}: {}", + src.display(), + dest, + e + ); + process::exit(1); + }); + } +} diff --git a/tests/regression_test.rs b/tests/regression_test.rs new file mode 100644 index 00000000..85ba3918 --- /dev/null +++ b/tests/regression_test.rs @@ -0,0 +1,796 @@ +//! Regression tests for the verilib-cli pipeline. +//! +//! Every test verifies observable behavior: exit codes, artifact contents, +//! and file existence/integrity. No test asserts on stdout/stderr message text. + +use chrono::DateTime; +use std::collections::HashMap; +use std::fs; +use std::path::{Path, PathBuf}; +use std::process::{Command, Output}; +use tempfile::TempDir; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +fn fixtures_dir() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/fixtures") +} + +fn copy_dir_recursive(src: &Path, dst: &Path) -> std::io::Result<()> { + fs::create_dir_all(dst)?; + for entry in fs::read_dir(src)? { + let entry = entry?; + let src_path = entry.path(); + let dst_path = dst.join(entry.file_name()); + if src_path.is_dir() { + copy_dir_recursive(&src_path, &dst_path)?; + } else { + fs::copy(&src_path, &dst_path)?; + } + } + Ok(()) +} + +/// Create a temp project with Verus-style Cargo.toml and all fixtures in `.verilib/`. 
+fn setup_project() -> TempDir { + setup_project_with_config("config.json") +} + +fn setup_project_with_config(config_name: &str) -> TempDir { + let tmp = TempDir::new().expect("Failed to create temp dir"); + let verilib = tmp.path().join(".verilib"); + fs::create_dir_all(&verilib).expect("Failed to create .verilib dir"); + + fs::write( + tmp.path().join("Cargo.toml"), + "[package]\nname = \"test-verus-project\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n\ + [dependencies]\nvstd = { git = \"https://github.com/verus-lang/verus\", rev = \"test\" }\n", + ) + .expect("Failed to write Cargo.toml"); + + let fix = fixtures_dir(); + fs::copy(fix.join(config_name), verilib.join("config.json")).expect("Failed to copy config"); + + for file in ["atoms.json", "specs.json", "proofs.json", "stubs.json"] { + fs::copy(fix.join(file), verilib.join(file)) + .unwrap_or_else(|_| panic!("Failed to copy {}", file)); + } + + copy_dir_recursive(&fix.join("structure"), &verilib.join("structure")) + .expect("Failed to copy structure dir"); + copy_dir_recursive(&fix.join("certs"), &verilib.join("certs")) + .expect("Failed to copy certs dir"); + + tmp +} + +fn cli(args: &[&str], cwd: &Path) -> Output { + Command::new(env!("CARGO_BIN_EXE_verilib-cli")) + .args(args) + .current_dir(cwd) + .output() + .expect("Failed to execute verilib-cli") +} + +fn read_json(path: &Path) -> serde_json::Value { + let content = fs::read_to_string(path) + .unwrap_or_else(|e| panic!("Failed to read {}: {}", path.display(), e)); + serde_json::from_str(&content) + .unwrap_or_else(|e| panic!("Failed to parse {}: {}", path.display(), e)) +} + +fn read_stubs(project: &Path) -> HashMap { + let v = read_json(&project.join(".verilib/stubs.json")); + serde_json::from_value(v).expect("stubs.json is not an object") +} + +fn assert_success(output: &Output, context: &str) { + assert!( + output.status.success(), + "{} failed (exit {:?}).\nstderr: {}", + context, + output.status.code(), + String::from_utf8_lossy(&output.stderr) 
+ ); +} + +fn assert_failure(output: &Output, context: &str) { + assert!( + !output.status.success(), + "{} should have failed but succeeded.\nstdout: {}", + context, + String::from_utf8_lossy(&output.stdout) + ); +} + +fn collect_md_checksums(dir: &Path) -> HashMap> { + use sha2::{Digest, Sha256}; + let mut result = HashMap::new(); + if !dir.exists() { + return result; + } + for entry in walk(dir) { + if entry.extension().is_some_and(|e| e == "md") { + let content = fs::read(&entry).unwrap(); + result.insert(entry, Sha256::digest(&content).to_vec()); + } + } + result +} + +fn walk(dir: &Path) -> Vec { + let mut files = Vec::new(); + if let Ok(entries) = fs::read_dir(dir) { + for entry in entries.flatten() { + let path = entry.path(); + if path.is_dir() { + files.extend(walk(&path)); + } else { + files.push(path); + } + } + } + files +} + +// =========================================================================== +// atomize +// =========================================================================== + +mod atomize { + use super::*; + + /// Enrichment must populate every stub with the six fields that downstream + /// commands depend on: code-name, code-path, code-text, code-module, + /// dependencies, and display-name. (design: Section 3.5) + #[test] + fn enriched_stubs_have_all_required_fields() { + let tmp = setup_project(); + assert_success(&cli(&["atomize", "--no-probe"], tmp.path()), "atomize"); + + let stubs = read_stubs(tmp.path()); + let required = [ + "code-name", + "code-path", + "code-text", + "code-module", + "dependencies", + "display-name", + ]; + for (key, stub) in &stubs { + for field in &required { + assert!( + stub.get(*field).is_some(), + "stub '{}' missing required field '{}'", + key, + field + ); + } + } + } + + /// When a stub already has a code-name that matches an atom, the code-name + /// takes precedence over code-line for atom matching -- even when the + /// code-line falls inside a different atom's range. 
(design: Section 2.9) + #[test] + fn code_name_takes_precedence_over_code_line() { + let tmp = setup_project(); + + let md = tmp + .path() + .join(".verilib/structure/src/module.rs/func_a().md"); + fs::write( + &md, + "---\ncode-name: \"probe:test/1.0.0/module/func_a()\"\n\ + code-path: \"src/module.rs\"\ncode-line: 25\n---\n", + ) + .unwrap(); + + assert_success(&cli(&["atomize", "--no-probe"], tmp.path()), "atomize"); + + let stubs = read_stubs(tmp.path()); + let func_a = &stubs["src/module.rs/func_a().md"]; + assert_eq!( + func_a["code-name"].as_str(), + Some("probe:test/1.0.0/module/func_a()"), + ); + assert_eq!(func_a["display-name"].as_str(), Some("func_a")); + } + + /// The dependency arrays in enriched stubs must be populated from atom + /// data, not invented or left empty. (design: Sections 2.8, 3.3) + #[test] + fn dependencies_come_from_atoms() { + let tmp = setup_project(); + assert_success(&cli(&["atomize", "--no-probe"], tmp.path()), "atomize"); + + let stubs = read_stubs(tmp.path()); + + let deps_a: Vec<&str> = stubs["src/module.rs/func_a().md"]["dependencies"] + .as_array() + .unwrap() + .iter() + .filter_map(|v| v.as_str()) + .collect(); + assert!(deps_a.contains(&"probe:test/1.0.0/module/helper()")); + + assert!(stubs["src/module.rs/func_b().md"]["dependencies"] + .as_array() + .unwrap() + .is_empty()); + + let deps_c: Vec<&str> = stubs["src/other.rs/func_c().md"]["dependencies"] + .as_array() + .unwrap() + .iter() + .filter_map(|v| v.as_str()) + .collect(); + assert!(deps_c.contains(&"probe:test/1.0.0/module/func_a()")); + } + + /// `--check-only` exits successfully when .md stub frontmatter is + /// consistent with the enriched output (no drift). 
(design: Section 2.7) + #[test] + fn check_only_passes_when_stubs_match() { + let tmp = setup_project(); + assert_success( + &cli(&["atomize", "--no-probe"], tmp.path()), + "atomize setup", + ); + assert_success( + &cli(&["atomize", "--no-probe", "--check-only"], tmp.path()), + "atomize --check-only", + ); + } + + /// `--check-only` exits non-zero when a .md file has a code-name that + /// disagrees with the enriched atom data. (design: Section 2.7) + #[test] + fn check_only_detects_code_name_mismatch() { + let tmp = setup_project(); + let md = tmp + .path() + .join(".verilib/structure/src/module.rs/func_a().md"); + fs::write( + &md, + "---\ncode-name: \"probe:test/1.0.0/module/WRONG_NAME()\"\n\ + code-path: \"src/module.rs\"\ncode-line: 10\n---\n", + ) + .unwrap(); + + assert_failure( + &cli(&["atomize", "--no-probe", "--check-only"], tmp.path()), + "atomize --check-only with wrong code-name", + ); + } + + /// `--update-stubs` writes the enriched code-name back into the .md + /// frontmatter so that future `--check-only` runs pass. (design: Section 2.9) + #[test] + fn update_stubs_writes_code_name_to_md_files() { + let tmp = setup_project(); + + let md = tmp + .path() + .join(".verilib/structure/src/module.rs/func_a().md"); + fs::write( + &md, + "---\ncode-path: \"src/module.rs\"\ncode-line: 10\n---\n", + ) + .unwrap(); + + assert_success( + &cli(&["atomize", "--no-probe", "--update-stubs"], tmp.path()), + "atomize --update-stubs", + ); + + let content = fs::read_to_string(&md).unwrap(); + assert!( + content.contains("code-name:") && content.contains("probe:test/1.0.0/module/func_a()"), + "code-name should have been written to .md" + ); + } + + /// Enrichment is idempotent: running atomize twice with the same inputs + /// must produce byte-identical stubs.json. 
+ #[test] + fn two_runs_produce_identical_output() { + let tmp = setup_project(); + + assert_success(&cli(&["atomize", "--no-probe"], tmp.path()), "first run"); + let first: serde_json::Value = read_json(&tmp.path().join(".verilib/stubs.json")); + + assert_success(&cli(&["atomize", "--no-probe"], tmp.path()), "second run"); + let second: serde_json::Value = read_json(&tmp.path().join(".verilib/stubs.json")); + + assert_eq!(first, second, "atomize must be idempotent"); + } + + /// `atomize --no-probe` requires atoms.json on disk; without it the + /// command must exit non-zero. + #[test] + fn fails_without_atoms_json() { + let tmp = setup_project(); + fs::remove_file(tmp.path().join(".verilib/atoms.json")).unwrap(); + assert_failure( + &cli(&["atomize", "--no-probe"], tmp.path()), + "atomize without atoms.json", + ); + } + + /// A Verus project (vstd dependency) without .verilib/config.json must + /// exit non-zero -- the user needs to run `create` first. (design: Section 2.4) + #[test] + fn fails_on_verus_project_without_config() { + let tmp = TempDir::new().unwrap(); + fs::write( + tmp.path().join("Cargo.toml"), + "[package]\nname = \"verus-test\"\nversion = \"0.1.0\"\n\n\ + [dependencies]\nvstd = { git = \"https://github.com/verus-lang/verus\" }\n", + ) + .unwrap(); + + assert_failure( + &cli(&["atomize"], tmp.path()), + "atomize on Verus project without config", + ); + } +} + +// =========================================================================== +// atomize --atoms-only +// =========================================================================== + +mod atomize_atoms_only { + use super::*; + + /// Atoms-only mode does not require a .verilib/config.json because it + /// skips stubs enrichment entirely. 
+ #[test] + fn works_without_project_config() { + let tmp = TempDir::new().unwrap(); + let verilib = tmp.path().join(".verilib"); + fs::create_dir_all(&verilib).unwrap(); + fs::copy( + fixtures_dir().join("atoms.json"), + verilib.join("atoms.json"), + ) + .unwrap(); + + assert_success( + &cli(&["atomize", "--atoms-only", "--no-probe"], tmp.path()), + "atoms-only without config", + ); + } + + /// A Cargo.toml with no Verus dependencies causes atomize to auto-select + /// atoms-only mode and exit successfully. + #[test] + fn auto_detected_for_pure_rust_project() { + let tmp = TempDir::new().unwrap(); + fs::write( + tmp.path().join("Cargo.toml"), + "[package]\nname = \"pure-rust\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n\ + [dependencies]\nserde = \"1.0\"\n", + ) + .unwrap(); + let verilib = tmp.path().join(".verilib"); + fs::create_dir_all(&verilib).unwrap(); + fs::copy( + fixtures_dir().join("atoms.json"), + verilib.join("atoms.json"), + ) + .unwrap(); + + assert_success( + &cli(&["atomize", "--no-probe"], tmp.path()), + "auto atoms-only for pure Rust", + ); + } + + /// Atoms-only must not touch stubs.json, even when a full project setup + /// exists on disk. + #[test] + fn does_not_modify_stubs_json() { + let tmp = setup_project(); + let stubs_before = fs::read_to_string(tmp.path().join(".verilib/stubs.json")).unwrap(); + + assert_success( + &cli(&["atomize", "--atoms-only", "--no-probe"], tmp.path()), + "atoms-only", + ); + + let stubs_after = fs::read_to_string(tmp.path().join(".verilib/stubs.json")).unwrap(); + assert_eq!(stubs_before, stubs_after, "stubs.json must be unchanged"); + } +} + +// =========================================================================== +// specify +// =========================================================================== + +mod specify { + use super::*; + + /// After specify, stubs whose specs have `specified=true` in specs.json + /// (func_a, func_b) must gain a `spec-text` field. 
Stubs with + /// `specified=false` (func_c) must not. (design: Sections 2.10, 3.5) + #[test] + fn populates_spec_text_for_specified_stubs_only() { + let tmp = setup_project_with_config("config_auto_validate.json"); + assert_success( + &cli(&["atomize", "--no-probe"], tmp.path()), + "atomize setup", + ); + assert_success(&cli(&["specify", "--no-probe"], tmp.path()), "specify"); + + let stubs = read_stubs(tmp.path()); + assert!(stubs["src/module.rs/func_a().md"] + .get("spec-text") + .is_some()); + assert!(stubs["src/module.rs/func_b().md"] + .get("spec-text") + .is_some()); + assert!(stubs["src/other.rs/func_c().md"].get("spec-text").is_none()); + } + + /// The full specify flow (with auto-validate creating certs) must never + /// modify the .md structure files on disk. (design: Section 2.9) + #[test] + fn does_not_modify_md_files() { + let tmp = setup_project_with_config("config_auto_validate.json"); + assert_success( + &cli(&["atomize", "--no-probe"], tmp.path()), + "atomize setup", + ); + + let dir = tmp.path().join(".verilib/structure"); + let before = collect_md_checksums(&dir); + assert!(!before.is_empty()); + + assert_success( + &cli(&["specify", "--no-probe"], tmp.path()), + "specify (auto-validate)", + ); + + assert_eq!(before, collect_md_checksums(&dir)); + } + + /// Cert files created by specify must contain an ISO 8601 timestamp. 
+ /// (design: Section 2.10) + #[test] + fn certs_contain_iso8601_timestamp() { + let tmp = setup_project_with_config("config_auto_validate.json"); + assert_success( + &cli(&["atomize", "--no-probe"], tmp.path()), + "atomize setup", + ); + assert_success( + &cli(&["specify", "--no-probe"], tmp.path()), + "specify (auto-validate)", + ); + + let certs_dir = tmp.path().join(".verilib/certs/specs"); + assert!(certs_dir.exists()); + + let certs: Vec<_> = fs::read_dir(&certs_dir) + .unwrap() + .flatten() + .filter(|e| e.path().extension().is_some_and(|ext| ext == "json")) + .collect(); + assert!(!certs.is_empty(), "auto-validate should create cert files"); + + for entry in &certs { + let cert = read_json(&entry.path()); + let ts = cert["timestamp"] + .as_str() + .unwrap_or_else(|| panic!("cert {} has no timestamp", entry.path().display())); + assert!( + DateTime::parse_from_rfc3339(ts).is_ok(), + "timestamp '{}' is not valid RFC 3339 / ISO 8601", + ts + ); + } + } + + /// `--check-only` exits successfully when every specified stub has a + /// corresponding cert file on disk. (design: Section 2.10) + #[test] + fn check_only_passes_when_all_stubs_certified() { + let tmp = setup_project(); + + let cert_path = tmp + .path() + .join(".verilib/certs/specs/probe%3Atest%2F1%2E0%2E0%2Fmodule%2Ffunc_b%28%29.json"); + fs::write( + &cert_path, + r#"{"timestamp": "2026-01-27T10:00:00.000000000Z"}"#, + ) + .unwrap(); + + assert_success( + &cli(&["specify", "--no-probe", "--check-only"], tmp.path()), + "specify --check-only (all certified)", + ); + } + + /// `specify --no-probe` requires specs.json on disk; without it the + /// command must exit non-zero. 
+ #[test] + fn fails_without_specs_json() { + let tmp = setup_project(); + fs::remove_file(tmp.path().join(".verilib/specs.json")).unwrap(); + assert_failure( + &cli(&["specify", "--no-probe", "--check-only"], tmp.path()), + "specify without specs.json", + ); + } +} + +// =========================================================================== +// verify +// =========================================================================== + +mod verify { + use super::*; + + /// After verify, each stub's `verified` field must reflect the + /// corresponding entry in proofs.json. (design: Sections 2.11, 3.5) + #[test] + fn sets_verified_field_from_proofs() { + let tmp = setup_project(); + assert_success( + &cli(&["atomize", "--no-probe"], tmp.path()), + "atomize setup", + ); + assert_success(&cli(&["verify", "--no-probe"], tmp.path()), "verify"); + + let stubs = read_stubs(tmp.path()); + assert_eq!( + stubs["src/module.rs/func_a().md"]["verified"].as_bool(), + Some(true) + ); + assert_eq!( + stubs["src/module.rs/func_b().md"]["verified"].as_bool(), + Some(false) + ); + } + + /// The verify flow must never modify the .md structure files on disk. + /// (design: Section 2.9) + #[test] + fn does_not_modify_md_files() { + let tmp = setup_project(); + let dir = tmp.path().join(".verilib/structure"); + let before = collect_md_checksums(&dir); + assert!(!before.is_empty()); + + assert_success(&cli(&["verify", "--no-probe"], tmp.path()), "verify"); + + assert_eq!(before, collect_md_checksums(&dir)); + } + + /// `--check-only` exits non-zero when stubs.json contains entries with + /// `status: "failure"`. (design: Section 2.11) + #[test] + fn check_only_detects_failure_status() { + let tmp = setup_project(); + assert_failure( + &cli(&["verify", "--check-only"], tmp.path()), + "verify --check-only with failures", + ); + } + + /// `--check-only` exits successfully when no stub has a failure status. 
+ /// (design: Section 2.11) + #[test] + fn check_only_passes_when_no_failures() { + let tmp = setup_project(); + let stubs_path = tmp.path().join(".verilib/stubs.json"); + let mut stubs: serde_json::Value = + serde_json::from_str(&fs::read_to_string(&stubs_path).unwrap()).unwrap(); + + if let Some(obj) = stubs.as_object_mut() { + for stub in obj.values_mut() { + if let Some(o) = stub.as_object_mut() { + o.remove("status"); + } + } + } + fs::write(&stubs_path, serde_json::to_string_pretty(&stubs).unwrap()).unwrap(); + + assert_success( + &cli(&["verify", "--check-only"], tmp.path()), + "verify --check-only (clean)", + ); + } + + /// Verification is idempotent: running verify twice with the same inputs + /// must produce byte-identical stubs.json. + #[test] + fn two_runs_produce_identical_output() { + let tmp = setup_project(); + assert_success( + &cli(&["atomize", "--no-probe"], tmp.path()), + "atomize setup", + ); + + assert_success(&cli(&["verify", "--no-probe"], tmp.path()), "first run"); + let first: serde_json::Value = read_json(&tmp.path().join(".verilib/stubs.json")); + + assert_success(&cli(&["verify", "--no-probe"], tmp.path()), "second run"); + let second: serde_json::Value = read_json(&tmp.path().join(".verilib/stubs.json")); + + assert_eq!(first, second, "verify must be idempotent"); + } + + /// `verify --no-probe` requires proofs.json on disk; without it the + /// command must exit non-zero. + #[test] + fn fails_without_proofs_json() { + let tmp = setup_project(); + fs::remove_file(tmp.path().join(".verilib/proofs.json")).unwrap(); + assert_failure( + &cli(&["verify", "--no-probe"], tmp.path()), + "verify without proofs.json", + ); + } + + /// `verify --check-only` requires stubs.json to exist; without it the + /// command must exit non-zero. 
+ #[test] + fn check_only_fails_without_stubs_json() { + let tmp = TempDir::new().unwrap(); + let verilib = tmp.path().join(".verilib"); + fs::create_dir_all(&verilib).unwrap(); + fs::write( + verilib.join("config.json"), + r#"{"structure-root": ".verilib/structure"}"#, + ) + .unwrap(); + + assert_failure( + &cli(&["verify", "--check-only"], tmp.path()), + "verify without stubs.json", + ); + } +} + +// =========================================================================== +// Mock probe-verus helpers (unix only — requires symlink) +// =========================================================================== + +#[cfg(unix)] +fn setup_mock_probe_dir() -> TempDir { + let mock_dir = TempDir::new().expect("Failed to create mock dir"); + let mock_binary = PathBuf::from(env!("CARGO_BIN_EXE_mock-probe-verus")); + std::os::unix::fs::symlink(&mock_binary, mock_dir.path().join("probe-verus")) + .expect("Failed to symlink mock probe-verus"); + mock_dir +} + +#[cfg(unix)] +fn cli_with_mock(args: &[&str], cwd: &Path, mock_bin_dir: &Path) -> Output { + let mut paths = vec![mock_bin_dir.to_path_buf()]; + paths.extend(std::env::split_paths( + &std::env::var("PATH").unwrap_or_default(), + )); + let new_path = std::env::join_paths(paths).expect("Failed to join PATH"); + Command::new(env!("CARGO_BIN_EXE_verilib-cli")) + .args(args) + .current_dir(cwd) + .env("PATH", new_path) + .env("MOCK_FIXTURES_DIR", fixtures_dir()) + .output() + .expect("Failed to execute verilib-cli") +} + +// =========================================================================== +// create (requires mock probe-verus) +// =========================================================================== + +#[cfg(unix)] +mod create { + use super::*; + + /// `create` must produce `.verilib/config.json` with a `structure-root` + /// field pointing at the structure directory. 
(design: Sections 2.3, 2.4) + #[test] + fn produces_config_with_structure_root() { + let mock_dir = setup_mock_probe_dir(); + let tmp = TempDir::new().unwrap(); + + assert_success( + &cli_with_mock(&["create"], tmp.path(), mock_dir.path()), + "create", + ); + + let config = read_json(&tmp.path().join(".verilib/config.json")); + assert_eq!( + config["structure-root"].as_str(), + Some(".verilib/structure"), + ); + } +} + +// =========================================================================== +// pipeline (requires mock probe-verus) +// =========================================================================== + +#[cfg(unix)] +mod pipeline { + use super::*; + + /// End-to-end: create -> atomize --update-stubs -> specify -> verify, + /// all driven by a mock probe-verus binary. Verifies the pipeline + /// produces the expected artifacts at each stage. (design: Section 2.3) + #[test] + fn full_create_atomize_specify_verify_workflow() { + let mock_dir = setup_mock_probe_dir(); + let tmp = TempDir::new().unwrap(); + + fs::write( + tmp.path().join("Cargo.toml"), + "[package]\nname = \"test-verus-project\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n\ + [dependencies]\nvstd = { git = \"https://github.com/verus-lang/verus\", rev = \"test\" }\n", + ) + .unwrap(); + + // create + assert_success( + &cli_with_mock(&["create"], tmp.path(), mock_dir.path()), + "create", + ); + let config_path = tmp.path().join(".verilib/config.json"); + assert!(config_path.exists()); + let config = read_json(&config_path); + assert!(config.get("structure-root").is_some()); + + // Enable auto-validate for the specify step + let mut cfg: serde_json::Value = + serde_json::from_str(&fs::read_to_string(&config_path).unwrap()).unwrap(); + cfg["auto-validate-specs"] = serde_json::Value::Bool(true); + fs::write(&config_path, serde_json::to_string_pretty(&cfg).unwrap()).unwrap(); + + // atomize --update-stubs + assert_success( + &cli_with_mock(&["atomize", "--update-stubs"], tmp.path(), 
mock_dir.path()), + "atomize --update-stubs", + ); + let stubs_path = tmp.path().join(".verilib/stubs.json"); + assert!(stubs_path.exists()); + let stubs = read_stubs(tmp.path()); + assert!(!stubs.is_empty()); + for (key, stub) in &stubs { + assert!( + stub.get("code-name").is_some(), + "stub '{}' should have code-name", + key + ); + } + + // specify + assert_success( + &cli_with_mock(&["specify"], tmp.path(), mock_dir.path()), + "specify", + ); + + // verify + assert_success( + &cli_with_mock(&["verify"], tmp.path(), mock_dir.path()), + "verify", + ); + let final_stubs = read_stubs(tmp.path()); + for (key, stub) in &final_stubs { + if stub.get("code-name").and_then(|v| v.as_str()).is_some() { + assert!( + stub.get("verified").is_some(), + "stub '{}' should have 'verified' after verify", + key + ); + } + } + } +} diff --git a/tests/structure_commands_test.rs b/tests/structure_commands_test.rs deleted file mode 100644 index d42494b5..00000000 --- a/tests/structure_commands_test.rs +++ /dev/null @@ -1,685 +0,0 @@ -//! Integration tests for structure commands (atomize, specify, verify). -//! -//! These tests use --no-probe and --check-only flags to test command logic -//! without requiring probe-verus to be installed. - -use std::fs; -use std::path::PathBuf; -use tempfile::TempDir; - -/// Helper to set up a test project with fixtures. 
-fn setup_test_project() -> TempDir { - let temp_dir = TempDir::new().expect("Failed to create temp dir"); - let verilib_dir = temp_dir.path().join(".verilib"); - fs::create_dir_all(&verilib_dir).expect("Failed to create .verilib dir"); - - // Verus-style Cargo.toml so atomize uses the full pipeline (not atoms-only) - fs::write( - temp_dir.path().join("Cargo.toml"), - r#"[package] -name = "test-verus-project" -version = "0.1.0" -edition = "2021" - -[dependencies] -vstd = { git = "https://github.com/verus-lang/verus", rev = "test" } -"#, - ) - .expect("Failed to write Cargo.toml"); - - // Copy fixtures - let fixtures_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/fixtures"); - - // Copy config.json - fs::copy( - fixtures_dir.join("config.json"), - verilib_dir.join("config.json"), - ) - .expect("Failed to copy config.json"); - - // Copy JSON files - for file in ["atoms.json", "specs.json", "proofs.json", "stubs.json"] { - fs::copy(fixtures_dir.join(file), verilib_dir.join(file)) - .expect(&format!("Failed to copy {}", file)); - } - - // Copy structure directory - let src_structure = fixtures_dir.join("structure"); - let dst_structure = verilib_dir.join("structure"); - copy_dir_recursive(&src_structure, &dst_structure).expect("Failed to copy structure dir"); - - // Copy certs directory - let src_certs = fixtures_dir.join("certs"); - let dst_certs = verilib_dir.join("certs"); - copy_dir_recursive(&src_certs, &dst_certs).expect("Failed to copy certs dir"); - - temp_dir -} - -/// Recursively copy a directory. -fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> std::io::Result<()> { - fs::create_dir_all(dst)?; - for entry in fs::read_dir(src)? { - let entry = entry?; - let src_path = entry.path(); - let dst_path = dst.join(entry.file_name()); - if src_path.is_dir() { - copy_dir_recursive(&src_path, &dst_path)?; - } else { - fs::copy(&src_path, &dst_path)?; - } - } - Ok(()) -} - -/// Helper to run verilib-cli command and capture output. 
-fn run_command(args: &[&str], cwd: &std::path::Path) -> std::process::Output { - std::process::Command::new(env!("CARGO_BIN_EXE_verilib-cli")) - .args(args) - .current_dir(cwd) - .output() - .expect("Failed to execute command") -} - -// ============================================================================ -// ATOMIZE TESTS -// ============================================================================ - -mod atomize_tests { - use super::*; - - #[test] - fn test_atomize_no_probe_loads_atoms_from_file() { - let temp_dir = setup_test_project(); - let output = run_command(&["atomize", "--no-probe"], temp_dir.path()); - - let stdout = String::from_utf8_lossy(&output.stdout); - assert!( - stdout.contains("Loading atoms from"), - "Should load atoms from file" - ); - assert!(stdout.contains("Loaded 4 atoms"), "Should load 4 atoms"); - } - - #[test] - fn test_atomize_check_only_passes_when_stubs_match() { - let temp_dir = setup_test_project(); - - // First run atomize to ensure stubs.json is up to date - run_command(&["atomize", "--no-probe"], temp_dir.path()); - - // Now check-only should pass - let output = run_command(&["atomize", "--no-probe", "--check-only"], temp_dir.path()); - - assert!(output.status.success(), "check-only should pass when stubs match"); - } - - #[test] - fn test_atomize_check_only_fails_when_stubs_mismatch() { - let temp_dir = setup_test_project(); - let verilib_dir = temp_dir.path().join(".verilib"); - - // Modify a .md file to create a mismatch - change code-name in the frontmatter - // The check compares .md stubs against enriched (from atoms.json) - let md_path = verilib_dir.join("structure/src/module.rs/func_a().md"); - fs::write( - &md_path, - r#"--- -code-name: "probe:test/1.0.0/module/WRONG_NAME()" -code-path: "src/module.rs" -code-line: 10 ---- -"#, - ) - .unwrap(); - - // check-only should fail because .md has WRONG_NAME but atoms.json has func_a - let output = run_command(&["atomize", "--no-probe", "--check-only"], 
temp_dir.path()); - - assert!( - !output.status.success(), - "check-only should fail when stubs mismatch" - ); - let stderr = String::from_utf8_lossy(&output.stderr); - assert!( - stderr.contains("mismatch") || stderr.contains("do not match"), - "Should report mismatch: {}", - stderr - ); - } - - #[test] - fn test_atomize_no_probe_fails_without_atoms_json() { - let temp_dir = setup_test_project(); - let verilib_dir = temp_dir.path().join(".verilib"); - - // Remove atoms.json - fs::remove_file(verilib_dir.join("atoms.json")).unwrap(); - - let output = run_command(&["atomize", "--no-probe"], temp_dir.path()); - - assert!(!output.status.success(), "Should fail without atoms.json"); - let stderr = String::from_utf8_lossy(&output.stderr); - assert!( - stderr.contains("atoms.json not found"), - "Should report missing atoms.json" - ); - } - - #[test] - fn test_atomize_update_stubs_updates_md_files() { - let temp_dir = setup_test_project(); - let verilib_dir = temp_dir.path().join(".verilib"); - - // Create a .md file without code-name - let md_path = verilib_dir.join("structure/src/module.rs/func_a().md"); - fs::write( - &md_path, - r#"--- -code-path: "src/module.rs" -code-line: 10 ---- -"#, - ) - .unwrap(); - - // Run atomize with --update-stubs - let output = run_command(&["atomize", "--no-probe", "--update-stubs"], temp_dir.path()); - assert!(output.status.success(), "atomize --update-stubs should succeed"); - - // Check that the .md file was updated with code-name - let content = fs::read_to_string(&md_path).unwrap(); - assert!( - content.contains("code-name:"), - "Should have added code-name to .md file" - ); - assert!( - content.contains("probe:test/1.0.0/module/func_a()"), - "Should have correct code-name value" - ); - } - - #[test] - fn test_atomize_enriches_stubs_json() { - let temp_dir = setup_test_project(); - let verilib_dir = temp_dir.path().join(".verilib"); - - // Run atomize - let output = run_command(&["atomize", "--no-probe"], temp_dir.path()); - 
assert!(output.status.success(), "atomize should succeed"); - - // Check that stubs.json was enriched - let stubs_path = verilib_dir.join("stubs.json"); - let stubs: serde_json::Value = - serde_json::from_str(&fs::read_to_string(&stubs_path).unwrap()).unwrap(); - - // Check func_a has enriched fields from atoms.json - let func_a = &stubs["src/module.rs/func_a().md"]; - assert_eq!( - func_a["code-module"].as_str(), - Some("module"), - "Should have code-module" - ); - assert_eq!( - func_a["display-name"].as_str(), - Some("func_a"), - "Should have display-name" - ); - assert!( - func_a["dependencies"].is_array(), - "Should have dependencies array" - ); - } -} - -// ============================================================================ -// ATOMS-ONLY TESTS -// ============================================================================ - -mod atoms_only_tests { - use super::*; - - #[test] - fn test_atoms_only_no_probe_works_without_config() { - let temp_dir = TempDir::new().expect("Failed to create temp dir"); - let verilib_dir = temp_dir.path().join(".verilib"); - fs::create_dir_all(&verilib_dir).unwrap(); - - let fixtures_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/fixtures"); - fs::copy( - fixtures_dir.join("atoms.json"), - verilib_dir.join("atoms.json"), - ) - .unwrap(); - - let output = run_command(&["atomize", "--atoms-only", "--no-probe"], temp_dir.path()); - - assert!( - output.status.success(), - "atoms-only --no-probe should succeed without config.json: {}", - String::from_utf8_lossy(&output.stderr) - ); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!( - stdout.contains("Atoms-only mode"), - "Should report atoms-only mode: {}", - stdout - ); - } - - #[test] - fn test_auto_detect_pure_rust_triggers_atoms_only() { - let temp_dir = TempDir::new().expect("Failed to create temp dir"); - - fs::write( - temp_dir.path().join("Cargo.toml"), - r#"[package] -name = "my-rust-project" -version = "0.1.0" -edition = "2021" - 
-[dependencies] -serde = "1.0" -"#, - ) - .unwrap(); - - let verilib_dir = temp_dir.path().join(".verilib"); - fs::create_dir_all(&verilib_dir).unwrap(); - let fixtures_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/fixtures"); - fs::copy( - fixtures_dir.join("atoms.json"), - verilib_dir.join("atoms.json"), - ) - .unwrap(); - - let output = run_command(&["atomize", "--no-probe"], temp_dir.path()); - - assert!( - output.status.success(), - "Should auto-detect pure Rust and use atoms-only: {}", - String::from_utf8_lossy(&output.stderr) - ); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!( - stdout.contains("Auto-enabling atoms-only mode"), - "Should auto-enable atoms-only: {}", - stdout - ); - } - - #[test] - fn test_auto_detect_verus_project_without_config_fails() { - let temp_dir = TempDir::new().expect("Failed to create temp dir"); - - fs::write( - temp_dir.path().join("Cargo.toml"), - r#"[package] -name = "my-verus-project" -version = "0.1.0" -edition = "2021" - -[dependencies] -vstd = { git = "https://github.com/verus-lang/verus", rev = "abc123" } -"#, - ) - .unwrap(); - - let output = run_command(&["atomize"], temp_dir.path()); - - assert!( - !output.status.success(), - "Verus project without config should fail" - ); - let stderr = String::from_utf8_lossy(&output.stderr); - assert!( - stderr.contains("Run 'verilib-cli create' first"), - "Should tell user to run create: {}", - stderr - ); - } - - #[test] - fn test_atoms_only_flag_overrides_full_pipeline() { - let temp_dir = setup_test_project(); - - let verilib_dir = temp_dir.path().join(".verilib"); - let stubs_before = fs::read_to_string(verilib_dir.join("stubs.json")).unwrap(); - - let output = run_command(&["atomize", "--atoms-only", "--no-probe"], temp_dir.path()); - - assert!( - output.status.success(), - "atoms-only should succeed even with config: {}", - String::from_utf8_lossy(&output.stderr) - ); - - let stubs_after = 
fs::read_to_string(verilib_dir.join("stubs.json")).unwrap(); - assert_eq!( - stubs_before, stubs_after, - "atoms-only should not modify stubs.json" - ); - } -} - -// ============================================================================ -// SPECIFY TESTS -// ============================================================================ - -mod specify_tests { - use super::*; - - #[test] - fn test_specify_no_probe_loads_specs_from_file() { - let temp_dir = setup_test_project(); - let output = run_command(&["specify", "--no-probe", "--check-only"], temp_dir.path()); - - let stdout = String::from_utf8_lossy(&output.stdout); - assert!( - stdout.contains("Loading specs from"), - "Should load specs from file" - ); - } - - #[test] - fn test_specify_check_only_reports_uncertified() { - let temp_dir = setup_test_project(); - - // We have certs for func_a but not func_b - // func_b has specified=true in specs.json, so it should be reported as uncertified - let output = run_command(&["specify", "--no-probe", "--check-only"], temp_dir.path()); - - // Check if it reports uncertified stubs - let stderr = String::from_utf8_lossy(&output.stderr); - - // The test should either pass (all certified) or fail (some uncertified) - // Based on our fixtures, func_b is specified but has no cert - if !output.status.success() { - assert!( - stderr.contains("missing certs") || stderr.contains("uncertified"), - "Should report uncertified stubs" - ); - } - } - - #[test] - fn test_specify_no_probe_fails_without_specs_json() { - let temp_dir = setup_test_project(); - let verilib_dir = temp_dir.path().join(".verilib"); - - // Remove specs.json - fs::remove_file(verilib_dir.join("specs.json")).unwrap(); - - let output = run_command(&["specify", "--no-probe", "--check-only"], temp_dir.path()); - - assert!(!output.status.success(), "Should fail without specs.json"); - let stderr = String::from_utf8_lossy(&output.stderr); - assert!( - stderr.contains("specs.json not found"), - "Should report 
missing specs.json" - ); - } - - #[test] - fn test_specify_check_only_passes_when_all_certified() { - let temp_dir = setup_test_project(); - let verilib_dir = temp_dir.path().join(".verilib"); - - // Add cert for func_b (func_a already has a cert) - let cert_path = verilib_dir.join("certs/specs/probe%3Atest%2F1%2E0%2E0%2Fmodule%2Ffunc_b%28%29.json"); - fs::write(&cert_path, r#"{"timestamp": "2026-01-27T10:00:00.000000000Z"}"#).unwrap(); - - // check-only should pass when all specified functions have certs - let output = run_command(&["specify", "--no-probe", "--check-only"], temp_dir.path()); - - assert!( - output.status.success(), - "check-only should pass when all specs have certs" - ); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!( - stdout.contains("All stubs with specs have certs") || stdout.contains("already validated"), - "Should report all certified" - ); - } -} - -// ============================================================================ -// VERIFY TESTS -// ============================================================================ - -mod verify_tests { - use super::*; - - #[test] - fn test_verify_no_probe_loads_proofs_from_file() { - let temp_dir = setup_test_project(); - let output = run_command(&["verify", "--no-probe"], temp_dir.path()); - - let stdout = String::from_utf8_lossy(&output.stdout); - assert!( - stdout.contains("Loading proofs from"), - "Should load proofs from file" - ); - assert!(stdout.contains("Loaded 4 proofs"), "Should load 4 proofs"); - } - - #[test] - fn test_verify_check_only_detects_failures() { - let temp_dir = setup_test_project(); - - // Our fixtures have func_b with status "failure" - let output = run_command(&["verify", "--check-only"], temp_dir.path()); - - assert!( - !output.status.success(), - "check-only should fail when there are failures" - ); - let stderr = String::from_utf8_lossy(&output.stderr); - assert!( - stderr.contains("failure") || stderr.contains("failed verification"), - "Should 
report failures" - ); - } - - #[test] - fn test_verify_check_only_passes_when_no_failures() { - let temp_dir = setup_test_project(); - let verilib_dir = temp_dir.path().join(".verilib"); - - // Modify stubs.json to remove failure status - let stubs_path = verilib_dir.join("stubs.json"); - let mut stubs: serde_json::Value = - serde_json::from_str(&fs::read_to_string(&stubs_path).unwrap()).unwrap(); - - // Remove failure status from all stubs - if let Some(obj) = stubs.as_object_mut() { - for (_, stub) in obj.iter_mut() { - if let Some(stub_obj) = stub.as_object_mut() { - stub_obj.remove("status"); - } - } - } - fs::write(&stubs_path, serde_json::to_string_pretty(&stubs).unwrap()).unwrap(); - - let output = run_command(&["verify", "--check-only"], temp_dir.path()); - - assert!( - output.status.success(), - "check-only should pass when no failures" - ); - } - - #[test] - fn test_verify_no_probe_fails_without_proofs_json() { - let temp_dir = setup_test_project(); - let verilib_dir = temp_dir.path().join(".verilib"); - - // Remove proofs.json - fs::remove_file(verilib_dir.join("proofs.json")).unwrap(); - - let output = run_command(&["verify", "--no-probe"], temp_dir.path()); - - assert!(!output.status.success(), "Should fail without proofs.json"); - let stderr = String::from_utf8_lossy(&output.stderr); - assert!( - stderr.contains("proofs.json not found"), - "Should report missing proofs.json" - ); - } - - #[test] - fn test_verify_updates_stubs_with_verification_status() { - let temp_dir = setup_test_project(); - let verilib_dir = temp_dir.path().join(".verilib"); - - // Run verify with --no-probe - let output = run_command(&["verify", "--no-probe"], temp_dir.path()); - assert!(output.status.success(), "verify should succeed"); - - // Check that stubs.json was updated - let stubs_path = verilib_dir.join("stubs.json"); - let stubs: serde_json::Value = - serde_json::from_str(&fs::read_to_string(&stubs_path).unwrap()).unwrap(); - - // func_a should be verified (proofs.json 
has verified: true) - let func_a = &stubs["src/module.rs/func_a().md"]; - assert_eq!( - func_a["verified"].as_bool(), - Some(true), - "func_a should be verified" - ); - - // func_b should not be verified (proofs.json has verified: false) - let func_b = &stubs["src/module.rs/func_b().md"]; - assert_eq!( - func_b["verified"].as_bool(), - Some(false), - "func_b should not be verified" - ); - } -} - -// ============================================================================ -// CREATE TESTS -// ============================================================================ - -mod create_tests { - use super::*; - - #[test] - fn test_create_creates_config_and_verilib_dir() { - let temp_dir = TempDir::new().expect("Failed to create temp dir"); - - // create will fail at probe-verus step, but should create config first - let _output = run_command(&["create"], temp_dir.path()); - - let verilib_dir = temp_dir.path().join(".verilib"); - assert!(verilib_dir.exists(), ".verilib directory should be created"); - - let config_path = verilib_dir.join("config.json"); - assert!(config_path.exists(), "config.json should be created"); - - let config: serde_json::Value = - serde_json::from_str(&fs::read_to_string(&config_path).unwrap()).unwrap(); - assert_eq!( - config["structure-root"].as_str(), - Some(".verilib/structure"), - "config should have default structure-root" - ); - } - - #[test] - fn test_create_no_seed_file_needed() { - let temp_dir = TempDir::new().expect("Failed to create temp dir"); - - let output = run_command(&["create"], temp_dir.path()); - - let stderr = String::from_utf8_lossy(&output.stderr); - assert!( - !stderr.contains("functions_to_track.csv"), - "Should not reference functions_to_track.csv: {}", - stderr - ); - let seed_path = temp_dir.path().join(".verilib").join("seed.csv"); - assert!( - !seed_path.exists(), - "Should not create seed.csv (probe-verus discovers functions automatically)" - ); - } - - #[test] - fn 
test_create_fails_when_probe_verus_not_installed() { - let temp_dir = TempDir::new().expect("Failed to create temp dir"); - - let output = run_command(&["create"], temp_dir.path()); - - if !output.status.success() { - let stderr = String::from_utf8_lossy(&output.stderr); - assert!( - stderr.contains("probe-verus") || stderr.contains("not found") || stderr.contains("not installed"), - "Should report that probe-verus is required: {}", - stderr - ); - } - } - -} - -// ============================================================================ -// ERROR HANDLING TESTS -// ============================================================================ - -mod error_handling_tests { - use super::*; - - #[test] - fn test_verus_project_fails_without_config() { - let temp_dir = TempDir::new().expect("Failed to create temp dir"); - let verilib_dir = temp_dir.path().join(".verilib"); - fs::create_dir_all(&verilib_dir).expect("Failed to create .verilib dir"); - - fs::write( - temp_dir.path().join("Cargo.toml"), - r#"[package] -name = "verus-test" -version = "0.1.0" - -[dependencies] -vstd = { git = "https://github.com/verus-lang/verus" } -"#, - ) - .unwrap(); - - let output = run_command(&["atomize", "--no-probe"], temp_dir.path()); - assert!(!output.status.success()); - let stderr = String::from_utf8_lossy(&output.stderr); - assert!( - stderr.contains("Run 'verilib-cli create' first"), - "Should tell user to run create: {}", - stderr - ); - } - - #[test] - fn test_verify_fails_without_stubs_json() { - let temp_dir = TempDir::new().expect("Failed to create temp dir"); - let verilib_dir = temp_dir.path().join(".verilib"); - fs::create_dir_all(&verilib_dir).expect("Failed to create .verilib dir"); - - // Create config but no stubs.json - fs::write( - verilib_dir.join("config.json"), - r#"{"structure-root": ".verilib/structure"}"#, - ) - .unwrap(); - - let output = run_command(&["verify", "--check-only"], temp_dir.path()); - assert!(!output.status.success()); - let stderr = 
String::from_utf8_lossy(&output.stderr); - assert!( - stderr.contains("not found") || stderr.contains("atomize"), - "Should report missing stubs.json" - ); - } -}