diff --git a/Cargo.lock b/Cargo.lock index 9139d83c..e7f0af47 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2269,6 +2269,16 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "mime_guess" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + [[package]] name = "minicov" version = "0.3.8" @@ -2872,6 +2882,7 @@ dependencies = [ "js-sys", "log", "mime", + "mime_guess", "percent-encoding", "pin-project-lite", "quinn", diff --git a/core/Cargo.toml b/core/Cargo.toml index b52cebc5..03cd1221 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -45,7 +45,7 @@ log = { version = "0.4.29", default-features = false } pubgrub = { version = "0.3.0", default-features = false } # partialzip = { version = "5.0.0", default-features = false, optional = true } pyo3 = { version = "0.28.2", default-features = false, features = ["macros", "chrono", "indexmap"], optional = true } -reqwest-middleware = { version = "0.5.1" } +reqwest-middleware = { version = "0.5.1", features = ["multipart"] } semver = { version = "1.0.27", features = ["serde"] } serde = { version = "1.0.228", features = ["derive"] } serde_json = { version = "1.0.149", default-features = false, features = ["preserve_order"] } @@ -67,7 +67,7 @@ tokio = { version = "1.50.0", default-features = false, features = ["rt", "io-ut bytes = { version = "1.11.1", default-features = false } toml_edit = { version = "0.25.4", features = ["serde"] } globset = { version = "0.4.18", default-features = false } -reqwest = { version = "0.13.2", optional = true, features = ["rustls", "stream"] } +reqwest = { version = "0.13.2", optional = true, features = ["rustls", "stream", "multipart"] } dunce = "1.0.5" [dev-dependencies] diff --git a/core/src/auth.rs 
b/core/src/auth.rs index 56106b31..2797c70c 100644 --- a/core/src/auth.rs +++ b/core/src/auth.rs @@ -416,6 +416,32 @@ pub type StandardHTTPAuthentication = RestrictAuthentication< Unauthenticated, >; +impl StandardHTTPAuthentication { + /// Extracts the bearer tokens from the configured credential set into a URL-glob map + /// suitable for driving publish-time credential selection. Basic-auth entries are + /// dropped, since publish only supports bearer authentication. + pub fn try_into_publish_bearer_auth_map( + self, + ) -> Result, globset::Error> { + let mut partial = GlobMapBuilder::new(); + + // `GlobMap` stores keys and values in parallel vectors; consume `self` so we + // can move bearer tokens into a publish-only map without cloning secrets. + for (key, sequence_auth) in self + .restricted + .keys + .into_iter() + .zip(self.restricted.values.into_iter()) + { + if let StandardInnerAuthentication::BearerAuth(inner) = sequence_auth.lower { + partial.add(key, inner); + } + } + + partial.build() + } +} + /// Utility to simplify construction of `StandardHTTPAuthentication` #[derive(Debug, Default, Clone)] pub struct StandardHTTPAuthenticationBuilder { diff --git a/core/src/commands/build.rs b/core/src/commands/build.rs index 428f5455..d28f76c9 100644 --- a/core/src/commands/build.rs +++ b/core/src/commands/build.rs @@ -1,4 +1,4 @@ -use camino::Utf8Path; +use camino::{Utf8Path, Utf8PathBuf}; use thiserror::Error; use crate::{ @@ -209,6 +209,19 @@ impl From> } } +pub fn default_kpar_path( + project: &Pr, + workspace: Option<&Workspace>, + project_path: &Utf8Path, +) -> Result> { + let mut path = workspace + .map(Workspace::root_path) + .unwrap_or(project_path) + .join("output"); + path.push(default_kpar_file_name(project)?); + Ok(path) +} + pub fn default_kpar_file_name( project: &Pr, ) -> Result> { diff --git a/core/src/commands/mod.rs b/core/src/commands/mod.rs index d3a93171..4ae0433c 100644 --- a/core/src/commands/mod.rs +++ b/core/src/commands/mod.rs @@ -10,6 
+10,8 @@ pub mod include; pub mod info; pub mod init; pub mod lock; +#[cfg(all(feature = "filesystem", feature = "networking"))] +pub mod publish; pub mod remove; #[cfg(feature = "filesystem")] pub mod root; diff --git a/core/src/commands/publish.rs b/core/src/commands/publish.rs new file mode 100644 index 00000000..d9fab105 --- /dev/null +++ b/core/src/commands/publish.rs @@ -0,0 +1,413 @@ +// SPDX-FileCopyrightText: © 2025 Sysand contributors +// SPDX-License-Identifier: MIT OR Apache-2.0 + +use std::sync::Arc; + +use bytes::Bytes; +use camino::Utf8Path; +use serde::Deserialize; +use sha2::Digest; +use thiserror::Error; +use url::Url; + +use crate::{ + auth::{ForceBearerAuth, HTTPAuthentication}, + project::{ProjectRead, local_kpar::LocalKParProject}, +}; + +/// Defensive upper bound on kpar file size (100 MiB) to catch unexpected uploads by mistake. +const MAX_KPAR_PUBLISH_SIZE: u64 = 100 * 1024 * 1024; +/// Path appended to the index URL to form the upload endpoint. +const UPLOAD_ENDPOINT_PATH: &str = "/api/v1/upload"; + +pub fn do_publish>( + path: P, + index: Url, + auth: ForceBearerAuth, + client: reqwest_middleware::ClientWithMiddleware, + runtime: Arc, +) -> Result { + let path = path.as_ref(); + let header = crate::style::get_style_config().header; + let upload_url = build_upload_url(&index)?; + let PublishPreparation { + purl_versioned, + metadata, + kpar_bytes, + } = prepare_publish_payload(path)?; + log::info!( + "{header}{:>12}{header:#} `{purl_versioned}` to {index}", + "Publishing", + ); + + // Stash the URL as a string for post-request logging; the `Url` itself + // is moved into the closure since `with_authentication` may call it + // multiple times and each `post` consumes the URL. 
+ let upload_url_for_log = upload_url.to_string(); + + let build_request = move |c: &reqwest_middleware::ClientWithMiddleware| { + let metadata_part = reqwest::multipart::Part::text(metadata.clone()) + .mime_str("application/json") + .unwrap(); + let kpar_part = reqwest::multipart::Part::stream(kpar_bytes.clone()) + // we declare an arbitrary filename to help server side libraries + // make reasonable assumptions reading the POST request, such as not + // trying to parse the binary data as UTF-8 or similar + .file_name("project.kpar") + .mime_str("application/zip") + .unwrap(); + + let form = reqwest::multipart::Form::new() + .part("metadata", metadata_part) + .part("kpar", kpar_part); + + c.post(upload_url.clone()).multipart(form) + }; + + let response = + runtime.block_on(async { auth.with_authentication(&client, &build_request).await })?; + + let status = response.status().as_u16(); + let response_url = response.url().to_string(); + let body_bytes = runtime + .block_on(response.bytes()) + .map_err(PublishError::ResponseBody)?; + log::debug!( + "publish response: request URL `{}`, final URL `{}`, status {}", + upload_url_for_log, + response_url, + status + ); + + map_publish_response(status, &body_bytes, &upload_url_for_log, &response_url) +} + +pub fn build_upload_url(index: &Url) -> Result { + if !matches!(index.scheme(), "http" | "https") { + return Err(PublishError::InvalidIndexUrl { + url: index.as_str().into(), + reason: "URL scheme must be http or https".to_string(), + }); + } + + if index.query().is_some() { + return Err(PublishError::InvalidIndexUrl { + url: index.as_str().into(), + reason: "URL must not include a query component".to_string(), + }); + } + + if index.fragment().is_some() { + return Err(PublishError::InvalidIndexUrl { + url: index.as_str().into(), + reason: "URL must not include a fragment component".to_string(), + }); + } + + let mut upload_url = index.to_owned(); + { + // Guaranteed for validated http(s) URLs. 
+ let mut segments = upload_url.path_segments_mut().unwrap(); + // Normalize both `https://host` and `https://host/`. + segments.pop_if_empty(); + } + + // After normalization, reject URLs that already end with the upload path. + if upload_url.path().ends_with(UPLOAD_ENDPOINT_PATH) { + return Err(PublishError::InvalidIndexUrl { + url: index.as_str().into(), + reason: "URL must point to the index root; do not include `/api/v1/upload`".to_string(), + }); + } + + { + let mut segments = upload_url.path_segments_mut().unwrap(); + for segment in UPLOAD_ENDPOINT_PATH.trim_start_matches('/').split('/') { + segments.push(segment); + } + } + + Ok(upload_url) +} + +#[derive(Debug)] +pub struct PublishResponse { + pub status: u16, + pub message: String, + pub is_new_project: bool, +} + +#[derive(Error, Debug)] +pub enum PublishError { + #[error("failed to read kpar file at `{0}`: {1}")] + KparRead(Box, std::io::Error), + + #[error("failed to open kpar project at `{0}`: {1}")] + KparOpen(Box, String), + + #[error("missing project info in kpar")] + MissingInfo, + + #[error("missing project metadata in kpar")] + MissingMeta, + + #[error("missing publisher in project info (required for publishing)")] + MissingPublisher, + + #[error( + "publisher field `{0}` is invalid for modern project IDs: must be 3-50 characters, use only ASCII letters and numbers, may include single spaces or hyphens between words, and must start and end with a letter or number" + )] + InvalidPublisher(Box), + + #[error( + "name field `{0}` is invalid for modern project IDs: must be 3-50 characters, use only ASCII letters and numbers, may include single spaces, hyphens, or dots between words, and must start and end with a letter or number" + )] + InvalidName(Box), + + #[error( + "version field `{version}` is invalid for publishing: must be a valid Semantic Versioning 2.0 version ({source})" + )] + InvalidVersion { + version: Box, + source: semver::Error, + }, + + #[error("missing license in project info 
(required for publishing)")] + MissingLicense, + + #[error( + "license field `{license}` is invalid for publishing: must be a valid SPDX license expression ({source})" + )] + InvalidLicense { + license: Box, + source: spdx::error::ParseError, + }, + + #[error("invalid index URL `{url}` for publish endpoint: {reason}")] + InvalidIndexUrl { url: Box, reason: String }, + + #[error("HTTP request failed: {0:#?}")] + Http(#[from] reqwest_middleware::Error), + + #[error("failed to read server response body: {0:#?}")] + ResponseBody(#[source] reqwest::Error), + + #[error("server error ({status}): {body}")] + ServerError { status: u16, body: String }, + + #[error("authentication failed: {0}")] + AuthError(String), + + #[error("conflict: package version already exists: {0}")] + Conflict(String), + + #[error("bad request: {0}")] + BadRequest(String), + + #[error("publish endpoint not found: {0}")] + NotFound(String), + + #[error( + "KPAR file is unexpectedly large ({size} bytes, limit is {limit} bytes); verify you are publishing the correct file" + )] + KparTooLarge { size: u64, limit: u64 }, +} + +// --- Private helpers --- + +struct PublishPreparation { + purl_versioned: String, + // Keep upload payload in `Bytes` so request retries clone cheaply. + kpar_bytes: Bytes, + metadata: String, +} + +/// Reads and validates a `.kpar` file, returning the upload payload and metadata. +fn prepare_publish_payload(path: &Utf8Path) -> Result { + // Open and validate kpar. + let kpar_project = LocalKParProject::new_guess_root(path) + .map_err(|e| PublishError::KparOpen(path.as_str().into(), e.to_string()))?; + + let (info, meta) = kpar_project + .get_project() + .map_err(|e| PublishError::KparOpen(path.as_str().into(), e.to_string()))?; + + let info = info.ok_or(PublishError::MissingInfo)?; + // Validate that metadata exists; contents are not used during upload. 
+ _ = meta.ok_or(PublishError::MissingMeta)?; + + let publisher = info + .publisher + .as_deref() + .ok_or(PublishError::MissingPublisher)?; + let name = &info.name; + let version = &info.version; + let license = info + .license + .as_deref() + .ok_or(PublishError::MissingLicense)?; + if !is_valid_publisher(publisher) { + return Err(PublishError::InvalidPublisher(publisher.into())); + } + if !is_valid_name(name) { + return Err(PublishError::InvalidName(name.as_str().into())); + } + semver::Version::parse(version).map_err(|source| PublishError::InvalidVersion { + version: version.as_str().into(), + source, + })?; + spdx::Expression::parse(license).map_err(|source| PublishError::InvalidLicense { + license: license.into(), + source, + })?; + let normalized_publisher = normalize_field(publisher); + let normalized_name = normalize_field(name); + let purl_versioned = format!("pkg:sysand/{normalized_publisher}/{normalized_name}@{version}"); + + let file_size = std::fs::metadata(path) + .map_err(|e| PublishError::KparRead(path.as_str().into(), e))? + .len(); + if file_size > MAX_KPAR_PUBLISH_SIZE { + return Err(PublishError::KparTooLarge { + size: file_size, + limit: MAX_KPAR_PUBLISH_SIZE, + }); + } + + let kpar_bytes = + std::fs::read(path).map_err(|e| PublishError::KparRead(path.as_str().into(), e))?; + let sha256_digest = format!("{:x}", sha2::Sha256::digest(&kpar_bytes)); + let metadata = serde_json::json!({ + "normalized_publisher": normalized_publisher, + "normalized_name": normalized_name, + "version": version, + "license": license, + "kpar_sha256_digest": sha256_digest, + }) + .to_string(); + + Ok(PublishPreparation { + purl_versioned, + metadata, + kpar_bytes: Bytes::from(kpar_bytes), + }) +} + +/// Maps an HTTP status and body to a `PublishResponse` or `PublishError`. 
+fn map_publish_response( + status: u16, + body_bytes: &[u8], + upload_url_for_log: &str, + response_url: &str, +) -> Result { + match status { + 200 => Ok(PublishResponse { + status, + message: String::from_utf8_lossy(body_bytes).into_owned(), + is_new_project: false, + }), + 201 => Ok(PublishResponse { + status, + message: String::from_utf8_lossy(body_bytes).into_owned(), + is_new_project: true, + }), + 400 => Err(PublishError::BadRequest(error_body_to_string(body_bytes))), + 401 | 403 => Err(PublishError::AuthError(error_body_to_string(body_bytes))), + 404 => Err(PublishError::NotFound(error_body_to_string(body_bytes))), + 409 => Err(PublishError::Conflict(error_body_to_string(body_bytes))), + _ => { + log::warn!( + "publish failed: request URL `{}`, final URL `{}`, status {}", + upload_url_for_log, + response_url, + status + ); + Err(PublishError::ServerError { + status, + body: error_body_to_string(body_bytes), + }) + } + } +} + +/// Validates a publisher or name field for modern project IDs. +/// +/// Rules: 3-50 ASCII alphanumeric characters, with single separators (space, +/// hyphen, and optionally dot when `allow_dot` is true) allowed between words. +/// Must start and end with an alphanumeric character. +/// +/// Publish-only; if additional surfaces need this, extract to a shared module. 
+fn is_valid_field(s: &str, allow_dot: bool) -> bool { + if !s.is_ascii() { + return false; + } + let bytes = s.as_bytes(); + + // check length between 3-50 + if !(3..=50).contains(&bytes.len()) { + return false; + } + + // check first and last characters are alphanum + if !bytes[0].is_ascii_alphanumeric() || !bytes[bytes.len() - 1].is_ascii_alphanumeric() { + return false; + } + + // check all characters, except first and last + for i in 1..(bytes.len() - 1) { + let b = bytes[i]; + + // alphanums are ok + if b.is_ascii_alphanumeric() { + continue; + } + + // and separators are ok + let is_separator = b == b'-' || b == b' ' || (allow_dot && b == b'.'); + if !is_separator { + return false; + } + + // but only isolated separators characters are ok + // knowing first/last is an alphanum, this is sufficient + if !bytes[i - 1].is_ascii_alphanumeric() { + return false; + } + } + + true +} + +fn is_valid_publisher(s: &str) -> bool { + is_valid_field(s, false) +} + +fn is_valid_name(s: &str) -> bool { + is_valid_field(s, true) +} + +fn normalize_field(s: &str) -> String { + s.to_ascii_lowercase().replace(' ', "-") +} + +#[derive(Deserialize)] +struct ErrorResponse { + error: String, +} + +fn error_body_to_string(body_bytes: &[u8]) -> String { + let text = String::from_utf8_lossy(body_bytes); + let trimmed = text.trim(); + + if trimmed.is_empty() { + return "no error details provided".to_string(); + } + + serde_json::from_str::(trimmed) + .map(|error| error.error) + .unwrap_or_else(|_| trimmed.to_string()) +} + +#[cfg(test)] +#[path = "./publish_tests.rs"] +mod tests; diff --git a/core/src/commands/publish_tests.rs b/core/src/commands/publish_tests.rs new file mode 100644 index 00000000..1f2c53e3 --- /dev/null +++ b/core/src/commands/publish_tests.rs @@ -0,0 +1,132 @@ +// SPDX-FileCopyrightText: © 2025 Sysand contributors +// SPDX-License-Identifier: MIT OR Apache-2.0 + +use super::{ + PublishError, build_upload_url, error_body_to_string, is_valid_name, is_valid_publisher, 
+ normalize_field, +}; +use url::Url; + +#[test] +fn publisher_field_validation() { + assert!(is_valid_publisher("Acme Labs")); + assert!(is_valid_publisher("ACME-LABS-42")); + assert!(is_valid_publisher("abc")); + assert!(is_valid_publisher( + "abcdefghijklmnopqrstuvxyzabcdefghijklmnopqrstuvxyz" + )); + assert!(!is_valid_publisher("ab")); + assert!(!is_valid_publisher( + "abcdefghijklmnopqrstuvxyzabcdefghijklmnopqrstuvxyza" + )); + assert!(!is_valid_publisher("Acme.Labs")); + assert!(!is_valid_publisher("Åcme Labs")); + assert!(!is_valid_publisher("Acme Labs")); + assert!(!is_valid_publisher("Acme. Labs")); + assert!(!is_valid_publisher("Acme- Labs")); + assert!(!is_valid_publisher("Acme__Labs")); + assert!(!is_valid_publisher("Acme.")); +} + +#[test] +fn name_field_validation() { + assert!(is_valid_name("My.Project Alpha")); + assert!(is_valid_name("Alpha-2")); + assert!(!is_valid_name("ab")); + assert!(!is_valid_name("My..Project")); + assert!(!is_valid_name("My__Project")); + assert!(!is_valid_name(".Project")); +} + +#[test] +fn normalize_field_preserves_dot() { + assert_eq!(normalize_field("My.Project Alpha"), "my.project-alpha"); + assert_eq!(normalize_field("ACME LABS"), "acme-labs"); +} + +#[test] +fn build_upload_url_appends_endpoint_path() { + assert_eq!( + build_upload_url(&Url::parse("https://example.org").unwrap()) + .unwrap() + .as_str(), + "https://example.org/api/v1/upload" + ); + assert_eq!( + build_upload_url(&Url::parse("https://example.org/").unwrap()) + .unwrap() + .as_str(), + "https://example.org/api/v1/upload" + ); + assert_eq!( + build_upload_url(&Url::parse("https://example.org/index").unwrap()) + .unwrap() + .as_str(), + "https://example.org/index/api/v1/upload" + ); + assert_eq!( + build_upload_url(&Url::parse("https://example.org/index/").unwrap()) + .unwrap() + .as_str(), + "https://example.org/index/api/v1/upload" + ); +} + +#[test] +fn build_upload_url_preserves_percent_encoded_segments() { + assert_eq!( + 
build_upload_url(&Url::parse("https://example.org/my%20index/").unwrap()) + .unwrap() + .as_str(), + "https://example.org/my%20index/api/v1/upload" + ); +} + +#[test] +fn build_upload_url_rejects_upload_endpoint_path() { + for url in [ + "https://example.org/api/v1/upload", + "https://example.org/api/v1/upload/", + "https://example.org/index/api/v1/upload", + ] { + let err = build_upload_url(&Url::parse(url).unwrap()).unwrap_err(); + assert!(matches!(err, PublishError::InvalidIndexUrl { .. })); + } +} + +#[test] +fn build_upload_url_rejects_query_and_fragment() { + let err = + build_upload_url(&Url::parse("https://example.org/index?x=1#frag").unwrap()).unwrap_err(); + assert!(matches!(err, PublishError::InvalidIndexUrl { .. })); +} + +#[test] +fn build_upload_url_rejects_non_http_scheme() { + let err = build_upload_url(&Url::parse("ftp://example.org").unwrap()).unwrap_err(); + assert!(matches!(err, PublishError::InvalidIndexUrl { .. })); +} + +#[test] +fn build_upload_url_rejects_non_hierarchical_url() { + let err = build_upload_url(&Url::parse("mailto:test@example.org").unwrap()).unwrap_err(); + assert!(matches!(err, PublishError::InvalidIndexUrl { .. 
})); +} + +#[test] +fn error_body_to_string_trims_text_content() { + assert_eq!(error_body_to_string(b" unauthorized\n"), "unauthorized"); +} + +#[test] +fn error_body_to_string_extracts_error_from_json() { + assert_eq!( + error_body_to_string(br#"{"error":"Invalid token"}"#), + "Invalid token" + ); +} + +#[test] +fn error_body_to_string_reports_empty_body() { + assert_eq!(error_body_to_string(b" \n\t "), "no error details provided"); +} diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index 57e75320..0ed2d3bb 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -14,6 +14,7 @@ - [sysand include](commands/include.md) - [sysand exclude](commands/exclude.md) - [sysand build](commands/build.md) + - [sysand publish](commands/publish.md) - [sysand lock](commands/lock.md) - [sysand env](commands/env.md) - [sysand env install](commands/env/install.md) diff --git a/docs/src/commands/publish.md b/docs/src/commands/publish.md new file mode 100644 index 00000000..a704406e --- /dev/null +++ b/docs/src/commands/publish.md @@ -0,0 +1,83 @@ +# `sysand publish` + +Publish a KPAR to a sysand package index. + +## Usage + +```sh +sysand publish --index [PATH] +``` + +## Description + +Publishes a `.kpar` file to a sysand-compatible package index. The project +must be built first using [`sysand build`](build.md). + +Authentication is required. See [Authentication](../authentication.md) for +how to configure credentials. +For `sysand publish`, only bearer token credentials +(`SYSAND_CRED__BEARER_TOKEN`) are used. +If no matching bearer token credentials are configured for the publish URL, +the command fails before making the upload request. + +`--index` is required for `sysand publish`. + +The package identifier used during publish is derived from project metadata. 
+Before publishing, ensure `publisher`, `name`, `version`, and `license` follow +these rules: + +- `publisher`: 3-50 characters, ASCII letters and numbers only, with optional + single spaces or hyphens between words, and must start and end with a letter + or number. +- `name`: 3-50 characters, ASCII letters and numbers only, with optional single + spaces, hyphens, or dots between words, and must start and end with a letter + or number. +- `version`: must be a valid Semantic Versioning 2.0 version. +- `license`: required and must be a valid + [SPDX license expression](https://spdx.github.io/spdx-spec/latest/annexes/spdx-license-expressions/). + See [Project metadata: `license`](../metadata.md#license) for examples. + +`name` dots are preserved in the published identifier (they are not normalized +away). + +## Arguments + +- `[PATH]`: Path to the `.kpar` file to publish. If not provided, looks for + a KPAR in the output directory matching the current project's name and + version (e.g. `output/<name>-<version>.kpar`). + +## Options + +- `--index <URL>`: URL of the package index to publish to. Required. + Provide the index root URL (for example, `https://sysand.org` or + `https://my-index.example.com/index`), not the upload endpoint path + (`/api/v1/upload`).
+ +{{#include ./partials/global_opts.md}} + +## Examples + +Build and publish the current project: + +```sh +sysand build +sysand publish --index https://sysand.org +``` + +Publish a specific KPAR file: + +```sh +sysand publish --index https://sysand.org ./my-project-1.0.0.kpar +``` + +Publish to a custom index: + +```sh +sysand publish --index https://my-index.example.com +``` + +## See Also + +- [`sysand build`](build.md) — Build a KPAR from a project +- [Authentication](../authentication.md) — Configure credentials +- [Publishing a package](../publishing.md) — Publishing guide diff --git a/sysand/Cargo.toml b/sysand/Cargo.toml index cf341ecd..e9346e7e 100644 --- a/sysand/Cargo.toml +++ b/sysand/Cargo.toml @@ -42,7 +42,7 @@ url = { version = "2.5.8", default-features = false } pubgrub = { version = "0.3.0", default-features = false } indexmap = "2.13.0" tokio = { version = "1.50.0", default-features = false } -reqwest-middleware = { version = "0.5.1" } +reqwest-middleware = { version = "0.5.1", features = ["multipart"] } reqwest = { version = "0.13.2", features = ["rustls", "blocking"] } [dev-dependencies] diff --git a/sysand/src/cli.rs b/sysand/src/cli.rs index 1034a426..36626a02 100644 --- a/sysand/src/cli.rs +++ b/sysand/src/cli.rs @@ -12,6 +12,7 @@ use clap::{ValueEnum, builder::StyledStr, crate_authors}; use fluent_uri::Iri; use semver::VersionReq; use sysand_core::build::KparCompressionMethod; +use url::Url; use crate::env_vars; @@ -181,6 +182,19 @@ pub enum Command { #[arg(long, short, default_value_t = false, verbatim_doc_comment)] allow_path_usage: bool, }, + /// Publish a KPAR to a sysand package index + Publish { + /// Path to the KPAR file to publish. If not provided, will look + /// for a KPAR in the output directory with the current project's + /// name and version + #[clap(verbatim_doc_comment)] + path: Option, + + /// Root URL of the package index to publish to + /// (e.g. 
https://sysand.org), not the upload endpoint path + #[arg(long, value_name = "URL", verbatim_doc_comment)] + index: Url, + }, /// Create or update lockfile Lock { #[command(flatten)] diff --git a/sysand/src/commands/mod.rs b/sysand/src/commands/mod.rs index 4de8a420..d87694a2 100644 --- a/sysand/src/commands/mod.rs +++ b/sysand/src/commands/mod.rs @@ -11,6 +11,7 @@ pub mod info; pub mod init; pub mod lock; pub mod print_root; +pub mod publish; pub mod remove; pub mod sources; pub mod sync; diff --git a/sysand/src/commands/publish.rs b/sysand/src/commands/publish.rs new file mode 100644 index 00000000..0cd1dc90 --- /dev/null +++ b/sysand/src/commands/publish.rs @@ -0,0 +1,105 @@ +// SPDX-FileCopyrightText: © 2025 Sysand contributors +// SPDX-License-Identifier: MIT OR Apache-2.0 + +use std::sync::Arc; + +use anyhow::{Result, bail}; +use camino::Utf8PathBuf; +use sysand_core::{ + auth::{GlobMapResult, StandardHTTPAuthentication}, + build::default_kpar_path, + commands::publish::{build_upload_url, do_publish}, + context::ProjectContext, + project::utils::wrapfs, +}; +use url::Url; + +use crate::CliError; + +pub fn command_publish( + path: Option, + index: Url, + ctx: &ProjectContext, + auth_policy: Arc, + client: reqwest_middleware::ClientWithMiddleware, + runtime: Arc, +) -> Result<()> { + let kpar_path = resolve_publish_kpar_path(path, ctx)?; + if !wrapfs::is_file(&kpar_path)? { + bail!("KPAR file not found at `{kpar_path}`, run `sysand build` first"); + } + // Consume the Arc (or clone if shared) to extract owned credentials. + let bearer_map = Arc::unwrap_or_clone(auth_policy).try_into_publish_bearer_auth_map()?; + + // Match credentials against the concrete upload endpoint, not the index root, + // so users can scope patterns to `/api/v1/upload` when needed. 
+ let upload_url = build_upload_url(&index)?; + let bearer = match bearer_map.lookup(upload_url.as_str()) { + GlobMapResult::Found(_, token) => token.clone(), + GlobMapResult::Ambiguous(candidates) => { + // Publish must resolve to exactly one bearer token. Unlike the + // general fetch/auth flow, do not probe multiple credentials here: + // we do not want to retry uploads or accidentally send unrelated + // publish credentials to the endpoint. A future refinement could + // prefer the most specific glob match, which would support + // separate read and publish credentials under the same host. + bail!( + "multiple bearer token credentials configured for publish URL `{upload_url}`; \ + refine SYSAND_CRED_ URL patterns so exactly one bearer token matches ({} candidates found)", + candidates.len() + ); + } + GlobMapResult::NotFound => { + bail!( + "no bearer token credentials configured for publish URL `{upload_url}`; \ + set SYSAND_CRED_ and SYSAND_CRED__BEARER_TOKEN with a matching URL pattern" + ); + } + }; + + let response = do_publish(kpar_path, index, bearer, client, runtime)?; + + let header = sysand_core::style::get_style_config().header; + if response.is_new_project { + log::info!( + "{header}{:>12}{header:#} new project successfully", + "Published" + ); + } else { + log::info!( + "{header}{:>12}{header:#} new release successfully", + "Published" + ); + } + + Ok(()) +} + +fn resolve_publish_kpar_path( + path: Option, + ctx: &ProjectContext, +) -> Result { + if let Some(path) = path { + return Ok(path); + } + + // Without an explicit path, publish must resolve one concrete project artifact. + // If no current project is discovered but a workspace is, this is ambiguous + // (workspace-level context may contain multiple projects), so require `[PATH]`. 
+ let current_project = match (ctx.current_project.as_ref(), ctx.current_workspace.as_ref()) { + (Some(current_project), _) => current_project, + (None, Some(_)) => { + bail!( + "`sysand publish` without [PATH] is not supported from a workspace; \ + run the command from a project directory or pass an explicit .kpar path" + ); + } + (None, None) => return Err(CliError::MissingProjectCurrentDir.into()), + }; + + Ok(default_kpar_path( + current_project, + ctx.current_workspace.as_ref(), + ¤t_project.project_path, + )?) +} diff --git a/sysand/src/lib.rs b/sysand/src/lib.rs index 2e47df5e..55ea64d8 100644 --- a/sysand/src/lib.rs +++ b/sysand/src/lib.rs @@ -40,7 +40,6 @@ use sysand_core::{ }, resolve::net_utils::create_reqwest_client, stdlib::known_std_libs, - workspace::Workspace, }; use url::Url; @@ -59,6 +58,7 @@ use crate::{ init::command_init, lock::command_lock, print_root::command_print_root, + publish::command_publish, remove::command_remove, sources::{command_sources_env, command_sources_project}, sync::command_sync, @@ -267,7 +267,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { } } } - let basic_auth_policy = Arc::new(auths_builder.build()?); + let auth_policy = Arc::new(auths_builder.build()?); match args.command { Command::Init { @@ -309,7 +309,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { project_root, client, runtime, - basic_auth_policy, + auth_policy, ctx, ) } else { @@ -322,7 +322,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { project_root, client, runtime, - basic_auth_policy, + auth_policy, ctx, ) } @@ -369,7 +369,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { project_root, client, runtime, - basic_auth_policy, + auth_policy, &ctx, ) .map(|_| ()) @@ -414,7 +414,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { &project_root, client.clone(), runtime.clone(), - basic_auth_policy.clone(), + auth_policy.clone(), &ctx, )? 
} else { @@ -429,7 +429,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { client, &provided_iris, runtime, - basic_auth_policy, + auth_policy, &ctx, ) } @@ -483,7 +483,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { &project_root, &client, runtime.clone(), - basic_auth_policy.clone(), + auth_policy.clone(), )?; enum Location { @@ -568,7 +568,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { &excluded_iris, overrides, runtime, - basic_auth_policy, + auth_policy, ), (Location::Iri(iri), Some(subcommand)) => { let numbered = subcommand.numbered(); @@ -581,7 +581,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { index_urls, overrides, runtime, - basic_auth_policy, + auth_policy, ) } (Location::Path(path), None) => command_info_path(&path, &excluded_iris), @@ -614,7 +614,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { ctx, client, runtime, - basic_auth_policy, + auth_policy, ) } Command::Remove { locator } => { @@ -642,18 +642,17 @@ pub fn run_cli(args: cli::Args) -> Result<()> { let path = if let Some(path) = path { path } else { - let mut output_dir = ctx - .current_workspace - .as_ref() - .map(Workspace::root_path) - .unwrap_or_else(|| ¤t_project.project_path) - .join("output"); - let name = sysand_core::build::default_kpar_file_name(¤t_project)?; - if !wrapfs::is_dir(&output_dir)? { - wrapfs::create_dir(&output_dir)?; + let path = sysand_core::build::default_kpar_path( + ¤t_project, + ctx.current_workspace.as_ref(), + ¤t_project.project_path, + )?; + if let Some(output_dir) = path.parent() + && !wrapfs::is_dir(output_dir)? 
+ { + wrapfs::create_dir(output_dir)?; } - output_dir.push(name); - output_dir + path }; command_build_for_project( path, @@ -681,6 +680,9 @@ pub fn run_cli(args: cli::Args) -> Result<()> { ) } } + cli::Command::Publish { path, index } => { + command_publish(path, index, &ctx, auth_policy, client, runtime) + } Command::Sources { sources_opts } => { let cli::SourcesOptions { no_deps, @@ -711,7 +713,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { &config, client, runtime, - basic_auth_policy, + auth_policy, ), } } diff --git a/sysand/tests/cli_publish.rs b/sysand/tests/cli_publish.rs new file mode 100644 index 00000000..4fb39747 --- /dev/null +++ b/sysand/tests/cli_publish.rs @@ -0,0 +1,604 @@ +// SPDX-FileCopyrightText: © 2025 Sysand contributors +// SPDX-License-Identifier: MIT OR Apache-2.0 + +use assert_cmd::prelude::*; +use camino::{Utf8Path, Utf8PathBuf}; +use camino_tempfile::Utf8TempDir; +use indexmap::IndexMap; +use mockito::{Matcher, Server}; +use predicates::prelude::*; + +// pub due to https://github.com/rust-lang/rust/issues/46379 +mod common; +pub use common::*; + +type TestResult = Result<(), Box>; + +fn init_project(name: &str) -> Result<(Utf8TempDir, Utf8PathBuf), Box> { + let (temp_dir, cwd, out) = run_sysand( + [ + "init", + "--version", + "1.0.0", + "--name", + name, + "--license", + "MIT", + ], + None, + )?; + out.assert().success(); + Ok((temp_dir, cwd)) +} + +fn run_sysand_ok(cwd: &Utf8Path, args: &[&str], cfg: Option<&str>) -> TestResult { + let out = run_sysand_in(cwd, args.iter().copied(), cfg)?; + out.assert().success(); + Ok(()) +} + +fn include_basic_model(cwd: &Utf8Path) -> TestResult { + std::fs::write(cwd.join("test.sysml"), "package P;\n")?; + run_sysand_ok(cwd, &["include", "--no-index-symbols", "test.sysml"], None) +} + +fn build_default_kpar(cwd: &Utf8Path) -> TestResult { + run_sysand_ok(cwd, &["build"], None) +} + +fn build_kpar_at(cwd: &Utf8Path, kpar_path: &str) -> TestResult { + run_sysand_ok(cwd, &["build", kpar_path], 
None) +} + +fn setup_built_project( + name: &str, +) -> Result<(Utf8TempDir, Utf8PathBuf), Box> { + let (temp_dir, cwd) = init_project(name)?; + include_basic_model(&cwd)?; + build_default_kpar(&cwd)?; + Ok((temp_dir, cwd)) +} + +fn setup_built_project_at( + name: &str, + kpar_path: &str, +) -> Result<(Utf8TempDir, Utf8PathBuf), Box> { + let (temp_dir, cwd) = init_project(name)?; + include_basic_model(&cwd)?; + build_kpar_at(&cwd, kpar_path)?; + Ok((temp_dir, cwd)) +} + +fn set_project_field(cwd: &Utf8Path, field: &str, value: &str) -> TestResult { + run_sysand_ok(cwd, &["info", field, "--set", value], None) +} + +fn bearer_env_for_url(url: &str) -> IndexMap { + let mut env = IndexMap::new(); + env.insert("SYSAND_CRED_TEST".to_string(), format!("{url}/**")); + env.insert( + "SYSAND_CRED_TEST_BEARER_TOKEN".to_string(), + "test-token".to_string(), + ); + env +} + +#[test] +fn publish_without_path_from_workspace_root_reports_explicit_error() -> TestResult { + let (_temp_dir, cwd) = new_temp_cwd()?; + std::fs::write( + cwd.join(".workspace.json"), + br#"{"projects": [{"path": "project1", "iris": ["urn:kpar:project1"]}]}"#, + )?; + std::fs::create_dir(cwd.join("project1"))?; + + let out = run_sysand_in(&cwd, ["publish", "--index", "http://localhost:1"], None)?; + out.assert() + .failure() + .stderr(predicate::str::contains("not supported from a workspace")) + .stderr(predicate::str::contains("explicit .kpar path")); + + Ok(()) +} + +#[test] +fn publish_missing_kpar() -> TestResult { + let (_temp_dir, cwd) = init_project("test-publish")?; + let out = run_sysand_in(&cwd, ["publish", "--index", "http://localhost:1"], None)?; + + out.assert() + .failure() + .stderr(predicate::str::contains("KPAR file not found")) + .stderr(predicate::str::contains("sysand build")); + + Ok(()) +} + +#[test] +fn publish_explicit_missing_kpar() -> TestResult { + let (_temp_dir, cwd) = init_project("test-publish")?; + let out = run_sysand_in( + &cwd, + [ + "publish", + "nonexistent.kpar", + 
"--index", + "http://localhost:1", + ], + None, + )?; + + out.assert() + .failure() + .stderr(predicate::str::contains("KPAR file not found")); + + Ok(()) +} + +#[test] +fn publish_network_error() -> TestResult { + let (_temp_dir, cwd) = setup_built_project("test-publish")?; + let env = bearer_env_for_url("http://localhost:1"); + let out = run_sysand_in_with( + &cwd, + ["publish", "--index", "http://localhost:1"], + None, + &env, + )?; + + out.assert() + .failure() + .stderr(predicate::str::contains("HTTP request failed")); + + Ok(()) +} + +#[test] +fn publish_requires_index_argument() -> TestResult { + let (_temp_dir, cwd) = setup_built_project("test-publish")?; + let out = run_sysand_in(&cwd, ["publish"], None)?; + out.assert() + .failure() + .stderr(predicate::str::contains( + "required arguments were not provided", + )) + .stderr(predicate::str::contains("--index ")); + + Ok(()) +} + +#[test] +fn publish_requires_index_value() -> TestResult { + let (_temp_dir, cwd) = setup_built_project("test-publish")?; + let out = run_sysand_in(&cwd, ["publish", "--index"], None)?; + out.assert().failure().stderr(predicate::str::contains( + "a value is required for '--index '", + )); + + Ok(()) +} + +#[test] +fn publish_requires_index_even_with_config_default() -> TestResult { + let (_temp_dir, cwd) = setup_built_project("test-publish")?; + + let config_path = cwd.join("publish-test.toml"); + std::fs::write( + &config_path, + "[[index]]\nurl = \"https://config-default.example.com\"\ndefault = true\n", + )?; + + let out = run_sysand_in(&cwd, ["publish"], Some(config_path.as_str()))?; + out.assert() + .failure() + .stderr(predicate::str::contains( + "required arguments were not provided", + )) + .stderr(predicate::str::contains("--index ")); + + Ok(()) +} + +#[test] +fn publish_with_explicit_index_succeeds() -> TestResult { + let (_temp_dir, cwd) = setup_built_project("test-publish")?; + let mut server = Server::new(); + let publish_mock = server + .mock("POST", 
"/api/v1/upload") + .match_header("authorization", "Bearer test-token") + .match_header( + "content-type", + Matcher::Regex("multipart/form-data; boundary=.*".to_string()), + ) + .match_header( + "content-length", + Matcher::Regex("^[1-9][0-9]{2,}$".to_string()), + ) + .match_body(Matcher::AllOf(vec![ + Matcher::Regex(r#"name="metadata""#.to_string()), + Matcher::Regex(r#"Content-Type: application/json"#.to_string()), + Matcher::Regex(r#""kpar_sha256_digest":"[0-9a-f]{64}""#.to_string()), + Matcher::Regex(r#""normalized_publisher":"#.to_string()), + Matcher::Regex(r#""normalized_name":"#.to_string()), + Matcher::Regex(r#""version":"#.to_string()), + Matcher::Regex(r#""license":"#.to_string()), + Matcher::Regex(r#"name="kpar""#.to_string()), + Matcher::Regex(r#"Content-Type: application/zip"#.to_string()), + ])) + .with_status(201) + .with_body("created") + .expect(1) + .create(); + + let env = bearer_env_for_url(server.url().as_str()); + let out = run_sysand_in_with( + &cwd, + ["publish", "--index", server.url().as_str()], + None, + &env, + )?; + out.assert().success(); + publish_mock.assert(); + + Ok(()) +} + +#[test] +fn publish_explicit_path_outside_project_dir() -> TestResult { + let (_temp_dir, cwd) = setup_built_project_at("outside-publish", "artifact.kpar")?; + let kpar_path = cwd.join("artifact.kpar"); + + let (_outside_temp_dir, outside_cwd) = new_temp_cwd()?; + let env = bearer_env_for_url("http://localhost:1"); + let out = run_sysand_in_with( + &outside_cwd, + [ + "publish", + kpar_path.as_str(), + "--index", + "http://localhost:1", + ], + None, + &env, + )?; + + out.assert() + .failure() + .stderr(predicate::str::contains("unable to find interchange project").not()) + .stderr(predicate::str::contains("HTTP request failed")); + + Ok(()) +} + +#[test] +fn publish_invalid_index_url_errors_early() -> TestResult { + let (_temp_dir, cwd) = setup_built_project_at("invalid-index", "artifact.kpar")?; + let out = run_sysand_in( + &cwd, + ["publish", 
"artifact.kpar", "--index", "ftp://example.org"], + None, + )?; + + out.assert() + .failure() + .stderr(predicate::str::contains("invalid index URL")) + .stderr(predicate::str::contains("HTTP request failed").not()); + + Ok(()) +} + +#[test] +fn publish_rejects_upload_endpoint_index_url() -> TestResult { + let (_temp_dir, cwd) = setup_built_project_at("upload-endpoint-index", "artifact.kpar")?; + let mut server = Server::new(); + let publish_mock = server.mock("POST", "/api/v1/upload").expect(0).create(); + let endpoint_url = format!("{}/api/v1/upload", server.url()); + + let env = bearer_env_for_url(server.url().as_str()); + let out = run_sysand_in_with( + &cwd, + ["publish", "artifact.kpar", "--index", endpoint_url.as_str()], + None, + &env, + )?; + + out.assert() + .failure() + .stderr(predicate::str::contains("invalid index URL")) + .stderr(predicate::str::contains("do not include `/api/v1/upload`")) + .stderr(predicate::str::contains("HTTP request failed").not()); + publish_mock.assert(); + + Ok(()) +} + +#[test] +fn publish_rejects_invalid_semver_version() -> TestResult { + let (_temp_dir, cwd) = init_project("invalid-version")?; + + let project_file = cwd.join(".project.json"); + let project_json = std::fs::read_to_string(&project_file)?; + let project_json = + project_json.replace("\"version\": \"1.0.0\"", "\"version\": \"not-semver\""); + std::fs::write(project_file, project_json)?; + + include_basic_model(&cwd)?; + build_kpar_at(&cwd, "artifact.kpar")?; + + let env = bearer_env_for_url("http://localhost:1"); + let out = run_sysand_in_with( + &cwd, + ["publish", "artifact.kpar", "--index", "http://localhost:1"], + None, + &env, + )?; + + out.assert() + .failure() + .stderr(predicate::str::contains("version field")) + .stderr(predicate::str::contains("Semantic Versioning 2.0 version")) + .stderr(predicate::str::contains("HTTP request failed").not()); + + Ok(()) +} + +#[test] +fn publish_rejects_noncanonicalizable_publisher() -> TestResult { + let 
(_temp_dir, cwd) = init_project("valid-publish-name")?; + set_project_field(&cwd, "publisher", "bad__publisher")?; + include_basic_model(&cwd)?; + build_kpar_at(&cwd, "artifact.kpar")?; + + let env = bearer_env_for_url("http://localhost:1"); + let out = run_sysand_in_with( + &cwd, + ["publish", "artifact.kpar", "--index", "http://localhost:1"], + None, + &env, + )?; + + out.assert() + .failure() + .stderr(predicate::str::contains("publisher field")) + .stderr(predicate::str::contains("must be 3-50 characters")) + .stderr(predicate::str::contains("HTTP request failed").not()); + + Ok(()) +} + +#[test] +fn publish_rejects_noncanonicalizable_name() -> TestResult { + let (_temp_dir, cwd) = init_project("valid-publish-name")?; + set_project_field(&cwd, "name", "bad__name")?; + include_basic_model(&cwd)?; + build_kpar_at(&cwd, "artifact.kpar")?; + + let env = bearer_env_for_url("http://localhost:1"); + let out = run_sysand_in_with( + &cwd, + ["publish", "artifact.kpar", "--index", "http://localhost:1"], + None, + &env, + )?; + + out.assert() + .failure() + .stderr(predicate::str::contains("name field")) + .stderr(predicate::str::contains("must be 3-50 characters")) + .stderr(predicate::str::contains("HTTP request failed").not()); + + Ok(()) +} + +#[test] +fn publish_sends_kpar_with_integrity_metadata() -> TestResult { + let (_temp_dir, cwd) = init_project("seed-project")?; + set_project_field(&cwd, "publisher", "Acme Labs")?; + set_project_field(&cwd, "name", "My.Project Alpha")?; + include_basic_model(&cwd)?; + build_kpar_at(&cwd, "artifact.kpar")?; + + let mut server = Server::new(); + let publish_mock = server + .mock("POST", "/api/v1/upload") + .match_header("authorization", "Bearer test-token") + .match_header( + "content-type", + Matcher::Regex("multipart/form-data; boundary=.*".to_string()), + ) + .match_header( + "content-length", + Matcher::Regex("^[1-9][0-9]{2,}$".to_string()), + ) + .match_body(Matcher::AllOf(vec![ + 
Matcher::Regex(r#"name="metadata""#.to_string()), + Matcher::Regex(r#""kpar_sha256_digest":"[0-9a-f]{64}""#.to_string()), + Matcher::Regex(r#""normalized_publisher":"acme-labs""#.to_string()), + Matcher::Regex(r#""normalized_name":"my.project-alpha""#.to_string()), + Matcher::Regex(r#""version":"1.0.0""#.to_string()), + Matcher::Regex(r#""license":"MIT""#.to_string()), + Matcher::Regex(r#"name="kpar""#.to_string()), + Matcher::Regex(r#"Content-Type: application/zip"#.to_string()), + ])) + .with_status(201) + .with_body("created") + .expect(1) + .create(); + + let index_url = server.url(); + let env = bearer_env_for_url(index_url.as_str()); + let out = run_sysand_in_with( + &cwd, + ["publish", "artifact.kpar", "--index", index_url.as_str()], + None, + &env, + )?; + + out.assert().success(); + publish_mock.assert(); + + Ok(()) +} + +#[test] +fn publish_ignores_basic_auth_credentials() -> TestResult { + let (_temp_dir, cwd) = setup_built_project("publish-basic-auth-ignored")?; + + let mut server = Server::new(); + let publish_mock = server.mock("POST", "/api/v1/upload").expect(0).create(); + + let pattern = format!("{}/**", server.url()); + let mut env = IndexMap::new(); + env.insert("SYSAND_CRED_TEST".to_string(), pattern); + env.insert( + "SYSAND_CRED_TEST_BASIC_USER".to_string(), + "user".to_string(), + ); + env.insert( + "SYSAND_CRED_TEST_BASIC_PASS".to_string(), + "pass".to_string(), + ); + + let out = run_sysand_in_with( + &cwd, + ["publish", "--index", server.url().as_str()], + None, + &env, + )?; + out.assert() + .failure() + .stderr(predicate::str::contains( + "no bearer token credentials configured for publish URL", + )) + .stderr(predicate::str::contains("HTTP request failed").not()); + + publish_mock.assert(); + + Ok(()) +} + +#[test] +fn publish_rejects_ambiguous_bearer_credentials() -> TestResult { + let (_temp_dir, cwd) = setup_built_project("publish-ambiguous-bearer")?; + + let mut server = Server::new(); + let publish_mock = server.mock("POST", 
"/api/v1/upload").expect(0).create(); + + let base = server.url(); + let mut env = IndexMap::new(); + env.insert("SYSAND_CRED_A".to_string(), format!("{base}/**")); + env.insert( + "SYSAND_CRED_A_BEARER_TOKEN".to_string(), + "token-a".to_string(), + ); + env.insert("SYSAND_CRED_B".to_string(), format!("{base}/api/**")); + env.insert( + "SYSAND_CRED_B_BEARER_TOKEN".to_string(), + "token-b".to_string(), + ); + + let out = run_sysand_in_with(&cwd, ["publish", "--index", base.as_str()], None, &env)?; + out.assert() + .failure() + .stderr(predicate::str::contains( + "multiple bearer token credentials configured for publish URL", + )) + .stderr(predicate::str::contains("HTTP request failed").not()); + + publish_mock.assert(); + + Ok(()) +} + +/// Helper for tests that publish to a mock server returning a specific status code +/// and assert that the CLI maps it to the expected error message(s). +fn assert_publish_error_status( + project_name: &str, + status: usize, + response_body: &str, + content_type: Option<&str>, + expected_stderr: &[&str], +) -> TestResult { + let (_temp_dir, cwd) = setup_built_project(project_name)?; + + let mut server = Server::new(); + let mut mock = server + .mock("POST", "/api/v1/upload") + .with_status(status) + .with_body(response_body) + .expect(1); + if let Some(ct) = content_type { + mock = mock.with_header("content-type", ct); + } + let publish_mock = mock.create(); + + let env = bearer_env_for_url(server.url().as_str()); + let out = run_sysand_in_with( + &cwd, + ["publish", "--index", server.url().as_str()], + None, + &env, + )?; + let mut assertion = out.assert().failure(); + for pattern in expected_stderr { + assertion = assertion.stderr(predicate::str::contains(*pattern)); + } + publish_mock.assert(); + + Ok(()) +} + +#[test] +fn publish_401_maps_to_auth_error() -> TestResult { + assert_publish_error_status( + "publish-auth-401", + 401, + "unauthorized", + None, + &["authentication failed", "unauthorized"], + ) +} + +#[test] +fn 
publish_403_maps_to_auth_error() -> TestResult { + assert_publish_error_status( + "publish-auth-403", + 403, + "forbidden", + None, + &["authentication failed", "forbidden"], + ) +} + +#[test] +fn publish_404_maps_to_not_found_error() -> TestResult { + assert_publish_error_status( + "publish-not-found", + 404, + "missing endpoint", + None, + &["publish endpoint not found", "missing endpoint"], + ) +} + +#[test] +fn publish_409_maps_to_conflict_error() -> TestResult { + assert_publish_error_status( + "publish-conflict", + 409, + "already exists", + None, + &["conflict: package version already exists", "already exists"], + ) +} + +#[test] +fn publish_500_json_error_body_extracts_error_message() -> TestResult { + assert_publish_error_status( + "publish-server-error", + 500, + r#"{"error":"Invalid token"}"#, + Some("application/json"), + &["server error (500)", "Invalid token"], + ) +}