From 5bb9f504969b85ef75820cf5f0782cfca6bbd351 Mon Sep 17 00:00:00 2001
From: aish-where-ya
Date: Mon, 2 Oct 2023 14:04:23 -0400
Subject: [PATCH 1/3] Added par support to wash-lib

Signed-off-by: aish-where-ya
---
 crates/wash-lib/src/cli/mod.rs |   1 +
 crates/wash-lib/src/cli/par.rs | 179 +++++++++++++++++++++++++++++++++
 src/par.rs                     | 155 ++++++++--------------------
 src/util.rs                    |   7 +-
 4 files changed, 223 insertions(+), 119 deletions(-)
 create mode 100644 crates/wash-lib/src/cli/par.rs

diff --git a/crates/wash-lib/src/cli/mod.rs b/crates/wash-lib/src/cli/mod.rs
index 60828310..545ede04 100644
--- a/crates/wash-lib/src/cli/mod.rs
+++ b/crates/wash-lib/src/cli/mod.rs
@@ -43,6 +43,7 @@ pub mod get;
 pub mod inspect;
 pub mod link;
 pub mod output;
+pub mod par;
 pub mod registry;
 pub mod spy;
 pub mod start;
diff --git a/crates/wash-lib/src/cli/par.rs b/crates/wash-lib/src/cli/par.rs
new file mode 100644
index 00000000..edd6b017
--- /dev/null
+++ b/crates/wash-lib/src/cli/par.rs
@@ -0,0 +1,179 @@
+use std::{fs::File, io::prelude::*, path::PathBuf};
+
+use crate::cli::{extract_keypair, OutputKind};
+use anyhow::{anyhow, bail, Context, Result};
+use nkeys::KeyPairType;
+use provider_archive::ProviderArchive;
+
+const GZIP_MAGIC: [u8; 2] = [0x1f, 0x8b];
+
+pub struct ParCreateArgs {
+    pub capid: String,
+    pub vendor: String,
+    pub revision: Option<i32>,
+    pub version: Option<String>,
+    pub schema: Option<PathBuf>,
+    pub issuer: Option<String>,
+    pub subject: Option<String>,
+    pub name: String,
+    pub directory: Option<PathBuf>,
+    pub arch: String,
+    pub binary: String,
+    pub destination: Option<String>,
+    pub compress: bool,
+    pub disable_keygen: bool,
+}
+pub struct ParInsertArgs {
+    pub archive: String,
+    pub arch: String,
+    pub binary: String,
+    pub directory: Option<PathBuf>,
+    pub issuer: Option<String>,
+    pub subject: Option<String>,
+    pub disable_keygen: bool,
+}
+
+#[allow(clippy::too_many_arguments)]
+pub async fn handle_par_create(
+    ParCreateArgs {
+        // claims related options
+        capid,
+        vendor,
+        revision,
+        version,
+        schema,
+        issuer,
+        subject,
+        name,
+        // par related options
+        directory,
+        arch,
+        binary,
+        destination,
+        compress,
+        disable_keygen,
+    }: ParCreateArgs,
+    output_kind: OutputKind,
+) -> Result<String> {
+    let mut par = ProviderArchive::new(&capid, &name, &vendor, revision, version);
+
+    let mut f = File::open(binary.clone())?;
+    let mut lib = Vec::new();
+    f.read_to_end(&mut lib)?;
+
+    let issuer = extract_keypair(
+        issuer,
+        Some(binary.clone()),
+        directory.clone(),
+        KeyPairType::Account,
+        disable_keygen,
+        output_kind,
+    )?;
+    let subject = extract_keypair(
+        subject,
+        Some(binary.clone()),
+        directory,
+        KeyPairType::Service,
+        disable_keygen,
+        output_kind,
+    )?;
+
+    par.add_library(&arch, &lib).map_err(convert_error)?;
+
+    let extension = if compress { ".par.gz" } else { ".par" };
+    let outfile = match destination {
+        Some(path) => path,
+        None => format!(
+            "{}{}",
+            PathBuf::from(binary.clone())
+                .file_stem()
+                .unwrap()
+                .to_str()
+                .unwrap(),
+            extension
+        ),
+    };
+    if let Some(ref schema) = schema {
+        let bytes = std::fs::read(schema)?;
+        par.set_schema(
+            serde_json::from_slice::<serde_json::Value>(&bytes)
+                .with_context(|| "Unable to parse JSON from file contents".to_string())?,
+        )
+        .map_err(convert_error)
+        .with_context(|| format!("Error parsing JSON schema from file '{:?}'", schema))?;
+    }
+
+    par.write(&outfile, &issuer, &subject, compress)
+        .await
+        .map_err(|e| anyhow!("{}", e))
+        .with_context(|| {
+            format!(
+                "Error writing PAR. Please ensure directory {:?} exists",
+                PathBuf::from(outfile.clone()).parent().unwrap(),
+            )
+        })?;
+    Ok(outfile)
+}
+
+pub async fn handle_par_insert(
+    ParInsertArgs {
+        archive,
+        arch,
+        binary,
+        directory,
+        issuer,
+        subject,
+        disable_keygen,
+    }: ParInsertArgs,
+    output_kind: OutputKind,
+) -> Result<()> {
+    let mut buf = Vec::new();
+    let mut f = File::open(archive.clone())?;
+    f.read_to_end(&mut buf)?;
+
+    let mut par = ProviderArchive::try_load(&buf)
+        .await
+        .map_err(convert_error)?;
+
+    let issuer = extract_keypair(
+        issuer,
+        Some(binary.clone().to_owned()),
+        directory.clone(),
+        KeyPairType::Account,
+        disable_keygen,
+        output_kind,
+    )?;
+    let subject = extract_keypair(
+        subject,
+        Some(binary.clone().to_owned()),
+        directory,
+        KeyPairType::Service,
+        disable_keygen,
+        output_kind,
+    )?;
+
+    let mut f = File::open(binary.clone())?;
+    let mut lib = Vec::new();
+    f.read_to_end(&mut lib)?;
+
+    par.add_library(&arch, &lib).map_err(convert_error)?;
+
+    par.write(&archive, &issuer, &subject, is_compressed(&buf)?)
+        .await
+        .map_err(convert_error)?;
+
+    Ok(())
+}
+
+/// Converts error from Send + Sync error to standard anyhow error
+pub(crate) fn convert_error(e: Box<dyn ::std::error::Error + Send + Sync>) -> anyhow::Error {
+    anyhow!(e.to_string())
+}
+
+/// Inspects the byte slice for a GZIP header, and returns true if the file is compressed
+fn is_compressed(input: &[u8]) -> Result<bool> {
+    if input.len() < 2 {
+        bail!("Not enough bytes to be a valid PAR file");
+    }
+    Ok(input[0..2] == GZIP_MAGIC)
+}
diff --git a/src/par.rs b/src/par.rs
index a22e7fda..0a508f81 100644
--- a/src/par.rs
+++ b/src/par.rs
@@ -1,15 +1,11 @@
-use std::{collections::HashMap, fs::File, io::prelude::*, path::PathBuf};
+use std::{collections::HashMap, path::PathBuf};
 
-use crate::util::convert_error;
-use anyhow::{anyhow, bail, Context, Result};
+use anyhow::Result;
 use clap::{Parser, Subcommand};
 use log::warn;
-use nkeys::KeyPairType;
-use provider_archive::ProviderArchive;
 use serde_json::json;
-use wash_lib::cli::{extract_keypair, inspect, CommandOutput, OutputKind};
-
-const GZIP_MAGIC: [u8; 2] = [0x1f, 0x8b];
+use wash_lib::cli::par::{handle_par_create, handle_par_insert};
+use wash_lib::cli::{inspect, par, CommandOutput, OutputKind};
 
 #[derive(Debug, Clone, Subcommand)]
 pub(crate) enum ParCliCommand {
@@ -204,6 +200,42 @@ impl From<InspectCommand> for inspect::InspectCliCommand {
         }
     }
 }
+
+impl From<CreateCommand> for par::ParCreateArgs {
+    fn from(cmd: CreateCommand) -> Self {
+        par::ParCreateArgs {
+            capid: cmd.capid,
+            vendor: cmd.vendor,
+            revision: cmd.revision,
+            version: cmd.version,
+            schema: cmd.schema,
+            issuer: cmd.issuer,
+            subject: cmd.subject,
+            name: cmd.name,
+            directory: cmd.directory,
+            arch: cmd.arch,
+            binary: cmd.binary,
+            destination: cmd.destination,
+            compress: cmd.compress,
+            disable_keygen: cmd.disable_keygen,
+        }
+    }
+}
+
+impl From<InsertCommand> for par::ParInsertArgs {
+    fn from(cmd: InsertCommand) -> Self {
+        par::ParInsertArgs {
+            archive: cmd.archive,
+            arch: cmd.arch,
+            binary: cmd.binary,
+            directory: cmd.directory,
+            issuer: cmd.issuer,
+            subject: cmd.subject,
+            disable_keygen: cmd.disable_keygen,
+        }
+    }
+}
+
 pub(crate) async fn handle_command(
     command: ParCliCommand,
     output_kind: OutputKind,
@@ -223,69 +255,7 @@ pub(crate) async fn handle_create(
     cmd: CreateCommand,
     output_kind: OutputKind,
 ) -> Result<CommandOutput> {
-    let mut par = ProviderArchive::new(
-        &cmd.capid,
-        &cmd.name,
-        &cmd.vendor,
-        cmd.revision,
-        cmd.version,
-    );
-
-    let mut f = File::open(cmd.binary.clone())?;
-    let mut lib = Vec::new();
-    f.read_to_end(&mut lib)?;
-
-    let issuer = extract_keypair(
-        cmd.issuer,
-        Some(cmd.binary.clone()),
-        cmd.directory.clone(),
-        KeyPairType::Account,
-        cmd.disable_keygen,
-        output_kind,
-    )?;
-    let subject = extract_keypair(
-        cmd.subject,
-        Some(cmd.binary.clone()),
-        cmd.directory,
-        KeyPairType::Service,
-        cmd.disable_keygen,
-        output_kind,
-    )?;
-
-    par.add_library(&cmd.arch, &lib).map_err(convert_error)?;
-
-    let extension = if cmd.compress { ".par.gz" } else { ".par" };
-    let outfile = match cmd.destination {
-        Some(path) => path,
-        None => format!(
-            "{}{}",
-            PathBuf::from(cmd.binary.clone())
-                .file_stem()
-                .unwrap()
-                .to_str()
-                .unwrap(),
-            extension
-        ),
-    };
-    if let Some(ref schema) = cmd.schema {
-        let bytes = std::fs::read(schema)?;
-        par.set_schema(
-            serde_json::from_slice::<serde_json::Value>(&bytes)
-                .with_context(|| "Unable to parse JSON from file contents".to_string())?,
-        )
-        .map_err(convert_error)
-        .with_context(|| format!("Error parsing JSON schema from file '{:?}'", schema))?;
-    }
-
-    par.write(&outfile, &issuer, &subject, cmd.compress)
-        .await
-        .map_err(|e| anyhow!("{}", e))
-        .with_context(|| {
-            format!(
-                "Error writing PAR. Please ensure directory {:?} exists",
-                PathBuf::from(outfile.clone()).parent().unwrap(),
-            )
-        })?;
+    let outfile = handle_par_create(cmd.into(), output_kind).await?;
 
     let mut map = HashMap::new();
     map.insert("file".to_string(), json!(outfile));
@@ -300,40 +270,7 @@ pub(crate) async fn handle_insert(
     cmd: InsertCommand,
     output_kind: OutputKind,
 ) -> Result<CommandOutput> {
-    let mut buf = Vec::new();
-    let mut f = File::open(cmd.archive.clone())?;
-    f.read_to_end(&mut buf)?;
-
-    let mut par = ProviderArchive::try_load(&buf)
-        .await
-        .map_err(convert_error)?;
-
-    let issuer = extract_keypair(
-        cmd.issuer,
-        Some(cmd.binary.clone()),
-        cmd.directory.clone(),
-        KeyPairType::Account,
-        cmd.disable_keygen,
-        output_kind,
-    )?;
-    let subject = extract_keypair(
-        cmd.subject,
-        Some(cmd.binary.clone()),
-        cmd.directory,
-        KeyPairType::Service,
-        cmd.disable_keygen,
-        output_kind,
-    )?;
-
-    let mut f = File::open(cmd.binary.clone())?;
-    let mut lib = Vec::new();
-    f.read_to_end(&mut lib)?;
-
-    par.add_library(&cmd.arch, &lib).map_err(convert_error)?;
-
-    par.write(&cmd.archive, &issuer, &subject, is_compressed(&buf)?)
-        .await
-        .map_err(convert_error)?;
+    handle_par_insert(cmd.clone().into(), output_kind).await?;
 
     let mut map = HashMap::new();
     map.insert("file".to_string(), json!(cmd.archive));
@@ -346,14 +283,6 @@ pub(crate) async fn handle_insert(
     ))
 }
 
-/// Inspects the byte slice for a GZIP header, and returns true if the file is compressed
-fn is_compressed(input: &[u8]) -> Result<bool> {
-    if input.len() < 2 {
-        bail!("Not enough bytes to be a valid PAR file");
-    }
-    Ok(input[0..2] == GZIP_MAGIC)
-}
-
 #[cfg(test)]
 mod test {
     use super::*;
diff --git a/src/util.rs b/src/util.rs
index 3fc796de..03e651be 100644
--- a/src/util.rs
+++ b/src/util.rs
@@ -1,6 +1,6 @@
 use std::{fs::File, io::Read, path::PathBuf};
 
-use anyhow::{anyhow, Context, Result};
+use anyhow::{Context, Result};
 use term_table::{Table, TableStyle};
 use wash_lib::config::DEFAULT_NATS_TIMEOUT_MS;
 
@@ -25,11 +25,6 @@ pub(crate) fn default_timeout_ms() -> u64 {
     DEFAULT_NATS_TIMEOUT_MS
 }
 
-/// Converts error from Send + Sync error to standard anyhow error
-pub(crate) fn convert_error(e: Box<dyn ::std::error::Error + Send + Sync>) -> anyhow::Error {
-    anyhow!(e.to_string())
-}
-
 /// Transform a json string (e.g. "{"hello": "world"}") into msgpack bytes
 pub(crate) fn json_str_to_msgpack_bytes(payload: &str) -> Result<Vec<u8>> {
     let json = serde_json::from_str::<serde_json::Value>(payload)?;
Please ensure directory {:?} exists", - PathBuf::from(outfile.clone()).parent().unwrap(), - ) - })?; - Ok(outfile) + Ok(par) } -pub async fn handle_par_insert( - ParInsertArgs { - archive, - arch, - binary, - directory, - issuer, - subject, - disable_keygen, - }: ParInsertArgs, - output_kind: OutputKind, -) -> Result<()> { - let mut buf = Vec::new(); - let mut f = File::open(archive.clone())?; - f.read_to_end(&mut buf)?; - - let mut par = ProviderArchive::try_load(&buf) - .await +pub async fn insert_provider_archive( + arch: String, + binary_bytes: &[u8], + mut par: ProviderArchive, +) -> Result { + par.add_library(&arch, binary_bytes) .map_err(convert_error)?; - let issuer = extract_keypair( - issuer, - Some(binary.clone().to_owned()), - directory.clone(), - KeyPairType::Account, - disable_keygen, - output_kind, - )?; - let subject = extract_keypair( - subject, - Some(binary.clone().to_owned()), - directory, - KeyPairType::Service, - disable_keygen, - output_kind, - )?; - - let mut f = File::open(binary.clone())?; - let mut lib = Vec::new(); - f.read_to_end(&mut lib)?; - - par.add_library(&arch, &lib).map_err(convert_error)?; - - par.write(&archive, &issuer, &subject, is_compressed(&buf)?) - .await - .map_err(convert_error)?; - - Ok(()) + Ok(par) } /// Converts error from Send + Sync error to standard anyhow error -pub(crate) fn convert_error(e: Box) -> anyhow::Error { +pub fn convert_error(e: Box) -> anyhow::Error { anyhow!(e.to_string()) } - -/// Inspects the byte slice for a GZIP header, and returns true if the file is compressed -fn is_compressed(input: &[u8]) -> Result { - if input.len() < 2 { - bail!("Not enough bytes to be a valid PAR file"); - } - Ok(input[0..2] == GZIP_MAGIC) -} diff --git a/src/par.rs b/src/par.rs index 0a508f81..f5c7665d 100644 --- a/src/par.rs +++ b/src/par.rs @@ -1,11 +1,17 @@ +use std::fs::File; +use std::io::Read; use std::{collections::HashMap, path::PathBuf}; -use anyhow::Result; +use anyhow::{anyhow, bail, Context, Result}; use clap::{Parser, Subcommand}; use log::warn; +use nkeys::KeyPairType; +use provider_archive::ProviderArchive; use serde_json::json; -use wash_lib::cli::par::{handle_par_create, handle_par_insert}; -use wash_lib::cli::{inspect, par, CommandOutput, OutputKind}; +use wash_lib::cli::par::{convert_error, create_provider_archive, insert_provider_archive}; +use wash_lib::cli::{extract_keypair, inspect, par, CommandOutput, OutputKind}; + +const GZIP_MAGIC: [u8; 2] = [0x1f, 0x8b]; #[derive(Debug, Clone, Subcommand)] pub(crate) enum ParCliCommand { @@ -209,29 +215,8 @@ impl From for par::ParCreateArgs { revision: cmd.revision, version: cmd.version, schema: cmd.schema, - issuer: cmd.issuer, - subject: cmd.subject, name: cmd.name, - directory: cmd.directory, - arch: cmd.arch, - binary: cmd.binary, - destination: cmd.destination, - compress: cmd.compress, - disable_keygen: cmd.disable_keygen, - } - } -} - -impl From for par::ParInsertArgs { - fn from(cmd: InsertCommand) -> Self { - par::ParInsertArgs { - archive: cmd.archive, arch: cmd.arch, - binary: cmd.binary, - directory: cmd.directory, - issuer: cmd.issuer, - subject: cmd.subject, - disable_keygen: cmd.disable_keygen, } } } @@ -255,7 +240,51 @@ pub(crate) async fn handle_create( cmd: CreateCommand, output_kind: OutputKind, ) -> Result { - let outfile = handle_par_create(cmd.into(), output_kind).await?; + let mut f = File::open(cmd.binary.clone())?; + let mut lib = Vec::new(); + f.read_to_end(&mut lib)?; + + let issuer = extract_keypair( + cmd.issuer.clone(), + Some(cmd.binary.clone()), + 
cmd.directory.clone(), + KeyPairType::Account, + cmd.disable_keygen, + output_kind, + )?; + let subject = extract_keypair( + cmd.subject.clone(), + Some(cmd.binary.clone()), + cmd.directory.clone(), + KeyPairType::Service, + cmd.disable_keygen, + output_kind, + )?; + + let extension = if cmd.compress { ".par.gz" } else { ".par" }; + let outfile = match cmd.destination.clone() { + Some(path) => path, + None => format!( + "{}{}", + PathBuf::from(cmd.binary.clone()) + .file_stem() + .unwrap() + .to_str() + .unwrap(), + extension + ), + }; + + let mut par = create_provider_archive(cmd.clone().into(), &lib).await?; + par.write(&outfile, &issuer, &subject, cmd.compress) + .await + .map_err(|e| anyhow!("{}", e)) + .with_context(|| { + format!( + "Error writing PAR. Please ensure directory {:?} exists", + PathBuf::from(outfile.clone()).parent().unwrap(), + ) + })?; let mut map = HashMap::new(); map.insert("file".to_string(), json!(outfile)); @@ -270,7 +299,39 @@ pub(crate) async fn handle_insert( cmd: InsertCommand, output_kind: OutputKind, ) -> Result { - handle_par_insert(cmd.clone().into(), output_kind).await?; + let mut buf = Vec::new(); + let mut f = File::open(cmd.archive.clone())?; + f.read_to_end(&mut buf)?; + + let mut f = File::open(cmd.binary.clone())?; + let mut lib = Vec::new(); + f.read_to_end(&mut lib)?; + + let issuer = extract_keypair( + cmd.issuer.clone(), + Some(cmd.binary.clone().to_owned()), + cmd.directory.clone(), + KeyPairType::Account, + cmd.disable_keygen, + output_kind, + )?; + let subject = extract_keypair( + cmd.subject.clone(), + Some(cmd.binary.clone().to_owned()), + cmd.directory.clone(), + KeyPairType::Service, + cmd.disable_keygen, + output_kind, + )?; + + let mut par = ProviderArchive::try_load(&buf) + .await + .map_err(convert_error)?; + + par = insert_provider_archive(cmd.arch, &lib, par).await?; + par.write(&cmd.archive, &issuer, &subject, is_compressed(&buf)?) 
+ .await + .map_err(convert_error)?; let mut map = HashMap::new(); map.insert("file".to_string(), json!(cmd.archive)); @@ -283,6 +344,14 @@ pub(crate) async fn handle_insert( )) } +/// Inspects the byte slice for a GZIP header, and returns true if the file is compressed +fn is_compressed(input: &[u8]) -> Result { + if input.len() < 2 { + bail!("Not enough bytes to be a valid PAR file"); + } + Ok(input[0..2] == GZIP_MAGIC) +} + #[cfg(test)] mod test { use super::*; From c168d2eb1da76d1cffe9eb126c4bf770dbbba2dc Mon Sep 17 00:00:00 2001 From: aish-where-ya Date: Tue, 3 Oct 2023 11:28:56 -0400 Subject: [PATCH 3/3] Minor fix Signed-off-by: aish-where-ya --- crates/wash-lib/src/cli/par.rs | 2 +- src/par.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/wash-lib/src/cli/par.rs b/crates/wash-lib/src/cli/par.rs index 1014987c..cfb0dfe4 100644 --- a/crates/wash-lib/src/cli/par.rs +++ b/crates/wash-lib/src/cli/par.rs @@ -42,7 +42,7 @@ pub async fn create_provider_archive( Ok(par) } -pub async fn insert_provider_archive( +pub async fn insert_provider_binary( arch: String, binary_bytes: &[u8], mut par: ProviderArchive, diff --git a/src/par.rs b/src/par.rs index f5c7665d..fe3a632b 100644 --- a/src/par.rs +++ b/src/par.rs @@ -8,7 +8,7 @@ use log::warn; use nkeys::KeyPairType; use provider_archive::ProviderArchive; use serde_json::json; -use wash_lib::cli::par::{convert_error, create_provider_archive, insert_provider_archive}; +use wash_lib::cli::par::{convert_error, create_provider_archive, insert_provider_binary}; use wash_lib::cli::{extract_keypair, inspect, par, CommandOutput, OutputKind}; const GZIP_MAGIC: [u8; 2] = [0x1f, 0x8b]; @@ -328,7 +328,7 @@ pub(crate) async fn handle_insert( .await .map_err(convert_error)?; - par = insert_provider_archive(cmd.arch, &lib, par).await?; + par = insert_provider_binary(cmd.arch, &lib, par).await?; par.write(&cmd.archive, &issuer, &subject, is_compressed(&buf)?) .await .map_err(convert_error)?;
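With the full series applied, wash-lib only builds and extends a ProviderArchive in memory (create_provider_archive, insert_provider_binary), while reading binaries, resolving signing keys, and writing the archive stay in the wash CLI's handle_create and handle_insert. A rough sketch of that split from a library consumer's point of view; the paths and claim metadata are hypothetical, and a throwaway nkeys pair stands in for the keys wash would resolve through extract_keypair:

// Sketch only: uses the wash-lib API as it stands after PATCH 3/3.
// File paths and claim metadata are invented for illustration.
use nkeys::KeyPair;
use wash_lib::cli::par::{convert_error, create_provider_archive, insert_provider_binary, ParCreateArgs};

async fn build_and_extend_archive() -> anyhow::Result<()> {
    let x86_bytes = std::fs::read("./build/example_provider.x86_64.so")?;

    // Build the archive in memory from the first platform binary.
    let mut par = create_provider_archive(
        ParCreateArgs {
            capid: "wasmcloud:example".to_string(),
            vendor: "acme".to_string(),
            revision: Some(1),
            version: Some("0.1.0".to_string()),
            schema: None,
            name: "Example Provider".to_string(),
            arch: "x86_64-linux".to_string(),
        },
        &x86_bytes,
    )
    .await?;

    // Add a second platform binary to the same archive.
    let arm_bytes = std::fs::read("./build/example_provider.aarch64.so")?;
    par = insert_provider_binary("aarch64-linux".to_string(), &arm_bytes, par).await?;

    // Signing and writing are the caller's job; wash itself resolves these keys
    // with extract_keypair, while a throwaway pair is used here instead.
    let issuer = KeyPair::new_account();
    let subject = KeyPair::new_service();
    par.write("example_provider.par.gz", &issuer, &subject, true)
        .await
        .map_err(convert_error)?;
    Ok(())
}

Keeping OutputKind, file paths, and key handling out of the library functions is the point of the rework in the second commit: the same byte-oriented API can back both the CLI and other wash-lib consumers.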