diff --git a/sources/Cargo.lock b/sources/Cargo.lock index 9ab6d56a1..f933988b2 100644 --- a/sources/Cargo.lock +++ b/sources/Cargo.lock @@ -3206,15 +3206,19 @@ dependencies = [ "bottlerocket-release", "bytes", "chrono", + "datastore", "futures", "futures-core", "generate-readme", "log", "lz4", + "models", "nix", "pentacle", + "percent-encoding", "rand", "semver", + "serde_json", "simplelog", "snafu", "storewolf", @@ -3224,6 +3228,7 @@ dependencies = [ "tough", "update_metadata", "url", + "walkdir", ] [[package]] @@ -3288,6 +3293,7 @@ dependencies = [ "libc", "serde", "serde_json", + "serde_plain", "toml", ] @@ -4948,6 +4954,7 @@ dependencies = [ "http 0.2.12", "log", "models", + "serde", "serde_json", "shlex", "simplelog", diff --git a/sources/api/apiclient/README.md b/sources/api/apiclient/README.md index dea3dfc89..78694f7bb 100644 --- a/sources/api/apiclient/README.md +++ b/sources/api/apiclient/README.md @@ -174,6 +174,10 @@ You can see all your pending settings like this: ```shell apiclient raw -u /tx ``` +You can also see pending metadata along with pending setting using version 2 of `/tx` like this: +```shell +apiclient raw -u /v2/tx +``` To *commit* the settings, and let the system apply them to any relevant configuration files or services, do this: ```shell diff --git a/sources/api/apiclient/README.tpl b/sources/api/apiclient/README.tpl index 9604d7f68..e886246a4 100644 --- a/sources/api/apiclient/README.tpl +++ b/sources/api/apiclient/README.tpl @@ -174,6 +174,10 @@ You can see all your pending settings like this: ```shell apiclient raw -u /tx ``` +You can also see pending metadata along with pending setting using version 2 of `/tx` like this: +```shell +apiclient raw -u /v2/tx +``` To *commit* the settings, and let the system apply them to any relevant configuration files or services, do this: ```shell diff --git a/sources/api/apiserver/src/server/controller.rs b/sources/api/apiserver/src/server/controller.rs index b064ecaa6..476041921 100644 --- a/sources/api/apiserver/src/server/controller.rs +++ b/sources/api/apiserver/src/server/controller.rs @@ -13,7 +13,7 @@ use actix_web::HttpResponse; use datastore::deserialization::{from_map, from_map_with_prefix}; use datastore::serialization::to_pairs_with_prefix; use datastore::{deserialize_scalar, Committed, DataStore, Key, KeyType, ScalarError, Value}; -use model::{ConfigurationFiles, Services, Settings}; +use model::{ConfigurationFiles, Services, Settings, Strength}; use num::FromPrimitive; use std::os::unix::process::ExitStatusExt; use thar_be_updates::error::TbuErrorStatus; @@ -44,6 +44,34 @@ where .map(|maybe_settings| maybe_settings.unwrap_or_default()) } +/// Gets the metadata for metadata_key_name in the given transaction +/// Returns all metadata if metadata_key_name is None +pub(crate) fn get_transaction_metadata( + datastore: &D, + transaction: S, + metadata_key_name: Option, +) -> Result>> +where + D: DataStore, + S: Into, +{ + let pending = Committed::Pending { + tx: transaction.into(), + }; + + let metadata = datastore + .get_metadata_prefix("settings.", &pending, &metadata_key_name) + .with_context(|_| error::DataStoreSnafu { + op: format!("get_metadata_prefix '{}' for {:?}", "settings.", pending), + })?; + + if metadata.is_empty() { + return Ok(HashMap::new()); + } + + Ok(metadata) +} + /// Deletes the transaction from the data store, removing any uncommitted settings under that /// transaction name. 
pub(crate) fn delete_transaction( @@ -364,6 +392,7 @@ pub(crate) fn set_settings( datastore: &mut D, settings: &Settings, transaction: &str, + strength: Strength, ) -> Result<()> { trace!("Serializing Settings to write to data store"); let settings_json = serde_json::to_value(settings).context(error::SettingsToJsonSnafu)?; @@ -372,6 +401,96 @@ pub(crate) fn set_settings( let pending = Committed::Pending { tx: transaction.into(), }; + + info!("Writing Metadata to data store"); + match strength { + Strength::Strong => { + // Get keys in the request + let keys: HashSet<&str> = pairs.iter().map(|pair| pair.0.name().as_str()).collect(); + // Get strength metadata for the keys from live + let committed_strength_live = get_metadata_for_data_keys(datastore, "strength", &keys)?; + + // Change the weak strength to strong if the committed strength is weak and requested strength is strong + for (key, value) in committed_strength_live { + // if the strength is weak then we need to change it to strong + if value == Strength::Weak.to_string() { + let data_key = + Key::new(KeyType::Data, key.clone()).context(error::NewKeySnafu { + key_type: "data", + name: key.clone(), + })?; + + let metadata_key_strength = + Key::new(KeyType::Meta, "strength").context(error::NewKeySnafu { + key_type: "meta", + name: "strength", + })?; // change this to name as strength and value as weak or strong + + let metadata_value = datastore::serialize_scalar::<_, ScalarError>( + &Strength::Strong.to_string(), + ) + .with_context(|_| error::SerializeSnafu {})?; + + datastore + .set_metadata(&metadata_key_strength, &data_key, metadata_value, &pending) + .context(error::DataStoreSnafu { + op: "Change strength metadata key to strong", + })?; + } + } + } + Strength::Weak => { + for key in pairs.keys() { + // The get key funtion returns Ok(None) in case if the path does not exist + // and error if some path exist and some error occurred in fetching + // Hence we we will return error in case of error + // from get key function and continue to add/change to weak key + // if the value is None. + let value = datastore + .get_key(key, &Committed::Live) + .context(error::DataStoreSnafu { op: "get_key" })?; + + // Get metadata value for the key + // If strength does not exist this hashmap will be empty + // and if strength exist this hashmap will return HashMap + let mut keys_to_get_metadata: HashSet<&str> = HashSet::new(); + keys_to_get_metadata.insert(key.name().as_str()); + let strength_pair = + get_metadata_for_data_keys(datastore, "strength", &keys_to_get_metadata)?; + + let is_setting_strong = strength_pair.is_empty() + || strength_pair.get(key.name().as_str()) + == Some(&serde_json::Value::String(Strength::Strong.to_string())); + + // We need to log that we are not changing the strength from strong to weak + // and continue for other settings. + if value.is_some() && is_setting_strong { + warn!("Trying to change the strength from strong to weak for key: {}, Operation ignored", key.name()); + continue; + } + + // If the strength and setting both does not exist and requested strength is weak + // Set strength metadata. 
+ let metadata_key = + Key::new(KeyType::Meta, "strength").context(error::NewKeySnafu { + key_type: "meta", + name: "strength", + })?; + + let metadata_value = + datastore::serialize_scalar::<_, ScalarError>(&Strength::Weak.to_string()) + .with_context(|_| error::SerializeSnafu {})?; + + datastore + .set_metadata(&metadata_key, key, metadata_value, &pending) + .context(error::DataStoreSnafu { + op: "create strength metadata key as weak", + })?; + } + } + }; + + info!("Writing Settings to data store: {:?}", pairs); datastore .set_keys(&pairs, &pending) .context(error::DataStoreSnafu { op: "set_keys" }) @@ -398,7 +517,7 @@ pub(crate) fn get_metadata_for_data_keys>( key_type: "data", name: *data_key_str, })?; - let value_str = match datastore.get_metadata(&md_key, &data_key) { + let value_str = match datastore.get_metadata(&md_key, &data_key, &Committed::Live) { Ok(Some(v)) => v, // TODO: confirm we want to skip requested keys if not populated, or error Ok(None) => continue, @@ -424,7 +543,7 @@ pub(crate) fn get_metadata_for_all_data_keys>( ) -> Result> { trace!("Getting metadata '{}'", md_key_str.as_ref()); let meta_map = datastore - .get_metadata_prefix("", &Some(md_key_str)) + .get_metadata_prefix("", &Committed::Live, &Some(md_key_str)) .context(error::DataStoreSnafu { op: "get_metadata_prefix", })?; @@ -781,7 +900,7 @@ mod test { let mut ds = MemoryDataStore::new(); let tx = "test transaction"; let pending = Committed::Pending { tx: tx.into() }; - set_settings(&mut ds, &settings, tx).unwrap(); + set_settings(&mut ds, &settings, tx, Strength::Strong).unwrap(); // Retrieve directly let key = Key::new(KeyType::Data, "settings.motd").unwrap(); @@ -800,6 +919,7 @@ mod test { &Key::new(KeyType::Meta, "my-meta").unwrap(), &Key::new(KeyType::Data, data_key).unwrap(), "\"json string\"", + &Committed::Live, ) .unwrap(); } @@ -824,6 +944,7 @@ mod test { &Key::new(KeyType::Meta, "my-meta").unwrap(), &Key::new(KeyType::Data, data_key).unwrap(), "\"json string\"", + &Committed::Live, ) .unwrap(); } diff --git a/sources/api/apiserver/src/server/error.rs b/sources/api/apiserver/src/server/error.rs index 6faae6036..d3297210a 100644 --- a/sources/api/apiserver/src/server/error.rs +++ b/sources/api/apiserver/src/server/error.rs @@ -234,6 +234,9 @@ pub enum Error { stdout: Vec, source: serde_json::Error, }, + + #[snafu(display("Error deserializing response value to SettingsGenerator: {}", source))] + DeserializeSettingsGenerator { source: serde_json::Error }, } pub type Result = std::result::Result; diff --git a/sources/api/apiserver/src/server/mod.rs b/sources/api/apiserver/src/server/mod.rs index eaee36170..9bf157c23 100644 --- a/sources/api/apiserver/src/server/mod.rs +++ b/sources/api/apiserver/src/server/mod.rs @@ -11,13 +11,14 @@ pub use error::Error; use actix_web::{ body::BoxBody, error::ResponseError, web, App, HttpRequest, HttpResponse, HttpServer, Responder, }; +use core::str; use datastore::{serialize_scalar, Committed, FilesystemDataStore, Key, KeyType, Value}; use error::Result; use fs2::FileExt; use http::StatusCode; use log::info; use model::ephemeral_storage::{Bind, Init}; -use model::{ConfigurationFiles, Model, Report, Services, Settings}; +use model::{ConfigurationFiles, Model, Report, Services, Settings, SettingsGenerator, Strength}; use nix::unistd::{chown, Gid}; use serde::{Deserialize, Serialize}; use snafu::{ensure, OptionExt, ResultExt}; @@ -108,6 +109,14 @@ where web::post().to(commit_transaction_and_apply), ), ) + .service( + web::scope("/v2") + .route("/tx", 
web::get().to(get_transaction_v2)) + .route( + "/metadata/setting-generators", + web::get().to(get_setting_generators_v2), + ), + ) .service(web::scope("/os").route("", web::get().to(get_os_info))) .service( web::scope("/metadata") @@ -304,8 +313,9 @@ async fn patch_settings( data: web::Data, ) -> Result { let transaction = transaction_name(&query); + let strength = strength(&query); let mut datastore = data.ds.write().ok().context(error::DataStoreLockSnafu)?; - controller::set_settings(&mut *datastore, &settings, transaction)?; + controller::set_settings(&mut *datastore, &settings, transaction, strength)?; Ok(HttpResponse::NoContent().finish()) // 204 } @@ -318,13 +328,14 @@ async fn patch_settings_key_pair( // Convert to a Map of Key Value pairs. let settings_key_pair_map = construct_key_pair_map(&settings.request_payload)?; let transaction = transaction_name(&query); + let strength = strength(&query); let mut datastore = data.ds.write().ok().context(error::DataStoreLockSnafu)?; // We massage the values in the input key pair map. // The data store deserialization code understands how to turn the key names // (a.b.c) and serialized values into the nested Settings structure. let settings_model = datastore::deserialization::from_map(&settings_key_pair_map) .context(error::DeserializeMapSnafu)?; - controller::set_settings(&mut *datastore, &settings_model, transaction)?; + controller::set_settings(&mut *datastore, &settings_model, transaction, strength)?; Ok(HttpResponse::NoContent().finish()) // 204 } @@ -342,9 +353,34 @@ async fn get_transaction( let transaction = transaction_name(&query); let datastore = data.ds.read().ok().context(error::DataStoreLockSnafu)?; let data = controller::get_transaction(&*datastore, transaction)?; + Ok(SettingsResponse(data)) } +/// Get any pending settings in the given transaction, or the "default" transaction if unspecified. +async fn get_transaction_v2( + query: web::Query>, + data: web::Data, +) -> Result { + let transaction = transaction_name(&query); + let datastore = data.ds.read().ok().context(error::DataStoreLockSnafu)?; + let settings = controller::get_transaction(&*datastore, transaction)?; + let transaction_metadata = + controller::get_transaction_metadata(&*datastore, transaction, None)?; + + let mut metadata = HashMap::new(); + for (key, value) in transaction_metadata { + let mut inner_map = HashMap::new(); + for (inner_key, inner_value) in value { + inner_map.insert(inner_key.name().clone(), inner_value); + } + metadata.insert(key.name().clone(), inner_map); + } + + let data = SettingsWithMetadata { settings, metadata }; + Ok(SettingsResponseWithMetadata(data)) +} + /// Delete the given transaction, or the "default" transaction if unspecified. 
async fn delete_transaction( query: web::Query>, @@ -453,6 +489,43 @@ async fn get_affected_services( /// Get all settings that have setting-generator metadata async fn get_setting_generators(data: web::Data) -> Result { + let datastore = data.ds.read().ok().context(error::DataStoreLockSnafu)?; + let metadata_for_keys = + controller::get_metadata_for_all_data_keys(&*datastore, "setting-generator")?; + let mut resp: HashMap = HashMap::new(); + + for (key, value) in metadata_for_keys.iter() { + match value { + Value::String(command) => { + resp.insert( + key.to_string(), + serde_json::Value::String(command.to_string()), + ); + } + Value::Object(obj) => { + let setting_generator: SettingsGenerator = + serde_json::from_value(Value::Object(obj.clone())) + .context(error::DeserializeSettingsGeneratorSnafu)?; + // Not return weak setting generators because the customers using + // v1 of this api are not aware of the strength. + if setting_generator.strength == Strength::Strong { + resp.insert( + key.to_string(), + serde_json::Value::String(setting_generator.command), + ); + } + } + // We know it is not possible, as in default we set the setting-generator + // as string or object + _ => { + info!("Invalid value type for key '{}': {:?}", key, value); + } + } + } + Ok(MetadataResponse(resp)) +} + +async fn get_setting_generators_v2(data: web::Data) -> Result { let datastore = data.ds.read().ok().context(error::DataStoreLockSnafu)?; let resp = controller::get_metadata_for_all_data_keys(&*datastore, "setting-generator")?; Ok(MetadataResponse(resp)) @@ -749,6 +822,13 @@ fn transaction_name(query: &web::Query>) -> &str { query.get("tx").map(String::as_str).unwrap_or("default") } +fn strength(query: &web::Query>) -> Strength { + match query.get("strength") { + Some(strength) => strength.parse::().unwrap_or(Strength::Strong), + None => Strength::Strong, + } +} + // Helpers methods for the 'set' API fn construct_key_pair_map(settings_key_pair_vec: &Vec) -> Result> { @@ -888,6 +968,7 @@ impl ResponseError for error::Error { UpdateLockOpen { .. } => StatusCode::INTERNAL_SERVER_ERROR, ReportExec { .. } => StatusCode::INTERNAL_SERVER_ERROR, ReportResult { .. } => StatusCode::INTERNAL_SERVER_ERROR, + DeserializeSettingsGenerator { .. } => StatusCode::BAD_REQUEST, }; HttpResponse::build(status_code).body(self.to_string()) @@ -940,6 +1021,16 @@ macro_rules! 
impl_responder_for { struct ModelResponse(serde_json::Value); impl_responder_for!(ModelResponse, self, self.0); +/// This lets us respond from our handler methods with a Settings (or Result) +#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] +struct SettingsWithMetadata { + settings: Settings, + metadata: HashMap>, +} + +struct SettingsResponseWithMetadata(SettingsWithMetadata); +impl_responder_for!(SettingsResponseWithMetadata, self, self.0); + /// This lets us respond from our handler methods with a Settings (or Result) struct SettingsResponse(Settings); impl_responder_for!(SettingsResponse, self, self.0); diff --git a/sources/api/datastore/src/error.rs b/sources/api/datastore/src/error.rs index d1c7207ae..86d386b52 100644 --- a/sources/api/datastore/src/error.rs +++ b/sources/api/datastore/src/error.rs @@ -59,6 +59,12 @@ pub enum Error { #[snafu(display("Key name beyond maximum length {}: {}", name, max))] KeyTooLong { name: String, max: usize }, + + #[snafu(display("Unable to serialize data: {}", source))] + Serialize { source: serde_json::Error }, + + #[snafu(display("Unable to de-serialize data: {}", source))] + DeSerialize { source: serde_json::Error }, } pub type Result = std::result::Result; diff --git a/sources/api/datastore/src/filesystem.rs b/sources/api/datastore/src/filesystem.rs index bf89eed78..99bba948e 100644 --- a/sources/api/datastore/src/filesystem.rs +++ b/sources/api/datastore/src/filesystem.rs @@ -4,7 +4,7 @@ //! Data is kept in files with paths resembling the keys, e.g. a/b/c for a.b.c, and metadata is //! kept in a suffixed file next to the data, e.g. a/b/c.meta for metadata "meta" about a.b.c -use log::{debug, error, trace}; +use log::{debug, error, trace, warn}; use percent_encoding::{percent_decode_str, utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC}; use snafu::{ensure, OptionExt, ResultExt}; use std::collections::{HashMap, HashSet}; @@ -13,6 +13,8 @@ use std::io; use std::path::{self, Path, PathBuf}; use walkdir::{DirEntry, WalkDir}; +use crate::{deserialize_scalar, serialize_scalar, ScalarError}; + use super::key::{Key, KeyType}; use super::{error, Committed, DataStore, Result}; @@ -413,6 +415,7 @@ impl DataStore for FilesystemDataStore { fn list_populated_metadata( &self, prefix: S1, + committed: &Committed, metadata_key_name: &Option, ) -> Result>> where @@ -420,7 +423,7 @@ impl DataStore for FilesystemDataStore { S2: AsRef, { // Find metadata key paths on disk - let key_paths = find_populated_key_paths(self, KeyType::Meta, prefix, &Committed::Live)?; + let key_paths = find_populated_key_paths(self, KeyType::Meta, prefix, committed)?; // For each file on disk, check the user's conditions, and add it to our output let mut result = HashMap::new(); @@ -460,8 +463,13 @@ impl DataStore for FilesystemDataStore { self.delete_key_path(path, committed) } - fn get_metadata_raw(&self, metadata_key: &Key, data_key: &Key) -> Result> { - let path = self.metadata_path(metadata_key, data_key, &Committed::Live)?; + fn get_metadata_raw( + &self, + metadata_key: &Key, + data_key: &Key, + committed: &Committed, + ) -> Result> { + let path = self.metadata_path(metadata_key, data_key, committed)?; read_file_for_key(metadata_key, &path) } @@ -470,8 +478,9 @@ impl DataStore for FilesystemDataStore { metadata_key: &Key, data_key: &Key, value: S, + committed: &Committed, ) -> Result<()> { - let path = self.metadata_path(metadata_key, data_key, &Committed::Live)?; + let path = self.metadata_path(metadata_key, data_key, committed)?; write_file_mkdir(path, value) } 
@@ -489,6 +498,95 @@ impl DataStore for FilesystemDataStore { let pending = Committed::Pending { tx: transaction.into(), }; + + // We will first commit metadata to correctly check that if a setting exist and + // its strength is strong, we will not change it to weak. + // Get metadata from pending transaction + let transaction_metadata = + self.get_metadata_prefix("settings.", &pending, &None as &Option<&str>)?; + + trace!( + "commit_transaction: transaction_metadata: {:?}", + transaction_metadata + ); + + for (key, value) in transaction_metadata { + for (metadata_key, metadata_value) in value { + // For now we are only processing the strength metadata from pending + // transaction to live + if metadata_key.name() != "strength" { + continue; + } + + // strength in pending transaction + let pending_strength: String = + deserialize_scalar::<_, ScalarError>(&metadata_value.clone()) + .with_context(|_| error::DeSerializeSnafu {})?; + + // Get the setting strength in live + // get_metadata function returns Ok(None) in case strength does not exist + // We will consider this case as strength equals strong. + let committed_strength = + match self.get_metadata(&metadata_key, &key, &Committed::Live) { + Ok(Some(v)) => v, + Ok(None) => "strong".to_string(), + Err(_) => continue, + }; + + // The get key funtion returns Ok(None) in case if the path does not exist + // and error if some path exist and some error occurred in fetching + // Hence we we will return error in case of error + // from get key function and continue to add/change to weak key + // if the value is None. + let value = self.get_key(&key, &Committed::Live)?; + + trace!( + "commit_transaction: key: {:?}, metadata_key: {:?}, metadata_value: {:?}", + key.name(), + metadata_key.name(), + metadata_value + ); + + match (pending_strength.as_str(), committed_strength.as_str()) { + ("weak", "strong") => { + // Do not change from strong to weak if setting exists + // otherwise commit strength metadata with value as "weak" + if value.is_some() { + warn!("Trying to change the strength from strong to weak for key: {}, Operation ignored", key.name()); + continue; + } else { + let met_value = serialize_scalar::<_, ScalarError>(&pending_strength) + .with_context(|_| error::SerializeSnafu {})?; + + self.set_metadata(&metadata_key, &key, met_value, &Committed::Live)?; + } + } + ("strong", "weak") => { + let met_value = serialize_scalar::<_, ScalarError>(&pending_strength) + .with_context(|_| error::SerializeSnafu {})?; + self.set_metadata(&metadata_key, &key, met_value, &Committed::Live)?; + } + ("weak", "weak") => { + trace!("The strength for setting {} is already weak", key.name()); + continue; + } + ("strong", "strong") => { + trace!("The strength for setting {} is already strong", key.name()); + continue; + } + _ => { + warn!( + "The strength for setting {} is not one of weak or strong. 
Pending strength: {}, Committed strength: {} ", + key.name(), + pending_strength, + committed_strength + ); + continue; + } + }; + } + } + // Get data for changed keys let pending_data = self.get_prefix("settings.", &pending)?; diff --git a/sources/api/datastore/src/lib.rs b/sources/api/datastore/src/lib.rs index 9156a525a..3d375d35c 100644 --- a/sources/api/datastore/src/lib.rs +++ b/sources/api/datastore/src/lib.rs @@ -72,6 +72,7 @@ pub trait DataStore { fn list_populated_metadata( &self, prefix: S1, + committed: &Committed, metadata_key_name: &Option, ) -> Result>> where @@ -89,7 +90,12 @@ pub trait DataStore { /// Retrieve the value for a single metadata key from the datastore. Values will inherit from /// earlier in the tree, if more specific values are not found later. - fn get_metadata(&self, metadata_key: &Key, data_key: &Key) -> Result> { + fn get_metadata( + &self, + metadata_key: &Key, + data_key: &Key, + committed: &Committed, + ) -> Result> { let mut result = Ok(None); let mut current_path = Vec::new(); @@ -101,7 +107,7 @@ pub trait DataStore { unreachable!("Prefix of Key failed to make Key: {:?}", current_path) }); - if let Some(md) = self.get_metadata_raw(metadata_key, &data_key)? { + if let Some(md) = self.get_metadata_raw(metadata_key, &data_key, committed)? { result = Ok(Some(md)); } } @@ -110,13 +116,19 @@ pub trait DataStore { /// Retrieve the value for a single metadata key from the datastore, without taking into /// account inheritance of metadata from earlier in the tree. - fn get_metadata_raw(&self, metadata_key: &Key, data_key: &Key) -> Result>; + fn get_metadata_raw( + &self, + metadata_key: &Key, + data_key: &Key, + committed: &Committed, + ) -> Result>; /// Set the value of a single metadata key in the datastore. fn set_metadata>( &mut self, metadata_key: &Key, data_key: &Key, value: S, + committed: &Committed, ) -> Result<()>; /// Removes the given metadata key from the given data key in the datastore. If we /// succeeded, we return Ok(()); if the data or metadata key didn't exist, we also return @@ -205,13 +217,14 @@ pub trait DataStore { fn get_metadata_prefix( &self, find_prefix: S1, + committed: &Committed, metadata_key_name: &Option, ) -> Result>> where S1: AsRef, S2: AsRef, { - let meta_map = self.list_populated_metadata(&find_prefix, metadata_key_name)?; + let meta_map = self.list_populated_metadata(&find_prefix, committed, metadata_key_name)?; trace!("Found populated metadata: {:?}", meta_map); if meta_map.is_empty() { return Ok(HashMap::new()); @@ -234,12 +247,12 @@ pub trait DataStore { meta_key, &data_key ); - let value = self.get_metadata(&meta_key, &data_key)?.context( - error::ListedMetaNotPresentSnafu { + let value = self + .get_metadata(&meta_key, &data_key, committed)? + .context(error::ListedMetaNotPresentSnafu { meta_key: meta_key.name(), data_key: data_key.name(), - }, - )?; + })?; // Insert a top-level map entry for the data key if we've found metadata. let data_entry = result.entry(data_key.clone()).or_insert_with(HashMap::new); @@ -336,14 +349,20 @@ mod test { let grandchild = Key::new(KeyType::Data, "a.b.c").unwrap(); // Set metadata on parent - m.set_metadata(&meta, &parent, "value").unwrap(); + m.set_metadata(&meta, &parent, "value", &Committed::Live) + .unwrap(); // Metadata shows up on grandchild... assert_eq!( - m.get_metadata(&meta, &grandchild).unwrap(), + m.get_metadata(&meta, &grandchild, &Committed::Live) + .unwrap(), Some("value".to_string()) ); // ...but only through inheritance, not directly. 
- assert_eq!(m.get_metadata_raw(&meta, &grandchild).unwrap(), None); + assert_eq!( + m.get_metadata_raw(&meta, &grandchild, &Committed::Live) + .unwrap(), + None + ); } #[test] @@ -379,20 +398,22 @@ mod test { let mk1 = Key::new(KeyType::Meta, "metatest1").unwrap(); let mk2 = Key::new(KeyType::Meta, "metatest2").unwrap(); let mk3 = Key::new(KeyType::Meta, "metatest3").unwrap(); - m.set_metadata(&mk1, &k1, "41").unwrap(); - m.set_metadata(&mk2, &k2, "42").unwrap(); - m.set_metadata(&mk3, &k3, "43").unwrap(); + m.set_metadata(&mk1, &k1, "41", &Committed::Live).unwrap(); + m.set_metadata(&mk2, &k2, "42", &Committed::Live).unwrap(); + m.set_metadata(&mk3, &k3, "43", &Committed::Live).unwrap(); // Check all metadata assert_eq!( - m.get_metadata_prefix("x.", &None as &Option<&str>).unwrap(), + m.get_metadata_prefix("x.", &Committed::Live, &None as &Option<&str>) + .unwrap(), hashmap!(k1 => hashmap!(mk1 => "41".to_string()), k2.clone() => hashmap!(mk2.clone() => "42".to_string())) ); // Check metadata matching a given name assert_eq!( - m.get_metadata_prefix("x.", &Some("metatest2")).unwrap(), + m.get_metadata_prefix("x.", &Committed::Live, &Some("metatest2")) + .unwrap(), hashmap!(k2 => hashmap!(mk2 => "42".to_string())) ); } diff --git a/sources/api/datastore/src/memory.rs b/sources/api/datastore/src/memory.rs index 4ffc39791..812272d85 100644 --- a/sources/api/datastore/src/memory.rs +++ b/sources/api/datastore/src/memory.rs @@ -16,6 +16,9 @@ pub struct MemoryDataStore { // Map of data keys to their metadata, which in turn is a mapping of metadata keys to // arbitrary (string/serialized) values. metadata: HashMap>, + // Map of data keys to their metadata, which in turn is a mapping of metadata keys to + // arbitrary (string/serialized) values in pending transaction + pending_metadata: HashMap>, } impl MemoryDataStore { @@ -57,14 +60,20 @@ impl DataStore for MemoryDataStore { fn list_populated_metadata( &self, prefix: S1, + committed: &Committed, metadata_key_name: &Option, ) -> Result>> where S1: AsRef, S2: AsRef, { + let metadata_to_use = match committed { + Committed::Live => &self.metadata, + Committed::Pending { .. } => &self.pending_metadata, + }; + let mut result = HashMap::new(); - for (data_key, meta_map) in self.metadata.iter() { + for (data_key, meta_map) in metadata_to_use.iter() { // Confirm data key matches requested prefix. if !data_key.name().starts_with(prefix.as_ref()) { continue; @@ -112,8 +121,18 @@ impl DataStore for MemoryDataStore { Ok(dataset.contains_key(key)) } - fn get_metadata_raw(&self, metadata_key: &Key, data_key: &Key) -> Result> { - let metadata_for_data = self.metadata.get(data_key); + fn get_metadata_raw( + &self, + metadata_key: &Key, + data_key: &Key, + committed: &Committed, + ) -> Result> { + let metadata_to_use = match committed { + Committed::Live => &self.metadata, + Committed::Pending { .. } => &self.pending_metadata, + }; + + let metadata_for_data = metadata_to_use.get(data_key); // If we have a metadata entry for this data key, then we can try fetching the requested // metadata key, otherwise we'll return early with Ok(None). let result = metadata_for_data.and_then(|m| m.get(metadata_key)); @@ -125,17 +144,14 @@ impl DataStore for MemoryDataStore { metadata_key: &Key, data_key: &Key, value: S, + committed: &Committed, ) -> Result<()> { - // If we don't already have a metadata entry for this data key, insert one. 
- let metadata_for_data = self - .metadata - // Clone data key because we want the HashMap key type to be Key, not &Key, and we - // can't pass ownership because we only have a reference from our parameters. - .entry(data_key.clone()) - .or_default(); - - metadata_for_data.insert(metadata_key.clone(), value.as_ref().to_owned()); - Ok(()) + match committed { + Committed::Live => set_metadata_raw(&mut self.metadata, metadata_key, data_key, value), + Committed::Pending { .. } => { + set_metadata_raw(&mut self.pending_metadata, metadata_key, data_key, value) + } + } } fn unset_metadata(&mut self, metadata_key: &Key, data_key: &Key) -> Result<()> { @@ -179,6 +195,23 @@ impl DataStore for MemoryDataStore { } } +fn set_metadata_raw>( + metadata_to_use: &mut HashMap>, + metadata_key: &Key, + data_key: &Key, + value: S, +) -> Result<()> { + // If we don't already have a metadata entry for this data key, insert one. + let metadata_for_data = metadata_to_use + // Clone data key because we want the HashMap key type to be Key, not &Key, and we + // can't pass ownership because we only have a reference from our parameters. + .entry(data_key.clone()) + .or_default(); + + metadata_for_data.insert(metadata_key.clone(), value.as_ref().to_owned()); + Ok(()) +} + #[cfg(test)] mod test { use super::super::{Committed, DataStore, Key, KeyType}; @@ -198,14 +231,38 @@ mod test { let mdkey = Key::new(KeyType::Meta, "testmd").unwrap(); let md = "mdval"; - m.set_metadata(&mdkey, &k, md).unwrap(); + m.set_metadata(&mdkey, &k, md, &Committed::Live).unwrap(); assert_eq!( - m.get_metadata_raw(&mdkey, &k).unwrap(), + m.get_metadata_raw(&mdkey, &k, &Committed::Live).unwrap(), + Some(md.to_string()) + ); + + m.set_metadata( + &mdkey, + &k, + md, + &Committed::Pending { + tx: "test".to_owned(), + }, + ) + .unwrap(); + assert_eq!( + m.get_metadata_raw( + &mdkey, + &k, + &Committed::Pending { + tx: "test".to_owned() + } + ) + .unwrap(), Some(md.to_string()) ); m.unset_metadata(&mdkey, &k).unwrap(); - assert_eq!(m.get_metadata_raw(&mdkey, &k).unwrap(), None); + assert_eq!( + m.get_metadata_raw(&mdkey, &k, &Committed::Live).unwrap(), + None + ); m.unset_key(&k, &Committed::Live).unwrap(); assert_eq!(m.get_key(&k, &Committed::Live).unwrap(), None); diff --git a/sources/api/migration/migrator/Cargo.toml b/sources/api/migration/migrator/Cargo.toml index 50d33f650..ec114ecbd 100644 --- a/sources/api/migration/migrator/Cargo.toml +++ b/sources/api/migration/migrator/Cargo.toml @@ -20,10 +20,12 @@ futures = { workspace = true, features = ["default"] } futures-core.workspace = true log.workspace = true lz4.workspace = true +models.workspace = true nix.workspace = true pentacle.workspace = true rand = { workspace = true, features = ["std", "std_rng"] } semver.workspace = true +serde_json.workspace = true simplelog.workspace = true snafu.workspace = true tokio = { workspace = true, features = ["fs", "macros", "rt-multi-thread"] } @@ -31,6 +33,9 @@ tokio-util = { workspace = true, features = ["compat", "io-util"] } tough = { workspace = true } update_metadata.workspace = true url.workspace = true +walkdir.workspace = true +datastore.workspace = true +percent-encoding.workspace = true [build-dependencies] generate-readme.workspace = true diff --git a/sources/api/migration/migrator/src/datastore_helper.rs b/sources/api/migration/migrator/src/datastore_helper.rs new file mode 100644 index 000000000..944872603 --- /dev/null +++ b/sources/api/migration/migrator/src/datastore_helper.rs @@ -0,0 +1,116 @@ +//! 
This module contains the functions that interact with the data store, retrieving data to +//! update and writing back updated data. + +use snafu::ResultExt; +use std::collections::HashMap; + +use crate::{error, Result}; +use datastore::{deserialize_scalar, serialize_scalar, Committed, DataStore, Key, KeyType, Value}; + +/// Mapping of metadata key name to arbitrary value. Each data key can have a Metadata describing +/// its metadata keys. +/// example: Key: settings.host-containers.admin.source, Metadata: strength and Value: "weak" +pub type Metadata = HashMap; + +/// DataStoreData holds all data that exists in datastore. +// We will take this as an input and use it to delete weak settings and settings generator. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct DataStoreData { + /// Mapping of data key names to their arbitrary values. + pub data: HashMap, + /// Mapping of data key names to their metadata. + pub metadata: HashMap, +} + +// To get input data from the existing data store, we use datastore methods. +// This method is private to the crate, so we can +// reconsider as needed. +/// Retrieves data from the specified data store in a consistent format for easy modification. +pub(crate) fn get_input_data( + datastore: &D, + committed: &Committed, +) -> Result { + let raw_data = datastore + .get_prefix("", committed) + .with_context(|_| error::GetDataSnafu { + committed: committed.clone(), + })?; + + let mut data = HashMap::new(); + for (data_key, value_str) in raw_data.into_iter() { + // Store keys with just their name, rather than the full Key. + let key_name = data_key.name(); + // Deserialize values to Value so there's a consistent input type. (We can't specify item + // types because we'd have to know the model structure.) + let value = + deserialize_scalar(&value_str).context(error::DeserializeSnafu { input: value_str })?; + data.insert(key_name.clone(), value); + } + + let mut metadata = HashMap::new(); + + let raw_metadata = datastore + .get_metadata_prefix("", committed, &None as &Option<&str>) + .context(error::GetMetadataSnafu)?; + for (data_key, meta_map) in raw_metadata.into_iter() { + let data_key_name = data_key.name(); + let data_entry = metadata + .entry(data_key_name.clone()) + .or_insert_with(HashMap::new); + for (metadata_key, value_str) in meta_map.into_iter() { + let metadata_key_name = metadata_key.name(); + let value = deserialize_scalar(&value_str) + .context(error::DeserializeSnafu { input: value_str })?; + data_entry.insert(metadata_key_name.clone(), value); + } + } + Ok(DataStoreData { data, metadata }) +} + +// Similar to get_input_data, we use datastore methods here; +// This method is also private to the crate, so we can reconsider as needed. +/// Updates the given data store with the given (updated) data. 
+pub(crate) fn set_output_data( + datastore: &mut D, + input: &DataStoreData, + committed: &Committed, +) -> Result<()> { + // Prepare serialized data + let mut data = HashMap::new(); + for (data_key_name, raw_value) in &input.data { + let data_key = Key::new(KeyType::Data, data_key_name).context(error::InvalidKeySnafu { + key_type: KeyType::Data, + key: data_key_name, + })?; + let value = serialize_scalar(raw_value).context(error::SerializeSnafu)?; + data.insert(data_key, value); + } + + // This is one of the rare cases where we want to set keys directly in the datastore: + // * We're operating on a temporary copy of the datastore, so no concurrency issues + // * We're either about to reboot or just have, and the settings applier will run afterward + datastore + .set_keys(&data, committed) + .context(error::DataStoreWriteSnafu)?; + + // Set metadata in a loop (currently no batch API) + for (data_key_name, meta_map) in &input.metadata { + let data_key = Key::new(KeyType::Data, data_key_name).context(error::InvalidKeySnafu { + key_type: KeyType::Data, + key: data_key_name, + })?; + for (metadata_key_name, raw_value) in meta_map.iter() { + let metadata_key = + Key::new(KeyType::Meta, metadata_key_name).context(error::InvalidKeySnafu { + key_type: KeyType::Meta, + key: metadata_key_name, + })?; + let value = serialize_scalar(&raw_value).context(error::SerializeSnafu)?; + datastore + .set_metadata(&metadata_key, &data_key, value, committed) + .context(error::DataStoreWriteSnafu)?; + } + } + + Ok(()) +} diff --git a/sources/api/migration/migrator/src/error.rs b/sources/api/migration/migrator/src/error.rs index f6ce68615..2704a5f92 100644 --- a/sources/api/migration/migrator/src/error.rs +++ b/sources/api/migration/migrator/src/error.rs @@ -106,6 +106,51 @@ pub(crate) enum Error { #[snafu(source(from(tough::error::Error, Box::new)))] source: Box, }, + + #[snafu(display("Unable to get {:?} data from datastore: {}", committed, source))] + GetData { + committed: datastore::Committed, + #[snafu(source(from(datastore::Error, Box::new)))] + source: Box, + }, + + #[snafu(display("Unable to deserialize to Value from '{}': {}", input, source))] + Deserialize { + input: String, + source: datastore::ScalarError, + }, + + #[snafu(display("Unable to get metadata from datastore: {}", source))] + GetMetadata { + #[snafu(source(from(datastore::Error, Box::new)))] + source: Box, + }, + + #[snafu(display("Update used invalid {:?} key '{}': {}", key_type, key, source))] + InvalidKey { + key_type: datastore::KeyType, + key: String, + #[snafu(source(from(datastore::Error, Box::new)))] + source: Box, + }, + + #[snafu(display("Unable to write to data store: {}", source))] + DataStoreWrite { + #[snafu(source(from(datastore::Error, Box::new)))] + source: Box, + }, + + #[snafu(display("Unable to serialize Value: {}", source))] + Serialize { source: datastore::ScalarError }, + + #[snafu(display("Unable to list transactions in data store: {}", source))] + ListTransactions { + #[snafu(source(from(datastore::Error, Box::new)))] + source: Box, + }, + + #[snafu(display("Error deserializing response value to SettingsGenerator: {}", source))] + DeserializeSettingsGenerator { source: serde_json::Error }, } /// Result alias containing our Error type. 
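As a rough usage sketch (hypothetical code inside the migrator crate, relying only on the helper signatures above): data is pulled out of one datastore copy as plain maps, edited in place, and written into the new datastore location. The function name and the dropped key are examples, not part of the patch.

```rust
use datastore::{Committed, FilesystemDataStore};

use crate::datastore_helper::{get_input_data, set_output_data};
use crate::Result;

/// Copy live data from one datastore copy into another, dropping one example key on the way.
fn copy_live_without_key(source_path: &str, target_path: &str, drop_key: &str) -> Result<()> {
    let source = FilesystemDataStore::new(source_path);
    let mut target = FilesystemDataStore::new(target_path);

    // DataStoreData is just maps of key name -> Value, so edits are ordinary map operations.
    let mut snapshot = get_input_data(&source, &Committed::Live)?;
    snapshot.data.remove(drop_key);
    snapshot.metadata.remove(drop_key);

    set_output_data(&mut target, &snapshot, &Committed::Live)
}
```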
diff --git a/sources/api/migration/migrator/src/main.rs b/sources/api/migration/migrator/src/main.rs index a3f09c8ad..00ccfed84 100644 --- a/sources/api/migration/migrator/src/main.rs +++ b/sources/api/migration/migrator/src/main.rs @@ -22,14 +22,18 @@ extern crate log; use args::Args; +use datastore::{Committed, DataStore, FilesystemDataStore, Value}; +use datastore_helper::{get_input_data, set_output_data, DataStoreData}; use direction::Direction; use error::Result; use futures::{StreamExt, TryStreamExt}; +use model::SettingsGenerator; use nix::{dir::Dir, fcntl::OFlag, sys::stat::Mode, unistd::fsync}; use rand::{distributions::Alphanumeric, thread_rng, Rng}; use semver::Version; use simplelog::{Config as LogConfig, SimpleLogger}; use snafu::{ensure, OptionExt, ResultExt}; +use std::collections::HashSet; use std::convert::TryInto; use std::env; use std::io::ErrorKind; @@ -46,11 +50,14 @@ use update_metadata::Manifest; use url::Url; mod args; +mod datastore_helper; mod direction; mod error; #[cfg(test)] mod test; +type DataStoreImplementation = FilesystemDataStore; + // Returning a Result from main makes it print a Debug representation of the error, but with Snafu // we have nice Display representations of the error, so we wrap "main" (run) and print any error. // https://github.com/shepmaster/snafu/issues/110 @@ -62,6 +69,20 @@ async fn main() { eprintln!("{}", e); process::exit(1); } + + let datastore = match remove_weak_settings(&args.datastore_path, &args.migrate_to_version).await + { + Ok(datastore) => datastore, + Err(e) => { + eprintln!("{}", e); + process::exit(1); + } + }; + + // change datastore path in args + let mut args = args; + args.datastore_path = datastore; + if let Err(e) = run(&args).await { eprintln!("{}", e); process::exit(1); @@ -234,6 +255,79 @@ where Ok(to) } +async fn remove_weak_settings
<P>
(datastore_path: P, new_version: &Version) -> Result +where + P: AsRef, +{ + // We start with the given source_datastore, updating this to delete weak settings and setting-generators + let source_datastore = datastore_path.as_ref(); + // We create a new data store (below) to serve as the target for update. (Start at + // source just to have the right type) + let target_datastore = new_datastore_location(source_datastore, new_version)?; + + let source = DataStoreImplementation::new(source_datastore); + let mut target = DataStoreImplementation::new(&target_datastore); + + // Run for both live data and pending transactions + let mut committeds = vec![Committed::Live]; + let transactions = source + .list_transactions() + .context(error::ListTransactionsSnafu)?; + committeds.extend(transactions.into_iter().map(|tx| Committed::Pending { tx })); + + for committed in committeds { + let input = get_input_data(&source, &committed)?; + + let mut migrated = input.clone(); + let input_after_removing_weak_settings = remove_weak_setting_from_datastore(&mut migrated)?; + + set_output_data(&mut target, &input_after_removing_weak_settings, &committed)?; + } + + Ok(target_datastore) +} + +fn remove_weak_setting_from_datastore(datastore: &mut DataStoreData) -> Result { + let mut keys_to_remove = HashSet::new(); + + // Collect the metadata keys whose strength is weak + for (key, inner_map) in &datastore.metadata { + if let Some(strength) = inner_map.get("strength") { + if strength == &Value::String("weak".to_string()) { + keys_to_remove.insert(key.clone()); + } + } + } + // Remove strength metadata for weak settings and weak settings + for key in keys_to_remove { + let metadata = datastore.metadata.get(&key); + if let Some(metadata) = metadata { + let mut inner_map = metadata.clone(); + inner_map.remove("strength"); + datastore.metadata.insert(key.clone(), inner_map); + } + datastore.data.remove(&key); + } + + // Remove all the setting generators with weak strength + // We just need to delete all the weak settings-genrator + // as this datastore will be input for migrations and those depends on other metadata in datastore. + for (key, inner_map) in datastore.metadata.clone() { + if let Some(Value::Object(obj)) = inner_map.get("setting-generator") { + let setting_generator: SettingsGenerator = + serde_json::from_value(Value::Object(obj.clone())) + .context(error::DeserializeSettingsGeneratorSnafu)?; + if setting_generator.is_weak() { + let mut inner_map = inner_map.clone(); + inner_map.remove("setting-generator"); + datastore.metadata.insert(key.clone(), inner_map); + } + } + } + + Ok(datastore.clone()) +} + /// Runs the given migrations in their given order. The given direction is passed to each /// migration so it knows which direction we're migrating. 
/// diff --git a/sources/api/openapi.yaml b/sources/api/openapi.yaml index 0b806fd9d..38e73f18b 100644 --- a/sources/api/openapi.yaml +++ b/sources/api/openapi.yaml @@ -345,6 +345,27 @@ paths: description: "Successful deleted pending settings - deleted keys are returned" 500: description: "Server error" + + /v2/tx: + get: + summary: "Get pending settings and metadata in a transaction" + operationId: "get_tx_v2" + parameters: + - in: query + name: tx + description: "Transaction to retrieve pending settings and metadata; defaults to user 'default' transaction" + schema: + type: string + required: false + responses: + 200: + description: "Successful request" + content: + application/json: + schema: + $ref: #/components/schemas/SettingsResponseWithMetadata" + 500: + description: "Server error" /tx/list: get: @@ -494,6 +515,29 @@ paths: type: string 500: description: "Server error" + /v2/metadata/setting-generators: + get: + summary: "Get programs with strength needed to generate settings" + operationId: "get_setting_generators_v2" + responses: + 200: + description: "Successful request" + content: + application/json: + # The response is a hashmap of string to Object. Example: + # { "settings.foobar": "/usr/bin/foobar" } or + # { "settings.foobar": { + # "command": "/usr/bin/foobar", + # "strength": "weak", + # "skip_if_populated": true + # } + # } + schema: + type: object + additionalProperties: + type: string + 500: + description: "Server error" /metadata/templates: get: diff --git a/sources/api/settings-committer/src/main.rs b/sources/api/settings-committer/src/main.rs index 807938d1e..05ed8326b 100644 --- a/sources/api/settings-committer/src/main.rs +++ b/sources/api/settings-committer/src/main.rs @@ -16,7 +16,7 @@ use snafu::ResultExt; use std::str::FromStr; use std::{collections::HashMap, env, process}; -const API_PENDING_URI_BASE: &str = "/tx"; +const API_PENDING_URI_BASE: &str = "/v2/tx"; const API_COMMIT_URI_BASE: &str = "/tx/commit"; type Result = std::result::Result; diff --git a/sources/api/storewolf/src/main.rs b/sources/api/storewolf/src/main.rs index b4d6a3f66..5ceff426b 100644 --- a/sources/api/storewolf/src/main.rs +++ b/sources/api/storewolf/src/main.rs @@ -23,7 +23,7 @@ use std::{env, fs, process}; use bottlerocket_modeled_types::SingleLineString; use datastore::key::{Key, KeyType}; use datastore::serialization::{to_pairs, to_pairs_with_prefix}; -use datastore::{self, DataStore, FilesystemDataStore, ScalarError}; +use datastore::{self, Committed, DataStore, FilesystemDataStore, ScalarError}; // The default path to defaults.toml. const DEFAULTS_TOML: &str = "/etc/storewolf/defaults.toml"; @@ -186,11 +186,50 @@ fn parse_metadata_toml(md_toml_val: toml::Value) -> Result> // A table means there is more processing to do. Add the current // key and value to the Vec to be processed further. toml::Value::Table(table) => { - for (key, val) in table { - trace!("Found table for key '{}'", &key); - let mut path = path.clone(); - path.push(key.to_string()); - to_process.push((path, val)); + // This is special case to handle metadata as table that contains + // "command": "command", + // "strength": "weak", + // "skip-if-populated": true + if table.contains_key("command") + && table.contains_key("strength") + && table.contains_key("skip-if-populated") + { + ensure!( + !path.is_empty(), + error::InternalSnafu { + msg: "Cannot create empty metadata data key - is root not a Table?" 
+ .to_string() + } + ); + + // Get the metadata key from the end of the path + let md_key = path.pop().context(error::InternalSnafu { + msg: "parse_metadata_toml found empty 'path' in the to_process vec - is 'metadata' not a Table?", + })?; + let data_key = path.join("."); + + trace!( + "Found metadata key '{}' for data key '{}'", + &md_key, + &data_key + ); + + // Ensure the metadata/data keys don't contain newline chars + let md = + SingleLineString::try_from(md_key).context(error::SingleLineStringSnafu)?; + let key = SingleLineString::try_from(data_key) + .context(error::SingleLineStringSnafu)?; + let val = toml::Value::Table(table); + + // Create the Metadata struct + def_metadatas.push(model::Metadata { key, md, val }) + } else { + for (key, val) in table { + trace!("Found table for key '{}'", &key); + let mut path = path.clone(); + path.push(key.to_string()); + to_process.push((path, val)); + } } } @@ -259,10 +298,10 @@ fn populate_default_datastore>( if live_path.exists() { debug!("Gathering existing data from the datastore"); existing_metadata = datastore - .list_populated_metadata("", &None as &Option<&str>) + .list_populated_metadata("", &Committed::Live, &None as &Option<&str>) .context(error::QueryMetadataSnafu)?; existing_data = datastore - .list_populated_keys("", &datastore::Committed::Live) + .list_populated_keys("", &Committed::Live) .context(error::QueryDataSnafu)?; } else { info!("Creating datastore at: {}", &live_path.display()); @@ -333,6 +372,18 @@ fn populate_default_datastore>( } // If we have metadata, write it out to the datastore in Live state + // If we have setting-generator metadata as table like + // [metadata.settings.host-containers.admin.source.setting-generator] + // command = "schnauzer-v2 render --requires 'aws@v1(helpers=[ecr-prefix])' --template '{{ ecr-prefix settings.aws.region }}/bottlerocket-admin:v0.11.13'" + // strength = "weak" + // skip-if-populated = true + // We will have to write this in datastore as file with name source.settings-generator + // contents { + // "command" : "schnauzer-v2 render --requires 'aws@v1(helpers=[ecr-prefix])' --template '{{ ecr-prefix settings.aws.region }}/bottlerocket-admin:v0.11.13'", + // "strength" : "weak", + // "skip-if-populated" : "true" + // } + // we will parse such metadata differently in parse_metadata_toml function if let Some(def_metadata_val) = maybe_metadata_val { debug!("Serializing metadata and writing new keys to datastore"); // Create a Vec from the metadata toml::Value @@ -385,7 +436,7 @@ fn populate_default_datastore>( for metadata in metadata_to_write { let (md, key, val) = metadata; datastore - .set_metadata(&md, &key, val) + .set_metadata(&md, &key, val, &Committed::Live) .context(error::WriteMetadataSnafu)?; } } diff --git a/sources/api/sundog/Cargo.toml b/sources/api/sundog/Cargo.toml index da7fae3e8..abd4d9e8e 100644 --- a/sources/api/sundog/Cargo.toml +++ b/sources/api/sundog/Cargo.toml @@ -16,6 +16,7 @@ datastore.workspace = true http.workspace = true log.workspace = true models.workspace = true +serde = { workspace = true, features = ["derive"] } serde_json.workspace = true simplelog.workspace = true shlex.workspace = true diff --git a/sources/api/sundog/src/main.rs b/sources/api/sundog/src/main.rs index 6694629df..8a9eda191 100644 --- a/sources/api/sundog/src/main.rs +++ b/sources/api/sundog/src/main.rs @@ -10,6 +10,7 @@ The output is collected and sent to a known Bottlerocket API server endpoint. 
#[macro_use] extern crate log; +use serde_json::Value; use shlex::Shlex; use simplelog::{Config as LogConfig, LevelFilter, SimpleLogger}; use snafu::{ensure, OptionExt, ResultExt}; @@ -23,6 +24,7 @@ use tokio::process::Command as AsyncCommand; use datastore::serialization::to_pairs_with_prefix; use datastore::{self, deserialization, Key, KeyType}; +use model::{SettingsGenerator, Strength}; // Limit settings generator execution to at most 6 minutes to prevent boot from hanging for too long. const SETTINGS_GENERATOR_TIMEOUT: Duration = Duration::from_secs(360); @@ -161,6 +163,9 @@ mod error { #[snafu(display("Logger setup error: {}", source))] Logger { source: log::SetLoggerError }, + + #[snafu(display("Error deserializing response value to SettingsGenerator: {}", source))] + DeserializeSettingsGenerator { source: serde_json::Error }, } } @@ -169,7 +174,7 @@ use error::SundogError; type Result = std::result::Result; /// Request the setting generators from the API. -async fn get_setting_generators(socket_path: S) -> Result> +async fn get_setting_generators(socket_path: S) -> Result> where S: AsRef, { @@ -189,8 +194,33 @@ where } ); - let generators: HashMap = serde_json::from_str(&response_body) + let response: HashMap = serde_json::from_str(&response_body) .context(error::ResponseJsonSnafu { method: "GET", uri })?; + + get_setting_generator_from_api_response(response) +} + +fn get_setting_generator_from_api_response( + response: HashMap, +) -> Result> { + let mut generators: HashMap = HashMap::new(); + for (key, value) in response { + match value { + Value::String(command) => { + generators.insert(key, SettingsGenerator::with_command(command)); + } + Value::Object(obj) => { + let setting_generator: SettingsGenerator = + serde_json::from_value(Value::Object(obj)) + .context(error::DeserializeSettingsGeneratorSnafu)?; + generators.insert(key, setting_generator); + } + _ => { + info!("Invalid value type for key '{}': {:?}", key, value); + } + } + } + trace!("Generators: {:?}", &generators); Ok(generators) @@ -301,14 +331,16 @@ where } /// Run the setting generators and collect the output +/// separately for weak and strong settings async fn get_dynamic_settings
<P>
( socket_path: P, - generators: HashMap, -) -> Result + generators: HashMap, +) -> Result<(model::Settings, model::Settings)> where P: AsRef, { - let mut settings = HashMap::new(); + let mut strong_settings = HashMap::new(); + let mut weak_settings = HashMap::new(); // Build the list of settings to query from the datastore to see if they // are currently populated. @@ -321,7 +353,9 @@ where let proxy_envs = build_proxy_env(socket_path).await?; // For each generator, run it and capture the output - for (setting_str, generator) in generators { + for (setting_str, generator_object) in generators { + let generator = generator_object.command; + let setting = Key::new(KeyType::Data, &setting_str).context(error::InvalidKeySnafu { key_type: KeyType::Data, key: &setting_str, @@ -334,9 +368,10 @@ where // TODO: We can optimize by using a prefix trie here. But there are no satisfactory trie // implementations I can find on crates.io. We can roll our own at some point if this // becomes a bottleneck - if populated_settings - .iter() - .any(|k| k.starts_with_segments(setting.segments())) + if generator_object.skip_if_populated + && populated_settings + .iter() + .any(|k| k.starts_with_segments(setting.segments())) { debug!("Setting '{}' is already populated, skipping", setting); continue; @@ -445,19 +480,31 @@ where })?; trace!("Serialized output: {}", &serialized_output); - settings.insert(setting, serialized_output); + // Add the setting to the appropriate map + if generator_object.strength == Strength::Strong { + strong_settings.insert(setting, serialized_output); + } else { + weak_settings.insert(setting, serialized_output); + } } // The API takes a properly nested Settings struct, so deserialize our map to a Settings // and ensure it is correct - let settings_struct: model::Settings = - deserialization::from_map(&settings).context(error::DeserializeSnafu)?; + let weak_settings_struct: model::Settings = + deserialization::from_map(&weak_settings).context(error::DeserializeSnafu)?; + + let strong_settings_struct: model::Settings = + deserialization::from_map(&strong_settings).context(error::DeserializeSnafu)?; - Ok(settings_struct) + Ok((weak_settings_struct, strong_settings_struct)) } /// Send the settings to the datastore through the API -async fn set_settings(socket_path: S, settings: model::Settings) -> Result<()> +async fn set_settings( + socket_path: S, + settings: model::Settings, + strength: Strength, +) -> Result<()> where S: AsRef, { @@ -465,9 +512,10 @@ where let request_body = serde_json::to_string(&settings).context(error::SerializeRequestSnafu)?; let uri = &format!( - "{}?tx={}", + "{}?tx={}&strength={}", constants::API_SETTINGS_URI, - constants::LAUNCH_TRANSACTION + constants::LAUNCH_TRANSACTION, + strength ); let method = "PATCH"; trace!("Settings to {} to {}: {}", method, uri, &request_body); @@ -566,10 +614,22 @@ async fn run() -> Result<()> { } info!("Retrieving settings values"); - let settings = get_dynamic_settings(&args.socket_path, generators).await?; + + let (weak_settings, strong_settings) = + get_dynamic_settings(&args.socket_path, generators).await?; + + trace!( + "Weak settings from dynamic settings function: {:#?}", + weak_settings + ); + trace!( + "Strong settings from dynamic settings function: {:#?}", + strong_settings + ); info!("Sending settings values to the API"); - set_settings(&args.socket_path, settings).await?; + set_settings(&args.socket_path, weak_settings, Strength::Weak).await?; + set_settings(&args.socket_path, strong_settings, Strength::Strong).await?; 
Ok(()) } @@ -584,3 +644,46 @@ async fn main() { process::exit(1); } } + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_get_setting_generator_from_api_response() { + let api_response = r#" + { + "host-containers.admin.source": "generator1", + "host-containers.control.source": { + "command": "generator2", + "strength": "weak", + "skip-if-populated": true + } + }"#; + + let response: HashMap = serde_json::from_str(api_response).unwrap(); + + let expected_admin = SettingsGenerator { + command: "generator1".to_string(), + strength: Strength::Strong, + skip_if_populated: true, + }; + + let expected_control = SettingsGenerator { + command: "generator2".to_string(), + strength: Strength::Weak, + skip_if_populated: true, + }; + + let result = get_setting_generator_from_api_response(response).unwrap(); + + assert_eq!( + result.get("host-containers.admin.source").unwrap(), + &expected_admin + ); + assert_eq!( + result.get("host-containers.control.source").unwrap(), + &expected_control + ); + } +} diff --git a/sources/constants/src/lib.rs b/sources/constants/src/lib.rs index 979c3e85e..7a4725961 100644 --- a/sources/constants/src/lib.rs +++ b/sources/constants/src/lib.rs @@ -5,7 +5,7 @@ // Shared API settings pub const API_SOCKET: &str = "/run/api.sock"; pub const API_SETTINGS_URI: &str = "/settings"; -pub const API_SETTINGS_GENERATORS_URI: &str = "/metadata/setting-generators"; +pub const API_SETTINGS_GENERATORS_URI: &str = "/v2/metadata/setting-generators"; // Shared transaction used by boot time services pub const LAUNCH_TRANSACTION: &str = "bottlerocket-launch"; diff --git a/sources/models/Cargo.toml b/sources/models/Cargo.toml index 88b8b5691..8b621ba3d 100644 --- a/sources/models/Cargo.toml +++ b/sources/models/Cargo.toml @@ -17,6 +17,7 @@ serde_json.workspace = true toml.workspace = true bottlerocket-settings-plugin.workspace = true bottlerocket-settings-models.workspace = true +serde_plain.workspace = true [build-dependencies] generate-readme.workspace = true diff --git a/sources/models/src/lib.rs b/sources/models/src/lib.rs index fa76b8c46..b3a73855b 100644 --- a/sources/models/src/lib.rs +++ b/sources/models/src/lib.rs @@ -28,6 +28,7 @@ use bottlerocket_release::BottlerocketRelease; use bottlerocket_settings_models::model_derive::model; use bottlerocket_settings_plugin::BottlerocketSettings; use serde::{Deserialize, Serialize}; +use serde_plain::derive_fromstr_from_deserialize; use std::collections::HashMap; use bottlerocket_settings_models::modeled_types::SingleLineString; @@ -90,3 +91,55 @@ struct Report { name: String, description: String, } + +/// Strength represents whether the settings can be overridden by a setting generator or not. 
+#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq)] +#[serde(rename_all = "kebab-case")] +pub enum Strength { + Strong, + Weak, +} + +impl std::fmt::Display for Strength { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Strength::Strong => write!(f, "strong"), + Strength::Weak => write!(f, "weak"), + } + } +} + +derive_fromstr_from_deserialize!(Strength); + +/// Struct to hold the setting generator definition containing +/// command, strength, skip-if-populated +#[derive(Deserialize, Serialize, std::fmt::Debug, PartialEq)] +pub struct SettingsGenerator { + pub command: String, + pub strength: Strength, + #[serde(rename = "skip-if-populated")] + pub skip_if_populated: bool, +} + +impl SettingsGenerator { + pub fn is_weak(&self) -> bool { + self.strength == Strength::Weak + } + + pub fn with_command(command: String) -> Self { + SettingsGenerator { + command, + ..SettingsGenerator::default() + } + } +} + +impl Default for SettingsGenerator { + fn default() -> Self { + SettingsGenerator { + command: String::new(), + strength: Strength::Strong, + skip_if_populated: true, + } + } +}
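To close, a standalone sketch (illustrative, not part of the patch) of how these model types behave: `Strength` parses from its kebab-case form via `derive_fromstr_from_deserialize!`, and a bare command string maps onto a `SettingsGenerator` with the strong / skip-if-populated defaults, mirroring the sundog test above. Imports follow the `model::` paths used elsewhere in this patch.

```rust
use model::{SettingsGenerator, Strength};

fn main() {
    // Query-string handling: an unparseable value falls back to Strong, as in the API server.
    let strength: Strength = "weak".parse().unwrap_or(Strength::Strong);
    assert_eq!(strength, Strength::Weak);
    // Display is the lowercase form used for the ?strength= query parameter.
    assert_eq!(Strength::Weak.to_string(), "weak");

    // A plain string generator keeps the v1 behavior: strong, skip-if-populated.
    let plain = SettingsGenerator::with_command("/usr/bin/foobar".to_string());
    assert_eq!(plain.strength, Strength::Strong);
    assert!(plain.skip_if_populated);
    assert!(!plain.is_weak());

    // The table form round-trips through serde, including the kebab-case field name.
    let table = r#"{ "command": "/usr/bin/foobar", "strength": "weak", "skip-if-populated": false }"#;
    let parsed: SettingsGenerator = serde_json::from_str(table).unwrap();
    assert!(parsed.is_weak());
    assert!(!parsed.skip_if_populated);
}
```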