forked from DGNum/colmena
feat(meta): add disallowApplyAll options
unify meta access for machinesFile
update release note and config name
parent 1b3c272b58
commit dd7a2924ca
10 changed files with 111 additions and 24 deletions
@@ -151,6 +151,16 @@ pub async fn run(_global_args: &ArgMatches, local_args: &ArgMatches) -> Result<(
         .map(NodeFilter::new)
         .transpose()?;
 
+    if !filter.is_some() {
+        // User did not specify node, we should check meta and see rules
+        let meta = hive.get_meta_config().await?;
+        if !meta.allow_apply_all {
+            log::error!("No node filter is specified and meta.allowApplyAll is set to false.");
+            log::error!("Hint: Filter the nodes with --on.");
+            quit::with_code(1);
+        }
+    }
+
     let goal_arg = local_args.value_of("goal").unwrap();
     let goal = Goal::from_str(goal_arg).unwrap();
@@ -202,11 +202,21 @@ let
       };
     };
   };
 
+  # Add required config Key here since we don't want to eval nixpkgs
+  metaConfigKeys = [
+    "name" "description"
+    "machinesFile"
+    "allowApplyAll"
+  ];
+
+  metaConfig = lib.filterAttrs (n: v: elem n metaConfigKeys) hive.meta;
 in {
   inherit
     nodes toplevel
     deploymentConfig deploymentConfigSelected
-    evalAll evalSelected evalSelectedDrvPaths introspect;
+    evalAll evalSelected evalSelectedDrvPaths introspect
+    metaConfig;
 
   meta = hive.meta;
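The metaConfigKeys whitelist above exists so that meta can be read without evaluating nixpkgs: lib.filterAttrs keeps only the attributes whose names pass the predicate and never forces the other values. A minimal sketch of that filtering (attribute names beyond the whitelist and all values are hypothetical):

    let
      lib = import <nixpkgs/lib>;                  # assumption: nixpkgs available on NIX_PATH
      keys = [ "name" "allowApplyAll" ];
    in lib.filterAttrs (n: v: builtins.elem n keys) {
      name = "example";                            # hypothetical values
      allowApplyAll = false;
      nixpkgs = throw "never forced";              # dropped by name; its value is never evaluated
    }
    # evaluates to { allowApplyAll = false; name = "example"; }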
@@ -10,7 +10,7 @@ use std::convert::AsRef;
 
 use tempfile::{NamedTempFile, TempPath};
 use tokio::process::Command;
-use tokio::sync::RwLock;
+use tokio::sync::OnceCell;
 use serde::Serialize;
 use validator::Validate;
@@ -22,7 +22,7 @@ use super::{
     NodeFilter,
     NixExpression,
     ProfileDerivation,
-    StorePath,
+    StorePath, MetaConfig,
 };
 use super::deployment::TargetNode;
 use crate::error::ColmenaResult;
@@ -58,8 +58,7 @@ pub struct Hive {
     /// Whether to pass --show-trace in Nix commands.
     show_trace: bool,
 
-    /// The cached machines_file expression.
-    machines_file: RwLock<Option<Option<String>>>,
+    meta_config: OnceCell<MetaConfig>,
 }
 
 struct NixInstantiate<'hive> {
@@ -113,12 +112,12 @@ impl Hive {
     pub fn new(path: HivePath) -> ColmenaResult<Self> {
         let context_dir = path.context_dir();
 
-        Ok(Self {
+        Ok(Self{
             path,
             context_dir,
             assets: Assets::new(),
             show_trace: false,
-            machines_file: RwLock::new(None),
+            meta_config: OnceCell::new(),
         })
     }
@@ -126,6 +125,13 @@ impl Hive {
         self.context_dir.as_ref().map(|p| p.as_ref())
     }
 
+    pub async fn get_meta_config(&self) -> ColmenaResult<&MetaConfig> {
+        self.meta_config.get_or_try_init(||async {
+            self.nix_instantiate("hive.metaConfig").eval()
+                .capture_json().await
+        }).await
+    }
+
     pub fn set_show_trace(&mut self, value: bool) {
         self.show_trace = value;
     }
@@ -142,7 +148,7 @@ impl Hive {
         let mut options = NixOptions::default();
         options.set_show_trace(self.show_trace);
 
-        if let Some(machines_file) = self.machines_file().await? {
+        if let Some(machines_file) = &self.get_meta_config().await?.machines_file {
             options.set_builders(Some(format!("@{}", machines_file)));
         }
@@ -319,22 +325,6 @@ impl Hive {
         }
     }
 
-    /// Retrieve the machinesFile setting for the Hive.
-    async fn machines_file(&self) -> ColmenaResult<Option<String>> {
-        if let Some(machines_file) = &*self.machines_file.read().await {
-            return Ok(machines_file.clone());
-        }
-
-        let expr = "toJSON (hive.meta.machinesFile or null)";
-        let s: String = self.nix_instantiate(expr).eval()
-            .capture_json().await?;
-
-        let parsed: Option<String> = serde_json::from_str(&s).unwrap();
-        self.machines_file.write().await.replace(parsed.clone());
-
-        Ok(parsed)
-    }
-
     /// Returns the base expression from which the evaluated Hive can be used.
     fn get_base_expression(&self) -> String {
         self.assets.get_base_expression(self.path())
@@ -282,6 +282,19 @@ with builtins; rec {
         default = {};
         type = types.attrsOf types.unspecified;
       };
+      allowApplyAll = lib.mkOption {
+        description = ''
+          Whether to allow deployments without a node filter set.
+
+          If set to false, a node filter must be specified with `--on` when
+          deploying.
+
+          It helps prevent accidental deployments to the entire cluster
+          when tags are used (e.g., `@production` and `@staging`).
+        '';
+        default = true;
+        type = types.bool;
+      };
     };
   };
 }
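For illustration, a hive that opts into the new guard might look like the sketch below (node names and tags are hypothetical). With meta.allowApplyAll = false, a bare `colmena apply` is rejected by the check added in apply.rs above, while `colmena apply --on @production` still proceeds:

    {
      meta = {
        name = "example-infra";          # hypothetical
        allowApplyAll = false;           # require an explicit --on filter
      };

      web-1 = { ... }: {
        deployment.tags = [ "production" ];
        # ... regular NixOS configuration ...
      };

      web-2 = { ... }: {
        deployment.tags = [ "staging" ];
        # ... regular NixOS configuration ...
      };
    }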
@@ -542,3 +542,22 @@ fn test_hive_introspect() {
 
     assert_eq!("true", eval);
 }
+
+#[test]
+fn test_hive_get_meta() {
+    let hive = TempHive::new(r#"
+      {
+        meta.allowApplyAll = false;
+        meta.specialArgs = {
+          this_is_new = false;
+        };
+      }
+    "#);
+
+    let eval = block_on(hive.get_meta_config())
+        .unwrap();
+
+    eprintln!("{:?}", eval);
+
+    assert!(!eval.allow_apply_all);
+}
@@ -82,6 +82,15 @@ pub struct NodeConfig {
     keys: HashMap<String, Key>,
 }
 
+#[derive(Debug, Clone, Validate, Deserialize)]
+pub struct MetaConfig {
+    #[serde(rename = "allowApplyAll")]
+    pub allow_apply_all: bool,
+
+    #[serde(rename = "machinesFile")]
+    pub machines_file: Option<String>,
+}
+
 /// Nix options.
 #[derive(Debug, Clone, Default)]
 pub struct NixOptions {
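The serde renames bridge the camelCase attribute names produced by the hive.metaConfig expression (the attrset filtered by metaConfigKeys above) and the snake_case Rust fields. For the hypothetical hive sketched earlier, get_meta_config would deserialize roughly this attrset:

    {
      name = "example-infra";
      allowApplyAll = false;    # -> MetaConfig { allow_apply_all: false, .. }
      # machinesFile is null/unset here, so machines_file deserializes to None
    }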