forked from DGNum/colmena
feat(meta): add allowApplyAll option
unify meta access for machinesFile; update release note and config name
This commit is contained in:
parent
1b3c272b58
commit
dd7a2924ca
10 changed files with 111 additions and 24 deletions
19
integration-tests/allow-apply-all/default.nix
Normal file
19
integration-tests/allow-apply-all/default.nix
Normal file
|
@ -0,0 +1,19 @@
|
|||
# Integration test: `colmena apply` without a node filter must be rejected
# when the hive sets `meta.allowApplyAll = false`, and must succeed once an
# explicit filter (`--on @target`) is supplied.
{ pkgs ? import ../nixpkgs.nix }:

let
  tools = pkgs.callPackage ../tools.nix {
    targets = [ "alpha" ];
  };
in tools.makeTest {
  name = "colmena-allow-apply-all";

  bundle = ./.;

  testScript = ''
    # An unfiltered apply must fail because hive.nix sets allowApplyAll = false.
    logs = deployer.fail("cd /tmp/bundle && run-copy-stderr ${tools.colmenaExec} apply")

    # Must match the error actually emitted by the apply command
    # ("No node filter is specified and meta.allowApplyAll is set to false.").
    assert "No node filter is specified" in logs

    # Supplying an explicit node filter lifts the restriction.
    deployer.succeed("cd /tmp/bundle && run-copy-stderr ${tools.colmenaExec} apply --on @target")
  '';
}
|
14
integration-tests/allow-apply-all/hive.nix
Normal file
14
integration-tests/allow-apply-all/hive.nix
Normal file
|
@ -0,0 +1,14 @@
|
|||
# Hive used by the allow-apply-all integration test: a minimal deployment
# whose meta section sets `allowApplyAll = false`, so an unfiltered
# `colmena apply` must be rejected.
let
  tools = import ./tools.nix {
    insideVm = true;
    targets = ["alpha"];
  };
in {
  meta = {
    nixpkgs = tools.pkgs;
    # Forbid deployments that do not pass a node filter (--on).
    allowApplyAll = false;
  };

  deployer = tools.getStandaloneConfigFor "deployer";
  alpha = tools.getStandaloneConfigFor "alpha";
}
|
|
@ -8,6 +8,8 @@
|
|||
flakes-streaming = import ./flakes { evaluator = "streaming"; };
|
||||
parallel = import ./parallel {};
|
||||
|
||||
allow-apply-all = import ./allow-apply-all {};
|
||||
|
||||
apply-stable = let
|
||||
test = import ./apply { pkgs = import ./nixpkgs-stable.nix; };
|
||||
in test.override (old: {
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
- In `apply-local`, we now only escalate privileges during activation ([#85](https://github.com/zhaofengli/colmena/issues/85)).
|
||||
- Impure overlays are no longer imported by default if a path is specified in `meta.nixpkgs` ([#39](https://github.com/zhaofengli/colmena/issues/39))
|
||||
- GC roots are now created right after the builds are complete, as opposed to after activation.
|
||||
- The [`meta.allowApplyAll`](./reference/meta.md#allowapplyall) option has been added. If set to false, deployments without a node filter (`--on`) are disallowed.
|
||||
|
||||
## [Release 0.3.0](https://github.com/zhaofengli/colmena/releases/tag/v0.3.0) (2022/04/27)
|
||||
|
||||
|
|
|
@ -151,6 +151,16 @@ pub async fn run(_global_args: &ArgMatches, local_args: &ArgMatches) -> Result<(
|
|||
.map(NodeFilter::new)
|
||||
.transpose()?;
|
||||
|
||||
if !filter.is_some() {
|
||||
// User did not specify node, we should check meta and see rules
|
||||
let meta = hive.get_meta_config().await?;
|
||||
if !meta.allow_apply_all {
|
||||
log::error!("No node filter is specified and meta.allowApplyAll is set to false.");
|
||||
log::error!("Hint: Filter the nodes with --on.");
|
||||
quit::with_code(1);
|
||||
}
|
||||
}
|
||||
|
||||
let goal_arg = local_args.value_of("goal").unwrap();
|
||||
let goal = Goal::from_str(goal_arg).unwrap();
|
||||
|
||||
|
|
|
@ -202,11 +202,21 @@ let
|
|||
};
|
||||
};
|
||||
};
|
||||
|
||||
# Add required config Key here since we don't want to eval nixpkgs
|
||||
metaConfigKeys = [
|
||||
"name" "description"
|
||||
"machinesFile"
|
||||
"allowApplyAll"
|
||||
];
|
||||
|
||||
metaConfig = lib.filterAttrs (n: v: elem n metaConfigKeys) hive.meta;
|
||||
in {
|
||||
inherit
|
||||
nodes toplevel
|
||||
deploymentConfig deploymentConfigSelected
|
||||
evalAll evalSelected evalSelectedDrvPaths introspect;
|
||||
evalAll evalSelected evalSelectedDrvPaths introspect
|
||||
metaConfig;
|
||||
|
||||
meta = hive.meta;
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@ use std::convert::AsRef;
|
|||
|
||||
use tempfile::{NamedTempFile, TempPath};
|
||||
use tokio::process::Command;
|
||||
use tokio::sync::RwLock;
|
||||
use tokio::sync::OnceCell;
|
||||
use serde::Serialize;
|
||||
use validator::Validate;
|
||||
|
||||
|
@ -22,7 +22,7 @@ use super::{
|
|||
NodeFilter,
|
||||
NixExpression,
|
||||
ProfileDerivation,
|
||||
StorePath,
|
||||
StorePath, MetaConfig,
|
||||
};
|
||||
use super::deployment::TargetNode;
|
||||
use crate::error::ColmenaResult;
|
||||
|
@ -58,8 +58,7 @@ pub struct Hive {
|
|||
/// Whether to pass --show-trace in Nix commands.
|
||||
show_trace: bool,
|
||||
|
||||
/// The cached machines_file expression.
|
||||
machines_file: RwLock<Option<Option<String>>>,
|
||||
meta_config: OnceCell<MetaConfig>,
|
||||
}
|
||||
|
||||
struct NixInstantiate<'hive> {
|
||||
|
@ -113,12 +112,12 @@ impl Hive {
|
|||
pub fn new(path: HivePath) -> ColmenaResult<Self> {
|
||||
let context_dir = path.context_dir();
|
||||
|
||||
Ok(Self {
|
||||
Ok(Self{
|
||||
path,
|
||||
context_dir,
|
||||
assets: Assets::new(),
|
||||
show_trace: false,
|
||||
machines_file: RwLock::new(None),
|
||||
meta_config: OnceCell::new(),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -126,6 +125,13 @@ impl Hive {
|
|||
self.context_dir.as_ref().map(|p| p.as_ref())
|
||||
}
|
||||
|
||||
/// Returns the hive's parsed `meta` configuration.
///
/// The value is produced by evaluating the `hive.metaConfig` expression
/// with `nix-instantiate` and deserializing its JSON output. The result is
/// cached in `self.meta_config` (a `OnceCell`), so the Nix evaluation runs
/// at most once per `Hive`; subsequent calls return the cached reference.
pub async fn get_meta_config(&self) -> ColmenaResult<&MetaConfig> {
    self.meta_config.get_or_try_init(|| async {
        // `hive.metaConfig` is the filtered subset of `hive.meta`
        // exported by the eval expression (see metaConfigKeys).
        self.nix_instantiate("hive.metaConfig").eval()
            .capture_json().await
    }).await
}
|
||||
|
||||
pub fn set_show_trace(&mut self, value: bool) {
|
||||
self.show_trace = value;
|
||||
}
|
||||
|
@ -142,7 +148,7 @@ impl Hive {
|
|||
let mut options = NixOptions::default();
|
||||
options.set_show_trace(self.show_trace);
|
||||
|
||||
if let Some(machines_file) = self.machines_file().await? {
|
||||
if let Some(machines_file) = &self.get_meta_config().await?.machines_file {
|
||||
options.set_builders(Some(format!("@{}", machines_file)));
|
||||
}
|
||||
|
||||
|
@ -319,22 +325,6 @@ impl Hive {
|
|||
}
|
||||
}
|
||||
|
||||
/// Retrieve the machinesFile setting for the Hive.
///
/// The result is cached in `self.machines_file` behind an `RwLock`: the
/// outer `Option` tracks whether evaluation has happened yet, the inner
/// `Option` is the actual setting (`None` when `meta.machinesFile` is
/// unset in the hive expression).
async fn machines_file(&self) -> ColmenaResult<Option<String>> {
    // Fast path: return the cached value if we have already evaluated.
    if let Some(machines_file) = &*self.machines_file.read().await {
        return Ok(machines_file.clone());
    }

    // `or null` makes a missing attribute evaluate to JSON null instead
    // of aborting the Nix evaluation.
    let expr = "toJSON (hive.meta.machinesFile or null)";
    let s: String = self.nix_instantiate(expr).eval()
        .capture_json().await?;

    // The evaluation yields a JSON string whose content is itself JSON;
    // parse the inner document here. NOTE(review): the unwrap assumes the
    // inner JSON is well-formed — it is produced by our own toJSON call.
    let parsed: Option<String> = serde_json::from_str(&s).unwrap();
    self.machines_file.write().await.replace(parsed.clone());

    Ok(parsed)
}
|
||||
|
||||
/// Returns the base expression from which the evaluated Hive can be used.
|
||||
fn get_base_expression(&self) -> String {
|
||||
self.assets.get_base_expression(self.path())
|
||||
|
|
|
@ -282,6 +282,19 @@ with builtins; rec {
|
|||
default = {};
|
||||
type = types.attrsOf types.unspecified;
|
||||
};
|
||||
allowApplyAll = lib.mkOption {
|
||||
description = ''
|
||||
Whether to allow deployments without a node filter set.
|
||||
|
||||
If set to false, a node filter must be specified with `--on` when
|
||||
deploying.
|
||||
|
||||
It helps prevent accidental deployments to the entire cluster
|
||||
when tags are used (e.g., `@production` and `@staging`).
|
||||
'';
|
||||
default = true;
|
||||
type = types.bool;
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
|
|
@ -542,3 +542,22 @@ fn test_hive_introspect() {
|
|||
|
||||
assert_eq!("true", eval);
|
||||
}
|
||||
|
||||
/// Verifies that `get_meta_config` surfaces `meta.allowApplyAll` from the
/// hive expression, and that unrelated meta attributes (`specialArgs`)
/// do not break deserialization.
#[test]
fn test_hive_get_meta() {
    let hive = TempHive::new(r#"
      {
        meta.allowApplyAll = false;
        meta.specialArgs = {
          this_is_new = false;
        };
      }
    "#);

    let eval = block_on(hive.get_meta_config())
        .unwrap();

    // Leftover debug output (eprintln!) removed; the assertion below is
    // the actual check.
    assert!(!eval.allow_apply_all);
}
|
||||
|
|
|
@ -82,6 +82,15 @@ pub struct NodeConfig {
|
|||
keys: HashMap<String, Key>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Validate, Deserialize)]
|
||||
pub struct MetaConfig {
|
||||
#[serde(rename = "allowApplyAll")]
|
||||
pub allow_apply_all: bool,
|
||||
|
||||
#[serde(rename = "machinesFile")]
|
||||
pub machines_file: Option<String>,
|
||||
}
|
||||
|
||||
/// Nix options.
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct NixOptions {
|
||||
|
|
Loading…
Reference in a new issue