All checks were successful
Check meta / check_dns (pull_request) Successful in 14s
Check meta / check_meta (pull_request) Successful in 15s
Run pre-commit on all files / pre-commit (pull_request) Successful in 28s
Check workflows / check_workflows (pull_request) Successful in 16s
Build all the nodes / ap01 (pull_request) Successful in 1m0s
Build all the nodes / bridge01 (pull_request) Successful in 1m8s
Build all the nodes / build01 (pull_request) Successful in 58s
Build all the nodes / cof02 (pull_request) Successful in 46s
Build all the nodes / geo01 (pull_request) Successful in 42s
Build all the nodes / compute01 (pull_request) Successful in 1m10s
Build all the nodes / geo02 (pull_request) Successful in 42s
Build all the nodes / hypervisor01 (pull_request) Successful in 42s
Build all the nodes / hypervisor02 (pull_request) Successful in 46s
Build all the nodes / hypervisor03 (pull_request) Successful in 43s
Build all the nodes / iso (pull_request) Successful in 51s
Build all the nodes / netaccess01 (pull_request) Successful in 22s
Build all the nodes / lab-router01 (pull_request) Successful in 42s
Build all the nodes / netcore00 (pull_request) Successful in 21s
Build all the nodes / netcore01 (pull_request) Successful in 20s
Build all the nodes / krz01 (pull_request) Successful in 1m24s
Build all the nodes / netcore02 (pull_request) Successful in 21s
Build all the nodes / tower01 (pull_request) Successful in 44s
Build all the nodes / rescue01 (pull_request) Successful in 1m2s
Build all the nodes / vault01 (pull_request) Successful in 51s
Build all the nodes / web01 (pull_request) Successful in 1m1s
Build all the nodes / storage01 (pull_request) Successful in 1m58s
Build the shell / build-shell (pull_request) Successful in 25s
Build all the nodes / web02 (pull_request) Successful in 42s
Build all the nodes / web03 (pull_request) Successful in 46s
Run pre-commit on all files / pre-commit (push) Successful in 28s
Build all the nodes / ap01 (push) Successful in 38s
Build all the nodes / bridge01 (push) Successful in 43s
Build all the nodes / geo01 (push) Successful in 50s
Build all the nodes / geo02 (push) Successful in 50s
Build all the nodes / cof02 (push) Successful in 52s
Build all the nodes / build01 (push) Successful in 55s
Build all the nodes / compute01 (push) Successful in 1m13s
Build all the nodes / hypervisor01 (push) Successful in 43s
Build all the nodes / iso (push) Successful in 1m20s
Build all the nodes / krz01 (push) Successful in 1m25s
Build all the nodes / netaccess01 (push) Successful in 21s
Build all the nodes / hypervisor02 (push) Successful in 44s
Build all the nodes / hypervisor03 (push) Successful in 1m38s
Build all the nodes / netcore00 (push) Successful in 1m14s
Build all the nodes / netcore01 (push) Successful in 22s
Build all the nodes / netcore02 (push) Successful in 22s
Build all the nodes / tower01 (push) Successful in 43s
Build all the nodes / rescue01 (push) Successful in 1m1s
Build all the nodes / web02 (push) Successful in 51s
Build all the nodes / vault01 (push) Successful in 54s
Build all the nodes / web01 (push) Successful in 1m3s
Build the shell / build-shell (push) Successful in 24s
Build all the nodes / storage01 (push) Successful in 1m42s
Build all the nodes / web03 (push) Successful in 48s
Build all the nodes / lab-router01 (push) Successful in 54s
This made colmena unnecessarily slow; we don't plan to use aliases, and it was a big bowl of slow spaghetti.
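For context, the effect of the revert is that name-only filters are applied directly to the bare node list, and the expensive per-node `config.deployment` evaluation only happens when a rule actually needs tags. A minimal, hypothetical Rust sketch of that dispatch (standalone stand-in types `Filter`, `needs_node_config`, `select_nodes`, `eval_tags` — not colmena's real API) could look like this:

```rust
use std::collections::{HashMap, HashSet};

// Hypothetical stand-ins for NodeFilter rules; not colmena's real types.
#[derive(Debug)]
enum Filter {
    MatchName(String), // exact match here; colmena uses globs
    MatchTag(String),
}

impl Filter {
    // Analogous to has_node_config_rules(): only tag rules need deployment info.
    fn needs_node_config(&self) -> bool {
        matches!(self, Filter::MatchTag(_))
    }
}

fn select_nodes(
    filter: &Filter,
    names: &[String],
    // Stand-in for the expensive per-node deployment.tags evaluation.
    eval_tags: impl FnOnce() -> HashMap<String, Vec<String>>,
) -> HashSet<String> {
    if filter.needs_node_config() {
        // Only pay for evaluating node configs when a rule actually needs them.
        let tags = eval_tags();
        names
            .iter()
            .filter(|name| match filter {
                Filter::MatchTag(tag) => tags.get(*name).map_or(false, |ts| ts.contains(tag)),
                Filter::MatchName(_) => unreachable!(),
            })
            .cloned()
            .collect()
    } else {
        // Name-only filters never touch deployment info.
        names
            .iter()
            .filter(|name| matches!(filter, Filter::MatchName(n) if n == *name))
            .cloned()
            .collect()
    }
}

fn main() {
    let names: Vec<String> = ["web01", "web02", "vault01"].map(String::from).to_vec();

    // A name-only filter: the evaluator closure is never called.
    let picked = select_nodes(&Filter::MatchName("web01".into()), &names, || {
        unreachable!("deployment info should not be evaluated for name filters")
    });
    assert_eq!(picked, HashSet::from(["web01".to_string()]));

    // A tag filter: deployment info is evaluated once for all nodes.
    let picked = select_nodes(&Filter::MatchTag("web".into()), &names, || {
        HashMap::from([
            ("web01".to_string(), vec!["web".to_string()]),
            ("web02".to_string(), vec!["web".to_string()]),
            ("vault01".to_string(), vec!["vault".to_string()]),
        ])
    });
    assert_eq!(picked.len(), 2);
    println!("{picked:?}");
}
```

This mirrors the `has_node_config_rules()` / `filter_node_names()` split restored by the patch below: a name or glob filter short-circuits to plain name matching, while a `@tag` rule triggers one full deployment-info evaluation.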
From f49797f5a589b88e6ad938038e53570fdaa37fa8 Mon Sep 17 00:00:00 2001
From: Tom Hubrecht <tom.hubrecht@dgnum.eu>
Date: Mon, 26 May 2025 13:47:07 +0200
Subject: [PATCH] Revert "add aliases, refactor eval logic"

This reverts commit 3ef5ba8658f68564ed268828ccee39a363200188.
---
src/error.rs | 14 +-
src/nix/hive/mod.rs | 94 +++----------
src/nix/hive/options.nix | 9 --
src/nix/mod.rs | 2 -
src/nix/node_filter.rs | 290 ++++++++++++++-------------------------
5 files changed, 122 insertions(+), 287 deletions(-)

diff --git a/src/error.rs b/src/error.rs
index cbc41ba..f0fbe11 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -6,7 +6,7 @@ use std::process::ExitStatus;
use snafu::{Backtrace, Snafu};
use validator::ValidationErrors;

-use crate::nix::{key, NodeName, Profile, StorePath};
+use crate::nix::{key, Profile, StorePath};

pub type ColmenaResult<T> = Result<T, ColmenaError>;

@@ -76,18 +76,6 @@ pub enum ColmenaError {
#[snafu(display("Filter rule cannot be empty"))]
EmptyFilterRule,

- #[snafu(display(
- "Alias \"{}\" is already taken by {} \"{}\"",
- what.as_str(),
- if *is_node_name { "node name" } else { "alias" },
- with.as_str(),
- ))]
- DuplicateAlias {
- what: NodeName,
- is_node_name: bool,
- with: NodeName,
- },
-
#[snafu(display("Deployment already executed"))]
DeploymentAlreadyExecuted,

diff --git a/src/nix/hive/mod.rs b/src/nix/hive/mod.rs
index 8ebf6de..63ae08e 100644
--- a/src/nix/hive/mod.rs
+++ b/src/nix/hive/mod.rs
@@ -3,7 +3,7 @@ mod assets;
#[cfg(test)]
mod tests;

-use std::collections::{HashMap, HashSet};
+use std::collections::HashMap;
use std::convert::AsRef;
use std::path::{Path, PathBuf};
use std::str::FromStr;
@@ -14,7 +14,6 @@ use tokio::sync::OnceCell;
use validator::Validate;

use super::deployment::TargetNode;
-use super::node_filter::{NeedsEval, PartialNodeConfig};
use super::{
Flake, MetaConfig, NixExpression, NixFlags, NodeConfig, NodeFilter, NodeName,
ProfileDerivation, RegistryConfig, SerializedNixExpression, StorePath,
@@ -262,66 +261,39 @@ impl Hive {
ssh_config: Option<PathBuf>,
ssh_only: bool,
) -> ColmenaResult<HashMap<NodeName, TargetNode>> {
+ let mut node_configs = None;
+
log::info!("Enumerating systems...");
let registry = self.get_registry_config().await?;

log::info!("Enumerating nodes...");
+
let all_nodes = self.node_names().await?;
-
- // try to quickly evaluate the filter without any data to see if it's trivial to evaluate
- let filter_trivial = filter.as_ref().and_then(|filter| filter.try_eval_trivial());
-
let selected_nodes = match filter {
- Some(filter) if filter_trivial.is_none() => {
- log::debug!("Retrieving deployment info for all nodes...");
+ Some(filter) => {
+ if filter.has_node_config_rules() {
+ log::debug!("Retrieving deployment info for all nodes...");

- let needs_eval = filter.needs_eval();
+ let all_node_configs = self.deployment_info().await?;
+ let filtered = filter
+ .filter_node_configs(all_node_configs.iter())
+ .into_iter()
+ .collect();

- let all_node_configs = self.deployment_info_partial(needs_eval).await?;
+ node_configs = Some(all_node_configs);

- // Check for collisions between node names and aliases
- // Returns error if:
- // - A node has an alias matching another node's name
- // - A node has an alias matching its own name
- // - A node has an alias already used by another node
- if needs_eval.aliases {
- let mut taken_aliases = HashSet::new();
- for (name, config) in all_node_configs.iter() {
- for alias in config.aliases.as_ref().unwrap().iter() {
- let overlaps_this = alias == name;
- let overlaps_names = all_node_configs.contains_key(alias);
- let overlaps_aliases = !taken_aliases.insert(alias.clone());
- if overlaps_this || overlaps_names || overlaps_aliases {
- return Err(ColmenaError::DuplicateAlias {
- what: alias.clone(),
- is_node_name: overlaps_this || overlaps_names,
- with: name.clone(),
- });
- }
- }
- }
+ filtered
+ } else {
+ filter.filter_node_names(&all_nodes)?.into_iter().collect()
}
-
- let filtered = filter
- .filter_nodes(all_node_configs.iter())
- .into_iter()
- .collect();
-
- filtered
}
- _ => match filter_trivial {
- // Filter is known to always evaluate to no nodes
- Some(false) => vec![],
- _ => all_nodes.clone(),
- },
+ None => all_nodes.clone(),
};

let n_selected = selected_nodes.len();
- log::debug!("Filtered {n_selected} node names for deployment");

- let mut node_configs = if n_selected == all_nodes.len() {
- log::debug!("Retrieving deployment info for all nodes...");
- self.deployment_info().await?
+ let mut node_configs = if let Some(configs) = node_configs {
+ configs.into_iter().filter(|(name, _)| selected_nodes.contains(name)).collect()
} else {
log::debug!("Retrieving deployment info for selected nodes...");
self.deployment_info_selected(&selected_nodes).await?
@@ -425,34 +397,6 @@ impl Hive {
Ok(configs)
}

- pub async fn deployment_info_partial(
- &self,
- needs_eval: NeedsEval,
- ) -> ColmenaResult<HashMap<NodeName, PartialNodeConfig>> {
- if !needs_eval.any() {
- // Need just the un-aliased names
- return Ok(self
- .node_names()
- .await?
- .into_iter()
- .map(|name| (name, PartialNodeConfig::default()))
- .collect());
- }
-
- let expr = format!(
- "(mapAttrs (name: attrs: {{ inherit (attrs) {} {}; }}) hive.deploymentConfig)",
- needs_eval.aliases.then_some("aliases").unwrap_or_default(),
- needs_eval.tags.then_some("tags").unwrap_or_default(),
- );
- let configs: HashMap<NodeName, PartialNodeConfig> = self
- .nix_instantiate(&expr)
- .eval_with_builders()
- .await?
- .capture_json()
- .await?;
- Ok(configs)
- }
-
/// Retrieve deployment info for a single node.
#[cfg_attr(not(target_os = "linux"), allow(dead_code))]
pub async fn deployment_info_single(
diff --git a/src/nix/hive/options.nix b/src/nix/hive/options.nix
index 0d13642..0db53c5 100644
--- a/src/nix/hive/options.nix
+++ b/src/nix/hive/options.nix
@@ -179,15 +179,6 @@ with builtins; rec {
type = types.listOf types.str;
default = [];
};
- aliases = lib.mkOption {
- description = ''
- A list of aliases for the node.
-
- Can be used to select a node with another name.
- '';
- type = types.listOf types.str;
- default = [];
- };
keys = lib.mkOption {
description = ''
A set of secrets to be deployed to the node.
diff --git a/src/nix/mod.rs b/src/nix/mod.rs
index 6728270..4823f74 100644
--- a/src/nix/mod.rs
+++ b/src/nix/mod.rs
@@ -75,8 +75,6 @@ pub struct NodeConfig {

tags: Vec<String>,

- aliases: Vec<NodeName>,
-
#[serde(rename = "replaceUnknownProfiles")]
replace_unknown_profiles: bool,

diff --git a/src/nix/node_filter.rs b/src/nix/node_filter.rs
index 434fb95..886ad50 100644
--- a/src/nix/node_filter.rs
+++ b/src/nix/node_filter.rs
@@ -7,7 +7,6 @@ use std::str::FromStr;

use clap::Args;
use glob::Pattern as GlobPattern;
-use serde::Deserialize;

use super::{ColmenaError, ColmenaResult, NodeConfig, NodeName};

@@ -29,53 +28,6 @@ The list is comma-separated and globs are supported. To match tags, prepend the
pub on: Option<NodeFilter>,
}

-/// Which fields need to be evaluated
-/// in order to execute the node filter.
-#[derive(Clone, Copy, Debug, Default)]
-pub struct NeedsEval {
- /// Need to evaluate deployment.aliases of all nodes.
- pub aliases: bool,
- /// Need to evaluate deployment.tags of all nodes.
- pub tags: bool,
-}
-
-impl NeedsEval {
- pub fn any(&self) -> bool {
- self.aliases || self.tags
- }
-}
-
-impl std::ops::BitOr for NeedsEval {
- type Output = Self;
- fn bitor(self, rhs: Self) -> Self::Output {
- Self {
- aliases: self.aliases || rhs.aliases,
- tags: self.tags || rhs.tags,
- }
- }
-}
-
-impl std::ops::BitOrAssign for NeedsEval {
- fn bitor_assign(&mut self, rhs: Self) {
- *self = *self | rhs;
- }
-}
-
-#[derive(Debug, Default, Deserialize)]
-pub struct PartialNodeConfig {
- pub aliases: Option<Vec<NodeName>>,
- pub tags: Option<Vec<String>>,
-}
-
-impl From<NodeConfig> for PartialNodeConfig {
- fn from(node_config: NodeConfig) -> Self {
- Self {
- aliases: Some(node_config.aliases),
- tags: Some(node_config.tags),
- }
- }
-}
-
/// A filter rule.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum NodeFilter {
@@ -288,47 +240,30 @@ impl NodeFilter {
}
}

- /// Returns which NodeConfig information is needed to evaluate the filter.
+ /// Returns whether the filter has any rule matching NodeConfig information.
///
/// Evaluating `config.deployment` can potentially be very expensive,
/// especially when its values (e.g., tags) depend on other parts of
/// the configuration.
- pub fn needs_eval(&self) -> NeedsEval {
- // XXX: is the hashset overkill?
+ pub fn has_node_config_rules(&self) -> bool {
match self {
- Self::MatchName(_) => NeedsEval {
- aliases: true,
- ..Default::default()
- },
- Self::MatchTag(_) => NeedsEval {
- tags: true,
- ..Default::default()
- },
- Self::Union(v) | Self::Inter(v) => v
- .iter()
- .fold(NeedsEval::default(), |acc, e| acc | e.needs_eval()),
- Self::Not(e) => e.needs_eval(),
- Self::Empty => NeedsEval::default(),
+ Self::MatchName(_) => false,
+ Self::MatchTag(_) => true,
+ Self::Union(v) => v.iter().any(|e| e.has_node_config_rules()),
+ Self::Inter(v) => v.iter().any(|e| e.has_node_config_rules()),
+ Self::Not(e) => e.has_node_config_rules(),
+ Self::Empty => false,
}
}

/// Decides whether a node is accepted by the filter or not.
- /// panic if the filter depends on tags or aliases and they're None
- fn is_accepted(&self, name: &NodeName, config: &PartialNodeConfig) -> bool {
+ /// panic if the filter depends on tags and config is None
+ fn is_accepted(&self, name: &NodeName, config: Option<&NodeConfig>) -> bool {
match self {
- Self::MatchName(pat) => {
- pat.matches(name.as_str())
- || config
- .aliases
- .as_ref()
- .expect("aliases missing")
- .iter()
- .any(|alias| pat.matches(&alias.0))
- }
+ Self::MatchName(pat) => pat.matches(name.as_str()),
Self::MatchTag(pat) => config
- .tags
- .as_ref()
- .expect("tags missing")
+ .unwrap()
+ .tags()
.iter()
.any(|tag| pat.matches(tag.as_str())),
Self::Union(v) => v.iter().any(|e| e.is_accepted(name, config)),
@@ -339,17 +274,17 @@ impl NodeFilter {
}

/// Runs the filter against a set of NodeConfigs and returns the matched ones.
- pub fn filter_nodes<'a, I>(&self, nodes: I) -> HashSet<NodeName>
+ pub fn filter_node_configs<'a, I>(&self, nodes: I) -> HashSet<NodeName>
where
- I: Iterator<Item = (&'a NodeName, &'a PartialNodeConfig)>,
+ I: Iterator<Item = (&'a NodeName, &'a NodeConfig)>,
{
if self == &Self::Empty {
return HashSet::new();
}

nodes
- .filter_map(|(name, config)| {
- if self.is_accepted(name, config) {
+ .filter_map(|(name, node)| {
+ if self.is_accepted(name, Some(node)) {
Some(name)
} else {
None
@@ -359,34 +294,26 @@ impl NodeFilter {
.collect()
}

- /// In case of trivial filters which dont actually use any node info
- /// Try to eval them immediately
- pub fn try_eval_trivial(&self) -> Option<bool> {
- match self {
- Self::MatchName(_) => None,
- Self::MatchTag(_) => None,
- Self::Union(fs) => {
- for f in fs {
- match f.try_eval_trivial() {
- None => return None,
- Some(true) => return Some(true),
- Some(false) => continue,
+ /// Runs the filter against a set of node names and returns the matched ones.
+ pub fn filter_node_names(&self, nodes: &[NodeName]) -> ColmenaResult<HashSet<NodeName>> {
+ if self.has_node_config_rules() {
+ Err(ColmenaError::Unknown {
+ message: format!(
+ "Not enough information to run rule {:?} - We only have node names",
+ self
+ ),
+ })
+ } else {
+ Ok(nodes
+ .iter()
+ .filter_map(|name| {
+ if self.is_accepted(name, None) {
+ Some(name.clone())
+ } else {
+ None
}
- }
- Some(false)
- }
- Self::Inter(fs) => {
- for f in fs {
- match f.try_eval_trivial() {
- None => return None,
- Some(true) => continue,
- Some(false) => return Some(false),
- }
- }
- Some(true)
- }
- Self::Not(f) => f.try_eval_trivial().map(|b| !b),
- Self::Empty => Some(true),
+ })
+ .collect())
}
}
}
@@ -395,36 +322,6 @@ impl NodeFilter {
mod tests {
use super::*;

- impl PartialNodeConfig {
- fn known_empty() -> Self {
- Self {
- aliases: Some(vec![]),
- tags: Some(vec![]),
- }
- }
-
- pub fn known_aliases_tags(
- aliases: Option<Vec<NodeName>>,
- tags: Option<Vec<String>>,
- ) -> Self {
- Self { aliases, tags }
- }
-
- fn known_tags(tags: Vec<String>) -> Self {
- Self {
- aliases: Some(vec![]),
- tags: Some(tags),
- }
- }
-
- fn known_aliases(aliases: Vec<NodeName>) -> Self {
- Self {
- aliases: Some(aliases),
- tags: Some(vec![]),
- }
- }
- }
-
use std::collections::{HashMap, HashSet};

macro_rules! node {
@@ -527,109 +424,126 @@ mod tests {
}

#[test]
- fn test_filter_nodes_names_only() {
- let nodes = vec![
- (node!("lax-alpha"), PartialNodeConfig::known_empty()),
- (node!("lax-beta"), PartialNodeConfig::known_empty()),
- (node!("sfo-gamma"), PartialNodeConfig::known_empty()),
- ];
+ fn test_filter_node_names() {
+ let nodes = vec![node!("lax-alpha"), node!("lax-beta"), node!("sfo-gamma")];

assert_eq!(
&HashSet::from_iter([node!("lax-alpha")]),
&NodeFilter::new("lax-alpha")
.unwrap()
- .filter_nodes(nodes.iter().map(|x| (&x.0, &x.1))),
+ .filter_node_names(&nodes)
+ .unwrap(),
);

assert_eq!(
&HashSet::from_iter([node!("lax-alpha"), node!("lax-beta")]),
&NodeFilter::new("lax-*")
.unwrap()
- .filter_nodes(nodes.iter().map(|x| (&x.0, &x.1))),
+ .filter_node_names(&nodes)
+ .unwrap(),
);
}

#[test]
- fn test_filter_nodes() {
- let nodes: HashMap<NodeName, PartialNodeConfig> = HashMap::from([
- (
- node!("alpha"),
- PartialNodeConfig::known_tags(vec!["web".to_string(), "infra-lax".to_string()]),
- ),
- (
- node!("beta"),
- PartialNodeConfig::known_tags(vec!["router".to_string(), "infra-sfo".to_string()]),
- ),
- (
- node!("gamma-a"),
- PartialNodeConfig::known_tags(vec!["controller".to_string()]),
- ),
- (
- node!("gamma-b"),
- PartialNodeConfig::known_tags(vec!["ewaste".to_string()]),
- ),
- (
- node!("aliases-test"),
- PartialNodeConfig::known_aliases_tags(
- Some(vec![node!("whatever-alias1"), node!("whatever-alias2")]),
- Some(vec!["testing".into()]),
- ),
- ),
- ]);
- assert_eq!(5, nodes.len());
+ fn test_filter_node_configs() {
+ // TODO: Better way to mock
+ let template = NodeConfig {
+ tags: vec![],
+ target_host: None,
+ target_user: None,
+ target_port: None,
+ allow_local_deployment: false,
+ build_on_target: false,
+ replace_unknown_profiles: false,
+ privilege_escalation_command: vec![],
+ extra_ssh_options: vec![],
+ keys: HashMap::new(),
+ system_type: None,
+ };
+
+ let mut nodes = HashMap::new();
+
+ nodes.insert(
+ node!("alpha"),
+ NodeConfig {
+ tags: vec!["web".to_string(), "infra-lax".to_string()],
+ ..template.clone()
+ },
+ );
+
+ nodes.insert(
+ node!("beta"),
+ NodeConfig {
+ tags: vec!["router".to_string(), "infra-sfo".to_string()],
+ ..template.clone()
+ },
+ );
+
+ nodes.insert(
+ node!("gamma-a"),
+ NodeConfig {
+ tags: vec!["controller".to_string()],
+ ..template.clone()
+ },
+ );
+
+ nodes.insert(
+ node!("gamma-b"),
+ NodeConfig {
+ tags: vec!["ewaste".to_string()],
+ ..template
+ },
+ );
+
+ assert_eq!(4, nodes.len());

assert_eq!(
&HashSet::from_iter([node!("alpha")]),
- &NodeFilter::new("@web").unwrap().filter_nodes(nodes.iter()),
+ &NodeFilter::new("@web")
+ .unwrap()
+ .filter_node_configs(nodes.iter()),
);

assert_eq!(
&HashSet::from_iter([node!("alpha"), node!("beta")]),
&NodeFilter::new("@infra-*")
.unwrap()
- .filter_nodes(nodes.iter()),
+ .filter_node_configs(nodes.iter()),
);

assert_eq!(
&HashSet::from_iter([node!("beta"), node!("gamma-a")]),
&NodeFilter::new("@router,@controller")
.unwrap()
- .filter_nodes(nodes.iter()),
+ .filter_node_configs(nodes.iter()),
);

assert_eq!(
&HashSet::from_iter([node!("beta"), node!("gamma-a"), node!("gamma-b")]),
&NodeFilter::new("@router,gamma-*")
.unwrap()
- .filter_nodes(nodes.iter()),
+ .filter_node_configs(nodes.iter()),
);

assert_eq!(
&HashSet::from_iter([]),
&NodeFilter::new("@router&@controller")
.unwrap()
- .filter_nodes(nodes.iter()),
+ .filter_node_configs(nodes.iter()),
);

assert_eq!(
&HashSet::from_iter([node!("beta")]),
&NodeFilter::new("@router&@infra-*")
.unwrap()
- .filter_nodes(nodes.iter()),
+ .filter_node_configs(nodes.iter()),
);

assert_eq!(
&HashSet::from_iter([node!("alpha")]),
&NodeFilter::new("!@router&@infra-*")
.unwrap()
- .filter_nodes(nodes.iter()),
- );
-
- assert_eq!(
- &HashSet::from_iter([node!("aliases-test")]),
- &NodeFilter::new("whatever-alias1")
- .unwrap()
- .filter_nodes(nodes.iter()),
+ .filter_node_configs(nodes.iter()),
);
}
}
--
2.49.0