feat(targets): complex target set expressions
Allows intersection and complement operations in node filter expressions.
Parent: 71b1b660f2
Commit: f28359373f
1 changed file with 319 additions and 89 deletions
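The `--on` node filter, which previously only understood a comma-separated list of name and @tag globs, now accepts full set expressions: `,` still takes the union of patterns, `&` intersects, `!` takes the complement, `@` still marks a tag pattern, and parentheses group sub-expressions. Union binds more tightly than intersection and negation binds tightest, so `a & @b , c-*` means `a & (@b , c-*)`. A few expressions accepted after this change (the node and tag names are the illustrative ones used in the tests in this diff):

    @router,gamma-*        nodes tagged router, plus nodes named gamma-*
    @router & @infra-*     nodes carrying both tags
    !@router & @infra-*    nodes tagged infra-* but not tagged router
    ( a & @b ) , ! c-*     parentheses group; ! complements the c-* name match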
@@ -2,7 +2,7 @@
 
 use std::collections::HashSet;
 use std::convert::AsRef;
-use std::iter::{FromIterator, Iterator};
+use std::iter::Iterator;
 use std::str::FromStr;
 
 use clap::Args;
@@ -28,22 +28,26 @@ The list is comma-separated and globs are supported. To match tags, prepend the
     pub on: Option<NodeFilter>,
 }
 
-/// A node filter containing a list of rules.
-#[derive(Clone, Debug)]
-pub struct NodeFilter {
-    rules: Vec<Rule>,
-}
-
 /// A filter rule.
-///
-/// The filter rules are OR'd together.
 #[derive(Debug, Clone, Eq, PartialEq)]
-enum Rule {
+pub enum NodeFilter {
     /// Matches a node's attribute name.
     MatchName(GlobPattern),
 
     /// Matches a node's `deployment.tags`.
     MatchTag(GlobPattern),
+
+    /// Matches an Union
+    Union(Vec<Box<NodeFilter>>),
+
+    /// Matches an Intersection
+    Inter(Vec<Box<NodeFilter>>),
+
+    /// Matches the complementary
+    Not(Box<NodeFilter>),
+
+    /// Empty
+    Empty,
 }
 
 impl FromStr for NodeFilter {
@@ -53,7 +57,169 @@ impl FromStr for NodeFilter {
     }
 }
 
+#[inline]
+fn end_delimiter(c: char) -> bool {
+    [',', '&', ')'].contains(&c)
+}
+
 impl NodeFilter {
+    fn and(a: Self, b: Self) -> Self {
+        match (a, b) {
+            (Self::Inter(mut av), Self::Inter(mut bv)) => {
+                av.append(&mut bv);
+                Self::Inter(av)
+            }
+            (Self::Inter(mut av), b) => {
+                av.push(Box::new(b));
+                Self::Inter(av)
+            }
+            (a, Self::Inter(mut bv)) => {
+                bv.push(Box::new(a));
+                Self::Inter(bv)
+            }
+            (a, b) => Self::Inter(vec![Box::new(a), Box::new(b)]),
+        }
+    }
+
+    fn or(a: Self, b: Self) -> Self {
+        match (a, b) {
+            (Self::Union(mut av), Self::Union(mut bv)) => {
+                av.append(&mut bv);
+                Self::Union(av)
+            }
+            (Self::Union(mut av), b) => {
+                av.push(Box::new(b));
+                Self::Union(av)
+            }
+            (a, Self::Union(mut bv)) => {
+                bv.push(Box::new(a));
+                Self::Union(bv)
+            }
+            (a, b) => Self::Union(vec![Box::new(a), Box::new(b)]),
+        }
+    }
+
+    fn not(a: Self) -> Self {
+        if let Self::Not(ae) = a {
+            *ae
+        } else {
+            Self::Not(Box::new(a))
+        }
+    }
+
+    /// Parses an elementary expression,
+    /// that is base tags and name, with expression between parentheses
+    /// Negations are also parsed here as the most prioritary operation
+    ///
+    /// It returns the unparsed text that follows
+    fn parse_expr0(unparsed: &str) -> ColmenaResult<(Self, &str)> {
+        let unparsed = unparsed.trim_start();
+        // Negation
+        if let Some(negated_expr) = unparsed.strip_prefix('!') {
+            let (negated, unparsed) = Self::parse_expr0(negated_expr)?;
+            Ok((Self::not(negated), unparsed))
+        } else
+        // parentheses
+        if let Some(parenthesed_expr) = unparsed.strip_prefix('(') {
+            let (interior, unparsed) = Self::parse_expr2(parenthesed_expr)?;
+            Ok((
+                interior,
+                unparsed.strip_prefix(')').ok_or(ColmenaError::Unknown {
+                    message: format!("Expected a closing parenthesis at {:?}.", unparsed),
+                })?,
+            ))
+        } else
+        // tag
+        if let Some(tag_expr) = unparsed.strip_prefix('@') {
+            match tag_expr
+                .find(end_delimiter)
+                .map(|idx| tag_expr.split_at(idx))
+                .map(|(tag, end)| (tag.trim_end(), end))
+            {
+                Some((tag, unparsed)) => {
+                    if tag.is_empty() {
+                        return Err(ColmenaError::EmptyFilterRule);
+                    } else {
+                        Ok((Self::MatchTag(GlobPattern::new(tag).unwrap()), unparsed))
+                    }
+                }
+                None => {
+                    let tag_expr = tag_expr.trim_end();
+                    if tag_expr.is_empty() {
+                        Err(ColmenaError::EmptyFilterRule)
+                    } else {
+                        Ok((Self::MatchTag(GlobPattern::new(tag_expr).unwrap()), ""))
+                    }
+                }
+            }
+        } else
+        //node name
+        {
+            match unparsed
+                .find(end_delimiter)
+                .map(|idx| unparsed.split_at(idx))
+                .map(|(tag, end)| (tag.trim_end(), end))
+            {
+                Some((name, unparsed)) => {
+                    if name.is_empty() {
+                        Err(ColmenaError::EmptyFilterRule)
+                    } else {
+                        Ok((Self::MatchName(GlobPattern::new(name).unwrap()), unparsed))
+                    }
+                }
+                None => {
+                    let unparsed = unparsed.trim_end();
+                    if unparsed.is_empty() {
+                        Err(ColmenaError::EmptyFilterRule)
+                    } else {
+                        Ok((Self::MatchName(GlobPattern::new(unparsed).unwrap()), ""))
+                    }
+                }
+            }
+        }
+    }
+
+    /// Parses the union operations between elementary expression.
+    ///
+    /// It returns the unparsed text that follows
+    fn parse_op1(acc: Self, unparsed: &str) -> ColmenaResult<(Self, &str)> {
+        let unparsed = unparsed.trim_start();
+        if let Some(unions) = unparsed.strip_prefix(',') {
+            let (base_expr, unparsed) = Self::parse_expr0(unions)?;
+            Self::parse_op1(Self::or(acc, base_expr), unparsed)
+        } else {
+            Ok((acc, unparsed))
+        }
+    }
+
+    /// Parses elementary expression and their unions.
+    ///
+    /// It returns the unparsed text that follows
+    fn parse_expr1(unparsed: &str) -> ColmenaResult<(Self, &str)> {
+        let (base_expr, unparsed) = Self::parse_expr0(unparsed)?;
+        Self::parse_op1(base_expr, unparsed)
+    }
+
+    /// Parses the intersection operations between unions.
+    ///
+    /// It returns the unparsed text that follows
+    fn parse_op2(acc: Self, unparsed: &str) -> ColmenaResult<(Self, &str)> {
+        if let Some(intersections) = unparsed.strip_prefix('&') {
+            let (union, unparsed) = Self::parse_expr1(intersections)?;
+            Self::parse_op2(Self::and(acc, union), unparsed)
+        } else {
+            Ok((acc, unparsed))
+        }
+    }
+
+    /// Parses a complete expression
+    ///
+    /// It returns the unparsed text that follows
+    fn parse_expr2(unparsed: &str) -> ColmenaResult<(Self, &str)> {
+        let (union, unparsed) = Self::parse_expr1(unparsed)?;
+        Self::parse_op2(union, unparsed)
+    }
+
     /// Creates a new filter using an expression passed using `--on`.
     pub fn new<S: AsRef<str>>(filter: S) -> ColmenaResult<Self> {
         let filter = filter.as_ref();
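Taken together, parse_expr0, parse_expr1 and parse_expr2 form a small recursive-descent parser. Roughly (a sketch of the grammar as implemented above, not text from the commit):

    expr2 := expr1 ( '&' expr1 )*        intersection, lowest precedence
    expr1 := expr0 ( ',' expr0 )*        union
    expr0 := '!' expr0                   complement, highest precedence
           | '(' expr2 ')'
           | '@' tag-glob
           | name-glob

So an expression such as `a & @b , c-*` is the intersection of `a` with the union of `@b` and `c-*`, which the tests further down assert.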
@@ -62,29 +228,16 @@ impl NodeFilter {
         if trimmed.is_empty() {
             log::warn!("Filter \"{}\" is blank and will match nothing", filter);
 
-            return Ok(Self { rules: Vec::new() });
+            return Ok(Self::Empty);
         }
 
-        let rules = trimmed
-            .split(',')
-            .map(|pattern| {
-                let pattern = pattern.trim();
-
-                if pattern.is_empty() {
-                    return Err(ColmenaError::EmptyFilterRule);
-                }
-
-                if let Some(tag_pattern) = pattern.strip_prefix('@') {
-                    Ok(Rule::MatchTag(GlobPattern::new(tag_pattern).unwrap()))
-                } else {
-                    Ok(Rule::MatchName(GlobPattern::new(pattern).unwrap()))
-                }
-            })
-            .collect::<Vec<ColmenaResult<Rule>>>();
-
-        let rules = Result::from_iter(rules)?;
-
-        Ok(Self { rules })
+        let (target_filter, unparsed) = Self::parse_expr2(trimmed)?;
+        if unparsed != "" {
+            Err(ColmenaError::Unknown {
+                message: format!("Found garbage {:?} when parsing the node filter.", unparsed),
+            })
+        } else {
+            Ok(target_filter)
+        }
     }
 
     /// Returns whether the filter has any rule matching NodeConfig information.
@@ -93,7 +246,31 @@ impl NodeFilter {
     /// especially when its values (e.g., tags) depend on other parts of
     /// the configuration.
     pub fn has_node_config_rules(&self) -> bool {
-        self.rules.iter().any(|rule| rule.matches_node_config())
+        match self {
+            Self::MatchName(_) => false,
+            Self::MatchTag(_) => true,
+            Self::Union(v) => v.iter().any(|e| e.has_node_config_rules()),
+            Self::Inter(v) => v.iter().any(|e| e.has_node_config_rules()),
+            Self::Not(e) => e.has_node_config_rules(),
+            Self::Empty => false,
+        }
+    }
+
+    /// Decides whether a node is accepted by the filter or not.
+    /// panic if the filter depends on tags and config is None
+    fn is_accepted(&self, name: &NodeName, config: Option<&NodeConfig>) -> bool {
+        match self {
+            Self::MatchName(pat) => pat.matches(name.as_str()),
+            Self::MatchTag(pat) => config
+                .unwrap()
+                .tags()
+                .iter()
+                .any(|tag| pat.matches(tag.as_str())),
+            Self::Union(v) => v.iter().any(|e| e.is_accepted(name, config)),
+            Self::Inter(v) => v.iter().all(|e| e.is_accepted(name, config)),
+            Self::Not(e) => !e.is_accepted(name, config),
+            Self::Empty => false,
+        }
     }
 
     /// Runs the filter against a set of NodeConfigs and returns the matched ones.
@@ -101,30 +278,17 @@ impl NodeFilter {
     where
         I: Iterator<Item = (&'a NodeName, &'a NodeConfig)>,
     {
-        if self.rules.is_empty() {
+        if self == &Self::Empty {
             return HashSet::new();
         }
 
         nodes
             .filter_map(|(name, node)| {
-                for rule in self.rules.iter() {
-                    match rule {
-                        Rule::MatchName(pat) => {
-                            if pat.matches(name.as_str()) {
-                                return Some(name);
-                            }
-                        }
-                        Rule::MatchTag(pat) => {
-                            for tag in node.tags() {
-                                if pat.matches(tag) {
-                                    return Some(name);
-                                }
-                            }
-                        }
-                    }
+                if self.is_accepted(name, Some(node)) {
+                    Some(name)
+                } else {
+                    None
                 }
-
-                None
             })
             .cloned()
             .collect()
@@ -132,32 +296,24 @@ impl NodeFilter {
 
     /// Runs the filter against a set of node names and returns the matched ones.
     pub fn filter_node_names(&self, nodes: &[NodeName]) -> ColmenaResult<HashSet<NodeName>> {
-        nodes.iter().filter_map(|name| -> Option<ColmenaResult<NodeName>> {
-            for rule in self.rules.iter() {
-                match rule {
-                    Rule::MatchName(pat) => {
-                        if pat.matches(name.as_str()) {
-                            return Some(Ok(name.clone()));
-                        }
-                    }
-                    _ => {
-                        return Some(Err(ColmenaError::Unknown {
-                            message: format!("Not enough information to run rule {:?} - We only have node names", rule),
-                        }));
-                    }
-                }
-            }
-            None
-        }).collect()
-    }
-}
-
-impl Rule {
-    /// Returns whether the rule matches against the NodeConfig (i.e., `config.deployment`).
-    pub fn matches_node_config(&self) -> bool {
-        match self {
-            Self::MatchTag(_) => true,
-            Self::MatchName(_) => false,
+        if self.has_node_config_rules() {
+            Err(ColmenaError::Unknown {
+                message: format!(
+                    "Not enough information to run rule {:?} - We only have node names",
+                    self
+                ),
+            })
+        } else {
+            Ok(nodes
+                .iter()
+                .filter_map(|name| {
+                    if self.is_accepted(name, None) {
+                        Some(name.clone())
+                    } else {
+                        None
+                    }
+                })
+                .collect())
         }
     }
 }
 
@@ -177,13 +333,13 @@ mod tests {
     #[test]
     fn test_empty_filter() {
         let filter = NodeFilter::new("").unwrap();
-        assert_eq!(0, filter.rules.len());
+        assert_eq!(NodeFilter::Empty, filter);
 
         let filter = NodeFilter::new("\t").unwrap();
-        assert_eq!(0, filter.rules.len());
+        assert_eq!(NodeFilter::Empty, filter);
 
         let filter = NodeFilter::new(" ").unwrap();
-        assert_eq!(0, filter.rules.len());
+        assert_eq!(NodeFilter::Empty, filter);
     }
 
     #[test]
@@ -197,21 +353,73 @@ mod tests {
     fn test_filter_rule_mixed() {
         let filter = NodeFilter::new("@router,gamma-*").unwrap();
         assert_eq!(
-            vec![
-                Rule::MatchTag(GlobPattern::new("router").unwrap()),
-                Rule::MatchName(GlobPattern::new("gamma-*").unwrap()),
-            ],
-            filter.rules,
+            NodeFilter::Union(vec![
+                Box::new(NodeFilter::MatchTag(GlobPattern::new("router").unwrap())),
+                Box::new(NodeFilter::MatchName(GlobPattern::new("gamma-*").unwrap())),
+            ]),
+            filter,
         );
 
         let filter = NodeFilter::new("a, \t@b , c-*").unwrap();
         assert_eq!(
-            vec![
-                Rule::MatchName(GlobPattern::new("a").unwrap()),
-                Rule::MatchTag(GlobPattern::new("b").unwrap()),
-                Rule::MatchName(GlobPattern::new("c-*").unwrap()),
-            ],
-            filter.rules,
+            NodeFilter::Union(vec![
+                Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
+                Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
+                Box::new(NodeFilter::MatchName(GlobPattern::new("c-*").unwrap())),
+            ]),
+            filter,
+        );
+
+        let filter = NodeFilter::new("a & \t@b , c-*").unwrap();
+        assert_eq!(
+            NodeFilter::Inter(vec![
+                Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
+                Box::new(NodeFilter::Union(vec![
+                    Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
+                    Box::new(NodeFilter::MatchName(GlobPattern::new("c-*").unwrap())),
+                ])),
+            ]),
+            filter,
+        );
+
+        let filter = NodeFilter::new("( a & \t@b ) , c-*").unwrap();
+        assert_eq!(
+            NodeFilter::Union(vec![
+                Box::new(NodeFilter::Inter(vec![
+                    Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
+                    Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
+                ])),
+                Box::new(NodeFilter::MatchName(GlobPattern::new("c-*").unwrap())),
+            ]),
+            filter,
+        );
+
+        let filter = NodeFilter::new("( a & \t@b ) , ! c-*").unwrap();
+        assert_eq!(
+            NodeFilter::Union(vec![
+                Box::new(NodeFilter::Inter(vec![
+                    Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
+                    Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
+                ])),
+                Box::new(NodeFilter::Not(Box::new(NodeFilter::MatchName(
+                    GlobPattern::new("c-*").unwrap()
+                )))),
+            ]),
+            filter,
+        );
+
+        let filter = NodeFilter::new("( a & \t@b ) , !!! c-*").unwrap();
+        assert_eq!(
+            NodeFilter::Union(vec![
+                Box::new(NodeFilter::Inter(vec![
+                    Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
+                    Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
+                ])),
+                Box::new(NodeFilter::Not(Box::new(NodeFilter::MatchName(
+                    GlobPattern::new("c-*").unwrap()
+                )))),
+            ]),
+            filter,
         );
     }
 
@@ -250,6 +458,7 @@ mod tests {
             privilege_escalation_command: vec![],
             extra_ssh_options: vec![],
             keys: HashMap::new(),
+            system_type: None,
         };
 
         let mut nodes = HashMap::new();
@@ -315,5 +524,26 @@ mod tests {
                 .unwrap()
                 .filter_node_configs(nodes.iter()),
         );
+
+        assert_eq!(
+            &HashSet::from_iter([]),
+            &NodeFilter::new("@router&@controller")
+                .unwrap()
+                .filter_node_configs(nodes.iter()),
+        );
+
+        assert_eq!(
+            &HashSet::from_iter([node!("beta")]),
+            &NodeFilter::new("@router&@infra-*")
+                .unwrap()
+                .filter_node_configs(nodes.iter()),
+        );
+
+        assert_eq!(
+            &HashSet::from_iter([node!("alpha")]),
+            &NodeFilter::new("!@router&@infra-*")
+                .unwrap()
+                .filter_node_configs(nodes.iter()),
+        );
     }
 }
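As a rough usage sketch (not part of the commit; it assumes the crate's NodeFilter, NodeName and node-config types shown in this diff, with `nodes` being a map from node names to configs as in the tests above):

    // Keep nodes tagged infra-* that are not tagged router.
    let filter = NodeFilter::new("@infra-* & !@router").unwrap();

    // Tag rules need deployment metadata...
    assert!(filter.has_node_config_rules());
    // ...so filter_node_names() would return an error for this filter,
    // while filter_node_configs() can evaluate it against full configs:
    let matched: HashSet<NodeName> = filter.filter_node_configs(nodes.iter());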