forked from DGNum/colmena
Misc cleanup
This commit is contained in:
parent e2bad26be5
commit a42e8c5bf0

20 changed files with 89 additions and 121 deletions

src/cli.rs (12 changes)

@@ -7,21 +7,21 @@ use lazy_static::lazy_static;
 use crate::command;
 
 /// Base URL of the manual, without the trailing slash.
-const MANUAL_URL_BASE: &'static str = "https://zhaofengli.github.io/colmena";
+const MANUAL_URL_BASE: &str = "https://zhaofengli.github.io/colmena";
 
 /// URL to the manual.
 ///
 /// We maintain CLI and Nix API stability for each minor version.
 /// This ensures that the user always sees accurate documentations, and we can
 /// easily perform updates to the manual after a release.
-const MANUAL_URL: &'static str = concatcp!(MANUAL_URL_BASE, "/", env!("CARGO_PKG_VERSION_MAJOR"), ".", env!("CARGO_PKG_VERSION_MINOR"));
+const MANUAL_URL: &str = concatcp!(MANUAL_URL_BASE, "/", env!("CARGO_PKG_VERSION_MAJOR"), ".", env!("CARGO_PKG_VERSION_MINOR"));
 
 /// The note shown when the user is using a pre-release version.
 ///
 /// API stability cannot be guaranteed for pre-release versions.
 /// Links to the version currently in development automatically
 /// leads the user to the unstable manual.
-const MANUAL_DISCREPANCY_NOTE: &'static str = "Note: You are using a pre-release version of Colmena, so the supported options may be different from what's in the manual.";
+const MANUAL_DISCREPANCY_NOTE: &str = "Note: You are using a pre-release version of Colmena, so the supported options may be different from what's in the manual.";
 
 lazy_static! {
     static ref LONG_ABOUT: String = {

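The `&'static str` → `&str` changes in this hunk (and the `&'static [u8]` and `SYSTEM_PROFILE` changes further down) follow clippy's redundant_static_lifetimes lint: in a `const` or `static` item the reference is implicitly `'static`, so the annotation adds nothing. A minimal standalone sketch:

    // In const/static items these two declarations are equivalent;
    // the reference type is 'static either way.
    const EXPLICIT: &'static str = "hello";
    const ELIDED: &str = "hello";

    fn main() {
        let s: &'static str = ELIDED; // still usable as &'static str
        println!("{} {}", EXPLICIT, s);
    }
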
@@ -32,8 +32,8 @@ For more details, read the manual at <{}>.
 
 "#, MANUAL_URL);
 
-        if env!("CARGO_PKG_VERSION_PRE").len() != 0 {
-            message += &MANUAL_DISCREPANCY_NOTE;
+        if !env!("CARGO_PKG_VERSION_PRE").is_empty() {
+            message += MANUAL_DISCREPANCY_NOTE;
         }
 
         message
@@ -157,7 +157,7 @@ pub async fn run() {
         return gen_completions(args);
     }
 
-    if let Some(_) = matches.subcommand_matches("gen-help-markdown") {
+    if matches.subcommand_matches("gen-help-markdown").is_some() {
         return gen_help_markdown();
     };
 

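Two more lint-driven patterns that recur throughout this commit: `len() != 0` / `len() == 0` becomes `is_empty()` (clippy's len_zero), and `if let Some(_) = x` becomes `x.is_some()` (clippy's redundant_pattern_matching). A standalone sketch of both:

    fn main() {
        let pre = ""; // stand-in for env!("CARGO_PKG_VERSION_PRE")
        let sub: Option<u32> = Some(1);

        // Before: if pre.len() != 0 { ... }
        if !pre.is_empty() {
            println!("pre-release");
        }

        // Before: if let Some(_) = sub { ... }
        if sub.is_some() {
            println!("subcommand matched");
        }
    }
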
@@ -118,11 +118,9 @@ pub async fn run(_global_args: &ArgMatches<'_>, local_args: &ArgMatches<'_>) ->
     let ssh_config = env::var("SSH_CONFIG_FILE")
         .ok().map(PathBuf::from);
 
-    let filter = if let Some(f) = local_args.value_of("on") {
-        Some(NodeFilter::new(f)?)
-    } else {
-        None
-    };
+    let filter = local_args.value_of("on")
+        .map(NodeFilter::new)
+        .transpose()?;
 
     let goal_arg = local_args.value_of("goal").unwrap();
     let goal = Goal::from_str(goal_arg).unwrap();

@@ -59,11 +59,9 @@ pub async fn run(_global_args: &ArgMatches<'_>, local_args: &ArgMatches<'_>) ->
     let ssh_config = env::var("SSH_CONFIG_FILE")
         .ok().map(PathBuf::from);
 
-    let filter = if let Some(f) = local_args.value_of("on") {
-        Some(NodeFilter::new(f)?)
-    } else {
-        None
-    };
+    let filter = local_args.value_of("on")
+        .map(NodeFilter::new)
+        .transpose()?;
 
     let mut targets = hive.select_nodes(filter, ssh_config, true).await?;
 

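Both hunks above collapse an `if let`/`else` around a fallible constructor into `Option::transpose`, which flips `Option<Result<T, E>>` into `Result<Option<T>, E>` so a single `?` handles the error. A minimal sketch of the same shape (`ParseError` and `parse_filter` are invented for illustration; the real code uses `NodeFilter::new`):

    #[derive(Debug)]
    struct ParseError;

    fn parse_filter(s: &str) -> Result<u32, ParseError> {
        s.parse().map_err(|_| ParseError)
    }

    fn select(arg: Option<&str>) -> Result<Option<u32>, ParseError> {
        // map:       Option<&str> -> Option<Result<u32, ParseError>>
        // transpose:              -> Result<Option<u32>, ParseError>
        let filter = arg.map(parse_filter).transpose()?;
        Ok(filter)
    }

    fn main() {
        assert_eq!(select(None).unwrap(), None);
        assert_eq!(select(Some("3")).unwrap(), Some(3));
        assert!(select(Some("x")).is_err());
    }
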
src/job.rs (32 changes)

@@ -395,7 +395,7 @@ impl JobMonitor {
             metadata.state = new_state;
 
             if message.is_some() {
-                metadata.custom_message = message.clone();
+                metadata.custom_message = message;
             }
 
             let metadata = if new_state == JobState::Waiting {
@@ -513,10 +513,7 @@ impl JobMonitor {
 impl JobState {
     /// Returns whether this state is final.
     pub fn is_final(&self) -> bool {
-        match self {
-            Self::Failed | Self::Succeeded => true,
-            _ => false,
-        }
+        matches!(self, Self::Failed | Self::Succeeded)
     }
 }
 

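The `is_final` rewrite uses the `matches!` macro, which collapses a boolean `match` whose arms are just `=> true` / `=> false` into one expression; the same transformation is applied to `EventPayload::privileged` and the `Goal` predicates later in this commit. A standalone sketch:

    enum JobState {
        Waiting,
        Running,
        Succeeded,
        Failed,
    }

    impl JobState {
        fn is_final(&self) -> bool {
            // Equivalent to the old two-arm match.
            matches!(self, Self::Failed | Self::Succeeded)
        }
    }

    fn main() {
        assert!(JobState::Failed.is_final());
        assert!(JobState::Succeeded.is_final());
        assert!(!JobState::Running.is_final());
        assert!(!JobState::Waiting.is_final());
    }
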
@@ -718,11 +715,11 @@ impl JobMetadata {
         let node_list = describe_node_list(&self.nodes)
             .unwrap_or_else(|| "some node(s)".to_string());
 
-        let message = self.custom_message.as_ref().map(|e| e.as_str())
+        let message = self.custom_message.as_deref()
             .unwrap_or("No message");
 
         Some(match (self.job_type, self.state) {
-            (JobType::Meta, JobState::Succeeded) => format!("All done!"),
+            (JobType::Meta, JobState::Succeeded) => "All done!".to_string(),
 
             (JobType::Evaluate, JobState::Running) => format!("Evaluating {}", node_list),
             (JobType::Evaluate, JobState::Succeeded) => format!("Evaluated {}", node_list),

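`Option::as_deref` is the idiomatic spelling of `as_ref().map(|e| e.as_str())`: it borrows the inside of the `Option` through `Deref`, turning `&Option<String>` into `Option<&str>`. The same change appears below for `Flake::local_dir`, where it turns `Option<PathBuf>` into `Option<&Path>`. A standalone sketch:

    fn main() {
        let custom_message: Option<String> = Some("deploy failed".to_string());

        // Before: custom_message.as_ref().map(|e| e.as_str()).unwrap_or("No message")
        let message: &str = custom_message.as_deref().unwrap_or("No message");

        assert_eq!(message, "deploy failed");
    }
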
@@ -732,19 +729,19 @@ impl JobMetadata {
             (JobType::Build, JobState::Succeeded) => format!("Built {}", node_list),
             (JobType::Build, JobState::Failed) => format!("Build failed: {}", message),
 
-            (JobType::Push, JobState::Running) => format!("Pushing system closure"),
-            (JobType::Push, JobState::Succeeded) => format!("Pushed system closure"),
+            (JobType::Push, JobState::Running) => "Pushing system closure".to_string(),
+            (JobType::Push, JobState::Succeeded) => "Pushed system closure".to_string(),
             (JobType::Push, JobState::Failed) => format!("Push failed: {}", message),
 
-            (JobType::UploadKeys, JobState::Running) => format!("Uploading keys"),
-            (JobType::UploadKeys, JobState::Succeeded) => format!("Uploaded keys"),
+            (JobType::UploadKeys, JobState::Running) => "Uploading keys".to_string(),
+            (JobType::UploadKeys, JobState::Succeeded) => "Uploaded keys".to_string(),
             (JobType::UploadKeys, JobState::Failed) => format!("Key upload failed: {}", message),
 
-            (JobType::Activate, JobState::Running) => format!("Activating system profile"),
+            (JobType::Activate, JobState::Running) => "Activating system profile".to_string(),
             (JobType::Activate, JobState::Failed) => format!("Activation failed: {}", message),
 
             (_, JobState::Failed) => format!("Failed: {}", message),
-            (_, JobState::Succeeded) => format!("Succeeded"),
+            (_, JobState::Succeeded) => "Succeeded".to_string(),
             _ => "".to_string(),
         })
     }
@@ -760,7 +757,7 @@ impl JobMetadata {
             JobType::Push => format!("Failed to push system closure to {}", node_list),
             JobType::UploadKeys => format!("Failed to upload keys to {}", node_list),
             JobType::Activate => format!("Failed to deploy to {}", node_list),
-            JobType::Meta => format!("Failed to complete requested operation"),
+            JobType::Meta => "Failed to complete requested operation".to_string(),
             _ => format!("Failed to complete job on {}", node_list),
         }
     }
@@ -775,10 +772,7 @@ impl Event {
 
 impl EventPayload {
     fn privileged(&self) -> bool {
-        match self {
-            Self::ShutdownMonitor => true,
-            _ => false,
-        }
+        matches!(self, Self::ShutdownMonitor)
     }
 }
 
@@ -856,7 +850,7 @@ fn describe_node_list(nodes: &[NodeName]) -> Option<String> {
     while let Some((_, node)) = iter.next() {
         let next = iter.peek();
 
-        if s.len() != 0 {
+        if !s.is_empty() {
             if next.is_none() {
                 s += if total > 2 { ", and " } else { " and " };
             } else {

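The `format!("literal")` → `"literal".to_string()` changes address clippy's useless_format: calling `format!` with no interpolation drags in the whole formatting machinery just to copy a fixed string. A standalone sketch:

    fn main() {
        // Before: builds the string through core::fmt.
        let a = format!("All done!");
        // After: a direct copy into a new String.
        let b = "All done!".to_string();

        assert_eq!(a, b);
    }
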
@@ -67,34 +67,16 @@ impl Goal {
 
     pub fn should_switch_profile(&self) -> bool {
         use Goal::*;
-        match self {
-            Boot | Switch => true,
-            _ => false,
-        }
+        matches!(self, Boot | Switch)
     }
 
     pub fn requires_activation(&self) -> bool {
         use Goal::*;
-        match self {
-            Build | UploadKeys | Push => false,
-            _ => true,
-        }
+        !matches!(self, Build | UploadKeys | Push)
     }
 
     pub fn requires_target_host(&self) -> bool {
         use Goal::*;
-        match self {
-            Build => false,
-            _ => true,
-        }
-    }
-
-    /// Is this a real goal supported by switch-to-configuration?
-    pub fn is_real_goal(&self) -> bool {
-        use Goal::*;
-        match self {
-            Build | UploadKeys | Push => false,
-            _ => true,
-        }
+        !matches!(self, Build)
     }
 }

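Besides the `matches!` conversions, this hunk removes `is_real_goal` outright. Its arms (`Build | UploadKeys | Push => false, _ => true`) were identical to `requires_activation`'s, so the removal is behavior-preserving, and the two `activate` call sites in the `Host` implementations below switch over accordingly. A sketch of the equivalence using a trimmed-down `Goal` (the real enum has more variants):

    #[derive(Clone, Copy)]
    enum Goal {
        Build,
        Push,
        UploadKeys,
        Switch,
        Boot,
    }

    impl Goal {
        fn requires_activation(&self) -> bool {
            use Goal::*;
            !matches!(self, Build | UploadKeys | Push)
        }

        // The removed predicate had exactly the same arm set:
        fn is_real_goal(&self) -> bool {
            use Goal::*;
            !matches!(self, Build | UploadKeys | Push)
        }
    }

    fn main() {
        for goal in [Goal::Build, Goal::Push, Goal::UploadKeys, Goal::Switch, Goal::Boot] {
            assert_eq!(goal.is_real_goal(), goal.requires_activation());
        }
    }
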
@@ -198,7 +198,8 @@ impl Deployment {
 
     fn get_chunks(&mut self) -> Vec<TargetNodeMap> {
         let eval_limit = self.evaluation_node_limit.get_limit()
-            .unwrap_or(self.targets.len());
+            .unwrap_or_else(|| self.targets.len());
+
         let mut result = Vec::new();
 
         for chunk in self.targets.drain().chunks(eval_limit).into_iter() {
@@ -226,7 +227,7 @@ impl Deployment {
         }
 
         for (name, profile) in profiles.iter() {
-            let target = chunk.remove(&name).unwrap();
+            let target = chunk.remove(name).unwrap();
             self.clone().deploy_node(parent.clone(), target, profile.clone()).await?;
         }
 

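The `unwrap_or` → `unwrap_or_else` change goes in the direction clippy's or_fun_call suggests: `unwrap_or` evaluates its argument eagerly even when the `Option` is `Some`, while `unwrap_or_else` defers the computation behind a closure. With a cheap `len()` call the difference is negligible, so this one is mostly about habit. A standalone sketch:

    fn main() {
        let targets = vec!["alpha", "beta", "gamma"];
        let limit: Option<usize> = None;

        // The closure body runs only when `limit` is None.
        let eval_limit = limit.unwrap_or_else(|| targets.len());

        assert_eq!(eval_limit, 3);
    }
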
@@ -55,7 +55,7 @@ impl Flake {
 
     /// Returns the local directory, if it exists.
     pub fn local_dir(&self) -> Option<&Path> {
-        self.local_dir.as_ref().map(|d| d.as_path())
+        self.local_dir.as_deref()
     }
 }
 

@@ -23,7 +23,7 @@ use super::NixCommand;
 use crate::util::CommandExecution;
 use crate::job::JobHandle;
 
-const HIVE_EVAL: &'static [u8] = include_bytes!("eval.nix");
+const HIVE_EVAL: &[u8] = include_bytes!("eval.nix");
 
 #[derive(Debug)]
 pub enum HivePath {
@@ -250,7 +250,7 @@ impl Hive {
     /// Evaluation may take up a lot of memory, so we make it possible
     /// to split up the evaluation process into chunks and run them
     /// concurrently with other processes (e.g., build and apply).
-    pub async fn eval_selected(&self, nodes: &Vec<NodeName>, job: Option<JobHandle>) -> NixResult<StoreDerivation<ProfileMap>> {
+    pub async fn eval_selected(&self, nodes: &[NodeName], job: Option<JobHandle>) -> NixResult<StoreDerivation<ProfileMap>> {
         let nodes_expr = SerializedNixExpresssion::new(nodes)?;
 
         let expr = format!("hive.buildSelected {}", nodes_expr.expression());

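Changing `eval_selected` to take `&[NodeName]` instead of `&Vec<NodeName>` (like the `&PathBuf` → `&Path` change in `validate_dest_dir` below) follows clippy's ptr_arg: borrowing the owned container exposes nothing that the slice doesn't, and the slice form accepts more callers. A standalone sketch:

    // Accepts Vecs, arrays, and subslices alike.
    fn describe(nodes: &[String]) -> String {
        format!("{} node(s)", nodes.len())
    }

    fn main() {
        let owned: Vec<String> = vec!["web".to_string(), "db".to_string()];

        assert_eq!(describe(&owned), "2 node(s)");      // &Vec<String> coerces to &[String]
        assert_eq!(describe(&owned[..1]), "1 node(s)"); // so does a subslice
    }
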
@@ -260,7 +260,7 @@ impl Hive {
         execution.set_job(job);
 
         let path = execution.capture_store_path().await?;
-        let drv = path.to_derivation()
+        let drv = path.into_derivation()
             .expect("The result should be a store derivation");
 
         Ok(drv)
@@ -286,7 +286,7 @@ impl Hive {
         }
 
         let expr = "toJSON (hive.meta.machinesFile or null)";
-        let s: String = self.nix_instantiate(&expr).eval()
+        let s: String = self.nix_instantiate(expr).eval()
             .capture_json().await?;
 
         let parsed: Option<String> = serde_json::from_str(&s).unwrap();
@@ -303,7 +303,7 @@ impl Hive {
             options.append(&mut vec![
                 "--option".to_owned(),
                 "builders".to_owned(),
-                format!("@{}", machines_file).to_owned()
+                format!("@{}", machines_file),
             ]);
         }
 
@@ -311,7 +311,7 @@ impl Hive {
     }
 
     fn nix_instantiate(&self, expression: &str) -> NixInstantiate {
-        NixInstantiate::new(&self, expression.to_owned())
+        NixInstantiate::new(self, expression.to_owned())
     }
 
     fn path(&self) -> &HivePath {
@@ -410,7 +410,7 @@ struct SerializedNixExpresssion {
 }
 
 impl SerializedNixExpresssion {
-    pub fn new<'de, T>(data: T) -> NixResult<Self> where T: Serialize {
+    pub fn new<T>(data: T) -> NixResult<Self> where T: Serialize {
         let mut tmp = NamedTempFile::new()?;
         let json = serde_json::to_vec(&data).expect("Could not serialize data");
         tmp.write_all(&json)?;

@@ -16,7 +16,7 @@ use crate::job::JobHandle;
 use crate::nix::{Key, NixResult};
 use crate::util::capture_stream;
 
-const SCRIPT_TEMPLATE: &'static str = include_str!("./key_uploader.template.sh");
+const SCRIPT_TEMPLATE: &str = include_str!("./key_uploader.template.sh");
 
 pub fn generate_script<'a>(key: &'a Key, destination: &'a Path, require_ownership: bool) -> Cow<'a, str> {
     let key_script = SCRIPT_TEMPLATE.to_string()

@@ -57,14 +57,14 @@ impl Host for Local {
 
     async fn upload_keys(&mut self, keys: &HashMap<String, Key>, require_ownership: bool) -> NixResult<()> {
         for (name, key) in keys {
-            self.upload_key(&name, &key, require_ownership).await?;
+            self.upload_key(name, key, require_ownership).await?;
         }
 
         Ok(())
     }
 
     async fn activate(&mut self, profile: &Profile, goal: Goal) -> NixResult<()> {
-        if !goal.is_real_goal() {
+        if !goal.requires_activation() {
             return Err(NixError::Unsupported);
         }
 

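Iterating over a `&HashMap<String, Key>` already yields `(&String, &Key)` pairs, so the old `&name`/`&key` arguments added an extra layer of reference that auto-deref then peeled back off (clippy's needless_borrow). A standalone sketch with a hypothetical `upload` function in place of Colmena's `upload_key`:

    use std::collections::HashMap;

    fn upload(name: &str, key: &str) {
        println!("uploading {}: {} bytes", name, key.len());
    }

    fn upload_all(keys: &HashMap<String, String>) {
        for (name, key) in keys {
            // `name` and `key` are already references; no `&` needed.
            upload(name, key);
        }
    }

    fn main() {
        let mut keys = HashMap::new();
        keys.insert("wg-private".to_string(), "...".to_string());
        upload_all(&keys);
    }
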
@@ -54,13 +54,13 @@ impl Host for Ssh {
     }
     async fn upload_keys(&mut self, keys: &HashMap<String, Key>, require_ownership: bool) -> NixResult<()> {
         for (name, key) in keys {
-            self.upload_key(&name, &key, require_ownership).await?;
+            self.upload_key(name, key, require_ownership).await?;
         }
 
         Ok(())
     }
     async fn activate(&mut self, profile: &Profile, goal: Goal) -> NixResult<()> {
-        if !goal.is_real_goal() {
+        if !goal.requires_activation() {
             return Err(NixError::Unsupported);
         }
 
@@ -76,7 +76,7 @@ impl Host for Ssh {
         self.run_command(command).await
     }
     async fn run_command(&mut self, command: &[&str]) -> NixResult<()> {
-        let command = self.ssh(&command);
+        let command = self.ssh(command);
         self.run_command(command).await
     }
     async fn active_derivation_known(&mut self) -> NixResult<bool> {
@@ -86,7 +86,7 @@ impl Host for Ssh {
 
         match paths {
             Ok(paths) => {
-                for path in paths.lines().into_iter() {
+                if let Some(path) = paths.lines().into_iter().next() {
                     let remote_profile: StorePath = path.to_string().try_into().unwrap();
                     if remote_profile.exists() {
                         return Ok(true);

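One change above is not purely mechanical: the loop in `active_derivation_known` becomes an `if let` over `.next()`, so only the first line of the command's output is examined (the redundant `.into_iter()` on `lines()` survives the rewrite). Presumably the remote query prints at most one store path, making the old loop a single-iteration loop in practice; the new form states that directly. A standalone sketch of the first-line pattern:

    fn first_path(output: &str) -> Option<&str> {
        // Before: for path in output.lines() { /* return on first hit */ }
        if let Some(path) = output.lines().next() {
            return Some(path);
        }
        None
    }

    fn main() {
        assert_eq!(first_path("/nix/store/aaa-x\n/nix/store/bbb-y"), Some("/nix/store/aaa-x"));
        assert_eq!(first_path(""), None);
    }
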
@@ -160,7 +160,7 @@ fn validate_unix_name(name: &str) -> Result<(), ValidationError> {
     }
 }
 
-fn validate_dest_dir(dir: &PathBuf) -> Result<(), ValidationError> {
+fn validate_dest_dir(dir: &Path) -> Result<(), ValidationError> {
     if dir.has_root() {
         Ok(())
     } else {

@@ -47,7 +47,7 @@ pub use node_filter::NodeFilter;
 #[cfg(test)]
 mod tests;
 
-pub const SYSTEM_PROFILE: &'static str = "/nix/var/nix/profiles/system";
+pub const SYSTEM_PROFILE: &str = "/nix/var/nix/profiles/system";
 
 pub type NixResult<T> = Result<T, NixError>;
 
@@ -203,7 +203,7 @@ impl NodeName {
 
     fn validate(s: String) -> NixResult<String> {
         // FIXME: Elaborate
-        if s.len() == 0 {
+        if s.is_empty() {
             return Err(NixError::EmptyNodeName);
         }
 
@@ -230,7 +230,7 @@ impl NodeConfig {
             Some(uname) => uname.clone(),
             None => get_current_username().unwrap().into_string().unwrap(),
         };
-        let mut host = Ssh::new(username.clone(), target_host.clone());
+        let mut host = Ssh::new(username, target_host.clone());
         host.set_privilege_escalation_command(self.privilege_escalation_command.clone());
 
         if let Some(target_port) = self.target_port {
@@ -333,7 +333,7 @@ fn validate_keys(keys: &HashMap<String, Key>) -> Result<(), ValidationErrorType>
             return Err(ValidationErrorType::new("Secret key name cannot be absolute"));
         }
 
-        if path.components().collect::<Vec<_>>().len() != 1 {
+        if path.components().count() != 1 {
             return Err(ValidationErrorType::new("Secret key name cannot contain path separators"));
         }
     }

@@ -31,7 +31,7 @@ impl NodeFilter {
         let filter = filter.as_ref();
         let trimmed = filter.trim();
 
-        if trimmed.len() == 0 {
+        if trimmed.is_empty() {
             log::warn!("Filter \"{}\" is blank and will match nothing", filter);
 
             return Ok(Self {
@@ -39,14 +39,14 @@ impl NodeFilter {
             });
         }
 
-        let rules = trimmed.split(",").map(|pattern| {
+        let rules = trimmed.split(',').map(|pattern| {
             let pattern = pattern.trim();
 
-            if pattern.len() == 0 {
+            if pattern.is_empty() {
                 return Err(NixError::EmptyFilterRule);
             }
 
-            if let Some(tag_pattern) = pattern.strip_prefix("@") {
+            if let Some(tag_pattern) = pattern.strip_prefix('@') {
                 Ok(Rule::MatchTag(GlobPattern::new(tag_pattern).unwrap()))
             } else {
                 Ok(Rule::MatchName(GlobPattern::new(pattern).unwrap()))
@@ -66,14 +66,14 @@ impl NodeFilter {
     /// especially when its values (e.g., tags) depend on other parts of
     /// the configuration.
     pub fn has_node_config_rules(&self) -> bool {
-        self.rules.iter().find(|rule| rule.matches_node_config()).is_some()
+        self.rules.iter().any(|rule| rule.matches_node_config())
     }
 
     /// Runs the filter against a set of NodeConfigs and returns the matched ones.
     pub fn filter_node_configs<'a, I>(&self, nodes: I) -> HashSet<NodeName>
         where I: Iterator<Item = (&'a NodeName, &'a NodeConfig)>
     {
-        if self.rules.len() == 0 {
+        if self.rules.is_empty() {
             return HashSet::new();
         }
 
@@ -232,7 +232,7 @@ mod tests {
 
         nodes.insert(node!("gamma-b"), NodeConfig {
             tags: vec![ "ewaste".to_string() ],
-            ..template.clone()
+            ..template
         });
 
         assert_eq!(4, nodes.len());

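`iter().find(pred).is_some()` locates a witness only to throw it away; `iter().any(pred)` asks the boolean question directly and short-circuits the same way (clippy's search_is_some). A standalone sketch:

    fn main() {
        let rules = ["@ewaste", "web-*", "db-1"];

        // Before: rules.iter().find(|r| r.starts_with('@')).is_some()
        let has_tag_rule = rules.iter().any(|r| r.starts_with('@'));

        assert!(has_tag_rule);
    }
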
@@ -28,7 +28,7 @@ impl Profile {
             return Err(NixError::InvalidProfile);
         }
 
-        if let None = path.to_str() {
+        if path.to_str().is_none() {
             Err(NixError::InvalidProfile)
         } else {
             Ok(Self(path))
@@ -43,11 +43,10 @@ impl Profile {
             .expect("The string should be UTF-8 valid")
             .to_string();
 
-            let mut v = Vec::new();
-            v.push(switch_to_configuration);
-            v.push(goal.to_string());
-
-            Some(v)
+            Some(vec![
+                switch_to_configuration,
+                goal.to_string(),
+            ])
         } else {
             None
         }
@@ -60,7 +59,7 @@ impl Profile {
 
     /// Returns the raw store path.
     pub fn as_path(&self) -> &Path {
-        &self.0.as_path()
+        self.0.as_path()
     }
 }
 

@@ -28,7 +28,7 @@ impl StorePath {
     }
 
     /// Converts the store path into a store derivation.
-    pub fn to_derivation<T: TryFrom<Vec<StorePath>>>(self) -> Option<StoreDerivation<T>> {
+    pub fn into_derivation<T: TryFrom<Vec<StorePath>>>(self) -> Option<StoreDerivation<T>> {
         if self.is_derivation() {
             Some(StoreDerivation::<T>::from_store_path_unchecked(self))
         } else {
@@ -57,9 +57,9 @@ impl TryFrom<String> for StorePath {
     }
 }
 
-impl Into<PathBuf> for StorePath {
-    fn into(self) -> PathBuf {
-        self.0
+impl From<StorePath> for PathBuf {
+    fn from(sp: StorePath) -> Self {
+        sp.0
     }
 }
 

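Replacing `impl Into<PathBuf> for StorePath` with `impl From<StorePath> for PathBuf` is the recommended direction (clippy's from_over_into): the standard library's blanket `impl<T, U: From<T>> Into<U> for T` derives the `Into` side automatically, while the reverse does not hold. A standalone sketch:

    use std::path::PathBuf;

    struct StorePath(PathBuf);

    impl From<StorePath> for PathBuf {
        fn from(sp: StorePath) -> Self {
            sp.0
        }
    }

    fn main() {
        let sp = StorePath(PathBuf::from("/nix/store/example"));

        // The blanket impl provides .into() for free:
        let p: PathBuf = sp.into();
        assert_eq!(p, PathBuf::from("/nix/store/example"));
    }
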
@@ -111,11 +111,11 @@ impl SimpleProgressOutput {
         match self {
             Self::Plain(o) => {
                 o.run_until_completion().await
-                    .map(|o| Self::Plain(o))
+                    .map(Self::Plain)
             }
             Self::Spinner(o) => {
                 o.run_until_completion().await
-                    .map(|o| Self::Spinner(o))
+                    .map(Self::Spinner)
             }
         }
     }

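Tuple-variant constructors are ordinary functions, so `.map(|o| Self::Plain(o))` can be passed point-free as `.map(Self::Plain)` (clippy's redundant_closure). A standalone sketch:

    #[derive(Debug, PartialEq)]
    enum Output {
        Plain(u32),
        Spinner(u32),
    }

    fn main() {
        let plain: Result<u32, ()> = Ok(7);
        let spinner: Result<u32, ()> = Ok(8);

        // Before: .map(|o| Output::Plain(o))
        assert_eq!(plain.map(Output::Plain), Ok(Output::Plain(7)));
        assert_eq!(spinner.map(Output::Spinner), Ok(Output::Spinner(8)));
    }
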
@@ -59,9 +59,8 @@ struct JobState {
 impl SpinnerOutput {
     pub fn new() -> Self {
         let meta_bar = {
-            let bar = ProgressBar::new(100)
-                .with_style(get_spinner_style(DEFAULT_LABEL_WIDTH, LineStyle::Normal));
-            bar
+            ProgressBar::new(100)
+                .with_style(get_spinner_style(DEFAULT_LABEL_WIDTH, LineStyle::Normal))
         };
 
         let (sender, receiver) = create_channel();
@@ -84,7 +83,7 @@ impl SpinnerOutput {
             state.clone()
         } else {
             let bar = self.create_bar(LineStyle::Normal);
-            let state = JobState::new(bar.clone());
+            let state = JobState::new(bar);
             self.job_state.insert(job_id, state.clone());
             state
         }
@@ -95,7 +94,7 @@ impl SpinnerOutput {
         let bar = ProgressBar::new(100)
             .with_style(self.get_spinner_style(style));
 
-        let bar = self.multi.add(bar.clone());
+        let bar = self.multi.add(bar);
         bar.enable_steady_tick(100);
         bar
     }

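The spinner hunks drop `.clone()` on values at their last use (`JobState::new(bar)`, `self.multi.add(bar)`), which clippy's redundant_clone flags; with indicatif's `ProgressBar` the clone is cheap (it is a shared handle), so this is about clarity more than cost. A standalone sketch:

    struct Bar(String);

    fn consume(bar: Bar) -> String {
        bar.0
    }

    fn main() {
        let bar = Bar("deploy".to_string());

        // Before: consume(bar.clone()) even though `bar` was never used again.
        let label = consume(bar);

        assert_eq!(label, "deploy");
    }
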
@@ -31,23 +31,20 @@ pub async fn run_wrapped<'a, F, U, T>(global_args: &'a ArgMatches<'a>, local_arg
 }
 
 fn troubleshoot(global_args: &ArgMatches<'_>, _local_args: &ArgMatches<'_>, error: &NixError) -> Result<(), NixError> {
-    match error {
-        NixError::NoFlakesSupport => {
+    if let NixError::NoFlakesSupport = error {
         // People following the tutorial might put hive.nix directly
         // in their Colmena checkout, and encounter NoFlakesSupport
         // because Colmena always prefers flake.nix when it exists.
 
         if global_args.occurrences_of("config") == 0 {
             let cwd = env::current_dir()?;
             if cwd.join("flake.nix").is_file() && cwd.join("hive.nix").is_file() {
                 eprintln!("Hint: You have both flake.nix and hive.nix in the current directory, and");
                 eprintln!("      Colmena will always prefer flake.nix if it exists.");
                 eprintln!();
                 eprintln!("      Try passing `-f hive.nix` explicitly if this is what you want.");
             }
         };
-        }
-        _ => {},
     }
 
     Ok(())

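The troubleshooter rewrite turns a `match` with a single meaningful arm plus `_ => {}` into `if let` (clippy's single_match), cutting one level of nesting. A standalone sketch:

    enum Error {
        NoFlakesSupport,
        Other,
    }

    fn troubleshoot(error: &Error) {
        // Before: match error { Error::NoFlakesSupport => { ... }, _ => {} }
        if let Error::NoFlakesSupport = error {
            eprintln!("Hint: check for a conflicting flake.nix.");
        }
    }

    fn main() {
        troubleshoot(&Error::NoFlakesSupport);
        troubleshoot(&Error::Other);
    }
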
@@ -112,7 +112,7 @@ pub async fn hive_from_args(args: &ArgMatches<'_>) -> NixResult<Hive> {
     let path = args.value_of("config").expect("The config arg should exist").to_owned();
     let fpath = canonicalize_cli_path(&path);
 
-    if !fpath.exists() && path.contains(":") {
+    if !fpath.exists() && path.contains(':') {
         // Treat as flake URI
         let flake = Flake::from_uri(path).await?;
         let hive_path = HivePath::Flake(flake);
@@ -157,7 +157,7 @@ The list is comma-separated and globs are supported. To match tags, prepend the
 }
 
 fn canonicalize_cli_path(path: &str) -> PathBuf {
-    if !path.starts_with("/") {
+    if !path.starts_with('/') {
         format!("./{}", path).into()
     } else {
         path.into()

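Several call sites switch from one-character string patterns to `char` patterns (`split(',')` and `strip_prefix('@')` above; `contains(':')` and `starts_with('/')` here), per clippy's single_char_pattern; the `char` form sidesteps substring matching. A standalone sketch:

    fn main() {
        assert!(!"./hive.nix".starts_with('/'));
        assert!("github:zhaofengli/colmena".contains(':'));

        let parts: Vec<&str> = "a,b,@c".split(',').collect();
        assert_eq!(parts, ["a", "b", "@c"]);

        assert_eq!("@web-*".strip_prefix('@'), Some("web-*"));
    }
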