WIP: Generic activation program #9
9 changed files with 355 additions and 21 deletions
@ -44,9 +44,18 @@ version = "1.28.1"
features = [
    "fs",
    "io-util",
    "io-std",
    "macros",
    "process",
    "rt",
    "rt-multi-thread",
    "sync",
]

[[bin]]
name = "colmena"
path = "src/main.rs"

[lib]
name = "colmena"
path = "src/lib.rs"

@ -55,6 +55,9 @@ pub enum ColmenaError {
    #[snafu(display("Unexpected active profile: {:?}", profile))]
    ActiveProfileUnexpected { profile: Profile },

    #[snafu(display("Invalid JSON from activation program: {}", error))]
    InvalidActivationProgramJson { error: serde_json::Error },

    #[snafu(display("Could not determine current profile"))]
    FailedToGetCurrentProfile,

src/lib.rs (new file, 16 lines)
@ -0,0 +1,16 @@
#![allow(dead_code)]

mod cli;
mod command;
mod error;
mod job;
mod nix;
mod progress;
mod troubleshooter;
mod util;

pub use nix::host::generic::Request as GenericRequest;
pub use nix::host::generic::Response as GenericResponse;
pub use nix::host::CopyOptions;
pub use nix::key::{Key, UploadAt};
pub use nix::store::StorePath;
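
Together with the new [lib] target in Cargo.toml above, these re-exports let out-of-tree Rust code (for instance a Rust-based activation program or an integration test) reuse the wire types. A minimal sketch of that, assuming colmena is pulled in as a path or git dependency, which this PR does not itself set up:

// Hypothetical out-of-tree consumer of the new library target; the dependency
// on `colmena` and `serde_json` is assumed, not something this diff adds.
use colmena::{GenericRequest, GenericResponse};

fn main() {
    let response = GenericResponse::Progress {
        phase: "copying closure".to_string(),
    };

    // Both re-exported types derive Serialize/Deserialize, so one line of the
    // JSONL stream described in src/nix/host/generic.rs round-trips cleanly.
    let line = serde_json::to_string(&response).unwrap();
    let parsed: GenericResponse = serde_json::from_str(&line).unwrap();
    assert_eq!(parsed, response);

    // Requests can be constructed the same way on the sending side.
    let _request = GenericRequest::Connection {
        connection_uri: "ssh://example".to_string(),
    };
}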
@ -197,7 +197,7 @@ let
  ];

  serializableSystemTypeConfigKeys = [
    "supportsDeployment"
    "supportsDeployment" "activationProgram" "protocol"
  ];

in rec {

@ -274,24 +274,38 @@ impl Hive {
for node in selected_nodes.into_iter() {
    let config = node_configs.remove(&node).unwrap();

    let host = config.to_ssh_host().map(|mut host| {
        n_ssh += 1;

        if let Some(ssh_config) = &ssh_config {
            host.set_ssh_config(ssh_config.clone());
        }

        if self.is_flake() {
            host.set_use_nix3_copy(true);
        }

        host.upcast()
    });
    let ssh_host = host.is_some();
    let target = TargetNode::new(node.clone(), host, config);

    if !ssh_only || ssh_host {
    if let Some(system_type) = config.system_type.as_ref() {
rlahfa commented:
    Note: do not forget to also check whether there's an activationProgram in the registry and, if not, fall back to the legacy SSH mode.
        log::debug!(
            "Using generic host (system_type: {}) for node {}",
            system_type,
            node.0
        );
        let system_config = registry.systems.get(system_type).unwrap();
        let mut generic_host = config.to_generic_host(system_config)?;
        generic_host.connect().await?;
        let target = TargetNode::new(node.clone(), Some(Box::new(generic_host)), config);
        targets.insert(node, target);
    } else {
        log::debug!("Using SSH host for node {}", node.0);
        let host = config.to_ssh_host().map(|mut host| {
            n_ssh += 1;

            if let Some(ssh_config) = &ssh_config {
                host.set_ssh_config(ssh_config.clone());
            }

            if self.is_flake() {
                host.set_use_nix3_copy(true);
            }

            host.upcast()
        });
        let ssh_host = host.is_some();
        let target = TargetNode::new(node.clone(), host, config);

        if !ssh_only || ssh_host {
            targets.insert(node, target);
        }
    }
}
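
The inline note from rlahfa above asks for a fallback: only take the generic path when the registry actually provides an activation program for the node's system type, and otherwise keep the legacy SSH behaviour. A minimal sketch of such a check, using simplified stand-in types rather than the real ones (in particular, this diff declares activation_program as a required field, so treating it as optional below is an assumption):

// Illustrative only; not part of this PR. Stand-in for the registry lookup
// the review comment asks for.
use std::collections::HashMap;

struct SystemTypeConfig {
    // Assumed optional for the purpose of the fallback; the diff currently
    // makes this field mandatory.
    activation_program: Option<String>,
}

/// Returns the system-type config to drive the generic activation path,
/// or None to fall back to the legacy SSH host.
fn generic_system_config<'a>(
    system_type: Option<&str>,
    systems: &'a HashMap<String, SystemTypeConfig>,
) -> Option<&'a SystemTypeConfig> {
    systems
        .get(system_type?)
        .filter(|config| config.activation_program.is_some())
}

The loop above could then branch on the result: Some(..) builds the generic host, None keeps the existing to_ssh_host() branch.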
@ -134,6 +134,13 @@ with builtins; rec {
      type = types.nullOr types.str;
      default = "root";
    };
    connectionUri = lib.mkOption {
      description = mdDoc ''
        Connection options given to the activation program.
      '';
      type = types.str;
      default = "ssh://localhost";
    };
    allowLocalDeployment = lib.mkOption {
      description = mdDoc ''
        Allow the configuration to be applied locally on the host running

@ -258,6 +265,12 @@ with builtins; rec {
      type = types.functionTo types.unspecified;
      default = _: {};
    };
    activationProgram = lib.mkOption {
      description = mdDoc ''
        Program to execute at activation time.
      '';
      type = types.path;
    };
  };
};
registryOptions = { lib, ... }: let

src/nix/host/generic.rs (new file, 265 lines)
@ -0,0 +1,265 @@
use std::collections::HashMap;
use std::process::Stdio;

use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader, BufWriter};
use tokio::process::{Child, ChildStdin, ChildStdout, Command};

use super::{CopyDirection, CopyOptions, Host, RebootOptions};
use crate::error::{ColmenaError, ColmenaResult};
use crate::job::JobHandle;
use crate::nix::{self, Key, Profile, StorePath, SystemTypeConfig};

pub type TransportId = String;

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Transport {
    id: TransportId,
    long_name: String,
    description: String,
}

pub type GoalId = String;

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Goal {
    id: GoalId,
    long_name: String,
    description: String,
}

/// A request to the activation program
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum Request {
    Connection {
        connection_uri: String,
    },

    /// Copy closure to/from host
    CopyClosure {
        path: StorePath,
        to_host: bool,
        options: CopyOptions,
    },

    /// Deploys the profile to host
    Deploy {
        goal: GoalId,
        toplevel: StorePath,
        options: CopyOptions,
    },

    /// Realizes the derivation
    Realize {
        path: StorePath,
    },

    /// Uploads keys to host
    UploadKeys {
        keys: HashMap<String, Key>,
        require_ownership: bool,
    },

    Activate {
        profile: StorePath,
        goal: GoalId,
    },

    Reboot {
        wait_for_boot: bool,
    },
}

/// A response from the activation program
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum Response {
    ConnectionFailed { error: String },
    ConnectionSucceded { supported_goals: Vec<String> },
    Progress { phase: String },
    NewStorePath { store_path: StorePath },
    Failed { error: String },
    Unsupported,
}

#[derive(Debug)]
pub struct ActivationCommand {
    command: Child,
    stdin: BufWriter<ChildStdin>,
    stdout: BufReader<ChildStdout>,
}

#[derive(Debug)]
pub struct GenericHost {
    activation_program: ActivationCommand,
    connection_uri: String,
}

#[async_trait]
impl Host for GenericHost {
    async fn copy_closure(
        &mut self,
        closure: &StorePath,
        direction: CopyDirection,
        options: CopyOptions,
    ) -> ColmenaResult<()> {
        self.call_default_handler(Request::CopyClosure {
            path: closure.clone(),
            to_host: direction == CopyDirection::ToRemote,
            options,
        })
        .await
    }

    async fn realize_remote(&mut self, derivation: &StorePath) -> ColmenaResult<Vec<StorePath>> {
        Ok(self
            .call(
                Request::Realize {
                    path: derivation.clone(),
                },
                move |response, mut store_paths| {
                    match response {
                        Response::Progress { phase } => println!("{}", phase),
                        Response::NewStorePath { store_path } => store_paths.push(store_path),
                        _ => (),
                    };
                    store_paths
                },
                Vec::new(),
            )
            .await?)
    }

    fn set_job(&mut self, _: Option<JobHandle>) {}

    async fn deploy(
        &mut self,
        profile: &Profile,
        goal: nix::Goal,
        copy_options: CopyOptions,
    ) -> ColmenaResult<()> {
        self.call_default_handler(Request::Deploy {
            goal: goal.to_string(),
            toplevel: profile.as_store_path().clone(),
            options: copy_options,
        })
        .await
    }

    async fn upload_keys(
        &mut self,
        keys: &HashMap<String, Key>,
        require_ownership: bool,
    ) -> ColmenaResult<()> {
        self.call_default_handler(Request::UploadKeys {
            keys: keys.clone(),
            require_ownership,
        })
        .await
    }

    async fn get_current_system_profile(&mut self) -> ColmenaResult<Profile> {
        Err(ColmenaError::Unsupported)
    }

    async fn get_main_system_profile(&mut self) -> ColmenaResult<Profile> {
        Err(ColmenaError::Unsupported)
    }

    async fn activate(&mut self, profile: &Profile, goal: nix::Goal) -> ColmenaResult<()> {
        self.call_default_handler(Request::Activate {
            profile: profile.as_store_path().clone(),
            goal: goal.to_string(),
        })
        .await
    }

    async fn reboot(&mut self, options: RebootOptions) -> ColmenaResult<()> {
        self.call_default_handler(Request::Reboot {
            wait_for_boot: options.wait_for_boot,
        })
        .await
    }
}

impl GenericHost {
    pub fn new(system: &SystemTypeConfig, connection_uri: String) -> ColmenaResult<GenericHost> {
        let mut command = Command::new(system.activation_program.as_path())
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::inherit())
            .spawn()?;

        let stdin = BufWriter::new(command.stdin.take().unwrap());
        let stdout = BufReader::new(command.stdout.take().unwrap());

        let activation_program = ActivationCommand {
            command,
            stdin,
            stdout,
        };

        Ok(Self {
            activation_program,
            connection_uri,
        })
    }

    pub async fn connect(&mut self) -> ColmenaResult<()> {
        self.call_default_handler(Request::Connection {
            connection_uri: self.connection_uri.clone(),
        })
        .await
    }

    async fn call<F, T>(
        &mut self,
        request: Request,
        handler: F,
        initial_value: T,
    ) -> ColmenaResult<T>
    where
        F: Fn(Response, T) -> T,
    {
        let json = serde_json::to_string(&request)
            .map_err(|error| ColmenaError::InvalidActivationProgramJson { error })?;

        log::trace!("giving to activation program stdin {}", json.as_str());

        let stdin = &mut self.activation_program.stdin;
        let stdout = &mut self.activation_program.stdout;

        stdin.write_all(json.as_bytes()).await?;
        stdin.write_all(b"\n").await?;
        stdin.flush().await?;

        let mut line = String::new();
        let mut value = initial_value;

        // We're reading JSONL, so we can read all the line and parse it
        while stdout.read_line(&mut line).await.is_ok_and(|x| x != 0) {
            if line == "\n" {
                log::trace!("finished receiving responses from activation program");
                break;
            }
            log::trace!("receiving from activation program:\n{}", line);
            let response: Response = serde_json::from_str(line.as_str())
                .map_err(|error| ColmenaError::InvalidActivationProgramJson { error })?;

            match response {
                Response::Progress { phase } => {
                    log::info!("{phase}");
                    break;
                }
                Response::Unsupported => return Err(ColmenaError::Unsupported),
                _ => value = handler(response, value),
            }
        }

        Ok(value)
    }

    async fn call_default_handler(&mut self, request: Request) -> ColmenaResult<()> {
        self.call(request, |_, _| {}, ()).await
    }
}
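
In short, call writes one JSON-encoded Request per line to the activation program's stdin and reads newline-delimited Response values back until it sees an empty line. A minimal sketch of the other side of that exchange, i.e. what an activation program could look like; it treats the messages as plain JSON, and the externally tagged layout (e.g. {"Connection":{"connection_uri":"ssh://host"}}) is an assumption based on serde's default, since the diff adds no #[serde(tag = ...)] attribute:

// Hypothetical activation program skeleton; not part of this PR. Requires
// serde_json as a dependency. Reads one request per line from stdin, writes
// one or more responses to stdout, and ends each exchange with an empty line
// (which `call` above treats as the end of the response stream).
use std::io::{self, BufRead, Write};

fn main() -> io::Result<()> {
    let stdin = io::stdin();
    let mut stdout = io::stdout();

    for line in stdin.lock().lines() {
        let line = line?;
        if line.trim().is_empty() {
            continue;
        }

        let request: serde_json::Value =
            serde_json::from_str(&line).expect("request is not valid JSON");

        // Only the Connection request is handled in this sketch; everything
        // else is reported as unsupported so colmena surfaces
        // ColmenaError::Unsupported.
        let response = if request.get("Connection").is_some() {
            serde_json::json!({ "Progress": { "phase": "connected" } })
        } else {
            serde_json::json!("Unsupported")
        };

        writeln!(stdout, "{}", response)?; // one JSON response per line
        writeln!(stdout)?;                 // empty line: end of this exchange
        stdout.flush()?;
    }

    Ok(())
}

A real implementation would dispatch on every Request variant and stream Progress and NewStorePath responses as work proceeds.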
@ -1,6 +1,7 @@
use std::collections::HashMap;

use async_trait::async_trait;
use serde::{Deserialize, Serialize};

use super::{Goal, Key, Profile, StorePath};
use crate::error::{ColmenaError, ColmenaResult};

@ -14,13 +15,16 @@ pub use local::Local;

mod key_uploader;

#[derive(Copy, Clone, Debug)]
pub mod generic;
pub use generic::GenericHost;

#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum CopyDirection {
    ToRemote,
    FromRemote,
}

#[derive(Copy, Clone, Debug)]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CopyOptions {
    include_outputs: bool,
    use_substitutes: bool,

@ -10,8 +10,8 @@ use validator::{Validate, ValidationError as ValidationErrorType};
use crate::error::{ColmenaError, ColmenaResult};

pub mod host;
use host::Ssh;
pub use host::{CopyDirection, CopyOptions, Host, RebootOptions};
use host::{GenericHost, Ssh};

pub mod hive;
pub use hive::{Hive, HivePath};

@ -67,6 +67,9 @@ pub struct NodeConfig {
    #[serde(rename = "targetPort")]
    target_port: Option<u16>,

    #[serde(rename = "connectionUri")]
    connection_uri: String,

    #[serde(rename = "allowLocalDeployment")]
    allow_local_deployment: bool,

@ -101,6 +104,9 @@ pub struct MetaConfig {
pub struct SystemTypeConfig {
    #[serde(rename = "supportsDeployment")]
    pub supports_deployment: bool,

    #[serde(rename = "activationProgram")]
    pub activation_program: StorePath,
}

#[derive(Debug, Clone, Validate, Deserialize)]
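
These rename attributes line up with the serializableSystemTypeConfigKeys exported from the Nix evaluator earlier in this diff. For illustration, a stand-in struct showing the JSON shape a registry entry presumably deserializes from (the store path below is a fabricated placeholder, and the real type uses StorePath rather than String):

// Stand-in mirror of SystemTypeConfig, for illustration only; requires serde
// (with the derive feature) and serde_json.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct SystemTypeConfigMirror {
    #[serde(rename = "supportsDeployment")]
    supports_deployment: bool,
    #[serde(rename = "activationProgram")]
    activation_program: String,
}

fn main() {
    // Example registry entry as the evaluator might emit it; the path is made up.
    let json = r#"{
        "supportsDeployment": true,
        "activationProgram": "/nix/store/example-activate"
    }"#;

    let config: SystemTypeConfigMirror = serde_json::from_str(json).unwrap();
    assert!(config.supports_deployment);
    println!("{}", config.activation_program);
}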
@ -208,6 +214,10 @@ impl NodeConfig {
            host
        })
    }

    pub fn to_generic_host(&self, system_config: &SystemTypeConfig) -> ColmenaResult<GenericHost> {
        GenericHost::new(system_config, self.connection_uri.clone())
    }
}

impl NixFlags {