Add support for using an already-evaluated hive

Note that the interface hasn't been finalized yet.
After this commit, the discovery order is:

1. flake.nix `colmenaHive` output (prepared hive; see the sketch below)
2. flake.nix `colmena` output (raw hive)
3. hive.nix (raw hive)
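
To illustrate the first option, a flake that exposes a prepared hive through the `colmenaHive` output could look roughly like the sketch below. It mirrors the test fixture added in this commit; the `colmena` input URL and the single `host-a` node are illustrative only, not part of this change.

{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    colmena.url = "github:zhaofengli/colmena";   # illustrative input URL
  };

  outputs = { nixpkgs, colmena, ... }: {
    # Prepared hive: evaluated ahead of time by colmena.lib.makeHive and
    # picked up first in the discovery order above.
    colmenaHive = colmena.lib.makeHive {
      meta.nixpkgs = import nixpkgs { system = "x86_64-linux"; };

      host-a = { pkgs, ... }: {
        boot.isContainer = true;
      };
    };
  };
}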
Zhaofeng Li 2022-09-18 17:27:46 -06:00
parent bd048e98d8
commit d4dcf1c6e9
5 changed files with 130 additions and 7 deletions

@@ -78,7 +78,7 @@ impl Assets {
            }
            HivePath::Flake(_) => {
                format!(
-                   "with builtins; let assets = getFlake \"{assets_flake_uri}\"; hive = assets.colmenaEval; in ",
+                   "with builtins; let assets = getFlake \"{assets_flake_uri}\"; hive = assets.processFlake; in ",
                    assets_flake_uri = self.assets_flake_uri.as_ref().expect("The assets flake must have been initialized"),
                )
            }

@@ -6,10 +6,38 @@
  };

  outputs = { self, hive }: {
-    colmenaEval = import ./eval.nix {
-      rawFlake = hive;
-      colmenaOptions = import ./options.nix;
-      colmenaModules = import ./modules.nix;
-    };
+    processFlake = let
+      compatibleSchema = "v0";
+
+      # Evaluates a raw hive.
+      #
+      # This uses the `colmena` output.
+      evalHive = rawFlake: import ./eval.nix {
+        inherit rawFlake;
+        hermetic = true;
+        colmenaOptions = import ./options.nix;
+        colmenaModules = import ./modules.nix;
+      };
+
+      # Uses an already-evaluated hive.
+      #
+      # This uses the `colmenaHive` output.
+      checkPreparedHive = hiveOutput:
+        if !(hiveOutput ? __schema) then
+          throw ''
+            The colmenaHive output does not contain a valid evaluated hive.
+
+            Hint: Use `colmena.lib.makeHive`.
+          ''
+        else if hiveOutput.__schema != compatibleSchema then
+          throw ''
+            The colmenaHive output (schema ${hiveOutput.__schema}) isn't compatible with this version of Colmena.
+
+            Hint: Use the same version of Colmena as in the Flake input.
+          ''
+        else hiveOutput;
+    in
+      if hive.outputs ? colmenaHive then checkPreparedHive hive.outputs.colmenaHive
+      else evalHive hive;
  };
}
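
Put differently, the schema check above only accepts a `colmenaHive` output that carries a matching `__schema` attribute. A prepared hive produced by `colmena.lib.makeHive` is expected to be shaped roughly like this sketch (everything other than `__schema` is illustrative, not taken from the source):

{
  __schema = "v0";   # must match compatibleSchema for this version of Colmena
  # ... the rest of the evaluated hive (contents not shown here) ...
}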

@@ -0,0 +1,27 @@
+{
+  "nodes": {
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1624626397,
+        "narHash": "sha256-+h0ulo5//RqStx6g6MDqD9MzgmBfeZ1VYxwEaSmw/Zs=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "e1f8852faac7638e88d5e8a5b9ee2a7568685e3f",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixos-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "nixpkgs": "nixpkgs"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}

@@ -0,0 +1,29 @@
+{
+  inputs = {
+    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
+    colmena.url = "git+file://@repoPath@";
+  };
+
+  outputs = { nixpkgs, colmena, ... }: {
+    colmenaHive = colmena.lib.makeHive {
+      meta = {
+        nixpkgs = import nixpkgs {
+          system = "x86_64-linux";
+        };
+      };
+      host-a = { name, nodes, pkgs, ... }: {
+        boot.isContainer = true;
+        time.timeZone = nodes.host-b.config.time.timeZone;
+      };
+      host-b = {
+        deployment = {
+          targetHost = "somehost.tld";
+          targetPort = 1234;
+          targetUser = "luser";
+        };
+        boot.isContainer = true;
+        time.timeZone = "America/Los_Angeles";
+      };
+    };
+  };
+}

@@ -3,13 +3,14 @@
use super::*;

use std::collections::HashSet;
use std::fs;
use std::hash::Hash;
use std::io::Write;
use std::iter::{FromIterator, Iterator};
use std::ops::Deref;
use std::path::PathBuf;

-use tempfile::NamedTempFile;
+use tempfile::{Builder as TempFileBuilder, NamedTempFile};
use tokio_test::block_on;

macro_rules! node {
@@ -179,6 +180,44 @@ fn test_parse_flake() {
    ));
}

+#[test]
+fn test_parse_makehive_flake() {
+    // make a copy of the flake so we can edit the colmena input
+    let src_dir = PathBuf::from("./src/nix/hive/tests/makehive-flake");
+    let flake_dir = TempFileBuilder::new()
+        .prefix("makehive-flake-")
+        .tempdir()
+        .unwrap();
+
+    for entry in fs::read_dir(src_dir).unwrap() {
+        let entry = entry.unwrap();
+
+        if entry.file_type().unwrap().is_file() {
+            fs::copy(entry.path(), flake_dir.as_ref().join(entry.file_name())).unwrap();
+        }
+    }
+
+    let flake_nix = flake_dir.as_ref().join("flake.nix");
+    let patched_flake = fs::read_to_string(&flake_nix)
+        .unwrap()
+        .replace("@repoPath@", env!("CARGO_MANIFEST_DIR"));
+
+    fs::write(flake_nix, patched_flake).unwrap();
+
+    // run the test
+    let flake = block_on(Flake::from_dir(flake_dir)).unwrap();
+    let hive_path = HivePath::Flake(flake);
+    let mut hive = block_on(Hive::new(hive_path)).unwrap();
+    hive.set_show_trace(true);
+
+    let nodes = block_on(hive.deployment_info()).unwrap();
+
+    assert!(set_eq(
+        &["host-a", "host-b"],
+        &nodes.keys().map(NodeName::as_str).collect::<Vec<&str>>(),
+    ));
+}
+
#[test]
fn test_parse_node_references() {
    TempHive::valid(