Compare commits
main ... nix-define

3 commits:
- eee64b7aec
- 470a0c360a
- 76865b3293
33 changed files with 734 additions and 1473 deletions
@@ -10,7 +10,8 @@ insert_final_newline = true
trim_trailing_whitespace = true
charset = utf-8

[*.json]
# Rust
[*.rs]
indent_style = space
indent_size = 2
73 .github/workflows/build.yml (vendored)
@@ -13,23 +13,19 @@ jobs:
image: ubuntu-latest
system: aarch64-linux
- label: x86_64-darwin
image: macos-latest
system: x86_64-darwin
- label: aarch64-darwin
image: macos-latest
system: aarch64-darwin
image: macos-12

name: ${{ matrix.label }}
runs-on: ${{ matrix.image }}
steps:
- uses: actions/checkout@v4.2.2
- uses: actions/checkout@v3.3.0

- name: Install Nix
uses: DeterminateSystems/nix-installer-action@b92f66560d6f97d6576405a7bae901ab57e72b6a # v15
uses: DeterminateSystems/nix-installer-action@9b252454a8d70586c4ee7f163bf4bb1e9de3d763 # v2

- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
if: matrix.system == 'aarch64-linux'
uses: docker/setup-qemu-action@v2.1.0
if: matrix.system != ''

- name: Generate System Flags
run: |
@@ -43,7 +39,7 @@ jobs:
HOST_SYSTEM: '${{ matrix.system }}'

- name: Enable Binary Cache
uses: cachix/cachix-action@v15
uses: cachix/cachix-action@v12
with:
name: colmena
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
@@ -53,60 +49,3 @@ jobs:

- name: Build manual
run: nix build .#manual -L

nix-matrix:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- uses: actions/checkout@v4.2.2

- uses: DeterminateSystems/nix-installer-action@v15
continue-on-error: true # Self-hosted runners already have Nix installed

- name: Enable Binary Cache
uses: cachix/cachix-action@v15
with:
name: colmena
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'

- id: set-matrix
name: Generate Nix Matrix
run: |
set -Eeu
matrix="$(nix eval --json '.#githubActions.matrix')"
echo "matrix=$matrix" >> "$GITHUB_OUTPUT"

nix-matrix-job:
name: ${{ matrix.name }}
runs-on: ${{ matrix.os }}
needs:
- build
- nix-matrix
strategy:
matrix: ${{ fromJSON(needs.nix-matrix.outputs.matrix) }}
steps:
- name: Maximize build space
uses: easimon/maximize-build-space@master
with:
remove-dotnet: 'true'
build-mount-path: /nix

- name: Set /nix permissions
run: |
sudo chown root:root /nix

- uses: actions/checkout@v4.2.2

- uses: DeterminateSystems/nix-installer-action@v15
continue-on-error: true # Self-hosted runners already have Nix installed

- name: Enable Binary Cache
uses: cachix/cachix-action@v15
with:
name: colmena
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'

- name: Build ${{ matrix.attr }}
run: |
nix build --no-link --print-out-paths -L '.#${{ matrix.attr }}'
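For context (not part of this file's diff), the `nix eval --json '.#githubActions.matrix'` call above reads a flake output that is added elsewhere in this same comparison; a condensed sketch of that output, as it appears in the flake.nix hunk further down:

```nix
# Sketch of the githubActions output consumed by the nix-matrix job above
# (taken from the flake.nix change later in this comparison).
githubActions = nix-github-actions.lib.mkGithubMatrix {
  checks = {
    inherit (self.checks) x86_64-linux;
  };
};
```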
6 .github/workflows/linters.yml (vendored)
@@ -10,13 +10,13 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4.2.2
- uses: actions/checkout@v3.3.0

- name: Install Nix
uses: DeterminateSystems/nix-installer-action@b92f66560d6f97d6576405a7bae901ab57e72b6a # v15
uses: DeterminateSystems/nix-installer-action@9b252454a8d70586c4ee7f163bf4bb1e9de3d763 # v2

- name: Enable binary cache
uses: cachix/cachix-action@v15
uses: cachix/cachix-action@v12
with:
name: colmena
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
10 .github/workflows/manual-stable.yml (vendored)
@@ -16,13 +16,13 @@ jobs:
if: github.repository == 'zhaofengli/colmena'

steps:
- uses: actions/checkout@v4.2.2
- uses: actions/checkout@v3.3.0

- name: Install Nix
uses: DeterminateSystems/nix-installer-action@b92f66560d6f97d6576405a7bae901ab57e72b6a # v15
uses: DeterminateSystems/nix-installer-action@9b252454a8d70586c4ee7f163bf4bb1e9de3d763 # v2

- name: Enable Binary Cache
uses: cachix/cachix-action@v15
uses: cachix/cachix-action@v12
with:
name: colmena
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
@@ -38,7 +38,7 @@ jobs:
run: nix build .#manual -L

- name: Deploy manual
uses: JamesIves/github-pages-deploy-action@v4.6.9
uses: JamesIves/github-pages-deploy-action@4.1.6
with:
branch: gh-pages
folder: result
@@ -52,7 +52,7 @@ jobs:
if: ${{ env.api_version == env.latest_stable_api }}

- name: Deploy redirect farm
uses: JamesIves/github-pages-deploy-action@v4.6.9
uses: JamesIves/github-pages-deploy-action@4.1.6
with:
branch: gh-pages
folder: result-redirectFarm
10 .github/workflows/manual.yml (vendored)
@@ -16,13 +16,13 @@ jobs:
if: github.repository == 'zhaofengli/colmena'

steps:
- uses: actions/checkout@v4.2.2
- uses: actions/checkout@v3.3.0

- name: Install Nix
uses: DeterminateSystems/nix-installer-action@b92f66560d6f97d6576405a7bae901ab57e72b6a # v15
uses: DeterminateSystems/nix-installer-action@9b252454a8d70586c4ee7f163bf4bb1e9de3d763 # v2

- name: Enable Binary Cache
uses: cachix/cachix-action@v15
uses: cachix/cachix-action@v12
with:
name: colmena
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
@@ -32,7 +32,7 @@ jobs:
run: nix build .#manual -L

- name: Deploy manual
uses: JamesIves/github-pages-deploy-action@v4.6.9
uses: JamesIves/github-pages-deploy-action@v4.3.4
with:
branch: gh-pages
folder: result
@@ -47,7 +47,7 @@ jobs:
run: nix build .#manual.redirectFarm -L

- name: Deploy redirect farm
uses: JamesIves/github-pages-deploy-action@v4.6.9
uses: JamesIves/github-pages-deploy-action@4.1.6
with:
branch: gh-pages
folder: result-redirectFarm
6 .github/workflows/tests.yml (vendored)
@@ -13,15 +13,15 @@ jobs:
name: ${{ matrix.os.label }}
runs-on: ${{ matrix.os.image }}
steps:
- uses: actions/checkout@v4.2.2
- uses: actions/checkout@v3.3.0
with:
fetch-depth: 0

- name: Install Nix
uses: DeterminateSystems/nix-installer-action@b92f66560d6f97d6576405a7bae901ab57e72b6a # v15
uses: DeterminateSystems/nix-installer-action@9b252454a8d70586c4ee7f163bf4bb1e9de3d763 # v2

- name: Enable Binary Cache
uses: cachix/cachix-action@v15
uses: cachix/cachix-action@v12
with:
name: colmena
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
1 .gitignore (vendored)
@@ -1,4 +1,3 @@
result*
/target
/.direnv
/.vscode
18 .srcignore
@@ -1,18 +0,0 @@
# Exclusions from source distribution
#
# Files listed here will not be part of colmena.src

/.github
/CNAME
/renovate.json

/manual
/integration-tests

/nix
/default.nix
/flake-compat.nix
/package.nix
/shell.nix

# vim: set ft=gitignore:
1227 Cargo.lock (generated)
File diff suppressed because it is too large.
11 Cargo.toml
@@ -9,17 +9,18 @@ edition = "2021"
[dependencies]
async-stream = "0.3.5"
async-trait = "0.1.68"
atty = "0.2"
clap = { version = "4.3", features = ["derive"] }
clap_complete = "4.3"
clicolors-control = "1"
console = "0.15.5"
const_format = "0.2.30"
env_logger = "0.11.0"
env_logger = "0.10.0"
futures = "0.3.28"
glob = "0.3.1"
hostname = "0.4.0"
hostname = "0.3.1"
indicatif = "0.17.3"
itertools = "0.13.0"
itertools = "0.11.0"
libc = "0.2.144"
log = "0.4.17"
quit = "2.0.0"
@@ -27,12 +28,12 @@ regex = "1"
serde = { version = "1.0.163", features = ["derive"] }
serde_json = "1.0"
shell-escape = "0.1.5"
snafu = { version = "0.8.0", features = ["backtrace", "backtraces-impl-backtrace-crate"] }
snafu = { version = "0.7.4", features = ["backtrace", "backtraces-impl-backtrace-crate"] }
sys-info = "0.9.1"
tempfile = "3.5.0"
tokio-stream = "0.1.14"
uuid = { version = "1.3.2", features = ["serde", "v4"] }
validator = { version = "0.19.0", features = ["derive"] }
validator = { version = "0.16.0", features = ["derive"] }

[dev-dependencies]
ntest = "0.9.0"
@@ -3,7 +3,7 @@
[![Matrix Channel](https://img.shields.io/badge/Matrix-%23colmena%3Anixos.org-blueviolet)](https://matrix.to/#/#colmena:nixos.org)
[![Stable Manual](https://img.shields.io/badge/Manual-Stable-informational)](https://colmena.cli.rs/stable)
[![Unstable Manual](https://img.shields.io/badge/Manual-Unstable-orange)](https://colmena.cli.rs/unstable)
[![Build](https://github.com/zhaofengli/colmena/actions/workflows/build.yml/badge.svg)](https://github.com/zhaofengli/colmena/actions/workflows/build.yml)
[![Build](https://github.com/zhaofengli/colmena/workflows/Build/badge.svg)](https://github.com/zhaofengli/colmena/actions/workflows/build.yml)

Colmena is a simple, stateless [NixOS](https://nixos.org) deployment tool modeled after [NixOps](https://github.com/NixOS/nixops) and [morph](https://github.com/DBCDK/morph), written in Rust.
It's a thin wrapper over Nix commands like `nix-instantiate` and `nix-copy-closure`, and supports parallel deployment.
35 flake.lock
@ -31,33 +31,13 @@
|
|||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nix-github-actions": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1729742964,
|
||||
"narHash": "sha256-B4mzTcQ0FZHdpeWcpDYPERtyjJd/NIuaQ9+BV1h+MpA=",
|
||||
"owner": "nix-community",
|
||||
"repo": "nix-github-actions",
|
||||
"rev": "e04df33f62cdcf93d73e9a04142464753a16db67",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-community",
|
||||
"repo": "nix-github-actions",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1734649271,
|
||||
"narHash": "sha256-4EVBRhOjMDuGtMaofAIqzJbg4Ql7Ai0PSeuVZTHjyKQ=",
|
||||
"lastModified": 1696019113,
|
||||
"narHash": "sha256-X3+DKYWJm93DRSdC5M6K5hLqzSya9BjibtBsuARoPco=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "d70bd19e0a38ad4790d3913bf08fcbfc9eeca507",
|
||||
"rev": "f5892ddac112a1e9b3612c39af1b72987ee5783a",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
@ -71,23 +51,22 @@
|
|||
"inputs": {
|
||||
"flake-compat": "flake-compat",
|
||||
"flake-utils": "flake-utils",
|
||||
"nix-github-actions": "nix-github-actions",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"stable": "stable"
|
||||
}
|
||||
},
|
||||
"stable": {
|
||||
"locked": {
|
||||
"lastModified": 1734875076,
|
||||
"narHash": "sha256-Pzyb+YNG5u3zP79zoi8HXYMs15Q5dfjDgwCdUI5B0nY=",
|
||||
"lastModified": 1696039360,
|
||||
"narHash": "sha256-g7nIUV4uq1TOVeVIDEZLb005suTWCUjSY0zYOlSBsyE=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "1807c2b91223227ad5599d7067a61665c52d1295",
|
||||
"rev": "32dcb45f66c0487e92db8303a798ebc548cadedc",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-24.11",
|
||||
"ref": "nixos-23.05",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
|
|
45 flake.nix
@ -3,12 +3,7 @@
|
|||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
stable.url = "github:NixOS/nixpkgs/nixos-24.11";
|
||||
|
||||
nix-github-actions = {
|
||||
url = "github:nix-community/nix-github-actions";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
stable.url = "github:NixOS/nixpkgs/nixos-23.05";
|
||||
|
||||
flake-utils.url = "github:numtide/flake-utils";
|
||||
|
||||
|
@ -18,23 +13,12 @@
|
|||
};
|
||||
};
|
||||
|
||||
outputs = {
|
||||
self,
|
||||
nixpkgs,
|
||||
stable,
|
||||
flake-utils,
|
||||
nix-github-actions,
|
||||
...
|
||||
} @ inputs: let
|
||||
outputs = { self, nixpkgs, stable, flake-utils, ... } @ inputs: let
|
||||
supportedSystems = [ "x86_64-linux" "i686-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" ];
|
||||
colmenaOptions = import ./src/nix/hive/options.nix;
|
||||
colmenaModules = import ./src/nix/hive/modules.nix;
|
||||
in flake-utils.lib.eachSystem supportedSystems (system: let
|
||||
pkgs = import nixpkgs {
|
||||
inherit system;
|
||||
overlays = [
|
||||
];
|
||||
};
|
||||
pkgs = nixpkgs.legacyPackages.${system};
|
||||
in rec {
|
||||
# We still maintain the expression in a Nixpkgs-acceptable form
|
||||
defaultPackage = self.packages.${system}.colmena;
|
||||
|
@ -99,17 +83,11 @@
|
|||
in if pkgs.stdenv.isLinux then import ./integration-tests {
|
||||
pkgs = import nixpkgs {
|
||||
inherit system;
|
||||
overlays = [
|
||||
self.overlays.default
|
||||
inputsOverlay
|
||||
];
|
||||
overlays = [ self.overlays.default inputsOverlay ];
|
||||
};
|
||||
pkgsStable = import stable {
|
||||
inherit system;
|
||||
overlays = [
|
||||
self.overlays.default
|
||||
inputsOverlay
|
||||
];
|
||||
overlays = [ self.overlays.default inputsOverlay ];
|
||||
};
|
||||
} else {};
|
||||
}) // {
|
||||
|
@ -126,11 +104,14 @@
|
|||
inherit rawHive colmenaOptions colmenaModules;
|
||||
hermetic = true;
|
||||
};
|
||||
};
|
||||
|
||||
githubActions = nix-github-actions.lib.mkGithubMatrix {
|
||||
checks = {
|
||||
inherit (self.checks) x86_64-linux;
|
||||
};
|
||||
};
|
||||
nixConfig = {
|
||||
extra-substituters = [
|
||||
"https://colmena.cachix.org"
|
||||
];
|
||||
extra-trusted-public-keys = [
|
||||
"colmena.cachix.org-1:7BzpDnjjH8ki2CT3f6GdOk7QAzPOl+1t3LvTLXqYcSg="
|
||||
];
|
||||
};
|
||||
}
|
||||
|
|
3 garnix.yaml (new file)
@@ -0,0 +1,3 @@
builds:
  include:
    - 'checks.x86_64-linux.*'
@ -8,18 +8,8 @@
|
|||
apply-local = import ./apply-local { inherit pkgs; };
|
||||
build-on-target = import ./build-on-target { inherit pkgs; };
|
||||
exec = import ./exec { inherit pkgs; };
|
||||
|
||||
# FIXME: The old evaluation method doesn't work purely with Nix 2.21+
|
||||
flakes = import ./flakes {
|
||||
inherit pkgs;
|
||||
extraApplyFlags = "--experimental-flake-eval";
|
||||
};
|
||||
flakes-impure = import ./flakes {
|
||||
inherit pkgs;
|
||||
pure = false;
|
||||
};
|
||||
#flakes-streaming = import ./flakes { inherit pkgs; evaluator = "streaming"; };
|
||||
|
||||
flakes = import ./flakes { inherit pkgs; };
|
||||
flakes-streaming = import ./flakes { inherit pkgs; evaluator = "streaming"; };
|
||||
parallel = import ./parallel { inherit pkgs; };
|
||||
|
||||
allow-apply-all = import ./allow-apply-all { inherit pkgs; };
|
||||
|
|
|
@ -1,29 +1,13 @@
|
|||
{ pkgs
|
||||
, evaluator ? "chunked"
|
||||
, extraApplyFlags ? ""
|
||||
, pure ? true
|
||||
}:
|
||||
|
||||
let
|
||||
inherit (pkgs) lib;
|
||||
|
||||
tools = pkgs.callPackage ../tools.nix {
|
||||
targets = [ "alpha" ];
|
||||
};
|
||||
|
||||
applyFlags = "--evaluator ${evaluator} ${extraApplyFlags}"
|
||||
+ lib.optionalString (!pure) "--impure";
|
||||
|
||||
# From integration-tests/nixpkgs.nix
|
||||
colmenaFlakeInputs = pkgs._inputs;
|
||||
in tools.runTest {
|
||||
name = "colmena-flakes-${evaluator}"
|
||||
+ lib.optionalString (!pure) "-impure";
|
||||
|
||||
nodes.deployer = {
|
||||
virtualisation.additionalPaths =
|
||||
lib.mapAttrsToList (k: v: v.outPath) colmenaFlakeInputs;
|
||||
};
|
||||
name = "colmena-flakes-${evaluator}";
|
||||
|
||||
colmena.test = {
|
||||
bundle = ./.;
|
||||
|
@ -32,13 +16,12 @@ in tools.runTest {
|
|||
import re
|
||||
|
||||
deployer.succeed("sed -i 's @nixpkgs@ path:${pkgs._inputs.nixpkgs.outPath}?narHash=${pkgs._inputs.nixpkgs.narHash} g' /tmp/bundle/flake.nix")
|
||||
deployer.succeed("sed -i 's @colmena@ path:${tools.colmena.src} g' /tmp/bundle/flake.nix")
|
||||
|
||||
with subtest("Lock flake dependencies"):
|
||||
deployer.succeed("cd /tmp/bundle && nix --extra-experimental-features \"nix-command flakes\" flake lock")
|
||||
|
||||
with subtest("Deploy with a plain flake without git"):
|
||||
deployer.succeed("cd /tmp/bundle && ${tools.colmenaExec} apply --on @target ${applyFlags}")
|
||||
deployer.succeed("cd /tmp/bundle && ${tools.colmenaExec} apply --on @target --evaluator ${evaluator}")
|
||||
alpha.succeed("grep FIRST /etc/deployment")
|
||||
|
||||
with subtest("Deploy with a git flake"):
|
||||
|
@ -46,22 +29,21 @@ in tools.runTest {
|
|||
|
||||
# don't put probe.nix in source control - should fail
|
||||
deployer.succeed("cd /tmp/bundle && git init && git add flake.nix flake.lock hive.nix tools.nix")
|
||||
logs = deployer.fail("cd /tmp/bundle && run-copy-stderr ${tools.colmenaExec} apply --on @target ${applyFlags}")
|
||||
assert re.search(r"probe.nix.*(No such file or directory|does not exist)", logs), "Expected error message not found in log"
|
||||
logs = deployer.fail("cd /tmp/bundle && run-copy-stderr ${tools.colmenaExec} apply --on @target --evaluator ${evaluator}")
|
||||
assert re.search(r"probe.nix.*No such file or directory", logs)
|
||||
|
||||
# now it should succeed
|
||||
deployer.succeed("cd /tmp/bundle && git add probe.nix")
|
||||
deployer.succeed("cd /tmp/bundle && ${tools.colmenaExec} apply --on @target ${applyFlags}")
|
||||
deployer.succeed("cd /tmp/bundle && ${tools.colmenaExec} apply --on @target --evaluator ${evaluator}")
|
||||
alpha.succeed("grep SECOND /etc/deployment")
|
||||
|
||||
'' + lib.optionalString pure ''
|
||||
with subtest("Check that impure expressions are forbidden"):
|
||||
deployer.succeed("sed -i 's|SECOND|''${builtins.readFile /etc/hostname}|g' /tmp/bundle/probe.nix")
|
||||
logs = deployer.fail("cd /tmp/bundle && run-copy-stderr ${tools.colmenaExec} apply --on @target ${applyFlags}")
|
||||
assert re.search(r"access to absolute path.*forbidden in pure (eval|evaluation) mode", logs), "Expected error message not found in log"
|
||||
logs = deployer.fail("cd /tmp/bundle && run-copy-stderr ${tools.colmenaExec} apply --on @target --evaluator ${evaluator}")
|
||||
assert re.search(r"access to absolute path.*forbidden in pure eval mode", logs)
|
||||
|
||||
with subtest("Check that impure expressions can be allowed with --impure"):
|
||||
deployer.succeed("cd /tmp/bundle && ${tools.colmenaExec} apply --on @target ${applyFlags} --impure")
|
||||
deployer.succeed("cd /tmp/bundle && ${tools.colmenaExec} apply --on @target --evaluator ${evaluator} --impure")
|
||||
alpha.succeed("grep deployer /etc/deployment")
|
||||
'';
|
||||
};
|
||||
|
|
|
@ -3,15 +3,13 @@
|
|||
|
||||
inputs = {
|
||||
nixpkgs.url = "@nixpkgs@";
|
||||
colmena.url = "@colmena@";
|
||||
};
|
||||
|
||||
outputs = { self, nixpkgs, colmena }: let
|
||||
outputs = { self, nixpkgs }: let
|
||||
pkgs = import nixpkgs {
|
||||
system = "x86_64-linux";
|
||||
};
|
||||
in {
|
||||
colmena = import ./hive.nix { inherit pkgs; };
|
||||
colmenaHive = colmena.lib.makeHive self.outputs.colmena;
|
||||
};
|
||||
}
|
||||
|
|
|
@ -140,7 +140,7 @@ let
|
|||
nix.settings.substituters = lib.mkForce [];
|
||||
|
||||
virtualisation = {
|
||||
memorySize = 6144;
|
||||
memorySize = 4096;
|
||||
writableStore = true;
|
||||
additionalPaths = [
|
||||
"${pkgs.path}"
|
||||
|
@ -165,9 +165,6 @@ let
|
|||
exec "$@" 2> >(tee /dev/stderr)
|
||||
'')
|
||||
];
|
||||
|
||||
# Re-enable switch-to-configuration
|
||||
system.switch.enable = true;
|
||||
};
|
||||
|
||||
# Setup for target nodes
|
||||
|
@ -183,9 +180,6 @@ let
|
|||
sshKeys.snakeOilPublicKey
|
||||
];
|
||||
virtualisation.writableStore = true;
|
||||
|
||||
# Re-enable switch-to-configuration
|
||||
system.switch.enable = true;
|
||||
};
|
||||
|
||||
nodes = let
|
||||
|
|
|
@@ -90,34 +90,6 @@ To build and deploy to all nodes:
colmena apply
```

## Direct Flake Evaluation (Experimental)

By default, Colmena uses `nix-instantiate` to evaluate your flake, which no longer works purely on Nix 2.21+ and necessitates the use of `--impure`.
There is experimental support for evaluating flakes directly with `nix eval`, enabled via `--experimental-flake-eval`.

To use this new evaluation mode, your flake needs to depend on Colmena itself as an input and expose a new output called `colmenaHive`:

```diff
{
  inputs = {
+   # ADDED: Colmena input
+   colmena.url = "github:zhaofengli/colmena";

    # ... Rest of configuration ...
  };
  outputs = { self, colmena, ... }: {
+   # ADDED: New colmenaHive output
+   colmenaHive = colmena.lib.makeHive self.outputs.colmena;

    # Your existing colmena output
    colmena = {
      # ... Rest of configuration ...
    };
  };
}
```
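Putting the snippet above together, a minimal flake using the new output might look like the following sketch; the node name, target host, and nixpkgs pin are hypothetical, and only the `colmena` input and the `colmenaHive = colmena.lib.makeHive ...` output come from the documented change:

```nix
{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; # hypothetical pin
    colmena.url = "github:zhaofengli/colmena";           # required for --experimental-flake-eval
  };

  outputs = { self, nixpkgs, colmena, ... }: {
    # Pre-evaluated hive consumed by the experimental direct flake evaluation.
    colmenaHive = colmena.lib.makeHive self.outputs.colmena;

    colmena = {
      meta.nixpkgs = import nixpkgs { system = "x86_64-linux"; };

      # Hypothetical node, for illustration only.
      web-01 = { ... }: {
        deployment.targetHost = "web-01.example.com";
      };
    };
  };
}
```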
## Next Steps

- Head to the [Features](../features/index.md) section to see what else Colmena can do.
13 package.nix
@ -1,16 +1,13 @@
|
|||
{ lib
|
||||
, stdenv
|
||||
, rustPlatform
|
||||
, nix-gitignore
|
||||
, installShellFiles
|
||||
, nix-eval-jobs
|
||||
}:
|
||||
{ lib, stdenv, rustPlatform, installShellFiles, nix-eval-jobs }:
|
||||
|
||||
rustPlatform.buildRustPackage rec {
|
||||
pname = "colmena";
|
||||
version = "0.5.0-pre";
|
||||
|
||||
src = nix-gitignore.gitignoreSource [ ./.srcignore ] ./.;
|
||||
src = lib.cleanSourceWith {
|
||||
filter = name: type: !(type == "directory" && builtins.elem (baseNameOf name) [ "target" "manual" "integration-tests" ]);
|
||||
src = lib.cleanSource ./.;
|
||||
};
|
||||
|
||||
cargoLock = {
|
||||
lockFile = ./Cargo.lock;
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
{
|
||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
||||
"extends": [
|
||||
"config:recommended",
|
||||
"group:allNonMajor"
|
||||
],
|
||||
"lockFileMaintenance": {
|
||||
"enabled": true,
|
||||
"extends": ["schedule:weekly"]
|
||||
},
|
||||
"nix": {
|
||||
"enabled": true
|
||||
}
|
||||
}
|
22 src/cli.rs
@ -10,7 +10,7 @@ use env_logger::fmt::WriteStyle;
|
|||
use crate::{
|
||||
command::{self, apply::DeployOpts},
|
||||
error::ColmenaResult,
|
||||
nix::{hive::EvaluationMethod, Hive, HivePath},
|
||||
nix::{Hive, HivePath},
|
||||
};
|
||||
|
||||
/// Base URL of the manual, without the trailing slash.
|
||||
|
@ -137,21 +137,6 @@ This only works when building locally.
|
|||
value_names = ["NAME", "VALUE"],
|
||||
)]
|
||||
nix_option: Vec<String>,
|
||||
#[arg(
|
||||
long,
|
||||
default_value_t,
|
||||
help = "Use direct flake evaluation (experimental)",
|
||||
long_help = r#"If enabled, flakes will be evaluated using `nix eval`. This requires the flake to depend on Colmena as an input and expose a compatible `colmenaHive` output:
|
||||
|
||||
outputs = { self, colmena, ... }: {
|
||||
colmenaHive = colmena.lib.makeHive self.outputs.colmena;
|
||||
colmena = ...;
|
||||
};
|
||||
|
||||
This is an experimental feature."#,
|
||||
global = true
|
||||
)]
|
||||
experimental_flake_eval: bool,
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "WHEN",
|
||||
|
@ -277,11 +262,6 @@ async fn get_hive(opts: &Opts) -> ColmenaResult<Hive> {
|
|||
hive.set_impure(true);
|
||||
}
|
||||
|
||||
if opts.experimental_flake_eval {
|
||||
log::warn!("Using direct flake evaluation (experimental)");
|
||||
hive.set_evaluation_method(EvaluationMethod::DirectFlakeEval);
|
||||
}
|
||||
|
||||
for chunks in opts.nix_option.chunks_exact(2) {
|
||||
let [name, value] = chunks else {
|
||||
unreachable!()
|
||||
|
|
14 src/job.rs
@ -874,17 +874,11 @@ fn describe_node_list(nodes: &[NodeName]) -> Option<String> {
|
|||
}
|
||||
|
||||
let (idx, next) = next.unwrap();
|
||||
let remaining_text = rough_limit - s.len();
|
||||
let remaining_nodes = total - idx;
|
||||
let remaining = rough_limit - s.len();
|
||||
|
||||
if next.len() + other_text.len() >= remaining_text {
|
||||
if remaining_nodes == 1 {
|
||||
write!(s, ", and {}", next.as_str()).unwrap();
|
||||
break;
|
||||
} else {
|
||||
write!(s, ", and {} other nodes", remaining_nodes).unwrap();
|
||||
break;
|
||||
}
|
||||
if next.len() + other_text.len() >= remaining {
|
||||
write!(s, ", and {} other nodes", total - idx).unwrap();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -370,6 +370,9 @@ impl Deployment {
|
|||
for (name, profile_drv) in profile_drvs.iter() {
|
||||
let mut target = chunk.remove(name).unwrap();
|
||||
|
||||
// TODO: look if target's system type support build on target.
|
||||
// hard fail if the build on target.
|
||||
|
||||
if let Some(force_build_on_target) = self.options.force_build_on_target {
|
||||
target.config.set_build_on_target(force_build_on_target);
|
||||
}
|
||||
|
@ -439,6 +442,9 @@ impl Deployment {
|
|||
return Err(ColmenaError::Unsupported);
|
||||
}
|
||||
|
||||
// TODO: add to check if there's support to upload keys.
|
||||
// If not, warn about it but do nothing.
|
||||
|
||||
let host = target.host.as_mut().unwrap();
|
||||
host.set_job(Some(job));
|
||||
host.upload_keys(&target.config.keys, true).await?;
|
||||
|
@ -621,6 +627,8 @@ impl Deployment {
|
|||
let host = target.host.as_mut().unwrap();
|
||||
host.set_job(Some(job.clone()));
|
||||
|
||||
// TODO: certain system types does not have a concept of profile.
|
||||
// generalize the profile algorithm
|
||||
if !target.config.replace_unknown_profiles {
|
||||
job.message("Checking remote profile...".to_string())?;
|
||||
|
||||
|
|
|
@ -51,10 +51,7 @@ impl Assets {
|
|||
// We explicitly specify `path:` instead of letting Nix resolve
|
||||
// automatically, which would involve checking parent directories
|
||||
// for a git repository.
|
||||
let uri = format!(
|
||||
"path:{}",
|
||||
temp_dir.path().canonicalize().unwrap().to_str().unwrap()
|
||||
);
|
||||
let uri = format!("path:{}", temp_dir.path().to_str().unwrap());
|
||||
let _ = lock_flake_quiet(&uri).await;
|
||||
let assets_flake = Flake::from_uri(uri).await?;
|
||||
assets_flake_uri = Some(assets_flake.locked_uri().to_owned());
|
||||
|
|
|
@ -202,7 +202,7 @@ let
|
|||
|
||||
in rec {
|
||||
# Exported attributes
|
||||
__schema = "v0.20241006";
|
||||
__schema = "v0";
|
||||
|
||||
nodes = listToAttrs (map (name: { inherit name; value = evalNode name (configsFor name); }) nodeNames);
|
||||
toplevel = lib.mapAttrs (_: v: v.config.system.build.toplevel) nodes;
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
|
||||
outputs = { self, hive }: {
|
||||
processFlake = let
|
||||
compatibleSchema = "v0.20241006";
|
||||
compatibleSchema = "v0";
|
||||
|
||||
# Evaluates a raw hive.
|
||||
#
|
||||
|
|
|
@ -8,7 +8,6 @@ use std::convert::AsRef;
|
|||
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
|
||||
use const_format::formatcp;
|
||||
use tokio::process::Command;
|
||||
use tokio::sync::OnceCell;
|
||||
use validator::Validate;
|
||||
|
@ -23,21 +22,6 @@ use crate::job::JobHandle;
|
|||
use crate::util::{CommandExecution, CommandExt};
|
||||
use assets::Assets;
|
||||
|
||||
/// The version of the Hive schema we are compatible with.
|
||||
///
|
||||
/// Currently we are tied to one specific version.
|
||||
const HIVE_SCHEMA: &str = "v0.20241006";
|
||||
|
||||
/// The snippet to be used for `nix eval --apply`.
|
||||
const FLAKE_APPLY_SNIPPET: &str = formatcp!(
|
||||
r#"with builtins; hive: assert (hive.__schema == "{}" || throw ''
|
||||
The colmenaHive output (schema ${{hive.__schema}}) isn't compatible with this version of Colmena.
|
||||
|
||||
Hint: Use the same version of Colmena as in the Flake input.
|
||||
''); "#,
|
||||
HIVE_SCHEMA
|
||||
);
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum HivePath {
|
||||
/// A Nix Flake.
|
||||
|
@ -79,33 +63,11 @@ impl FromStr for HivePath {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
pub enum EvaluationMethod {
|
||||
/// Use nix-instantiate and specify the entire Nix expression.
|
||||
///
|
||||
/// This is the default method.
|
||||
///
|
||||
/// For flakes, we use `builtins.getFlakes`. Pure evaluation no longer works
|
||||
/// with this method in Nix 2.21+.
|
||||
NixInstantiate,
|
||||
|
||||
/// Use `nix eval --apply` on top of a flake.
|
||||
///
|
||||
/// This can be activated with --experimental-flake-eval.
|
||||
///
|
||||
/// In this method, we can no longer pull in our bundled assets and
|
||||
/// the flake must expose a compatible `colmenaHive` output.
|
||||
DirectFlakeEval,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Hive {
|
||||
/// Path to the hive.
|
||||
path: HivePath,
|
||||
|
||||
/// Method to evaluate the hive with.
|
||||
evaluation_method: EvaluationMethod,
|
||||
|
||||
/// Path to the context directory.
|
||||
///
|
||||
/// Normally this is directory containing the "hive.nix"
|
||||
|
@ -174,7 +136,6 @@ impl Hive {
|
|||
|
||||
Ok(Self {
|
||||
path,
|
||||
evaluation_method: EvaluationMethod::NixInstantiate,
|
||||
context_dir,
|
||||
assets,
|
||||
show_trace: false,
|
||||
|
@ -200,14 +161,6 @@ impl Hive {
|
|||
.await
|
||||
}
|
||||
|
||||
pub fn set_evaluation_method(&mut self, method: EvaluationMethod) {
|
||||
if !self.is_flake() && method == EvaluationMethod::DirectFlakeEval {
|
||||
return;
|
||||
}
|
||||
|
||||
self.evaluation_method = method;
|
||||
}
|
||||
|
||||
pub async fn get_registry_config(&self) -> ColmenaResult<&RegistryConfig> {
|
||||
self.registry_config
|
||||
.get_or_try_init(|| async {
|
||||
|
@ -503,10 +456,7 @@ impl Hive {
|
|||
|
||||
/// Returns the base expression from which the evaluated Hive can be used.
|
||||
fn get_base_expression(&self) -> String {
|
||||
match self.evaluation_method {
|
||||
EvaluationMethod::NixInstantiate => self.assets.get_base_expression(),
|
||||
EvaluationMethod::DirectFlakeEval => FLAKE_APPLY_SNIPPET.to_string(),
|
||||
}
|
||||
self.assets.get_base_expression()
|
||||
}
|
||||
|
||||
/// Returns whether this Hive is a flake.
|
||||
|
@ -529,11 +479,6 @@ impl<'hive> NixInstantiate<'hive> {
|
|||
}
|
||||
|
||||
fn instantiate(&self) -> Command {
|
||||
// TODO: Better error handling
|
||||
if self.hive.evaluation_method == EvaluationMethod::DirectFlakeEval {
|
||||
panic!("Instantiation is not supported with DirectFlakeEval");
|
||||
}
|
||||
|
||||
let mut command = Command::new("nix-instantiate");
|
||||
|
||||
if self.hive.is_flake() {
|
||||
|
@ -552,48 +497,17 @@ impl<'hive> NixInstantiate<'hive> {
|
|||
}
|
||||
|
||||
fn eval(self) -> Command {
|
||||
let mut command = self.instantiate();
|
||||
let flags = self.hive.nix_flags();
|
||||
|
||||
match self.hive.evaluation_method {
|
||||
EvaluationMethod::NixInstantiate => {
|
||||
let mut command = self.instantiate();
|
||||
|
||||
command
|
||||
.arg("--eval")
|
||||
.arg("--json")
|
||||
.arg("--strict")
|
||||
// Ensures the derivations are instantiated
|
||||
// Required for system profile evaluation and IFD
|
||||
.arg("--read-write-mode")
|
||||
.args(flags.to_args());
|
||||
|
||||
command
|
||||
}
|
||||
EvaluationMethod::DirectFlakeEval => {
|
||||
let mut command = Command::new("nix");
|
||||
let flake = if let HivePath::Flake(flake) = self.hive.path() {
|
||||
flake
|
||||
} else {
|
||||
panic!("The DirectFlakeEval evaluation method only support flakes");
|
||||
};
|
||||
|
||||
let hive_installable = format!("{}#colmenaHive", flake.uri());
|
||||
|
||||
let mut full_expression = self.hive.get_base_expression();
|
||||
full_expression += &self.expression;
|
||||
|
||||
command
|
||||
.arg("eval") // nix eval
|
||||
.args(["--extra-experimental-features", "flakes nix-command"])
|
||||
.arg(hive_installable)
|
||||
.arg("--json")
|
||||
.arg("--apply")
|
||||
.arg(&full_expression)
|
||||
.args(flags.to_args());
|
||||
|
||||
command
|
||||
}
|
||||
}
|
||||
command
|
||||
.arg("--eval")
|
||||
.arg("--json")
|
||||
.arg("--strict")
|
||||
// Ensures the derivations are instantiated
|
||||
// Required for system profile evaluation and IFD
|
||||
.arg("--read-write-mode")
|
||||
.args(flags.to_args());
|
||||
command
|
||||
}
|
||||
|
||||
async fn instantiate_with_builders(self) -> ColmenaResult<Command> {
|
||||
|
|
|
@ -1,17 +1,18 @@
|
|||
with builtins; rec {
|
||||
keyType = { lib, name, config, ... }: let
|
||||
inherit (lib) types;
|
||||
mdDoc = lib.mdDoc or (md: md);
|
||||
in {
|
||||
options = {
|
||||
name = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
File name of the key.
|
||||
'';
|
||||
default = name;
|
||||
type = types.str;
|
||||
};
|
||||
text = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Content of the key.
|
||||
One of `text`, `keyCommand` and `keyFile` must be set.
|
||||
'';
|
||||
|
@ -19,7 +20,7 @@ with builtins; rec {
|
|||
type = types.nullOr types.str;
|
||||
};
|
||||
keyFile = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Path of the local file to read the key from.
|
||||
One of `text`, `keyCommand` and `keyFile` must be set.
|
||||
'';
|
||||
|
@ -28,7 +29,7 @@ with builtins; rec {
|
|||
type = types.nullOr types.path;
|
||||
};
|
||||
keyCommand = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Command to run to generate the key.
|
||||
One of `text`, `keyCommand` and `keyFile` must be set.
|
||||
'';
|
||||
|
@ -38,14 +39,14 @@ with builtins; rec {
|
|||
in types.nullOr nonEmptyList;
|
||||
};
|
||||
destDir = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Destination directory on the host.
|
||||
'';
|
||||
default = "/run/keys";
|
||||
type = types.path;
|
||||
};
|
||||
path = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Full path to the destination.
|
||||
'';
|
||||
default = "${config.destDir}/${config.name}";
|
||||
|
@ -53,28 +54,28 @@ with builtins; rec {
|
|||
internal = true;
|
||||
};
|
||||
user = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
The user that will own the file.
|
||||
'';
|
||||
default = "root";
|
||||
type = types.str;
|
||||
};
|
||||
group = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
The group that will own the file.
|
||||
'';
|
||||
default = "root";
|
||||
type = types.str;
|
||||
};
|
||||
permissions = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Permissions to set for the file.
|
||||
'';
|
||||
default = "0600";
|
||||
type = types.str;
|
||||
};
|
||||
uploadAt = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
When to upload the keys.
|
||||
|
||||
- pre-activation (default): Upload the keys before activating the new system profile.
|
||||
|
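As a reader aid (not part of the diff), here is a hedged sketch of a node secret using the key options documented in this hunk; the key name and command are hypothetical:

```nix
# Hypothetical key using the options described above.
deployment.keys."wireguard.key" = {
  # One of text, keyCommand and keyFile must be set.
  keyCommand = [ "pass" "show" "wireguard-key" ];

  destDir = "/run/keys";        # default
  user = "root";                # owner of the uploaded file
  group = "root";
  permissions = "0600";         # default
  uploadAt = "pre-activation";  # default, per the description above
};
```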
@ -93,6 +94,7 @@ with builtins; rec {
|
|||
# Largely compatible with NixOps/Morph.
|
||||
deploymentOptions = { name, lib, ... }: let
|
||||
inherit (lib) types;
|
||||
mdDoc = lib.mdDoc or (md: md);
|
||||
in {
|
||||
options = {
|
||||
deployment = {
|
||||
|
@ -105,7 +107,7 @@ with builtins; rec {
|
|||
type = types.str;
|
||||
};
|
||||
targetHost = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
The target SSH node for deployment.
|
||||
|
||||
By default, the node's attribute name will be used.
|
||||
|
@ -115,7 +117,7 @@ with builtins; rec {
|
|||
default = name;
|
||||
};
|
||||
targetPort = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
The target SSH port for deployment.
|
||||
|
||||
By default, the port is the standard port (22) or taken
|
||||
|
@ -125,7 +127,7 @@ with builtins; rec {
|
|||
default = null;
|
||||
};
|
||||
targetUser = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
The user to use to log into the remote node. If set to null, the
|
||||
target user will not be specified in SSH invocations.
|
||||
'';
|
||||
|
@ -133,7 +135,7 @@ with builtins; rec {
|
|||
default = "root";
|
||||
};
|
||||
allowLocalDeployment = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Allow the configuration to be applied locally on the host running
|
||||
Colmena.
|
||||
|
||||
|
@ -150,7 +152,7 @@ with builtins; rec {
|
|||
default = false;
|
||||
};
|
||||
buildOnTarget = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Whether to build the system profiles on the target node itself.
|
||||
|
||||
When enabled, Colmena will copy the derivation to the target
|
||||
|
@ -170,7 +172,7 @@ with builtins; rec {
|
|||
default = false;
|
||||
};
|
||||
tags = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
A list of tags for the node.
|
||||
|
||||
Can be used to select a group of nodes for deployment.
|
||||
|
@ -179,7 +181,7 @@ with builtins; rec {
|
|||
default = [];
|
||||
};
|
||||
keys = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
A set of secrets to be deployed to the node.
|
||||
|
||||
Secrets are transferred to the node out-of-band and
|
||||
|
@ -189,7 +191,7 @@ with builtins; rec {
|
|||
default = {};
|
||||
};
|
||||
replaceUnknownProfiles = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Allow a configuration to be applied to a host running a profile we
|
||||
have no knowledge of. By setting this option to false, you reduce
|
||||
the likelihood of rolling back changes made via another Colmena user.
|
||||
|
@ -205,7 +207,7 @@ with builtins; rec {
|
|||
default = true;
|
||||
};
|
||||
privilegeEscalationCommand = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Command to use to elevate privileges when activating the new profiles on SSH hosts.
|
||||
|
||||
This is used on SSH hosts when `deployment.targetUser` is not `root`.
|
||||
|
@ -215,7 +217,7 @@ with builtins; rec {
|
|||
default = [ "sudo" "-H" "--" ];
|
||||
};
|
||||
sshOptions = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Extra SSH options to pass to the SSH command.
|
||||
'';
|
||||
type = types.listOf types.str;
|
||||
|
@ -273,28 +275,29 @@ with builtins; rec {
|
|||
# Hive-wide options
|
||||
metaOptions = { lib, ... }: let
|
||||
inherit (lib) types;
|
||||
mdDoc = lib.mdDoc or (md: md);
|
||||
in {
|
||||
options = {
|
||||
name = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
The name of the configuration.
|
||||
'';
|
||||
type = types.str;
|
||||
default = "hive";
|
||||
};
|
||||
description = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
A short description for the configuration.
|
||||
'';
|
||||
type = types.str;
|
||||
default = "A Colmena Hive";
|
||||
};
|
||||
nixpkgs = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
The pinned Nixpkgs package set. Accepts one of the following:
|
||||
|
||||
- A path to a Nixpkgs checkout
|
||||
- The Nixpkgs lambda (e.g., import <nixpkgs>)
|
||||
- The Nixpkgs lambda (e.g., import \<nixpkgs\>)
|
||||
- An initialized Nixpkgs attribute set
|
||||
|
||||
This option must be specified when using Flakes.
|
||||
|
@ -303,21 +306,21 @@ with builtins; rec {
|
|||
default = null;
|
||||
};
|
||||
nodeNixpkgs = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Node-specific Nixpkgs pins.
|
||||
'';
|
||||
type = types.attrsOf types.unspecified;
|
||||
default = {};
|
||||
};
|
||||
nodeSpecialArgs = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Node-specific special args.
|
||||
'';
|
||||
type = types.attrsOf types.unspecified;
|
||||
default = {};
|
||||
};
|
||||
machinesFile = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Use the machines listed in this file when building this hive configuration.
|
||||
|
||||
If your Colmena host has nix configured to allow for remote builds
|
||||
|
@ -341,7 +344,7 @@ with builtins; rec {
|
|||
type = types.nullOr types.path;
|
||||
};
|
||||
specialArgs = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
A set of special arguments to be passed to NixOS modules.
|
||||
|
||||
This will be merged into the `specialArgs` used to evaluate
|
||||
|
@ -351,7 +354,7 @@ with builtins; rec {
|
|||
type = types.attrsOf types.unspecified;
|
||||
};
|
||||
allowApplyAll = lib.mkOption {
|
||||
description = ''
|
||||
description = mdDoc ''
|
||||
Whether to allow deployments without a node filter set.
|
||||
|
||||
If set to false, a node filter must be specified with `--on` when
|
||||
|
|
|
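And a hedged sketch (not part of the diff) of the hive-wide meta options from the hunks above; the values shown are illustrative only:

```nix
# Hypothetical hive-wide settings using the meta options documented above.
meta = {
  name = "hive";                    # default
  description = "A Colmena Hive";   # default
  nixpkgs = import <nixpkgs> { };   # must be specified when using flakes
  nodeNixpkgs = { };                # per-node Nixpkgs pins
  nodeSpecialArgs = { };            # per-node special args
  allowApplyAll = false;            # when false, a node filter must be given with --on
};
```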
@ -87,14 +87,14 @@ pub struct Key {
|
|||
#[serde(flatten)]
|
||||
source: KeySource,
|
||||
|
||||
#[validate(custom(function = "validate_dest_dir"))]
|
||||
#[validate(custom = "validate_dest_dir")]
|
||||
#[serde(rename = "destDir")]
|
||||
dest_dir: PathBuf,
|
||||
|
||||
#[validate(custom(function = "validate_unix_name"))]
|
||||
#[validate(custom = "validate_unix_name")]
|
||||
user: String,
|
||||
|
||||
#[validate(custom(function = "validate_unix_name"))]
|
||||
#[validate(custom = "validate_unix_name")]
|
||||
group: String,
|
||||
|
||||
permissions: String,
|
||||
|
|
|
@ -84,7 +84,7 @@ pub struct NodeConfig {
|
|||
#[serde(rename = "sshOptions")]
|
||||
extra_ssh_options: Vec<String>,
|
||||
|
||||
#[validate(custom(function = "validate_keys"))]
|
||||
#[validate(custom = "validate_keys")]
|
||||
keys: HashMap<String, Key>,
|
||||
}
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
use std::collections::HashSet;
|
||||
use std::convert::AsRef;
|
||||
use std::iter::Iterator;
|
||||
use std::iter::{FromIterator, Iterator};
|
||||
use std::str::FromStr;
|
||||
|
||||
use clap::Args;
|
||||
|
@ -28,26 +28,22 @@ The list is comma-separated and globs are supported. To match tags, prepend the
|
|||
pub on: Option<NodeFilter>,
|
||||
}
|
||||
|
||||
/// A node filter containing a list of rules.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct NodeFilter {
|
||||
rules: Vec<Rule>,
|
||||
}
|
||||
|
||||
/// A filter rule.
|
||||
///
|
||||
/// The filter rules are OR'd together.
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub enum NodeFilter {
|
||||
enum Rule {
|
||||
/// Matches a node's attribute name.
|
||||
MatchName(GlobPattern),
|
||||
|
||||
/// Matches a node's `deployment.tags`.
|
||||
MatchTag(GlobPattern),
|
||||
|
||||
/// Matches an Union
|
||||
Union(Vec<Box<NodeFilter>>),
|
||||
|
||||
/// Matches an Intersection
|
||||
Inter(Vec<Box<NodeFilter>>),
|
||||
|
||||
/// Matches the complementary
|
||||
Not(Box<NodeFilter>),
|
||||
|
||||
/// Empty
|
||||
Empty,
|
||||
}
|
||||
|
||||
impl FromStr for NodeFilter {
|
||||
|
@ -57,169 +53,7 @@ impl FromStr for NodeFilter {
|
|||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn end_delimiter(c: char) -> bool {
|
||||
[',', '&', ')'].contains(&c)
|
||||
}
|
||||
|
||||
impl NodeFilter {
|
||||
fn and(a: Self, b: Self) -> Self {
|
||||
match (a, b) {
|
||||
(Self::Inter(mut av), Self::Inter(mut bv)) => {
|
||||
av.append(&mut bv);
|
||||
Self::Inter(av)
|
||||
}
|
||||
(Self::Inter(mut av), b) => {
|
||||
av.push(Box::new(b));
|
||||
Self::Inter(av)
|
||||
}
|
||||
(a, Self::Inter(mut bv)) => {
|
||||
bv.push(Box::new(a));
|
||||
Self::Inter(bv)
|
||||
}
|
||||
(a, b) => Self::Inter(vec![Box::new(a), Box::new(b)]),
|
||||
}
|
||||
}
|
||||
|
||||
fn or(a: Self, b: Self) -> Self {
|
||||
match (a, b) {
|
||||
(Self::Union(mut av), Self::Union(mut bv)) => {
|
||||
av.append(&mut bv);
|
||||
Self::Union(av)
|
||||
}
|
||||
(Self::Union(mut av), b) => {
|
||||
av.push(Box::new(b));
|
||||
Self::Union(av)
|
||||
}
|
||||
(a, Self::Union(mut bv)) => {
|
||||
bv.push(Box::new(a));
|
||||
Self::Union(bv)
|
||||
}
|
||||
(a, b) => Self::Union(vec![Box::new(a), Box::new(b)]),
|
||||
}
|
||||
}
|
||||
|
||||
fn not(a: Self) -> Self {
|
||||
if let Self::Not(ae) = a {
|
||||
*ae
|
||||
} else {
|
||||
Self::Not(Box::new(a))
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses an elementary expression,
|
||||
/// that is base tags and name, with expression between parentheses
|
||||
/// Negations are also parsed here as the most prioritary operation
|
||||
///
|
||||
/// It returns the unparsed text that follows
|
||||
fn parse_expr0(unparsed: &str) -> ColmenaResult<(Self, &str)> {
|
||||
let unparsed = unparsed.trim_start();
|
||||
// Negation
|
||||
if let Some(negated_expr) = unparsed.strip_prefix('!') {
|
||||
let (negated, unparsed) = Self::parse_expr0(negated_expr)?;
|
||||
Ok((Self::not(negated), unparsed))
|
||||
} else
|
||||
// parentheses
|
||||
if let Some(parenthesed_expr) = unparsed.strip_prefix('(') {
|
||||
let (interior, unparsed) = Self::parse_expr2(parenthesed_expr)?;
|
||||
Ok((
|
||||
interior,
|
||||
unparsed.strip_prefix(')').ok_or(ColmenaError::Unknown {
|
||||
message: format!("Expected a closing parenthesis at {:?}.", unparsed),
|
||||
})?,
|
||||
))
|
||||
} else
|
||||
// tag
|
||||
if let Some(tag_expr) = unparsed.strip_prefix('@') {
|
||||
match tag_expr
|
||||
.find(end_delimiter)
|
||||
.map(|idx| tag_expr.split_at(idx))
|
||||
.map(|(tag, end)| (tag.trim_end(), end))
|
||||
{
|
||||
Some((tag, unparsed)) => {
|
||||
if tag.is_empty() {
|
||||
return Err(ColmenaError::EmptyFilterRule);
|
||||
} else {
|
||||
Ok((Self::MatchTag(GlobPattern::new(tag).unwrap()), unparsed))
|
||||
}
|
||||
}
|
||||
None => {
|
||||
let tag_expr = tag_expr.trim_end();
|
||||
if tag_expr.is_empty() {
|
||||
Err(ColmenaError::EmptyFilterRule)
|
||||
} else {
|
||||
Ok((Self::MatchTag(GlobPattern::new(tag_expr).unwrap()), ""))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else
|
||||
//node name
|
||||
{
|
||||
match unparsed
|
||||
.find(end_delimiter)
|
||||
.map(|idx| unparsed.split_at(idx))
|
||||
.map(|(tag, end)| (tag.trim_end(), end))
|
||||
{
|
||||
Some((name, unparsed)) => {
|
||||
if name.is_empty() {
|
||||
Err(ColmenaError::EmptyFilterRule)
|
||||
} else {
|
||||
Ok((Self::MatchName(GlobPattern::new(name).unwrap()), unparsed))
|
||||
}
|
||||
}
|
||||
None => {
|
||||
let unparsed = unparsed.trim_end();
|
||||
if unparsed.is_empty() {
|
||||
Err(ColmenaError::EmptyFilterRule)
|
||||
} else {
|
||||
Ok((Self::MatchName(GlobPattern::new(unparsed).unwrap()), ""))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses the union operations between elementary expression.
|
||||
///
|
||||
/// It returns the unparsed text that follows
|
||||
fn parse_op1(acc: Self, unparsed: &str) -> ColmenaResult<(Self, &str)> {
|
||||
let unparsed = unparsed.trim_start();
|
||||
if let Some(unions) = unparsed.strip_prefix(',') {
|
||||
let (base_expr, unparsed) = Self::parse_expr0(unions)?;
|
||||
Self::parse_op1(Self::or(acc, base_expr), unparsed)
|
||||
} else {
|
||||
Ok((acc, unparsed))
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses elementary expression and their unions.
|
||||
///
|
||||
/// It returns the unparsed text that follows
|
||||
fn parse_expr1(unparsed: &str) -> ColmenaResult<(Self, &str)> {
|
||||
let (base_expr, unparsed) = Self::parse_expr0(unparsed)?;
|
||||
Self::parse_op1(base_expr, unparsed)
|
||||
}
|
||||
|
||||
/// Parses the intersection operations between unions.
|
||||
///
|
||||
/// It returns the unparsed text that follows
|
||||
fn parse_op2(acc: Self, unparsed: &str) -> ColmenaResult<(Self, &str)> {
|
||||
if let Some(intersections) = unparsed.strip_prefix('&') {
|
||||
let (union, unparsed) = Self::parse_expr1(intersections)?;
|
||||
Self::parse_op2(Self::and(acc, union), unparsed)
|
||||
} else {
|
||||
Ok((acc, unparsed))
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a complete expression
|
||||
///
|
||||
/// It returns the unparsed text that follows
|
||||
fn parse_expr2(unparsed: &str) -> ColmenaResult<(Self, &str)> {
|
||||
let (union, unparsed) = Self::parse_expr1(unparsed)?;
|
||||
Self::parse_op2(union, unparsed)
|
||||
}
|
||||
|
||||
/// Creates a new filter using an expression passed using `--on`.
|
||||
pub fn new<S: AsRef<str>>(filter: S) -> ColmenaResult<Self> {
|
||||
let filter = filter.as_ref();
|
||||
|
@ -228,16 +62,29 @@ impl NodeFilter {
|
|||
if trimmed.is_empty() {
|
||||
log::warn!("Filter \"{}\" is blank and will match nothing", filter);
|
||||
|
||||
return Ok(Self::Empty);
|
||||
return Ok(Self { rules: Vec::new() });
|
||||
}
|
||||
let (target_filter, unparsed) = Self::parse_expr2(trimmed)?;
|
||||
if unparsed != "" {
|
||||
Err(ColmenaError::Unknown {
|
||||
message: format!("Found garbage {:?} when parsing the node filter.", unparsed),
|
||||
|
||||
let rules = trimmed
|
||||
.split(',')
|
||||
.map(|pattern| {
|
||||
let pattern = pattern.trim();
|
||||
|
||||
if pattern.is_empty() {
|
||||
return Err(ColmenaError::EmptyFilterRule);
|
||||
}
|
||||
|
||||
if let Some(tag_pattern) = pattern.strip_prefix('@') {
|
||||
Ok(Rule::MatchTag(GlobPattern::new(tag_pattern).unwrap()))
|
||||
} else {
|
||||
Ok(Rule::MatchName(GlobPattern::new(pattern).unwrap()))
|
||||
}
|
||||
})
|
||||
} else {
|
||||
Ok(target_filter)
|
||||
}
|
||||
.collect::<Vec<ColmenaResult<Rule>>>();
|
||||
|
||||
let rules = Result::from_iter(rules)?;
|
||||
|
||||
Ok(Self { rules })
|
||||
}
|
||||
|
||||
/// Returns whether the filter has any rule matching NodeConfig information.
|
||||
|
@ -246,31 +93,7 @@ impl NodeFilter {
|
|||
/// especially when its values (e.g., tags) depend on other parts of
|
||||
/// the configuration.
|
||||
pub fn has_node_config_rules(&self) -> bool {
|
||||
match self {
|
||||
Self::MatchName(_) => false,
|
||||
Self::MatchTag(_) => true,
|
||||
Self::Union(v) => v.iter().any(|e| e.has_node_config_rules()),
|
||||
Self::Inter(v) => v.iter().any(|e| e.has_node_config_rules()),
|
||||
Self::Not(e) => e.has_node_config_rules(),
|
||||
Self::Empty => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Decides whether a node is accepted by the filter or not.
|
||||
/// panic if the filter depends on tags and config is None
|
||||
fn is_accepted(&self, name: &NodeName, config: Option<&NodeConfig>) -> bool {
|
||||
match self {
|
||||
Self::MatchName(pat) => pat.matches(name.as_str()),
|
||||
Self::MatchTag(pat) => config
|
||||
.unwrap()
|
||||
.tags()
|
||||
.iter()
|
||||
.any(|tag| pat.matches(tag.as_str())),
|
||||
Self::Union(v) => v.iter().any(|e| e.is_accepted(name, config)),
|
||||
Self::Inter(v) => v.iter().all(|e| e.is_accepted(name, config)),
|
||||
Self::Not(e) => !e.is_accepted(name, config),
|
||||
Self::Empty => false,
|
||||
}
|
||||
self.rules.iter().any(|rule| rule.matches_node_config())
|
||||
}
|
||||
|
||||
/// Runs the filter against a set of NodeConfigs and returns the matched ones.
|
||||
|
@ -278,17 +101,30 @@ impl NodeFilter {
|
|||
where
|
||||
I: Iterator<Item = (&'a NodeName, &'a NodeConfig)>,
|
||||
{
|
||||
if self == &Self::Empty {
|
||||
if self.rules.is_empty() {
|
||||
return HashSet::new();
|
||||
}
|
||||
|
||||
nodes
|
||||
.filter_map(|(name, node)| {
|
||||
if self.is_accepted(name, Some(node)) {
|
||||
Some(name)
|
||||
} else {
|
||||
None
|
||||
for rule in self.rules.iter() {
|
||||
match rule {
|
||||
Rule::MatchName(pat) => {
|
||||
if pat.matches(name.as_str()) {
|
||||
return Some(name);
|
||||
}
|
||||
}
|
||||
Rule::MatchTag(pat) => {
|
||||
for tag in node.tags() {
|
||||
if pat.matches(tag) {
|
||||
return Some(name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
})
|
||||
.cloned()
|
||||
.collect()
|
||||
|
@ -296,24 +132,32 @@ impl NodeFilter {
|
|||
|
||||
/// Runs the filter against a set of node names and returns the matched ones.
|
||||
pub fn filter_node_names(&self, nodes: &[NodeName]) -> ColmenaResult<HashSet<NodeName>> {
|
||||
if self.has_node_config_rules() {
|
||||
Err(ColmenaError::Unknown {
|
||||
message: format!(
|
||||
"Not enough information to run rule {:?} - We only have node names",
|
||||
self
|
||||
),
|
||||
})
|
||||
} else {
|
||||
Ok(nodes
|
||||
.iter()
|
||||
.filter_map(|name| {
|
||||
if self.is_accepted(name, None) {
|
||||
Some(name.clone())
|
||||
} else {
|
||||
None
|
||||
nodes.iter().filter_map(|name| -> Option<ColmenaResult<NodeName>> {
|
||||
for rule in self.rules.iter() {
|
||||
match rule {
|
||||
Rule::MatchName(pat) => {
|
||||
if pat.matches(name.as_str()) {
|
||||
return Some(Ok(name.clone()));
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect())
|
||||
_ => {
|
||||
return Some(Err(ColmenaError::Unknown {
|
||||
message: format!("Not enough information to run rule {:?} - We only have node names", rule),
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}).collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl Rule {
|
||||
/// Returns whether the rule matches against the NodeConfig (i.e., `config.deployment`).
|
||||
pub fn matches_node_config(&self) -> bool {
|
||||
match self {
|
||||
Self::MatchTag(_) => true,
|
||||
Self::MatchName(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -333,13 +177,13 @@ mod tests {
|
|||
#[test]
|
||||
fn test_empty_filter() {
|
||||
let filter = NodeFilter::new("").unwrap();
|
||||
assert_eq!(NodeFilter::Empty, filter);
|
||||
assert_eq!(0, filter.rules.len());
|
||||
|
||||
let filter = NodeFilter::new("\t").unwrap();
|
||||
assert_eq!(NodeFilter::Empty, filter);
|
||||
assert_eq!(0, filter.rules.len());
|
||||
|
||||
let filter = NodeFilter::new(" ").unwrap();
|
||||
assert_eq!(NodeFilter::Empty, filter);
|
||||
assert_eq!(0, filter.rules.len());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -353,73 +197,21 @@ mod tests {
|
|||
fn test_filter_rule_mixed() {
|
||||
let filter = NodeFilter::new("@router,gamma-*").unwrap();
|
||||
assert_eq!(
|
||||
NodeFilter::Union(vec![
|
||||
Box::new(NodeFilter::MatchTag(GlobPattern::new("router").unwrap())),
|
||||
Box::new(NodeFilter::MatchName(GlobPattern::new("gamma-*").unwrap())),
|
||||
]),
|
||||
filter,
|
||||
vec![
|
||||
Rule::MatchTag(GlobPattern::new("router").unwrap()),
|
||||
Rule::MatchName(GlobPattern::new("gamma-*").unwrap()),
|
||||
],
|
||||
filter.rules,
|
||||
);
|
||||
|
||||
let filter = NodeFilter::new("a, \t@b , c-*").unwrap();
|
||||
assert_eq!(
|
||||
NodeFilter::Union(vec![
|
||||
Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
|
||||
Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
|
||||
Box::new(NodeFilter::MatchName(GlobPattern::new("c-*").unwrap())),
|
||||
]),
|
||||
filter,
|
||||
);
|
||||
|
||||
let filter = NodeFilter::new("a & \t@b , c-*").unwrap();
|
||||
assert_eq!(
|
||||
NodeFilter::Inter(vec![
|
||||
Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
|
||||
Box::new(NodeFilter::Union(vec![
|
||||
Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
|
||||
Box::new(NodeFilter::MatchName(GlobPattern::new("c-*").unwrap())),
|
||||
])),
|
||||
]),
|
||||
filter,
|
||||
);
|
||||
|
||||
let filter = NodeFilter::new("( a & \t@b ) , c-*").unwrap();
|
||||
assert_eq!(
|
||||
NodeFilter::Union(vec![
|
||||
Box::new(NodeFilter::Inter(vec![
|
||||
Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
|
||||
Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
|
||||
])),
|
||||
Box::new(NodeFilter::MatchName(GlobPattern::new("c-*").unwrap())),
|
||||
]),
|
||||
filter,
|
||||
);
|
||||
|
||||
let filter = NodeFilter::new("( a & \t@b ) , ! c-*").unwrap();
|
||||
assert_eq!(
|
||||
NodeFilter::Union(vec![
|
||||
Box::new(NodeFilter::Inter(vec![
|
||||
Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
|
||||
Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
|
||||
])),
|
||||
Box::new(NodeFilter::Not(Box::new(NodeFilter::MatchName(
|
||||
GlobPattern::new("c-*").unwrap()
|
||||
)))),
|
||||
]),
|
||||
filter,
|
||||
);
|
||||
|
||||
let filter = NodeFilter::new("( a & \t@b ) , !!! c-*").unwrap();
|
||||
assert_eq!(
|
||||
NodeFilter::Union(vec![
|
||||
Box::new(NodeFilter::Inter(vec![
|
||||
Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
|
||||
Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
|
||||
])),
|
||||
Box::new(NodeFilter::Not(Box::new(NodeFilter::MatchName(
|
||||
GlobPattern::new("c-*").unwrap()
|
||||
)))),
|
||||
]),
|
||||
filter,
|
||||
vec![
|
||||
Rule::MatchName(GlobPattern::new("a").unwrap()),
|
||||
Rule::MatchTag(GlobPattern::new("b").unwrap()),
|
||||
Rule::MatchName(GlobPattern::new("c-*").unwrap()),
|
||||
],
|
||||
filter.rules,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -458,7 +250,6 @@ mod tests {
|
|||
privilege_escalation_command: vec![],
|
||||
extra_ssh_options: vec![],
|
||||
keys: HashMap::new(),
|
||||
system_type: None,
|
||||
};
|
||||
|
||||
let mut nodes = HashMap::new();
|
||||
|
@ -524,26 +315,5 @@ mod tests {
|
|||
.unwrap()
|
||||
.filter_node_configs(nodes.iter()),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
&HashSet::from_iter([]),
|
||||
&NodeFilter::new("@router&@controller")
|
||||
.unwrap()
|
||||
.filter_node_configs(nodes.iter()),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
&HashSet::from_iter([node!("beta")]),
|
||||
&NodeFilter::new("@router&@infra-*")
|
||||
.unwrap()
|
||||
.filter_node_configs(nodes.iter()),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
&HashSet::from_iter([node!("alpha")]),
|
||||
&NodeFilter::new("!@router&@infra-*")
|
||||
.unwrap()
|
||||
.filter_node_configs(nodes.iter()),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,8 +7,6 @@
|
|||
pub mod plain;
|
||||
pub mod spinner;
|
||||
|
||||
use std::io::IsTerminal;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use tokio::sync::mpsc::{self, UnboundedReceiver as TokioReceiver, UnboundedSender as TokioSender};
|
||||
|
||||
|
@ -92,7 +90,7 @@ pub enum LineStyle {
|
|||
|
||||
impl SimpleProgressOutput {
|
||||
pub fn new(verbose: bool) -> Self {
|
||||
let tty = std::io::stdout().is_terminal();
|
||||
let tty = atty::is(atty::Stream::Stdout);
|
||||
|
||||
if verbose || !tty {
|
||||
Self::Plain(PlainOutput::new())
|
||||
|
|