Compare commits: main...nix-define
3 commits: eee64b7aec, 470a0c360a, 76865b3293

33 changed files with 734 additions and 1473 deletions
@@ -10,7 +10,8 @@ insert_final_newline = true
 trim_trailing_whitespace = true
 charset = utf-8

-[*.json]
+# Rust
+[*.rs]
 indent_style = space
 indent_size = 2
.github/workflows/build.yml (vendored, 73 lines changed)

@@ -13,23 +13,19 @@ jobs:
   image: ubuntu-latest
   system: aarch64-linux
 - label: x86_64-darwin
-  image: macos-latest
-  system: x86_64-darwin
-- label: aarch64-darwin
-  image: macos-latest
-  system: aarch64-darwin
+  image: macos-12

 name: ${{ matrix.label }}
 runs-on: ${{ matrix.image }}
 steps:
-  - uses: actions/checkout@v4.2.2
+  - uses: actions/checkout@v3.3.0

   - name: Install Nix
-    uses: DeterminateSystems/nix-installer-action@b92f66560d6f97d6576405a7bae901ab57e72b6a # v15
+    uses: DeterminateSystems/nix-installer-action@9b252454a8d70586c4ee7f163bf4bb1e9de3d763 # v2

   - name: Set up QEMU
-    uses: docker/setup-qemu-action@v3.2.0
-    if: matrix.system == 'aarch64-linux'
+    uses: docker/setup-qemu-action@v2.1.0
+    if: matrix.system != ''

   - name: Generate System Flags
     run: |

@@ -43,7 +39,7 @@ jobs:
       HOST_SYSTEM: '${{ matrix.system }}'

   - name: Enable Binary Cache
-    uses: cachix/cachix-action@v15
+    uses: cachix/cachix-action@v12
     with:
       name: colmena
       authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'

@@ -53,60 +49,3 @@ jobs:
   - name: Build manual
     run: nix build .#manual -L

-nix-matrix:
-  runs-on: ubuntu-latest
-  outputs:
-    matrix: ${{ steps.set-matrix.outputs.matrix }}
-  steps:
-    - uses: actions/checkout@v4.2.2
-    - uses: DeterminateSystems/nix-installer-action@v15
-      continue-on-error: true # Self-hosted runners already have Nix installed
-    - name: Enable Binary Cache
-      uses: cachix/cachix-action@v15
-      with:
-        name: colmena
-        authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
-    - id: set-matrix
-      name: Generate Nix Matrix
-      run: |
-        set -Eeu
-        matrix="$(nix eval --json '.#githubActions.matrix')"
-        echo "matrix=$matrix" >> "$GITHUB_OUTPUT"
-nix-matrix-job:
-  name: ${{ matrix.name }}
-  runs-on: ${{ matrix.os }}
-  needs:
-    - build
-    - nix-matrix
-  strategy:
-    matrix: ${{ fromJSON(needs.nix-matrix.outputs.matrix) }}
-  steps:
-    - name: Maximize build space
-      uses: easimon/maximize-build-space@master
-      with:
-        remove-dotnet: 'true'
-        build-mount-path: /nix
-    - name: Set /nix permissions
-      run: |
-        sudo chown root:root /nix
-    - uses: actions/checkout@v4.2.2
-    - uses: DeterminateSystems/nix-installer-action@v15
-      continue-on-error: true # Self-hosted runners already have Nix installed
-    - name: Enable Binary Cache
-      uses: cachix/cachix-action@v15
-      with:
-        name: colmena
-        authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
-    - name: Build ${{ matrix.attr }}
-      run: |
-        nix build --no-link --print-out-paths -L '.#${{ matrix.attr }}'
.github/workflows/linters.yml (vendored, 6 lines changed)

@@ -10,13 +10,13 @@ jobs:
 runs-on: ubuntu-latest

 steps:
-  - uses: actions/checkout@v4.2.2
+  - uses: actions/checkout@v3.3.0

   - name: Install Nix
-    uses: DeterminateSystems/nix-installer-action@b92f66560d6f97d6576405a7bae901ab57e72b6a # v15
+    uses: DeterminateSystems/nix-installer-action@9b252454a8d70586c4ee7f163bf4bb1e9de3d763 # v2

   - name: Enable binary cache
-    uses: cachix/cachix-action@v15
+    uses: cachix/cachix-action@v12
     with:
       name: colmena
       authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
.github/workflows/manual-stable.yml (vendored, 10 lines changed)

@@ -16,13 +16,13 @@ jobs:
 if: github.repository == 'zhaofengli/colmena'

 steps:
-  - uses: actions/checkout@v4.2.2
+  - uses: actions/checkout@v3.3.0

   - name: Install Nix
-    uses: DeterminateSystems/nix-installer-action@b92f66560d6f97d6576405a7bae901ab57e72b6a # v15
+    uses: DeterminateSystems/nix-installer-action@9b252454a8d70586c4ee7f163bf4bb1e9de3d763 # v2

   - name: Enable Binary Cache
-    uses: cachix/cachix-action@v15
+    uses: cachix/cachix-action@v12
     with:
       name: colmena
       authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'

@@ -38,7 +38,7 @@ jobs:
     run: nix build .#manual -L

   - name: Deploy manual
-    uses: JamesIves/github-pages-deploy-action@v4.6.9
+    uses: JamesIves/github-pages-deploy-action@4.1.6
     with:
       branch: gh-pages
       folder: result

@@ -52,7 +52,7 @@ jobs:
     if: ${{ env.api_version == env.latest_stable_api }}

   - name: Deploy redirect farm
-    uses: JamesIves/github-pages-deploy-action@v4.6.9
+    uses: JamesIves/github-pages-deploy-action@4.1.6
     with:
       branch: gh-pages
       folder: result-redirectFarm
.github/workflows/manual.yml (vendored, 10 lines changed)

@@ -16,13 +16,13 @@ jobs:
 if: github.repository == 'zhaofengli/colmena'

 steps:
-  - uses: actions/checkout@v4.2.2
+  - uses: actions/checkout@v3.3.0

   - name: Install Nix
-    uses: DeterminateSystems/nix-installer-action@b92f66560d6f97d6576405a7bae901ab57e72b6a # v15
+    uses: DeterminateSystems/nix-installer-action@9b252454a8d70586c4ee7f163bf4bb1e9de3d763 # v2

   - name: Enable Binary Cache
-    uses: cachix/cachix-action@v15
+    uses: cachix/cachix-action@v12
     with:
       name: colmena
       authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'

@@ -32,7 +32,7 @@ jobs:
     run: nix build .#manual -L

   - name: Deploy manual
-    uses: JamesIves/github-pages-deploy-action@v4.6.9
+    uses: JamesIves/github-pages-deploy-action@v4.3.4
     with:
       branch: gh-pages
       folder: result

@@ -47,7 +47,7 @@ jobs:
     run: nix build .#manual.redirectFarm -L

   - name: Deploy redirect farm
-    uses: JamesIves/github-pages-deploy-action@v4.6.9
+    uses: JamesIves/github-pages-deploy-action@4.1.6
     with:
       branch: gh-pages
       folder: result-redirectFarm
.github/workflows/tests.yml (vendored, 6 lines changed)

@@ -13,15 +13,15 @@ jobs:
 name: ${{ matrix.os.label }}
 runs-on: ${{ matrix.os.image }}
 steps:
-  - uses: actions/checkout@v4.2.2
+  - uses: actions/checkout@v3.3.0
     with:
       fetch-depth: 0

   - name: Install Nix
-    uses: DeterminateSystems/nix-installer-action@b92f66560d6f97d6576405a7bae901ab57e72b6a # v15
+    uses: DeterminateSystems/nix-installer-action@9b252454a8d70586c4ee7f163bf4bb1e9de3d763 # v2

   - name: Enable Binary Cache
-    uses: cachix/cachix-action@v15
+    uses: cachix/cachix-action@v12
     with:
       name: colmena
       authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
.gitignore (vendored, 1 line changed)

@@ -1,4 +1,3 @@
 result*
 /target
 /.direnv
-/.vscode
.srcignore (18 lines removed)

@@ -1,18 +0,0 @@
-# Exclusions from source distribution
-#
-# Files listed here will not be part of colmena.src
-
-/.github
-/CNAME
-/renovate.json
-
-/manual
-/integration-tests
-
-/nix
-/default.nix
-/flake-compat.nix
-/package.nix
-/shell.nix
-
-# vim: set ft=gitignore:
Cargo.lock (generated, 1227 lines changed; file diff suppressed because it is too large)
Cargo.toml (11 lines changed)

@@ -9,17 +9,18 @@ edition = "2021"
 [dependencies]
 async-stream = "0.3.5"
 async-trait = "0.1.68"
+atty = "0.2"
 clap = { version = "4.3", features = ["derive"] }
 clap_complete = "4.3"
 clicolors-control = "1"
 console = "0.15.5"
 const_format = "0.2.30"
-env_logger = "0.11.0"
+env_logger = "0.10.0"
 futures = "0.3.28"
 glob = "0.3.1"
-hostname = "0.4.0"
+hostname = "0.3.1"
 indicatif = "0.17.3"
-itertools = "0.13.0"
+itertools = "0.11.0"
 libc = "0.2.144"
 log = "0.4.17"
 quit = "2.0.0"

@@ -27,12 +28,12 @@ regex = "1"
 serde = { version = "1.0.163", features = ["derive"] }
 serde_json = "1.0"
 shell-escape = "0.1.5"
-snafu = { version = "0.8.0", features = ["backtrace", "backtraces-impl-backtrace-crate"] }
+snafu = { version = "0.7.4", features = ["backtrace", "backtraces-impl-backtrace-crate"] }
 sys-info = "0.9.1"
 tempfile = "3.5.0"
 tokio-stream = "0.1.14"
 uuid = { version = "1.3.2", features = ["serde", "v4"] }
-validator = { version = "0.19.0", features = ["derive"] }
+validator = { version = "0.16.0", features = ["derive"] }

 [dev-dependencies]
 ntest = "0.9.0"
@@ -3,7 +3,7 @@
 [![Matrix Channel](https://img.shields.io/badge/Matrix-%23colmena%3Anixos.org-blueviolet)](https://matrix.to/#/#colmena:nixos.org)
 [![Stable Manual](https://img.shields.io/badge/Manual-Stable-informational)](https://colmena.cli.rs/stable)
 [![Unstable Manual](https://img.shields.io/badge/Manual-Unstable-orange)](https://colmena.cli.rs/unstable)
-[![Build](https://github.com/zhaofengli/colmena/actions/workflows/build.yml/badge.svg)](https://github.com/zhaofengli/colmena/actions/workflows/build.yml)
+[![Build](https://github.com/zhaofengli/colmena/workflows/Build/badge.svg)](https://github.com/zhaofengli/colmena/actions/workflows/build.yml)

 Colmena is a simple, stateless [NixOS](https://nixos.org) deployment tool modeled after [NixOps](https://github.com/NixOS/nixops) and [morph](https://github.com/DBCDK/morph), written in Rust.
 It's a thin wrapper over Nix commands like `nix-instantiate` and `nix-copy-closure`, and supports parallel deployment.
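Since the lines above describe Colmena only at a high level, here is a minimal, illustrative sketch of the kind of hive configuration it deploys; the node name, address, and service are placeholders, and the `deployment` options used are the ones defined in the hive options module shown further down in this comparison.

```nix
{
  # Pin the package set used to evaluate every node.
  meta.nixpkgs = import <nixpkgs> {};

  # One attribute per node; "web-01" and its address are made-up examples.
  web-01 = { pkgs, ... }: {
    deployment = {
      targetHost = "web-01.example.org"; # defaults to the attribute name
      targetUser = "root";
      tags = [ "web" ];                  # lets a `--on @web` filter select this node
    };
    services.nginx.enable = true;
  };
}
```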
flake.lock (35 lines changed)

@@ -31,33 +31,13 @@
 "type": "github"
 }
 },
-"nix-github-actions": {
-  "inputs": {
-    "nixpkgs": [
-      "nixpkgs"
-    ]
-  },
-  "locked": {
-    "lastModified": 1729742964,
-    "narHash": "sha256-B4mzTcQ0FZHdpeWcpDYPERtyjJd/NIuaQ9+BV1h+MpA=",
-    "owner": "nix-community",
-    "repo": "nix-github-actions",
-    "rev": "e04df33f62cdcf93d73e9a04142464753a16db67",
-    "type": "github"
-  },
-  "original": {
-    "owner": "nix-community",
-    "repo": "nix-github-actions",
-    "type": "github"
-  }
-},
 "nixpkgs": {
   "locked": {
-    "lastModified": 1734649271,
-    "narHash": "sha256-4EVBRhOjMDuGtMaofAIqzJbg4Ql7Ai0PSeuVZTHjyKQ=",
+    "lastModified": 1696019113,
+    "narHash": "sha256-X3+DKYWJm93DRSdC5M6K5hLqzSya9BjibtBsuARoPco=",
     "owner": "NixOS",
     "repo": "nixpkgs",
-    "rev": "d70bd19e0a38ad4790d3913bf08fcbfc9eeca507",
+    "rev": "f5892ddac112a1e9b3612c39af1b72987ee5783a",
     "type": "github"
   },
   "original": {

@@ -71,23 +51,22 @@
 "inputs": {
   "flake-compat": "flake-compat",
   "flake-utils": "flake-utils",
-  "nix-github-actions": "nix-github-actions",
   "nixpkgs": "nixpkgs",
   "stable": "stable"
 }
 },
 "stable": {
   "locked": {
-    "lastModified": 1734875076,
-    "narHash": "sha256-Pzyb+YNG5u3zP79zoi8HXYMs15Q5dfjDgwCdUI5B0nY=",
+    "lastModified": 1696039360,
+    "narHash": "sha256-g7nIUV4uq1TOVeVIDEZLb005suTWCUjSY0zYOlSBsyE=",
     "owner": "NixOS",
     "repo": "nixpkgs",
-    "rev": "1807c2b91223227ad5599d7067a61665c52d1295",
+    "rev": "32dcb45f66c0487e92db8303a798ebc548cadedc",
     "type": "github"
   },
   "original": {
     "owner": "NixOS",
-    "ref": "nixos-24.11",
+    "ref": "nixos-23.05",
     "repo": "nixpkgs",
     "type": "github"
   }
flake.nix (45 lines changed)

@@ -3,12 +3,7 @@

 inputs = {
   nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
-  stable.url = "github:NixOS/nixpkgs/nixos-24.11";
+  stable.url = "github:NixOS/nixpkgs/nixos-23.05";

-  nix-github-actions = {
-    url = "github:nix-community/nix-github-actions";
-    inputs.nixpkgs.follows = "nixpkgs";
-  };
-
   flake-utils.url = "github:numtide/flake-utils";

@@ -18,23 +13,12 @@
   };
 };

-outputs = {
-  self,
-  nixpkgs,
-  stable,
-  flake-utils,
-  nix-github-actions,
-  ...
-} @ inputs: let
+outputs = { self, nixpkgs, stable, flake-utils, ... } @ inputs: let
   supportedSystems = [ "x86_64-linux" "i686-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" ];
   colmenaOptions = import ./src/nix/hive/options.nix;
   colmenaModules = import ./src/nix/hive/modules.nix;
 in flake-utils.lib.eachSystem supportedSystems (system: let
-  pkgs = import nixpkgs {
-    inherit system;
-    overlays = [
-    ];
-  };
+  pkgs = nixpkgs.legacyPackages.${system};
 in rec {
   # We still maintain the expression in a Nixpkgs-acceptable form
   defaultPackage = self.packages.${system}.colmena;

@@ -99,17 +83,11 @@
 in if pkgs.stdenv.isLinux then import ./integration-tests {
   pkgs = import nixpkgs {
     inherit system;
-    overlays = [
-      self.overlays.default
-      inputsOverlay
-    ];
+    overlays = [ self.overlays.default inputsOverlay ];
   };
   pkgsStable = import stable {
     inherit system;
-    overlays = [
-      self.overlays.default
-      inputsOverlay
-    ];
+    overlays = [ self.overlays.default inputsOverlay ];
   };
 } else {};
 }) // {

@@ -126,11 +104,14 @@
 inherit rawHive colmenaOptions colmenaModules;
 hermetic = true;
 };
+};

-githubActions = nix-github-actions.lib.mkGithubMatrix {
-  checks = {
-    inherit (self.checks) x86_64-linux;
-  };
-};
+nixConfig = {
+  extra-substituters = [
+    "https://colmena.cachix.org"
+  ];
+  extra-trusted-public-keys = [
+    "colmena.cachix.org-1:7BzpDnjjH8ki2CT3f6GdOk7QAzPOl+1t3LvTLXqYcSg="
+  ];
+};
 };
 }
garnix.yaml (new file, 3 lines)

@@ -0,0 +1,3 @@
+builds:
+  include:
+    - 'checks.x86_64-linux.*'
@@ -8,18 +8,8 @@
 apply-local = import ./apply-local { inherit pkgs; };
 build-on-target = import ./build-on-target { inherit pkgs; };
 exec = import ./exec { inherit pkgs; };
-# FIXME: The old evaluation method doesn't work purely with Nix 2.21+
-flakes = import ./flakes {
-  inherit pkgs;
-  extraApplyFlags = "--experimental-flake-eval";
-};
-flakes-impure = import ./flakes {
-  inherit pkgs;
-  pure = false;
-};
-#flakes-streaming = import ./flakes { inherit pkgs; evaluator = "streaming"; };
+flakes = import ./flakes { inherit pkgs; };
+flakes-streaming = import ./flakes { inherit pkgs; evaluator = "streaming"; };

 parallel = import ./parallel { inherit pkgs; };

 allow-apply-all = import ./allow-apply-all { inherit pkgs; };
@@ -1,29 +1,13 @@
 { pkgs
 , evaluator ? "chunked"
-, extraApplyFlags ? ""
-, pure ? true
 }:

 let
-  inherit (pkgs) lib;
-
   tools = pkgs.callPackage ../tools.nix {
     targets = [ "alpha" ];
   };
-
-  applyFlags = "--evaluator ${evaluator} ${extraApplyFlags}"
-    + lib.optionalString (!pure) "--impure";
-
-  # From integration-tests/nixpkgs.nix
-  colmenaFlakeInputs = pkgs._inputs;
 in tools.runTest {
-  name = "colmena-flakes-${evaluator}"
-    + lib.optionalString (!pure) "-impure";
-
-  nodes.deployer = {
-    virtualisation.additionalPaths =
-      lib.mapAttrsToList (k: v: v.outPath) colmenaFlakeInputs;
-  };
+  name = "colmena-flakes-${evaluator}";

   colmena.test = {
     bundle = ./.;

@@ -32,13 +16,12 @@ in tools.runTest {
 import re

 deployer.succeed("sed -i 's @nixpkgs@ path:${pkgs._inputs.nixpkgs.outPath}?narHash=${pkgs._inputs.nixpkgs.narHash} g' /tmp/bundle/flake.nix")
-deployer.succeed("sed -i 's @colmena@ path:${tools.colmena.src} g' /tmp/bundle/flake.nix")

 with subtest("Lock flake dependencies"):
     deployer.succeed("cd /tmp/bundle && nix --extra-experimental-features \"nix-command flakes\" flake lock")

 with subtest("Deploy with a plain flake without git"):
-    deployer.succeed("cd /tmp/bundle && ${tools.colmenaExec} apply --on @target ${applyFlags}")
+    deployer.succeed("cd /tmp/bundle && ${tools.colmenaExec} apply --on @target --evaluator ${evaluator}")
     alpha.succeed("grep FIRST /etc/deployment")

 with subtest("Deploy with a git flake"):

@@ -46,22 +29,21 @@ in tools.runTest {
     # don't put probe.nix in source control - should fail
     deployer.succeed("cd /tmp/bundle && git init && git add flake.nix flake.lock hive.nix tools.nix")
-    logs = deployer.fail("cd /tmp/bundle && run-copy-stderr ${tools.colmenaExec} apply --on @target ${applyFlags}")
-    assert re.search(r"probe.nix.*(No such file or directory|does not exist)", logs), "Expected error message not found in log"
+    logs = deployer.fail("cd /tmp/bundle && run-copy-stderr ${tools.colmenaExec} apply --on @target --evaluator ${evaluator}")
+    assert re.search(r"probe.nix.*No such file or directory", logs)

     # now it should succeed
     deployer.succeed("cd /tmp/bundle && git add probe.nix")
-    deployer.succeed("cd /tmp/bundle && ${tools.colmenaExec} apply --on @target ${applyFlags}")
+    deployer.succeed("cd /tmp/bundle && ${tools.colmenaExec} apply --on @target --evaluator ${evaluator}")
     alpha.succeed("grep SECOND /etc/deployment")

-'' + lib.optionalString pure ''
 with subtest("Check that impure expressions are forbidden"):
     deployer.succeed("sed -i 's|SECOND|''${builtins.readFile /etc/hostname}|g' /tmp/bundle/probe.nix")
-    logs = deployer.fail("cd /tmp/bundle && run-copy-stderr ${tools.colmenaExec} apply --on @target ${applyFlags}")
-    assert re.search(r"access to absolute path.*forbidden in pure (eval|evaluation) mode", logs), "Expected error message not found in log"
+    logs = deployer.fail("cd /tmp/bundle && run-copy-stderr ${tools.colmenaExec} apply --on @target --evaluator ${evaluator}")
+    assert re.search(r"access to absolute path.*forbidden in pure eval mode", logs)

 with subtest("Check that impure expressions can be allowed with --impure"):
-    deployer.succeed("cd /tmp/bundle && ${tools.colmenaExec} apply --on @target ${applyFlags} --impure")
+    deployer.succeed("cd /tmp/bundle && ${tools.colmenaExec} apply --on @target --evaluator ${evaluator} --impure")
     alpha.succeed("grep deployer /etc/deployment")
 '';
 };
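The removed `applyFlags` helper above assembles the test's CLI flag string with `lib.optionalString`; as a small sketch of that pattern under the same assumed arguments (`evaluator`, `extraApplyFlags`, `pure`):

```nix
{ lib, evaluator ? "chunked", extraApplyFlags ? "", pure ? true }:
let
  # lib.optionalString cond s returns s when cond holds and "" otherwise,
  # so only the impure variant gains a trailing "--impure".
  applyFlags = "--evaluator ${evaluator} ${extraApplyFlags}"
    + lib.optionalString (!pure) "--impure";
in
  applyFlags  # "--evaluator chunked " for the default, pure case
```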
@@ -3,15 +3,13 @@

 inputs = {
   nixpkgs.url = "@nixpkgs@";
-  colmena.url = "@colmena@";
 };

-outputs = { self, nixpkgs, colmena }: let
+outputs = { self, nixpkgs }: let
   pkgs = import nixpkgs {
     system = "x86_64-linux";
   };
 in {
   colmena = import ./hive.nix { inherit pkgs; };
-  colmenaHive = colmena.lib.makeHive self.outputs.colmena;
 };
 }
@@ -140,7 +140,7 @@ let
 nix.settings.substituters = lib.mkForce [];

 virtualisation = {
-  memorySize = 6144;
+  memorySize = 4096;
   writableStore = true;
   additionalPaths = [
     "${pkgs.path}"

@@ -165,9 +165,6 @@ let
   exec "$@" 2> >(tee /dev/stderr)
 '')
 ];
-
-# Re-enable switch-to-configuration
-system.switch.enable = true;
 };

 # Setup for target nodes

@@ -183,9 +180,6 @@ let
   sshKeys.snakeOilPublicKey
 ];
 virtualisation.writableStore = true;
-
-# Re-enable switch-to-configuration
-system.switch.enable = true;
 };

 nodes = let
@@ -90,34 +90,6 @@ To build and deploy to all nodes:
 colmena apply
 ```

-## Direct Flake Evaluation (Experimental)
-
-By default, Colmena uses `nix-instantiate` to evaluate your flake which does not work purely on Nix 2.21+, necessitating the use of `--impure`.
-There is experimental support for evaluating flakes directly with `nix eval`, enabled via `--experimental-flake-eval`.
-
-To use this new evaluation mode, your flake needs to depend on Colmena itself as an input and expose a new output called `colmenaHive`:
-
-```diff
- {
-   inputs = {
-+    # ADDED: Colmena input
-+    colmena.url = "github:zhaofengli/colmena";
-
-     # ... Rest of configuration ...
-   };
-
-   outputs = { self, colmena, ... }: {
-+    # ADDED: New colmenaHive output
-+    colmenaHive = colmena.lib.makeHive self.outputs.colmena;
-
-     # Your existing colmena output
-     colmena = {
-       # ... Rest of configuration ...
-     };
-   };
- }
-```
-
 ## Next Steps

 - Head to the [Features](../features/index.md) section to see what else Colmena can do.
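Read as a whole, the flake changes that the removed manual section asks for amount to roughly the following sketch; the nixpkgs branch, system, and hive body are placeholders, and only the `colmena` input and the `colmenaHive = colmena.lib.makeHive ...` output come from the text above.

```nix
{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    # Colmena itself as an input, required by --experimental-flake-eval.
    colmena.url = "github:zhaofengli/colmena";
  };

  outputs = { self, nixpkgs, colmena, ... }: {
    # The output consumed by direct flake evaluation.
    colmenaHive = colmena.lib.makeHive self.outputs.colmena;

    # The existing hive definition stays as-is.
    colmena = {
      meta.nixpkgs = import nixpkgs { system = "x86_64-linux"; };
      # ... node definitions ...
    };
  };
}
```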
package.nix (13 lines changed)

@@ -1,16 +1,13 @@
-{ lib
-, stdenv
-, rustPlatform
-, nix-gitignore
-, installShellFiles
-, nix-eval-jobs
-}:
+{ lib, stdenv, rustPlatform, installShellFiles, nix-eval-jobs }:

 rustPlatform.buildRustPackage rec {
   pname = "colmena";
   version = "0.5.0-pre";

-  src = nix-gitignore.gitignoreSource [ ./.srcignore ] ./.;
+  src = lib.cleanSourceWith {
+    filter = name: type: !(type == "directory" && builtins.elem (baseNameOf name) [ "target" "manual" "integration-tests" ]);
+    src = lib.cleanSource ./.;
+  };

   cargoLock = {
     lockFile = ./Cargo.lock;
@@ -1,14 +0,0 @@
-{
-  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
-  "extends": [
-    "config:recommended",
-    "group:allNonMajor"
-  ],
-  "lockFileMaintenance": {
-    "enabled": true,
-    "extends": ["schedule:weekly"]
-  },
-  "nix": {
-    "enabled": true
-  }
-}
src/cli.rs (22 lines changed)

@@ -10,7 +10,7 @@ use env_logger::fmt::WriteStyle;
 use crate::{
     command::{self, apply::DeployOpts},
     error::ColmenaResult,
-    nix::{hive::EvaluationMethod, Hive, HivePath},
+    nix::{Hive, HivePath},
 };

 /// Base URL of the manual, without the trailing slash.

@@ -137,21 +137,6 @@ This only works when building locally.
     value_names = ["NAME", "VALUE"],
 )]
 nix_option: Vec<String>,
-#[arg(
-    long,
-    default_value_t,
-    help = "Use direct flake evaluation (experimental)",
-    long_help = r#"If enabled, flakes will be evaluated using `nix eval`. This requires the flake to depend on Colmena as an input and expose a compatible `colmenaHive` output:
-
-outputs = { self, colmena, ... }: {
-    colmenaHive = colmena.lib.makeHive self.outputs.colmena;
-    colmena = ...;
-};
-
-This is an experimental feature."#,
-    global = true
-)]
-experimental_flake_eval: bool,
 #[arg(
     long,
     value_name = "WHEN",

@@ -277,11 +262,6 @@ async fn get_hive(opts: &Opts) -> ColmenaResult<Hive> {
     hive.set_impure(true);
 }

-if opts.experimental_flake_eval {
-    log::warn!("Using direct flake evaluation (experimental)");
-    hive.set_evaluation_method(EvaluationMethod::DirectFlakeEval);
-}
-
 for chunks in opts.nix_option.chunks_exact(2) {
     let [name, value] = chunks else {
         unreachable!()
src/job.rs (14 lines changed)

@@ -874,17 +874,11 @@ fn describe_node_list(nodes: &[NodeName]) -> Option<String> {
 }

 let (idx, next) = next.unwrap();
-let remaining_text = rough_limit - s.len();
-let remaining_nodes = total - idx;
+let remaining = rough_limit - s.len();

-if next.len() + other_text.len() >= remaining_text {
-    if remaining_nodes == 1 {
-        write!(s, ", and {}", next.as_str()).unwrap();
-        break;
-    } else {
-        write!(s, ", and {} other nodes", remaining_nodes).unwrap();
-        break;
-    }
+if next.len() + other_text.len() >= remaining {
+    write!(s, ", and {} other nodes", total - idx).unwrap();
+    break;
 }
 }
@@ -370,6 +370,9 @@ impl Deployment {
 for (name, profile_drv) in profile_drvs.iter() {
     let mut target = chunk.remove(name).unwrap();

+    // TODO: look if target's system type support build on target.
+    // hard fail if the build on target.
+
     if let Some(force_build_on_target) = self.options.force_build_on_target {
         target.config.set_build_on_target(force_build_on_target);
     }

@@ -439,6 +442,9 @@ impl Deployment {
     return Err(ColmenaError::Unsupported);
 }

+// TODO: add to check if there's support to upload keys.
+// If not, warn about it but do nothing.
+
 let host = target.host.as_mut().unwrap();
 host.set_job(Some(job));
 host.upload_keys(&target.config.keys, true).await?;

@@ -621,6 +627,8 @@ impl Deployment {
 let host = target.host.as_mut().unwrap();
 host.set_job(Some(job.clone()));

+// TODO: certain system types does not have a concept of profile.
+// generalize the profile algorithm
 if !target.config.replace_unknown_profiles {
     job.message("Checking remote profile...".to_string())?;
@@ -51,10 +51,7 @@ impl Assets {
 // We explicitly specify `path:` instead of letting Nix resolve
 // automatically, which would involve checking parent directories
 // for a git repository.
-let uri = format!(
-    "path:{}",
-    temp_dir.path().canonicalize().unwrap().to_str().unwrap()
-);
+let uri = format!("path:{}", temp_dir.path().to_str().unwrap());
 let _ = lock_flake_quiet(&uri).await;
 let assets_flake = Flake::from_uri(uri).await?;
 assets_flake_uri = Some(assets_flake.locked_uri().to_owned());
@@ -202,7 +202,7 @@ let

 in rec {
   # Exported attributes
-  __schema = "v0.20241006";
+  __schema = "v0";

   nodes = listToAttrs (map (name: { inherit name; value = evalNode name (configsFor name); }) nodeNames);
   toplevel = lib.mapAttrs (_: v: v.config.system.build.toplevel) nodes;
@@ -7,7 +7,7 @@

 outputs = { self, hive }: {
   processFlake = let
-    compatibleSchema = "v0.20241006";
+    compatibleSchema = "v0";

     # Evaluates a raw hive.
     #
@@ -8,7 +8,6 @@ use std::convert::AsRef;
 use std::path::{Path, PathBuf};
 use std::str::FromStr;

-use const_format::formatcp;
 use tokio::process::Command;
 use tokio::sync::OnceCell;
 use validator::Validate;

@@ -23,21 +22,6 @@ use crate::job::JobHandle;
 use crate::util::{CommandExecution, CommandExt};
 use assets::Assets;

-/// The version of the Hive schema we are compatible with.
-///
-/// Currently we are tied to one specific version.
-const HIVE_SCHEMA: &str = "v0.20241006";
-
-/// The snippet to be used for `nix eval --apply`.
-const FLAKE_APPLY_SNIPPET: &str = formatcp!(
-    r#"with builtins; hive: assert (hive.__schema == "{}" || throw ''
-The colmenaHive output (schema ${{hive.__schema}}) isn't compatible with this version of Colmena.
-
-Hint: Use the same version of Colmena as in the Flake input.
-''); "#,
-    HIVE_SCHEMA
-);
-
 #[derive(Debug, Clone)]
 pub enum HivePath {
     /// A Nix Flake.

@@ -79,33 +63,11 @@ impl FromStr for HivePath {
 }
 }

-#[derive(Debug, Clone, Copy, PartialEq)]
-pub enum EvaluationMethod {
-    /// Use nix-instantiate and specify the entire Nix expression.
-    ///
-    /// This is the default method.
-    ///
-    /// For flakes, we use `builtins.getFlakes`. Pure evaluation no longer works
-    /// with this method in Nix 2.21+.
-    NixInstantiate,
-
-    /// Use `nix eval --apply` on top of a flake.
-    ///
-    /// This can be activated with --experimental-flake-eval.
-    ///
-    /// In this method, we can no longer pull in our bundled assets and
-    /// the flake must expose a compatible `colmenaHive` output.
-    DirectFlakeEval,
-}
-
 #[derive(Debug)]
 pub struct Hive {
     /// Path to the hive.
     path: HivePath,

-    /// Method to evaluate the hive with.
-    evaluation_method: EvaluationMethod,
-
     /// Path to the context directory.
     ///
     /// Normally this is directory containing the "hive.nix"

@@ -174,7 +136,6 @@ impl Hive {

 Ok(Self {
     path,
-    evaluation_method: EvaluationMethod::NixInstantiate,
     context_dir,
     assets,
     show_trace: false,

@@ -200,14 +161,6 @@ impl Hive {
     .await
 }

-pub fn set_evaluation_method(&mut self, method: EvaluationMethod) {
-    if !self.is_flake() && method == EvaluationMethod::DirectFlakeEval {
-        return;
-    }
-
-    self.evaluation_method = method;
-}
-
 pub async fn get_registry_config(&self) -> ColmenaResult<&RegistryConfig> {
     self.registry_config
         .get_or_try_init(|| async {

@@ -503,10 +456,7 @@ impl Hive {

 /// Returns the base expression from which the evaluated Hive can be used.
 fn get_base_expression(&self) -> String {
-    match self.evaluation_method {
-        EvaluationMethod::NixInstantiate => self.assets.get_base_expression(),
-        EvaluationMethod::DirectFlakeEval => FLAKE_APPLY_SNIPPET.to_string(),
-    }
+    self.assets.get_base_expression()
 }

 /// Returns whether this Hive is a flake.

@@ -529,11 +479,6 @@ impl<'hive> NixInstantiate<'hive> {
 }

 fn instantiate(&self) -> Command {
-    // TODO: Better error handling
-    if self.hive.evaluation_method == EvaluationMethod::DirectFlakeEval {
-        panic!("Instantiation is not supported with DirectFlakeEval");
-    }
-
     let mut command = Command::new("nix-instantiate");

     if self.hive.is_flake() {

@@ -552,48 +497,17 @@ impl<'hive> NixInstantiate<'hive> {
 }

 fn eval(self) -> Command {
-    let flags = self.hive.nix_flags();
-
-    match self.hive.evaluation_method {
-        EvaluationMethod::NixInstantiate => {
-            let mut command = self.instantiate();
-
-            command
-                .arg("--eval")
-                .arg("--json")
-                .arg("--strict")
-                // Ensures the derivations are instantiated
-                // Required for system profile evaluation and IFD
-                .arg("--read-write-mode")
-                .args(flags.to_args());
-
-            command
-        }
-        EvaluationMethod::DirectFlakeEval => {
-            let mut command = Command::new("nix");
-            let flake = if let HivePath::Flake(flake) = self.hive.path() {
-                flake
-            } else {
-                panic!("The DirectFlakeEval evaluation method only support flakes");
-            };
-
-            let hive_installable = format!("{}#colmenaHive", flake.uri());
-
-            let mut full_expression = self.hive.get_base_expression();
-            full_expression += &self.expression;
-
-            command
-                .arg("eval") // nix eval
-                .args(["--extra-experimental-features", "flakes nix-command"])
-                .arg(hive_installable)
-                .arg("--json")
-                .arg("--apply")
-                .arg(&full_expression)
-                .args(flags.to_args());
-
-            command
-        }
-    }
+    let mut command = self.instantiate();
+    let flags = self.hive.nix_flags();
+
+    command
+        .arg("--eval")
+        .arg("--json")
+        .arg("--strict")
+        // Ensures the derivations are instantiated
+        // Required for system profile evaluation and IFD
+        .arg("--read-write-mode")
+        .args(flags.to_args());
+
+    command
 }

 async fn instantiate_with_builders(self) -> ColmenaResult<Command> {
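For reference, the removed `FLAKE_APPLY_SNIPPET` above, with `HIVE_SCHEMA` substituted in, expands to roughly the following Nix prologue; the trailing `hive` is a placeholder for the accessor expression Colmena appends after the assert.

```nix
with builtins; hive:
  assert (hive.__schema == "v0.20241006" || throw ''
    The colmenaHive output (schema ${hive.__schema}) isn't compatible with this version of Colmena.

    Hint: Use the same version of Colmena as in the Flake input.
  '');
  # Colmena appends its real expression after the assert; `hive` is just a stand-in.
  hive
```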
@@ -1,17 +1,18 @@
 with builtins; rec {
   keyType = { lib, name, config, ... }: let
     inherit (lib) types;
+    mdDoc = lib.mdDoc or (md: md);
   in {
     options = {
       name = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           File name of the key.
         '';
         default = name;
         type = types.str;
       };
       text = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Content of the key.
           One of `text`, `keyCommand` and `keyFile` must be set.
         '';

@@ -19,7 +20,7 @@ with builtins; rec {
       keyFile = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Path of the local file to read the key from.
           One of `text`, `keyCommand` and `keyFile` must be set.

@@ -28,7 +29,7 @@ with builtins; rec {
       keyCommand = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Command to run to generate the key.
           One of `text`, `keyCommand` and `keyFile` must be set.

@@ -38,14 +39,14 @@ with builtins; rec {
       destDir = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Destination directory on the host.
         '';
         default = "/run/keys";
       path = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Full path to the destination.
         '';
         default = "${config.destDir}/${config.name}";

@@ -53,28 +54,28 @@ with builtins; rec {
       user = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           The group that will own the file.
         '';
         default = "root";
       group = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           The group that will own the file.
         '';
         default = "root";
       permissions = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Permissions to set for the file.
         '';
         default = "0600";
       uploadAt = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           When to upload the keys.

           - pre-activation (default): Upload the keys before activating the new system profile.

@@ -93,6 +94,7 @@ with builtins; rec {
   # Largely compatible with NixOps/Morph.
   deploymentOptions = { name, lib, ... }: let
     inherit (lib) types;
+    mdDoc = lib.mdDoc or (md: md);
   in {
     options = {
       deployment = {

@@ -105,7 +107,7 @@ with builtins; rec {
       targetHost = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           The target SSH node for deployment.

           By default, the node's attribute name will be used.

@@ -115,7 +117,7 @@ with builtins; rec {
       targetPort = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           The target SSH port for deployment.

           By default, the port is the standard port (22) or taken

@@ -125,7 +127,7 @@ with builtins; rec {
       targetUser = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           The user to use to log into the remote node. If set to null, the
           target user will not be specified in SSH invocations.

@@ -133,7 +135,7 @@ with builtins; rec {
       allowLocalDeployment = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Allow the configuration to be applied locally on the host running
           Colmena.

@@ -150,7 +152,7 @@ with builtins; rec {
       buildOnTarget = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Whether to build the system profiles on the target node itself.

           When enabled, Colmena will copy the derivation to the target

@@ -170,7 +172,7 @@ with builtins; rec {
       tags = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           A list of tags for the node.

           Can be used to select a group of nodes for deployment.

@@ -179,7 +181,7 @@ with builtins; rec {
       keys = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           A set of secrets to be deployed to the node.

           Secrets are transferred to the node out-of-band and

@@ -189,7 +191,7 @@ with builtins; rec {
       replaceUnknownProfiles = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Allow a configuration to be applied to a host running a profile we
           have no knowledge of. By setting this option to false, you reduce
           the likelyhood of rolling back changes made via another Colmena user.

@@ -205,7 +207,7 @@ with builtins; rec {
       privilegeEscalationCommand = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Command to use to elevate privileges when activating the new profiles on SSH hosts.

           This is used on SSH hosts when `deployment.targetUser` is not `root`.

@@ -215,7 +217,7 @@ with builtins; rec {
       sshOptions = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Extra SSH options to pass to the SSH command.
         '';
         type = types.listOf types.str;

@@ -273,28 +275,29 @@ with builtins; rec {
   # Hive-wide options
   metaOptions = { lib, ... }: let
     inherit (lib) types;
+    mdDoc = lib.mdDoc or (md: md);
   in {
     options = {
       name = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           The name of the configuration.
         '';
         default = "hive";
       description = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           A short description for the configuration.
         '';
         default = "A Colmena Hive";
       nixpkgs = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           The pinned Nixpkgs package set. Accepts one of the following:

           - A path to a Nixpkgs checkout
-          - The Nixpkgs lambda (e.g., import <nixpkgs>)
+          - The Nixpkgs lambda (e.g., import \<nixpkgs\>)
           - An initialized Nixpkgs attribute set

           This option must be specified when using Flakes.

@@ -303,21 +306,21 @@ with builtins; rec {
       nodeNixpkgs = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Node-specific Nixpkgs pins.
         '';
         type = types.attrsOf types.unspecified;
       nodeSpecialArgs = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Node-specific special args.
         '';
         type = types.attrsOf types.unspecified;
       machinesFile = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Use the machines listed in this file when building this hive configuration.

           If your Colmena host has nix configured to allow for remote builds

@@ -341,7 +344,7 @@ with builtins; rec {
       specialArgs = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           A set of special arguments to be passed to NixOS modules.

           This will be merged into the `specialArgs` used to evaluate

@@ -351,7 +354,7 @@ with builtins; rec {
       allowApplyAll = lib.mkOption {
-        description = ''
+        description = mdDoc ''
           Whether to allow deployments without a node filter set.

           If set to false, a node filter must be specified with `--on` when
@@ -87,14 +87,14 @@ pub struct Key {
     #[serde(flatten)]
     source: KeySource,

-    #[validate(custom(function = "validate_dest_dir"))]
+    #[validate(custom = "validate_dest_dir")]
     #[serde(rename = "destDir")]
     dest_dir: PathBuf,

-    #[validate(custom(function = "validate_unix_name"))]
+    #[validate(custom = "validate_unix_name")]
     user: String,

-    #[validate(custom(function = "validate_unix_name"))]
+    #[validate(custom = "validate_unix_name")]
     group: String,

     permissions: String,
@@ -84,7 +84,7 @@ pub struct NodeConfig {
     #[serde(rename = "sshOptions")]
     extra_ssh_options: Vec<String>,

-    #[validate(custom(function = "validate_keys"))]
+    #[validate(custom = "validate_keys")]
     keys: HashMap<String, Key>,
 }
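The two hunks above only swap the validator derive syntax: `#[validate(custom(function = "..."))]` on the left versus the older `#[validate(custom = "...")]` string form on the right; the referenced functions are unchanged. As a rough, illustrative sketch (not Colmena's actual code, and the body is purely an assumption), a custom validation function wired up by either attribute form has this shape:

use validator::ValidationError;

// Illustrative only: accept names made of ASCII letters, digits, '_' and '-'.
fn validate_unix_name(name: &str) -> Result<(), ValidationError> {
    let ok = !name.is_empty()
        && name
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '-');
    if ok {
        Ok(())
    } else {
        Err(ValidationError::new("invalid_unix_name"))
    }
}

Only the derive-macro spelling differs between the two sides of the diff; the function signature stays the same.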
@@ -2,7 +2,7 @@

 use std::collections::HashSet;
 use std::convert::AsRef;
-use std::iter::Iterator;
+use std::iter::{FromIterator, Iterator};
 use std::str::FromStr;

 use clap::Args;
@@ -28,26 +28,22 @@ The list is comma-separated and globs are supported. To match tags, prepend the
     pub on: Option<NodeFilter>,
 }

+/// A node filter containing a list of rules.
+#[derive(Clone, Debug)]
+pub struct NodeFilter {
+    rules: Vec<Rule>,
+}
+
 /// A filter rule.
+///
+/// The filter rules are OR'd together.
 #[derive(Debug, Clone, Eq, PartialEq)]
-pub enum NodeFilter {
+enum Rule {
     /// Matches a node's attribute name.
     MatchName(GlobPattern),

     /// Matches a node's `deployment.tags`.
     MatchTag(GlobPattern),
-
-    /// Matches an Union
-    Union(Vec<Box<NodeFilter>>),
-
-    /// Matches an Intersection
-    Inter(Vec<Box<NodeFilter>>),
-
-    /// Matches the complementary
-    Not(Box<NodeFilter>),
-
-    /// Empty
-    Empty,
 }

 impl FromStr for NodeFilter {
@@ -57,169 +53,7 @@ impl FromStr for NodeFilter {
     }
 }

-#[inline]
-fn end_delimiter(c: char) -> bool {
-    [',', '&', ')'].contains(&c)
-}
-
 impl NodeFilter {
-    fn and(a: Self, b: Self) -> Self {
-        match (a, b) {
-            (Self::Inter(mut av), Self::Inter(mut bv)) => {
-                av.append(&mut bv);
-                Self::Inter(av)
-            }
-            (Self::Inter(mut av), b) => {
-                av.push(Box::new(b));
-                Self::Inter(av)
-            }
-            (a, Self::Inter(mut bv)) => {
-                bv.push(Box::new(a));
-                Self::Inter(bv)
-            }
-            (a, b) => Self::Inter(vec![Box::new(a), Box::new(b)]),
-        }
-    }
-
-    fn or(a: Self, b: Self) -> Self {
-        match (a, b) {
-            (Self::Union(mut av), Self::Union(mut bv)) => {
-                av.append(&mut bv);
-                Self::Union(av)
-            }
-            (Self::Union(mut av), b) => {
-                av.push(Box::new(b));
-                Self::Union(av)
-            }
-            (a, Self::Union(mut bv)) => {
-                bv.push(Box::new(a));
-                Self::Union(bv)
-            }
-            (a, b) => Self::Union(vec![Box::new(a), Box::new(b)]),
-        }
-    }
-
-    fn not(a: Self) -> Self {
-        if let Self::Not(ae) = a {
-            *ae
-        } else {
-            Self::Not(Box::new(a))
-        }
-    }
-
-    /// Parses an elementary expression,
-    /// that is base tags and name, with expression between parentheses
-    /// Negations are also parsed here as the most prioritary operation
-    ///
-    /// It returns the unparsed text that follows
-    fn parse_expr0(unparsed: &str) -> ColmenaResult<(Self, &str)> {
-        let unparsed = unparsed.trim_start();
-        // Negation
-        if let Some(negated_expr) = unparsed.strip_prefix('!') {
-            let (negated, unparsed) = Self::parse_expr0(negated_expr)?;
-            Ok((Self::not(negated), unparsed))
-        } else
-        // parentheses
-        if let Some(parenthesed_expr) = unparsed.strip_prefix('(') {
-            let (interior, unparsed) = Self::parse_expr2(parenthesed_expr)?;
-            Ok((
-                interior,
-                unparsed.strip_prefix(')').ok_or(ColmenaError::Unknown {
-                    message: format!("Expected a closing parenthesis at {:?}.", unparsed),
-                })?,
-            ))
-        } else
-        // tag
-        if let Some(tag_expr) = unparsed.strip_prefix('@') {
-            match tag_expr
-                .find(end_delimiter)
-                .map(|idx| tag_expr.split_at(idx))
-                .map(|(tag, end)| (tag.trim_end(), end))
-            {
-                Some((tag, unparsed)) => {
-                    if tag.is_empty() {
-                        return Err(ColmenaError::EmptyFilterRule);
-                    } else {
-                        Ok((Self::MatchTag(GlobPattern::new(tag).unwrap()), unparsed))
-                    }
-                }
-                None => {
-                    let tag_expr = tag_expr.trim_end();
-                    if tag_expr.is_empty() {
-                        Err(ColmenaError::EmptyFilterRule)
-                    } else {
-                        Ok((Self::MatchTag(GlobPattern::new(tag_expr).unwrap()), ""))
-                    }
-                }
-            }
-        } else
-        //node name
-        {
-            match unparsed
-                .find(end_delimiter)
-                .map(|idx| unparsed.split_at(idx))
-                .map(|(tag, end)| (tag.trim_end(), end))
-            {
-                Some((name, unparsed)) => {
-                    if name.is_empty() {
-                        Err(ColmenaError::EmptyFilterRule)
-                    } else {
-                        Ok((Self::MatchName(GlobPattern::new(name).unwrap()), unparsed))
-                    }
-                }
-                None => {
-                    let unparsed = unparsed.trim_end();
-                    if unparsed.is_empty() {
-                        Err(ColmenaError::EmptyFilterRule)
-                    } else {
-                        Ok((Self::MatchName(GlobPattern::new(unparsed).unwrap()), ""))
-                    }
-                }
-            }
-        }
-    }
-
-    /// Parses the union operations between elementary expression.
-    ///
-    /// It returns the unparsed text that follows
-    fn parse_op1(acc: Self, unparsed: &str) -> ColmenaResult<(Self, &str)> {
-        let unparsed = unparsed.trim_start();
-        if let Some(unions) = unparsed.strip_prefix(',') {
-            let (base_expr, unparsed) = Self::parse_expr0(unions)?;
-            Self::parse_op1(Self::or(acc, base_expr), unparsed)
-        } else {
-            Ok((acc, unparsed))
-        }
-    }
-
-    /// Parses elementary expression and their unions.
-    ///
-    /// It returns the unparsed text that follows
-    fn parse_expr1(unparsed: &str) -> ColmenaResult<(Self, &str)> {
-        let (base_expr, unparsed) = Self::parse_expr0(unparsed)?;
-        Self::parse_op1(base_expr, unparsed)
-    }
-
-    /// Parses the intersection operations between unions.
-    ///
-    /// It returns the unparsed text that follows
-    fn parse_op2(acc: Self, unparsed: &str) -> ColmenaResult<(Self, &str)> {
-        if let Some(intersections) = unparsed.strip_prefix('&') {
-            let (union, unparsed) = Self::parse_expr1(intersections)?;
-            Self::parse_op2(Self::and(acc, union), unparsed)
-        } else {
-            Ok((acc, unparsed))
-        }
-    }
-
-    /// Parses a complete expression
-    ///
-    /// It returns the unparsed text that follows
-    fn parse_expr2(unparsed: &str) -> ColmenaResult<(Self, &str)> {
-        let (union, unparsed) = Self::parse_expr1(unparsed)?;
-        Self::parse_op2(union, unparsed)
-    }
-
     /// Creates a new filter using an expression passed using `--on`.
     pub fn new<S: AsRef<str>>(filter: S) -> ColmenaResult<Self> {
         let filter = filter.as_ref();
@@ -228,16 +62,29 @@ impl NodeFilter {
         if trimmed.is_empty() {
             log::warn!("Filter \"{}\" is blank and will match nothing", filter);

-            return Ok(Self::Empty);
+            return Ok(Self { rules: Vec::new() });
         }
-        let (target_filter, unparsed) = Self::parse_expr2(trimmed)?;
-        if unparsed != "" {
-            Err(ColmenaError::Unknown {
-                message: format!("Found garbage {:?} when parsing the node filter.", unparsed),
-            })
-        } else {
-            Ok(target_filter)
-        }
+
+        let rules = trimmed
+            .split(',')
+            .map(|pattern| {
+                let pattern = pattern.trim();
+
+                if pattern.is_empty() {
+                    return Err(ColmenaError::EmptyFilterRule);
+                }
+
+                if let Some(tag_pattern) = pattern.strip_prefix('@') {
+                    Ok(Rule::MatchTag(GlobPattern::new(tag_pattern).unwrap()))
+                } else {
+                    Ok(Rule::MatchName(GlobPattern::new(pattern).unwrap()))
+                }
+            })
+            .collect::<Vec<ColmenaResult<Rule>>>();
+
+        let rules = Result::from_iter(rules)?;
+
+        Ok(Self { rules })
     }

     /// Returns whether the filter has any rule matching NodeConfig information.
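The right-hand side of the hunk above reduces filter parsing to a comma split plus an `@` prefix check, while the left-hand side keeps the recursive-descent parser removed earlier so that `&`, `!` and parentheses compose (as the tests further down exercise). A self-contained sketch of the simpler technique, assuming the `glob` crate's `Pattern` in place of Colmena's `GlobPattern` and `Option` in place of `ColmenaResult`:

use glob::Pattern;

#[derive(Debug)]
enum Rule {
    MatchName(Pattern),
    MatchTag(Pattern),
}

// Split a filter string on commas; `@`-prefixed entries become tag rules,
// everything else becomes name rules. Any empty or invalid entry yields None.
fn parse_filter(filter: &str) -> Option<Vec<Rule>> {
    filter
        .split(',')
        .map(|pattern| {
            let pattern = pattern.trim();
            if pattern.is_empty() {
                return None;
            }
            Some(if let Some(tag) = pattern.strip_prefix('@') {
                Rule::MatchTag(Pattern::new(tag).ok()?)
            } else {
                Rule::MatchName(Pattern::new(pattern).ok()?)
            })
        })
        .collect()
}

fn main() {
    let rules = parse_filter("@router, gamma-*").expect("filter should parse");
    assert!(matches!(rules[0], Rule::MatchTag(_)));
    assert!(matches!(rules[1], Rule::MatchName(_)));
    println!("{rules:?}");
}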
@@ -246,31 +93,7 @@ impl NodeFilter {
     /// especially when its values (e.g., tags) depend on other parts of
     /// the configuration.
     pub fn has_node_config_rules(&self) -> bool {
-        match self {
-            Self::MatchName(_) => false,
-            Self::MatchTag(_) => true,
-            Self::Union(v) => v.iter().any(|e| e.has_node_config_rules()),
-            Self::Inter(v) => v.iter().any(|e| e.has_node_config_rules()),
-            Self::Not(e) => e.has_node_config_rules(),
-            Self::Empty => false,
-        }
-    }
-
-    /// Decides whether a node is accepted by the filter or not.
-    /// panic if the filter depends on tags and config is None
-    fn is_accepted(&self, name: &NodeName, config: Option<&NodeConfig>) -> bool {
-        match self {
-            Self::MatchName(pat) => pat.matches(name.as_str()),
-            Self::MatchTag(pat) => config
-                .unwrap()
-                .tags()
-                .iter()
-                .any(|tag| pat.matches(tag.as_str())),
-            Self::Union(v) => v.iter().any(|e| e.is_accepted(name, config)),
-            Self::Inter(v) => v.iter().all(|e| e.is_accepted(name, config)),
-            Self::Not(e) => !e.is_accepted(name, config),
-            Self::Empty => false,
-        }
+        self.rules.iter().any(|rule| rule.matches_node_config())
     }

     /// Runs the filter against a set of NodeConfigs and returns the matched ones.
@@ -278,17 +101,30 @@ impl NodeFilter {
     where
         I: Iterator<Item = (&'a NodeName, &'a NodeConfig)>,
     {
-        if self == &Self::Empty {
+        if self.rules.is_empty() {
             return HashSet::new();
         }

         nodes
             .filter_map(|(name, node)| {
-                if self.is_accepted(name, Some(node)) {
-                    Some(name)
-                } else {
-                    None
+                for rule in self.rules.iter() {
+                    match rule {
+                        Rule::MatchName(pat) => {
+                            if pat.matches(name.as_str()) {
+                                return Some(name);
+                            }
+                        }
+                        Rule::MatchTag(pat) => {
+                            for tag in node.tags() {
+                                if pat.matches(tag) {
+                                    return Some(name);
+                                }
+                            }
+                        }
+                    }
                 }
+
+                None
             })
             .cloned()
             .collect()
@@ -296,24 +132,32 @@ impl NodeFilter {

     /// Runs the filter against a set of node names and returns the matched ones.
     pub fn filter_node_names(&self, nodes: &[NodeName]) -> ColmenaResult<HashSet<NodeName>> {
-        if self.has_node_config_rules() {
-            Err(ColmenaError::Unknown {
-                message: format!(
-                    "Not enough information to run rule {:?} - We only have node names",
-                    self
-                ),
-            })
-        } else {
-            Ok(nodes
-                .iter()
-                .filter_map(|name| {
-                    if self.is_accepted(name, None) {
-                        Some(name.clone())
-                    } else {
-                        None
-                    }
-                })
-                .collect())
+        nodes.iter().filter_map(|name| -> Option<ColmenaResult<NodeName>> {
+            for rule in self.rules.iter() {
+                match rule {
+                    Rule::MatchName(pat) => {
+                        if pat.matches(name.as_str()) {
+                            return Some(Ok(name.clone()));
+                        }
+                    }
+                    _ => {
+                        return Some(Err(ColmenaError::Unknown {
+                            message: format!("Not enough information to run rule {:?} - We only have node names", rule),
+                        }));
+                    }
+                }
+            }
+            None
+        }).collect()
+    }
+}
+
+impl Rule {
+    /// Returns whether the rule matches against the NodeConfig (i.e., `config.deployment`).
+    pub fn matches_node_config(&self) -> bool {
+        match self {
+            Self::MatchTag(_) => true,
+            Self::MatchName(_) => false,
         }
     }
 }
@@ -333,13 +177,13 @@ mod tests {
     #[test]
     fn test_empty_filter() {
         let filter = NodeFilter::new("").unwrap();
-        assert_eq!(NodeFilter::Empty, filter);
+        assert_eq!(0, filter.rules.len());

         let filter = NodeFilter::new("\t").unwrap();
-        assert_eq!(NodeFilter::Empty, filter);
+        assert_eq!(0, filter.rules.len());

         let filter = NodeFilter::new(" ").unwrap();
-        assert_eq!(NodeFilter::Empty, filter);
+        assert_eq!(0, filter.rules.len());
     }

     #[test]
@@ -353,73 +197,21 @@ mod tests {
     fn test_filter_rule_mixed() {
         let filter = NodeFilter::new("@router,gamma-*").unwrap();
         assert_eq!(
-            NodeFilter::Union(vec![
-                Box::new(NodeFilter::MatchTag(GlobPattern::new("router").unwrap())),
-                Box::new(NodeFilter::MatchName(GlobPattern::new("gamma-*").unwrap())),
-            ]),
-            filter,
+            vec![
+                Rule::MatchTag(GlobPattern::new("router").unwrap()),
+                Rule::MatchName(GlobPattern::new("gamma-*").unwrap()),
+            ],
+            filter.rules,
         );

         let filter = NodeFilter::new("a, \t@b , c-*").unwrap();
         assert_eq!(
-            NodeFilter::Union(vec![
-                Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
-                Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
-                Box::new(NodeFilter::MatchName(GlobPattern::new("c-*").unwrap())),
-            ]),
-            filter,
-        );
-
-        let filter = NodeFilter::new("a & \t@b , c-*").unwrap();
-        assert_eq!(
-            NodeFilter::Inter(vec![
-                Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
-                Box::new(NodeFilter::Union(vec![
-                    Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
-                    Box::new(NodeFilter::MatchName(GlobPattern::new("c-*").unwrap())),
-                ])),
-            ]),
-            filter,
-        );
-
-        let filter = NodeFilter::new("( a & \t@b ) , c-*").unwrap();
-        assert_eq!(
-            NodeFilter::Union(vec![
-                Box::new(NodeFilter::Inter(vec![
-                    Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
-                    Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
-                ])),
-                Box::new(NodeFilter::MatchName(GlobPattern::new("c-*").unwrap())),
-            ]),
-            filter,
-        );
-
-        let filter = NodeFilter::new("( a & \t@b ) , ! c-*").unwrap();
-        assert_eq!(
-            NodeFilter::Union(vec![
-                Box::new(NodeFilter::Inter(vec![
-                    Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
-                    Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
-                ])),
-                Box::new(NodeFilter::Not(Box::new(NodeFilter::MatchName(
-                    GlobPattern::new("c-*").unwrap()
-                )))),
-            ]),
-            filter,
-        );
-
-        let filter = NodeFilter::new("( a & \t@b ) , !!! c-*").unwrap();
-        assert_eq!(
-            NodeFilter::Union(vec![
-                Box::new(NodeFilter::Inter(vec![
-                    Box::new(NodeFilter::MatchName(GlobPattern::new("a").unwrap())),
-                    Box::new(NodeFilter::MatchTag(GlobPattern::new("b").unwrap())),
-                ])),
-                Box::new(NodeFilter::Not(Box::new(NodeFilter::MatchName(
-                    GlobPattern::new("c-*").unwrap()
-                )))),
-            ]),
-            filter,
+            vec![
+                Rule::MatchName(GlobPattern::new("a").unwrap()),
+                Rule::MatchTag(GlobPattern::new("b").unwrap()),
+                Rule::MatchName(GlobPattern::new("c-*").unwrap()),
+            ],
+            filter.rules,
         );
     }

@@ -458,7 +250,6 @@ mod tests {
             privilege_escalation_command: vec![],
             extra_ssh_options: vec![],
             keys: HashMap::new(),
-            system_type: None,
         };

         let mut nodes = HashMap::new();
@@ -524,26 +315,5 @@ mod tests {
                 .unwrap()
                 .filter_node_configs(nodes.iter()),
         );
-
-        assert_eq!(
-            &HashSet::from_iter([]),
-            &NodeFilter::new("@router&@controller")
-                .unwrap()
-                .filter_node_configs(nodes.iter()),
-        );
-
-        assert_eq!(
-            &HashSet::from_iter([node!("beta")]),
-            &NodeFilter::new("@router&@infra-*")
-                .unwrap()
-                .filter_node_configs(nodes.iter()),
-        );
-
-        assert_eq!(
-            &HashSet::from_iter([node!("alpha")]),
-            &NodeFilter::new("!@router&@infra-*")
-                .unwrap()
-                .filter_node_configs(nodes.iter()),
-        );
     }
 }
@@ -7,8 +7,6 @@
 pub mod plain;
 pub mod spinner;

-use std::io::IsTerminal;
-
 use async_trait::async_trait;
 use tokio::sync::mpsc::{self, UnboundedReceiver as TokioReceiver, UnboundedSender as TokioSender};

@@ -92,7 +90,7 @@ pub enum LineStyle {

 impl SimpleProgressOutput {
     pub fn new(verbose: bool) -> Self {
-        let tty = std::io::stdout().is_terminal();
+        let tty = atty::is(atty::Stream::Stdout);

         if verbose || !tty {
             Self::Plain(PlainOutput::new())
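The only change in this final hunk is how a TTY is detected: the left side uses std's `IsTerminal` trait (stable since Rust 1.70), the right side the external `atty` crate. A minimal sketch of the std variant, with the plain-versus-fancy decision it feeds written out for illustration:

use std::io::IsTerminal;

fn main() {
    // Same check as the left-hand side of the hunk above.
    let tty = std::io::stdout().is_terminal();

    // Mirrors the branch in SimpleProgressOutput::new: fall back to plain
    // output when stdout is not attached to a terminal.
    if tty {
        println!("stdout is a terminal; spinner output is possible");
    } else {
        println!("stdout is not a terminal; plain output would be used");
    }
}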