feat(tvix/derivation): serialize Nix Derivation

This adds a Derivation structure and allows writing it to anything that implements std::fmt::Write.
The implementation is based on the go-nix version.

Change-Id: Ib54e1202b5c67f5d206b21bc109a751e971064cf
Reviewed-on: https://cl.tvl.fyi/c/depot/+/7659
Reviewed-by: flokli <flokli@flokli.de>
Reviewed-by: tazjin <tazjin@tvl.su>
Tested-by: BuildkiteCI
This commit is contained in:
Jürgen Hahn 2022-12-28 10:28:37 +01:00 committed by jrhahn
parent 42fe3941c2
commit bb185b2c6e
22 changed files with 516 additions and 0 deletions

10
tvix/Cargo.lock generated
View file

@ -476,6 +476,16 @@ dependencies = [
"syn 1.0.103",
]
[[package]]
name = "derivation"
version = "0.1.0"
dependencies = [
"glob",
"serde",
"serde_json",
"test-generator",
]
[[package]]
name = "diff"
version = "0.1.13"

View file

@ -33,6 +33,16 @@ rec {
# You can override the features with
# workspaceMembers."${crateName}".build.override { features = [ "default" "feature1" ... ]; }.
workspaceMembers = {
"derivation" = rec {
packageId = "derivation";
build = internal.buildRustCrateWithFeatures {
packageId = "derivation";
};
# Debug support which might change between releases.
# File a bug if you depend on any for non-debug work!
debug = internal.debugCrate { inherit packageId; };
};
"nix-cli" = rec {
packageId = "nix-cli";
build = internal.buildRustCrateWithFeatures {
@ -1433,6 +1443,47 @@ rec {
}
];
};
"derivation" = rec {
crateName = "derivation";
version = "0.1.0";
edition = "2021";
# We can't filter paths with references in Nix 2.4
# See https://github.com/NixOS/nix/issues/5410
src =
if (lib.versionOlder builtins.nixVersion "2.4pre20211007")
then lib.cleanSourceWith { filter = sourceFilter; src = ./derivation; }
else ./derivation;
dependencies = [
{
name = "blake3";
packageId = "blake3";
features = [ "rayon" "std" ];
}
{
name = "maplit";
packageId = "maplit";
}
{
name = "prost";
packageId = "prost";
}
{
name = "tonic";
packageId = "tonic";
}
];
buildDependencies = [
{
name = "prost-build";
packageId = "prost-build";
}
{
name = "tonic-build";
packageId = "tonic-build";
}
];
};
"diff" = rec {
crateName = "diff";
@ -2657,6 +2708,16 @@ rec {
"value-bag" = [ "dep:value-bag" ];
};
};
"maplit" = rec {
crateName = "maplit";
version = "1.0.2";
edition = "2015";
sha256 = "07b5kjnhrrmfhgqm9wprjw8adx6i225lqp49gasgqg74lahnabiy";
authors = [
"bluss"
];
};
"matchit" = rec {
crateName = "matchit";
version = "0.7.0";

View file

@ -19,6 +19,7 @@
members = [
"cli",
"derivation",
"eval",
"eval/builtin-macros",
"nar",

View file

@ -0,0 +1,19 @@
[package]
name = "derivation"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
glob = "0.3.0"
serde = { version = "1.0", features = ["derive"] }
[dev-dependencies.test-generator]
# This fork of test-generator adds support for cargo workspaces, see
# also https://github.com/frehberg/test-generator/pull/14
git = "https://github.com/JamesGuthrie/test-generator.git"
rev = "82e799979980962aec1aa324ec6e0e4cad781f41"
[dev-dependencies]
serde_json = "1.0"

216
tvix/derivation/src/lib.rs Normal file
View file

@ -0,0 +1,216 @@
use serde::{Deserialize, Serialize};
use std::{collections::BTreeMap, fmt, fmt::Write};
#[cfg(test)]
mod tests;
// Keyword that opens the textual (ATerm) representation of a derivation:
// `Derive(...)`.
const DERIVATION_PREFIX: &str = "Derive";
// Single-character tokens of the ATerm format.
const PAREN_OPEN: char = '(';
const PAREN_CLOSE: char = ')';
const BRACKET_OPEN: char = '[';
const BRACKET_CLOSE: char = ']';
const COMMA: char = ',';
const QUOTE: char = '"';
// Escape mappings for characters that must be backslash-escaped inside a
// quoted ATerm string. The backslash entry comes first so that applying the
// replacements in order does not re-escape backslashes introduced by the
// later entries.
const STRING_ESCAPER: [(char, &str); 5] = [
    ('\\', "\\\\"),
    ('\n', "\\n"),
    ('\r', "\\r"),
    ('\t', "\\t"),
    ('\"', "\\\""),
];
/// Serde default for the optional `Output` fields: an empty string.
/// (Referenced by name in `#[serde(default = "default_resource")]`.)
fn default_resource() -> String {
    String::new()
}
/// A single output of a `Derivation`.
#[derive(Serialize, Deserialize)]
pub struct Output {
    /// Store path of this output.
    path: String,
    /// Hash algorithm, e.g. `"r:sha256"` in the test fixtures; defaults to
    /// the empty string when absent from the JSON (see `default_resource`).
    #[serde(default = "default_resource")]
    hash_algorithm: String,
    /// Expected output hash; defaults to the empty string when absent from
    /// the JSON (see `default_resource`).
    #[serde(default = "default_resource")]
    hash: String,
}
/// In-memory representation of a Nix derivation.
///
/// The maps are `BTreeMap`s, so iteration — and therefore serialization —
/// is deterministic and sorted by key.
#[derive(Serialize, Deserialize)]
pub struct Derivation {
    /// Output name (e.g. `"out"`) to its `Output` description.
    outputs: BTreeMap<String, Output>,
    /// Paths of input sources.
    input_sources: Vec<String>,
    /// Input derivation path to the list of output names consumed from it.
    input_derivations: BTreeMap<String, Vec<String>>,
    platform: String,
    builder: String,
    /// Command-line arguments passed to the builder.
    arguments: Vec<String>,
    /// Environment variables for the builder, sorted by key.
    environment: BTreeMap<String, String>,
}
/// Quotes `s` for the ATerm derivation format: wraps it in double quotes and
/// backslash-escapes `\`, newline, carriage return, tab and `"` (the same
/// mappings as `STRING_ESCAPER`, applied in a single pass).
///
/// Takes `&str` instead of `&String` (callers passing `&String` still work
/// via deref coercion), and escapes in one pass instead of cloning the input
/// and re-scanning it once per escape sequence.
fn escape_string(s: &str) -> String {
    // +2 for the surrounding quotes; escapes may grow it further.
    let mut escaped = String::with_capacity(s.len() + 2);
    escaped.push('"');
    for c in s.chars() {
        match c {
            '\\' => escaped.push_str("\\\\"),
            '\n' => escaped.push_str("\\n"),
            '\r' => escaped.push_str("\\r"),
            '\t' => escaped.push_str("\\t"),
            '"' => escaped.push_str("\\\""),
            _ => escaped.push(c),
        }
    }
    escaped.push('"');
    escaped
}
/// Writes `elements` to `writer` as a delimited list: `open`, then the
/// elements joined by commas (each wrapped in double quotes when `quote`
/// is set), then `closing`.
///
/// Replaces the duplicated `if quote { write quote }` branches with a single
/// quote string selected once, and drops the non-idiomatic trailing `return`.
fn write_array_elements(
    writer: &mut impl Write,
    quote: bool,
    open: &str,
    closing: &str,
    elements: Vec<&String>,
) -> Result<(), fmt::Error> {
    writer.write_str(open)?;
    // Empty string when quoting is disabled, so the write is a no-op.
    let quote_str = if quote { "\"" } else { "" };
    for (index, element) in elements.iter().enumerate() {
        if index > 0 {
            writer.write_char(',')?;
        }
        writer.write_str(quote_str)?;
        writer.write_str(element)?;
        writer.write_str(quote_str)?;
    }
    writer.write_str(closing)?;
    Ok(())
}
/// Serializes `derivation` into the textual ATerm format used by Nix —
/// `Derive([outputs],[input_drvs],[input_srcs],platform,builder,[args],[env])`
/// — and writes it to `writer`.
///
/// The `BTreeMap` fields iterate in key order, so the output is
/// deterministic. Idiom fixes over the original: the identity
/// `.map(|s| s)` calls are dropped, the redundant `&….as_str()`
/// double-references are simplified, and the trailing `return Ok(());`
/// becomes a tail expression. The sequence of writes is unchanged.
pub fn serialize_derivation(
    derivation: Derivation,
    writer: &mut impl Write,
) -> Result<(), fmt::Error> {
    writer.write_str(DERIVATION_PREFIX)?;
    writer.write_char(PAREN_OPEN)?;
    // Step 1: Write outputs as [("name","path","hashAlgo","hash"),...]
    {
        writer.write_char(BRACKET_OPEN)?;
        for (ii, (output_name, output)) in derivation.outputs.iter().enumerate() {
            if ii > 0 {
                writer.write_char(COMMA)?;
            }
            // TODO(jrhahn) option to strip output
            let elements = vec![
                output_name,
                &output.path,
                &output.hash_algorithm,
                &output.hash,
            ];
            write_array_elements(
                writer,
                true,
                &PAREN_OPEN.to_string(),
                &PAREN_CLOSE.to_string(),
                elements,
            )?;
        }
        writer.write_char(BRACKET_CLOSE)?;
    }
    // Step 2: Write input_derivations as [("drv path",["out",...]),...]
    {
        writer.write_char(COMMA)?;
        writer.write_char(BRACKET_OPEN)?;
        for (ii, (input_derivation_path, input_derivation)) in
            derivation.input_derivations.iter().enumerate()
        {
            if ii > 0 {
                writer.write_char(COMMA)?;
            }
            writer.write_char(PAREN_OPEN)?;
            writer.write_char(QUOTE)?;
            writer.write_str(input_derivation_path.as_str())?;
            writer.write_char(QUOTE)?;
            writer.write_char(COMMA)?;
            write_array_elements(
                writer,
                true,
                &BRACKET_OPEN.to_string(),
                &BRACKET_CLOSE.to_string(),
                input_derivation.iter().collect(),
            )?;
            writer.write_char(PAREN_CLOSE)?;
        }
        writer.write_char(BRACKET_CLOSE)?;
    }
    // Step 3: Write input_sources as a quoted list of store paths
    {
        writer.write_char(COMMA)?;
        write_array_elements(
            writer,
            true,
            &BRACKET_OPEN.to_string(),
            &BRACKET_CLOSE.to_string(),
            derivation.input_sources.iter().collect(),
        )?;
    }
    // Step 4: Write platform (escaped and quoted)
    {
        writer.write_char(COMMA)?;
        writer.write_str(&escape_string(&derivation.platform))?;
    }
    // Step 5: Write builder (escaped and quoted)
    {
        writer.write_char(COMMA)?;
        writer.write_str(&escape_string(&derivation.builder))?;
    }
    // Step 6: Write arguments as a quoted list
    {
        writer.write_char(COMMA)?;
        write_array_elements(
            writer,
            true,
            &BRACKET_OPEN.to_string(),
            &BRACKET_CLOSE.to_string(),
            derivation.arguments.iter().collect(),
        )?;
    }
    // Step 7: Write env as [("key","value"),...]; the pairs are escaped
    // here, so write_array_elements must not quote them again.
    {
        writer.write_char(COMMA)?;
        writer.write_char(BRACKET_OPEN)?;
        for (ii, (key, environment)) in derivation.environment.iter().enumerate() {
            if ii > 0 {
                writer.write_char(COMMA)?;
            }
            // TODO(jrhahn) add strip option
            write_array_elements(
                writer,
                false,
                &PAREN_OPEN.to_string(),
                &PAREN_CLOSE.to_string(),
                vec![&escape_string(key), &escape_string(environment)],
            )?;
        }
        writer.write_char(BRACKET_CLOSE)?;
    }
    // Step 8: Close Derive call
    writer.write_char(PAREN_CLOSE)?;
    Ok(())
}

View file

@ -0,0 +1 @@
Derive([("out","/nix/store/4q0pg5zpfmznxscq3avycvf9xdvx50n3-bar","r:sha256","08813cbee9903c62be4c5027726a418a300da4500b2d369d3af9286f4815ceba")],[],[],":",":",[],[("builder",":"),("name","bar"),("out","/nix/store/4q0pg5zpfmznxscq3avycvf9xdvx50n3-bar"),("outputHash","08813cbee9903c62be4c5027726a418a300da4500b2d369d3af9286f4815ceba"),("outputHashAlgo","sha256"),("outputHashMode","recursive"),("system",":")])

View file

@ -0,0 +1,23 @@
{
"outputs": {
"out": {
"path": "/nix/store/4q0pg5zpfmznxscq3avycvf9xdvx50n3-bar",
"hash_algorithm": "r:sha256",
"hash": "08813cbee9903c62be4c5027726a418a300da4500b2d369d3af9286f4815ceba"
}
},
"input_sources": [],
"input_derivations": {},
"platform": ":",
"builder": ":",
"arguments": [],
"environment": {
"builder": ":",
"name": "bar",
"out": "/nix/store/4q0pg5zpfmznxscq3avycvf9xdvx50n3-bar",
"outputHash": "08813cbee9903c62be4c5027726a418a300da4500b2d369d3af9286f4815ceba",
"outputHashAlgo": "sha256",
"outputHashMode": "recursive",
"system": ":"
}
}

View file

@ -0,0 +1 @@
Derive([("out","/nix/store/pzr7lsd3q9pqsnb42r9b23jc5sh8irvn-nested-json","","")],[],[],":",":",[],[("builder",":"),("json","{\"hello\":\"moto\\n\"}"),("name","nested-json"),("out","/nix/store/pzr7lsd3q9pqsnb42r9b23jc5sh8irvn-nested-json"),("system",":")])

View file

@ -0,0 +1,19 @@
{
"outputs": {
"out": {
"path": "/nix/store/pzr7lsd3q9pqsnb42r9b23jc5sh8irvn-nested-json"
}
},
"input_sources": [],
"input_derivations": {},
"platform": ":",
"builder": ":",
"arguments": [],
"environment": {
"builder": ":",
"json": "{\"hello\":\"moto\\n\"}",
"name": "nested-json",
"out": "/nix/store/pzr7lsd3q9pqsnb42r9b23jc5sh8irvn-nested-json",
"system": ":"
}
}

View file

@ -0,0 +1 @@
Derive([("out","/nix/store/5vyvcwah9l9kf07d52rcgdk70g2f4y13-foo","","")],[("/nix/store/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv",["out"])],[],":",":",[],[("bar","/nix/store/4q0pg5zpfmznxscq3avycvf9xdvx50n3-bar"),("builder",":"),("name","foo"),("out","/nix/store/5vyvcwah9l9kf07d52rcgdk70g2f4y13-foo"),("system",":")])

View file

@ -0,0 +1,23 @@
{
"outputs": {
"out": {
"path": "/nix/store/5vyvcwah9l9kf07d52rcgdk70g2f4y13-foo"
}
},
"input_sources": [],
"input_derivations": {
"/nix/store/0hm2f1psjpcwg8fijsmr4wwxrx59s092-bar.drv": [
"out"
]
},
"platform": ":",
"builder": ":",
"arguments": [],
"environment": {
"bar": "/nix/store/4q0pg5zpfmznxscq3avycvf9xdvx50n3-bar",
"builder": ":",
"name": "foo",
"out": "/nix/store/5vyvcwah9l9kf07d52rcgdk70g2f4y13-foo",
"system": ":"
}
}

View file

@ -0,0 +1 @@
Derive([("out","/nix/store/vgvdj6nf7s8kvfbl2skbpwz9kc7xjazc-unicode","","")],[],[],":",":",[],[("builder",":"),("letters","räksmörgås\nrødgrød med fløde\nLübeck\n肥猪\nこんにちは / 今日は\n🌮\n"),("name","unicode"),("out","/nix/store/vgvdj6nf7s8kvfbl2skbpwz9kc7xjazc-unicode"),("system",":")])

View file

@ -0,0 +1,19 @@
{
"outputs": {
"out": {
"path": "/nix/store/vgvdj6nf7s8kvfbl2skbpwz9kc7xjazc-unicode"
}
},
"input_sources": [],
"input_derivations": {},
"platform": ":",
"builder": ":",
"arguments": [],
"environment": {
"builder": ":",
"letters": "räksmörgås\nrødgrød med fløde\nLübeck\n肥猪\nこんにちは / 今日は\n🌮\n",
"name": "unicode",
"out": "/nix/store/vgvdj6nf7s8kvfbl2skbpwz9kc7xjazc-unicode",
"system": ":"
}
}

View file

@ -0,0 +1 @@
Derive([("out","/nix/store/6a39dl014j57bqka7qx25k0vb20vkqm6-structured-attrs","","")],[],[],":",":",[],[("__json","{\"builder\":\":\",\"name\":\"structured-attrs\",\"system\":\":\"}"),("out","/nix/store/6a39dl014j57bqka7qx25k0vb20vkqm6-structured-attrs")])

View file

@ -0,0 +1,16 @@
{
"outputs": {
"out": {
"path": "/nix/store/6a39dl014j57bqka7qx25k0vb20vkqm6-structured-attrs"
}
},
"input_sources": [],
"input_derivations": {},
"platform": ":",
"builder": ":",
"arguments": [],
"environment": {
"__json": "{\"builder\":\":\",\"name\":\"structured-attrs\",\"system\":\":\"}",
"out": "/nix/store/6a39dl014j57bqka7qx25k0vb20vkqm6-structured-attrs"
}
}

View file

@ -0,0 +1 @@
Derive([("out","/nix/store/fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo","","")],[("/nix/store/ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv",["out"])],[],":",":",[],[("bar","/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar"),("builder",":"),("name","foo"),("out","/nix/store/fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo"),("system",":")])

View file

@ -0,0 +1,23 @@
{
"outputs": {
"out": {
"path": "/nix/store/fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo"
}
},
"input_sources": [],
"input_derivations": {
"/nix/store/ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv": [
"out"
]
},
"platform": ":",
"builder": ":",
"arguments": [],
"environment": {
"bar": "/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar",
"builder": ":",
"name": "foo",
"out": "/nix/store/fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo",
"system": ":"
}
}

View file

@ -0,0 +1 @@
Derive([("lib","/nix/store/2vixb94v0hy2xc6p7mbnxxcyc095yyia-has-multi-out-lib","",""),("out","/nix/store/55lwldka5nyxa08wnvlizyqw02ihy8ic-has-multi-out","","")],[],[],":",":",[],[("builder",":"),("lib","/nix/store/2vixb94v0hy2xc6p7mbnxxcyc095yyia-has-multi-out-lib"),("name","has-multi-out"),("out","/nix/store/55lwldka5nyxa08wnvlizyqw02ihy8ic-has-multi-out"),("outputs","out lib"),("system",":")])

View file

@ -0,0 +1,23 @@
{
"outputs": {
"lib": {
"path": "/nix/store/2vixb94v0hy2xc6p7mbnxxcyc095yyia-has-multi-out-lib"
},
"out": {
"path": "/nix/store/55lwldka5nyxa08wnvlizyqw02ihy8ic-has-multi-out"
}
},
"input_sources": [],
"input_derivations": {},
"platform": ":",
"builder": ":",
"arguments": [],
"environment": {
"builder": ":",
"lib": "/nix/store/2vixb94v0hy2xc6p7mbnxxcyc095yyia-has-multi-out-lib",
"name": "has-multi-out",
"out": "/nix/store/55lwldka5nyxa08wnvlizyqw02ihy8ic-has-multi-out",
"outputs": "out lib",
"system": ":"
}
}

View file

@ -0,0 +1 @@
Derive([("out","/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar","r:sha1","0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33")],[],[],":",":",[],[("builder",":"),("name","bar"),("out","/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar"),("outputHash","0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"),("outputHashAlgo","sha1"),("outputHashMode","recursive"),("system",":")])

View file

@ -0,0 +1,23 @@
{
"outputs": {
"out": {
"path": "/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar",
"hash_algorithm": "r:sha1",
"hash": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"
}
},
"input_sources": [],
"input_derivations": {},
"platform": ":",
"builder": ":",
"arguments": [],
"environment": {
"builder": ":",
"name": "bar",
"out": "/nix/store/mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar",
"outputHash": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33",
"outputHashAlgo": "sha1",
"outputHashMode": "recursive",
"system": ":"
}
}

View file

@ -0,0 +1,32 @@
use super::{serialize_derivation, Derivation};
use std::fs::File;
use std::io::Read;
use std::path::Path;
use test_generator::test_resources;
/// Reads the entire file at `path` into a `String`, panicking (with the
/// offending path in the message) on any I/O error.
///
/// Uses `std::fs::read_to_string` instead of the hand-rolled
/// `File::open` + `read_to_string` dance.
fn read_file(path: &str) -> String {
    std::fs::read_to_string(path).unwrap_or_else(|e| panic!("failed to read {}: {}", path, e))
}
/// Round-trip check for one fixture: parse `<path>.json` into a
/// `Derivation`, serialize it back to the ATerm format, and require the
/// result to match the `.drv` file byte-for-byte.
fn assert_derivation_ok(path_to_drv_file: &str) {
    // Parse the JSON representation of the derivation.
    let json = read_file(&format!("{}.json", path_to_drv_file));
    let derivation: Derivation = serde_json::from_str(&json).expect("JSON was not well-formatted");

    // Serialize it and compare against the expected .drv fixture.
    let mut actual = String::new();
    serialize_derivation(derivation, &mut actual).unwrap();
    assert_eq!(read_file(path_to_drv_file), actual);
}
// Generates one test per .drv fixture under src/tests/derivation_tests/:
// each parses `<fixture>.json` and checks that serializing it reproduces
// the .drv file exactly (see assert_derivation_ok).
#[test_resources("src/tests/derivation_tests/*.drv")]
fn derivation_files_ok(path: &str) {
    assert_derivation_ok(path);
}