feat(tvix-store): Improve tvix-store copy.

This change contains 2 improvements to the tvix-store copy command:

1. Allows reading the reference graph from stdin, using the `-` argument.
2. Supports the JSON representation produced by the `nix path-info --json` command.

In general it makes it easier and faster to import arbitrary closures from an
existing nix store, e.g. with the following command:

```
nix path-info ./result --json --closure-size --recursive | \
  jq -s '{closure: add}' | \
  tvix-store copy -
```

Change-Id: Id6eea2993da233ecfbdc186f1a8c37735b686264
Reviewed-on: https://cl.tvl.fyi/c/depot/+/12765
Tested-by: BuildkiteCI
Reviewed-by: flokli <flokli@flokli.de>
This commit is contained in:
parent b1764e1109
commit 6aada91062

3 changed files with 117 additions and 12 deletions
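For context on what `tvix-store copy -` now consumes: `nix path-info --json` emits a JSON array of path infos, and the `jq -s '{closure: add}'` step wraps that array in an object keyed (as the `jq` filter suggests) by `closure`, which is what the `ReferenceGraph` struct touched further down parses. The following is a minimal, self-contained sketch of that shape, using simplified stand-in types rather than the real `ExportedPathInfo`/`ReferenceGraph`; type names and field selection beyond what the diff shows are assumptions, and the signature value is shortened.

```rust
// Sketch only: simplified stand-ins for the real ExportedPathInfo/ReferenceGraph types.
use serde::Deserialize;
use std::collections::BTreeSet;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct SimplePathInfo {
    path: String,
    nar_hash: String,
    nar_size: u64,
    #[serde(default)]
    references: BTreeSet<String>,
    #[serde(default)]
    signatures: Vec<String>,
    deriver: Option<String>,
}

#[derive(Debug, Deserialize)]
struct SimpleReferenceGraph {
    // `jq -s '{closure: add}'` puts the `nix path-info --json` array under this key.
    closure: Vec<SimplePathInfo>,
}

fn main() -> Result<(), serde_json::Error> {
    // Shape based on the test JSON in this commit, wrapped the way the jq invocation does.
    // The signature string is truncated here; it is only treated as opaque data.
    let doc = r#"{"closure":[{"closureSize":10756176,"deriver":"/nix/store/vs9976cyyxpykvdnlv7x85fpp3shn6ij-libcxx-16.0.6.drv","narHash":"sha256-E73Nt0NAKGxCnsyBFDUaCAbA+wiF5qjq1O9J7WrnT0E=","narSize":7020664,"path":"/nix/store/z6r3bn5l51679pwkvh9nalp6c317z34m-libcxx-16.0.6-dev","references":["/nix/store/lzzd5jgybnpfj86xkcpnd54xgwc4m457-libcxx-16.0.6"],"registrationTime":1730048276,"signatures":["cache.nixos.org-1:cTdh..."],"valid":true}]}"#;

    let graph: SimpleReferenceGraph = serde_json::from_str(doc)?;
    assert_eq!(graph.closure.len(), 1);
    println!("{} paths in closure", graph.closure.len());
    Ok(())
}
```

Unknown fields such as `closureSize`, `registrationTime` and `valid` are ignored by serde's defaults, mirroring the comment in the diff below that nothing consumes `valid`.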
@@ -92,6 +92,12 @@ where
             bytes: self.bytes,
         }
     }
+    pub fn to_owned(&self) -> Signature<String> {
+        Signature {
+            name: self.name.to_string(),
+            bytes: self.bytes,
+        }
+    }
 }

 impl<'a, 'de, S> Deserialize<'de> for Signature<S>
@@ -133,6 +139,16 @@ where
     }
 }

+impl<S> std::hash::Hash for Signature<S>
+where
+    S: AsRef<str>,
+{
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        state.write(self.name.as_ref().as_bytes());
+        state.write(&self.bytes);
+    }
+}
+
 #[derive(Debug, thiserror::Error, PartialEq, Eq)]
 pub enum Error {
     #[error("Invalid name: {0}")]
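The two hunks above give `Signature` an owning conversion (`to_owned`, returning `Signature<String>`) and a `Hash` implementation keyed on the signature name and bytes. A rough sketch of how that surface might be exercised; crate and import paths are assumed rather than taken from this diff.

```rust
// Sketch only: exercising the pieces the hunks above add
// (`Signature::to_owned` and the `Hash` impl). Import paths assumed.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

use nix_compat::narinfo::{Signature, SignatureRef};

fn main() {
    // A borrowed signature, e.g. straight out of a deserialized ExportedPathInfo.
    let sig_ref: SignatureRef<'_> = SignatureRef::parse(
        "cache.nixos.org-1:cTdhK6hnpPwtMXFX43CYb7v+CbpAusVI/MORZ3v5aHvpBYNg1MfBHVVeoexMBpNtHA8uFAn0aEsJaLXYIDhJDg==",
    )
    .expect("valid signature");

    // `to_owned` detaches the signature from the borrowed JSON buffer, which is
    // what tvix-store copy needs when building owned PathInfo values.
    let owned: Signature<String> = sig_ref.to_owned();

    // The new Hash impl writes name and bytes, so the borrowed and owned
    // representations hash identically.
    let mut h1 = DefaultHasher::new();
    sig_ref.hash(&mut h1);
    let mut h2 = DefaultHasher::new();
    owned.hash(&mut h2);
    assert_eq!(h1.finish(), h2.finish());
}
```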
@@ -1,4 +1,4 @@
-use crate::{nixbase32, nixhash::NixHash, store_path::StorePathRef};
+use crate::{narinfo::SignatureRef, nixbase32, nixhash::NixHash, store_path::StorePathRef};
 use serde::{Deserialize, Serialize};
 use std::collections::BTreeSet;

@@ -15,7 +15,7 @@ pub struct ExportedPathInfo<'a> {
     #[serde(
         rename = "narHash",
         serialize_with = "to_nix_nixbase32_string",
-        deserialize_with = "from_nix_nixbase32_string"
+        deserialize_with = "from_nix_hash_string"
     )]
     pub nar_sha256: [u8; 32],

@@ -25,11 +25,17 @@ pub struct ExportedPathInfo<'a> {
     #[serde(borrow)]
     pub path: StorePathRef<'a>,

+    #[serde(borrow)]
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub deriver: Option<StorePathRef<'a>>,
+
     /// The list of other Store Paths this Store Path refers to.
     /// StorePathRef does Ord by the nixbase32-encoded string repr, so this is correct.
     pub references: BTreeSet<StorePathRef<'a>>,
     // more recent versions of Nix also have a `valid: true` field here, Nix 2.3 doesn't,
     // and nothing seems to use it.
+    #[serde(default, skip_serializing_if = "Vec::is_empty")]
+    pub signatures: Vec<SignatureRef<'a>>,
 }

 /// ExportedPathInfo are ordered by their `path` field.
@@ -56,18 +62,49 @@ where
 /// The length of a sha256 digest, nixbase32-encoded.
 const NIXBASE32_SHA256_ENCODE_LEN: usize = nixbase32::encode_len(32);

-fn from_nix_nixbase32_string<'de, D>(deserializer: D) -> Result<[u8; 32], D::Error>
+fn from_nix_hash_string<'de, D>(deserializer: D) -> Result<[u8; 32], D::Error>
 where
     D: serde::Deserializer<'de>,
 {
     let str: &'de str = Deserialize::deserialize(deserializer)?;
+    if let Some(digest_str) = str.strip_prefix("sha256:") {
+        return from_nix_nixbase32_string::<D>(digest_str);
+    }
+    if let Some(digest_str) = str.strip_prefix("sha256-") {
+        return from_sri_string::<D>(digest_str);
+    }
+    Err(serde::de::Error::invalid_value(
+        serde::de::Unexpected::Str(str),
+        &"extected a valid nixbase32 or sri narHash",
+    ))
+}

-    let digest_str = str.strip_prefix("sha256:").ok_or_else(|| {
-        serde::de::Error::invalid_value(serde::de::Unexpected::Str(str), &"sha256:…")
+fn from_sri_string<'de, D>(str: &str) -> Result<[u8; 32], D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    let digest: [u8; 32] = data_encoding::BASE64
+        .decode(str.as_bytes())
+        .map_err(|_| {
+            serde::de::Error::invalid_value(
+                serde::de::Unexpected::Str(str),
+                &"valid base64 encoded string",
+            )
+        })?
+        .try_into()
+        .map_err(|_| {
+            serde::de::Error::invalid_value(serde::de::Unexpected::Str(str), &"valid digest len")
         })?;

+    Ok(digest)
+}
+
+fn from_nix_nixbase32_string<'de, D>(str: &str) -> Result<[u8; 32], D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
     let digest_str: [u8; NIXBASE32_SHA256_ENCODE_LEN] =
-        digest_str.as_bytes().try_into().map_err(|_| {
+        str.as_bytes().try_into().map_err(|_| {
             serde::de::Error::invalid_value(serde::de::Unexpected::Str(str), &"valid digest len")
         })?;

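The hunk above replaces the nixbase32-only `narHash` deserializer with a dispatcher: `sha256:<nixbase32>` keeps the old code path, while `sha256-<base64>` (the SRI form emitted by `nix path-info --json`) is base64-decoded into the 32-byte digest. Below is a standalone sketch of just the SRI branch, checked against the digest used in the test further down; the helper name is illustrative and only `data_encoding` (also used by the hunk) is assumed.

```rust
// Sketch only: the SRI ("sha256-<base64>") branch of the new narHash parsing.
use data_encoding::BASE64;

fn parse_sri_sha256(s: &str) -> Option<[u8; 32]> {
    // `narHash` values from `nix path-info --json` look like "sha256-<base64>".
    let b64 = s.strip_prefix("sha256-")?;
    BASE64.decode(b64.as_bytes()).ok()?.try_into().ok()
}

fn main() {
    let digest = parse_sri_sha256("sha256-E73Nt0NAKGxCnsyBFDUaCAbA+wiF5qjq1O9J7WrnT0E=")
        .expect("valid SRI hash");
    // The decoded digest starts 0x13 0xbd ..., matching the hex!() literal in the test below.
    assert_eq!(digest[0], 0x13);
    assert_eq!(digest[1], 0xbd);
    assert_eq!(digest.len(), 32);
}
```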
@@ -110,10 +147,49 @@ mod tests {
                 b"7n0mbqydcipkpbxm24fab066lxk68aqk-libunistring-1.1"
             )
             .expect("must parse"),
+            deriver: None,
             references: BTreeSet::from_iter([StorePathRef::from_bytes(
                 b"7n0mbqydcipkpbxm24fab066lxk68aqk-libunistring-1.1"
             )
             .unwrap()]),
+            signatures: vec![],
+        },
+        deserialized.first().unwrap()
+    );
+}
+
+/// Ensure we can parse output from `nix path-info --json``
+#[test]
+fn serialize_deserialize_from_path_info() {
+    // JSON extracted from
+    // nix path-info /nix/store/z6r3bn5l51679pwkvh9nalp6c317z34m-libcxx-16.0.6-dev --json --closure-size
+    let pathinfos_str_json = r#"[{"closureSize":10756176,"deriver":"/nix/store/vs9976cyyxpykvdnlv7x85fpp3shn6ij-libcxx-16.0.6.drv","narHash":"sha256-E73Nt0NAKGxCnsyBFDUaCAbA+wiF5qjq1O9J7WrnT0E=","narSize":7020664,"path":"/nix/store/z6r3bn5l51679pwkvh9nalp6c317z34m-libcxx-16.0.6-dev","references":["/nix/store/lzzd5jgybnpfj86xkcpnd54xgwc4m457-libcxx-16.0.6"],"registrationTime":1730048276,"signatures":["cache.nixos.org-1:cTdhK6hnpPwtMXFX43CYb7v+CbpAusVI/MORZ3v5aHvpBYNg1MfBHVVeoexMBpNtHA8uFAn0aEsJaLXYIDhJDg=="],"valid":true}]"#;
+
+    let deserialized: BTreeSet<ExportedPathInfo> =
+        serde_json::from_str(pathinfos_str_json).expect("must serialize");
+
+    assert_eq!(
+        &ExportedPathInfo {
+            closure_size: 10756176,
+            nar_sha256: hex!(
+                "13bdcdb74340286c429ecc8114351a0806c0fb0885e6a8ead4ef49ed6ae74f41"
+            ),
+            nar_size: 7020664,
+            path: StorePathRef::from_bytes(
+                b"z6r3bn5l51679pwkvh9nalp6c317z34m-libcxx-16.0.6-dev"
+            )
+            .expect("must parse"),
+            deriver: Some(
+                StorePathRef::from_bytes(
+                    b"vs9976cyyxpykvdnlv7x85fpp3shn6ij-libcxx-16.0.6.drv"
+                )
+                .expect("must parse")
+            ),
+            references: BTreeSet::from_iter([StorePathRef::from_bytes(
+                b"lzzd5jgybnpfj86xkcpnd54xgwc4m457-libcxx-16.0.6"
+            )
+            .unwrap()]),
+            signatures: vec![SignatureRef::parse("cache.nixos.org-1:cTdhK6hnpPwtMXFX43CYb7v+CbpAusVI/MORZ3v5aHvpBYNg1MfBHVVeoexMBpNtHA8uFAn0aEsJaLXYIDhJDg==").expect("must parse")],
         },
         deserialized.first().unwrap()
     );
@@ -85,10 +85,18 @@ enum Commands {
         #[clap(flatten)]
         service_addrs: ServiceUrlsGrpc,

-        /// A path pointing to a JSON file produced by the Nix
+        /// A path pointing to a JSON file(or '-' for stdin) produced by the Nix
         /// `__structuredAttrs` containing reference graph information provided
         /// by the `exportReferencesGraph` feature.
         ///
+        /// Additionally supports the output from the following nix command:
+        ///
+        /// ```notrust
+        /// nix path-info --json --closure-size --recursive <some-path> | \
+        /// jq -s '{closure: add}' | \
+        /// tvix-store copy -
+        /// ```
+        ///
         /// This can be used to invoke tvix-store inside a Nix derivation
         /// copying to a Tvix store (or outside, if the JSON file is copied
         /// out).
@@ -348,9 +356,14 @@ async fn run_cli(
         } => {
             let (blob_service, directory_service, path_info_service, _nar_calculation_service) =
                 tvix_store::utils::construct_services(service_addrs).await?;

             // Parse the file at reference_graph_path.
-            let reference_graph_json = tokio::fs::read(&reference_graph_path).await?;
+            let reference_graph_json = if reference_graph_path == PathBuf::from("-") {
+                let mut writer: Vec<u8> = vec![];
+                tokio::io::copy(&mut tokio::io::stdin(), &mut writer).await?;
+                writer
+            } else {
+                tokio::fs::read(&reference_graph_path).await?
+            };

             #[derive(Deserialize, Serialize)]
             struct ReferenceGraph<'a> {
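The second hunk above is the stdin support: when the positional argument is `-`, the reference graph JSON is drained from stdin instead of read from a file. A standalone sketch of the same pattern follows; the helper name and `main` wiring are illustrative, assuming a tokio runtime with the relevant `fs` and `io` features enabled.

```rust
// Sketch only: the "`-` means stdin" convention from the hunk above,
// pulled out into a standalone async helper.
use std::path::Path;

async fn read_reference_graph(path: &Path) -> std::io::Result<Vec<u8>> {
    if path == Path::new("-") {
        // Drain stdin into an in-memory buffer (Vec<u8> implements tokio's AsyncWrite).
        let mut buf: Vec<u8> = Vec::new();
        tokio::io::copy(&mut tokio::io::stdin(), &mut buf).await?;
        Ok(buf)
    } else {
        // Regular file path: read it in one go.
        tokio::fs::read(path).await
    }
}

#[tokio::main]
async fn main() -> std::io::Result<()> {
    // Roughly what `tvix-store copy -` does before deserializing the jq-wrapped JSON.
    let bytes = read_reference_graph(Path::new("-")).await?;
    eprintln!("read {} bytes of reference graph JSON", bytes.len());
    Ok(())
}
```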
@@ -430,8 +443,8 @@ async fn run_cli(
                     references: elem.references.iter().map(StorePath::to_owned).collect(),
                     nar_size: elem.nar_size,
                     nar_sha256: elem.nar_sha256,
-                    signatures: vec![],
-                    deriver: None,
+                    signatures: elem.signatures.iter().map(|s| s.to_owned()).collect(),
+                    deriver: elem.deriver.map(|p| p.to_owned()),
                     ca: None,
                 };
