refactor(tvix/glue/fetchers): use named field for structs
This allows giving more self-descriptive names, as well as documenting
each field individually.

Change-Id: Ide164d684b7f819aac279cc8e657c02fc24d093f
Reviewed-on: https://cl.tvl.fyi/c/depot/+/11786
Tested-by: BuildkiteCI
Autosubmit: flokli <flokli@flokli.de>
Reviewed-by: Connor Brewster <cbrewster@hey.com>
parent a7bf5b975f
commit e1d3fa240a

3 changed files with 76 additions and 30 deletions
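For context, here is a minimal self-contained sketch of the refactoring pattern this commit applies. The FetchBefore/FetchAfter names are hypothetical, and plain String stands in for the Url and NixHash types used in the real code; the snippet is illustrative, not the actual tvix definitions.

    // Before: a tuple variant. Fields are positional, and only the
    // variant as a whole can carry a doc comment.
    #[derive(Debug)]
    enum FetchBefore {
        URL(String, Option<String>),
    }

    // After: named fields. Each field gets a self-describing name and
    // its own doc comment, and call sites spell out what they pass.
    #[derive(Debug)]
    enum FetchAfter {
        URL {
            /// The URL to fetch from.
            url: String,
            /// The expected hash of the file.
            exp_hash: Option<String>,
        },
    }

    fn main() {
        let fetch = FetchAfter::URL {
            url: "https://example.com/src.tar.gz".into(),
            exp_hash: None,
        };
        // Matches bind fields by name and skip the rest with `..`,
        // instead of counting positional `_` slots.
        if let FetchAfter::URL { url, .. } = &fetch {
            println!("fetching {}", url);
        }
        let _ = FetchBefore::URL("https://example.com/src.tar.gz".into(), None);
    }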
@@ -6,7 +6,6 @@ use crate::{
     tvix_store_io::TvixStoreIO,
 };
 use nix_compat::nixhash;
-use nix_compat::nixhash::NixHash;
 use std::rc::Rc;
 use tracing::info;
 use tvix_eval::builtin_macros::builtins;
@@ -80,6 +79,7 @@ async fn extract_fetch_args(
 #[builtins(state = "Rc<TvixStoreIO>")]
 pub(crate) mod fetcher_builtins {
     use crate::builtins::FetcherError;
+    use nix_compat::nixhash::NixHash;
     use url::Url;

     use super::*;
@@ -147,7 +147,10 @@ pub(crate) mod fetcher_builtins {
         fetch_lazy(
             state,
             name,
-            Fetch::URL(url, args.sha256.map(NixHash::Sha256)),
+            Fetch::URL {
+                url,
+                exp_hash: args.sha256.map(NixHash::Sha256),
+            },
         )
     }
@@ -172,7 +175,14 @@ pub(crate) mod fetcher_builtins {
         let url = Url::parse(&args.url_str)
             .map_err(|e| ErrorKind::TvixError(Rc::new(FetcherError::InvalidUrl(e))))?;

-        fetch_lazy(state, name, Fetch::Tarball(url, args.sha256))
+        fetch_lazy(
+            state,
+            name,
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256: args.sha256,
+            },
+        )
     }

     #[builtin("fetchGit")]
@@ -25,10 +25,14 @@ use decompression::DecompressedReader;
 /// Representing options for doing a fetch.
 #[derive(Clone, Eq, PartialEq)]
 pub enum Fetch {
-    /// Fetch a literal file from the given URL, with an optional expected
-    /// NixHash of it.
-    /// TODO: check if this is *always* sha256, and if so, make it [u8; 32].
-    URL(Url, Option<NixHash>),
+    /// Fetch a literal file from the given URL,
+    /// with an optional expected hash.
+    URL {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the file.
+        exp_hash: Option<NixHash>,
+    },

     /// Fetch a tarball from the given URL and unpack.
     /// The file must be a tape archive (.tar), optionally compressed with gzip,
@@ -37,7 +41,12 @@ pub enum Fetch {
     /// so it is best if the tarball contains a single directory at top level.
     /// Optionally, a sha256 digest can be provided to verify the unpacked
     /// contents against.
-    Tarball(Url, Option<[u8; 32]>),
+    Tarball {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the contents, as NAR.
+        exp_nar_sha256: Option<[u8; 32]>,
+    },

     /// TODO
     Git(),
@@ -60,7 +69,7 @@ fn redact_url(url: &Url) -> Url {
 impl std::fmt::Debug for Fetch {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Fetch::URL(url, exp_hash) => {
+            Fetch::URL { url, exp_hash } => {
                 let url = redact_url(url);
                 if let Some(exp_hash) = exp_hash {
                     write!(f, "URL [url: {}, exp_hash: Some({})]", &url, exp_hash)
@@ -68,14 +77,17 @@ impl std::fmt::Debug for Fetch {
                     write!(f, "URL [url: {}, exp_hash: None]", &url)
                 }
             }
-            Fetch::Tarball(url, exp_digest) => {
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256,
+            } => {
                 let url = redact_url(url);
-                if let Some(exp_digest) = exp_digest {
+                if let Some(exp_nar_sha256) = exp_nar_sha256 {
                     write!(
                         f,
-                        "Tarball [url: {}, exp_hash: Some({})]",
+                        "Tarball [url: {}, exp_nar_sha256: Some({})]",
                         url,
-                        NixHash::Sha256(*exp_digest)
+                        NixHash::Sha256(*exp_nar_sha256)
                     )
                 } else {
                     write!(f, "Tarball [url: {}, exp_hash: None]", url)
@@ -95,9 +107,24 @@ impl Fetch {
         name: &'a str,
     ) -> Result<Option<StorePathRef<'a>>, BuildStorePathError> {
         let ca_hash = match self {
-            Fetch::URL(_, Some(nixhash)) => CAHash::Flat(nixhash.clone()),
-            Fetch::Tarball(_, Some(nar_sha256)) => CAHash::Nar(NixHash::Sha256(*nar_sha256)),
-            _ => return Ok(None),
+            Fetch::URL {
+                exp_hash: Some(exp_hash),
+                ..
+            } => CAHash::Flat(exp_hash.clone()),
+
+            Fetch::Tarball {
+                exp_nar_sha256: Some(exp_nar_sha256),
+                ..
+            } => CAHash::Nar(NixHash::Sha256(*exp_nar_sha256)),
+
+            Fetch::Git() => todo!(),
+
+            // everything else
+            Fetch::URL { exp_hash: None, .. }
+            | Fetch::Tarball {
+                exp_nar_sha256: None,
+                ..
+            } => return Ok(None),
         };

         // calculate the store path of this fetch
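Design note on the hunk above: the old catch-all arm _ => return Ok(None) is replaced by explicit arms, including Fetch::Git() => todo!() and the spelled-out exp_hash: None / exp_nar_sha256: None cases. Because the new match has no wildcard, adding another Fetch variant later will fail to compile here rather than silently fall through to Ok(None).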
@@ -190,7 +217,7 @@ where
     /// didn't match the previously communicated hash contained inside the FetchArgs.
     pub async fn ingest(&self, fetch: Fetch) -> Result<(Node, CAHash, u64), FetcherError> {
         match fetch {
-            Fetch::URL(url, exp_hash) => {
+            Fetch::URL { url, exp_hash } => {
                 // Construct a AsyncRead reading from the data as its downloaded.
                 let mut r = self.download(url.clone()).await?;
@@ -199,7 +226,7 @@ where

                 // Copy the contents from the download reader to the blob writer.
                 // Calculate the digest of the file received, depending on the
-                // communicated expected hash (or sha256 if none provided).
+                // communicated expected hash algo (or sha256 if none provided).
                 let (actual_hash, blob_size) = match exp_hash
                     .as_ref()
                     .map(NixHash::algo)
@@ -243,7 +270,10 @@ where
                     blob_size,
                 ))
             }
-            Fetch::Tarball(url, exp_nar_sha256) => {
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256,
+            } => {
                 // Construct a AsyncRead reading from the data as its downloaded.
                 let r = self.download(url.clone()).await?;
@@ -396,7 +426,10 @@ mod tests {
             .unwrap(),
         );

-        let fetch = Fetch::URL(url, Some(exp_hash));
+        let fetch = Fetch::URL {
+            url,
+            exp_hash: Some(exp_hash),
+        };
         assert_eq!(
             "06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch",
             &fetch
@@ -410,10 +443,13 @@ mod tests {
     #[test]
     fn fetch_tarball_store_path() {
         let url = Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap();
-        let exp_nixbase32 =
+        let exp_sha256 =
             nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm")
                 .unwrap();
-        let fetch = Fetch::Tarball(url, Some(exp_nixbase32));
+        let fetch = Fetch::Tarball {
+            url,
+            exp_nar_sha256: Some(exp_sha256),
+        };

         assert_eq!(
             "7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source",
@@ -160,16 +160,16 @@ mod tests {
         static ref FOO_OUT_PATH: StorePath =
             StorePath::from_bytes(b"fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo").expect("must parse");

-        static ref FETCH_URL : Fetch = Fetch::URL(
-            Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
-            Some(NixHash::Sha256(nixbase32::decode_fixed("0nawkl04sj7psw6ikzay7kydj3dhd0fkwghcsf5rzaw4bmp4kbax").unwrap()))
-        );
+        static ref FETCH_URL : Fetch = Fetch::URL{
+            url: Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
+            exp_hash: Some(NixHash::Sha256(nixbase32::decode_fixed("0nawkl04sj7psw6ikzay7kydj3dhd0fkwghcsf5rzaw4bmp4kbax").unwrap()))
+        };
         static ref FETCH_URL_OUT_PATH: StorePath = StorePath::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap();

-        static ref FETCH_TARBALL : Fetch = Fetch::Tarball(
-            Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap(),
-            Some(nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm").unwrap())
-        );
+        static ref FETCH_TARBALL : Fetch = Fetch::Tarball{
+            url: Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap(),
+            exp_nar_sha256: Some(nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm").unwrap())
+        };
         static ref FETCH_TARBALL_OUT_PATH: StorePath = StorePath::from_bytes(b"7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source").unwrap();
     }