refactor(tvix/store/tests): combine tests into one parametrized function
Change-Id: I9ff43b29be68b9840c58286da96fa52927691804
Reviewed-on: https://cl.tvl.fyi/c/depot/+/12507
Reviewed-by: flokli <flokli@flokli.de>
Tested-by: BuildkiteCI
parent c1e69e260d
commit 6deff4d8e9

2 changed files with 103 additions and 179 deletions
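
The commit collapses several hand-written test functions into a single parametrized one built on rstest and rstest_reuse. As a minimal, self-contained sketch of that pattern (illustrative only, not taken from this commit; `example_cases` and `squares` are made-up names, and it assumes `rstest` and `rstest_reuse` as dev-dependencies): a `#[template]` declares the shared `#[case]` list once, and `#[apply]` stamps those cases onto a concrete test.

    use rstest::*;
    use rstest_reuse::{self, *};

    // Declare the case list once; the template body stays empty.
    #[template]
    #[rstest]
    #[case::two(2, 4)]
    #[case::three(3, 9)]
    fn example_cases(#[case] input: u32, #[case] expected: u32) {}

    // Reuse the same cases in a concrete test; one test is generated per #[case].
    #[apply(example_cases)]
    fn squares(#[case] input: u32, #[case] expected: u32) {
        assert_eq!(input * input, expected);
    }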
@@ -1,11 +1,13 @@
 use lazy_static::lazy_static;
-use rstest::*;
+use rstest::{self, *};
+use rstest_reuse::*;
+use std::io;
 use std::sync::Arc;
 pub use tvix_castore::fixtures::*;
 use tvix_castore::{
     blobservice::{BlobService, MemoryBlobService},
     directoryservice::{DirectoryService, MemoryDirectoryService},
-    proto as castorepb,
+    proto as castorepb, Node,
 };
 
 use crate::proto::{
@@ -17,6 +19,10 @@ pub const DUMMY_PATH: &str = "00000000000000000000000000000000-dummy";
 pub const DUMMY_PATH_DIGEST: [u8; 20] = [0; 20];
 
 lazy_static! {
+    pub static ref CASTORE_NODE_SYMLINK: Node = Node::Symlink {
+        target: "/nix/store/somewhereelse".try_into().unwrap(),
+    };
+
     /// The NAR representation of a symlink pointing to `/nix/store/somewhereelse`
     pub static ref NAR_CONTENTS_SYMLINK: Vec<u8> = vec![
         13, 0, 0, 0, 0, 0, 0, 0, b'n', b'i', b'x', b'-', b'a', b'r', b'c', b'h', b'i', b'v', b'e', b'-', b'1', 0,
@@ -31,6 +37,12 @@ lazy_static! {
         1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0 // ")"
     ];
 
+    pub static ref CASTORE_NODE_HELLOWORLD: Node = Node::File {
+        digest: HELLOWORLD_BLOB_DIGEST.clone(),
+        size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
+        executable: false,
+    };
+
     /// The NAR representation of a regular file with the contents "Hello World!"
     pub static ref NAR_CONTENTS_HELLOWORLD: Vec<u8> = vec![
         13, 0, 0, 0, 0, 0, 0, 0, b'n', b'i', b'x', b'-', b'a', b'r', b'c', b'h', b'i', b'v', b'e', b'-', b'1', 0,
@@ -44,6 +56,22 @@ lazy_static! {
         1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0 // ")"
     ];
 
+    pub static ref CASTORE_NODE_TOO_BIG: Node = Node::File {
+        digest: HELLOWORLD_BLOB_DIGEST.clone(),
+        size: 42, // <- note the wrong size here!
+        executable: false,
+    };
+    pub static ref CASTORE_NODE_TOO_SMALL: Node = Node::File {
+        digest: HELLOWORLD_BLOB_DIGEST.clone(),
+        size: 2, // <- note the wrong size here!
+        executable: false,
+    };
+
+    pub static ref CASTORE_NODE_COMPLICATED: Node = Node::Directory {
+        digest: DIRECTORY_COMPLICATED.digest(),
+        size: DIRECTORY_COMPLICATED.size(),
+    };
+
     /// The NAR representation of a more complicated directory structure.
     pub static ref NAR_CONTENTS_COMPLICATED: Vec<u8> = vec![
         13, 0, 0, 0, 0, 0, 0, 0, b'n', b'i', b'x', b'-', b'a', b'r', b'c', b'h', b'i', b'v', b'e', b'-', b'1', 0,
@@ -136,7 +164,47 @@ pub(crate) fn blob_service() -> Arc<dyn BlobService> {
     Arc::from(MemoryBlobService::default())
 }
 
+#[fixture]
+pub(crate) async fn blob_service_with_contents() -> Arc<dyn BlobService> {
+    let blob_service = Arc::from(MemoryBlobService::default());
+    for (blob_contents, blob_digest) in [
+        (EMPTY_BLOB_CONTENTS, &*EMPTY_BLOB_DIGEST),
+        (HELLOWORLD_BLOB_CONTENTS, &*HELLOWORLD_BLOB_DIGEST),
+    ] {
+        // put all data into the stores.
+        // insert blob into the store
+        let mut writer = blob_service.open_write().await;
+        tokio::io::copy(&mut io::Cursor::new(blob_contents), &mut writer)
+            .await
+            .unwrap();
+        assert_eq!(blob_digest.clone(), writer.close().await.unwrap());
+    }
+    blob_service
+}
+
 #[fixture]
 pub(crate) fn directory_service() -> Arc<dyn DirectoryService> {
     Arc::from(MemoryDirectoryService::default())
 }
+
+#[fixture]
+pub(crate) async fn directory_service_with_contents() -> Arc<dyn DirectoryService> {
+    let directory_service = Arc::from(MemoryDirectoryService::default());
+    for directory in [&*DIRECTORY_WITH_KEEP, &*DIRECTORY_COMPLICATED] {
+        directory_service.put(directory.clone()).await.unwrap();
+    }
+    directory_service
+}
+
+#[template]
+#[rstest]
+#[case::symlink    (&*CASTORE_NODE_SYMLINK, Ok(Ok(&*NAR_CONTENTS_SYMLINK)))]
+#[case::helloworld (&*CASTORE_NODE_HELLOWORLD, Ok(Ok(&*NAR_CONTENTS_HELLOWORLD)))]
+#[case::too_big    (&*CASTORE_NODE_TOO_BIG, Ok(Err(io::ErrorKind::UnexpectedEof)))]
+#[case::too_small  (&*CASTORE_NODE_TOO_SMALL, Ok(Err(io::ErrorKind::InvalidInput)))]
+#[case::complicated(&*CASTORE_NODE_COMPLICATED, Ok(Ok(&*NAR_CONTENTS_COMPLICATED)))]
+fn castore_fixtures_template(
    #[case] test_input: &Node,
    #[case] test_output: Result<Result<&Vec<u8>, io::ErrorKind>, crate::nar::RenderError>,
) {
}
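
The `blob_service_with_contents` and `directory_service_with_contents` fixtures added above are async, so the parametrized test in the second file below receives them through rstest's `#[future]` attribute and awaits them in the test body. A minimal sketch of that mechanism (illustrative only; `answer` and `uses_async_fixture` are hypothetical names, and it assumes `rstest` and `tokio` are available as dev-dependencies):

    use rstest::*;

    // An async fixture: rstest passes it to the test as a future.
    #[fixture]
    async fn answer() -> u32 {
        // stand-in for real async setup, e.g. seeding an in-memory store
        42
    }

    #[rstest]
    #[tokio::test]
    async fn uses_async_fixture(#[future] answer: u32) {
        // with #[future], the argument arrives un-awaited and is awaited here,
        // just like blob_service_with_contents / directory_service_with_contents
        assert_eq!(answer.await, 42);
    }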
@@ -1,10 +1,7 @@
-use crate::nar::calculate_size_and_sha256;
 use crate::nar::write_nar;
-use crate::tests::fixtures::blob_service;
-use crate::tests::fixtures::directory_service;
 use crate::tests::fixtures::*;
 use rstest::*;
-use sha2::{Digest, Sha256};
+use rstest_reuse::*;
 use std::io;
 use std::sync::Arc;
 use tokio::io::sink;
@@ -12,29 +9,6 @@ use tvix_castore::blobservice::BlobService;
 use tvix_castore::directoryservice::DirectoryService;
 use tvix_castore::Node;
 
-#[rstest]
-#[tokio::test]
-async fn single_symlink(
-    blob_service: Arc<dyn BlobService>,
-    directory_service: Arc<dyn DirectoryService>,
-) {
-    let mut buf: Vec<u8> = vec![];
-
-    write_nar(
-        &mut buf,
-        &Node::Symlink {
-            target: "/nix/store/somewhereelse".try_into().unwrap(),
-        },
-        // don't put anything in the stores, as we don't actually do any requests.
-        blob_service,
-        directory_service,
-    )
-    .await
-    .expect("must succeed");
-
-    assert_eq!(buf, NAR_CONTENTS_SYMLINK.to_vec());
-}
-
 /// Make sure the NARRenderer fails if a referred blob doesn't exist.
 #[rstest]
 #[tokio::test]
@@ -44,11 +18,7 @@ async fn single_file_missing_blob(
 ) {
     let e = write_nar(
         sink(),
-        &Node::File {
-            digest: HELLOWORLD_BLOB_DIGEST.clone(),
-            size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
-            executable: false,
-        },
+        &CASTORE_NODE_HELLOWORLD,
         // the blobservice is empty intentionally, to provoke the error.
         blob_service,
         directory_service,
@@ -64,158 +34,44 @@ async fn single_file_missing_blob(
     }
 }
 
-/// Make sure the NAR Renderer fails if the returned blob meta has another size
-/// than specified in the proto node.
-#[rstest]
+#[apply(castore_fixtures_template)]
 #[tokio::test]
-async fn single_file_wrong_blob_size(
-    blob_service: Arc<dyn BlobService>,
-    directory_service: Arc<dyn DirectoryService>,
+async fn seekable(
+    #[future] blob_service_with_contents: Arc<dyn BlobService>,
+    #[future] directory_service_with_contents: Arc<dyn DirectoryService>,
+    #[case] test_input: &Node,
+    #[case] test_output: Result<Result<&Vec<u8>, io::ErrorKind>, crate::nar::RenderError>,
 ) {
-    // insert blob into the store
-    let mut writer = blob_service.open_write().await;
-    tokio::io::copy(
-        &mut io::Cursor::new(HELLOWORLD_BLOB_CONTENTS.to_vec()),
-        &mut writer,
-    )
-    .await
-    .unwrap();
-    assert_eq!(
-        HELLOWORLD_BLOB_DIGEST.clone(),
-        writer.close().await.unwrap()
-    );
-
-    // Test with a root FileNode of a too big size
-    let e = write_nar(
-        sink(),
-        &Node::File {
-            digest: HELLOWORLD_BLOB_DIGEST.clone(),
-            size: 42, // <- note the wrong size here!
-            executable: false,
-        },
-        blob_service.clone(),
-        directory_service.clone(),
-    )
-    .await
-    .expect_err("must fail");
-
-    match e {
-        crate::nar::RenderError::NARWriterError(e) => {
-            assert_eq!(io::ErrorKind::UnexpectedEof, e.kind());
-        }
-        _ => panic!("unexpected error: {:?}", e),
-    }
-
-    // Test with a root FileNode of a too small size
-    let e = write_nar(
-        sink(),
-        &Node::File {
-            digest: HELLOWORLD_BLOB_DIGEST.clone(),
-            size: 2, // <- note the wrong size here!
-            executable: false,
-        },
-        blob_service,
-        directory_service,
-    )
-    .await
-    .expect_err("must fail");
-
-    match e {
-        crate::nar::RenderError::NARWriterError(e) => {
-            assert_eq!(io::ErrorKind::InvalidInput, e.kind());
-        }
-        _ => panic!("unexpected error: {:?}", e),
-    }
-}
-
-#[rstest]
-#[tokio::test]
-async fn single_file(
-    blob_service: Arc<dyn BlobService>,
-    directory_service: Arc<dyn DirectoryService>,
-) {
-    // insert blob into the store
-    let mut writer = blob_service.open_write().await;
-    tokio::io::copy(&mut io::Cursor::new(HELLOWORLD_BLOB_CONTENTS), &mut writer)
-        .await
-        .unwrap();
-
-    assert_eq!(
-        HELLOWORLD_BLOB_DIGEST.clone(),
-        writer.close().await.unwrap()
-    );
+    let blob_service = blob_service_with_contents.await;
+    let directory_service = directory_service_with_contents.await;
 
     let mut buf: Vec<u8> = vec![];
-
-    write_nar(
+    let read_result = write_nar(
         &mut buf,
-        &Node::File {
-            digest: HELLOWORLD_BLOB_DIGEST.clone(),
-            size: HELLOWORLD_BLOB_CONTENTS.len() as u64,
-            executable: false,
-        },
+        test_input,
+        // don't put anything in the stores, as we don't actually do any requests.
         blob_service,
         directory_service,
     )
-    .await
-    .expect("must succeed");
+    .await;
 
-    assert_eq!(buf, NAR_CONTENTS_HELLOWORLD.to_vec());
-}
-
-#[rstest]
-#[tokio::test]
-async fn test_complicated(
-    blob_service: Arc<dyn BlobService>,
-    directory_service: Arc<dyn DirectoryService>,
-) {
-    // put all data into the stores.
-    // insert blob into the store
-    let mut writer = blob_service.open_write().await;
-    tokio::io::copy(&mut io::Cursor::new(EMPTY_BLOB_CONTENTS), &mut writer)
-        .await
-        .unwrap();
-    assert_eq!(EMPTY_BLOB_DIGEST.clone(), writer.close().await.unwrap());
-
-    // insert directories
-    directory_service
-        .put(DIRECTORY_WITH_KEEP.clone())
-        .await
-        .unwrap();
-    directory_service
-        .put(DIRECTORY_COMPLICATED.clone())
-        .await
-        .unwrap();
-
-    let mut buf: Vec<u8> = vec![];
-
-    write_nar(
-        &mut buf,
-        &Node::Directory {
-            digest: DIRECTORY_COMPLICATED.digest(),
-            size: DIRECTORY_COMPLICATED.size(),
-        },
-        blob_service.clone(),
-        directory_service.clone(),
-    )
-    .await
-    .expect("must succeed");
-
-    assert_eq!(buf, NAR_CONTENTS_COMPLICATED.to_vec());
-
-    // ensure calculate_nar does return the correct sha256 digest and sum.
-    let (nar_size, nar_digest) = calculate_size_and_sha256(
-        &Node::Directory {
-            digest: DIRECTORY_COMPLICATED.digest(),
-            size: DIRECTORY_COMPLICATED.size(),
-        },
-        blob_service,
-        directory_service,
-    )
-    .await
-    .expect("must succeed");
-
-    assert_eq!(NAR_CONTENTS_COMPLICATED.len() as u64, nar_size);
-    let d = Sha256::digest(NAR_CONTENTS_COMPLICATED.clone());
-    assert_eq!(d.as_slice(), nar_digest);
+    match (read_result, test_output) {
+        (Ok(_), Err(_)) => panic!("creating reader should have failed but succeeded"),
+        (Ok(_), Ok(Err(_))) => panic!("creating reader should have failed but succeeded"),
+        (Err(err), Ok(Ok(_))) => {
+            panic!("creating reader should have succeeded but failed: {}", err)
+        }
+        (Err(reader_err), Err(expected_err)) => {
+            assert_eq!(format!("{}", reader_err), format!("{}", expected_err));
+        }
+        (Err(reader_err), Ok(Err(expected_err))) => {
+            let crate::nar::RenderError::NARWriterError(e) = reader_err else {
+                panic!("expected nar writer error")
+            };
+            assert_eq!(e.kind(), expected_err);
+        }
+        (Ok(_n), Ok(Ok(expected_read_result))) => {
+            assert_eq!(buf, expected_read_result.to_vec());
+        }
+    }
 }