refactor(tvix/store/tests/nar_renderer): use rstest fixtures

Change-Id: I975729cb97d69d080fb63fbb8f81b4ac46cbfb7f
Reviewed-on: https://cl.tvl.fyi/c/depot/+/11276
Reviewed-by: Connor Brewster <cbrewster@hey.com>
Tested-by: BuildkiteCI
Author:    Florian Klink
Committer: flokli
Date:      2024-03-27 12:16:07 +01:00
Parent:    40f65b363f
Commit:    024409bb90

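The `blob_service` and `directory_service` fixtures imported below come from `crate::tests::fixtures`, which this diff does not show. As a rough sketch of how such rstest fixtures are typically declared (assuming the in-memory castore services and their `Default` impls; the bodies here are illustrative, not the actual definitions from this change):

use std::sync::Arc;

use rstest::fixture;
use tvix_castore::blobservice::{BlobService, MemoryBlobService};
use tvix_castore::directoryservice::{DirectoryService, MemoryDirectoryService};

// Hypothetical sketch of crate::tests::fixtures -- not part of this change.
#[fixture]
pub fn blob_service() -> Arc<dyn BlobService> {
    // a fresh in-memory blob store per test
    Arc::new(MemoryBlobService::default())
}

#[fixture]
pub fn directory_service() -> Arc<dyn DirectoryService> {
    // a fresh in-memory directory store per test
    Arc::new(MemoryDirectoryService::default())
}

With `#[rstest]` stacked on top of `#[tokio::test]`, rstest resolves each test parameter by name against the `#[fixture]` function of the same name and constructs a fresh value for every test invocation, replacing the old `gen_blob_service()` / `gen_directory_service()` helpers.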

@@ -1,7 +1,9 @@
 use crate::nar::calculate_size_and_sha256;
 use crate::nar::write_nar;
+use crate::tests::fixtures::blob_service;
+use crate::tests::fixtures::directory_service;
 use crate::tests::fixtures::*;
-use crate::tests::utils::*;
+use rstest::*;
 use sha2::{Digest, Sha256};
 use std::io;
 use std::sync::Arc;
@@ -10,10 +12,12 @@ use tvix_castore::blobservice::BlobService;
 use tvix_castore::directoryservice::DirectoryService;
 use tvix_castore::proto as castorepb;
 
+#[rstest]
 #[tokio::test]
-async fn single_symlink() {
-    let blob_service: Arc<dyn BlobService> = gen_blob_service().into();
-    let directory_service: Arc<dyn DirectoryService> = gen_directory_service().into();
-
+async fn single_symlink(
+    blob_service: Arc<dyn BlobService>,
+    directory_service: Arc<dyn DirectoryService>,
+) {
     let mut buf: Vec<u8> = vec![];
 
     write_nar(
@@ -33,11 +37,12 @@ async fn single_symlink() {
 }
 
 /// Make sure the NARRenderer fails if a referred blob doesn't exist.
+#[rstest]
 #[tokio::test]
-async fn single_file_missing_blob() {
-    let blob_service: Arc<dyn BlobService> = gen_blob_service().into();
-    let directory_service: Arc<dyn DirectoryService> = gen_directory_service().into();
-
+async fn single_file_missing_blob(
+    blob_service: Arc<dyn BlobService>,
+    directory_service: Arc<dyn DirectoryService>,
+) {
     let e = write_nar(
         sink(),
         &castorepb::node::Node::File(castorepb::FileNode {
@@ -63,10 +68,12 @@ async fn single_file_missing_blob() {
 
 /// Make sure the NAR Renderer fails if the returned blob meta has another size
 /// than specified in the proto node.
+#[rstest]
 #[tokio::test]
-async fn single_file_wrong_blob_size() {
-    let blob_service: Arc<dyn BlobService> = gen_blob_service().into();
-
+async fn single_file_wrong_blob_size(
+    blob_service: Arc<dyn BlobService>,
+    directory_service: Arc<dyn DirectoryService>,
+) {
     // insert blob into the store
     let mut writer = blob_service.open_write().await;
     tokio::io::copy(
@@ -80,64 +87,57 @@ async fn single_file_wrong_blob_size() {
         writer.close().await.unwrap()
     );
 
-    let bs = blob_service.clone();
-
     // Test with a root FileNode of a too big size
-    {
-        let directory_service: Arc<dyn DirectoryService> = gen_directory_service().into();
-        let e = write_nar(
-            sink(),
-            &castorepb::node::Node::File(castorepb::FileNode {
-                name: "doesntmatter".into(),
-                digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
-                size: 42, // <- note the wrong size here!
-                executable: false,
-            }),
-            bs,
-            directory_service,
-        )
-        .await
-        .expect_err("must fail");
+    let e = write_nar(
+        sink(),
+        &castorepb::node::Node::File(castorepb::FileNode {
+            name: "doesntmatter".into(),
+            digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
+            size: 42, // <- note the wrong size here!
+            executable: false,
+        }),
+        blob_service.clone(),
+        directory_service.clone(),
+    )
+    .await
+    .expect_err("must fail");
 
-        match e {
-            crate::nar::RenderError::NARWriterError(e) => {
-                assert_eq!(io::ErrorKind::UnexpectedEof, e.kind());
-            }
-            _ => panic!("unexpected error: {:?}", e),
-        }
+    match e {
+        crate::nar::RenderError::NARWriterError(e) => {
+            assert_eq!(io::ErrorKind::UnexpectedEof, e.kind());
+        }
+        _ => panic!("unexpected error: {:?}", e),
     }
 
-    let bs = blob_service.clone();
-
     // Test with a root FileNode of a too small size
-    {
-        let directory_service: Arc<dyn DirectoryService> = gen_directory_service().into();
-        let e = write_nar(
-            sink(),
-            &castorepb::node::Node::File(castorepb::FileNode {
-                name: "doesntmatter".into(),
-                digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
-                size: 2, // <- note the wrong size here!
-                executable: false,
-            }),
-            bs,
-            directory_service,
-        )
-        .await
-        .expect_err("must fail");
+    let e = write_nar(
+        sink(),
+        &castorepb::node::Node::File(castorepb::FileNode {
+            name: "doesntmatter".into(),
+            digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
+            size: 2, // <- note the wrong size here!
+            executable: false,
+        }),
+        blob_service,
+        directory_service,
+    )
+    .await
+    .expect_err("must fail");
 
-        match e {
-            crate::nar::RenderError::NARWriterError(e) => {
-                assert_eq!(io::ErrorKind::InvalidInput, e.kind());
-            }
-            _ => panic!("unexpected error: {:?}", e),
-        }
+    match e {
+        crate::nar::RenderError::NARWriterError(e) => {
+            assert_eq!(io::ErrorKind::InvalidInput, e.kind());
+        }
+        _ => panic!("unexpected error: {:?}", e),
     }
 }
 
+#[rstest]
 #[tokio::test]
-async fn single_file() {
-    let blob_service: Arc<dyn BlobService> = gen_blob_service().into();
-    let directory_service: Arc<dyn DirectoryService> = gen_directory_service().into();
-
+async fn single_file(
+    blob_service: Arc<dyn BlobService>,
+    directory_service: Arc<dyn DirectoryService>,
+) {
     // insert blob into the store
     let mut writer = blob_service.open_write().await;
     tokio::io::copy(&mut io::Cursor::new(HELLOWORLD_BLOB_CONTENTS), &mut writer)
@@ -168,11 +168,12 @@ async fn single_file() {
     assert_eq!(buf, NAR_CONTENTS_HELLOWORLD.to_vec());
 }
 
+#[rstest]
 #[tokio::test]
-async fn test_complicated() {
-    let blob_service: Arc<dyn BlobService> = gen_blob_service().into();
-    let directory_service: Arc<dyn DirectoryService> = gen_directory_service().into();
-
+async fn test_complicated(
+    blob_service: Arc<dyn BlobService>,
+    directory_service: Arc<dyn DirectoryService>,
+) {
     // put all data into the stores.
     // insert blob into the store
     let mut writer = blob_service.open_write().await;
@@ -181,6 +182,7 @@ async fn test_complicated() {
         .unwrap();
     assert_eq!(EMPTY_BLOB_DIGEST.clone(), writer.close().await.unwrap());
 
+    // insert directories
     directory_service
         .put(DIRECTORY_WITH_KEEP.clone())
         .await
@@ -192,9 +194,6 @@ async fn test_complicated() {
 
     let mut buf: Vec<u8> = vec![];
 
-    let bs = blob_service.clone();
-    let ds = directory_service.clone();
-
     write_nar(
         &mut buf,
         &castorepb::node::Node::Directory(castorepb::DirectoryNode {
@@ -202,8 +201,8 @@ async fn test_complicated() {
             digest: DIRECTORY_COMPLICATED.digest().into(),
             size: DIRECTORY_COMPLICATED.size(),
         }),
-        bs,
-        ds,
+        blob_service.clone(),
+        directory_service.clone(),
     )
     .await
     .expect("must succeed");
@@ -211,16 +210,14 @@ async fn test_complicated() {
     assert_eq!(buf, NAR_CONTENTS_COMPLICATED.to_vec());
 
     // ensure calculate_nar does return the correct sha256 digest and sum.
-    let bs = blob_service.clone();
-    let ds = directory_service.clone();
     let (nar_size, nar_digest) = calculate_size_and_sha256(
         &castorepb::node::Node::Directory(castorepb::DirectoryNode {
            name: "doesntmatter".into(),
            digest: DIRECTORY_COMPLICATED.digest().into(),
            size: DIRECTORY_COMPLICATED.size(),
        }),
-        bs,
-        ds,
+        blob_service,
+        directory_service,
     )
     .await
     .expect("must succeed");