feat(tvix/store/proto): use Bytes instead of Vec<u8>

Makes use of https://github.com/tokio-rs/prost/pull/341, which makes our
bytes fields cheaper to clone.

It's a bit annoying to configure due to
https://github.com/hyperium/tonic/issues/908, but the workaround does
get the job done.

Change-Id: I25714600b041bb5432d3adf5859b151e72b12778
Reviewed-on: https://cl.tvl.fyi/c/depot/+/8975
Reviewed-by: raitobezarius <tvl@lahfa.xyz>
Tested-by: BuildkiteCI
Reviewed-by: tazjin <tazjin@tvl.su>
Autosubmit: flokli <flokli@flokli.de>
This commit is contained in:
Florian Klink 2023-07-19 18:52:50 +03:00 committed by clbot
parent 7971d7d9ff
commit 432222f098
34 changed files with 216 additions and 164 deletions

View file

@ -52,6 +52,12 @@ version = "3.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535"
[[package]]
name = "bytes"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
[[package]] [[package]]
name = "cfg-if" name = "cfg-if"
version = "1.0.0" version = "1.0.0"
@ -790,6 +796,7 @@ dependencies = [
name = "tvix-eval" name = "tvix-eval"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"bytes",
"codemap", "codemap",
"codemap-diagnostic", "codemap-diagnostic",
"dirs", "dirs",

2
tvix/Cargo.lock generated
View file

@ -2697,6 +2697,7 @@ checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed"
name = "tvix-cli" name = "tvix-cli"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"bytes",
"clap 4.2.7", "clap 4.2.7",
"data-encoding", "data-encoding",
"dirs", "dirs",
@ -2714,6 +2715,7 @@ dependencies = [
name = "tvix-eval" name = "tvix-eval"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"bytes",
"codemap", "codemap",
"codemap-diagnostic", "codemap-diagnostic",
"criterion", "criterion",

View file

@ -7931,6 +7931,10 @@ rec {
then lib.cleanSourceWith { filter = sourceFilter; src = ./cli; } then lib.cleanSourceWith { filter = sourceFilter; src = ./cli; }
else ./cli; else ./cli;
dependencies = [ dependencies = [
{
name = "bytes";
packageId = "bytes";
}
{ {
name = "clap"; name = "clap";
packageId = "clap 4.2.7"; packageId = "clap 4.2.7";
@ -7991,6 +7995,10 @@ rec {
else ./eval; else ./eval;
libName = "tvix_eval"; libName = "tvix_eval";
dependencies = [ dependencies = [
{
name = "bytes";
packageId = "bytes";
}
{ {
name = "codemap"; name = "codemap";
packageId = "codemap"; packageId = "codemap";

View file

@ -18,6 +18,7 @@ smol_str = "0.2.0"
ssri = "7.0.0" ssri = "7.0.0"
data-encoding = "2.3.3" data-encoding = "2.3.3"
thiserror = "1.0.38" thiserror = "1.0.38"
bytes = "1.4.0"
[dependencies.wu-manber] [dependencies.wu-manber]
git = "https://github.com/tvlfyi/wu-manber.git" git = "https://github.com/tvlfyi/wu-manber.git"

View file

@ -77,7 +77,7 @@ impl EvalIO for NixCompatIO {
self.underlying.read_to_string(path) self.underlying.read_to_string(path)
} }
fn read_dir(&self, path: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error> { fn read_dir(&self, path: &Path) -> Result<Vec<(bytes::Bytes, FileType)>, io::Error> {
self.underlying.read_dir(path) self.underlying.read_dir(path)
} }
} }

View file

@ -72,7 +72,7 @@ impl<T: EvalIO> EvalIO for TvixIO<T> {
self.actual.read_to_string(path) self.actual.read_to_string(path)
} }
fn read_dir(&self, path: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error> { fn read_dir(&self, path: &Path) -> Result<Vec<(bytes::Bytes, FileType)>, io::Error> {
self.actual.read_dir(path) self.actual.read_dir(path)
} }
} }

View file

@ -10,6 +10,7 @@ name = "tvix_eval"
[dependencies] [dependencies]
builtin-macros = { path = "./builtin-macros", package = "tvix-eval-builtin-macros" } builtin-macros = { path = "./builtin-macros", package = "tvix-eval-builtin-macros" }
bytes = "1.4.0"
codemap = "0.1.3" codemap = "0.1.3"
codemap-diagnostic = "0.1.1" codemap-diagnostic = "0.1.1"
dirs = "4.0.0" dirs = "4.0.0"

View file

@ -38,8 +38,10 @@ mod impure_builtins {
let dir = generators::request_read_dir(&co, path).await; let dir = generators::request_read_dir(&co, path).await;
let res = dir.into_iter().map(|(name, ftype)| { let res = dir.into_iter().map(|(name, ftype)| {
( (
// TODO: propagate Vec<u8> into NixString. // TODO: propagate Vec<u8> or bytes::Bytes into NixString.
NixString::from(String::from_utf8(name).expect("parsing file name as string")), NixString::from(
String::from_utf8(name.to_vec()).expect("parsing file name as string"),
),
Value::String( Value::String(
match ftype { match ftype {
FileType::Directory => "directory", FileType::Directory => "directory",

View file

@ -42,7 +42,7 @@ pub trait EvalIO {
/// Read the directory at the specified path and return the names /// Read the directory at the specified path and return the names
/// of its entries associated with their [`FileType`]. /// of its entries associated with their [`FileType`].
fn read_dir(&self, path: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error>; fn read_dir(&self, path: &Path) -> Result<Vec<(bytes::Bytes, FileType)>, io::Error>;
/// Import the given path. What this means depends on the /// Import the given path. What this means depends on the
/// implementation, for example for a `std::io`-based /// implementation, for example for a `std::io`-based
@ -76,7 +76,7 @@ impl EvalIO for StdIO {
std::fs::read_to_string(&path) std::fs::read_to_string(&path)
} }
fn read_dir(&self, path: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error> { fn read_dir(&self, path: &Path) -> Result<Vec<(bytes::Bytes, FileType)>, io::Error> {
let mut result = vec![]; let mut result = vec![];
for entry in path.read_dir()? { for entry in path.read_dir()? {
@ -93,7 +93,7 @@ impl EvalIO for StdIO {
FileType::Unknown FileType::Unknown
}; };
result.push((entry.file_name().into_vec(), val)) result.push((entry.file_name().into_vec().into(), val))
} }
Ok(result) Ok(result)
@ -125,7 +125,7 @@ impl EvalIO for DummyIO {
)) ))
} }
fn read_dir(&self, _: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error> { fn read_dir(&self, _: &Path) -> Result<Vec<(bytes::Bytes, FileType)>, io::Error> {
Err(io::Error::new( Err(io::Error::new(
io::ErrorKind::Unsupported, io::ErrorKind::Unsupported,
"I/O methods are not implemented in DummyIO", "I/O methods are not implemented in DummyIO",

View file

@ -186,7 +186,7 @@ pub enum VMResponse {
Path(PathBuf), Path(PathBuf),
/// VM response with the contents of a directory. /// VM response with the contents of a directory.
Directory(Vec<(Vec<u8>, FileType)>), Directory(Vec<(bytes::Bytes, FileType)>),
/// VM response with a span to use at the current point. /// VM response with a span to use at the current point.
Span(LightSpan), Span(LightSpan),
@ -735,7 +735,7 @@ pub(crate) async fn request_path_exists(co: &GenCo, path: PathBuf) -> Value {
} }
} }
pub(crate) async fn request_read_dir(co: &GenCo, path: PathBuf) -> Vec<(Vec<u8>, FileType)> { pub(crate) async fn request_read_dir(co: &GenCo, path: PathBuf) -> Vec<(bytes::Bytes, FileType)> {
match co.yield_(VMRequest::ReadDir(path)).await { match co.yield_(VMRequest::ReadDir(path)).await {
VMResponse::Directory(dir) => dir, VMResponse::Directory(dir) => dir,
msg => panic!( msg => panic!(

View file

@ -12,7 +12,15 @@ fn main() -> Result<()> {
builder = builder.file_descriptor_set_path(descriptor_path); builder = builder.file_descriptor_set_path(descriptor_path);
}; };
builder.build_server(true).build_client(true).compile( // https://github.com/hyperium/tonic/issues/908
let mut config = prost_build::Config::new();
config.bytes(["."]);
builder
.build_server(true)
.build_client(true)
.compile_with_config(
config,
&[ &[
"tvix/store/protos/castore.proto", "tvix/store/protos/castore.proto",
"tvix/store/protos/pathinfo.proto", "tvix/store/protos/pathinfo.proto",

View file

@ -98,7 +98,7 @@ impl BlobService for GRPCBlobService {
self.tokio_handle.spawn(async move { self.tokio_handle.spawn(async move {
Ok(grpc_client Ok(grpc_client
.stat(proto::StatBlobRequest { .stat(proto::StatBlobRequest {
digest: digest.to_vec(), digest: digest.into(),
..Default::default() ..Default::default()
}) })
.await? .await?
@ -126,7 +126,7 @@ impl BlobService for GRPCBlobService {
self.tokio_handle.spawn(async move { self.tokio_handle.spawn(async move {
let stream = grpc_client let stream = grpc_client
.read(proto::ReadBlobRequest { .read(proto::ReadBlobRequest {
digest: digest.to_vec(), digest: digest.into(),
}) })
.await? .await?
.into_inner(); .into_inner();

View file

@ -91,13 +91,12 @@ impl DirectoryService for GRPCDirectoryService {
fn get(&self, digest: &B3Digest) -> Result<Option<crate::proto::Directory>, crate::Error> { fn get(&self, digest: &B3Digest) -> Result<Option<crate::proto::Directory>, crate::Error> {
// Get a new handle to the gRPC client, and copy the digest. // Get a new handle to the gRPC client, and copy the digest.
let mut grpc_client = self.grpc_client.clone(); let mut grpc_client = self.grpc_client.clone();
let digest_cpy = digest.clone();
let digest_as_vec = digest.to_vec();
let task = self.tokio_handle.spawn(async move { let task = self.tokio_handle.spawn(async move {
let mut s = grpc_client let mut s = grpc_client
.get(proto::GetDirectoryRequest { .get(proto::GetDirectoryRequest {
recursive: false, recursive: false,
by_what: Some(ByWhat::Digest(digest_as_vec)), by_what: Some(ByWhat::Digest(digest_cpy.into())),
}) })
.await? .await?
.into_inner(); .into_inner();
@ -160,13 +159,15 @@ impl DirectoryService for GRPCDirectoryService {
) -> Box<dyn Iterator<Item = Result<proto::Directory, Error>> + Send> { ) -> Box<dyn Iterator<Item = Result<proto::Directory, Error>> + Send> {
let mut grpc_client = self.grpc_client.clone(); let mut grpc_client = self.grpc_client.clone();
let root_directory_digest_as_vec = root_directory_digest.to_vec(); // clone so we can move it
let root_directory_digest_cpy = root_directory_digest.clone();
let task: tokio::task::JoinHandle<Result<Streaming<proto::Directory>, Status>> = let task: tokio::task::JoinHandle<Result<Streaming<proto::Directory>, Status>> =
self.tokio_handle.spawn(async move { self.tokio_handle.spawn(async move {
let s = grpc_client let s = grpc_client
.get(proto::GetDirectoryRequest { .get(proto::GetDirectoryRequest {
recursive: true, recursive: true,
by_what: Some(ByWhat::Digest(root_directory_digest_as_vec)), by_what: Some(ByWhat::Digest(root_directory_digest_cpy.into())),
}) })
.await? .await?
.into_inner(); .into_inner();

View file

@ -108,7 +108,7 @@ mod tests {
let node_directory_complicated = let node_directory_complicated =
crate::proto::node::Node::Directory(crate::proto::DirectoryNode { crate::proto::node::Node::Directory(crate::proto::DirectoryNode {
name: "doesntmatter".into(), name: "doesntmatter".into(),
digest: DIRECTORY_COMPLICATED.digest().to_vec(), digest: DIRECTORY_COMPLICATED.digest().into(),
size: DIRECTORY_COMPLICATED.size(), size: DIRECTORY_COMPLICATED.size(),
}); });

View file

@ -13,7 +13,7 @@ pub struct InodeTracker {
blob_digest_to_inode: HashMap<B3Digest, u64>, blob_digest_to_inode: HashMap<B3Digest, u64>,
// lookup table for symlinks by their target // lookup table for symlinks by their target
symlink_target_to_inode: HashMap<Vec<u8>, u64>, symlink_target_to_inode: HashMap<bytes::Bytes, u64>,
// lookup table for directories by their B3Digest. // lookup table for directories by their B3Digest.
// Note the corresponding directory may not be present in data yet. // Note the corresponding directory may not be present in data yet.
@ -171,7 +171,7 @@ impl InodeTracker {
self.blob_digest_to_inode.insert(digest.clone(), ino); self.blob_digest_to_inode.insert(digest.clone(), ino);
} }
InodeData::Symlink(ref target) => { InodeData::Symlink(ref target) => {
self.symlink_target_to_inode.insert(target.to_vec(), ino); self.symlink_target_to_inode.insert(target.clone(), ino);
} }
InodeData::Directory(DirectoryInodeData::Sparse(ref digest, _size)) => { InodeData::Directory(DirectoryInodeData::Sparse(ref digest, _size)) => {
self.directory_digest_to_inode.insert(digest.clone(), ino); self.directory_digest_to_inode.insert(digest.clone(), ino);

View file

@ -5,7 +5,7 @@ use crate::{proto, B3Digest};
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum InodeData { pub enum InodeData {
Regular(B3Digest, u32, bool), // digest, size, executable Regular(B3Digest, u32, bool), // digest, size, executable
Symlink(Vec<u8>), // target Symlink(bytes::Bytes), // target
Directory(DirectoryInodeData), // either [DirectoryInodeData:Sparse] or [DirectoryInodeData:Populated] Directory(DirectoryInodeData), // either [DirectoryInodeData:Sparse] or [DirectoryInodeData:Populated]
} }

View file

@ -58,7 +58,7 @@ fn populate_blob_a(
node: Some(proto::Node { node: Some(proto::Node {
node: Some(proto::node::Node::File(FileNode { node: Some(proto::node::Node::File(FileNode {
name: BLOB_A_NAME.into(), name: BLOB_A_NAME.into(),
digest: fixtures::BLOB_A_DIGEST.to_vec(), digest: fixtures::BLOB_A_DIGEST.clone().into(),
size: fixtures::BLOB_A.len() as u32, size: fixtures::BLOB_A.len() as u32,
executable: false, executable: false,
})), })),
@ -84,7 +84,7 @@ fn populate_blob_b(
node: Some(proto::Node { node: Some(proto::Node {
node: Some(proto::node::Node::File(FileNode { node: Some(proto::node::Node::File(FileNode {
name: BLOB_B_NAME.into(), name: BLOB_B_NAME.into(),
digest: fixtures::BLOB_B_DIGEST.to_vec(), digest: fixtures::BLOB_B_DIGEST.clone().into(),
size: fixtures::BLOB_B.len() as u32, size: fixtures::BLOB_B.len() as u32,
executable: false, executable: false,
})), })),
@ -154,7 +154,7 @@ fn populate_directory_with_keep(
node: Some(proto::Node { node: Some(proto::Node {
node: Some(proto::node::Node::Directory(DirectoryNode { node: Some(proto::node::Node::Directory(DirectoryNode {
name: DIRECTORY_WITH_KEEP_NAME.into(), name: DIRECTORY_WITH_KEEP_NAME.into(),
digest: fixtures::DIRECTORY_WITH_KEEP.digest().to_vec(), digest: fixtures::DIRECTORY_WITH_KEEP.digest().into(),
size: fixtures::DIRECTORY_WITH_KEEP.size(), size: fixtures::DIRECTORY_WITH_KEEP.size(),
})), })),
}), }),
@ -175,7 +175,7 @@ fn populate_pathinfo_without_directory(
node: Some(proto::Node { node: Some(proto::Node {
node: Some(proto::node::Node::Directory(DirectoryNode { node: Some(proto::node::Node::Directory(DirectoryNode {
name: DIRECTORY_WITH_KEEP_NAME.into(), name: DIRECTORY_WITH_KEEP_NAME.into(),
digest: fixtures::DIRECTORY_WITH_KEEP.digest().to_vec(), digest: fixtures::DIRECTORY_WITH_KEEP.digest().into(),
size: fixtures::DIRECTORY_WITH_KEEP.size(), size: fixtures::DIRECTORY_WITH_KEEP.size(),
})), })),
}), }),
@ -195,7 +195,7 @@ fn populate_blob_a_without_blob(
node: Some(proto::Node { node: Some(proto::Node {
node: Some(proto::node::Node::File(FileNode { node: Some(proto::node::Node::File(FileNode {
name: BLOB_A_NAME.into(), name: BLOB_A_NAME.into(),
digest: fixtures::BLOB_A_DIGEST.to_vec(), digest: fixtures::BLOB_A_DIGEST.clone().into(),
size: fixtures::BLOB_A.len() as u32, size: fixtures::BLOB_A.len() as u32,
executable: false, executable: false,
})), })),
@ -232,7 +232,7 @@ fn populate_directory_complicated(
node: Some(proto::Node { node: Some(proto::Node {
node: Some(proto::node::Node::Directory(DirectoryNode { node: Some(proto::node::Node::Directory(DirectoryNode {
name: DIRECTORY_COMPLICATED_NAME.into(), name: DIRECTORY_COMPLICATED_NAME.into(),
digest: fixtures::DIRECTORY_COMPLICATED.digest().to_vec(), digest: fixtures::DIRECTORY_COMPLICATED.digest().into(),
size: fixtures::DIRECTORY_COMPLICATED.size(), size: fixtures::DIRECTORY_COMPLICATED.size(),
})), })),
}), }),

View file

@ -66,8 +66,6 @@ fn process_entry(
) -> Result<proto::node::Node, Error> { ) -> Result<proto::node::Node, Error> {
let file_type = entry.file_type(); let file_type = entry.file_type();
let entry_path: PathBuf = entry.path().to_path_buf();
if file_type.is_dir() { if file_type.is_dir() {
let directory = maybe_directory let directory = maybe_directory
.expect("tvix bug: must be called with some directory in the case of directory"); .expect("tvix bug: must be called with some directory in the case of directory");
@ -80,41 +78,45 @@ fn process_entry(
.map_err(|e| Error::UploadDirectoryError(entry.path().to_path_buf(), e))?; .map_err(|e| Error::UploadDirectoryError(entry.path().to_path_buf(), e))?;
return Ok(proto::node::Node::Directory(proto::DirectoryNode { return Ok(proto::node::Node::Directory(proto::DirectoryNode {
name: entry.file_name().as_bytes().to_vec(), name: entry.file_name().as_bytes().to_owned().into(),
digest: directory_digest.to_vec(), digest: directory_digest.into(),
size: directory_size, size: directory_size,
})); }));
} }
if file_type.is_symlink() { if file_type.is_symlink() {
let target = std::fs::read_link(&entry_path) let target: bytes::Bytes = std::fs::read_link(entry.path())
.map_err(|e| Error::UnableToStat(entry_path.clone(), e))?; .map_err(|e| Error::UnableToStat(entry.path().to_path_buf(), e))?
.as_os_str()
.as_bytes()
.to_owned()
.into();
return Ok(proto::node::Node::Symlink(proto::SymlinkNode { return Ok(proto::node::Node::Symlink(proto::SymlinkNode {
name: entry.file_name().as_bytes().to_vec(), name: entry.file_name().as_bytes().to_owned().into(),
target: target.as_os_str().as_bytes().to_vec(), target,
})); }));
} }
if file_type.is_file() { if file_type.is_file() {
let metadata = entry let metadata = entry
.metadata() .metadata()
.map_err(|e| Error::UnableToStat(entry_path.clone(), e.into()))?; .map_err(|e| Error::UnableToStat(entry.path().to_path_buf(), e.into()))?;
let mut file = File::open(entry_path.clone()) let mut file = File::open(entry.path())
.map_err(|e| Error::UnableToOpen(entry_path.clone(), e))?; .map_err(|e| Error::UnableToOpen(entry.path().to_path_buf(), e))?;
let mut writer = blob_service.open_write(); let mut writer = blob_service.open_write();
if let Err(e) = io::copy(&mut file, &mut writer) { if let Err(e) = io::copy(&mut file, &mut writer) {
return Err(Error::UnableToRead(entry_path, e)); return Err(Error::UnableToRead(entry.path().to_path_buf(), e));
}; };
let digest = writer.close()?; let digest = writer.close()?;
return Ok(proto::node::Node::File(proto::FileNode { return Ok(proto::node::Node::File(proto::FileNode {
name: entry.file_name().as_bytes().to_vec(), name: entry.file_name().as_bytes().to_vec().into(),
digest: digest.to_vec(), digest: digest.into(),
size: metadata.len() as u32, size: metadata.len() as u32,
// If it's executable by the user, it'll become executable. // If it's executable by the user, it'll become executable.
// This matches nix's dump() function behaviour. // This matches nix's dump() function behaviour.
@ -150,8 +152,9 @@ pub fn ingest_path<P: AsRef<Path> + Debug>(
.file_name() .file_name()
.unwrap_or_default() .unwrap_or_default()
.as_bytes() .as_bytes()
.to_vec(), .to_owned()
target: target.as_os_str().as_bytes().to_vec(), .into(),
target: target.as_os_str().as_bytes().to_vec().into(),
})); }));
} }

View file

@ -13,13 +13,13 @@ pub enum RenderError {
StoreError(crate::Error), StoreError(crate::Error),
#[error("unable to find directory {}, referred from {:?}", .0, .1)] #[error("unable to find directory {}, referred from {:?}", .0, .1)]
DirectoryNotFound(B3Digest, Vec<u8>), DirectoryNotFound(B3Digest, bytes::Bytes),
#[error("unable to find blob {}, referred from {:?}", BASE64.encode(.0), .1)] #[error("unable to find blob {}, referred from {:?}", BASE64.encode(.0), .1)]
BlobNotFound([u8; 32], Vec<u8>), BlobNotFound([u8; 32], bytes::Bytes),
#[error("unexpected size in metadata for blob {}, referred from {:?} returned, expected {}, got {}", BASE64.encode(.0), .1, .2, .3)] #[error("unexpected size in metadata for blob {}, referred from {:?} returned, expected {}, got {}", BASE64.encode(.0), .1, .2, .3)]
UnexpectedBlobMeta([u8; 32], Vec<u8>, u32, u32), UnexpectedBlobMeta([u8; 32], bytes::Bytes, u32, u32),
#[error("failure using the NAR writer: {0}")] #[error("failure using the NAR writer: {0}")]
NARWriterError(std::io::Error), NARWriterError(std::io::Error),

View file

@ -115,7 +115,7 @@ fn walk_node(
None => { None => {
return Err(RenderError::DirectoryNotFound( return Err(RenderError::DirectoryNotFound(
digest, digest,
proto_directory_node.name.to_owned(), proto_directory_node.name.clone(),
)) ))
} }
Some(proto_directory) => { Some(proto_directory) => {

View file

@ -97,7 +97,7 @@ impl PathInfoService for GRPCPathInfoService {
let path_info = grpc_client let path_info = grpc_client
.get(proto::GetPathInfoRequest { .get(proto::GetPathInfoRequest {
by_what: Some(proto::get_path_info_request::ByWhat::ByOutputHash( by_what: Some(proto::get_path_info_request::ByWhat::ByOutputHash(
digest.to_vec(), digest.to_vec().into(),
)), )),
}) })
.await? .await?
@ -154,6 +154,7 @@ impl PathInfoService for GRPCPathInfoService {
let nar_sha256: [u8; 32] = resp let nar_sha256: [u8; 32] = resp
.nar_sha256 .nar_sha256
.to_vec()
.try_into() .try_into()
.map_err(|_e| crate::Error::StorageError("invalid digest length".to_string()))?; .map_err(|_e| crate::Error::StorageError("invalid digest length".to_string()))?;

View file

@ -133,9 +133,7 @@ impl super::blob_service_server::BlobService for GRPCBlobServiceWrapper {
x: Result<bytes::Bytes, io::Error>, x: Result<bytes::Bytes, io::Error>,
) -> Result<super::BlobChunk, Status> { ) -> Result<super::BlobChunk, Status> {
match x { match x {
Ok(bytes) => Ok(super::BlobChunk { Ok(bytes) => Ok(super::BlobChunk { data: bytes }),
data: bytes.to_vec(),
}),
Err(e) => Err(Status::from(e)), Err(e) => Err(Status::from(e)),
} }
} }
@ -156,7 +154,7 @@ impl super::blob_service_server::BlobService for GRPCBlobServiceWrapper {
let req_inner = request.into_inner(); let req_inner = request.into_inner();
let data_stream = req_inner.map(|x| { let data_stream = req_inner.map(|x| {
x.map(|x| VecDeque::from(x.data)) x.map(|x| VecDeque::from(x.data.to_vec()))
.map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e)) .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e))
}); });
@ -182,7 +180,9 @@ impl super::blob_service_server::BlobService for GRPCBlobServiceWrapper {
})? })?
.to_vec(); .to_vec();
Ok(super::PutBlobResponse { digest }) Ok(super::PutBlobResponse {
digest: digest.into(),
})
}) })
.await .await
.map_err(|_| Status::internal("failed to wait for task"))??; .map_err(|_| Status::internal("failed to wait for task"))??;

View file

@ -176,7 +176,7 @@ impl proto::directory_service_server::DirectoryService for GRPCDirectoryServiceW
match last_directory_dgst { match last_directory_dgst {
None => Err(Status::invalid_argument("no directories received")), None => Err(Status::invalid_argument("no directories received")),
Some(last_directory_dgst) => Ok(Response::new(proto::PutDirectoryResponse { Some(last_directory_dgst) => Ok(Response::new(proto::PutDirectoryResponse {
root_digest: last_directory_dgst.to_vec(), root_digest: last_directory_dgst.into(),
})), })),
} }
} }

View file

@ -26,10 +26,11 @@ impl proto::path_info_service_server::PathInfoService for GRPCPathInfoServiceWra
) -> Result<Response<proto::PathInfo>> { ) -> Result<Response<proto::PathInfo>> {
match request.into_inner().by_what { match request.into_inner().by_what {
None => Err(Status::unimplemented("by_what needs to be specified")), None => Err(Status::unimplemented("by_what needs to be specified")),
Some(proto::get_path_info_request::ByWhat::ByOutputHash(digest)) => { Some(proto::get_path_info_request::ByWhat::ByOutputHash(output_digest)) => {
let digest: [u8; 20] = digest let digest: [u8; 20] = output_digest
.to_vec()
.try_into() .try_into()
.map_err(|_e| Status::invalid_argument("invalid digest length"))?; .map_err(|_e| Status::invalid_argument("invalid output digest length"))?;
match self.path_info_service.get(digest) { match self.path_info_service.get(digest) {
Ok(None) => Err(Status::not_found("PathInfo not found")), Ok(None) => Err(Status::not_found("PathInfo not found")),
Ok(Some(path_info)) => Ok(Response::new(path_info)), Ok(Some(path_info)) => Ok(Response::new(path_info)),
@ -72,7 +73,7 @@ impl proto::path_info_service_server::PathInfoService for GRPCPathInfoServiceWra
Ok(Response::new(proto::CalculateNarResponse { Ok(Response::new(proto::CalculateNarResponse {
nar_size, nar_size,
nar_sha256: nar_sha256.to_vec(), nar_sha256: nar_sha256.to_vec().into(),
})) }))
} }
} }

View file

@ -86,7 +86,7 @@ fn validate_node_name<E>(name: &[u8], err: fn(Vec<u8>) -> E) -> Result<(), E> {
/// Checks a digest for validity. /// Checks a digest for validity.
/// Digests are 32 bytes long, as we store blake3 digests. /// Digests are 32 bytes long, as we store blake3 digests.
fn validate_digest<E>(digest: &Vec<u8>, err: fn(usize) -> E) -> Result<(), E> { fn validate_digest<E>(digest: &bytes::Bytes, err: fn(usize) -> E) -> Result<(), E> {
if digest.len() != 32 { if digest.len() != 32 {
return Err(err(digest.len())); return Err(err(digest.len()));
} }

View file

@ -18,7 +18,7 @@ fn size() {
let d = Directory { let d = Directory {
directories: vec![DirectoryNode { directories: vec![DirectoryNode {
name: "foo".into(), name: "foo".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 0, size: 0,
}], }],
..Default::default() ..Default::default()
@ -29,7 +29,7 @@ fn size() {
let d = Directory { let d = Directory {
directories: vec![DirectoryNode { directories: vec![DirectoryNode {
name: "foo".into(), name: "foo".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 4, size: 4,
}], }],
..Default::default() ..Default::default()
@ -40,7 +40,7 @@ fn size() {
let d = Directory { let d = Directory {
files: vec![FileNode { files: vec![FileNode {
name: "foo".into(), name: "foo".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 42, size: 42,
executable: false, executable: false,
}], }],
@ -88,7 +88,7 @@ fn validate_invalid_names() {
let d = Directory { let d = Directory {
directories: vec![DirectoryNode { directories: vec![DirectoryNode {
name: "".into(), name: "".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 42, size: 42,
}], }],
..Default::default() ..Default::default()
@ -105,7 +105,7 @@ fn validate_invalid_names() {
let d = Directory { let d = Directory {
directories: vec![DirectoryNode { directories: vec![DirectoryNode {
name: ".".into(), name: ".".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 42, size: 42,
}], }],
..Default::default() ..Default::default()
@ -122,7 +122,7 @@ fn validate_invalid_names() {
let d = Directory { let d = Directory {
files: vec![FileNode { files: vec![FileNode {
name: "..".into(), name: "..".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 42, size: 42,
executable: false, executable: false,
}], }],
@ -174,7 +174,7 @@ fn validate_invalid_digest() {
let d = Directory { let d = Directory {
directories: vec![DirectoryNode { directories: vec![DirectoryNode {
name: "foo".into(), name: "foo".into(),
digest: vec![0x00, 0x42], // invalid length digest: vec![0x00, 0x42].into(), // invalid length
size: 42, size: 42,
}], }],
..Default::default() ..Default::default()
@ -195,12 +195,12 @@ fn validate_sorting() {
directories: vec![ directories: vec![
DirectoryNode { DirectoryNode {
name: "b".into(), name: "b".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 42, size: 42,
}, },
DirectoryNode { DirectoryNode {
name: "a".into(), name: "a".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 42, size: 42,
}, },
], ],
@ -220,12 +220,12 @@ fn validate_sorting() {
directories: vec![ directories: vec![
DirectoryNode { DirectoryNode {
name: "a".into(), name: "a".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 42, size: 42,
}, },
DirectoryNode { DirectoryNode {
name: "a".into(), name: "a".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 42, size: 42,
}, },
], ],
@ -245,12 +245,12 @@ fn validate_sorting() {
directories: vec![ directories: vec![
DirectoryNode { DirectoryNode {
name: "a".into(), name: "a".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 42, size: 42,
}, },
DirectoryNode { DirectoryNode {
name: "b".into(), name: "b".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 42, size: 42,
}, },
], ],
@ -266,12 +266,12 @@ fn validate_sorting() {
directories: vec![ directories: vec![
DirectoryNode { DirectoryNode {
name: "b".into(), name: "b".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 42, size: 42,
}, },
DirectoryNode { DirectoryNode {
name: "c".into(), name: "c".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.to_vec().into(),
size: 42, size: 42,
}, },
], ],

View file

@ -16,7 +16,7 @@ async fn not_found_read() {
let resp = service let resp = service
.read(tonic::Request::new(ReadBlobRequest { .read(tonic::Request::new(ReadBlobRequest {
digest: BLOB_A_DIGEST.to_vec(), digest: BLOB_A_DIGEST.clone().into(),
})) }))
.await; .await;
@ -36,7 +36,7 @@ async fn not_found_stat() {
let resp = service let resp = service
.stat(tonic::Request::new(StatBlobRequest { .stat(tonic::Request::new(StatBlobRequest {
digest: BLOB_A_DIGEST.to_vec(), digest: BLOB_A_DIGEST.clone().into(),
..Default::default() ..Default::default()
})) }))
.await .await
@ -54,7 +54,7 @@ async fn put_read_stat() {
// Send blob A. // Send blob A.
let put_resp = service let put_resp = service
.put(tonic_mock::streaming_request(vec![BlobChunk { .put(tonic_mock::streaming_request(vec![BlobChunk {
data: BLOB_A.clone(), data: BLOB_A.clone().into(),
}])) }]))
.await .await
.expect("must succeed") .expect("must succeed")
@ -67,7 +67,7 @@ async fn put_read_stat() {
// expose it yet. // expose it yet.
let _resp = service let _resp = service
.stat(tonic::Request::new(StatBlobRequest { .stat(tonic::Request::new(StatBlobRequest {
digest: BLOB_A_DIGEST.to_vec(), digest: BLOB_A_DIGEST.clone().into(),
..Default::default() ..Default::default()
})) }))
.await .await
@ -77,7 +77,7 @@ async fn put_read_stat() {
// Read the blob. It should return the same data. // Read the blob. It should return the same data.
let resp = service let resp = service
.read(tonic::Request::new(ReadBlobRequest { .read(tonic::Request::new(ReadBlobRequest {
digest: BLOB_A_DIGEST.to_vec(), digest: BLOB_A_DIGEST.clone().into(),
})) }))
.await; .await;
@ -90,7 +90,7 @@ async fn put_read_stat() {
.expect("must be some") .expect("must be some")
.expect("must succeed"); .expect("must succeed");
assert_eq!(BLOB_A.to_vec(), item.data); assert_eq!(BLOB_A.clone(), item.data);
// … and no more elements // … and no more elements
assert!(rx.next().await.is_none()); assert!(rx.next().await.is_none());

View file

@ -42,7 +42,7 @@ async fn not_found() {
let resp = service let resp = service
.get(tonic::Request::new(GetDirectoryRequest { .get(tonic::Request::new(GetDirectoryRequest {
by_what: Some(ByWhat::Digest(DIRECTORY_A.digest().to_vec())), by_what: Some(ByWhat::Digest(DIRECTORY_A.digest().into())),
..Default::default() ..Default::default()
})) }))
.await; .await;
@ -80,7 +80,7 @@ async fn put_get() {
let items = get_directories( let items = get_directories(
&service, &service,
GetDirectoryRequest { GetDirectoryRequest {
by_what: Some(ByWhat::Digest(DIRECTORY_A.digest().to_vec())), by_what: Some(ByWhat::Digest(DIRECTORY_A.digest().into())),
..Default::default() ..Default::default()
}, },
) )
@ -122,7 +122,7 @@ async fn put_get_multiple() {
&service, &service,
GetDirectoryRequest { GetDirectoryRequest {
recursive: false, recursive: false,
by_what: Some(ByWhat::Digest(DIRECTORY_B.digest().to_vec())), by_what: Some(ByWhat::Digest(DIRECTORY_B.digest().into())),
}, },
) )
.await .await
@ -136,7 +136,7 @@ async fn put_get_multiple() {
&service, &service,
GetDirectoryRequest { GetDirectoryRequest {
recursive: true, recursive: true,
by_what: Some(ByWhat::Digest(DIRECTORY_B.digest().to_vec())), by_what: Some(ByWhat::Digest(DIRECTORY_B.digest().into())),
}, },
) )
.await .await
@ -172,7 +172,7 @@ async fn put_get_dedup() {
&service, &service,
GetDirectoryRequest { GetDirectoryRequest {
recursive: true, recursive: true,
by_what: Some(ByWhat::Digest(DIRECTORY_C.digest().to_vec())), by_what: Some(ByWhat::Digest(DIRECTORY_C.digest().into())),
}, },
) )
.await .await
@ -215,7 +215,7 @@ async fn put_reject_wrong_size() {
let broken_parent_directory = Directory { let broken_parent_directory = Directory {
directories: vec![DirectoryNode { directories: vec![DirectoryNode {
name: "foo".into(), name: "foo".into(),
digest: DIRECTORY_A.digest().to_vec(), digest: DIRECTORY_A.digest().into(),
size: 42, size: 42,
}], }],
..Default::default() ..Default::default()

View file

@ -32,7 +32,7 @@ async fn not_found() {
let resp = service let resp = service
.get(Request::new(GetPathInfoRequest { .get(Request::new(GetPathInfoRequest {
by_what: Some(ByOutputHash(DUMMY_OUTPUT_HASH.to_vec())), by_what: Some(ByOutputHash(DUMMY_OUTPUT_HASH.clone())),
})) }))
.await; .await;
@ -62,7 +62,7 @@ async fn put_get() {
let resp = service let resp = service
.get(Request::new(GetPathInfoRequest { .get(Request::new(GetPathInfoRequest {
by_what: Some(ByOutputHash(DUMMY_OUTPUT_HASH.to_vec())), by_what: Some(ByOutputHash(DUMMY_OUTPUT_HASH.clone())),
})) }))
.await; .await;

View file

@ -1,20 +1,28 @@
use crate::proto::{self, Node, PathInfo, ValidatePathInfoError}; use crate::proto::{self, Node, PathInfo, ValidatePathInfoError};
use crate::B3Digest;
use bytes::Bytes;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use nix_compat::store_path::{self, StorePath}; use nix_compat::store_path::{self, StorePath};
use std::str::FromStr; use std::str::FromStr;
use test_case::test_case; use test_case::test_case;
lazy_static! { lazy_static! {
static ref DUMMY_DIGEST: Vec<u8> = vec![ static ref DUMMY_DIGEST: B3Digest = {
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, let u: &[u8; 32] = &[
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
]; ];
static ref DUMMY_DIGEST_2: Vec<u8> = vec![ u.into()
0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, };
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, static ref DUMMY_DIGEST_2: B3Digest = {
0x00, 0x00, let u: &[u8; 32] = &[
0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
]; ];
u.into()
};
} }
const DUMMY_NAME: &str = "00000000000000000000000000000000-dummy"; const DUMMY_NAME: &str = "00000000000000000000000000000000-dummy";
@ -44,7 +52,7 @@ fn validate_no_node(
#[test_case( #[test_case(
proto::DirectoryNode { proto::DirectoryNode {
name: DUMMY_NAME.into(), name: DUMMY_NAME.into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.clone().into(),
size: 0, size: 0,
}, },
Ok(StorePath::from_str(DUMMY_NAME).expect("must succeed")); Ok(StorePath::from_str(DUMMY_NAME).expect("must succeed"));
@ -53,7 +61,7 @@ fn validate_no_node(
#[test_case( #[test_case(
proto::DirectoryNode { proto::DirectoryNode {
name: DUMMY_NAME.into(), name: DUMMY_NAME.into(),
digest: vec![], digest: Bytes::new(),
size: 0, size: 0,
}, },
Err(ValidatePathInfoError::InvalidDigestLen(0)); Err(ValidatePathInfoError::InvalidDigestLen(0));
@ -62,7 +70,7 @@ fn validate_no_node(
#[test_case( #[test_case(
proto::DirectoryNode { proto::DirectoryNode {
name: "invalid".into(), name: "invalid".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.clone().into(),
size: 0, size: 0,
}, },
Err(ValidatePathInfoError::InvalidNodeName( Err(ValidatePathInfoError::InvalidNodeName(
@ -88,7 +96,7 @@ fn validate_directory(
#[test_case( #[test_case(
proto::FileNode { proto::FileNode {
name: DUMMY_NAME.into(), name: DUMMY_NAME.into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.clone().into(),
size: 0, size: 0,
executable: false, executable: false,
}, },
@ -98,7 +106,7 @@ fn validate_directory(
#[test_case( #[test_case(
proto::FileNode { proto::FileNode {
name: DUMMY_NAME.into(), name: DUMMY_NAME.into(),
digest: vec![], digest: Bytes::new(),
..Default::default() ..Default::default()
}, },
Err(ValidatePathInfoError::InvalidDigestLen(0)); Err(ValidatePathInfoError::InvalidDigestLen(0));
@ -107,7 +115,7 @@ fn validate_directory(
#[test_case( #[test_case(
proto::FileNode { proto::FileNode {
name: "invalid".into(), name: "invalid".into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.clone().into(),
..Default::default() ..Default::default()
}, },
Err(ValidatePathInfoError::InvalidNodeName( Err(ValidatePathInfoError::InvalidNodeName(
@ -167,11 +175,11 @@ fn validate_references() {
node: Some(Node { node: Some(Node {
node: Some(proto::node::Node::Directory(proto::DirectoryNode { node: Some(proto::node::Node::Directory(proto::DirectoryNode {
name: DUMMY_NAME.into(), name: DUMMY_NAME.into(),
digest: DUMMY_DIGEST.to_vec(), digest: DUMMY_DIGEST.clone().into(),
size: 0, size: 0,
})), })),
}), }),
references: vec![DUMMY_DIGEST_2.to_vec()], references: vec![DUMMY_DIGEST_2.clone().into()],
narinfo: None, narinfo: None,
}; };
assert!(path_info.validate().is_ok()); assert!(path_info.validate().is_ok());
@ -180,7 +188,7 @@ fn validate_references() {
let path_info_with_narinfo_missing_refs = PathInfo { let path_info_with_narinfo_missing_refs = PathInfo {
narinfo: Some(proto::NarInfo { narinfo: Some(proto::NarInfo {
nar_size: 0, nar_size: 0,
nar_sha256: DUMMY_DIGEST.to_vec(), nar_sha256: DUMMY_DIGEST.clone().into(),
signatures: vec![], signatures: vec![],
reference_names: vec![], reference_names: vec![],
}), }),
@ -198,7 +206,7 @@ fn validate_references() {
let path_info_with_narinfo = PathInfo { let path_info_with_narinfo = PathInfo {
narinfo: Some(proto::NarInfo { narinfo: Some(proto::NarInfo {
nar_size: 0, nar_size: 0,
nar_sha256: DUMMY_DIGEST.to_vec(), nar_sha256: DUMMY_DIGEST.clone().into(),
signatures: vec![], signatures: vec![],
reference_names: vec![format!("/nix/store/{}", DUMMY_NAME)], reference_names: vec![format!("/nix/store/{}", DUMMY_NAME)],
}), }),

View file

@ -129,7 +129,7 @@ impl TvixStoreIO {
// assemble a new root_node with a name that is derived from the nar hash. // assemble a new root_node with a name that is derived from the nar hash.
let renamed_root_node = { let renamed_root_node = {
let name = output_path.to_string().into_bytes(); let name = output_path.to_string().into_bytes().into();
match root_node { match root_node {
crate::proto::node::Node::Directory(n) => { crate::proto::node::Node::Directory(n) => {
@ -153,7 +153,7 @@ impl TvixStoreIO {
references: vec![], references: vec![],
narinfo: Some(crate::proto::NarInfo { narinfo: Some(crate::proto::NarInfo {
nar_size, nar_size,
nar_sha256: nar_sha256.to_vec(), nar_sha256: nar_sha256.to_vec().into(),
signatures: vec![], signatures: vec![],
reference_names: vec![], reference_names: vec![],
// TODO: narinfo for talosctl.src contains `CA: fixed:r:sha256:1x13j5hy75221bf6kz7cpgld9vgic6bqx07w5xjs4pxnksj6lxb6` // TODO: narinfo for talosctl.src contains `CA: fixed:r:sha256:1x13j5hy75221bf6kz7cpgld9vgic6bqx07w5xjs4pxnksj6lxb6`
@ -264,7 +264,7 @@ impl EvalIO for TvixStoreIO {
} }
#[instrument(skip(self), ret, err)] #[instrument(skip(self), ret, err)]
fn read_dir(&self, path: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error> { fn read_dir(&self, path: &Path) -> Result<Vec<(bytes::Bytes, FileType)>, io::Error> {
if let Ok((store_path, sub_path)) = if let Ok((store_path, sub_path)) =
StorePath::from_absolute_path_full(&path.to_string_lossy()) StorePath::from_absolute_path_full(&path.to_string_lossy())
{ {
@ -283,7 +283,7 @@ impl EvalIO for TvixStoreIO {
})?; })?;
if let Some(directory) = self.directory_service.get(&digest)? { if let Some(directory) = self.directory_service.get(&digest)? {
let mut children: Vec<(Vec<u8>, FileType)> = Vec::new(); let mut children: Vec<(bytes::Bytes, FileType)> = Vec::new();
for node in directory.nodes() { for node in directory.nodes() {
children.push(match node { children.push(match node {
crate::proto::node::Node::Directory(e) => { crate::proto::node::Node::Directory(e) => {

View file

@ -8,13 +8,16 @@ pub const HELLOWORLD_BLOB_CONTENTS: &[u8] = b"Hello World!";
pub const EMPTY_BLOB_CONTENTS: &[u8] = b""; pub const EMPTY_BLOB_CONTENTS: &[u8] = b"";
lazy_static! { lazy_static! {
pub static ref DUMMY_DIGEST: Vec<u8> = vec![ pub static ref DUMMY_DIGEST: B3Digest = {
let u: &[u8; 32] = &[
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
]; ];
pub static ref DUMMY_DATA_1: Vec<u8> = vec![0x01, 0x02, 0x03]; u.into()
pub static ref DUMMY_DATA_2: Vec<u8> = vec![0x04, 0x05]; };
pub static ref DUMMY_DATA_1: bytes::Bytes = vec![0x01, 0x02, 0x03].into();
pub static ref DUMMY_DATA_2: bytes::Bytes = vec![0x04, 0x05].into();
pub static ref HELLOWORLD_BLOB_DIGEST: B3Digest = pub static ref HELLOWORLD_BLOB_DIGEST: B3Digest =
blake3::hash(HELLOWORLD_BLOB_CONTENTS).as_bytes().into(); blake3::hash(HELLOWORLD_BLOB_CONTENTS).as_bytes().into();
@ -22,19 +25,19 @@ lazy_static! {
blake3::hash(EMPTY_BLOB_CONTENTS).as_bytes().into(); blake3::hash(EMPTY_BLOB_CONTENTS).as_bytes().into();
// 2 bytes // 2 bytes
pub static ref BLOB_A: Vec<u8> = vec![0x00, 0x01]; pub static ref BLOB_A: bytes::Bytes = vec![0x00, 0x01].into();
pub static ref BLOB_A_DIGEST: B3Digest = blake3::hash(&BLOB_A).as_bytes().into(); pub static ref BLOB_A_DIGEST: B3Digest = blake3::hash(&BLOB_A).as_bytes().into();
// 1MB // 1MB
pub static ref BLOB_B: Vec<u8> = (0..255).collect::<Vec<u8>>().repeat(4 * 1024); pub static ref BLOB_B: bytes::Bytes = (0..255).collect::<Vec<u8>>().repeat(4 * 1024).into();
pub static ref BLOB_B_DIGEST: B3Digest = blake3::hash(&BLOB_B).as_bytes().into(); pub static ref BLOB_B_DIGEST: B3Digest = blake3::hash(&BLOB_B).as_bytes().into();
// Directories // Directories
pub static ref DIRECTORY_WITH_KEEP: proto::Directory = proto::Directory { pub static ref DIRECTORY_WITH_KEEP: proto::Directory = proto::Directory {
directories: vec![], directories: vec![],
files: vec![FileNode { files: vec![FileNode {
name: b".keep".to_vec(), name: b".keep".to_vec().into(),
digest: EMPTY_BLOB_DIGEST.to_vec(), digest: EMPTY_BLOB_DIGEST.clone().into(),
size: 0, size: 0,
executable: false, executable: false,
}], }],
@ -42,26 +45,26 @@ lazy_static! {
}; };
pub static ref DIRECTORY_COMPLICATED: proto::Directory = proto::Directory { pub static ref DIRECTORY_COMPLICATED: proto::Directory = proto::Directory {
directories: vec![DirectoryNode { directories: vec![DirectoryNode {
name: b"keep".to_vec(), name: b"keep".to_vec().into(),
digest: DIRECTORY_WITH_KEEP.digest().to_vec(), digest: DIRECTORY_WITH_KEEP.digest().into(),
size: DIRECTORY_WITH_KEEP.size(), size: DIRECTORY_WITH_KEEP.size(),
}], }],
files: vec![FileNode { files: vec![FileNode {
name: b".keep".to_vec(), name: b".keep".to_vec().into(),
digest: EMPTY_BLOB_DIGEST.to_vec(), digest: EMPTY_BLOB_DIGEST.clone().into(),
size: 0, size: 0,
executable: false, executable: false,
}], }],
symlinks: vec![SymlinkNode { symlinks: vec![SymlinkNode {
name: b"aa".to_vec(), name: b"aa".to_vec().into(),
target: b"/nix/store/somewhereelse".to_vec(), target: b"/nix/store/somewhereelse".to_vec().into(),
}], }],
}; };
pub static ref DIRECTORY_A: Directory = Directory::default(); pub static ref DIRECTORY_A: Directory = Directory::default();
pub static ref DIRECTORY_B: Directory = Directory { pub static ref DIRECTORY_B: Directory = Directory {
directories: vec![DirectoryNode { directories: vec![DirectoryNode {
name: b"a".to_vec(), name: b"a".to_vec().into(),
digest: DIRECTORY_A.digest().to_vec(), digest: DIRECTORY_A.digest().into(),
size: DIRECTORY_A.size(), size: DIRECTORY_A.size(),
}], }],
..Default::default() ..Default::default()
@ -69,13 +72,13 @@ lazy_static! {
pub static ref DIRECTORY_C: Directory = Directory { pub static ref DIRECTORY_C: Directory = Directory {
directories: vec![ directories: vec![
DirectoryNode { DirectoryNode {
name: b"a".to_vec(), name: b"a".to_vec().into(),
digest: DIRECTORY_A.digest().to_vec(), digest: DIRECTORY_A.digest().into(),
size: DIRECTORY_A.size(), size: DIRECTORY_A.size(),
}, },
DirectoryNode { DirectoryNode {
name: b"a'".to_vec(), name: b"a'".to_vec().into(),
digest: DIRECTORY_A.digest().to_vec(), digest: DIRECTORY_A.digest().into(),
size: DIRECTORY_A.size(), size: DIRECTORY_A.size(),
} }
], ],
@ -83,10 +86,10 @@ lazy_static! {
}; };
// output hash // output hash
pub static ref DUMMY_OUTPUT_HASH: Vec<u8> = vec![ pub static ref DUMMY_OUTPUT_HASH: bytes::Bytes = vec![
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00 0x00, 0x00, 0x00, 0x00, 0x00
]; ].into();
/// The NAR representation of a symlink pointing to `/nix/store/somewhereelse` /// The NAR representation of a symlink pointing to `/nix/store/somewhereelse`
pub static ref NAR_CONTENTS_SYMLINK: Vec<u8> = vec![ pub static ref NAR_CONTENTS_SYMLINK: Vec<u8> = vec![

View file

@ -54,7 +54,7 @@ fn single_file() {
assert_eq!( assert_eq!(
crate::proto::node::Node::File(proto::FileNode { crate::proto::node::Node::File(proto::FileNode {
name: "root".into(), name: "root".into(),
digest: HELLOWORLD_BLOB_DIGEST.to_vec(), digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
size: HELLOWORLD_BLOB_CONTENTS.len() as u32, size: HELLOWORLD_BLOB_CONTENTS.len() as u32,
executable: false, executable: false,
}), }),
@ -92,8 +92,14 @@ fn complicated() {
// ensure root_node matched expectations // ensure root_node matched expectations
assert_eq!( assert_eq!(
crate::proto::node::Node::Directory(proto::DirectoryNode { crate::proto::node::Node::Directory(proto::DirectoryNode {
name: tmpdir.path().file_name().unwrap().as_bytes().to_vec(), name: tmpdir
digest: DIRECTORY_COMPLICATED.digest().to_vec(), .path()
.file_name()
.unwrap()
.as_bytes()
.to_owned()
.into(),
digest: DIRECTORY_COMPLICATED.digest().into(),
size: DIRECTORY_COMPLICATED.size(), size: DIRECTORY_COMPLICATED.size(),
}), }),
root_node, root_node,

View file

@ -36,7 +36,7 @@ fn single_file_missing_blob() {
&mut buf, &mut buf,
&crate::proto::node::Node::File(FileNode { &crate::proto::node::Node::File(FileNode {
name: "doesntmatter".into(), name: "doesntmatter".into(),
digest: HELLOWORLD_BLOB_DIGEST.to_vec(), digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
size: HELLOWORLD_BLOB_CONTENTS.len() as u32, size: HELLOWORLD_BLOB_CONTENTS.len() as u32,
executable: false, executable: false,
}), }),
@ -77,7 +77,7 @@ fn single_file_wrong_blob_size() {
&mut buf, &mut buf,
&crate::proto::node::Node::File(FileNode { &crate::proto::node::Node::File(FileNode {
name: "doesntmatter".into(), name: "doesntmatter".into(),
digest: HELLOWORLD_BLOB_DIGEST.to_vec(), digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
size: 42, // <- note the wrong size here! size: 42, // <- note the wrong size here!
executable: false, executable: false,
}), }),
@ -102,7 +102,7 @@ fn single_file_wrong_blob_size() {
&mut buf, &mut buf,
&crate::proto::node::Node::File(FileNode { &crate::proto::node::Node::File(FileNode {
name: "doesntmatter".into(), name: "doesntmatter".into(),
digest: HELLOWORLD_BLOB_DIGEST.to_vec(), digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
size: 2, // <- note the wrong size here! size: 2, // <- note the wrong size here!
executable: false, executable: false,
}), }),
@ -127,7 +127,7 @@ fn single_file() {
// insert blob into the store // insert blob into the store
let mut writer = blob_service.open_write(); let mut writer = blob_service.open_write();
io::copy( io::copy(
&mut io::Cursor::new(HELLOWORLD_BLOB_CONTENTS.to_vec()), &mut io::Cursor::new(HELLOWORLD_BLOB_CONTENTS.clone()),
&mut writer, &mut writer,
) )
.unwrap(); .unwrap();
@ -139,7 +139,7 @@ fn single_file() {
&mut buf, &mut buf,
&crate::proto::node::Node::File(FileNode { &crate::proto::node::Node::File(FileNode {
name: "doesntmatter".into(), name: "doesntmatter".into(),
digest: HELLOWORLD_BLOB_DIGEST.to_vec(), digest: HELLOWORLD_BLOB_DIGEST.clone().into(),
size: HELLOWORLD_BLOB_CONTENTS.len() as u32, size: HELLOWORLD_BLOB_CONTENTS.len() as u32,
executable: false, executable: false,
}), }),
@ -160,7 +160,7 @@ fn test_complicated() {
// insert blob into the store // insert blob into the store
let mut writer = blob_service.open_write(); let mut writer = blob_service.open_write();
io::copy( io::copy(
&mut io::Cursor::new(EMPTY_BLOB_CONTENTS.to_vec()), &mut io::Cursor::new(EMPTY_BLOB_CONTENTS.clone()),
&mut writer, &mut writer,
) )
.unwrap(); .unwrap();
@ -177,7 +177,7 @@ fn test_complicated() {
&mut buf, &mut buf,
&crate::proto::node::Node::Directory(DirectoryNode { &crate::proto::node::Node::Directory(DirectoryNode {
name: "doesntmatter".into(), name: "doesntmatter".into(),
digest: DIRECTORY_COMPLICATED.digest().to_vec(), digest: DIRECTORY_COMPLICATED.digest().clone().into(),
size: DIRECTORY_COMPLICATED.size(), size: DIRECTORY_COMPLICATED.size(),
}), }),
blob_service.clone(), blob_service.clone(),
@ -191,7 +191,7 @@ fn test_complicated() {
let (nar_size, nar_digest) = calculate_size_and_sha256( let (nar_size, nar_digest) = calculate_size_and_sha256(
&crate::proto::node::Node::Directory(DirectoryNode { &crate::proto::node::Node::Directory(DirectoryNode {
name: "doesntmatter".into(), name: "doesntmatter".into(),
digest: DIRECTORY_COMPLICATED.digest().to_vec(), digest: DIRECTORY_COMPLICATED.digest().clone().into(),
size: DIRECTORY_COMPLICATED.size(), size: DIRECTORY_COMPLICATED.size(),
}), }),
blob_service, blob_service,