refactor(tvix/store): use bytes for node names and symlink targets

Some paths might use names that are not valid UTF-8. We should be able
to represent them.
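
As an illustration only (this snippet is not part of the change), a
minimal Rust sketch of the problem: on Unix, file names are arbitrary
byte sequences, so forcing them through String can fail or lose data,
while OsString/Vec<u8> keeps them intact.

    use std::ffi::OsString;
    use std::os::unix::ffi::OsStringExt;

    fn main() {
        // 0xff is not valid UTF-8, but it's a perfectly legal byte in a
        // Unix file name.
        let name = OsString::from_vec(vec![b'f', b'o', b'o', 0xff]);
        assert!(name.to_str().is_none()); // not representable as &str/String
        let bytes = name.into_vec(); // lossless round-trip as Vec<u8>
        assert_eq!(bytes, vec![b'f', b'o', b'o', 0xff]);
    }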

We don't actually need to touch the PathInfo structures, as they
represent StorePaths, which come with their own, stricter restrictions
and can't contain non-UTF-8 data anyway.

While this doesn't change the wire format of the gRPC messages
(protobuf string and bytes share the same length-delimited wire type),
it does change the interface of tvix_eval::EvalIO: its read_dir()
method now returns a list of Vec<u8> rather than SmolStr. Maybe this
should be OsString instead?
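
As a rough, simplified sketch of the interface change (the real trait
lives in tvix_eval and carries more methods; entry_name_to_string below
is a hypothetical helper, not part of this diff):

    use std::{io, path::Path};

    pub enum FileType {
        Directory,
        Regular,
        Symlink,
        Unknown,
    }

    pub trait EvalIO {
        // Previously: Result<Vec<(SmolStr, FileType)>, io::Error>.
        // Now entry names are raw bytes, so non-UTF-8 names survive.
        fn read_dir(&self, path: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error>;
    }

    // A caller that still needs a String (e.g. builtins.readDir) has to
    // handle invalid UTF-8 explicitly, for example by rejecting it:
    fn entry_name_to_string(name: Vec<u8>) -> Result<String, io::Error> {
        String::from_utf8(name)
            .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
    }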

Change-Id: I821016d9a58ec441ee081b0b9f01c9240723af0b
Reviewed-on: https://cl.tvl.fyi/c/depot/+/8974
Autosubmit: flokli <flokli@flokli.de>
Reviewed-by: raitobezarius <tvl@lahfa.xyz>
Tested-by: BuildkiteCI
Florian Klink 2023-07-18 19:37:25 +03:00 committed by clbot
parent 638f3e874d
commit 72e82ffcb1
27 changed files with 245 additions and 253 deletions

View file

@ -11,7 +11,6 @@ use std::process::Command;
use std::sync::RwLock;
use std::{io, path::PathBuf};
use smol_str::SmolStr;
use tvix_eval::{EvalIO, FileType, StdIO};
/// Compatibility implementation of [`EvalIO`] that uses C++ Nix to
@ -78,7 +77,7 @@ impl EvalIO for NixCompatIO {
self.underlying.read_to_string(path)
}
fn read_dir(&self, path: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error> {
fn read_dir(&self, path: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error> {
self.underlying.read_dir(path)
}
}

View file

@ -9,7 +9,6 @@
//! calculation will not work.
use crate::KnownPaths;
use smol_str::SmolStr;
use std::cell::RefCell;
use std::io;
use std::path::{Path, PathBuf};
@ -73,7 +72,7 @@ impl<T: EvalIO> EvalIO for TvixIO<T> {
self.actual.read_to_string(path)
}
fn read_dir(&self, path: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error> {
fn read_dir(&self, path: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error> {
self.actual.read_dir(path)
}
}

View file

@ -38,7 +38,8 @@ mod impure_builtins {
let dir = generators::request_read_dir(&co, path).await;
let res = dir.into_iter().map(|(name, ftype)| {
(
NixString::from(name.as_str()),
// TODO: propagate Vec<u8> into NixString.
NixString::from(String::from_utf8(name).expect("parsing file name as string")),
Value::String(
match ftype {
FileType::Directory => "directory",

View file

@ -15,12 +15,14 @@
//! In the context of Nix builds, callers also use this interface to determine
//! how store paths are opened and so on.
use smol_str::SmolStr;
use std::{
io,
path::{Path, PathBuf},
};
#[cfg(target_family = "unix")]
use std::os::unix::ffi::OsStringExt;
/// Types of files as represented by `builtins.readDir` in Nix.
#[derive(Debug)]
pub enum FileType {
@ -40,7 +42,7 @@ pub trait EvalIO {
/// Read the directory at the specified path and return the names
/// of its entries associated with their [`FileType`].
fn read_dir(&self, path: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error>;
fn read_dir(&self, path: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error>;
/// Import the given path. What this means depends on the
/// implementation, for example for a `std::io`-based
@ -63,6 +65,7 @@ pub trait EvalIO {
#[cfg(feature = "impure")]
pub struct StdIO;
// TODO: we might want to make this whole impl target_family = "unix".
#[cfg(feature = "impure")]
impl EvalIO for StdIO {
fn path_exists(&self, path: &Path) -> Result<bool, io::Error> {
@ -73,7 +76,7 @@ impl EvalIO for StdIO {
std::fs::read_to_string(&path)
}
fn read_dir(&self, path: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error> {
fn read_dir(&self, path: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error> {
let mut result = vec![];
for entry in path.read_dir()? {
@ -90,7 +93,7 @@ impl EvalIO for StdIO {
FileType::Unknown
};
result.push((SmolStr::new(entry.file_name().to_string_lossy()), val));
result.push((entry.file_name().into_vec(), val))
}
Ok(result)
@ -122,7 +125,7 @@ impl EvalIO for DummyIO {
))
}
fn read_dir(&self, _: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error> {
fn read_dir(&self, _: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error> {
Err(io::Error::new(
io::ErrorKind::Unsupported,
"I/O methods are not implemented in DummyIO",

View file

@ -10,7 +10,6 @@
use core::pin::Pin;
use genawaiter::rc::Co;
pub use genawaiter::rc::Gen;
use smol_str::SmolStr;
use std::fmt::Display;
use std::future::Future;
@ -187,7 +186,7 @@ pub enum VMResponse {
Path(PathBuf),
/// VM response with the contents of a directory.
Directory(Vec<(SmolStr, FileType)>),
Directory(Vec<(Vec<u8>, FileType)>),
/// VM response with a span to use at the current point.
Span(LightSpan),
@ -736,7 +735,7 @@ pub(crate) async fn request_path_exists(co: &GenCo, path: PathBuf) -> Value {
}
}
pub(crate) async fn request_read_dir(co: &GenCo, path: PathBuf) -> Vec<(SmolStr, FileType)> {
pub(crate) async fn request_read_dir(co: &GenCo, path: PathBuf) -> Vec<(Vec<u8>, FileType)> {
match co.yield_(VMRequest::ReadDir(path)).await {
VMResponse::Directory(dir) => dir,
msg => panic!(

View file

@ -67,21 +67,21 @@ impl<'a, 'w> Node<'a, 'w> {
}
/// Make this node a symlink.
pub fn symlink(mut self, target: &str) -> io::Result<()> {
pub fn symlink(mut self, target: &[u8]) -> io::Result<()> {
debug_assert!(
target.len() <= wire::MAX_TARGET_LEN,
"target.len() > {}",
wire::MAX_TARGET_LEN
);
debug_assert!(
!target.contains('\0'),
!target.contains(&b'\0'),
"invalid target characters: {target:?}"
);
debug_assert!(!target.is_empty(), "empty target");
self.write(&wire::TOK_SYM)?;
self.write(&target.len().to_le_bytes())?;
self.write(target.as_bytes())?;
self.write(target)?;
self.pad(target.len() as u64)?;
self.write(&wire::TOK_PAR)?;
Ok(())
@ -136,11 +136,11 @@ impl<'a, 'w> Node<'a, 'w> {
}
#[cfg(debug_assertions)]
type Name = String;
type Name = Vec<u8>;
#[cfg(not(debug_assertions))]
type Name = ();
fn into_name(_name: &str) -> Name {
fn into_name(_name: &[u8]) -> Name {
#[cfg(debug_assertions)]
_name.to_owned()
}
@ -163,17 +163,18 @@ impl<'a, 'w> Directory<'a, 'w> {
///
/// The entry is simply another [`Node`], which can then be filled like the
/// root of a NAR (including, of course, by nesting directories).
pub fn entry(&mut self, name: &str) -> io::Result<Node<'_, 'w>> {
pub fn entry(&mut self, name: &[u8]) -> io::Result<Node<'_, 'w>> {
debug_assert!(
name.len() <= wire::MAX_NAME_LEN,
"name.len() > {}",
wire::MAX_NAME_LEN
);
debug_assert!(!["", ".", ".."].contains(&name), "invalid name: {name:?}");
debug_assert!(
!name.contains(['/', '\0']),
"invalid name characters: {name:?}"
);
debug_assert!(name != b"", "name may not be empty");
debug_assert!(name != b".", "invalid name: {name:?}");
debug_assert!(name != b"..", "invalid name: {name:?}");
debug_assert!(!name.contains(&b'/'), "invalid name characters: {name:?}");
debug_assert!(!name.contains(&b'\0'), "invalid name characters: {name:?}");
match self.prev_name {
None => {
@ -187,7 +188,7 @@ impl<'a, 'w> Directory<'a, 'w> {
"misordered names: {_prev_name:?} >= {name:?}"
);
_prev_name.clear();
_prev_name.push_str(name);
_prev_name.append(&mut name.to_vec());
}
self.node.write(&wire::TOK_PAR)?;
}
@ -195,7 +196,7 @@ impl<'a, 'w> Directory<'a, 'w> {
self.node.write(&wire::TOK_ENT)?;
self.node.write(&name.len().to_le_bytes())?;
self.node.write(name.as_bytes())?;
self.node.write(name)?;
self.node.pad(name.len() as u64)?;
self.node.write(&wire::TOK_NOD)?;

View file

@ -103,7 +103,7 @@ type DirectoryNode struct {
unknownFields protoimpl.UnknownFields
// The (base)name of the directory
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Name []byte `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// The blake3 hash of a Directory message, serialized in protobuf canonical form.
Digest []byte `protobuf:"bytes,2,opt,name=digest,proto3" json:"digest,omitempty"`
// Number of child elements in the Directory referred to by `digest`.
@ -151,11 +151,11 @@ func (*DirectoryNode) Descriptor() ([]byte, []int) {
return file_tvix_store_protos_castore_proto_rawDescGZIP(), []int{1}
}
func (x *DirectoryNode) GetName() string {
func (x *DirectoryNode) GetName() []byte {
if x != nil {
return x.Name
}
return ""
return nil
}
func (x *DirectoryNode) GetDigest() []byte {
@ -179,7 +179,7 @@ type FileNode struct {
unknownFields protoimpl.UnknownFields
// The (base)name of the file
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Name []byte `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// The blake3 digest of the file contents
Digest []byte `protobuf:"bytes,2,opt,name=digest,proto3" json:"digest,omitempty"`
// The file content size
@ -220,11 +220,11 @@ func (*FileNode) Descriptor() ([]byte, []int) {
return file_tvix_store_protos_castore_proto_rawDescGZIP(), []int{2}
}
func (x *FileNode) GetName() string {
func (x *FileNode) GetName() []byte {
if x != nil {
return x.Name
}
return ""
return nil
}
func (x *FileNode) GetDigest() []byte {
@ -255,9 +255,9 @@ type SymlinkNode struct {
unknownFields protoimpl.UnknownFields
// The (base)name of the symlink
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Name []byte `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// The target of the symlink.
Target string `protobuf:"bytes,2,opt,name=target,proto3" json:"target,omitempty"`
Target []byte `protobuf:"bytes,2,opt,name=target,proto3" json:"target,omitempty"`
}
func (x *SymlinkNode) Reset() {
@ -292,18 +292,18 @@ func (*SymlinkNode) Descriptor() ([]byte, []int) {
return file_tvix_store_protos_castore_proto_rawDescGZIP(), []int{3}
}
func (x *SymlinkNode) GetName() string {
func (x *SymlinkNode) GetName() []byte {
if x != nil {
return x.Name
}
return ""
return nil
}
func (x *SymlinkNode) GetTarget() string {
func (x *SymlinkNode) GetTarget() []byte {
if x != nil {
return x.Target
}
return ""
return nil
}
var File_tvix_store_protos_castore_proto protoreflect.FileDescriptor
@ -325,20 +325,20 @@ var file_tvix_store_protos_castore_proto_rawDesc = []byte{
0x53, 0x79, 0x6d, 0x6c, 0x69, 0x6e, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x08, 0x73, 0x79, 0x6d,
0x6c, 0x69, 0x6e, 0x6b, 0x73, 0x22, 0x4f, 0x0a, 0x0d, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f,
0x72, 0x79, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01,
0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69,
0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69,
0x67, 0x65, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x64, 0x69, 0x67, 0x65,
0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d,
0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x22, 0x6a, 0x0a, 0x08, 0x46, 0x69, 0x6c, 0x65, 0x4e, 0x6f,
0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c,
0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74,
0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x12,
0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x04, 0x73, 0x69,
0x7a, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x61, 0x62, 0x6c, 0x65,
0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x61, 0x62,
0x6c, 0x65, 0x22, 0x39, 0x0a, 0x0b, 0x53, 0x79, 0x6d, 0x6c, 0x69, 0x6e, 0x6b, 0x4e, 0x6f, 0x64,
0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52,
0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x18,
0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x42, 0x28, 0x5a,
0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x42, 0x28, 0x5a,
0x26, 0x63, 0x6f, 0x64, 0x65, 0x2e, 0x74, 0x76, 0x6c, 0x2e, 0x66, 0x79, 0x69, 0x2f, 0x74, 0x76,
0x69, 0x78, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x3b,
0x73, 0x74, 0x6f, 0x72, 0x65, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,

View file

@ -25,7 +25,7 @@ message Directory {
// A DirectoryNode represents a directory in a Directory.
message DirectoryNode {
// The (base)name of the directory
string name = 1;
bytes name = 1;
// The blake3 hash of a Directory message, serialized in protobuf canonical form.
bytes digest = 2;
// Number of child elements in the Directory referred to by `digest`.
@ -44,7 +44,7 @@ message DirectoryNode {
// A FileNode represents a regular or executable file in a Directory.
message FileNode {
// The (base)name of the file
string name = 1;
bytes name = 1;
// The blake3 digest of the file contents
bytes digest = 2;
// The file content size
@ -56,7 +56,7 @@ message FileNode {
// A SymlinkNode represents a symbolic link in a Directory.
message SymlinkNode {
// The (base)name of the symlink
string name = 1;
bytes name = 1;
// The target of the symlink.
string target = 2;
bytes target = 2;
}

View file

@ -234,7 +234,7 @@ fn print_node(node: &Node, path: &Path) {
Node::Directory(directory_node) => {
info!(
path = ?path,
name = directory_node.name,
name = ?directory_node.name,
digest = BASE64.encode(&directory_node.digest),
"import successful",
)
@ -242,7 +242,7 @@ fn print_node(node: &Node, path: &Path) {
Node::File(file_node) => {
info!(
path = ?path,
name = file_node.name,
name = ?file_node.name,
digest = BASE64.encode(&file_node.digest),
"import successful"
)
@ -250,8 +250,8 @@ fn print_node(node: &Node, path: &Path) {
Node::Symlink(symlink_node) => {
info!(
path = ?path,
name = symlink_node.name,
target = symlink_node.target,
name = ?symlink_node.name,
target = ?symlink_node.target,
"import successful"
)
}

View file

@ -1,6 +1,6 @@
use super::DirectoryService;
use crate::{proto::NamedNode, B3Digest, Error};
use std::sync::Arc;
use std::{os::unix::ffi::OsStrExt, sync::Arc};
use tracing::{instrument, warn};
/// This traverses from a (root) node to the given (sub)path, returning the Node
@ -58,9 +58,9 @@ pub fn traverse_to(
// look for first_component in the [Directory].
// FUTUREWORK: as the nodes() iterator returns in a sorted fashion, we
// could stop as soon as e.name is larger than the search string.
let child_node = directory.nodes().find(|n| {
n.get_name() == first_component.as_os_str().to_str().unwrap()
});
let child_node = directory
.nodes()
.find(|n| n.get_name() == first_component.as_os_str().as_bytes());
match child_node {
// child node not found means there's no such element inside the directory.
@ -105,7 +105,7 @@ mod tests {
// construct the node for DIRECTORY_COMPLICATED
let node_directory_complicated =
crate::proto::node::Node::Directory(crate::proto::DirectoryNode {
name: "doesntmatter".to_string(),
name: "doesntmatter".into(),
digest: DIRECTORY_COMPLICATED.digest().to_vec(),
size: DIRECTORY_COMPLICATED.size(),
});

View file

@ -13,7 +13,7 @@ pub struct InodeTracker {
blob_digest_to_inode: HashMap<B3Digest, u64>,
// lookup table for symlinks by their target
symlink_target_to_inode: HashMap<String, u64>,
symlink_target_to_inode: HashMap<Vec<u8>, u64>,
// lookup table for directories by their B3Digest.
// Note the corresponding directory may not be present in data yet.
@ -171,7 +171,7 @@ impl InodeTracker {
self.blob_digest_to_inode.insert(digest.clone(), ino);
}
InodeData::Symlink(ref target) => {
self.symlink_target_to_inode.insert(target.to_string(), ino);
self.symlink_target_to_inode.insert(target.to_vec(), ino);
}
InodeData::Directory(DirectoryInodeData::Sparse(ref digest, _size)) => {
self.directory_digest_to_inode.insert(digest.clone(), ino);
@ -251,7 +251,7 @@ mod tests {
#[test]
fn put_symlink() {
let mut inode_tracker = InodeTracker::default();
let f = InodeData::Symlink("target".to_string());
let f = InodeData::Symlink("target".into());
// put it in
let ino = inode_tracker.put(f.clone());
@ -260,7 +260,7 @@ mod tests {
let data = inode_tracker.get(ino).expect("must be some");
match *data {
InodeData::Symlink(ref target) => {
assert_eq!("target", target);
assert_eq!(b"target".to_vec(), *target);
}
InodeData::Regular(..) | InodeData::Directory(..) => panic!("wrong type"),
}
@ -269,10 +269,7 @@ mod tests {
assert_eq!(ino, inode_tracker.put(f));
// inserting another file should return a different ino
assert_ne!(
ino,
inode_tracker.put(InodeData::Symlink("target2".to_string()))
);
assert_ne!(ino, inode_tracker.put(InodeData::Symlink("target2".into())));
}
// TODO: put sparse directory

View file

@ -5,7 +5,7 @@ use crate::{proto, B3Digest};
#[derive(Clone, Debug)]
pub enum InodeData {
Regular(B3Digest, u32, bool), // digest, size, executable
Symlink(String), // target
Symlink(Vec<u8>), // target
Directory(DirectoryInodeData), // either [DirectoryInodeData:Sparse] or [DirectoryInodeData:Populated]
}

View file

@ -19,6 +19,7 @@ use crate::{
use fuser::{FileAttr, ReplyAttr, Request};
use nix_compat::store_path::StorePath;
use std::io::{self, Read, Seek};
use std::os::unix::ffi::OsStrExt;
use std::str::FromStr;
use std::sync::Arc;
use std::{collections::HashMap, time::Duration};
@ -142,7 +143,7 @@ impl FUSE {
let root_node = path_info.node.unwrap().node.unwrap();
// The name must match what's passed in the lookup, otherwise we return nothing.
if root_node.get_name() != store_path.to_string() {
if root_node.get_name() != store_path.to_string().as_bytes() {
return Ok(None);
}
@ -279,7 +280,9 @@ impl fuser::Filesystem for FUSE {
let _enter = span.enter();
// in the children, find the one with the desired name.
if let Some((child_ino, _)) = children.iter().find(|e| e.1.get_name() == name) {
if let Some((child_ino, _)) =
children.iter().find(|e| e.1.get_name() == name.as_bytes())
{
// lookup the child [InodeData] in [self.inode_tracker].
// We know the inodes for children have already been allocated.
let child_inode_data = self.inode_tracker.get(*child_ino).unwrap();
@ -353,7 +356,7 @@ impl fuser::Filesystem for FUSE {
Node::File(_) => fuser::FileType::RegularFile,
Node::Symlink(_) => fuser::FileType::Symlink,
},
child_node.get_name(),
std::ffi::OsStr::from_bytes(child_node.get_name()),
);
if full {
break;
@ -480,7 +483,7 @@ impl fuser::Filesystem for FUSE {
InodeData::Directory(..) | InodeData::Regular(..) => {
reply.error(libc::EINVAL);
}
InodeData::Symlink(ref target) => reply.data(target.as_bytes()),
InodeData::Symlink(ref target) => reply.data(target),
}
}
}

View file

@ -57,7 +57,7 @@ fn populate_blob_a(
let path_info = PathInfo {
node: Some(proto::Node {
node: Some(proto::node::Node::File(FileNode {
name: BLOB_A_NAME.to_string(),
name: BLOB_A_NAME.into(),
digest: fixtures::BLOB_A_DIGEST.to_vec(),
size: fixtures::BLOB_A.len() as u32,
executable: false,
@ -83,7 +83,7 @@ fn populate_blob_b(
let path_info = PathInfo {
node: Some(proto::Node {
node: Some(proto::node::Node::File(FileNode {
name: BLOB_B_NAME.to_string(),
name: BLOB_B_NAME.into(),
digest: fixtures::BLOB_B_DIGEST.to_vec(),
size: fixtures::BLOB_B.len() as u32,
executable: false,
@ -103,8 +103,8 @@ fn populate_symlink(
let path_info = PathInfo {
node: Some(proto::Node {
node: Some(proto::node::Node::Symlink(proto::SymlinkNode {
name: SYMLINK_NAME.to_string(),
target: BLOB_A_NAME.to_string(),
name: SYMLINK_NAME.into(),
target: BLOB_A_NAME.into(),
})),
}),
..Default::default()
@ -123,8 +123,8 @@ fn populate_symlink2(
let path_info = PathInfo {
node: Some(proto::Node {
node: Some(proto::node::Node::Symlink(proto::SymlinkNode {
name: SYMLINK_NAME2.to_string(),
target: "/nix/store/somewhereelse".to_string(),
name: SYMLINK_NAME2.into(),
target: "/nix/store/somewhereelse".into(),
})),
}),
..Default::default()
@ -153,7 +153,7 @@ fn populate_directory_with_keep(
let path_info = PathInfo {
node: Some(proto::Node {
node: Some(proto::node::Node::Directory(DirectoryNode {
name: DIRECTORY_WITH_KEEP_NAME.to_string(),
name: DIRECTORY_WITH_KEEP_NAME.into(),
digest: fixtures::DIRECTORY_WITH_KEEP.digest().to_vec(),
size: fixtures::DIRECTORY_WITH_KEEP.size(),
})),
@ -174,7 +174,7 @@ fn populate_pathinfo_without_directory(
let path_info = PathInfo {
node: Some(proto::Node {
node: Some(proto::node::Node::Directory(DirectoryNode {
name: DIRECTORY_WITH_KEEP_NAME.to_string(),
name: DIRECTORY_WITH_KEEP_NAME.into(),
digest: fixtures::DIRECTORY_WITH_KEEP.digest().to_vec(),
size: fixtures::DIRECTORY_WITH_KEEP.size(),
})),
@ -194,7 +194,7 @@ fn populate_blob_a_without_blob(
let path_info = PathInfo {
node: Some(proto::Node {
node: Some(proto::node::Node::File(FileNode {
name: BLOB_A_NAME.to_string(),
name: BLOB_A_NAME.into(),
digest: fixtures::BLOB_A_DIGEST.to_vec(),
size: fixtures::BLOB_A.len() as u32,
executable: false,
@ -231,7 +231,7 @@ fn populate_directory_complicated(
let path_info = PathInfo {
node: Some(proto::Node {
node: Some(proto::node::Node::Directory(DirectoryNode {
name: DIRECTORY_COMPLICATED_NAME.to_string(),
name: DIRECTORY_COMPLICATED_NAME.into(),
digest: fixtures::DIRECTORY_COMPLICATED.digest().to_vec(),
size: fixtures::DIRECTORY_COMPLICATED.size(),
})),

View file

@ -1,6 +1,7 @@
use crate::blobservice::BlobService;
use crate::directoryservice::DirectoryService;
use crate::{directoryservice::DirectoryPutter, proto};
use std::os::unix::ffi::OsStrExt;
use std::sync::Arc;
use std::{
collections::HashMap,
@ -79,11 +80,7 @@ fn process_entry(
.map_err(|e| Error::UploadDirectoryError(entry.path().to_path_buf(), e))?;
return Ok(proto::node::Node::Directory(proto::DirectoryNode {
name: entry
.file_name()
.to_str()
.map(|s| Ok(s.to_owned()))
.unwrap_or(Err(Error::InvalidEncoding(entry.path().to_path_buf())))?,
name: entry.file_name().as_bytes().to_vec(),
digest: directory_digest.to_vec(),
size: directory_size,
}));
@ -94,15 +91,8 @@ fn process_entry(
.map_err(|e| Error::UnableToStat(entry_path.clone(), e))?;
return Ok(proto::node::Node::Symlink(proto::SymlinkNode {
name: entry
.file_name()
.to_str()
.map(|s| Ok(s.to_owned()))
.unwrap_or(Err(Error::InvalidEncoding(entry.path().to_path_buf())))?,
target: target
.to_str()
.map(|s| Ok(s.to_owned()))
.unwrap_or(Err(Error::InvalidEncoding(entry.path().to_path_buf())))?,
name: entry.file_name().as_bytes().to_vec(),
target: target.as_os_str().as_bytes().to_vec(),
}));
}
@ -123,11 +113,7 @@ fn process_entry(
let digest = writer.close()?;
return Ok(proto::node::Node::File(proto::FileNode {
name: entry
.file_name()
.to_str()
.map(|s| Ok(s.to_owned()))
.unwrap_or(Err(Error::InvalidEncoding(entry.path().to_path_buf())))?,
name: entry.file_name().as_bytes().to_vec(),
digest: digest.to_vec(),
size: metadata.len() as u32,
// If it's executable by the user, it'll become executable.
@ -163,13 +149,9 @@ pub fn ingest_path<P: AsRef<Path> + Debug>(
.as_ref()
.file_name()
.unwrap_or_default()
.to_str()
.map(|s| Ok(s.to_owned()))
.unwrap_or(Err(Error::InvalidEncoding(p.as_ref().to_path_buf())))?,
target: target
.to_str()
.map(|s| Ok(s.to_owned()))
.unwrap_or(Err(Error::InvalidEncoding(p.as_ref().to_path_buf())))?,
.as_bytes()
.to_vec(),
target: target.as_os_str().as_bytes().to_vec(),
}));
}

View file

@ -12,14 +12,14 @@ pub enum RenderError {
#[error("failure talking to a backing store client: {0}")]
StoreError(crate::Error),
#[error("unable to find directory {}, referred from {}", .0, .1)]
DirectoryNotFound(B3Digest, String),
#[error("unable to find directory {}, referred from {:?}", .0, .1)]
DirectoryNotFound(B3Digest, Vec<u8>),
#[error("unable to find blob {}, referred from {}", BASE64.encode(.0), .1)]
BlobNotFound([u8; 32], String),
#[error("unable to find blob {}, referred from {:?}", BASE64.encode(.0), .1)]
BlobNotFound([u8; 32], Vec<u8>),
#[error("unexpected size in metadata for blob {}, referred from {} returned, expected {}, got {}", BASE64.encode(.0), .1, .2, .3)]
UnexpectedBlobMeta([u8; 32], String, u32, u32),
#[error("unexpected size in metadata for blob {}, referred from {:?} returned, expected {}, got {}", BASE64.encode(.0), .1, .2, .3)]
UnexpectedBlobMeta([u8; 32], Vec<u8>, u32, u32),
#[error("failure using the NAR writer: {0}")]
NARWriterError(std::io::Error),

View file

@ -116,7 +116,7 @@ impl proto::directory_service_server::DirectoryService for GRPCDirectoryServiceW
match seen_directories_sizes.get(&child_directory_digest) {
None => {
return Err(Status::invalid_argument(format!(
"child directory '{}' ({}) in directory '{}' not seen yet",
"child directory '{:?}' ({}) in directory '{}' not seen yet",
child_directory.name,
&child_directory_digest,
&directory.digest(),
@ -125,7 +125,7 @@ impl proto::directory_service_server::DirectoryService for GRPCDirectoryServiceW
Some(seen_child_directory_size) => {
if seen_child_directory_size != &child_directory.size {
return Err(Status::invalid_argument(format!(
"child directory '{}' ({}) in directory '{}' referred with wrong size, expected {}, actual {}",
"child directory '{:?}' ({}) in directory '{}' referred with wrong size, expected {}, actual {}",
child_directory.name,
&child_directory_digest,
&directory.digest(),

View file

@ -1,6 +1,5 @@
#![allow(clippy::derive_partial_eq_without_eq)]
// https://github.com/hyperium/tonic/issues/1056
use std::str::FromStr;
use std::{collections::HashSet, iter::Peekable};
use thiserror::Error;
@ -35,14 +34,14 @@ mod tests;
#[derive(Debug, PartialEq, Eq, Error)]
pub enum ValidateDirectoryError {
/// Elements are not in sorted order
#[error("{0} is not sorted")]
WrongSorting(String),
#[error("{0:?} is not sorted")]
WrongSorting(Vec<u8>),
/// Multiple elements with the same name encountered
#[error("{0} is a duplicate name")]
DuplicateName(String),
#[error("{0:?} is a duplicate name")]
DuplicateName(Vec<u8>),
/// Invalid name encountered
#[error("Invalid name in {0}")]
InvalidName(String),
#[error("Invalid name in {0:?}")]
InvalidName(Vec<u8>),
/// Invalid digest length encountered
#[error("Invalid Digest length: {0}")]
InvalidDigestLen(usize),
@ -56,8 +55,8 @@ pub enum ValidatePathInfoError {
NoNodePresent(),
/// Invalid node name encountered.
#[error("Failed to parse {0} as StorePath: {1}")]
InvalidNodeName(String, store_path::Error),
#[error("Failed to parse {0:?} as StorePath: {1}")]
InvalidNodeName(Vec<u8>, store_path::Error),
/// The digest the (root) node refers to has invalid length.
#[error("Invalid Digest length: {0}")]
@ -73,10 +72,14 @@ pub enum ValidatePathInfoError {
/// error that's generated from the supplied constructor.
///
/// We disallow slashes, null bytes, '.', '..' and the empty string.
fn validate_node_name<E>(name: &str, err: fn(String) -> E) -> Result<(), E> {
if name.is_empty() || name == ".." || name == "." || name.contains('\x00') || name.contains('/')
fn validate_node_name<E>(name: &[u8], err: fn(Vec<u8>) -> E) -> Result<(), E> {
if name.is_empty()
|| name == b".."
|| name == b"."
|| name.contains(&0x00)
|| name.contains(&b'/')
{
return Err(err(name.to_string()));
return Err(err(name.to_vec()));
}
Ok(())
}
@ -95,12 +98,12 @@ fn validate_digest<E>(digest: &Vec<u8>, err: fn(usize) -> E) -> Result<(), E> {
/// On success, this returns the parsed [StorePath].
/// On error, it returns an error generated from the supplied constructor.
fn parse_node_name_root<E>(
name: &str,
err: fn(String, store_path::Error) -> E,
name: &[u8],
err: fn(Vec<u8>, store_path::Error) -> E,
) -> Result<StorePath, E> {
match StorePath::from_str(name) {
match StorePath::from_bytes(name) {
Ok(np) => Ok(np),
Err(e) => Err(err(name.to_string(), e)),
Err(e) => Err(err(name.to_vec(), e)),
}
}
@ -169,29 +172,29 @@ impl PathInfo {
/// NamedNode is implemented for [FileNode], [DirectoryNode] and [SymlinkNode]
/// and [node::Node], so we can ask all of them for the name easily.
pub trait NamedNode {
fn get_name(&self) -> &str;
fn get_name(&self) -> &[u8];
}
impl NamedNode for &FileNode {
fn get_name(&self) -> &str {
self.name.as_str()
fn get_name(&self) -> &[u8] {
&self.name
}
}
impl NamedNode for &DirectoryNode {
fn get_name(&self) -> &str {
self.name.as_str()
fn get_name(&self) -> &[u8] {
&self.name
}
}
impl NamedNode for &SymlinkNode {
fn get_name(&self) -> &str {
self.name.as_str()
fn get_name(&self) -> &[u8] {
&self.name
}
}
impl NamedNode for node::Node {
fn get_name(&self) -> &str {
fn get_name(&self) -> &[u8] {
match self {
node::Node::File(node_file) => &node_file.name,
node::Node::Directory(node_directory) => &node_directory.name,
@ -204,11 +207,11 @@ impl NamedNode for node::Node {
/// If the passed name is larger than the previous one, the reference is updated.
/// If it's not, an error is returned.
fn update_if_lt_prev<'n>(
prev_name: &mut &'n str,
name: &'n str,
prev_name: &mut &'n [u8],
name: &'n [u8],
) -> Result<(), ValidateDirectoryError> {
if *name < **prev_name {
return Err(ValidateDirectoryError::WrongSorting(name.to_string()));
return Err(ValidateDirectoryError::WrongSorting(name.to_vec()));
}
*prev_name = name;
Ok(())
@ -217,11 +220,11 @@ fn update_if_lt_prev<'n>(
/// Inserts the given name into a HashSet if it's not already in there.
/// If it is, an error is returned.
fn insert_once<'n>(
seen_names: &mut HashSet<&'n str>,
name: &'n str,
seen_names: &mut HashSet<&'n [u8]>,
name: &'n [u8],
) -> Result<(), ValidateDirectoryError> {
if seen_names.get(name).is_some() {
return Err(ValidateDirectoryError::DuplicateName(name.to_string()));
return Err(ValidateDirectoryError::DuplicateName(name.to_vec()));
}
seen_names.insert(name);
Ok(())
@ -258,11 +261,11 @@ impl Directory {
/// - not properly sorted lists
/// - duplicate names in the three lists
pub fn validate(&self) -> Result<(), ValidateDirectoryError> {
let mut seen_names: HashSet<&str> = HashSet::new();
let mut seen_names: HashSet<&[u8]> = HashSet::new();
let mut last_directory_name: &str = "";
let mut last_file_name: &str = "";
let mut last_symlink_name: &str = "";
let mut last_directory_name: &[u8] = b"";
let mut last_file_name: &[u8] = b"";
let mut last_symlink_name: &[u8] = b"";
// check directories
for directory_node in &self.directories {
@ -272,8 +275,8 @@ impl Directory {
ValidateDirectoryError::InvalidDigestLen,
)?;
update_if_lt_prev(&mut last_directory_name, directory_node.name.as_str())?;
insert_once(&mut seen_names, directory_node.name.as_str())?;
update_if_lt_prev(&mut last_directory_name, &directory_node.name)?;
insert_once(&mut seen_names, &directory_node.name)?;
}
// check files
@ -281,16 +284,16 @@ impl Directory {
validate_node_name(&file_node.name, ValidateDirectoryError::InvalidName)?;
validate_digest(&file_node.digest, ValidateDirectoryError::InvalidDigestLen)?;
update_if_lt_prev(&mut last_file_name, file_node.name.as_str())?;
insert_once(&mut seen_names, file_node.name.as_str())?;
update_if_lt_prev(&mut last_file_name, &file_node.name)?;
insert_once(&mut seen_names, &file_node.name)?;
}
// check symlinks
for symlink_node in &self.symlinks {
validate_node_name(&symlink_node.name, ValidateDirectoryError::InvalidName)?;
update_if_lt_prev(&mut last_symlink_name, symlink_node.name.as_str())?;
insert_once(&mut seen_names, symlink_node.name.as_str())?;
update_if_lt_prev(&mut last_symlink_name, &symlink_node.name)?;
insert_once(&mut seen_names, &symlink_node.name)?;
}
Ok(())

View file

@ -20,7 +20,7 @@ fn size() {
{
let d = Directory {
directories: vec![DirectoryNode {
name: String::from("foo"),
name: "foo".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 0,
}],
@ -31,7 +31,7 @@ fn size() {
{
let d = Directory {
directories: vec![DirectoryNode {
name: String::from("foo"),
name: "foo".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 4,
}],
@ -42,7 +42,7 @@ fn size() {
{
let d = Directory {
files: vec![FileNode {
name: String::from("foo"),
name: "foo".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 42,
executable: false,
@ -54,8 +54,8 @@ fn size() {
{
let d = Directory {
symlinks: vec![SymlinkNode {
name: String::from("foo"),
target: String::from("bar"),
name: "foo".into(),
target: "bar".into(),
}],
..Default::default()
};
@ -89,7 +89,7 @@ fn validate_invalid_names() {
{
let d = Directory {
directories: vec![DirectoryNode {
name: "".to_string(),
name: "".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 42,
}],
@ -97,7 +97,7 @@ fn validate_invalid_names() {
};
match d.validate().expect_err("must fail") {
ValidateDirectoryError::InvalidName(n) => {
assert_eq!(n, "")
assert_eq!(n, b"")
}
_ => panic!("unexpected error"),
};
@ -106,7 +106,7 @@ fn validate_invalid_names() {
{
let d = Directory {
directories: vec![DirectoryNode {
name: ".".to_string(),
name: ".".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 42,
}],
@ -114,7 +114,7 @@ fn validate_invalid_names() {
};
match d.validate().expect_err("must fail") {
ValidateDirectoryError::InvalidName(n) => {
assert_eq!(n, ".")
assert_eq!(n, b".")
}
_ => panic!("unexpected error"),
};
@ -123,7 +123,7 @@ fn validate_invalid_names() {
{
let d = Directory {
files: vec![FileNode {
name: "..".to_string(),
name: "..".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 42,
executable: false,
@ -132,7 +132,7 @@ fn validate_invalid_names() {
};
match d.validate().expect_err("must fail") {
ValidateDirectoryError::InvalidName(n) => {
assert_eq!(n, "..")
assert_eq!(n, b"..")
}
_ => panic!("unexpected error"),
};
@ -141,14 +141,14 @@ fn validate_invalid_names() {
{
let d = Directory {
symlinks: vec![SymlinkNode {
name: "\x00".to_string(),
target: "foo".to_string(),
name: "\x00".into(),
target: "foo".into(),
}],
..Default::default()
};
match d.validate().expect_err("must fail") {
ValidateDirectoryError::InvalidName(n) => {
assert_eq!(n, "\x00")
assert_eq!(n, b"\x00")
}
_ => panic!("unexpected error"),
};
@ -157,14 +157,14 @@ fn validate_invalid_names() {
{
let d = Directory {
symlinks: vec![SymlinkNode {
name: "foo/bar".to_string(),
target: "foo".to_string(),
name: "foo/bar".into(),
target: "foo".into(),
}],
..Default::default()
};
match d.validate().expect_err("must fail") {
ValidateDirectoryError::InvalidName(n) => {
assert_eq!(n, "foo/bar")
assert_eq!(n, b"foo/bar")
}
_ => panic!("unexpected error"),
};
@ -175,7 +175,7 @@ fn validate_invalid_names() {
fn validate_invalid_digest() {
let d = Directory {
directories: vec![DirectoryNode {
name: "foo".to_string(),
name: "foo".into(),
digest: vec![0x00, 0x42], // invalid length
size: 42,
}],
@ -196,12 +196,12 @@ fn validate_sorting() {
let d = Directory {
directories: vec![
DirectoryNode {
name: "b".to_string(),
name: "b".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 42,
},
DirectoryNode {
name: "a".to_string(),
name: "a".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 42,
},
@ -210,7 +210,7 @@ fn validate_sorting() {
};
match d.validate().expect_err("must fail") {
ValidateDirectoryError::WrongSorting(s) => {
assert_eq!(s, "a".to_string());
assert_eq!(s, b"a");
}
_ => panic!("unexpected error"),
}
@ -221,12 +221,12 @@ fn validate_sorting() {
let d = Directory {
directories: vec![
DirectoryNode {
name: "a".to_string(),
name: "a".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 42,
},
DirectoryNode {
name: "a".to_string(),
name: "a".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 42,
},
@ -235,7 +235,7 @@ fn validate_sorting() {
};
match d.validate().expect_err("must fail") {
ValidateDirectoryError::DuplicateName(s) => {
assert_eq!(s, "a".to_string());
assert_eq!(s, b"a");
}
_ => panic!("unexpected error"),
}
@ -246,12 +246,12 @@ fn validate_sorting() {
let d = Directory {
directories: vec![
DirectoryNode {
name: "a".to_string(),
name: "a".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 42,
},
DirectoryNode {
name: "b".to_string(),
name: "b".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 42,
},
@ -267,19 +267,19 @@ fn validate_sorting() {
let d = Directory {
directories: vec![
DirectoryNode {
name: "b".to_string(),
name: "b".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 42,
},
DirectoryNode {
name: "c".to_string(),
name: "c".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 42,
},
],
symlinks: vec![SymlinkNode {
name: "a".to_string(),
target: "foo".to_string(),
name: "a".into(),
target: "foo".into(),
}],
..Default::default()
};

View file

@ -1,7 +1,7 @@
use crate::proto::node::Node;
use crate::proto::Directory;
use crate::proto::DirectoryNode;
use crate::proto::FileNode;
use crate::proto::NamedNode;
use crate::proto::SymlinkNode;
#[test]
@ -9,68 +9,66 @@ fn iterator() {
let d = Directory {
directories: vec![
DirectoryNode {
name: "c".to_string(),
name: "c".into(),
..DirectoryNode::default()
},
DirectoryNode {
name: "d".to_string(),
name: "d".into(),
..DirectoryNode::default()
},
DirectoryNode {
name: "h".to_string(),
name: "h".into(),
..DirectoryNode::default()
},
DirectoryNode {
name: "l".to_string(),
name: "l".into(),
..DirectoryNode::default()
},
],
files: vec![
FileNode {
name: "b".to_string(),
name: "b".into(),
..FileNode::default()
},
FileNode {
name: "e".to_string(),
name: "e".into(),
..FileNode::default()
},
FileNode {
name: "g".to_string(),
name: "g".into(),
..FileNode::default()
},
FileNode {
name: "j".to_string(),
name: "j".into(),
..FileNode::default()
},
],
symlinks: vec![
SymlinkNode {
name: "a".to_string(),
name: "a".into(),
..SymlinkNode::default()
},
SymlinkNode {
name: "f".to_string(),
name: "f".into(),
..SymlinkNode::default()
},
SymlinkNode {
name: "i".to_string(),
name: "i".into(),
..SymlinkNode::default()
},
SymlinkNode {
name: "k".to_string(),
name: "k".into(),
..SymlinkNode::default()
},
],
};
// We keep these strings here and convert to string to make the comparison
// less messy.
let mut node_names: Vec<String> = vec![];
for node in d.nodes() {
match node {
Node::Directory(n) => node_names.push(n.name),
Node::File(n) => node_names.push(n.name),
Node::Symlink(n) => node_names.push(n.name),
};
node_names.push(String::from_utf8(node.get_name().to_vec()).unwrap());
}
assert_eq!(

View file

@ -190,8 +190,8 @@ async fn put_reject_failed_validation() {
// construct a broken Directory message that fails validation
let broken_directory = Directory {
symlinks: vec![SymlinkNode {
name: "".to_string(),
target: "doesntmatter".to_string(),
name: "".into(),
target: "doesntmatter".into(),
}],
..Default::default()
};
@ -214,7 +214,7 @@ async fn put_reject_wrong_size() {
// Construct a directory referring to DIRECTORY_A, but with wrong size.
let broken_parent_directory = Directory {
directories: vec![DirectoryNode {
name: "foo".to_string(),
name: "foo".into(),
digest: DIRECTORY_A.digest().to_vec(),
size: 42,
}],

View file

@ -48,8 +48,8 @@ async fn put_get() {
let path_info = PathInfo {
node: Some(Node {
node: Some(Symlink(SymlinkNode {
name: "00000000000000000000000000000000-foo".to_string(),
target: "doesntmatter".to_string(),
name: "00000000000000000000000000000000-foo".into(),
target: "doesntmatter".into(),
})),
}),
..Default::default()

View file

@ -43,7 +43,7 @@ fn validate_no_node(
#[test_case(
proto::DirectoryNode {
name: DUMMY_NAME.to_string(),
name: DUMMY_NAME.into(),
digest: DUMMY_DIGEST.to_vec(),
size: 0,
},
@ -52,7 +52,7 @@ fn validate_no_node(
)]
#[test_case(
proto::DirectoryNode {
name: DUMMY_NAME.to_string(),
name: DUMMY_NAME.into(),
digest: vec![],
size: 0,
},
@ -61,12 +61,12 @@ fn validate_no_node(
)]
#[test_case(
proto::DirectoryNode {
name: "invalid".to_string(),
name: "invalid".into(),
digest: DUMMY_DIGEST.to_vec(),
size: 0,
},
Err(ValidatePathInfoError::InvalidNodeName(
"invalid".to_string(),
"invalid".into(),
store_path::Error::InvalidLength()
));
"invalid node name"
@ -87,7 +87,7 @@ fn validate_directory(
#[test_case(
proto::FileNode {
name: DUMMY_NAME.to_string(),
name: DUMMY_NAME.into(),
digest: DUMMY_DIGEST.to_vec(),
size: 0,
executable: false,
@ -97,7 +97,7 @@ fn validate_directory(
)]
#[test_case(
proto::FileNode {
name: DUMMY_NAME.to_string(),
name: DUMMY_NAME.into(),
digest: vec![],
..Default::default()
},
@ -106,12 +106,12 @@ fn validate_directory(
)]
#[test_case(
proto::FileNode {
name: "invalid".to_string(),
name: "invalid".into(),
digest: DUMMY_DIGEST.to_vec(),
..Default::default()
},
Err(ValidatePathInfoError::InvalidNodeName(
"invalid".to_string(),
"invalid".into(),
store_path::Error::InvalidLength()
));
"invalid node name"
@ -129,7 +129,7 @@ fn validate_file(t_file_node: proto::FileNode, t_result: Result<StorePath, Valid
#[test_case(
proto::SymlinkNode {
name: DUMMY_NAME.to_string(),
name: DUMMY_NAME.into(),
..Default::default()
},
Ok(StorePath::from_str(DUMMY_NAME).expect("must succeed"));
@ -137,11 +137,11 @@ fn validate_file(t_file_node: proto::FileNode, t_result: Result<StorePath, Valid
)]
#[test_case(
proto::SymlinkNode {
name: "invalid".to_string(),
name: "invalid".into(),
..Default::default()
},
Err(ValidatePathInfoError::InvalidNodeName(
"invalid".to_string(),
"invalid".into(),
store_path::Error::InvalidLength()
));
"invalid node name"
@ -166,7 +166,7 @@ fn validate_references() {
let path_info = PathInfo {
node: Some(Node {
node: Some(proto::node::Node::Directory(proto::DirectoryNode {
name: DUMMY_NAME.to_string(),
name: DUMMY_NAME.into(),
digest: DUMMY_DIGEST.to_vec(),
size: 0,
})),

View file

@ -7,7 +7,6 @@ use nix_compat::{
nixhash::{HashAlgo, NixHash, NixHashWithMode},
store_path::{build_regular_ca_path, StorePath},
};
use smol_str::SmolStr;
use std::{io, path::Path, path::PathBuf, sync::Arc};
use tracing::{error, instrument, warn};
use tvix_eval::{EvalIO, FileType, StdIO};
@ -130,7 +129,7 @@ impl TvixStoreIO {
// assemble a new root_node with a name that is derived from the nar hash.
let renamed_root_node = {
let name = output_path.to_string();
let name = output_path.to_string().into_bytes();
match root_node {
crate::proto::node::Node::Directory(n) => {
@ -265,7 +264,7 @@ impl EvalIO for TvixStoreIO {
}
#[instrument(skip(self), ret, err)]
fn read_dir(&self, path: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error> {
fn read_dir(&self, path: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error> {
if let Ok((store_path, sub_path)) =
StorePath::from_absolute_path_full(&path.to_string_lossy())
{
@ -285,17 +284,17 @@ impl EvalIO for TvixStoreIO {
})?;
if let Some(directory) = self.directory_service.get(&digest)? {
let mut children: Vec<(SmolStr, FileType)> = Vec::new();
let mut children: Vec<(Vec<u8>, FileType)> = Vec::new();
for node in directory.nodes() {
children.push(match node {
crate::proto::node::Node::Directory(e) => {
(e.name.into(), FileType::Directory)
(e.name, FileType::Directory)
}
crate::proto::node::Node::File(e) => {
(e.name.into(), FileType::Regular)
(e.name, FileType::Regular)
}
crate::proto::node::Node::Symlink(e) => {
(e.name.into(), FileType::Symlink)
(e.name, FileType::Symlink)
}
})
}
@ -338,11 +337,20 @@ impl EvalIO for TvixStoreIO {
let path_info = self.import_path_with_pathinfo(path)?;
// from the [PathInfo], extract the store path (as string).
let mut path = PathBuf::from(nix_compat::store_path::STORE_DIR_WITH_SLASH);
path.push(path_info.node.unwrap().node.unwrap().get_name());
Ok({
let mut path = PathBuf::from(nix_compat::store_path::STORE_DIR_WITH_SLASH);
// and return it
Ok(path)
let root_node_name = path_info.node.unwrap().node.unwrap().get_name().to_vec();
// This must be a string, otherwise it would have failed validation.
let root_node_name = String::from_utf8(root_node_name).unwrap();
// append to the PathBuf
path.push(root_node_name);
// and return it
path
})
}
#[instrument(skip(self), ret)]

View file

@ -33,7 +33,7 @@ lazy_static! {
pub static ref DIRECTORY_WITH_KEEP: proto::Directory = proto::Directory {
directories: vec![],
files: vec![FileNode {
name: ".keep".to_string(),
name: b".keep".to_vec(),
digest: EMPTY_BLOB_DIGEST.to_vec(),
size: 0,
executable: false,
@ -42,25 +42,25 @@ lazy_static! {
};
pub static ref DIRECTORY_COMPLICATED: proto::Directory = proto::Directory {
directories: vec![DirectoryNode {
name: "keep".to_string(),
name: b"keep".to_vec(),
digest: DIRECTORY_WITH_KEEP.digest().to_vec(),
size: DIRECTORY_WITH_KEEP.size(),
}],
files: vec![FileNode {
name: ".keep".to_string(),
name: b".keep".to_vec(),
digest: EMPTY_BLOB_DIGEST.to_vec(),
size: 0,
executable: false,
}],
symlinks: vec![SymlinkNode {
name: "aa".to_string(),
target: "/nix/store/somewhereelse".to_string(),
name: b"aa".to_vec(),
target: b"/nix/store/somewhereelse".to_vec(),
}],
};
pub static ref DIRECTORY_A: Directory = Directory::default();
pub static ref DIRECTORY_B: Directory = Directory {
directories: vec![DirectoryNode {
name: "a".to_string(),
name: b"a".to_vec(),
digest: DIRECTORY_A.digest().to_vec(),
size: DIRECTORY_A.size(),
}],
@ -69,12 +69,12 @@ lazy_static! {
pub static ref DIRECTORY_C: Directory = Directory {
directories: vec![
DirectoryNode {
name: "a".to_string(),
name: b"a".to_vec(),
digest: DIRECTORY_A.digest().to_vec(),
size: DIRECTORY_A.size(),
},
DirectoryNode {
name: "a'".to_string(),
name: b"a'".to_vec(),
digest: DIRECTORY_A.digest().to_vec(),
size: DIRECTORY_A.size(),
}

View file

@ -5,6 +5,9 @@ use crate::tests::fixtures::DIRECTORY_COMPLICATED;
use crate::tests::fixtures::*;
use tempfile::TempDir;
#[cfg(target_family = "unix")]
use std::os::unix::ffi::OsStrExt;
#[cfg(target_family = "unix")]
#[test]
fn symlink() {
@ -26,8 +29,8 @@ fn symlink() {
assert_eq!(
crate::proto::node::Node::Symlink(proto::SymlinkNode {
name: "doesntmatter".to_string(),
target: "/nix/store/somewhereelse".to_string(),
name: "doesntmatter".into(),
target: "/nix/store/somewhereelse".into(),
}),
root_node,
)
@ -50,7 +53,7 @@ fn single_file() {
assert_eq!(
crate::proto::node::Node::File(proto::FileNode {
name: "root".to_string(),
name: "root".into(),
digest: HELLOWORLD_BLOB_DIGEST.to_vec(),
size: HELLOWORLD_BLOB_CONTENTS.len() as u32,
executable: false,
@ -62,6 +65,7 @@ fn single_file() {
assert!(blob_service.has(&HELLOWORLD_BLOB_DIGEST).unwrap());
}
#[cfg(target_family = "unix")]
#[test]
fn complicated() {
let tmpdir = TempDir::new().unwrap();
@ -88,12 +92,7 @@ fn complicated() {
// ensure root_node matched expectations
assert_eq!(
crate::proto::node::Node::Directory(proto::DirectoryNode {
name: tmpdir
.path()
.file_name()
.unwrap()
.to_string_lossy()
.to_string(),
name: tmpdir.path().file_name().unwrap().as_bytes().to_vec(),
digest: DIRECTORY_COMPLICATED.digest().to_vec(),
size: DIRECTORY_COMPLICATED.size(),
}),

View file

@ -15,8 +15,8 @@ fn single_symlink() {
write_nar(
&mut buf,
&crate::proto::node::Node::Symlink(SymlinkNode {
name: "doesntmatter".to_string(),
target: "/nix/store/somewhereelse".to_string(),
name: "doesntmatter".into(),
target: "/nix/store/somewhereelse".into(),
}),
// don't put anything in the stores, as we don't actually do any requests.
gen_blob_service(),
@ -35,7 +35,7 @@ fn single_file_missing_blob() {
let e = write_nar(
&mut buf,
&crate::proto::node::Node::File(FileNode {
name: "doesntmatter".to_string(),
name: "doesntmatter".into(),
digest: HELLOWORLD_BLOB_DIGEST.to_vec(),
size: HELLOWORLD_BLOB_CONTENTS.len() as u32,
executable: false,
@ -76,7 +76,7 @@ fn single_file_wrong_blob_size() {
let e = write_nar(
&mut buf,
&crate::proto::node::Node::File(FileNode {
name: "doesntmatter".to_string(),
name: "doesntmatter".into(),
digest: HELLOWORLD_BLOB_DIGEST.to_vec(),
size: 42, // <- note the wrong size here!
executable: false,
@ -101,7 +101,7 @@ fn single_file_wrong_blob_size() {
let e = write_nar(
&mut buf,
&crate::proto::node::Node::File(FileNode {
name: "doesntmatter".to_string(),
name: "doesntmatter".into(),
digest: HELLOWORLD_BLOB_DIGEST.to_vec(),
size: 2, // <- note the wrong size here!
executable: false,
@ -138,7 +138,7 @@ fn single_file() {
write_nar(
&mut buf,
&crate::proto::node::Node::File(FileNode {
name: "doesntmatter".to_string(),
name: "doesntmatter".into(),
digest: HELLOWORLD_BLOB_DIGEST.to_vec(),
size: HELLOWORLD_BLOB_CONTENTS.len() as u32,
executable: false,
@ -176,7 +176,7 @@ fn test_complicated() {
write_nar(
&mut buf,
&crate::proto::node::Node::Directory(DirectoryNode {
name: "doesntmatter".to_string(),
name: "doesntmatter".into(),
digest: DIRECTORY_COMPLICATED.digest().to_vec(),
size: DIRECTORY_COMPLICATED.size(),
}),
@ -190,7 +190,7 @@ fn test_complicated() {
// ensure calculate_nar does return the correct sha256 digest and sum.
let (nar_size, nar_digest) = calculate_size_and_sha256(
&crate::proto::node::Node::Directory(DirectoryNode {
name: "doesntmatter".to_string(),
name: "doesntmatter".into(),
digest: DIRECTORY_COMPLICATED.digest().to_vec(),
size: DIRECTORY_COMPLICATED.size(),
}),