refactor(tvix/store): use strictly typed PathInfo struct

This switches the PathInfoService trait from using the proto-derived
PathInfo struct to a more restrictive struct, and updates all
implementations to use it.

It removes a lot of the previous conversions and checks, as invalid
states become unrepresentable and validations are expressed at the
type level.

PathInfoService implementations consuming protobuf need to convert and
do the verification internally, and can only return the strongly typed
variant.

The nix_compat::narinfo::NarInfo conversions for the proto PathInfo
are removed; we only keep a conversion producing a NarInfo
representation from the strongly typed struct.

Converting back to a PathInfo now requires the root node, but is
otherwise trivial, so it is left to the users.

Co-Authored-By: Florian Klink <flokli@flokli.de>
Change-Id: I6fdfdb44063efebb44a8f0097b6b81a828717e03
Reviewed-on: https://cl.tvl.fyi/c/depot/+/12588
Reviewed-by: flokli <flokli@flokli.de>
Tested-by: BuildkiteCI
Marijan Petričević 2024-10-10 09:11:17 -05:00
parent b4ccaac7ad
commit e8040ec61f
26 changed files with 726 additions and 1042 deletions
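
For illustration only (not part of the diff): a minimal sketch of how a consumer assembles the new strictly typed PathInfo, assuming the root node and the NAR size/digest are already known. Field names follow the struct introduced in this commit; the helper name is made up.

use nix_compat::store_path::StorePath;
use tvix_store::pathinfoservice::PathInfo;

/// Hypothetical helper: assemble a strictly typed PathInfo.
/// Invalid states (wrong digest lengths, unparsed store paths) are already
/// ruled out by the field types, so there is no separate validate() step.
fn assemble_path_info(
    store_path: StorePath<String>,
    root_node: tvix_castore::Node,
    nar_size: u64,
    nar_sha256: [u8; 32],
) -> PathInfo {
    PathInfo {
        store_path,
        node: root_node,
        references: vec![],
        nar_size,
        nar_sha256,
        signatures: vec![],
        deriver: None,
        ca: None,
    }
}

Because all invariants are carried by the types, implementations such as the memory or redb services can index by path_info.store_path.digest() directly instead of calling validate() first.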

View file

@ -1 +1,30 @@
use super::{node, Node, SymlinkNode};
mod directory;
/// Create a node with an empty symlink target, and ensure it fails validation.
#[test]
fn convert_symlink_empty_target_invalid() {
Node {
node: Some(node::Node::Symlink(SymlinkNode {
name: "foo".into(),
target: "".into(),
})),
}
.into_name_and_node()
.expect_err("must fail validation");
}
/// Create a node with a symlink target including null bytes, and ensure it
/// fails validation.
#[test]
fn convert_symlink_target_null_byte_invalid() {
Node {
node: Some(node::Node::Symlink(SymlinkNode {
name: "foo".into(),
target: "foo\0".into(),
})),
}
.into_name_and_node()
.expect_err("must fail validation");
}
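
For contrast, a sketch (not part of this diff) of the happy path: a symlink node with a non-empty, NUL-free target converts successfully, yielding the name and the strictly typed castore node. It assumes the same imports as the tests above.

/// Sketch only: into_name_and_node() succeeds for a valid symlink.
#[test]
fn convert_symlink_valid() {
    let (name, node) = Node {
        node: Some(node::Node::Symlink(SymlinkNode {
            name: "foo".into(),
            target: "/nix/store/somewhere".into(),
        })),
    }
    .into_name_and_node()
    .expect("must succeed");

    assert_eq!(b"foo".as_slice(), name.as_ref());
    assert!(matches!(node, crate::Node::Symlink { .. }));
}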

View file

@ -136,22 +136,6 @@ Similarly, we also don't properly populate the build environment for
`fetchClosure` yet. (Note there already is `ExportedPathInfo`, so once
`structuredAttrs` is there this should be easy.)
### PathInfo Data types
Similar to the refactors done in tvix-castore, we want a stricter type for
PathInfo, and use the `tvix_castore::nodes::Node` type we now have as the root
node.
This allows removing some checks, conversions and handling for invalid data in
many different places in different store implementations.
Steps:
- Define the stricter `PathInfo` type
- Update the `PathInfoService` trait to use the stricter types
- Update the grpc client impl to convert from the proto types to the
stricter types (and reject invalid ones)
- Update the grpc server wrapper to convert to the proto types
### PathInfo: include references by content
In the PathInfo struct, we currently only store references by their names and
store path hash. Getting the castore node for the content at that store path

View file

@ -182,7 +182,7 @@ pub(crate) mod derivation_builtins {
use tvix_castore::Node;
use tvix_eval::generators::Gen;
use tvix_eval::{NixContext, NixContextElement, NixString};
use tvix_store::proto::{NarInfo, PathInfo};
use tvix_store::pathinfoservice::PathInfo;
#[builtin("placeholder")]
async fn builtin_placeholder(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
@ -568,15 +568,6 @@ pub(crate) mod derivation_builtins {
let blob_digest = blob_writer.close().await?;
let ca_hash = CAHash::Text(Sha256::digest(&content).into());
let store_path: StorePathRef =
build_ca_path(name.to_str()?, &ca_hash, content.iter_ctx_plain(), false)
.map_err(|_e| {
nix_compat::derivation::DerivationError::InvalidOutputName(
name.to_str_lossy().into_owned(),
)
})
.map_err(DerivationError::InvalidDerivation)?;
let root_node = Node::File {
digest: blob_digest,
size: blob_size,
@ -590,41 +581,38 @@ pub(crate) mod derivation_builtins {
.await
.map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
// assemble references from plain context.
let reference_paths: Vec<StorePathRef> = content
.iter_ctx_plain()
.map(|elem| StorePathRef::from_absolute_path(elem.as_bytes()))
.collect::<Result<_, _>>()
.map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
// persist via pathinfo service.
state
.path_info_service
.put(PathInfo {
node: Some(tvix_castore::proto::Node::from_name_and_node(
store_path.to_string().into(),
root_node,
)),
references: reference_paths
.iter()
.map(|x| bytes::Bytes::copy_from_slice(x.digest()))
.collect(),
narinfo: Some(NarInfo {
nar_size,
nar_sha256: nar_sha256.to_vec().into(),
signatures: vec![],
reference_names: reference_paths
.into_iter()
.map(|x| x.to_string())
.collect(),
deriver: None,
ca: Some(ca_hash.into()),
}),
store_path: build_ca_path(
name.to_str()?,
&ca_hash,
content.iter_ctx_plain(),
false,
)
.map_err(|_e| {
nix_compat::derivation::DerivationError::InvalidOutputName(
name.to_str_lossy().into_owned(),
)
})
.map_err(DerivationError::InvalidDerivation)?,
node: root_node,
// assemble references from plain context.
references: content
.iter_ctx_plain()
.map(|elem| StorePath::from_absolute_path(elem.as_bytes()))
.collect::<Result<_, _>>()
.map_err(|e| ErrorKind::TvixError(Rc::new(e)))?,
nar_size,
nar_sha256,
signatures: vec![],
deriver: None,
ca: Some(ca_hash),
})
.await
.map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
Ok::<_, ErrorKind>(store_path)
.map_err(|e| ErrorKind::TvixError(Rc::new(e)))
.map(|path_info| path_info.store_path)
})?;
let abs_path = store_path.to_absolute_path();

View file

@ -11,7 +11,10 @@ use tokio_util::io::{InspectReader, InspectWriter};
use tracing::{instrument, warn, Span};
use tracing_indicatif::span_ext::IndicatifSpanExt;
use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService, Node};
use tvix_store::{nar::NarCalculationService, pathinfoservice::PathInfoService, proto::PathInfo};
use tvix_store::{
nar::NarCalculationService,
pathinfoservice::{PathInfo, PathInfoService},
};
use url::Url;
use crate::builtins::FetcherError;
@ -571,19 +574,14 @@ where
// Construct the PathInfo and persist it.
let path_info = PathInfo {
node: Some(tvix_castore::proto::Node::from_name_and_node(
store_path.to_string().into(),
node.clone(),
)),
store_path: store_path.to_owned(),
node: node.clone(),
references: vec![],
narinfo: Some(tvix_store::proto::NarInfo {
nar_size,
nar_sha256: nar_sha256.to_vec().into(),
signatures: vec![],
reference_names: vec![],
deriver: None,
ca: Some(ca_hash.into()),
}),
nar_size,
nar_sha256,
signatures: vec![],
deriver: None,
ca: Some(ca_hash),
};
self.path_info_service

View file

@ -23,7 +23,7 @@ use tvix_castore::{
directoryservice::{self, DirectoryService},
Node,
};
use tvix_store::{pathinfoservice::PathInfoService, proto::PathInfo};
use tvix_store::pathinfoservice::{PathInfo, PathInfoService};
use crate::fetchers::Fetcher;
use crate::known_paths::KnownPaths;
@ -119,23 +119,8 @@ impl TvixStoreIO {
.get(*store_path.digest())
.await?
{
// if we have a PathInfo, we know there will be a root_node (due to validation)
// TODO: use stricter typed BuildRequest here.
Some(path_info) => {
let (name, node) = path_info
.node
.expect("no node")
.into_name_and_node()
.expect("invalid node");
assert_eq!(
store_path.to_string().as_bytes(),
name.as_ref(),
"returned node basename must match requested store path"
);
node
}
Some(path_info) => path_info.node,
// If there's no PathInfo found, this normally means we have to
// trigger the build (and insert into PathInfoService, after
// reference scanning).
@ -336,47 +321,37 @@ impl TvixStoreIO {
// assemble the PathInfo to persist
let path_info = PathInfo {
node: Some(tvix_castore::proto::Node::from_name_and_node(
drv_output
.1
.path
.as_ref()
.ok_or(std::io::Error::new(
std::io::ErrorKind::Other,
"missing output store path",
))?
.to_string()
.into(),
output_node,
)),
store_path: drv_output
.1
.path
.as_ref()
.ok_or(std::io::Error::new(
std::io::ErrorKind::Other,
"Tvix bug: missing output store path",
))?
.to_owned(),
node: output_node,
references: output_needles
.iter()
.map(|path| Bytes::from(path.digest().as_slice().to_vec()))
.map(|s| (**s).to_owned())
.collect(),
narinfo: Some(tvix_store::proto::NarInfo {
nar_size,
nar_sha256: Bytes::from(nar_sha256.to_vec()),
signatures: vec![],
reference_names: output_needles
.iter()
.map(|path| path.to_string())
.collect(),
deriver: Some(tvix_store::proto::StorePath {
name: drv_path
nar_size,
nar_sha256,
signatures: vec![],
deriver: Some(
StorePath::from_name_and_digest_fixed(
drv_path
.name()
.strip_suffix(".drv")
.expect("missing .drv suffix")
.to_string(),
digest: drv_path.digest().to_vec().into(),
}),
ca: drv.fod_digest().map(
|fod_digest| -> tvix_store::proto::nar_info::Ca {
(&CAHash::Nar(nix_compat::nixhash::NixHash::Sha256(
fod_digest,
)))
.into()
},
.expect("missing .drv suffix"),
*drv_path.digest(),
)
.expect(
"Tvix bug: StorePath without .drv suffix must be valid",
),
),
ca: drv.fod_digest().map(|fod_digest| {
CAHash::Nar(nix_compat::nixhash::NixHash::Sha256(fod_digest))
}),
};
@ -421,8 +396,7 @@ impl TvixStoreIO {
) -> io::Result<(PathInfo, NixHash, StorePathRef<'a>)> {
// Ask the PathInfoService for the NAR size and sha256
// We always need it no matter what is the actual hash mode
// because the path info construct a narinfo which *always*
// require a SHA256 of the NAR representation and the NAR size.
// because the [PathInfo] needs to contain nar_{sha256,size}.
let (nar_size, nar_sha256) = self
.nar_calculation_service
.as_ref()
@ -431,7 +405,7 @@ impl TvixStoreIO {
// Calculate the output path. This might still fail, as some names are illegal.
let output_path =
nix_compat::store_path::build_ca_path(name, ca, Vec::<String>::new(), false).map_err(
nix_compat::store_path::build_ca_path(name, ca, Vec::<&str>::new(), false).map_err(
|_| {
std::io::Error::new(
std::io::ErrorKind::InvalidData,
@ -446,8 +420,8 @@ impl TvixStoreIO {
let path_info = tvix_store::import::derive_nar_ca_path_info(
nar_size,
nar_sha256,
Some(ca),
output_path.to_string().into(),
Some(ca.clone()),
output_path.to_owned(),
root_node,
);

View file

@ -1,10 +1,14 @@
use axum::{http::StatusCode, response::IntoResponse};
use bytes::Bytes;
use nix_compat::{narinfo::NarInfo, nix_http, nixbase32};
use nix_compat::{
narinfo::{NarInfo, Signature},
nix_http, nixbase32,
store_path::StorePath,
};
use prost::Message;
use tracing::{instrument, warn, Span};
use tvix_castore::proto::{self as castorepb};
use tvix_store::proto::PathInfo;
use tvix_store::pathinfoservice::PathInfo;
use crate::AppState;
@ -57,35 +61,15 @@ pub async fn get(
})?
.ok_or(StatusCode::NOT_FOUND)?;
let store_path = path_info.validate().map_err(|e| {
warn!(err=%e, "invalid PathInfo");
StatusCode::INTERNAL_SERVER_ERROR
})?;
let mut narinfo = path_info.to_narinfo(store_path.as_ref()).ok_or_else(|| {
warn!(path_info=?path_info, "PathInfo contained no NAR data");
StatusCode::INTERNAL_SERVER_ERROR
})?;
// encode the (unnamed) root node in the NAR url itself.
// We strip the name from the proto node before sending it out.
// It's not needed to render the NAR, it'll make the URL shorter, and it
// will make caching these requests easier.
let (_, root_node) = path_info
.node
.as_ref()
.expect("invalid pathinfo")
.to_owned()
.into_name_and_node()
.expect("invalid pathinfo");
let url = format!(
"nar/tvix-castore/{}?narsize={}",
data_encoding::BASE64URL_NOPAD
.encode(&castorepb::Node::from_name_and_node("".into(), root_node).encode_to_vec()),
narinfo.nar_size,
data_encoding::BASE64URL_NOPAD.encode(
&castorepb::Node::from_name_and_node("".into(), path_info.node.clone()).encode_to_vec()
),
path_info.nar_size,
);
let mut narinfo = path_info.to_narinfo();
narinfo.url = &url;
Ok((
@ -128,9 +112,6 @@ pub async fn put(
// Extract the NARHash from the PathInfo.
Span::current().record("path_info.nar_info", nixbase32::encode(&narinfo.nar_hash));
// populate the pathinfo.
let mut pathinfo = PathInfo::from(&narinfo);
// Lookup root node with peek, as we don't want to update the LRU list.
// We need to be careful to not hold the RwLock across the await point.
let maybe_root_node: Option<tvix_castore::Node> =
@ -138,19 +119,29 @@ pub async fn put(
match maybe_root_node {
Some(root_node) => {
// Set the root node from the lookup.
// We need to rename the node to the narinfo storepath basename, as
// that's where it's stored in PathInfo.
pathinfo.node = Some(castorepb::Node::from_name_and_node(
narinfo.store_path.to_string().into(),
root_node,
));
// Persist the PathInfo.
path_info_service.put(pathinfo).await.map_err(|e| {
warn!(err=%e, "failed to persist the PathInfo");
StatusCode::INTERNAL_SERVER_ERROR
})?;
path_info_service
.put(PathInfo {
store_path: narinfo.store_path.to_owned(),
node: root_node,
references: narinfo.references.iter().map(StorePath::to_owned).collect(),
nar_sha256: narinfo.nar_hash,
nar_size: narinfo.nar_size,
signatures: narinfo
.signatures
.into_iter()
.map(|s| {
Signature::<String>::new(s.name().to_string(), s.bytes().to_owned())
})
.collect(),
deriver: narinfo.deriver.as_ref().map(StorePath::to_owned),
ca: narinfo.ca,
})
.await
.map_err(|e| {
warn!(err=%e, "failed to persist the PathInfo");
StatusCode::INTERNAL_SERVER_ERROR
})?;
Ok("")
}

View file

@ -133,7 +133,7 @@ where
}
}
#[derive(Debug, thiserror::Error)]
#[derive(Debug, thiserror::Error, PartialEq, Eq)]
pub enum Error {
#[error("Invalid name: {0}")]
InvalidName(String),

View file

@ -4,7 +4,7 @@ use clap::Subcommand;
use futures::future::try_join_all;
use futures::StreamExt;
use futures::TryStreamExt;
use nix_compat::path_info::ExportedPathInfo;
use nix_compat::{path_info::ExportedPathInfo, store_path::StorePath};
use serde::Deserialize;
use serde::Serialize;
use std::path::PathBuf;
@ -16,15 +16,13 @@ use tracing::{info, info_span, instrument, Level, Span};
use tracing_indicatif::span_ext::IndicatifSpanExt as _;
use tvix_castore::import::fs::ingest_path;
use tvix_store::nar::NarCalculationService;
use tvix_store::proto::NarInfo;
use tvix_store::proto::PathInfo;
use tvix_store::utils::{ServiceUrls, ServiceUrlsGrpc};
use tvix_castore::proto::blob_service_server::BlobServiceServer;
use tvix_castore::proto::directory_service_server::DirectoryServiceServer;
use tvix_castore::proto::GRPCBlobServiceWrapper;
use tvix_castore::proto::GRPCDirectoryServiceWrapper;
use tvix_store::pathinfoservice::PathInfoService;
use tvix_store::pathinfoservice::{PathInfo, PathInfoService};
use tvix_store::proto::path_info_service_server::PathInfoServiceServer;
use tvix_store::proto::GRPCPathInfoServiceWrapper;
@ -359,23 +357,14 @@ async fn run_cli(cli: Cli) -> Result<(), Box<dyn std::error::Error + Send + Sync
// Create and upload a PathInfo pointing to the root_node,
// annotated with information we have from the reference graph.
let path_info = PathInfo {
node: Some(tvix_castore::proto::Node::from_name_and_node(
elem.path.to_string().into(),
root_node,
)),
references: Vec::from_iter(
elem.references.iter().map(|e| e.digest().to_vec().into()),
),
narinfo: Some(NarInfo {
nar_size: elem.nar_size,
nar_sha256: elem.nar_sha256.to_vec().into(),
signatures: vec![],
reference_names: Vec::from_iter(
elem.references.iter().map(|e| e.to_string()),
),
deriver: None,
ca: None,
}),
store_path: elem.path.to_owned(),
node: root_node,
references: elem.references.iter().map(StorePath::to_owned).collect(),
nar_size: elem.nar_size,
nar_sha256: elem.nar_sha256,
signatures: vec![],
deriver: None,
ca: None,
};
path_info_service.put(path_info).await?;

View file

@ -3,18 +3,17 @@ use std::path::Path;
use tracing::{debug, instrument};
use tvix_castore::{
blobservice::BlobService, directoryservice::DirectoryService, import::fs::ingest_path, Node,
PathComponent,
};
use nix_compat::{
nixhash::{CAHash, NixHash},
store_path::{self, StorePathRef},
store_path::{self, StorePath, StorePathRef},
};
use crate::{
nar::NarCalculationService,
pathinfoservice::PathInfoService,
proto::{nar_info, NarInfo, PathInfo},
pathinfoservice::{PathInfo, PathInfoService},
proto::nar_info,
};
impl From<CAHash> for nar_info::Ca {
@ -74,33 +73,29 @@ pub fn path_to_name(path: &Path) -> std::io::Result<&str> {
/// Takes the NAR size, SHA-256 of the NAR representation, the root node and optionally
/// a CA hash information.
///
/// Returns the path information object for a NAR-style object.
/// Constructs a [PathInfo] for a NAR-style object.
///
/// This [`PathInfo`] can be further filled for signatures, deriver or verified for the expected
/// hashes.
/// The user can then further fill the fields (like deriver, signatures), and/or
/// verify to have the expected hashes.
#[inline]
pub fn derive_nar_ca_path_info(
nar_size: u64,
nar_sha256: [u8; 32],
ca: Option<&CAHash>,
name: bytes::Bytes,
ca: Option<CAHash>,
store_path: StorePath<String>,
root_node: Node,
) -> PathInfo {
// assemble the [crate::proto::PathInfo] object.
PathInfo {
node: Some(tvix_castore::proto::Node::from_name_and_node(
name, root_node,
)),
store_path,
node: root_node,
// There's no reference scanning on path contents ingested like this.
references: vec![],
narinfo: Some(NarInfo {
nar_size,
nar_sha256: nar_sha256.to_vec().into(),
signatures: vec![],
reference_names: vec![],
deriver: None,
ca: ca.map(|ca_hash| ca_hash.into()),
}),
nar_size,
nar_sha256,
signatures: vec![],
deriver: None,
ca,
}
}
@ -141,19 +136,13 @@ where
)
})?;
let name: PathComponent = output_path
.to_string()
.as_str()
.try_into()
.expect("Tvix bug: StorePath must be PathComponent");
log_node(name.as_ref(), &root_node, path.as_ref());
let path_info = derive_nar_ca_path_info(
nar_size,
nar_sha256,
Some(&CAHash::Nar(NixHash::Sha256(nar_sha256))),
name.into(),
Some(CAHash::Nar(NixHash::Sha256(nar_sha256))),
output_path.to_owned(),
root_node,
);

View file

@ -1,6 +1,7 @@
pub mod composition;
pub mod import;
pub mod nar;
pub mod path_info;
pub mod pathinfoservice;
pub mod proto;
pub mod utils;

View file

@ -0,0 +1,87 @@
use nix_compat::{
narinfo::{Flags, Signature},
nixhash::CAHash,
store_path::StorePath,
};
/// Holds metadata about a store path, but not its contents.
///
/// This is somewhat equivalent to the information Nix holds in its SQLite
/// database, or publishes as .narinfo files, except we also embed the
/// [tvix_castore::Node] describing the contents in the castore model.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PathInfo {
/// The store path this is about.
pub store_path: StorePath<String>,
/// The contents in the tvix-castore model.
/// Can be a directory, file or symlink.
pub node: tvix_castore::Node,
/// A list of references.
pub references: Vec<StorePath<String>>,
/// The size of the NAR representation of the contents, in bytes.
pub nar_size: u64,
/// The sha256 digest of the NAR representation of the contents.
pub nar_sha256: [u8; 32],
/// The signatures, usually shown in a .narinfo file.
pub signatures: Vec<Signature<String>>,
/// The StorePath of the .drv file producing this output.
/// The .drv suffix is omitted in its `name` field.
pub deriver: Option<StorePath<String>>,
/// The CA field in the .narinfo.
/// Its textual representations seen in the wild are one of the following:
///
/// * `fixed:r:sha256:1gcky5hlf5vqfzpyhihydmm54grhc94mcs8w7xr8613qsqb1v2j6`
/// fixed-output derivations using "recursive" `outputHashMode`.
/// * `fixed:sha256:19xqkh72crbcba7flwxyi3n293vav6d7qkzkh2v4zfyi4iia8vj8`
///   fixed-output derivations using "flat" `outputHashMode`.
/// * `text:sha256:19xqkh72crbcba7flwxyi3n293vav6d7qkzkh2v4zfyi4iia8vj8`
/// Text hashing, used for uploaded .drv files and outputs produced by
/// builtins.toFile.
///
/// Semantically, they can be split into the following components:
///
/// * "content address prefix". Currently, "fixed" and "text" are supported.
/// * "hash mode". Currently, "flat" and "recursive" are supported.
/// * "hash type". The underlying hash function used.
/// Currently, sha1, md5, sha256, sha512.
/// * "digest". The digest itself.
///
/// There are some restrictions on the possible combinations.
/// For example, `text` and `fixed:recursive` always imply sha256.
pub ca: Option<CAHash>,
}
impl PathInfo {
/// Reconstructs a [nix_compat::narinfo::NarInfo<'_>].
///
/// It does very little allocation (a Vec each for `signatures` and
/// `references`), the rest points to data owned elsewhere.
///
/// It can be used to validate Signatures, or render a .narinfo file
/// (after some more fields are populated)
///
/// Keep in mind this is not able to reconstruct all data present in the
/// NarInfo<'_>, as some of it is not stored at all:
/// - the `system`, `file_hash` and `file_size` fields are set to `None`.
/// - the URL is set to an empty string.
/// - Compression is set to "none"
///
/// If you want to render it out to a string and be able to parse it back
/// in, at least URL *must* be set again.
pub fn to_narinfo(&self) -> nix_compat::narinfo::NarInfo<'_> {
nix_compat::narinfo::NarInfo {
flags: Flags::empty(),
store_path: self.store_path.as_ref(),
nar_hash: self.nar_sha256,
nar_size: self.nar_size,
references: self.references.iter().map(StorePath::as_ref).collect(),
signatures: self.signatures.iter().map(Signature::as_ref).collect(),
ca: self.ca.clone(),
system: None,
deriver: self.deriver.as_ref().map(StorePath::as_ref),
url: "",
compression: Some("none"),
file_hash: None,
file_size: None,
}
}
}
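
A usage sketch for to_narinfo() (not part of the diff): it assumes NarInfo's Display impl renders the .narinfo text format, and that the caller supplies the NAR URL, since PathInfo does not store one.

use tvix_store::pathinfoservice::PathInfo;

/// Sketch: render a .narinfo for `path_info`, pointing clients at `nar_url`.
fn render_narinfo(path_info: &PathInfo, nar_url: &str) -> String {
    let mut narinfo = path_info.to_narinfo();
    // The URL (and, if desired, compression/file_hash/file_size) must be
    // filled in before writing the narinfo out.
    narinfo.url = nar_url;
    narinfo.to_string()
}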

View file

@ -1,6 +1,5 @@
use super::PathInfoService;
use super::{PathInfo, PathInfoService};
use crate::proto;
use crate::proto::PathInfo;
use async_stream::try_stream;
use bigtable_rs::{bigtable, google::bigtable::v2 as bigtable_v2};
use bytes::Bytes;
@ -232,14 +231,13 @@ impl PathInfoService for BigtablePathInfoService {
}
// Try to parse the value into a PathInfo message
let path_info = proto::PathInfo::decode(Bytes::from(cell.value))
let path_info_proto = proto::PathInfo::decode(Bytes::from(cell.value))
.map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {}", e)))?;
let store_path = path_info
.validate()
.map_err(|e| Error::StorageError(format!("invalid PathInfo: {}", e)))?;
let path_info = PathInfo::try_from(path_info_proto)
.map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?;
if store_path.digest() != &digest {
if path_info.store_path.digest() != &digest {
return Err(Error::StorageError("PathInfo has unexpected digest".into()));
}
@ -248,14 +246,10 @@ impl PathInfoService for BigtablePathInfoService {
#[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))]
async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
let store_path = path_info
.validate()
.map_err(|e| Error::InvalidRequest(format!("pathinfo failed validation: {}", e)))?;
let mut client = self.client.clone();
let path_info_key = derive_pathinfo_key(store_path.digest());
let path_info_key = derive_pathinfo_key(path_info.store_path.digest());
let data = path_info.encode_to_vec();
let data = proto::PathInfo::from(path_info.clone()).encode_to_vec();
if data.len() as u64 > CELL_SIZE_LIMIT {
return Err(Error::StorageError(
"PathInfo exceeds cell limit on Bigtable".into(),
@ -340,16 +334,12 @@ impl PathInfoService for BigtablePathInfoService {
}
// Try to parse the value into a PathInfo message.
let path_info = proto::PathInfo::decode(Bytes::from(cell.value))
let path_info_proto = proto::PathInfo::decode(Bytes::from(cell.value))
.map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {}", e)))?;
// Validate the containing PathInfo, ensure its StorePath digest
// matches row key.
let store_path = path_info
.validate()
.map_err(|e| Error::StorageError(format!("invalid PathInfo: {}", e)))?;
let path_info = PathInfo::try_from(path_info_proto).map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?;
let exp_path_info_key = derive_pathinfo_key(store_path.digest());
let exp_path_info_key = derive_pathinfo_key(path_info.store_path.digest());
if exp_path_info_key.as_bytes() != row_key.as_slice() {
Err(Error::StorageError("PathInfo has unexpected digest".into()))?

View file

@ -1,6 +1,5 @@
use std::sync::Arc;
use crate::proto::PathInfo;
use futures::stream::BoxStream;
use nix_compat::nixbase32;
use tonic::async_trait;
@ -8,7 +7,7 @@ use tracing::{debug, instrument};
use tvix_castore::composition::{CompositionContext, ServiceBuilder};
use tvix_castore::Error;
use super::PathInfoService;
use super::{PathInfo, PathInfoService};
/// Asks near first, if not found, asks far.
/// If found in there, returns it, and *inserts* it into
@ -105,11 +104,9 @@ mod test {
use crate::{
pathinfoservice::{LruPathInfoService, MemoryPathInfoService, PathInfoService},
tests::fixtures::PATH_INFO_WITH_NARINFO,
tests::fixtures::PATH_INFO,
};
const PATH_INFO_DIGEST: [u8; 20] = [0; 20];
/// Helper function setting up an instance of a "far" and "near"
/// PathInfoService.
async fn create_pathinfoservice() -> super::Cache<LruPathInfoService, MemoryPathInfoService> {
@ -129,21 +126,25 @@ mod test {
let svc = create_pathinfoservice().await;
// query the PathInfo, things should not be there.
assert!(svc.get(PATH_INFO_DIGEST).await.unwrap().is_none());
assert!(svc
.get(*PATH_INFO.store_path.digest())
.await
.unwrap()
.is_none());
// insert it into the far one.
svc.far.put(PATH_INFO_WITH_NARINFO.clone()).await.unwrap();
svc.far.put(PATH_INFO.clone()).await.unwrap();
// now try getting it again, it should succeed.
assert_eq!(
Some(PATH_INFO_WITH_NARINFO.clone()),
svc.get(PATH_INFO_DIGEST).await.unwrap()
Some(PATH_INFO.clone()),
svc.get(*PATH_INFO.store_path.digest()).await.unwrap()
);
// peek near, it should now be there.
assert_eq!(
Some(PATH_INFO_WITH_NARINFO.clone()),
svc.near.get(PATH_INFO_DIGEST).await.unwrap()
Some(PATH_INFO.clone()),
svc.near.get(*PATH_INFO.store_path.digest()).await.unwrap()
);
}
}

View file

@ -58,32 +58,20 @@ where
.as_ref()
.get(*store_path.digest())
.await?
.map(|path_info| {
let node = path_info
.node
.as_ref()
.expect("missing root node")
.to_owned();
match node.into_name_and_node() {
Ok((_name, node)) => Ok(node),
Err(e) => Err(Error::StorageError(e.to_string())),
}
})
.transpose()?)
.map(|path_info| path_info.node))
}
fn list(&self) -> BoxStream<Result<(PathComponent, Node), Error>> {
Box::pin(self.0.as_ref().list().map(|result| {
result.and_then(|path_info| {
let node = path_info
.node
.as_ref()
.expect("missing root node")
.to_owned();
node.into_name_and_node()
.map_err(|e| Error::StorageError(e.to_string()))
result.map(|path_info| {
let basename = path_info.store_path.to_string();
(
basename
.as_str()
.try_into()
.expect("Tvix bug: StorePath must be PathComponent"),
path_info.node,
)
})
}))
}

View file

@ -1,7 +1,7 @@
use super::PathInfoService;
use super::{PathInfo, PathInfoService};
use crate::{
nar::NarCalculationService,
proto::{self, ListPathInfoRequest, PathInfo},
proto::{self, ListPathInfoRequest},
};
use async_stream::try_stream;
use futures::stream::BoxStream;
@ -53,15 +53,10 @@ where
.await;
match path_info {
Ok(path_info) => {
let path_info = path_info.into_inner();
path_info
.validate()
.map_err(|e| Error::StorageError(format!("invalid pathinfo: {}", e)))?;
Ok(Some(path_info))
}
Ok(path_info) => Ok(Some(
PathInfo::try_from(path_info.into_inner())
.map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?,
)),
Err(e) if e.code() == Code::NotFound => Ok(None),
Err(e) => Err(Error::StorageError(e.to_string())),
}
@ -72,12 +67,12 @@ where
let path_info = self
.grpc_client
.clone()
.put(path_info)
.put(proto::PathInfo::from(path_info))
.await
.map_err(|e| Error::StorageError(e.to_string()))?
.into_inner();
Ok(path_info)
Ok(PathInfo::try_from(path_info)
.map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?)
}
#[instrument(level = "trace", skip_all)]
@ -91,21 +86,8 @@ where
loop {
match stream.message().await {
Ok(o) => match o {
Some(pathinfo) => {
// validate the pathinfo
if let Err(e) = pathinfo.validate() {
Err(Error::StorageError(format!(
"pathinfo {:?} failed validation: {}",
pathinfo, e
)))?;
}
yield pathinfo
}
None => {
return;
},
},
Ok(Some(path_info)) => yield PathInfo::try_from(path_info).map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?,
Ok(None) => return,
Err(e) => Err(Error::StorageError(e.to_string()))?,
}
}

View file

@ -8,11 +8,10 @@ use tokio::sync::RwLock;
use tonic::async_trait;
use tracing::instrument;
use crate::proto::PathInfo;
use tvix_castore::composition::{CompositionContext, ServiceBuilder};
use tvix_castore::Error;
use super::PathInfoService;
use super::{PathInfo, PathInfoService};
pub struct LruPathInfoService {
lru: Arc<RwLock<LruCache<[u8; 20], PathInfo>>>,
@ -35,15 +34,10 @@ impl PathInfoService for LruPathInfoService {
#[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))]
async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
// call validate
let store_path = path_info
.validate()
.map_err(|e| Error::InvalidRequest(format!("invalid PathInfo: {}", e)))?;
self.lru
.write()
.await
.put(*store_path.digest(), path_info.clone());
.put(*path_info.store_path.digest(), path_info.clone());
Ok(path_info)
}
@ -91,40 +85,22 @@ impl ServiceBuilder for LruPathInfoServiceConfig {
#[cfg(test)]
mod test {
use nix_compat::store_path::StorePath;
use std::num::NonZeroUsize;
use crate::{
pathinfoservice::{LruPathInfoService, PathInfoService},
proto::PathInfo,
tests::fixtures::PATH_INFO_WITH_NARINFO,
pathinfoservice::{LruPathInfoService, PathInfo, PathInfoService},
tests::fixtures::PATH_INFO,
};
use lazy_static::lazy_static;
use tvix_castore::proto as castorepb;
lazy_static! {
static ref PATHINFO_1: PathInfo = PATH_INFO_WITH_NARINFO.clone();
static ref PATHINFO_1_DIGEST: [u8; 20] = [0; 20];
static ref PATHINFO_2: PathInfo = {
let mut p = PATHINFO_1.clone();
let root_node = p.node.as_mut().unwrap();
if let castorepb::Node { node: Some(node) } = root_node {
match node {
castorepb::node::Node::Directory(n) => {
n.name = "11111111111111111111111111111111-dummy2".into()
}
castorepb::node::Node::File(n) => {
n.name = "11111111111111111111111111111111-dummy2".into()
}
castorepb::node::Node::Symlink(n) => {
n.name = "11111111111111111111111111111111-dummy2".into()
}
}
} else {
unreachable!()
}
let mut p = PATH_INFO.clone();
p.store_path = StorePath::from_name_and_digest_fixed("dummy", [1; 20]).unwrap();
p
};
static ref PATHINFO_2_DIGEST: [u8; 20] = *(PATHINFO_2.validate().unwrap()).digest();
static ref PATHINFO_2_DIGEST: [u8; 20] = *PATHINFO_2.store_path.digest();
}
#[tokio::test]
@ -133,18 +109,20 @@ mod test {
// pathinfo_1 should not be there
assert!(svc
.get(*PATHINFO_1_DIGEST)
.get(*PATH_INFO.store_path.digest())
.await
.expect("no error")
.is_none());
// insert it
svc.put(PATHINFO_1.clone()).await.expect("no error");
svc.put(PATH_INFO.clone()).await.expect("no error");
// now it should be there.
assert_eq!(
Some(PATHINFO_1.clone()),
svc.get(*PATHINFO_1_DIGEST).await.expect("no error")
Some(PATH_INFO.clone()),
svc.get(*PATH_INFO.store_path.digest())
.await
.expect("no error")
);
// insert pathinfo_2. This will evict pathinfo 1
@ -158,7 +136,7 @@ mod test {
// … but pathinfo 1 not anymore.
assert!(svc
.get(*PATHINFO_1_DIGEST)
.get(*PATH_INFO.store_path.digest())
.await
.expect("no error")
.is_none());

View file

@ -1,5 +1,4 @@
use super::PathInfoService;
use crate::proto::PathInfo;
use super::{PathInfo, PathInfoService};
use async_stream::try_stream;
use futures::stream::BoxStream;
use nix_compat::nixbase32;
@ -29,22 +28,11 @@ impl PathInfoService for MemoryPathInfoService {
#[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))]
async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
// Call validate on the received PathInfo message.
match path_info.validate() {
Err(e) => Err(Error::InvalidRequest(format!(
"failed to validate PathInfo: {}",
e
))),
// This overwrites existing PathInfo objects with the same store path digest.
let mut db = self.db.write().await;
db.insert(*path_info.store_path.digest(), path_info.clone());
// In case the PathInfo is valid, and we were able to extract a NixPath, store it in the database.
// This overwrites existing PathInfo objects.
Ok(nix_path) => {
let mut db = self.db.write().await;
db.insert(*nix_path.digest(), path_info.clone());
Ok(path_info)
}
}
Ok(path_info)
}
fn list(&self) -> BoxStream<'static, Result<PathInfo, Error>> {

View file

@ -19,7 +19,7 @@ use tvix_castore::composition::{Registry, ServiceBuilder};
use tvix_castore::Error;
use crate::nar::NarCalculationService;
use crate::proto::PathInfo;
pub use crate::path_info::PathInfo;
pub use self::combinators::{
Cache as CachePathInfoService, CacheConfig as CachePathInfoServiceConfig,

View file

@ -1,10 +1,11 @@
use super::PathInfoService;
use crate::{nar::ingest_nar_and_hash, proto::PathInfo};
use super::{PathInfo, PathInfoService};
use crate::nar::ingest_nar_and_hash;
use futures::{stream::BoxStream, TryStreamExt};
use nix_compat::{
narinfo::{self, NarInfo},
narinfo::{self, NarInfo, Signature},
nixbase32,
nixhash::NixHash,
store_path::StorePath,
};
use reqwest::StatusCode;
use std::sync::Arc;
@ -12,9 +13,7 @@ use tokio::io::{self, AsyncRead};
use tonic::async_trait;
use tracing::{debug, instrument, warn};
use tvix_castore::composition::{CompositionContext, ServiceBuilder};
use tvix_castore::{
blobservice::BlobService, directoryservice::DirectoryService, proto as castorepb, Error,
};
use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService, Error};
use url::Url;
/// NixHTTPPathInfoService acts as a bridge in between the Nix HTTP Binary cache
@ -137,12 +136,11 @@ where
}
}
// Convert to a (sparse) PathInfo. We still need to populate the node field,
// and for this we need to download the NAR file.
// To construct the full PathInfo, we also need to populate the node field,
// and for this we need to download the NAR file and ingest it into castore.
// FUTUREWORK: Keep some database around mapping from narsha256 to
// (unnamed) rootnode, so we can use that (and the name from the
// StorePath) and avoid downloading the same NAR a second time.
let pathinfo: PathInfo = (&narinfo).into();
// create a request for the NAR file itself.
let nar_url = self.base_url.join(narinfo.url).map_err(|e| {
@ -228,12 +226,18 @@ where
}
Ok(Some(PathInfo {
node: Some(castorepb::Node::from_name_and_node(
narinfo.store_path.to_string().into(),
root_node,
)),
references: pathinfo.references,
narinfo: pathinfo.narinfo,
store_path: narinfo.store_path.to_owned(),
node: root_node,
references: narinfo.references.iter().map(StorePath::to_owned).collect(),
nar_size: narinfo.nar_size,
nar_sha256: narinfo.nar_hash,
deriver: narinfo.deriver.as_ref().map(StorePath::to_owned),
signatures: narinfo
.signatures
.into_iter()
.map(|s| Signature::<String>::new(s.name().to_string(), s.bytes().to_owned()))
.collect(),
ca: narinfo.ca,
}))
}

View file

@ -1,5 +1,5 @@
use super::PathInfoService;
use crate::proto::PathInfo;
use super::{PathInfo, PathInfoService};
use crate::proto;
use data_encoding::BASE64;
use futures::{stream::BoxStream, StreamExt};
use prost::Message;
@ -78,10 +78,13 @@ impl PathInfoService for RedbPathInfoService {
let table = txn.open_table(PATHINFO_TABLE)?;
match table.get(digest)? {
Some(pathinfo_bytes) => Ok(Some(
PathInfo::decode(pathinfo_bytes.value().as_slice()).map_err(|e| {
warn!(err=%e, "failed to decode stored PathInfo");
Error::StorageError("failed to decode stored PathInfo".to_string())
})?,
proto::PathInfo::decode(pathinfo_bytes.value().as_slice())
.map_err(|e| {
warn!(err=%e, "failed to decode stored PathInfo");
Error::StorageError("failed to decode stored PathInfo".to_string())
})?
.try_into()
.map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?,
)),
None => Ok(None),
}
@ -92,25 +95,19 @@ impl PathInfoService for RedbPathInfoService {
#[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))]
async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
// Call validate on the received PathInfo message.
let store_path = path_info
.validate()
.map_err(|e| {
warn!(err=%e, "failed to validate PathInfo");
Error::StorageError("failed to validate PathInfo".to_string())
})?
.to_owned();
let path_info_encoded = path_info.encode_to_vec();
let db = self.db.clone();
tokio::task::spawn_blocking({
let path_info = path_info.clone();
move || -> Result<(), Error> {
let txn = db.begin_write()?;
{
let mut table = txn.open_table(PATHINFO_TABLE)?;
table
.insert(store_path.digest(), path_info_encoded)
.insert(
*path_info.store_path.digest(),
proto::PathInfo::from(path_info).encode_to_vec(),
)
.map_err(|e| {
warn!(err=%e, "failed to insert PathInfo");
Error::StorageError("failed to insert PathInfo".to_string())
@ -137,12 +134,18 @@ impl PathInfoService for RedbPathInfoService {
for elem in table.iter()? {
let elem = elem?;
tokio::runtime::Handle::current()
.block_on(tx.send(Ok(
PathInfo::decode(elem.1.value().as_slice()).map_err(|e| {
.block_on(tx.send(Ok({
let path_info_proto = proto::PathInfo::decode(
elem.1.value().as_slice(),
)
.map_err(|e| {
warn!(err=%e, "invalid PathInfo");
Error::StorageError("invalid PathInfo".to_string())
})?,
)))
})?;
PathInfo::try_from(path_info_proto).map_err(|e| {
Error::StorageError(format!("Invalid path info: {e}"))
})?
})))
.map_err(|e| Error::StorageError(e.to_string()))?;
}

View file

@ -1,7 +1,6 @@
//! This module provides a [PathInfoService] implementation that signs narinfos
use super::PathInfoService;
use crate::proto::PathInfo;
use super::{PathInfo, PathInfoService};
use futures::stream::BoxStream;
use std::path::PathBuf;
use std::sync::Arc;
@ -11,9 +10,9 @@ use tvix_castore::composition::{CompositionContext, ServiceBuilder};
use tvix_castore::Error;
use nix_compat::narinfo::{parse_keypair, SigningKey};
use nix_compat::narinfo::{parse_keypair, Signature, SigningKey};
use nix_compat::nixbase32;
use tracing::{instrument, warn};
use tracing::instrument;
#[cfg(test)]
use super::MemoryPathInfoService;
@ -52,22 +51,15 @@ where
}
async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
let store_path = path_info.validate().map_err(|e| {
warn!(err=%e, "invalid PathInfo");
Error::StorageError(e.to_string())
})?;
let root_node = path_info.node.clone();
// If we have narinfo then sign it, else passthrough to the upper pathinfoservice
let path_info_to_put = match path_info.to_narinfo(store_path.as_ref()) {
Some(mut nar_info) => {
nar_info.add_signature(self.signing_key.as_ref());
let mut signed_path_info = PathInfo::from(&nar_info);
signed_path_info.node = root_node;
signed_path_info
}
None => path_info,
};
self.inner.put(path_info_to_put).await
let mut path_info = path_info.clone();
let mut nar_info = path_info.to_narinfo();
nar_info.add_signature(self.signing_key.as_ref());
path_info.signatures = nar_info
.signatures
.into_iter()
.map(|s| Signature::<String>::new(s.name().to_string(), s.bytes().to_owned()))
.collect();
self.inner.put(path_info).await
}
fn list(&self) -> BoxStream<'static, Result<PathInfo, Error>> {
@ -134,51 +126,35 @@ pub const DUMMY_VERIFYING_KEY: &str = "do.not.use:cuXqnuzlWfGTKmfzBPx2kXShjRryZM
#[cfg(test)]
mod test {
use crate::{
pathinfoservice::PathInfoService,
proto::PathInfo,
tests::fixtures::{DUMMY_PATH, PATH_INFO_WITH_NARINFO},
};
use crate::{pathinfoservice::PathInfoService, tests::fixtures::PATH_INFO};
use nix_compat::narinfo::VerifyingKey;
use lazy_static::lazy_static;
use nix_compat::store_path::StorePath;
lazy_static! {
static ref PATHINFO_1: PathInfo = PATH_INFO_WITH_NARINFO.clone();
static ref PATHINFO_1_DIGEST: [u8; 20] = [0; 20];
}
#[tokio::test]
async fn put_and_verify_signature() {
let svc = super::test_signing_service();
// pathinfo_1 should not be there ...
assert!(svc
.get(*PATHINFO_1_DIGEST)
.get(*PATH_INFO.store_path.digest())
.await
.expect("no error")
.is_none());
// ... and not be signed
assert!(PATHINFO_1.narinfo.clone().unwrap().signatures.is_empty());
assert!(PATH_INFO.signatures.is_empty());
// insert it
svc.put(PATHINFO_1.clone()).await.expect("no error");
svc.put(PATH_INFO.clone()).await.expect("no error");
// now it should be there ...
let signed = svc
.get(*PATHINFO_1_DIGEST)
.get(*PATH_INFO.store_path.digest())
.await
.expect("no error")
.unwrap();
// and signed
let narinfo = signed
.to_narinfo(
StorePath::from_bytes(DUMMY_PATH.as_bytes()).expect("DUMMY_PATH to be parsed"),
)
.expect("no error");
let narinfo = signed.to_narinfo();
let fp = narinfo.fingerprint();
// load our keypair from the fixtures

View file

@ -6,12 +6,10 @@ use futures::TryStreamExt;
use rstest::*;
use rstest_reuse::{self, *};
use super::PathInfoService;
use super::{PathInfo, PathInfoService};
use crate::pathinfoservice::redb::RedbPathInfoService;
use crate::pathinfoservice::MemoryPathInfoService;
use crate::proto::PathInfo;
use crate::tests::fixtures::DUMMY_PATH_DIGEST;
use tvix_castore::proto as castorepb;
use crate::tests::fixtures::{DUMMY_PATH_DIGEST, PATH_INFO};
use crate::pathinfoservice::test_signing_service;
@ -52,32 +50,35 @@ async fn not_found(svc: impl PathInfoService) {
#[apply(path_info_services)]
#[tokio::test]
async fn put_get(svc: impl PathInfoService) {
let path_info = PathInfo {
node: Some(castorepb::Node {
node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode {
name: "00000000000000000000000000000000-foo".into(),
target: "doesntmatter".into(),
})),
}),
..Default::default()
};
// insert
let resp = svc.put(path_info.clone()).await.expect("must succeed");
let resp = svc.put(PATH_INFO.clone()).await.expect("must succeed");
// expect the returned PathInfo to be equal (for now)
// in the future, some stores might add additional fields/signatures.
assert_eq!(path_info, resp);
// expect the returned PathInfo to be equal,
// remove the signatures as the SigningPathInfoService adds them
assert_eq!(*PATH_INFO, strip_signatures(resp));
// get it back
let resp = svc.get(DUMMY_PATH_DIGEST).await.expect("must succeed");
assert_eq!(Some(path_info.clone()), resp);
assert_eq!(Some(PATH_INFO.clone()), resp.map(strip_signatures));
// Ensure the listing endpoint works, and returns the same path_info.
// FUTUREWORK: split this, some impls might (rightfully) not support listing
let pathinfos: Vec<PathInfo> = svc.list().try_collect().await.expect("must succeed");
// We should get a single pathinfo back, the one we inserted.
assert_eq!(vec![path_info], pathinfos);
assert_eq!(
vec![PATH_INFO.clone()],
pathinfos
.into_iter()
.map(strip_signatures)
.collect::<Vec<_>>()
);
}
fn strip_signatures(path_info: PathInfo) -> PathInfo {
PathInfo {
signatures: vec![],
..path_info
}
}

View file

@ -1,5 +1,5 @@
use crate::nar::{NarCalculationService, RenderError};
use crate::pathinfoservice::PathInfoService;
use crate::pathinfoservice::{PathInfo, PathInfoService};
use crate::proto;
use futures::{stream::BoxStream, TryStreamExt};
use std::ops::Deref;
@ -44,7 +44,7 @@ where
.map_err(|_e| Status::invalid_argument("invalid output digest length"))?;
match self.path_info_service.get(digest).await {
Ok(None) => Err(Status::not_found("PathInfo not found")),
Ok(Some(path_info)) => Ok(Response::new(path_info)),
Ok(Some(path_info)) => Ok(Response::new(proto::PathInfo::from(path_info))),
Err(e) => {
warn!(err = %e, "failed to get PathInfo");
Err(e.into())
@ -56,12 +56,15 @@ where
#[instrument(skip_all)]
async fn put(&self, request: Request<proto::PathInfo>) -> Result<Response<proto::PathInfo>> {
let path_info = request.into_inner();
let path_info_proto = request.into_inner();
let path_info = PathInfo::try_from(path_info_proto)
.map_err(|e| Status::invalid_argument(format!("Invalid path info: {e}")))?;
// Store the PathInfo in the client. Clients MUST validate the data
// they receive, so we don't validate additionally here.
match self.path_info_service.put(path_info).await {
Ok(path_info_new) => Ok(Response::new(path_info_new)),
Ok(path_info_new) => Ok(Response::new(proto::PathInfo::from(path_info_new))),
Err(e) => {
warn!(err = %e, "failed to put PathInfo");
Err(e.into())
@ -99,6 +102,7 @@ where
let stream = Box::pin(
self.path_info_service
.list()
.map_ok(proto::PathInfo::from)
.map_err(|e| Status::internal(e.to_string())),
);

View file

@ -4,7 +4,7 @@ use bytes::Bytes;
use data_encoding::BASE64;
// https://github.com/hyperium/tonic/issues/1056
use nix_compat::{
narinfo::Flags,
narinfo::{Signature, SignatureError},
nixhash::{CAHash, NixHash},
store_path::{self, StorePathRef},
};
@ -17,6 +17,8 @@ pub use grpc_pathinfoservice_wrapper::GRPCPathInfoServiceWrapper;
tonic::include_proto!("tvix.store.v1");
use tvix_castore::proto as castorepb;
#[cfg(feature = "tonic-reflection")]
/// Compiled file descriptors for implementing [gRPC
/// reflection](https://github.com/grpc/grpc/blob/master/doc/server-reflection.md) with e.g.
@ -70,183 +72,18 @@ pub enum ValidatePathInfoError {
/// The deriver field is invalid.
#[error("deriver field is invalid: {0}")]
InvalidDeriverField(store_path::Error),
}
/// Parses a root node name.
///
/// On success, this returns the parsed [store_path::StorePathRef].
/// On error, it returns an error generated from the supplied constructor.
fn parse_node_name_root<E>(
name: &[u8],
err: fn(Vec<u8>, store_path::Error) -> E,
) -> Result<store_path::StorePathRef<'_>, E> {
store_path::StorePathRef::from_bytes(name).map_err(|e| err(name.to_vec(), e))
}
/// The narinfo field is missing
#[error("The narinfo field is missing")]
NarInfoFieldMissing,
impl PathInfo {
/// validate performs some checks on the PathInfo struct,
/// Returning either a [store_path::StorePath] of the root node, or a
/// [ValidatePathInfoError].
pub fn validate(&self) -> Result<store_path::StorePath<String>, ValidatePathInfoError> {
// ensure the references have the right number of bytes.
for (i, reference) in self.references.iter().enumerate() {
if reference.len() != store_path::DIGEST_SIZE {
return Err(ValidatePathInfoError::InvalidReferenceDigestLen(
i,
reference.len(),
));
}
}
/// The ca field is invalid
#[error("The ca field is invalid: {0}")]
InvalidCaField(ConvertCAError),
// If there is a narinfo field populated…
if let Some(narinfo) = &self.narinfo {
// ensure the nar_sha256 digest has the correct length.
if narinfo.nar_sha256.len() != 32 {
return Err(ValidatePathInfoError::InvalidNarSha256DigestLen(
narinfo.nar_sha256.len(),
));
}
// ensure the number of references there matches PathInfo.references count.
if narinfo.reference_names.len() != self.references.len() {
return Err(ValidatePathInfoError::InconsistentNumberOfReferences(
self.references.len(),
narinfo.reference_names.len(),
));
}
// parse references in reference_names.
for (i, reference_name_str) in narinfo.reference_names.iter().enumerate() {
// ensure thy parse as (non-absolute) store path
let reference_names_store_path = store_path::StorePathRef::from_bytes(
reference_name_str.as_bytes(),
)
.map_err(|_| {
ValidatePathInfoError::InvalidNarinfoReferenceName(
i,
reference_name_str.to_owned(),
)
})?;
// ensure their digest matches the one at self.references[i].
{
// This is safe, because we ensured the proper length earlier already.
let reference_digest = self.references[i].to_vec().try_into().unwrap();
if reference_names_store_path.digest() != &reference_digest {
return Err(
ValidatePathInfoError::InconsistentNarinfoReferenceNameDigest(
i,
reference_digest,
*reference_names_store_path.digest(),
),
);
}
}
// If the Deriver field is populated, ensure it parses to a
// [store_path::StorePath].
// We can't check for it to *not* end with .drv, as the .drv files produced by
// recursive Nix end with multiple .drv suffixes, and only one is popped when
// converting to this field.
if let Some(deriver) = &narinfo.deriver {
store_path::StorePathRef::from_name_and_digest(&deriver.name, &deriver.digest)
.map_err(ValidatePathInfoError::InvalidDeriverField)?;
}
}
}
// Ensure there is a (root) node present, and it properly parses to a [store_path::StorePath].
let root_nix_path = match &self.node {
None => Err(ValidatePathInfoError::NoNodePresent)?,
Some(node) => {
// NOTE: We could have some PathComponent not allocating here,
// so this can return StorePathRef.
// However, as this will get refactored away to stricter types
// soon anyways, there's no point.
let (name, _node) = node
.clone()
.into_name_and_node()
.map_err(ValidatePathInfoError::InvalidRootNode)?;
// parse the name of the node itself and return
parse_node_name_root(name.as_ref(), ValidatePathInfoError::InvalidNodeName)?
.to_owned()
}
};
// return the root nix path
Ok(root_nix_path)
}
/// With self and its store path name, this reconstructs a
/// [nix_compat::narinfo::NarInfo<'_>].
/// It can be used to validate Signatures, or get back a (sparse) NarInfo
/// struct to prepare writing it out.
///
/// It assumes self to be validated first, and will only return None if the
/// `narinfo` field is unpopulated.
///
/// It does very little allocation (a Vec each for `signatures` and
/// `references`), the rest points to data owned elsewhere.
///
/// Keep in mind this is not able to reconstruct all data present in the
/// NarInfo<'_>, as some of it is not stored at all:
/// - the `system`, `file_hash` and `file_size` fields are set to `None`.
/// - the URL is set to an empty string.
/// - Compression is set to "none"
///
/// If you want to render it out to a string and be able to parse it back
/// in, at least URL *must* be set again.
pub fn to_narinfo<'a>(
&'a self,
store_path: store_path::StorePathRef<'a>,
) -> Option<nix_compat::narinfo::NarInfo<'_>> {
let narinfo = &self.narinfo.as_ref()?;
Some(nix_compat::narinfo::NarInfo {
flags: Flags::empty(),
store_path,
nar_hash: narinfo
.nar_sha256
.as_ref()
.try_into()
.expect("invalid narhash"),
nar_size: narinfo.nar_size,
references: narinfo
.reference_names
.iter()
.map(|ref_name| {
// This shouldn't pass validation
StorePathRef::from_bytes(ref_name.as_bytes()).expect("invalid reference")
})
.collect(),
signatures: narinfo
.signatures
.iter()
.map(|sig| {
nix_compat::narinfo::SignatureRef::new(
&sig.name,
// This shouldn't pass validation
sig.data[..].try_into().expect("invalid signature len"),
)
})
.collect(),
ca: narinfo
.ca
.as_ref()
.map(|ca| ca.try_into().expect("invalid ca")),
system: None,
deriver: narinfo.deriver.as_ref().map(|deriver| {
StorePathRef::from_name_and_digest(&deriver.name, &deriver.digest)
.expect("invalid deriver")
}),
url: "",
compression: Some("none"),
file_hash: None,
file_size: None,
})
}
/// The signature at position is invalid
#[error("The signature at position {0} is invalid: {1}")]
InvalidSignature(usize, SignatureError),
}
/// Errors that can occur when converting from a [nar_info::Ca] to a (stricter)
@ -341,45 +178,154 @@ impl From<&nix_compat::nixhash::CAHash> for nar_info::Ca {
}
}
impl From<&nix_compat::narinfo::NarInfo<'_>> for NarInfo {
/// Converts from a NarInfo (returned from the NARInfo parser) to the proto-
/// level NarInfo struct.
fn from(value: &nix_compat::narinfo::NarInfo<'_>) -> Self {
let signatures = value
.signatures
.iter()
.map(|sig| nar_info::Signature {
name: sig.name().to_string(),
data: Bytes::copy_from_slice(sig.bytes()),
})
.collect();
NarInfo {
nar_size: value.nar_size,
nar_sha256: Bytes::copy_from_slice(&value.nar_hash),
signatures,
reference_names: value.references.iter().map(|r| r.to_string()).collect(),
deriver: value.deriver.as_ref().map(|sp| StorePath {
name: (*sp.name()).to_owned(),
digest: Bytes::copy_from_slice(sp.digest()),
}),
ca: value.ca.as_ref().map(|ca| ca.into()),
}
}
}
impl From<&nix_compat::narinfo::NarInfo<'_>> for PathInfo {
/// Converts from a NarInfo (returned from the NARInfo parser) to a PathInfo
/// struct with the node set to None.
fn from(value: &nix_compat::narinfo::NarInfo<'_>) -> Self {
impl From<crate::pathinfoservice::PathInfo> for PathInfo {
fn from(value: crate::pathinfoservice::PathInfo) -> Self {
Self {
node: None,
node: Some(castorepb::Node::from_name_and_node(
value.store_path.to_string().into_bytes().into(),
value.node,
)),
references: value
.references
.iter()
.map(|x| Bytes::copy_from_slice(x.digest()))
.map(|reference| Bytes::copy_from_slice(reference.digest()))
.collect(),
narinfo: Some(value.into()),
narinfo: Some(NarInfo {
nar_size: value.nar_size,
nar_sha256: Bytes::copy_from_slice(&value.nar_sha256),
signatures: value
.signatures
.iter()
.map(|sig| nar_info::Signature {
name: sig.name().to_string(),
data: Bytes::copy_from_slice(sig.bytes()),
})
.collect(),
reference_names: value.references.iter().map(|r| r.to_string()).collect(),
deriver: value.deriver.as_ref().map(|sp| StorePath {
name: (*sp.name()).to_owned(),
digest: Bytes::copy_from_slice(sp.digest()),
}),
ca: value.ca.as_ref().map(|ca| ca.into()),
}),
}
}
}
impl TryFrom<PathInfo> for crate::pathinfoservice::PathInfo {
type Error = ValidatePathInfoError;
fn try_from(value: PathInfo) -> Result<Self, Self::Error> {
let narinfo = value
.narinfo
.ok_or_else(|| ValidatePathInfoError::NarInfoFieldMissing)?;
// ensure the references have the right number of bytes.
for (i, reference) in value.references.iter().enumerate() {
if reference.len() != store_path::DIGEST_SIZE {
return Err(ValidatePathInfoError::InvalidReferenceDigestLen(
i,
reference.len(),
));
}
}
// ensure the number of references there matches PathInfo.references count.
if narinfo.reference_names.len() != value.references.len() {
return Err(ValidatePathInfoError::InconsistentNumberOfReferences(
value.references.len(),
narinfo.reference_names.len(),
));
}
// parse references in reference_names.
let mut references = vec![];
for (i, reference_name_str) in narinfo.reference_names.iter().enumerate() {
// ensure they parse as (non-absolute) store paths
let reference_names_store_path =
StorePathRef::from_bytes(reference_name_str.as_bytes()).map_err(|_| {
ValidatePathInfoError::InvalidNarinfoReferenceName(
i,
reference_name_str.to_owned(),
)
})?;
// ensure their digest matches the one at self.references[i].
{
// This is safe, because we ensured the proper length earlier already.
let reference_digest = value.references[i].to_vec().try_into().unwrap();
if reference_names_store_path.digest() != &reference_digest {
return Err(
ValidatePathInfoError::InconsistentNarinfoReferenceNameDigest(
i,
reference_digest,
*reference_names_store_path.digest(),
),
);
} else {
references.push(reference_names_store_path.to_owned());
}
}
}
let nar_sha256_length = narinfo.nar_sha256.len();
// split value.node into the name and node components
let (name, node) = value
.node
.ok_or_else(|| ValidatePathInfoError::NoNodePresent)?
.into_name_and_node()
.map_err(ValidatePathInfoError::InvalidRootNode)?;
Ok(Self {
// value.node has a valid name according to the castore model but might not parse to a
// [StorePath]
store_path: nix_compat::store_path::StorePath::from_bytes(name.as_ref()).map_err(
|err| ValidatePathInfoError::InvalidNodeName(name.as_ref().to_vec(), err),
)?,
node,
references,
nar_size: narinfo.nar_size,
nar_sha256: narinfo.nar_sha256.to_vec()[..]
.try_into()
.map_err(|_| ValidatePathInfoError::InvalidNarSha256DigestLen(nar_sha256_length))?,
// If the Deriver field is populated, ensure it parses to a
// [StorePath].
// We can't check for it to *not* end with .drv, as the .drv files produced by
// recursive Nix end with multiple .drv suffixes, and only one is popped when
// converting to this field.
deriver: narinfo
.deriver
.map(|deriver| {
nix_compat::store_path::StorePath::from_name_and_digest(
&deriver.name,
&deriver.digest,
)
.map_err(ValidatePathInfoError::InvalidDeriverField)
})
.transpose()?,
signatures: narinfo
.signatures
.into_iter()
.enumerate()
.map(|(i, signature)| {
signature.data.to_vec()[..]
.try_into()
.map_err(|_| {
ValidatePathInfoError::InvalidSignature(
i,
SignatureError::InvalidSignatureLen(signature.data.len()),
)
})
.map(|signature_data| Signature::new(signature.name, signature_data))
})
.collect::<Result<Vec<_>, ValidatePathInfoError>>()?,
ca: narinfo
.ca
.as_ref()
.map(TryFrom::try_from)
.transpose()
.map_err(ValidatePathInfoError::InvalidCaField)?,
})
}
}

View file

@ -1,274 +1,226 @@
use crate::proto::{nar_info::Signature, NarInfo, PathInfo, ValidatePathInfoError};
use crate::tests::fixtures::*;
use crate::pathinfoservice::PathInfo;
use crate::proto::{self, ValidatePathInfoError};
use crate::tests::fixtures::{DUMMY_PATH, DUMMY_PATH_DIGEST, DUMMY_PATH_STR};
use bytes::Bytes;
use data_encoding::BASE64;
use nix_compat::nixbase32;
use nix_compat::store_path::{self, StorePath, StorePathRef};
use lazy_static::lazy_static;
use nix_compat::store_path;
use rstest::rstest;
use tvix_castore::fixtures::DUMMY_DIGEST;
use tvix_castore::proto as castorepb;
use tvix_castore::{DirectoryError, ValidateNodeError};
#[rstest]
#[case::no_node(None, Err(ValidatePathInfoError::NoNodePresent))]
#[case::no_node_2(Some(castorepb::Node { node: None}), Err(ValidatePathInfoError::InvalidRootNode(DirectoryError::NoNodeSet)))]
lazy_static! {
/// A valid PathInfo message
/// The references in `narinfo.reference_names` aligns with what's in
/// `references`.
static ref PROTO_PATH_INFO : proto::PathInfo = proto::PathInfo {
node: Some(castorepb::Node {
node: Some(castorepb::node::Node::Directory(castorepb::DirectoryNode {
name: DUMMY_PATH_STR.into(),
digest: DUMMY_DIGEST.clone().into(),
size: 0,
})),
}),
references: vec![DUMMY_PATH_DIGEST.as_slice().into()],
narinfo: Some(proto::NarInfo {
nar_size: 0,
nar_sha256: DUMMY_DIGEST.clone().into(),
signatures: vec![],
reference_names: vec![DUMMY_PATH_STR.to_string()],
deriver: None,
ca: Some(proto::nar_info::Ca { r#type: proto::nar_info::ca::Hash::NarSha256.into(), digest: DUMMY_DIGEST.clone().into() })
}),
};
}
fn validate_pathinfo(
#[test]
fn convert_valid() {
let path_info = PROTO_PATH_INFO.clone();
PathInfo::try_from(path_info).expect("must succeed");
}
/// Create a PathInfo with a correct deriver field and ensure it succeeds.
#[test]
fn convert_valid_deriver() {
let mut path_info = PROTO_PATH_INFO.clone();
// add a valid deriver
let narinfo = path_info.narinfo.as_mut().unwrap();
narinfo.deriver = Some(crate::proto::StorePath {
name: DUMMY_PATH.name().to_string(),
digest: Bytes::from(DUMMY_PATH_DIGEST.as_slice()),
});
let path_info = PathInfo::try_from(path_info).expect("must succeed");
assert_eq!(DUMMY_PATH.clone(), path_info.deriver.unwrap())
}
#[rstest]
#[case::no_node(None, ValidatePathInfoError::NoNodePresent)]
#[case::no_node_2(Some(castorepb::Node { node: None}), ValidatePathInfoError::InvalidRootNode(DirectoryError::NoNodeSet))]
fn convert_pathinfo_wrong_nodes(
#[case] node: Option<castorepb::Node>,
#[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>,
#[case] exp_err: ValidatePathInfoError,
) {
// construct the PathInfo object
let p = PathInfo {
node,
..Default::default()
};
let mut path_info = PROTO_PATH_INFO.clone();
path_info.node = node;
assert_eq!(exp_result, p.validate());
assert_eq!(
exp_err,
PathInfo::try_from(path_info).expect_err("must fail")
);
}
/// Constructs a [proto::PathInfo] with root nodes that have wrong data in
/// various places, causing the conversion to [PathInfo] to fail.
#[rstest]
#[case::ok(castorepb::DirectoryNode {
name: DUMMY_PATH.into(),
digest: DUMMY_DIGEST.clone().into(),
size: 0,
}, Ok(StorePath::from_bytes(DUMMY_PATH.as_bytes()).unwrap()))]
#[case::invalid_digest_length(castorepb::DirectoryNode {
name: DUMMY_PATH.into(),
#[case::directory_invalid_digest_length(
castorepb::node::Node::Directory(castorepb::DirectoryNode {
name: DUMMY_PATH_STR.into(),
digest: Bytes::new(),
size: 0,
}, Err(ValidatePathInfoError::InvalidRootNode(DirectoryError::InvalidNode(DUMMY_PATH.into(), ValidateNodeError::InvalidDigestLen(0)))))]
#[case::invalid_node_name_no_storepath(castorepb::DirectoryNode {
}),
ValidatePathInfoError::InvalidRootNode(DirectoryError::InvalidNode(DUMMY_PATH_STR.into(), ValidateNodeError::InvalidDigestLen(0)))
)]
#[case::directory_invalid_node_name_no_storepath(
castorepb::node::Node::Directory(castorepb::DirectoryNode {
name: "invalid".into(),
digest: DUMMY_DIGEST.clone().into(),
size: 0,
}, Err(ValidatePathInfoError::InvalidNodeName(
"invalid".into(),
store_path::Error::InvalidLength
)))]
fn validate_directory(
#[case] directory_node: castorepb::DirectoryNode,
#[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>,
) {
// construct the PathInfo object
let p = PathInfo {
node: Some(castorepb::Node {
node: Some(castorepb::node::Node::Directory(directory_node)),
}),
..Default::default()
};
assert_eq!(exp_result, p.validate());
}
#[rstest]
#[case::ok(
castorepb::FileNode {
name: DUMMY_PATH.into(),
digest: DUMMY_DIGEST.clone().into(),
size: 0,
executable: false,
},
Ok(StorePath::from_bytes(DUMMY_PATH.as_bytes()).unwrap())
}),
ValidatePathInfoError::InvalidNodeName("invalid".into(), store_path::Error::InvalidLength)
)]
#[case::invalid_digest_len(
castorepb::FileNode {
name: DUMMY_PATH.into(),
#[case::file_invalid_digest_len(
castorepb::node::Node::File(castorepb::FileNode {
name: DUMMY_PATH_STR.into(),
digest: Bytes::new(),
..Default::default()
},
Err(ValidatePathInfoError::InvalidRootNode(DirectoryError::InvalidNode(DUMMY_PATH.into(), ValidateNodeError::InvalidDigestLen(0))))
}),
ValidatePathInfoError::InvalidRootNode(DirectoryError::InvalidNode(DUMMY_PATH_STR.into(), ValidateNodeError::InvalidDigestLen(0)))
)]
#[case::invalid_node_name(
castorepb::FileNode {
#[case::file_invalid_node_name(
castorepb::node::Node::File(castorepb::FileNode {
name: "invalid".into(),
digest: DUMMY_DIGEST.clone().into(),
..Default::default()
},
Err(ValidatePathInfoError::InvalidNodeName(
}),
ValidatePathInfoError::InvalidNodeName(
"invalid".into(),
store_path::Error::InvalidLength
))
)
)]
fn validate_file(
#[case] file_node: castorepb::FileNode,
#[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>,
) {
// construct the PathInfo object
let p = PathInfo {
node: Some(castorepb::Node {
node: Some(castorepb::node::Node::File(file_node)),
}),
..Default::default()
};
assert_eq!(exp_result, p.validate());
}
#[rstest]
#[case::ok(
castorepb::SymlinkNode {
name: DUMMY_PATH.into(),
target: "foo".into(),
},
Ok(StorePath::from_bytes(DUMMY_PATH.as_bytes()).unwrap())
)]
#[case::invalid_node_name(
castorepb::SymlinkNode {
#[case::symlink_invalid_node_name(
castorepb::node::Node::Symlink(castorepb::SymlinkNode {
name: "invalid".into(),
target: "foo".into(),
},
Err(ValidatePathInfoError::InvalidNodeName(
}),
ValidatePathInfoError::InvalidNodeName(
"invalid".into(),
store_path::Error::InvalidLength
))
)
)]
fn validate_symlink(
#[case] symlink_node: castorepb::SymlinkNode,
#[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>,
) {
// construct the PathInfo object
let p = PathInfo {
node: Some(castorepb::Node {
node: Some(castorepb::node::Node::Symlink(symlink_node)),
}),
..Default::default()
};
assert_eq!(exp_result, p.validate());
fn convert_fail_node(#[case] node: castorepb::node::Node, #[case] exp_err: ValidatePathInfoError) {
// construct the proto::PathInfo object
let mut p = PROTO_PATH_INFO.clone();
p.node = Some(castorepb::Node { node: Some(node) });
assert_eq!(exp_err, PathInfo::try_from(p).expect_err("must fail"));
}
/// Ensure parsing a correct PathInfo without narinfo populated succeeds.
/// Ensure a PathInfo without narinfo populated fails to convert.
#[test]
fn validate_references_without_narinfo_ok() {
assert!(PATH_INFO_WITHOUT_NARINFO.validate().is_ok());
}
fn convert_without_narinfo_fail() {
let mut path_info = PROTO_PATH_INFO.clone();
path_info.narinfo = None;
/// Ensure parsing a correct PathInfo with narinfo populated succeeds.
#[test]
fn validate_references_with_narinfo_ok() {
assert!(PATH_INFO_WITH_NARINFO.validate().is_ok());
assert_eq!(
ValidatePathInfoError::NarInfoFieldMissing,
PathInfo::try_from(path_info).expect_err("must fail"),
);
}
/// Create a PathInfo with a wrong digest length in narinfo.nar_sha256, and
/// ensure validation fails.
/// ensure conversion fails.
#[test]
fn validate_wrong_nar_sha256() {
let mut path_info = PATH_INFO_WITH_NARINFO.clone();
fn convert_wrong_nar_sha256() {
let mut path_info = PROTO_PATH_INFO.clone();
path_info.narinfo.as_mut().unwrap().nar_sha256 = vec![0xbe, 0xef].into();
match path_info.validate().expect_err("must_fail") {
ValidatePathInfoError::InvalidNarSha256DigestLen(2) => {}
e => panic!("unexpected error: {:?}", e),
};
assert_eq!(
ValidatePathInfoError::InvalidNarSha256DigestLen(2),
PathInfo::try_from(path_info).expect_err("must fail")
);
}
/// Create a PathInfo with a wrong count of narinfo.reference_names,
/// and ensure validation fails.
#[test]
fn validate_inconsistent_num_refs_fail() {
let mut path_info = PATH_INFO_WITH_NARINFO.clone();
fn convert_inconsistent_num_refs_fail() {
let mut path_info = PROTO_PATH_INFO.clone();
path_info.narinfo.as_mut().unwrap().reference_names = vec![];
match path_info.validate().expect_err("must_fail") {
ValidatePathInfoError::InconsistentNumberOfReferences(1, 0) => {}
e => panic!("unexpected error: {:?}", e),
};
assert_eq!(
ValidatePathInfoError::InconsistentNumberOfReferences(1, 0),
PathInfo::try_from(path_info).expect_err("must fail")
);
}
/// Create a PathInfo with a wrong digest length in references.
#[test]
fn validate_invalid_reference_digest_len() {
let mut path_info = PATH_INFO_WITHOUT_NARINFO.clone();
fn convert_invalid_reference_digest_len() {
let mut path_info = PROTO_PATH_INFO.clone();
path_info.references.push(vec![0xff, 0xff].into());
match path_info.validate().expect_err("must fail") {
assert_eq!(
ValidatePathInfoError::InvalidReferenceDigestLen(
1, // position
2, // unexpected digest len
) => {}
e => panic!("unexpected error: {:?}", e),
};
),
PathInfo::try_from(path_info).expect_err("must fail")
);
}
/// Create a PathInfo with a narinfo.reference_names[0] entry that is not a valid store path.
#[test]
fn validate_invalid_narinfo_reference_name() {
let mut path_info = PATH_INFO_WITH_NARINFO.clone();
fn convert_invalid_narinfo_reference_name() {
let mut path_info = PROTO_PATH_INFO.clone();
// This is invalid, as the store prefix is not part of reference_names.
path_info.narinfo.as_mut().unwrap().reference_names[0] =
"/nix/store/00000000000000000000000000000000-dummy".to_string();
match path_info.validate().expect_err("must fail") {
ValidatePathInfoError::InvalidNarinfoReferenceName(0, reference_name) => {
assert_eq!(
"/nix/store/00000000000000000000000000000000-dummy",
reference_name
);
}
e => panic!("unexpected error: {:?}", e),
}
assert_eq!(
ValidatePathInfoError::InvalidNarinfoReferenceName(
0,
"/nix/store/00000000000000000000000000000000-dummy".to_string()
),
PathInfo::try_from(path_info).expect_err("must fail")
);
}
/// Create a PathInfo with a narinfo.reference_names[0] that doesn't match references[0].
#[test]
fn validate_inconsistent_narinfo_reference_name_digest() {
let mut path_info = PATH_INFO_WITH_NARINFO.clone();
fn convert_inconsistent_narinfo_reference_name_digest() {
let mut path_info = PROTO_PATH_INFO.clone();
// mutate the first reference, they were all zeroes before
path_info.references[0] = vec![0xff; store_path::DIGEST_SIZE].into();
match path_info.validate().expect_err("must fail") {
ValidatePathInfoError::InconsistentNarinfoReferenceNameDigest(0, e_expected, e_actual) => {
assert_eq!(path_info.references[0][..], e_expected[..]);
assert_eq!(DUMMY_PATH_DIGEST, e_actual);
}
e => panic!("unexpected error: {:?}", e),
}
}
/// Create a node with an empty symlink target, and ensure it fails validation.
#[test]
fn validate_symlink_empty_target_invalid() {
castorepb::Node {
node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode {
name: "foo".into(),
target: "".into(),
})),
}
.into_name_and_node()
.expect_err("must fail validation");
}
/// Create a node with a symlink target including null bytes, and ensure it
/// fails validation.
#[test]
fn validate_symlink_target_null_byte_invalid() {
castorepb::Node {
node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode {
name: "foo".into(),
target: "foo\0".into(),
})),
}
.into_name_and_node()
.expect_err("must fail validation");
}
/// Create a PathInfo with a correct deriver field and ensure it succeeds.
#[test]
fn validate_valid_deriver() {
let mut path_info = PATH_INFO_WITH_NARINFO.clone();
// add a valid deriver
let narinfo = path_info.narinfo.as_mut().unwrap();
narinfo.deriver = Some(crate::proto::StorePath {
name: "foo".to_string(),
digest: Bytes::from(DUMMY_PATH_DIGEST.as_slice()),
});
path_info.validate().expect("must validate");
assert_eq!(
ValidatePathInfoError::InconsistentNarinfoReferenceNameDigest(
0,
path_info.references[0][..].try_into().unwrap(),
DUMMY_PATH_DIGEST
),
PathInfo::try_from(path_info).expect_err("must fail")
)
}
/// Create a PathInfo with a broken deriver field and ensure it fails.
#[test]
fn validate_invalid_deriver() {
let mut path_info = PATH_INFO_WITH_NARINFO.clone();
fn convert_invalid_deriver() {
let mut path_info = PROTO_PATH_INFO.clone();
// add a broken deriver (invalid digest)
let narinfo = path_info.narinfo.as_mut().unwrap();
@ -277,157 +229,8 @@ fn validate_invalid_deriver() {
digest: vec![].into(),
});
match path_info.validate().expect_err("must fail validation") {
ValidatePathInfoError::InvalidDeriverField(_) => {}
e => panic!("unexpected error: {:?}", e),
}
}
#[test]
fn from_nixcompat_narinfo() {
let narinfo_parsed = nix_compat::narinfo::NarInfo::parse(
r#"StorePath: /nix/store/s66mzxpvicwk07gjbjfw9izjfa797vsw-hello-2.12.1
URL: nar/1nhgq6wcggx0plpy4991h3ginj6hipsdslv4fd4zml1n707j26yq.nar.xz
Compression: xz
FileHash: sha256:1nhgq6wcggx0plpy4991h3ginj6hipsdslv4fd4zml1n707j26yq
FileSize: 50088
NarHash: sha256:0yzhigwjl6bws649vcs2asa4lbs8hg93hyix187gc7s7a74w5h80
NarSize: 226488
References: 3n58xw4373jp0ljirf06d8077j15pc4j-glibc-2.37-8 s66mzxpvicwk07gjbjfw9izjfa797vsw-hello-2.12.1
Deriver: ib3sh3pcz10wsmavxvkdbayhqivbghlq-hello-2.12.1.drv
Sig: cache.nixos.org-1:8ijECciSFzWHwwGVOIVYdp2fOIOJAfmzGHPQVwpktfTQJF6kMPPDre7UtFw3o+VqenC5P8RikKOAAfN7CvPEAg=="#).expect("must parse");
assert_eq!(
PathInfo {
node: None,
references: vec![
Bytes::copy_from_slice(&nixbase32::decode_fixed::<20>("3n58xw4373jp0ljirf06d8077j15pc4j").unwrap()),
Bytes::copy_from_slice(&nixbase32::decode_fixed::<20>("s66mzxpvicwk07gjbjfw9izjfa797vsw").unwrap()),
],
narinfo: Some(
NarInfo {
nar_size: 226488,
nar_sha256: Bytes::copy_from_slice(
&nixbase32::decode_fixed::<32>("0yzhigwjl6bws649vcs2asa4lbs8hg93hyix187gc7s7a74w5h80".as_bytes())
.unwrap()
),
signatures: vec![Signature {
name: "cache.nixos.org-1".to_string(),
data: BASE64.decode("8ijECciSFzWHwwGVOIVYdp2fOIOJAfmzGHPQVwpktfTQJF6kMPPDre7UtFw3o+VqenC5P8RikKOAAfN7CvPEAg==".as_bytes()).unwrap().into(),
}],
reference_names: vec![
"3n58xw4373jp0ljirf06d8077j15pc4j-glibc-2.37-8".to_string(),
"s66mzxpvicwk07gjbjfw9izjfa797vsw-hello-2.12.1".to_string()
],
deriver: Some(crate::proto::StorePath {
digest: Bytes::copy_from_slice(&nixbase32::decode_fixed::<20>("ib3sh3pcz10wsmavxvkdbayhqivbghlq").unwrap()),
name: "hello-2.12.1".to_string(),
}),
ca: None,
}
)
},
(&narinfo_parsed).into(),
);
}
#[test]
fn from_nixcompat_narinfo_fod() {
let narinfo_parsed = nix_compat::narinfo::NarInfo::parse(
r#"StorePath: /nix/store/pa10z4ngm0g83kx9mssrqzz30s84vq7k-hello-2.12.1.tar.gz
URL: nar/1zjrhzhaizsrlsvdkqfl073vivmxcqnzkff4s50i0cdf541ary1r.nar.xz
Compression: xz
FileHash: sha256:1zjrhzhaizsrlsvdkqfl073vivmxcqnzkff4s50i0cdf541ary1r
FileSize: 1033524
NarHash: sha256:1lvqpbk2k1sb39z8jfxixf7p7v8sj4z6mmpa44nnmff3w1y6h8lh
NarSize: 1033416
References:
Deriver: dyivpmlaq2km6c11i0s6bi6mbsx0ylqf-hello-2.12.1.tar.gz.drv
Sig: cache.nixos.org-1:ywnIG629nQZQhEr6/HLDrLT/mUEp5J1LC6NmWSlJRWL/nM7oGItJQUYWGLvYGhSQvHrhIuvMpjNmBNh/WWqCDg==
CA: fixed:sha256:086vqwk2wl8zfs47sq2xpjc9k066ilmb8z6dn0q6ymwjzlm196cd"#
).expect("must parse");
assert_eq!(
PathInfo {
node: None,
references: vec![],
narinfo: Some(
NarInfo {
nar_size: 1033416,
nar_sha256: Bytes::copy_from_slice(
&nixbase32::decode_fixed::<32>(
"1lvqpbk2k1sb39z8jfxixf7p7v8sj4z6mmpa44nnmff3w1y6h8lh"
)
.unwrap()
),
signatures: vec![Signature {
name: "cache.nixos.org-1".to_string(),
data: BASE64
.decode("ywnIG629nQZQhEr6/HLDrLT/mUEp5J1LC6NmWSlJRWL/nM7oGItJQUYWGLvYGhSQvHrhIuvMpjNmBNh/WWqCDg==".as_bytes())
.unwrap()
.into(),
}],
reference_names: vec![],
deriver: Some(crate::proto::StorePath {
digest: Bytes::copy_from_slice(
&nixbase32::decode_fixed::<20>("dyivpmlaq2km6c11i0s6bi6mbsx0ylqf").unwrap()
),
name: "hello-2.12.1.tar.gz".to_string(),
}),
ca: Some(crate::proto::nar_info::Ca {
r#type: crate::proto::nar_info::ca::Hash::FlatSha256.into(),
digest: Bytes::copy_from_slice(
&nixbase32::decode_fixed::<32>(
"086vqwk2wl8zfs47sq2xpjc9k066ilmb8z6dn0q6ymwjzlm196cd"
)
.unwrap()
)
}),
}
),
},
(&narinfo_parsed).into()
);
}
/// Exercise .to_narinfo() on a PathInfo and ensure important fields are preserved.
#[test]
fn as_narinfo() {
let narinfo_parsed = nix_compat::narinfo::NarInfo::parse(
r#"StorePath: /nix/store/pa10z4ngm0g83kx9mssrqzz30s84vq7k-hello-2.12.1.tar.gz
URL: nar/1zjrhzhaizsrlsvdkqfl073vivmxcqnzkff4s50i0cdf541ary1r.nar.xz
Compression: xz
FileHash: sha256:1zjrhzhaizsrlsvdkqfl073vivmxcqnzkff4s50i0cdf541ary1r
FileSize: 1033524
NarHash: sha256:1lvqpbk2k1sb39z8jfxixf7p7v8sj4z6mmpa44nnmff3w1y6h8lh
NarSize: 1033416
References:
Deriver: dyivpmlaq2km6c11i0s6bi6mbsx0ylqf-hello-2.12.1.tar.gz.drv
Sig: cache.nixos.org-1:ywnIG629nQZQhEr6/HLDrLT/mUEp5J1LC6NmWSlJRWL/nM7oGItJQUYWGLvYGhSQvHrhIuvMpjNmBNh/WWqCDg==
CA: fixed:sha256:086vqwk2wl8zfs47sq2xpjc9k066ilmb8z6dn0q6ymwjzlm196cd"#
).expect("must parse");
let path_info: PathInfo = (&narinfo_parsed).into();
let mut narinfo_returned = path_info
.to_narinfo(
StorePathRef::from_bytes(b"pa10z4ngm0g83kx9mssrqzz30s84vq7k-hello-2.12.1.tar.gz")
.expect("invalid storepath"),
)
.expect("must be some");
narinfo_returned.url = "some.nar";
assert_eq!(
r#"StorePath: /nix/store/pa10z4ngm0g83kx9mssrqzz30s84vq7k-hello-2.12.1.tar.gz
URL: some.nar
Compression: none
NarHash: sha256:1lvqpbk2k1sb39z8jfxixf7p7v8sj4z6mmpa44nnmff3w1y6h8lh
NarSize: 1033416
References:
Deriver: dyivpmlaq2km6c11i0s6bi6mbsx0ylqf-hello-2.12.1.tar.gz.drv
Sig: cache.nixos.org-1:ywnIG629nQZQhEr6/HLDrLT/mUEp5J1LC6NmWSlJRWL/nM7oGItJQUYWGLvYGhSQvHrhIuvMpjNmBNh/WWqCDg==
CA: fixed:sha256:086vqwk2wl8zfs47sq2xpjc9k066ilmb8z6dn0q6ymwjzlm196cd
"#,
narinfo_returned.to_string(),
);
ValidatePathInfoError::InvalidDeriverField(store_path::Error::InvalidLength),
PathInfo::try_from(path_info).expect_err("must fail")
)
}

View file

@ -1,24 +1,27 @@
use crate::pathinfoservice::PathInfo;
use lazy_static::lazy_static;
use nix_compat::nixhash::{CAHash, NixHash};
use nix_compat::store_path::StorePath;
use rstest::{self, *};
use rstest_reuse::*;
use std::io;
use std::sync::Arc;
pub use tvix_castore::fixtures::*;
use tvix_castore::fixtures::{
DIRECTORY_COMPLICATED, DIRECTORY_WITH_KEEP, DUMMY_DIGEST, EMPTY_BLOB_CONTENTS,
EMPTY_BLOB_DIGEST, HELLOWORLD_BLOB_CONTENTS, HELLOWORLD_BLOB_DIGEST,
};
use tvix_castore::{
blobservice::{BlobService, MemoryBlobService},
directoryservice::{DirectoryService, MemoryDirectoryService},
proto as castorepb, Node,
Node,
};
use crate::proto::{
nar_info::{ca, Ca},
NarInfo, PathInfo,
};
pub const DUMMY_PATH: &str = "00000000000000000000000000000000-dummy";
pub const DUMMY_PATH_STR: &str = "00000000000000000000000000000000-dummy";
pub const DUMMY_PATH_DIGEST: [u8; 20] = [0; 20];
lazy_static! {
pub static ref DUMMY_PATH: StorePath<String> = StorePath::from_name_and_digest_fixed("dummy", DUMMY_PATH_DIGEST).unwrap();
pub static ref CASTORE_NODE_SYMLINK: Node = Node::Symlink {
target: "/nix/store/somewhereelse".try_into().unwrap(),
};
@ -130,32 +133,19 @@ lazy_static! {
1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0, // ")"
];
/// A PathInfo message without .narinfo populated.
pub static ref PATH_INFO_WITHOUT_NARINFO : PathInfo = PathInfo {
node: Some(castorepb::Node {
node: Some(castorepb::node::Node::Directory(castorepb::DirectoryNode {
name: DUMMY_PATH.into(),
digest: DUMMY_DIGEST.clone().into(),
size: 0,
})),
}),
references: vec![DUMMY_PATH_DIGEST.as_slice().into()],
narinfo: None,
};
/// A PathInfo message with .narinfo populated.
/// The references in `narinfo.reference_names` aligns with what's in
/// `references`.
pub static ref PATH_INFO_WITH_NARINFO : PathInfo = PathInfo {
narinfo: Some(NarInfo {
nar_size: 0,
nar_sha256: DUMMY_DIGEST.clone().into(),
signatures: vec![],
reference_names: vec![DUMMY_PATH.to_string()],
deriver: None,
ca: Some(Ca { r#type: ca::Hash::NarSha256.into(), digest: DUMMY_DIGEST.clone().into() })
}),
..PATH_INFO_WITHOUT_NARINFO.clone()
    /// A strictly typed PathInfo with a Directory root node and a single reference
pub static ref PATH_INFO: PathInfo = PathInfo {
store_path: DUMMY_PATH.clone(),
node: tvix_castore::Node::Directory {
digest: DUMMY_DIGEST.clone(),
size: 0,
},
references: vec![DUMMY_PATH.clone()],
nar_sha256: [0; 32],
nar_size: 0,
signatures: vec![],
deriver: None,
ca: Some(CAHash::Nar(NixHash::Sha256([0; 32]))),
};
}
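
A minimal sketch of how a test could consume the new fixture (the test name is illustrative; the assertions simply restate the definitions above): because the strict PathInfo already carries a parsed StorePath and typed references, tests can compare against them directly instead of re-parsing proto fields.

#[test]
fn path_info_fixture_matches_dummy_path() {
    // Both fields are populated from DUMMY_PATH in the fixture above.
    assert_eq!(*DUMMY_PATH, PATH_INFO.store_path);
    assert_eq!(vec![DUMMY_PATH.clone()], PATH_INFO.references);
}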