refactor(tvix/store): use strictly typed PathInfo struct

This switches the PathInfoService trait from using the proto-derived
PathInfo struct to a more restrictive struct, and updates all
implementations to use it.

It removes a lot of the previous conversions and checks, as invalid
states become unrepresentable, and validations are expressed at the
type level.

PathInfoService implementations consuming protobuf need to convert and
do the verification internally, and can only return the strongly typed
variant.

The nix_compat::narinfo::NarInfo conversions for the proto PathInfo
are removed, we only keep a version showing a NarInfo representation for
the strong struct.

Converting back to a PathInfo requires the root node now, but is
otherwise trivial, so left to the users.

Co-Authored-By: Florian Klink <flokli@flokli.de>
Change-Id: I6fdfdb44063efebb44a8f0097b6b81a828717e03
Reviewed-on: https://cl.tvl.fyi/c/depot/+/12588
Reviewed-by: flokli <flokli@flokli.de>
Tested-by: BuildkiteCI
This commit is contained in:
Marijan Petričević 2024-10-10 09:11:17 -05:00
parent b4ccaac7ad
commit e8040ec61f
26 changed files with 726 additions and 1042 deletions

View file

@ -1 +1,30 @@
use super::{node, Node, SymlinkNode};
mod directory; mod directory;
/// A symlink node whose target is empty must be rejected by validation.
#[test]
fn convert_symlink_empty_target_invalid() {
    // Build a proto node with an empty symlink target.
    let proto_node = Node {
        node: Some(node::Node::Symlink(SymlinkNode {
            name: "foo".into(),
            target: "".into(),
        })),
    };

    // Conversion to the strict representation must fail.
    proto_node
        .into_name_and_node()
        .expect_err("must fail validation");
}
/// A symlink node whose target contains a null byte must be rejected by
/// validation.
#[test]
fn convert_symlink_target_null_byte_invalid() {
    // Build a proto node whose symlink target embeds a NUL byte.
    let proto_node = Node {
        node: Some(node::Node::Symlink(SymlinkNode {
            name: "foo".into(),
            target: "foo\0".into(),
        })),
    };

    // Conversion to the strict representation must fail.
    proto_node
        .into_name_and_node()
        .expect_err("must fail validation");
}

View file

@ -136,22 +136,6 @@ Similarly, we also don't properly populate the build environment for
`fetchClosure` yet. (Note there already is `ExportedPathInfo`, so once `fetchClosure` yet. (Note there already is `ExportedPathInfo`, so once
`structuredAttrs` is there this should be easy. `structuredAttrs` is there this should be easy.
### PathInfo Data types
Similar to the refactors done in tvix-castore, we want a stricter type for
PathInfo, and use the `tvix_castore::nodes::Node` type we now have as the root
node.
This allows removing some checks, conversions and handling for invalid data in
many different places in different store implementations.
Steps:
- Define the stricter `PathInfo` type
- Update the `PathInfoService` trait to use the stricter types
- Update the grpc client impl to convert from the proto types to the
stricter types (and reject invalid ones)
- Update the grpc server wrapper to convert to the proto types
### PathInfo: include references by content ### PathInfo: include references by content
In the PathInfo struct, we currently only store references by their names and In the PathInfo struct, we currently only store references by their names and
store path hash. Getting the castore node for the content at that store path store path hash. Getting the castore node for the content at that store path

View file

@ -182,7 +182,7 @@ pub(crate) mod derivation_builtins {
use tvix_castore::Node; use tvix_castore::Node;
use tvix_eval::generators::Gen; use tvix_eval::generators::Gen;
use tvix_eval::{NixContext, NixContextElement, NixString}; use tvix_eval::{NixContext, NixContextElement, NixString};
use tvix_store::proto::{NarInfo, PathInfo}; use tvix_store::pathinfoservice::PathInfo;
#[builtin("placeholder")] #[builtin("placeholder")]
async fn builtin_placeholder(co: GenCo, input: Value) -> Result<Value, ErrorKind> { async fn builtin_placeholder(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
@ -568,15 +568,6 @@ pub(crate) mod derivation_builtins {
let blob_digest = blob_writer.close().await?; let blob_digest = blob_writer.close().await?;
let ca_hash = CAHash::Text(Sha256::digest(&content).into()); let ca_hash = CAHash::Text(Sha256::digest(&content).into());
let store_path: StorePathRef =
build_ca_path(name.to_str()?, &ca_hash, content.iter_ctx_plain(), false)
.map_err(|_e| {
nix_compat::derivation::DerivationError::InvalidOutputName(
name.to_str_lossy().into_owned(),
)
})
.map_err(DerivationError::InvalidDerivation)?;
let root_node = Node::File { let root_node = Node::File {
digest: blob_digest, digest: blob_digest,
size: blob_size, size: blob_size,
@ -590,41 +581,38 @@ pub(crate) mod derivation_builtins {
.await .await
.map_err(|e| ErrorKind::TvixError(Rc::new(e)))?; .map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
// assemble references from plain context.
let reference_paths: Vec<StorePathRef> = content
.iter_ctx_plain()
.map(|elem| StorePathRef::from_absolute_path(elem.as_bytes()))
.collect::<Result<_, _>>()
.map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
// persist via pathinfo service. // persist via pathinfo service.
state state
.path_info_service .path_info_service
.put(PathInfo { .put(PathInfo {
node: Some(tvix_castore::proto::Node::from_name_and_node( store_path: build_ca_path(
store_path.to_string().into(), name.to_str()?,
root_node, &ca_hash,
)), content.iter_ctx_plain(),
references: reference_paths false,
.iter() )
.map(|x| bytes::Bytes::copy_from_slice(x.digest())) .map_err(|_e| {
.collect(), nix_compat::derivation::DerivationError::InvalidOutputName(
narinfo: Some(NarInfo { name.to_str_lossy().into_owned(),
)
})
.map_err(DerivationError::InvalidDerivation)?,
node: root_node,
// assemble references from plain context.
references: content
.iter_ctx_plain()
.map(|elem| StorePath::from_absolute_path(elem.as_bytes()))
.collect::<Result<_, _>>()
.map_err(|e| ErrorKind::TvixError(Rc::new(e)))?,
nar_size, nar_size,
nar_sha256: nar_sha256.to_vec().into(), nar_sha256,
signatures: vec![], signatures: vec![],
reference_names: reference_paths
.into_iter()
.map(|x| x.to_string())
.collect(),
deriver: None, deriver: None,
ca: Some(ca_hash.into()), ca: Some(ca_hash),
}),
}) })
.await .await
.map_err(|e| ErrorKind::TvixError(Rc::new(e)))?; .map_err(|e| ErrorKind::TvixError(Rc::new(e)))
.map(|path_info| path_info.store_path)
Ok::<_, ErrorKind>(store_path)
})?; })?;
let abs_path = store_path.to_absolute_path(); let abs_path = store_path.to_absolute_path();

View file

@ -11,7 +11,10 @@ use tokio_util::io::{InspectReader, InspectWriter};
use tracing::{instrument, warn, Span}; use tracing::{instrument, warn, Span};
use tracing_indicatif::span_ext::IndicatifSpanExt; use tracing_indicatif::span_ext::IndicatifSpanExt;
use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService, Node}; use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService, Node};
use tvix_store::{nar::NarCalculationService, pathinfoservice::PathInfoService, proto::PathInfo}; use tvix_store::{
nar::NarCalculationService,
pathinfoservice::{PathInfo, PathInfoService},
};
use url::Url; use url::Url;
use crate::builtins::FetcherError; use crate::builtins::FetcherError;
@ -571,19 +574,14 @@ where
// Construct the PathInfo and persist it. // Construct the PathInfo and persist it.
let path_info = PathInfo { let path_info = PathInfo {
node: Some(tvix_castore::proto::Node::from_name_and_node( store_path: store_path.to_owned(),
store_path.to_string().into(), node: node.clone(),
node.clone(),
)),
references: vec![], references: vec![],
narinfo: Some(tvix_store::proto::NarInfo {
nar_size, nar_size,
nar_sha256: nar_sha256.to_vec().into(), nar_sha256,
signatures: vec![], signatures: vec![],
reference_names: vec![],
deriver: None, deriver: None,
ca: Some(ca_hash.into()), ca: Some(ca_hash),
}),
}; };
self.path_info_service self.path_info_service

View file

@ -23,7 +23,7 @@ use tvix_castore::{
directoryservice::{self, DirectoryService}, directoryservice::{self, DirectoryService},
Node, Node,
}; };
use tvix_store::{pathinfoservice::PathInfoService, proto::PathInfo}; use tvix_store::pathinfoservice::{PathInfo, PathInfoService};
use crate::fetchers::Fetcher; use crate::fetchers::Fetcher;
use crate::known_paths::KnownPaths; use crate::known_paths::KnownPaths;
@ -119,23 +119,8 @@ impl TvixStoreIO {
.get(*store_path.digest()) .get(*store_path.digest())
.await? .await?
{ {
// if we have a PathInfo, we know there will be a root_node (due to validation)
// TODO: use stricter typed BuildRequest here. // TODO: use stricter typed BuildRequest here.
Some(path_info) => { Some(path_info) => path_info.node,
let (name, node) = path_info
.node
.expect("no node")
.into_name_and_node()
.expect("invalid node");
assert_eq!(
store_path.to_string().as_bytes(),
name.as_ref(),
"returned node basename must match requested store path"
);
node
}
// If there's no PathInfo found, this normally means we have to // If there's no PathInfo found, this normally means we have to
// trigger the build (and insert into PathInfoService, after // trigger the build (and insert into PathInfoService, after
// reference scanning). // reference scanning).
@ -336,47 +321,37 @@ impl TvixStoreIO {
// assemble the PathInfo to persist // assemble the PathInfo to persist
let path_info = PathInfo { let path_info = PathInfo {
node: Some(tvix_castore::proto::Node::from_name_and_node( store_path: drv_output
drv_output
.1 .1
.path .path
.as_ref() .as_ref()
.ok_or(std::io::Error::new( .ok_or(std::io::Error::new(
std::io::ErrorKind::Other, std::io::ErrorKind::Other,
"missing output store path", "Tvix bug: missing output store path",
))? ))?
.to_string() .to_owned(),
.into(), node: output_node,
output_node,
)),
references: output_needles references: output_needles
.iter() .iter()
.map(|path| Bytes::from(path.digest().as_slice().to_vec())) .map(|s| (**s).to_owned())
.collect(), .collect(),
narinfo: Some(tvix_store::proto::NarInfo {
nar_size, nar_size,
nar_sha256: Bytes::from(nar_sha256.to_vec()), nar_sha256,
signatures: vec![], signatures: vec![],
reference_names: output_needles deriver: Some(
.iter() StorePath::from_name_and_digest_fixed(
.map(|path| path.to_string()) drv_path
.collect(),
deriver: Some(tvix_store::proto::StorePath {
name: drv_path
.name() .name()
.strip_suffix(".drv") .strip_suffix(".drv")
.expect("missing .drv suffix") .expect("missing .drv suffix"),
.to_string(), *drv_path.digest(),
digest: drv_path.digest().to_vec().into(), )
}), .expect(
ca: drv.fod_digest().map( "Tvix bug: StorePath without .drv suffix must be valid",
|fod_digest| -> tvix_store::proto::nar_info::Ca {
(&CAHash::Nar(nix_compat::nixhash::NixHash::Sha256(
fod_digest,
)))
.into()
},
), ),
),
ca: drv.fod_digest().map(|fod_digest| {
CAHash::Nar(nix_compat::nixhash::NixHash::Sha256(fod_digest))
}), }),
}; };
@ -421,8 +396,7 @@ impl TvixStoreIO {
) -> io::Result<(PathInfo, NixHash, StorePathRef<'a>)> { ) -> io::Result<(PathInfo, NixHash, StorePathRef<'a>)> {
// Ask the PathInfoService for the NAR size and sha256 // Ask the PathInfoService for the NAR size and sha256
// We always need it no matter what is the actual hash mode // We always need it no matter what is the actual hash mode
// because the path info construct a narinfo which *always* // because the [PathInfo] needs to contain nar_{sha256,size}.
// require a SHA256 of the NAR representation and the NAR size.
let (nar_size, nar_sha256) = self let (nar_size, nar_sha256) = self
.nar_calculation_service .nar_calculation_service
.as_ref() .as_ref()
@ -431,7 +405,7 @@ impl TvixStoreIO {
// Calculate the output path. This might still fail, as some names are illegal. // Calculate the output path. This might still fail, as some names are illegal.
let output_path = let output_path =
nix_compat::store_path::build_ca_path(name, ca, Vec::<String>::new(), false).map_err( nix_compat::store_path::build_ca_path(name, ca, Vec::<&str>::new(), false).map_err(
|_| { |_| {
std::io::Error::new( std::io::Error::new(
std::io::ErrorKind::InvalidData, std::io::ErrorKind::InvalidData,
@ -446,8 +420,8 @@ impl TvixStoreIO {
let path_info = tvix_store::import::derive_nar_ca_path_info( let path_info = tvix_store::import::derive_nar_ca_path_info(
nar_size, nar_size,
nar_sha256, nar_sha256,
Some(ca), Some(ca.clone()),
output_path.to_string().into(), output_path.to_owned(),
root_node, root_node,
); );

View file

@ -1,10 +1,14 @@
use axum::{http::StatusCode, response::IntoResponse}; use axum::{http::StatusCode, response::IntoResponse};
use bytes::Bytes; use bytes::Bytes;
use nix_compat::{narinfo::NarInfo, nix_http, nixbase32}; use nix_compat::{
narinfo::{NarInfo, Signature},
nix_http, nixbase32,
store_path::StorePath,
};
use prost::Message; use prost::Message;
use tracing::{instrument, warn, Span}; use tracing::{instrument, warn, Span};
use tvix_castore::proto::{self as castorepb}; use tvix_castore::proto::{self as castorepb};
use tvix_store::proto::PathInfo; use tvix_store::pathinfoservice::PathInfo;
use crate::AppState; use crate::AppState;
@ -57,35 +61,15 @@ pub async fn get(
})? })?
.ok_or(StatusCode::NOT_FOUND)?; .ok_or(StatusCode::NOT_FOUND)?;
let store_path = path_info.validate().map_err(|e| {
warn!(err=%e, "invalid PathInfo");
StatusCode::INTERNAL_SERVER_ERROR
})?;
let mut narinfo = path_info.to_narinfo(store_path.as_ref()).ok_or_else(|| {
warn!(path_info=?path_info, "PathInfo contained no NAR data");
StatusCode::INTERNAL_SERVER_ERROR
})?;
// encode the (unnamed) root node in the NAR url itself.
// We strip the name from the proto node before sending it out.
// It's not needed to render the NAR, it'll make the URL shorter, and it
// will make caching these requests easier.
let (_, root_node) = path_info
.node
.as_ref()
.expect("invalid pathinfo")
.to_owned()
.into_name_and_node()
.expect("invalid pathinfo");
let url = format!( let url = format!(
"nar/tvix-castore/{}?narsize={}", "nar/tvix-castore/{}?narsize={}",
data_encoding::BASE64URL_NOPAD data_encoding::BASE64URL_NOPAD.encode(
.encode(&castorepb::Node::from_name_and_node("".into(), root_node).encode_to_vec()), &castorepb::Node::from_name_and_node("".into(), path_info.node.clone()).encode_to_vec()
narinfo.nar_size, ),
path_info.nar_size,
); );
let mut narinfo = path_info.to_narinfo();
narinfo.url = &url; narinfo.url = &url;
Ok(( Ok((
@ -128,9 +112,6 @@ pub async fn put(
// Extract the NARHash from the PathInfo. // Extract the NARHash from the PathInfo.
Span::current().record("path_info.nar_info", nixbase32::encode(&narinfo.nar_hash)); Span::current().record("path_info.nar_info", nixbase32::encode(&narinfo.nar_hash));
// populate the pathinfo.
let mut pathinfo = PathInfo::from(&narinfo);
// Lookup root node with peek, as we don't want to update the LRU list. // Lookup root node with peek, as we don't want to update the LRU list.
// We need to be careful to not hold the RwLock across the await point. // We need to be careful to not hold the RwLock across the await point.
let maybe_root_node: Option<tvix_castore::Node> = let maybe_root_node: Option<tvix_castore::Node> =
@ -138,16 +119,26 @@ pub async fn put(
match maybe_root_node { match maybe_root_node {
Some(root_node) => { Some(root_node) => {
// Set the root node from the lookup.
// We need to rename the node to the narinfo storepath basename, as
// that's where it's stored in PathInfo.
pathinfo.node = Some(castorepb::Node::from_name_and_node(
narinfo.store_path.to_string().into(),
root_node,
));
// Persist the PathInfo. // Persist the PathInfo.
path_info_service.put(pathinfo).await.map_err(|e| { path_info_service
.put(PathInfo {
store_path: narinfo.store_path.to_owned(),
node: root_node,
references: narinfo.references.iter().map(StorePath::to_owned).collect(),
nar_sha256: narinfo.nar_hash,
nar_size: narinfo.nar_size,
signatures: narinfo
.signatures
.into_iter()
.map(|s| {
Signature::<String>::new(s.name().to_string(), s.bytes().to_owned())
})
.collect(),
deriver: narinfo.deriver.as_ref().map(StorePath::to_owned),
ca: narinfo.ca,
})
.await
.map_err(|e| {
warn!(err=%e, "failed to persist the PathInfo"); warn!(err=%e, "failed to persist the PathInfo");
StatusCode::INTERNAL_SERVER_ERROR StatusCode::INTERNAL_SERVER_ERROR
})?; })?;

View file

@ -133,7 +133,7 @@ where
} }
} }
#[derive(Debug, thiserror::Error)] #[derive(Debug, thiserror::Error, PartialEq, Eq)]
pub enum Error { pub enum Error {
#[error("Invalid name: {0}")] #[error("Invalid name: {0}")]
InvalidName(String), InvalidName(String),

View file

@ -4,7 +4,7 @@ use clap::Subcommand;
use futures::future::try_join_all; use futures::future::try_join_all;
use futures::StreamExt; use futures::StreamExt;
use futures::TryStreamExt; use futures::TryStreamExt;
use nix_compat::path_info::ExportedPathInfo; use nix_compat::{path_info::ExportedPathInfo, store_path::StorePath};
use serde::Deserialize; use serde::Deserialize;
use serde::Serialize; use serde::Serialize;
use std::path::PathBuf; use std::path::PathBuf;
@ -16,15 +16,13 @@ use tracing::{info, info_span, instrument, Level, Span};
use tracing_indicatif::span_ext::IndicatifSpanExt as _; use tracing_indicatif::span_ext::IndicatifSpanExt as _;
use tvix_castore::import::fs::ingest_path; use tvix_castore::import::fs::ingest_path;
use tvix_store::nar::NarCalculationService; use tvix_store::nar::NarCalculationService;
use tvix_store::proto::NarInfo;
use tvix_store::proto::PathInfo;
use tvix_store::utils::{ServiceUrls, ServiceUrlsGrpc}; use tvix_store::utils::{ServiceUrls, ServiceUrlsGrpc};
use tvix_castore::proto::blob_service_server::BlobServiceServer; use tvix_castore::proto::blob_service_server::BlobServiceServer;
use tvix_castore::proto::directory_service_server::DirectoryServiceServer; use tvix_castore::proto::directory_service_server::DirectoryServiceServer;
use tvix_castore::proto::GRPCBlobServiceWrapper; use tvix_castore::proto::GRPCBlobServiceWrapper;
use tvix_castore::proto::GRPCDirectoryServiceWrapper; use tvix_castore::proto::GRPCDirectoryServiceWrapper;
use tvix_store::pathinfoservice::PathInfoService; use tvix_store::pathinfoservice::{PathInfo, PathInfoService};
use tvix_store::proto::path_info_service_server::PathInfoServiceServer; use tvix_store::proto::path_info_service_server::PathInfoServiceServer;
use tvix_store::proto::GRPCPathInfoServiceWrapper; use tvix_store::proto::GRPCPathInfoServiceWrapper;
@ -359,23 +357,14 @@ async fn run_cli(cli: Cli) -> Result<(), Box<dyn std::error::Error + Send + Sync
// Create and upload a PathInfo pointing to the root_node, // Create and upload a PathInfo pointing to the root_node,
// annotated with information we have from the reference graph. // annotated with information we have from the reference graph.
let path_info = PathInfo { let path_info = PathInfo {
node: Some(tvix_castore::proto::Node::from_name_and_node( store_path: elem.path.to_owned(),
elem.path.to_string().into(), node: root_node,
root_node, references: elem.references.iter().map(StorePath::to_owned).collect(),
)),
references: Vec::from_iter(
elem.references.iter().map(|e| e.digest().to_vec().into()),
),
narinfo: Some(NarInfo {
nar_size: elem.nar_size, nar_size: elem.nar_size,
nar_sha256: elem.nar_sha256.to_vec().into(), nar_sha256: elem.nar_sha256,
signatures: vec![], signatures: vec![],
reference_names: Vec::from_iter(
elem.references.iter().map(|e| e.to_string()),
),
deriver: None, deriver: None,
ca: None, ca: None,
}),
}; };
path_info_service.put(path_info).await?; path_info_service.put(path_info).await?;

View file

@ -3,18 +3,17 @@ use std::path::Path;
use tracing::{debug, instrument}; use tracing::{debug, instrument};
use tvix_castore::{ use tvix_castore::{
blobservice::BlobService, directoryservice::DirectoryService, import::fs::ingest_path, Node, blobservice::BlobService, directoryservice::DirectoryService, import::fs::ingest_path, Node,
PathComponent,
}; };
use nix_compat::{ use nix_compat::{
nixhash::{CAHash, NixHash}, nixhash::{CAHash, NixHash},
store_path::{self, StorePathRef}, store_path::{self, StorePath, StorePathRef},
}; };
use crate::{ use crate::{
nar::NarCalculationService, nar::NarCalculationService,
pathinfoservice::PathInfoService, pathinfoservice::{PathInfo, PathInfoService},
proto::{nar_info, NarInfo, PathInfo}, proto::nar_info,
}; };
impl From<CAHash> for nar_info::Ca { impl From<CAHash> for nar_info::Ca {
@ -74,33 +73,29 @@ pub fn path_to_name(path: &Path) -> std::io::Result<&str> {
/// Takes the NAR size, SHA-256 of the NAR representation, the root node and optionally /// Takes the NAR size, SHA-256 of the NAR representation, the root node and optionally
/// a CA hash information. /// a CA hash information.
/// ///
/// Returns the path information object for a NAR-style object. /// Constructs a [PathInfo] for a NAR-style object.
/// ///
/// This [`PathInfo`] can be further filled for signatures, deriver or verified for the expected /// The user can then further fill the fields (like deriver, signatures), and/or
/// hashes. /// verify to have the expected hashes.
#[inline] #[inline]
pub fn derive_nar_ca_path_info( pub fn derive_nar_ca_path_info(
nar_size: u64, nar_size: u64,
nar_sha256: [u8; 32], nar_sha256: [u8; 32],
ca: Option<&CAHash>, ca: Option<CAHash>,
name: bytes::Bytes, store_path: StorePath<String>,
root_node: Node, root_node: Node,
) -> PathInfo { ) -> PathInfo {
// assemble the [crate::proto::PathInfo] object. // assemble the [crate::proto::PathInfo] object.
PathInfo { PathInfo {
node: Some(tvix_castore::proto::Node::from_name_and_node( store_path,
name, root_node, node: root_node,
)),
// There's no reference scanning on path contents ingested like this. // There's no reference scanning on path contents ingested like this.
references: vec![], references: vec![],
narinfo: Some(NarInfo {
nar_size, nar_size,
nar_sha256: nar_sha256.to_vec().into(), nar_sha256,
signatures: vec![], signatures: vec![],
reference_names: vec![],
deriver: None, deriver: None,
ca: ca.map(|ca_hash| ca_hash.into()), ca,
}),
} }
} }
@ -141,19 +136,13 @@ where
) )
})?; })?;
let name: PathComponent = output_path
.to_string()
.as_str()
.try_into()
.expect("Tvix bug: StorePath must be PathComponent");
log_node(name.as_ref(), &root_node, path.as_ref()); log_node(name.as_ref(), &root_node, path.as_ref());
let path_info = derive_nar_ca_path_info( let path_info = derive_nar_ca_path_info(
nar_size, nar_size,
nar_sha256, nar_sha256,
Some(&CAHash::Nar(NixHash::Sha256(nar_sha256))), Some(CAHash::Nar(NixHash::Sha256(nar_sha256))),
name.into(), output_path.to_owned(),
root_node, root_node,
); );

View file

@ -1,6 +1,7 @@
pub mod composition; pub mod composition;
pub mod import; pub mod import;
pub mod nar; pub mod nar;
pub mod path_info;
pub mod pathinfoservice; pub mod pathinfoservice;
pub mod proto; pub mod proto;
pub mod utils; pub mod utils;

View file

@ -0,0 +1,87 @@
use nix_compat::{
narinfo::{Flags, Signature},
nixhash::CAHash,
store_path::StorePath,
};
/// Holds metadata about a store path, but not its contents.
///
/// This is somewhat equivalent to the information Nix holds in its SQLite
/// database, or publishes as .narinfo files, except we also embed the
/// [tvix_castore::Node] describing the contents in the castore model.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PathInfo {
    /// The store path this is about.
    pub store_path: StorePath<String>,

    /// The contents in the tvix-castore model.
    /// Can be a directory, file or symlink.
    pub node: tvix_castore::Node,

    /// A list of references.
    pub references: Vec<StorePath<String>>,

    /// The size of the NAR representation of the contents, in bytes.
    pub nar_size: u64,

    /// The sha256 digest of the NAR representation of the contents.
    pub nar_sha256: [u8; 32],

    /// The signatures, usually shown in a .narinfo file.
    pub signatures: Vec<Signature<String>>,

    /// The StorePath of the .drv file producing this output.
    /// The .drv suffix is omitted in its `name` field.
    pub deriver: Option<StorePath<String>>,

    /// The CA field in the .narinfo.
    /// Its textual representations seen in the wild are one of the following:
    ///
    /// * `fixed:r:sha256:1gcky5hlf5vqfzpyhihydmm54grhc94mcs8w7xr8613qsqb1v2j6`
    ///   fixed-output derivations using "recursive" `outputHashMode`.
    /// * `fixed:sha256:19xqkh72crbcba7flwxyi3n293vav6d7qkzkh2v4zfyi4iia8vj8`
    ///   fixed-output derivations using "flat" `outputHashMode`.
    /// * `text:sha256:19xqkh72crbcba7flwxyi3n293vav6d7qkzkh2v4zfyi4iia8vj8`
    ///   Text hashing, used for uploaded .drv files and outputs produced by
    ///   builtins.toFile.
    ///
    /// Semantically, they can be split into the following components:
    ///
    /// * "content address prefix". Currently, "fixed" and "text" are supported.
    /// * "hash mode". Currently, "flat" and "recursive" are supported.
    /// * "hash type". The underlying hash function used.
    ///   Currently, sha1, md5, sha256, sha512.
    /// * "digest". The digest itself.
    ///
    /// There are some restrictions on the possible combinations.
    /// For example, `text` and `fixed:recursive` always imply sha256.
    pub ca: Option<CAHash>,
}
impl PathInfo {
    /// Reconstructs a [nix_compat::narinfo::NarInfo<'_>] from this [PathInfo].
    ///
    /// Allocation is kept minimal (one Vec each for `signatures` and
    /// `references`); everything else borrows from `self`.
    ///
    /// The result can be used to validate Signatures, or to render a .narinfo
    /// file (after some more fields are populated).
    ///
    /// Keep in mind this is not able to reconstruct all data present in the
    /// NarInfo<'_>, as some of it is not stored at all:
    /// - the `system`, `file_hash` and `file_size` fields are set to `None`.
    /// - the URL is set to an empty string.
    /// - Compression is set to "none"
    ///
    /// If you want to render it out to a string and be able to parse it back
    /// in, at least URL *must* be set again.
    pub fn to_narinfo(&self) -> nix_compat::narinfo::NarInfo<'_> {
        // Collect borrowed views of the owned reference/signature lists up
        // front; these are the only allocations this function performs.
        let references = self.references.iter().map(StorePath::as_ref).collect();
        let signatures = self.signatures.iter().map(Signature::as_ref).collect();

        nix_compat::narinfo::NarInfo {
            flags: Flags::empty(),
            store_path: self.store_path.as_ref(),
            nar_hash: self.nar_sha256,
            nar_size: self.nar_size,
            references,
            signatures,
            ca: self.ca.clone(),
            system: None,
            deriver: self.deriver.as_ref().map(StorePath::as_ref),
            // Not stored in [PathInfo]; callers must set the URL before
            // rendering a parseable .narinfo.
            url: "",
            compression: Some("none"),
            file_hash: None,
            file_size: None,
        }
    }
}

View file

@ -1,6 +1,5 @@
use super::PathInfoService; use super::{PathInfo, PathInfoService};
use crate::proto; use crate::proto;
use crate::proto::PathInfo;
use async_stream::try_stream; use async_stream::try_stream;
use bigtable_rs::{bigtable, google::bigtable::v2 as bigtable_v2}; use bigtable_rs::{bigtable, google::bigtable::v2 as bigtable_v2};
use bytes::Bytes; use bytes::Bytes;
@ -232,14 +231,13 @@ impl PathInfoService for BigtablePathInfoService {
} }
// Try to parse the value into a PathInfo message // Try to parse the value into a PathInfo message
let path_info = proto::PathInfo::decode(Bytes::from(cell.value)) let path_info_proto = proto::PathInfo::decode(Bytes::from(cell.value))
.map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {}", e)))?; .map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {}", e)))?;
let store_path = path_info let path_info = PathInfo::try_from(path_info_proto)
.validate() .map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?;
.map_err(|e| Error::StorageError(format!("invalid PathInfo: {}", e)))?;
if store_path.digest() != &digest { if path_info.store_path.digest() != &digest {
return Err(Error::StorageError("PathInfo has unexpected digest".into())); return Err(Error::StorageError("PathInfo has unexpected digest".into()));
} }
@ -248,14 +246,10 @@ impl PathInfoService for BigtablePathInfoService {
#[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))] #[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))]
async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> { async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
let store_path = path_info
.validate()
.map_err(|e| Error::InvalidRequest(format!("pathinfo failed validation: {}", e)))?;
let mut client = self.client.clone(); let mut client = self.client.clone();
let path_info_key = derive_pathinfo_key(store_path.digest()); let path_info_key = derive_pathinfo_key(path_info.store_path.digest());
let data = path_info.encode_to_vec(); let data = proto::PathInfo::from(path_info.clone()).encode_to_vec();
if data.len() as u64 > CELL_SIZE_LIMIT { if data.len() as u64 > CELL_SIZE_LIMIT {
return Err(Error::StorageError( return Err(Error::StorageError(
"PathInfo exceeds cell limit on Bigtable".into(), "PathInfo exceeds cell limit on Bigtable".into(),
@ -340,16 +334,12 @@ impl PathInfoService for BigtablePathInfoService {
} }
// Try to parse the value into a PathInfo message. // Try to parse the value into a PathInfo message.
let path_info = proto::PathInfo::decode(Bytes::from(cell.value)) let path_info_proto = proto::PathInfo::decode(Bytes::from(cell.value))
.map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {}", e)))?; .map_err(|e| Error::StorageError(format!("unable to decode pathinfo proto: {}", e)))?;
// Validate the containing PathInfo, ensure its StorePath digest let path_info = PathInfo::try_from(path_info_proto).map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?;
// matches row key.
let store_path = path_info
.validate()
.map_err(|e| Error::StorageError(format!("invalid PathInfo: {}", e)))?;
let exp_path_info_key = derive_pathinfo_key(store_path.digest()); let exp_path_info_key = derive_pathinfo_key(path_info.store_path.digest());
if exp_path_info_key.as_bytes() != row_key.as_slice() { if exp_path_info_key.as_bytes() != row_key.as_slice() {
Err(Error::StorageError("PathInfo has unexpected digest".into()))? Err(Error::StorageError("PathInfo has unexpected digest".into()))?

View file

@ -1,6 +1,5 @@
use std::sync::Arc; use std::sync::Arc;
use crate::proto::PathInfo;
use futures::stream::BoxStream; use futures::stream::BoxStream;
use nix_compat::nixbase32; use nix_compat::nixbase32;
use tonic::async_trait; use tonic::async_trait;
@ -8,7 +7,7 @@ use tracing::{debug, instrument};
use tvix_castore::composition::{CompositionContext, ServiceBuilder}; use tvix_castore::composition::{CompositionContext, ServiceBuilder};
use tvix_castore::Error; use tvix_castore::Error;
use super::PathInfoService; use super::{PathInfo, PathInfoService};
/// Asks near first, if not found, asks far. /// Asks near first, if not found, asks far.
/// If found in there, returns it, and *inserts* it into /// If found in there, returns it, and *inserts* it into
@ -105,11 +104,9 @@ mod test {
use crate::{ use crate::{
pathinfoservice::{LruPathInfoService, MemoryPathInfoService, PathInfoService}, pathinfoservice::{LruPathInfoService, MemoryPathInfoService, PathInfoService},
tests::fixtures::PATH_INFO_WITH_NARINFO, tests::fixtures::PATH_INFO,
}; };
const PATH_INFO_DIGEST: [u8; 20] = [0; 20];
/// Helper function setting up an instance of a "far" and "near" /// Helper function setting up an instance of a "far" and "near"
/// PathInfoService. /// PathInfoService.
async fn create_pathinfoservice() -> super::Cache<LruPathInfoService, MemoryPathInfoService> { async fn create_pathinfoservice() -> super::Cache<LruPathInfoService, MemoryPathInfoService> {
@ -129,21 +126,25 @@ mod test {
let svc = create_pathinfoservice().await; let svc = create_pathinfoservice().await;
// query the PathInfo, things should not be there. // query the PathInfo, things should not be there.
assert!(svc.get(PATH_INFO_DIGEST).await.unwrap().is_none()); assert!(svc
.get(*PATH_INFO.store_path.digest())
.await
.unwrap()
.is_none());
// insert it into the far one. // insert it into the far one.
svc.far.put(PATH_INFO_WITH_NARINFO.clone()).await.unwrap(); svc.far.put(PATH_INFO.clone()).await.unwrap();
// now try getting it again, it should succeed. // now try getting it again, it should succeed.
assert_eq!( assert_eq!(
Some(PATH_INFO_WITH_NARINFO.clone()), Some(PATH_INFO.clone()),
svc.get(PATH_INFO_DIGEST).await.unwrap() svc.get(*PATH_INFO.store_path.digest()).await.unwrap()
); );
// peek near, it should now be there. // peek near, it should now be there.
assert_eq!( assert_eq!(
Some(PATH_INFO_WITH_NARINFO.clone()), Some(PATH_INFO.clone()),
svc.near.get(PATH_INFO_DIGEST).await.unwrap() svc.near.get(*PATH_INFO.store_path.digest()).await.unwrap()
); );
} }
} }

View file

@ -58,32 +58,20 @@ where
.as_ref() .as_ref()
.get(*store_path.digest()) .get(*store_path.digest())
.await? .await?
.map(|path_info| { .map(|path_info| path_info.node))
let node = path_info
.node
.as_ref()
.expect("missing root node")
.to_owned();
match node.into_name_and_node() {
Ok((_name, node)) => Ok(node),
Err(e) => Err(Error::StorageError(e.to_string())),
}
})
.transpose()?)
} }
fn list(&self) -> BoxStream<Result<(PathComponent, Node), Error>> { fn list(&self) -> BoxStream<Result<(PathComponent, Node), Error>> {
Box::pin(self.0.as_ref().list().map(|result| { Box::pin(self.0.as_ref().list().map(|result| {
result.and_then(|path_info| { result.map(|path_info| {
let node = path_info let basename = path_info.store_path.to_string();
.node (
.as_ref() basename
.expect("missing root node") .as_str()
.to_owned(); .try_into()
.expect("Tvix bug: StorePath must be PathComponent"),
node.into_name_and_node() path_info.node,
.map_err(|e| Error::StorageError(e.to_string())) )
}) })
})) }))
} }

View file

@ -1,7 +1,7 @@
use super::PathInfoService; use super::{PathInfo, PathInfoService};
use crate::{ use crate::{
nar::NarCalculationService, nar::NarCalculationService,
proto::{self, ListPathInfoRequest, PathInfo}, proto::{self, ListPathInfoRequest},
}; };
use async_stream::try_stream; use async_stream::try_stream;
use futures::stream::BoxStream; use futures::stream::BoxStream;
@ -53,15 +53,10 @@ where
.await; .await;
match path_info { match path_info {
Ok(path_info) => { Ok(path_info) => Ok(Some(
let path_info = path_info.into_inner(); PathInfo::try_from(path_info.into_inner())
.map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?,
path_info )),
.validate()
.map_err(|e| Error::StorageError(format!("invalid pathinfo: {}", e)))?;
Ok(Some(path_info))
}
Err(e) if e.code() == Code::NotFound => Ok(None), Err(e) if e.code() == Code::NotFound => Ok(None),
Err(e) => Err(Error::StorageError(e.to_string())), Err(e) => Err(Error::StorageError(e.to_string())),
} }
@ -72,12 +67,12 @@ where
let path_info = self let path_info = self
.grpc_client .grpc_client
.clone() .clone()
.put(path_info) .put(proto::PathInfo::from(path_info))
.await .await
.map_err(|e| Error::StorageError(e.to_string()))? .map_err(|e| Error::StorageError(e.to_string()))?
.into_inner(); .into_inner();
Ok(PathInfo::try_from(path_info)
Ok(path_info) .map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?)
} }
#[instrument(level = "trace", skip_all)] #[instrument(level = "trace", skip_all)]
@ -91,21 +86,8 @@ where
loop { loop {
match stream.message().await { match stream.message().await {
Ok(o) => match o { Ok(Some(path_info)) => yield PathInfo::try_from(path_info).map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?,
Some(pathinfo) => { Ok(None) => return,
// validate the pathinfo
if let Err(e) = pathinfo.validate() {
Err(Error::StorageError(format!(
"pathinfo {:?} failed validation: {}",
pathinfo, e
)))?;
}
yield pathinfo
}
None => {
return;
},
},
Err(e) => Err(Error::StorageError(e.to_string()))?, Err(e) => Err(Error::StorageError(e.to_string()))?,
} }
} }

View file

@ -8,11 +8,10 @@ use tokio::sync::RwLock;
use tonic::async_trait; use tonic::async_trait;
use tracing::instrument; use tracing::instrument;
use crate::proto::PathInfo;
use tvix_castore::composition::{CompositionContext, ServiceBuilder}; use tvix_castore::composition::{CompositionContext, ServiceBuilder};
use tvix_castore::Error; use tvix_castore::Error;
use super::PathInfoService; use super::{PathInfo, PathInfoService};
pub struct LruPathInfoService { pub struct LruPathInfoService {
lru: Arc<RwLock<LruCache<[u8; 20], PathInfo>>>, lru: Arc<RwLock<LruCache<[u8; 20], PathInfo>>>,
@ -35,15 +34,10 @@ impl PathInfoService for LruPathInfoService {
#[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))] #[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))]
async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> { async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
// call validate
let store_path = path_info
.validate()
.map_err(|e| Error::InvalidRequest(format!("invalid PathInfo: {}", e)))?;
self.lru self.lru
.write() .write()
.await .await
.put(*store_path.digest(), path_info.clone()); .put(*path_info.store_path.digest(), path_info.clone());
Ok(path_info) Ok(path_info)
} }
@ -91,40 +85,22 @@ impl ServiceBuilder for LruPathInfoServiceConfig {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use nix_compat::store_path::StorePath;
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
use crate::{ use crate::{
pathinfoservice::{LruPathInfoService, PathInfoService}, pathinfoservice::{LruPathInfoService, PathInfo, PathInfoService},
proto::PathInfo, tests::fixtures::PATH_INFO,
tests::fixtures::PATH_INFO_WITH_NARINFO,
}; };
use lazy_static::lazy_static; use lazy_static::lazy_static;
use tvix_castore::proto as castorepb;
lazy_static! { lazy_static! {
static ref PATHINFO_1: PathInfo = PATH_INFO_WITH_NARINFO.clone();
static ref PATHINFO_1_DIGEST: [u8; 20] = [0; 20];
static ref PATHINFO_2: PathInfo = { static ref PATHINFO_2: PathInfo = {
let mut p = PATHINFO_1.clone(); let mut p = PATH_INFO.clone();
let root_node = p.node.as_mut().unwrap(); p.store_path = StorePath::from_name_and_digest_fixed("dummy", [1; 20]).unwrap();
if let castorepb::Node { node: Some(node) } = root_node {
match node {
castorepb::node::Node::Directory(n) => {
n.name = "11111111111111111111111111111111-dummy2".into()
}
castorepb::node::Node::File(n) => {
n.name = "11111111111111111111111111111111-dummy2".into()
}
castorepb::node::Node::Symlink(n) => {
n.name = "11111111111111111111111111111111-dummy2".into()
}
}
} else {
unreachable!()
}
p p
}; };
static ref PATHINFO_2_DIGEST: [u8; 20] = *(PATHINFO_2.validate().unwrap()).digest(); static ref PATHINFO_2_DIGEST: [u8; 20] = *PATHINFO_2.store_path.digest();
} }
#[tokio::test] #[tokio::test]
@ -133,18 +109,20 @@ mod test {
// pathinfo_1 should not be there // pathinfo_1 should not be there
assert!(svc assert!(svc
.get(*PATHINFO_1_DIGEST) .get(*PATH_INFO.store_path.digest())
.await .await
.expect("no error") .expect("no error")
.is_none()); .is_none());
// insert it // insert it
svc.put(PATHINFO_1.clone()).await.expect("no error"); svc.put(PATH_INFO.clone()).await.expect("no error");
// now it should be there. // now it should be there.
assert_eq!( assert_eq!(
Some(PATHINFO_1.clone()), Some(PATH_INFO.clone()),
svc.get(*PATHINFO_1_DIGEST).await.expect("no error") svc.get(*PATH_INFO.store_path.digest())
.await
.expect("no error")
); );
// insert pathinfo_2. This will evict pathinfo 1 // insert pathinfo_2. This will evict pathinfo 1
@ -158,7 +136,7 @@ mod test {
// … but pathinfo 1 not anymore. // … but pathinfo 1 not anymore.
assert!(svc assert!(svc
.get(*PATHINFO_1_DIGEST) .get(*PATH_INFO.store_path.digest())
.await .await
.expect("no error") .expect("no error")
.is_none()); .is_none());

View file

@ -1,5 +1,4 @@
use super::PathInfoService; use super::{PathInfo, PathInfoService};
use crate::proto::PathInfo;
use async_stream::try_stream; use async_stream::try_stream;
use futures::stream::BoxStream; use futures::stream::BoxStream;
use nix_compat::nixbase32; use nix_compat::nixbase32;
@ -29,23 +28,12 @@ impl PathInfoService for MemoryPathInfoService {
#[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))] #[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))]
async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> { async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
// Call validate on the received PathInfo message. // This overwrites existing PathInfo objects with the same store path digest.
match path_info.validate() {
Err(e) => Err(Error::InvalidRequest(format!(
"failed to validate PathInfo: {}",
e
))),
// In case the PathInfo is valid, and we were able to extract a NixPath, store it in the database.
// This overwrites existing PathInfo objects.
Ok(nix_path) => {
let mut db = self.db.write().await; let mut db = self.db.write().await;
db.insert(*nix_path.digest(), path_info.clone()); db.insert(*path_info.store_path.digest(), path_info.clone());
Ok(path_info) Ok(path_info)
} }
}
}
fn list(&self) -> BoxStream<'static, Result<PathInfo, Error>> { fn list(&self) -> BoxStream<'static, Result<PathInfo, Error>> {
let db = self.db.clone(); let db = self.db.clone();

View file

@ -19,7 +19,7 @@ use tvix_castore::composition::{Registry, ServiceBuilder};
use tvix_castore::Error; use tvix_castore::Error;
use crate::nar::NarCalculationService; use crate::nar::NarCalculationService;
use crate::proto::PathInfo; pub use crate::path_info::PathInfo;
pub use self::combinators::{ pub use self::combinators::{
Cache as CachePathInfoService, CacheConfig as CachePathInfoServiceConfig, Cache as CachePathInfoService, CacheConfig as CachePathInfoServiceConfig,

View file

@ -1,10 +1,11 @@
use super::PathInfoService; use super::{PathInfo, PathInfoService};
use crate::{nar::ingest_nar_and_hash, proto::PathInfo}; use crate::nar::ingest_nar_and_hash;
use futures::{stream::BoxStream, TryStreamExt}; use futures::{stream::BoxStream, TryStreamExt};
use nix_compat::{ use nix_compat::{
narinfo::{self, NarInfo}, narinfo::{self, NarInfo, Signature},
nixbase32, nixbase32,
nixhash::NixHash, nixhash::NixHash,
store_path::StorePath,
}; };
use reqwest::StatusCode; use reqwest::StatusCode;
use std::sync::Arc; use std::sync::Arc;
@ -12,9 +13,7 @@ use tokio::io::{self, AsyncRead};
use tonic::async_trait; use tonic::async_trait;
use tracing::{debug, instrument, warn}; use tracing::{debug, instrument, warn};
use tvix_castore::composition::{CompositionContext, ServiceBuilder}; use tvix_castore::composition::{CompositionContext, ServiceBuilder};
use tvix_castore::{ use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService, Error};
blobservice::BlobService, directoryservice::DirectoryService, proto as castorepb, Error,
};
use url::Url; use url::Url;
/// NixHTTPPathInfoService acts as a bridge in between the Nix HTTP Binary cache /// NixHTTPPathInfoService acts as a bridge in between the Nix HTTP Binary cache
@ -137,12 +136,11 @@ where
} }
} }
// Convert to a (sparse) PathInfo. We still need to populate the node field, // To construct the full PathInfo, we also need to populate the node field,
// and for this we need to download the NAR file. // and for this we need to download the NAR file and ingest it into castore.
// FUTUREWORK: Keep some database around mapping from narsha256 to // FUTUREWORK: Keep some database around mapping from narsha256 to
// (unnamed) rootnode, so we can use that (and the name from the // (unnamed) rootnode, so we can use that (and the name from the
// StorePath) and avoid downloading the same NAR a second time. // StorePath) and avoid downloading the same NAR a second time.
let pathinfo: PathInfo = (&narinfo).into();
// create a request for the NAR file itself. // create a request for the NAR file itself.
let nar_url = self.base_url.join(narinfo.url).map_err(|e| { let nar_url = self.base_url.join(narinfo.url).map_err(|e| {
@ -228,12 +226,18 @@ where
} }
Ok(Some(PathInfo { Ok(Some(PathInfo {
node: Some(castorepb::Node::from_name_and_node( store_path: narinfo.store_path.to_owned(),
narinfo.store_path.to_string().into(), node: root_node,
root_node, references: narinfo.references.iter().map(StorePath::to_owned).collect(),
)), nar_size: narinfo.nar_size,
references: pathinfo.references, nar_sha256: narinfo.nar_hash,
narinfo: pathinfo.narinfo, deriver: narinfo.deriver.as_ref().map(StorePath::to_owned),
signatures: narinfo
.signatures
.into_iter()
.map(|s| Signature::<String>::new(s.name().to_string(), s.bytes().to_owned()))
.collect(),
ca: narinfo.ca,
})) }))
} }

View file

@ -1,5 +1,5 @@
use super::PathInfoService; use super::{PathInfo, PathInfoService};
use crate::proto::PathInfo; use crate::proto;
use data_encoding::BASE64; use data_encoding::BASE64;
use futures::{stream::BoxStream, StreamExt}; use futures::{stream::BoxStream, StreamExt};
use prost::Message; use prost::Message;
@ -78,10 +78,13 @@ impl PathInfoService for RedbPathInfoService {
let table = txn.open_table(PATHINFO_TABLE)?; let table = txn.open_table(PATHINFO_TABLE)?;
match table.get(digest)? { match table.get(digest)? {
Some(pathinfo_bytes) => Ok(Some( Some(pathinfo_bytes) => Ok(Some(
PathInfo::decode(pathinfo_bytes.value().as_slice()).map_err(|e| { proto::PathInfo::decode(pathinfo_bytes.value().as_slice())
.map_err(|e| {
warn!(err=%e, "failed to decode stored PathInfo"); warn!(err=%e, "failed to decode stored PathInfo");
Error::StorageError("failed to decode stored PathInfo".to_string()) Error::StorageError("failed to decode stored PathInfo".to_string())
})?, })?
.try_into()
.map_err(|e| Error::StorageError(format!("Invalid path info: {e}")))?,
)), )),
None => Ok(None), None => Ok(None),
} }
@ -92,25 +95,19 @@ impl PathInfoService for RedbPathInfoService {
#[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))] #[instrument(level = "trace", skip_all, fields(path_info.root_node = ?path_info.node))]
async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> { async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
// Call validate on the received PathInfo message.
let store_path = path_info
.validate()
.map_err(|e| {
warn!(err=%e, "failed to validate PathInfo");
Error::StorageError("failed to validate PathInfo".to_string())
})?
.to_owned();
let path_info_encoded = path_info.encode_to_vec();
let db = self.db.clone(); let db = self.db.clone();
tokio::task::spawn_blocking({ tokio::task::spawn_blocking({
let path_info = path_info.clone();
move || -> Result<(), Error> { move || -> Result<(), Error> {
let txn = db.begin_write()?; let txn = db.begin_write()?;
{ {
let mut table = txn.open_table(PATHINFO_TABLE)?; let mut table = txn.open_table(PATHINFO_TABLE)?;
table table
.insert(store_path.digest(), path_info_encoded) .insert(
*path_info.store_path.digest(),
proto::PathInfo::from(path_info).encode_to_vec(),
)
.map_err(|e| { .map_err(|e| {
warn!(err=%e, "failed to insert PathInfo"); warn!(err=%e, "failed to insert PathInfo");
Error::StorageError("failed to insert PathInfo".to_string()) Error::StorageError("failed to insert PathInfo".to_string())
@ -137,12 +134,18 @@ impl PathInfoService for RedbPathInfoService {
for elem in table.iter()? { for elem in table.iter()? {
let elem = elem?; let elem = elem?;
tokio::runtime::Handle::current() tokio::runtime::Handle::current()
.block_on(tx.send(Ok( .block_on(tx.send(Ok({
PathInfo::decode(elem.1.value().as_slice()).map_err(|e| { let path_info_proto = proto::PathInfo::decode(
elem.1.value().as_slice(),
)
.map_err(|e| {
warn!(err=%e, "invalid PathInfo"); warn!(err=%e, "invalid PathInfo");
Error::StorageError("invalid PathInfo".to_string()) Error::StorageError("invalid PathInfo".to_string())
})?, })?;
))) PathInfo::try_from(path_info_proto).map_err(|e| {
Error::StorageError(format!("Invalid path info: {e}"))
})?
})))
.map_err(|e| Error::StorageError(e.to_string()))?; .map_err(|e| Error::StorageError(e.to_string()))?;
} }

View file

@ -1,7 +1,6 @@
//! This module provides a [PathInfoService] implementation that signs narinfos //! This module provides a [PathInfoService] implementation that signs narinfos
use super::PathInfoService; use super::{PathInfo, PathInfoService};
use crate::proto::PathInfo;
use futures::stream::BoxStream; use futures::stream::BoxStream;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
@ -11,9 +10,9 @@ use tvix_castore::composition::{CompositionContext, ServiceBuilder};
use tvix_castore::Error; use tvix_castore::Error;
use nix_compat::narinfo::{parse_keypair, SigningKey}; use nix_compat::narinfo::{parse_keypair, Signature, SigningKey};
use nix_compat::nixbase32; use nix_compat::nixbase32;
use tracing::{instrument, warn}; use tracing::instrument;
#[cfg(test)] #[cfg(test)]
use super::MemoryPathInfoService; use super::MemoryPathInfoService;
@ -52,22 +51,15 @@ where
} }
async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> { async fn put(&self, path_info: PathInfo) -> Result<PathInfo, Error> {
let store_path = path_info.validate().map_err(|e| { let mut path_info = path_info.clone();
warn!(err=%e, "invalid PathInfo"); let mut nar_info = path_info.to_narinfo();
Error::StorageError(e.to_string())
})?;
let root_node = path_info.node.clone();
// If we have narinfo then sign it, else passthrough to the upper pathinfoservice
let path_info_to_put = match path_info.to_narinfo(store_path.as_ref()) {
Some(mut nar_info) => {
nar_info.add_signature(self.signing_key.as_ref()); nar_info.add_signature(self.signing_key.as_ref());
let mut signed_path_info = PathInfo::from(&nar_info); path_info.signatures = nar_info
signed_path_info.node = root_node; .signatures
signed_path_info .into_iter()
} .map(|s| Signature::<String>::new(s.name().to_string(), s.bytes().to_owned()))
None => path_info, .collect();
}; self.inner.put(path_info).await
self.inner.put(path_info_to_put).await
} }
fn list(&self) -> BoxStream<'static, Result<PathInfo, Error>> { fn list(&self) -> BoxStream<'static, Result<PathInfo, Error>> {
@ -134,51 +126,35 @@ pub const DUMMY_VERIFYING_KEY: &str = "do.not.use:cuXqnuzlWfGTKmfzBPx2kXShjRryZM
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use crate::{ use crate::{pathinfoservice::PathInfoService, tests::fixtures::PATH_INFO};
pathinfoservice::PathInfoService,
proto::PathInfo,
tests::fixtures::{DUMMY_PATH, PATH_INFO_WITH_NARINFO},
};
use nix_compat::narinfo::VerifyingKey; use nix_compat::narinfo::VerifyingKey;
use lazy_static::lazy_static;
use nix_compat::store_path::StorePath;
lazy_static! {
static ref PATHINFO_1: PathInfo = PATH_INFO_WITH_NARINFO.clone();
static ref PATHINFO_1_DIGEST: [u8; 20] = [0; 20];
}
#[tokio::test] #[tokio::test]
async fn put_and_verify_signature() { async fn put_and_verify_signature() {
let svc = super::test_signing_service(); let svc = super::test_signing_service();
// pathinfo_1 should not be there ... // pathinfo_1 should not be there ...
assert!(svc assert!(svc
.get(*PATHINFO_1_DIGEST) .get(*PATH_INFO.store_path.digest())
.await .await
.expect("no error") .expect("no error")
.is_none()); .is_none());
// ... and not be signed // ... and not be signed
assert!(PATHINFO_1.narinfo.clone().unwrap().signatures.is_empty()); assert!(PATH_INFO.signatures.is_empty());
// insert it // insert it
svc.put(PATHINFO_1.clone()).await.expect("no error"); svc.put(PATH_INFO.clone()).await.expect("no error");
// now it should be there ... // now it should be there ...
let signed = svc let signed = svc
.get(*PATHINFO_1_DIGEST) .get(*PATH_INFO.store_path.digest())
.await .await
.expect("no error") .expect("no error")
.unwrap(); .unwrap();
// and signed // and signed
let narinfo = signed let narinfo = signed.to_narinfo();
.to_narinfo(
StorePath::from_bytes(DUMMY_PATH.as_bytes()).expect("DUMMY_PATH to be parsed"),
)
.expect("no error");
let fp = narinfo.fingerprint(); let fp = narinfo.fingerprint();
// load our keypair from the fixtures // load our keypair from the fixtures

View file

@ -6,12 +6,10 @@ use futures::TryStreamExt;
use rstest::*; use rstest::*;
use rstest_reuse::{self, *}; use rstest_reuse::{self, *};
use super::PathInfoService; use super::{PathInfo, PathInfoService};
use crate::pathinfoservice::redb::RedbPathInfoService; use crate::pathinfoservice::redb::RedbPathInfoService;
use crate::pathinfoservice::MemoryPathInfoService; use crate::pathinfoservice::MemoryPathInfoService;
use crate::proto::PathInfo; use crate::tests::fixtures::{DUMMY_PATH_DIGEST, PATH_INFO};
use crate::tests::fixtures::DUMMY_PATH_DIGEST;
use tvix_castore::proto as castorepb;
use crate::pathinfoservice::test_signing_service; use crate::pathinfoservice::test_signing_service;
@ -52,32 +50,35 @@ async fn not_found(svc: impl PathInfoService) {
#[apply(path_info_services)] #[apply(path_info_services)]
#[tokio::test] #[tokio::test]
async fn put_get(svc: impl PathInfoService) { async fn put_get(svc: impl PathInfoService) {
let path_info = PathInfo {
node: Some(castorepb::Node {
node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode {
name: "00000000000000000000000000000000-foo".into(),
target: "doesntmatter".into(),
})),
}),
..Default::default()
};
// insert // insert
let resp = svc.put(path_info.clone()).await.expect("must succeed"); let resp = svc.put(PATH_INFO.clone()).await.expect("must succeed");
// expect the returned PathInfo to be equal (for now) // expect the returned PathInfo to be equal,
// in the future, some stores might add additional fields/signatures. // remove the signatures as the SigningPathInfoService adds them
assert_eq!(path_info, resp); assert_eq!(*PATH_INFO, strip_signatures(resp));
// get it back // get it back
let resp = svc.get(DUMMY_PATH_DIGEST).await.expect("must succeed"); let resp = svc.get(DUMMY_PATH_DIGEST).await.expect("must succeed");
assert_eq!(Some(path_info.clone()), resp); assert_eq!(Some(PATH_INFO.clone()), resp.map(strip_signatures));
// Ensure the listing endpoint works, and returns the same path_info. // Ensure the listing endpoint works, and returns the same path_info.
// FUTUREWORK: split this, some impls might (rightfully) not support listing // FUTUREWORK: split this, some impls might (rightfully) not support listing
let pathinfos: Vec<PathInfo> = svc.list().try_collect().await.expect("must succeed"); let pathinfos: Vec<PathInfo> = svc.list().try_collect().await.expect("must succeed");
// We should get a single pathinfo back, the one we inserted. // We should get a single pathinfo back, the one we inserted.
assert_eq!(vec![path_info], pathinfos); assert_eq!(
vec![PATH_INFO.clone()],
pathinfos
.into_iter()
.map(strip_signatures)
.collect::<Vec<_>>()
);
}
fn strip_signatures(path_info: PathInfo) -> PathInfo {
PathInfo {
signatures: vec![],
..path_info
}
} }

View file

@ -1,5 +1,5 @@
use crate::nar::{NarCalculationService, RenderError}; use crate::nar::{NarCalculationService, RenderError};
use crate::pathinfoservice::PathInfoService; use crate::pathinfoservice::{PathInfo, PathInfoService};
use crate::proto; use crate::proto;
use futures::{stream::BoxStream, TryStreamExt}; use futures::{stream::BoxStream, TryStreamExt};
use std::ops::Deref; use std::ops::Deref;
@ -44,7 +44,7 @@ where
.map_err(|_e| Status::invalid_argument("invalid output digest length"))?; .map_err(|_e| Status::invalid_argument("invalid output digest length"))?;
match self.path_info_service.get(digest).await { match self.path_info_service.get(digest).await {
Ok(None) => Err(Status::not_found("PathInfo not found")), Ok(None) => Err(Status::not_found("PathInfo not found")),
Ok(Some(path_info)) => Ok(Response::new(path_info)), Ok(Some(path_info)) => Ok(Response::new(proto::PathInfo::from(path_info))),
Err(e) => { Err(e) => {
warn!(err = %e, "failed to get PathInfo"); warn!(err = %e, "failed to get PathInfo");
Err(e.into()) Err(e.into())
@ -56,12 +56,15 @@ where
#[instrument(skip_all)] #[instrument(skip_all)]
async fn put(&self, request: Request<proto::PathInfo>) -> Result<Response<proto::PathInfo>> { async fn put(&self, request: Request<proto::PathInfo>) -> Result<Response<proto::PathInfo>> {
let path_info = request.into_inner(); let path_info_proto = request.into_inner();
let path_info = PathInfo::try_from(path_info_proto)
.map_err(|e| Status::invalid_argument(format!("Invalid path info: {e}")))?;
// Store the PathInfo in the client. Clients MUST validate the data // Store the PathInfo in the client. Clients MUST validate the data
// they receive, so we don't validate additionally here. // they receive, so we don't validate additionally here.
match self.path_info_service.put(path_info).await { match self.path_info_service.put(path_info).await {
Ok(path_info_new) => Ok(Response::new(path_info_new)), Ok(path_info_new) => Ok(Response::new(proto::PathInfo::from(path_info_new))),
Err(e) => { Err(e) => {
warn!(err = %e, "failed to put PathInfo"); warn!(err = %e, "failed to put PathInfo");
Err(e.into()) Err(e.into())
@ -99,6 +102,7 @@ where
let stream = Box::pin( let stream = Box::pin(
self.path_info_service self.path_info_service
.list() .list()
.map_ok(proto::PathInfo::from)
.map_err(|e| Status::internal(e.to_string())), .map_err(|e| Status::internal(e.to_string())),
); );

View file

@ -4,7 +4,7 @@ use bytes::Bytes;
use data_encoding::BASE64; use data_encoding::BASE64;
// https://github.com/hyperium/tonic/issues/1056 // https://github.com/hyperium/tonic/issues/1056
use nix_compat::{ use nix_compat::{
narinfo::Flags, narinfo::{Signature, SignatureError},
nixhash::{CAHash, NixHash}, nixhash::{CAHash, NixHash},
store_path::{self, StorePathRef}, store_path::{self, StorePathRef},
}; };
@ -17,6 +17,8 @@ pub use grpc_pathinfoservice_wrapper::GRPCPathInfoServiceWrapper;
tonic::include_proto!("tvix.store.v1"); tonic::include_proto!("tvix.store.v1");
use tvix_castore::proto as castorepb;
#[cfg(feature = "tonic-reflection")] #[cfg(feature = "tonic-reflection")]
/// Compiled file descriptors for implementing [gRPC /// Compiled file descriptors for implementing [gRPC
/// reflection](https://github.com/grpc/grpc/blob/master/doc/server-reflection.md) with e.g. /// reflection](https://github.com/grpc/grpc/blob/master/doc/server-reflection.md) with e.g.
@ -70,183 +72,18 @@ pub enum ValidatePathInfoError {
/// The deriver field is invalid. /// The deriver field is invalid.
#[error("deriver field is invalid: {0}")] #[error("deriver field is invalid: {0}")]
InvalidDeriverField(store_path::Error), InvalidDeriverField(store_path::Error),
}
/// Parses a root node name. /// The narinfo field is missing
/// #[error("The narinfo field is missing")]
/// On success, this returns the parsed [store_path::StorePathRef]. NarInfoFieldMissing,
/// On error, it returns an error generated from the supplied constructor.
fn parse_node_name_root<E>(
name: &[u8],
err: fn(Vec<u8>, store_path::Error) -> E,
) -> Result<store_path::StorePathRef<'_>, E> {
store_path::StorePathRef::from_bytes(name).map_err(|e| err(name.to_vec(), e))
}
impl PathInfo { /// The ca field is invalid
/// validate performs some checks on the PathInfo struct, #[error("The ca field is invalid: {0}")]
/// Returning either a [store_path::StorePath] of the root node, or a InvalidCaField(ConvertCAError),
/// [ValidatePathInfoError].
pub fn validate(&self) -> Result<store_path::StorePath<String>, ValidatePathInfoError> {
// ensure the references have the right number of bytes.
for (i, reference) in self.references.iter().enumerate() {
if reference.len() != store_path::DIGEST_SIZE {
return Err(ValidatePathInfoError::InvalidReferenceDigestLen(
i,
reference.len(),
));
}
}
// If there is a narinfo field populated… /// The signature at position is invalid
if let Some(narinfo) = &self.narinfo { #[error("The signature at position {0} is invalid: {1}")]
// ensure the nar_sha256 digest has the correct length. InvalidSignature(usize, SignatureError),
if narinfo.nar_sha256.len() != 32 {
return Err(ValidatePathInfoError::InvalidNarSha256DigestLen(
narinfo.nar_sha256.len(),
));
}
// ensure the number of references there matches PathInfo.references count.
if narinfo.reference_names.len() != self.references.len() {
return Err(ValidatePathInfoError::InconsistentNumberOfReferences(
self.references.len(),
narinfo.reference_names.len(),
));
}
// parse references in reference_names.
for (i, reference_name_str) in narinfo.reference_names.iter().enumerate() {
// ensure thy parse as (non-absolute) store path
let reference_names_store_path = store_path::StorePathRef::from_bytes(
reference_name_str.as_bytes(),
)
.map_err(|_| {
ValidatePathInfoError::InvalidNarinfoReferenceName(
i,
reference_name_str.to_owned(),
)
})?;
// ensure their digest matches the one at self.references[i].
{
// This is safe, because we ensured the proper length earlier already.
let reference_digest = self.references[i].to_vec().try_into().unwrap();
if reference_names_store_path.digest() != &reference_digest {
return Err(
ValidatePathInfoError::InconsistentNarinfoReferenceNameDigest(
i,
reference_digest,
*reference_names_store_path.digest(),
),
);
}
}
// If the Deriver field is populated, ensure it parses to a
// [store_path::StorePath].
// We can't check for it to *not* end with .drv, as the .drv files produced by
// recursive Nix end with multiple .drv suffixes, and only one is popped when
// converting to this field.
if let Some(deriver) = &narinfo.deriver {
store_path::StorePathRef::from_name_and_digest(&deriver.name, &deriver.digest)
.map_err(ValidatePathInfoError::InvalidDeriverField)?;
}
}
}
// Ensure there is a (root) node present, and it properly parses to a [store_path::StorePath].
let root_nix_path = match &self.node {
None => Err(ValidatePathInfoError::NoNodePresent)?,
Some(node) => {
// NOTE: We could have some PathComponent not allocating here,
// so this can return StorePathRef.
// However, as this will get refactored away to stricter types
// soon anyways, there's no point.
let (name, _node) = node
.clone()
.into_name_and_node()
.map_err(ValidatePathInfoError::InvalidRootNode)?;
// parse the name of the node itself and return
parse_node_name_root(name.as_ref(), ValidatePathInfoError::InvalidNodeName)?
.to_owned()
}
};
// return the root nix path
Ok(root_nix_path)
}
/// With self and its store path name, this reconstructs a
/// [nix_compat::narinfo::NarInfo<'_>].
/// It can be used to validate Signatures, or get back a (sparse) NarInfo
/// struct to prepare writing it out.
///
/// It assumes self to be validated first, and will only return None if the
/// `narinfo` field is unpopulated.
///
/// It does very little allocation (a Vec each for `signatures` and
/// `references`), the rest points to data owned elsewhere.
///
/// Keep in mind this is not able to reconstruct all data present in the
/// NarInfo<'_>, as some of it is not stored at all:
/// - the `system`, `file_hash` and `file_size` fields are set to `None`.
/// - the URL is set to an empty string.
/// - Compression is set to "none"
///
/// If you want to render it out to a string and be able to parse it back
/// in, at least URL *must* be set again.
pub fn to_narinfo<'a>(
&'a self,
store_path: store_path::StorePathRef<'a>,
) -> Option<nix_compat::narinfo::NarInfo<'_>> {
let narinfo = &self.narinfo.as_ref()?;
Some(nix_compat::narinfo::NarInfo {
flags: Flags::empty(),
store_path,
nar_hash: narinfo
.nar_sha256
.as_ref()
.try_into()
.expect("invalid narhash"),
nar_size: narinfo.nar_size,
references: narinfo
.reference_names
.iter()
.map(|ref_name| {
// This shouldn't pass validation
StorePathRef::from_bytes(ref_name.as_bytes()).expect("invalid reference")
})
.collect(),
signatures: narinfo
.signatures
.iter()
.map(|sig| {
nix_compat::narinfo::SignatureRef::new(
&sig.name,
// This shouldn't pass validation
sig.data[..].try_into().expect("invalid signature len"),
)
})
.collect(),
ca: narinfo
.ca
.as_ref()
.map(|ca| ca.try_into().expect("invalid ca")),
system: None,
deriver: narinfo.deriver.as_ref().map(|deriver| {
StorePathRef::from_name_and_digest(&deriver.name, &deriver.digest)
.expect("invalid deriver")
}),
url: "",
compression: Some("none"),
file_hash: None,
file_size: None,
})
}
} }
/// Errors that can occur when converting from a [nar_info::Ca] to a (stricter) /// Errors that can occur when converting from a [nar_info::Ca] to a (stricter)
@ -341,45 +178,154 @@ impl From<&nix_compat::nixhash::CAHash> for nar_info::Ca {
} }
} }
impl From<&nix_compat::narinfo::NarInfo<'_>> for NarInfo { impl From<crate::pathinfoservice::PathInfo> for PathInfo {
/// Converts from a NarInfo (returned from the NARInfo parser) to the proto- fn from(value: crate::pathinfoservice::PathInfo) -> Self {
/// level NarInfo struct. Self {
fn from(value: &nix_compat::narinfo::NarInfo<'_>) -> Self { node: Some(castorepb::Node::from_name_and_node(
let signatures = value value.store_path.to_string().into_bytes().into(),
value.node,
)),
references: value
.references
.iter()
.map(|reference| Bytes::copy_from_slice(reference.digest()))
.collect(),
narinfo: Some(NarInfo {
nar_size: value.nar_size,
nar_sha256: Bytes::copy_from_slice(&value.nar_sha256),
signatures: value
.signatures .signatures
.iter() .iter()
.map(|sig| nar_info::Signature { .map(|sig| nar_info::Signature {
name: sig.name().to_string(), name: sig.name().to_string(),
data: Bytes::copy_from_slice(sig.bytes()), data: Bytes::copy_from_slice(sig.bytes()),
}) })
.collect(); .collect(),
NarInfo {
nar_size: value.nar_size,
nar_sha256: Bytes::copy_from_slice(&value.nar_hash),
signatures,
reference_names: value.references.iter().map(|r| r.to_string()).collect(), reference_names: value.references.iter().map(|r| r.to_string()).collect(),
deriver: value.deriver.as_ref().map(|sp| StorePath { deriver: value.deriver.as_ref().map(|sp| StorePath {
name: (*sp.name()).to_owned(), name: (*sp.name()).to_owned(),
digest: Bytes::copy_from_slice(sp.digest()), digest: Bytes::copy_from_slice(sp.digest()),
}), }),
ca: value.ca.as_ref().map(|ca| ca.into()), ca: value.ca.as_ref().map(|ca| ca.into()),
}),
} }
} }
} }
impl From<&nix_compat::narinfo::NarInfo<'_>> for PathInfo { impl TryFrom<PathInfo> for crate::pathinfoservice::PathInfo {
/// Converts from a NarInfo (returned from the NARInfo parser) to a PathInfo type Error = ValidatePathInfoError;
/// struct with the node set to None. fn try_from(value: PathInfo) -> Result<Self, Self::Error> {
fn from(value: &nix_compat::narinfo::NarInfo<'_>) -> Self { let narinfo = value
Self { .narinfo
node: None, .ok_or_else(|| ValidatePathInfoError::NarInfoFieldMissing)?;
references: value
.references // ensure the references have the right number of bytes.
.iter() for (i, reference) in value.references.iter().enumerate() {
.map(|x| Bytes::copy_from_slice(x.digest())) if reference.len() != store_path::DIGEST_SIZE {
.collect(), return Err(ValidatePathInfoError::InvalidReferenceDigestLen(
narinfo: Some(value.into()), i,
reference.len(),
));
}
}
// ensure the number of references there matches PathInfo.references count.
if narinfo.reference_names.len() != value.references.len() {
return Err(ValidatePathInfoError::InconsistentNumberOfReferences(
value.references.len(),
narinfo.reference_names.len(),
));
}
// parse references in reference_names.
let mut references = vec![];
for (i, reference_name_str) in narinfo.reference_names.iter().enumerate() {
// ensure thy parse as (non-absolute) store path
let reference_names_store_path =
StorePathRef::from_bytes(reference_name_str.as_bytes()).map_err(|_| {
ValidatePathInfoError::InvalidNarinfoReferenceName(
i,
reference_name_str.to_owned(),
)
})?;
// ensure their digest matches the one at self.references[i].
{
// This is safe, because we ensured the proper length earlier already.
let reference_digest = value.references[i].to_vec().try_into().unwrap();
if reference_names_store_path.digest() != &reference_digest {
return Err(
ValidatePathInfoError::InconsistentNarinfoReferenceNameDigest(
i,
reference_digest,
*reference_names_store_path.digest(),
),
);
} else {
references.push(reference_names_store_path.to_owned());
} }
} }
} }
let nar_sha256_length = narinfo.nar_sha256.len();
// split value.node into the name and node components
let (name, node) = value
.node
.ok_or_else(|| ValidatePathInfoError::NoNodePresent)?
.into_name_and_node()
.map_err(ValidatePathInfoError::InvalidRootNode)?;
Ok(Self {
// value.node has a valid name according to the castore model but might not parse to a
// [StorePath]
store_path: nix_compat::store_path::StorePath::from_bytes(name.as_ref()).map_err(
|err| ValidatePathInfoError::InvalidNodeName(name.as_ref().to_vec(), err),
)?,
node,
references,
nar_size: narinfo.nar_size,
nar_sha256: narinfo.nar_sha256.to_vec()[..]
.try_into()
.map_err(|_| ValidatePathInfoError::InvalidNarSha256DigestLen(nar_sha256_length))?,
// If the Deriver field is populated, ensure it parses to a
// [StorePath].
// We can't check for it to *not* end with .drv, as the .drv files produced by
// recursive Nix end with multiple .drv suffixes, and only one is popped when
// converting to this field.
deriver: narinfo
.deriver
.map(|deriver| {
nix_compat::store_path::StorePath::from_name_and_digest(
&deriver.name,
&deriver.digest,
)
.map_err(ValidatePathInfoError::InvalidDeriverField)
})
.transpose()?,
signatures: narinfo
.signatures
.into_iter()
.enumerate()
.map(|(i, signature)| {
signature.data.to_vec()[..]
.try_into()
.map_err(|_| {
ValidatePathInfoError::InvalidSignature(
i,
SignatureError::InvalidSignatureLen(signature.data.len()),
)
})
.map(|signature_data| Signature::new(signature.name, signature_data))
})
.collect::<Result<Vec<_>, ValidatePathInfoError>>()?,
ca: narinfo
.ca
.as_ref()
.map(TryFrom::try_from)
.transpose()
.map_err(ValidatePathInfoError::InvalidCaField)?,
})
}
}

View file

@ -1,274 +1,226 @@
use crate::proto::{nar_info::Signature, NarInfo, PathInfo, ValidatePathInfoError}; use crate::pathinfoservice::PathInfo;
use crate::tests::fixtures::*; use crate::proto::{self, ValidatePathInfoError};
use crate::tests::fixtures::{DUMMY_PATH, DUMMY_PATH_DIGEST, DUMMY_PATH_STR};
use bytes::Bytes; use bytes::Bytes;
use data_encoding::BASE64; use lazy_static::lazy_static;
use nix_compat::nixbase32; use nix_compat::store_path;
use nix_compat::store_path::{self, StorePath, StorePathRef};
use rstest::rstest; use rstest::rstest;
use tvix_castore::fixtures::DUMMY_DIGEST;
use tvix_castore::proto as castorepb; use tvix_castore::proto as castorepb;
use tvix_castore::{DirectoryError, ValidateNodeError}; use tvix_castore::{DirectoryError, ValidateNodeError};
#[rstest] lazy_static! {
#[case::no_node(None, Err(ValidatePathInfoError::NoNodePresent))] /// A valid PathInfo message
#[case::no_node_2(Some(castorepb::Node { node: None}), Err(ValidatePathInfoError::InvalidRootNode(DirectoryError::NoNodeSet)))] /// The references in `narinfo.reference_names` aligns with what's in
/// `references`.
static ref PROTO_PATH_INFO : proto::PathInfo = proto::PathInfo {
node: Some(castorepb::Node {
node: Some(castorepb::node::Node::Directory(castorepb::DirectoryNode {
name: DUMMY_PATH_STR.into(),
digest: DUMMY_DIGEST.clone().into(),
size: 0,
})),
}),
references: vec![DUMMY_PATH_DIGEST.as_slice().into()],
narinfo: Some(proto::NarInfo {
nar_size: 0,
nar_sha256: DUMMY_DIGEST.clone().into(),
signatures: vec![],
reference_names: vec![DUMMY_PATH_STR.to_string()],
deriver: None,
ca: Some(proto::nar_info::Ca { r#type: proto::nar_info::ca::Hash::NarSha256.into(), digest: DUMMY_DIGEST.clone().into() })
}),
};
}
fn validate_pathinfo( #[test]
fn convert_valid() {
let path_info = PROTO_PATH_INFO.clone();
PathInfo::try_from(path_info).expect("must succeed");
}
/// Create a PathInfo with a correct deriver field and ensure it succeeds.
#[test]
fn convert_valid_deriver() {
let mut path_info = PROTO_PATH_INFO.clone();
// add a valid deriver
let narinfo = path_info.narinfo.as_mut().unwrap();
narinfo.deriver = Some(crate::proto::StorePath {
name: DUMMY_PATH.name().to_string(),
digest: Bytes::from(DUMMY_PATH_DIGEST.as_slice()),
});
let path_info = PathInfo::try_from(path_info).expect("must succeed");
assert_eq!(DUMMY_PATH.clone(), path_info.deriver.unwrap())
}
#[rstest]
#[case::no_node(None, ValidatePathInfoError::NoNodePresent)]
#[case::no_node_2(Some(castorepb::Node { node: None}), ValidatePathInfoError::InvalidRootNode(DirectoryError::NoNodeSet))]
fn convert_pathinfo_wrong_nodes(
#[case] node: Option<castorepb::Node>, #[case] node: Option<castorepb::Node>,
#[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>, #[case] exp_err: ValidatePathInfoError,
) { ) {
// construct the PathInfo object // construct the PathInfo object
let p = PathInfo { let mut path_info = PROTO_PATH_INFO.clone();
node, path_info.node = node;
..Default::default()
};
assert_eq!(exp_result, p.validate()); assert_eq!(
exp_err,
PathInfo::try_from(path_info).expect_err("must fail")
);
} }
/// Constructs a [proto::PathInfo] with root nodes that have wrong data in
/// various places, causing the conversion to [PathInfo] to fail.
#[rstest] #[rstest]
#[case::ok(castorepb::DirectoryNode { #[case::directory_invalid_digest_length(
name: DUMMY_PATH.into(), castorepb::node::Node::Directory(castorepb::DirectoryNode {
digest: DUMMY_DIGEST.clone().into(), name: DUMMY_PATH_STR.into(),
size: 0,
}, Ok(StorePath::from_bytes(DUMMY_PATH.as_bytes()).unwrap()))]
#[case::invalid_digest_length(castorepb::DirectoryNode {
name: DUMMY_PATH.into(),
digest: Bytes::new(), digest: Bytes::new(),
size: 0, size: 0,
}, Err(ValidatePathInfoError::InvalidRootNode(DirectoryError::InvalidNode(DUMMY_PATH.into(), ValidateNodeError::InvalidDigestLen(0)))))] }),
#[case::invalid_node_name_no_storepath(castorepb::DirectoryNode { ValidatePathInfoError::InvalidRootNode(DirectoryError::InvalidNode(DUMMY_PATH_STR.into(), ValidateNodeError::InvalidDigestLen(0)))
)]
#[case::directory_invalid_node_name_no_storepath(
castorepb::node::Node::Directory(castorepb::DirectoryNode {
name: "invalid".into(), name: "invalid".into(),
digest: DUMMY_DIGEST.clone().into(), digest: DUMMY_DIGEST.clone().into(),
size: 0, size: 0,
}, Err(ValidatePathInfoError::InvalidNodeName(
"invalid".into(),
store_path::Error::InvalidLength
)))]
fn validate_directory(
#[case] directory_node: castorepb::DirectoryNode,
#[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>,
) {
// construct the PathInfo object
let p = PathInfo {
node: Some(castorepb::Node {
node: Some(castorepb::node::Node::Directory(directory_node)),
}), }),
..Default::default() ValidatePathInfoError::InvalidNodeName("invalid".into(), store_path::Error::InvalidLength)
};
assert_eq!(exp_result, p.validate());
}
#[rstest]
#[case::ok(
castorepb::FileNode {
name: DUMMY_PATH.into(),
digest: DUMMY_DIGEST.clone().into(),
size: 0,
executable: false,
},
Ok(StorePath::from_bytes(DUMMY_PATH.as_bytes()).unwrap())
)] )]
#[case::invalid_digest_len( #[case::file_invalid_digest_len(
castorepb::FileNode { castorepb::node::Node::File(castorepb::FileNode {
name: DUMMY_PATH.into(), name: DUMMY_PATH_STR.into(),
digest: Bytes::new(), digest: Bytes::new(),
..Default::default() ..Default::default()
}, }),
Err(ValidatePathInfoError::InvalidRootNode(DirectoryError::InvalidNode(DUMMY_PATH.into(), ValidateNodeError::InvalidDigestLen(0)))) ValidatePathInfoError::InvalidRootNode(DirectoryError::InvalidNode(DUMMY_PATH_STR.into(), ValidateNodeError::InvalidDigestLen(0)))
)] )]
#[case::invalid_node_name( #[case::file_invalid_node_name(
castorepb::FileNode { castorepb::node::Node::File(castorepb::FileNode {
name: "invalid".into(), name: "invalid".into(),
digest: DUMMY_DIGEST.clone().into(), digest: DUMMY_DIGEST.clone().into(),
..Default::default() ..Default::default()
}, }),
Err(ValidatePathInfoError::InvalidNodeName( ValidatePathInfoError::InvalidNodeName(
"invalid".into(), "invalid".into(),
store_path::Error::InvalidLength store_path::Error::InvalidLength
)) )
)] )]
fn validate_file( #[case::symlink_invalid_node_name(
#[case] file_node: castorepb::FileNode, castorepb::node::Node::Symlink(castorepb::SymlinkNode {
#[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>,
) {
// construct the PathInfo object
let p = PathInfo {
node: Some(castorepb::Node {
node: Some(castorepb::node::Node::File(file_node)),
}),
..Default::default()
};
assert_eq!(exp_result, p.validate());
}
#[rstest]
#[case::ok(
castorepb::SymlinkNode {
name: DUMMY_PATH.into(),
target: "foo".into(),
},
Ok(StorePath::from_bytes(DUMMY_PATH.as_bytes()).unwrap())
)]
#[case::invalid_node_name(
castorepb::SymlinkNode {
name: "invalid".into(), name: "invalid".into(),
target: "foo".into(), target: "foo".into(),
}, }),
Err(ValidatePathInfoError::InvalidNodeName( ValidatePathInfoError::InvalidNodeName(
"invalid".into(), "invalid".into(),
store_path::Error::InvalidLength store_path::Error::InvalidLength
)) )
)] )]
fn validate_symlink( fn convert_fail_node(#[case] node: castorepb::node::Node, #[case] exp_err: ValidatePathInfoError) {
#[case] symlink_node: castorepb::SymlinkNode, // construct the proto::PathInfo object
#[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>, let mut p = PROTO_PATH_INFO.clone();
) { p.node = Some(castorepb::Node { node: Some(node) });
// construct the PathInfo object
let p = PathInfo { assert_eq!(exp_err, PathInfo::try_from(p).expect_err("must fail"));
node: Some(castorepb::Node {
node: Some(castorepb::node::Node::Symlink(symlink_node)),
}),
..Default::default()
};
assert_eq!(exp_result, p.validate());
} }
/// Ensure parsing a correct PathInfo without narinfo populated succeeds. /// Ensure a PathInfo without narinfo populated fails converting!
#[test] #[test]
fn validate_references_without_narinfo_ok() { fn convert_without_narinfo_fail() {
assert!(PATH_INFO_WITHOUT_NARINFO.validate().is_ok()); let mut path_info = PROTO_PATH_INFO.clone();
} path_info.narinfo = None;
/// Ensure parsing a correct PathInfo with narinfo populated succeeds. assert_eq!(
#[test] ValidatePathInfoError::NarInfoFieldMissing,
fn validate_references_with_narinfo_ok() { PathInfo::try_from(path_info).expect_err("must fail"),
assert!(PATH_INFO_WITH_NARINFO.validate().is_ok()); );
} }
/// Create a PathInfo with a wrong digest length in narinfo.nar_sha256, and /// Create a PathInfo with a wrong digest length in narinfo.nar_sha256, and
/// ensure validation fails. /// ensure conversion fails.
#[test] #[test]
fn validate_wrong_nar_sha256() { fn convert_wrong_nar_sha256() {
let mut path_info = PATH_INFO_WITH_NARINFO.clone(); let mut path_info = PROTO_PATH_INFO.clone();
path_info.narinfo.as_mut().unwrap().nar_sha256 = vec![0xbe, 0xef].into(); path_info.narinfo.as_mut().unwrap().nar_sha256 = vec![0xbe, 0xef].into();
match path_info.validate().expect_err("must_fail") { assert_eq!(
ValidatePathInfoError::InvalidNarSha256DigestLen(2) => {} ValidatePathInfoError::InvalidNarSha256DigestLen(2),
e => panic!("unexpected error: {:?}", e), PathInfo::try_from(path_info).expect_err("must fail")
}; );
} }
/// Create a PathInfo with a wrong count of narinfo.reference_names, /// Create a PathInfo with a wrong count of narinfo.reference_names,
/// and ensure validation fails. /// and ensure validation fails.
#[test] #[test]
fn validate_inconsistent_num_refs_fail() { fn convert_inconsistent_num_refs_fail() {
let mut path_info = PATH_INFO_WITH_NARINFO.clone(); let mut path_info = PROTO_PATH_INFO.clone();
path_info.narinfo.as_mut().unwrap().reference_names = vec![]; path_info.narinfo.as_mut().unwrap().reference_names = vec![];
match path_info.validate().expect_err("must_fail") { assert_eq!(
ValidatePathInfoError::InconsistentNumberOfReferences(1, 0) => {} ValidatePathInfoError::InconsistentNumberOfReferences(1, 0),
e => panic!("unexpected error: {:?}", e), PathInfo::try_from(path_info).expect_err("must fail")
}; );
} }
/// Create a PathInfo with a wrong digest length in references. /// Create a PathInfo with a wrong digest length in references.
#[test] #[test]
fn validate_invalid_reference_digest_len() { fn convert_invalid_reference_digest_len() {
let mut path_info = PATH_INFO_WITHOUT_NARINFO.clone(); let mut path_info = PROTO_PATH_INFO.clone();
path_info.references.push(vec![0xff, 0xff].into()); path_info.references.push(vec![0xff, 0xff].into());
match path_info.validate().expect_err("must fail") { assert_eq!(
ValidatePathInfoError::InvalidReferenceDigestLen( ValidatePathInfoError::InvalidReferenceDigestLen(
1, // position 1, // position
2, // unexpected digest len 2, // unexpected digest len
) => {} ),
e => panic!("unexpected error: {:?}", e), PathInfo::try_from(path_info).expect_err("must fail")
}; );
} }
/// Create a PathInfo with a narinfo.reference_name[1] that is no valid store path. /// Create a PathInfo with a narinfo.reference_name[1] that is no valid store path.
#[test] #[test]
fn validate_invalid_narinfo_reference_name() { fn convert_invalid_narinfo_reference_name() {
let mut path_info = PATH_INFO_WITH_NARINFO.clone(); let mut path_info = PROTO_PATH_INFO.clone();
// This is invalid, as the store prefix is not part of reference_names. // This is invalid, as the store prefix is not part of reference_names.
path_info.narinfo.as_mut().unwrap().reference_names[0] = path_info.narinfo.as_mut().unwrap().reference_names[0] =
"/nix/store/00000000000000000000000000000000-dummy".to_string(); "/nix/store/00000000000000000000000000000000-dummy".to_string();
match path_info.validate().expect_err("must fail") {
ValidatePathInfoError::InvalidNarinfoReferenceName(0, reference_name) => {
assert_eq!( assert_eq!(
"/nix/store/00000000000000000000000000000000-dummy", ValidatePathInfoError::InvalidNarinfoReferenceName(
reference_name 0,
"/nix/store/00000000000000000000000000000000-dummy".to_string()
),
PathInfo::try_from(path_info).expect_err("must fail")
); );
} }
e => panic!("unexpected error: {:?}", e),
}
}
/// Create a PathInfo with a narinfo.reference_name[0] that doesn't match references[0]. /// Create a PathInfo with a narinfo.reference_name[0] that doesn't match references[0].
#[test] #[test]
fn validate_inconsistent_narinfo_reference_name_digest() { fn convert_inconsistent_narinfo_reference_name_digest() {
let mut path_info = PATH_INFO_WITH_NARINFO.clone(); let mut path_info = PROTO_PATH_INFO.clone();
// mutate the first reference, they were all zeroes before // mutate the first reference, they were all zeroes before
path_info.references[0] = vec![0xff; store_path::DIGEST_SIZE].into(); path_info.references[0] = vec![0xff; store_path::DIGEST_SIZE].into();
match path_info.validate().expect_err("must fail") { assert_eq!(
ValidatePathInfoError::InconsistentNarinfoReferenceNameDigest(0, e_expected, e_actual) => { ValidatePathInfoError::InconsistentNarinfoReferenceNameDigest(
assert_eq!(path_info.references[0][..], e_expected[..]); 0,
assert_eq!(DUMMY_PATH_DIGEST, e_actual); path_info.references[0][..].try_into().unwrap(),
} DUMMY_PATH_DIGEST
e => panic!("unexpected error: {:?}", e), ),
} PathInfo::try_from(path_info).expect_err("must fail")
} )
/// Create a node with an empty symlink target, and ensure it fails validation.
#[test]
fn validate_symlink_empty_target_invalid() {
castorepb::Node {
node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode {
name: "foo".into(),
target: "".into(),
})),
}
.into_name_and_node()
.expect_err("must fail validation");
}
/// Create a node with a symlink target including null bytes, and ensure it
/// fails validation.
#[test]
fn validate_symlink_target_null_byte_invalid() {
castorepb::Node {
node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode {
name: "foo".into(),
target: "foo\0".into(),
})),
}
.into_name_and_node()
.expect_err("must fail validation");
}
/// Create a PathInfo with a correct deriver field and ensure it succeeds.
#[test]
fn validate_valid_deriver() {
let mut path_info = PATH_INFO_WITH_NARINFO.clone();
// add a valid deriver
let narinfo = path_info.narinfo.as_mut().unwrap();
narinfo.deriver = Some(crate::proto::StorePath {
name: "foo".to_string(),
digest: Bytes::from(DUMMY_PATH_DIGEST.as_slice()),
});
path_info.validate().expect("must validate");
} }
/// Create a PathInfo with a broken deriver field and ensure it fails. /// Create a PathInfo with a broken deriver field and ensure it fails.
#[test] #[test]
fn validate_invalid_deriver() { fn convert_invalid_deriver() {
let mut path_info = PATH_INFO_WITH_NARINFO.clone(); let mut path_info = PROTO_PATH_INFO.clone();
// add a broken deriver (invalid digest) // add a broken deriver (invalid digest)
let narinfo = path_info.narinfo.as_mut().unwrap(); let narinfo = path_info.narinfo.as_mut().unwrap();
@ -277,157 +229,8 @@ fn validate_invalid_deriver() {
digest: vec![].into(), digest: vec![].into(),
}); });
match path_info.validate().expect_err("must fail validation") {
ValidatePathInfoError::InvalidDeriverField(_) => {}
e => panic!("unexpected error: {:?}", e),
}
}
#[test]
fn from_nixcompat_narinfo() {
let narinfo_parsed = nix_compat::narinfo::NarInfo::parse(
r#"StorePath: /nix/store/s66mzxpvicwk07gjbjfw9izjfa797vsw-hello-2.12.1
URL: nar/1nhgq6wcggx0plpy4991h3ginj6hipsdslv4fd4zml1n707j26yq.nar.xz
Compression: xz
FileHash: sha256:1nhgq6wcggx0plpy4991h3ginj6hipsdslv4fd4zml1n707j26yq
FileSize: 50088
NarHash: sha256:0yzhigwjl6bws649vcs2asa4lbs8hg93hyix187gc7s7a74w5h80
NarSize: 226488
References: 3n58xw4373jp0ljirf06d8077j15pc4j-glibc-2.37-8 s66mzxpvicwk07gjbjfw9izjfa797vsw-hello-2.12.1
Deriver: ib3sh3pcz10wsmavxvkdbayhqivbghlq-hello-2.12.1.drv
Sig: cache.nixos.org-1:8ijECciSFzWHwwGVOIVYdp2fOIOJAfmzGHPQVwpktfTQJF6kMPPDre7UtFw3o+VqenC5P8RikKOAAfN7CvPEAg=="#).expect("must parse");
assert_eq!( assert_eq!(
PathInfo { ValidatePathInfoError::InvalidDeriverField(store_path::Error::InvalidLength),
node: None, PathInfo::try_from(path_info).expect_err("must fail")
references: vec![
Bytes::copy_from_slice(&nixbase32::decode_fixed::<20>("3n58xw4373jp0ljirf06d8077j15pc4j").unwrap()),
Bytes::copy_from_slice(&nixbase32::decode_fixed::<20>("s66mzxpvicwk07gjbjfw9izjfa797vsw").unwrap()),
],
narinfo: Some(
NarInfo {
nar_size: 226488,
nar_sha256: Bytes::copy_from_slice(
&nixbase32::decode_fixed::<32>("0yzhigwjl6bws649vcs2asa4lbs8hg93hyix187gc7s7a74w5h80".as_bytes())
.unwrap()
),
signatures: vec![Signature {
name: "cache.nixos.org-1".to_string(),
data: BASE64.decode("8ijECciSFzWHwwGVOIVYdp2fOIOJAfmzGHPQVwpktfTQJF6kMPPDre7UtFw3o+VqenC5P8RikKOAAfN7CvPEAg==".as_bytes()).unwrap().into(),
}],
reference_names: vec![
"3n58xw4373jp0ljirf06d8077j15pc4j-glibc-2.37-8".to_string(),
"s66mzxpvicwk07gjbjfw9izjfa797vsw-hello-2.12.1".to_string()
],
deriver: Some(crate::proto::StorePath {
digest: Bytes::copy_from_slice(&nixbase32::decode_fixed::<20>("ib3sh3pcz10wsmavxvkdbayhqivbghlq").unwrap()),
name: "hello-2.12.1".to_string(),
}),
ca: None,
}
) )
},
(&narinfo_parsed).into(),
);
}
#[test]
fn from_nixcompat_narinfo_fod() {
let narinfo_parsed = nix_compat::narinfo::NarInfo::parse(
r#"StorePath: /nix/store/pa10z4ngm0g83kx9mssrqzz30s84vq7k-hello-2.12.1.tar.gz
URL: nar/1zjrhzhaizsrlsvdkqfl073vivmxcqnzkff4s50i0cdf541ary1r.nar.xz
Compression: xz
FileHash: sha256:1zjrhzhaizsrlsvdkqfl073vivmxcqnzkff4s50i0cdf541ary1r
FileSize: 1033524
NarHash: sha256:1lvqpbk2k1sb39z8jfxixf7p7v8sj4z6mmpa44nnmff3w1y6h8lh
NarSize: 1033416
References:
Deriver: dyivpmlaq2km6c11i0s6bi6mbsx0ylqf-hello-2.12.1.tar.gz.drv
Sig: cache.nixos.org-1:ywnIG629nQZQhEr6/HLDrLT/mUEp5J1LC6NmWSlJRWL/nM7oGItJQUYWGLvYGhSQvHrhIuvMpjNmBNh/WWqCDg==
CA: fixed:sha256:086vqwk2wl8zfs47sq2xpjc9k066ilmb8z6dn0q6ymwjzlm196cd"#
).expect("must parse");
assert_eq!(
PathInfo {
node: None,
references: vec![],
narinfo: Some(
NarInfo {
nar_size: 1033416,
nar_sha256: Bytes::copy_from_slice(
&nixbase32::decode_fixed::<32>(
"1lvqpbk2k1sb39z8jfxixf7p7v8sj4z6mmpa44nnmff3w1y6h8lh"
)
.unwrap()
),
signatures: vec![Signature {
name: "cache.nixos.org-1".to_string(),
data: BASE64
.decode("ywnIG629nQZQhEr6/HLDrLT/mUEp5J1LC6NmWSlJRWL/nM7oGItJQUYWGLvYGhSQvHrhIuvMpjNmBNh/WWqCDg==".as_bytes())
.unwrap()
.into(),
}],
reference_names: vec![],
deriver: Some(crate::proto::StorePath {
digest: Bytes::copy_from_slice(
&nixbase32::decode_fixed::<20>("dyivpmlaq2km6c11i0s6bi6mbsx0ylqf").unwrap()
),
name: "hello-2.12.1.tar.gz".to_string(),
}),
ca: Some(crate::proto::nar_info::Ca {
r#type: crate::proto::nar_info::ca::Hash::FlatSha256.into(),
digest: Bytes::copy_from_slice(
&nixbase32::decode_fixed::<32>(
"086vqwk2wl8zfs47sq2xpjc9k066ilmb8z6dn0q6ymwjzlm196cd"
)
.unwrap()
)
}),
}
),
},
(&narinfo_parsed).into()
);
}
/// Exercise .as_narinfo() on a PathInfo and ensure important fields are preserved..
#[test]
fn as_narinfo() {
let narinfo_parsed = nix_compat::narinfo::NarInfo::parse(
r#"StorePath: /nix/store/pa10z4ngm0g83kx9mssrqzz30s84vq7k-hello-2.12.1.tar.gz
URL: nar/1zjrhzhaizsrlsvdkqfl073vivmxcqnzkff4s50i0cdf541ary1r.nar.xz
Compression: xz
FileHash: sha256:1zjrhzhaizsrlsvdkqfl073vivmxcqnzkff4s50i0cdf541ary1r
FileSize: 1033524
NarHash: sha256:1lvqpbk2k1sb39z8jfxixf7p7v8sj4z6mmpa44nnmff3w1y6h8lh
NarSize: 1033416
References:
Deriver: dyivpmlaq2km6c11i0s6bi6mbsx0ylqf-hello-2.12.1.tar.gz.drv
Sig: cache.nixos.org-1:ywnIG629nQZQhEr6/HLDrLT/mUEp5J1LC6NmWSlJRWL/nM7oGItJQUYWGLvYGhSQvHrhIuvMpjNmBNh/WWqCDg==
CA: fixed:sha256:086vqwk2wl8zfs47sq2xpjc9k066ilmb8z6dn0q6ymwjzlm196cd"#
).expect("must parse");
let path_info: PathInfo = (&narinfo_parsed).into();
let mut narinfo_returned = path_info
.to_narinfo(
StorePathRef::from_bytes(b"pa10z4ngm0g83kx9mssrqzz30s84vq7k-hello-2.12.1.tar.gz")
.expect("invalid storepath"),
)
.expect("must be some");
narinfo_returned.url = "some.nar";
assert_eq!(
r#"StorePath: /nix/store/pa10z4ngm0g83kx9mssrqzz30s84vq7k-hello-2.12.1.tar.gz
URL: some.nar
Compression: none
NarHash: sha256:1lvqpbk2k1sb39z8jfxixf7p7v8sj4z6mmpa44nnmff3w1y6h8lh
NarSize: 1033416
References:
Deriver: dyivpmlaq2km6c11i0s6bi6mbsx0ylqf-hello-2.12.1.tar.gz.drv
Sig: cache.nixos.org-1:ywnIG629nQZQhEr6/HLDrLT/mUEp5J1LC6NmWSlJRWL/nM7oGItJQUYWGLvYGhSQvHrhIuvMpjNmBNh/WWqCDg==
CA: fixed:sha256:086vqwk2wl8zfs47sq2xpjc9k066ilmb8z6dn0q6ymwjzlm196cd
"#,
narinfo_returned.to_string(),
);
} }

View file

@ -1,24 +1,27 @@
use crate::pathinfoservice::PathInfo;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use nix_compat::nixhash::{CAHash, NixHash};
use nix_compat::store_path::StorePath;
use rstest::{self, *}; use rstest::{self, *};
use rstest_reuse::*; use rstest_reuse::*;
use std::io; use std::io;
use std::sync::Arc; use std::sync::Arc;
pub use tvix_castore::fixtures::*; use tvix_castore::fixtures::{
DIRECTORY_COMPLICATED, DIRECTORY_WITH_KEEP, DUMMY_DIGEST, EMPTY_BLOB_CONTENTS,
EMPTY_BLOB_DIGEST, HELLOWORLD_BLOB_CONTENTS, HELLOWORLD_BLOB_DIGEST,
};
use tvix_castore::{ use tvix_castore::{
blobservice::{BlobService, MemoryBlobService}, blobservice::{BlobService, MemoryBlobService},
directoryservice::{DirectoryService, MemoryDirectoryService}, directoryservice::{DirectoryService, MemoryDirectoryService},
proto as castorepb, Node, Node,
}; };
use crate::proto::{ pub const DUMMY_PATH_STR: &str = "00000000000000000000000000000000-dummy";
nar_info::{ca, Ca},
NarInfo, PathInfo,
};
pub const DUMMY_PATH: &str = "00000000000000000000000000000000-dummy";
pub const DUMMY_PATH_DIGEST: [u8; 20] = [0; 20]; pub const DUMMY_PATH_DIGEST: [u8; 20] = [0; 20];
lazy_static! { lazy_static! {
pub static ref DUMMY_PATH: StorePath<String> = StorePath::from_name_and_digest_fixed("dummy", DUMMY_PATH_DIGEST).unwrap();
pub static ref CASTORE_NODE_SYMLINK: Node = Node::Symlink { pub static ref CASTORE_NODE_SYMLINK: Node = Node::Symlink {
target: "/nix/store/somewhereelse".try_into().unwrap(), target: "/nix/store/somewhereelse".try_into().unwrap(),
}; };
@ -130,32 +133,19 @@ lazy_static! {
1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0, // ")" 1, 0, 0, 0, 0, 0, 0, 0, b')', 0, 0, 0, 0, 0, 0, 0, // ")"
]; ];
/// A PathInfo message without .narinfo populated. /// A PathInfo message
pub static ref PATH_INFO_WITHOUT_NARINFO : PathInfo = PathInfo { pub static ref PATH_INFO: PathInfo = PathInfo {
node: Some(castorepb::Node { store_path: DUMMY_PATH.clone(),
node: Some(castorepb::node::Node::Directory(castorepb::DirectoryNode { node: tvix_castore::Node::Directory {
name: DUMMY_PATH.into(), digest: DUMMY_DIGEST.clone(),
digest: DUMMY_DIGEST.clone().into(),
size: 0, size: 0,
})), },
}), references: vec![DUMMY_PATH.clone()],
references: vec![DUMMY_PATH_DIGEST.as_slice().into()], nar_sha256: [0; 32],
narinfo: None,
};
/// A PathInfo message with .narinfo populated.
/// The references in `narinfo.reference_names` aligns with what's in
/// `references`.
pub static ref PATH_INFO_WITH_NARINFO : PathInfo = PathInfo {
narinfo: Some(NarInfo {
nar_size: 0, nar_size: 0,
nar_sha256: DUMMY_DIGEST.clone().into(),
signatures: vec![], signatures: vec![],
reference_names: vec![DUMMY_PATH.to_string()],
deriver: None, deriver: None,
ca: Some(Ca { r#type: ca::Hash::NarSha256.into(), digest: DUMMY_DIGEST.clone().into() }) ca: Some(CAHash::Nar(NixHash::Sha256([0; 32]))),
}),
..PATH_INFO_WITHOUT_NARINFO.clone()
}; };
} }