refactor(tvix/store/composition): rename 'default' to 'root'

This becomes the root of the composition. `default` implies we can
directly access anything else, which we cannot; `root` makes this
clearer, and the name is internal-only anyway.

Change-Id: I297511bc05a7c32c59510b9d192b40d1bd937b5f
Reviewed-on: https://cl.tvl.fyi/c/depot/+/12746
Reviewed-by: yuka <yuka@yuka.dev>
Tested-by: BuildkiteCI
Authored by Florian Klink on 2024-11-09 13:16:11 +00:00, committed by flokli
parent 8df919dcf0
commit d505f03e00
9 changed files with 26 additions and 26 deletions
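
To make the new naming concrete, here is a usage sketch closely following the module-level doc comment updated in this diff (the instance names `blobstore1`/`blobstore2` are illustrative): a composition holds several named instances, and `root` is the entrypoint it is built from; the other instances are only reachable through it, not directly.

// Hypothetical sketch, mirroring the composition doc-comment touched below.
let blob_services_configs_json = serde_json::json!({
    "blobstore1": { "type": "memory" },
    "blobstore2": { "type": "memory" },
    "root": {
        "type": "combined",
        "local": "blobstore1",
        "remote": "blobstore2"
    }
});
let blob_services_configs =
    with_registry(&REG, || serde_json::from_value(blob_services_configs_json))?;
let mut blob_service_composition = Composition::new(&REG);
blob_service_composition.extend_with_configs::<dyn BlobService>(blob_services_configs);
// Build the entrypoint; "root" pulls in the other instances it references.
let blob_service: Arc<dyn BlobService> = blob_service_composition.build("root").await?;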


@@ -385,7 +385,7 @@ mod tests {
 .await
 .expect("must succeed"),
 );
-GRPCBlobService::from_client("default".into(), client)
+GRPCBlobService::from_client("root".into(), client)
 };
 let has = grpc_client


@@ -30,7 +30,7 @@ pub async fn make_grpc_blob_service_client() -> Box<dyn BlobService> {
 let mut maybe_right = Some(right);
 Box::new(GRPCBlobService::from_client(
-"default".into(),
+"root".into(),
 BlobServiceClient::new(
 Endpoint::try_from("http://[::]:50051")
 .unwrap()


@@ -62,7 +62,7 @@
 //! "blobstore2": {
 //! "type": "memory"
 //! },
-//! "default": {
+//! "root": {
 //! "type": "combined",
 //! "local": "blobstore1",
 //! "remote": "blobstore2"
@@ -72,7 +72,7 @@
 //! let blob_services_configs = with_registry(&REG, || serde_json::from_value(blob_services_configs_json))?;
 //! let mut blob_service_composition = Composition::new(&REG);
 //! blob_service_composition.extend_with_configs::<dyn BlobService>(blob_services_configs);
-//! let blob_service: Arc<dyn BlobService> = blob_service_composition.build("default").await?;
+//! let blob_service: Arc<dyn BlobService> = blob_service_composition.build("root").await?;
 //! # Ok(())
 //! # })
 //! # }
@@ -281,7 +281,7 @@ pub fn add_default_services(reg: &mut Registry) {
 pub struct CompositionContext<'a> {
 // The stack used to detect recursive instantiations and prevent deadlocks
 // The TypeId of the trait object is included to distinguish e.g. the
-// BlobService "default" and the DirectoryService "default".
+// BlobService "root" and the DirectoryService "root".
 stack: Vec<(TypeId, String)>,
 registry: &'static Registry,
 composition: Option<&'a Composition>,
@@ -529,7 +529,7 @@ mod test {
 #[tokio::test]
 async fn concurrent() {
 let blob_services_configs_json = serde_json::json!({
-"default": {
+"root": {
 "type": "memory",
 }
 });
@@ -539,8 +539,8 @@
 let mut blob_service_composition = Composition::new(&REG);
 blob_service_composition.extend_with_configs::<dyn BlobService>(blob_services_configs);
 let (blob_service1, blob_service2) = tokio::join!(
-blob_service_composition.build::<dyn BlobService>("default"),
-blob_service_composition.build::<dyn BlobService>("default")
+blob_service_composition.build::<dyn BlobService>("root"),
+blob_service_composition.build::<dyn BlobService>("root")
 );
 assert!(Arc::ptr_eq(
 &blob_service1.unwrap(),
@@ -552,15 +552,15 @@
 #[tokio::test]
 async fn reject_recursion() {
 let blob_services_configs_json = serde_json::json!({
-"default": {
+"root": {
 "type": "combined",
 "local": "other",
 "remote": "other"
 },
 "other": {
 "type": "combined",
-"local": "default",
-"remote": "default"
+"local": "root",
+"remote": "root"
 }
 });
@@ -569,11 +569,11 @@
 let mut blob_service_composition = Composition::new(&REG);
 blob_service_composition.extend_with_configs::<dyn BlobService>(blob_services_configs);
 match blob_service_composition
-.build::<dyn BlobService>("default")
+.build::<dyn BlobService>("root")
 .await
 {
 Err(CompositionError::Recursion(stack)) => {
-assert_eq!(stack, vec!["default".to_string(), "other".to_string()])
+assert_eq!(stack, vec!["root".to_string(), "other".to_string()])
 }
 other => panic!("should have returned an error, returned: {:?}", other.err()),
 }


@@ -64,7 +64,7 @@ impl ObjectStoreDirectoryService {
 let (object_store, path) = object_store::parse_url_opts(url, options)?;
 Ok(Self {
-instance_name: "default".into(),
+instance_name: "root".into(),
 object_store: Arc::new(object_store),
 base_path: path,
 })


@@ -56,7 +56,7 @@ impl RedbDirectoryService {
 create_schema(&db)?;
 Ok(Self {
-instance_name: "default".into(),
+instance_name: "root".into(),
 db: Arc::new(db),
 })
 }


@@ -33,7 +33,7 @@ pub async fn make_grpc_directory_service_client() -> Box<dyn DirectoryService> {
 // Create a client, connecting to the right side. The URI is unused.
 let mut maybe_right = Some(right);
 Box::new(GRPCDirectoryService::from_client(
-"default".into(),
+"root".into(),
 DirectoryServiceClient::new(
 Endpoint::try_from("http://[::]:50051")
 .unwrap()


@@ -128,12 +128,12 @@ mod tests {
 async fn test_from_addr_tokio(#[case] uri_str: &str, #[case] exp_succeed: bool) {
 let mut comp = Composition::new(&REG);
 comp.extend(vec![(
-"default".into(),
+"root".into(),
 DeserializeWithRegistry(Box::new(MemoryBlobServiceConfig {})
 as Box<dyn ServiceBuilder<Output = dyn BlobService>>),
 )]);
 comp.extend(vec![(
-"default".into(),
+"root".into(),
 DeserializeWithRegistry(Box::new(MemoryDirectoryServiceConfig {})
 as Box<dyn ServiceBuilder<Output = dyn DirectoryService>>),
 )]);


@@ -296,13 +296,13 @@ impl TryFrom<Url> for NixHTTPPathInfoServiceConfig {
 .into_iter()
 .find(|(k, _)| k == "blob_service")
 .map(|(_, v)| v.to_string())
-.unwrap_or("default".to_string());
+.unwrap_or("root".to_string());
 let directory_service = url
 .query_pairs()
 .into_iter()
 .find(|(k, _)| k == "directory_service")
 .map(|(_, v)| v.to_string())
-.unwrap_or("default".to_string());
+.unwrap_or("root".to_string());
 Ok(NixHTTPPathInfoServiceConfig {
 // Stringify the URL and remove the nix+ prefix.


@@ -145,15 +145,15 @@ pub async fn addrs_to_configs(
 let path_info_service_url = Url::parse(&urls.path_info_service_addr)?;
 configs.blobservices.insert(
-"default".into(),
+"root".into(),
 with_registry(&REG, || blob_service_url.try_into())?,
 );
 configs.directoryservices.insert(
-"default".into(),
+"root".into(),
 with_registry(&REG, || directory_service_url.try_into())?,
 );
 configs.pathinfoservices.insert(
-"default".into(),
+"root".into(),
 with_registry(&REG, || path_info_service_url.try_into())?,
 );
@@ -194,9 +194,9 @@ pub async fn construct_services_from_configs(
 comp.extend(configs.directoryservices);
 comp.extend(configs.pathinfoservices);
-let blob_service: Arc<dyn BlobService> = comp.build("default").await?;
-let directory_service: Arc<dyn DirectoryService> = comp.build("default").await?;
-let path_info_service: Arc<dyn PathInfoService> = comp.build("default").await?;
+let blob_service: Arc<dyn BlobService> = comp.build("root").await?;
+let directory_service: Arc<dyn DirectoryService> = comp.build("root").await?;
+let path_info_service: Arc<dyn PathInfoService> = comp.build("root").await?;
 // HACK: The grpc client also implements NarCalculationService, and we
 // really want to use it (otherwise we'd need to fetch everything again for hashing).