2023-02-27 18:05:51 +01:00
|
|
|
use clap::Subcommand;
|
|
|
|
use data_encoding::BASE64;
|
2023-03-17 00:11:43 +01:00
|
|
|
use nix_compat::derivation::Derivation;
|
|
|
|
use nix_compat::derivation::Output;
|
|
|
|
use nix_compat::nixhash::HashAlgo;
|
|
|
|
use nix_compat::nixhash::NixHash;
|
|
|
|
use nix_compat::nixhash::NixHashWithMode;
|
2023-02-27 18:05:51 +01:00
|
|
|
use std::path::PathBuf;
|
2023-02-16 16:49:22 +01:00
|
|
|
use tracing_subscriber::prelude::*;
|
2023-02-15 14:00:27 +01:00
|
|
|
use tvix_store::blobservice::SledBlobService;
|
|
|
|
use tvix_store::directoryservice::SledDirectoryService;
|
2023-02-27 18:05:51 +01:00
|
|
|
use tvix_store::import::import_path;
|
2023-03-17 00:11:43 +01:00
|
|
|
use tvix_store::nar::NARCalculationService;
|
2023-02-15 14:00:27 +01:00
|
|
|
use tvix_store::nar::NonCachingNARCalculationService;
|
|
|
|
use tvix_store::pathinfoservice::SledPathInfoService;
|
2023-01-02 14:37:08 +01:00
|
|
|
use tvix_store::proto::blob_service_server::BlobServiceServer;
|
|
|
|
use tvix_store::proto::directory_service_server::DirectoryServiceServer;
|
|
|
|
use tvix_store::proto::path_info_service_server::PathInfoServiceServer;
|
2023-02-15 14:00:27 +01:00
|
|
|
use tvix_store::proto::GRPCBlobServiceWrapper;
|
|
|
|
use tvix_store::proto::GRPCDirectoryServiceWrapper;
|
|
|
|
use tvix_store::proto::GRPCPathInfoServiceWrapper;
|
2022-12-28 16:40:28 +01:00
|
|
|
|
2022-11-26 02:14:02 +01:00
|
|
|
#[cfg(feature = "reflection")]
|
2023-01-02 14:37:08 +01:00
|
|
|
use tvix_store::proto::FILE_DESCRIPTOR_SET;
|
2022-11-26 02:14:02 +01:00
|
|
|
|
2022-12-28 16:40:28 +01:00
|
|
|
use clap::Parser;
|
|
|
|
use tonic::{transport::Server, Result};
|
2022-12-28 17:17:53 +01:00
|
|
|
use tracing::{info, Level};
|
2022-12-28 16:40:28 +01:00
|
|
|
|
|
|
|
// Top-level command-line interface, parsed via clap's derive API.
// NOTE: plain `//` comments are used deliberately below where a field has no
// help text — a `///` doc comment on a clap-derived field becomes part of the
// generated `--help` output, which would change observable behavior.
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    /// Whether to log in JSON
    #[arg(long)]
    json: bool,

    // Minimum level to log at; when absent, main() falls back to Level::INFO.
    #[arg(long)]
    log_level: Option<Level>,

    // The subcommand to run (Daemon or Import).
    #[command(subcommand)]
    command: Commands,
}
|
|
|
|
|
|
|
|
// Subcommands of the tvix-store CLI.
// NOTE: `//` comments are used (not `///`) where no help text exists, because
// doc comments on clap-derived variants/fields feed into `--help` output.
#[derive(Subcommand)]
enum Commands {
    /// Runs the tvix-store daemon.
    Daemon {
        // gRPC listen address; main() defaults this to "[::]:8000" when unset.
        #[arg(long, short = 'l')]
        listen_address: Option<String>,
    },
    /// Imports a list of paths into the store (not using the daemon)
    Import {
        #[clap(value_name = "PATH")]
        paths: Vec<PathBuf>,
    },
}
|
|
|
|
|
|
|
|
#[tokio::main]
|
|
|
|
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|
|
|
let cli = Cli::parse();
|
|
|
|
|
2023-02-27 18:05:51 +01:00
|
|
|
// configure log settings
|
2022-12-28 17:17:53 +01:00
|
|
|
let level = cli.log_level.unwrap_or(Level::INFO);
|
2023-02-16 16:49:22 +01:00
|
|
|
|
|
|
|
let subscriber = tracing_subscriber::registry()
|
|
|
|
.with(if cli.json {
|
|
|
|
Some(
|
|
|
|
tracing_subscriber::fmt::Layer::new()
|
|
|
|
.with_writer(std::io::stdout.with_max_level(level))
|
|
|
|
.json(),
|
|
|
|
)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
})
|
|
|
|
.with(if !cli.json {
|
|
|
|
Some(
|
|
|
|
tracing_subscriber::fmt::Layer::new()
|
|
|
|
.with_writer(std::io::stdout.with_max_level(level))
|
|
|
|
.pretty(),
|
|
|
|
)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
});
|
|
|
|
|
|
|
|
tracing::subscriber::set_global_default(subscriber).expect("Unable to set global subscriber");
|
2022-12-28 17:17:53 +01:00
|
|
|
|
2023-02-27 18:05:51 +01:00
|
|
|
// initialize stores
|
|
|
|
let mut blob_service = SledBlobService::new("blobs.sled".into())?;
|
|
|
|
let mut directory_service = SledDirectoryService::new("directories.sled".into())?;
|
2023-02-15 14:00:27 +01:00
|
|
|
let path_info_service = SledPathInfoService::new("pathinfo.sled".into())?;
|
|
|
|
|
2023-02-27 18:05:51 +01:00
|
|
|
match cli.command {
|
|
|
|
Commands::Daemon { listen_address } => {
|
|
|
|
let listen_address = listen_address
|
|
|
|
.unwrap_or_else(|| "[::]:8000".to_string())
|
|
|
|
.parse()
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
let mut server = Server::builder();
|
|
|
|
|
|
|
|
let nar_calculation_service = NonCachingNARCalculationService::new(
|
|
|
|
blob_service.clone(),
|
|
|
|
directory_service.clone(),
|
|
|
|
);
|
|
|
|
|
2023-03-12 17:32:06 +01:00
|
|
|
#[allow(unused_mut)]
|
2023-02-27 18:05:51 +01:00
|
|
|
let mut router = server
|
refactor(tvix/store): remove ChunkService
Whether chunking is involved or not, is an implementation detail of each
Blobstore. Consumers of a whole blob shouldn't need to worry about that.
It currently is not visible in the gRPC interface either. It
shouldn't bleed into everything.
Let the BlobService trait provide `open_read` and `open_write` methods,
which return handles providing io::Read or io::Write, and leave the
details up to the implementation.
This means, our custom BlobReader module can go away, and all the
chunking bits in there, too.
In the future, we might still want to add more chunking-aware syncing,
but as a syncing strategy some stores can expose, not as a fundamental
protocol component.
This currently needs "SyncReadIntoAsyncRead", taken and vendored in from
https://github.com/tokio-rs/tokio/pull/5669.
It provides a AsyncRead for a sync Read, which is necessary to connect
our (sync) BlobReader interface to a GRPC server implementation.
As an alternative, we could also make the BlobReader itself async, and
let consumers of the trait (EvalIO) deal with the async-ness, but this
is less of a change for now.
In terms of vendoring, I initially tried to move our tokio crate to
these commits, but ended up in version incompatibilities, so let's
vendor it in for now.
Change-Id: I5969ebbc4c0e1ceece47981be3b9e7cfb3f59ad0
Reviewed-on: https://cl.tvl.fyi/c/depot/+/8551
Tested-by: BuildkiteCI
Reviewed-by: tazjin <tazjin@tvl.su>
2023-05-11 14:49:01 +02:00
|
|
|
.add_service(BlobServiceServer::new(GRPCBlobServiceWrapper::from(
|
2023-02-27 18:05:51 +01:00
|
|
|
blob_service,
|
|
|
|
)))
|
|
|
|
.add_service(DirectoryServiceServer::new(
|
|
|
|
GRPCDirectoryServiceWrapper::from(directory_service),
|
|
|
|
))
|
|
|
|
.add_service(PathInfoServiceServer::new(GRPCPathInfoServiceWrapper::new(
|
|
|
|
path_info_service,
|
|
|
|
nar_calculation_service,
|
|
|
|
)));
|
|
|
|
|
|
|
|
#[cfg(feature = "reflection")]
|
|
|
|
{
|
|
|
|
let reflection_svc = tonic_reflection::server::Builder::configure()
|
|
|
|
.register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
|
|
|
|
.build()?;
|
|
|
|
router = router.add_service(reflection_svc);
|
|
|
|
}
|
|
|
|
|
|
|
|
info!("tvix-store listening on {}", listen_address);
|
|
|
|
|
|
|
|
router.serve(listen_address).await?;
|
|
|
|
}
|
|
|
|
Commands::Import { paths } => {
|
2023-03-17 00:11:43 +01:00
|
|
|
let nar_calculation_service = NonCachingNARCalculationService::new(
|
|
|
|
blob_service.clone(),
|
|
|
|
directory_service.clone(),
|
|
|
|
);
|
|
|
|
|
2023-02-27 18:05:51 +01:00
|
|
|
for path in paths {
|
refactor(tvix/store): remove ChunkService
Whether chunking is involved or not, is an implementation detail of each
Blobstore. Consumers of a whole blob shouldn't need to worry about that.
It currently is not visible in the gRPC interface either. It
shouldn't bleed into everything.
Let the BlobService trait provide `open_read` and `open_write` methods,
which return handles providing io::Read or io::Write, and leave the
details up to the implementation.
This means, our custom BlobReader module can go away, and all the
chunking bits in there, too.
In the future, we might still want to add more chunking-aware syncing,
but as a syncing strategy some stores can expose, not as a fundamental
protocol component.
This currently needs "SyncReadIntoAsyncRead", taken and vendored in from
https://github.com/tokio-rs/tokio/pull/5669.
It provides a AsyncRead for a sync Read, which is necessary to connect
our (sync) BlobReader interface to a GRPC server implementation.
As an alternative, we could also make the BlobReader itself async, and
let consumers of the trait (EvalIO) deal with the async-ness, but this
is less of a change for now.
In terms of vendoring, I initially tried to move our tokio crate to
these commits, but ended up in version incompatibilities, so let's
vendor it in for now.
Change-Id: I5969ebbc4c0e1ceece47981be3b9e7cfb3f59ad0
Reviewed-on: https://cl.tvl.fyi/c/depot/+/8551
Tested-by: BuildkiteCI
Reviewed-by: tazjin <tazjin@tvl.su>
2023-05-11 14:49:01 +02:00
|
|
|
let root_node = import_path(&mut blob_service, &mut directory_service, &path)?;
|
2023-02-27 18:05:51 +01:00
|
|
|
|
2023-03-17 00:11:43 +01:00
|
|
|
let nar_hash = NixHashWithMode::Recursive(NixHash::new(
|
|
|
|
HashAlgo::Sha256,
|
|
|
|
nar_calculation_service
|
|
|
|
.calculate_nar(&root_node)?
|
|
|
|
.nar_sha256,
|
|
|
|
));
|
|
|
|
|
|
|
|
let mut drv = Derivation::default();
|
|
|
|
drv.outputs.insert(
|
|
|
|
"out".to_string(),
|
|
|
|
Output {
|
|
|
|
path: "".to_string(),
|
|
|
|
hash_with_mode: Some(nar_hash),
|
|
|
|
},
|
|
|
|
);
|
|
|
|
drv.calculate_output_paths(
|
|
|
|
path.file_name()
|
|
|
|
.expect("path must not be ..")
|
|
|
|
.to_str()
|
|
|
|
.expect("path must be valid unicode"),
|
|
|
|
// Note the derivation_or_fod_hash argument is *unused* for FODs, so it doesn't matter what we pass here.
|
|
|
|
&NixHash::new(HashAlgo::Sha256, vec![]),
|
|
|
|
)?;
|
|
|
|
|
|
|
|
println!("{}", drv.outputs.get("out").unwrap().path);
|
|
|
|
|
2023-02-27 18:05:51 +01:00
|
|
|
match root_node {
|
|
|
|
tvix_store::proto::node::Node::Directory(directory_node) => {
|
|
|
|
info!(
|
|
|
|
path = ?path,
|
|
|
|
name = directory_node.name,
|
|
|
|
digest = BASE64.encode(&directory_node.digest),
|
|
|
|
"import successful",
|
|
|
|
)
|
|
|
|
}
|
|
|
|
tvix_store::proto::node::Node::File(file_node) => {
|
|
|
|
info!(
|
|
|
|
path = ?path,
|
|
|
|
name = file_node.name,
|
|
|
|
digest = BASE64.encode(&file_node.digest),
|
|
|
|
"import successful"
|
|
|
|
)
|
|
|
|
}
|
|
|
|
tvix_store::proto::node::Node::Symlink(symlink_node) => {
|
|
|
|
info!(
|
|
|
|
path = ?path,
|
|
|
|
name = symlink_node.name,
|
|
|
|
target = symlink_node.target,
|
|
|
|
"import successful"
|
|
|
|
)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
2022-12-28 16:40:28 +01:00
|
|
|
Ok(())
|
2022-11-11 23:48:24 +01:00
|
|
|
}
|