Compare commits
318 commits
fix-compre
...
fork
Author | SHA1 | Date | |
---|---|---|---|
|
3389c550b9 | ||
|
240b6a8ba4 | ||
|
85fd360bfa | ||
|
3b16bacb8c | ||
|
1f5a20736a | ||
|
e2aa81c0b2 | ||
|
29f160963e | ||
|
e04bbe92a0 | ||
|
684a06ac3c | ||
|
9b6ebb947b | ||
|
10c2866ccd | ||
|
767bc726ef | ||
|
e737488d92 | ||
|
af6dc48971 | ||
|
0022285600 | ||
|
c9eeb7da70 | ||
|
7593592241 | ||
|
7f5956b2be | ||
|
adf8a7da87 | ||
|
e5edb3b192 | ||
|
e826ffb19c | ||
|
5c57529675 | ||
|
af5ce5489f | ||
|
fcd4bfccdf | ||
|
1bddd9a001 | ||
|
9d02fc4ff1 | ||
|
f49e047588 | ||
|
f6dc1f1819 | ||
|
861b7caa06 | ||
|
95640e7be4 | ||
|
9e2ecf9101 | ||
|
2edafd0a5a | ||
|
b800bf2bd4 | ||
|
e9f1bb9917 | ||
|
5e8729188b | ||
|
158ba0d607 | ||
|
8206f68aea | ||
|
773ddcb209 | ||
|
485fb81edf | ||
|
374cde20f2 | ||
|
7fa52f5933 | ||
|
e573f8389c | ||
|
bdf1162d47 | ||
|
7903d26054 | ||
|
863146295b | ||
|
cec28377d8 | ||
|
0a38618675 | ||
|
c352306491 | ||
|
732b46c6fa | ||
|
f51d27acb2 | ||
|
31f55d7ce7 | ||
|
72bdb15958 | ||
|
730d1cc77b | ||
|
317962fbed | ||
|
609b68031b | ||
|
200d49a0e1 | ||
|
32cc8016df | ||
|
33f30573f9 | ||
|
a9403664e1 | ||
|
632e74e435 | ||
|
1c898f7ddc | ||
|
e4714db2d5 | ||
|
c60f4ccfda | ||
|
7f1ecff989 | ||
|
69dcd8e92c | ||
|
9ea1862750 | ||
|
6cfae6c423 | ||
|
0f519a6f13 | ||
|
a115238f1b | ||
|
84bdc582ea | ||
|
18578c3458 | ||
|
3abc104584 | ||
|
8efd6b3cd2 | ||
|
7eb6900129 | ||
|
ebf4647976 | ||
|
e74378a324 | ||
|
5119cae360 | ||
|
23f97d0df0 | ||
|
53908b1b96 | ||
|
1c73c1c703 | ||
|
a2eb46e3d8 | ||
|
11aacd7857 | ||
|
b3c790a81e | ||
|
a28c495c38 | ||
|
a60998c234 | ||
|
6f4d3fec5c | ||
|
06f03b4518 | ||
|
91e76a90a0 | ||
|
9803712ab8 | ||
|
55b0726571 | ||
|
c4654fe373 | ||
|
8c8861cb3c | ||
|
4a4c21482b | ||
|
d235fd99c2 | ||
|
0979379980 | ||
|
f2d9255a98 | ||
|
348998e1fa | ||
|
5a97888d8b | ||
|
2945a359b4 | ||
|
02ee441626 | ||
|
b6dd9d2d5b | ||
|
b2b38f23d0 | ||
|
c622af481e | ||
|
82429f0661 | ||
|
fef2fdcf8e | ||
|
a5fcfd80a1 | ||
|
743407e7ab | ||
|
7bf6563da1 | ||
|
5ef54a5583 | ||
|
2bca043c33 | ||
|
ca695a6f7c | ||
|
c3645b590e | ||
|
8edfb085f5 | ||
|
3e4b9e56a5 | ||
|
2fa5e71d5e | ||
|
d292203235 | ||
|
b9a614a90f | ||
|
ae8758c966 | ||
|
248f46e7fc | ||
|
7a85fb4c64 | ||
|
228bf55646 | ||
|
ced05a2bb6 | ||
|
9af6920478 | ||
|
a774cb8c10 | ||
|
fe07ebfb30 | ||
|
a77b1d176b | ||
|
6167e63e07 | ||
|
d504f668e0 | ||
|
6dff303c5f | ||
|
a742fafdbf | ||
|
25f8ddca6d | ||
|
478a746a92 | ||
|
47ff0d0893 | ||
|
f22defa541 | ||
|
bfab474d0c | ||
|
8b624f93bf | ||
|
e7ff04b5f1 | ||
|
beb6713983 | ||
|
424b1ab5ad | ||
|
a8d2d6e97e | ||
|
4157fbad43 | ||
|
43cf713417 | ||
|
6da55dc1a6 | ||
|
afef485221 | ||
|
3530841a13 | ||
|
6c31d72000 | ||
|
a4ebc8da7c | ||
|
35d5811eec | ||
|
c321a5136f | ||
|
1099b5603c | ||
|
caf653be32 | ||
|
11665f4e0a | ||
|
83a6ad9717 | ||
|
1694ff4e22 | ||
|
73896eb075 | ||
|
8041ce7cbd | ||
|
8f6f45097e | ||
|
012da7d0e1 | ||
|
95ebcc24b0 | ||
|
565c0fd24c | ||
|
e03ea11bad | ||
|
2357079891 | ||
|
2beabe968c | ||
|
413135b925 | ||
|
e579d3869d | ||
|
01af37c532 | ||
|
a4e40d1dfa | ||
|
a259613c76 | ||
|
7612cb4c31 | ||
|
73e16c1855 | ||
|
bc967c538d | ||
|
70d199ea44 | ||
|
ed4f68b1d3 | ||
|
0d6d31bc66 | ||
|
a521f9e5f8 | ||
|
56d4f01e9f | ||
|
86f467cee6 | ||
|
64a085cf52 | ||
|
ab6a4815ff | ||
|
2284c00417 | ||
|
0714184b1f | ||
|
7b1acc5c16 | ||
|
9aa1c2dd19 | ||
|
d8640b6e67 | ||
|
4d5abbe232 | ||
|
ef75a6300b | ||
|
abff828ccc | ||
|
adf9b4c54a | ||
|
d6c57eb957 | ||
|
ddca074886 | ||
|
bb5d7c9678 | ||
|
98863e7312 | ||
|
5382cbb93a | ||
|
e086c76ee9 | ||
|
56fa533e43 | ||
|
0cfe2aaf6a | ||
|
96832c0411 | ||
|
76839683a7 | ||
|
c75b0d08ce | ||
|
5d01892274 | ||
|
21ceef4934 | ||
|
5ec93b57e6 | ||
|
8ea7d2b60e | ||
|
49b173786c | ||
|
04e9531e65 | ||
|
3cd57ce4e3 | ||
|
9d20176241 | ||
|
d3c32f214c | ||
|
1d3fce5857 | ||
|
fa05f12d5d | ||
|
65d8b85dca | ||
|
5d3ae11aa5 | ||
|
a0a13126ee | ||
|
39ef171e00 | ||
|
ad92083dea | ||
|
38b94f5591 | ||
|
a4748e5fa6 | ||
|
83e25fdd19 | ||
|
9d8832474c | ||
|
41706c8764 | ||
|
97c60d9857 | ||
|
c7845f3c88 | ||
|
2f4185ff1a | ||
|
3ca0b53840 | ||
|
5d3f3158d6 | ||
|
d2a80dda88 | ||
|
fd64df0511 | ||
|
1f5a85740e | ||
|
e176e1f11a | ||
|
a781cfcc49 | ||
|
12e6c0bfc0 | ||
|
f5c6acbbeb | ||
|
7c2ac040db | ||
|
d378111d77 | ||
|
a6d6fc418d | ||
|
c06e26f1cc | ||
|
3ea69235a4 | ||
|
8e13838451 | ||
|
c9a2783d2d | ||
|
7f8997fc80 | ||
|
e505880fde | ||
|
3511e328ec | ||
|
9c4b57ac63 | ||
|
549e86d95b | ||
|
c554c1c1c0 | ||
|
591c5ffe53 | ||
|
f648f17ec3 | ||
|
000fb856be | ||
|
3536a7c7fa | ||
|
6bd2760aa7 | ||
|
d33fd78202 | ||
|
172405c40c | ||
|
6559ab4cf5 | ||
|
6366cee717 | ||
|
b8f92a6d53 | ||
|
1d7ba89c19 | ||
|
5b0264223f | ||
|
c2c4169e49 | ||
|
f9703a9af5 | ||
|
13d79e04d8 | ||
|
2510cd6a5c | ||
|
3202d008d5 | ||
|
37b2c2ce78 | ||
|
8908fd18ca | ||
|
59056cf705 | ||
|
480a8106cf | ||
|
26129c1761 | ||
|
9c01ab3e84 | ||
|
66c75ef694 | ||
|
af61d64209 | ||
|
41dc9ee6a2 | ||
|
87d4b00ff5 | ||
|
756539a596 | ||
|
bdf8269859 | ||
|
13720d90bf | ||
|
7cfac04b7d | ||
|
a595d409de | ||
|
4277aa0db6 | ||
|
b21ca2ac26 | ||
|
e682e5ce2a | ||
|
a86dca8c78 | ||
|
1f65a7b0d0 | ||
|
8aa038a90e | ||
|
3f6cd7aebc | ||
|
47a8baf178 | ||
|
9cbebfea27 | ||
|
a794790203 | ||
|
a982f73477 | ||
|
d17f0592da | ||
|
038d1dd551 | ||
|
f3c80c10b9 | ||
|
0b987a509f | ||
|
4769d047b2 | ||
|
c8f92c6e9f | ||
|
5114504b67 | ||
|
6a988a1598 | ||
|
e97202e54a | ||
|
9385b3bcfb | ||
|
636647ce03 | ||
|
ca5134c3da | ||
|
94a0e21e68 | ||
|
b9aa6456e2 | ||
|
9f10a71ec5 | ||
|
dbe698042d | ||
|
2547ff2cf6 | ||
|
39c8b6dece | ||
|
849e639c71 | ||
|
14a4b4cbc3 | ||
|
67335c41b7 | ||
|
6774d9c59c | ||
|
8342f70c60 | ||
|
5d82e1e62f | ||
|
a3194e9280 | ||
|
05b4e805ee | ||
|
62184ee35a | ||
|
e408783bac | ||
|
eea9f3d9ee | ||
|
76394daca3 |
507 changed files with 23184 additions and 34896 deletions
5
.gitreview
Normal file
5
.gitreview
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
[gerrit]
|
||||||
|
host=cl.tvl.fyi
|
||||||
|
port=29418
|
||||||
|
project=depot
|
||||||
|
defaultbranch=canon
|
1
.mailmap
1
.mailmap
|
@ -1 +1,2 @@
|
||||||
Alyssa Ross <hi@alyssa.is>
|
Alyssa Ross <hi@alyssa.is>
|
||||||
|
Aspen Smith <root@gws.fyi> <aspen@gws.fyi> <grfn@gws.fyi>
|
||||||
|
|
|
@ -15,10 +15,9 @@ partially see this as [an experiment][] in tooling for monorepos.
|
||||||
|
|
||||||
## Services
|
## Services
|
||||||
|
|
||||||
* Source code is available primarily through Sourcegraph on
|
* Source code can be viewed primarily via `cgit-pink` on
|
||||||
[cs.tvl.fyi](https://cs.tvl.fyi), where it is searchable and even semantically
|
[code.tvl.fyi](https://code.tvl.fyi), with code search being available through
|
||||||
indexed. A lower-tech view of the repository is also available via cgit-pink
|
Livegrep on [grep.tvl.fyi](https://grep.tvl.fyi).
|
||||||
on [code.tvl.fyi](https://code.tvl.fyi).
|
|
||||||
|
|
||||||
The repository can be cloned using `git` from `https://cl.tvl.fyi/depot`.
|
The repository can be cloned using `git` from `https://cl.tvl.fyi/depot`.
|
||||||
|
|
||||||
|
|
637
corp/rih/backend/Cargo.lock
generated
637
corp/rih/backend/Cargo.lock
generated
File diff suppressed because it is too large
Load diff
21
corp/rih/frontend/Cargo.lock
generated
21
corp/rih/frontend/Cargo.lock
generated
|
@ -1500,19 +1500,20 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen"
|
name = "wasm-bindgen"
|
||||||
version = "0.2.92"
|
version = "0.2.93"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
|
checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
|
"once_cell",
|
||||||
"wasm-bindgen-macro",
|
"wasm-bindgen-macro",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-backend"
|
name = "wasm-bindgen-backend"
|
||||||
version = "0.2.92"
|
version = "0.2.93"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
|
checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bumpalo",
|
"bumpalo",
|
||||||
"log",
|
"log",
|
||||||
|
@ -1537,9 +1538,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-macro"
|
name = "wasm-bindgen-macro"
|
||||||
version = "0.2.92"
|
version = "0.2.93"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
|
checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"quote",
|
"quote",
|
||||||
"wasm-bindgen-macro-support",
|
"wasm-bindgen-macro-support",
|
||||||
|
@ -1547,9 +1548,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-macro-support"
|
name = "wasm-bindgen-macro-support"
|
||||||
version = "0.2.92"
|
version = "0.2.93"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
|
checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
|
@ -1560,9 +1561,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-shared"
|
name = "wasm-bindgen-shared"
|
||||||
version = "0.2.92"
|
version = "0.2.93"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
|
checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "web-sys"
|
name = "web-sys"
|
||||||
|
|
|
@ -19,7 +19,7 @@ yew-router = "0.17"
|
||||||
wasm-bindgen-futures = "0.4"
|
wasm-bindgen-futures = "0.4"
|
||||||
|
|
||||||
# needs to be in sync with nixpkgs
|
# needs to be in sync with nixpkgs
|
||||||
wasm-bindgen = "= 0.2.92"
|
wasm-bindgen = "= 0.2.93"
|
||||||
uuid = { version = "1.3.3", features = ["v4", "serde"] }
|
uuid = { version = "1.3.3", features = ["v4", "serde"] }
|
||||||
|
|
||||||
[dependencies.serde]
|
[dependencies.serde]
|
||||||
|
|
21
corp/russian/predlozhnik/Cargo.lock
generated
21
corp/russian/predlozhnik/Cargo.lock
generated
|
@ -363,19 +363,20 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen"
|
name = "wasm-bindgen"
|
||||||
version = "0.2.92"
|
version = "0.2.93"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
|
checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
|
"once_cell",
|
||||||
"wasm-bindgen-macro",
|
"wasm-bindgen-macro",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-backend"
|
name = "wasm-bindgen-backend"
|
||||||
version = "0.2.92"
|
version = "0.2.93"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
|
checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bumpalo",
|
"bumpalo",
|
||||||
"log",
|
"log",
|
||||||
|
@ -400,9 +401,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-macro"
|
name = "wasm-bindgen-macro"
|
||||||
version = "0.2.92"
|
version = "0.2.93"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
|
checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"quote",
|
"quote",
|
||||||
"wasm-bindgen-macro-support",
|
"wasm-bindgen-macro-support",
|
||||||
|
@ -410,9 +411,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-macro-support"
|
name = "wasm-bindgen-macro-support"
|
||||||
version = "0.2.92"
|
version = "0.2.93"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
|
checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
|
@ -423,9 +424,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-shared"
|
name = "wasm-bindgen-shared"
|
||||||
version = "0.2.92"
|
version = "0.2.93"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
|
checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "web-sys"
|
name = "web-sys"
|
||||||
|
|
|
@ -9,4 +9,4 @@ lazy_static = "1.4"
|
||||||
yew = "0.19"
|
yew = "0.19"
|
||||||
|
|
||||||
# needs to be in sync with nixpkgs
|
# needs to be in sync with nixpkgs
|
||||||
wasm-bindgen = "= 0.2.92"
|
wasm-bindgen = "= 0.2.93"
|
||||||
|
|
91
docs/importing-projects.md
Normal file
91
docs/importing-projects.md
Normal file
|
@ -0,0 +1,91 @@
|
||||||
|
Importing projects into depot
|
||||||
|
=============================
|
||||||
|
|
||||||
|
Before importing an existing `git`-based project into depot, a few questions
|
||||||
|
need to be answered:
|
||||||
|
|
||||||
|
|
||||||
|
* Is the project licensed under a free software license, or public domain?
|
||||||
|
* Do you need to import existing history?
|
||||||
|
* Do you need to export new history with hashes that continue on from the old
|
||||||
|
history? (e.g. importing an existing repository, and exporting from depot to
|
||||||
|
the old upstream)
|
||||||
|
|
||||||
|
Think about this and then pick an approach below:
|
||||||
|
|
||||||
|
## Import with no history (just commit)
|
||||||
|
|
||||||
|
Simply copy the files to where you want them to be in depot, and commit. Nothing
|
||||||
|
else to do!
|
||||||
|
|
||||||
|
## Import without continuous history (subtree merge)
|
||||||
|
|
||||||
|
This import approach lets you drop an existing project into depot, keep its
|
||||||
|
existing history, but not retain the ability to continue external history.
|
||||||
|
|
||||||
|
This means that if you, for example, import a project from a different git host
|
||||||
|
using this method, and then continue to commit to it inside of depot, you will
|
||||||
|
not be able to export a history consistent with your previous hashes using
|
||||||
|
`josh`.
|
||||||
|
|
||||||
|
Commit hashes before the import will exist in depot and be valid.
|
||||||
|
|
||||||
|
Still, this approach might be viable if a project "moves into" depot, or has
|
||||||
|
nothing depending on it externally.
|
||||||
|
|
||||||
|
1. Pick a location in depot where you want your project to be (`$loc` from now on).
|
||||||
|
2. Fetch your project into the same git store as your depot clone (e.g. by
|
||||||
|
adding it as an upstream and fetching it).
|
||||||
|
3. Pick the commit you want to merge (`$commit` from now on).
|
||||||
|
4. Run `git subtree add --prefix=$loc $commit`, which will create the correct
|
||||||
|
merge commit.
|
||||||
|
5. Ensure Gerrit [knows about your commit](#preparing-merges-in-gerrit) for the
|
||||||
|
parent that is being merged.
|
||||||
|
6. Modify the merge commit's message to start with `subtree($project_name):`.
|
||||||
|
Gerrit **will not** allow merge commits that do not follow this format.
|
||||||
|
7. Push your subtree commit for review as normal.
|
||||||
|
|
||||||
|
## Import with continuous history
|
||||||
|
|
||||||
|
This approach imports the history using `josh`, which means that external
|
||||||
|
history before/after the import is consistent (you can continue committing in
|
||||||
|
`depot`, export the history back out, and from an external perspective nothing
|
||||||
|
changes).
|
||||||
|
|
||||||
|
This is what we did with repositories like `nix-1p` and `nixery`.
|
||||||
|
|
||||||
|
Note: Inside of depot, the pre-import commit hashes will **not make sense**.
|
||||||
|
`josh` will rewrite them in such a way that exporting the project will yield the
|
||||||
|
same hashes, but this rewriting changes the hashes of your commits inside of
|
||||||
|
depot.
|
||||||
|
|
||||||
|
1. Pick a location in depot where you want your project to be (`$loc`).
|
||||||
|
2. Fetch your project into the same git store as your depot clone (e.g. by
|
||||||
|
adding it as an upstream and fetching it).
|
||||||
|
3. Check out the commit you want to merge into depot.
|
||||||
|
4. Run `josh-filter ":prefix=$loc"`, and take note of the `FILTERED_HEAD` ref
|
||||||
|
that it produces (`$filtered` from now on).
|
||||||
|
5. Ensure Gerrit [knows about the filtered commit](#preparing-merges-in-gerrit).
|
||||||
|
6. Merge the filtered commit into depot using a standard merge, but make sure to
|
||||||
|
add the `--allow-unrelated-histories` flag. Your commit message **must**
|
||||||
|
start with `subtree($project_name):`, otherwise Gerrit will not let you push
|
||||||
|
a merge.
|
||||||
|
7. Push the merge commit for review as usual.
|
||||||
|
|
||||||
|
------------------------------------------------------
|
||||||
|
|
||||||
|
## Preparing merges in Gerrit
|
||||||
|
|
||||||
|
When pushing a merge to Gerrit, it needs to know about all ancestors of the
|
||||||
|
merge, otherwise it will try to interpret commits as new CLs and reject them for
|
||||||
|
not having a change ID (or create a huge number of CLs, if they do have one).
|
||||||
|
|
||||||
|
To prevent this, we have a special git ref called `subtree-staging` which you
|
||||||
|
can push external trees to.
|
||||||
|
|
||||||
|
Access to `subtree-staging` has to be granted by a TVL admin, so ping tazjin,
|
||||||
|
lukegb, flokli, sterni and so on before proceeding.
|
||||||
|
|
||||||
|
1. Determine the commit you want to merge (`$commit`).
|
||||||
|
2. Run `git push -f $commit origin/subtree-staging` (or replace `origin` with
|
||||||
|
whatever the TVL Gerrit remote is called in your clone).
|
|
@ -1,6 +1,7 @@
|
||||||
{ makeSetupHook }:
|
{ makeSetupHook }:
|
||||||
|
|
||||||
makeSetupHook {
|
makeSetupHook
|
||||||
|
{
|
||||||
name = "rules_java_bazel_hook";
|
name = "rules_java_bazel_hook";
|
||||||
substitutions = {
|
substitutions = {
|
||||||
local_java = ./local_java;
|
local_java = ./local_java;
|
||||||
|
|
|
@ -37,7 +37,9 @@ let
|
||||||
cp -R . $out
|
cp -R . $out
|
||||||
'';
|
'';
|
||||||
};
|
};
|
||||||
in makeSetupHook {
|
in
|
||||||
|
makeSetupHook
|
||||||
|
{
|
||||||
name = "bazelbuild-rules_nodejs-5-hook";
|
name = "bazelbuild-rules_nodejs-5-hook";
|
||||||
propagatedBuildInputs = [
|
propagatedBuildInputs = [
|
||||||
nodejs
|
nodejs
|
||||||
|
|
|
@ -16,12 +16,16 @@
|
||||||
|
|
||||||
let
|
let
|
||||||
cleanAttrs = lib.flip removeAttrs [
|
cleanAttrs = lib.flip removeAttrs [
|
||||||
"bazelTargets" "depsHash" "extraCacheInstall" "extraBuildSetup" "extraBuildInstall"
|
"bazelTargets"
|
||||||
|
"depsHash"
|
||||||
|
"extraCacheInstall"
|
||||||
|
"extraBuildSetup"
|
||||||
|
"extraBuildInstall"
|
||||||
];
|
];
|
||||||
attrs = cleanAttrs baseAttrs;
|
attrs = cleanAttrs baseAttrs;
|
||||||
|
|
||||||
base = stdenv.mkDerivation (attrs // {
|
base = stdenv.mkDerivation (attrs // {
|
||||||
nativeBuildInputs = (attrs.nativeBuildInputs or []) ++ [
|
nativeBuildInputs = (attrs.nativeBuildInputs or [ ]) ++ [
|
||||||
bazel
|
bazel
|
||||||
];
|
];
|
||||||
|
|
||||||
|
@ -69,7 +73,7 @@ let
|
||||||
|
|
||||||
inherit cache;
|
inherit cache;
|
||||||
|
|
||||||
nativeBuildInputs = (base.nativeBuildInputs or []) ++ [
|
nativeBuildInputs = (base.nativeBuildInputs or [ ]) ++ [
|
||||||
coreutils
|
coreutils
|
||||||
];
|
];
|
||||||
|
|
||||||
|
@ -102,4 +106,5 @@ let
|
||||||
runHook postInstall
|
runHook postInstall
|
||||||
'';
|
'';
|
||||||
});
|
});
|
||||||
in build
|
in
|
||||||
|
build
|
||||||
|
|
|
@ -23,7 +23,6 @@ let
|
||||||
{ name
|
{ name
|
||||||
, dependencies ? [ ]
|
, dependencies ? [ ]
|
||||||
, doCheck ? true
|
, doCheck ? true
|
||||||
,
|
|
||||||
}: src:
|
}: src:
|
||||||
(if doCheck then testRustSimple else pkgs.lib.id)
|
(if doCheck then testRustSimple else pkgs.lib.id)
|
||||||
(pkgs.buildRustCrate ({
|
(pkgs.buildRustCrate ({
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
# This program is used as a Gerrit hook to trigger builds on
|
# This program is used as a Gerrit hook to trigger builds on
|
||||||
# Buildkite, Sourcegraph reindexing and other maintenance tasks.
|
# Buildkite and perform other maintenance tasks.
|
||||||
{ depot, ... }:
|
{ depot, ... }:
|
||||||
|
|
||||||
depot.nix.buildGo.program {
|
depot.nix.buildGo.program {
|
||||||
|
|
|
@ -8,7 +8,6 @@
|
||||||
//
|
//
|
||||||
// Gerrit (ref-updated) hook:
|
// Gerrit (ref-updated) hook:
|
||||||
// - Trigger Buildkite CI builds
|
// - Trigger Buildkite CI builds
|
||||||
// - Trigger SourceGraph repository index updates
|
|
||||||
//
|
//
|
||||||
// Buildkite (post-command) hook:
|
// Buildkite (post-command) hook:
|
||||||
// - Submit CL verification status back to Gerrit
|
// - Submit CL verification status back to Gerrit
|
||||||
|
@ -55,10 +54,6 @@ type config struct {
|
||||||
BuildkiteProject string `json:"buildkiteProject"`
|
BuildkiteProject string `json:"buildkiteProject"`
|
||||||
BuildkiteToken string `json:"buildkiteToken"`
|
BuildkiteToken string `json:"buildkiteToken"`
|
||||||
GerritChangeName string `json:"gerritChangeName"`
|
GerritChangeName string `json:"gerritChangeName"`
|
||||||
|
|
||||||
// Optional configuration for Sourcegraph trigger updates.
|
|
||||||
SourcegraphUrl string `json:"sourcegraphUrl"`
|
|
||||||
SourcegraphToken string `json:"sourcegraphToken"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// buildTrigger represents the information passed to besadii when it
|
// buildTrigger represents the information passed to besadii when it
|
||||||
|
@ -154,11 +149,6 @@ func loadConfig() (*config, error) {
|
||||||
return nil, fmt.Errorf("invalid 'gerritChangeName': %s", cfg.GerritChangeName)
|
return nil, fmt.Errorf("invalid 'gerritChangeName': %s", cfg.GerritChangeName)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Rudimentary config validation logic
|
|
||||||
if cfg.SourcegraphUrl != "" && cfg.SourcegraphToken == "" {
|
|
||||||
return nil, fmt.Errorf("'SourcegraphToken' must be set if 'SourcegraphUrl' is set")
|
|
||||||
}
|
|
||||||
|
|
||||||
if cfg.Repository == "" || cfg.Branch == "" {
|
if cfg.Repository == "" || cfg.Branch == "" {
|
||||||
return nil, fmt.Errorf("missing repository configuration (required: repository, branch)")
|
return nil, fmt.Errorf("missing repository configuration (required: repository, branch)")
|
||||||
}
|
}
|
||||||
|
@ -299,26 +289,6 @@ func triggerBuild(cfg *config, log *syslog.Writer, trigger *buildTrigger) error
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Trigger a Sourcegraph repository index update.
|
|
||||||
//
|
|
||||||
// https://docs.sourcegraph.com/admin/repo/webhooks
|
|
||||||
func triggerIndexUpdate(cfg *config, log *syslog.Writer) error {
|
|
||||||
req, err := http.NewRequest("POST", cfg.SourcegraphUrl, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
req.Header.Add("Authorization", "token "+cfg.SourcegraphToken)
|
|
||||||
|
|
||||||
_, err = http.DefaultClient.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("failed to trigger Sourcegraph index update: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
log.Info("triggered sourcegraph index update")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Gerrit passes more flags than we want, but Rob Pike decided[0] in
|
// Gerrit passes more flags than we want, but Rob Pike decided[0] in
|
||||||
// 2013 that the Go art project will not allow users to ignore flags
|
// 2013 that the Go art project will not allow users to ignore flags
|
||||||
// because he "doesn't like it". This function allows users to ignore
|
// because he "doesn't like it". This function allows users to ignore
|
||||||
|
@ -458,13 +428,6 @@ func gerritHookMain(cfg *config, log *syslog.Writer, trigger *buildTrigger) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Err(fmt.Sprintf("failed to trigger Buildkite build: %s", err))
|
log.Err(fmt.Sprintf("failed to trigger Buildkite build: %s", err))
|
||||||
}
|
}
|
||||||
|
|
||||||
if cfg.SourcegraphUrl != "" && trigger.ref == cfg.Branch {
|
|
||||||
err = triggerIndexUpdate(cfg, log)
|
|
||||||
if err != nil {
|
|
||||||
log.Err(fmt.Sprintf("failed to trigger sourcegraph index update: %s", err))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func postCommandMain(cfg *config) {
|
func postCommandMain(cfg *config) {
|
||||||
|
|
|
@ -8,7 +8,9 @@ terraform {
|
||||||
}
|
}
|
||||||
|
|
||||||
backend "s3" {
|
backend "s3" {
|
||||||
endpoint = "https://objects.dc-sto1.glesys.net"
|
endpoints = {
|
||||||
|
s3 = "https://objects.dc-sto1.glesys.net"
|
||||||
|
}
|
||||||
bucket = "tvl-state"
|
bucket = "tvl-state"
|
||||||
key = "terraform/tvl-buildkite"
|
key = "terraform/tvl-buildkite"
|
||||||
region = "glesys"
|
region = "glesys"
|
||||||
|
@ -16,6 +18,8 @@ terraform {
|
||||||
skip_credentials_validation = true
|
skip_credentials_validation = true
|
||||||
skip_region_validation = true
|
skip_region_validation = true
|
||||||
skip_metadata_api_check = true
|
skip_metadata_api_check = true
|
||||||
|
skip_requesting_account_id = true
|
||||||
|
skip_s3_checksum = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
280
ops/journaldriver/Cargo.lock
generated
280
ops/journaldriver/Cargo.lock
generated
|
@ -4,18 +4,18 @@ version = 3
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "aho-corasick"
|
name = "aho-corasick"
|
||||||
version = "1.1.2"
|
version = "1.1.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
|
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"memchr",
|
"memchr",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "anyhow"
|
name = "anyhow"
|
||||||
version = "1.0.75"
|
version = "1.0.86"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
|
checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "base64"
|
name = "base64"
|
||||||
|
@ -25,9 +25,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bitflags"
|
name = "bitflags"
|
||||||
version = "2.4.1"
|
version = "2.6.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
|
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "build-env"
|
name = "build-env"
|
||||||
|
@ -37,11 +37,11 @@ checksum = "e068f31938f954b695423ecaf756179597627d0828c0d3e48c0a722a8b23cf9e"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cc"
|
name = "cc"
|
||||||
version = "1.0.84"
|
version = "1.1.15"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "0f8e7c90afad890484a21653d08b6e209ae34770fb5ee298f9c699fcc1e5c856"
|
checksum = "57b6a275aa2903740dc87da01c62040406b8812552e97129a63ea8850a17c6e6"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"libc",
|
"shlex",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -73,9 +73,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "curl"
|
name = "curl"
|
||||||
version = "0.4.44"
|
version = "0.4.46"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "509bd11746c7ac09ebd19f0b17782eae80aadee26237658a6b4808afb5c11a22"
|
checksum = "1e2161dd6eba090ff1594084e95fd67aeccf04382ffea77999ea94ed42ec67b6"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"curl-sys",
|
"curl-sys",
|
||||||
"libc",
|
"libc",
|
||||||
|
@ -83,14 +83,14 @@ dependencies = [
|
||||||
"openssl-sys",
|
"openssl-sys",
|
||||||
"schannel",
|
"schannel",
|
||||||
"socket2",
|
"socket2",
|
||||||
"winapi",
|
"windows-sys 0.52.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "curl-sys"
|
name = "curl-sys"
|
||||||
version = "0.4.68+curl-8.4.0"
|
version = "0.4.74+curl-8.9.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b4a0d18d88360e374b16b2273c832b5e57258ffc1d4aa4f96b108e0738d5752f"
|
checksum = "8af10b986114528fcdc4b63b6f5f021b7057618411046a4de2ba0f0149a097bf"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cc",
|
"cc",
|
||||||
"libc",
|
"libc",
|
||||||
|
@ -98,14 +98,14 @@ dependencies = [
|
||||||
"openssl-sys",
|
"openssl-sys",
|
||||||
"pkg-config",
|
"pkg-config",
|
||||||
"vcpkg",
|
"vcpkg",
|
||||||
"windows-sys",
|
"windows-sys 0.52.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "deranged"
|
name = "deranged"
|
||||||
version = "0.3.9"
|
version = "0.3.11"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3"
|
checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"powerfmt",
|
"powerfmt",
|
||||||
"serde",
|
"serde",
|
||||||
|
@ -113,9 +113,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "env_logger"
|
name = "env_logger"
|
||||||
version = "0.10.1"
|
version = "0.10.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "95b3f3e67048839cb0d0781f445682a35113da7121f7c949db0e2be96a4fbece"
|
checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"humantime",
|
"humantime",
|
||||||
"is-terminal",
|
"is-terminal",
|
||||||
|
@ -124,16 +124,6 @@ dependencies = [
|
||||||
"termcolor",
|
"termcolor",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "errno"
|
|
||||||
version = "0.3.6"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7c18ee0ed65a5f1f81cac6b1d213b69c35fa47d4252ad41f1486dbd8226fe36e"
|
|
||||||
dependencies = [
|
|
||||||
"libc",
|
|
||||||
"windows-sys",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "foreign-types"
|
name = "foreign-types"
|
||||||
version = "0.3.2"
|
version = "0.3.2"
|
||||||
|
@ -178,9 +168,9 @@ checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "hermit-abi"
|
name = "hermit-abi"
|
||||||
version = "0.3.3"
|
version = "0.4.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7"
|
checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "humantime"
|
name = "humantime"
|
||||||
|
@ -190,20 +180,20 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "is-terminal"
|
name = "is-terminal"
|
||||||
version = "0.4.9"
|
version = "0.4.13"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
|
checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"hermit-abi",
|
"hermit-abi",
|
||||||
"rustix",
|
"libc",
|
||||||
"windows-sys",
|
"windows-sys 0.52.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "itoa"
|
name = "itoa"
|
||||||
version = "1.0.9"
|
version = "1.0.11"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
|
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "journaldriver"
|
name = "journaldriver"
|
||||||
|
@ -224,15 +214,15 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lazy_static"
|
name = "lazy_static"
|
||||||
version = "1.4.0"
|
version = "1.5.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libc"
|
name = "libc"
|
||||||
version = "0.2.150"
|
version = "0.2.158"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
|
checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libsystemd-sys"
|
name = "libsystemd-sys"
|
||||||
|
@ -247,9 +237,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libz-sys"
|
name = "libz-sys"
|
||||||
version = "1.1.12"
|
version = "1.1.20"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d97137b25e321a73eef1418d1d5d2eda4d77e12813f8e6dead84bc52c5870a7b"
|
checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cc",
|
"cc",
|
||||||
"libc",
|
"libc",
|
||||||
|
@ -257,17 +247,11 @@ dependencies = [
|
||||||
"vcpkg",
|
"vcpkg",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "linux-raw-sys"
|
|
||||||
version = "0.4.11"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "log"
|
name = "log"
|
||||||
version = "0.4.20"
|
version = "0.4.22"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
|
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "medallion"
|
name = "medallion"
|
||||||
|
@ -285,21 +269,27 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "memchr"
|
name = "memchr"
|
||||||
version = "2.6.4"
|
version = "2.7.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
|
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "num-conv"
|
||||||
|
version = "0.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "once_cell"
|
name = "once_cell"
|
||||||
version = "1.18.0"
|
version = "1.19.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
|
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "openssl"
|
name = "openssl"
|
||||||
version = "0.10.59"
|
version = "0.10.66"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7a257ad03cd8fb16ad4172fedf8094451e1af1c4b70097636ef2eac9a5f0cc33"
|
checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags",
|
"bitflags",
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
|
@ -329,9 +319,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "openssl-sys"
|
name = "openssl-sys"
|
||||||
version = "0.9.95"
|
version = "0.9.103"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "40a4130519a360279579c2053038317e40eff64d13fd3f004f9e1b72b8a6aaf9"
|
checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cc",
|
"cc",
|
||||||
"libc",
|
"libc",
|
||||||
|
@ -341,9 +331,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pkg-config"
|
name = "pkg-config"
|
||||||
version = "0.3.27"
|
version = "0.3.30"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964"
|
checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "powerfmt"
|
name = "powerfmt"
|
||||||
|
@ -353,27 +343,27 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "proc-macro2"
|
name = "proc-macro2"
|
||||||
version = "1.0.69"
|
version = "1.0.86"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
|
checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"unicode-ident",
|
"unicode-ident",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "quote"
|
name = "quote"
|
||||||
version = "1.0.33"
|
version = "1.0.37"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
|
checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex"
|
name = "regex"
|
||||||
version = "1.10.2"
|
version = "1.10.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343"
|
checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aho-corasick",
|
"aho-corasick",
|
||||||
"memchr",
|
"memchr",
|
||||||
|
@ -383,9 +373,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex-automata"
|
name = "regex-automata"
|
||||||
version = "0.4.3"
|
version = "0.4.7"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f"
|
checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aho-corasick",
|
"aho-corasick",
|
||||||
"memchr",
|
"memchr",
|
||||||
|
@ -394,52 +384,39 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex-syntax"
|
name = "regex-syntax"
|
||||||
version = "0.8.2"
|
version = "0.8.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
|
checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rustix"
|
|
||||||
version = "0.38.21"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3"
|
|
||||||
dependencies = [
|
|
||||||
"bitflags",
|
|
||||||
"errno",
|
|
||||||
"libc",
|
|
||||||
"linux-raw-sys",
|
|
||||||
"windows-sys",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ryu"
|
name = "ryu"
|
||||||
version = "1.0.15"
|
version = "1.0.18"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741"
|
checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "schannel"
|
name = "schannel"
|
||||||
version = "0.1.22"
|
version = "0.1.23"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88"
|
checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"windows-sys",
|
"windows-sys 0.52.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde"
|
name = "serde"
|
||||||
version = "1.0.192"
|
version = "1.0.209"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001"
|
checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"serde_derive",
|
"serde_derive",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_derive"
|
name = "serde_derive"
|
||||||
version = "1.0.192"
|
version = "1.0.209"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1"
|
checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
|
@ -448,30 +425,37 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_json"
|
name = "serde_json"
|
||||||
version = "1.0.108"
|
version = "1.0.127"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
|
checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"itoa",
|
"itoa",
|
||||||
|
"memchr",
|
||||||
"ryu",
|
"ryu",
|
||||||
"serde",
|
"serde",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "socket2"
|
name = "shlex"
|
||||||
version = "0.4.10"
|
version = "1.3.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d"
|
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "socket2"
|
||||||
|
version = "0.5.7"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"libc",
|
"libc",
|
||||||
"winapi",
|
"windows-sys 0.52.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn"
|
name = "syn"
|
||||||
version = "2.0.39"
|
version = "2.0.77"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
|
checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
|
@ -494,21 +478,22 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "termcolor"
|
name = "termcolor"
|
||||||
version = "1.3.0"
|
version = "1.4.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "6093bad37da69aab9d123a8091e4be0aa4a03e4d601ec641c327398315f62b64"
|
checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"winapi-util",
|
"winapi-util",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "time"
|
name = "time"
|
||||||
version = "0.3.30"
|
version = "0.3.36"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5"
|
checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"deranged",
|
"deranged",
|
||||||
"itoa",
|
"itoa",
|
||||||
|
"num-conv",
|
||||||
"powerfmt",
|
"powerfmt",
|
||||||
"serde",
|
"serde",
|
||||||
"time-core",
|
"time-core",
|
||||||
|
@ -523,10 +508,11 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "time-macros"
|
name = "time-macros"
|
||||||
version = "0.2.15"
|
version = "0.2.18"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20"
|
checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"num-conv",
|
||||||
"time-core",
|
"time-core",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@ -548,55 +534,43 @@ version = "0.2.15"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
|
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "winapi"
|
|
||||||
version = "0.3.9"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
|
|
||||||
dependencies = [
|
|
||||||
"winapi-i686-pc-windows-gnu",
|
|
||||||
"winapi-x86_64-pc-windows-gnu",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "winapi-i686-pc-windows-gnu"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "winapi-util"
|
name = "winapi-util"
|
||||||
version = "0.1.6"
|
version = "0.1.9"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
|
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"winapi",
|
"windows-sys 0.59.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "winapi-x86_64-pc-windows-gnu"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows-sys"
|
name = "windows-sys"
|
||||||
version = "0.48.0"
|
version = "0.52.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
|
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||||
|
dependencies = [
|
||||||
|
"windows-targets",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows-sys"
|
||||||
|
version = "0.59.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"windows-targets",
|
"windows-targets",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows-targets"
|
name = "windows-targets"
|
||||||
version = "0.48.5"
|
version = "0.52.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
|
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"windows_aarch64_gnullvm",
|
"windows_aarch64_gnullvm",
|
||||||
"windows_aarch64_msvc",
|
"windows_aarch64_msvc",
|
||||||
"windows_i686_gnu",
|
"windows_i686_gnu",
|
||||||
|
"windows_i686_gnullvm",
|
||||||
"windows_i686_msvc",
|
"windows_i686_msvc",
|
||||||
"windows_x86_64_gnu",
|
"windows_x86_64_gnu",
|
||||||
"windows_x86_64_gnullvm",
|
"windows_x86_64_gnullvm",
|
||||||
|
@ -605,42 +579,48 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_aarch64_gnullvm"
|
name = "windows_aarch64_gnullvm"
|
||||||
version = "0.48.5"
|
version = "0.52.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
|
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_aarch64_msvc"
|
name = "windows_aarch64_msvc"
|
||||||
version = "0.48.5"
|
version = "0.52.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
|
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_i686_gnu"
|
name = "windows_i686_gnu"
|
||||||
version = "0.48.5"
|
version = "0.52.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
|
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_gnullvm"
|
||||||
|
version = "0.52.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_i686_msvc"
|
name = "windows_i686_msvc"
|
||||||
version = "0.48.5"
|
version = "0.52.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
|
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_x86_64_gnu"
|
name = "windows_x86_64_gnu"
|
||||||
version = "0.48.5"
|
version = "0.52.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
|
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_x86_64_gnullvm"
|
name = "windows_x86_64_gnullvm"
|
||||||
version = "0.48.5"
|
version = "0.52.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
|
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_x86_64_msvc"
|
name = "windows_x86_64_msvc"
|
||||||
version = "0.48.5"
|
version = "0.52.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
|
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
|
||||||
|
|
|
@ -10,20 +10,29 @@ terraform {
|
||||||
}
|
}
|
||||||
|
|
||||||
backend "s3" {
|
backend "s3" {
|
||||||
endpoint = "https://objects.dc-sto1.glesys.net"
|
endpoints = {
|
||||||
bucket = "tvl-state"
|
s3 = "https://objects.dc-sto1.glesys.net"
|
||||||
key = "terraform/tvl-keycloak"
|
}
|
||||||
region = "glesys"
|
bucket = "tvl-state"
|
||||||
|
key = "terraform/tvl-keycloak"
|
||||||
|
region = "glesys"
|
||||||
|
|
||||||
skip_credentials_validation = true
|
skip_credentials_validation = true
|
||||||
skip_region_validation = true
|
skip_region_validation = true
|
||||||
skip_metadata_api_check = true
|
skip_metadata_api_check = true
|
||||||
|
skip_requesting_account_id = true
|
||||||
|
skip_s3_checksum = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
provider "keycloak" {
|
provider "keycloak" {
|
||||||
client_id = "terraform"
|
client_id = "terraform"
|
||||||
url = "https://auth.tvl.fyi"
|
url = "https://auth.tvl.fyi"
|
||||||
|
# NOTE: Docs mention this applies to "users of the legacy distribution of keycloak".
|
||||||
|
# However, we get a "failed to perform initial login to Keycloak: error
|
||||||
|
# sending POST request to https://auth.tvl.fyi/realms/master/protocol/openid-connect/token: 404 Not Found"
|
||||||
|
# if we don't set this.
|
||||||
|
base_path = "/auth"
|
||||||
}
|
}
|
||||||
|
|
||||||
resource "keycloak_realm" "tvl" {
|
resource "keycloak_realm" "tvl" {
|
||||||
|
|
|
@ -22,6 +22,13 @@ resource "keycloak_ldap_user_federation" "tvl_ldap" {
|
||||||
"inetOrgPerson",
|
"inetOrgPerson",
|
||||||
"organizationalPerson",
|
"organizationalPerson",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
lifecycle {
|
||||||
|
# Without this, terraform wants to recreate the resource.
|
||||||
|
ignore_changes = [
|
||||||
|
delete_default_mappers
|
||||||
|
]
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
# keycloak_oidc_identity_provider.github will be destroyed
|
# keycloak_oidc_identity_provider.github will be destroyed
|
||||||
|
@ -29,7 +36,7 @@ resource "keycloak_ldap_user_federation" "tvl_ldap" {
|
||||||
resource "keycloak_oidc_identity_provider" "github" {
|
resource "keycloak_oidc_identity_provider" "github" {
|
||||||
alias = "github"
|
alias = "github"
|
||||||
provider_id = "github"
|
provider_id = "github"
|
||||||
client_id = "6d7f8bb2e82bb6739556"
|
client_id = "Iv23liXfGNIr7InMg5Uo"
|
||||||
client_secret = var.github_client_secret
|
client_secret = var.github_client_secret
|
||||||
realm = keycloak_realm.tvl.id
|
realm = keycloak_realm.tvl.id
|
||||||
backchannel_supported = false
|
backchannel_supported = false
|
||||||
|
|
|
@ -29,13 +29,29 @@ func Merge(in1 *map[string]interface{}, in2 *map[string]interface{}) *map[string
|
||||||
return in1
|
return in1
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// The maps are map[string]interface{} with unknown depth.
|
||||||
|
// Loop over both maps into every level and merge them.
|
||||||
new := make(map[string]interface{})
|
new := make(map[string]interface{})
|
||||||
|
|
||||||
for k, v := range *in1 {
|
for k, v := range *in1 {
|
||||||
new[k] = v
|
new[k] = v
|
||||||
}
|
}
|
||||||
|
|
||||||
for k, v := range *in2 {
|
for k, v := range *in2 {
|
||||||
new[k] = v
|
if existing, ok := new[k]; ok {
|
||||||
|
// If both values are maps, merge them recursively
|
||||||
|
if existingMap, ok := existing.(map[string]interface{}); ok {
|
||||||
|
if newMap, ok := v.(map[string]interface{}); ok {
|
||||||
|
new[k] = *Merge(&existingMap, &newMap)
|
||||||
|
} else {
|
||||||
|
new[k] = v
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
new[k] = v
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
new[k] = v
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return &new
|
return &new
|
||||||
|
|
|
@ -47,6 +47,9 @@ func TestMergeWithNilMap(t *testing.T) {
|
||||||
func TestMergeMaps(t *testing.T) {
|
func TestMergeMaps(t *testing.T) {
|
||||||
map1 := map[string]interface{}{
|
map1 := map[string]interface{}{
|
||||||
"foo": "bar",
|
"foo": "bar",
|
||||||
|
"baz": map[string]interface{}{
|
||||||
|
"qux": "quux",
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
map2 := map[string]interface{}{
|
map2 := map[string]interface{}{
|
||||||
|
@ -56,6 +59,9 @@ func TestMergeMaps(t *testing.T) {
|
||||||
result := Merge(&map1, &map2)
|
result := Merge(&map1, &map2)
|
||||||
expected := map[string]interface{}{
|
expected := map[string]interface{}{
|
||||||
"foo": "bar",
|
"foo": "bar",
|
||||||
|
"baz": map[string]interface{}{
|
||||||
|
"qux": "quux",
|
||||||
|
},
|
||||||
"bar": "baz",
|
"bar": "baz",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -3,6 +3,8 @@
|
||||||
(with depot.ops.machines; [
|
(with depot.ops.machines; [
|
||||||
sanduny
|
sanduny
|
||||||
whitby
|
whitby
|
||||||
|
nixery-01
|
||||||
|
volgasprint-cache
|
||||||
]) ++
|
]) ++
|
||||||
|
|
||||||
(with depot.users.tazjin.nixos; [
|
(with depot.users.tazjin.nixos; [
|
||||||
|
|
153
ops/machines/volgasprint-cache/default.nix
Normal file
153
ops/machines/volgasprint-cache/default.nix
Normal file
|
@ -0,0 +1,153 @@
|
||||||
|
# temporary machine for local binary cache proxy during VolgaSprint
|
||||||
|
|
||||||
|
{ depot, lib, pkgs, ... }: # readTree options
|
||||||
|
{ config, ... }: # passed by module system
|
||||||
|
|
||||||
|
let
|
||||||
|
mod = name: depot.path.origSrc + ("/ops/modules/" + name);
|
||||||
|
in
|
||||||
|
{
|
||||||
|
imports = [
|
||||||
|
(mod "tvl-users.nix")
|
||||||
|
];
|
||||||
|
|
||||||
|
boot = {
|
||||||
|
kernelPackages = pkgs.linuxKernel.packages.linux_rpi4;
|
||||||
|
initrd.availableKernelModules = [ "xhci_pci" "usbhid" "usb_storage" ];
|
||||||
|
loader = {
|
||||||
|
grub.enable = false;
|
||||||
|
generic-extlinux-compatible.enable = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
depot.auto-deploy = {
|
||||||
|
enable = true;
|
||||||
|
interval = "hourly";
|
||||||
|
};
|
||||||
|
|
||||||
|
fileSystems = {
|
||||||
|
"/" = {
|
||||||
|
device = "/dev/disk/by-label/NIXOS_SD";
|
||||||
|
fsType = "ext4";
|
||||||
|
options = [ "noatime" ];
|
||||||
|
};
|
||||||
|
"/var/public-nix-cache" = {
|
||||||
|
device = "/dev/sda1";
|
||||||
|
fsType = "ext4";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
networking = {
|
||||||
|
firewall = {
|
||||||
|
enable = true;
|
||||||
|
allowedTCPPorts = [ 80 443 8098 ];
|
||||||
|
};
|
||||||
|
|
||||||
|
hostName = "volgacache";
|
||||||
|
domain = "volgasprint.org";
|
||||||
|
|
||||||
|
wireless = {
|
||||||
|
enable = true;
|
||||||
|
networks.VolgaSprint.psk = "nixos-unstable";
|
||||||
|
interfaces = [ "wlan0" ];
|
||||||
|
};
|
||||||
|
|
||||||
|
wg-quick.interfaces = {
|
||||||
|
wg0 = {
|
||||||
|
address = [ "10.10.10.2/24" "fd42::1/128" ];
|
||||||
|
dns = [ "1.1.1.1" ];
|
||||||
|
privateKeyFile = "/etc/wireguard_private_key";
|
||||||
|
|
||||||
|
peers = [
|
||||||
|
{
|
||||||
|
publicKey = "2MZzEGJzA3HrwkHf91TaKJEHwCNyVvsTLWoIYHrCxhY=";
|
||||||
|
presharedKeyFile = "/etc/wireguard_preshared_key";
|
||||||
|
allowedIPs = [ "0.0.0.0/0" "::/0" ];
|
||||||
|
endpoint = "195.201.63.240:8098";
|
||||||
|
persistentKeepalive = 15;
|
||||||
|
}
|
||||||
|
];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
services.openssh.enable = true;
|
||||||
|
|
||||||
|
services.nginx = {
|
||||||
|
enable = true;
|
||||||
|
recommendedGzipSettings = true;
|
||||||
|
recommendedOptimisation = true;
|
||||||
|
|
||||||
|
appendHttpConfig = ''
|
||||||
|
proxy_cache_path /tmp/pkgcache levels=1:2 keys_zone=cachecache:100m max_size=20g inactive=365d use_temp_path=off;
|
||||||
|
|
||||||
|
# Cache only success status codes; in particular we don't want to cache 404s.
|
||||||
|
# See https://serverfault.com/a/690258/128321
|
||||||
|
map $status $cache_header {
|
||||||
|
200 "public";
|
||||||
|
302 "public";
|
||||||
|
default "no-cache";
|
||||||
|
}
|
||||||
|
access_log /var/log/nginx/access.log;
|
||||||
|
'';
|
||||||
|
|
||||||
|
virtualHosts."cache.volgasprint.org" = {
|
||||||
|
sslCertificate = "/etc/ssl/cache.volgasprint.org/key.pem";
|
||||||
|
sslCertificateKey = "/etc/ssl/cache.volgasprint.org/key.pem";
|
||||||
|
sslTrustedCertificate = "/etc/ssl/cache.volgasprint.org/chain.pem";
|
||||||
|
|
||||||
|
locations."/" = {
|
||||||
|
root = "/var/public-nix-cache";
|
||||||
|
extraConfig = ''
|
||||||
|
expires max;
|
||||||
|
add_header Cache-Control $cache_header always;
|
||||||
|
# Ask the upstream server if a file isn't available locally
|
||||||
|
error_page 404 = @fallback;
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
extraConfig = ''
|
||||||
|
# Using a variable for the upstream endpoint to ensure that it is
|
||||||
|
# resolved at runtime as opposed to once when the config file is loaded
|
||||||
|
# and then cached forever (we don't want that):
|
||||||
|
# see https://tenzer.dk/nginx-with-dynamic-upstreams/
|
||||||
|
# This fixes errors like
|
||||||
|
# nginx: [emerg] host not found in upstream "upstream.example.com"
|
||||||
|
# when the upstream host is not reachable for a short time when
|
||||||
|
# nginx is started.
|
||||||
|
resolver 80.67.169.12; # fdn dns
|
||||||
|
set $upstream_endpoint http://cache.nixos.org;
|
||||||
|
'';
|
||||||
|
|
||||||
|
locations."@fallback" = {
|
||||||
|
proxyPass = "$upstream_endpoint";
|
||||||
|
extraConfig = ''
|
||||||
|
proxy_cache cachecache;
|
||||||
|
proxy_cache_valid 200 302 60d;
|
||||||
|
expires max;
|
||||||
|
add_header Cache-Control $cache_header always;
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
# We always want to copy cache.nixos.org's nix-cache-info file,
|
||||||
|
# and ignore our own, because `nix-push` by default generates one
|
||||||
|
# without `Priority` field, and thus that file by default has priority
|
||||||
|
# 50 (compared to cache.nixos.org's `Priority: 40`), which will make
|
||||||
|
# download clients prefer `cache.nixos.org` over our binary cache.
|
||||||
|
locations."= /nix-cache-info" = {
|
||||||
|
# Note: This is duplicated with the `@fallback` above,
|
||||||
|
# would be nicer if we could redirect to the @fallback instead.
|
||||||
|
proxyPass = "$upstream_endpoint";
|
||||||
|
extraConfig = ''
|
||||||
|
proxy_cache cachecache;
|
||||||
|
proxy_cache_valid 200 302 60d;
|
||||||
|
expires max;
|
||||||
|
add_header Cache-Control $cache_header always;
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
hardware.enableRedistributableFirmware = true;
|
||||||
|
system.stateVersion = "23.11";
|
||||||
|
}
|
|
@ -11,6 +11,7 @@ in
|
||||||
imports = [
|
imports = [
|
||||||
(mod "atward.nix")
|
(mod "atward.nix")
|
||||||
(mod "cgit.nix")
|
(mod "cgit.nix")
|
||||||
|
(mod "cheddar.nix")
|
||||||
(mod "clbot.nix")
|
(mod "clbot.nix")
|
||||||
(mod "gerrit-autosubmit.nix")
|
(mod "gerrit-autosubmit.nix")
|
||||||
(mod "irccat.nix")
|
(mod "irccat.nix")
|
||||||
|
@ -24,7 +25,6 @@ in
|
||||||
(mod "paroxysm.nix")
|
(mod "paroxysm.nix")
|
||||||
(mod "restic.nix")
|
(mod "restic.nix")
|
||||||
(mod "smtprelay.nix")
|
(mod "smtprelay.nix")
|
||||||
(mod "sourcegraph.nix")
|
|
||||||
(mod "teleirc.nix")
|
(mod "teleirc.nix")
|
||||||
(mod "tvl-buildkite.nix")
|
(mod "tvl-buildkite.nix")
|
||||||
(mod "tvl-slapd/default.nix")
|
(mod "tvl-slapd/default.nix")
|
||||||
|
@ -306,6 +306,9 @@ in
|
||||||
agentCount = 32;
|
agentCount = 32;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
# Run Markdown/code renderer
|
||||||
|
services.depot.cheddar.enable = true;
|
||||||
|
|
||||||
# Start a local SMTP relay to Gmail (used by gerrit)
|
# Start a local SMTP relay to Gmail (used by gerrit)
|
||||||
services.depot.smtprelay = {
|
services.depot.smtprelay = {
|
||||||
enable = true;
|
enable = true;
|
||||||
|
@ -374,9 +377,6 @@ in
|
||||||
};
|
};
|
||||||
|
|
||||||
services.depot = {
|
services.depot = {
|
||||||
# Run a SourceGraph code search instance
|
|
||||||
sourcegraph.enable = true;
|
|
||||||
|
|
||||||
# Run a livegrep code search instance
|
# Run a livegrep code search instance
|
||||||
livegrep.enable = true;
|
livegrep.enable = true;
|
||||||
|
|
||||||
|
|
29
ops/modules/cheddar.nix
Normal file
29
ops/modules/cheddar.nix
Normal file
|
@ -0,0 +1,29 @@
|
||||||
|
{ depot, config, pkgs, lib, ... }:
|
||||||
|
|
||||||
|
let
|
||||||
|
cfg = config.services.depot.cheddar;
|
||||||
|
description = "cheddar - markdown/highlighting server";
|
||||||
|
in
|
||||||
|
{
|
||||||
|
options.services.depot.cheddar = with lib; {
|
||||||
|
enable = mkEnableOption description;
|
||||||
|
port = mkOption {
|
||||||
|
description = "Port on which cheddar should listen";
|
||||||
|
type = types.int;
|
||||||
|
default = 4238;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
config = lib.mkIf cfg.enable {
|
||||||
|
systemd.services.cheddar-server = {
|
||||||
|
inherit description;
|
||||||
|
wantedBy = [ "multi-user.target" ];
|
||||||
|
script = "${depot.tools.cheddar}/bin/cheddar --listen 0.0.0.0:${toString cfg.port} --sourcegraph-server";
|
||||||
|
|
||||||
|
serviceConfig = {
|
||||||
|
DynamicUser = true;
|
||||||
|
Restart = "always";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
|
@ -1,60 +0,0 @@
|
||||||
# Run sourcegraph, including its entire machinery, in a container.
|
|
||||||
# Running it outside of a container is a futile endeavour for now.
|
|
||||||
{ depot, config, pkgs, lib, ... }:
|
|
||||||
|
|
||||||
let
|
|
||||||
cfg = config.services.depot.sourcegraph;
|
|
||||||
in
|
|
||||||
{
|
|
||||||
options.services.depot.sourcegraph = with lib; {
|
|
||||||
enable = mkEnableOption "SourceGraph code search engine";
|
|
||||||
|
|
||||||
port = mkOption {
|
|
||||||
description = "Port on which SourceGraph should listen";
|
|
||||||
type = types.int;
|
|
||||||
default = 3463;
|
|
||||||
};
|
|
||||||
|
|
||||||
cheddarPort = mkOption {
|
|
||||||
description = "Port on which cheddar should listen";
|
|
||||||
type = types.int;
|
|
||||||
default = 4238;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
config = lib.mkIf cfg.enable {
|
|
||||||
# Run a cheddar syntax highlighting server
|
|
||||||
systemd.services.cheddar-server = {
|
|
||||||
wantedBy = [ "multi-user.target" ];
|
|
||||||
script = "${depot.tools.cheddar}/bin/cheddar --listen 0.0.0.0:${toString cfg.cheddarPort} --sourcegraph-server";
|
|
||||||
|
|
||||||
serviceConfig = {
|
|
||||||
DynamicUser = true;
|
|
||||||
Restart = "always";
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
virtualisation.oci-containers.containers.sourcegraph = {
|
|
||||||
image = "sourcegraph/server:3.40.0";
|
|
||||||
|
|
||||||
ports = [
|
|
||||||
"127.0.0.1:${toString cfg.port}:7080"
|
|
||||||
];
|
|
||||||
|
|
||||||
volumes = [
|
|
||||||
"/var/lib/sourcegraph/etc:/etc/sourcegraph"
|
|
||||||
"/var/lib/sourcegraph/data:/var/opt/sourcegraph"
|
|
||||||
];
|
|
||||||
|
|
||||||
# TODO(tazjin): Figure out what changed in the protocol.
|
|
||||||
# environment.SRC_SYNTECT_SERVER = "http://172.17.0.1:${toString cfg.cheddarPort}";
|
|
||||||
|
|
||||||
# Sourcegraph needs a higher nofile limit, it logs warnings
|
|
||||||
# otherwise (unclear whether it actually affects the service).
|
|
||||||
extraOptions = [
|
|
||||||
"--ulimit"
|
|
||||||
"nofile=10000:10000"
|
|
||||||
];
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
# This configuration redirects from the previous Sourcegraph instance to
|
||||||
|
# livegrep/cgit where appropriate.
|
||||||
{ config, ... }:
|
{ config, ... }:
|
||||||
|
|
||||||
{
|
{
|
||||||
|
@ -13,17 +15,50 @@
|
||||||
forceSSL = true;
|
forceSSL = true;
|
||||||
|
|
||||||
extraConfig = ''
|
extraConfig = ''
|
||||||
location = / {
|
set $lineno "";
|
||||||
return 301 https://cs.tvl.fyi/depot;
|
|
||||||
|
# depot root
|
||||||
|
location = /depot {
|
||||||
|
return 301 https://code.tvl.fyi/tree/;
|
||||||
|
}
|
||||||
|
|
||||||
|
# folder/file on canon
|
||||||
|
location ~ ^/depot/-/(blob|tree)/([^\s]*)$ {
|
||||||
|
set $path $2;
|
||||||
|
if ($args ~ ^L(\d+)(-\d+)?$) {
|
||||||
|
set $lineno "#n$1";
|
||||||
|
}
|
||||||
|
|
||||||
|
return 302 https://code.tvl.fyi/tree/$path$lineno;
|
||||||
|
}
|
||||||
|
|
||||||
|
# folder/file on specific commit
|
||||||
|
location ~ ^/depot@([a-f0-9]+)/-/(blob|tree)/([^\s]*)$ {
|
||||||
|
set $commit $1;
|
||||||
|
set $path $3;
|
||||||
|
|
||||||
|
if ($args ~ ^L(\d+)(-\d+)?$) {
|
||||||
|
set $lineno "#n$1";
|
||||||
|
}
|
||||||
|
|
||||||
|
return 302 https://code.tvl.fyi/tree/$path?id=$commit$lineno;
|
||||||
|
}
|
||||||
|
|
||||||
|
# commit info
|
||||||
|
location ~ ^/depot/-/commit/([a-f0-9]+)$ {
|
||||||
|
set $commit $1;
|
||||||
|
return 302 https://code.tvl.fyi/commit/?id=$commit;
|
||||||
|
}
|
||||||
|
|
||||||
|
# search handler
|
||||||
|
# This only redirects to the new search, it doesn't try to parse and
|
||||||
|
# rewrite the query.
|
||||||
|
location /search {
|
||||||
|
return 302 https://grep.tvl.fyi/search;
|
||||||
}
|
}
|
||||||
|
|
||||||
location / {
|
location / {
|
||||||
proxy_set_header X-Sg-Auth "Anonymous";
|
return 404 "TVL code search has moved to grep.tvl.fyi and we could not figure out how to rewrite your query. Sorry!";
|
||||||
proxy_pass http://localhost:${toString config.services.depot.sourcegraph.port};
|
|
||||||
}
|
|
||||||
|
|
||||||
location /users/Anonymous/settings {
|
|
||||||
return 301 https://cs.tvl.fyi;
|
|
||||||
}
|
}
|
||||||
'';
|
'';
|
||||||
};
|
};
|
||||||
|
|
|
@ -1,54 +0,0 @@
|
||||||
# serve tazjin's website & blog
|
|
||||||
{ depot, config, lib, pkgs, ... }:
|
|
||||||
|
|
||||||
{
|
|
||||||
imports = [
|
|
||||||
./base.nix
|
|
||||||
];
|
|
||||||
|
|
||||||
config = {
|
|
||||||
services.nginx.virtualHosts."tazj.in" = {
|
|
||||||
enableACME = true;
|
|
||||||
forceSSL = true;
|
|
||||||
root = depot.users.tazjin.homepage;
|
|
||||||
serverAliases = [ "www.tazj.in" ];
|
|
||||||
|
|
||||||
extraConfig = ''
|
|
||||||
location = /en/rss.xml {
|
|
||||||
return 301 https://tazj.in/feed.atom;
|
|
||||||
}
|
|
||||||
|
|
||||||
${depot.users.tazjin.blog.oldRedirects}
|
|
||||||
location /blog/ {
|
|
||||||
alias ${depot.users.tazjin.blog.rendered}/;
|
|
||||||
|
|
||||||
if ($request_uri ~ ^/(.*)\.html$) {
|
|
||||||
return 302 /$1;
|
|
||||||
}
|
|
||||||
|
|
||||||
try_files $uri $uri.html $uri/ =404;
|
|
||||||
}
|
|
||||||
|
|
||||||
location = /predlozhnik {
|
|
||||||
return 302 https://predlozhnik.ru;
|
|
||||||
}
|
|
||||||
|
|
||||||
# redirect for easier entry on a TV
|
|
||||||
location = /tv {
|
|
||||||
return 302 https://tazj.in/blobs/play.html;
|
|
||||||
}
|
|
||||||
|
|
||||||
# Temporary place for serving static files.
|
|
||||||
location /blobs/ {
|
|
||||||
alias /var/lib/tazjins-blobs/;
|
|
||||||
}
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
|
|
||||||
services.nginx.virtualHosts."git.tazj.in" = {
|
|
||||||
enableACME = true;
|
|
||||||
forceSSL = true;
|
|
||||||
extraConfig = "return 301 https://code.tvl.fyi$request_uri;";
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
|
@ -63,5 +63,6 @@ in rec {
|
||||||
whitbySystem = (nixosFor depot.ops.machines.whitby).system;
|
whitbySystem = (nixosFor depot.ops.machines.whitby).system;
|
||||||
sandunySystem = (nixosFor depot.ops.machines.sanduny).system;
|
sandunySystem = (nixosFor depot.ops.machines.sanduny).system;
|
||||||
nixeryDev01System = (nixosFor depot.ops.machines.nixery-01).system;
|
nixeryDev01System = (nixosFor depot.ops.machines.nixery-01).system;
|
||||||
|
volgaSprintCacheSystem = (nixosFor depot.ops.machines.volgasprint-cache).system;
|
||||||
meta.ci.targets = [ "sandunySystem" "whitbySystem" "nixeryDev01System" ];
|
meta.ci.targets = [ "sandunySystem" "whitbySystem" "nixeryDev01System" ];
|
||||||
}
|
}
|
||||||
|
|
|
@ -88,10 +88,12 @@ steps:
|
||||||
continue_on_failure: true
|
continue_on_failure: true
|
||||||
|
|
||||||
# Exit with success or failure depending on whether any other steps
|
# Exit with success or failure depending on whether any other steps
|
||||||
# failed.
|
# failed (but not retried).
|
||||||
#
|
#
|
||||||
# This information is checked by querying the Buildkite GraphQL API
|
# This information is checked by querying the Buildkite GraphQL API
|
||||||
# and fetching the count of failed steps.
|
# and fetching all failed steps, then filtering out the ones that were
|
||||||
|
# retried (retried jobs create new jobs, which would also show up in the
|
||||||
|
# query).
|
||||||
#
|
#
|
||||||
# This step must be :duck: (yes, really!) because the post-command
|
# This step must be :duck: (yes, really!) because the post-command
|
||||||
# hook will inspect this name.
|
# hook will inspect this name.
|
||||||
|
@ -109,8 +111,8 @@ steps:
|
||||||
readonly FAILED_JOBS=$(curl 'https://graphql.buildkite.com/v1' \
|
readonly FAILED_JOBS=$(curl 'https://graphql.buildkite.com/v1' \
|
||||||
--silent \
|
--silent \
|
||||||
-H "Authorization: Bearer $(cat ${BUILDKITE_TOKEN_PATH})" \
|
-H "Authorization: Bearer $(cat ${BUILDKITE_TOKEN_PATH})" \
|
||||||
-d "{\"query\": \"query BuildStatusQuery { build(uuid: \\\"$BUILDKITE_BUILD_ID\\\") { jobs(passed: false) { count } } }\"}" | \
|
-d "{\"query\": \"query BuildStatusQuery { build(uuid: \\\"$BUILDKITE_BUILD_ID\\\") { jobs(passed: false, first: 500 ) { edges { node { ... on JobTypeCommand { retried } } } } } }\"}" | \
|
||||||
jq -r '.data.build.jobs.count')
|
jq -r '.data.build.jobs.edges | map(select(.node.retried == false)) | length')
|
||||||
|
|
||||||
echo "$$FAILED_JOBS build jobs failed."
|
echo "$$FAILED_JOBS build jobs failed."
|
||||||
|
|
||||||
|
|
Binary file not shown.
|
@ -21,6 +21,11 @@
|
||||||
email = "root@gws.fyi";
|
email = "root@gws.fyi";
|
||||||
password = "{ARGON2}$argon2id$v=19$m=65536,t=2,p=1$5NEYPJ19nDITK5sGr4bzhQ$Xzpzth6y4w+HGvioHiYgzqFiwMDx0B7HAh+PVbkRuuk";
|
password = "{ARGON2}$argon2id$v=19$m=65536,t=2,p=1$5NEYPJ19nDITK5sGr4bzhQ$Xzpzth6y4w+HGvioHiYgzqFiwMDx0B7HAh+PVbkRuuk";
|
||||||
}
|
}
|
||||||
|
{
|
||||||
|
username = "azahi";
|
||||||
|
email = "azat@bahawi.net";
|
||||||
|
password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$BVRzgfs8YIorOTq62B00CQ$5UXHyG/Ivn5TqB7UNgfjYJMxTjun3NDvAStWFom4oas";
|
||||||
|
}
|
||||||
{
|
{
|
||||||
username = "chickadee";
|
username = "chickadee";
|
||||||
email = "matthewktromp@gmail.com";
|
email = "matthewktromp@gmail.com";
|
||||||
|
@ -36,11 +41,21 @@
|
||||||
email = "me@cynthia.re";
|
email = "me@cynthia.re";
|
||||||
password = "{ARGON2}$argon2id$v=19$m=65536,t=4,p=1$TxjbMGenhEmkyYLrg5uGhbr60THB86YeRZg5bPdiTJo$k9gbRlAPjmxwdUwzbavvsAVkckgQZ0jS2oTtvZBPysk";
|
password = "{ARGON2}$argon2id$v=19$m=65536,t=4,p=1$TxjbMGenhEmkyYLrg5uGhbr60THB86YeRZg5bPdiTJo$k9gbRlAPjmxwdUwzbavvsAVkckgQZ0jS2oTtvZBPysk";
|
||||||
}
|
}
|
||||||
|
{
|
||||||
|
username = "domenkozar";
|
||||||
|
email = "domen@cachix.org";
|
||||||
|
password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$c9WgMrTqPJZenOr5+wlnnQ$XOpRZRTkduzP2+NJBxkg2jhffurg7PDla4/RoAyclwI";
|
||||||
|
}
|
||||||
{
|
{
|
||||||
username = "edef";
|
username = "edef";
|
||||||
email = "edef@edef.eu";
|
email = "edef@edef.eu";
|
||||||
password = "{ARGON2}$argon2id$v=19$m=65536,t=2,p=1$OORx4ERbkgvTmuYCJA8cIw$i5qaBzHkRVw7Tl+wZsTFTDqJwF0vuZqhW3VpknMYMc0";
|
password = "{ARGON2}$argon2id$v=19$m=65536,t=2,p=1$OORx4ERbkgvTmuYCJA8cIw$i5qaBzHkRVw7Tl+wZsTFTDqJwF0vuZqhW3VpknMYMc0";
|
||||||
}
|
}
|
||||||
|
{
|
||||||
|
username = "elle";
|
||||||
|
email = "lnajt4@gmail.com";
|
||||||
|
password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$b5Bfq6u+fEKbtpixOl+yPw$nCyTLbSDYsw30ZiSxhJ6N99IIPQAnS2XRNlpEx9os+0";
|
||||||
|
}
|
||||||
{
|
{
|
||||||
username = "ericvolp12";
|
username = "ericvolp12";
|
||||||
email = "ericvolp12@gmail.com";
|
email = "ericvolp12@gmail.com";
|
||||||
|
@ -111,6 +126,11 @@
|
||||||
email = "lukegb@tvl.fyi";
|
email = "lukegb@tvl.fyi";
|
||||||
password = "{SSHA}7a85VNhpFElFw+N5xcjgGmt4HnBsaGp4";
|
password = "{SSHA}7a85VNhpFElFw+N5xcjgGmt4HnBsaGp4";
|
||||||
}
|
}
|
||||||
|
{
|
||||||
|
username = "mrflos";
|
||||||
|
email = "mrflos@yeswiki.pro";
|
||||||
|
password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$/D1y+6n3+0GigG9mCMqK8A$9PseWm3+QATxN/M3Wu4JM+CnIppLD/LbQaVEKLItv9o";
|
||||||
|
}
|
||||||
{
|
{
|
||||||
username = "noteed";
|
username = "noteed";
|
||||||
email = "noteed@gmail.com";
|
email = "noteed@gmail.com";
|
||||||
|
@ -153,6 +173,11 @@
|
||||||
email = "tazjin@tvl.su";
|
email = "tazjin@tvl.su";
|
||||||
password = "{ARGON2}$argon2id$v=19$m=65536,t=2,p=1$wOPEl9D3kSke//oLtbvqrg$j0npwwXgaXQ/emefKUwL59tH8hdmtzbgH2rQzWSmE2Y";
|
password = "{ARGON2}$argon2id$v=19$m=65536,t=2,p=1$wOPEl9D3kSke//oLtbvqrg$j0npwwXgaXQ/emefKUwL59tH8hdmtzbgH2rQzWSmE2Y";
|
||||||
}
|
}
|
||||||
|
{
|
||||||
|
username = "yl3dy";
|
||||||
|
email = "aleksandr.kiselyov@gmail.com";
|
||||||
|
password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$vPvOa0/7RzDLuD/icQuIzQ$IVMSI7hh/frihuL11sNRj6Jz8TTn1wZZHjZZGszz3pI";
|
||||||
|
}
|
||||||
{
|
{
|
||||||
username = "implr";
|
username = "implr";
|
||||||
email = "implr@hackerspace.pl";
|
email = "implr@hackerspace.pl";
|
||||||
|
@ -259,4 +284,24 @@
|
||||||
email = "tvix@sinavir.fr";
|
email = "tvix@sinavir.fr";
|
||||||
password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$5GXvoN/enVljV97yE/Zasg$OrgY9/ge2LoxNm9OOqxh/kKLxoAvU54MbQa9WWiT0jY";
|
password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$5GXvoN/enVljV97yE/Zasg$OrgY9/ge2LoxNm9OOqxh/kKLxoAvU54MbQa9WWiT0jY";
|
||||||
}
|
}
|
||||||
|
{
|
||||||
|
username = "emery";
|
||||||
|
email = "emery@dmz.rs";
|
||||||
|
password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$b2k5UpTJafqM7yjHfVRjBg$zFGy/ZeI9Hb71TUfJwFp7qDKyUl8tdyFDUK1uNBYfUI";
|
||||||
|
}
|
||||||
|
{
|
||||||
|
username = "aziz";
|
||||||
|
email = "abd.aziz89@gmail.com";
|
||||||
|
password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$xTvdtTF+gavMfF8556CiiQ$IshnauhlEr80skpv5s6ueJLkQxlynzBt6oCp3cQrNCY";
|
||||||
|
}
|
||||||
|
{
|
||||||
|
username = "nikiv";
|
||||||
|
email = "nikita@nikiv.dev";
|
||||||
|
password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$79mMAD2XYa5dg7D9ueWMpw$Edf5WODrFpkNDyWaMdLKcgcErFLx4khmPIk8wzmYGUE";
|
||||||
|
}
|
||||||
|
{
|
||||||
|
username = "ein-shved";
|
||||||
|
email = "mestofel13@gmail.com";
|
||||||
|
password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$D4wzfJoyFye48QNdrC66VA$aBJ/ZaL+rTgXoQa/nFdpHap3G/Oju8WlHaWTii95X8E";
|
||||||
|
}
|
||||||
]
|
]
|
||||||
|
|
47
third_party/chicago95/default.nix
vendored
Normal file
47
third_party/chicago95/default.nix
vendored
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
# A rendition of everyone's favourite computer theme.
|
||||||
|
{ pkgs, ... }:
|
||||||
|
|
||||||
|
let
|
||||||
|
# Chicago95 has no GTK-4 theme (because GTK-4 removed important features that
|
||||||
|
# it needs), but there is a project with an approximation.
|
||||||
|
#
|
||||||
|
# This is a bit of a hack, but I inject that project's GTK-4 theme as if it
|
||||||
|
# was a part of Chicago95.
|
||||||
|
#
|
||||||
|
# This other project is GPL-3.0, under which Chicago95 is also distributed.
|
||||||
|
gtk4ProjectSrc = pkgs.fetchFromGitHub {
|
||||||
|
owner = "B00merang-Project";
|
||||||
|
repo = "Windows-95";
|
||||||
|
rev = "055abd7a3608afdcb2ef021732e07020f2b416b2";
|
||||||
|
hash = "sha256:1li6wzyn3y09d188xki1h96pmn4xcx2lklfc4rkiq2y2r22wx7kz";
|
||||||
|
};
|
||||||
|
in
|
||||||
|
pkgs.stdenvNoCC.mkDerivation {
|
||||||
|
pname = "Chicago95";
|
||||||
|
version = "master";
|
||||||
|
|
||||||
|
src = pkgs.fetchFromGitHub {
|
||||||
|
owner = "grassmunk";
|
||||||
|
repo = "Chicago95";
|
||||||
|
rev = "bdf5cf36a16102aaac297f3de887c601c2b1146f";
|
||||||
|
hash = "sha256:11fsy3bam1rhp1292zflvzmf1432z1p0ncwy3601wl2f8rnvfdfm";
|
||||||
|
};
|
||||||
|
|
||||||
|
# The project has a Makefile, but it's broken in all sorts of ways, so we just
|
||||||
|
# copy the important stuff manually.
|
||||||
|
dontBuild = true;
|
||||||
|
installPhase = ''
|
||||||
|
mkdir -p $out/share/{icons,fonts,themes,sounds,qt5ct/colors}
|
||||||
|
|
||||||
|
cp -r Theme/Chicago95 $out/share/themes
|
||||||
|
cp -r Icons/* $out/share/icons
|
||||||
|
cp -r Cursors/* $out/share/icons
|
||||||
|
cp -r Fonts/* $out/share/fonts
|
||||||
|
cp Extras/Chicago95_qt.conf $out/share/qt5ct/colors
|
||||||
|
|
||||||
|
cp -r ${gtk4ProjectSrc}/gtk-4.0 $out/share/themes/Chicago95
|
||||||
|
'';
|
||||||
|
|
||||||
|
meta.license = pkgs.lib.licenses.gpl3;
|
||||||
|
}
|
||||||
|
|
12
third_party/gitignoreSource/default.nix
vendored
12
third_party/gitignoreSource/default.nix
vendored
|
@ -1,15 +1,7 @@
|
||||||
{ pkgs, ... }:
|
{ depot, lib, ... }:
|
||||||
|
|
||||||
let
|
let
|
||||||
gitignoreNix = import
|
gitignoreNix = import depot.third_party.sources."gitignore.nix" { inherit lib; };
|
||||||
(pkgs.fetchFromGitHub {
|
|
||||||
owner = "hercules-ci";
|
|
||||||
repo = "gitignore";
|
|
||||||
rev = "f9e996052b5af4032fe6150bba4a6fe4f7b9d698";
|
|
||||||
sha256 = "0jrh5ghisaqdd0vldbywags20m2cxpkbbk5jjjmwaw0gr8nhsafv";
|
|
||||||
})
|
|
||||||
{ inherit (pkgs) lib; };
|
|
||||||
|
|
||||||
in
|
in
|
||||||
{
|
{
|
||||||
__functor = _: gitignoreNix.gitignoreSource;
|
__functor = _: gitignoreNix.gitignoreSource;
|
||||||
|
|
3
third_party/nixpkgs/default.nix
vendored
3
third_party/nixpkgs/default.nix
vendored
|
@ -58,6 +58,9 @@ let
|
||||||
|
|
||||||
# the big lis package change breaks everything in //3p/lisp, undo it for now.
|
# the big lis package change breaks everything in //3p/lisp, undo it for now.
|
||||||
lispPackages = stableNixpkgs.lispPackages;
|
lispPackages = stableNixpkgs.lispPackages;
|
||||||
|
|
||||||
|
# mypaint is broken on stable (2024-09-05)
|
||||||
|
mypaint = stableNixpkgs.mypaint;
|
||||||
};
|
};
|
||||||
|
|
||||||
# Overlay to expose the nixpkgs commits we are using to other Nix code.
|
# Overlay to expose the nixpkgs commits we are using to other Nix code.
|
||||||
|
|
140
third_party/overlays/patches/cbtemulator-uds.patch
vendored
140
third_party/overlays/patches/cbtemulator-uds.patch
vendored
|
@ -1,140 +0,0 @@
|
||||||
commit 1397e10225d8c6fd079a86fccd58fb5d0f4200bc
|
|
||||||
Author: Florian Klink <flokli@flokli.de>
|
|
||||||
Date: Fri Mar 29 10:06:34 2024 +0100
|
|
||||||
|
|
||||||
feat(bigtable/emulator): allow listening on Unix Domain Sockets
|
|
||||||
|
|
||||||
cbtemulator listening on unix domain sockets is much easier than trying
|
|
||||||
to allocate free TCP ports, especially if many cbtemulators are run at
|
|
||||||
the same time in integration tests.
|
|
||||||
|
|
||||||
This adds an additional flag, address, which has priority if it's set,
|
|
||||||
rather than host:port.
|
|
||||||
|
|
||||||
`NewServer` already takes a `laddr string`, so we simply check for it to
|
|
||||||
contain slashes, and if so, listen on unix, rather than TCP.
|
|
||||||
|
|
||||||
diff --git a/bigtable/bttest/inmem.go b/bigtable/bttest/inmem.go
|
|
||||||
index 556abc2a85..33e4bf2667 100644
|
|
||||||
--- a/bttest/inmem.go
|
|
||||||
+++ b/bttest/inmem.go
|
|
||||||
@@ -40,6 +40,7 @@ import (
|
|
||||||
"math"
|
|
||||||
"math/rand"
|
|
||||||
"net"
|
|
||||||
+ "os"
|
|
||||||
"regexp"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
@@ -106,7 +107,15 @@ type server struct {
|
|
||||||
// The Server will be listening for gRPC connections, without TLS,
|
|
||||||
// on the provided address. The resolved address is named by the Addr field.
|
|
||||||
func NewServer(laddr string, opt ...grpc.ServerOption) (*Server, error) {
|
|
||||||
- l, err := net.Listen("tcp", laddr)
|
|
||||||
+ var l net.Listener
|
|
||||||
+ var err error
|
|
||||||
+
|
|
||||||
+ // If the address contains slashes, listen on a unix domain socket instead.
|
|
||||||
+ if strings.Contains(laddr, "/") {
|
|
||||||
+ l, err = net.Listen("unix", laddr)
|
|
||||||
+ } else {
|
|
||||||
+ l, err = net.Listen("tcp", laddr)
|
|
||||||
+ }
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
diff --git a/bigtable/cmd/emulator/cbtemulator.go b/bigtable/cmd/emulator/cbtemulator.go
|
|
||||||
index 144c09ffb1..deaf69b717 100644
|
|
||||||
--- a/cmd/emulator/cbtemulator.go
|
|
||||||
+++ b/cmd/emulator/cbtemulator.go
|
|
||||||
@@ -27,8 +27,9 @@ import (
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
- host = flag.String("host", "localhost", "the address to bind to on the local machine")
|
|
||||||
- port = flag.Int("port", 9000, "the port number to bind to on the local machine")
|
|
||||||
+ host = flag.String("host", "localhost", "the address to bind to on the local machine")
|
|
||||||
+ port = flag.Int("port", 9000, "the port number to bind to on the local machine")
|
|
||||||
+ address = flag.String("address", "", "address:port number or unix socket path to listen on. Has priority over host/port")
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
@@ -42,7 +43,15 @@ func main() {
|
|
||||||
grpc.MaxRecvMsgSize(maxMsgSize),
|
|
||||||
grpc.MaxSendMsgSize(maxMsgSize),
|
|
||||||
}
|
|
||||||
- srv, err := bttest.NewServer(fmt.Sprintf("%s:%d", *host, *port), opts...)
|
|
||||||
+
|
|
||||||
+ var laddr string
|
|
||||||
+ if *address != "" {
|
|
||||||
+ laddr = *address
|
|
||||||
+ } else {
|
|
||||||
+ laddr = fmt.Sprintf("%s:%d", *host, *port)
|
|
||||||
+ }
|
|
||||||
+
|
|
||||||
+ srv, err := bttest.NewServer(laddr, opts...)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalf("failed to start emulator: %v", err)
|
|
||||||
}
|
|
||||||
commit ce16f843d6c93159d86b3807c6d9ff66e43aac67
|
|
||||||
Author: Florian Klink <flokli@flokli.de>
|
|
||||||
Date: Fri Mar 29 11:53:15 2024 +0100
|
|
||||||
|
|
||||||
feat(bigtable): clean up unix socket on close
|
|
||||||
|
|
||||||
Call srv.Close when receiving an interrupt, and delete the unix domain
|
|
||||||
socket in that function.
|
|
||||||
|
|
||||||
diff --git a/bigtable/bttest/inmem.go b/bigtable/bttest/inmem.go
|
|
||||||
index 33e4bf2667..0dc96024b1 100644
|
|
||||||
--- a/bttest/inmem.go
|
|
||||||
+++ b/bttest/inmem.go
|
|
||||||
@@ -148,6 +148,11 @@ func (s *Server) Close() {
|
|
||||||
|
|
||||||
s.srv.Stop()
|
|
||||||
s.l.Close()
|
|
||||||
+
|
|
||||||
+ // clean up unix socket
|
|
||||||
+ if strings.Contains(s.Addr, "/") {
|
|
||||||
+ _ = os.Remove(s.Addr)
|
|
||||||
+ }
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *server) CreateTable(ctx context.Context, req *btapb.CreateTableRequest) (*btapb.Table, error) {
|
|
||||||
diff --git a/bigtable/cmd/emulator/cbtemulator.go b/bigtable/cmd/emulator/cbtemulator.go
|
|
||||||
index deaf69b717..5a9e8f7a8c 100644
|
|
||||||
--- a/cmd/emulator/cbtemulator.go
|
|
||||||
+++ b/cmd/emulator/cbtemulator.go
|
|
||||||
@@ -18,9 +18,12 @@ cbtemulator launches the in-memory Cloud Bigtable server on the given address.
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
+ "context"
|
|
||||||
"flag"
|
|
||||||
"fmt"
|
|
||||||
"log"
|
|
||||||
+ "os"
|
|
||||||
+ "os/signal"
|
|
||||||
|
|
||||||
"cloud.google.com/go/bigtable/bttest"
|
|
||||||
"google.golang.org/grpc"
|
|
||||||
@@ -51,11 +54,18 @@ func main() {
|
|
||||||
laddr = fmt.Sprintf("%s:%d", *host, *port)
|
|
||||||
}
|
|
||||||
|
|
||||||
+ ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
|
|
||||||
+ defer stop()
|
|
||||||
+
|
|
||||||
srv, err := bttest.NewServer(laddr, opts...)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalf("failed to start emulator: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
fmt.Printf("Cloud Bigtable emulator running on %s\n", srv.Addr)
|
|
||||||
- select {}
|
|
||||||
+ select {
|
|
||||||
+ case <-ctx.Done():
|
|
||||||
+ srv.Close()
|
|
||||||
+ stop()
|
|
||||||
+ }
|
|
||||||
}
|
|
|
@ -0,0 +1,109 @@
|
||||||
|
From 96f66ec32e003c6c215aa2a644281289a71dae7d Mon Sep 17 00:00:00 2001
|
||||||
|
From: Ilan Joselevich <personal@ilanjoselevich.com>
|
||||||
|
Date: Sun, 4 Aug 2024 02:35:27 +0300
|
||||||
|
Subject: [PATCH] Fix: Use mkDerivation with src instead of runCommand for test
|
||||||
|
derivation
|
||||||
|
|
||||||
|
The problem with using runCommand and recreating the src directory with
|
||||||
|
lndir is that it changes the file types of individual files, they will
|
||||||
|
now be a symlink instead of a regular file. If you have a crate that tests
|
||||||
|
that a file is of regular type then it will fail inside the crate2nix derivation.
|
||||||
|
---
|
||||||
|
templates/nix/crate2nix/default.nix | 81 ++++++++-----------
|
||||||
|
1 file changed, 35 insertions(+), 46 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/templates/nix/crate2nix/default.nix b/templates/nix/crate2nix/default.nix
|
||||||
|
index c53925e..90e10c6 100644
|
||||||
|
--- a/templates/nix/crate2nix/default.nix
|
||||||
|
+++ b/templates/nix/crate2nix/default.nix
|
||||||
|
@@ -120,52 +120,41 @@ rec {
|
||||||
|
testPostRun
|
||||||
|
]);
|
||||||
|
in
|
||||||
|
- pkgs.runCommand "run-tests-${testCrate.name}"
|
||||||
|
- {
|
||||||
|
- inherit testCrateFlags;
|
||||||
|
- buildInputs = testInputs;
|
||||||
|
- } ''
|
||||||
|
- set -e
|
||||||
|
-
|
||||||
|
- export RUST_BACKTRACE=1
|
||||||
|
-
|
||||||
|
- # recreate a file hierarchy as when running tests with cargo
|
||||||
|
-
|
||||||
|
- # the source for test data
|
||||||
|
- # It's necessary to locate the source in $NIX_BUILD_TOP/source/
|
||||||
|
- # instead of $NIX_BUILD_TOP/
|
||||||
|
- # because we compiled those test binaries in the former and not the latter.
|
||||||
|
- # So all paths will expect source tree to be there and not in the build top directly.
|
||||||
|
- # For example: $NIX_BUILD_TOP := /build in general, if you ask yourself.
|
||||||
|
- # NOTE: There could be edge cases if `crate.sourceRoot` does exist but
|
||||||
|
- # it's very hard to reason about them.
|
||||||
|
- # Open a bug if you run into this!
|
||||||
|
- mkdir -p source/
|
||||||
|
- cd source/
|
||||||
|
-
|
||||||
|
- ${pkgs.buildPackages.xorg.lndir}/bin/lndir ${crate.src}
|
||||||
|
-
|
||||||
|
- # build outputs
|
||||||
|
- testRoot=target/debug
|
||||||
|
- mkdir -p $testRoot
|
||||||
|
-
|
||||||
|
- # executables of the crate
|
||||||
|
- # we copy to prevent std::env::current_exe() to resolve to a store location
|
||||||
|
- for i in ${crate}/bin/*; do
|
||||||
|
- cp "$i" "$testRoot"
|
||||||
|
- done
|
||||||
|
- chmod +w -R .
|
||||||
|
-
|
||||||
|
- # test harness executables are suffixed with a hash, like cargo does
|
||||||
|
- # this allows to prevent name collision with the main
|
||||||
|
- # executables of the crate
|
||||||
|
- hash=$(basename $out)
|
||||||
|
- for file in ${drv}/tests/*; do
|
||||||
|
- f=$testRoot/$(basename $file)-$hash
|
||||||
|
- cp $file $f
|
||||||
|
- ${testCommand}
|
||||||
|
- done
|
||||||
|
- '';
|
||||||
|
+ pkgs.stdenvNoCC.mkDerivation {
|
||||||
|
+ name = "run-tests-${testCrate.name}";
|
||||||
|
+
|
||||||
|
+ inherit (crate) src;
|
||||||
|
+
|
||||||
|
+ inherit testCrateFlags;
|
||||||
|
+
|
||||||
|
+ buildInputs = testInputs;
|
||||||
|
+
|
||||||
|
+ buildPhase = ''
|
||||||
|
+ set -e
|
||||||
|
+ export RUST_BACKTRACE=1
|
||||||
|
+
|
||||||
|
+ # build outputs
|
||||||
|
+ testRoot=target/debug
|
||||||
|
+ mkdir -p $testRoot
|
||||||
|
+
|
||||||
|
+ # executables of the crate
|
||||||
|
+ # we copy to prevent std::env::current_exe() to resolve to a store location
|
||||||
|
+ for i in ${crate}/bin/*; do
|
||||||
|
+ cp "$i" "$testRoot"
|
||||||
|
+ done
|
||||||
|
+ chmod +w -R .
|
||||||
|
+
|
||||||
|
+ # test harness executables are suffixed with a hash, like cargo does
|
||||||
|
+ # this allows to prevent name collision with the main
|
||||||
|
+ # executables of the crate
|
||||||
|
+ hash=$(basename $out)
|
||||||
|
+ for file in ${drv}/tests/*; do
|
||||||
|
+ f=$testRoot/$(basename $file)-$hash
|
||||||
|
+ cp $file $f
|
||||||
|
+ ${testCommand}
|
||||||
|
+ done
|
||||||
|
+ '';
|
||||||
|
+ };
|
||||||
|
in
|
||||||
|
pkgs.runCommand "${crate.name}-linked"
|
||||||
|
{
|
||||||
|
--
|
||||||
|
2.44.0
|
||||||
|
|
38
third_party/overlays/tvl.nix
vendored
38
third_party/overlays/tvl.nix
vendored
|
@ -90,15 +90,10 @@ depot.nix.readTree.drvTargets {
|
||||||
};
|
};
|
||||||
}));
|
}));
|
||||||
|
|
||||||
# https://github.com/googleapis/google-cloud-go/pull/9665
|
|
||||||
cbtemulator = super.cbtemulator.overrideAttrs (old: {
|
|
||||||
patches = old.patches or [ ] ++ [
|
|
||||||
./patches/cbtemulator-uds.patch
|
|
||||||
];
|
|
||||||
});
|
|
||||||
|
|
||||||
crate2nix = super.crate2nix.overrideAttrs (old: {
|
crate2nix = super.crate2nix.overrideAttrs (old: {
|
||||||
patches = old.patches or [ ] ++ [
|
patches = old.patches or [ ] ++ [
|
||||||
|
# TODO(Kranzes): Remove in next release.
|
||||||
|
./patches/crate2nix-0001-Fix-Use-mkDerivation-with-src-instead-of-runCommand.patch
|
||||||
# https://github.com/nix-community/crate2nix/pull/301
|
# https://github.com/nix-community/crate2nix/pull/301
|
||||||
./patches/crate2nix-tests-debug.patch
|
./patches/crate2nix-tests-debug.patch
|
||||||
];
|
];
|
||||||
|
@ -112,6 +107,25 @@ depot.nix.readTree.drvTargets {
|
||||||
];
|
];
|
||||||
});
|
});
|
||||||
|
|
||||||
|
# https://github.com/NixOS/nixpkgs/pull/329415/files
|
||||||
|
grpc-health-check = super.rustPlatform.buildRustPackage {
|
||||||
|
pname = "grpc-health-check";
|
||||||
|
version = "unstable-2022-08-19";
|
||||||
|
|
||||||
|
src = super.fetchFromGitHub {
|
||||||
|
owner = "paypizza";
|
||||||
|
repo = "grpc-health-check";
|
||||||
|
rev = "f61bb5e10beadc5ed53144cc540d66e19fc510bd";
|
||||||
|
hash = "sha256-nKut9c1HHIacdRcmvlXe0GrtkgCWN6sxJ4ImO0CIDdo=";
|
||||||
|
};
|
||||||
|
|
||||||
|
cargoHash = "sha256-lz+815iE+oXBQ3PfqBO0QBpZY6x1SNR7OU7BjkRszzI=";
|
||||||
|
|
||||||
|
nativeBuildInputs = [ super.protobuf ];
|
||||||
|
# tests fail
|
||||||
|
doCheck = false;
|
||||||
|
};
|
||||||
|
|
||||||
# Imports a patch that fixes usage of this package on versions
|
# Imports a patch that fixes usage of this package on versions
|
||||||
# >=1.9. The patch has been proposed upstream, but so far with no
|
# >=1.9. The patch has been proposed upstream, but so far with no
|
||||||
# reactions from the maintainer:
|
# reactions from the maintainer:
|
||||||
|
@ -121,6 +135,11 @@ depot.nix.readTree.drvTargets {
|
||||||
patches = (old.patches or [ ]) ++ [ ./patches/tpm2-pkcs11-190-dbupgrade.patch ];
|
patches = (old.patches or [ ]) ++ [ ./patches/tpm2-pkcs11-190-dbupgrade.patch ];
|
||||||
});
|
});
|
||||||
|
|
||||||
|
# Dependency isn't supported by Python 3.12
|
||||||
|
html5validator = super.html5validator.override {
|
||||||
|
python3 = self.python311;
|
||||||
|
};
|
||||||
|
|
||||||
# macFUSE bump containing fix for https://github.com/osxfuse/osxfuse/issues/974
|
# macFUSE bump containing fix for https://github.com/osxfuse/osxfuse/issues/974
|
||||||
# https://github.com/NixOS/nixpkgs/pull/320197
|
# https://github.com/NixOS/nixpkgs/pull/320197
|
||||||
fuse =
|
fuse =
|
||||||
|
@ -133,9 +152,4 @@ depot.nix.readTree.drvTargets {
|
||||||
hash = "sha256-ucTzO2qdN4QkowMVvC3+4pjEVjbwMsB0xFk+bvQxwtQ=";
|
hash = "sha256-ucTzO2qdN4QkowMVvC3+4pjEVjbwMsB0xFk+bvQxwtQ=";
|
||||||
};
|
};
|
||||||
}) else super.fuse;
|
}) else super.fuse;
|
||||||
|
|
||||||
treefmt = super.treefmt.overrideAttrs (old: {
|
|
||||||
# https://github.com/numtide/treefmt/pull/328
|
|
||||||
patches = old.patches or [ ] ++ [ ./patches/treefmt-fix-no-cache.patch ];
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
78
third_party/radicle-explorer/0001-remove-dependency-on-plausible.patch
vendored
Normal file
78
third_party/radicle-explorer/0001-remove-dependency-on-plausible.patch
vendored
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
From cc4718cbea1bd70de21a2be515a944802246ffc7 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Vincent Ambo <mail@tazj.in>
|
||||||
|
Date: Sun, 15 Sep 2024 03:08:28 +0300
|
||||||
|
Subject: [PATCH] remove dependency on plausible
|
||||||
|
|
||||||
|
We don't need spyware, thanks.
|
||||||
|
---
|
||||||
|
package-lock.json | 9 ---------
|
||||||
|
package.json | 1 -
|
||||||
|
src/App.svelte | 8 --------
|
||||||
|
3 files changed, 18 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/package-lock.json b/package-lock.json
|
||||||
|
index d52de6c0..d96e342f 100644
|
||||||
|
--- a/package-lock.json
|
||||||
|
+++ b/package-lock.json
|
||||||
|
@@ -29,7 +29,6 @@
|
||||||
|
"marked-katex-extension": "^5.1.1",
|
||||||
|
"marked-linkify-it": "^3.1.11",
|
||||||
|
"md5": "^2.3.0",
|
||||||
|
- "plausible-tracker": "^0.3.9",
|
||||||
|
"svelte": "^4.2.19",
|
||||||
|
"twemoji": "^14.0.2",
|
||||||
|
"zod": "^3.23.8"
|
||||||
|
@@ -3697,14 +3696,6 @@
|
||||||
|
"url": "https://github.com/sponsors/jonschlinkert"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
- "node_modules/plausible-tracker": {
|
||||||
|
- "version": "0.3.9",
|
||||||
|
- "resolved": "https://registry.npmjs.org/plausible-tracker/-/plausible-tracker-0.3.9.tgz",
|
||||||
|
- "integrity": "sha512-hMhneYm3GCPyQon88SZrVJx+LlqhM1kZFQbuAgXPoh/Az2YvO1B6bitT9qlhpiTdJlsT5lsr3gPmzoVjb5CDXA==",
|
||||||
|
- "engines": {
|
||||||
|
- "node": ">=10"
|
||||||
|
- }
|
||||||
|
- },
|
||||||
|
"node_modules/playwright": {
|
||||||
|
"version": "1.46.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.46.1.tgz",
|
||||||
|
diff --git a/package.json b/package.json
|
||||||
|
index 6d569ad9..61e8d892 100644
|
||||||
|
--- a/package.json
|
||||||
|
+++ b/package.json
|
||||||
|
@@ -73,7 +73,6 @@
|
||||||
|
"marked-katex-extension": "^5.1.1",
|
||||||
|
"marked-linkify-it": "^3.1.11",
|
||||||
|
"md5": "^2.3.0",
|
||||||
|
- "plausible-tracker": "^0.3.9",
|
||||||
|
"svelte": "^4.2.19",
|
||||||
|
"twemoji": "^14.0.2",
|
||||||
|
"zod": "^3.23.8"
|
||||||
|
diff --git a/src/App.svelte b/src/App.svelte
|
||||||
|
index 8161c390..4281ba61 100644
|
||||||
|
--- a/src/App.svelte
|
||||||
|
+++ b/src/App.svelte
|
||||||
|
@@ -1,6 +1,4 @@
|
||||||
|
<script lang="ts">
|
||||||
|
- import Plausible from "plausible-tracker";
|
||||||
|
-
|
||||||
|
import * as router from "@app/lib/router";
|
||||||
|
import { unreachable } from "@app/lib/utils";
|
||||||
|
|
||||||
|
@@ -28,12 +26,6 @@
|
||||||
|
|
||||||
|
void router.loadFromLocation();
|
||||||
|
|
||||||
|
- if (import.meta.env.PROD) {
|
||||||
|
- const plausible = Plausible({ domain: "app.radicle.xyz" });
|
||||||
|
-
|
||||||
|
- plausible.enableAutoPageviews();
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
$: document.documentElement.setAttribute("data-codefont", $codeFont);
|
||||||
|
$: document.documentElement.setAttribute("data-theme", $theme);
|
||||||
|
</script>
|
||||||
|
--
|
||||||
|
2.46.0
|
||||||
|
|
66
third_party/radicle-explorer/default.nix
vendored
Normal file
66
third_party/radicle-explorer/default.nix
vendored
Normal file
|
@ -0,0 +1,66 @@
|
||||||
|
# radicle-explorer is the web UI for Radicle.
|
||||||
|
#
|
||||||
|
# They have an upstream Nix derivation, but it only works with experimental
|
||||||
|
# features Nix and is quite messy, so this is a copy of the relevant parts.
|
||||||
|
{ lib, pkgs, ... }:
|
||||||
|
|
||||||
|
let
|
||||||
|
twemoji-assets = pkgs.fetchFromGitHub {
|
||||||
|
owner = "twitter";
|
||||||
|
repo = "twemoji";
|
||||||
|
rev = "v14.0.2";
|
||||||
|
hash = "sha256-YoOnZ5uVukzi/6bLi22Y8U5TpplPzB7ji42l+/ys5xI=";
|
||||||
|
};
|
||||||
|
|
||||||
|
httpdSrc = pkgs.radicle-httpd.src;
|
||||||
|
in
|
||||||
|
lib.fix (self: pkgs.buildNpmPackage rec {
|
||||||
|
pname = "radicle-explorer";
|
||||||
|
version = (builtins.fromJSON (builtins.readFile "${src}/package.json")).version;
|
||||||
|
|
||||||
|
# source should be synced with the httpd, which is already in nixpkgs
|
||||||
|
src = pkgs.fetchgit {
|
||||||
|
inherit (httpdSrc) url rev;
|
||||||
|
hash = "sha256:09m13238h6j7g02r6332ihgyyzbjx90pgz14rz29pgv7936h6il8";
|
||||||
|
};
|
||||||
|
|
||||||
|
# This might change during nixpkgs bumps and will need updating. Need to fix
|
||||||
|
# upstream so that there is a normal, callable derivation.
|
||||||
|
npmDepsHash = "sha256:1hbrzfjkfc0q8qk03yi6qb9zqm57h7hnkn7fl0yxkrzbrljaljaz";
|
||||||
|
|
||||||
|
patches = [
|
||||||
|
./0001-remove-dependency-on-plausible.patch
|
||||||
|
];
|
||||||
|
|
||||||
|
postPatch = ''
|
||||||
|
patchShebangs --build ./scripts
|
||||||
|
mkdir -p "public/twemoji"
|
||||||
|
cp -t public/twemoji -r -- ${twemoji-assets}/assets/svg/*
|
||||||
|
: >scripts/install-twemoji-assets
|
||||||
|
'';
|
||||||
|
dontConfigure = true;
|
||||||
|
doCheck = false;
|
||||||
|
|
||||||
|
installPhase = ''
|
||||||
|
runHook preInstall
|
||||||
|
mkdir -p "$out"
|
||||||
|
cp -r -t "$out" build/*
|
||||||
|
runHook postInstall
|
||||||
|
'';
|
||||||
|
|
||||||
|
# Override the build-time configuration with other preferred seeds which are
|
||||||
|
# displayed on the landing page.
|
||||||
|
passthru.withPreferredSeeds = seeds:
|
||||||
|
let
|
||||||
|
originalConfig = builtins.fromJSON (builtins.readFile "${src}/config/default.json");
|
||||||
|
config = originalConfig // {
|
||||||
|
preferredSeeds = seeds;
|
||||||
|
};
|
||||||
|
newConfig = pkgs.writeText "local.json" (builtins.toJSON config);
|
||||||
|
in
|
||||||
|
self.overrideAttrs (_: {
|
||||||
|
preBuild = ''
|
||||||
|
cp ${newConfig} config/local.json
|
||||||
|
'';
|
||||||
|
});
|
||||||
|
})
|
126
third_party/rust-crates/default.nix
vendored
126
third_party/rust-crates/default.nix
vendored
|
@ -292,130 +292,4 @@ depot.nix.readTree.drvTargets rec{
|
||||||
sha256 = "1kd047p8jv6mhmfzddjvfa2nwkfrb3l1wml6lfm51n1cr06cc9lz";
|
sha256 = "1kd047p8jv6mhmfzddjvfa2nwkfrb3l1wml6lfm51n1cr06cc9lz";
|
||||||
};
|
};
|
||||||
|
|
||||||
libz-sys = buildRustCrate {
|
|
||||||
pname = "libz-sys";
|
|
||||||
version = "1.1.2";
|
|
||||||
sha256 = "1y7v6bkwr4b6yaf951p1ns7mx47b29ziwdd5wziaic14gs1gwq30";
|
|
||||||
buildDependencies = [
|
|
||||||
cc
|
|
||||||
pkg-config
|
|
||||||
];
|
|
||||||
};
|
|
||||||
|
|
||||||
libgit2-sys = buildRustCrate {
|
|
||||||
pname = "libgit2-sys";
|
|
||||||
version = "0.16.2+1.7.2";
|
|
||||||
sha256 = "0bs446idbmg8s13jvb0ck6qmrskcdn2mp3d4mn9ggxbmiw4ryd3g";
|
|
||||||
dependencies = [
|
|
||||||
libc
|
|
||||||
libz-sys
|
|
||||||
];
|
|
||||||
libPath = "lib.rs";
|
|
||||||
libName = "libgit2_sys";
|
|
||||||
# TODO: this should be available via `pkgs.defaultCrateOverrides`,
|
|
||||||
# I thought that was included by default?
|
|
||||||
nativeBuildInputs = [ pkg-config ];
|
|
||||||
buildInputs = [ pkgs.zlib pkgs.libgit2 ];
|
|
||||||
buildDependencies = [
|
|
||||||
cc
|
|
||||||
pkg-config
|
|
||||||
];
|
|
||||||
env.LIBGIT2_NO_VENDOR = "1";
|
|
||||||
};
|
|
||||||
|
|
||||||
matches = buildRustCrate {
|
|
||||||
pname = "matches";
|
|
||||||
version = "0.1.8";
|
|
||||||
sha256 = "03hl636fg6xggy0a26200xs74amk3k9n0908rga2szn68agyz3cv";
|
|
||||||
libPath = "lib.rs";
|
|
||||||
};
|
|
||||||
|
|
||||||
percent-encoding = buildRustCrate {
|
|
||||||
pname = "percent-encoding";
|
|
||||||
version = "2.1.0";
|
|
||||||
sha256 = "0i838f2nr81585ckmfymf8l1x1vdmx6n8xqvli0lgcy60yl2axy3";
|
|
||||||
libPath = "lib.rs";
|
|
||||||
};
|
|
||||||
|
|
||||||
form_urlencoded = buildRustCrate {
|
|
||||||
pname = "form_urlencoded";
|
|
||||||
version = "1.0.1";
|
|
||||||
sha256 = "0rhv2hfrzk2smdh27walkm66zlvccnnwrbd47fmf8jh6m420dhj8";
|
|
||||||
dependencies = [
|
|
||||||
matches
|
|
||||||
percent-encoding
|
|
||||||
];
|
|
||||||
};
|
|
||||||
|
|
||||||
tinyvec_macros = buildRustCrate {
|
|
||||||
pname = "tinyvec_macros";
|
|
||||||
version = "0.1.0";
|
|
||||||
sha256 = "0aim73hyq5g8b2hs9gjq2sv0xm4xzfbwp5fdyg1frljqzkapq682";
|
|
||||||
};
|
|
||||||
|
|
||||||
tinyvec = buildRustCrate {
|
|
||||||
pname = "tinyvec";
|
|
||||||
version = "1.2.0";
|
|
||||||
sha256 = "1c95nma20kiyrjwfsk7hzd5ir6yy4bm63fmfbfb4dm9ahnlvdp3y";
|
|
||||||
features = [ "alloc" ];
|
|
||||||
dependencies = [
|
|
||||||
tinyvec_macros
|
|
||||||
];
|
|
||||||
};
|
|
||||||
|
|
||||||
unicode-normalization = buildRustCrate {
|
|
||||||
pname = "unicode-normalization";
|
|
||||||
version = "0.1.17";
|
|
||||||
sha256 = "0w4s0avzlf7pzcclhhih93aap613398sshm6jrxcwq0f9lhis11c";
|
|
||||||
dependencies = [
|
|
||||||
tinyvec
|
|
||||||
];
|
|
||||||
};
|
|
||||||
|
|
||||||
unicode-bidi = buildRustCrate {
|
|
||||||
pname = "unicode-bidi";
|
|
||||||
version = "0.3.5";
|
|
||||||
sha256 = "193jzlxj1dfcms2381lyd45zh4ywlicj9lzcfpid1zbkmfarymkz";
|
|
||||||
dependencies = [
|
|
||||||
matches
|
|
||||||
];
|
|
||||||
};
|
|
||||||
|
|
||||||
idna = buildRustCrate {
|
|
||||||
pname = "idna";
|
|
||||||
version = "0.2.3";
|
|
||||||
sha256 = "0hwypd0fpym9lmd4bbqpwyr5lhrlvmvzhi1vy9asc5wxwkzrh299";
|
|
||||||
dependencies = [
|
|
||||||
matches
|
|
||||||
unicode-normalization
|
|
||||||
unicode-bidi
|
|
||||||
];
|
|
||||||
};
|
|
||||||
|
|
||||||
url = buildRustCrate {
|
|
||||||
pname = "url";
|
|
||||||
version = "2.2.1";
|
|
||||||
sha256 = "1ci1djafh83qhpzbmxnr9w5gcrjs3ghf8rrxdy4vklqyji6fvn5v";
|
|
||||||
dependencies = [
|
|
||||||
form_urlencoded
|
|
||||||
idna
|
|
||||||
matches
|
|
||||||
percent-encoding
|
|
||||||
];
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
git2 = buildRustCrate {
|
|
||||||
pname = "git2";
|
|
||||||
edition = "2018";
|
|
||||||
version = "0.18.1";
|
|
||||||
sha256 = "1d1wm8cn37svyxgvzfapwilkkc9d2x7fcrgciwn8b2pv9aqz102k";
|
|
||||||
dependencies = [
|
|
||||||
bitflags
|
|
||||||
libc
|
|
||||||
libgit2-sys
|
|
||||||
log
|
|
||||||
url
|
|
||||||
];
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
66
third_party/sources/sources.json
vendored
66
third_party/sources/sources.json
vendored
|
@ -5,10 +5,22 @@
|
||||||
"homepage": "https://matrix.to/#/#agenix:nixos.org",
|
"homepage": "https://matrix.to/#/#agenix:nixos.org",
|
||||||
"owner": "ryantm",
|
"owner": "ryantm",
|
||||||
"repo": "agenix",
|
"repo": "agenix",
|
||||||
"rev": "c2fc0762bbe8feb06a2e59a364fa81b3a57671c9",
|
"rev": "f6291c5935fdc4e0bef208cfc0dcab7e3f7a1c41",
|
||||||
"sha256": "1lpkwinlax40b7xgzspbkm9rsi4a1x48hxhixnni4irxxwnav0ah",
|
"sha256": "1x8nd8hvsq6mvzig122vprwigsr3z2skanig65haqswn7z7amsvg",
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
"url": "https://github.com/ryantm/agenix/archive/c2fc0762bbe8feb06a2e59a364fa81b3a57671c9.tar.gz",
|
"url": "https://github.com/ryantm/agenix/archive/f6291c5935fdc4e0bef208cfc0dcab7e3f7a1c41.tar.gz",
|
||||||
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
|
},
|
||||||
|
"gitignore.nix": {
|
||||||
|
"branch": "master",
|
||||||
|
"description": "Nix functions for filtering local git sources",
|
||||||
|
"homepage": "",
|
||||||
|
"owner": "hercules-ci",
|
||||||
|
"repo": "gitignore.nix",
|
||||||
|
"rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
|
||||||
|
"sha256": "02wxkdpbhlm3yk5mhkhsp3kwakc16xpmsf2baw57nz1dg459qv8w",
|
||||||
|
"type": "tarball",
|
||||||
|
"url": "https://github.com/hercules-ci/gitignore.nix/archive/637db329424fd7e46cf4185293b9cc8c88c95394.tar.gz",
|
||||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
},
|
},
|
||||||
"home-manager": {
|
"home-manager": {
|
||||||
|
@ -17,10 +29,10 @@
|
||||||
"homepage": "https://nix-community.github.io/home-manager/",
|
"homepage": "https://nix-community.github.io/home-manager/",
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
"repo": "home-manager",
|
"repo": "home-manager",
|
||||||
"rev": "a7117efb3725e6197dd95424136f79147aa35e5b",
|
"rev": "a9c9cc6e50f7cbd2d58ccb1cd46a1e06e9e445ff",
|
||||||
"sha256": "02q3ck1hjs8xzdhfikqxrnsfs9vh4p7rmdha3vbp6nkkdbdvhgg7",
|
"sha256": "1cxp9rgczr4rhhx1klwcr7a61khizq8hv63gvmy9gfsx7fp4h60a",
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
"url": "https://github.com/nix-community/home-manager/archive/a7117efb3725e6197dd95424136f79147aa35e5b.tar.gz",
|
"url": "https://github.com/nix-community/home-manager/archive/a9c9cc6e50f7cbd2d58ccb1cd46a1e06e9e445ff.tar.gz",
|
||||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
},
|
},
|
||||||
"impermanence": {
|
"impermanence": {
|
||||||
|
@ -29,10 +41,10 @@
|
||||||
"homepage": "",
|
"homepage": "",
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
"repo": "impermanence",
|
"repo": "impermanence",
|
||||||
"rev": "a33ef102a02ce77d3e39c25197664b7a636f9c30",
|
"rev": "63f4d0443e32b0dd7189001ee1894066765d18a5",
|
||||||
"sha256": "1mig6ns8l5iynsm6pfbnx2b9hmr592s1kqbw6gq1n25czdlcniam",
|
"sha256": "0xnshgwfg834dm9l14p2w3wmhjysjpqpgfk37im0vrk1qgva19g2",
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
"url": "https://github.com/nix-community/impermanence/archive/a33ef102a02ce77d3e39c25197664b7a636f9c30.tar.gz",
|
"url": "https://github.com/nix-community/impermanence/archive/63f4d0443e32b0dd7189001ee1894066765d18a5.tar.gz",
|
||||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
},
|
},
|
||||||
"naersk": {
|
"naersk": {
|
||||||
|
@ -41,10 +53,10 @@
|
||||||
"homepage": "",
|
"homepage": "",
|
||||||
"owner": "nmattia",
|
"owner": "nmattia",
|
||||||
"repo": "naersk",
|
"repo": "naersk",
|
||||||
"rev": "fa19d8c135e776dc97f4dcca08656a0eeb28d5c0",
|
"rev": "3fb418eaf352498f6b6c30592e3beb63df42ef11",
|
||||||
"sha256": "1mif058gcbw5d5yixsmzalqlr0h9m9mmbsgv8v4r2mmsbw83k2x0",
|
"sha256": "0v6ncaqm8q2mdv1jhkjjwi1sx4firlhjxpw4wachkwkriyjnkz5g",
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
"url": "https://github.com/nmattia/naersk/archive/fa19d8c135e776dc97f4dcca08656a0eeb28d5c0.tar.gz",
|
"url": "https://github.com/nmattia/naersk/archive/3fb418eaf352498f6b6c30592e3beb63df42ef11.tar.gz",
|
||||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
},
|
},
|
||||||
"napalm": {
|
"napalm": {
|
||||||
|
@ -53,10 +65,10 @@
|
||||||
"homepage": "",
|
"homepage": "",
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
"repo": "napalm",
|
"repo": "napalm",
|
||||||
"rev": "edcb26c266ca37c9521f6a97f33234633cbec186",
|
"rev": "e1babff744cd278b56abe8478008b4a9e23036cf",
|
||||||
"sha256": "0ai1ax380nnpz0mbgbc5vdzafyjilcmdj7kgv087x2vagpprb4yy",
|
"sha256": "04h62p4hxw7fhclki7hcn739hhig3rh9q4njp24j7bm0dk2kj8h6",
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
"url": "https://github.com/nix-community/napalm/archive/edcb26c266ca37c9521f6a97f33234633cbec186.tar.gz",
|
"url": "https://github.com/nix-community/napalm/archive/e1babff744cd278b56abe8478008b4a9e23036cf.tar.gz",
|
||||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
|
@ -65,10 +77,10 @@
|
||||||
"homepage": "",
|
"homepage": "",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "7f993cdf26ccef564eabf31fdb40d140821e12bc",
|
"rev": "99dc8785f6a0adac95f5e2ab05cc2e1bf666d172",
|
||||||
"sha256": "0dypbvibfdmv14rqlamf451625fw2fyk11prw9bbywi0q2i313d5",
|
"sha256": "11vz7dshwxszab91da1x98qdlmpxi0v7daz24jj3crpll68n93w0",
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
"url": "https://github.com/NixOS/nixpkgs/archive/7f993cdf26ccef564eabf31fdb40d140821e12bc.tar.gz",
|
"url": "https://github.com/NixOS/nixpkgs/archive/99dc8785f6a0adac95f5e2ab05cc2e1bf666d172.tar.gz",
|
||||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
},
|
},
|
||||||
"nixpkgs-stable": {
|
"nixpkgs-stable": {
|
||||||
|
@ -77,10 +89,10 @@
|
||||||
"homepage": "",
|
"homepage": "",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "a2e1d0414259a144ebdc048408a807e69e0565af",
|
"rev": "205fd4226592cc83fd4c0885a3e4c9c400efabb5",
|
||||||
"sha256": "1jv90bz3s7j294fhpb29k735fg3xfs9z848szicqarpbz7wfg03g",
|
"sha256": "1f5d2g1p6nfwycpmrnnmc2xmcszp804adp16knjvdkj8nz36y1fg",
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
"url": "https://github.com/NixOS/nixpkgs/archive/a2e1d0414259a144ebdc048408a807e69e0565af.tar.gz",
|
"url": "https://github.com/NixOS/nixpkgs/archive/205fd4226592cc83fd4c0885a3e4c9c400efabb5.tar.gz",
|
||||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
},
|
},
|
||||||
"rust-overlay": {
|
"rust-overlay": {
|
||||||
|
@ -89,10 +101,10 @@
|
||||||
"homepage": "",
|
"homepage": "",
|
||||||
"owner": "oxalica",
|
"owner": "oxalica",
|
||||||
"repo": "rust-overlay",
|
"repo": "rust-overlay",
|
||||||
"rev": "6dc3e45fe4aee36efeed24d64fc68b1f989d5465",
|
"rev": "20c8461785d8f5af32d8d4d5c128589e23d7f033",
|
||||||
"sha256": "0vqgkzbfdj920lbm1dy8kylrv2gk4ard38lb3i20xvp2mp1d39n2",
|
"sha256": "1zy2jcy2ika83dwcpxxvmimk317zimwn7hv8h3v43apqwssl0nxv",
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
"url": "https://github.com/oxalica/rust-overlay/archive/6dc3e45fe4aee36efeed24d64fc68b1f989d5465.tar.gz",
|
"url": "https://github.com/oxalica/rust-overlay/archive/20c8461785d8f5af32d8d4d5c128589e23d7f033.tar.gz",
|
||||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
},
|
},
|
||||||
"rustsec-advisory-db": {
|
"rustsec-advisory-db": {
|
||||||
|
@ -101,10 +113,10 @@
|
||||||
"homepage": "https://rustsec.org",
|
"homepage": "https://rustsec.org",
|
||||||
"owner": "RustSec",
|
"owner": "RustSec",
|
||||||
"repo": "advisory-db",
|
"repo": "advisory-db",
|
||||||
"rev": "af76d4423761499f954411bb3071dcc72e6b0450",
|
"rev": "3cae2352cf82b5815b98aa309e0f4df6aa737cec",
|
||||||
"sha256": "167qxr66j638km3z7zk2drjdr4bgqz77hr35vkwdp0lbafmd6y1c",
|
"sha256": "0bba56sk4dlrf8rm3dmy9bxf95bq4rm1g3ppk4n2vfw0wzf7v7ap",
|
||||||
"type": "tarball",
|
"type": "tarball",
|
||||||
"url": "https://github.com/RustSec/advisory-db/archive/af76d4423761499f954411bb3071dcc72e6b0450.tar.gz",
|
"url": "https://github.com/RustSec/advisory-db/archive/3cae2352cf82b5815b98aa309e0f4df6aa737cec.tar.gz",
|
||||||
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
602
tools/cheddar/Cargo.lock
generated
602
tools/cheddar/Cargo.lock
generated
File diff suppressed because it is too large
Load diff
|
@ -8,6 +8,7 @@ depot.nix.lazy-deps {
|
||||||
depotfmt.attr = "tools.depotfmt";
|
depotfmt.attr = "tools.depotfmt";
|
||||||
fetch-depot-inbox.attr = "tools.fetch-depot-inbox";
|
fetch-depot-inbox.attr = "tools.fetch-depot-inbox";
|
||||||
git-r.attr = "tools.git-r";
|
git-r.attr = "tools.git-r";
|
||||||
|
git-review.attr = "third_party.nixpkgs.git-review";
|
||||||
gerrit-update.attr = "tools.gerrit-update";
|
gerrit-update.attr = "tools.gerrit-update";
|
||||||
gerrit.attr = "tools.gerrit-cli";
|
gerrit.attr = "tools.gerrit-cli";
|
||||||
hash-password.attr = "tools.hash-password";
|
hash-password.attr = "tools.hash-password";
|
||||||
|
|
|
@ -1,24 +1,14 @@
|
||||||
# Builds treefmt for depot, with a hardcoded configuration that
|
# Builds treefmt for depot, with a hardcoded configuration that
|
||||||
# includes the right paths to formatters.
|
# includes the right paths to formatters.
|
||||||
{ depot, pkgs, ... }:
|
{ pkgs, ... }:
|
||||||
|
|
||||||
let
|
let
|
||||||
# terraform fmt can't handle multiple paths at once, but treefmt
|
|
||||||
# expects this
|
|
||||||
terraformat = pkgs.writeShellScript "terraformat" ''
|
|
||||||
echo "$@" | xargs -n1 ${pkgs.terraform}/bin/terraform fmt
|
|
||||||
'';
|
|
||||||
|
|
||||||
config = pkgs.writeText "depot-treefmt-config" ''
|
config = pkgs.writeText "depot-treefmt-config" ''
|
||||||
[formatter.go]
|
[formatter.go]
|
||||||
command = "${depot.nix.buildGo.go}/bin/gofmt"
|
command = "${pkgs.go}/bin/gofmt"
|
||||||
options = [ "-w" ]
|
options = [ "-w" ]
|
||||||
includes = ["*.go"]
|
includes = ["*.go"]
|
||||||
|
|
||||||
[formatter.tf]
|
|
||||||
command = "${terraformat}"
|
|
||||||
includes = [ "*.tf" ]
|
|
||||||
|
|
||||||
[formatter.nix]
|
[formatter.nix]
|
||||||
command = "${pkgs.nixpkgs-fmt}/bin/nixpkgs-fmt"
|
command = "${pkgs.nixpkgs-fmt}/bin/nixpkgs-fmt"
|
||||||
includes = [ "*.nix" ]
|
includes = [ "*.nix" ]
|
||||||
|
@ -28,8 +18,10 @@ let
|
||||||
|
|
||||||
[formatter.rust]
|
[formatter.rust]
|
||||||
command = "${pkgs.rustfmt}/bin/rustfmt"
|
command = "${pkgs.rustfmt}/bin/rustfmt"
|
||||||
|
options = ["--edition", "2021"]
|
||||||
includes = [ "*.rs" ]
|
includes = [ "*.rs" ]
|
||||||
excludes = [
|
excludes = [
|
||||||
|
"users/emery/*",
|
||||||
"users/tazjin/*",
|
"users/tazjin/*",
|
||||||
]
|
]
|
||||||
'';
|
'';
|
||||||
|
@ -53,10 +45,12 @@ let
|
||||||
'';
|
'';
|
||||||
in
|
in
|
||||||
depotfmt.overrideAttrs (_: {
|
depotfmt.overrideAttrs (_: {
|
||||||
passthru.config = config;
|
passthru = {
|
||||||
passthru.meta.ci.extraSteps.check = {
|
inherit config check;
|
||||||
label = "depot formatting check";
|
meta.ci.extraSteps.check = {
|
||||||
command = check;
|
label = "depot formatting check";
|
||||||
alwaysRun = true;
|
command = check;
|
||||||
|
alwaysRun = true;
|
||||||
|
};
|
||||||
};
|
};
|
||||||
})
|
})
|
||||||
|
|
23
tools/eaglemode/commands/B.nix
Normal file
23
tools/eaglemode/commands/B.nix
Normal file
|
@ -0,0 +1,23 @@
|
||||||
|
{ depot, pkgs, ... }:
|
||||||
|
|
||||||
|
let
|
||||||
|
em = depot.tools.eaglemode;
|
||||||
|
in
|
||||||
|
em.mkCommand {
|
||||||
|
name = "9 B";
|
||||||
|
hotkey = "Ctrl+E";
|
||||||
|
icon = "${./plan9.tga}";
|
||||||
|
|
||||||
|
description = ''
|
||||||
|
Plumb target to Sam or Acme
|
||||||
|
'';
|
||||||
|
|
||||||
|
code = ''
|
||||||
|
ErrorIfNotSingleTarget();
|
||||||
|
|
||||||
|
my @tgt=GetTgt();
|
||||||
|
my $dir=$tgt[0];
|
||||||
|
|
||||||
|
ExecOrError('${pkgs.plan9port}/bin/9', 'B', $dir);
|
||||||
|
'';
|
||||||
|
}
|
26
tools/eaglemode/commands/emacsclient.nix
Normal file
26
tools/eaglemode/commands/emacsclient.nix
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
{ depot, pkgs, ... }:
|
||||||
|
|
||||||
|
let
|
||||||
|
em = depot.tools.eaglemode;
|
||||||
|
icon = em.mkTGA "emacs" "${pkgs.emacs}/share/icons/hicolor/128x128/apps/emacs.png";
|
||||||
|
in
|
||||||
|
em.mkCommand {
|
||||||
|
name = "Emacsclient";
|
||||||
|
hotkey = "Ctrl+E";
|
||||||
|
icon = "${icon}";
|
||||||
|
|
||||||
|
description = ''
|
||||||
|
Open target in Emacsclient.
|
||||||
|
|
||||||
|
Emacs server must be running already for this to have any effect.
|
||||||
|
'';
|
||||||
|
|
||||||
|
code = ''
|
||||||
|
ErrorIfNotSingleTarget();
|
||||||
|
|
||||||
|
my @tgt=GetTgt();
|
||||||
|
my $dir=$tgt[0];
|
||||||
|
|
||||||
|
ExecOrError('${pkgs.emacs}/bin/emacsclient', '-n', $dir);
|
||||||
|
'';
|
||||||
|
}
|
BIN
tools/eaglemode/commands/plan9.tga
Normal file
BIN
tools/eaglemode/commands/plan9.tga
Normal file
Binary file not shown.
146
tools/eaglemode/default.nix
Normal file
146
tools/eaglemode/default.nix
Normal file
|
@ -0,0 +1,146 @@
|
||||||
|
# Helper functions for extending Eagle Mode with useful stuff.
|
||||||
|
#
|
||||||
|
# Eagle Mode's customisation usually expects people to copy the entire
|
||||||
|
# configuration into their user folder, which we can automate fairly easily
|
||||||
|
# using Nix, letting users choose whether to keep upstream config or not.
|
||||||
|
{ depot, lib, pkgs, ... }:
|
||||||
|
|
||||||
|
let
|
||||||
|
mkDesc = d: lib.concatMapStringsSep "\n"
|
||||||
|
(x: "# Descr =${x}")
|
||||||
|
(builtins.filter (s: s != "") (lib.splitString "\n" d));
|
||||||
|
|
||||||
|
configWrapper = pkgs.runCommand "eaglemode-config-wrapper" { } ''
|
||||||
|
cp ${./wrapper.go} wrapper.go
|
||||||
|
export HOME=$PWD
|
||||||
|
${pkgs.go}/bin/go build wrapper.go
|
||||||
|
install -Dm755 wrapper $out/bin/wrapper
|
||||||
|
'';
|
||||||
|
in
|
||||||
|
rec {
|
||||||
|
# mkCommand creates an Eagle Mode command for the file browser.
|
||||||
|
#
|
||||||
|
# Commands are basically little Perl scripts with a command standard library
|
||||||
|
# available. They receive the user's selected target from Eagle Mode.
|
||||||
|
mkCommand = lib.makeOverridable (
|
||||||
|
{
|
||||||
|
# Name of the command.
|
||||||
|
name
|
||||||
|
, # User-facing description, displayed in Eagle Mode UI. Can be multi-line.
|
||||||
|
description
|
||||||
|
, # Verbatim Perl code of the command. Command library is already available.
|
||||||
|
code
|
||||||
|
, # Caption for the UI button (defaults to name).
|
||||||
|
caption ? name
|
||||||
|
, icon ? "terminal.tga"
|
||||||
|
, # TODO: what's a good default?
|
||||||
|
hotkey ? ""
|
||||||
|
, order ? 1.0
|
||||||
|
}: pkgs.writeTextDir "emFileMan/Commands/${name}.pl" (''
|
||||||
|
#!${pkgs.perl}/bin/perl
|
||||||
|
#[[BEGIN PROPERTIES]]
|
||||||
|
# Type = Command
|
||||||
|
# Interpreter = perl
|
||||||
|
# DefaultFor = directory
|
||||||
|
# Caption = ${caption}
|
||||||
|
# Order = ${toString order}
|
||||||
|
# Icon = ${icon}
|
||||||
|
''
|
||||||
|
+ (lib.optionalString (description != "") "${mkDesc description}\n")
|
||||||
|
+ (lib.optionalString (hotkey != "") "# Hotkey = ${hotkey}\n")
|
||||||
|
+ ''
|
||||||
|
#[[END PROPERTIES]]
|
||||||
|
|
||||||
|
use strict;
|
||||||
|
use warnings;
|
||||||
|
BEGIN { require "$ENV{'EM_DIR'}/res/emFileMan/scripts/cmd-util.pl"; }
|
||||||
|
|
||||||
|
${if builtins.isString code
|
||||||
|
then code
|
||||||
|
else (if builtins.isPath code
|
||||||
|
then builtins.readFile code
|
||||||
|
else throw "code must be a string (literal code) or path to file")}
|
||||||
|
'')
|
||||||
|
);
|
||||||
|
|
||||||
|
# mkTGA converts the given image to a TGA image.
|
||||||
|
mkTGA = name: path: pkgs.runCommand "${name}.tga" { } ''
|
||||||
|
${pkgs.imagemagick}/bin/convert ${path} $out
|
||||||
|
'';
|
||||||
|
|
||||||
|
buildPlugin = lib.makeOverridable (
|
||||||
|
{ name
|
||||||
|
, src
|
||||||
|
, version
|
||||||
|
, eaglemode ? pkgs.eaglemode
|
||||||
|
, target ? name
|
||||||
|
, extraNativeBuildInputs ? [ ]
|
||||||
|
, extraBuildInputs ? [ ]
|
||||||
|
}:
|
||||||
|
pkgs.stdenv.mkDerivation {
|
||||||
|
pname = "eaglemode-plugin-${name}";
|
||||||
|
inherit src version;
|
||||||
|
# inherit (eaglemode.drvAttrs) dontPatchELF;
|
||||||
|
|
||||||
|
nativeBuildInputs = eaglemode.drvAttrs.nativeBuildInputs ++ extraNativeBuildInputs;
|
||||||
|
buildInputs = eaglemode.drvAttrs.buildInputs ++ extraBuildInputs ++ [ eaglemode ];
|
||||||
|
|
||||||
|
buildPhase = ''
|
||||||
|
runHook preBuild
|
||||||
|
|
||||||
|
# merge eaglemode & plugin folders
|
||||||
|
cp -r ${pkgs.srcOnly eaglemode} merged-src && chmod -R u+rw merged-src
|
||||||
|
cp -r $src/* merged-src && chmod -R u+rw merged-src
|
||||||
|
cd merged-src
|
||||||
|
|
||||||
|
export NIX_LDFLAGS="$NIX_LDFLAGS -lXxf86vm -lXext -lXinerama"
|
||||||
|
perl make.pl build projects=${target} continue=no
|
||||||
|
|
||||||
|
runHook postBuild
|
||||||
|
'';
|
||||||
|
|
||||||
|
installPhase = ''
|
||||||
|
runHook preInstall
|
||||||
|
|
||||||
|
mkdir -p $out/lib
|
||||||
|
cp -r lib/lib${target}.so $out/lib
|
||||||
|
|
||||||
|
if [ -d "$src/etc" ]; then
|
||||||
|
cp -r $src/etc/* $out
|
||||||
|
fi
|
||||||
|
|
||||||
|
runHook postInstall
|
||||||
|
'';
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
# etcDir creates a directory layout suitable for use in the EM_USER_CONFIG_DIR
|
||||||
|
# environment variable.
|
||||||
|
#
|
||||||
|
# Note that Eagle Mode requires the value of that variable to be mutable at
|
||||||
|
# runtime (it is the same place where it persists all of its user-controlled
|
||||||
|
# state), so the results of this function can not be used directly.
|
||||||
|
etcDir =
|
||||||
|
{ eaglemode ? pkgs.eaglemode
|
||||||
|
, extraPaths ? [ ]
|
||||||
|
}: pkgs.runCommand "eaglemode-config" { } ''
|
||||||
|
mkdir $out
|
||||||
|
|
||||||
|
${
|
||||||
|
lib.concatMapStringsSep "\n" (s: "cp -rT ${s} $out/\nchmod -R u+rw $out/\n") ([ "${eaglemode}/etc"] ++ extraPaths)
|
||||||
|
}
|
||||||
|
'';
|
||||||
|
|
||||||
|
# withConfig creates an Eagle Mode wrapper that runs it with the given
|
||||||
|
# configuration.
|
||||||
|
withConfig = { eaglemode ? pkgs.eaglemode, config }: pkgs.writeShellScriptBin "eaglemode" ''
|
||||||
|
${configWrapper}/bin/wrapper --em-config "${config}"
|
||||||
|
|
||||||
|
if [ -d "${config}/lib" ]; then
|
||||||
|
export LD_LIBRARY_PATH="${config}/lib:$LD_LIBRARY_PATH"
|
||||||
|
exec ${eaglemode}/bin/eaglemode "$@"
|
||||||
|
fi
|
||||||
|
|
||||||
|
exec ${eaglemode}/bin/eaglemode "$@"
|
||||||
|
'';
|
||||||
|
}
|
10
tools/eaglemode/plugins/avif/default.nix
Normal file
10
tools/eaglemode/plugins/avif/default.nix
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
{ depot, pkgs, ... }:
|
||||||
|
|
||||||
|
depot.tools.eaglemode.buildPlugin {
|
||||||
|
name = "avif";
|
||||||
|
version = "canon";
|
||||||
|
src = ./.;
|
||||||
|
target = "PlAvif";
|
||||||
|
extraBuildInputs = [ pkgs.libavif ];
|
||||||
|
extraNativeBuildInputs = [ pkgs.pkg-config ];
|
||||||
|
}
|
|
@ -0,0 +1,6 @@
|
||||||
|
#%rec:emFpPlugin%#
|
||||||
|
|
||||||
|
FileTypes = { ".avif" }
|
||||||
|
Priority = 1.0
|
||||||
|
Library = "PlAvif"
|
||||||
|
Function = "PlAvifFpPluginFunc"
|
64
tools/eaglemode/plugins/avif/makers/PlAvif.maker.pm
Normal file
64
tools/eaglemode/plugins/avif/makers/PlAvif.maker.pm
Normal file
|
@ -0,0 +1,64 @@
|
||||||
|
package PlAvif;
|
||||||
|
|
||||||
|
use strict;
|
||||||
|
use warnings;
|
||||||
|
|
||||||
|
sub GetDependencies
|
||||||
|
{
|
||||||
|
return ('emCore');
|
||||||
|
}
|
||||||
|
|
||||||
|
sub IsEssential
|
||||||
|
{
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
sub GetFileHandlingrules
|
||||||
|
{
|
||||||
|
return ();
|
||||||
|
}
|
||||||
|
|
||||||
|
sub GetExtraBuildOptions
|
||||||
|
{
|
||||||
|
return ();
|
||||||
|
}
|
||||||
|
|
||||||
|
sub Build
|
||||||
|
{
|
||||||
|
shift;
|
||||||
|
my %options=@_;
|
||||||
|
|
||||||
|
my @libAvifFlags=();
|
||||||
|
if ($options{'avif-inc-dir'} eq '' && $options{'avif-lib-dir'} eq '') {
|
||||||
|
@libAvifFlags=split("\n",readpipe(
|
||||||
|
"perl \"".$options{'utils'}."/PkgConfig.pl\" libavif"
|
||||||
|
));
|
||||||
|
}
|
||||||
|
if (!@libAvifFlags) {
|
||||||
|
if ($options{'avif-inc-dir'} ne '') {
|
||||||
|
push(@libAvifFlags, "--inc-search-dir", $options{'avif-inc-dir'});
|
||||||
|
}
|
||||||
|
if ($options{'avif-lib-dir'} ne '') {
|
||||||
|
push(@libAvifFlags, "--lib-search-dir", $options{'avif-lib-dir'});
|
||||||
|
}
|
||||||
|
push(@libAvifFlags, "--link", "avif");
|
||||||
|
}
|
||||||
|
|
||||||
|
system(
|
||||||
|
@{$options{'unicc_call'}},
|
||||||
|
"--math",
|
||||||
|
"--rtti",
|
||||||
|
"--exceptions",
|
||||||
|
"--bin-dir" , "bin",
|
||||||
|
"--lib-dir" , "lib",
|
||||||
|
"--obj-dir" , "obj",
|
||||||
|
"--inc-search-dir", "include",
|
||||||
|
@libAvifFlags,
|
||||||
|
"--link" , "emCore",
|
||||||
|
"--type" , "dynlib",
|
||||||
|
"--name" , "PlAvif",
|
||||||
|
"src/PlAvif.cpp"
|
||||||
|
)==0 or return 0;
|
||||||
|
|
||||||
|
return 1;
|
||||||
|
}
|
190
tools/eaglemode/plugins/avif/src/PlAvif.cpp
Normal file
190
tools/eaglemode/plugins/avif/src/PlAvif.cpp
Normal file
|
@ -0,0 +1,190 @@
|
||||||
|
#include <emCore/emFpPlugin.h>
|
||||||
|
#include <emCore/emImageFile.h>
|
||||||
|
|
||||||
|
#include "avif/avif.h"
|
||||||
|
|
||||||
|
class PlAvifImageFileModel : public emImageFileModel
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
|
||||||
|
static emRef<PlAvifImageFileModel> Acquire(
|
||||||
|
emContext & context, const emString & name, bool common=true
|
||||||
|
);
|
||||||
|
|
||||||
|
protected:
|
||||||
|
PlAvifImageFileModel(emContext & context, const emString & name);
|
||||||
|
virtual ~PlAvifImageFileModel();
|
||||||
|
virtual void TryStartLoading();
|
||||||
|
virtual bool TryContinueLoading();
|
||||||
|
virtual void QuitLoading();
|
||||||
|
virtual void TryStartSaving();
|
||||||
|
virtual bool TryContinueSaving();
|
||||||
|
virtual void QuitSaving();
|
||||||
|
virtual emUInt64 CalcMemoryNeed();
|
||||||
|
virtual double CalcFileProgress();
|
||||||
|
|
||||||
|
private:
|
||||||
|
struct LoadingState;
|
||||||
|
LoadingState * L = NULL;
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
struct PlAvifImageFileModel::LoadingState {
|
||||||
|
avifRGBImage rgb;
|
||||||
|
avifDecoder * decoder;
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
emRef<PlAvifImageFileModel> PlAvifImageFileModel::Acquire(
|
||||||
|
emContext & context, const emString & name, bool common
|
||||||
|
)
|
||||||
|
{
|
||||||
|
EM_IMPL_ACQUIRE(PlAvifImageFileModel, context, name, common)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
PlAvifImageFileModel::PlAvifImageFileModel(
|
||||||
|
emContext & context, const emString & name
|
||||||
|
)
|
||||||
|
: emImageFileModel(context, name)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
PlAvifImageFileModel::~PlAvifImageFileModel()
|
||||||
|
{
|
||||||
|
PlAvifImageFileModel::QuitLoading();
|
||||||
|
PlAvifImageFileModel::QuitSaving();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void PlAvifImageFileModel::TryStartLoading()
|
||||||
|
{
|
||||||
|
avifResult result;
|
||||||
|
|
||||||
|
L = new LoadingState;
|
||||||
|
memset(L, 0, sizeof(LoadingState));
|
||||||
|
|
||||||
|
L->decoder = avifDecoderCreate();
|
||||||
|
if (L->decoder == NULL) {
|
||||||
|
throw emException("failed to create AVIF decoder");
|
||||||
|
}
|
||||||
|
|
||||||
|
result = avifDecoderSetIOFile(L->decoder, GetFilePath());
|
||||||
|
if (result != AVIF_RESULT_OK) {
|
||||||
|
throw emException("%s", avifResultToString(result));
|
||||||
|
}
|
||||||
|
|
||||||
|
result = avifDecoderParse(L->decoder);
|
||||||
|
if (result != AVIF_RESULT_OK) {
|
||||||
|
throw emException("%s", avifResultToString(result));
|
||||||
|
}
|
||||||
|
|
||||||
|
FileFormatInfo = emString::Format(
|
||||||
|
"AVIF %s %ubpc",
|
||||||
|
avifPixelFormatToString(L->decoder->image->yuvFormat),
|
||||||
|
L->decoder->image->depth
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
Signal(ChangeSignal);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
bool PlAvifImageFileModel::TryContinueLoading()
|
||||||
|
{
|
||||||
|
avifResult result;
|
||||||
|
|
||||||
|
if (!Image.GetHeight()) {
|
||||||
|
Image.Setup(
|
||||||
|
L->decoder->image->width,
|
||||||
|
L->decoder->image->height,
|
||||||
|
L->decoder->alphaPresent ? 4 : 3
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
result = avifDecoderNextImage(L->decoder);
|
||||||
|
if (result != AVIF_RESULT_OK) {
|
||||||
|
throw emException("%s", avifResultToString(result));
|
||||||
|
}
|
||||||
|
|
||||||
|
avifRGBImageSetDefaults(&L->rgb, L->decoder->image);
|
||||||
|
L->rgb.format = L->decoder->alphaPresent ?
|
||||||
|
AVIF_RGB_FORMAT_RGBA : AVIF_RGB_FORMAT_RGB;
|
||||||
|
L->rgb.pixels = Image.GetWritableMap();
|
||||||
|
L->rgb.width = Image.GetWidth();
|
||||||
|
L->rgb.height = Image.GetHeight();
|
||||||
|
L->rgb.depth = 8;
|
||||||
|
L->rgb.rowBytes = Image.GetWidth() * Image.GetChannelCount();
|
||||||
|
|
||||||
|
result = avifImageYUVToRGB(L->decoder->image, &L->rgb);
|
||||||
|
if (result != AVIF_RESULT_OK) {
|
||||||
|
throw emException("%s", avifResultToString(result));
|
||||||
|
}
|
||||||
|
|
||||||
|
Signal(ChangeSignal);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void PlAvifImageFileModel::QuitLoading()
|
||||||
|
{
|
||||||
|
if (L) {
|
||||||
|
if (L->decoder) avifDecoderDestroy(L->decoder);
|
||||||
|
delete L;
|
||||||
|
L = NULL;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void PlAvifImageFileModel::TryStartSaving()
|
||||||
|
{
|
||||||
|
throw emException("PlAvifImageFileModel: Saving not implemented.");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
bool PlAvifImageFileModel::TryContinueSaving()
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void PlAvifImageFileModel::QuitSaving()
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
emUInt64 PlAvifImageFileModel::CalcMemoryNeed()
|
||||||
|
{
|
||||||
|
return
|
||||||
|
(emUInt64)
|
||||||
|
L->decoder->image->width *
|
||||||
|
L->decoder->image->height *
|
||||||
|
(L->decoder->alphaPresent ? 4 : 3);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
double PlAvifImageFileModel::CalcFileProgress()
|
||||||
|
{
|
||||||
|
return 0.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
extern "C" {
|
||||||
|
emPanel * PlAvifFpPluginFunc(
|
||||||
|
emPanel::ParentArg parent, const emString & name,
|
||||||
|
const emString & path, emFpPlugin * plugin,
|
||||||
|
emString * errorBuf
|
||||||
|
)
|
||||||
|
{
|
||||||
|
if (plugin->Properties.GetCount()) {
|
||||||
|
*errorBuf="PlAvifFpPlugin: No properties allowed.";
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
return new emImageFilePanel(
|
||||||
|
parent, name,
|
||||||
|
PlAvifImageFileModel::Acquire(
|
||||||
|
parent.GetRootContext(), path
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
17
tools/eaglemode/plugins/example.nix
Normal file
17
tools/eaglemode/plugins/example.nix
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
{ depot, pkgs, ... }:
|
||||||
|
|
||||||
|
let
|
||||||
|
em = depot.tools.eaglemode;
|
||||||
|
emSrc = with pkgs; srcOnly eaglemode;
|
||||||
|
in
|
||||||
|
em.buildPlugin {
|
||||||
|
name = "example";
|
||||||
|
version = "canon";
|
||||||
|
|
||||||
|
src = pkgs.runCommand "em-plugin-example-src" { } ''
|
||||||
|
set -ux
|
||||||
|
cp -r ${emSrc}/doc/examples/CppApiExamples/PluginExample $out
|
||||||
|
'';
|
||||||
|
|
||||||
|
target = "PlEx";
|
||||||
|
}
|
12
tools/eaglemode/plugins/qoi/default.nix
Normal file
12
tools/eaglemode/plugins/qoi/default.nix
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
{ depot, pkgs, ... }:
|
||||||
|
|
||||||
|
let
|
||||||
|
em = depot.tools.eaglemode;
|
||||||
|
emSrc = pkgs.srcOnly pkgs.em;
|
||||||
|
in
|
||||||
|
em.buildPlugin {
|
||||||
|
name = "qoi";
|
||||||
|
version = "canon";
|
||||||
|
src = ./.;
|
||||||
|
target = "PlQoi";
|
||||||
|
}
|
|
@ -0,0 +1,6 @@
|
||||||
|
#%rec:emFpPlugin%#
|
||||||
|
|
||||||
|
FileTypes = { ".qoi" }
|
||||||
|
Priority = 1.0
|
||||||
|
Library = "PlQoi"
|
||||||
|
Function = "PlQoiFpPluginFunc"
|
47
tools/eaglemode/plugins/qoi/makers/PlQoi.maker.pm
Normal file
47
tools/eaglemode/plugins/qoi/makers/PlQoi.maker.pm
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
package PlQoi;
|
||||||
|
|
||||||
|
use strict;
|
||||||
|
use warnings;
|
||||||
|
|
||||||
|
sub GetDependencies
|
||||||
|
{
|
||||||
|
return ('emCore');
|
||||||
|
}
|
||||||
|
|
||||||
|
sub IsEssential
|
||||||
|
{
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
sub GetFileHandlingrules
|
||||||
|
{
|
||||||
|
return ();
|
||||||
|
}
|
||||||
|
|
||||||
|
sub GetExtraBuildOptions
|
||||||
|
{
|
||||||
|
return ();
|
||||||
|
}
|
||||||
|
|
||||||
|
sub Build
|
||||||
|
{
|
||||||
|
shift;
|
||||||
|
my %options=@_;
|
||||||
|
|
||||||
|
system(
|
||||||
|
@{$options{'unicc_call'}},
|
||||||
|
"--math",
|
||||||
|
"--rtti",
|
||||||
|
"--exceptions",
|
||||||
|
"--bin-dir" , "bin",
|
||||||
|
"--lib-dir" , "lib",
|
||||||
|
"--obj-dir" , "obj",
|
||||||
|
"--inc-search-dir", "include",
|
||||||
|
"--link" , "emCore",
|
||||||
|
"--type" , "dynlib",
|
||||||
|
"--name" , "PlQoi",
|
||||||
|
"src/PlQoi.cpp"
|
||||||
|
)==0 or return 0;
|
||||||
|
|
||||||
|
return 1;
|
||||||
|
}
|
273
tools/eaglemode/plugins/qoi/src/PlQoi.cpp
Normal file
273
tools/eaglemode/plugins/qoi/src/PlQoi.cpp
Normal file
|
@ -0,0 +1,273 @@
|
||||||
|
#include <emCore/emFpPlugin.h>
|
||||||
|
#include <emCore/emImageFile.h>
|
||||||
|
|
||||||
|
/*
|
||||||
|
QOI Utilities
|
||||||
|
|
||||||
|
Copyright (c) 2021, Dominic Szablewski - https://phoboslab.org
|
||||||
|
SPDX-License-Identifier: MIT
|
||||||
|
*/
|
||||||
|
|
||||||
|
#define QOI_OP_INDEX 0x00 /* 00xxxxxx */
|
||||||
|
#define QOI_OP_DIFF 0x40 /* 01xxxxxx */
|
||||||
|
#define QOI_OP_LUMA 0x80 /* 10xxxxxx */
|
||||||
|
#define QOI_OP_RUN 0xc0 /* 11xxxxxx */
|
||||||
|
#define QOI_OP_RGB 0xfe /* 11111110 */
|
||||||
|
#define QOI_OP_RGBA 0xff /* 11111111 */
|
||||||
|
|
||||||
|
#define QOI_MASK_2 0xc0 /* 11000000 */
|
||||||
|
|
||||||
|
#define QOI_COLOR_HASH(C) (C.GetRed()*3 + C.GetGreen()*5 + C.GetBlue()*7 + C.GetAlpha()*11)
|
||||||
|
|
||||||
|
#define QOI_MAGIC \
|
||||||
|
(((unsigned int)'q') << 24 | ((unsigned int)'o') << 16 | \
|
||||||
|
((unsigned int)'i') << 8 | ((unsigned int)'f'))
|
||||||
|
|
||||||
|
#define QOI_HEADER_SIZE 14
|
||||||
|
|
||||||
|
static unsigned int qoi_read_32(const unsigned char *bytes, int *p) {
|
||||||
|
unsigned int a = bytes[(*p)++];
|
||||||
|
unsigned int b = bytes[(*p)++];
|
||||||
|
unsigned int c = bytes[(*p)++];
|
||||||
|
unsigned int d = bytes[(*p)++];
|
||||||
|
return a << 24 | b << 16 | c << 8 | d;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class PlQoiImageFileModel : public emImageFileModel
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
|
||||||
|
static emRef<PlQoiImageFileModel> Acquire(
|
||||||
|
emContext & context, const emString & name, bool common=true
|
||||||
|
);
|
||||||
|
|
||||||
|
protected:
|
||||||
|
PlQoiImageFileModel(emContext & context, const emString & name);
|
||||||
|
virtual ~PlQoiImageFileModel();
|
||||||
|
virtual void TryStartLoading();
|
||||||
|
virtual bool TryContinueLoading();
|
||||||
|
virtual void QuitLoading();
|
||||||
|
virtual void TryStartSaving();
|
||||||
|
virtual bool TryContinueSaving();
|
||||||
|
virtual void QuitSaving();
|
||||||
|
virtual emUInt64 CalcMemoryNeed();
|
||||||
|
virtual double CalcFileProgress();
|
||||||
|
|
||||||
|
private:
|
||||||
|
struct LoadingState;
|
||||||
|
LoadingState * L = NULL;
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
struct PlQoiImageFileModel::LoadingState {
|
||||||
|
FILE * file;
|
||||||
|
unsigned int width, height, channels;
|
||||||
|
size_t file_len;
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
emRef<PlQoiImageFileModel> PlQoiImageFileModel::Acquire(
|
||||||
|
emContext & context, const emString & name, bool common
|
||||||
|
)
|
||||||
|
{
|
||||||
|
EM_IMPL_ACQUIRE(PlQoiImageFileModel, context, name, common)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
PlQoiImageFileModel::PlQoiImageFileModel(
|
||||||
|
emContext & context, const emString & name
|
||||||
|
)
|
||||||
|
: emImageFileModel(context, name)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
PlQoiImageFileModel::~PlQoiImageFileModel()
|
||||||
|
{
|
||||||
|
PlQoiImageFileModel::QuitLoading();
|
||||||
|
PlQoiImageFileModel::QuitSaving();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void PlQoiImageFileModel::TryStartLoading()
|
||||||
|
{
|
||||||
|
unsigned char header[QOI_HEADER_SIZE];
|
||||||
|
unsigned int header_magic, colorspace;
|
||||||
|
int pos = 0;
|
||||||
|
|
||||||
|
L = new LoadingState;
|
||||||
|
memset(L, 0, sizeof(LoadingState));
|
||||||
|
L->file = fopen(GetFilePath(),"rb");
|
||||||
|
if (!L->file) throw emException("%s",emGetErrorText(errno).Get());
|
||||||
|
|
||||||
|
if (fread(header, 1, sizeof(header), L->file) != sizeof(header)) {
|
||||||
|
if (ferror(L->file)) {
|
||||||
|
throw emException("%s",emGetErrorText(errno).Get());
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
throw emException("QOI header not found");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
header_magic = qoi_read_32(header, &pos);
|
||||||
|
L->width = qoi_read_32(header, &pos);
|
||||||
|
L->height = qoi_read_32(header, &pos);
|
||||||
|
L->channels = header[pos++];
|
||||||
|
colorspace = header[pos++];
|
||||||
|
|
||||||
|
if (
|
||||||
|
L->width == 0 || L->height == 0 ||
|
||||||
|
L->channels < 3 || L->channels > 4 ||
|
||||||
|
colorspace > 1 ||
|
||||||
|
header_magic != QOI_MAGIC
|
||||||
|
) {
|
||||||
|
throw emException("QOI header not valid");
|
||||||
|
}
|
||||||
|
|
||||||
|
fseek(L->file, 0, SEEK_END);
|
||||||
|
L->file_len = ftell(L->file);
|
||||||
|
|
||||||
|
if (L->file_len <= QOI_HEADER_SIZE || fseek(L->file, 0, SEEK_SET) != 0) {
|
||||||
|
throw emException("QOI data incomplete");
|
||||||
|
}
|
||||||
|
|
||||||
|
FileFormatInfo = "QOI ";
|
||||||
|
FileFormatInfo += (
|
||||||
|
colorspace ? "all channels linear" : "sRGB with linear alpha"
|
||||||
|
);
|
||||||
|
|
||||||
|
Signal(ChangeSignal);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
bool PlQoiImageFileModel::TryContinueLoading()
|
||||||
|
{
|
||||||
|
emArray<unsigned char> data;
|
||||||
|
emColor index[64];
|
||||||
|
emColor px { 0, 0, 0, 255 };
|
||||||
|
int pos = QOI_HEADER_SIZE;
|
||||||
|
int run = 0;
|
||||||
|
|
||||||
|
if (!Image.GetHeight()) {
|
||||||
|
Image.Setup(L->width, L->height, L->channels);
|
||||||
|
}
|
||||||
|
|
||||||
|
data.SetCount(L->file_len);
|
||||||
|
if (fread(data.GetWritable(), 1, L->file_len, L->file) < L->file_len) {
|
||||||
|
if (ferror(L->file)) {
|
||||||
|
throw emException("%s",emGetErrorText(errno).Get());
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
throw emException("QOI data incomplete");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
memset(index, 0, sizeof(index));
|
||||||
|
|
||||||
|
for (int px_y = 0; px_y < L->height; px_y++) {
|
||||||
|
for (int px_x = 0; px_x < L->width; px_x++) {
|
||||||
|
if (run > 0) {
|
||||||
|
run--;
|
||||||
|
} else if (pos < data.GetCount()) {
|
||||||
|
int b1 = data.Get(pos++);
|
||||||
|
|
||||||
|
if (b1 == QOI_OP_RGB) {
|
||||||
|
px.SetRed( data.Get(pos++));
|
||||||
|
px.SetGreen( data.Get(pos++));
|
||||||
|
px.SetBlue( data.Get(pos++));
|
||||||
|
} else if (b1 == QOI_OP_RGBA) {
|
||||||
|
px.SetRed( data.Get(pos++));
|
||||||
|
px.SetGreen( data.Get(pos++));
|
||||||
|
px.SetBlue( data.Get(pos++));
|
||||||
|
px.SetAlpha( data.Get(pos++));
|
||||||
|
} else if ((b1 & QOI_MASK_2) == QOI_OP_INDEX) {
|
||||||
|
px = index[b1];
|
||||||
|
} else if ((b1 & QOI_MASK_2) == QOI_OP_DIFF) {
|
||||||
|
px.SetRed(
|
||||||
|
px.GetRed() + ((b1 >> 4) & 0x03) - 2);
|
||||||
|
px.SetGreen(
|
||||||
|
px.GetGreen() + ((b1 >> 2) & 0x03) - 2);
|
||||||
|
px.SetBlue(
|
||||||
|
px.GetBlue() + ( b1 & 0x03) - 2);
|
||||||
|
} else if ((b1 & QOI_MASK_2) == QOI_OP_LUMA) {
|
||||||
|
int b2 = data.Get(pos++);
|
||||||
|
int vg = (b1 & 0x3f) - 32;
|
||||||
|
px.SetRed(
|
||||||
|
px.GetRed() + vg - 8 + ((b2 >> 4) & 0x0f));
|
||||||
|
px.SetGreen(
|
||||||
|
px.GetGreen() + vg);
|
||||||
|
px.SetBlue(
|
||||||
|
px.GetBlue() + vg - 8 + (b2 & 0x0f));
|
||||||
|
} else if ((b1 & QOI_MASK_2) == QOI_OP_RUN) {
|
||||||
|
run = (b1 & 0x3f);
|
||||||
|
}
|
||||||
|
index[QOI_COLOR_HASH(px) % 64] = px;
|
||||||
|
}
|
||||||
|
Image.SetPixel(px_x, px_y, px);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Signal(ChangeSignal);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void PlQoiImageFileModel::QuitLoading()
|
||||||
|
{
|
||||||
|
if (L) {
|
||||||
|
if (L->file) fclose(L->file);
|
||||||
|
delete L;
|
||||||
|
L = NULL;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void PlQoiImageFileModel::TryStartSaving()
|
||||||
|
{
|
||||||
|
throw emException("PlQoiImageFileModel: Saving not implemented.");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
bool PlQoiImageFileModel::TryContinueSaving()
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void PlQoiImageFileModel::QuitSaving()
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
emUInt64 PlQoiImageFileModel::CalcMemoryNeed()
|
||||||
|
{
|
||||||
|
return
|
||||||
|
(emUInt64)L->width * L->height * L->channels + L->file_len;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
double PlQoiImageFileModel::CalcFileProgress()
|
||||||
|
{
|
||||||
|
return 0.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
extern "C" {
|
||||||
|
emPanel * PlQoiFpPluginFunc(
|
||||||
|
emPanel::ParentArg parent, const emString & name,
|
||||||
|
const emString & path, emFpPlugin * plugin,
|
||||||
|
emString * errorBuf
|
||||||
|
)
|
||||||
|
{
|
||||||
|
if (plugin->Properties.GetCount()) {
|
||||||
|
*errorBuf="PlQoiFpPlugin: No properties allowed.";
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
return new emImageFilePanel(
|
||||||
|
parent, name,
|
||||||
|
PlQoiImageFileModel::Acquire(
|
||||||
|
parent.GetRootContext(), path
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
18
tools/eaglemode/plugins/yatracker/default.nix
Normal file
18
tools/eaglemode/plugins/yatracker/default.nix
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
{ depot, pkgs, ... }:
|
||||||
|
|
||||||
|
let
|
||||||
|
em = depot.tools.eaglemode;
|
||||||
|
emSrc = with pkgs; srcOnly eaglemode;
|
||||||
|
in
|
||||||
|
(em.buildPlugin {
|
||||||
|
name = "yatracker";
|
||||||
|
version = "canon";
|
||||||
|
src = ./.;
|
||||||
|
target = "PlYaTracker";
|
||||||
|
}).overrideAttrs (_: {
|
||||||
|
postInstall = ''
|
||||||
|
mkdir -p $out/icons
|
||||||
|
${pkgs.imagemagick}/bin/convert $src/logo.webp $out/icons/yandex-tracker.tga
|
||||||
|
'';
|
||||||
|
})
|
||||||
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
#%rec:emFpPlugin%#
|
||||||
|
|
||||||
|
FileTypes = { ".YaTracker" }
|
||||||
|
Priority = 1.0
|
||||||
|
Library = "PlYaTracker"
|
||||||
|
Function = "PlYaTrackerPluginFunc"
|
BIN
tools/eaglemode/plugins/yatracker/logo.webp
Normal file
BIN
tools/eaglemode/plugins/yatracker/logo.webp
Normal file
Binary file not shown.
After Width: | Height: | Size: 14 KiB |
|
@ -0,0 +1,47 @@
|
||||||
|
package PlYaTracker;
|
||||||
|
|
||||||
|
use strict;
|
||||||
|
use warnings;
|
||||||
|
|
||||||
|
sub GetDependencies
|
||||||
|
{
|
||||||
|
return ('emCore');
|
||||||
|
}
|
||||||
|
|
||||||
|
sub IsEssential
|
||||||
|
{
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
sub GetFileHandlingRules
|
||||||
|
{
|
||||||
|
return ();
|
||||||
|
}
|
||||||
|
|
||||||
|
sub GetExtraBuildOptions
|
||||||
|
{
|
||||||
|
return ();
|
||||||
|
}
|
||||||
|
|
||||||
|
sub Build
|
||||||
|
{
|
||||||
|
shift;
|
||||||
|
my %options=@_;
|
||||||
|
|
||||||
|
system(
|
||||||
|
@{$options{'unicc_call'}},
|
||||||
|
"--math",
|
||||||
|
"--rtti",
|
||||||
|
"--exceptions",
|
||||||
|
"--bin-dir" , "bin",
|
||||||
|
"--lib-dir" , "lib",
|
||||||
|
"--obj-dir" , "obj",
|
||||||
|
"--inc-search-dir", "include",
|
||||||
|
"--link" , "emCore",
|
||||||
|
"--type" , "dynlib",
|
||||||
|
"--name" , "PlYaTracker",
|
||||||
|
"src/PlYaTracker/PlYaTracker.cpp"
|
||||||
|
)==0 or return 0;
|
||||||
|
|
||||||
|
return 1;
|
||||||
|
}
|
|
@ -0,0 +1,58 @@
|
||||||
|
#include <emCore/emFilePanel.h>
|
||||||
|
#include <emCore/emFpPlugin.h>
|
||||||
|
#include <emCore/emRecFileModel.h>
|
||||||
|
#include <emCore/emToolkit.h>
|
||||||
|
|
||||||
|
class PlYaTrackerConfig final : public emRecFileModel, public emStructRec {
|
||||||
|
public:
|
||||||
|
static emRef<PlYaTrackerConfig> Acquire(emContext& context,
|
||||||
|
const emString& name,
|
||||||
|
bool common = true);
|
||||||
|
|
||||||
|
virtual const char* GetFormatName() const;
|
||||||
|
|
||||||
|
emStringRec URL;
|
||||||
|
emStringRec Token;
|
||||||
|
|
||||||
|
protected:
|
||||||
|
PlYaTrackerConfig(emContext& context, const emString& name);
|
||||||
|
};
|
||||||
|
|
||||||
|
emRef<PlYaTrackerConfig> PlYaTrackerConfig::Acquire(emContext& context,
|
||||||
|
const emString& name,
|
||||||
|
bool common) {
|
||||||
|
EM_IMPL_ACQUIRE(PlYaTrackerConfig, context, name, common)
|
||||||
|
}
|
||||||
|
|
||||||
|
const char* PlYaTrackerConfig::GetFormatName() const { return "PlYaTracker"; }
|
||||||
|
|
||||||
|
PlYaTrackerConfig::PlYaTrackerConfig(emContext& context, const emString& name)
|
||||||
|
: emRecFileModel(context, name),
|
||||||
|
emStructRec(),
|
||||||
|
URL(this, "URL"),
|
||||||
|
Token(this, "Token") {
|
||||||
|
PostConstruct(*this);
|
||||||
|
}
|
||||||
|
|
||||||
|
class PlYaTrackerFilePanel : public emFilePanel {
|
||||||
|
public:
|
||||||
|
PlYaTrackerFilePanel(ParentArg parent, const emString& name,
|
||||||
|
emRef<PlYaTrackerConfig> config);
|
||||||
|
|
||||||
|
private:
|
||||||
|
emRef<PlYaTrackerConfig> Config;
|
||||||
|
};
|
||||||
|
|
||||||
|
PlYaTrackerFilePanel::PlYaTrackerFilePanel(ParentArg parent,
|
||||||
|
const emString& name,
|
||||||
|
emRef<PlYaTrackerConfig> config)
|
||||||
|
: emFilePanel(parent, name, config), Config(config) {}
|
||||||
|
|
||||||
|
extern "C" {
|
||||||
|
emPanel* PlYaTrackerPluginFunc(emPanel::ParentArg parent, const emString& name,
|
||||||
|
const emString& path, emFpPlugin* plugin,
|
||||||
|
emString* errorBuf) {
|
||||||
|
return new PlYaTrackerFilePanel(
|
||||||
|
parent, name, PlYaTrackerConfig::Acquire(parent.GetRootContext(), path));
|
||||||
|
}
|
||||||
|
}
|
156
tools/eaglemode/wrapper.go
Normal file
156
tools/eaglemode/wrapper.go
Normal file
|
@ -0,0 +1,156 @@
|
||||||
|
// Eagle Mode configuration wrapper that recreates the required directory
|
||||||
|
// structure for Eagle Mode based on the output of depot.tools.eaglemode.etcDir
|
||||||
|
//
|
||||||
|
// This will replace *all* symlinks in the Eagle Mode configuration directory,
|
||||||
|
// but it will not touch actual files. Missing folders will be created.
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"io/fs"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"os/user"
|
||||||
|
"path"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
func configDir() (string, error) {
|
||||||
|
v := os.Getenv("EM_USER_CONFIG_DIR")
|
||||||
|
if v != "" {
|
||||||
|
return v, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
usr, err := user.Current()
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("failed to get current user: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return path.Join(usr.HomeDir, ".eaglemode"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// cleanupConfig removes *all* existing symlinks in the configuration which do
|
||||||
|
// not point into the right Nix store path.
|
||||||
|
func cleanupConfig(conf string, dir string) (map[string]bool, error) {
|
||||||
|
// In case of first launch, we might have to create the directory.
|
||||||
|
_ = os.MkdirAll(dir, 0755)
|
||||||
|
c := 0
|
||||||
|
|
||||||
|
currentFiles := map[string]bool{}
|
||||||
|
|
||||||
|
walker := func(p string, d fs.DirEntry, e error) error {
|
||||||
|
if e != nil {
|
||||||
|
return fmt.Errorf("could not walk %s in config directory: %w", p, e)
|
||||||
|
}
|
||||||
|
|
||||||
|
if d.Type()&fs.ModeSymlink != 0 {
|
||||||
|
target, err := os.Readlink(p)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("could not read link for %s: %w", p, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.HasPrefix(target, conf) {
|
||||||
|
err = os.Remove(p)
|
||||||
|
c++
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("could not remove stale link %q: %w", p, err)
|
||||||
|
}
|
||||||
|
log.Printf("removed stale symlink %q", p)
|
||||||
|
} else {
|
||||||
|
currentFiles[p] = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if d.Type().IsRegular() {
|
||||||
|
currentFiles[p] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
err := filepath.WalkDir(dir, walker)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if c > 0 {
|
||||||
|
log.Printf("removed %v stale symlinks", c)
|
||||||
|
}
|
||||||
|
|
||||||
|
return currentFiles, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// linkConfig traverses the given Eagle Mode configuration and links everything
|
||||||
|
// to the expected location in the user's configuration directory.
|
||||||
|
//
|
||||||
|
// If the user placed actual files in the configuration directory at paths that
|
||||||
|
// would be overwritten, they will not be touched.
|
||||||
|
func linkConfig(conf string, dir string, existing map[string]bool) error {
|
||||||
|
walker := func(p string, d fs.DirEntry, e error) error {
|
||||||
|
if e != nil {
|
||||||
|
return fmt.Errorf("could not walk %s in config directory: %w", p, e)
|
||||||
|
}
|
||||||
|
|
||||||
|
target := path.Join(dir, strings.TrimPrefix(p, conf))
|
||||||
|
|
||||||
|
if d.Type().IsDir() {
|
||||||
|
err := os.MkdirAll(target, 0755)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("could not create directory %q: %w", target, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if shadow, exists := existing[target]; exists {
|
||||||
|
if shadow {
|
||||||
|
log.Printf("WARN: file %q already exists and shadows a file from configuration", target)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
err := os.Symlink(p, target)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to link %q: %w", target, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return filepath.WalkDir(conf, walker)
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
emConfig := flag.String("em-config", "", "path to em-config dir")
|
||||||
|
|
||||||
|
flag.Parse()
|
||||||
|
log.Println("verifying current Eagle Mode configuration")
|
||||||
|
|
||||||
|
if *emConfig == "" {
|
||||||
|
log.Fatalf("Eagle Mode configuration must be given")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !strings.HasPrefix(*emConfig, "/nix/store/") {
|
||||||
|
log.Fatalf("Eagle Mode configuration must be in Nix store")
|
||||||
|
}
|
||||||
|
|
||||||
|
dir, err := configDir()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("could not determine Eagle Mode config dir: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
currentFiles, err := cleanupConfig(*emConfig, dir)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("failed to remove stale symlinks: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
err = linkConfig(*emConfig, dir, currentFiles)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("failed to link new configuration: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Println("Eagle Mode configuration updated")
|
||||||
|
}
|
1409
tvix/Cargo.lock
generated
1409
tvix/Cargo.lock
generated
File diff suppressed because it is too large
Load diff
3780
tvix/Cargo.nix
3780
tvix/Cargo.nix
File diff suppressed because it is too large
Load diff
124
tvix/Cargo.toml
124
tvix/Cargo.toml
|
@ -27,6 +27,8 @@ members = [
|
||||||
"glue",
|
"glue",
|
||||||
"nar-bridge",
|
"nar-bridge",
|
||||||
"nix-compat",
|
"nix-compat",
|
||||||
|
"nix-compat-derive",
|
||||||
|
"nix-compat-derive-tests",
|
||||||
"serde",
|
"serde",
|
||||||
"store",
|
"store",
|
||||||
"tracing",
|
"tracing",
|
||||||
|
@ -37,6 +39,128 @@ members = [
|
||||||
# https://github.com/rust-lang/rust-clippy/issues/12281
|
# https://github.com/rust-lang/rust-clippy/issues/12281
|
||||||
blocks_in_conditions = "allow"
|
blocks_in_conditions = "allow"
|
||||||
|
|
||||||
|
[workspace.dependencies]
|
||||||
|
anyhow = "1.0.86"
|
||||||
|
async-compression = "0.4.12"
|
||||||
|
async-process = "2.2.4"
|
||||||
|
async-stream = "0.3.5"
|
||||||
|
async-tempfile = "0.4.0"
|
||||||
|
axum = "0.7.5"
|
||||||
|
# https://github.com/liufuyang/bigtable_rs/pull/86
|
||||||
|
bigtable_rs = { git = "https://github.com/liufuyang/bigtable_rs", rev = "1818355a5373a5bc2c84287e3a4e3807154ac8ef" }
|
||||||
|
bitflags = "2.6.0"
|
||||||
|
blake3 = "1.5.4"
|
||||||
|
bstr = "1.10.0"
|
||||||
|
bytes = "1.7.1"
|
||||||
|
clap = "4.5.16"
|
||||||
|
codemap = "0.1.3"
|
||||||
|
codemap-diagnostic = "0.1.2"
|
||||||
|
count-write = "0.1.0"
|
||||||
|
criterion = "0.5"
|
||||||
|
data-encoding = "2.6.0"
|
||||||
|
digest = "0.10.7"
|
||||||
|
dirs = "4.0.0"
|
||||||
|
ed25519 = "2.2.3"
|
||||||
|
ed25519-dalek = "2.1.1"
|
||||||
|
enum-primitive-derive = "0.3.0"
|
||||||
|
erased-serde = "0.4.5"
|
||||||
|
expect-test = "1.5.0"
|
||||||
|
fastcdc = "3.1.0"
|
||||||
|
fuse-backend-rs = "0.11.0"
|
||||||
|
futures = "0.3.30"
|
||||||
|
genawaiter = { version = "0.99.1", default-features = false }
|
||||||
|
glob = "0.3.1"
|
||||||
|
hex-literal = "0.4.1"
|
||||||
|
http = "1.1.0"
|
||||||
|
hyper-util = "0.1.7"
|
||||||
|
indicatif = "0.17.8"
|
||||||
|
itertools = "0.12.1"
|
||||||
|
lazy_static = "1.5.0"
|
||||||
|
lexical-core = "0.8.5"
|
||||||
|
libc = "0.2.158"
|
||||||
|
lru = "0.12.4"
|
||||||
|
magic = "0.16.2"
|
||||||
|
md-5 = "0.10.6"
|
||||||
|
mimalloc = "0.1.43"
|
||||||
|
nix = "0.27.1"
|
||||||
|
nohash-hasher = "0.2.0"
|
||||||
|
nom = "7.1.3"
|
||||||
|
num-traits = "0.2.19"
|
||||||
|
object_store = "0.10.2"
|
||||||
|
opentelemetry = "0.24.0"
|
||||||
|
opentelemetry-http = "0.13.0"
|
||||||
|
opentelemetry-otlp = "0.17.0"
|
||||||
|
opentelemetry_sdk = "0.24.1"
|
||||||
|
os_str_bytes = "6.6"
|
||||||
|
parking_lot = "0.12.3"
|
||||||
|
path-clean = "0.1"
|
||||||
|
petgraph = "0.6.5"
|
||||||
|
pin-project = "1.1"
|
||||||
|
pin-project-lite = "0.2.14"
|
||||||
|
pretty_assertions = "1.4.0"
|
||||||
|
proc-macro2 = "1.0.86"
|
||||||
|
proptest = { version = "1.5.0", default-features = false }
|
||||||
|
prost = "0.13.1"
|
||||||
|
prost-build = "0.13.1"
|
||||||
|
quote = "1.0.37"
|
||||||
|
redb = "2.1.2"
|
||||||
|
regex = "1.10.6"
|
||||||
|
reqwest = { version = "0.12.7", default-features = false }
|
||||||
|
reqwest-middleware = "0.3.3"
|
||||||
|
reqwest-tracing = { version = "0.5.3", default-features = false }
|
||||||
|
rnix = "0.11.0"
|
||||||
|
rowan = "*"
|
||||||
|
rstest = "0.19.0"
|
||||||
|
rstest_reuse = "0.6.0"
|
||||||
|
rustc-hash = "2.0.0"
|
||||||
|
rustyline = "10.1.1"
|
||||||
|
serde = "1.0.209"
|
||||||
|
serde_json = "1.0"
|
||||||
|
serde_qs = "0.12.0"
|
||||||
|
serde_tagged = "0.3.0"
|
||||||
|
serde_with = "3.9.0"
|
||||||
|
sha1 = "0.10.6"
|
||||||
|
sha2 = "0.10.8"
|
||||||
|
sled = "0.34.7"
|
||||||
|
smol_str = "0.2.2"
|
||||||
|
tabwriter = "1.4"
|
||||||
|
tempfile = "3.12.0"
|
||||||
|
test-strategy = "0.2.1"
|
||||||
|
thiserror = "1.0.63"
|
||||||
|
threadpool = "1.8.1"
|
||||||
|
tokio = "1.39.3"
|
||||||
|
tokio-listener = "0.4.3"
|
||||||
|
tokio-retry = "0.3.0"
|
||||||
|
tokio-stream = "0.1.15"
|
||||||
|
tokio-tar = "0.3.1"
|
||||||
|
tokio-test = "0.4.4"
|
||||||
|
tokio-util = "0.7.11"
|
||||||
|
tonic = "0.12.2"
|
||||||
|
tonic-build = "0.12.2"
|
||||||
|
tonic-health = { version = "0.12.2", default-features = false }
|
||||||
|
tonic-reflection = "0.12.2"
|
||||||
|
tower = "0.4.13"
|
||||||
|
tower-http = "0.5.2"
|
||||||
|
tracing = "0.1.40"
|
||||||
|
tracing-indicatif = "0.3.6"
|
||||||
|
tracing-opentelemetry = "0.25.0"
|
||||||
|
tracing-subscriber = "0.3.18"
|
||||||
|
tracing-tracy = "0.11.2"
|
||||||
|
trybuild = "1.0.99"
|
||||||
|
url = "2.5.2"
|
||||||
|
vhost = "0.6"
|
||||||
|
vhost-user-backend = "0.8"
|
||||||
|
virtio-bindings = "0.2.2"
|
||||||
|
virtio-queue = "0.7"
|
||||||
|
vm-memory = "0.10"
|
||||||
|
vmm-sys-util = "0.11"
|
||||||
|
vu128 = "1.1.0"
|
||||||
|
walkdir = "2.5.0"
|
||||||
|
# https://github.com/jneem/wu-manber/pull/1
|
||||||
|
wu-manber = { git = "https://github.com/tvlfyi/wu-manber.git" }
|
||||||
|
xattr = "1.3.1"
|
||||||
|
zstd = "0.13.2"
|
||||||
|
|
||||||
# Add a profile to all targets that enables release optimisations, but
|
# Add a profile to all targets that enables release optimisations, but
|
||||||
# retains debug symbols. This is great for use with
|
# retains debug symbols. This is great for use with
|
||||||
# benchmarking/profiling tools.
|
# benchmarking/profiling tools.
|
||||||
|
|
|
@ -61,7 +61,7 @@ This folder contains the following components:
|
||||||
* `//tvix/castore` - subtree storage/transfer in a content-addressed fashion
|
* `//tvix/castore` - subtree storage/transfer in a content-addressed fashion
|
||||||
* `//tvix/cli` - preliminary REPL & CLI implementation for Tvix
|
* `//tvix/cli` - preliminary REPL & CLI implementation for Tvix
|
||||||
* `//tvix/eval` - an implementation of the Nix programming language
|
* `//tvix/eval` - an implementation of the Nix programming language
|
||||||
* `//tvix/nar-bridge-go` - a HTTP webserver providing a Nix HTTP Binary Cache interface in front of a tvix-store
|
* `//tvix/nar-bridge` - a HTTP webserver providing a Nix HTTP Binary Cache interface in front of a tvix-store
|
||||||
* `//tvix/nix-compat` - a Rust library for compatibility with C++ Nix, features like encodings and hashing schemes and formats
|
* `//tvix/nix-compat` - a Rust library for compatibility with C++ Nix, features like encodings and hashing schemes and formats
|
||||||
* `//tvix/serde` - a Rust library for using the Nix language for app configuration
|
* `//tvix/serde` - a Rust library for using the Nix language for app configuration
|
||||||
* `//tvix/store` - a "filesystem" linking Nix store paths and metadata with the content-addressed layer
|
* `//tvix/store` - a "filesystem" linking Nix store paths and metadata with the content-addressed layer
|
||||||
|
|
|
@ -31,23 +31,37 @@ the `tvix` directory:
|
||||||
export PATH=$PATH:$PWD/target/release-with-debug
|
export PATH=$PATH:$PWD/target/release-with-debug
|
||||||
```
|
```
|
||||||
|
|
||||||
Secondly, configure tvix to use the local backend:
|
Now, spin up tvix-daemon, connecting to some (local) backends:
|
||||||
|
|
||||||
```
|
```
|
||||||
export BLOB_SERVICE_ADDR=sled://$PWD/blobs.sled
|
tvix-store --otlp=false daemon \
|
||||||
|
--blob-service-addr=objectstore+file://$PWD/blobs \
|
||||||
|
--directory-service-addr=sled://$PWD/directories.sled \
|
||||||
|
--path-info-service-addr=sled://$PWD/pathinfo.sled &
|
||||||
|
```
|
||||||
|
|
||||||
|
Copy some data into tvix-store (we use `nar-bridge` for this for now):
|
||||||
|
|
||||||
|
```
|
||||||
|
mg run //tvix:nar-bridge -- --otlp=false &
|
||||||
|
rm -Rf ~/.cache/nix; nix copy --to http://localhost:9000\?compression\=none $(mg build //third_party/nixpkgs:hello)
|
||||||
|
pkill nar-bridge
|
||||||
|
```
|
||||||
|
|
||||||
|
By default, the `tvix-store virtiofs` command used in the `runVM` script
|
||||||
|
connects to a running `tvix-store daemon` via gRPC - in which case you want to
|
||||||
|
keep `tvix-store daemon` running.
|
||||||
|
|
||||||
|
In case you want to have `tvix-store virtiofs` open the stores directly, kill
|
||||||
|
`tvix-store daemon` too, and export the addresses from above:
|
||||||
|
|
||||||
|
```
|
||||||
|
pkill tvix-store
|
||||||
|
export BLOB_SERVICE_ADDR=objectstore+file://$PWD/blobs
|
||||||
export DIRECTORY_SERVICE_ADDR=sled://$PWD/directories.sled
|
export DIRECTORY_SERVICE_ADDR=sled://$PWD/directories.sled
|
||||||
export PATH_INFO_SERVICE_ADDR=sled://$PWD/pathinfo.sled
|
export PATH_INFO_SERVICE_ADDR=sled://$PWD/pathinfo.sled
|
||||||
```
|
```
|
||||||
|
|
||||||
Potentially copy some data into tvix-store (via nar-bridge):
|
|
||||||
|
|
||||||
```
|
|
||||||
mg run //tvix:store -- daemon &
|
|
||||||
$(mg build //tvix:nar-bridge-go)/bin/nar-bridge-http &
|
|
||||||
rm -Rf ~/.cache/nix; nix copy --to http://localhost:9000\?compression\=none $(mg build //third_party/nixpkgs:hello)
|
|
||||||
pkill nar-bridge-http; pkill tvix-store
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Interactive shell
|
#### Interactive shell
|
||||||
Run the VM like this:
|
Run the VM like this:
|
||||||
|
|
||||||
|
@ -100,9 +114,12 @@ Hello, world!
|
||||||
[ 0.299422] reboot: Power down
|
[ 0.299422] reboot: Power down
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Execute a NixOS system closure
|
#### Boot a NixOS system closure
|
||||||
It's also possible to invoke a system closure. To do this, tvix-init honors the
|
It's also possible to boot a system closure. To do this, tvix-init honors the
|
||||||
init= cmdline option, and will switch_root to it.
|
init= cmdline option, and will `switch_root` to it.
|
||||||
|
|
||||||
|
Make sure to first copy that system closure into tvix-store,
|
||||||
|
using a similar `nix copy` command as above.
|
||||||
|
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
|
@ -1,13 +1,16 @@
|
||||||
{ depot, pkgs, ... }:
|
{ lib, pkgs, ... }:
|
||||||
|
|
||||||
rec {
|
rec {
|
||||||
# A binary that sets up /nix/store from virtiofs, lists all store paths, and
|
# A binary that sets up /nix/store from virtiofs, lists all store paths, and
|
||||||
# powers off the machine.
|
# powers off the machine.
|
||||||
tvix-init = depot.nix.buildGo.program {
|
tvix-init = pkgs.buildGoModule rec {
|
||||||
name = "tvix-init";
|
name = "tvix-init";
|
||||||
srcs = [
|
src = lib.fileset.toSource {
|
||||||
./tvix-init.go
|
root = ./.;
|
||||||
];
|
fileset = ./tvix-init.go;
|
||||||
|
};
|
||||||
|
vendorHash = null;
|
||||||
|
postPatch = "go mod init ${name}";
|
||||||
};
|
};
|
||||||
|
|
||||||
# A kernel with virtiofs support baked in
|
# A kernel with virtiofs support baked in
|
||||||
|
|
|
@ -65,8 +65,8 @@ let
|
||||||
--otlp=false \
|
--otlp=false \
|
||||||
daemon -l $PWD/tvix-store.sock &
|
daemon -l $PWD/tvix-store.sock &
|
||||||
|
|
||||||
# Wait for the socket to be created.
|
# Wait for the service to report healthy.
|
||||||
while [ ! -e $PWD/tvix-store.sock ]; do sleep 1; done
|
timeout 22 sh -c "until ${pkgs.ip2unix}/bin/ip2unix -r out,path=$PWD/tvix-store.sock ${pkgs.grpc-health-check}/bin/grpc-health-check --address 127.0.0.1 --port 8080; do sleep 1; done"
|
||||||
|
|
||||||
# Export env vars so that subsequent tvix-store commands will talk to
|
# Export env vars so that subsequent tvix-store commands will talk to
|
||||||
# our tvix-store daemon over the unix socket.
|
# our tvix-store daemon over the unix socket.
|
||||||
|
@ -89,8 +89,8 @@ let
|
||||||
--otlp=false \
|
--otlp=false \
|
||||||
-l $PWD/nar-bridge.sock &
|
-l $PWD/nar-bridge.sock &
|
||||||
|
|
||||||
# Wait for the socket to be created.
|
# Wait for nar-bridge to report healthy.
|
||||||
while [ ! -e $PWD/nar-bridge.sock ]; do sleep 1; done
|
timeout 22 sh -c "until ${pkgs.curl}/bin/curl -s --unix-socket $PWD/nar-bridge.sock http:///nix-binary-cache; do sleep 1; done"
|
||||||
|
|
||||||
# Upload. We can't use nix copy --to http://…, as it wants access to the nix db.
|
# Upload. We can't use nix copy --to http://…, as it wants access to the nix db.
|
||||||
# However, we can use mkBinaryCache to assemble .narinfo and .nar.xz to upload,
|
# However, we can use mkBinaryCache to assemble .narinfo and .nar.xz to upload,
|
||||||
|
@ -119,14 +119,20 @@ let
|
||||||
grep "${assertVMOutput}" output.txt
|
grep "${assertVMOutput}" output.txt
|
||||||
'';
|
'';
|
||||||
requiredSystemFeatures = [ "kvm" ];
|
requiredSystemFeatures = [ "kvm" ];
|
||||||
|
# HACK: The boot tests are sometimes flaky, and we don't want them to
|
||||||
|
# periodically fail other builds. Have Buildkite auto-retry them 2 times
|
||||||
|
# on failure.
|
||||||
|
# Logs for individual failures are still available, so it won't hinder
|
||||||
|
# flakiness debuggability.
|
||||||
|
meta.ci.buildkiteExtraStepArgs = {
|
||||||
|
retry.automatic = true;
|
||||||
|
};
|
||||||
} // lib.optionalAttrs (isClosure && !useNarBridge) {
|
} // lib.optionalAttrs (isClosure && !useNarBridge) {
|
||||||
__structuredAttrs = true;
|
__structuredAttrs = true;
|
||||||
exportReferencesGraph.closure = [ path ];
|
exportReferencesGraph.closure = [ path ];
|
||||||
});
|
});
|
||||||
|
|
||||||
systemFor = sys: (depot.ops.nixos.nixosFor sys).system;
|
testSystem = (pkgs.nixos {
|
||||||
|
|
||||||
testSystem = systemFor ({ modulesPath, pkgs, ... }: {
|
|
||||||
# Set some options necessary to evaluate.
|
# Set some options necessary to evaluate.
|
||||||
boot.loader.systemd-boot.enable = true;
|
boot.loader.systemd-boot.enable = true;
|
||||||
# TODO: figure out how to disable this without causing eval to fail
|
# TODO: figure out how to disable this without causing eval to fail
|
||||||
|
@ -146,7 +152,10 @@ let
|
||||||
|
|
||||||
# Don't warn about stateVersion.
|
# Don't warn about stateVersion.
|
||||||
system.stateVersion = "24.05";
|
system.stateVersion = "24.05";
|
||||||
});
|
|
||||||
|
# Speed-up evaluation and building.
|
||||||
|
documentation.enable = lib.mkForce false;
|
||||||
|
}).config.system.build.toplevel;
|
||||||
|
|
||||||
in
|
in
|
||||||
depot.nix.readTree.drvTargets
|
depot.nix.readTree.drvTargets
|
||||||
|
@ -157,8 +166,8 @@ depot.nix.readTree.drvTargets
|
||||||
});
|
});
|
||||||
docs-persistent = (mkBootTest {
|
docs-persistent = (mkBootTest {
|
||||||
blobServiceAddr = "objectstore+file:///build/blobs";
|
blobServiceAddr = "objectstore+file:///build/blobs";
|
||||||
directoryServiceAddr = "sled:///build/directories.sled";
|
directoryServiceAddr = "redb:///build/directories.redb";
|
||||||
pathInfoServiceAddr = "sled:///build/pathinfo.sled";
|
pathInfoServiceAddr = "redb:///build/pathinfo.redb";
|
||||||
path = ../../docs;
|
path = ../../docs;
|
||||||
importPathName = "docs";
|
importPathName = "docs";
|
||||||
});
|
});
|
||||||
|
@ -171,6 +180,8 @@ depot.nix.readTree.drvTargets
|
||||||
|
|
||||||
closure-nixos = (mkBootTest {
|
closure-nixos = (mkBootTest {
|
||||||
blobServiceAddr = "objectstore+file:///build/blobs";
|
blobServiceAddr = "objectstore+file:///build/blobs";
|
||||||
|
pathInfoServiceAddr = "redb:///build/pathinfo.redb";
|
||||||
|
directoryServiceAddr = "redb:///build/directories.redb";
|
||||||
path = testSystem;
|
path = testSystem;
|
||||||
isClosure = true;
|
isClosure = true;
|
||||||
vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic
|
vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
|
|
||||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||||
// versions:
|
// versions:
|
||||||
// protoc-gen-go v1.34.1
|
// protoc-gen-go v1.34.2
|
||||||
// protoc (unknown)
|
// protoc (unknown)
|
||||||
// source: tvix/build/protos/build.proto
|
// source: tvix/build/protos/build.proto
|
||||||
|
|
||||||
|
@ -66,8 +66,8 @@ type BuildRequest struct {
|
||||||
|
|
||||||
// The list of all root nodes that should be visible in `inputs_dir` at the
|
// The list of all root nodes that should be visible in `inputs_dir` at the
|
||||||
// time of the build.
|
// time of the build.
|
||||||
// As root nodes are content-addressed, no additional signatures are needed
|
// As all references are content-addressed, no additional signatures are
|
||||||
// to substitute / make these available in the build environment.
|
// needed to substitute / make these available in the build environment.
|
||||||
// Inputs MUST be sorted by their names.
|
// Inputs MUST be sorted by their names.
|
||||||
Inputs []*castore_go.Node `protobuf:"bytes,1,rep,name=inputs,proto3" json:"inputs,omitempty"`
|
Inputs []*castore_go.Node `protobuf:"bytes,1,rep,name=inputs,proto3" json:"inputs,omitempty"`
|
||||||
// The command (and its args) executed as the build script.
|
// The command (and its args) executed as the build script.
|
||||||
|
@ -560,7 +560,7 @@ func file_tvix_build_protos_build_proto_rawDescGZIP() []byte {
|
||||||
}
|
}
|
||||||
|
|
||||||
var file_tvix_build_protos_build_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
|
var file_tvix_build_protos_build_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
|
||||||
var file_tvix_build_protos_build_proto_goTypes = []interface{}{
|
var file_tvix_build_protos_build_proto_goTypes = []any{
|
||||||
(*BuildRequest)(nil), // 0: tvix.build.v1.BuildRequest
|
(*BuildRequest)(nil), // 0: tvix.build.v1.BuildRequest
|
||||||
(*Build)(nil), // 1: tvix.build.v1.Build
|
(*Build)(nil), // 1: tvix.build.v1.Build
|
||||||
(*BuildRequest_EnvVar)(nil), // 2: tvix.build.v1.BuildRequest.EnvVar
|
(*BuildRequest_EnvVar)(nil), // 2: tvix.build.v1.BuildRequest.EnvVar
|
||||||
|
@ -588,7 +588,7 @@ func file_tvix_build_protos_build_proto_init() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if !protoimpl.UnsafeEnabled {
|
if !protoimpl.UnsafeEnabled {
|
||||||
file_tvix_build_protos_build_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_build_protos_build_proto_msgTypes[0].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*BuildRequest); i {
|
switch v := v.(*BuildRequest); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -600,7 +600,7 @@ func file_tvix_build_protos_build_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_build_protos_build_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_build_protos_build_proto_msgTypes[1].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*Build); i {
|
switch v := v.(*Build); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -612,7 +612,7 @@ func file_tvix_build_protos_build_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_build_protos_build_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_build_protos_build_proto_msgTypes[2].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*BuildRequest_EnvVar); i {
|
switch v := v.(*BuildRequest_EnvVar); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -624,7 +624,7 @@ func file_tvix_build_protos_build_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_build_protos_build_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_build_protos_build_proto_msgTypes[3].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*BuildRequest_BuildConstraints); i {
|
switch v := v.(*BuildRequest_BuildConstraints); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -636,7 +636,7 @@ func file_tvix_build_protos_build_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_build_protos_build_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_build_protos_build_proto_msgTypes[4].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*BuildRequest_AdditionalFile); i {
|
switch v := v.(*BuildRequest_AdditionalFile); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
|
|
||||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||||
// versions:
|
// versions:
|
||||||
// protoc-gen-go v1.34.1
|
// protoc-gen-go v1.34.2
|
||||||
// protoc (unknown)
|
// protoc (unknown)
|
||||||
// source: tvix/build/protos/rpc_build.proto
|
// source: tvix/build/protos/rpc_build.proto
|
||||||
|
|
||||||
|
@ -40,7 +40,7 @@ var file_tvix_build_protos_rpc_build_proto_rawDesc = []byte{
|
||||||
0x69, 0x6c, 0x64, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
|
0x69, 0x6c, 0x64, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
|
||||||
}
|
}
|
||||||
|
|
||||||
var file_tvix_build_protos_rpc_build_proto_goTypes = []interface{}{
|
var file_tvix_build_protos_rpc_build_proto_goTypes = []any{
|
||||||
(*BuildRequest)(nil), // 0: tvix.build.v1.BuildRequest
|
(*BuildRequest)(nil), // 0: tvix.build.v1.BuildRequest
|
||||||
(*Build)(nil), // 1: tvix.build.v1.Build
|
(*Build)(nil), // 1: tvix.build.v1.Build
|
||||||
}
|
}
|
||||||
|
|
|
@ -4,33 +4,31 @@ version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
bytes = "1.4.0"
|
bytes = { workspace = true }
|
||||||
clap = { version = "4.0", features = ["derive", "env"] }
|
clap = { workspace = true, features = ["derive", "env"] }
|
||||||
itertools = "0.12.0"
|
itertools = { workspace = true }
|
||||||
prost = "0.13.1"
|
prost = { workspace = true }
|
||||||
thiserror = "1.0.56"
|
thiserror = { workspace = true }
|
||||||
tokio = { version = "1.32.0" }
|
tokio = { workspace = true }
|
||||||
tokio-listener = { version = "0.4.1", features = [ "tonic012" ] }
|
tokio-listener = { workspace = true, features = ["tonic012"] }
|
||||||
tonic = { version = "0.12.0", features = ["tls", "tls-roots"] }
|
tonic = { workspace = true, features = ["tls", "tls-roots"] }
|
||||||
tvix-castore = { path = "../castore" }
|
tvix-castore = { path = "../castore" }
|
||||||
tvix-tracing = { path = "../tracing" }
|
tvix-tracing = { path = "../tracing" }
|
||||||
tracing = "0.1.40"
|
tracing = { workspace = true }
|
||||||
url = "2.4.0"
|
url = { workspace = true }
|
||||||
|
mimalloc = { workspace = true }
|
||||||
[dependencies.tonic-reflection]
|
tonic-reflection = { workspace = true, optional = true }
|
||||||
optional = true
|
|
||||||
version = "0.12.0"
|
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
prost-build = "0.13.1"
|
prost-build = { workspace = true }
|
||||||
tonic-build = "0.12.0"
|
tonic-build = { workspace = true }
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = []
|
default = []
|
||||||
tonic-reflection = ["dep:tonic-reflection", "tvix-castore/tonic-reflection"]
|
tonic-reflection = ["dep:tonic-reflection", "tvix-castore/tonic-reflection"]
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
rstest = "0.19.0"
|
rstest = { workspace = true }
|
||||||
|
|
||||||
[lints]
|
[lints]
|
||||||
workspace = true
|
workspace = true
|
||||||
|
|
|
@ -20,15 +20,15 @@ fn main() -> Result<()> {
|
||||||
.extern_path(".tvix.castore.v1", "::tvix_castore::proto")
|
.extern_path(".tvix.castore.v1", "::tvix_castore::proto")
|
||||||
.compile(
|
.compile(
|
||||||
&[
|
&[
|
||||||
"tvix/build/protos/build.proto",
|
"build/protos/build.proto",
|
||||||
"tvix/build/protos/rpc_build.proto",
|
"build/protos/rpc_build.proto",
|
||||||
],
|
],
|
||||||
// If we are in running `cargo build` manually, using `../..` works fine,
|
// If we are in running `cargo build` manually, using `../..` works fine,
|
||||||
// but in case we run inside a nix build, we need to instead point PROTO_ROOT
|
// but in case we run inside a nix build, we need to instead point PROTO_ROOT
|
||||||
// to a sparseTree containing that structure.
|
// to a custom tree containing that structure.
|
||||||
&[match std::env::var_os("PROTO_ROOT") {
|
&[match std::env::var_os("PROTO_ROOT") {
|
||||||
Some(proto_root) => proto_root.to_str().unwrap().to_owned(),
|
Some(proto_root) => proto_root.to_str().unwrap().to_owned(),
|
||||||
None => "../..".to_string(),
|
None => "..".to_string(),
|
||||||
}],
|
}],
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
@ -4,8 +4,8 @@
|
||||||
runTests = true;
|
runTests = true;
|
||||||
}).overrideAttrs (old: rec {
|
}).overrideAttrs (old: rec {
|
||||||
meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
|
meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
|
||||||
passthru = depot.tvix.utils.mkFeaturePowerset {
|
passthru = old.passthru // (depot.tvix.utils.mkFeaturePowerset {
|
||||||
inherit (old) crateName;
|
inherit (old) crateName;
|
||||||
features = [ "tonic-reflection" ];
|
features = [ "tonic-reflection" ];
|
||||||
};
|
});
|
||||||
})
|
})
|
||||||
|
|
|
@ -5,7 +5,7 @@ syntax = "proto3";
|
||||||
|
|
||||||
package tvix.build.v1;
|
package tvix.build.v1;
|
||||||
|
|
||||||
import "tvix/castore/protos/castore.proto";
|
import "castore/protos/castore.proto";
|
||||||
|
|
||||||
option go_package = "code.tvl.fyi/tvix/build-go;buildv1";
|
option go_package = "code.tvl.fyi/tvix/build-go;buildv1";
|
||||||
|
|
||||||
|
@ -47,8 +47,8 @@ option go_package = "code.tvl.fyi/tvix/build-go;buildv1";
|
||||||
message BuildRequest {
|
message BuildRequest {
|
||||||
// The list of all root nodes that should be visible in `inputs_dir` at the
|
// The list of all root nodes that should be visible in `inputs_dir` at the
|
||||||
// time of the build.
|
// time of the build.
|
||||||
// As root nodes are content-addressed, no additional signatures are needed
|
// As all references are content-addressed, no additional signatures are
|
||||||
// to substitute / make these available in the build environment.
|
// needed to substitute / make these available in the build environment.
|
||||||
// Inputs MUST be sorted by their names.
|
// Inputs MUST be sorted by their names.
|
||||||
repeated tvix.castore.v1.Node inputs = 1;
|
repeated tvix.castore.v1.Node inputs = 1;
|
||||||
|
|
||||||
|
|
|
@ -1,17 +1,12 @@
|
||||||
{ depot, pkgs, ... }:
|
{ depot, pkgs, lib, ... }:
|
||||||
let
|
let
|
||||||
protos = depot.nix.sparseTree {
|
protos = lib.sourceByRegex depot.path.origSrc [
|
||||||
name = "build-protos";
|
"buf.yaml"
|
||||||
root = depot.path.origSrc;
|
"buf.gen.yaml"
|
||||||
paths = [
|
# We need to include castore.proto (only), as it's referred.
|
||||||
# We need to include castore.proto (only), as it's referred.
|
"^tvix(/castore(/protos(/castore\.proto)?)?)?$"
|
||||||
../../castore/protos/castore.proto
|
"^tvix(/build(/protos(/.*\.proto)?)?)?$"
|
||||||
./build.proto
|
];
|
||||||
./rpc_build.proto
|
|
||||||
../../../buf.yaml
|
|
||||||
../../../buf.gen.yaml
|
|
||||||
];
|
|
||||||
};
|
|
||||||
in
|
in
|
||||||
depot.nix.readTree.drvTargets {
|
depot.nix.readTree.drvTargets {
|
||||||
inherit protos;
|
inherit protos;
|
||||||
|
|
|
@ -4,7 +4,7 @@ syntax = "proto3";
|
||||||
|
|
||||||
package tvix.build.v1;
|
package tvix.build.v1;
|
||||||
|
|
||||||
import "tvix/build/protos/build.proto";
|
import "build/protos/build.proto";
|
||||||
|
|
||||||
option go_package = "code.tvl.fyi/tvix/build-go;buildv1";
|
option go_package = "code.tvl.fyi/tvix/build-go;buildv1";
|
||||||
|
|
||||||
|
|
|
@ -17,6 +17,11 @@ use tvix_build::proto::FILE_DESCRIPTOR_SET;
|
||||||
#[cfg(feature = "tonic-reflection")]
|
#[cfg(feature = "tonic-reflection")]
|
||||||
use tvix_castore::proto::FILE_DESCRIPTOR_SET as CASTORE_FILE_DESCRIPTOR_SET;
|
use tvix_castore::proto::FILE_DESCRIPTOR_SET as CASTORE_FILE_DESCRIPTOR_SET;
|
||||||
|
|
||||||
|
use mimalloc::MiMalloc;
|
||||||
|
|
||||||
|
#[global_allocator]
|
||||||
|
static GLOBAL: MiMalloc = MiMalloc;
|
||||||
|
|
||||||
#[derive(Parser)]
|
#[derive(Parser)]
|
||||||
#[command(author, version, about, long_about = None)]
|
#[command(author, version, about, long_about = None)]
|
||||||
struct Cli {
|
struct Cli {
|
||||||
|
@ -85,11 +90,18 @@ async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
|
||||||
#[cfg(feature = "tonic-reflection")]
|
#[cfg(feature = "tonic-reflection")]
|
||||||
{
|
{
|
||||||
let reflection_svc = tonic_reflection::server::Builder::configure()
|
router = router.add_service(
|
||||||
.register_encoded_file_descriptor_set(CASTORE_FILE_DESCRIPTOR_SET)
|
tonic_reflection::server::Builder::configure()
|
||||||
.register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
|
.register_encoded_file_descriptor_set(CASTORE_FILE_DESCRIPTOR_SET)
|
||||||
.build()?;
|
.register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
|
||||||
router = router.add_service(reflection_svc);
|
.build_v1alpha()?,
|
||||||
|
);
|
||||||
|
router = router.add_service(
|
||||||
|
tonic_reflection::server::Builder::configure()
|
||||||
|
.register_encoded_file_descriptor_set(CASTORE_FILE_DESCRIPTOR_SET)
|
||||||
|
.register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
|
||||||
|
.build_v1()?,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
info!(listen_address=%listen_address, "listening");
|
info!(listen_address=%listen_address, "listening");
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use tvix_castore::proto::{NamedNode, ValidateNodeError};
|
use tvix_castore::DirectoryError;
|
||||||
|
|
||||||
mod grpc_buildservice_wrapper;
|
mod grpc_buildservice_wrapper;
|
||||||
|
|
||||||
|
@ -19,7 +19,7 @@ pub const FILE_DESCRIPTOR_SET: &[u8] = tonic::include_file_descriptor_set!("tvix
|
||||||
#[derive(Debug, thiserror::Error)]
|
#[derive(Debug, thiserror::Error)]
|
||||||
pub enum ValidateBuildRequestError {
|
pub enum ValidateBuildRequestError {
|
||||||
#[error("invalid input node at position {0}: {1}")]
|
#[error("invalid input node at position {0}: {1}")]
|
||||||
InvalidInputNode(usize, ValidateNodeError),
|
InvalidInputNode(usize, DirectoryError),
|
||||||
|
|
||||||
#[error("input nodes are not sorted by name")]
|
#[error("input nodes are not sorted by name")]
|
||||||
InputNodesNotSorted,
|
InputNodesNotSorted,
|
||||||
|
@ -123,20 +123,21 @@ impl BuildRequest {
|
||||||
/// and all restrictions around paths themselves (relative, clean, …) need
|
/// and all restrictions around paths themselves (relative, clean, …) need
|
||||||
// to be fulfilled.
|
// to be fulfilled.
|
||||||
pub fn validate(&self) -> Result<(), ValidateBuildRequestError> {
|
pub fn validate(&self) -> Result<(), ValidateBuildRequestError> {
|
||||||
// validate all input nodes
|
// validate names. Make sure they're sorted
|
||||||
for (i, n) in self.inputs.iter().enumerate() {
|
|
||||||
// ensure the input node itself is valid
|
|
||||||
n.validate()
|
|
||||||
.map_err(|e| ValidateBuildRequestError::InvalidInputNode(i, e))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// now we can look at the names, and make sure they're sorted.
|
let mut last_name: bytes::Bytes = "".into();
|
||||||
if !is_sorted(
|
for (i, node) in self.inputs.iter().enumerate() {
|
||||||
self.inputs
|
// TODO(flokli): store result somewhere
|
||||||
.iter()
|
let (name, _node) = node
|
||||||
.map(|e| e.node.as_ref().unwrap().get_name()),
|
.clone()
|
||||||
) {
|
.into_name_and_node()
|
||||||
Err(ValidateBuildRequestError::InputNodesNotSorted)?
|
.map_err(|e| ValidateBuildRequestError::InvalidInputNode(i, e))?;
|
||||||
|
|
||||||
|
if name.as_ref() <= last_name.as_ref() {
|
||||||
|
return Err(ValidateBuildRequestError::InputNodesNotSorted);
|
||||||
|
} else {
|
||||||
|
last_name = name.into()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// validate working_dir
|
// validate working_dir
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
|
|
||||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||||
// versions:
|
// versions:
|
||||||
// protoc-gen-go v1.34.1
|
// protoc-gen-go v1.34.2
|
||||||
// protoc (unknown)
|
// protoc (unknown)
|
||||||
// source: tvix/castore/protos/castore.proto
|
// source: tvix/castore/protos/castore.proto
|
||||||
|
|
||||||
|
@ -466,7 +466,7 @@ func file_tvix_castore_protos_castore_proto_rawDescGZIP() []byte {
|
||||||
}
|
}
|
||||||
|
|
||||||
var file_tvix_castore_protos_castore_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
|
var file_tvix_castore_protos_castore_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
|
||||||
var file_tvix_castore_protos_castore_proto_goTypes = []interface{}{
|
var file_tvix_castore_protos_castore_proto_goTypes = []any{
|
||||||
(*Directory)(nil), // 0: tvix.castore.v1.Directory
|
(*Directory)(nil), // 0: tvix.castore.v1.Directory
|
||||||
(*DirectoryNode)(nil), // 1: tvix.castore.v1.DirectoryNode
|
(*DirectoryNode)(nil), // 1: tvix.castore.v1.DirectoryNode
|
||||||
(*FileNode)(nil), // 2: tvix.castore.v1.FileNode
|
(*FileNode)(nil), // 2: tvix.castore.v1.FileNode
|
||||||
|
@ -493,7 +493,7 @@ func file_tvix_castore_protos_castore_proto_init() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if !protoimpl.UnsafeEnabled {
|
if !protoimpl.UnsafeEnabled {
|
||||||
file_tvix_castore_protos_castore_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_castore_proto_msgTypes[0].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*Directory); i {
|
switch v := v.(*Directory); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -505,7 +505,7 @@ func file_tvix_castore_protos_castore_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_castore_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_castore_proto_msgTypes[1].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*DirectoryNode); i {
|
switch v := v.(*DirectoryNode); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -517,7 +517,7 @@ func file_tvix_castore_protos_castore_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_castore_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_castore_proto_msgTypes[2].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*FileNode); i {
|
switch v := v.(*FileNode); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -529,7 +529,7 @@ func file_tvix_castore_protos_castore_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_castore_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_castore_proto_msgTypes[3].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*SymlinkNode); i {
|
switch v := v.(*SymlinkNode); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -541,7 +541,7 @@ func file_tvix_castore_protos_castore_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_castore_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_castore_proto_msgTypes[4].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*Node); i {
|
switch v := v.(*Node); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -554,7 +554,7 @@ func file_tvix_castore_protos_castore_proto_init() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_castore_proto_msgTypes[4].OneofWrappers = []interface{}{
|
file_tvix_castore_protos_castore_proto_msgTypes[4].OneofWrappers = []any{
|
||||||
(*Node_Directory)(nil),
|
(*Node_Directory)(nil),
|
||||||
(*Node_File)(nil),
|
(*Node_File)(nil),
|
||||||
(*Node_Symlink)(nil),
|
(*Node_Symlink)(nil),
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
|
|
||||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||||
// versions:
|
// versions:
|
||||||
// protoc-gen-go v1.34.1
|
// protoc-gen-go v1.34.2
|
||||||
// protoc (unknown)
|
// protoc (unknown)
|
||||||
// source: tvix/castore/protos/rpc_blobstore.proto
|
// source: tvix/castore/protos/rpc_blobstore.proto
|
||||||
|
|
||||||
|
@ -415,7 +415,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_rawDescGZIP() []byte {
|
||||||
}
|
}
|
||||||
|
|
||||||
var file_tvix_castore_protos_rpc_blobstore_proto_msgTypes = make([]protoimpl.MessageInfo, 6)
|
var file_tvix_castore_protos_rpc_blobstore_proto_msgTypes = make([]protoimpl.MessageInfo, 6)
|
||||||
var file_tvix_castore_protos_rpc_blobstore_proto_goTypes = []interface{}{
|
var file_tvix_castore_protos_rpc_blobstore_proto_goTypes = []any{
|
||||||
(*StatBlobRequest)(nil), // 0: tvix.castore.v1.StatBlobRequest
|
(*StatBlobRequest)(nil), // 0: tvix.castore.v1.StatBlobRequest
|
||||||
(*StatBlobResponse)(nil), // 1: tvix.castore.v1.StatBlobResponse
|
(*StatBlobResponse)(nil), // 1: tvix.castore.v1.StatBlobResponse
|
||||||
(*ReadBlobRequest)(nil), // 2: tvix.castore.v1.ReadBlobRequest
|
(*ReadBlobRequest)(nil), // 2: tvix.castore.v1.ReadBlobRequest
|
||||||
|
@ -444,7 +444,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_init() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if !protoimpl.UnsafeEnabled {
|
if !protoimpl.UnsafeEnabled {
|
||||||
file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[0].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*StatBlobRequest); i {
|
switch v := v.(*StatBlobRequest); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -456,7 +456,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[1].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*StatBlobResponse); i {
|
switch v := v.(*StatBlobResponse); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -468,7 +468,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[2].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*ReadBlobRequest); i {
|
switch v := v.(*ReadBlobRequest); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -480,7 +480,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[3].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*BlobChunk); i {
|
switch v := v.(*BlobChunk); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -492,7 +492,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[4].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*PutBlobResponse); i {
|
switch v := v.(*PutBlobResponse); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -504,7 +504,7 @@ func file_tvix_castore_protos_rpc_blobstore_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_rpc_blobstore_proto_msgTypes[5].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*StatBlobResponse_ChunkMeta); i {
|
switch v := v.(*StatBlobResponse_ChunkMeta); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
|
|
||||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||||
// versions:
|
// versions:
|
||||||
// protoc-gen-go v1.34.1
|
// protoc-gen-go v1.34.2
|
||||||
// protoc (unknown)
|
// protoc (unknown)
|
||||||
// source: tvix/castore/protos/rpc_directory.proto
|
// source: tvix/castore/protos/rpc_directory.proto
|
||||||
|
|
||||||
|
@ -199,7 +199,7 @@ func file_tvix_castore_protos_rpc_directory_proto_rawDescGZIP() []byte {
|
||||||
}
|
}
|
||||||
|
|
||||||
var file_tvix_castore_protos_rpc_directory_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
|
var file_tvix_castore_protos_rpc_directory_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
|
||||||
var file_tvix_castore_protos_rpc_directory_proto_goTypes = []interface{}{
|
var file_tvix_castore_protos_rpc_directory_proto_goTypes = []any{
|
||||||
(*GetDirectoryRequest)(nil), // 0: tvix.castore.v1.GetDirectoryRequest
|
(*GetDirectoryRequest)(nil), // 0: tvix.castore.v1.GetDirectoryRequest
|
||||||
(*PutDirectoryResponse)(nil), // 1: tvix.castore.v1.PutDirectoryResponse
|
(*PutDirectoryResponse)(nil), // 1: tvix.castore.v1.PutDirectoryResponse
|
||||||
(*Directory)(nil), // 2: tvix.castore.v1.Directory
|
(*Directory)(nil), // 2: tvix.castore.v1.Directory
|
||||||
|
@ -223,7 +223,7 @@ func file_tvix_castore_protos_rpc_directory_proto_init() {
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_castore_proto_init()
|
file_tvix_castore_protos_castore_proto_init()
|
||||||
if !protoimpl.UnsafeEnabled {
|
if !protoimpl.UnsafeEnabled {
|
||||||
file_tvix_castore_protos_rpc_directory_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_rpc_directory_proto_msgTypes[0].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*GetDirectoryRequest); i {
|
switch v := v.(*GetDirectoryRequest); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -235,7 +235,7 @@ func file_tvix_castore_protos_rpc_directory_proto_init() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_rpc_directory_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
|
file_tvix_castore_protos_rpc_directory_proto_msgTypes[1].Exporter = func(v any, i int) any {
|
||||||
switch v := v.(*PutDirectoryResponse); i {
|
switch v := v.(*PutDirectoryResponse); i {
|
||||||
case 0:
|
case 0:
|
||||||
return &v.state
|
return &v.state
|
||||||
|
@ -248,7 +248,7 @@ func file_tvix_castore_protos_rpc_directory_proto_init() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file_tvix_castore_protos_rpc_directory_proto_msgTypes[0].OneofWrappers = []interface{}{
|
file_tvix_castore_protos_rpc_directory_proto_msgTypes[0].OneofWrappers = []any{
|
||||||
(*GetDirectoryRequest_Digest)(nil),
|
(*GetDirectoryRequest_Digest)(nil),
|
||||||
}
|
}
|
||||||
type x struct{}
|
type x struct{}
|
||||||
|
|
|
@ -4,100 +4,68 @@ version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
async-compression = { version = "0.4.9", features = ["tokio", "zstd"]}
|
async-compression = { workspace = true, features = ["tokio", "zstd"] }
|
||||||
async-stream = "0.3.5"
|
async-stream = { workspace = true }
|
||||||
async-tempfile = "0.4.0"
|
async-tempfile = { workspace = true }
|
||||||
blake3 = { version = "1.3.1", features = ["rayon", "std", "traits-preview"] }
|
blake3 = { workspace = true, features = ["rayon", "std", "traits-preview"] }
|
||||||
bstr = "1.6.0"
|
bstr = { workspace = true }
|
||||||
bytes = "1.4.0"
|
bytes = { workspace = true }
|
||||||
data-encoding = "2.6.0"
|
data-encoding = { workspace = true }
|
||||||
digest = "0.10.7"
|
digest = { workspace = true }
|
||||||
fastcdc = { version = "3.1.0", features = ["tokio"] }
|
fastcdc = { workspace = true, features = ["tokio"] }
|
||||||
futures = "0.3.30"
|
futures = { workspace = true }
|
||||||
lazy_static = "1.4.0"
|
lazy_static = { workspace = true }
|
||||||
object_store = { version = "0.10.1", features = ["http"] }
|
object_store = { workspace = true, features = ["http"] }
|
||||||
parking_lot = "0.12.1"
|
parking_lot = { workspace = true }
|
||||||
pin-project-lite = "0.2.13"
|
pin-project-lite = { workspace = true }
|
||||||
prost = "0.13.1"
|
prost = { workspace = true }
|
||||||
sled = { version = "0.34.7" }
|
sled = { workspace = true }
|
||||||
thiserror = "1.0.38"
|
thiserror = { workspace = true }
|
||||||
tokio-stream = { version = "0.1.14", features = ["fs", "net"] }
|
tokio-stream = { workspace = true, features = ["fs", "net"] }
|
||||||
tokio-util = { version = "0.7.9", features = ["io", "io-util", "codec"] }
|
tokio-util = { workspace = true, features = ["io", "io-util", "codec"] }
|
||||||
tokio-tar = "0.3.1"
|
tokio-tar = { workspace = true }
|
||||||
tokio = { version = "1.32.0", features = ["fs", "macros", "net", "rt", "rt-multi-thread", "signal"] }
|
tokio = { workspace = true, features = ["fs", "macros", "net", "rt", "rt-multi-thread", "signal"] }
|
||||||
tonic = "0.12.0"
|
tonic = { workspace = true }
|
||||||
tower = "0.4.13"
|
tower = { workspace = true }
|
||||||
tracing = "0.1.37"
|
tracing = { workspace = true }
|
||||||
tracing-indicatif = "0.3.6"
|
tracing-indicatif = { workspace = true }
|
||||||
tvix-tracing = { path = "../tracing", features = ["tonic"] }
|
tvix-tracing = { path = "../tracing", features = ["tonic"] }
|
||||||
url = "2.4.0"
|
url = { workspace = true }
|
||||||
walkdir = "2.4.0"
|
walkdir = { workspace = true }
|
||||||
zstd = "0.13.0"
|
zstd = { workspace = true }
|
||||||
serde = { version = "1.0.197", features = [ "derive" ] }
|
serde = { workspace = true, features = ["derive"] }
|
||||||
serde_with = "3.7.0"
|
serde_with = { workspace = true }
|
||||||
serde_qs = "0.12.0"
|
serde_qs = { workspace = true }
|
||||||
petgraph = "0.6.4"
|
petgraph = { workspace = true }
|
||||||
erased-serde = "0.4.5"
|
erased-serde = { workspace = true }
|
||||||
serde_tagged = "0.3.0"
|
serde_tagged = { workspace = true }
|
||||||
hyper-util = "0.1.6"
|
hyper-util = { workspace = true }
|
||||||
|
redb = { workspace = true }
|
||||||
[dependencies.bigtable_rs]
|
bigtable_rs = { workspace = true, optional = true }
|
||||||
optional = true
|
fuse-backend-rs = { workspace = true, optional = true }
|
||||||
version = "0.2.10"
|
libc = { workspace = true, optional = true }
|
||||||
|
threadpool = { workspace = true, optional = true }
|
||||||
[dependencies.fuse-backend-rs]
|
tonic-reflection = { workspace = true, optional = true }
|
||||||
optional = true
|
vhost = { workspace = true, optional = true }
|
||||||
version = "0.11.0"
|
vhost-user-backend = { workspace = true, optional = true }
|
||||||
|
virtio-queue = { workspace = true, optional = true }
|
||||||
[dependencies.libc]
|
vm-memory = { workspace = true, optional = true }
|
||||||
optional = true
|
vmm-sys-util = { workspace = true, optional = true }
|
||||||
version = "0.2.144"
|
virtio-bindings = { workspace = true, optional = true }
|
||||||
|
|
||||||
[dependencies.threadpool]
|
|
||||||
version = "1.8.1"
|
|
||||||
optional = true
|
|
||||||
|
|
||||||
[dependencies.tonic-reflection]
|
|
||||||
optional = true
|
|
||||||
version = "0.12.0"
|
|
||||||
|
|
||||||
[dependencies.vhost]
|
|
||||||
optional = true
|
|
||||||
version = "0.6"
|
|
||||||
|
|
||||||
[dependencies.vhost-user-backend]
|
|
||||||
optional = true
|
|
||||||
version = "0.8"
|
|
||||||
|
|
||||||
[dependencies.virtio-queue]
|
|
||||||
optional = true
|
|
||||||
version = "0.7"
|
|
||||||
|
|
||||||
[dependencies.vm-memory]
|
|
||||||
optional = true
|
|
||||||
version = "0.10"
|
|
||||||
|
|
||||||
[dependencies.vmm-sys-util]
|
|
||||||
optional = true
|
|
||||||
version = "0.11"
|
|
||||||
|
|
||||||
[dependencies.virtio-bindings]
|
|
||||||
optional = true
|
|
||||||
version = "0.2.1"
|
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
prost-build = "0.13.1"
|
prost-build = { workspace = true }
|
||||||
tonic-build = "0.12.0"
|
tonic-build = { workspace = true }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
async-process = "2.1.0"
|
async-process = { workspace = true }
|
||||||
rstest = "0.19.0"
|
rstest = { workspace = true }
|
||||||
tempfile = "3.3.0"
|
tempfile = { workspace = true }
|
||||||
tokio-retry = "0.3.0"
|
tokio-retry = { workspace = true }
|
||||||
hex-literal = "0.4.1"
|
hex-literal = { workspace = true }
|
||||||
rstest_reuse = "0.6.0"
|
rstest_reuse = { workspace = true }
|
||||||
xattr = "1.3.1"
|
xattr = { workspace = true }
|
||||||
serde_json = "*"
|
serde_json = { workspace = true }
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = ["cloud"]
|
default = ["cloud"]
|
||||||
|
@ -125,6 +93,3 @@ tonic-reflection = ["dep:tonic-reflection"]
|
||||||
# Requires the following packages in $PATH:
|
# Requires the following packages in $PATH:
|
||||||
# cbtemulator, google-cloud-bigtable-tool
|
# cbtemulator, google-cloud-bigtable-tool
|
||||||
integration = []
|
integration = []
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|
|
@ -20,16 +20,16 @@ fn main() -> Result<()> {
|
||||||
.type_attribute(".", "#[derive(Eq, Hash)]")
|
.type_attribute(".", "#[derive(Eq, Hash)]")
|
||||||
.compile(
|
.compile(
|
||||||
&[
|
&[
|
||||||
"tvix/castore/protos/castore.proto",
|
"castore/protos/castore.proto",
|
||||||
"tvix/castore/protos/rpc_blobstore.proto",
|
"castore/protos/rpc_blobstore.proto",
|
||||||
"tvix/castore/protos/rpc_directory.proto",
|
"castore/protos/rpc_directory.proto",
|
||||||
],
|
],
|
||||||
// If we are in running `cargo build` manually, using `../..` works fine,
|
// If we are in running `cargo build` manually, using `../..` works fine,
|
||||||
// but in case we run inside a nix build, we need to instead point PROTO_ROOT
|
// but in case we run inside a nix build, we need to instead point PROTO_ROOT
|
||||||
// to a sparseTree containing that structure.
|
// to a custom tree containing that structure.
|
||||||
&[match std::env::var_os("PROTO_ROOT") {
|
&[match std::env::var_os("PROTO_ROOT") {
|
||||||
Some(proto_root) => proto_root.to_str().unwrap().to_owned(),
|
Some(proto_root) => proto_root.to_str().unwrap().to_owned(),
|
||||||
None => "../..".to_string(),
|
None => "..".to_string(),
|
||||||
}],
|
}],
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,16 +1,10 @@
|
||||||
{ depot, pkgs, ... }:
|
{ depot, pkgs, lib, ... }:
|
||||||
let
|
let
|
||||||
protos = depot.nix.sparseTree {
|
protos = lib.sourceByRegex depot.path.origSrc [
|
||||||
name = "castore-protos";
|
"buf.yaml"
|
||||||
root = depot.path.origSrc;
|
"buf.gen.yaml"
|
||||||
paths = [
|
"^tvix(/castore(/protos(/.*\.proto)?)?)?$"
|
||||||
./castore.proto
|
];
|
||||||
./rpc_blobstore.proto
|
|
||||||
./rpc_directory.proto
|
|
||||||
../../../buf.yaml
|
|
||||||
../../../buf.gen.yaml
|
|
||||||
];
|
|
||||||
};
|
|
||||||
in
|
in
|
||||||
depot.nix.readTree.drvTargets {
|
depot.nix.readTree.drvTargets {
|
||||||
inherit protos;
|
inherit protos;
|
||||||
|
|
|
@ -4,7 +4,7 @@ syntax = "proto3";
|
||||||
|
|
||||||
package tvix.castore.v1;
|
package tvix.castore.v1;
|
||||||
|
|
||||||
import "tvix/castore/protos/castore.proto";
|
import "castore/protos/castore.proto";
|
||||||
|
|
||||||
option go_package = "code.tvl.fyi/tvix/castore-go;castorev1";
|
option go_package = "code.tvl.fyi/tvix/castore-go;castorev1";
|
||||||
|
|
||||||
|
|
|
@ -1,14 +1,12 @@
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use futures::{StreamExt, TryStreamExt};
|
|
||||||
use tokio_util::io::{ReaderStream, StreamReader};
|
|
||||||
use tonic::async_trait;
|
use tonic::async_trait;
|
||||||
use tracing::{instrument, warn};
|
use tracing::instrument;
|
||||||
|
|
||||||
use crate::composition::{CompositionContext, ServiceBuilder};
|
use crate::composition::{CompositionContext, ServiceBuilder};
|
||||||
use crate::{B3Digest, Error};
|
use crate::{B3Digest, Error};
|
||||||
|
|
||||||
use super::{naive_seeker::NaiveSeeker, BlobReader, BlobService, BlobWriter};
|
use super::{BlobReader, BlobService, BlobWriter, ChunkedReader};
|
||||||
|
|
||||||
/// Combinator for a BlobService, using a "local" and "remote" blobservice.
|
/// Combinator for a BlobService, using a "local" and "remote" blobservice.
|
||||||
/// Requests are tried in (and returned from) the local store first, only if
|
/// Requests are tried in (and returned from) the local store first, only if
|
||||||
|
@ -71,19 +69,16 @@ where
|
||||||
// otherwise, a chunked reader, which will always try the
|
// otherwise, a chunked reader, which will always try the
|
||||||
// local backend first.
|
// local backend first.
|
||||||
|
|
||||||
// map Vec<ChunkMeta> to Vec<(B3Digest, u64)>
|
let chunked_reader = ChunkedReader::from_chunks(
|
||||||
let chunks: Vec<(B3Digest, u64)> = remote_chunks
|
remote_chunks.into_iter().map(|chunk| {
|
||||||
.into_iter()
|
|
||||||
.map(|chunk_meta| {
|
|
||||||
(
|
(
|
||||||
B3Digest::try_from(chunk_meta.digest)
|
chunk.digest.try_into().expect("invalid b3 digest"),
|
||||||
.expect("invalid chunk digest"),
|
chunk.size,
|
||||||
chunk_meta.size,
|
|
||||||
)
|
)
|
||||||
})
|
}),
|
||||||
.collect();
|
Arc::new(self.clone()) as Arc<dyn BlobService>,
|
||||||
|
);
|
||||||
Ok(Some(make_chunked_reader(self.clone(), chunks)))
|
Ok(Some(Box::new(chunked_reader)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -131,41 +126,3 @@ impl ServiceBuilder for CombinedBlobServiceConfig {
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_chunked_reader<BS>(
|
|
||||||
// This must consume, as we can't retain references to blob_service,
|
|
||||||
// as it'd add a lifetime to BlobReader in general, which will get
|
|
||||||
// problematic in TvixStoreFs, which is using async move closures and cloning.
|
|
||||||
blob_service: BS,
|
|
||||||
// A list of b3 digests for individual chunks, and their sizes.
|
|
||||||
chunks: Vec<(B3Digest, u64)>,
|
|
||||||
) -> Box<dyn BlobReader>
|
|
||||||
where
|
|
||||||
BS: BlobService + Clone + 'static,
|
|
||||||
{
|
|
||||||
// TODO: offset, verified streaming
|
|
||||||
|
|
||||||
// construct readers for each chunk
|
|
||||||
let blob_service = blob_service.clone();
|
|
||||||
let readers_stream = tokio_stream::iter(chunks).map(move |(digest, _)| {
|
|
||||||
let d = digest.to_owned();
|
|
||||||
let blob_service = blob_service.clone();
|
|
||||||
async move {
|
|
||||||
blob_service.open_read(&d.to_owned()).await?.ok_or_else(|| {
|
|
||||||
warn!(chunk.digest = %digest, "chunk not found");
|
|
||||||
std::io::Error::new(std::io::ErrorKind::NotFound, "chunk not found")
|
|
||||||
})
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// convert the stream of readers to a stream of streams of byte chunks
|
|
||||||
let bytes_streams = readers_stream.then(|elem| async { elem.await.map(ReaderStream::new) });
|
|
||||||
|
|
||||||
// flatten into one stream of byte chunks
|
|
||||||
let bytes_stream = bytes_streams.try_flatten();
|
|
||||||
|
|
||||||
// convert into AsyncRead
|
|
||||||
let blob_reader = StreamReader::new(bytes_stream);
|
|
||||||
|
|
||||||
Box::new(NaiveSeeker::new(Box::pin(blob_reader)))
|
|
||||||
}
|
|
||||||
|
|
|
@ -11,7 +11,6 @@ mod combinator;
|
||||||
mod from_addr;
|
mod from_addr;
|
||||||
mod grpc;
|
mod grpc;
|
||||||
mod memory;
|
mod memory;
|
||||||
mod naive_seeker;
|
|
||||||
mod object_store;
|
mod object_store;
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|
|
@ -1,265 +0,0 @@
|
||||||
use super::BlobReader;
|
|
||||||
use futures::ready;
|
|
||||||
use pin_project_lite::pin_project;
|
|
||||||
use std::io;
|
|
||||||
use std::task::Poll;
|
|
||||||
use tokio::io::AsyncRead;
|
|
||||||
use tracing::{debug, instrument, trace, warn};
|
|
||||||
|
|
||||||
pin_project! {
|
|
||||||
/// This implements [tokio::io::AsyncSeek] for and [tokio::io::AsyncRead] by
|
|
||||||
/// simply skipping over some bytes, keeping track of the position.
|
|
||||||
/// It fails whenever you try to seek backwards.
|
|
||||||
///
|
|
||||||
/// ## Pinning concerns:
|
|
||||||
///
|
|
||||||
/// [NaiveSeeker] is itself pinned by callers, and we do not need to concern
|
|
||||||
/// ourselves regarding that.
|
|
||||||
///
|
|
||||||
/// Though, its fields as per
|
|
||||||
/// <https://doc.rust-lang.org/std/pin/#pinning-is-not-structural-for-field>
|
|
||||||
/// can be pinned or unpinned.
|
|
||||||
///
|
|
||||||
/// So we need to go over each field and choose our policy carefully.
|
|
||||||
///
|
|
||||||
/// The obvious cases are the bookkeeping integers we keep in the structure,
|
|
||||||
/// those are private and not shared to anyone, we never build a
|
|
||||||
/// `Pin<&mut X>` out of them at any point, therefore, we can safely never
|
|
||||||
/// mark them as pinned. Of course, it is expected that no developer here
|
|
||||||
/// attempt to `pin!(self.pos)` to pin them because it makes no sense. If
|
|
||||||
/// they have to become pinned, they should be marked `#[pin]` and we need
|
|
||||||
/// to discuss it.
|
|
||||||
///
|
|
||||||
/// So the bookkeeping integers are in the right state with respect to their
|
|
||||||
/// pinning status. The projection should offer direct access.
|
|
||||||
///
|
|
||||||
/// On the `r` field, i.e. a `BufReader<R>`, given that
|
|
||||||
/// <https://docs.rs/tokio/latest/tokio/io/struct.BufReader.html#impl-Unpin-for-BufReader%3CR%3E>
|
|
||||||
/// is available, even a `Pin<&mut BufReader<R>>` can be safely moved.
|
|
||||||
///
|
|
||||||
/// The only care we should have regards the internal reader itself, i.e.
|
|
||||||
/// the `R` instance, see that Tokio decided to `#[pin]` it too:
|
|
||||||
/// <https://docs.rs/tokio/latest/src/tokio/io/util/buf_reader.rs.html#29>
|
|
||||||
///
|
|
||||||
/// In general, there's no `Unpin` instance for `R: tokio::io::AsyncRead`
|
|
||||||
/// (see <https://docs.rs/tokio/latest/tokio/io/trait.AsyncRead.html>).
|
|
||||||
///
|
|
||||||
/// Therefore, we could keep it unpinned and pin it in every call site
|
|
||||||
/// whenever we need to call `poll_*` which can be confusing to the non-
|
|
||||||
/// expert developer and we have a fair share amount of situations where the
|
|
||||||
/// [BufReader] instance is naked, i.e. in its `&mut BufReader<R>`
|
|
||||||
/// form, this is annoying because it could lead to expose the naked `R`
|
|
||||||
/// internal instance somehow and would produce a risk of making it move
|
|
||||||
/// unexpectedly.
|
|
||||||
///
|
|
||||||
/// We choose the path of the least resistance as we have no reason to have
|
|
||||||
/// access to the raw `BufReader<R>` instance, we just `#[pin]` it too and
|
|
||||||
/// enjoy its `poll_*` safe APIs and push the unpinning concerns to the
|
|
||||||
/// internal implementations themselves, which studied the question longer
|
|
||||||
/// than us.
|
|
||||||
pub struct NaiveSeeker<R: tokio::io::AsyncRead> {
|
|
||||||
#[pin]
|
|
||||||
r: tokio::io::BufReader<R>,
|
|
||||||
pos: u64,
|
|
||||||
bytes_to_skip: u64,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The buffer size used to discard data.
|
|
||||||
const DISCARD_BUF_SIZE: usize = 4096;
|
|
||||||
|
|
||||||
impl<R: tokio::io::AsyncRead> NaiveSeeker<R> {
|
|
||||||
pub fn new(r: R) -> Self {
|
|
||||||
NaiveSeeker {
|
|
||||||
r: tokio::io::BufReader::new(r),
|
|
||||||
pos: 0,
|
|
||||||
bytes_to_skip: 0,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<R: tokio::io::AsyncRead> tokio::io::AsyncRead for NaiveSeeker<R> {
|
|
||||||
#[instrument(level = "trace", skip_all)]
|
|
||||||
fn poll_read(
|
|
||||||
self: std::pin::Pin<&mut Self>,
|
|
||||||
cx: &mut std::task::Context<'_>,
|
|
||||||
buf: &mut tokio::io::ReadBuf<'_>,
|
|
||||||
) -> Poll<std::io::Result<()>> {
|
|
||||||
// The amount of data read can be determined by the increase
|
|
||||||
// in the length of the slice returned by `ReadBuf::filled`.
|
|
||||||
let filled_before = buf.filled().len();
|
|
||||||
|
|
||||||
let this = self.project();
|
|
||||||
ready!(this.r.poll_read(cx, buf))?;
|
|
||||||
|
|
||||||
let bytes_read = buf.filled().len() - filled_before;
|
|
||||||
*this.pos += bytes_read as u64;
|
|
||||||
|
|
||||||
trace!(bytes_read = bytes_read, new_pos = this.pos, "poll_read");
|
|
||||||
|
|
||||||
Ok(()).into()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<R: tokio::io::AsyncRead> tokio::io::AsyncBufRead for NaiveSeeker<R> {
|
|
||||||
fn poll_fill_buf(
|
|
||||||
self: std::pin::Pin<&mut Self>,
|
|
||||||
cx: &mut std::task::Context<'_>,
|
|
||||||
) -> Poll<io::Result<&[u8]>> {
|
|
||||||
self.project().r.poll_fill_buf(cx)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(level = "trace", skip(self))]
|
|
||||||
fn consume(self: std::pin::Pin<&mut Self>, amt: usize) {
|
|
||||||
let this = self.project();
|
|
||||||
this.r.consume(amt);
|
|
||||||
*this.pos += amt as u64;
|
|
||||||
|
|
||||||
trace!(new_pos = this.pos, "consume");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<R: tokio::io::AsyncRead> tokio::io::AsyncSeek for NaiveSeeker<R> {
    /// Records a pending forward seek.
    ///
    /// Only forward seeks are representable on a plain reader:
    /// - `SeekFrom::Start` must be >= the current position,
    /// - `SeekFrom::End` is always rejected (total size is unknown),
    /// - `SeekFrom::Current` must be non-negative.
    ///
    /// No bytes are discarded here; the actual skipping happens in
    /// `poll_complete`, driven by the recorded `bytes_to_skip`.
    #[instrument(level="trace", skip(self), fields(inner_pos=%self.pos), err(Debug))]
    fn start_seek(
        self: std::pin::Pin<&mut Self>,
        position: std::io::SeekFrom,
    ) -> std::io::Result<()> {
        let absolute_offset: u64 = match position {
            io::SeekFrom::Start(start_offset) => {
                if start_offset < self.pos {
                    return Err(io::Error::new(
                        io::ErrorKind::Unsupported,
                        format!("can't seek backwards ({} -> {})", self.pos, start_offset),
                    ));
                } else {
                    start_offset
                }
            }
            // we don't know the total size, can't support this.
            io::SeekFrom::End(_end_offset) => {
                return Err(io::Error::new(
                    io::ErrorKind::Unsupported,
                    "can't seek from end",
                ));
            }
            io::SeekFrom::Current(relative_offset) => {
                if relative_offset < 0 {
                    return Err(io::Error::new(
                        io::ErrorKind::Unsupported,
                        "can't seek backwards relative to current position",
                    ));
                } else {
                    self.pos + relative_offset as u64
                }
            }
        };

        // we already know absolute_offset is >= self.pos
        debug_assert!(
            absolute_offset >= self.pos,
            "absolute_offset {} must be >= self.pos {}",
            absolute_offset,
            self.pos
        );

        // calculate bytes to skip
        let this = self.project();
        *this.bytes_to_skip = absolute_offset - *this.pos;

        debug!(bytes_to_skip = *this.bytes_to_skip, "seek");

        Ok(())
    }

    /// Drives a seek recorded by `start_seek` to completion by reading and
    /// discarding bytes until `bytes_to_skip` reaches zero, then resolves
    /// with the new absolute position. Returns `UnexpectedEof` if the
    /// underlying reader ends before the target position is reached.
    #[instrument(skip_all)]
    fn poll_complete(
        mut self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> Poll<std::io::Result<u64>> {
        if self.bytes_to_skip == 0 {
            // return the new position (from the start of the stream)
            return Poll::Ready(Ok(self.pos));
        }

        // discard some bytes, until pos is where we want it to be.
        // We create a buffer that we'll discard later on.
        let mut discard_buf = [0; DISCARD_BUF_SIZE];

        // Loop until we've reached the desired seek position. This is done by issuing repeated
        // `poll_read` calls.
        // If the data is not available yet, we will yield back to the executor
        // and wait to be polled again.
        loop {
            if self.bytes_to_skip == 0 {
                return Poll::Ready(Ok(self.pos));
            }

            // calculate the length we want to skip at most, which is either a max
            // buffer size, or the number of remaining bytes to read, whatever is
            // smaller.
            let bytes_to_skip_now = std::cmp::min(self.bytes_to_skip as usize, discard_buf.len());
            let mut discard_buf = tokio::io::ReadBuf::new(&mut discard_buf[..bytes_to_skip_now]);

            ready!(self.as_mut().poll_read(cx, &mut discard_buf))?;
            let bytes_skipped = discard_buf.filled().len();

            if bytes_skipped == 0 {
                return Poll::Ready(Err(io::Error::new(
                    io::ErrorKind::UnexpectedEof,
                    "got EOF while trying to skip bytes",
                )));
            }
            // decrement bytes to skip. The poll_read call already updated self.pos.
            *self.as_mut().project().bytes_to_skip -= bytes_skipped as u64;
        }
    }
}
|
|
||||||
|
|
||||||
// Marker impl: a NaiveSeeker over any sendable, unpinned reader can be used
// wherever a BlobReader is expected. NOTE(review): BlobReader is declared
// elsewhere in this crate — presumably it only requires the AsyncRead/
// AsyncSeek bounds satisfied above; confirm against the trait definition.
impl<R: tokio::io::AsyncRead + Send + Unpin + 'static> BlobReader for NaiveSeeker<R> {}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::{NaiveSeeker, DISCARD_BUF_SIZE};
    use std::io::{Cursor, SeekFrom};
    use tokio::io::{AsyncReadExt, AsyncSeekExt};

    /// This seek requires multiple `poll_read` as we use a multiples of
    /// DISCARD_BUF_SIZE when doing the seek.
    /// This ensures we don't hang indefinitely.
    #[tokio::test]
    async fn seek() {
        let data = vec![0u8; DISCARD_BUF_SIZE * 4];
        let mut seeker = NaiveSeeker::new(Cursor::new(&data));

        seeker.seek(SeekFrom::Start(4000)).await.unwrap();
    }

    /// Interleaves seeks and reads over a stream laid out as three distinct
    /// regions (0s, 1s, 2s) and checks each read lands in the right region.
    #[tokio::test]
    async fn seek_read() {
        // Layout: 2 buffers of 0s, then 2 of 1s, then 2 of 2s.
        let mut data = vec![0u8; DISCARD_BUF_SIZE * 2];
        data.extend_from_slice(&[1u8; DISCARD_BUF_SIZE * 2]);
        data.extend_from_slice(&[2u8; DISCARD_BUF_SIZE * 2]);

        let mut seeker = NaiveSeeker::new(Cursor::new(&data));
        let mut out = vec![0u8; DISCARD_BUF_SIZE];

        // A plain read from the start yields zeroes.
        seeker.read_exact(&mut out).await.expect("must read");
        assert_eq!(out.as_slice(), &[0u8; DISCARD_BUF_SIZE]);

        // A relative seek over the remaining zeroes lands us in the ones.
        seeker
            .seek(SeekFrom::Current(DISCARD_BUF_SIZE as i64))
            .await
            .expect("must seek");
        seeker.read_exact(&mut out).await.expect("must read");
        assert_eq!(out.as_slice(), &[1u8; DISCARD_BUF_SIZE]);

        // An absolute seek to the start of the twos.
        seeker
            .seek(SeekFrom::Start(2 * 2 * DISCARD_BUF_SIZE as u64))
            .await
            .expect("must seek");
        seeker.read_exact(&mut out).await.expect("must read");
        assert_eq!(out.as_slice(), &[2u8; DISCARD_BUF_SIZE]);
    }
}
|
|
|
@ -149,7 +149,7 @@ impl<'r, 'de: 'r, T: 'static> SeedFactory<'de, TagString<'de>> for RegistryWithF
|
||||||
.0
|
.0
|
||||||
.iter()
|
.iter()
|
||||||
.find(|(k, _)| *k == &(TypeId::of::<T>(), tag.as_ref()))
|
.find(|(k, _)| *k == &(TypeId::of::<T>(), tag.as_ref()))
|
||||||
.ok_or_else(|| serde::de::Error::custom("Unknown tag"))?
|
.ok_or_else(|| serde::de::Error::custom(format!("Unknown type: {}", tag)))?
|
||||||
.1;
|
.1;
|
||||||
|
|
||||||
let entry: &RegistryEntry<T> = <dyn Any>::downcast_ref(&**seed).unwrap();
|
let entry: &RegistryEntry<T> = <dyn Any>::downcast_ref(&**seed).unwrap();
|
||||||
|
@ -215,7 +215,7 @@ impl<'de, T: 'static> serde::Deserialize<'de> for DeserializeWithRegistry<T> {
|
||||||
|
|
||||||
#[derive(Debug, thiserror::Error)]
|
#[derive(Debug, thiserror::Error)]
|
||||||
enum TryFromUrlError {
|
enum TryFromUrlError {
|
||||||
#[error("Unknown tag: {0}")]
|
#[error("Unknown type: {0}")]
|
||||||
UnknownTag(String),
|
UnknownTag(String),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -274,6 +274,9 @@ pub fn add_default_services(reg: &mut Registry) {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct CompositionContext<'a> {
|
pub struct CompositionContext<'a> {
|
||||||
|
// The stack used to detect recursive instantiations and prevent deadlocks
|
||||||
|
// The TypeId of the trait object is included to distinguish e.g. the
|
||||||
|
// BlobService "default" and the DirectoryService "default".
|
||||||
stack: Vec<(TypeId, String)>,
|
stack: Vec<(TypeId, String)>,
|
||||||
composition: Option<&'a Composition>,
|
composition: Option<&'a Composition>,
|
||||||
}
|
}
|
||||||
|
@ -431,10 +434,13 @@ impl Composition {
|
||||||
new_context
|
new_context
|
||||||
.stack
|
.stack
|
||||||
.push((TypeId::of::<T>(), entrypoint.clone()));
|
.push((TypeId::of::<T>(), entrypoint.clone()));
|
||||||
let res = config
|
let res =
|
||||||
.build(&entrypoint, &new_context)
|
config.build(&entrypoint, &new_context).await.map_err(|e| {
|
||||||
.await
|
match e.downcast() {
|
||||||
.map_err(|e| CompositionError::Failed(entrypoint, e.into()));
|
Ok(e) => *e,
|
||||||
|
Err(e) => CompositionError::Failed(entrypoint, e.into()),
|
||||||
|
}
|
||||||
|
});
|
||||||
tx.send(Some(res.clone())).unwrap();
|
tx.send(Some(res.clone())).unwrap();
|
||||||
res
|
res
|
||||||
})
|
})
|
||||||
|
@ -470,3 +476,66 @@ impl Composition {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
use crate::blobservice::BlobService;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
/// Test that we return a reference to the same instance of MemoryBlobService (via ptr_eq)
|
||||||
|
/// when instantiating the same entrypoint twice. By instantiating concurrently, we also
|
||||||
|
/// test the channels notifying the second consumer when the store has been instantiated.
|
||||||
|
#[tokio::test]
|
||||||
|
async fn concurrent() {
|
||||||
|
let blob_services_configs_json = serde_json::json!({
|
||||||
|
"default": {
|
||||||
|
"type": "memory",
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
let blob_services_configs =
|
||||||
|
with_registry(®, || serde_json::from_value(blob_services_configs_json)).unwrap();
|
||||||
|
let mut blob_service_composition = Composition::default();
|
||||||
|
blob_service_composition.extend_with_configs::<dyn BlobService>(blob_services_configs);
|
||||||
|
let (blob_service1, blob_service2) = tokio::join!(
|
||||||
|
blob_service_composition.build::<dyn BlobService>("default"),
|
||||||
|
blob_service_composition.build::<dyn BlobService>("default")
|
||||||
|
);
|
||||||
|
assert!(Arc::ptr_eq(
|
||||||
|
&blob_service1.unwrap(),
|
||||||
|
&blob_service2.unwrap()
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Test that we throw the correct error when an instantiation would recurse (deadlock)
|
||||||
|
#[tokio::test]
|
||||||
|
async fn reject_recursion() {
|
||||||
|
let blob_services_configs_json = serde_json::json!({
|
||||||
|
"default": {
|
||||||
|
"type": "combined",
|
||||||
|
"local": "other",
|
||||||
|
"remote": "other"
|
||||||
|
},
|
||||||
|
"other": {
|
||||||
|
"type": "combined",
|
||||||
|
"local": "default",
|
||||||
|
"remote": "default"
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
let blob_services_configs =
|
||||||
|
with_registry(®, || serde_json::from_value(blob_services_configs_json)).unwrap();
|
||||||
|
let mut blob_service_composition = Composition::default();
|
||||||
|
blob_service_composition.extend_with_configs::<dyn BlobService>(blob_services_configs);
|
||||||
|
match blob_service_composition
|
||||||
|
.build::<dyn BlobService>("default")
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Err(CompositionError::Recursion(stack)) => {
|
||||||
|
assert_eq!(stack, vec!["default".to_string(), "other".to_string()])
|
||||||
|
}
|
||||||
|
other => panic!("should have returned an error, returned: {:?}", other.err()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -6,7 +6,7 @@ use thiserror::Error;
|
||||||
pub struct B3Digest(Bytes);
|
pub struct B3Digest(Bytes);
|
||||||
|
|
||||||
// TODO: allow converting these errors to crate::Error
|
// TODO: allow converting these errors to crate::Error
|
||||||
#[derive(Error, Debug)]
|
#[derive(Error, Debug, PartialEq)]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
#[error("invalid digest length: {0}")]
|
#[error("invalid digest length: {0}")]
|
||||||
InvalidDigestLen(usize),
|
InvalidDigestLen(usize),
|
||||||
|
@ -26,6 +26,11 @@ impl From<B3Digest> for bytes::Bytes {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl From<blake3::Hash> for B3Digest {
|
||||||
|
fn from(value: blake3::Hash) -> Self {
|
||||||
|
Self(Bytes::copy_from_slice(value.as_bytes()))
|
||||||
|
}
|
||||||
|
}
|
||||||
impl From<digest::Output<blake3::Hasher>> for B3Digest {
|
impl From<digest::Output<blake3::Hasher>> for B3Digest {
|
||||||
fn from(value: digest::Output<blake3::Hasher>) -> Self {
|
fn from(value: digest::Output<blake3::Hasher>) -> Self {
|
||||||
let v = Into::<[u8; B3_LEN]>::into(value);
|
let v = Into::<[u8; B3_LEN]>::into(value);
|
||||||
|
@ -67,6 +72,12 @@ impl From<&[u8; B3_LEN]> for B3Digest {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl From<B3Digest> for [u8; B3_LEN] {
|
||||||
|
fn from(value: B3Digest) -> Self {
|
||||||
|
value.0.to_vec().try_into().unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl Clone for B3Digest {
|
impl Clone for B3Digest {
|
||||||
fn clone(&self) -> Self {
|
fn clone(&self) -> Self {
|
||||||
Self(self.0.to_owned())
|
Self(self.0.to_owned())
|
||||||
|
|
|
@ -9,7 +9,9 @@ use std::sync::Arc;
|
||||||
use tonic::async_trait;
|
use tonic::async_trait;
|
||||||
use tracing::{instrument, trace, warn};
|
use tracing::{instrument, trace, warn};
|
||||||
|
|
||||||
use super::{utils::traverse_directory, DirectoryPutter, DirectoryService, SimplePutter};
|
use super::{
|
||||||
|
utils::traverse_directory, Directory, DirectoryPutter, DirectoryService, SimplePutter,
|
||||||
|
};
|
||||||
use crate::composition::{CompositionContext, ServiceBuilder};
|
use crate::composition::{CompositionContext, ServiceBuilder};
|
||||||
use crate::{proto, B3Digest, Error};
|
use crate::{proto, B3Digest, Error};
|
||||||
|
|
||||||
|
@ -149,7 +151,7 @@ fn derive_directory_key(digest: &B3Digest) -> String {
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
impl DirectoryService for BigtableDirectoryService {
|
impl DirectoryService for BigtableDirectoryService {
|
||||||
#[instrument(skip(self, digest), err, fields(directory.digest = %digest))]
|
#[instrument(skip(self, digest), err, fields(directory.digest = %digest))]
|
||||||
async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> {
|
async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error> {
|
||||||
let mut client = self.client.clone();
|
let mut client = self.client.clone();
|
||||||
let directory_key = derive_directory_key(digest);
|
let directory_key = derive_directory_key(digest);
|
||||||
|
|
||||||
|
@ -241,28 +243,20 @@ impl DirectoryService for BigtableDirectoryService {
|
||||||
|
|
||||||
// Try to parse the value into a Directory message.
|
// Try to parse the value into a Directory message.
|
||||||
let directory = proto::Directory::decode(Bytes::from(row_cell.value))
|
let directory = proto::Directory::decode(Bytes::from(row_cell.value))
|
||||||
.map_err(|e| Error::StorageError(format!("unable to decode directory proto: {}", e)))?;
|
.map_err(|e| Error::StorageError(format!("unable to decode directory proto: {}", e)))?
|
||||||
|
.try_into()
|
||||||
// validate the Directory.
|
|
||||||
directory
|
|
||||||
.validate()
|
|
||||||
.map_err(|e| Error::StorageError(format!("invalid Directory message: {}", e)))?;
|
.map_err(|e| Error::StorageError(format!("invalid Directory message: {}", e)))?;
|
||||||
|
|
||||||
Ok(Some(directory))
|
Ok(Some(directory))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(self, directory), err, fields(directory.digest = %directory.digest()))]
|
#[instrument(skip(self, directory), err, fields(directory.digest = %directory.digest()))]
|
||||||
async fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error> {
|
async fn put(&self, directory: Directory) -> Result<B3Digest, Error> {
|
||||||
let directory_digest = directory.digest();
|
let directory_digest = directory.digest();
|
||||||
let mut client = self.client.clone();
|
let mut client = self.client.clone();
|
||||||
let directory_key = derive_directory_key(&directory_digest);
|
let directory_key = derive_directory_key(&directory_digest);
|
||||||
|
|
||||||
// Ensure the directory we're trying to upload passes validation
|
let data = proto::Directory::from(directory).encode_to_vec();
|
||||||
directory
|
|
||||||
.validate()
|
|
||||||
.map_err(|e| Error::InvalidRequest(format!("directory is invalid: {}", e)))?;
|
|
||||||
|
|
||||||
let data = directory.encode_to_vec();
|
|
||||||
if data.len() as u64 > CELL_SIZE_LIMIT {
|
if data.len() as u64 > CELL_SIZE_LIMIT {
|
||||||
return Err(Error::StorageError(
|
return Err(Error::StorageError(
|
||||||
"Directory exceeds cell limit on Bigtable".into(),
|
"Directory exceeds cell limit on Bigtable".into(),
|
||||||
|
@ -310,7 +304,7 @@ impl DirectoryService for BigtableDirectoryService {
|
||||||
fn get_recursive(
|
fn get_recursive(
|
||||||
&self,
|
&self,
|
||||||
root_directory_digest: &B3Digest,
|
root_directory_digest: &B3Digest,
|
||||||
) -> BoxStream<'static, Result<proto::Directory, Error>> {
|
) -> BoxStream<'static, Result<Directory, Error>> {
|
||||||
traverse_directory(self.clone(), root_directory_digest)
|
traverse_directory(self.clone(), root_directory_digest)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -7,10 +7,9 @@ use futures::TryStreamExt;
|
||||||
use tonic::async_trait;
|
use tonic::async_trait;
|
||||||
use tracing::{instrument, trace};
|
use tracing::{instrument, trace};
|
||||||
|
|
||||||
use super::{DirectoryGraph, DirectoryService, RootToLeavesValidator, SimplePutter};
|
use super::{Directory, DirectoryGraph, DirectoryService, RootToLeavesValidator, SimplePutter};
|
||||||
use crate::composition::{CompositionContext, ServiceBuilder};
|
use crate::composition::{CompositionContext, ServiceBuilder};
|
||||||
use crate::directoryservice::DirectoryPutter;
|
use crate::directoryservice::DirectoryPutter;
|
||||||
use crate::proto;
|
|
||||||
use crate::B3Digest;
|
use crate::B3Digest;
|
||||||
use crate::Error;
|
use crate::Error;
|
||||||
|
|
||||||
|
@ -40,7 +39,7 @@ where
|
||||||
DS2: DirectoryService + Clone + 'static,
|
DS2: DirectoryService + Clone + 'static,
|
||||||
{
|
{
|
||||||
#[instrument(skip(self, digest), fields(directory.digest = %digest))]
|
#[instrument(skip(self, digest), fields(directory.digest = %digest))]
|
||||||
async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> {
|
async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error> {
|
||||||
match self.near.get(digest).await? {
|
match self.near.get(digest).await? {
|
||||||
Some(directory) => {
|
Some(directory) => {
|
||||||
trace!("serving from cache");
|
trace!("serving from cache");
|
||||||
|
@ -82,7 +81,7 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip_all)]
|
#[instrument(skip_all)]
|
||||||
async fn put(&self, _directory: proto::Directory) -> Result<B3Digest, Error> {
|
async fn put(&self, _directory: Directory) -> Result<B3Digest, Error> {
|
||||||
Err(Error::StorageError("unimplemented".to_string()))
|
Err(Error::StorageError("unimplemented".to_string()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -90,7 +89,7 @@ where
|
||||||
fn get_recursive(
|
fn get_recursive(
|
||||||
&self,
|
&self,
|
||||||
root_directory_digest: &B3Digest,
|
root_directory_digest: &B3Digest,
|
||||||
) -> BoxStream<'static, Result<proto::Directory, Error>> {
|
) -> BoxStream<'static, Result<Directory, Error>> {
|
||||||
let near = self.near.clone();
|
let near = self.near.clone();
|
||||||
let far = self.far.clone();
|
let far = self.far.clone();
|
||||||
let digest = root_directory_digest.clone();
|
let digest = root_directory_digest.clone();
|
||||||
|
@ -179,3 +178,75 @@ impl ServiceBuilder for CacheConfig {
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct Router<DS1, DS2> {
|
||||||
|
writes: DS1,
|
||||||
|
reads: DS2,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl<DS1, DS2> DirectoryService for Router<DS1, DS2>
|
||||||
|
where
|
||||||
|
DS1: DirectoryService + Clone + 'static,
|
||||||
|
DS2: DirectoryService + Clone + 'static,
|
||||||
|
{
|
||||||
|
#[instrument(skip(self, digest), fields(directory.digest = %digest))]
|
||||||
|
async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error> {
|
||||||
|
self.reads.get(digest).await
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(skip_all)]
|
||||||
|
async fn put(&self, directory: Directory) -> Result<B3Digest, Error> {
|
||||||
|
self.writes.put(directory).await
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(skip_all, fields(directory.digest = %root_directory_digest))]
|
||||||
|
fn get_recursive(
|
||||||
|
&self,
|
||||||
|
root_directory_digest: &B3Digest,
|
||||||
|
) -> BoxStream<'static, Result<Directory, Error>> {
|
||||||
|
self.reads.get_recursive(root_directory_digest)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(skip_all)]
|
||||||
|
fn put_multiple_start(&self) -> Box<(dyn DirectoryPutter + 'static)> {
|
||||||
|
self.writes.put_multiple_start()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(serde::Deserialize, Debug)]
|
||||||
|
#[serde(deny_unknown_fields)]
|
||||||
|
pub struct RouterConfig {
|
||||||
|
writes: String,
|
||||||
|
reads: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TryFrom<url::Url> for RouterConfig {
|
||||||
|
type Error = Box<dyn std::error::Error + Send + Sync>;
|
||||||
|
fn try_from(_url: url::Url) -> Result<Self, Self::Error> {
|
||||||
|
Err(Error::StorageError(
|
||||||
|
"Instantiating a CombinedDirectoryService from a url is not supported".into(),
|
||||||
|
)
|
||||||
|
.into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl ServiceBuilder for RouterConfig {
|
||||||
|
type Output = dyn DirectoryService;
|
||||||
|
async fn build<'a>(
|
||||||
|
&'a self,
|
||||||
|
_instance_name: &str,
|
||||||
|
context: &CompositionContext,
|
||||||
|
) -> Result<Arc<dyn DirectoryService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
|
||||||
|
let (writes, reads) = futures::join!(
|
||||||
|
context.resolve(self.writes.clone()),
|
||||||
|
context.resolve(self.reads.clone())
|
||||||
|
);
|
||||||
|
Ok(Arc::new(Router {
|
||||||
|
writes: writes?,
|
||||||
|
reads: reads?,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -1,7 +1,5 @@
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
use bstr::ByteSlice;
|
|
||||||
|
|
||||||
use petgraph::{
|
use petgraph::{
|
||||||
graph::{DiGraph, NodeIndex},
|
graph::{DiGraph, NodeIndex},
|
||||||
visit::{Bfs, DfsPostOrder, EdgeRef, IntoNodeIdentifiers, Walker},
|
visit::{Bfs, DfsPostOrder, EdgeRef, IntoNodeIdentifiers, Walker},
|
||||||
|
@ -10,10 +8,7 @@ use petgraph::{
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
|
|
||||||
use super::order_validator::{LeavesToRootValidator, OrderValidator, RootToLeavesValidator};
|
use super::order_validator::{LeavesToRootValidator, OrderValidator, RootToLeavesValidator};
|
||||||
use crate::{
|
use crate::{path::PathComponent, B3Digest, Directory, Node};
|
||||||
proto::{self, Directory, DirectoryNode},
|
|
||||||
B3Digest,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(thiserror::Error, Debug)]
|
#[derive(thiserror::Error, Debug)]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
|
@ -21,6 +16,11 @@ pub enum Error {
|
||||||
ValidationError(String),
|
ValidationError(String),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
struct EdgeWeight {
|
||||||
|
name: PathComponent,
|
||||||
|
size: u64,
|
||||||
|
}
|
||||||
|
|
||||||
/// This can be used to validate and/or re-order a Directory closure (DAG of
|
/// This can be used to validate and/or re-order a Directory closure (DAG of
|
||||||
/// connected Directories), and their insertion order.
|
/// connected Directories), and their insertion order.
|
||||||
///
|
///
|
||||||
|
@ -58,7 +58,7 @@ pub struct DirectoryGraph<O> {
|
||||||
//
|
//
|
||||||
// The option in the edge weight tracks the pending validation state of the respective edge, for example if
|
// The option in the edge weight tracks the pending validation state of the respective edge, for example if
|
||||||
// the child has not been added yet.
|
// the child has not been added yet.
|
||||||
graph: DiGraph<Option<Directory>, Option<DirectoryNode>>,
|
graph: DiGraph<Option<Directory>, Option<EdgeWeight>>,
|
||||||
|
|
||||||
// A lookup table from directory digest to node index.
|
// A lookup table from directory digest to node index.
|
||||||
digest_to_node_ix: HashMap<B3Digest, NodeIndex>,
|
digest_to_node_ix: HashMap<B3Digest, NodeIndex>,
|
||||||
|
@ -67,18 +67,18 @@ pub struct DirectoryGraph<O> {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct ValidatedDirectoryGraph {
|
pub struct ValidatedDirectoryGraph {
|
||||||
graph: DiGraph<Option<Directory>, Option<DirectoryNode>>,
|
graph: DiGraph<Option<Directory>, Option<EdgeWeight>>,
|
||||||
|
|
||||||
root: Option<NodeIndex>,
|
root: Option<NodeIndex>,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_edge(dir: &DirectoryNode, child: &Directory) -> Result<(), Error> {
|
fn check_edge(edge: &EdgeWeight, child: &Directory) -> Result<(), Error> {
|
||||||
// Ensure the size specified in the child node matches our records.
|
// Ensure the size specified in the child node matches our records.
|
||||||
if dir.size != child.size() {
|
if edge.size != child.size() {
|
||||||
return Err(Error::ValidationError(format!(
|
return Err(Error::ValidationError(format!(
|
||||||
"'{}' has wrong size, specified {}, recorded {}",
|
"'{}' has wrong size, specified {}, recorded {}",
|
||||||
dir.name.as_bstr(),
|
edge.name,
|
||||||
dir.size,
|
edge.size,
|
||||||
child.size(),
|
child.size(),
|
||||||
)));
|
)));
|
||||||
}
|
}
|
||||||
|
@ -88,7 +88,7 @@ fn check_edge(dir: &DirectoryNode, child: &Directory) -> Result<(), Error> {
|
||||||
impl DirectoryGraph<LeavesToRootValidator> {
|
impl DirectoryGraph<LeavesToRootValidator> {
|
||||||
/// Insert a new Directory into the closure
|
/// Insert a new Directory into the closure
|
||||||
#[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
|
#[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
|
||||||
pub fn add(&mut self, directory: proto::Directory) -> Result<(), Error> {
|
pub fn add(&mut self, directory: Directory) -> Result<(), Error> {
|
||||||
if !self.order_validator.add_directory(&directory) {
|
if !self.order_validator.add_directory(&directory) {
|
||||||
return Err(Error::ValidationError(
|
return Err(Error::ValidationError(
|
||||||
"unknown directory was referenced".into(),
|
"unknown directory was referenced".into(),
|
||||||
|
@ -108,7 +108,7 @@ impl DirectoryGraph<RootToLeavesValidator> {
|
||||||
|
|
||||||
/// Insert a new Directory into the closure
|
/// Insert a new Directory into the closure
|
||||||
#[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
|
#[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
|
||||||
pub fn add(&mut self, directory: proto::Directory) -> Result<(), Error> {
|
pub fn add(&mut self, directory: Directory) -> Result<(), Error> {
|
||||||
let digest = directory.digest();
|
let digest = directory.digest();
|
||||||
if !self.order_validator.digest_allowed(&digest) {
|
if !self.order_validator.digest_allowed(&digest) {
|
||||||
return Err(Error::ValidationError("unexpected digest".into()));
|
return Err(Error::ValidationError("unexpected digest".into()));
|
||||||
|
@ -129,12 +129,7 @@ impl<O: OrderValidator> DirectoryGraph<O> {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Adds a directory which has already been confirmed to be in-order to the graph
|
/// Adds a directory which has already been confirmed to be in-order to the graph
|
||||||
pub fn add_order_unchecked(&mut self, directory: proto::Directory) -> Result<(), Error> {
|
pub fn add_order_unchecked(&mut self, directory: Directory) -> Result<(), Error> {
|
||||||
// Do some basic validation
|
|
||||||
directory
|
|
||||||
.validate()
|
|
||||||
.map_err(|e| Error::ValidationError(e.to_string()))?;
|
|
||||||
|
|
||||||
let digest = directory.digest();
|
let digest = directory.digest();
|
||||||
|
|
||||||
// Teach the graph about the existence of a node with this digest
|
// Teach the graph about the existence of a node with this digest
|
||||||
|
@ -149,23 +144,32 @@ impl<O: OrderValidator> DirectoryGraph<O> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// set up edges to all child directories
|
// set up edges to all child directories
|
||||||
for subdir in &directory.directories {
|
for (name, node) in directory.nodes() {
|
||||||
let subdir_digest: B3Digest = subdir.digest.clone().try_into().unwrap();
|
if let Node::Directory { digest, size } = node {
|
||||||
|
let child_ix = *self
|
||||||
|
.digest_to_node_ix
|
||||||
|
.entry(digest.clone())
|
||||||
|
.or_insert_with(|| self.graph.add_node(None));
|
||||||
|
|
||||||
let child_ix = *self
|
let pending_edge_check = match &self.graph[child_ix] {
|
||||||
.digest_to_node_ix
|
Some(child) => {
|
||||||
.entry(subdir_digest)
|
// child is already available, validate the edge now
|
||||||
.or_insert_with(|| self.graph.add_node(None));
|
check_edge(
|
||||||
|
&EdgeWeight {
|
||||||
let pending_edge_check = match &self.graph[child_ix] {
|
name: name.clone(),
|
||||||
Some(child) => {
|
size: *size,
|
||||||
// child is already available, validate the edge now
|
},
|
||||||
check_edge(subdir, child)?;
|
child,
|
||||||
None
|
)?;
|
||||||
}
|
None
|
||||||
None => Some(subdir.clone()), // pending validation
|
}
|
||||||
};
|
None => Some(EdgeWeight {
|
||||||
self.graph.add_edge(ix, child_ix, pending_edge_check);
|
name: name.clone(),
|
||||||
|
size: *size,
|
||||||
|
}), // pending validation
|
||||||
|
};
|
||||||
|
self.graph.add_edge(ix, child_ix, pending_edge_check);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// validate the edges from parents to this node
|
// validate the edges from parents to this node
|
||||||
|
@ -183,6 +187,7 @@ impl<O: OrderValidator> DirectoryGraph<O> {
|
||||||
.expect("edge not found")
|
.expect("edge not found")
|
||||||
.take()
|
.take()
|
||||||
.expect("edge is already validated");
|
.expect("edge is already validated");
|
||||||
|
|
||||||
check_edge(&edge_weight, &directory)?;
|
check_edge(&edge_weight, &directory)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -266,37 +271,37 @@ impl ValidatedDirectoryGraph {
|
||||||
.filter_map(move |i| nodes[i.index()].weight.take())
|
.filter_map(move |i| nodes[i.index()].weight.take())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
/*
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use crate::{
|
|
||||||
fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C},
|
|
||||||
proto::{self, Directory},
|
|
||||||
};
|
|
||||||
use lazy_static::lazy_static;
|
|
||||||
use rstest::rstest;
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
pub static ref BROKEN_DIRECTORY : Directory = Directory {
|
pub static ref BROKEN_DIRECTORY : Directory = Directory {
|
||||||
symlinks: vec![proto::SymlinkNode {
|
symlinks: vec![SymlinkNode {
|
||||||
name: "".into(), // invalid name!
|
name: "".into(), // invalid name!
|
||||||
target: "doesntmatter".into(),
|
target: "doesntmatter".into(),
|
||||||
}],
|
}],
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
*/
|
||||||
pub static ref BROKEN_PARENT_DIRECTORY: Directory = Directory {
|
#[cfg(test)]
|
||||||
directories: vec![proto::DirectoryNode {
|
mod tests {
|
||||||
name: "foo".into(),
|
use crate::fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C};
|
||||||
digest: DIRECTORY_A.digest().into(),
|
use crate::{Directory, Node};
|
||||||
size: DIRECTORY_A.size() + 42, // wrong!
|
use lazy_static::lazy_static;
|
||||||
}],
|
use rstest::rstest;
|
||||||
..Default::default()
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
use super::{DirectoryGraph, LeavesToRootValidator, RootToLeavesValidator};
|
use super::{DirectoryGraph, LeavesToRootValidator, RootToLeavesValidator};
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
pub static ref BROKEN_PARENT_DIRECTORY: Directory =
|
||||||
|
Directory::try_from_iter([
|
||||||
|
(
|
||||||
|
"foo".try_into().unwrap(),
|
||||||
|
Node::Directory{
|
||||||
|
digest: DIRECTORY_A.digest(),
|
||||||
|
size: DIRECTORY_A.size() + 42, // wrong!
|
||||||
|
}
|
||||||
|
)
|
||||||
|
]).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
#[rstest]
|
#[rstest]
|
||||||
/// Uploading an empty directory should succeed.
|
/// Uploading an empty directory should succeed.
|
||||||
#[case::empty_directory(&[&*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A]))]
|
#[case::empty_directory(&[&*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A]))]
|
||||||
|
@ -312,8 +317,6 @@ mod tests {
|
||||||
#[case::unconnected_node(&[&*DIRECTORY_A, &*DIRECTORY_C, &*DIRECTORY_B], false, None)]
|
#[case::unconnected_node(&[&*DIRECTORY_A, &*DIRECTORY_C, &*DIRECTORY_B], false, None)]
|
||||||
/// Uploading B (referring to A) should fail immediately, because A was never uploaded.
|
/// Uploading B (referring to A) should fail immediately, because A was never uploaded.
|
||||||
#[case::dangling_pointer(&[&*DIRECTORY_B], true, None)]
|
#[case::dangling_pointer(&[&*DIRECTORY_B], true, None)]
|
||||||
/// Uploading a directory failing validation should fail immediately.
|
|
||||||
#[case::failing_validation(&[&*BROKEN_DIRECTORY], true, None)]
|
|
||||||
/// Uploading a directory which refers to another Directory with a wrong size should fail.
|
/// Uploading a directory which refers to another Directory with a wrong size should fail.
|
||||||
#[case::wrong_size_in_parent(&[&*DIRECTORY_A, &*BROKEN_PARENT_DIRECTORY], true, None)]
|
#[case::wrong_size_in_parent(&[&*DIRECTORY_A, &*BROKEN_PARENT_DIRECTORY], true, None)]
|
||||||
fn test_uploads(
|
fn test_uploads(
|
||||||
|
@ -366,8 +369,6 @@ mod tests {
|
||||||
#[case::unconnected_node(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_B], true, None)]
|
#[case::unconnected_node(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_B], true, None)]
|
||||||
/// Downloading B (specified as the root) but receiving A instead should fail immediately, because A has no connection to B (the root).
|
/// Downloading B (specified as the root) but receiving A instead should fail immediately, because A has no connection to B (the root).
|
||||||
#[case::dangling_pointer(&*DIRECTORY_B, &[&*DIRECTORY_A], true, None)]
|
#[case::dangling_pointer(&*DIRECTORY_B, &[&*DIRECTORY_A], true, None)]
|
||||||
/// Downloading a directory failing validation should fail immediately.
|
|
||||||
#[case::failing_validation(&*BROKEN_DIRECTORY, &[&*BROKEN_DIRECTORY], true, None)]
|
|
||||||
/// Downloading a directory which refers to another Directory with a wrong size should fail.
|
/// Downloading a directory which refers to another Directory with a wrong size should fail.
|
||||||
#[case::wrong_size_in_parent(&*BROKEN_PARENT_DIRECTORY, &[&*BROKEN_PARENT_DIRECTORY, &*DIRECTORY_A], true, None)]
|
#[case::wrong_size_in_parent(&*BROKEN_PARENT_DIRECTORY, &[&*BROKEN_PARENT_DIRECTORY, &*DIRECTORY_A], true, None)]
|
||||||
fn test_downloads(
|
fn test_downloads(
|
||||||
|
|
|
@ -18,6 +18,11 @@ use super::DirectoryService;
|
||||||
/// - `sled:///absolute/path/to/somewhere`
|
/// - `sled:///absolute/path/to/somewhere`
|
||||||
/// Uses sled, using a path on the disk for persistency. Can be only opened
|
/// Uses sled, using a path on the disk for persistency. Can be only opened
|
||||||
/// from one process at the same time.
|
/// from one process at the same time.
|
||||||
|
/// - `redb:`
|
||||||
|
/// Uses a in-memory redb implementation.
|
||||||
|
/// - `redb:///absolute/path/to/somewhere`
|
||||||
|
/// Uses redb, using a path on the disk for persistency. Can be only opened
|
||||||
|
/// from one process at the same time.
|
||||||
/// - `grpc+unix:///absolute/path/to/somewhere`
|
/// - `grpc+unix:///absolute/path/to/somewhere`
|
||||||
/// Connects to a local tvix-store gRPC service via Unix socket.
|
/// Connects to a local tvix-store gRPC service via Unix socket.
|
||||||
/// - `grpc+http://host:port`, `grpc+https://host:port`
|
/// - `grpc+http://host:port`, `grpc+https://host:port`
|
||||||
|
@ -52,6 +57,8 @@ mod tests {
|
||||||
lazy_static! {
|
lazy_static! {
|
||||||
static ref TMPDIR_SLED_1: TempDir = TempDir::new().unwrap();
|
static ref TMPDIR_SLED_1: TempDir = TempDir::new().unwrap();
|
||||||
static ref TMPDIR_SLED_2: TempDir = TempDir::new().unwrap();
|
static ref TMPDIR_SLED_2: TempDir = TempDir::new().unwrap();
|
||||||
|
static ref TMPDIR_REDB_1: TempDir = TempDir::new().unwrap();
|
||||||
|
static ref TMPDIR_REDB_2: TempDir = TempDir::new().unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[rstest]
|
#[rstest]
|
||||||
|
@ -75,6 +82,16 @@ mod tests {
|
||||||
#[case::memory_invalid_root_path("memory:///", false)]
|
#[case::memory_invalid_root_path("memory:///", false)]
|
||||||
/// This sets a memory url path to "/foo", which is invalid.
|
/// This sets a memory url path to "/foo", which is invalid.
|
||||||
#[case::memory_invalid_root_path_foo("memory:///foo", false)]
|
#[case::memory_invalid_root_path_foo("memory:///foo", false)]
|
||||||
|
/// This configures redb in temporary mode.
|
||||||
|
#[case::redb_valid_temporary("redb://", true)]
|
||||||
|
/// This configures redb with /, which should fail.
|
||||||
|
#[case::redb_invalid_root("redb:///", false)]
|
||||||
|
/// This configures redb with a host, not path, which should fail.
|
||||||
|
#[case::redb_invalid_host("redb://foo.example", false)]
|
||||||
|
/// This configures redb with a valid path, which should succeed.
|
||||||
|
#[case::redb_valid_path(&format!("redb://{}", &TMPDIR_REDB_1.path().join("foo").to_str().unwrap()), true)]
|
||||||
|
/// This configures redb with a host, and a valid path path, which should fail.
|
||||||
|
#[case::redb_invalid_host_with_valid_path(&format!("redb://foo.example{}", &TMPDIR_REDB_2.path().join("bar").to_str().unwrap()), false)]
|
||||||
/// Correct scheme to connect to a unix socket.
|
/// Correct scheme to connect to a unix socket.
|
||||||
#[case::grpc_valid_unix_socket("grpc+unix:///path/to/somewhere", true)]
|
#[case::grpc_valid_unix_socket("grpc+unix:///path/to/somewhere", true)]
|
||||||
/// Correct scheme for unix socket, but setting a host too, which is invalid.
|
/// Correct scheme for unix socket, but setting a host too, which is invalid.
|
||||||
|
|
|
@ -1,9 +1,9 @@
|
||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
|
|
||||||
use super::{DirectoryPutter, DirectoryService};
|
use super::{Directory, DirectoryPutter, DirectoryService};
|
||||||
use crate::composition::{CompositionContext, ServiceBuilder};
|
use crate::composition::{CompositionContext, ServiceBuilder};
|
||||||
use crate::proto::{self, get_directory_request::ByWhat};
|
use crate::proto::{self, get_directory_request::ByWhat};
|
||||||
use crate::{B3Digest, Error};
|
use crate::{B3Digest, DirectoryError, Error};
|
||||||
use async_stream::try_stream;
|
use async_stream::try_stream;
|
||||||
use futures::stream::BoxStream;
|
use futures::stream::BoxStream;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
@ -41,10 +41,7 @@ where
|
||||||
T::Future: Send,
|
T::Future: Send,
|
||||||
{
|
{
|
||||||
#[instrument(level = "trace", skip_all, fields(directory.digest = %digest))]
|
#[instrument(level = "trace", skip_all, fields(directory.digest = %digest))]
|
||||||
async fn get(
|
async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, crate::Error> {
|
||||||
&self,
|
|
||||||
digest: &B3Digest,
|
|
||||||
) -> Result<Option<crate::proto::Directory>, crate::Error> {
|
|
||||||
// Get a new handle to the gRPC client, and copy the digest.
|
// Get a new handle to the gRPC client, and copy the digest.
|
||||||
let mut grpc_client = self.grpc_client.clone();
|
let mut grpc_client = self.grpc_client.clone();
|
||||||
let digest_cpy = digest.clone();
|
let digest_cpy = digest.clone();
|
||||||
|
@ -72,15 +69,10 @@ where
|
||||||
"requested directory with digest {}, but got {}",
|
"requested directory with digest {}, but got {}",
|
||||||
digest, actual_digest
|
digest, actual_digest
|
||||||
)))
|
)))
|
||||||
} else if let Err(e) = directory.validate() {
|
|
||||||
// Validate the Directory itself is valid.
|
|
||||||
warn!("directory failed validation: {}", e.to_string());
|
|
||||||
Err(crate::Error::StorageError(format!(
|
|
||||||
"directory {} failed validation: {}",
|
|
||||||
digest, e,
|
|
||||||
)))
|
|
||||||
} else {
|
} else {
|
||||||
Ok(Some(directory))
|
Ok(Some(directory.try_into().map_err(|_| {
|
||||||
|
Error::StorageError("invalid root digest length in response".to_string())
|
||||||
|
})?))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok(None) => Ok(None),
|
Ok(None) => Ok(None),
|
||||||
|
@ -90,11 +82,11 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(level = "trace", skip_all, fields(directory.digest = %directory.digest()))]
|
#[instrument(level = "trace", skip_all, fields(directory.digest = %directory.digest()))]
|
||||||
async fn put(&self, directory: crate::proto::Directory) -> Result<B3Digest, crate::Error> {
|
async fn put(&self, directory: Directory) -> Result<B3Digest, crate::Error> {
|
||||||
let resp = self
|
let resp = self
|
||||||
.grpc_client
|
.grpc_client
|
||||||
.clone()
|
.clone()
|
||||||
.put(tokio_stream::once(directory))
|
.put(tokio_stream::once(proto::Directory::from(directory)))
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
match resp {
|
match resp {
|
||||||
|
@ -113,7 +105,7 @@ where
|
||||||
fn get_recursive(
|
fn get_recursive(
|
||||||
&self,
|
&self,
|
||||||
root_directory_digest: &B3Digest,
|
root_directory_digest: &B3Digest,
|
||||||
) -> BoxStream<'static, Result<proto::Directory, Error>> {
|
) -> BoxStream<'static, Result<Directory, Error>> {
|
||||||
let mut grpc_client = self.grpc_client.clone();
|
let mut grpc_client = self.grpc_client.clone();
|
||||||
let root_directory_digest = root_directory_digest.clone();
|
let root_directory_digest = root_directory_digest.clone();
|
||||||
|
|
||||||
|
@ -130,19 +122,11 @@ where
|
||||||
// The Directory digests we received so far
|
// The Directory digests we received so far
|
||||||
let mut received_directory_digests: HashSet<B3Digest> = HashSet::new();
|
let mut received_directory_digests: HashSet<B3Digest> = HashSet::new();
|
||||||
// The Directory digests we're still expecting to get sent.
|
// The Directory digests we're still expecting to get sent.
|
||||||
let mut expected_directory_digests: HashSet<B3Digest> = HashSet::from([root_directory_digest]);
|
let mut expected_directory_digests: HashSet<B3Digest> = HashSet::from([root_directory_digest.clone()]);
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
match stream.message().await {
|
match stream.message().await {
|
||||||
Ok(Some(directory)) => {
|
Ok(Some(directory)) => {
|
||||||
// validate the directory itself.
|
|
||||||
if let Err(e) = directory.validate() {
|
|
||||||
Err(crate::Error::StorageError(format!(
|
|
||||||
"directory {} failed validation: {}",
|
|
||||||
directory.digest(),
|
|
||||||
e,
|
|
||||||
)))?;
|
|
||||||
}
|
|
||||||
// validate we actually expected that directory, and move it from expected to received.
|
// validate we actually expected that directory, and move it from expected to received.
|
||||||
let directory_digest = directory.digest();
|
let directory_digest = directory.digest();
|
||||||
let was_expected = expected_directory_digests.remove(&directory_digest);
|
let was_expected = expected_directory_digests.remove(&directory_digest);
|
||||||
|
@ -168,14 +152,28 @@ where
|
||||||
.insert(child_directory_digest);
|
.insert(child_directory_digest);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let directory = directory.try_into()
|
||||||
|
.map_err(|e: DirectoryError| Error::StorageError(e.to_string()))?;
|
||||||
|
|
||||||
yield directory;
|
yield directory;
|
||||||
},
|
},
|
||||||
|
Ok(None) if expected_directory_digests.len() == 1 && expected_directory_digests.contains(&root_directory_digest) => {
|
||||||
|
// The root directory of the requested closure was not found, return an
|
||||||
|
// empty stream
|
||||||
|
return
|
||||||
|
}
|
||||||
Ok(None) => {
|
Ok(None) => {
|
||||||
// If we were still expecting something, that's an error.
|
// The stream has ended
|
||||||
if !expected_directory_digests.is_empty() {
|
let diff_len = expected_directory_digests
|
||||||
|
// Account for directories which have been referenced more than once,
|
||||||
|
// but only received once since they were deduplicated
|
||||||
|
.difference(&received_directory_digests)
|
||||||
|
.count();
|
||||||
|
// If this is not empty, then the closure is incomplete
|
||||||
|
if diff_len != 0 {
|
||||||
Err(crate::Error::StorageError(format!(
|
Err(crate::Error::StorageError(format!(
|
||||||
"still expected {} directories, but got premature end of stream",
|
"still expected {} directories, but got premature end of stream",
|
||||||
expected_directory_digests.len(),
|
diff_len
|
||||||
)))?
|
)))?
|
||||||
} else {
|
} else {
|
||||||
return
|
return
|
||||||
|
@ -268,11 +266,11 @@ pub struct GRPCPutter {
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
impl DirectoryPutter for GRPCPutter {
|
impl DirectoryPutter for GRPCPutter {
|
||||||
#[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest()), err)]
|
#[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest()), err)]
|
||||||
async fn put(&mut self, directory: proto::Directory) -> Result<(), crate::Error> {
|
async fn put(&mut self, directory: Directory) -> Result<(), crate::Error> {
|
||||||
match self.rq {
|
match self.rq {
|
||||||
// If we're not already closed, send the directory to directory_sender.
|
// If we're not already closed, send the directory to directory_sender.
|
||||||
Some((_, ref directory_sender)) => {
|
Some((_, ref directory_sender)) => {
|
||||||
if directory_sender.send(directory).is_err() {
|
if directory_sender.send(directory.into()).is_err() {
|
||||||
// If the channel has been prematurely closed, invoke close (so we can peek at the error code)
|
// If the channel has been prematurely closed, invoke close (so we can peek at the error code)
|
||||||
// That error code is much more helpful, because it
|
// That error code is much more helpful, because it
|
||||||
// contains the error message from the server.
|
// contains the error message from the server.
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
use crate::{proto, B3Digest, Error};
|
use crate::{B3Digest, Error};
|
||||||
use futures::stream::BoxStream;
|
use futures::stream::BoxStream;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
@ -7,8 +7,9 @@ use tonic::async_trait;
|
||||||
use tracing::{instrument, warn};
|
use tracing::{instrument, warn};
|
||||||
|
|
||||||
use super::utils::traverse_directory;
|
use super::utils::traverse_directory;
|
||||||
use super::{DirectoryPutter, DirectoryService, SimplePutter};
|
use super::{Directory, DirectoryPutter, DirectoryService, SimplePutter};
|
||||||
use crate::composition::{CompositionContext, ServiceBuilder};
|
use crate::composition::{CompositionContext, ServiceBuilder};
|
||||||
|
use crate::proto;
|
||||||
|
|
||||||
#[derive(Clone, Default)]
|
#[derive(Clone, Default)]
|
||||||
pub struct MemoryDirectoryService {
|
pub struct MemoryDirectoryService {
|
||||||
|
@ -18,7 +19,7 @@ pub struct MemoryDirectoryService {
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
impl DirectoryService for MemoryDirectoryService {
|
impl DirectoryService for MemoryDirectoryService {
|
||||||
#[instrument(skip(self, digest), fields(directory.digest = %digest))]
|
#[instrument(skip(self, digest), fields(directory.digest = %digest))]
|
||||||
async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> {
|
async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error> {
|
||||||
let db = self.db.read().await;
|
let db = self.db.read().await;
|
||||||
|
|
||||||
match db.get(digest) {
|
match db.get(digest) {
|
||||||
|
@ -37,35 +38,20 @@ impl DirectoryService for MemoryDirectoryService {
|
||||||
)));
|
)));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate the Directory itself is valid.
|
Ok(Some(directory.clone().try_into().map_err(|e| {
|
||||||
if let Err(e) = directory.validate() {
|
crate::Error::StorageError(format!("corrupted directory: {}", e))
|
||||||
warn!("directory failed validation: {}", e.to_string());
|
})?))
|
||||||
return Err(Error::StorageError(format!(
|
|
||||||
"directory {} failed validation: {}",
|
|
||||||
actual_digest, e,
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Some(directory.clone()))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(self, directory), fields(directory.digest = %directory.digest()))]
|
#[instrument(skip(self, directory), fields(directory.digest = %directory.digest()))]
|
||||||
async fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error> {
|
async fn put(&self, directory: Directory) -> Result<B3Digest, Error> {
|
||||||
let digest = directory.digest();
|
let digest = directory.digest();
|
||||||
|
|
||||||
// validate the directory itself.
|
|
||||||
if let Err(e) = directory.validate() {
|
|
||||||
return Err(Error::InvalidRequest(format!(
|
|
||||||
"directory {} failed validation: {}",
|
|
||||||
digest, e,
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
|
|
||||||
// store it
|
// store it
|
||||||
let mut db = self.db.write().await;
|
let mut db = self.db.write().await;
|
||||||
db.insert(digest.clone(), directory);
|
db.insert(digest.clone(), directory.into());
|
||||||
|
|
||||||
Ok(digest)
|
Ok(digest)
|
||||||
}
|
}
|
||||||
|
@ -74,7 +60,7 @@ impl DirectoryService for MemoryDirectoryService {
|
||||||
fn get_recursive(
|
fn get_recursive(
|
||||||
&self,
|
&self,
|
||||||
root_directory_digest: &B3Digest,
|
root_directory_digest: &B3Digest,
|
||||||
) -> BoxStream<'static, Result<proto::Directory, Error>> {
|
) -> BoxStream<'static, Result<Directory, Error>> {
|
||||||
traverse_directory(self.clone(), root_directory_digest)
|
traverse_directory(self.clone(), root_directory_digest)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
use crate::composition::{Registry, ServiceBuilder};
|
use crate::composition::{Registry, ServiceBuilder};
|
||||||
use crate::{proto, B3Digest, Error};
|
use crate::{B3Digest, Directory, Error};
|
||||||
|
|
||||||
use futures::stream::BoxStream;
|
use futures::stream::BoxStream;
|
||||||
use tonic::async_trait;
|
use tonic::async_trait;
|
||||||
|
@ -10,6 +10,7 @@ mod grpc;
|
||||||
mod memory;
|
mod memory;
|
||||||
mod object_store;
|
mod object_store;
|
||||||
mod order_validator;
|
mod order_validator;
|
||||||
|
mod redb;
|
||||||
mod simple_putter;
|
mod simple_putter;
|
||||||
mod sled;
|
mod sled;
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
@ -17,13 +18,14 @@ pub mod tests;
|
||||||
mod traverse;
|
mod traverse;
|
||||||
mod utils;
|
mod utils;
|
||||||
|
|
||||||
pub use self::combinators::{Cache, CacheConfig};
|
pub use self::combinators::{Cache, CacheConfig, Router, RouterConfig};
|
||||||
pub use self::directory_graph::DirectoryGraph;
|
pub use self::directory_graph::{DirectoryGraph, ValidatedDirectoryGraph};
|
||||||
pub use self::from_addr::from_addr;
|
pub use self::from_addr::from_addr;
|
||||||
pub use self::grpc::{GRPCDirectoryService, GRPCDirectoryServiceConfig};
|
pub use self::grpc::{GRPCDirectoryService, GRPCDirectoryServiceConfig};
|
||||||
pub use self::memory::{MemoryDirectoryService, MemoryDirectoryServiceConfig};
|
pub use self::memory::{MemoryDirectoryService, MemoryDirectoryServiceConfig};
|
||||||
pub use self::object_store::{ObjectStoreDirectoryService, ObjectStoreDirectoryServiceConfig};
|
pub use self::object_store::{ObjectStoreDirectoryService, ObjectStoreDirectoryServiceConfig};
|
||||||
pub use self::order_validator::{LeavesToRootValidator, OrderValidator, RootToLeavesValidator};
|
pub use self::order_validator::{LeavesToRootValidator, OrderValidator, RootToLeavesValidator};
|
||||||
|
pub use self::redb::{RedbDirectoryService, RedbDirectoryServiceConfig};
|
||||||
pub use self::simple_putter::SimplePutter;
|
pub use self::simple_putter::SimplePutter;
|
||||||
pub use self::sled::{SledDirectoryService, SledDirectoryServiceConfig};
|
pub use self::sled::{SledDirectoryService, SledDirectoryServiceConfig};
|
||||||
pub use self::traverse::descend_to;
|
pub use self::traverse::descend_to;
|
||||||
|
@ -36,7 +38,7 @@ mod bigtable;
|
||||||
pub use self::bigtable::{BigtableDirectoryService, BigtableParameters};
|
pub use self::bigtable::{BigtableDirectoryService, BigtableParameters};
|
||||||
|
|
||||||
/// The base trait all Directory services need to implement.
|
/// The base trait all Directory services need to implement.
|
||||||
/// This is a simple get and put of [crate::proto::Directory], returning their
|
/// This is a simple get and put of [Directory], returning their
|
||||||
/// digest.
|
/// digest.
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
pub trait DirectoryService: Send + Sync {
|
pub trait DirectoryService: Send + Sync {
|
||||||
|
@ -48,14 +50,14 @@ pub trait DirectoryService: Send + Sync {
|
||||||
/// Directory digests that are at the "root", aka the last element that's
|
/// Directory digests that are at the "root", aka the last element that's
|
||||||
/// sent to a DirectoryPutter. This makes sense for implementations bundling
|
/// sent to a DirectoryPutter. This makes sense for implementations bundling
|
||||||
/// closures of directories together in batches.
|
/// closures of directories together in batches.
|
||||||
async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error>;
|
async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error>;
|
||||||
/// Uploads a single Directory message, and returns the calculated
|
/// Uploads a single Directory message, and returns the calculated
|
||||||
/// digest, or an error. An error *must* also be returned if the message is
|
/// digest, or an error. An error *must* also be returned if the message is
|
||||||
/// not valid.
|
/// not valid.
|
||||||
async fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error>;
|
async fn put(&self, directory: Directory) -> Result<B3Digest, Error>;
|
||||||
|
|
||||||
/// Looks up a closure of [proto::Directory].
|
/// Looks up a closure of [Directory].
|
||||||
/// Ideally this would be a `impl Stream<Item = Result<proto::Directory, Error>>`,
|
/// Ideally this would be a `impl Stream<Item = Result<Directory, Error>>`,
|
||||||
/// and we'd be able to add a default implementation for it here, but
|
/// and we'd be able to add a default implementation for it here, but
|
||||||
/// we can't have that yet.
|
/// we can't have that yet.
|
||||||
///
|
///
|
||||||
|
@ -68,12 +70,14 @@ pub trait DirectoryService: Send + Sync {
|
||||||
/// Directories are sent in an order from the root to the leaves, so that
|
/// Directories are sent in an order from the root to the leaves, so that
|
||||||
/// the receiving side can validate each message to be a connected to the root
|
/// the receiving side can validate each message to be a connected to the root
|
||||||
/// that has initially been requested.
|
/// that has initially been requested.
|
||||||
|
///
|
||||||
|
/// In case the directory can not be found, this should return an empty stream.
|
||||||
fn get_recursive(
|
fn get_recursive(
|
||||||
&self,
|
&self,
|
||||||
root_directory_digest: &B3Digest,
|
root_directory_digest: &B3Digest,
|
||||||
) -> BoxStream<'static, Result<proto::Directory, Error>>;
|
) -> BoxStream<'static, Result<Directory, Error>>;
|
||||||
|
|
||||||
/// Allows persisting a closure of [proto::Directory], which is a graph of
|
/// Allows persisting a closure of [Directory], which is a graph of
|
||||||
/// connected Directory messages.
|
/// connected Directory messages.
|
||||||
fn put_multiple_start(&self) -> Box<dyn DirectoryPutter>;
|
fn put_multiple_start(&self) -> Box<dyn DirectoryPutter>;
|
||||||
}
|
}
|
||||||
|
@ -83,18 +87,18 @@ impl<A> DirectoryService for A
|
||||||
where
|
where
|
||||||
A: AsRef<dyn DirectoryService> + Send + Sync,
|
A: AsRef<dyn DirectoryService> + Send + Sync,
|
||||||
{
|
{
|
||||||
async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> {
|
async fn get(&self, digest: &B3Digest) -> Result<Option<Directory>, Error> {
|
||||||
self.as_ref().get(digest).await
|
self.as_ref().get(digest).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn put(&self, directory: proto::Directory) -> Result<B3Digest, Error> {
|
async fn put(&self, directory: Directory) -> Result<B3Digest, Error> {
|
||||||
self.as_ref().put(directory).await
|
self.as_ref().put(directory).await
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_recursive(
|
fn get_recursive(
|
||||||
&self,
|
&self,
|
||||||
root_directory_digest: &B3Digest,
|
root_directory_digest: &B3Digest,
|
||||||
) -> BoxStream<'static, Result<proto::Directory, Error>> {
|
) -> BoxStream<'static, Result<Directory, Error>> {
|
||||||
self.as_ref().get_recursive(root_directory_digest)
|
self.as_ref().get_recursive(root_directory_digest)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -103,7 +107,7 @@ where
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Provides a handle to put a closure of connected [proto::Directory] elements.
|
/// Provides a handle to put a closure of connected [Directory] elements.
|
||||||
///
|
///
|
||||||
/// The consumer can periodically call [DirectoryPutter::put], starting from the
|
/// The consumer can periodically call [DirectoryPutter::put], starting from the
|
||||||
/// leaves. Once the root is reached, [DirectoryPutter::close] can be called to
|
/// leaves. Once the root is reached, [DirectoryPutter::close] can be called to
|
||||||
|
@ -115,12 +119,12 @@ where
|
||||||
/// but a single file or symlink.
|
/// but a single file or symlink.
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
pub trait DirectoryPutter: Send {
|
pub trait DirectoryPutter: Send {
|
||||||
/// Put a individual [proto::Directory] into the store.
|
/// Put a individual [Directory] into the store.
|
||||||
/// Error semantics and behaviour is up to the specific implementation of
|
/// Error semantics and behaviour is up to the specific implementation of
|
||||||
/// this trait.
|
/// this trait.
|
||||||
/// Due to bursting, the returned error might refer to an object previously
|
/// Due to bursting, the returned error might refer to an object previously
|
||||||
/// sent via `put`.
|
/// sent via `put`.
|
||||||
async fn put(&mut self, directory: proto::Directory) -> Result<(), Error>;
|
async fn put(&mut self, directory: Directory) -> Result<(), Error>;
|
||||||
|
|
||||||
/// Close the stream, and wait for any errors.
|
/// Close the stream, and wait for any errors.
|
||||||
/// If there's been any invalid Directory message uploaded, and error *must*
|
/// If there's been any invalid Directory message uploaded, and error *must*
|
||||||
|
@ -133,8 +137,10 @@ pub(crate) fn register_directory_services(reg: &mut Registry) {
|
||||||
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::ObjectStoreDirectoryServiceConfig>("objectstore");
|
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::ObjectStoreDirectoryServiceConfig>("objectstore");
|
||||||
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::MemoryDirectoryServiceConfig>("memory");
|
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::MemoryDirectoryServiceConfig>("memory");
|
||||||
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::CacheConfig>("cache");
|
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::CacheConfig>("cache");
|
||||||
|
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::RouterConfig>("router");
|
||||||
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::GRPCDirectoryServiceConfig>("grpc");
|
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::GRPCDirectoryServiceConfig>("grpc");
|
||||||
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::SledDirectoryServiceConfig>("sled");
|
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::SledDirectoryServiceConfig>("sled");
|
||||||
|
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::RedbDirectoryServiceConfig>("redb");
|
||||||
#[cfg(feature = "cloud")]
|
#[cfg(feature = "cloud")]
|
||||||
{
|
{
|
||||||
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::BigtableParameters>("bigtable");
|
reg.register::<Box<dyn ServiceBuilder<Output = dyn DirectoryService>>, super::directoryservice::BigtableParameters>("bigtable");
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue