diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 268481e..d4f25cd 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -6,11 +6,19 @@ env: CARGO_TERM_COLOR: always jobs: - test: + check_fmt: runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@nightly + with: + components: rustfmt + - uses: clechasseur/rs-fmt-check@v2 + test: + runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Get Dependencies run: | sudo apt-get update diff --git a/Cargo.lock b/Cargo.lock index a9a1f2e..07d599a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -85,21 +85,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "alloc-no-stdlib" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" - -[[package]] -name = "alloc-stdlib" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" -dependencies = [ - "alloc-no-stdlib", -] - [[package]] name = "allocator-api2" version = "0.2.16" @@ -202,22 +187,6 @@ dependencies = [ "wait-timeout", ] -[[package]] -name = "async-compression" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c90a406b4495d129f00461241616194cb8a032c8d1c53c657f0961d5f8e0498" -dependencies = [ - "brotli", - "flate2", - "futures-core", - "memchr", - "pin-project-lite", - "tokio", - "zstd", - "zstd-safe", -] - [[package]] name = "async-trait" version = "0.1.79" @@ -244,7 +213,7 @@ version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f77d243921b0979fbbd728dd2d5162e68ac8252976797c24eb5b3a6af9090dc" dependencies = [ - "http 0.2.12", + "http", "log", "native-tls", "serde", @@ -295,74 +264,6 @@ dependencies = [ "thiserror", ] -[[package]] 
-name = "axum" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" -dependencies = [ - "async-trait", - "axum-core", - "axum-macros", - "bytes", - "futures-util", - "http 1.1.0", - "http-body 1.0.0", - "http-body-util", - "hyper 1.3.1", - "hyper-util", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "serde_json", - "serde_path_to_error", - "serde_urlencoded", - "sync_wrapper 1.0.1", - "tokio", - "tower", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "axum-core" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http 1.1.0", - "http-body 1.0.0", - "http-body-util", - "mime", - "pin-project-lite", - "rustversion", - "sync_wrapper 0.1.2", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "axum-macros" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00c055ee2d014ae5981ce1016374e8213682aa14d9bf40e48ab48b5f3ef20eaa" -dependencies = [ - "heck 0.4.1", - "proc-macro2", - "quote", - "syn 2.0.58", -] - [[package]] name = "backtrace" version = "0.3.71" @@ -467,8 +368,8 @@ dependencies = [ "futures-core", "futures-util", "hex", - "http 0.2.12", - "hyper 0.14.28", + "http", + "hyper", "hyperlocal", "log", "pin-project-lite", @@ -495,27 +396,6 @@ dependencies = [ "serde_with", ] -[[package]] -name = "brotli" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", - "brotli-decompressor", -] - -[[package]] -name = "brotli-decompressor" -version = "4.0.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6221fe77a248b9117d431ad93761222e1cf8ff282d9d1d5d9f53d6299a1cf76" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", -] - [[package]] name = "bstr" version = "1.9.1" @@ -550,10 +430,6 @@ name = "cc" version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd97381a8cc6493395a5afc4c691c1084b3768db713b73aa215217aa245d153" -dependencies = [ - "jobserver", - "libc", -] [[package]] name = "cfg-if" @@ -569,10 +445,8 @@ checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", - "js-sys", "num-traits", "serde", - "wasm-bindgen", "windows-targets 0.52.4", ] @@ -729,15 +603,6 @@ version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" -[[package]] -name = "crc32fast" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" -dependencies = [ - "cfg-if", -] - [[package]] name = "crossbeam-queue" version = "0.3.11" @@ -847,9 +712,7 @@ name = "docker2fl" version = "0.1.0" dependencies = [ "anyhow", - "axum", "bollard", - "chrono", "clap", "futures-util", "git-version", @@ -863,8 +726,6 @@ dependencies = [ "tokio", "tokio-async-drop", "toml", - "tower", - "tower-http", "uuid", ] @@ -937,16 +798,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" -[[package]] -name = "flate2" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - [[package]] name = "flume" version = "0.11.0" @@ -1162,7 +1013,7 @@ 
dependencies = [ "futures-core", "futures-sink", "futures-util", - "http 0.2.12", + "http", "indexmap 2.2.6", "slab", "tokio", @@ -1278,17 +1129,6 @@ dependencies = [ "itoa", ] -[[package]] -name = "http" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - [[package]] name = "http-body" version = "0.4.6" @@ -1296,39 +1136,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http 0.2.12", - "pin-project-lite", -] - -[[package]] -name = "http-body" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" -dependencies = [ - "bytes", - "http 1.1.0", -] - -[[package]] -name = "http-body-util" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" -dependencies = [ - "bytes", - "futures-core", - "http 1.1.0", - "http-body 1.0.0", + "http", "pin-project-lite", ] -[[package]] -name = "http-range-header" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08a397c49fec283e3d6211adbe480be95aae5f304cfb923e9970e08956d5168a" - [[package]] name = "httparse" version = "1.8.0" @@ -1352,8 +1163,8 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http 0.2.12", - "http-body 0.4.6", + "http", + "http-body", "httparse", "httpdate", "itoa", @@ -1365,25 +1176,6 @@ dependencies = [ "want", ] -[[package]] -name = "hyper" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" -dependencies = [ - "bytes", - "futures-channel", - 
"futures-util", - "http 1.1.0", - "http-body 1.0.0", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "smallvec", - "tokio", -] - [[package]] name = "hyper-tls" version = "0.5.0" @@ -1391,28 +1183,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper 0.14.28", + "hyper", "native-tls", "tokio", "tokio-native-tls", ] -[[package]] -name = "hyper-util" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" -dependencies = [ - "bytes", - "futures-util", - "http 1.1.0", - "http-body 1.0.0", - "hyper 1.3.1", - "pin-project-lite", - "socket2", - "tokio", -] - [[package]] name = "hyperlocal" version = "0.8.0" @@ -1421,7 +1197,7 @@ checksum = "0fafdf7b2b2de7c9784f76e02c0935e65a8117ec3b768644379983ab333ac98c" dependencies = [ "futures-util", "hex", - "hyper 0.14.28", + "hyper", "pin-project", "tokio", ] @@ -1522,15 +1298,6 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" -[[package]] -name = "jobserver" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" -dependencies = [ - "libc", -] - [[package]] name = "js-sys" version = "0.3.69" @@ -1603,12 +1370,6 @@ dependencies = [ "hashbrown 0.12.3", ] -[[package]] -name = "matchit" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" - [[package]] name = "maybe-async" version = "0.2.10" @@ -1657,16 +1418,6 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" -[[package]] -name = "mime_guess" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" -dependencies = [ - "mime", - "unicase", -] - [[package]] name = "minidom" version = "0.15.2" @@ -2247,9 +1998,9 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.28", + "http", + "http-body", + "hyper", "hyper-tls", "ipnet", "js-sys", @@ -2263,7 +2014,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 0.1.2", + "sync_wrapper", "system-configuration", "tokio", "tokio-native-tls", @@ -2291,7 +2042,6 @@ dependencies = [ "futures", "git-version", "hex", - "lazy_static", "libc", "log", "lru", @@ -2374,8 +2124,8 @@ dependencies = [ "futures", "hex", "hmac", - "http 0.2.12", - "hyper 0.14.28", + "http", + "hyper", "hyper-tls", "log", "maybe-async", @@ -2445,12 +2195,6 @@ dependencies = [ "untrusted", ] -[[package]] -name = "rustversion" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" - [[package]] name = "rxml" version = "0.9.1" @@ -2553,16 +2297,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_path_to_error" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" -dependencies = [ - "itoa", - "serde", -] - [[package]] name = "serde_repr" version = "0.1.19" @@ -2988,12 +2722,6 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" -[[package]] -name = "sync_wrapper" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" - [[package]] name = "synstructure" version = "0.12.6" @@ -3077,9 +2805,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.34" +version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", "itoa", @@ -3100,9 +2828,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" dependencies = [ "num-conv", "time-core", @@ -3212,57 +2940,6 @@ dependencies = [ "serde", ] -[[package]] -name = "tower" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" -dependencies = [ - "futures-core", - "futures-util", - "pin-project", - "pin-project-lite", - "tokio", - "tokio-util", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower-http" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" -dependencies = [ - "async-compression", - "base64 0.21.7", - "bitflags 2.5.0", - "bytes", - "futures-core", - "futures-util", - "http 1.1.0", - "http-body 1.0.0", - "http-body-util", - "http-range-header", - "httpdate", - "mime", - "mime_guess", - "percent-encoding", - "pin-project-lite", - "tokio", - "tokio-util", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower-layer" -version = "0.3.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" - [[package]] name = "tower-service" version = "0.3.2" @@ -3313,15 +2990,6 @@ version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" -[[package]] -name = "unicase" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] - [[package]] name = "unicode-bidi" version = "0.3.15" @@ -3535,9 +3203,9 @@ checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" [[package]] name = "which" -version = "6.0.1" +version = "6.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8211e4f58a2b2805adfbefbc07bab82958fc91e3836339b1ab7ae32465dce0d7" +checksum = "3d9c5ed668ee1f17edb3b627225343d210006a90bb1e3745ce1f30b1fb115075" dependencies = [ "either", "home", @@ -3791,31 +3459,3 @@ name = "zeroize" version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" - -[[package]] -name = "zstd" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d789b1514203a1120ad2429eae43a7bd32b90976a7bb8a05f7ec02fa88cc23a" -dependencies = [ - "zstd-safe", -] - -[[package]] -name = "zstd-safe" -version = "7.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd99b45c6bc03a018c8b8a86025678c87e55526064e38f9df301989dce7ec0a" -dependencies = [ - "zstd-sys", -] - -[[package]] -name = "zstd-sys" -version = "2.0.10+zstd.1.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c253a4914af5bafc8fa8c86ee400827e83cf6ec01195ec1f1ed8441bf00d65aa" -dependencies = [ - "cc", - 
"pkg-config", -] diff --git a/Cargo.toml b/Cargo.toml index 09c3f09..80d1433 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,7 +3,7 @@ resolver = "2" members = [ "rfs", - "docker2fl" + "docker2fl" ] [profile.release] diff --git a/docker2fl/Cargo.toml b/docker2fl/Cargo.toml index e70f969..cc2ed87 100644 --- a/docker2fl/Cargo.toml +++ b/docker2fl/Cargo.toml @@ -31,8 +31,4 @@ toml = "0.4.2" clap = { version = "4.2", features = ["derive"] } serde = { version = "1.0.159" , features = ["derive"] } -axum = { version = "0.7.3" , features = ["macros"] } -chrono = { version = "0.4", features = ["serde"] } -tower = { version = "0.4", features = ["util", "timeout", "load-shed", "limit"] } -tower-http = { version = "0.5.0", features = ["fs", "cors", "add-extension", "auth", "compression-full", "trace", "limit"] } tokio-async-drop = "0.1.0" diff --git a/docker2fl/src/main.rs b/docker2fl/src/main.rs index 39a4daa..14ad8a4 100644 --- a/docker2fl/src/main.rs +++ b/docker2fl/src/main.rs @@ -84,7 +84,7 @@ async fn main() -> Result<()> { }); let fl_name = docker_image.replace([':', '/'], "-") + ".fl"; - let meta = fungi::Writer::new(&fl_name).await?; + let meta = fungi::Writer::new(&fl_name, true).await?; let store = parse_router(&opts.store).await?; let res = docker2fl::convert(meta, store, &docker_image, credentials).await; diff --git a/docs/README.md b/docs/README.md index 55ce36e..a3447ad 100644 --- a/docs/README.md +++ b/docs/README.md @@ -64,10 +64,10 @@ the `block` table is used to associate data file blocks with files. An `id` fiel the route table holds routing information for the blobs. It basically describe where to find `blobs` with certain `ids`. The routing is done as following: -> Note routing table is loaded one time when `rfs` is started and +> Note routing table is loaded one time when `rfs` is started. 
- We use the first byte of the blob `id` as the `route key` -- The `route key`` is then consulted against the routing table +- The `route key` is then consulted against the routing table - While building an `FL` all matching stores are updated with the new blob. This is how the system does replication - On `getting` an object, the list of matching routes are tried in random order the first one to return a value is used - Note that same range and overlapping ranges are allowed, this is how shards and replications are done. diff --git a/rfs/Cargo.toml b/rfs/Cargo.toml index a741cd0..8081720 100644 --- a/rfs/Cargo.toml +++ b/rfs/Cargo.toml @@ -27,7 +27,7 @@ path = "src/lib.rs" [dependencies] anyhow = "1.0.44" -time = "0.3.3" +time = "0.3" sqlx = { version = "0.7.4", features = [ "runtime-tokio-rustls", "sqlite" ] } tokio = { version = "1", features = [ "rt", "rt-multi-thread", "macros"] } libc = "0.2" @@ -44,7 +44,6 @@ url = "2.3.1" blake2b_simd = "1" aes-gcm = "0.10" hex = "0.4" -lazy_static = "1.4" rand = "0.8" # next are only needed for the binarys clap = { version = "4.2", features = ["derive"], optional = true} @@ -56,6 +55,7 @@ rust-s3 = "0.34.0-rc3" openssl = { version = "0.10", features = ["vendored"] } regex = "1.9.6" which = "6.0" +reqwest = "0.11" [dependencies.polyfuse] branch = "master" diff --git a/rfs/README.md b/rfs/README.md index 45f529e..8210755 100644 --- a/rfs/README.md +++ b/rfs/README.md @@ -22,7 +22,7 @@ to be able to use from anywhere on your system. ## Stores -A store in where the actual data lives. A store can be as simple as a `directory` on your local machine in that case the files on the `fl` are only 'accessible' on your local machine. A store can also be a `zdb` running remotely or a cluster of `zdb`. Right now only `dir`, `zdb` and `s3` stores are supported but this will change in the future to support even more stores. +A store in where the actual data lives. 
A store can be as simple as a `directory` on your local machine in that case the files on the `fl` are only 'accessible' on your local machine. A store can also be a `zdb` running remotely or a cluster of `zdb`. Right now only `dir`, `http`, `zdb` and `s3` stores are supported but this will change in the future to support even more stores. ## Usage @@ -41,6 +41,8 @@ The simplest form of `` is a `url`. the store `url` defines the sto - `s3`: aws-s3 is used for storing and retrieving large amounts of data (blobs) in buckets (directories). An example `s3://:@:/` `region` is an optional param for s3 stores, if you want to provide one you can add it as a query to the url `?region=` +- `http`: http is a store mostly used for wrapping a dir store to fetch data through http requests. It does not support uploading, just fetching the data. + It can be set in the FL file as the store to fetch the data with `rfs config`. Example: `http://localhost:9000/store` (https works too). `` can also be of the form `-=` where `start` and `end` are a hex bytes for partitioning of blob keys. rfs will then store a set of blobs on the defined store if they blob key falls in the `[start:end]` range (inclusive). @@ -48,7 +50,7 @@ If the `start-end` range is not provided a `00-FF` range is assume basically a c This is only useful because `rfs` can accept multiple stores on the command line with different and/or overlapping ranges. -For example `-s 00-80=dir:///tmp/store0 -s 81-ff=dir://tmp/store1` means all keys that has prefix byte in range `[00-80]` will be written to /tmp/store0 all other keys `00-ff` will be written to store1. +For example `-s 00-80=dir:///tmp/store0 -s 81-ff=dir://tmp/store1` means all keys that has prefix byte in range `[00-80]` will be written to /tmp/store0 all other keys `[81-ff]` will be written to store1. The same range can appear multiple times, which means the blob will be replicated to all the stores that matches its key prefix. 
diff --git a/rfs/src/config.rs b/rfs/src/config.rs new file mode 100644 index 0000000..62eaf4d --- /dev/null +++ b/rfs/src/config.rs @@ -0,0 +1,72 @@ +use crate::{ + fungi::{meta::Tag, Reader, Result, Writer}, + store::{self, Store}, +}; + +pub async fn tag_list(reader: Reader) -> Result<()> { + let tags = reader.tags().await?; + if !tags.is_empty() { + println!("tags:"); + } + for (key, value) in tags { + println!("\t{}={}", key, value); + } + Ok(()) +} + +pub async fn tag_add(writer: Writer, tags: Vec<(String, String)>) -> Result<()> { + for (key, value) in tags { + writer.tag(Tag::Custom(key.as_str()), value).await?; + } + Ok(()) +} + +pub async fn tag_delete(writer: Writer, keys: Vec, all: bool) -> Result<()> { + if all { + writer.delete_tags().await?; + return Ok(()); + } + for key in keys { + writer.delete_tag(Tag::Custom(key.as_str())).await?; + } + Ok(()) +} + +pub async fn store_list(reader: Reader) -> Result<()> { + let routes = reader.routes().await?; + if !routes.is_empty() { + println!("routes:") + } + for route in routes { + println!( + "\trange:[{}-{}] store:{}", + route.start, route.end, route.url + ); + } + Ok(()) +} + +pub async fn store_add(writer: Writer, stores: Vec) -> Result<()> { + let store = store::parse_router(stores.as_slice()).await?; + for route in store.routes() { + writer + .route( + route.start.unwrap_or(u8::MIN), + route.end.unwrap_or(u8::MAX), + route.url, + ) + .await?; + } + Ok(()) +} + +pub async fn store_delete(writer: Writer, stores: Vec, all: bool) -> Result<()> { + if all { + writer.delete_routes().await?; + return Ok(()); + } + for store in stores { + writer.delete_route(store).await?; + } + Ok(()) +} diff --git a/rfs/src/fungi/meta.rs b/rfs/src/fungi/meta.rs index 8e13789..bc251cf 100644 --- a/rfs/src/fungi/meta.rs +++ b/rfs/src/fungi/meta.rs @@ -277,6 +277,14 @@ impl Reader { Ok(value.map(|v| v.0)) } + pub async fn tags(&self) -> Result> { + let tags: Vec<(String, String)> = sqlx::query_as("select key, value from tag;") 
+ .fetch_all(&self.pool) + .await?; + + Ok(tags) + } + pub async fn routes(&self) -> Result> { let results: Vec = sqlx::query_as("select start, end, url from route;") .fetch_all(&self.pool) @@ -340,8 +348,10 @@ pub struct Writer { impl Writer { /// create a new mkondo writer - pub async fn new>(path: P) -> Result { - let _ = tokio::fs::remove_file(&path).await; + pub async fn new>(path: P, remove: bool) -> Result { + if remove { + let _ = tokio::fs::remove_file(&path).await; + } let opts = SqliteConnectOptions::new() .create_if_missing(true) @@ -409,13 +419,39 @@ impl Writer { } pub async fn tag>(&self, tag: Tag<'_>, value: V) -> Result<()> { - sqlx::query("insert into tag (key, value) values (?, ?);") + sqlx::query("insert or replace into tag (key, value) values (?, ?);") .bind(tag.key()) .bind(value.as_ref()) .execute(&self.pool) .await?; Ok(()) } + pub async fn delete_tag(&self, tag: Tag<'_>) -> Result<()> { + sqlx::query("delete from tag where key = ?;") + .bind(tag.key()) + .execute(&self.pool) + .await?; + Ok(()) + } + + pub async fn delete_route>(&self, url: U) -> Result<()> { + sqlx::query("delete from route where url = ?;") + .bind(url.as_ref()) + .execute(&self.pool) + .await?; + Ok(()) + } + + pub async fn delete_tags(&self) -> Result<()> { + sqlx::query("delete from tag;").execute(&self.pool).await?; + Ok(()) + } + pub async fn delete_routes(&self) -> Result<()> { + sqlx::query("delete from route;") + .execute(&self.pool) + .await?; + Ok(()) + } } #[cfg(test)] @@ -425,7 +461,7 @@ mod test { #[tokio::test] async fn test_inode() { const PATH: &str = "/tmp/inode.fl"; - let meta = Writer::new(PATH).await.unwrap(); + let meta = Writer::new(PATH, true).await.unwrap(); let ino = meta .inode(Inode { @@ -449,7 +485,7 @@ mod test { #[tokio::test] async fn test_get_children() { const PATH: &str = "/tmp/children.fl"; - let meta = Writer::new(PATH).await.unwrap(); + let meta = Writer::new(PATH, true).await.unwrap(); let ino = meta .inode(Inode { @@ -486,7 +522,7 @@ 
mod test { #[tokio::test] async fn test_get_block() { const PATH: &str = "/tmp/block.fl"; - let meta = Writer::new(PATH).await.unwrap(); + let meta = Writer::new(PATH, true).await.unwrap(); let hash: [u8; ID_LEN] = [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, @@ -509,7 +545,7 @@ mod test { #[tokio::test] async fn test_get_tag() { const PATH: &str = "/tmp/tag.fl"; - let meta = Writer::new(PATH).await.unwrap(); + let meta = Writer::new(PATH, true).await.unwrap(); meta.tag(Tag::Version, "0.1").await.unwrap(); meta.tag(Tag::Author, "azmy").await.unwrap(); meta.tag(Tag::Custom("custom"), "value").await.unwrap(); @@ -535,7 +571,7 @@ mod test { #[tokio::test] async fn test_get_routes() { const PATH: &str = "/tmp/route.fl"; - let meta = Writer::new(PATH).await.unwrap(); + let meta = Writer::new(PATH, true).await.unwrap(); meta.route(0, 128, "zdb://hub1.grid.tf").await.unwrap(); meta.route(129, 255, "zdb://hub2.grid.tf").await.unwrap(); @@ -560,7 +596,7 @@ mod test { #[tokio::test] async fn test_walk() { const PATH: &str = "/tmp/walk.fl"; - let meta = Writer::new(PATH).await.unwrap(); + let meta = Writer::new(PATH, true).await.unwrap(); let parent = meta .inode(Inode { diff --git a/rfs/src/lib.rs b/rfs/src/lib.rs index a1f855c..77f8e30 100644 --- a/rfs/src/lib.rs +++ b/rfs/src/lib.rs @@ -9,6 +9,7 @@ mod pack; pub use pack::pack; mod unpack; pub use unpack::unpack; +pub mod config; const PARALLEL_UPLOAD: usize = 10; // number of files we can upload in parallel @@ -53,7 +54,7 @@ mod test { } println!("file generation complete"); - let writer = meta::Writer::new(root.join("meta.fl")).await.unwrap(); + let writer = meta::Writer::new(root.join("meta.fl"), true).await.unwrap(); // while we at it we can already create 2 stores and create a router store on top // of that. 
diff --git a/rfs/src/main.rs b/rfs/src/main.rs index 647191a..9389f16 100644 --- a/rfs/src/main.rs +++ b/rfs/src/main.rs @@ -2,14 +2,15 @@ extern crate log; use nix::sys::signal::{self, Signal}; use nix::unistd::Pid; +use std::error::Error; use std::io::Read; use anyhow::{Context, Result}; use clap::{ArgAction, Args, Parser, Subcommand}; -use rfs::cache; use rfs::fungi; use rfs::store::{self, Router, Stores}; +use rfs::{cache, config}; mod fs; /// mount flists @@ -32,6 +33,8 @@ enum Commands { Pack(PackOptions), /// unpack (downloads) content of an FL the provided location Unpack(UnpackOptions), + /// list or modify FL metadata and stores + Config(ConfigOptions), } #[derive(Args, Debug)] @@ -90,10 +93,91 @@ struct UnpackOptions { #[clap(short, long, default_value_t = false)] preserve_ownership: bool, - /// target directory to upload + /// target directory for unpacking target: String, } +#[derive(Args, Debug)] +struct ConfigOptions { + /// path to metadata file (flist) + #[clap(short, long)] + meta: String, + + #[command(subcommand)] + command: ConfigCommands, +} + +#[derive(Subcommand, Debug)] +enum ConfigCommands { + #[command(subcommand)] + Tag(TagOperation), + #[command(subcommand)] + Store(StoreOperation), +} + +#[derive(Subcommand, Debug)] +enum TagOperation { + List, + Add(TagAddOptions), + Delete(TagDeleteOptions), +} + +#[derive(Args, Debug)] +struct TagAddOptions { + /// pair of key-values separated with '=' + #[clap(short, long, value_parser = parse_key_val::, number_of_values = 1)] + tag: Vec<(String, String)>, +} + +#[derive(Args, Debug)] +struct TagDeleteOptions { + /// key to remove + #[clap(short, long, action=ArgAction::Append)] + key: Vec, + /// remove all tags + #[clap(short, long, default_value_t = false)] + all: bool, +} + +#[derive(Subcommand, Debug)] +enum StoreOperation { + List, + Add(StoreAddOptions), + Delete(StoreDeleteOptions), +} + +#[derive(Args, Debug)] +struct StoreAddOptions { + /// store url in the format [xx-xx=]. 
the range xx-xx is optional and used for + /// sharding. the URL is per store type, please check docs for more information + #[clap(short, long, action=ArgAction::Append)] + store: Vec, +} + +#[derive(Args, Debug)] +struct StoreDeleteOptions { + /// store to remove + #[clap(short, long, action=ArgAction::Append)] + store: Vec, + /// remove all stores + #[clap(short, long, default_value_t = false)] + all: bool, +} + +/// Parse a single key-value pair +fn parse_key_val(s: &str) -> Result<(T, U), Box> +where + T: std::str::FromStr, + T::Err: Error + Send + Sync + 'static, + U: std::str::FromStr, + U::Err: Error + Send + Sync + 'static, +{ + let pos = s + .find('=') + .ok_or_else(|| format!("invalid KEY=value: no `=` found in `{s}`"))?; + Ok((s[..pos].parse()?, s[pos + 1..].parse()?)) +} + fn main() -> Result<()> { let opts = Options::parse(); @@ -115,6 +199,7 @@ fn main() -> Result<()> { Commands::Mount(opts) => mount(opts), Commands::Pack(opts) => pack(opts), Commands::Unpack(opts) => unpack(opts), + Commands::Config(opts) => config(opts), } } @@ -123,7 +208,7 @@ fn pack(opts: PackOptions) -> Result<()> { rt.block_on(async move { let store = store::parse_router(opts.store.as_slice()).await?; - let meta = fungi::Writer::new(opts.meta).await?; + let meta = fungi::Writer::new(opts.meta, true).await?; rfs::pack(meta, store, opts.target, !opts.no_strip_password).await?; Ok(()) @@ -240,3 +325,36 @@ async fn get_router(meta: &fungi::Reader) -> Result> { Ok(router) } + +fn config(opts: ConfigOptions) -> Result<()> { + let rt = tokio::runtime::Runtime::new()?; + + rt.block_on(async move { + let writer = fungi::Writer::new(opts.meta.clone(), false) + .await + .context("failed to initialize metadata database")?; + + let reader = fungi::Reader::new(opts.meta) + .await + .context("failed to initialize metadata database")?; + + match opts.command { + ConfigCommands::Tag(opts) => match opts { + TagOperation::List => config::tag_list(reader).await?, + TagOperation::Add(opts) => 
config::tag_add(writer, opts.tag).await?, + TagOperation::Delete(opts) => { + config::tag_delete(writer, opts.key, opts.all).await? + } + }, + ConfigCommands::Store(opts) => match opts { + StoreOperation::List => config::store_list(reader).await?, + StoreOperation::Add(opts) => config::store_add(writer, opts.store).await?, + StoreOperation::Delete(opts) => { + config::store_delete(writer, opts.store, opts.all).await? + } + }, + } + + Ok(()) + }) +} diff --git a/rfs/src/store/http.rs b/rfs/src/store/http.rs new file mode 100644 index 0000000..a3136fb --- /dev/null +++ b/rfs/src/store/http.rs @@ -0,0 +1,73 @@ +use super::{Error, Result, Route, Store}; +use reqwest::{self, StatusCode}; +use url::Url; + +#[derive(Clone)] +pub struct HTTPStore { + url: Url, +} + +impl HTTPStore { + pub async fn make>(url: &U) -> Result { + let u = Url::parse(url.as_ref())?; + if u.scheme() != "http" && u.scheme() != "https" { + return Err(Error::Other(anyhow::Error::msg("invalid scheme"))); + } + + Ok(HTTPStore::new(u).await?) + } + pub async fn new>(url: U) -> Result { + let url = url.into(); + Ok(Self { url }) + } +} + +#[async_trait::async_trait] +impl Store for HTTPStore { + async fn get(&self, key: &[u8]) -> Result> { + let file = hex::encode(key); + let mut file_path = self.url.clone(); + file_path + .path_segments_mut() + .map_err(|_| Error::Other(anyhow::Error::msg("cannot be base")))? + .push(&file[0..2]) + .push(&file); + let mut legacy_path = self.url.clone(); + + legacy_path + .path_segments_mut() + .map_err(|_| Error::Other(anyhow::Error::msg("cannot be base")))? 
+ .push(&file); + + let data = match reqwest::get(file_path).await { + Ok(mut response) => { + if response.status() == StatusCode::NOT_FOUND { + response = reqwest::get(legacy_path) + .await + .map_err(|_| Error::KeyNotFound)?; + if response.status() != StatusCode::OK { + return Err(Error::KeyNotFound); + } + } + if response.status() != StatusCode::OK { + return Err(Error::Unavailable); + } + response.bytes().await.map_err(|e| Error::Other(e.into()))? + } + Err(err) => return Err(Error::Other(err.into())), + }; + Ok(data.into()) + } + + async fn set(&self, _key: &[u8], _blob: &[u8]) -> Result<()> { + Err(Error::Other(anyhow::Error::msg( + "http store doesn't support uploading", + ))) + } + + fn routes(&self) -> Vec { + let r = Route::url(self.url.clone()); + + vec![r] + } +} diff --git a/rfs/src/store/mod.rs b/rfs/src/store/mod.rs index 8430cd6..d1e3c1e 100644 --- a/rfs/src/store/mod.rs +++ b/rfs/src/store/mod.rs @@ -1,5 +1,6 @@ mod bs; pub mod dir; +pub mod http; mod router; pub mod s3store; pub mod zdb; @@ -16,21 +17,10 @@ pub async fn make>(u: U) -> Result { let parsed = url::Url::parse(u.as_ref())?; match parsed.scheme() { - dir::SCHEME => return Ok(Stores::Dir( - dir::DirStore::make(&u) - .await - .expect("failed to make dir store"), - )), - "s3" | "s3s" | "s3s+tls" => return Ok(Stores::S3( - s3store::S3Store::make(&u) - .await - .expect(format!("failed to make {} store", parsed.scheme()).as_str()), - )), - "zdb" => return Ok(Stores::ZDB( - zdb::ZdbStore::make(&u) - .await - .expect("failed to make zdb store"), - )), + dir::SCHEME => return Ok(Stores::Dir(dir::DirStore::make(&u).await?)), + "s3" | "s3s" | "s3s+tls" => return Ok(Stores::S3(s3store::S3Store::make(&u).await?)), + "zdb" => return Ok(Stores::ZDB(zdb::ZdbStore::make(&u).await?)), + "http" | "https" => return Ok(Stores::HTTP(http::HTTPStore::make(&u).await?)), _ => return Err(Error::UnknownStore(parsed.scheme().into())), } } @@ -203,6 +193,7 @@ pub enum Stores { S3(s3store::S3Store), 
Dir(dir::DirStore), ZDB(zdb::ZdbStore), + HTTP(http::HTTPStore), } #[async_trait::async_trait] @@ -212,6 +203,7 @@ impl Store for Stores { self::Stores::S3(s3_store) => s3_store.get(key).await, self::Stores::Dir(dir_store) => dir_store.get(key).await, self::Stores::ZDB(zdb_store) => zdb_store.get(key).await, + self::Stores::HTTP(http_store) => http_store.get(key).await, } } async fn set(&self, key: &[u8], blob: &[u8]) -> Result<()> { @@ -219,6 +211,7 @@ impl Store for Stores { self::Stores::S3(s3_store) => s3_store.set(key, blob).await, self::Stores::Dir(dir_store) => dir_store.set(key, blob).await, self::Stores::ZDB(zdb_store) => zdb_store.set(key, blob).await, + self::Stores::HTTP(http_store) => http_store.set(key, blob).await, } } fn routes(&self) -> Vec { @@ -226,6 +219,7 @@ impl Store for Stores { self::Stores::S3(s3_store) => s3_store.routes(), self::Stores::Dir(dir_store) => dir_store.routes(), self::Stores::ZDB(zdb_store) => zdb_store.routes(), + self::Stores::HTTP(http_store) => http_store.routes(), } } } diff --git a/scripts/README.md b/scripts/README.md new file mode 100644 index 0000000..f658b4f --- /dev/null +++ b/scripts/README.md @@ -0,0 +1,197 @@ +# Garage s3 server with flist + +## Requirements + +- tfcmd +- docker2fl +- rust +- docker +- git +- sqlite +- minio (or any third-party tool you want to use) +- caddy + +### Install tfcmd + +```bash +wget https://github.com/threefoldtech/tfgrid-sdk-go/releases/download/v0.15.11/tfgrid-sdk-go_Linux_x86_64.tar.gz +mkdir tfgrid-sdk-go +tar -xzf tfgrid-sdk-go_Linux_x86_64.tar.gz -C tfgrid-sdk-go +sudo mv tfgrid-sdk-go/tfcmd /usr/bin/ +sudo rm -rf tfgrid-sdk-go_Linux_x86_64.tar.gz tfgrid-sdk-go +``` + +- Login to tfcmd + +```bash +tfcmd login +``` + +### Install rust + +```bash +apt-get update +apt-get install -y curl +curl https://sh.rustup.rs -sSf | sh +export PATH="$HOME/.cargo/bin:$PATH" +apt-get install -y build-essential +apt-get install -y musl-dev musl-tools +apt-get update +``` + +### Install docker + 
+```bash +apt-get update +apt-get install -y ca-certificates curl +install -m 0755 -d /etc/apt/keyrings +curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc +chmod a+r /etc/apt/keyrings/docker.asc +echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null +apt-get update +apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin +apt-get update +dockerd > docker.log 2>&1 & +``` + +### Install docker2fl + +```bash +git clone https://github.com/threefoldtech/rfs.git +cd rfs +rustup target add x86_64-unknown-linux-musl +cargo build --features build-binary --release --target=x86_64-unknown-linux-musl +mv ./target/x86_64-unknown-linux-musl/release/docker2fl /usr/local/bin +``` + +### Install sqlite + +```bash +apt update +apt install sqlite3 +``` + +### Install minio + +```bash +curl https://dl.min.io/client/mc/release/linux-amd64/mc \ + --create-dirs \ + -o $HOME/minio-binaries/mc +chmod +x $HOME/minio-binaries/mc +export PATH=$PATH:$HOME/minio-binaries/ +``` + +### Install Caddy + +```bash +sudo apt install -y debian-keyring debian-archive-keyring apt-transport-https curl +curl -1sLf 'https://dl.cloudsmith.io/public/caddy/stable/gpg.key' | sudo gpg --dearmor -o /usr/share/keyrings/caddy-stable-archive-keyring.gpg +curl -1sLf 'https://dl.cloudsmith.io/public/caddy/stable/debian.deb.txt' | sudo tee /etc/apt/sources.list.d/caddy-stable.list +sudo apt update +sudo apt install caddy +``` + +## Usage + +### Deploy garage server + +Run garage server using garage server [script](./deploy_garage.sh) + +```bash +chmod +x deploy_garage.sh +./deploy_garage.sh +``` + +This script includes: + +1. Deploy a vm with mycelium IP to run garage s3 server over it. +2. Install garage in the vm. +3. 
Run the garage server with the given configuration. + +### Manage buckets in garage server + +Manage your buckets using manage buckets [script](./manage_buckets.sh) + +```bash +export MYCELIUM_IP=<"your machine mycelium IP which has your garage server"> +chmod +x manage_buckets.sh +./manage_buckets.sh +``` + +This script includes: + +1. Create 2 buckets in garage server one for `flist` and the other for `blobs`. +2. Allow web for both buckets to be able to serve them. +3. Create 2 keys one for write and the other for read only. The `write-key` will be used to upload the flist and the blobs through rfs. The `read-key` should be updated for flist and blobs to prevent updating them. +4. Adding the keys with their permissions to the bucket. + +> *NOTE:* Don't forget to save your read and write keys (ID and secret). + +### Convert docker images to flist and upload it + +- Convert your image to an flist, The content will be uploaded over blobs buckets + +```bash +export IMAGE=<"Your image for example `threefolddev/ubuntu:22.04`"> +export WRITE_KEY_ID=<"your key ID"> +export WRITE_KEY_SECRET=<"your key secret"> +export MYCELIUM_IP=<"your machine mycelium IP which has your garage server"> + +docker2fl -i $IMAGE -s "s3://$WRITE_KEY_ID:$WRITE_KEY_SECRET@[$MYCELIUM_IP]:3900/blobs?region=garage" +``` + +- Update the key to the read only key + +```bash TODO: +sqlite3 +.open "" +update route set url="s3://:@[]:3900/blobs?region=garage" +``` + +- Upload your flist to flist bucket using minio (you can use any other client).
+ +```bash +export PATH=$PATH:$HOME/minio-binaries/ +mc alias set \ + garage \ + "http://[$MYCELIUM_IP]:3900" \ + "$WRITE_KEY_ID" \ + "$WRITE_KEY_SECRET" \ + --api S3v4 + +export FLIST_NAME=<"your flist name"> + +mc cp "$FLIST_NAME" "garage/flist/$FLIST_NAME" +``` + +### Serve the flist + +- Deploy a name gateway for any domain you want and get the fqdn + +```bash +tfcmd deploy gateway name -n "" --backends http://[$MYCELIUM_IP]:80 +``` + +- Create Caddyfile + +```Caddyfile +http:// { + route /flists/* { + uri strip_prefix /flists + reverse_proxy http://127.0.0.1:3902 { + header_up Host "flist" + } + } + route /blobs/* { + uri strip_prefix /blobs + reverse_proxy http://127.0.0.1:3902 { + header_up Host "blobs" + } + } +} +``` + +- Run `caddy run` + +Finally, you can get your flist using `https:///flists/`. +and get your blobs using `https:///blobs/`. diff --git a/scripts/deploy_garage.sh b/scripts/deploy_garage.sh new file mode 100755 index 0000000..f085bc1 --- /dev/null +++ b/scripts/deploy_garage.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +set -ex + +# Deploy a vm for garage server with mycelium for s3 server + +tfcmd deploy vm --name s3_server --ssh ~/.ssh/id_rsa.pub --cpu 8 --memory 16 --disk 50 --rootfs 10 +sleep 6 # wait deployment +OUTPUT=$(tfcmd get vm s3_server 2>&1 | tail -n +3 | tr { '\n' | tr , '\n' | tr } '\n') +MYCELIUM_IP=$(echo "$OUTPUT" | grep -Eo '"mycelium_ip"[^,]*' | awk -F'"' '{print $4}') + +# Expose S3 server over mycelium IP + +ssh root@$MYCELIUM_IP " +wget https://garagehq.deuxfleurs.fr/_releases/v1.0.0/x86_64-unknown-linux-musl/garage +chmod +x garage +mv garage /usr/local/bin + +cat > /etc/garage.toml < output.log 2>&1 & +" diff --git a/scripts/manage_buckets.sh b/scripts/manage_buckets.sh new file mode 100755 index 0000000..837ba74 --- /dev/null +++ b/scripts/manage_buckets.sh @@ -0,0 +1,62 @@ +#!/bin/bash + +set -ex + +if [ -z ${MYCELIUM_IP+x} ] +then + echo 'Error! $MYCELIUM_IP is required.'
+ exit 64 +fi + +# Create flist bucket and blobs bucket for rfs store and allow web for both + +NODE_ID=$(ssh root@$MYCELIUM_IP "garage status | awk 'NR==3{print \$1}'") + +ssh root@$MYCELIUM_IP " +garage layout assign -z dc1 -c 1G $NODE_ID +garage layout apply --version 1 +garage bucket create blobs +garage bucket create flist +garage bucket website --allow flist +garage bucket website --allow blobs +garage bucket list +" + +# We need to generate a key allowing read and write permissions +# This allows us to upload different files over the s3 server bucket + +WRITE_KEY_INFO=$(ssh root@$MYCELIUM_IP "garage key create write-rfs-key | awk 'NR==2{print \$3}NR==3{print \$3}'") +WRITE_KEY_ID=$(echo $WRITE_KEY_INFO | awk '{print $1}') +WRITE_KEY_SECRET=$(echo $WRITE_KEY_INFO | awk '{print $2}') + +# We need to generate a key allowing read only permission +# This allows us to only download different files over the s3 server bucket +# After generating the flist it should be updated to include the read key only + +READ_KEY_INFO=$(ssh root@$MYCELIUM_IP "garage key create read-rfs-key | awk 'NR==2{print \$3}NR==3{print \$3}'") +READ_KEY_ID=$(echo $READ_KEY_INFO | awk '{print $1}') +READ_KEY_SECRET=$(echo $READ_KEY_INFO | awk '{print $2}') + +ssh root@$MYCELIUM_IP " +garage bucket allow \ + --read \ + --write \ + --owner \ + flist \ + --key write-rfs-key +garage bucket allow \ + --read \ + --write \ + --owner \ + blobs \ + --key write-rfs-key + +garage bucket allow \ + --read \ + flist \ + --key read-rfs-key +garage bucket allow \ + --read \ + blobs \ + --key read-rfs-key +# "