multithread upload
parent 343c2f0c02
commit 92e5c3f985

Cargo.lock (generated, 103 changed lines)
@@ -656,6 +656,19 @@ version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422"

+[[package]]
+name = "console"
+version = "0.15.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb"
+dependencies = [
+ "encode_unicode",
+ "lazy_static",
+ "libc",
+ "unicode-width",
+ "windows-sys 0.52.0",
+]
+
 [[package]]
 name = "const-oid"
 version = "0.9.6"
@@ -805,6 +818,12 @@ dependencies = [
  "zeroize",
 ]

+[[package]]
+name = "encode_unicode"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
+
 [[package]]
 name = "encoding_rs"
 version = "0.8.34"
@@ -1096,6 +1115,12 @@ version = "1.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"

+[[package]]
+name = "human_bytes"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91f255a4535024abf7640cb288260811fc14794f62b063652ed349f9a6c2348e"
+
 [[package]]
 name = "hyper"
 version = "0.14.30"
@@ -1151,7 +1176,7 @@ dependencies = [
  "hyper 0.14.30",
  "log",
  "rustls 0.21.12",
- "rustls-native-certs 0.6.3",
+ "rustls-native-certs",
  "tokio",
  "tokio-rustls 0.24.1",
 ]
@@ -1166,14 +1191,11 @@ dependencies = [
  "http 1.1.0",
  "hyper 1.4.1",
  "hyper-util",
- "log",
  "rustls 0.23.11",
- "rustls-native-certs 0.7.1",
  "rustls-pki-types",
  "tokio",
  "tokio-rustls 0.26.0",
  "tower-service",
- "webpki-roots",
 ]

 [[package]]
@@ -1255,6 +1277,28 @@ dependencies = [
  "hashbrown",
 ]

+[[package]]
+name = "indicatif"
+version = "0.17.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3"
+dependencies = [
+ "console",
+ "instant",
+ "number_prefix",
+ "portable-atomic",
+ "unicode-width",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222"
+dependencies = [
+ "cfg-if",
+]
+
 [[package]]
 name = "ipnet"
 version = "2.9.0"
@@ -1282,6 +1326,12 @@ dependencies = [
  "wasm-bindgen",
 ]

+[[package]]
+name = "lazy_static"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
+
 [[package]]
 name = "libc"
 version = "0.2.155"
@@ -1412,6 +1462,12 @@ dependencies = [
  "libc",
 ]

+[[package]]
+name = "number_prefix"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
+
 [[package]]
 name = "object"
 version = "0.36.1"
@@ -1565,6 +1621,12 @@ version = "0.3.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec"

+[[package]]
+name = "portable-atomic"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
+
 [[package]]
 name = "powerfmt"
 version = "0.2.0"
@@ -1600,8 +1662,8 @@ dependencies = [
  "aws-smithy-types",
  "chrono",
  "clap",
- "hyper 1.4.1",
- "hyper-rustls 0.27.2",
+ "human_bytes",
+ "indicatif",
  "reqwest",
  "serde",
  "serde_json",
@@ -1747,7 +1809,6 @@ version = "0.23.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4828ea528154ae444e5a642dbb7d5623354030dc9822b83fd9bb79683c7399d0"
 dependencies = [
- "log",
  "once_cell",
  "rustls-pki-types",
  "rustls-webpki 0.102.5",
@@ -1767,19 +1828,6 @@ dependencies = [
  "security-framework",
 ]

-[[package]]
-name = "rustls-native-certs"
-version = "0.7.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a88d6d420651b496bdd98684116959239430022a115c1240e6c3993be0b15fba"
-dependencies = [
- "openssl-probe",
- "rustls-pemfile 2.1.2",
- "rustls-pki-types",
- "schannel",
- "security-framework",
-]
-
 [[package]]
 name = "rustls-pemfile"
 version = "1.0.4"
@@ -2297,6 +2345,12 @@ dependencies = [
  "tinyvec",
 ]

+[[package]]
+name = "unicode-width"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d"
+
 [[package]]
 name = "untrusted"
 version = "0.9.0"
@@ -2441,15 +2495,6 @@ dependencies = [
  "wasm-bindgen",
 ]

-[[package]]
-name = "webpki-roots"
-version = "0.26.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd7c23921eeb1713a4e851530e9b9756e4fb0e89978582942612524cf09f01cd"
-dependencies = [
- "rustls-pki-types",
-]
-
 [[package]]
 name = "windows-core"
 version = "0.52.0"
Cargo.toml (12 changed lines)

@@ -6,19 +6,13 @@ edition = "2021"
 [dependencies]
 anyhow = "1.0.86"
 aws-config = "1.5.4"
-aws-sdk-s3 = "1.41.0"
+aws-sdk-s3 = {version = "1.41.0", default-features = false, features = ["rt-tokio", "sigv4a", "behavior-version-latest"]}
 aws-smithy-runtime = "1.6.2"
 aws-smithy-types = "1.2.0"
 chrono = { version = "0.4.38", features = ["serde"] }
 clap = { version = "4.5.9", features = ["derive"] }
-hyper = "1.4.1"
-hyper-rustls = { version = "0.27.2", default-features = false, features = [
-    "webpki-tokio",
-    "http1",
-    "native-tokio",
-    "tls12",
-    "logging",
-] }
+human_bytes = "0.4.3"
+indicatif = "0.17.8"
 reqwest = { version = "0.12.5", features = ["json"] }
 serde = { version = "1.0.204", features = ["derive"] }
 serde_json = "1.0.120"
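The hyper and hyper-rustls entries are dropped from [dependencies], and human_bytes and indicatif come in for the new CLI output. As a minimal sketch, assuming indicatif 0.17, this is how the progress-bar style configured later in multipart_upload behaves in isolation (the template string is the one from the diff; the loop here only simulates chunk completions):

use indicatif::{ProgressBar, ProgressState, ProgressStyle};

fn demo_progress(total_bytes: u64) {
    let pb = ProgressBar::new(total_bytes);
    pb.set_style(
        ProgressStyle::with_template(
            "{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {bytes}/{total_bytes} ({eta})",
        )
        .unwrap()
        // Custom "eta" key, rendered as seconds with one decimal place.
        .with_key("eta", |state: &ProgressState, w: &mut dyn std::fmt::Write| {
            write!(w, "{:.1}s", state.eta().as_secs_f64()).unwrap()
        })
        .progress_chars("#>-"),
    );
    for _ in 0..10 {
        pb.inc(total_bytes / 10); // e.g. one uploaded chunk
    }
    pb.finish_with_message("Uploaded");
}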
@@ -42,9 +42,7 @@ impl Client {
             .map_err(Into::into)
     }

-    pub async fn get_token(
-        &self,
-    ) -> anyhow::Result<types::response::GetFileLinkTokenResponse> {
+    pub async fn get_upload_token(&self) -> anyhow::Result<types::response::GetFileLinkTokenResponse> {
         let client = reqwest::Client::new();
         let request = client
             .get(&format!(
@@ -59,6 +57,40 @@ impl Client {
             .await
             .map_err(Into::into)
     }
+
+    pub async fn get_download_link(
+        &self,
+        req: types::request::GetFileLinkRequest,
+    ) -> anyhow::Result<types::response::GetFileLinkResponse> {
+        let client = reqwest::Client::new();
+        let request = client
+            .post("https://forest.sendy.jp/cloud/service/file/v1/filelink/download")
+            .bearer_auth(&self.token)
+            .json(&req);
+
+        let response = request.send().await?;
+        response
+            .json::<types::response::GetFileLinkResponse>()
+            .await
+            .map_err(Into::into)
+    }
+
+    pub async fn check_action(
+        &self,
+        req: types::request::CheckActionRequest,
+    ) -> anyhow::Result<types::response::CheckActionResponse> {
+        let client = reqwest::Client::new();
+        let request = client
+            .post("https://forest.sendy.jp/cloud/service/file/v3/files/check")
+            .bearer_auth(&self.token)
+            .json(&req);
+
+        let response = request.send().await?;
+        response
+            .json::<types::response::CheckActionResponse>()
+            .await
+            .map_err(Into::into)
+    }
 }

 // https://www.rakuten-drive.com/api/account/refreshtoken POST RefreshTokenRequest RefreshTokenResponse
@@ -69,5 +101,6 @@ impl Client {
 // https://forest.sendy.jp/cloud/service/file/v3/files/check POST CheckActionRequest CheckActionResponse
 // https://forest.sendy.jp/cloud/service/file/v3/files/move PUT MoveFileRequest MoveFileResponse
 // https://forest.sendy.jp/cloud/service/file/v1/check/upload POST CheckUploadRequest CheckUploadResponse
-// https://forest.sendy.jp/cloud/service/file/v1/filelink/token?host_id=GclT7DrnLFho7vnIirUzjtMLhRk2&path=hello GET GetFileLinkTokenRequest GetFileLinkTokenResponse
+// https://forest.sendy.jp/cloud/service/file/v1/filelink/token?host_id=GclT7DrnLFho7vnIirUzjtMLhRk2&path=hello GET GetFileLinkTokenResponse
 // https://forest.sendy.jp/cloud/service/file/v1/complete/upload POST CompleteUploadRequest
+// https://forest.sendy.jp/cloud/service/file/v1/filelink/download POST GetFileLinkRequest GetFileLinkResponse
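The client gains get_download_link and check_action, and get_token is renamed to get_upload_token. A condensed usage sketch of the polling loop that main.rs builds around check_action (the helper name here is hypothetical, and it uses tokio::time::sleep instead of the blocking std::thread::sleep the diff uses):

// Hypothetical helper: poll check_action until the server marks the upload
// job (identified by the upload_id from check_upload) as complete.
async fn wait_for_upload(client: &endpoints::Client, upload_id: &str) -> anyhow::Result<()> {
    loop {
        let req = types::request::CheckActionRequest {
            key: upload_id.to_string(),
        };
        let res = client.check_action(req).await?;
        if res.state == "complete" {
            return Ok(());
        }
        // Non-blocking pause between polls.
        tokio::time::sleep(std::time::Duration::from_millis(200)).await;
    }
}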
src/main.rs (463 changed lines)

@@ -1,21 +1,28 @@
-use std::path::{Path, PathBuf};
+use std::{
+    io::{stdout, Write},
+    path::{Path, PathBuf},
+    sync::Arc,
+};

-use aws_config::{BehaviorVersion, Region, SdkConfig};
+use aws_config::{environment::region, BehaviorVersion, Region, SdkConfig};
 use aws_sdk_s3::{
     config::Credentials, operation::upload_part, primitives::ByteStream, types::CompletedPart,
 };
 use aws_smithy_runtime::client::http::hyper_014::HyperClientBuilder;
 use aws_smithy_types::byte_stream::Length;
 use clap::{Parser, Subcommand};
-use tokio::{fs::File, io::BufReader};
+use human_bytes::human_bytes;
+use indicatif::{ProgressBar, ProgressState, ProgressStyle};
+use tokio::{fs::File, io::BufReader, sync::Mutex};
 use types::response::ListFilesResponseFile;

 mod endpoints;
 mod types;

-const BEARER_TOKEN: &str = "eyJhbGciOiJSUzI1NiIsImtpZCI6ImMxNTQwYWM3MWJiOTJhYTA2OTNjODI3MTkwYWNhYmU1YjA1NWNiZWMiLCJ0eXAiOiJKV1QifQ.eyJuYW1lIjoi5bm457-8IOW_l-adkSIsInBsYW4iOiJza2YiLCJpc3MiOiJodHRwczovL3NlY3VyZXRva2VuLmdvb2dsZS5jb20vc2VuZHktc2VydmljZSIsImF1ZCI6InNlbmR5LXNlcnZpY2UiLCJhdXRoX3RpbWUiOjE3MjEyMjYwMTUsInVzZXJfaWQiOiJHY2xUN0RybkxGaG83dm5JaXJVemp0TUxoUmsyIiwic3ViIjoiR2NsVDdEcm5MRmhvN3ZuSWlyVXpqdE1MaFJrMiIsImlhdCI6MTcyMTI0NzQxMCwiZXhwIjoxNzIxMjUxMDEwLCJlbWFpbCI6ImtvdXN1a2UxMTIzNjEyNEBnbWFpbC5jb20iLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsImZpcmViYXNlIjp7ImlkZW50aXRpZXMiOnsiZW1haWwiOlsia291c3VrZTExMjM2MTI0QGdtYWlsLmNvbSJdfSwic2lnbl9pbl9wcm92aWRlciI6ImN1c3RvbSJ9fQ.ikxsOAbgKhKywvvC1Ot28AEZ7_DTVNaMI2KSEFaZAaPTtgPk6fqYzegW2iwq7GK_ySmCuKppPEeSD8nKDggeX96z36Y1zd5xm7EIWTCdmCB36gjhAkAowVenRX2VW3gIVCJVHUQ50UEVM4CMzw73N058fQ97wAdHVp2oOtZOczJyQpAZuy0zqXSKWvnom0SfNz0iZov7r3TLSBlxSMGjEu_aSInq7yMOSHNkbQHenelv3592EY_ktnFLYSYi1HWEEijqsKSGdf01DYBkC5H8Eq0snk7n8NvKFAaUxT8DClxHlE_xagOnbkfCBh-AN2CqnkwxOi7Kkh0iWOkdMLqK0w";
+const BEARER_TOKEN: &str = "eyJhbGciOiJSUzI1NiIsImtpZCI6ImMxNTQwYWM3MWJiOTJhYTA2OTNjODI3MTkwYWNhYmU1YjA1NWNiZWMiLCJ0eXAiOiJKV1QifQ.eyJuYW1lIjoi5bm457-8IOW_l-adkSIsInBsYW4iOiJza2YiLCJpc3MiOiJodHRwczovL3NlY3VyZXRva2VuLmdvb2dsZS5jb20vc2VuZHktc2VydmljZSIsImF1ZCI6InNlbmR5LXNlcnZpY2UiLCJhdXRoX3RpbWUiOjE3MjEyMjYwMTUsInVzZXJfaWQiOiJHY2xUN0RybkxGaG83dm5JaXJVemp0TUxoUmsyIiwic3ViIjoiR2NsVDdEcm5MRmhvN3ZuSWlyVXpqdE1MaFJrMiIsImlhdCI6MTcyMTI1NTM1OCwiZXhwIjoxNzIxMjU4OTU4LCJlbWFpbCI6ImtvdXN1a2UxMTIzNjEyNEBnbWFpbC5jb20iLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsImZpcmViYXNlIjp7ImlkZW50aXRpZXMiOnsiZW1haWwiOlsia291c3VrZTExMjM2MTI0QGdtYWlsLmNvbSJdfSwic2lnbl9pbl9wcm92aWRlciI6ImN1c3RvbSJ9fQ.uC-X4XCMTJ-Vv0bmm85cZy65LVdNxRKBlNXxsg8_QqyMV1rRzmpDMQpwWKk10OUDj6xovg1tfmlUW2syL0twANO8hKOSlI_wLZ1Rvvm0TF8EvDLvv8OGFc93nm3OIaSaiZj-xcORZzeJDVHsdraGoYDX3YbYPIJAhDaOsHX5_QbLwuxoz0dxd0fTAoDH7aEpDhcojjTmMImtbGqMzpvUpwNunJaJK2YZTYiHXZtcK7mr9cQLF5b3Exee--R5hGEU9E49jGtXKQNrP_6mkTXVivJh6TdKeFiMCrbc-6xZvuBnkEQ8g0GvU9cERhJTZ73U2jdHLzWbYitCh2nzkbQDNA";
 const HOST_ID: &str = "GclT7DrnLFho7vnIirUzjtMLhRk2";
 const CHUNK_SIZE: usize = 1024 * 1024 * 10; // 10MB
+const APP_VERSION: &str = "v21.11.10";

 #[derive(Parser, Debug)]
 #[command(version, about, long_about=None)]
@@ -38,60 +45,39 @@ enum Commands {
         #[clap(short, long)]
         recursive: bool,
     },
-    Download {},
+    Download {
+        #[clap(short, long)]
+        path: String,
+        #[clap(long)]
+        prefix: Option<String>,
+    },
+    Move {},
     Delete {},
     MkDir {},
 }

+#[derive(Debug, Clone)]
+struct TargetFile {
+    file: PathBuf,
+    path: String,
+}
+
 #[tokio::main]
 async fn main() {
     let args = Args::parse();

     match &args.command {
         Commands::List { prefix } => {
-            let client = endpoints::Client::new(BEARER_TOKEN.to_string(), HOST_ID.to_string());
-            let pagination_size = 40;
-            let mut files = Vec::<ListFilesResponseFile>::new();
-            let req = types::request::ListFilesRequest {
-                from: 0,
-                host_id: client.host_id.clone(),
-                path: prefix.clone().unwrap_or("".to_string()),
-                sort_type: "path".to_string(),
-                reverse: false,
-                thumbnail_size: 130,
-                to: pagination_size,
-            };
-            let mut res = client.list_files(req).await.unwrap();
-
-            files.append(&mut res.file);
-
-            if !res.last_page {
-                let mut cursor = res.file.len() as i64;
-                loop {
-                    let req = types::request::ListFilesRequest {
-                        from: cursor,
-                        host_id: client.host_id.clone(),
-                        path: prefix.clone().unwrap_or("".to_string()),
-                        sort_type: "path".to_string(),
-                        reverse: false,
-                        thumbnail_size: 130,
-                        to: pagination_size + cursor,
-                    };
-
-                    let mut next_res = client.list_files(req).await.unwrap();
-
-                    files.append(&mut next_res.file);
-
-                    if next_res.last_page {
-                        break;
-                    } else {
-                        cursor += next_res.file.len() as i64;
-                    }
-                }
-            }
-            res.file = files;
+            let res = list_files(prefix.clone()).await.unwrap();
             res.file.iter().for_each(|f| {
-                println!("{:#?}", f);
+                let permission_string = if f.is_folder { "d" } else { "-" };
+                println!(
+                    "{} {}\t{}\t{}",
+                    permission_string,
+                    human_bytes(f.size as u32),
+                    f.last_modified,
+                    f.path
+                );
             });
         }
         Commands::Upload {
@@ -99,15 +85,60 @@ async fn main() {
             prefix,
             recursive,
         } => {
-            // file check
-            if !file.exists() {
-                println!("File not found: {:?}", file);
+            println!("{}", recursive);
+
+            // is folder
+            if file.is_dir() && !*recursive {
+                println!("Use --recursive option for folder upload");
                 return;
             }
-            // is folder
-            if file.is_dir() || *recursive {
-                println!("Folder upload is not supported. Use --recursive option.");
-                return;
-            }
+
+            let mut files = Vec::<TargetFile>::new();
+
+            if file.is_dir() && *recursive {
+                // upload folder
+                let mut dirs = Vec::<PathBuf>::new();
+                dirs.push(file.clone());
+                while let Some(dir) = dirs.pop() {
+                    let entries = std::fs::read_dir(dir).unwrap();
+                    for entry in entries {
+                        let entry = entry.unwrap();
+                        let path = entry.path();
+                        if path.is_dir() {
+                            dirs.push(path);
+                        } else {
+                            files.push(TargetFile {
+                                file: path.clone(),
+                                path: path
+                                    .strip_prefix(file)
+                                    .unwrap()
+                                    .to_str()
+                                    .unwrap()
+                                    .to_string(),
+                            });
+                        }
+                    }
+                }
+                // for file in files {
+                //     println!("{:?}", file);
+                // }
+            } else {
+                // file check
+                if !file.exists() {
+                    println!("File not found: {:?}", file);
+                    return;
+                }
+                files.push(TargetFile {
+                    file: file.clone(),
+                    path: file.file_name().unwrap().to_str().unwrap().to_string(),
+                });
+            }
+
+            if cfg!(windows) {
+                // replase \ to /
+                files.iter_mut().for_each(|f| {
+                    f.path = f.path.replace('\\', "/");
+                });
+            }

             let client = endpoints::Client::new(BEARER_TOKEN.to_string(), HOST_ID.to_string());
@@ -116,19 +147,22 @@ async fn main() {
                 host_id: client.host_id.clone(),
                 path: prefix.clone().unwrap_or("".to_string()),
                 upload_id: "".to_string(),
-                file: vec![types::request::CheckUploadRequestFile {
-                    path: file.to_str().unwrap().to_string(),
-                    size: file.metadata().unwrap().len() as i64,
-                }],
+                file: files
+                    .iter()
+                    .map(|f| types::request::CheckUploadRequestFile {
+                        path: f.path.clone(),
+                        size: f.file.metadata().unwrap().len() as i64,
+                    })
+                    .collect(),
             };

             let check_upload_res = client.check_upload(req).await.unwrap();

-            println!("{:#?}", check_upload_res);
+            // println!("{:#?}", check_upload_res);

-            let token_res = client.get_token().await.unwrap();
+            let token_res = client.get_upload_token().await.unwrap();

-            println!("{:#?}", token_res);
+            // println!("{:#?}", token_res);

             let cledential = Credentials::new(
                 token_res.access_key_id.clone(),
@@ -139,7 +173,6 @@ async fn main() {
             );
             let config = aws_sdk_s3::Config::builder()
                 .behavior_version_latest()
-                .endpoint_url("https://sendy-cloud.s3.ap-northeast-1.amazonaws.com")
                 .region(Region::new(check_upload_res.region.clone()))
                 .credentials_provider(cledential)
                 .force_path_style(true)
@@ -148,34 +181,111 @@ async fn main() {
             let s3_client = aws_sdk_s3::Client::from_conf(config);
             let file_size = file.metadata().unwrap().len();

-            if file_size > CHUNK_SIZE as u64 {
-                multipart_upload(token_res, check_upload_res, file.clone())
-                    .await
-                    .unwrap();
-            } else {
-                let stream = ByteStream::read_from()
-                    .path(file.clone())
-                    .offset(0)
-                    .length(Length::Exact(file_size))
-                    .build()
-                    .await
-                    .unwrap();
-                let key = check_upload_res.prefix + "/" + check_upload_res.file[0].path.as_str();
-                let _upload_res = s3_client
-                    .put_object()
-                    .bucket(check_upload_res.bucket)
-                    .key(key)
-                    .body(stream)
-                    .send()
-                    .await
-                    .unwrap();
-            }
+            // if file_size > CHUNK_SIZE as u64 {
+            for (i, file) in files.iter().enumerate() {
+                println!("Multi Uploading: {:?}", file.file);
+
+                multipart_upload(
+                    &token_res,
+                    &check_upload_res.bucket,
+                    &check_upload_res.file[i],
+                    &check_upload_res.prefix,
+                    &check_upload_res.region,
+                    &check_upload_res.upload_id,
+                    file.clone(),
+                )
+                .await
+                .unwrap();
+                // }
+                // } else {
+                //     for (i, file) in files.iter().enumerate() {
+                //         println!("Uploading: {:?}", file.file);
+                //         let stream = ByteStream::read_from()
+                //             .path(file.file.clone())
+                //             .offset(0)
+                //             .length(Length::Exact(file_size))
+                //             .build()
+                //             .await
+                //             .unwrap();
+                //         let key =
+                //             check_upload_res.prefix.to_owned() + check_upload_res.file[i].path.as_str();
+                //         let _upload_res = s3_client
+                //             .put_object()
+                //             .bucket(check_upload_res.bucket.clone())
+                //             .key(key)
+                //             .body(stream)
+                //             .send()
+                //             .await
+                //             .unwrap();
+                //     }
+                // }
             }

-            println!("Upload");
+            loop {
+                let req = types::request::CheckActionRequest {
+                    key: check_upload_res.upload_id.clone(),
+                };
+                let res = client.check_action(req).await.unwrap();
+
+                if res.state == "complete" {
+                    break;
+                }
+
+                std::thread::sleep(std::time::Duration::from_millis(200));
+            }
+
+            println!("Uploaded");
         }
-        Commands::Download {} => {
+        Commands::Download { path, prefix } => {
+            let client = endpoints::Client::new(BEARER_TOKEN.to_string(), HOST_ID.to_string());
+
+            let file_name = path.split('/').last().unwrap();
+            let file_path =
+                path.split('/').collect::<Vec<&str>>()[0..path.split('/').count() - 1].join("/");
+
+            let list = list_files(Some(file_path.clone())).await.unwrap();
+
+            let file = list
+                .file
+                .iter()
+                .find(|f| f.path == *path)
+                .expect("File not found");
+
+            let req = types::request::GetFileLinkRequest {
+                app_version: APP_VERSION.to_string(),
+                file: vec![types::request::GetFileLinkRequestFile {
+                    path: path.to_string(),
+                    size: file.size,
+                }],
+                host_id: client.host_id.clone(),
+                path: file_path,
+            };
+
+            let res = client.get_download_link(req).await.unwrap();
+
+            // run aria2c
+
+            let stdout = std::process::Command::new("aria2c")
+                .arg("-x16")
+                .arg("-s16")
+                .arg("-d")
+                .arg(".")
+                .arg(res.url)
+                .stdout(std::process::Stdio::piped())
+                .spawn()
+                .expect("failed to execute process")
+                .stdout
+                .expect("failed to get stdout");
+
+            let reader = std::io::BufReader::new(stdout);
+
+            std::io::BufRead::lines(reader).for_each(|line| println!("{}", line.unwrap()));
+
             println!("Download");
         }
+        Commands::Move {} => {
+            println!("Move");
+        }
         Commands::Delete {} => {
             println!("Delete");
         }
@@ -186,21 +296,25 @@ async fn main() {
 }

 async fn multipart_upload(
-    token_res: types::response::GetFileLinkTokenResponse,
-    check_upload_res: types::response::CheckUploadResponse,
-    file: PathBuf,
+    token_res: &types::response::GetFileLinkTokenResponse,
+    bucket: &str,
+    target_file: &types::response::CheckUploadResponseFile,
+    prefix: &str,
+    region: &str,
+    upload_id: &str,
+    file: TargetFile,
 ) -> anyhow::Result<()> {
-    if !file.exists() {
-        println!("File not found: {:?}", file);
-        return Err(anyhow::anyhow!("File not found: {:?}", file));
+    if !file.file.exists() {
+        println!("File not found: {:?}", file.file);
+        return Err(anyhow::anyhow!("File not found: {:?}", file.file));
     }

-    let file_size = file.metadata().unwrap().len();
+    let file_size = file.file.metadata().unwrap().len();

     let cledential = Credentials::new(
-        token_res.access_key_id,
-        token_res.secret_access_key,
-        Some(token_res.session_token),
+        &token_res.access_key_id,
+        &token_res.secret_access_key,
+        Some(token_res.session_token.clone()),
         // 2024-07-18T07:14:42Z
         Some(
             chrono::DateTime::parse_from_rfc3339(&token_res.expiration)
@@ -210,31 +324,26 @@ async fn multipart_upload(
         "2021-06-01",
     );

-    // let tls_client = hyper_rustls::HttpsConnectorBuilder::new();
-
-    // let hyper_connector = HyperClientBuilder::new().build(tls_client);
-
     let config = aws_sdk_s3::Config::builder()
         .behavior_version_latest()
         .credentials_provider(cledential)
-        .region(Region::new(check_upload_res.region))
-        .endpoint_url("https://sendy-cloud.s3.ap-northeast-1.amazonaws.com")
+        .region(Region::new(region.to_owned()))
+        // .endpoint_url("https://sendy-cloud.s3.ap-northeast-1.amazonaws.com")
         .build();

     let s3_client = aws_sdk_s3::Client::from_conf(config);
-    // let file = BufReader::new(File::open(file).await.unwrap());

-    let key = check_upload_res.prefix + check_upload_res.file[0].path.as_str();
+    let key = prefix.to_owned() + target_file.path.as_str();

     let multipart_upload_res = s3_client
         .create_multipart_upload()
-        .bucket(check_upload_res.bucket.clone())
+        .bucket(bucket)
         .key(key.clone())
         .send()
         .await
         .unwrap();

-    let upload_id = multipart_upload_res.upload_id().unwrap();
+    let upload_id = multipart_upload_res.upload_id().unwrap().to_string();

     let mut chunk_count = file_size / CHUNK_SIZE as u64;
     let mut size_of_last_chunk = file_size % CHUNK_SIZE as u64;
@@ -243,47 +352,82 @@ async fn multipart_upload(
         chunk_count -= 1;
     }

-    let mut upload_parts = Vec::<CompletedPart>::new();
+    let upload_parts = Arc::new(Mutex::new(Vec::<CompletedPart>::new()));

+    let pb = ProgressBar::new(file_size);
+    pb.set_style(ProgressStyle::with_template("{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {bytes}/{total_bytes} ({eta})")
+        .unwrap()
+        .with_key("eta", |state: &ProgressState, w: &mut dyn std::fmt::Write| write!(w, "{:.1}s", state.eta().as_secs_f64()).unwrap())
+        .progress_chars("#>-"));
+
+    let semaphore = Arc::new(tokio::sync::Semaphore::new(20));
+    let mut handles = Vec::new();
+
     for chunk_index in 0..chunk_count {
-        let this_chunk = if chunk_count - 1 == chunk_index {
-            size_of_last_chunk
-        } else {
-            CHUNK_SIZE as u64
-        };
-        let stream = ByteStream::read_from()
-            .path(file.clone())
-            .offset(chunk_index * CHUNK_SIZE as u64)
-            .length(Length::Exact(this_chunk))
-            .build()
-            .await
-            .unwrap();
-        //Chunk index needs to start at 0, but part numbers start at 1.
-        let part_number = (chunk_index as i32) + 1;
-        let upload_part_res = s3_client
-            .upload_part()
-            .key(&key)
-            .bucket(&check_upload_res.bucket)
-            .upload_id(upload_id)
-            .body(stream)
-            .part_number(part_number)
-            .send()
-            .await?;
-        upload_parts.push(
-            CompletedPart::builder()
-                .e_tag(upload_part_res.e_tag.unwrap_or_default())
-                .part_number(part_number)
-                .build(),
-        );
+        let bucket = bucket.to_owned();
+        let key = key.clone();
+        let upload_id = upload_id.clone();
+        let s3_client = s3_client.clone();
+        let pb = pb.clone();
+        let file = file.clone();
+        let upload_parts = upload_parts.clone();
+
+        let semaphore = semaphore.clone().acquire_owned().await.unwrap();
+
+        let handle = tokio::spawn(async move {
+            let _permit = semaphore;
+
+            let this_chunk = if chunk_count - 1 == chunk_index {
+                size_of_last_chunk
+            } else {
+                CHUNK_SIZE as u64
+            };
+            let stream = ByteStream::read_from()
+                .path(file.file.clone())
+                .offset(chunk_index * CHUNK_SIZE as u64)
+                .length(Length::Exact(this_chunk))
+                .build()
+                .await
+                .unwrap();
+            //Chunk index needs to start at 0, but part numbers start at 1.
+            let part_number = (chunk_index as i32) + 1;
+            let upload_part_res = s3_client
+                .upload_part()
+                .key(&key)
+                .bucket(bucket)
+                .upload_id(upload_id)
+                .body(stream)
+                .part_number(part_number)
+                .send()
+                .await
+                .unwrap();
+            upload_parts.lock().await.push(
+                CompletedPart::builder()
+                    .e_tag(upload_part_res.e_tag.unwrap_or_default())
+                    .part_number(part_number)
+                    .build(),
+            );
+            pb.inc(this_chunk);
+        });
+        handles.push(handle);
     }

+    for handle in handles {
+        handle.await.unwrap();
+    }
+
+    upload_parts
+        .lock()
+        .await
+        .sort_by(|a, b| a.part_number.cmp(&b.part_number));
+
     let completed_multipart_upload = aws_sdk_s3::types::CompletedMultipartUpload::builder()
-        .set_parts(Some(upload_parts))
+        .set_parts(Some(upload_parts.lock().await.clone()))
         .build();

     let _complete_multipart_upload_res = s3_client
         .complete_multipart_upload()
-        .bucket(check_upload_res.bucket)
+        .bucket(bucket)
         .key(key)
         .upload_id(upload_id)
         .multipart_upload(completed_multipart_upload)
@@ -291,5 +435,52 @@ async fn multipart_upload(
         .await
         .unwrap();

+    pb.finish_with_message("Uploaded");
+
     Ok(())
 }
+
+async fn list_files(prefix: Option<String>) -> anyhow::Result<types::response::ListFilesResponse> {
+    let client = endpoints::Client::new(BEARER_TOKEN.to_string(), HOST_ID.to_string());
+    let pagination_size = 40;
+    let mut files = Vec::<ListFilesResponseFile>::new();
+    let req = types::request::ListFilesRequest {
+        from: 0,
+        host_id: client.host_id.clone(),
+        path: prefix.clone().unwrap_or("".to_string()),
+        sort_type: "path".to_string(),
+        reverse: false,
+        thumbnail_size: 130,
+        to: pagination_size,
+    };
+    let mut res = client.list_files(req).await?;
+
+    files.append(&mut res.file);
+
+    if !res.last_page {
+        let mut cursor = res.file.len() as i64;
+        loop {
+            let req = types::request::ListFilesRequest {
+                from: cursor,
+                host_id: client.host_id.clone(),
+                path: prefix.clone().unwrap_or("".to_string()),
+                sort_type: "path".to_string(),
+                reverse: false,
+                thumbnail_size: 130,
+                to: pagination_size + cursor,
+            };
+
+            let mut next_res = client.list_files(req).await?;
+            files.append(&mut next_res.file);
+
+            if next_res.last_page {
+                break;
+            } else {
+                cursor += next_res.file.len() as i64;
+            }
+        }
+    }
+    res.file = files;
+
+    Ok(res)
+}
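The heart of the commit is the rewritten multipart_upload: each chunk is uploaded in its own tokio task, a 20-permit semaphore caps how many chunks are in flight at once, and the completed parts are collected behind an Arc<Mutex<...>> and sorted by part number before CompleteMultipartUpload. A stripped-down sketch of that pattern, assuming tokio and aws-sdk-s3 as in the diff (the function name is hypothetical and the actual UploadPart call is elided):

use std::sync::Arc;

use aws_sdk_s3::types::CompletedPart;
use tokio::sync::{Mutex, Semaphore};

async fn upload_chunks_concurrently(chunk_count: u64) -> Vec<CompletedPart> {
    // At most 20 chunk uploads run at the same time.
    let semaphore = Arc::new(Semaphore::new(20));
    let upload_parts = Arc::new(Mutex::new(Vec::<CompletedPart>::new()));
    let mut handles = Vec::new();

    for chunk_index in 0..chunk_count {
        // Wait for a permit before spawning; the permit is held by the task.
        let permit = semaphore.clone().acquire_owned().await.unwrap();
        let upload_parts = upload_parts.clone();
        handles.push(tokio::spawn(async move {
            let _permit = permit; // released when the task finishes
            let part_number = (chunk_index as i32) + 1;
            // ... UploadPart request for this chunk would go here ...
            upload_parts.lock().await.push(
                CompletedPart::builder().part_number(part_number).build(),
            );
        }));
    }

    // Join all tasks, then sort parts since tasks may finish out of order.
    for handle in handles {
        handle.await.unwrap();
    }
    let mut parts = upload_parts.lock().await.clone();
    parts.sort_by(|a, b| a.part_number.cmp(&b.part_number));
    parts
}

The sort step matters because S3's CompleteMultipartUpload expects parts in ascending part-number order, and concurrent tasks push their parts in completion order.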
@@ -101,3 +101,24 @@ pub struct DeleteFileRequest {
     pub prefix: String,
     pub trash: bool,
 }
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub struct GetFileLinkRequest {
+    pub app_version: String,
+    pub file: Vec<GetFileLinkRequestFile>,
+    pub host_id: String,
+    pub path: String,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct GetFileLinkRequestFile {
+    pub path: String,
+    pub size: i64,
+}
+
+// #[derive(Debug, Serialize, Deserialize)]
+// pub struct GetFileLinkRequest {
+//     pub host_id: String,
+//     pub path: String,
+// }
@@ -72,7 +72,7 @@ pub struct JobKeyResponse {
 pub struct CheckActionResponse {
     pub action: String,
     pub state: String,
-    pub usage_size: i64,
+    pub usage_size: Option<i64>,
 }

 #[derive(Debug, Serialize, Deserialize)]
@@ -102,3 +102,10 @@ pub struct GetFileLinkTokenResponse {
     pub secret_access_key: String,
     pub session_token: String,
 }
+
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub struct GetFileLinkResponse {
+    pub url: String,
+}