diff --git a/Cargo.lock b/Cargo.lock
index 8b2cc46..a5189b6 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -656,6 +656,19 @@ version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422"
 
+[[package]]
+name = "console"
+version = "0.15.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb"
+dependencies = [
+ "encode_unicode",
+ "lazy_static",
+ "libc",
+ "unicode-width",
+ "windows-sys 0.52.0",
+]
+
 [[package]]
 name = "const-oid"
 version = "0.9.6"
@@ -805,6 +818,12 @@ dependencies = [
  "zeroize",
 ]
 
+[[package]]
+name = "encode_unicode"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
+
 [[package]]
 name = "encoding_rs"
 version = "0.8.34"
@@ -1096,6 +1115,12 @@ version = "1.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
 
+[[package]]
+name = "human_bytes"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91f255a4535024abf7640cb288260811fc14794f62b063652ed349f9a6c2348e"
+
 [[package]]
 name = "hyper"
 version = "0.14.30"
@@ -1151,7 +1176,7 @@ dependencies = [
  "hyper 0.14.30",
  "log",
  "rustls 0.21.12",
- "rustls-native-certs 0.6.3",
+ "rustls-native-certs",
  "tokio",
  "tokio-rustls 0.24.1",
 ]
@@ -1166,14 +1191,11 @@ dependencies = [
  "http 1.1.0",
  "hyper 1.4.1",
  "hyper-util",
- "log",
  "rustls 0.23.11",
- "rustls-native-certs 0.7.1",
  "rustls-pki-types",
  "tokio",
  "tokio-rustls 0.26.0",
  "tower-service",
- "webpki-roots",
 ]
 
 [[package]]
@@ -1255,6 +1277,28 @@ dependencies = [
  "hashbrown",
 ]
 
+[[package]]
+name = "indicatif"
+version = "0.17.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3"
+dependencies = [
+ "console",
+ "instant",
+ "number_prefix",
+ "portable-atomic",
+ "unicode-width",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222"
+dependencies = [
+ "cfg-if",
+]
+
 [[package]]
 name = "ipnet"
 version = "2.9.0"
@@ -1282,6 +1326,12 @@ dependencies = [
  "wasm-bindgen",
 ]
 
+[[package]]
+name = "lazy_static"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
+
 [[package]]
 name = "libc"
 version = "0.2.155"
@@ -1412,6 +1462,12 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "number_prefix"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
+
 [[package]]
 name = "object"
 version = "0.36.1"
@@ -1565,6 +1621,12 @@ version = "0.3.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec"
 
+[[package]]
+name = "portable-atomic"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
+
 [[package]]
 name = "powerfmt"
 version = "0.2.0"
@@ -1600,8 +1662,8 @@ dependencies = [
  "aws-smithy-types",
  "chrono",
  "clap",
- "hyper 1.4.1",
- "hyper-rustls 0.27.2",
+ "human_bytes",
+ "indicatif",
  "reqwest",
  "serde",
  "serde_json",
@@ -1747,7 +1809,6 @@ version = "0.23.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4828ea528154ae444e5a642dbb7d5623354030dc9822b83fd9bb79683c7399d0"
 dependencies = [
- "log",
  "once_cell",
  "rustls-pki-types",
  "rustls-webpki 0.102.5",
@@ -1767,19 +1828,6 @@ dependencies = [
  "security-framework",
 ]
 
-[[package]]
-name = "rustls-native-certs"
-version = "0.7.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a88d6d420651b496bdd98684116959239430022a115c1240e6c3993be0b15fba"
-dependencies = [
- "openssl-probe",
- "rustls-pemfile 2.1.2",
- "rustls-pki-types",
- "schannel",
- "security-framework",
-]
-
 [[package]]
 name = "rustls-pemfile"
 version = "1.0.4"
@@ -2297,6 +2345,12 @@ dependencies = [
  "tinyvec",
 ]
 
+[[package]]
+name = "unicode-width"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d"
+
 [[package]]
 name = "untrusted"
 version = "0.9.0"
@@ -2441,15 +2495,6 @@ dependencies = [
  "wasm-bindgen",
 ]
 
-[[package]]
-name = "webpki-roots"
-version = "0.26.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd7c23921eeb1713a4e851530e9b9756e4fb0e89978582942612524cf09f01cd"
-dependencies = [
- "rustls-pki-types",
-]
-
 [[package]]
 name = "windows-core"
 version = "0.52.0"
diff --git a/Cargo.toml b/Cargo.toml
index 2fe3999..31ff702 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -6,19 +6,13 @@ edition = "2021"
 [dependencies]
 anyhow = "1.0.86"
 aws-config = "1.5.4"
-aws-sdk-s3 = "1.41.0"
+aws-sdk-s3 = {version = "1.41.0", default-features = false, features = ["rt-tokio", "sigv4a", "behavior-version-latest"]}
 aws-smithy-runtime = "1.6.2"
 aws-smithy-types = "1.2.0"
 chrono = { version = "0.4.38", features = ["serde"] }
 clap = { version = "4.5.9", features = ["derive"] }
-hyper = "1.4.1"
-hyper-rustls = { version = "0.27.2", default-features = false, features = [
-    "webpki-tokio",
-    "http1",
-    "native-tokio",
-    "tls12",
-    "logging",
-] }
+human_bytes = "0.4.3"
+indicatif = "0.17.8"
 reqwest = { version = "0.12.5", features = ["json"] }
 serde = { version = "1.0.204", features = ["derive"] }
 serde_json = "1.0.120"
diff --git a/src/endpoints.rs b/src/endpoints.rs
index 121fb40..e2b5a9f 100644
--- a/src/endpoints.rs
+++ b/src/endpoints.rs
@@ -42,9 +42,7 @@ impl Client {
             .map_err(Into::into)
     }
 
-    pub async fn get_token(
-        &self,
-    ) -> anyhow::Result<types::response::GetFileLinkTokenResponse> {
+    pub async fn get_upload_token(&self) -> anyhow::Result<types::response::GetFileLinkTokenResponse> {
         let client = reqwest::Client::new();
         let request = client
             .get(&format!(
@@ -59,6 +57,40 @@ impl Client {
             .await
             .map_err(Into::into)
     }
+
+    pub async fn get_download_link(
+        &self,
+        req: types::request::GetFileLinkRequest,
+    ) -> anyhow::Result<types::response::GetFileLinkResponse> {
+        let client = reqwest::Client::new();
+        let request = client
+            .post("https://forest.sendy.jp/cloud/service/file/v1/filelink/download")
+            .bearer_auth(&self.token)
+            .json(&req);
+
+        let response = request.send().await?;
+        response
+            .json::<types::response::GetFileLinkResponse>()
+            .await
+            .map_err(Into::into)
+    }
+
+    pub async fn check_action(
+        &self,
+        req: types::request::CheckActionRequest,
+    ) -> anyhow::Result<types::response::CheckActionResponse> {
+        let client = reqwest::Client::new();
+        let request = client
+            .post("https://forest.sendy.jp/cloud/service/file/v3/files/check")
+            .bearer_auth(&self.token)
+            .json(&req);
+
+        let response = request.send().await?;
+        response
+            .json::<types::response::CheckActionResponse>()
+            .await
+            .map_err(Into::into)
+    }
 }
 
 // https://www.rakuten-drive.com/api/account/refreshtoken POST RefreshTokenRequest RefreshTokenResponse
@@ -69,5 +101,6 @@ impl Client {
 // https://forest.sendy.jp/cloud/service/file/v3/files/check POST CheckActionRequest CheckActionResponse
 // https://forest.sendy.jp/cloud/service/file/v3/files/move PUT MoveFileRequest MoveFileResponse
 // https://forest.sendy.jp/cloud/service/file/v1/check/upload POST CheckUploadRequest CheckUploadResponse
-// https://forest.sendy.jp/cloud/service/file/v1/filelink/token?host_id=GclT7DrnLFho7vnIirUzjtMLhRk2&path=hello GET GetFileLinkTokenRequest GetFileLinkTokenResponse
+// https://forest.sendy.jp/cloud/service/file/v1/filelink/token?host_id=GclT7DrnLFho7vnIirUzjtMLhRk2&path=hello GET GetFileLinkTokenResponse
 // https://forest.sendy.jp/cloud/service/file/v1/complete/upload POST CompleteUploadRequest
+// https://forest.sendy.jp/cloud/service/file/v1/filelink/download POST GetFileLinkRequest GetFileLinkResponse
diff --git a/src/main.rs b/src/main.rs
index e2c5e70..c5b8307 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,21 +1,28 @@
-use std::path::{Path, PathBuf};
+use std::{
+    io::{stdout, Write},
+    path::{Path, PathBuf},
+    sync::Arc,
+};
 
-use aws_config::{BehaviorVersion, Region, SdkConfig};
+use aws_config::{environment::region, BehaviorVersion, Region, SdkConfig};
 use aws_sdk_s3::{
     config::Credentials, operation::upload_part, primitives::ByteStream, types::CompletedPart,
 };
 use aws_smithy_runtime::client::http::hyper_014::HyperClientBuilder;
 use aws_smithy_types::byte_stream::Length;
 use clap::{Parser, Subcommand};
-use tokio::{fs::File, io::BufReader};
+use human_bytes::human_bytes;
+use indicatif::{ProgressBar, ProgressState, ProgressStyle};
+use tokio::{fs::File, io::BufReader, sync::Mutex};
 use types::response::ListFilesResponseFile;
 
 mod endpoints;
 mod types;
 
-const BEARER_TOKEN: &str = "eyJhbGciOiJSUzI1NiIsImtpZCI6ImMxNTQwYWM3MWJiOTJhYTA2OTNjODI3MTkwYWNhYmU1YjA1NWNiZWMiLCJ0eXAiOiJKV1QifQ.eyJuYW1lIjoi5bm457-8IOW_l-adkSIsInBsYW4iOiJza2YiLCJpc3MiOiJodHRwczovL3NlY3VyZXRva2VuLmdvb2dsZS5jb20vc2VuZHktc2VydmljZSIsImF1ZCI6InNlbmR5LXNlcnZpY2UiLCJhdXRoX3RpbWUiOjE3MjEyMjYwMTUsInVzZXJfaWQiOiJHY2xUN0RybkxGaG83dm5JaXJVemp0TUxoUmsyIiwic3ViIjoiR2NsVDdEcm5MRmhvN3ZuSWlyVXpqdE1MaFJrMiIsImlhdCI6MTcyMTI0NzQxMCwiZXhwIjoxNzIxMjUxMDEwLCJlbWFpbCI6ImtvdXN1a2UxMTIzNjEyNEBnbWFpbC5jb20iLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsImZpcmViYXNlIjp7ImlkZW50aXRpZXMiOnsiZW1haWwiOlsia291c3VrZTExMjM2MTI0QGdtYWlsLmNvbSJdfSwic2lnbl9pbl9wcm92aWRlciI6ImN1c3RvbSJ9fQ.ikxsOAbgKhKywvvC1Ot28AEZ7_DTVNaMI2KSEFaZAaPTtgPk6fqYzegW2iwq7GK_ySmCuKppPEeSD8nKDggeX96z36Y1zd5xm7EIWTCdmCB36gjhAkAowVenRX2VW3gIVCJVHUQ50UEVM4CMzw73N058fQ97wAdHVp2oOtZOczJyQpAZuy0zqXSKWvnom0SfNz0iZov7r3TLSBlxSMGjEu_aSInq7yMOSHNkbQHenelv3592EY_ktnFLYSYi1HWEEijqsKSGdf01DYBkC5H8Eq0snk7n8NvKFAaUxT8DClxHlE_xagOnbkfCBh-AN2CqnkwxOi7Kkh0iWOkdMLqK0w";
+const BEARER_TOKEN: &str =
+    "eyJhbGciOiJSUzI1NiIsImtpZCI6ImMxNTQwYWM3MWJiOTJhYTA2OTNjODI3MTkwYWNhYmU1YjA1NWNiZWMiLCJ0eXAiOiJKV1QifQ.eyJuYW1lIjoi5bm457-8IOW_l-adkSIsInBsYW4iOiJza2YiLCJpc3MiOiJodHRwczovL3NlY3VyZXRva2VuLmdvb2dsZS5jb20vc2VuZHktc2VydmljZSIsImF1ZCI6InNlbmR5LXNlcnZpY2UiLCJhdXRoX3RpbWUiOjE3MjEyMjYwMTUsInVzZXJfaWQiOiJHY2xUN0RybkxGaG83dm5JaXJVemp0TUxoUmsyIiwic3ViIjoiR2NsVDdEcm5MRmhvN3ZuSWlyVXpqdE1MaFJrMiIsImlhdCI6MTcyMTI1NTM1OCwiZXhwIjoxNzIxMjU4OTU4LCJlbWFpbCI6ImtvdXN1a2UxMTIzNjEyNEBnbWFpbC5jb20iLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsImZpcmViYXNlIjp7ImlkZW50aXRpZXMiOnsiZW1haWwiOlsia291c3VrZTExMjM2MTI0QGdtYWlsLmNvbSJdfSwic2lnbl9pbl9wcm92aWRlciI6ImN1c3RvbSJ9fQ.uC-X4XCMTJ-Vv0bmm85cZy65LVdNxRKBlNXxsg8_QqyMV1rRzmpDMQpwWKk10OUDj6xovg1tfmlUW2syL0twANO8hKOSlI_wLZ1Rvvm0TF8EvDLvv8OGFc93nm3OIaSaiZj-xcORZzeJDVHsdraGoYDX3YbYPIJAhDaOsHX5_QbLwuxoz0dxd0fTAoDH7aEpDhcojjTmMImtbGqMzpvUpwNunJaJK2YZTYiHXZtcK7mr9cQLF5b3Exee--R5hGEU9E49jGtXKQNrP_6mkTXVivJh6TdKeFiMCrbc-6xZvuBnkEQ8g0GvU9cERhJTZ73U2jdHLzWbYitCh2nzkbQDNA";
 const HOST_ID: &str = "GclT7DrnLFho7vnIirUzjtMLhRk2";
 const CHUNK_SIZE: usize = 1024 * 1024 * 10; // 10MB
+const APP_VERSION: &str = "v21.11.10";
 
 #[derive(Parser, Debug)]
 #[command(version, about, long_about=None)]
@@ -38,60 +45,39 @@ enum Commands {
         #[clap(short, long)]
         recursive: bool,
     },
-    Download {},
+    Download {
+        #[clap(short, long)]
+        path: String,
+        #[clap(long)]
+        prefix: Option<String>,
+    },
+    Move {},
     Delete {},
     MkDir {},
 }
 
+#[derive(Debug, Clone)]
+struct TargetFile {
+    file: PathBuf,
+    path: String,
+}
+
 #[tokio::main]
 async fn main() {
     let args = Args::parse();
 
     match &args.command {
         Commands::List { prefix } => {
-            let client = endpoints::Client::new(BEARER_TOKEN.to_string(), HOST_ID.to_string());
-            let pagination_size = 40;
-            let mut files = Vec::<ListFilesResponseFile>::new();
-            let req = types::request::ListFilesRequest {
-                from: 0,
-                host_id: client.host_id.clone(),
-                path: prefix.clone().unwrap_or("".to_string()),
-                sort_type: "path".to_string(),
-                reverse: false,
-                thumbnail_size: 130,
-                to: pagination_size,
-            };
-            let mut res = client.list_files(req).await.unwrap();
-
-            files.append(&mut res.file);
-
-            if !res.last_page {
-                let mut cursor = res.file.len() as i64;
-                loop {
-                    let req = types::request::ListFilesRequest {
-                        from: cursor,
-                        host_id: client.host_id.clone(),
-                        path: prefix.clone().unwrap_or("".to_string()),
-                        sort_type: "path".to_string(),
-                        reverse: false,
-                        thumbnail_size: 130,
-                        to: pagination_size + cursor,
-                    };
-
-                    let mut next_res = client.list_files(req).await.unwrap();
-
-                    files.append(&mut next_res.file);
-
-                    if next_res.last_page {
-                        break;
-                    } else {
-                        cursor += next_res.file.len() as i64;
-                    }
-                }
-            }
-            res.file = files;
+            let res = list_files(prefix.clone()).await.unwrap();
 
             res.file.iter().for_each(|f| {
-                println!("{:#?}", f);
+                let permission_string = if f.is_folder { "d" } else { "-" };
+                println!(
+                    "{} {}\t{}\t{}",
+                    permission_string,
+                    human_bytes(f.size as f64),
+                    f.last_modified,
+                    f.path
+                );
             });
         }
         Commands::Upload {
@@ -99,15 +85,60 @@ async fn main() {
             file,
             prefix,
             recursive,
         } => {
-            // file check
-            if !file.exists() {
-                println!("File not found: {:?}", file);
+            println!("{}", recursive);
+
+            // is folder
+            if file.is_dir() && !*recursive {
+                println!("Use --recursive option for folder upload");
                 return;
             }
-            // is folder
-            if file.is_dir() || *recursive {
Use --recursive option."); - return; + + let mut files = Vec::::new(); + + if file.is_dir() && *recursive { + // upload folder + let mut dirs = Vec::::new(); + dirs.push(file.clone()); + while let Some(dir) = dirs.pop() { + let entries = std::fs::read_dir(dir).unwrap(); + for entry in entries { + let entry = entry.unwrap(); + let path = entry.path(); + if path.is_dir() { + dirs.push(path); + } else { + files.push(TargetFile { + file: path.clone(), + path: path + .strip_prefix(file) + .unwrap() + .to_str() + .unwrap() + .to_string(), + }); + } + } + } + // for file in files { + // println!("{:?}", file); + // } + } else { + // file check + if !file.exists() { + println!("File not found: {:?}", file); + return; + } + files.push(TargetFile { + file: file.clone(), + path: file.file_name().unwrap().to_str().unwrap().to_string(), + }); + } + + if cfg!(windows) { + // replase \ to / + files.iter_mut().for_each(|f| { + f.path = f.path.replace('\\', "/"); + }); } let client = endpoints::Client::new(BEARER_TOKEN.to_string(), HOST_ID.to_string()); @@ -116,19 +147,22 @@ async fn main() { host_id: client.host_id.clone(), path: prefix.clone().unwrap_or("".to_string()), upload_id: "".to_string(), - file: vec![types::request::CheckUploadRequestFile { - path: file.to_str().unwrap().to_string(), - size: file.metadata().unwrap().len() as i64, - }], + file: files + .iter() + .map(|f| types::request::CheckUploadRequestFile { + path: f.path.clone(), + size: f.file.metadata().unwrap().len() as i64, + }) + .collect(), }; let check_upload_res = client.check_upload(req).await.unwrap(); - println!("{:#?}", check_upload_res); + // println!("{:#?}", check_upload_res); - let token_res = client.get_token().await.unwrap(); + let token_res = client.get_upload_token().await.unwrap(); - println!("{:#?}", token_res); + // println!("{:#?}", token_res); let cledential = Credentials::new( token_res.access_key_id.clone(), @@ -139,7 +173,6 @@ async fn main() { ); let config = aws_sdk_s3::Config::builder() .behavior_version_latest() - .endpoint_url("https://sendy-cloud.s3.ap-northeast-1.amazonaws.com") .region(Region::new(check_upload_res.region.clone())) .credentials_provider(cledential) .force_path_style(true) @@ -148,34 +181,111 @@ async fn main() { let s3_client = aws_sdk_s3::Client::from_conf(config); let file_size = file.metadata().unwrap().len(); - if file_size > CHUNK_SIZE as u64 { - multipart_upload(token_res, check_upload_res, file.clone()) - .await - .unwrap(); - } else { - let stream = ByteStream::read_from() - .path(file.clone()) - .offset(0) - .length(Length::Exact(file_size)) - .build() - .await - .unwrap(); - let key = check_upload_res.prefix + "/" + check_upload_res.file[0].path.as_str(); - let _upload_res = s3_client - .put_object() - .bucket(check_upload_res.bucket) - .key(key) - .body(stream) - .send() - .await - .unwrap(); + // if file_size > CHUNK_SIZE as u64 { + for (i, file) in files.iter().enumerate() { + println!("Multi Uploading: {:?}", file.file); + + multipart_upload( + &token_res, + &check_upload_res.bucket, + &check_upload_res.file[i], + &check_upload_res.prefix, + &check_upload_res.region, + &check_upload_res.upload_id, + file.clone(), + ) + .await + .unwrap(); + // } + // } else { + // for (i, file) in files.iter().enumerate() { + // println!("Uploading: {:?}", file.file); + // let stream = ByteStream::read_from() + // .path(file.file.clone()) + // .offset(0) + // .length(Length::Exact(file_size)) + // .build() + // .await + // .unwrap(); + // let key = + // check_upload_res.prefix.to_owned() + 
+                //             check_upload_res.prefix.to_owned() + check_upload_res.file[i].path.as_str();
+                //         let _upload_res = s3_client
+                //             .put_object()
+                //             .bucket(check_upload_res.bucket.clone())
+                //             .key(key)
+                //             .body(stream)
+                //             .send()
+                //             .await
+                //             .unwrap();
+                //     }
+                // }
             }
 
-            println!("Upload");
+            loop {
+                let req = types::request::CheckActionRequest {
+                    key: check_upload_res.upload_id.clone(),
+                };
+                let res = client.check_action(req).await.unwrap();
+
+                if res.state == "complete" {
+                    break;
+                }
+
+                std::thread::sleep(std::time::Duration::from_millis(200));
+            }
+
+            println!("Uploaded");
         }
-        Commands::Download {} => {
+        Commands::Download { path, prefix } => {
+            let client = endpoints::Client::new(BEARER_TOKEN.to_string(), HOST_ID.to_string());
+
+            let file_name = path.split('/').last().unwrap();
+            let file_path =
+                path.split('/').collect::<Vec<&str>>()[0..path.split('/').count() - 1].join("/");
+
+            let list = list_files(Some(file_path.clone())).await.unwrap();
+
+            let file = list
+                .file
+                .iter()
+                .find(|f| f.path == *path)
+                .expect("File not found");
+
+            let req = types::request::GetFileLinkRequest {
+                app_version: APP_VERSION.to_string(),
+                file: vec![types::request::GetFileLinkRequestFile {
+                    path: path.to_string(),
+                    size: file.size,
+                }],
+                host_id: client.host_id.clone(),
+                path: file_path,
+            };
+
+            let res = client.get_download_link(req).await.unwrap();
+
+            // run aria2c
+
+            let stdout = std::process::Command::new("aria2c")
+                .arg("-x16")
+                .arg("-s16")
+                .arg("-d")
+                .arg(".")
+                .arg(res.url)
+                .stdout(std::process::Stdio::piped())
+                .spawn()
+                .expect("failed to execute process")
+                .stdout
+                .expect("failed to get stdout");
+
+            let reader = std::io::BufReader::new(stdout);
+
+            std::io::BufRead::lines(reader).for_each(|line| println!("{}", line.unwrap()));
+
             println!("Download");
         }
+        Commands::Move {} => {
+            println!("Move");
+        }
         Commands::Delete {} => {
             println!("Delete");
         }
@@ -186,21 +296,25 @@ async fn main() {
 }
 
 async fn multipart_upload(
-    token_res: types::response::GetFileLinkTokenResponse,
-    check_upload_res: types::response::CheckUploadResponse,
-    file: PathBuf,
+    token_res: &types::response::GetFileLinkTokenResponse,
+    bucket: &str,
+    target_file: &types::response::CheckUploadResponseFile,
+    prefix: &str,
+    region: &str,
+    upload_id: &str,
+    file: TargetFile,
 ) -> anyhow::Result<()> {
-    if !file.exists() {
-        println!("File not found: {:?}", file);
-        return Err(anyhow::anyhow!("File not found: {:?}", file));
+    if !file.file.exists() {
+        println!("File not found: {:?}", file.file);
+        return Err(anyhow::anyhow!("File not found: {:?}", file.file));
     }
-    let file_size = file.metadata().unwrap().len();
+    let file_size = file.file.metadata().unwrap().len();
 
     let cledential = Credentials::new(
-        token_res.access_key_id,
-        token_res.secret_access_key,
-        Some(token_res.session_token),
+        &token_res.access_key_id,
+        &token_res.secret_access_key,
+        Some(token_res.session_token.clone()),
         // 2024-07-18T07:14:42Z
         Some(
             chrono::DateTime::parse_from_rfc3339(&token_res.expiration)
@@ -210,31 +324,26 @@ async fn multipart_upload(
         "2021-06-01",
     );
 
-    // let tls_client = hyper_rustls::HttpsConnectorBuilder::new();
-
-    // let hyper_connector = HyperClientBuilder::new().build(tls_client);
-
     let config = aws_sdk_s3::Config::builder()
         .behavior_version_latest()
         .credentials_provider(cledential)
-        .region(Region::new(check_upload_res.region))
-        .endpoint_url("https://sendy-cloud.s3.ap-northeast-1.amazonaws.com")
+        .region(Region::new(region.to_owned()))
+        // .endpoint_url("https://sendy-cloud.s3.ap-northeast-1.amazonaws.com")
        .build();
     let s3_client = aws_sdk_s3::Client::from_conf(config);
 
-    // let file = BufReader::new(File::open(file).await.unwrap());
-    let key = check_upload_res.prefix + check_upload_res.file[0].path.as_str();
+    let key = prefix.to_owned() + target_file.path.as_str();
 
     let multipart_upload_res = s3_client
         .create_multipart_upload()
-        .bucket(check_upload_res.bucket.clone())
+        .bucket(bucket)
         .key(key.clone())
         .send()
         .await
         .unwrap();
 
-    let upload_id = multipart_upload_res.upload_id().unwrap();
+    let upload_id = multipart_upload_res.upload_id().unwrap().to_string();
 
     let mut chunk_count = file_size / CHUNK_SIZE as u64 + 1;
     let mut size_of_last_chunk = file_size % CHUNK_SIZE as u64;
     if size_of_last_chunk == 0 {
         size_of_last_chunk = CHUNK_SIZE as u64;
         chunk_count -= 1;
     }
 
@@ -243,47 +352,82 @@ async fn multipart_upload(
-    let mut upload_parts = Vec::<CompletedPart>::new();
+    let upload_parts = Arc::new(Mutex::new(Vec::<CompletedPart>::new()));
+
+    let pb = ProgressBar::new(file_size);
+    pb.set_style(ProgressStyle::with_template("{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {bytes}/{total_bytes} ({eta})")
+        .unwrap()
+        .with_key("eta", |state: &ProgressState, w: &mut dyn std::fmt::Write| write!(w, "{:.1}s", state.eta().as_secs_f64()).unwrap())
+        .progress_chars("#>-"));
+
+    let semaphore = Arc::new(tokio::sync::Semaphore::new(20));
+    let mut handles = Vec::new();
 
     for chunk_index in 0..chunk_count {
-        let this_chunk = if chunk_count - 1 == chunk_index {
-            size_of_last_chunk
-        } else {
-            CHUNK_SIZE as u64
-        };
-        let stream = ByteStream::read_from()
-            .path(file.clone())
-            .offset(chunk_index * CHUNK_SIZE as u64)
-            .length(Length::Exact(this_chunk))
-            .build()
-            .await
-            .unwrap();
-        //Chunk index needs to start at 0, but part numbers start at 1.
-        let part_number = (chunk_index as i32) + 1;
-        let upload_part_res = s3_client
-            .upload_part()
-            .key(&key)
-            .bucket(&check_upload_res.bucket)
-            .upload_id(upload_id)
-            .body(stream)
-            .part_number(part_number)
-            .send()
-            .await?;
-        upload_parts.push(
-            CompletedPart::builder()
-                .e_tag(upload_part_res.e_tag.unwrap_or_default())
+        let bucket = bucket.to_owned();
+        let key = key.clone();
+        let upload_id = upload_id.clone();
+        let s3_client = s3_client.clone();
+        let pb = pb.clone();
+        let file = file.clone();
+        let upload_parts = upload_parts.clone();
+
+        let semaphore = semaphore.clone().acquire_owned().await.unwrap();
+
+        let handle = tokio::spawn(async move {
+            let _permit = semaphore;
+
+            let this_chunk = if chunk_count - 1 == chunk_index {
+                size_of_last_chunk
+            } else {
+                CHUNK_SIZE as u64
+            };
+            let stream = ByteStream::read_from()
+                .path(file.file.clone())
+                .offset(chunk_index * CHUNK_SIZE as u64)
+                .length(Length::Exact(this_chunk))
+                .build()
+                .await
+                .unwrap();
+            //Chunk index needs to start at 0, but part numbers start at 1.
+            let part_number = (chunk_index as i32) + 1;
+            let upload_part_res = s3_client
+                .upload_part()
+                .key(&key)
+                .bucket(bucket)
+                .upload_id(upload_id)
+                .body(stream)
                 .part_number(part_number)
-                .build(),
-        );
+                .send()
+                .await
+                .unwrap();
+            upload_parts.lock().await.push(
+                CompletedPart::builder()
+                    .e_tag(upload_part_res.e_tag.unwrap_or_default())
+                    .part_number(part_number)
+                    .build(),
+            );
+            pb.inc(this_chunk);
+        });
+        handles.push(handle);
     }
 
+    for handle in handles {
+        handle.await.unwrap();
+    }
+
+    upload_parts
+        .lock()
+        .await
+        .sort_by(|a, b| a.part_number.cmp(&b.part_number));
+
     let completed_multipart_upload = aws_sdk_s3::types::CompletedMultipartUpload::builder()
-        .set_parts(Some(upload_parts))
+        .set_parts(Some(upload_parts.lock().await.clone()))
         .build();
 
     let _complete_multipart_upload_res = s3_client
         .complete_multipart_upload()
-        .bucket(check_upload_res.bucket)
+        .bucket(bucket)
         .key(key)
         .upload_id(upload_id)
         .multipart_upload(completed_multipart_upload)
@@ -291,5 +435,52 @@ async fn multipart_upload(
         .await
         .unwrap();
 
+    pb.finish_with_message("Uploaded");
+
     Ok(())
 }
+
+async fn list_files(prefix: Option<String>) -> anyhow::Result<types::response::ListFilesResponse> {
+    let client = endpoints::Client::new(BEARER_TOKEN.to_string(), HOST_ID.to_string());
+    let pagination_size = 40;
+    let mut files = Vec::<ListFilesResponseFile>::new();
+    let req = types::request::ListFilesRequest {
+        from: 0,
+        host_id: client.host_id.clone(),
+        path: prefix.clone().unwrap_or("".to_string()),
+        sort_type: "path".to_string(),
+        reverse: false,
+        thumbnail_size: 130,
+        to: pagination_size,
+    };
+    let mut res = client.list_files(req).await?;
+
+    files.append(&mut res.file);
+
+    if !res.last_page {
+        let mut cursor = res.file.len() as i64;
+        loop {
+            let req = types::request::ListFilesRequest {
+                from: cursor,
+                host_id: client.host_id.clone(),
+                path: prefix.clone().unwrap_or("".to_string()),
+                sort_type: "path".to_string(),
+                reverse: false,
+                thumbnail_size: 130,
+                to: pagination_size + cursor,
+            };
+
+            let mut next_res = client.list_files(req).await?;
+            files.append(&mut next_res.file);
+
+            if next_res.last_page {
+                break;
+            } else {
+                cursor += next_res.file.len() as i64;
+            }
+        }
+    }
+    res.file = files;
+
+    Ok(res)
+}
diff --git a/src/types/request.rs b/src/types/request.rs
index 180a70a..3006602 100644
--- a/src/types/request.rs
+++ b/src/types/request.rs
@@ -101,3 +101,24 @@ pub struct DeleteFileRequest {
     pub prefix: String,
     pub trash: bool,
 }
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub struct GetFileLinkRequest {
+    pub app_version: String,
+    pub file: Vec<GetFileLinkRequestFile>,
+    pub host_id: String,
+    pub path: String,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct GetFileLinkRequestFile {
+    pub path: String,
+    pub size: i64,
+}
+
+// #[derive(Debug, Serialize, Deserialize)]
+// pub struct GetFileLinkRequest {
+//     pub host_id: String,
+//     pub path: String,
+// }
\ No newline at end of file
diff --git a/src/types/response.rs b/src/types/response.rs
index b2e14e1..fc6dca7 100644
--- a/src/types/response.rs
+++ b/src/types/response.rs
@@ -72,7 +72,7 @@ pub struct JobKeyResponse {
 pub struct CheckActionResponse {
     pub action: String,
     pub state: String,
-    pub usage_size: i64,
+    pub usage_size: Option<i64>,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
@@ -102,3 +102,10 @@ pub struct GetFileLinkTokenResponse {
     pub secret_access_key: String,
     pub session_token: String,
 }
+
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub struct GetFileLinkResponse {
+    pub url: String,
+}
\ No newline at end of file
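
Two patterns in this patch carry most of the upload logic, so here are self-contained sketches of both. Names like `chunk_layout` and the 100-part loop below are illustrative only, not part of the patch.

First, the chunk accounting in `multipart_upload`: the file is cut into fixed 10 MB parts, the remainder becomes the final part, and a size that divides evenly must not produce an empty trailing part. A minimal sketch of that math, assuming the `CHUNK_SIZE` from main.rs:

```rust
const CHUNK_SIZE: u64 = 1024 * 1024 * 10; // 10MB, matching main.rs

/// Returns (part_count, size_of_last_part) for a multipart upload.
fn chunk_layout(file_size: u64) -> (u64, u64) {
    let mut chunk_count = file_size / CHUNK_SIZE + 1;
    let mut size_of_last_chunk = file_size % CHUNK_SIZE;
    if size_of_last_chunk == 0 {
        size_of_last_chunk = CHUNK_SIZE;
        chunk_count -= 1; // drop the would-be empty trailing part
    }
    (chunk_count, size_of_last_chunk)
}

fn main() {
    assert_eq!(chunk_layout(25 * 1024 * 1024), (3, 5 * 1024 * 1024)); // 10 + 10 + 5
    assert_eq!(chunk_layout(20 * 1024 * 1024), (2, CHUNK_SIZE)); // exact multiple
    assert_eq!(chunk_layout(1), (1, 1)); // small files still get one part
}
```

Second, the concurrency shape of the upload loop: one spawned task per part, a semaphore capping in-flight parts at 20, completed parts collected behind a `Mutex` and sorted by part number before the complete call, since S3 requires the part list in ascending order. A stripped-down sketch of the same pattern with the actual `upload_part` call stubbed out:

```rust
use std::sync::Arc;
use tokio::sync::{Mutex, Semaphore};

#[tokio::main]
async fn main() {
    let semaphore = Arc::new(Semaphore::new(20)); // at most 20 parts in flight
    let parts = Arc::new(Mutex::new(Vec::new()));
    let mut handles = Vec::new();

    for part_number in 1..=100i32 {
        // Acquiring before spawning applies backpressure to the spawn loop itself.
        let permit = semaphore.clone().acquire_owned().await.unwrap();
        let parts = parts.clone();
        handles.push(tokio::spawn(async move {
            let _permit = permit; // released when the task finishes
            // s3_client.upload_part()...send().await would go here
            parts.lock().await.push(part_number);
        }));
    }
    for handle in handles {
        handle.await.unwrap();
    }

    // Tasks finish out of order; restore part order before completing the upload.
    parts.lock().await.sort();
    assert_eq!(parts.lock().await.len(), 100);
}
```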