init
commit 343c2f0c02
.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
/target
Cargo.lock (generated, new file, 2641 lines)
File diff suppressed because it is too large.
Cargo.toml (new file, 25 lines)
@@ -0,0 +1,25 @@
[package]
name = "rakuten-drive-cui"
version = "0.1.0"
edition = "2021"

[dependencies]
anyhow = "1.0.86"
aws-config = "1.5.4"
aws-sdk-s3 = "1.41.0"
aws-smithy-runtime = "1.6.2"
aws-smithy-types = "1.2.0"
chrono = { version = "0.4.38", features = ["serde"] }
clap = { version = "4.5.9", features = ["derive"] }
hyper = "1.4.1"
hyper-rustls = { version = "0.27.2", default-features = false, features = [
    "webpki-tokio",
    "http1",
    "native-tokio",
    "tls12",
    "logging",
] }
reqwest = { version = "0.12.5", features = ["json"] }
serde = { version = "1.0.204", features = ["derive"] }
serde_json = "1.0.120"
tokio = { version = "1.38.0", features = ["full"] }
src/endpoints.rs (new file, 73 lines)
@@ -0,0 +1,73 @@
use crate::types;

pub struct Client {
    pub token: String,
    pub host_id: String,
}

impl Client {
    pub fn new(token: String, host_id: String) -> Self {
        Self { token, host_id }
    }

    pub async fn list_files(
        &self,
        req: types::request::ListFilesRequest,
    ) -> anyhow::Result<types::response::ListFilesResponse> {
        let client = reqwest::Client::new();
        let request = client
            .post("https://forest.sendy.jp/cloud/service/file/v1/files")
            .bearer_auth(&self.token)
            .json(&req);

        let response = request.send().await?;
        response
            .json::<types::response::ListFilesResponse>()
            .await
            .map_err(Into::into)
    }

    pub async fn check_upload(
        &self,
        req: types::request::CheckUploadRequest,
    ) -> anyhow::Result<types::response::CheckUploadResponse> {
        let client = reqwest::Client::new();
        let request = client
            .post("https://forest.sendy.jp/cloud/service/file/v1/check/upload")
            .bearer_auth(&self.token)
            .json(&req);

        let response = request.send().await?;
        response
            .json::<types::response::CheckUploadResponse>()
            .await
            .map_err(Into::into)
    }

    pub async fn get_token(
        &self,
    ) -> anyhow::Result<types::response::GetFileLinkTokenResponse> {
        let client = reqwest::Client::new();
        let request = client
            .get(&format!(
                "https://forest.sendy.jp/cloud/service/file/v1/filelink/token?host_id={}&path={}",
                self.host_id, "hello"
            ))
            .bearer_auth(&self.token);

        let response = request.send().await?;
        response
            .json::<types::response::GetFileLinkTokenResponse>()
            .await
            .map_err(Into::into)
    }
}

// https://www.rakuten-drive.com/api/account/refreshtoken POST RefreshTokenRequest RefreshTokenResponse
// https://forest.sendy.jp/cloud/service/file/v1/files POST ListFilesRequest ListFilesResponse
// https://forest.sendy.jp/cloud/service/file/v3/files DELETE DeleteFileRequest JobKeyResponse
// https://forest.sendy.jp/cloud/service/file/v1/files/create POST CreateFolderRequest
// https://forest.sendy.jp/cloud/service/file/v3/files/rename PUT RenameFileRequest RenameFileResponse
// https://forest.sendy.jp/cloud/service/file/v3/files/check POST CheckActionRequest CheckActionResponse
// https://forest.sendy.jp/cloud/service/file/v3/files/move PUT MoveFileRequest MoveFileResponse
// https://forest.sendy.jp/cloud/service/file/v1/check/upload POST CheckUploadRequest CheckUploadResponse
// https://forest.sendy.jp/cloud/service/file/v1/filelink/token?host_id=GclT7DrnLFho7vnIirUzjtMLhRk2&path=hello GET GetFileLinkTokenRequest GetFileLinkTokenResponse
// https://forest.sendy.jp/cloud/service/file/v1/complete/upload POST CompleteUploadRequest
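The comment block at the end of endpoints.rs maps each Rakuten Drive endpoint to its request and response types. As a minimal sketch (not part of this commit), the remaining endpoints could be wrapped with the same reqwest pattern; the example below assumes the complete-upload endpoint takes CompleteUploadRequest and, hypothetically, answers with a body that deserializes into JobKeyResponse.

// Sketch only: same pattern as list_files/check_upload above.
// The JobKeyResponse return type is an assumption; the endpoint list does not name a response type.
impl Client {
    pub async fn complete_upload(
        &self,
        req: types::request::CompleteUploadRequest,
    ) -> anyhow::Result<types::response::JobKeyResponse> {
        let client = reqwest::Client::new();
        let request = client
            .post("https://forest.sendy.jp/cloud/service/file/v1/complete/upload")
            .bearer_auth(&self.token)
            .json(&req);

        let response = request.send().await?;
        response
            .json::<types::response::JobKeyResponse>()
            .await
            .map_err(Into::into)
    }
}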
src/main.rs (new file, 295 lines)
@@ -0,0 +1,295 @@
use std::path::{Path, PathBuf};

use aws_config::{BehaviorVersion, Region, SdkConfig};
use aws_sdk_s3::{
    config::Credentials, operation::upload_part, primitives::ByteStream, types::CompletedPart,
};
use aws_smithy_runtime::client::http::hyper_014::HyperClientBuilder;
use aws_smithy_types::byte_stream::Length;
use clap::{Parser, Subcommand};
use tokio::{fs::File, io::BufReader};
use types::response::ListFilesResponseFile;

mod endpoints;
mod types;

const BEARER_TOKEN: &str = "eyJhbGciOiJSUzI1NiIsImtpZCI6ImMxNTQwYWM3MWJiOTJhYTA2OTNjODI3MTkwYWNhYmU1YjA1NWNiZWMiLCJ0eXAiOiJKV1QifQ.eyJuYW1lIjoi5bm457-8IOW_l-adkSIsInBsYW4iOiJza2YiLCJpc3MiOiJodHRwczovL3NlY3VyZXRva2VuLmdvb2dsZS5jb20vc2VuZHktc2VydmljZSIsImF1ZCI6InNlbmR5LXNlcnZpY2UiLCJhdXRoX3RpbWUiOjE3MjEyMjYwMTUsInVzZXJfaWQiOiJHY2xUN0RybkxGaG83dm5JaXJVemp0TUxoUmsyIiwic3ViIjoiR2NsVDdEcm5MRmhvN3ZuSWlyVXpqdE1MaFJrMiIsImlhdCI6MTcyMTI0NzQxMCwiZXhwIjoxNzIxMjUxMDEwLCJlbWFpbCI6ImtvdXN1a2UxMTIzNjEyNEBnbWFpbC5jb20iLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsImZpcmViYXNlIjp7ImlkZW50aXRpZXMiOnsiZW1haWwiOlsia291c3VrZTExMjM2MTI0QGdtYWlsLmNvbSJdfSwic2lnbl9pbl9wcm92aWRlciI6ImN1c3RvbSJ9fQ.ikxsOAbgKhKywvvC1Ot28AEZ7_DTVNaMI2KSEFaZAaPTtgPk6fqYzegW2iwq7GK_ySmCuKppPEeSD8nKDggeX96z36Y1zd5xm7EIWTCdmCB36gjhAkAowVenRX2VW3gIVCJVHUQ50UEVM4CMzw73N058fQ97wAdHVp2oOtZOczJyQpAZuy0zqXSKWvnom0SfNz0iZov7r3TLSBlxSMGjEu_aSInq7yMOSHNkbQHenelv3592EY_ktnFLYSYi1HWEEijqsKSGdf01DYBkC5H8Eq0snk7n8NvKFAaUxT8DClxHlE_xagOnbkfCBh-AN2CqnkwxOi7Kkh0iWOkdMLqK0w";
const HOST_ID: &str = "GclT7DrnLFho7vnIirUzjtMLhRk2";
const CHUNK_SIZE: usize = 1024 * 1024 * 10; // 10MB

#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand, Debug)]
enum Commands {
    List {
        #[clap(short, long)]
        prefix: Option<String>,
    },
    Upload {
        #[clap(short, long)]
        file: PathBuf,
        #[clap(short, long)]
        prefix: Option<String>,
        #[clap(short, long)]
        recursive: bool,
    },
    Download {},
    Delete {},
    MkDir {},
}

#[tokio::main]
async fn main() {
    let args = Args::parse();

    match &args.command {
        Commands::List { prefix } => {
            let client = endpoints::Client::new(BEARER_TOKEN.to_string(), HOST_ID.to_string());
            let pagination_size = 40;
            let mut files = Vec::<ListFilesResponseFile>::new();
            let req = types::request::ListFilesRequest {
                from: 0,
                host_id: client.host_id.clone(),
                path: prefix.clone().unwrap_or("".to_string()),
                sort_type: "path".to_string(),
                reverse: false,
                thumbnail_size: 130,
                to: pagination_size,
            };
            let mut res = client.list_files(req).await.unwrap();

            files.append(&mut res.file);

            if !res.last_page {
                let mut cursor = res.file.len() as i64;
                loop {
                    let req = types::request::ListFilesRequest {
                        from: cursor,
                        host_id: client.host_id.clone(),
                        path: prefix.clone().unwrap_or("".to_string()),
                        sort_type: "path".to_string(),
                        reverse: false,
                        thumbnail_size: 130,
                        to: pagination_size + cursor,
                    };

                    let mut next_res = client.list_files(req).await.unwrap();

                    files.append(&mut next_res.file);

                    if next_res.last_page {
                        break;
                    } else {
                        cursor += next_res.file.len() as i64;
                    }
                }
            }
            res.file = files;
            res.file.iter().for_each(|f| {
                println!("{:#?}", f);
            });
        }
        Commands::Upload {
            file,
            prefix,
            recursive,
        } => {
            // file check
            if !file.exists() {
                println!("File not found: {:?}", file);
                return;
            }
            // is folder
            if file.is_dir() || *recursive {
                println!("Folder upload is not supported. Use --recursive option.");
                return;
            }

            let client = endpoints::Client::new(BEARER_TOKEN.to_string(), HOST_ID.to_string());

            let req = types::request::CheckUploadRequest {
                host_id: client.host_id.clone(),
                path: prefix.clone().unwrap_or("".to_string()),
                upload_id: "".to_string(),
                file: vec![types::request::CheckUploadRequestFile {
                    path: file.to_str().unwrap().to_string(),
                    size: file.metadata().unwrap().len() as i64,
                }],
            };

            let check_upload_res = client.check_upload(req).await.unwrap();

            println!("{:#?}", check_upload_res);

            let token_res = client.get_token().await.unwrap();

            println!("{:#?}", token_res);

            let credential = Credentials::new(
                token_res.access_key_id.clone(),
                token_res.secret_access_key.clone(),
                Some(token_res.session_token.clone()),
                None,
                "2021-06-01",
            );
            let config = aws_sdk_s3::Config::builder()
                .behavior_version_latest()
                .endpoint_url("https://sendy-cloud.s3.ap-northeast-1.amazonaws.com")
                .region(Region::new(check_upload_res.region.clone()))
                .credentials_provider(credential)
                .force_path_style(true)
                .build();

            let s3_client = aws_sdk_s3::Client::from_conf(config);
            let file_size = file.metadata().unwrap().len();

            if file_size > CHUNK_SIZE as u64 {
                multipart_upload(token_res, check_upload_res, file.clone())
                    .await
                    .unwrap();
            } else {
                let stream = ByteStream::read_from()
                    .path(file.clone())
                    .offset(0)
                    .length(Length::Exact(file_size))
                    .build()
                    .await
                    .unwrap();
                let key = check_upload_res.prefix + "/" + check_upload_res.file[0].path.as_str();
                let _upload_res = s3_client
                    .put_object()
                    .bucket(check_upload_res.bucket)
                    .key(key)
                    .body(stream)
                    .send()
                    .await
                    .unwrap();
            }

            println!("Upload");
        }
        Commands::Download {} => {
            println!("Download");
        }
        Commands::Delete {} => {
            println!("Delete");
        }
        Commands::MkDir {} => {
            println!("MkDir");
        }
    }
}

async fn multipart_upload(
    token_res: types::response::GetFileLinkTokenResponse,
    check_upload_res: types::response::CheckUploadResponse,
    file: PathBuf,
) -> anyhow::Result<()> {
    if !file.exists() {
        println!("File not found: {:?}", file);
        return Err(anyhow::anyhow!("File not found: {:?}", file));
    }

    let file_size = file.metadata().unwrap().len();

    let credential = Credentials::new(
        token_res.access_key_id,
        token_res.secret_access_key,
        Some(token_res.session_token),
        // 2024-07-18T07:14:42Z
        Some(
            chrono::DateTime::parse_from_rfc3339(&token_res.expiration)
                .unwrap()
                .into(),
        ),
        "2021-06-01",
    );

    // let tls_client = hyper_rustls::HttpsConnectorBuilder::new();

    // let hyper_connector = HyperClientBuilder::new().build(tls_client);

    let config = aws_sdk_s3::Config::builder()
        .behavior_version_latest()
        .credentials_provider(credential)
        .region(Region::new(check_upload_res.region))
        .endpoint_url("https://sendy-cloud.s3.ap-northeast-1.amazonaws.com")
        .build();

    let s3_client = aws_sdk_s3::Client::from_conf(config);
    // let file = BufReader::new(File::open(file).await.unwrap());

    let key = check_upload_res.prefix + check_upload_res.file[0].path.as_str();

    let multipart_upload_res = s3_client
        .create_multipart_upload()
        .bucket(check_upload_res.bucket.clone())
        .key(key.clone())
        .send()
        .await
        .unwrap();

    let upload_id = multipart_upload_res.upload_id().unwrap();

    // One full-size part per CHUNK_SIZE bytes plus a shorter final part,
    // collapsed back to an exact count when the size is an even multiple.
    let mut chunk_count = (file_size / CHUNK_SIZE as u64) + 1;
    let mut size_of_last_chunk = file_size % CHUNK_SIZE as u64;
    if size_of_last_chunk == 0 {
        size_of_last_chunk = CHUNK_SIZE as u64;
        chunk_count -= 1;
    }

    let mut upload_parts = Vec::<CompletedPart>::new();

    for chunk_index in 0..chunk_count {
        let this_chunk = if chunk_count - 1 == chunk_index {
            size_of_last_chunk
        } else {
            CHUNK_SIZE as u64
        };
        let stream = ByteStream::read_from()
            .path(file.clone())
            .offset(chunk_index * CHUNK_SIZE as u64)
            .length(Length::Exact(this_chunk))
            .build()
            .await
            .unwrap();
        // Chunk index needs to start at 0, but part numbers start at 1.
        let part_number = (chunk_index as i32) + 1;
        let upload_part_res = s3_client
            .upload_part()
            .key(&key)
            .bucket(&check_upload_res.bucket)
            .upload_id(upload_id)
            .body(stream)
            .part_number(part_number)
            .send()
            .await?;
        upload_parts.push(
            CompletedPart::builder()
                .e_tag(upload_part_res.e_tag.unwrap_or_default())
                .part_number(part_number)
                .build(),
        );
    }

    let completed_multipart_upload = aws_sdk_s3::types::CompletedMultipartUpload::builder()
        .set_parts(Some(upload_parts))
        .build();

    let _complete_multipart_upload_res = s3_client
        .complete_multipart_upload()
        .bucket(check_upload_res.bucket)
        .key(key)
        .upload_id(upload_id)
        .multipart_upload(completed_multipart_upload)
        .send()
        .await
        .unwrap();

    Ok(())
}
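The chunk bookkeeping in multipart_upload splits the file into fixed 10 MB parts plus a shorter final part. A small worked example of that arithmetic (illustration only, with hypothetical sizes, not part of the commit):

// Illustration of the chunk arithmetic used in multipart_upload above.
fn chunk_plan(file_size: u64, chunk_size: u64) -> (u64, u64) {
    // Full parts plus one remainder part, corrected when the size is an exact multiple.
    let mut chunk_count = (file_size / chunk_size) + 1;
    let mut size_of_last_chunk = file_size % chunk_size;
    if size_of_last_chunk == 0 {
        size_of_last_chunk = chunk_size;
        chunk_count -= 1;
    }
    (chunk_count, size_of_last_chunk)
}

fn main() {
    // 25 MB file, 10 MB parts: three parts of 10 MB, 10 MB, 5 MB.
    assert_eq!(chunk_plan(25 * 1024 * 1024, 10 * 1024 * 1024), (3, 5 * 1024 * 1024));
    // 20 MB file, 10 MB parts: two parts of 10 MB each.
    assert_eq!(chunk_plan(20 * 1024 * 1024, 10 * 1024 * 1024), (2, 10 * 1024 * 1024));
}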
src/types/mod.rs (new file, 2 lines)
@@ -0,0 +1,2 @@
pub mod request;
pub mod response;
src/types/request.rs (new file, 103 lines)
@@ -0,0 +1,103 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RefreshTokenRequest {
    pub refresh_token: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct ListFilesRequest {
    pub from: i64,
    pub host_id: String,
    pub path: String,
    pub reverse: bool,
    pub sort_type: String,
    pub thumbnail_size: i64,
    pub to: i64,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct CreateFolderRequest {
    pub host_id: String,
    pub name: String,
    pub path: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct RenameFileRequest {
    pub file: Vec<RenameFileRequestFile>,
    pub host_id: String,
    pub name: String,
    pub path: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct RenameFileRequestFile {
    pub last_modified: String, // 1970-01-20T22:07:12.804Z
    pub path: String,
    pub size: i64,
    pub version_id: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct CheckActionRequest {
    pub key: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct MoveFileRequest {
    pub file: Vec<RenameFileRequestFile>,
    pub host_id: String,
    pub prefix: String,
    pub path: String,
    pub target_id: String,
    pub to_path: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct CheckUploadRequest {
    pub file: Vec<CheckUploadRequestFile>,
    pub host_id: String,
    pub path: String,
    pub upload_id: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct CheckUploadRequestFile {
    pub path: String,
    pub size: i64,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct CompleteUploadRequest {
    pub file: Vec<CompleteUploadRequestFile>,
    pub host_id: String,
    pub path: String,
    pub state: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct CompleteUploadRequestFile {
    pub path: String,
    pub size: i64,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct DeleteFileRequest {
    pub file: Vec<RenameFileRequestFile>,
    pub host_id: String,
    pub prefix: String,
    pub trash: bool,
}
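Because of the serde rename_all attributes, the request structs serialize with snake_case JSON keys that match the Rust field names above. A minimal sketch of the wire format, with placeholder values and assuming ListFilesRequest and serde_json are in scope:

// Sketch: JSON produced for a ListFilesRequest (placeholder values, not real data).
fn main() {
    let req = ListFilesRequest {
        from: 0,
        host_id: "HOST_ID".to_string(),
        path: "".to_string(),
        reverse: false,
        sort_type: "path".to_string(),
        thumbnail_size: 130,
        to: 40,
    };
    // {"from":0,"host_id":"HOST_ID","path":"","reverse":false,"sort_type":"path","thumbnail_size":130,"to":40}
    println!("{}", serde_json::to_string(&req).unwrap());
}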
src/types/response.rs (new file, 104 lines)
@@ -0,0 +1,104 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RefreshTokenResponse {
    pub uid: String,
    pub email: String,
    pub email_verified: bool,
    pub display_name: String,
    pub disabled: bool,
    pub metadata: RefreshTokenResponseMetadata,
    pub provider_data: Vec<serde_json::Value>,
    pub custom_claims: RefreshTokenResponseCustomClaims,
    pub tokens_valid_after_time: String, // Wed, 17 Jul 2024 14:20:15 GMT
    pub refresh_token: String,
    pub id_token: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct RefreshTokenResponseMetadata {
    pub last_sign_in_time: String, // Wed, 17 Jul 2024 14:20:15 GMT
    pub creation_time: String,     // Wed, 17 Jul 2024 14:20:15 GMT
}

#[derive(Debug, Serialize, Deserialize)]
pub struct RefreshTokenResponseCustomClaims {
    pub plan: String, // skf = 50GB free
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct ListFilesResponse {
    pub access_level: String,
    pub count: i64,
    pub file: Vec<ListFilesResponseFile>,
    pub last_page: bool,
    pub owner: String,
    pub prefix: String,
    pub usage_size: i64,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct ListFilesResponseFile {
    pub has_child_folder: bool,
    pub is_backed_up: bool,
    pub is_folder: bool,
    pub is_latest: bool,
    pub is_share: String,
    pub last_modified: String, // 2024-07-16T06:18:06.595Z

    #[serde(rename = "OwnerID")]
    pub owner_id: String,
    pub path: String,
    pub size: i64,
    pub thumbnail: String,

    #[serde(rename = "VersionID")]
    pub version_id: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct JobKeyResponse {
    pub key: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct CheckActionResponse {
    pub action: String,
    pub state: String,
    pub usage_size: i64,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct CheckUploadResponse {
    pub bucket: String,
    pub file: Vec<CheckUploadResponseFile>,
    pub prefix: String,
    pub region: String,
    pub upload_id: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct CheckUploadResponseFile {
    pub last_modified: String, // 1970-01-20T22:07:12.804Z
    pub path: String,
    pub size: i64,
    pub version_id: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct GetFileLinkTokenResponse {
    pub access_key_id: String,
    pub expiration: String, // 2024-07-16T06:18:06.595Z
    pub secret_access_key: String,
    pub session_token: String,
}