lib improve

This commit is contained in:
sim1222 2024-07-19 15:05:30 +09:00
parent d9b587c0a5
commit 1e31d74904
Signed by: sim1222
GPG Key ID: D1AE30E316E44E5D
7 changed files with 469 additions and 398 deletions

View File

@@ -162,7 +162,12 @@ impl Client {
match serde_json::from_str(&text) {
std::result::Result::Ok(json) => Ok(json),
Err(e) => Err(anyhow::Error::new(e).context(text.trim().to_string())),
Err(err) => {
match serde_json::from_str::<types::response::SendyError>(&text) {
std::result::Result::Ok(json) => Err(anyhow::anyhow!("{:?}", json)),
Err(_) => Err(anyhow::Error::new(err).context(text.trim().to_string())),
}
}
}
}
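
The added branch tries the expected response type first and only then attempts to decode the body as a typed SendyError, keeping the original parse error (with the raw body as context) if neither matches. A minimal standalone sketch of that fallback pattern, using simplified stand-ins for the real types in types::response and the same serde_json/anyhow crates the project already depends on:

use anyhow::Result;
use serde::Deserialize;

// Simplified stand-ins for the real response and error types.
#[derive(Debug, Deserialize)]
struct ListFilesResponse {
    file: Vec<serde_json::Value>,
}

#[derive(Debug, Deserialize)]
struct SendyError {
    error: String,
}

fn parse_body(text: &str) -> Result<ListFilesResponse> {
    match serde_json::from_str::<ListFilesResponse>(text) {
        Ok(json) => Ok(json),
        Err(err) => {
            // The body is not the expected shape; see whether it is a known API error.
            match serde_json::from_str::<SendyError>(text) {
                Ok(api_err) => Err(anyhow::anyhow!("{:?}", api_err)),
                // Neither parse worked: keep the original error, with the raw body as context.
                Err(_) => Err(anyhow::Error::new(err).context(text.trim().to_string())),
            }
        }
    }
}

fn main() {
    // An error body decodes as SendyError and is surfaced as a typed error.
    println!("{:?}", parse_body(r#"{"error":"SENDY_ERR_FILE_NO_SUCH_KEY"}"#));
    // A successful body decodes as the expected response type.
    println!("{:?}", parse_body(r#"{"file":[]}"#));
}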

View File

@@ -1,363 +0,0 @@
use std::{
cmp::{max, min},
io::{stdout, Write},
path::{Path, PathBuf},
sync::Arc,
};
use aws_config::{environment::region, BehaviorVersion, Region, SdkConfig};
use aws_sdk_s3::{
config::Credentials, operation::upload_part, primitives::ByteStream, types::CompletedPart,
};
use aws_smithy_runtime::client::http::hyper_014::HyperClientBuilder;
use aws_smithy_types::byte_stream::Length;
use clap::{Parser, Subcommand};
use human_bytes::human_bytes;
use indicatif::{ProgressBar, ProgressState, ProgressStyle};
use tokio::{fs::File, io::BufReader, sync::Mutex};
use crate::{
client, constants::APP_VERSION, list_files, types, util::{check_job, file_detail, multipart_upload}
};
#[derive(Debug, Clone)]
pub struct TargetFile {
pub file: PathBuf,
pub path: String,
}
pub async fn list(prefix: &Option<String>, client: &mut client::Client) -> anyhow::Result<()> {
let res = list_files(Some(&prefix.clone().unwrap_or("".to_string())), client)
.await
.unwrap();
res.file.iter().for_each(|f| {
let permission_string = if f.is_folder { "d" } else { "-" };
println!(
"{}\t{}\t{}\t{}",
permission_string,
human_bytes(f.size as u32),
f.last_modified,
f.path
);
});
Ok(())
}
pub async fn upload(
file: &PathBuf,
prefix: &Option<String>,
recursive: &bool,
fake_size: &Option<u64>,
client: &mut client::Client,
) -> anyhow::Result<()> {
// is folder
if file.is_dir() && !*recursive {
println!("Use --recursive option for folder upload");
return Err(anyhow::anyhow!("Use --recursive option for folder upload"));
}
let mut files = Vec::<TargetFile>::new();
if file.is_dir() && *recursive {
// upload folder
let mut dirs = Vec::<PathBuf>::new();
dirs.push(file.clone());
while let Some(dir) = dirs.pop() {
let entries = std::fs::read_dir(dir).unwrap();
for entry in entries {
let entry = entry.unwrap();
let path = entry.path();
if path.is_dir() {
dirs.push(path);
} else {
files.push(TargetFile {
file: path.clone(),
path: path
.strip_prefix(file)
.unwrap()
.to_str()
.expect("Invalid File Name")
.to_string(),
});
}
}
}
// for file in files {
// println!("{:?}", file);
// }
} else {
// file check
if !file.exists() {
println!("File not found: {:?}", file);
return Err(anyhow::anyhow!("File not found: {:?}", file));
}
files.push(TargetFile {
file: file.clone(),
path: file.file_name().unwrap().to_str().unwrap().to_string(),
});
}
if cfg!(windows) {
// replace \ with /
files.iter_mut().for_each(|f| {
f.path = f.path.replace('\\', "/");
});
}
let req = types::request::CheckUploadRequest {
host_id: client.host_id.clone(),
path: prefix.clone().unwrap_or("".to_string()),
upload_id: "".to_string(),
file: files
.iter()
.map(|f| types::request::CheckUploadRequestFile {
path: f.path.clone(),
size: fake_size.unwrap_or(f.file.metadata().unwrap().len()) as i64,
})
.collect(),
};
let check_upload_res = client.check_upload(req).await.unwrap();
// println!("{:#?}", check_upload_res);
let token_res = client.get_upload_token().await.unwrap();
// println!("{:#?}", token_res);
let cledential = Credentials::new(
token_res.access_key_id.clone(),
token_res.secret_access_key.clone(),
Some(token_res.session_token.clone()),
None,
"2021-06-01",
);
let _config = aws_sdk_s3::Config::builder()
.behavior_version_latest()
.region(Region::new(check_upload_res.region.clone()))
.credentials_provider(cledential)
.force_path_style(true)
.build();
// if file_size > CHUNK_SIZE as u64 {
for (i, file) in files.iter().enumerate() {
println!("Multi Uploading: {:?}", file.file);
multipart_upload(
&token_res,
&check_upload_res.bucket,
&check_upload_res.file[i],
&check_upload_res.prefix,
&check_upload_res.region,
&check_upload_res.upload_id,
file.clone(),
)
.await
.unwrap();
// }
// } else {
// for (i, file) in files.iter().enumerate() {
// println!("Uploading: {:?}", file.file);
// let stream = ByteStream::read_from()
// .path(file.file.clone())
// .offset(0)
// .length(Length::Exact(file_size))
// .build()
// .await
// .unwrap();
// let key =
// check_upload_res.prefix.to_owned() + check_upload_res.file[i].path.as_str();
// let _upload_res = s3_client
// .put_object()
// .bucket(check_upload_res.bucket.clone())
// .key(key)
// .body(stream)
// .send()
// .await
// .unwrap();
// }
// }
}
match check_job(&check_upload_res.upload_id, client).await {
Ok(_) => Ok(()),
Err(e) => {
println!("Error: {:?}", e);
return Err(anyhow::anyhow!("Error: {:?}", e));
}
}
}
pub async fn download(
path: &String,
prefix: &Option<String>,
client: &mut client::Client,
) -> anyhow::Result<()> {
let _file_name = path.split('/').last().unwrap();
let file_path =
path.split('/').collect::<Vec<&str>>()[0..path.split('/').count() - 1].join("/");
let list = list_files(Some(&file_path), client).await.unwrap();
let file = list
.file
.iter()
.find(|f| f.path == *path)
.expect("File not found");
let req = types::request::GetFileLinkRequest {
app_version: APP_VERSION.to_string(),
file: vec![types::request::GetFileLinkRequestFile {
path: path.to_string(),
size: file.size,
}],
host_id: client.host_id.clone(),
path: file_path,
};
let res = client.get_download_link(req).await.unwrap();
// run aria2c
// TODO: Implement our own multi-connection download
let stdout = std::process::Command::new("aria2c")
.arg("-x16")
.arg("-s16")
.arg("-d")
.arg(".")
.arg(res.url)
.stdout(std::process::Stdio::piped())
.spawn()
.expect("failed to execute process")
.stdout
.expect("failed to get stdout");
let reader = std::io::BufReader::new(stdout);
std::io::BufRead::lines(reader).for_each(|line| println!("{}", line.unwrap()));
Ok(())
}
pub async fn delete(
path: &String,
recursive: &bool,
client: &mut client::Client,
) -> anyhow::Result<()> {
let file = file_detail(path, client).await.unwrap();
if file.is_folder && !*recursive {
println!("Use --recursive option for folder delete");
return Err(anyhow::anyhow!("Use --recursive option for folder delete"));
}
let req = types::request::DeleteFileRequest {
file: vec![types::request::FileModifyRequestFile {
last_modified: file.last_modified,
path: file.path,
version_id: file.version_id,
size: file.size,
}],
host_id: client.host_id.clone(),
prefix: "".to_string(),
trash: true,
};
let res = client.delete_file(req).await.unwrap();
match check_job(&res.key, client).await {
Ok(_) => {
println!("Deleted.");
Ok(())
}
Err(e) => {
println!("Error: {:?}", e);
Err(anyhow::anyhow!("Error: {:?}", e))
}
}
}
pub async fn mkdir(
name: &String,
path: &Option<String>,
client: &mut client::Client,
) -> anyhow::Result<()> {
if name.contains('/') {
println!("Please use --path option for set parent directory");
return Err(anyhow::anyhow!(
"Please use --path option for set parent directory"
));
}
let req = types::request::CreateFolderRequest {
host_id: client.host_id.clone(),
name: name.clone(),
path: path.clone().unwrap_or("".to_string()),
};
match client.mkdir(req).await {
Ok(_) => {
println!("Created: {:?}", name);
Ok(())
}
Err(e) => {
println!("Error: {:?}", e);
Err(anyhow::anyhow!("Error: {:?}", e))
}
}
}
pub async fn rename(
path: &String,
name: &String,
client: &mut client::Client,
) -> anyhow::Result<()> {
if name.contains('/') {
println!("Can't use / in file name");
println!("Name should be file name only.");
return Err(anyhow::anyhow!("Can't use / in file name"));
}
let file_path =
path.split('/').collect::<Vec<&str>>()[0..path.split('/').count() - 1].join("/") + "/";
let list = list_files(Some(&file_path), client).await.unwrap();
let file = list
.file
.iter()
.find(|f| f.path == *path)
.expect("File not found");
let req = types::request::RenameFileRequest {
file: types::request::FileModifyRequestFile {
last_modified: file.last_modified.clone(),
path: file.path.clone(),
version_id: file.version_id.clone(),
size: file.size,
},
host_id: client.host_id.clone(),
name: name.clone(),
prefix: file_path,
};
let res = client.rename_file(req).await.unwrap();
match check_job(&res.key, client).await {
Ok(_) => {
println!("Renamed.");
Ok(())
}
Err(e) => {
println!("Error: {:?}", e);
Err(anyhow::anyhow!("Error: {:?}", e))
}
}
}
pub async fn info(path: &String, client: &mut client::Client) -> anyhow::Result<()> {
let req = types::request::FileDetailRequest {
host_id: client.host_id.clone(),
path: path.to_string(),
thumbnail_size: 130,
};
let res = client.file_detail(req).await.unwrap();
println!("{:#?}", res);
Ok(())
}

View File

@@ -1,8 +1,10 @@
use commands::list;
use util::list_files;
use aws_config::Region;
use aws_sdk_s3::config::Credentials;
use constants::APP_VERSION;
use std::path::PathBuf;
use util::{check_job, file_detail, list_files, multipart_upload, TargetFile};
mod client;
mod commands;
mod constants;
mod types;
mod util;
@@ -18,18 +20,11 @@ impl RakutenDriveClient {
}
pub async fn list(
&self,
prefix: &Option<String>,
prefix: Option<&str>,
) -> anyhow::Result<types::response::ListFilesResponse> {
list_files(
Some(&prefix.clone().unwrap_or("".to_string())),
&self.client,
)
.await
list_files(Some(prefix.unwrap_or("")), &self.client).await
}
pub async fn info(
&self,
path: &str,
) -> anyhow::Result<types::response::FileDetailResponse> {
pub async fn info(&self, path: &str) -> anyhow::Result<types::response::FileDetailResponse> {
let req = types::request::FileDetailRequest {
host_id: self.client.host_id.clone(),
path: path.to_string(),
@@ -37,4 +32,386 @@ impl RakutenDriveClient {
};
self.client.file_detail(req).await
}
pub async fn upload(
&self,
file: &PathBuf,
prefix: Option<&str>,
recursive: bool,
fake_size: Option<u64>,
) -> anyhow::Result<()> {
// is folder
if file.is_dir() && !recursive {
println!("Use --recursive option for folder upload");
return Err(anyhow::anyhow!("Use --recursive option for folder upload"));
}
let mut files = Vec::<TargetFile>::new();
if file.is_dir() && recursive {
// upload folder
let mut dirs = Vec::<PathBuf>::new();
dirs.push(file.clone());
while let Some(dir) = dirs.pop() {
let entries = std::fs::read_dir(dir).unwrap();
for entry in entries {
let entry = entry.unwrap();
let path = entry.path();
if path.is_dir() {
dirs.push(path);
} else {
files.push(TargetFile {
file: path.clone(),
path: path
.strip_prefix(file)
.unwrap()
.to_str()
.expect("Invalid File Name")
.to_string(),
});
}
}
}
// for file in files {
// println!("{:?}", file);
// }
} else {
// file check
if !file.exists() {
println!("File not found: {:?}", file);
return Err(anyhow::anyhow!("File not found: {:?}", file));
}
files.push(TargetFile {
file: file.clone(),
path: file.file_name().unwrap().to_str().unwrap().to_string(),
});
}
if cfg!(windows) {
// replace \ with /
files.iter_mut().for_each(|f| {
f.path = f.path.replace('\\', "/");
});
}
for file in &files {
if (file_detail(&file.path, &self.client).await).is_ok() {
println!("File already exists.");
return Err(anyhow::anyhow!("File already exists."));
}
}
let req = types::request::CheckUploadRequest {
host_id: self.client.host_id.clone(),
path: prefix.unwrap_or("").to_string(),
upload_id: "".to_string(),
file: files
.iter()
.map(|f| types::request::CheckUploadRequestFile {
path: f.path.clone(),
size: fake_size.unwrap_or(f.file.metadata().unwrap().len()) as i64,
})
.collect(),
};
let check_upload_res = self.client.check_upload(req).await.unwrap();
// println!("{:#?}", check_upload_res);
let token_res = self.client.get_upload_token().await.unwrap();
// println!("{:#?}", token_res);
let cledential = Credentials::new(
token_res.access_key_id.clone(),
token_res.secret_access_key.clone(),
Some(token_res.session_token.clone()),
None,
"2021-06-01",
);
let _config = aws_sdk_s3::Config::builder()
.behavior_version_latest()
.region(Region::new(check_upload_res.region.clone()))
.credentials_provider(cledential)
.force_path_style(true)
.build();
// if file_size > CHUNK_SIZE as u64 {
for (i, file) in files.iter().enumerate() {
println!("Multi Uploading: {:?}", file.file);
multipart_upload(
&token_res,
&check_upload_res.bucket,
&check_upload_res.file[i],
&check_upload_res.prefix,
&check_upload_res.region,
&check_upload_res.upload_id,
file.clone(),
)
.await
.unwrap();
// }
// } else {
// for (i, file) in files.iter().enumerate() {
// println!("Uploading: {:?}", file.file);
// let stream = ByteStream::read_from()
// .path(file.file.clone())
// .offset(0)
// .length(Length::Exact(file_size))
// .build()
// .await
// .unwrap();
// let key =
// check_upload_res.prefix.to_owned() + check_upload_res.file[i].path.as_str();
// let _upload_res = s3_client
// .put_object()
// .bucket(check_upload_res.bucket.clone())
// .key(key)
// .body(stream)
// .send()
// .await
// .unwrap();
// }
// }
}
match check_job(&check_upload_res.upload_id, &self.client).await {
Ok(_) => Ok(()),
Err(e) => {
println!("Error: {:?}", e);
Err(anyhow::anyhow!("Error: {:?}", e))
}
}
}
pub async fn download(&self, path: &str, prefix: Option<&str>) -> anyhow::Result<()> {
let _file_name = path.split('/').last().unwrap();
let file_path =
path.split('/').collect::<Vec<&str>>()[0..path.split('/').count() - 1].join("/");
let file = match file_detail(path, &self.client).await {
Ok(file) => file,
Err(e) => {
return Err(e);
}
};
let req = types::request::GetFileLinkRequest {
app_version: APP_VERSION.to_string(),
file: vec![types::request::GetFileLinkRequestFile {
path: path.to_string(),
size: file.size,
}],
host_id: self.client.host_id.clone(),
path: file_path,
};
let res = self.client.get_download_link(req).await.unwrap();
// run aria2c
// TODO: Implement our own multi-connection download
let stdout = std::process::Command::new("aria2c")
.arg("-x16")
.arg("-s16")
.arg("-d")
.arg(".")
.arg(res.url)
.stdout(std::process::Stdio::piped())
.spawn()
.expect("failed to execute process")
.stdout
.expect("failed to get stdout");
let reader = std::io::BufReader::new(stdout);
std::io::BufRead::lines(reader).for_each(|line| println!("{}", line.unwrap()));
Ok(())
}
pub async fn mkdir(&self, name: &str, path: Option<&str>) -> anyhow::Result<()> {
if name.contains('/') {
println!("Please use --path option for set parent directory");
return Err(anyhow::anyhow!(
"Please use --path option for set parent directory"
));
}
let req = types::request::CreateFolderRequest {
host_id: self.client.host_id.clone(),
name: name.to_string(),
path: path.unwrap_or("").to_string(),
};
match self.client.mkdir(req).await {
Ok(_) => {
println!("Created: {:?}", name);
Ok(())
}
Err(e) => {
println!("Error: {:?}", e);
Err(anyhow::anyhow!("Error: {:?}", e))
}
}
}
pub async fn rename(&self, path: &str, name: &str) -> anyhow::Result<()> {
if name.contains('/') {
println!("Can't use / in file name");
println!("Name should be file name only.");
return Err(anyhow::anyhow!("Can't use / in file name"));
}
let file_path =
path.split('/').collect::<Vec<&str>>()[0..path.split('/').count() - 1].join("/") + "/";
let file = match file_detail(path, &self.client).await {
Ok(file) => file,
Err(e) => {
return Err(e);
}
};
let req = types::request::RenameFileRequest {
file: types::request::FileModifyRequestFile {
last_modified: file.last_modified.clone(),
path: file.path.clone(),
version_id: file.version_id.clone(),
size: file.size,
},
host_id: self.client.host_id.clone(),
name: name.to_string(),
prefix: file_path,
};
let res = self.client.rename_file(req).await.unwrap();
match check_job(&res.key, &self.client).await {
Ok(_) => {
println!("Renamed.");
Ok(())
}
Err(e) => {
println!("Error: {:?}", e);
Err(anyhow::anyhow!("Error: {:?}", e))
}
}
}
pub async fn move_file(&self, path: &str, dest: &str) -> anyhow::Result<()> {
if !dest.ends_with('/') {
println!("Destination should be directory.");
return Err(anyhow::anyhow!("Destination should be directory."));
}
let file = file_detail(path, &self.client).await.unwrap();
let file_name = path.split('/').last().unwrap();
let file_dir =
path.split('/').collect::<Vec<&str>>()[0..path.split('/').count() - 1].join("/") + "/";
if (file_detail((dest.to_string() + file_name).as_str(), &self.client).await).is_ok() {
println!("File already exists.");
return Err(anyhow::anyhow!("File already exists."));
}
let req = types::request::MoveFileRequest {
file: vec![types::request::FileModifyRequestFile {
last_modified: file.last_modified,
path: file.path,
size: file.size,
version_id: file.version_id,
}],
host_id: self.client.host_id.clone(),
prefix: file_dir.clone(),
target_id: self.client.host_id.clone(),
to_path: dest.to_string(),
};
let res = self.client.move_file(req).await.unwrap();
match check_job(&res.key, &self.client).await {
Ok(_) => {
println!("Moved.");
Ok(())
}
Err(e) => {
println!("Error: {:?}", e);
Err(anyhow::anyhow!("Error: {:?}", e))
}
}
}
pub async fn delete(&self, path: &str, recursive: &bool) -> anyhow::Result<()> {
let file = file_detail(path, &self.client).await.unwrap();
if file.is_folder && !*recursive {
println!("Use --recursive option for folder delete");
return Err(anyhow::anyhow!("Use --recursive option for folder delete"));
}
let req = types::request::DeleteFileRequest {
file: vec![types::request::FileModifyRequestFile {
last_modified: file.last_modified,
path: file.path,
version_id: file.version_id,
size: file.size,
}],
host_id: self.client.host_id.clone(),
prefix: "".to_string(),
trash: true,
};
let res = self.client.delete_file(req).await.unwrap();
match check_job(&res.key, &self.client).await {
Ok(_) => {
println!("Deleted.");
Ok(())
}
Err(e) => {
println!("Error: {:?}", e);
Err(anyhow::anyhow!("Error: {:?}", e))
}
}
}
pub async fn copy(&self, src: &str, dest: &str) -> anyhow::Result<()> {
if !dest.ends_with('/') {
println!("Destination should be directory.");
return Err(anyhow::anyhow!("Destination should be directory."));
}
let file_name = src.split('/').last().unwrap();
let file_dir =
src.split('/').collect::<Vec<&str>>()[0..src.split('/').count() - 1].join("/") + "/";
let file = file_detail(src, &self.client).await.unwrap();
if (file_detail((dest.to_string() + file_name).as_str(), &self.client).await).is_ok() {
println!("File already exists.");
return Err(anyhow::anyhow!("File already exists."));
}
let req = types::request::CopyFileRequest {
file: vec![types::request::CopyFileRequestFile {
last_modified: file.last_modified,
path: file.path,
version_id: file.version_id,
size: file.size,
}],
host_id: self.client.host_id.clone(),
prefix: file_dir.clone(),
target_id: self.client.host_id.clone(),
to_path: dest.to_string(),
};
let res = self.client.copy_file(req).await.unwrap();
match check_job(&res.key, &self.client).await {
Ok(_) => {
println!("Copied.");
Ok(())
}
Err(e) => {
println!("Error: {:?}", e);
Err(anyhow::anyhow!("Error: {:?}", e))
}
}
}
}
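
With the command logic folded into the library, callers can drive RakutenDriveClient directly; the rewritten main.rs below does exactly this. A rough usage sketch under the same dependencies shown in the diff (tokio, anyhow, the rakuten_drive_cui crate), with a placeholder refresh token and placeholder paths:

use std::path::PathBuf;

use rakuten_drive_cui::RakutenDriveClient;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Placeholder credential; the real CLI reads REFRESH_TOKEN from constants.
    let client = RakutenDriveClient::try_new("YOUR_REFRESH_TOKEN".to_string()).await?;

    // List the root of the drive.
    client.list(None).await?;

    // Upload a single file under a prefix, with no fake size override.
    client
        .upload(&PathBuf::from("notes.txt"), Some("backup/"), false, None)
        .await?;

    // Rename it, then delete it (non-recursively).
    client.rename("backup/notes.txt", "notes-old.txt").await?;
    client.delete("backup/notes-old.txt", &false).await?;

    Ok(())
}

The CLI below unwraps each of these results; a library consumer can instead propagate them with `?` as sketched here.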

View File

@@ -12,20 +12,18 @@ use aws_sdk_s3::{
use aws_smithy_runtime::client::http::hyper_014::HyperClientBuilder;
use aws_smithy_types::byte_stream::Length;
use clap::{Parser, Subcommand};
use commands::{delete, download, info, list, mkdir, rename, upload};
use constants::REFRESH_TOKEN;
use human_bytes::human_bytes;
use indicatif::{ProgressBar, ProgressState, ProgressStyle};
use rakuten_drive_cui::RakutenDriveClient;
use tokio::{fs::File, io::BufReader, sync::Mutex};
use types::response::ListFilesResponseFile;
use util::*;
mod client;
mod commands;
mod constants;
mod types;
mod util;
mod constants;
#[derive(Parser, Debug)]
#[command(version, about, long_about=None)]
@@ -67,7 +65,13 @@ enum Commands {
prefix: Option<String>,
},
#[clap(about = "Move file")]
Move {},
Move {
// Source file path
path: String,
// Destination folder path
dest: String,
},
#[clap(about = "Delete file")]
Delete {
path: String,
@@ -108,28 +112,44 @@ enum Commands {
async fn main() -> anyhow::Result<()> {
let args = Args::parse();
let mut client = client::Client::try_new(REFRESH_TOKEN.to_string())
.await
.unwrap();
let client = RakutenDriveClient::try_new(REFRESH_TOKEN.to_string()).await?;
match &args.command {
Commands::List { prefix } => list(prefix, &mut client).await,
Commands::List { prefix } => {
client.list(prefix.as_deref()).await.unwrap();
}
Commands::Upload {
file,
prefix,
recursive,
fake_size,
} => upload(file, prefix, recursive, fake_size, &mut client).await,
Commands::Download { path, prefix } => download(path, prefix, &mut client).await,
Commands::Move {} => {
todo!("Move");
} => {
client
.upload(file, prefix.as_deref(), *recursive, *fake_size)
.await.unwrap();
}
Commands::Download { path, prefix } => {
client.download(path.as_str(), prefix.as_deref()).await.unwrap();
}
Commands::Move { path, dest } => {
client.move_file(path, dest).await.unwrap();
}
Commands::Delete { path, recursive } => {
client.delete(path, recursive).await.unwrap();
}
Commands::Mkdir { name, path } => {
client.mkdir(name, path.as_deref()).await.unwrap();
}
Commands::Delete { path, recursive } => delete(path, recursive, &mut client).await,
Commands::Mkdir { name, path } => mkdir(name, path, &mut client).await,
Commands::Copy { src: _, dest: _ } => {
todo!("Copy");
}
Commands::Rename { path, name } => rename(path, name, &mut client).await,
Commands::Info { path } => info(path, &mut client).await,
Commands::Rename { path, name } => {
client.rename(path, name).await.unwrap();
}
Commands::Info { path } => {
client.info(path).await.unwrap();
}
}
Ok(())
}
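
main.rs is now a thin clap wrapper over the library, with each subcommand mapping one-to-one onto a client method. As a reduced illustration of how the new Move variant parses its positional arguments, here is a stand-in Args containing only that subcommand (the binary name passed to parse_from is arbitrary, and the doc comments are illustrative help text):

use clap::{Parser, Subcommand};

// Stand-in for the CLI in main.rs, reduced to the new Move subcommand.
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand, Debug)]
enum Commands {
    #[clap(about = "Move file")]
    Move {
        /// Source file path
        path: String,
        /// Destination folder path
        dest: String,
    },
}

fn main() {
    // Equivalent to running: <binary> move backup/notes.txt archive/
    let args = Args::parse_from(["rakuten-drive-cui", "move", "backup/notes.txt", "archive/"]);
    match args.command {
        Commands::Move { path, dest } => println!("move {path} -> {dest}"),
    }
}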

View File

@@ -66,7 +66,6 @@ pub struct MoveFileRequest {
pub file: Vec<FileModifyRequestFile>,
pub host_id: String,
pub prefix: String,
pub path: String,
pub target_id: String,
pub to_path: String,
}

View File

@@ -103,7 +103,6 @@ pub struct GetFileLinkTokenResponse {
pub session_token: String,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct GetFileLinkResponse {
@@ -148,3 +147,32 @@ pub struct FileDetailResponseFile {
#[serde(rename = "VersionID")]
pub version_id: String,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct SendyError {
pub error: SendyErrorType,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum SendyErrorType {
SendyErrFileNoFolder,
SendyErrFileNoSuchKey,
SendyErrFileAlreadyExistFileName,
SendyErrFileLongKey,
SendyErrExceededFolderMaxStorage,
SendyErrExceededTraffic,
SendyErrServer,
SendyErrAlreadyRunning,
SendyErrNoLinkToSave,
SendyErrPasswordNotMatch,
SendyErrLinkExpired,
SendyErrUninvitedUser,
SendyErrFileWrongPath,
SendyErrFileNoPermission,
SendyErrLinkInvalidPassword,
SendyErrShareDownwardShareExist,
SendyErrShareUpwardShareExist,
SendyErrShareFolderIncludedOrInclude,
}
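
Because of rename_all = "SCREAMING_SNAKE_CASE", each variant is matched against its upper-snake spelling on the wire, which is what lets the new fallback in client.rs decode an error body straight into this enum. A small sketch of the mapping, with the JSON shape inferred from the SendyError struct above and only two variants reproduced:

use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
struct SendyError {
    error: SendyErrorType,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
enum SendyErrorType {
    SendyErrFileNoSuchKey,
    SendyErrExceededTraffic,
}

fn main() {
    // "SENDY_ERR_FILE_NO_SUCH_KEY" deserializes into the SendyErrFileNoSuchKey variant.
    let err: SendyError =
        serde_json::from_str(r#"{"error":"SENDY_ERR_FILE_NO_SUCH_KEY"}"#).unwrap();
    println!("{:?}", err.error);

    // Serializing goes the other way, back to the upper-snake wire name.
    println!("{}", serde_json::to_string(&err).unwrap());
}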

View File

@@ -19,10 +19,15 @@ use tokio::{fs::File, io::BufReader, sync::Mutex};
use crate::{constants::CHUNK_SIZE, types};
use crate::{
client::{self},
commands::TargetFile,
types::response::ListFilesResponseFile,
};
#[derive(Debug, Clone)]
pub struct TargetFile {
pub file: PathBuf,
pub path: String,
}
pub async fn multipart_upload(
token_res: &types::response::GetFileLinkTokenResponse,
bucket: &str,