
Commit 3157409

Merge pull request #24 from KekOnTheWorld/main
Added error handling and format
2 parents: 3d2ad2f + ebff524

18 files changed (+189 -124 lines)

src/config.rs (+1 -1)

@@ -1,3 +1,3 @@
 pub const EXTENSION_MAX_LENGTH: usize = 10;
 pub const STREAM_ID_LENGTH: usize = 64;
-pub const FILE_ID_LENGTH: usize = 7;
+pub const FILE_ID_LENGTH: usize = 7;

src/database.rs (+2 -2)

@@ -1,6 +1,6 @@
 use diesel::pg::PgConnection;

-use diesel::r2d2::{ Pool, PooledConnection, ConnectionManager, PoolError };
+use diesel::r2d2::{ConnectionManager, Pool, PoolError, PooledConnection};

 pub type PgPool = Pool<ConnectionManager<PgConnection>>;
 pub type PgPooledConnection = PooledConnection<ConnectionManager<PgConnection>>;
@@ -12,4 +12,4 @@ fn init_pool(database_url: &str) -> Result<PgPool, PoolError> {

 pub fn establish_connection(database_url: String) -> PgPool {
     init_pool(&database_url).expect("Failed to create pool")
-}
+}
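
Note: only the import reordering and trailing comma/newline changes are part of this commit. For orientation, a minimal sketch of how a pool built from these types is typically created and borrowed; the body of init_pool (only its signature appears in the hunk header above) and the get_connection helper are assumptions for illustration, not committed code:

use diesel::pg::PgConnection;
use diesel::r2d2::{ConnectionManager, Pool, PoolError, PooledConnection};

pub type PgPool = Pool<ConnectionManager<PgConnection>>;
pub type PgPooledConnection = PooledConnection<ConnectionManager<PgConnection>>;

// Assumed shape of init_pool; the real body is outside this diff.
fn init_pool(database_url: &str) -> Result<PgPool, PoolError> {
    let manager = ConnectionManager::<PgConnection>::new(database_url);
    Pool::builder().build(manager)
}

// Hypothetical helper: checks a pooled connection out for one request.
fn get_connection(pool: &PgPool) -> Option<PgPooledConnection> {
    pool.get().ok()
}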

src/errors.rs (+7 -3)

@@ -10,7 +10,7 @@ use std::fmt::{self, Display, Formatter};
 pub struct JsonError {
     err_type: JsonErrorType,
     field: String,
-    error: String
+    error: String,
 }

 #[derive(Debug)]
@@ -35,7 +35,11 @@ impl JsonErrorType {

 impl JsonError {
     pub fn new(err_type: JsonErrorType, field: String, error: String) -> Self {
-        Self { err_type, field, error }
+        Self {
+            err_type,
+            field,
+            error,
+        }
     }
 }

@@ -96,4 +100,4 @@ json_error_type!(FS_REMOVE, StatusCode::INTERNAL_SERVER_ERROR);

 json_error_type!(HASH_MATCH, StatusCode::BAD_REQUEST);

-json_error_type!(OVERFLOW, StatusCode::BAD_REQUEST);
+json_error_type!(OVERFLOW, StatusCode::BAD_REQUEST);
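
The json_error_type! lines register an error kind together with its HTTP status, and the routes below raise errors through a crate::error! macro. A hypothetical expansion of one such call, shown only to illustrate how the reformatted JsonError::new constructor is reached; the real macro is defined in src/errors.rs and may differ:

// Hypothetical expansion of crate::error!(NOT_FOUND, ID, "File with id not found").
let err = JsonError::new(
    JsonErrorType::NOT_FOUND,            // assumed variant produced by json_error_type!
    "ID".to_owned(),                     // field the error is attributed to
    "File with id not found".to_owned(), // formatted message
);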

src/http.rs (+5 -5)

@@ -3,11 +3,11 @@ use std::{io::Error, sync::Arc};
 use std::collections::HashMap;

 use actix_cors::Cors;
-use actix_web::{HttpServer, App, web};
+use actix_web::{web, App, HttpServer};
 use sha1::Sha1;
 use tokio::{fs::File, sync::Mutex};

-use crate::{database::PgPool, colors, routes};
+use crate::{colors, database::PgPool, routes};

 pub struct UploadState {
     pub map: Mutex<HashMap<String, UploadEntry>>,
@@ -22,13 +22,13 @@ pub struct UploadState {
     pub embed_color: String,
     pub download_url: String,

-    pub chunk_size: usize
+    pub chunk_size: usize,
 }

 pub struct UploadEntry {
     pub file: File,
     pub ext: String,
-    pub hasher: Sha1
+    pub hasher: Sha1,
 }

 pub async fn main(state: Arc<UploadState>, address: String, port: u16) -> Result<(), Error> {
@@ -81,4 +81,4 @@ pub async fn main(state: Arc<UploadState>, address: String, port: u16) -> Result
             return Err(error);
         }
     }
-}
+}

src/main.rs (+23 -27)

@@ -12,18 +12,18 @@ use std::sync::Arc;

 use dotenv::dotenv;
 use http::UploadState;
-use tokio::sync::Mutex;
 use std::env;
+use tokio::sync::Mutex;

-pub mod database;
-pub mod schema;
-pub mod routes;
-pub mod util;
+pub mod colors;
 pub mod config;
-pub mod models;
+pub mod database;
 pub mod errors;
 pub mod http;
-pub mod colors;
+pub mod models;
+pub mod routes;
+pub mod schema;
+pub mod util;

 fn clean_tmp<'a>(tmp: &'a str) {
     fs::remove_dir_all(tmp).expect("Failed to remove temp directory!");
@@ -33,42 +33,38 @@ fn clean_tmp<'a>(tmp: &'a str) {
 #[tokio::main]
 async fn main() {
     dotenv().ok();
-
-    let tmp_dir = env::var("tmp_dir")
-        .unwrap_or("tmp/".to_owned());
-
-    let upload_dir = env::var("upload_dir")
-        .unwrap_or("upload/".to_owned());

-    let web_dir = env::var("web_dir")
-        .unwrap_or("web/".to_owned());
+    let tmp_dir = env::var("tmp_dir").unwrap_or("tmp/".to_owned());
+
+    let upload_dir = env::var("upload_dir").unwrap_or("upload/".to_owned());

-    let embed_description = env::var("embed_description")
-        .unwrap_or("No description availlable".to_owned());
+    let web_dir = env::var("web_dir").unwrap_or("web/".to_owned());

-    let embed_color = env::var("embed_color")
-        .unwrap_or("#ffffff".to_owned());
+    let embed_description =
+        env::var("embed_description").unwrap_or("No description availlable".to_owned());

-    let download_url = env::var("download_url")
-        .unwrap_or("http://example.com/".to_owned());
+    let embed_color = env::var("embed_color").unwrap_or("#ffffff".to_owned());
+
+    let download_url = env::var("download_url").unwrap_or("http://example.com/".to_owned());

     let port = env::var("port")
         .unwrap_or("6942".to_owned())
         .parse()
         .unwrap_or(6942);

-    let address = env::var("address")
-        .unwrap_or("0.0.0.0".to_owned());
+    let address = env::var("address").unwrap_or("0.0.0.0".to_owned());

     let chunk_size: usize = env::var("chunksize")
         .unwrap_or("2048".to_owned())
         .parse()
-        .unwrap_or(2048) * 1024;
+        .unwrap_or(2048)
+        * 1024;

     // Clean temp directory
     clean_tmp(tmp_dir.as_str());

-    let pool = database::establish_connection(env::var("DATABASE_URL").expect("Database url not set!"));
+    let pool =
+        database::establish_connection(env::var("DATABASE_URL").expect("Database url not set!"));

     let state = UploadState {
         map: Mutex::new(HashMap::new()),
@@ -79,8 +75,8 @@ async fn main() {
         embed_description,
         embed_color,
         download_url,
-        chunk_size
+        chunk_size,
     };

     let _result = http::main(Arc::new(state), address, port).await;
-}
+}
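
The reformatted chunk_size expression is unchanged in behaviour: the chunksize variable is read in KiB and multiplied into bytes, defaulting to 2048 KiB (2 MiB) when it is missing or fails to parse. A standalone sketch of the same computation, wrapped in a hypothetical helper for clarity:

use std::env;

// Hypothetical wrapper around the same expression used in main(); the default of
// 2048 KiB applies both when the variable is absent and when parsing fails.
fn read_chunk_size() -> usize {
    env::var("chunksize")
        .unwrap_or("2048".to_owned())
        .parse()
        .unwrap_or(2048)
        * 1024
}

fn main() {
    // With no `chunksize` set this prints 2097152 bytes (2 MiB).
    println!("chunk size: {} bytes", read_chunk_size());
}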

src/models/file.rs (+5 -6)

@@ -1,16 +1,16 @@
-use diesel::{PgConnection, QueryResult, QueryDsl};
 use crate::diesel::RunQueryDsl;
+use diesel::{PgConnection, QueryDsl, QueryResult};

 use crate::schema::files;

 use crate::diesel::ExpressionMethods;

 #[derive(Queryable, Insertable)]
-#[table_name="files"]
+#[table_name = "files"]
 pub struct File {
     pub id: String,
     pub ext: String,
-    pub hash: String
+    pub hash: String,
 }

 impl File {
@@ -20,11 +20,10 @@ impl File {
             .execute(connection)
     }

-    pub fn find(id: String, connection: &PgConnection) -> Vec<File> {
+    pub fn find(id: String, connection: &PgConnection) -> QueryResult<Vec<File>> {
         files::table
             .filter(files::dsl::id.eq(id))
             .select((files::dsl::id, files::dsl::ext, files::dsl::hash))
             .load::<File>(connection)
-            .expect("Error while executing query!")
     }
-}
+}
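
With find now returning QueryResult<Vec<File>> instead of panicking via expect, callers decide how to surface database failures. A minimal sketch of a caller, assuming error handling along the lines of the map_qres helper used in the routes below; the lookup function itself is illustrative and not part of the commit:

// Illustrative caller only: the query error is propagated instead of panicking inside find.
fn lookup(id: String, connection: &PgConnection) -> Result<Option<File>, String> {
    File::find(id, connection)
        .map(|files| files.into_iter().next())                       // first match, if any
        .map_err(|e| format!("Error while selecting files: {}", e))  // caller picks the error type
}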

src/routes/api/create.rs (+18 -10)

@@ -1,25 +1,35 @@
 use std::sync::Arc;

-use actix_web::{web, Responder, post, Result};
-use sha1::{Sha1, Digest};
+use actix_web::{post, web, Responder, Result};
+use sha1::{Digest, Sha1};
 use tokio::fs::File;

-use crate::{http::{UploadState, UploadEntry}, util::{checker, random}, config};
-
+use crate::{
+    config,
+    http::{UploadEntry, UploadState},
+    util::{checker, random},
+};

 #[post("/api/c/{ext}")]
 pub async fn create(
     path: web::Path<(String,)>,
     state: web::Data<Arc<UploadState>>,
 ) -> Result<impl Responder> {
     let ext = path.into_inner().0;
-    checker::in_bounds("Length of extension ", ext.len(), 0, config::EXTENSION_MAX_LENGTH)?;
+    checker::in_bounds(
+        "Length of extension ",
+        ext.len(),
+        0,
+        config::EXTENSION_MAX_LENGTH,
+    )?;

     let map = &mut state.map.lock().await;

     let stream = random::random_b64(config::STREAM_ID_LENGTH);
-
-    let file = File::create(state.tmp_dir.clone() + &stream).await.map_err(|e| crate::error!(FS_CREATE, FILE, "Error while creating file: {}", e))?;
+
+    let file = File::create(state.tmp_dir.clone() + &stream)
+        .await
+        .map_err(|e| crate::error!(FS_CREATE, FILE, "Error while creating file: {}", e))?;

     let hasher = Sha1::new();

@@ -29,7 +39,5 @@ pub async fn create(

     map.insert(stream.clone(), entry);

-    Ok(web::Json(json!({
-        "stream": stream
-    })))
+    Ok(web::Json(json!({ "stream": stream })))
 }
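
checker::in_bounds is only called here; its definition is not part of this diff, and the rustfmt change just splits its arguments onto separate lines. A hypothetical shape for such a helper, consistent with the OVERFLOW error kind registered in src/errors.rs; names and the macro usage are assumptions, not the committed code:

// Hypothetical sketch of util::checker::in_bounds -- the real helper may differ.
// Rejects a value outside [min, max] with the OVERFLOW JSON error.
pub fn in_bounds(
    label: &str,
    value: usize,
    min: usize,
    max: usize,
) -> Result<(), crate::errors::JsonError> {
    if value < min || value > max {
        // Field name EXTENSION is illustrative only.
        Err(crate::error!(OVERFLOW, EXTENSION, "{}out of bounds ({}..{})", label, min, max))
    } else {
        Ok(())
    }
}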

src/routes/api/download.rs (+24 -9)

@@ -1,9 +1,20 @@
-use std::{sync::Arc, path::Path};
+use std::{path::Path, sync::Arc};

 use actix_files::NamedFile;
-use actix_web::{web, get, Result, Responder, http::header::{ContentDisposition, DispositionType, DispositionParam}};
-
-use crate::{http::UploadState, util::{checker, files}, models::file};
+use actix_web::{
+    get,
+    http::header::{ContentDisposition, DispositionParam, DispositionType},
+    web, Responder, Result,
+};
+
+use crate::{
+    http::UploadState,
+    models::file,
+    util::{
+        checker::{self, map_qres},
+        files,
+    },
+};

 #[get("/api/d/{id}/")]
 pub async fn download(
@@ -14,21 +25,25 @@ pub async fn download(

     let db_connection = &checker::get_con(&state.pool)?;

-    if let Some(entry) = file::File::find(id, &db_connection).into_iter().next() {
+    if let Some(entry) = map_qres(
+        file::File::find(id, &db_connection),
+        "Error while selecting files",
+    )?
+    .into_iter()
+    .next()
+    {
         let filename = files::get_filename(entry.hash.clone(), entry.ext);

         let named_file = NamedFile::open(Path::new(state.upload_dir.as_str()).join(entry.hash))
             .map_err(|e| crate::error!(FS_OPEN, FILE, "Error while opening file: {}", e))?;

         let content_disposition = ContentDisposition {
             disposition: DispositionType::Attachment,
-            parameters: vec![
-                DispositionParam::Filename(filename)
-            ],
+            parameters: vec![DispositionParam::Filename(filename)],
         };

         Ok(named_file.set_content_disposition(content_disposition))
     } else {
         Err(crate::error!(NOT_FOUND, ID, "File with id not found").into())
     }
-}
+}
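
map_qres, imported from util::checker above, is what lets this route propagate a failed diesel query with ? instead of the old expect inside File::find. A hypothetical sketch of such a helper, under the assumption it follows the same crate::error! pattern as the routes; the error-kind and field names are illustrative, not identifiers from this commit:

use diesel::QueryResult;

// Hypothetical sketch of util::checker::map_qres -- the committed helper may differ.
// Maps a failed query into the crate's JSON error, attaching a context message.
pub fn map_qres<T>(result: QueryResult<T>, context: &str) -> Result<T, crate::errors::JsonError> {
    result.map_err(|e| crate::error!(DB_ERROR, QUERY, "{}: {}", context, e))
}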

src/routes/api/finish.rs (+19 -12)

@@ -1,11 +1,18 @@
 use std::sync::Arc;

-use actix_web::{web, Responder, post, Result};
+use actix_web::{post, web, Responder, Result};
 use sha1::Digest;
 use tokio::fs;

-use crate::{http::UploadState, util::{checker::{self, map_qres}, random}, config, models::file};
-
+use crate::{
+    config,
+    http::UploadState,
+    models::file,
+    util::{
+        checker::{self, map_qres},
+        random,
+    },
+};

 #[post("/api/f/{stream}/{hash}")]
 pub async fn finish(
@@ -21,27 +28,27 @@ pub async fn finish(
         let file_path = state.tmp_dir.clone() + &stream;

         if file_hash.eq(&hash) {
-            fs::rename(file_path, state.upload_dir.clone() + &file_hash).await.map_err(|e| crate::error!(FS_RENAME, FILE, "Error while renaming file: {}", e))?;
+            fs::rename(file_path, state.upload_dir.clone() + &file_hash)
+                .await
+                .map_err(|e| crate::error!(FS_RENAME, FILE, "Error while renaming file: {}", e))?;

             let id = random::random_b64(config::FILE_ID_LENGTH);

             let db_connection = &checker::get_con(&state.pool)?;

             let new_file = file::File {
-                id: id.clone(),
-                ext: entry.ext.clone(),
-                hash: file_hash
+                id: id.clone(),
+                ext: entry.ext.clone(),
+                hash: file_hash,
             };
-
+
             map_qres(new_file.create(db_connection), "Error while inserting file")?;

-            Ok(web::Json(json!({
-                "id": id
-            })))
+            Ok(web::Json(json!({ "id": id })))
         } else {
             Err(crate::error!(HASH_MATCH, HASH, "Hash doesn't match").into())
         }
     } else {
         Err(crate::error!(NOT_FOUND, STREAM, "Stream not found").into())
     }
-}
+}

src/routes/api/mod.rs (+3 -3)

@@ -1,5 +1,5 @@
 pub mod create;
-pub mod upload;
-pub mod remove;
+pub mod download;
 pub mod finish;
-pub mod download;
+pub mod remove;
+pub mod upload;
