Final preparation

StNicolay 2024-08-15 19:15:58 +03:00
parent ab138e8536
commit a3e4ac2b2e
Signed by: StNicolay
GPG Key ID: 9693D04DCD962B0D
11 changed files with 208 additions and 58 deletions

.dockerignore (new file, 10 lines)

@ -0,0 +1,10 @@
**/target/
**/.vscode/
**/.env
**/.git/
**/.dockerignore
**/Dockerfile
**/compose.yaml
**/LICENSE
**/README.md
files/

Cargo.lock (generated, 8 changed lines)

@ -1732,18 +1732,18 @@ dependencies = [
 
 [[package]]
 name = "serde"
-version = "1.0.207"
+version = "1.0.208"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5665e14a49a4ea1b91029ba7d3bca9f299e1f7cfa194388ccc20f14743e784f2"
+checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.207"
+version = "1.0.208"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6aea2634c86b0e8ef2cfdc0c340baede54ec27b1e46febd7f80dffb2aa44a00e"
+checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf"
 dependencies = [
  "proc-macro2",
  "quote",

Cargo.toml

@ -22,7 +22,7 @@ axum-extra = { version = "0.9", features = ["typed-header"] }
 chrono = { version = "0.4", features = ["serde"] }
 dotenvy = "0.15"
 futures = "0.3"
-itertools = "0.13.0"
+itertools = "0.13"
 jsonwebtoken = "9"
 rand = "0.8"
 scrypt = { version = "0.11", default-features = false, features = ["std"] }

Dockerfile (new file, 19 lines)

@ -0,0 +1,19 @@
FROM rust:slim AS chef
RUN cargo install cargo-chef
WORKDIR /app

FROM chef AS planner
COPY . .
RUN cargo chef prepare

FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
RUN cargo chef cook --release
COPY . .
RUN cargo b -r

FROM debian:stable-slim
EXPOSE 3000
WORKDIR /app
COPY --from=builder /app/target/release/project .
CMD [ "./project" ]

compose-dev.yaml (new file, 25 lines)

@ -0,0 +1,25 @@
services:
  backend:
    build: .
    ports:
      - 3000:3000
    environment:
      JWT_SECRET: ${JWT_SECRET}
      DATABASE_URL: 'postgresql://tester:testing123!@backend_db/backend'
    depends_on:
      - backend_db

  backend_db:
    image: ghcr.io/fboulnois/pg_uuidv7:1.5.0
    environment:
      - POSTGRES_USER=tester
      - POSTGRES_PASSWORD=testing123!
      - POSTGRES_DB=backend
    ports:
      - 5432:5432
    volumes:
      - backend_db_data:/var/lib/postgresql/data
    restart: unless-stopped

volumes:
  backend_db_data:

compose.yaml

@ -1,15 +1,22 @@
 services:
-  db:
+  backend:
+    build: .
+    environment:
+      JWT_SECRET: ${JWT_SECRET}
+      DATABASE_URL: 'postgresql://tester:testing123!@backend_db/backend'
+    depends_on:
+      - backend_db
+    restart: unless-stopped
+
+  backend_db:
     image: ghcr.io/fboulnois/pg_uuidv7:1.5.0
     environment:
       - POSTGRES_USER=tester
       - POSTGRES_PASSWORD=testing123!
-      - POSTGRES_DB=testing
-    ports:
-      - 5432:5432
+      - POSTGRES_DB=backend
     volumes:
-      - postgres_data:/var/lib/postgresql/data
+      - backend_db_data:/var/lib/postgresql/data
     restart: unless-stopped
 
 volumes:
-  postgres_data:
+  backend_db_data:
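Both compose files hand the service its configuration through the JWT_SECRET and DATABASE_URL environment variables, which matches the dotenvy-based loading added to main() below. A minimal sketch of reading them, assuming only std and the dotenvy crate; the load_config helper is hypothetical and not part of this commit:

use std::env;

// Hypothetical helper (not in this commit): load .env if present, as main()
// does via dotenvy, then read the variables the compose files inject.
fn load_config() -> Result<(String, String), env::VarError> {
    let _ = dotenvy::dotenv(); // a missing .env file is fine
    let database_url = env::var("DATABASE_URL")?;
    let jwt_secret = env::var("JWT_SECRET")?;
    Ok((database_url, jwt_secret))
}

fn main() {
    match load_config() {
        Ok((database_url, _jwt_secret)) => println!("connecting to {database_url}"),
        Err(err) => eprintln!("missing configuration: {err}"),
    }
}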

View File

@ -1,7 +1,9 @@
-use std::collections::{HashMap, HashSet};
+use std::{
+    collections::{HashMap, HashSet},
+    fmt::Write as _,
+};
 
 use axum::extract::multipart::{self, Multipart};
-use tokio::io::AsyncWrite;
 
 use crate::prelude::*;
@ -10,25 +12,50 @@ pub struct Params {
     parent_folder: Uuid,
 }
 
+#[derive(Serialize, Debug, Default)]
+pub struct Response {
+    success: HashMap<Box<str>, Uuid>,
+    error: HashMap<Box<str>, &'static str>,
+}
+
+fn validate_name(name: &str, existing_names: &HashSet<String>) -> Result<(), &'static str> {
+    if name.len() > 255 {
+        return Err("Name too long");
+    }
+    if existing_names.contains(name) {
+        return Err("Item with that name already exists");
+    }
+    Ok(())
+}
+
 async fn create_file(
-    file_id: Uuid,
-    file: impl AsyncWrite + Unpin,
+    storage: &crate::FileStorage,
     file_name: &str,
     field: &mut multipart::Field<'_>,
     parent_folder: Uuid,
     pool: &Pool,
-) -> bool {
-    let (hash, size) = match crate::FileStorage::write_to_file(file, field).await {
-        Ok(values) => values,
-        Err(err) => {
-            tracing::warn!(%err);
-            return false;
-        }
-    };
-    db::file::insert(file_id, parent_folder, file_name, size, hash, pool)
+) -> anyhow::Result<Uuid> {
+    let (file_id, file) = storage.create().await?;
+    let (hash, size) = crate::FileStorage::write_to_file(file, field).await?;
+    db::file::insert(file_id, parent_folder, file_name, size, hash, pool).await?;
+    Ok(file_id)
+}
+
+async fn parse_field(
+    field: &mut multipart::Field<'_>,
+    name: &str,
+    storage: &crate::FileStorage,
+    parent_folder: Uuid,
+    pool: &Pool,
+    existing_names: &HashSet<String>,
+) -> Result<Uuid, &'static str> {
+    validate_name(name, existing_names)?;
+    create_file(storage, name, field, parent_folder, pool)
         .await
-        .inspect_err(|err| tracing::warn!(%err))
-        .is_ok()
+        .map_err(|err| {
+            tracing::warn!(%err, "Error creating the file");
+            "Error creating the file"
+        })
 }
 
 pub async fn upload(
@ -36,7 +63,7 @@ pub async fn upload(
     State(state): State<AppState>,
     claims: Claims,
     mut multi: Multipart,
-) -> GeneralResult<Json<HashMap<String, Uuid>>> {
+) -> GeneralResult<Json<Response>> {
     db::folder::get_permissions(params.parent_folder, claims.user_id, &state.pool)
         .await
         .can_write_guard()?;
@ -46,39 +73,47 @@ pub async fn upload(
         .await
         .handle_internal("Error getting existing names")?;
-    let mut result = HashMap::new();
+    let mut response = Response::default();
     while let Ok(Some(mut field)) = multi.next_field().await {
-        let Some(file_name) = field.file_name().map(ToOwned::to_owned) else {
-            continue;
-        };
-        if existing_names.contains(&file_name) {
-            continue;
-        }
-        if file_name.len() > 50 {
-            continue;
-        }
-        let Ok((file_id, mut file)) = state.storage.create().await else {
-            tracing::warn!("Couldn't create uuid for new file");
+        let Some(file_name) = field.file_name().map(Box::<str>::from) else {
             continue;
         };
-        let is_success = create_file(
-            file_id,
-            &mut file,
-            &file_name,
+        let parse_result = parse_field(
             &mut field,
+            &file_name,
+            &state.storage,
             params.parent_folder,
             &state.pool,
+            &existing_names,
         )
         .await;
-        if !is_success {
-            let _ = state.storage.delete(file_id).await;
-            continue;
-        }
-        result.insert(file_name, file_id);
+        match parse_result {
+            Ok(uuid) => {
+                response.success.insert(file_name, uuid);
+            }
+            Err(err) => {
+                response.error.insert(file_name, err);
+            }
+        }
     }
-    Ok(Json(result))
+    if !response.success.is_empty() {
+        return Ok(Json(response));
+    }
+    if response.error.is_empty() {
+        return Err(GeneralError::message(
+            StatusCode::BAD_REQUEST,
+            "No files sent",
+        ));
+    }
+    let mut message = "No file successfully uploaded:".to_owned();
+    for (key, val) in response.error {
+        write!(message, "\n{key}: {val}").unwrap();
+    }
+    Err(GeneralError::message(StatusCode::BAD_REQUEST, message))
 }
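With this change the upload handler reports a per-file outcome instead of silently skipping failed files. A rough sketch of how the new Response type serializes, assuming serde_json plus the uuid crate's v7 and serde features; the struct is repeated here only for illustration and the file names are made up:

use std::collections::HashMap;

use serde::Serialize;
use uuid::Uuid;

// Copy of the Response struct added above, reproduced only to show its JSON shape.
#[derive(Serialize, Debug, Default)]
struct Response {
    success: HashMap<Box<str>, Uuid>,
    error: HashMap<Box<str>, &'static str>,
}

fn main() {
    let mut response = Response::default();
    response.success.insert("a.txt".into(), Uuid::now_v7());
    response.error.insert("b.txt".into(), "Item with that name already exists");
    // Prints something like:
    // {"success":{"a.txt":"01912e8c-..."},"error":{"b.txt":"Item with that name already exists"}}
    println!("{}", serde_json::to_string(&response).unwrap());
}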

View File

@ -15,6 +15,13 @@ pub async fn create(
         .await
         .can_write_guard()?;
 
+    if params.folder_name.len() > 255 {
+        return Err(GeneralError::message(
+            StatusCode::BAD_REQUEST,
+            "Folder name too long",
+        ));
+    }
+
     let exists = db::folder::name_exists(params.parent_folder_id, &params.folder_name, &pool)
         .await
         .handle_internal("Error getting existing names")?;

View File

@ -16,7 +16,7 @@ pub async fn get(
 ) -> GeneralResult<Json<HashMap<i32, PermissionRaw>>> {
     db::folder::get_permissions(params.folder_id, claims.user_id, &pool)
         .await
-        .can_manage_guard()?;
+        .can_read_guard()?;
 
     db::permissions::get_all_for_folder(params.folder_id, &pool)
         .await

View File

@ -33,11 +33,12 @@ fn validate_password(password: &str) -> Result<(), ValidationError> {
             has_special = true;
         }
     }
-    let error_msgs = [has_lower, has_upper, has_number, has_special]
+    let msg = [has_lower, has_upper, has_number, has_special]
         .into_iter()
         .zip(["No lower", "No upper", "No numbers", "No special"])
-        .filter_map(|(param, msg)| (!param).then_some(msg));
-    let msg = error_msgs.format(" ").to_string();
+        .filter_map(|(param, msg)| (!param).then_some(msg))
+        .format(" ")
+        .to_string();
     if !msg.is_empty() {
         return Err(ValidationError::new("invalid_password").with_message(msg.into()));
     }
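The password validation now builds its message in a single iterator chain; Itertools::format lazily joins the remaining items with a separator, and the allocation only happens in to_string. The same pattern in isolation, with arbitrary flag values that are not taken from the handler:

use itertools::Itertools as _;

fn main() {
    // Arbitrary example flags; in validate_password they come from scanning the password.
    let (has_lower, has_upper, has_number, has_special) = (true, false, true, false);

    let msg = [has_lower, has_upper, has_number, has_special]
        .into_iter()
        .zip(["No lower", "No upper", "No numbers", "No special"])
        .filter_map(|(param, msg)| (!param).then_some(msg))
        .format(" ")
        .to_string();

    assert_eq!(msg, "No upper No special");
}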

View File

@ -44,11 +44,37 @@ async fn create_test_users(pool: &Pool) -> anyhow::Result<()> {
     Ok(())
 }
 
+fn init_tracing() {
+    use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
+
+    let mut err = None;
+    tracing_subscriber::registry()
+        .with(
+            tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|inner_err| {
+                err = Some(inner_err);
+                "debug,sqlx=info,axum::rejection=trace".parse().unwrap()
+            }),
+        )
+        .with(tracing_subscriber::fmt::layer())
+        .init();
+
+    if let Some(err) = err {
+        tracing::info!(
+            %err,
+            "Error constructing EnvFilter, falling back to using the default"
+        );
+    }
+}
+
 #[tokio::main]
 async fn main() -> anyhow::Result<()> {
     if env::var("RUST_BACKTRACE").is_err() {
         env::set_var("RUST_BACKTRACE", "1");
     }
 
     let _ = dotenvy::dotenv();
-    tracing_subscriber::fmt::init();
+    init_tracing();
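The new init_tracing prefers the RUST_LOG environment variable and only falls back to the hard-coded "debug,sqlx=info,axum::rejection=trace" filter when the variable is missing or fails to parse. A standalone illustration of that fallback pattern with tracing_subscriber (simplified, without the error reporting above; not the project's code):

use tracing_subscriber::{layer::SubscriberExt as _, util::SubscriberInitExt as _, EnvFilter};

fn main() {
    // Same fallback idea as init_tracing above: use RUST_LOG when it parses,
    // otherwise fall back to a fixed default filter.
    let filter = EnvFilter::try_from_default_env()
        .unwrap_or_else(|_| "debug,sqlx=info,axum::rejection=trace".parse().unwrap());

    tracing_subscriber::registry()
        .with(filter)
        .with(tracing_subscriber::fmt::layer())
        .init();

    tracing::info!("tracing initialised");
}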
auth::force_init_keys();
@ -109,12 +135,32 @@ fn app(state: AppState) -> Router {
         permissions::{self, get_top_level::get_top_level},
         users,
     };
-    use tower_http::ServiceBuilderExt as _;
+    use tower_http::{
+        trace::{MakeSpan, TraceLayer},
+        ServiceBuilderExt as _,
+    };
+
+    #[derive(Clone, Copy)]
+    struct SpanMaker;
+
+    impl<B> MakeSpan<B> for SpanMaker {
+        fn make_span(&mut self, request: &axum::http::Request<B>) -> tracing::Span {
+            tracing::debug_span!(
+                "request",
+                method = %request.method(),
+                uri = %request.uri(),
+                version = ?request.version(),
+                headers = ?request.headers(),
+                request_id = %uuid::Uuid::now_v7()
+            )
+        }
+    }
+
+    const TEN_GIBIBYTES: usize = 10 * 1024 * 1024 * 1024;
 
     let middleware = tower::ServiceBuilder::new()
-        .layer(DefaultBodyLimit::disable())
+        .layer(DefaultBodyLimit::max(TEN_GIBIBYTES))
         .sensitive_headers([header::AUTHORIZATION, header::COOKIE])
-        .trace_for_http()
+        .layer(TraceLayer::new_for_http().make_span_with(SpanMaker))
         .compression();
 
     // Build route service
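Two notes on the last hunk: DefaultBodyLimit::max(TEN_GIBIBYTES) caps request bodies at 10 * 1024 * 1024 * 1024 = 10,737,418,240 bytes instead of disabling the limit entirely, and the generic trace_for_http() layer is replaced with a TraceLayer whose custom MakeSpan tags every request span with a fresh UUIDv7 request_id. A stripped-down, hypothetical sketch of attaching such a layer to a router, assuming axum 0.7-style serving; the route and span fields are illustrative, not the project's:

use axum::{http::Request, routing::get, Router};
use tower_http::trace::{MakeSpan, TraceLayer};

#[derive(Clone, Copy)]
struct SpanMaker;

impl<B> MakeSpan<B> for SpanMaker {
    fn make_span(&mut self, request: &Request<B>) -> tracing::Span {
        // One span per request, tagged with a UUIDv7 so log lines can be correlated.
        tracing::debug_span!("request", uri = %request.uri(), request_id = %uuid::Uuid::now_v7())
    }
}

#[tokio::main]
async fn main() {
    tracing_subscriber::fmt::init();

    let app = Router::new()
        .route("/", get(|| async { "ok" }))
        .layer(TraceLayer::new_for_http().make_span_with(SpanMaker));

    let listener = tokio::net::TcpListener::bind("127.0.0.1:3000").await.unwrap();
    axum::serve(listener, app).await.unwrap();
}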