Compare commits


23 Commits

SHA1 Message Date
5f79a9f0d2 Added timeouts and a struct for empty response 2024-08-17 12:04:53 +03:00
9e3b9527d3 Applying the DefaultBodyLimit layer only to file uploads and modifications 2024-08-15 22:43:00 +03:00
a3e4ac2b2e Final preparation 2024-08-15 20:41:59 +03:00
ab138e8536 Updated deps 2024-08-15 16:03:11 +03:00
62f55043a5 Small optimization for non unix targets 2024-08-11 11:37:51 +03:00
ec7fbc07a0 Removed file size limit 2024-08-11 10:25:36 +03:00
1c9bd104e0 Tweaks for the desktop client 2024-08-10 09:01:06 +03:00
8eb5be96b3 Small change 2024-08-06 16:44:49 +03:00
2b12996453 Removed E in ErrorHandlingExt 2024-08-06 16:39:43 +03:00
75afab933d More error handling improvements 2024-08-06 16:02:44 +03:00
eba30d1e9d Permission guard simplification 2024-08-05 23:45:00 +03:00
9f76228ebe Error handling 2024-08-05 23:32:16 +03:00
8a4e2dc467 Timezone and folder creation fixes 2024-08-05 21:06:25 +03:00
8d297fffdf Prepared queries 2024-08-04 13:51:19 +03:00
ea5c65b6e5 Moved login and register to users 2024-08-04 12:38:50 +03:00
7669a02a95 Cleanup 2024-08-04 12:34:46 +03:00
bac5584b46 Search changes 2024-08-04 10:03:35 +03:00
b6c71ee35b Registration and fixes 2024-08-04 09:48:41 +03:00
94bb1371fa Switched auth_post to accept a form instead of a json 2024-08-03 21:05:28 +03:00
0614c4cad0 Expanded token lifespan to 30 days 2024-08-03 20:48:43 +03:00
c4ff602ec7 Now checking that user_id from claims exists 2024-08-03 20:15:08 +03:00
9f36d8e663 Removed utoipa 2024-08-03 19:41:29 +03:00
40f0526500 Added ability to get the info of the current user 2024-08-03 16:45:54 +03:00
57 changed files with 1158 additions and 558 deletions
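
The two newest commits above mention request timeouts and applying the DefaultBodyLimit layer only to file uploads and modifications. As a rough sketch of that idea (handler names and paths here are invented, and it assumes axum 0.7 plus tower-http with the "timeout" feature that the Cargo.toml change in this diff enables), the body-limit layer can be attached to just the upload routes while a shared timeout wraps the whole router:

use std::time::Duration;

use axum::{
    extract::DefaultBodyLimit,
    routing::{get, post},
    Router,
};
use tower_http::timeout::TimeoutLayer;

// Handler names and route paths below are illustrative only.
async fn upload() {}
async fn modify() {}
async fn list() {}

fn app() -> Router {
    // Only the routes that accept file bodies get the relaxed limit;
    // everything else keeps axum's default 2 MB cap.
    let files = Router::new()
        .route("/file/upload", post(upload))
        .route("/file/modify", post(modify))
        .layer(DefaultBodyLimit::disable());

    Router::new()
        .merge(files)
        .route("/folder/list", get(list))
        // One shared request timeout; tower-http answers 408 when it fires.
        .layer(TimeoutLayer::new(Duration::from_secs(30)))
}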

10
.dockerignore Normal file
View File

@ -0,0 +1,10 @@
**/target/
**/.vscode/
**/.env
**/.git/
**/.dockerignore
**/Dockerfile
**/compose.yaml
**/LICENSE
**/README.md
files/

View File

@ -1,12 +1,12 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n username,\n permission_type as \"permission_type: PermissionRaw\"\nFROM\n permissions\n INNER JOIN users ON permissions.user_id = users.user_id\nWHERE\n folder_id = $1",
"query": "SELECT\n users.user_id,\n permission_type as \"permission_type: PermissionRaw\"\nFROM\n permissions\n INNER JOIN users ON permissions.user_id = users.user_id\nWHERE\n folder_id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "username",
"type_info": "Varchar"
"name": "user_id",
"type_info": "Int4"
},
{
"ordinal": 1,
@ -35,5 +35,5 @@
false
]
},
"hash": "39b78c7f3266bea5e3e44aa372574319cb74dea6b3d0bc16d25e29ca28803317"
"hash": "003349bc951a935fdfb285f99a726c221e3d1d02cb9e47b4c385545298b27217"
}

View File

@ -0,0 +1,40 @@
{
"db_name": "PostgreSQL",
"query": "WITH\n permitted as (\n SELECT\n folder_id\n FROM\n permissions\n WHERE\n user_id = $1\n )\nSELECT\n folder_id, owner_id, folder_name, created_at\nFROM\n folders\nWHERE\n folder_id IN (\n SELECT\n folder_id\n FROM\n permitted\n )\n AND parent_folder_id NOT IN (\n SELECT\n folder_id\n FROM\n permitted\n )",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "folder_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "owner_id",
"type_info": "Int4"
},
{
"ordinal": 2,
"name": "folder_name",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "created_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "1c5dda0e613ee57819d4c9534f3bcd8809f313026a187a2eff66fa4f7ba888a5"
}

View File

@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT user_id, hashed_password FROM users WHERE username = $1 OR email = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "hashed_password",
"type_info": "Bytea"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false
]
},
"hash": "20af817890cb184e17d193e18132796e02e5e7352542f507acda25e9cd6cfc61"
}

View File

@ -21,7 +21,7 @@
{
"ordinal": 3,
"name": "created_at",
"type_info": "Timestamp"
"type_info": "Timestamptz"
}
],
"parameters": {

View File

@ -1,24 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO folders(parent_folder_id, owner_id, folder_name) VALUES ($1, $2, $3) RETURNING folder_id",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "folder_id",
"type_info": "Uuid"
}
],
"parameters": {
"Left": [
"Uuid",
"Int4",
"Varchar"
]
},
"nullable": [
false
]
},
"hash": "3dd4a65d3106d742c2221c0589ac68d4621c6e351f9fbb7aa58629ff2d829234"
}

View File

@ -0,0 +1,17 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO folders(parent_folder_id, owner_id, folder_name, folder_id) VALUES ($1, $2, $3, $4)",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Int4",
"Varchar",
"Uuid"
]
},
"nullable": []
},
"hash": "3faa32dd95822ae8687784817f68e48e726eedd2b7af7e52712974b4f04a8f80"
}

View File

@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE users SET username = $2, email = $3 WHERE user_id = $1 RETURNING *",
"query": "UPDATE users SET username = $2, email = $3 WHERE user_id = $1 RETURNING user_id, username, email",
"describe": {
"columns": [
{
@ -32,5 +32,5 @@
false
]
},
"hash": "347a486f9ea5183b1c4c16234a1833ea61970ea7f901dd57c0715ae3dbddd164"
"hash": "70a68acb301745ef393185c2bef92627648a6e419303adb40f56c09d55291cbd"
}

View File

@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS(SELECT user_id FROM users WHERE user_id = $1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
null
]
},
"hash": "a04a4e8d3a394883a2f1052074bd43fcadafa0c1ba66f36ac49fc54b5c4150b3"
}

View File

@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n user_id, username, email\nFROM\n users\nORDER BY\n GREATEST (\n similarity (email, $1),\n similarity (username, $1)\n ) DESC",
"query": "SELECT\n user_id, username, email, \n GREATEST (\n similarity (email, $1),\n similarity (username, $1)\n ) as \"similarity!\"\nFROM\n users\nORDER BY\n \"similarity!\" DESC\nLIMIT 20",
"describe": {
"columns": [
{
@ -17,6 +17,11 @@
"ordinal": 2,
"name": "email",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "similarity!",
"type_info": "Float4"
}
],
"parameters": {
@ -27,8 +32,9 @@
"nullable": [
false,
false,
false
false,
null
]
},
"hash": "61a26b3321bb5b58a0b90e61b2cdcacfb46a03eb0c0a89839c9b3eff53cb7e56"
"hash": "e0d415b13ccf7aa865558395eb6997bfff50762d36cf3742470a897f4588c802"
}

View File

@ -26,12 +26,12 @@
{
"ordinal": 4,
"name": "created_at",
"type_info": "Timestamp"
"type_info": "Timestamptz"
},
{
"ordinal": 5,
"name": "updated_at",
"type_info": "Timestamp"
"type_info": "Timestamptz"
}
],
"parameters": {

View File

@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "SELECT\n f.folder_id,\n owner_id,\n folder_name,\n created_at\nFROM\n folders f\n JOIN permissions p ON f.folder_id = p.folder_id\nWHERE\n parent_folder_id = $1\n AND p.user_id = $2",
"query": "SELECT\n f.folder_id,\n owner_id,\n folder_name,\n created_at\nFROM\n folders f\n LEFT JOIN permissions p ON f.folder_id = p.folder_id\nWHERE\n parent_folder_id = $1\n AND (p.user_id = $2 OR f.owner_id = $2)",
"describe": {
"columns": [
{
@ -21,7 +21,7 @@
{
"ordinal": 3,
"name": "created_at",
"type_info": "Timestamp"
"type_info": "Timestamptz"
}
],
"parameters": {
@ -37,5 +37,5 @@
false
]
},
"hash": "b11a87b3b9f6289e831b1f0cb0e8f35283687a9b13d050ac15f16e2a8cec046f"
"hash": "ef707c0f6d2ef0d66e71929167b5c82bb8bf923736e6c797711bc3124f0693bc"
}

View File

@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "WITH\n permitted as (\n SELECT\n folder_id\n FROM\n permissions\n WHERE\n user_id = $1\n )\nSELECT\n folder_id\nFROM\n folders\nWHERE\n folder_id IN (\n SELECT\n folder_id\n FROM\n permitted\n )\n AND parent_folder_id NOT IN (\n SELECT\n folder_id\n FROM\n permitted\n )",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "folder_id",
"type_info": "Uuid"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false
]
},
"hash": "f9e36f45f25dd2439a7a0b16b6df356a0a2a47e70b6e031ea5a0442adc86725b"
}

View File

@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO users(username, email) VALUES ($1, $2) RETURNING user_id",
"query": "INSERT INTO users(username, email, hashed_password) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING RETURNING user_id",
"describe": {
"columns": [
{
@ -12,12 +12,13 @@
"parameters": {
"Left": [
"Varchar",
"Varchar"
"Varchar",
"Bytea"
]
},
"nullable": [
false
]
},
"hash": "9602875e192fd321f3a773aa7eb5145cb0d1e7f31def733fd11394e9ad6c0d21"
"hash": "fb94ebf44aff9c5c56cc43ef47f571b4dc1fcdcbc595aef4d245ee2454b0a458"
}

288
Cargo.lock generated
View File

@ -107,7 +107,7 @@ checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.74",
]
[[package]]
@ -220,7 +220,7 @@ dependencies = [
"heck 0.4.1",
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.74",
]
[[package]]
@ -321,12 +321,13 @@ checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50"
[[package]]
name = "cc"
version = "1.1.7"
version = "1.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26a5c3fd7bfa1ce3897a3a3501d362b2d87b7f2583ebcb4a949ec25911025cbc"
checksum = "72db2f7947ecee9b03b510377e8bb9077afa27176fdbff55c51027e976fdcc48"
dependencies = [
"jobserver",
"libc",
"shlex",
]
[[package]]
@ -377,15 +378,15 @@ checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "core-foundation-sys"
version = "0.8.6"
version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f"
checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]]
name = "cpufeatures"
version = "0.2.12"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
checksum = "51e852e6dc9a5bed1fae92dd2375037bf2b768725bf3be87811edee3249d09ad"
dependencies = [
"libc",
]
@ -439,6 +440,41 @@ dependencies = [
"typenum",
]
[[package]]
name = "darling"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989"
dependencies = [
"darling_core",
"darling_macro",
]
[[package]]
name = "darling_core"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5"
dependencies = [
"fnv",
"ident_case",
"proc-macro2",
"quote",
"strsim",
"syn 2.0.74",
]
[[package]]
name = "darling_macro"
version = "0.20.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806"
dependencies = [
"darling_core",
"quote",
"syn 2.0.74",
]
[[package]]
name = "der"
version = "0.7.9"
@ -642,7 +678,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.74",
]
[[package]]
@ -885,9 +921,9 @@ dependencies = [
[[package]]
name = "hyper-util"
version = "0.1.6"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ab92f4f49ee4fb4f997c784b7a2e0fa70050211e0b6a287f898c3c9785ca956"
checksum = "cde7055719c54e36e95e8719f95883f22072a48ede39db7fc17a4e1d5281e9b9"
dependencies = [
"bytes",
"futures-util",
@ -921,6 +957,12 @@ dependencies = [
"cc",
]
[[package]]
name = "ident_case"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "idna"
version = "0.5.0"
@ -933,13 +975,12 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.3.0"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0"
checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c"
dependencies = [
"equivalent",
"hashbrown",
"serde",
]
[[package]]
@ -951,6 +992,15 @@ dependencies = [
"generic-array",
]
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "1.0.11"
@ -968,9 +1018,9 @@ dependencies = [
[[package]]
name = "js-sys"
version = "0.3.69"
version = "0.3.70"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d"
checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a"
dependencies = [
"wasm-bindgen",
]
@ -1001,9 +1051,9 @@ dependencies = [
[[package]]
name = "libc"
version = "0.2.155"
version = "0.2.156"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
checksum = "a5f43f184355eefb8d17fc948dbecf6c13be3c141f20d834ae842193a448c72a"
[[package]]
name = "libm"
@ -1098,9 +1148,9 @@ dependencies = [
[[package]]
name = "mio"
version = "1.0.1"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4569e456d394deccd22ce1c1913e6ea0e54519f577285001215d33557431afe4"
checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec"
dependencies = [
"hermit-abi",
"libc",
@ -1210,9 +1260,9 @@ dependencies = [
[[package]]
name = "object"
version = "0.36.2"
version = "0.36.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f203fa8daa7bb185f760ae12bd8e097f63d17041dcdcaf675ac54cdf863170e"
checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9"
dependencies = [
"memchr",
]
@ -1327,7 +1377,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.74",
]
[[package]]
@ -1427,6 +1477,7 @@ dependencies = [
"chrono",
"dotenvy",
"futures",
"itertools",
"jsonwebtoken",
"rand",
"scrypt",
@ -1441,9 +1492,8 @@ dependencies = [
"tower-http",
"tracing",
"tracing-subscriber",
"utoipa",
"utoipauto",
"uuid",
"validator",
]
[[package]]
@ -1682,29 +1732,29 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.204"
version = "1.0.208"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12"
checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.204"
version = "1.0.208"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.74",
]
[[package]]
name = "serde_json"
version = "1.0.122"
version = "1.0.125"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da"
checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed"
dependencies = [
"itoa",
"memchr",
@ -1765,6 +1815,21 @@ dependencies = [
"lazy_static",
]
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "signal-hook-registry"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1"
dependencies = [
"libc",
]
[[package]]
name = "signature"
version = "2.2.0"
@ -1911,7 +1976,7 @@ dependencies = [
"quote",
"sqlx-core",
"sqlx-macros-core",
"syn 2.0.72",
"syn 2.0.74",
]
[[package]]
@ -1934,7 +1999,7 @@ dependencies = [
"sqlx-mysql",
"sqlx-postgres",
"sqlx-sqlite",
"syn 2.0.72",
"syn 2.0.74",
"tempfile",
"tokio",
"url",
@ -2060,6 +2125,12 @@ dependencies = [
"unicode-properties",
]
[[package]]
name = "strsim"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "subtle"
version = "2.6.1"
@ -2078,9 +2149,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.72"
version = "2.0.74"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af"
checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7"
dependencies = [
"proc-macro2",
"quote",
@ -2101,15 +2172,15 @@ checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394"
[[package]]
name = "tempfile"
version = "3.11.0"
version = "3.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8fcd239983515c23a32fb82099f97d0b11b8c72f654ed659363a95c3dad7a53"
checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64"
dependencies = [
"cfg-if",
"fastrand",
"once_cell",
"rustix",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@ -2129,7 +2200,7 @@ checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.74",
]
[[package]]
@ -2200,6 +2271,7 @@ dependencies = [
"mio",
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
"socket2",
"tokio-macros",
"windows-sys 0.52.0",
@ -2213,7 +2285,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.74",
]
[[package]]
@ -2280,15 +2352,15 @@ dependencies = [
[[package]]
name = "tower-layer"
version = "0.3.2"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0"
checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
[[package]]
name = "tower-service"
version = "0.3.2"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
[[package]]
name = "tracing"
@ -2310,7 +2382,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.74",
]
[[package]]
@ -2409,64 +2481,6 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "utoipa"
version = "4.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5afb1a60e207dca502682537fefcfd9921e71d0b83e9576060f09abc6efab23"
dependencies = [
"indexmap",
"serde",
"serde_json",
"utoipa-gen",
]
[[package]]
name = "utoipa-gen"
version = "4.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7bf0e16c02bc4bf5322ab65f10ab1149bdbcaa782cba66dc7057370a3f8190be"
dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
"regex",
"syn 2.0.72",
"uuid",
]
[[package]]
name = "utoipauto"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4713aabc5ed18aabcd594345b48983b112c0b5dab3d24754352e7f5cf924da03"
dependencies = [
"utoipauto-macro",
]
[[package]]
name = "utoipauto-core"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17e82ab96c5a55263b5bed151b8426410d93aa909a453acdbd4b6792b5af7d64"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
]
[[package]]
name = "utoipauto-macro"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86b8338dc3c9526011ffaa2aa6bd60ddfda9d49d2123108690755c6e34844212"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"utoipauto-core",
]
[[package]]
name = "uuid"
version = "1.10.0"
@ -2477,6 +2491,36 @@ dependencies = [
"serde",
]
[[package]]
name = "validator"
version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db79c75af171630a3148bd3e6d7c4f42b6a9a014c2945bc5ed0020cbb8d9478e"
dependencies = [
"idna",
"once_cell",
"regex",
"serde",
"serde_derive",
"serde_json",
"url",
"validator_derive",
]
[[package]]
name = "validator_derive"
version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55591299b7007f551ed1eb79a684af7672c19c3193fb9e0a31936987bb2438ec"
dependencies = [
"darling",
"once_cell",
"proc-macro-error",
"proc-macro2",
"quote",
"syn 2.0.74",
]
[[package]]
name = "valuable"
version = "0.1.0"
@ -2509,34 +2553,35 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
[[package]]
name = "wasm-bindgen"
version = "0.2.92"
version = "0.2.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5"
dependencies = [
"cfg-if",
"once_cell",
"wasm-bindgen-macro",
]
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.92"
version = "0.2.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b"
dependencies = [
"bumpalo",
"log",
"once_cell",
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.74",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.92"
version = "0.2.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@ -2544,22 +2589,22 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.92"
version = "0.2.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.74",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.92"
version = "0.2.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484"
[[package]]
name = "webpki-roots"
@ -2626,6 +2671,15 @@ dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "windows-targets"
version = "0.48.5"
@ -2765,7 +2819,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.74",
]
[[package]]

View File

@ -22,6 +22,7 @@ axum-extra = { version = "0.9", features = ["typed-header"] }
chrono = { version = "0.4", features = ["serde"] }
dotenvy = "0.15"
futures = "0.3"
itertools = "0.13"
jsonwebtoken = "9"
rand = "0.8"
scrypt = { version = "0.11", default-features = false, features = ["std"] }
@ -37,12 +38,17 @@ sqlx = { version = "0.8", features = [
"uuid",
] }
subtle = "2"
tokio = { version = "1", features = ["parking_lot", "rt-multi-thread"] }
tokio = { version = "1", features = [
"parking_lot",
"rt-multi-thread",
"signal",
] }
tokio-util = { version = "0.7" }
tower = { version = "0.4" }
tower-http = { version = "0.5", features = [
"compression-full",
"sensitive-headers",
"timeout",
"trace",
"util",
] }
@ -51,6 +57,5 @@ tracing-subscriber = { version = "0.3", features = [
"parking_lot",
"env-filter",
] }
utoipa = { version = "4", features = ["axum_extras", "uuid", "chrono"] }
utoipauto = "0.1"
uuid = { version = "1", features = ["serde", "v7"] }
validator = { version = "0.18", features = ["derive"] }
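
The "signal" feature added to tokio above is what makes Ctrl+C and SIGTERM listening available, presumably so the server can shut down gracefully; the cfg(unix)/cfg(not(unix)) split in the usual pattern may also be what the "Small optimization for non unix targets" commit refers to. A minimal sketch, assuming tokio's macros feature for select! and axum 0.7's with_graceful_shutdown (the helper below is not taken from this diff):

use tokio::signal;

// Resolves when the process receives Ctrl+C or, on unix, SIGTERM.
async fn shutdown_signal() {
    let ctrl_c = async {
        signal::ctrl_c().await.expect("failed to listen for Ctrl+C");
    };

    #[cfg(unix)]
    let terminate = async {
        signal::unix::signal(signal::unix::SignalKind::terminate())
            .expect("failed to install SIGTERM handler")
            .recv()
            .await;
    };

    // On non-unix targets there is no SIGTERM; a never-resolving future
    // keeps the select! below identical on every platform.
    #[cfg(not(unix))]
    let terminate = std::future::pending::<()>();

    tokio::select! {
        () = ctrl_c => {},
        () = terminate => {},
    }
}

// Used roughly as:
// axum::serve(listener, app).with_graceful_shutdown(shutdown_signal()).await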

19
Dockerfile Normal file
View File

@ -0,0 +1,19 @@
FROM rust:slim AS chef
RUN cargo install cargo-chef
WORKDIR /app
FROM chef AS planner
COPY . .
RUN cargo chef prepare
FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
RUN cargo chef cook --release
COPY . .
RUN cargo b -r
FROM debian:stable-slim
EXPOSE 3000
WORKDIR /app
COPY --from=builder /app/target/release/project .
CMD [ "./project" ]

25
compose-dev.yaml Normal file
View File

@ -0,0 +1,25 @@
services:
backend:
build: .
ports:
- 3000:3000
environment:
JWT_SECRET: ${JWT_SECRET}
DATABASE_URL: 'postgresql://tester:testing123!@backend_db/backend'
depends_on:
- backend_db
backend_db:
image: ghcr.io/fboulnois/pg_uuidv7:1.5.0
environment:
- POSTGRES_USER=tester
- POSTGRES_PASSWORD=testing123!
- POSTGRES_DB=backend
ports:
- 5432:5432
volumes:
- backend_db_data:/var/lib/postgresql/data
restart: unless-stopped
volumes:
backend_db_data:

View File

@ -1,14 +1,22 @@
services:
db:
backend:
build: .
environment:
JWT_SECRET: ${JWT_SECRET}
DATABASE_URL: 'postgresql://tester:testing123!@backend_db/backend'
depends_on:
- backend_db
restart: unless-stopped
backend_db:
image: ghcr.io/fboulnois/pg_uuidv7:1.5.0
environment:
- POSTGRES_USER=tester
- POSTGRES_PASSWORD=testing123!
- POSTGRES_DB=testing
ports:
- 5432:5432
- POSTGRES_DB=backend
volumes:
- postgres_data:/var/lib/postgresql/data
- backend_db_data:/var/lib/postgresql/data
restart: unless-stopped
volumes:
postgres_data:
backend_db_data:

View File

@ -16,7 +16,7 @@ CREATE TABLE
parent_folder_id UUID REFERENCES folders (folder_id) ON DELETE CASCADE DEFAULT null,
owner_id INT REFERENCES users (user_id) ON DELETE CASCADE NOT NULL,
folder_name VARCHAR(255) NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL
);
CREATE TABLE
@ -26,8 +26,8 @@ CREATE TABLE
file_name VARCHAR(255) NOT NULL,
file_size BIGINT NOT NULL,
sha512 BYTEA NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL
);
CREATE TYPE permission AS ENUM ('read', 'write', 'manage');

1
sql/create_folder.sql Normal file
View File

@ -0,0 +1 @@
INSERT INTO folders(parent_folder_id, owner_id, folder_name, folder_id) VALUES ($1, $2, $3, $4)

View File

@ -1,5 +1,5 @@
SELECT
username,
users.user_id,
permission_type as "permission_type: PermissionRaw"
FROM
permissions

View File

@ -5,7 +5,7 @@ SELECT
created_at
FROM
folders f
JOIN permissions p ON f.folder_id = p.folder_id
LEFT JOIN permissions p ON f.folder_id = p.folder_id
WHERE
parent_folder_id = $1
AND p.user_id = $2
AND (p.user_id = $2 OR f.owner_id = $2)

View File

@ -8,7 +8,7 @@ WITH
user_id = $1
)
SELECT
folder_id
folder_id, owner_id, folder_name, created_at
FROM
folders
WHERE

View File

@ -1,9 +1,11 @@
SELECT
user_id, username, email
FROM
users
ORDER BY
user_id, username, email,
GREATEST (
similarity (email, $1),
similarity (username, $1)
) DESC
) as "similarity!"
FROM
users
ORDER BY
"similarity!" DESC
LIMIT 20

View File

@ -1,21 +1,21 @@
use std::{array::TryFromSliceError, sync::LazyLock};
use axum::{
extract::FromRequestParts,
http::{request::Parts, StatusCode},
response::IntoResponse,
extract::{FromRef, FromRequestParts},
http::request::Parts,
RequestPartsExt,
};
use axum_extra::{
headers::{authorization::Bearer, Authorization},
TypedHeader,
};
use chrono::{TimeDelta, Utc};
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use rand::{rngs::OsRng, RngCore};
use serde::{Deserialize, Serialize};
use subtle::ConstantTimeEq;
use crate::{db, Pool};
use crate::prelude::*;
pub const HASH_LENGTH: usize = 64;
pub const SALT_LENGTH: usize = 64;
@ -46,7 +46,7 @@ pub fn force_init_keys() {
LazyLock::force(&KEYS);
}
/// Hashes the bytes with Scrypt with the given salt
/// Hashes the bytes using Scrypt with the given salt
#[must_use]
fn hash_scrypt(bytes: &[u8], salt: &[u8]) -> [u8; HASH_LENGTH] {
let mut hash = [0; HASH_LENGTH];
@ -55,6 +55,7 @@ fn hash_scrypt(bytes: &[u8], salt: &[u8]) -> [u8; HASH_LENGTH] {
}
/// Verifiable scrypt hashed bytes
#[cfg_attr(test, derive(PartialEq))] // == OPERATOR MUSTN'T BE USED OUTSIDE OF TESTS
pub struct HashedBytes {
pub hash: [u8; HASH_LENGTH],
pub salt: [u8; SALT_LENGTH],
@ -64,7 +65,7 @@ impl HashedBytes {
/// Hashes the bytes
#[must_use]
pub fn hash_bytes(bytes: &[u8]) -> Self {
let mut salt = [0; 64];
let mut salt = [0; SALT_LENGTH];
OsRng.fill_bytes(&mut salt);
Self {
hash: hash_scrypt(bytes, &salt),
@ -120,14 +121,19 @@ pub struct Claims {
pub exp: i64,
}
const JWT_ALGORITHM: jsonwebtoken::Algorithm = jsonwebtoken::Algorithm::HS256;
impl Claims {
pub fn encode(self) -> Result<Token, Error> {
let access_token = encode(
&Header::new(jsonwebtoken::Algorithm::HS256),
&self,
&KEYS.encoding_key,
)
.map_err(|_| Error::TokenCreation)?;
pub fn new(user_id: i32) -> Self {
Self {
user_id,
exp: (Utc::now() + TimeDelta::days(30)).timestamp(),
}
}
pub fn encode(self) -> Result<Token, GeneralError> {
let access_token = encode(&Header::new(JWT_ALGORITHM), &self, &KEYS.encoding_key)
.handle_internal("Token creation error")?;
let token = Token {
access_token,
token_type: "Bearer",
@ -136,38 +142,63 @@ impl Claims {
}
}
#[derive(Debug)]
pub enum Error {
WrongCredentials,
TokenCreation,
InvalidToken,
}
impl IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
let (status, error_message) = match self {
Error::WrongCredentials => (StatusCode::UNAUTHORIZED, "Wrong credentials"),
Error::TokenCreation => (StatusCode::INTERNAL_SERVER_ERROR, "Token creation error"),
Error::InvalidToken => (StatusCode::BAD_REQUEST, "Invalid token"),
};
(status, error_message).into_response()
}
}
#[axum::async_trait]
impl<T> FromRequestParts<T> for Claims {
type Rejection = Error;
impl<T> FromRequestParts<T> for Claims
where
Pool: FromRef<T>,
T: Sync,
{
type Rejection = GeneralError;
async fn from_request_parts(parts: &mut Parts, _state: &T) -> Result<Self, Self::Rejection> {
async fn from_request_parts(parts: &mut Parts, state: &T) -> Result<Self, Self::Rejection> {
const INVALID_TOKEN: GeneralError =
GeneralError::const_message(StatusCode::UNAUTHORIZED, "Invalid token");
let pool = Pool::from_ref(state);
let TypedHeader(Authorization(bearer)) = parts
.extract::<TypedHeader<Authorization<Bearer>>>()
.await
.map_err(|_| Error::InvalidToken)?;
// Decode the user data
let token_data =
decode::<Claims>(bearer.token(), &KEYS.decoding_key, &Validation::default())
.map_err(|_| Error::InvalidToken)?;
.map_err(|_| INVALID_TOKEN)?;
Ok(token_data.claims)
let claims: Claims = decode(
bearer.token(),
&KEYS.decoding_key,
&Validation::new(JWT_ALGORITHM),
)
.map_err(|_| INVALID_TOKEN)?
.claims;
db::users::exists(claims.user_id, &pool)
.await
.handle_internal("Token validation error")?
.then_some(claims)
.ok_or(GeneralError::const_message(
StatusCode::UNAUTHORIZED,
"Wrong credentials",
))
}
}
#[cfg(test)]
mod tests {
use super::HashedBytes;
const PASSWORD: &str = "Password12313#!#4)$*!#";
#[test]
fn test_hash_conversion() {
let bytes = HashedBytes::hash_bytes(PASSWORD.as_bytes());
let bytes2 = HashedBytes::from_bytes(&bytes.as_bytes()).unwrap();
assert!(bytes == bytes2);
}
#[test]
fn test_hash() {
assert!(HashedBytes::hash_bytes(PASSWORD.as_bytes()).verify(PASSWORD.as_bytes()));
}
#[test]
fn test_different_hash() {
assert!(!HashedBytes::hash_bytes(PASSWORD.as_bytes()).verify(b"Different Password"));
}
}

View File

@ -1,9 +1,6 @@
use futures::Stream;
use uuid::Uuid;
use db::permissions::PermissionType;
use crate::Pool;
use super::permissions::PermissionType;
use crate::prelude::*;
pub async fn insert(
file_id: Uuid,
@ -38,8 +35,8 @@ pub struct FileWithoutParentId {
pub file_name: String,
pub file_size: i64,
pub sha512: String,
pub created_at: chrono::NaiveDateTime,
pub updated_at: chrono::NaiveDateTime,
pub created_at: chrono::DateTime<chrono::Utc>,
pub updated_at: chrono::DateTime<chrono::Utc>,
}
pub fn get_files(
@ -70,10 +67,11 @@ pub async fn get_permissions(
}
pub async fn get_name(file_id: Uuid, pool: &Pool) -> sqlx::Result<Option<String>> {
let record = sqlx::query!("SELECT file_name FROM files WHERE file_id = $1", file_id)
let name = sqlx::query!("SELECT file_name FROM files WHERE file_id = $1", file_id)
.fetch_optional(pool)
.await?;
Ok(record.map(|record| record.file_name))
.await?
.map(|record| record.file_name);
Ok(name)
}
pub async fn delete(file_id: Uuid, pool: &Pool) -> sqlx::Result<bool> {

View File

@ -1,7 +1,4 @@
use futures::{Stream, TryStreamExt};
use uuid::Uuid;
use crate::{db::permissions::PermissionRaw, Pool};
use crate::{db::permissions::PermissionRaw, prelude::*};
use super::permissions::PermissionType;
@ -48,7 +45,7 @@ pub struct FolderWithoutParentId {
pub folder_id: Uuid,
pub owner_id: i32,
pub folder_name: String,
pub created_at: chrono::NaiveDateTime,
pub created_at: chrono::DateTime<chrono::Utc>,
}
pub async fn get_by_id(
@ -90,20 +87,26 @@ pub async fn name_exists(parent_folder_id: Uuid, name: &str, pool: &Pool) -> sql
.map(|row| row.exists.unwrap_or(false))
}
pub async fn insert(
parent_folder_id: Uuid,
user_id: i32,
folder_name: &str,
pool: &Pool,
) -> sqlx::Result<Uuid> {
sqlx::query!("INSERT INTO folders(parent_folder_id, owner_id, folder_name) VALUES ($1, $2, $3) RETURNING folder_id",
parent_folder_id,
user_id,
folder_name
)
.fetch_one(pool)
.await
.map(|record| record.folder_id)
/// Creates a folder in the database. Do not use this function to create the ROOT folder
pub async fn insert(parent_folder_id: Uuid, folder_name: &str, pool: &Pool) -> sqlx::Result<Uuid> {
let folder_id = Uuid::now_v7();
let owner_id = get_by_id(parent_folder_id, pool)
.await?
.ok_or(sqlx::Error::RowNotFound)?
.owner_id;
let result = sqlx::query_file!(
"sql/create_folder.sql",
parent_folder_id,
owner_id,
folder_name,
folder_id
)
.execute(pool)
.await?;
if result.rows_affected() == 0 {
return Err(sqlx::Error::RowNotFound);
}
Ok(folder_id)
}
pub fn delete(folder_id: Uuid, pool: &Pool) -> impl Stream<Item = sqlx::Result<Uuid>> + '_ {

View File

@ -1,13 +1,11 @@
use std::collections::HashMap;
use std::{borrow::Cow, collections::HashMap};
use axum::http::StatusCode;
use futures::TryStreamExt as _;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use db::folder::FolderWithoutParentId;
use crate::Pool;
use crate::prelude::*;
#[derive(sqlx::Type, Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[sqlx(type_name = "permission")]
#[sqlx(rename_all = "lowercase")]
pub enum PermissionRaw {
@ -37,46 +35,74 @@ impl From<Option<PermissionRaw>> for PermissionType {
}
}
impl From<PermissionType> for PermissionRaw {
fn from(value: PermissionType) -> Self {
match value {
PermissionType::Manage => Self::Manage,
PermissionType::Write => Self::Write,
PermissionType::Read => Self::Read,
PermissionType::NoPermission => unreachable!(),
}
}
}
impl PermissionType {
pub fn can_read(self) -> bool {
self >= PermissionType::Read
}
pub fn can_read_guard(self) -> Result<(), StatusCode> {
if !self.can_read() {
return Err(StatusCode::NOT_FOUND);
}
fn can_read_guard(self) -> GeneralResult<()> {
self.can_read().then_some(()).item_not_found()?;
Ok(())
}
pub fn can_write_guard(self) -> Result<(), StatusCode> {
fn can_write_guard(self) -> GeneralResult<()> {
self.can_read_guard()?;
if self < PermissionType::Write {
return Err(StatusCode::FORBIDDEN);
return Err(GeneralError::message(
StatusCode::FORBIDDEN,
"Cannot write to the folder",
));
}
Ok(())
}
pub fn can_manage_guard(self) -> Result<(), StatusCode> {
fn can_manage_guard(self) -> GeneralResult<()> {
self.can_read_guard()?;
if self < PermissionType::Manage {
return Err(StatusCode::FORBIDDEN);
return Err(GeneralError::message(
StatusCode::FORBIDDEN,
"Cannot manage the folder",
));
}
Ok(())
}
}
pub trait PermissionExt {
fn can_read_guard(self) -> GeneralResult<()>;
fn can_write_guard(self) -> GeneralResult<()>;
fn can_manage_guard(self) -> GeneralResult<()>;
}
fn permissions_error(error: sqlx::Error) -> GeneralError {
GeneralError {
status_code: StatusCode::INTERNAL_SERVER_ERROR,
message: Cow::Borrowed("Error getting permissions"),
error: Some(error.into()),
}
}
fn apply_guard(
result: sqlx::Result<PermissionType>,
func: impl FnOnce(PermissionType) -> GeneralResult<()>,
) -> GeneralResult<()> {
result.map_err(permissions_error).and_then(func)
}
impl PermissionExt for sqlx::Result<PermissionType> {
fn can_read_guard(self) -> GeneralResult<()> {
apply_guard(self, PermissionType::can_read_guard)
}
fn can_write_guard(self) -> GeneralResult<()> {
apply_guard(self, PermissionType::can_write_guard)
}
fn can_manage_guard(self) -> GeneralResult<()> {
apply_guard(self, PermissionType::can_manage_guard)
}
}
pub async fn insert(
user_id: i32,
folder_id: Uuid,
@ -97,10 +123,10 @@ pub async fn insert(
pub async fn get_all_for_folder(
folder_id: Uuid,
pool: &Pool,
) -> sqlx::Result<HashMap<String, PermissionRaw>> {
) -> sqlx::Result<HashMap<i32, PermissionRaw>> {
sqlx::query_file!("sql/get_all_permissions_for_folder.sql", folder_id)
.fetch(pool)
.map_ok(|record| (record.username, record.permission_type))
.map_ok(|record| (record.user_id, record.permission_type))
.try_collect()
.await
}
@ -112,10 +138,16 @@ pub async fn delete_for_folder(folder_id: Uuid, user_id: i32, pool: &Pool) -> sq
.map(|_| ())
}
pub async fn get_top_level_permitted_folders(user_id: i32, pool: &Pool) -> sqlx::Result<Vec<Uuid>> {
sqlx::query_file!("sql/get_top_level_folder.sql", user_id)
.fetch(pool)
.map_ok(|record| record.folder_id)
.try_collect()
.await
pub async fn get_top_level_permitted_folders(
user_id: i32,
pool: &Pool,
) -> sqlx::Result<Vec<FolderWithoutParentId>> {
sqlx::query_file_as!(
FolderWithoutParentId,
"sql/get_top_level_folder.sql",
user_id
)
.fetch(pool)
.try_collect()
.await
}

View File

@ -1,8 +1,4 @@
use futures::{stream::BoxStream, Stream, TryStreamExt};
use serde::Serialize;
use uuid::Uuid;
use crate::Pool;
use crate::prelude::*;
/// Creates user and returns its id
pub async fn create_user(
@ -10,16 +6,19 @@ pub async fn create_user(
user_email: &str,
hashed_password: &[u8],
pool: &Pool,
) -> sqlx::Result<i32> {
let id = sqlx::query!(
"INSERT INTO users(username, email, hashed_password) VALUES ($1, $2, $3) RETURNING user_id",
) -> sqlx::Result<Option<i32>> {
let Some(record) = sqlx::query!(
"INSERT INTO users(username, email, hashed_password) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING RETURNING user_id",
user_name,
user_email,
hashed_password
)
.fetch_one(pool)
.fetch_optional(pool)
.await?
.user_id;
else {
return Ok(None);
};
let id = record.user_id;
sqlx::query!(
"INSERT INTO folders(owner_id, folder_name) VALUES ($1, $2)",
id,
@ -27,7 +26,7 @@ pub async fn create_user(
)
.execute(pool)
.await?;
Ok(id)
Ok(Some(id))
}
/// Deletes the user and returns the files that must be deleted
@ -61,17 +60,27 @@ pub async fn update(
.await
}
pub async fn get(user_id: i32, pool: &Pool) -> sqlx::Result<UserInfo> {
pub async fn exists(user_id: i32, pool: &Pool) -> sqlx::Result<bool> {
sqlx::query!(
"SELECT EXISTS(SELECT user_id FROM users WHERE user_id = $1)",
user_id
)
.fetch_one(pool)
.await
.map(|record| record.exists.unwrap_or(false))
}
pub async fn get(user_id: i32, pool: &Pool) -> sqlx::Result<Option<UserInfo>> {
sqlx::query_as!(
UserInfo,
"SELECT user_id, username, email FROM users WHERE user_id = $1",
user_id
)
.fetch_one(pool)
.fetch_optional(pool)
.await
}
/// Gets the hashed password field by either the email or th username
/// Gets the hashed password field by either the email or the username
pub async fn get_hash(search_string: &str, pool: &Pool) -> sqlx::Result<Option<(i32, Vec<u8>)>> {
let record = sqlx::query!(
"SELECT user_id, hashed_password FROM users WHERE username = $1 OR email = $1",
@ -82,9 +91,17 @@ pub async fn get_hash(search_string: &str, pool: &Pool) -> sqlx::Result<Option<(
Ok(record.map(|record| (record.user_id, record.hashed_password)))
}
#[derive(Serialize, Debug)]
pub struct UserSearch {
pub user_id: i32,
pub username: String,
pub email: String,
pub similarity: f32,
}
pub fn search_for_user<'a>(
search_string: &str,
pool: &'a Pool,
) -> BoxStream<'a, sqlx::Result<UserInfo>> {
sqlx::query_file_as!(UserInfo, "sql/search_for_user.sql", search_string).fetch(pool)
) -> BoxStream<'a, sqlx::Result<UserSearch>> {
sqlx::query_file_as!(UserSearch, "sql/search_for_user.sql", search_string).fetch(pool)
}

View File

@ -1,34 +0,0 @@
use chrono::TimeDelta;
use crate::{
auth::{authenticate_user, Error, Token},
prelude::*,
};
#[derive(Deserialize, Debug)]
pub struct Params {
username: String,
password: String,
}
fn get_exp() -> i64 {
let mut time = chrono::Utc::now();
time += TimeDelta::minutes(30);
time.timestamp()
}
pub async fn post(
State(state): State<AppState>,
Json(payload): Json<Params>,
) -> Result<Json<Token>, Error> {
let user_id = authenticate_user(&payload.username, &payload.password, &state.pool)
.await
.map_err(|_| Error::WrongCredentials)?
.ok_or(Error::WrongCredentials)?;
Claims {
user_id,
exp: get_exp(),
}
.encode()
.map(Json)
}

View File

@ -1 +0,0 @@
pub mod auth_post;

View File

@ -1,4 +1,4 @@
pub use crate::prelude::*;
use crate::prelude::*;
#[derive(Deserialize, Debug)]
pub struct Params {
@ -9,24 +9,20 @@ pub async fn delete(
Query(params): Query<Params>,
State(state): State<AppState>,
claims: Claims,
) -> Result<StatusCode, StatusCode> {
) -> GeneralResult<EmptyResponse> {
db::file::get_permissions(params.file_id, claims.user_id, &state.pool)
.await
.handle_internal()?
.can_write_guard()?;
let deleted = db::file::delete(params.file_id, &state.pool)
db::file::delete(params.file_id, &state.pool)
.await
.handle_internal()?;
if !deleted {
return Err(StatusCode::NOT_FOUND); // Will not happen most of the time due to can write guard
}
.handle_internal("Error deleting the file")?;
state
.storage
.delete(params.file_id)
.await
.handle_internal()?;
.handle_internal("Error deleting the file")?;
Ok(StatusCode::NO_CONTENT)
Ok(EmptyResponse)
}

View File

@ -12,16 +12,15 @@ pub async fn download(
Query(params): Query<Params>,
State(state): State<AppState>,
claims: Claims,
) -> Result<impl IntoResponse, StatusCode> {
) -> GeneralResult<impl IntoResponse> {
db::file::get_permissions(params.file_id, claims.user_id, &state.pool)
.await
.handle_internal()?
.can_read_guard()?;
let mut name = db::file::get_name(params.file_id, &state.pool)
.await
.handle_internal()?
.ok_or(StatusCode::NOT_FOUND)?;
.handle_internal("Error getting file info")?
.item_not_found()?;
name = name
.chars()
.fold(String::with_capacity(name.len()), |mut result, char| {
@ -32,7 +31,11 @@ pub async fn download(
result
});
let file = state.storage.read(params.file_id).await.handle_internal()?;
let file = state
.storage
.read(params.file_id)
.await
.handle_internal("Error reading the file")?;
let body = Body::from_stream(ReaderStream::new(file));
let disposition = format!("attachment; filename=\"{name}\"");
let headers = [(header::CONTENT_DISPOSITION, disposition)];

View File

@ -12,10 +12,9 @@ pub async fn modify(
State(state): State<AppState>,
claims: Claims,
mut multipart: Multipart,
) -> Result<StatusCode, StatusCode> {
) -> GeneralResult<EmptyResponse> {
db::file::get_permissions(params.file_id, claims.user_id, &state.pool)
.await
.handle_internal()?
.can_write_guard()?;
// Very weird workaround to get the first file in multipart
@ -23,7 +22,12 @@ pub async fn modify(
match multipart.next_field().await {
Ok(Some(field)) if field.file_name().is_some() => break field,
Ok(Some(_)) => continue,
_ => return Err(StatusCode::BAD_REQUEST),
_ => {
return Err(GeneralError::message(
StatusCode::BAD_REQUEST,
"No file in the multipart",
))
}
}
};
@ -31,19 +35,22 @@ pub async fn modify(
.storage
.write(params.file_id)
.await
.handle_internal()?
.ok_or(StatusCode::NOT_FOUND)?;
.handle_internal("Error writing to the file")?
.item_not_found()?;
let (hash, size) = crate::FileStorage::write_to_file(&mut file, &mut field)
.await
.map_err(|err| {
tracing::warn!(%err);
StatusCode::INTERNAL_SERVER_ERROR
GeneralError::message(
StatusCode::INTERNAL_SERVER_ERROR,
"Error writing to the file",
)
})?;
db::file::update(params.file_id, size, hash, &state.pool)
.await
.handle_internal()?;
.handle_internal("Error updating the file")?;
Ok(StatusCode::NO_CONTENT)
Ok(EmptyResponse)
}

View File

@ -1,8 +1,9 @@
use std::collections::{HashMap, HashSet};
use std::{
collections::{HashMap, HashSet},
fmt::Write as _,
};
use axum::extract::multipart::{self, Multipart};
use futures::TryStreamExt;
use tokio::io::AsyncWrite;
use crate::prelude::*;
@ -11,25 +12,60 @@ pub struct Params {
parent_folder: Uuid,
}
#[derive(Serialize, Debug, Default)]
pub struct Response {
success: HashMap<Box<str>, Uuid>,
error: HashMap<Box<str>, &'static str>,
}
fn validate_name(name: &str, existing_names: &HashSet<String>) -> Result<(), &'static str> {
if name.len() > 255 {
return Err("Name too long");
}
if existing_names.contains(name) {
return Err("Item with that name already exists");
}
Ok(())
}
async fn create_file(
file_id: Uuid,
file: impl AsyncWrite + Unpin,
storage: &crate::FileStorage,
file_name: &str,
field: &mut multipart::Field<'_>,
parent_folder: Uuid,
pool: &Pool,
) -> bool {
let (hash, size) = match crate::FileStorage::write_to_file(file, field).await {
Ok(values) => values,
) -> anyhow::Result<Uuid> {
let (file_id, file) = storage.create().await?;
let result = async {
let (hash, size) = crate::FileStorage::write_to_file(file, field).await?;
db::file::insert(file_id, parent_folder, file_name, size, hash, pool).await?;
anyhow::Result::Ok(())
}
.await;
match result {
Ok(()) => Ok(file_id),
Err(err) => {
tracing::warn!(%err);
return false;
let _ = storage.delete(file_id).await;
Err(err)
}
};
db::file::insert(file_id, parent_folder, file_name, size, hash, pool)
}
}
async fn parse_field(
field: &mut multipart::Field<'_>,
name: &str,
storage: &crate::FileStorage,
parent_folder: Uuid,
pool: &Pool,
existing_names: &HashSet<String>,
) -> Result<Uuid, &'static str> {
validate_name(name, existing_names)?;
create_file(storage, name, field, parent_folder, pool)
.await
.inspect_err(|err| tracing::warn!(%err))
.is_ok()
.map_err(|err| {
tracing::warn!(%err, "Error creating the file");
"Error creating the file"
})
}
pub async fn upload(
@ -37,43 +73,57 @@ pub async fn upload(
State(state): State<AppState>,
claims: Claims,
mut multi: Multipart,
) -> Result<Json<HashMap<String, Uuid>>, StatusCode> {
) -> GeneralResult<Json<Response>> {
db::folder::get_permissions(params.parent_folder, claims.user_id, &state.pool)
.await
.handle_internal()?
.can_write_guard()?;
let existing_names: HashSet<String> = db::folder::get_names(params.parent_folder, &state.pool)
.try_collect()
.await
.handle_internal()?;
let mut result = HashMap::new();
.handle_internal("Error getting existing names")?;
let mut response = Response::default();
while let Ok(Some(mut field)) = multi.next_field().await {
let Some(file_name) = field.file_name().map(ToOwned::to_owned) else {
let Some(file_name) = field.file_name().map(Box::<str>::from) else {
continue;
};
if existing_names.contains(&file_name) {
continue;
}
let Ok((file_id, mut file)) = state.storage.create().await else {
tracing::warn!("Couldn't create uuid for new file");
continue;
};
let is_success = create_file(
file_id,
&mut file,
&file_name,
let parse_result = parse_field(
&mut field,
&file_name,
&state.storage,
params.parent_folder,
&state.pool,
&existing_names,
)
.await;
if !is_success {
let _ = state.storage.delete(file_id).await;
continue;
match parse_result {
Ok(uuid) => {
response.success.insert(file_name, uuid);
}
Err(err) => {
response.error.insert(file_name, err);
}
}
result.insert(file_name, file_id);
}
Ok(Json(result))
if !response.success.is_empty() {
return Ok(Json(response));
}
if response.error.is_empty() {
return Err(GeneralError::message(
StatusCode::BAD_REQUEST,
"No files sent",
));
}
let mut message = "No file successfully uploaded:".to_owned();
for (key, val) in response.error {
write!(message, "\n{key}: {val}").unwrap();
}
Err(GeneralError::message(StatusCode::BAD_REQUEST, message))
}

View File

@ -10,27 +10,30 @@ pub async fn create(
State(pool): State<Pool>,
claims: Claims,
Json(params): Json<Params>,
) -> Result<Json<Uuid>, StatusCode> {
) -> GeneralResult<Json<Uuid>> {
db::folder::get_permissions(params.parent_folder_id, claims.user_id, &pool)
.await
.handle_internal()?
.can_write_guard()?;
if params.folder_name.len() > 255 {
return Err(GeneralError::message(
StatusCode::BAD_REQUEST,
"Folder name too long",
));
}
let exists = db::folder::name_exists(params.parent_folder_id, &params.folder_name, &pool)
.await
.handle_internal()?;
.handle_internal("Error getting existing names")?;
if exists {
return Err(StatusCode::CONFLICT);
return Err(GeneralError::message(
StatusCode::CONFLICT,
"Name already taken",
));
}
let id = db::folder::insert(
params.parent_folder_id,
claims.user_id,
&params.folder_name,
&pool,
)
.await
.handle_internal()?;
Ok(Json(id))
db::folder::insert(params.parent_folder_id, &params.folder_name, &pool)
.await
.handle_internal("Error creating the folder")
.map(Json)
}

View File

@ -1,5 +1,3 @@
use futures::TryStreamExt;
use crate::prelude::*;
#[derive(Deserialize, Debug)]
@ -10,18 +8,20 @@ pub struct Params {
pub async fn delete(
State(state): State<AppState>,
claims: Claims,
Json(params): Json<Params>,
) -> Result<(), StatusCode> {
Query(params): Query<Params>,
) -> GeneralResult<EmptyResponse> {
let root = db::folder::get_root(claims.user_id, &state.pool)
.await
.handle_internal()?;
.handle_internal("Error getting the root folder")?;
if params.folder_id == root {
return Err(StatusCode::BAD_REQUEST);
return Err(GeneralError::message(
StatusCode::BAD_REQUEST,
"Cannot delete the root folder",
));
}
db::folder::get_permissions(params.folder_id, claims.user_id, &state.pool)
.await
.handle_internal()?
.can_write_guard()?;
let storage = &state.storage;
@ -31,5 +31,7 @@ pub async fn delete(
Ok(())
})
.await
.handle_internal()
.handle_internal("Error deleting the fodler")?;
Ok(EmptyResponse)
}

View File

@ -1,4 +1,4 @@
use futures::TryStreamExt;
use db::{file::FileWithoutParentId, folder::FolderWithoutParentId};
use tokio::try_join;
use super::list::Params;
@ -7,13 +7,13 @@ use crate::prelude::*;
#[derive(Serialize, Debug)]
pub struct FolderStructure {
#[serde(flatten)]
folder_base: db::folder::FolderWithoutParentId,
folder_base: FolderWithoutParentId,
folders: Vec<FolderStructure>,
files: Vec<db::file::FileWithoutParentId>,
files: Vec<FileWithoutParentId>,
}
impl From<db::folder::FolderWithoutParentId> for FolderStructure {
fn from(value: db::folder::FolderWithoutParentId) -> Self {
impl From<FolderWithoutParentId> for FolderStructure {
fn from(value: FolderWithoutParentId) -> Self {
FolderStructure {
folder_base: value,
folders: Vec::new(),
@ -22,41 +22,35 @@ impl From<db::folder::FolderWithoutParentId> for FolderStructure {
}
}
#[derive(Debug, Serialize)]
pub struct Response {
folder_id: Uuid,
structure: FolderStructure,
}
pub async fn structure(
Query(params): Query<Params>,
State(pool): State<Pool>,
claims: Claims,
) -> Result<Json<Response>, StatusCode> {
) -> GeneralResult<Json<FolderStructure>> {
let folder_id = db::folder::process_id(params.folder_id, claims.user_id, &pool)
.await
.handle_internal()?
.ok_or(StatusCode::NOT_FOUND)?;
.handle_internal("Error processing id")?
.item_not_found()?;
let folder = db::folder::get_by_id(folder_id, &pool)
.await
.handle_internal()?
.ok_or(StatusCode::NOT_FOUND)?;
let mut response = Response {
folder_id,
structure: folder.into(),
};
let mut stack = vec![&mut response.structure];
.handle_internal("Error getting folder info")?
.item_not_found()?;
let mut response: FolderStructure = folder.into();
let mut stack = vec![&mut response];
while let Some(folder) = stack.pop() {
let (files, folders) = try_join!(
db::file::get_files(folder_id, &pool).try_collect(),
db::folder::get_folders(folder_id, claims.user_id, &pool)
db::file::get_files(folder.folder_base.folder_id, &pool).try_collect(),
db::folder::get_folders(folder.folder_base.folder_id, claims.user_id, &pool)
.map_ok(Into::into)
.try_collect()
)
.handle_internal()?;
.handle_internal("Error getting folder contents")?;
folder.folders = folders;
folder.files = files;
stack.extend(folder.folders.iter_mut());
}
Ok(Json(response))
}

View File

@ -1,4 +1,3 @@
use futures::TryStreamExt;
use tokio::try_join;
use crate::prelude::*;
@ -19,17 +18,17 @@ pub async fn list(
Query(params): Query<Params>,
State(pool): State<Pool>,
claims: Claims,
) -> Result<Json<Response>, StatusCode> {
) -> GeneralResult<Json<Response>> {
let folder_id = db::folder::process_id(params.folder_id, claims.user_id, &pool)
.await
.handle_internal()?
.ok_or(StatusCode::NOT_FOUND)?;
.handle_internal("Error processing id")?
.handle(StatusCode::NOT_FOUND, "Item not found")?;
let (files, folders) = try_join!(
db::file::get_files(folder_id, &pool).try_collect(),
db::folder::get_folders(folder_id, claims.user_id, &pool).try_collect()
)
.handle_internal()?;
.handle_internal("Error getting folder contents")?;
Ok(Json(Response {
folder_id,

View File

@ -1,4 +1,3 @@
pub mod authorization;
pub mod file;
pub mod folder;
pub mod permissions;

View File

@ -9,18 +9,17 @@ pub struct Params {
pub async fn delete(
State(pool): State<Pool>,
claims: Claims,
Json(params): Json<Params>,
) -> Result<StatusCode, StatusCode> {
Query(params): Query<Params>,
) -> GeneralResult<EmptyResponse> {
if params.user_id != claims.user_id {
db::folder::get_permissions(params.folder_id, claims.user_id, &pool)
.await
.handle_internal()?
.can_manage_guard()?;
}
db::permissions::delete_for_folder(params.folder_id, params.user_id, &pool)
.await
.handle_internal()?;
.handle_internal("Error deleting the permissions")?;
Ok(StatusCode::NO_CONTENT)
Ok(EmptyResponse)
}

View File

@ -13,14 +13,13 @@ pub async fn get(
State(pool): State<Pool>,
Query(params): Query<Params>,
claims: Claims,
) -> Result<Json<HashMap<String, PermissionRaw>>, StatusCode> {
) -> GeneralResult<Json<HashMap<i32, PermissionRaw>>> {
db::folder::get_permissions(params.folder_id, claims.user_id, &pool)
.await
.handle_internal()?
.can_manage_guard()?;
.can_read_guard()?;
let permissions = db::permissions::get_all_for_folder(params.folder_id, &pool)
db::permissions::get_all_for_folder(params.folder_id, &pool)
.await
.handle_internal()?;
Ok(Json(permissions))
.handle_internal("Error getting permissions")
.map(Json)
}

View File

@ -1,11 +1,13 @@
use db::folder::FolderWithoutParentId;
use crate::prelude::*;
pub async fn get_top_level(
State(pool): State<Pool>,
claims: Claims,
) -> Result<Json<Vec<Uuid>>, StatusCode> {
let folders = db::permissions::get_top_level_permitted_folders(claims.user_id, &pool)
) -> GeneralResult<Json<Vec<FolderWithoutParentId>>> {
db::permissions::get_top_level_permitted_folders(claims.user_id, &pool)
.await
.handle_internal()?;
Ok(Json(folders))
.handle_internal("Error reading from the database")
.map(Json)
}

View File

@ -1,6 +1,4 @@
use db::permissions::PermissionRaw;
use crate::prelude::*;
use crate::{db::permissions::PermissionRaw, prelude::*};
#[derive(Deserialize, Debug)]
pub struct Params {
@ -13,19 +11,39 @@ pub async fn set(
claims: Claims,
State(pool): State<Pool>,
Json(params): Json<Params>,
) -> Result<StatusCode, StatusCode> {
) -> GeneralResult<EmptyResponse> {
let root = db::folder::get_root(claims.user_id, &pool)
.await
.handle_internal()?;
.handle_internal("Error getting the root folder")?;
if params.folder_id == root {
return Err(StatusCode::BAD_REQUEST);
return Err(GeneralError::message(
StatusCode::BAD_REQUEST,
"Cannot set permissions for the root folder",
));
}
db::folder::get_permissions(params.folder_id, claims.user_id, &pool)
.await
.handle_internal()?
.can_manage_guard()?;
if params.user_id == claims.user_id {
return Err(GeneralError::message(
StatusCode::BAD_REQUEST,
"Cannot set your own permissions",
));
}
let folder_info = db::folder::get_by_id(params.folder_id, &pool)
.await
.handle_internal("Error getting folder info")?
.item_not_found()?;
if folder_info.owner_id == params.user_id {
return Err(GeneralError::message(
StatusCode::BAD_REQUEST,
"Cannot set permissions of the folder's owner",
));
}
db::permissions::insert(
params.user_id,
params.folder_id,
@ -33,7 +51,7 @@ pub async fn set(
&pool,
)
.await
.handle_internal()?;
.handle_internal("Error writing to the database")?;
Ok(StatusCode::NO_CONTENT)
Ok(EmptyResponse)
}

View File

@ -1,16 +1,19 @@
use futures::TryStreamExt;
use std::time::Duration;
use crate::prelude::*;
pub async fn delete(
State(AppState { pool, ref storage }): State<AppState>,
claims: Claims,
) -> Result<(), StatusCode> {
) -> GeneralResult<EmptyResponse> {
tokio::time::sleep(Duration::from_secs(100)).await;
db::users::delete_user(claims.user_id, &pool)
.try_for_each_concurrent(5, |file_id| async move {
let _ = storage.delete(file_id).await;
Ok(())
})
.await
.handle_internal()
.handle_internal("Error deleting the user")?;
Ok(EmptyResponse)
}

View File

@ -5,12 +5,22 @@ pub struct Params {
user_id: i32,
}
pub async fn get(
State(pool): State<Pool>,
Query(params): Query<Params>,
) -> Result<Json<db::users::UserInfo>, StatusCode> {
let info = db::users::get(params.user_id, &pool)
type Response = GeneralResult<Json<db::users::UserInfo>>;
pub async fn get(State(pool): State<Pool>, Query(params): Query<Params>) -> Response {
db::users::get(params.user_id, &pool)
.await
.handle_internal()?;
Ok(Json(info))
.handle_internal("Error getting the user")?
.handle(StatusCode::NOT_FOUND, "User not found")
.map(Json)
}
pub async fn current(state: State<Pool>, claims: Claims) -> Response {
get(
state,
Query(Params {
user_id: claims.user_id,
}),
)
.await
}

View File

@ -0,0 +1,26 @@
use axum::Form;
use crate::{
auth::{authenticate_user, Token},
prelude::*,
};
#[derive(Deserialize, Debug)]
pub struct Params {
username: String,
password: String,
}
pub async fn login(
State(pool): State<Pool>,
Form(payload): Form<Params>,
) -> GeneralResult<Json<Token>> {
let user_id = authenticate_user(&payload.username, &payload.password, &pool)
.await
.handle_internal("Error getting user from database")?
.handle(
StatusCode::NOT_FOUND,
"User with this name and password doesn't exist",
)?;
Claims::new(user_id).encode().map(Json)
}

View File

@ -1,4 +1,6 @@
pub mod delete;
pub mod get;
pub mod login;
pub mod put;
pub mod register;
pub mod search;

View File

@ -1,8 +1,12 @@
use validator::Validate;
use crate::prelude::*;
#[derive(Deserialize, Debug)]
#[derive(Deserialize, Debug, Validate)]
pub struct Params {
#[validate(length(min = 3, max = 10))]
username: String,
#[validate(email)]
email: String,
}
@ -10,9 +14,10 @@ pub async fn put(
State(pool): State<Pool>,
claims: Claims,
Json(params): Json<Params>,
) -> Result<Json<db::users::UserInfo>, StatusCode> {
let info = db::users::update(claims.user_id, &params.username, &params.email, &pool)
) -> GeneralResult<Json<db::users::UserInfo>> {
params.validate()?;
db::users::update(claims.user_id, &params.username, &params.email, &pool)
.await
.handle_internal()?;
Ok(Json(info))
.handle_internal("Error updating the user")
.map(Json)
}

View File

@ -0,0 +1,64 @@
use axum::Form;
use itertools::Itertools;
use validator::{Validate, ValidationError};
use crate::{
auth::{HashedBytes, Token},
prelude::*,
};
#[derive(Deserialize, Debug, Validate)]
pub struct Params {
#[validate(length(min = 3, max = 10))]
username: String,
#[validate(email)]
email: String,
#[validate(length(min = 6), custom(function = "validate_password"))]
password: String,
}
fn validate_password(password: &str) -> Result<(), ValidationError> {
let mut has_lower = false;
let mut has_upper = false;
let mut has_number = false;
let mut has_special = false;
for char in password.chars() {
if char.is_lowercase() {
has_lower = true;
} else if char.is_uppercase() {
has_upper = true;
} else if char.is_ascii_digit() {
has_number = true;
} else {
has_special = true;
}
}
let msg = [has_lower, has_upper, has_number, has_special]
.into_iter()
.zip(["No lower", "No upper", "No numbers", "No special"])
.filter_map(|(param, msg)| (!param).then_some(msg))
.format(" ")
.to_string();
if !msg.is_empty() {
return Err(ValidationError::new("invalid_password").with_message(msg.into()));
}
Ok(())
}
pub async fn register(
State(pool): State<Pool>,
Form(params): Form<Params>,
) -> GeneralResult<Json<Token>> {
params.validate()?;
let password = HashedBytes::hash_bytes(params.password.as_bytes()).as_bytes();
let id = db::users::create_user(&params.username, &params.email, &password, &pool)
.await
.handle_internal("Error creating the user")?
.handle(
StatusCode::BAD_REQUEST,
"The username or the email are taken",
)?;
Claims::new(id).encode().map(Json)
}
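
As a sanity check of the validator introduced here, a small test sketch (illustration only, not part of the commit; it assumes validate_password is in scope as defined above):

#[cfg(test)]
mod tests {
    use super::validate_password;

    #[test]
    fn weak_password_lists_missing_character_classes() {
        // "abc" only contains lowercase letters, so the other three classes are reported.
        let err = validate_password("abc").unwrap_err();
        assert_eq!(err.message.as_deref(), Some("No upper No numbers No special"));
    }

    #[test]
    fn strong_password_passes() {
        // Lowercase, uppercase, a digit and a special character are all present.
        assert!(validate_password("Str0ng!pwd").is_ok());
    }
}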

View File

@ -1,5 +1,3 @@
use futures::TryStreamExt;
use crate::prelude::*;
#[derive(Deserialize, Debug)]
@ -10,11 +8,12 @@ pub struct Params {
pub async fn search(
State(pool): State<Pool>,
Query(params): Query<Params>,
) -> sqlx::Result<Json<Vec<db::users::UserInfo>>, StatusCode> {
let users = db::users::search_for_user(&params.search_string, &pool)
) -> GeneralResult<Json<Vec<db::users::UserSearch>>> {
db::users::search_for_user(&params.search_string, &pool)
.take(20)
.try_filter(|user| future::ready(user.similarity > 0.1))
.try_collect()
.await
.handle_internal()?;
Ok(Json(users))
.handle_internal("Error getting users from the database")
.map(Json)
}
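
Note that take(20) runs before try_filter, so the handler reads at most 20 rows from the database and may return fewer than 20 matches even when more exist. A tiny stand-alone illustration of that ordering (plain integers instead of UserSearch rows, so it is only an analogy):

use futures::{future, stream, StreamExt as _, TryStreamExt as _};

#[tokio::main]
async fn main() {
    let rows = stream::iter((1..=100).map(Ok::<_, std::convert::Infallible>));
    let kept: Vec<i32> = rows
        .take(20)                                  // stop reading after 20 items
        .try_filter(|n| future::ready(n % 2 == 0)) // then drop items failing the check
        .try_collect()
        .await
        .unwrap();
    assert_eq!(kept.len(), 10); // not 20: filtering happens after the cut-off
}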

View File

@ -1,28 +1,103 @@
use axum::http::StatusCode;
use std::borrow::Cow;
use axum::{http::StatusCode, response::IntoResponse};
type BoxError = Box<dyn std::error::Error>;
pub fn handle_error(error: impl Into<BoxError>) {
let error: BoxError = error.into();
tracing::error!(error);
/// Common error type for the project
pub struct GeneralError {
/// Response status code
pub status_code: StatusCode,
/// Message to send to the user
pub message: Cow<'static, str>,
/// Error to log
pub error: Option<BoxError>,
}
pub trait ErrorHandlingExt<T, E>
impl GeneralError {
pub fn message(status_code: StatusCode, message: impl Into<Cow<'static, str>>) -> Self {
Self {
status_code,
message: message.into(),
error: None,
}
}
pub const fn const_message(status_code: StatusCode, message: &'static str) -> Self {
Self {
status_code,
message: Cow::Borrowed(message),
error: None,
}
}
}
impl IntoResponse for GeneralError {
fn into_response(self) -> axum::response::Response {
if let Some(err) = self.error {
tracing::error!(err, message = %self.message, status_code = ?self.status_code);
}
(self.status_code, self.message).into_response()
}
}
impl From<validator::ValidationErrors> for GeneralError {
fn from(value: validator::ValidationErrors) -> Self {
GeneralError::message(StatusCode::BAD_REQUEST, value.to_string())
}
}
pub type GeneralResult<T> = Result<T, GeneralError>;
pub trait ErrorHandlingExt<T>
where
Self: Sized,
{
fn handle(self, code: StatusCode) -> Result<T, StatusCode>;
fn handle(
self,
status_code: StatusCode,
message: impl Into<Cow<'static, str>>,
) -> GeneralResult<T>;
fn handle_internal(self) -> Result<T, StatusCode> {
self.handle(StatusCode::INTERNAL_SERVER_ERROR)
fn handle_internal(self, message: impl Into<Cow<'static, str>>) -> GeneralResult<T> {
self.handle(StatusCode::INTERNAL_SERVER_ERROR, message)
}
}
impl<T, E: Into<BoxError>> ErrorHandlingExt<T, E> for Result<T, E> {
fn handle(self, code: StatusCode) -> Result<T, StatusCode> {
self.map_err(|err| {
handle_error(err);
code
impl<T, E: Into<BoxError>> ErrorHandlingExt<T> for Result<T, E> {
fn handle(
self,
status_code: StatusCode,
message: impl Into<Cow<'static, str>>,
) -> GeneralResult<T> {
self.map_err(|err| GeneralError {
status_code,
message: message.into(),
error: Some(err.into()),
})
}
}
impl<T> ErrorHandlingExt<T> for Option<T> {
fn handle(
self,
status_code: StatusCode,
message: impl Into<Cow<'static, str>>,
) -> GeneralResult<T> {
self.ok_or_else(|| GeneralError {
status_code,
message: message.into(),
error: None,
})
}
}
pub trait ItemNotFoundExt<T> {
fn item_not_found(self) -> Result<T, GeneralError>;
}
impl<T> ItemNotFoundExt<T> for Option<T> {
fn item_not_found(self) -> GeneralResult<T> {
self.handle(StatusCode::NOT_FOUND, "Item not found")
}
}
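
To show how the pieces above compose in a handler, a minimal sketch follows. The fetch_note helper and Note type are hypothetical; GeneralResult and the extension traits are assumed to be in scope as defined in this file:

use axum::Json;
use uuid::Uuid;

#[derive(serde::Serialize)]
struct Note { id: Uuid, text: String }

// Hypothetical query helper: a database error becomes Err, a missing row becomes Ok(None).
async fn fetch_note(_id: Uuid, _pool: &sqlx::PgPool) -> sqlx::Result<Option<Note>> {
    todo!()
}

async fn get_note(pool: &sqlx::PgPool, id: Uuid) -> GeneralResult<Json<Note>> {
    fetch_note(id, pool)
        .await
        // sqlx::Error -> 500; the underlying error is logged, only the message reaches the client
        .handle_internal("Error reading the note")?
        // None -> 404 "Item not found"
        .item_not_found()
        .map(Json)
}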

View File

@ -5,14 +5,14 @@ use std::{
};
use axum::body::Bytes;
use futures::{Stream, StreamExt};
use sha2::Digest as _;
use tokio::{
fs,
io::{AsyncWrite, AsyncWriteExt, BufWriter},
};
use tokio_util::io::StreamReader;
use uuid::Uuid;
use crate::prelude::*;
#[derive(Clone)]
pub struct FileStorage(Arc<Path>);
@ -95,6 +95,7 @@ impl FileStorage {
const BUF_CAP: usize = 64 * 1024 * 1024; // 64 MiB
let mut hash = sha2::Sha512::new();
let mut size: i64 = 0;
let stream = stream.map(|value| {
let bytes = value.map_err(io::Error::other)?;
hash.update(&bytes);
@ -104,10 +105,12 @@ impl FileStorage {
.ok_or_else(|| io::Error::other(anyhow::anyhow!("Size calculation overflow")))?;
io::Result::Ok(bytes)
});
let mut reader = StreamReader::new(stream);
let mut writer = BufWriter::with_capacity(BUF_CAP, file);
tokio::io::copy_buf(&mut reader, &mut writer).await?;
writer.flush().await?;
let hash = hash.finalize().to_vec();
Ok((hash, size))
}
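
The write path above hashes and measures the body as a side effect of the copy, so the data is only traversed once. A condensed, self-contained sketch of the same idea (a fixed in-memory chunk and a discarding sink stand in for the upload body and the target file):

use futures::StreamExt as _;
use sha2::Digest as _;
use tokio_util::io::StreamReader;

async fn hash_while_copying() -> std::io::Result<(Vec<u8>, u64)> {
    // Stand-in for an upload body: a stream of Bytes chunks.
    let chunks = futures::stream::iter([Ok::<_, std::io::Error>(bytes::Bytes::from_static(b"hello"))]);

    let mut hash = sha2::Sha512::new();
    let mut size: u64 = 0;
    let stream = chunks.map(|chunk| {
        let bytes = chunk?;
        hash.update(&bytes);
        size += bytes.len() as u64;
        std::io::Result::Ok(bytes)
    });

    let mut reader = StreamReader::new(stream);
    let mut sink = tokio::io::sink();
    tokio::io::copy(&mut reader, &mut sink).await?;
    drop(reader); // releases the closure's borrows of hash and size

    Ok((hash.finalize().to_vec(), size))
}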

View File

@ -4,17 +4,13 @@ mod endpoints;
mod errors;
mod file_storage;
mod prelude;
mod util;
use std::{env, net::Ipv4Addr};
use auth::HashedBytes;
use axum::{extract::FromRef, routing::post, Router};
use file_storage::FileStorage;
use tokio::net::TcpListener;
type Pool = sqlx::postgres::PgPool;
#[derive(Clone, FromRef)]
#[derive(Clone, axum::extract::FromRef)]
struct AppState {
pool: Pool,
storage: FileStorage,
@ -29,8 +25,8 @@ async fn create_test_users(pool: &Pool) -> anyhow::Result<()> {
if count > 0 {
return Ok(());
}
let hash1 = HashedBytes::hash_bytes(b"Password1").as_bytes();
let hash2 = HashedBytes::hash_bytes(b"Password2").as_bytes();
let hash1 = auth::HashedBytes::hash_bytes(b"Password1").as_bytes();
let hash2 = auth::HashedBytes::hash_bytes(b"Password2").as_bytes();
tokio::try_join!(
db::users::create_user("Test1", "test1@example.com", &hash1, pool),
@ -40,12 +36,39 @@ async fn create_test_users(pool: &Pool) -> anyhow::Result<()> {
Ok(())
}
fn init_tracing() {
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
let mut err = None;
tracing_subscriber::registry()
.with(
tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|inner_err| {
err = Some(inner_err);
"debug,sqlx=info,axum::rejection=trace".parse().unwrap()
}),
)
.with(tracing_subscriber::fmt::layer())
.init();
if let Some(err) = err {
tracing::info!(
%err,
"Error constructing EnvFilter, falling back to using the default"
);
}
}
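
try_from_default_env reads the RUST_LOG environment variable, so the hard-coded directives are only a fallback for when that variable is missing or malformed. For example (illustration only), the default can be overridden before init_tracing runs:

// Equivalent to launching with RUST_LOG="info,sqlx=warn,axum::rejection=trace"
std::env::set_var("RUST_LOG", "info,sqlx=warn,axum::rejection=trace");
init_tracing();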
#[tokio::main]
async fn main() -> anyhow::Result<()> {
// TODO: add utoipa and utoipauto for swagger
use std::{env, net::Ipv4Addr};
if env::var("RUST_BACKTRACE").is_err() {
env::set_var("RUST_BACKTRACE", "1");
}
let _ = dotenvy::dotenv();
tracing_subscriber::fmt::init();
init_tracing();
auth::force_init_keys();
@ -65,38 +88,95 @@ async fn main() -> anyhow::Result<()> {
let router = app(state);
let addr = (Ipv4Addr::UNSPECIFIED, 3000);
let listener = TcpListener::bind(addr).await?;
let listener = tokio::net::TcpListener::bind(addr).await?;
axum::serve(listener, router).await?;
axum::serve(listener, router)
.with_graceful_shutdown(shutdown_signal())
.await?;
Ok(())
}
fn app(state: AppState) -> Router {
use axum::{http::header, routing::get};
async fn shutdown_signal() {
use tokio::signal;
let ctrl_c = async {
signal::ctrl_c()
.await
.expect("failed to install Ctrl+C handler");
};
#[cfg(unix)]
{
let terminate = async {
signal::unix::signal(signal::unix::SignalKind::terminate())
.expect("failed to install signal handler")
.recv()
.await;
};
tokio::select! {
() = ctrl_c => {}
() = terminate => {}
}
}
#[cfg(not(unix))]
ctrl_c.await;
}
fn app(state: AppState) -> axum::Router {
use axum::{
extract::DefaultBodyLimit,
handler::Handler as _,
http::header,
routing::{get, post},
};
use endpoints::{
authorization, file, folder,
file, folder,
permissions::{self, get_top_level::get_top_level},
users,
};
use tower_http::ServiceBuilderExt as _;
use tower_http::{
timeout::TimeoutLayer,
trace::{MakeSpan, TraceLayer},
ServiceBuilderExt as _,
};
let sensitive_headers = [header::AUTHORIZATION, header::COOKIE];
#[derive(Clone, Copy)]
struct SpanMaker;
let middleware = tower::ServiceBuilder::new()
.sensitive_headers(sensitive_headers)
.trace_for_http()
impl<B> MakeSpan<B> for SpanMaker {
fn make_span(&mut self, request: &axum::http::Request<B>) -> tracing::Span {
tracing::debug_span!(
"request",
method = %request.method(),
uri = %request.uri(),
version = ?request.version(),
headers = ?request.headers(),
request_id = %uuid::Uuid::now_v7()
)
}
}
const TEN_GIBIBYTES: usize = 10 * 1024 * 1024 * 1024;
let body_limit = DefaultBodyLimit::max(TEN_GIBIBYTES);
let timeout = TimeoutLayer::new(std::time::Duration::from_secs(10));
let common_middleware = tower::ServiceBuilder::new()
.sensitive_headers([header::AUTHORIZATION, header::COOKIE])
.layer(TraceLayer::new_for_http().make_span_with(SpanMaker))
.compression();
// Build route service
Router::new()
.route(
"/files",
get(file::download::download)
.post(file::upload::upload)
.delete(file::delete::delete)
.patch(file::modify::modify),
)
let file_router = axum::Router::new().route(
"/",
get(file::download::download)
.post(file::upload::upload.layer(body_limit.clone()))
.delete(file::delete::delete.layer(timeout))
.patch(file::modify::modify.layer(body_limit.clone())),
);
let general_router = axum::Router::new()
.route(
"/folders",
get(folder::list::list)
@ -120,8 +200,15 @@ fn app(state: AppState) -> Router {
.delete(users::delete::delete)
.put(users::put::put),
)
.route("/users/current", get(users::get::current))
.route("/users/search", get(users::search::search))
.route("/authorize", post(authorization::auth_post::post))
.layer(middleware)
.route("/users/register", post(users::register::register))
.route("/users/authorize", post(users::login::login))
.layer(timeout);
axum::Router::new()
.nest("/files", file_router)
.nest("/", general_router)
.layer(common_middleware)
.with_state(state)
}
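
End to end, the new user routes can be exercised with any HTTP client. A hypothetical client-side sketch follows; reqwest, anyhow on the client side and the example credentials are assumptions, while the path, the form encoding and port 3000 come from the router and main above:

async fn register_demo() -> anyhow::Result<()> {
    let client = reqwest::Client::new();

    // POST /users/register accepts a form and answers with the encoded token as JSON.
    let token: serde_json::Value = client
        .post("http://localhost:3000/users/register")
        .form(&[
            ("username", "alice"),
            ("email", "alice@example.com"),
            ("password", "Str0ng!pwd"),
        ])
        .send()
        .await?
        .error_for_status()?
        .json()
        .await?;

    println!("token response: {token}");
    Ok(())
}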

View File

@ -1,8 +1,14 @@
pub(crate) use crate::{auth::Claims, db, errors::ErrorHandlingExt as _, AppState, Pool};
pub(crate) use crate::{
auth::Claims,
db::{self, permissions::PermissionExt as _},
errors::{ErrorHandlingExt as _, GeneralError, GeneralResult, ItemNotFoundExt as _},
util::EmptyResponse,
AppState, Pool,
};
pub use axum::{
extract::{Json, Query, State},
http::StatusCode,
};
pub use futures::StreamExt as _;
pub use futures::{future, stream::BoxStream, Stream, StreamExt as _, TryStreamExt as _};
pub use serde::{Deserialize, Serialize};
pub use uuid::Uuid;

11
src/util.rs Normal file
View File

@ -0,0 +1,11 @@
use axum::response::IntoResponse;
use crate::prelude::*;
pub struct EmptyResponse;
impl IntoResponse for EmptyResponse {
fn into_response(self) -> axum::response::Response {
StatusCode::NO_CONTENT.into_response()
}
}
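
EmptyResponse pairs with GeneralResult so handlers that have nothing to return still answer with 204 No Content on success and a proper status and message on failure. A minimal sketch (the remove_tag helper is hypothetical; the other names come from the prelude):

async fn delete_tag(State(pool): State<Pool>, claims: Claims) -> GeneralResult<EmptyResponse> {
    remove_tag(claims.user_id, &pool)
        .await
        .handle_internal("Error deleting the tag")?;
    Ok(EmptyResponse) // turned into an empty 204 No Content response
}

// Hypothetical database call standing in for a real db:: function.
async fn remove_tag(_user_id: i32, _pool: &Pool) -> sqlx::Result<()> {
    todo!()
}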