diff --git a/.sqlx/query-090673660f991b66b0b5a7e2492e94011405a313f89943cff7e64e3ccc674822.json b/.sqlx/query-090673660f991b66b0b5a7e2492e94011405a313f89943cff7e64e3ccc674822.json deleted file mode 100644 index 66dc25a..0000000 --- a/.sqlx/query-090673660f991b66b0b5a7e2492e94011405a313f89943cff7e64e3ccc674822.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT COUNT(*) AS \"count!\"\n FROM users\n WHERE name = ANY($1)\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "count!", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "TextArray" - ] - }, - "nullable": [ - null - ] - }, - "hash": "090673660f991b66b0b5a7e2492e94011405a313f89943cff7e64e3ccc674822" -} diff --git a/.sqlx/query-19d85e2094bcb4ac818975b9477f4cc3de4128ef0aa3383369092f2df56636d9.json b/.sqlx/query-19d85e2094bcb4ac818975b9477f4cc3de4128ef0aa3383369092f2df56636d9.json deleted file mode 100644 index 860846c..0000000 --- a/.sqlx/query-19d85e2094bcb4ac818975b9477f4cc3de4128ef0aa3383369092f2df56636d9.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT name\n FROM groups\n WHERE name = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "name", - "type_info": "Text" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - false - ] - }, - "hash": "19d85e2094bcb4ac818975b9477f4cc3de4128ef0aa3383369092f2df56636d9" -} diff --git a/.sqlx/query-275592cdd00626bcb0c5c3054952b6cd170d0692354100d0a1c25c2dba9e9e6b.json b/.sqlx/query-275592cdd00626bcb0c5c3054952b6cd170d0692354100d0a1c25c2dba9e9e6b.json deleted file mode 100644 index b461d48..0000000 --- a/.sqlx/query-275592cdd00626bcb0c5c3054952b6cd170d0692354100d0a1c25c2dba9e9e6b.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n DELETE FROM groups\n WHERE name = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [] - }, - "hash": "275592cdd00626bcb0c5c3054952b6cd170d0692354100d0a1c25c2dba9e9e6b" -} diff --git a/.sqlx/query-282189b1fc3f70e5c2de3f19a3cc8b1fe7e32e4b9b501674ea138acf0cd759ff.json b/.sqlx/query-282189b1fc3f70e5c2de3f19a3cc8b1fe7e32e4b9b501674ea138acf0cd759ff.json deleted file mode 100644 index bdfe759..0000000 --- a/.sqlx/query-282189b1fc3f70e5c2de3f19a3cc8b1fe7e32e4b9b501674ea138acf0cd759ff.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n DELETE FROM users\n WHERE name = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [] - }, - "hash": "282189b1fc3f70e5c2de3f19a3cc8b1fe7e32e4b9b501674ea138acf0cd759ff" -} diff --git a/.sqlx/query-52bcae42b069a7665baeff903774e624f3e7ae6e2474d03c8619fa1816edefe0.json b/.sqlx/query-52bcae42b069a7665baeff903774e624f3e7ae6e2474d03c8619fa1816edefe0.json deleted file mode 100644 index 4f65e4f..0000000 --- a/.sqlx/query-52bcae42b069a7665baeff903774e624f3e7ae6e2474d03c8619fa1816edefe0.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n g.name,\n COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS \"users!\"\n FROM groups g\n LEFT JOIN users_groups ug ON g.name = ug.group_name\n WHERE g.name = $1\n GROUP BY g.name\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "name", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "users!", - "type_info": "TextArray" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - false, - null - ] - }, - "hash": 
"52bcae42b069a7665baeff903774e624f3e7ae6e2474d03c8619fa1816edefe0" -} diff --git a/.sqlx/query-5dbde6bba584448a7be9fd6965aec52a8050d21c453d7ec221be44bd0d893fd1.json b/.sqlx/query-5dbde6bba584448a7be9fd6965aec52a8050d21c453d7ec221be44bd0d893fd1.json deleted file mode 100644 index 9b88d1a..0000000 --- a/.sqlx/query-5dbde6bba584448a7be9fd6965aec52a8050d21c453d7ec221be44bd0d893fd1.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO users (name, display_name, password, email, disabled, image)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (name) DO UPDATE\n SET display_name = EXCLUDED.display_name,\n password = EXCLUDED.password,\n email = EXCLUDED.email,\n disabled = EXCLUDED.disabled,\n image = EXCLUDED.image\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Text", - "Text", - "Text", - "Bool", - "Text" - ] - }, - "nullable": [] - }, - "hash": "5dbde6bba584448a7be9fd6965aec52a8050d21c453d7ec221be44bd0d893fd1" -} diff --git a/.sqlx/query-74d4ef98ee975bfe90418171dea43397316f8d57ac4d9b09248bb5b0f767b166.json b/.sqlx/query-74d4ef98ee975bfe90418171dea43397316f8d57ac4d9b09248bb5b0f767b166.json deleted file mode 100644 index 7407713..0000000 --- a/.sqlx/query-74d4ef98ee975bfe90418171dea43397316f8d57ac4d9b09248bb5b0f767b166.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT name, display_name, password, email, disabled, image\n FROM users\n WHERE name = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "name", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "display_name", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "password", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "email", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "disabled", - "type_info": "Bool" - }, - { - "ordinal": 5, - "name": "image", - "type_info": "Text" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - true - ] - }, - "hash": "74d4ef98ee975bfe90418171dea43397316f8d57ac4d9b09248bb5b0f767b166" -} diff --git a/.sqlx/query-91b332e6af78793ae53cfdbf8e5edccfe031a21ad1ca8240024adb7e0006570b.json b/.sqlx/query-91b332e6af78793ae53cfdbf8e5edccfe031a21ad1ca8240024adb7e0006570b.json deleted file mode 100644 index 64d5fa1..0000000 --- a/.sqlx/query-91b332e6af78793ae53cfdbf8e5edccfe031a21ad1ca8240024adb7e0006570b.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO users_groups (user_name, group_name)\n SELECT * FROM UNNEST($1::text[], $2::text[])\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "TextArray", - "TextArray" - ] - }, - "nullable": [] - }, - "hash": "91b332e6af78793ae53cfdbf8e5edccfe031a21ad1ca8240024adb7e0006570b" -} diff --git a/.sqlx/query-9313aac97fa5191c47874e2e3834ca713d3a3b5556ac26c3cc51ee138f411982.json b/.sqlx/query-9313aac97fa5191c47874e2e3834ca713d3a3b5556ac26c3cc51ee138f411982.json deleted file mode 100644 index 75c1022..0000000 --- a/.sqlx/query-9313aac97fa5191c47874e2e3834ca713d3a3b5556ac26c3cc51ee138f411982.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n u.name,\n u.display_name,\n u.password,\n u.email,\n u.disabled,\n u.image,\n COALESCE(array_agg(ug.group_name ORDER BY ug.group_name), ARRAY[]::TEXT[]) AS \"groups!\"\n FROM users u\n LEFT JOIN users_groups ug ON u.name = ug.user_name\n WHERE u.name = $1\n GROUP BY u.name, u.email, u.disabled, u.image\n ", - "describe": { - 
"columns": [ - { - "ordinal": 0, - "name": "name", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "display_name", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "password", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "email", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "disabled", - "type_info": "Bool" - }, - { - "ordinal": 5, - "name": "image", - "type_info": "Text" - }, - { - "ordinal": 6, - "name": "groups!", - "type_info": "TextArray" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - true, - null - ] - }, - "hash": "9313aac97fa5191c47874e2e3834ca713d3a3b5556ac26c3cc51ee138f411982" -} diff --git a/.sqlx/query-95bbd23a12bf44b1bc31859a1fd324c16d76ec2797f68da75fc6e526a3cd0bc4.json b/.sqlx/query-95bbd23a12bf44b1bc31859a1fd324c16d76ec2797f68da75fc6e526a3cd0bc4.json deleted file mode 100644 index 44f7ab1..0000000 --- a/.sqlx/query-95bbd23a12bf44b1bc31859a1fd324c16d76ec2797f68da75fc6e526a3cd0bc4.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n u.name,\n u.display_name,\n u.password,\n u.email,\n u.disabled,\n u.image,\n COALESCE(array_agg(ug.group_name ORDER BY ug.group_name), ARRAY[]::TEXT[]) AS \"groups!\"\n FROM users u\n LEFT JOIN users_groups ug ON u.name = ug.user_name\n GROUP BY u.name, u.email, u.disabled, u.image\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "name", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "display_name", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "password", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "email", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "disabled", - "type_info": "Bool" - }, - { - "ordinal": 5, - "name": "image", - "type_info": "Text" - }, - { - "ordinal": 6, - "name": "groups!", - "type_info": "TextArray" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - false, - false, - false, - false, - true, - null - ] - }, - "hash": "95bbd23a12bf44b1bc31859a1fd324c16d76ec2797f68da75fc6e526a3cd0bc4" -} diff --git a/.sqlx/query-9caa0dac7d2a5098a09278e2331e86d87b1e4a6916836ca0d1a0509a159affc8.json b/.sqlx/query-9caa0dac7d2a5098a09278e2331e86d87b1e4a6916836ca0d1a0509a159affc8.json deleted file mode 100644 index 662e2f4..0000000 --- a/.sqlx/query-9caa0dac7d2a5098a09278e2331e86d87b1e4a6916836ca0d1a0509a159affc8.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT COUNT(*) AS \"count!\"\n FROM groups\n WHERE name = ANY($1)\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "count!", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "TextArray" - ] - }, - "nullable": [ - null - ] - }, - "hash": "9caa0dac7d2a5098a09278e2331e86d87b1e4a6916836ca0d1a0509a159affc8" -} diff --git a/.sqlx/query-adb2455e26b1cddf90a54d08e79f57258db1212ef4120868581cd0a8a81eff8f.json b/.sqlx/query-adb2455e26b1cddf90a54d08e79f57258db1212ef4120868581cd0a8a81eff8f.json deleted file mode 100644 index 2cc1724..0000000 --- a/.sqlx/query-adb2455e26b1cddf90a54d08e79f57258db1212ef4120868581cd0a8a81eff8f.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n DELETE FROM users_groups\n WHERE group_name = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [] - }, - "hash": "adb2455e26b1cddf90a54d08e79f57258db1212ef4120868581cd0a8a81eff8f" -} diff --git 
a/.sqlx/query-b1be2a377b5bfaf093618d049c0ed8b759f946580870558c699cce9490a0e0f2.json b/.sqlx/query-b1be2a377b5bfaf093618d049c0ed8b759f946580870558c699cce9490a0e0f2.json deleted file mode 100644 index 7c5e5c0..0000000 --- a/.sqlx/query-b1be2a377b5bfaf093618d049c0ed8b759f946580870558c699cce9490a0e0f2.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO groups (name) VALUES ($1)", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [] - }, - "hash": "b1be2a377b5bfaf093618d049c0ed8b759f946580870558c699cce9490a0e0f2" -} diff --git a/.sqlx/query-ba1cb3d9ffd5dd2260815616abc0b93cd67767cf299f443023d8ab9f9a12c44c.json b/.sqlx/query-ba1cb3d9ffd5dd2260815616abc0b93cd67767cf299f443023d8ab9f9a12c44c.json deleted file mode 100644 index 499e815..0000000 --- a/.sqlx/query-ba1cb3d9ffd5dd2260815616abc0b93cd67767cf299f443023d8ab9f9a12c44c.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO users (name, display_name, password, email, disabled, image)\n VALUES ($1, $2, $3, $4, $5, $6)\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Text", - "Text", - "Text", - "Bool", - "Text" - ] - }, - "nullable": [] - }, - "hash": "ba1cb3d9ffd5dd2260815616abc0b93cd67767cf299f443023d8ab9f9a12c44c" -} diff --git a/.sqlx/query-e52660da218cabe80565d95bf77add43558dc3a99c29246cf61d2431ddf34cf8.json b/.sqlx/query-e52660da218cabe80565d95bf77add43558dc3a99c29246cf61d2431ddf34cf8.json deleted file mode 100644 index 7854eef..0000000 --- a/.sqlx/query-e52660da218cabe80565d95bf77add43558dc3a99c29246cf61d2431ddf34cf8.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n g.name,\n COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS \"users!\"\n FROM groups g\n LEFT JOIN users_groups ug ON g.name = ug.group_name\n GROUP BY g.name\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "name", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "users!", - "type_info": "TextArray" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - null - ] - }, - "hash": "e52660da218cabe80565d95bf77add43558dc3a99c29246cf61d2431ddf34cf8" -} diff --git a/.sqlx/query-e7258b575bc6d1d71f9c62a9c6b56f6103ab7caebc26886346e4ecec399bd86c.json b/.sqlx/query-e7258b575bc6d1d71f9c62a9c6b56f6103ab7caebc26886346e4ecec399bd86c.json deleted file mode 100644 index 91d865c..0000000 --- a/.sqlx/query-e7258b575bc6d1d71f9c62a9c6b56f6103ab7caebc26886346e4ecec399bd86c.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n DELETE FROM users_groups\n WHERE user_name = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [] - }, - "hash": "e7258b575bc6d1d71f9c62a9c6b56f6103ab7caebc26886346e4ecec399bd86c" -} diff --git a/Cargo.lock b/Cargo.lock index c8aa429..808753b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -974,6 +974,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "email_address" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449" +dependencies = [ + "serde", +] + [[package]] name = "equivalent" version = "1.0.2" @@ -1082,22 +1091,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "fuser" -version = "0.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"53274f494609e77794b627b1a3cddfe45d675a6b2e9ba9c0fdc8d8eee2184369" -dependencies = [ - "libc", - "log", - "memchr", - "nix", - "page_size", - "pkg-config", - "smallvec", - "zerocopy", -] - [[package]] name = "futures-channel" version = "0.3.31" @@ -1249,20 +1242,23 @@ dependencies = [ "axum", "axum-extra", "clap", - "fuser", + "email_address", "log", "log4rs", "non-empty-string", + "nonempty", "openidconnect", "passwords", "redis 0.31.0", "redis-macros", "serde", "serde_json", + "serde_with", "serde_yaml", "sqlx", "time", "tokio", + "url", "uuid", ] @@ -1867,18 +1863,6 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "nix" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" -dependencies = [ - "bitflags", - "cfg-if 1.0.0", - "cfg_aliases", - "libc", -] - [[package]] name = "non-empty-string" version = "0.2.6" @@ -1889,6 +1873,15 @@ dependencies = [ "serde", ] +[[package]] +name = "nonempty" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "549e471b99ccaf2f89101bec68f4d244457d5a95a9c3d0672e9564124397741d" +dependencies = [ + "serde", +] + [[package]] name = "num-bigint" version = "0.4.6" @@ -2063,16 +2056,6 @@ dependencies = [ "sha2 0.10.9", ] -[[package]] -name = "page_size" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30d5b2194ed13191c1999ae0704b7839fb18384fa22e49b57eeaa97d79ce40da" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "parking" version = "2.2.1" diff --git a/Cargo.toml b/Cargo.toml index c24b964..5e0a9b0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,18 +20,21 @@ async-session = "3.0.0" axum = { version = "0.8.4", features = ["macros"] } axum-extra = { version = "0.10.1", features = ["typed-header"] } clap = { version = "4.5.39", features = ["derive"] } -fuser = "0.15.1" +email_address = "0.2.9" log = "0.4.27" log4rs = "1.3.0" non-empty-string = { version = "0.2.6", features = ["serde"] } +nonempty = { version = "0.11.0", features = ["serialize"] } openidconnect = { version = "4.0.0", features = ["reqwest"] } passwords = "3.1.16" redis = { version = "0.31.0", features = ["tokio-comp"] } redis-macros = "0.5.4" serde = "1.0.219" serde_json = "1.0.140" +serde_with = "3.12.0" serde_yaml = "0.9.34" sqlx = { version = "0.8.6", features = ["runtime-tokio", "postgres", "time", "uuid"] } time = { version = "0.3.41", features = ["serde"] } -tokio = { version = "1.45.1", features = ["rt-multi-thread", "process"] } +tokio = { version = "1.45.1", features = ["rt-multi-thread", "signal"] } +url = { version = "2.5.4", features = ["serde"] } uuid = { version = "1.17.0", features = ["serde"] } diff --git a/migrations/20250605080246_init.sql b/migrations/20250605080246_init.sql index d916a9f..e69de29 100644 --- a/migrations/20250605080246_init.sql +++ b/migrations/20250605080246_init.sql @@ -1,56 +0,0 @@ -CREATE TABLE IF NOT EXISTS users ( - name TEXT PRIMARY KEY, - display_name TEXT NOT NULL, - password TEXT NOT NULL, - email TEXT NOT NULL UNIQUE, - disabled BOOLEAN NOT NULL, - image TEXT, - created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), - updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() -); - -CREATE TABLE IF NOT EXISTS groups ( - name TEXT PRIMARY KEY, - created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), - updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() -); - -CREATE TABLE IF NOT EXISTS users_groups ( - user_name TEXT NOT NULL, - group_name TEXT NOT NULL, - 
PRIMARY KEY (user_name, group_name), - FOREIGN KEY (user_name) REFERENCES users(name) ON DELETE CASCADE, - FOREIGN KEY (group_name) REFERENCES groups(name) ON DELETE CASCADE -); - -CREATE OR REPLACE FUNCTION update_timestamp() -RETURNS TRIGGER AS $$ -BEGIN - NEW.updated_at = NOW(); - RETURN NEW; -END; -$$ LANGUAGE plpgsql; - -CREATE TRIGGER update_users_timestamp -BEFORE UPDATE ON users -FOR EACH ROW -EXECUTE FUNCTION update_timestamp(); - -CREATE TRIGGER update_groups_timestamp -BEFORE UPDATE ON groups -FOR EACH ROW -EXECUTE FUNCTION update_timestamp(); - -CREATE OR REPLACE FUNCTION update_users_groups_timestamp() -RETURNS TRIGGER AS $$ -BEGIN - UPDATE users SET updated_at = NOW() WHERE name = NEW.user_name; - UPDATE groups SET updated_at = NOW() WHERE name = NEW.group_name; - RETURN NULL; -END; -$$ LANGUAGE plpgsql; - -CREATE TRIGGER update_users_groups_timestamp -AFTER INSERT OR DELETE ON users_groups -FOR EACH ROW -EXECUTE FUNCTION update_users_groups_timestamp(); diff --git a/src/config.rs b/src/config.rs index 7e961d2..b7f44a3 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,11 +1,12 @@ use clap::Parser; +use non_empty_string::NonEmptyString; use serde::Deserialize; use std::{ error::Error, - fs, net::{IpAddr, Ipv4Addr}, - path::PathBuf, + path::{Path, PathBuf}, }; +use tokio::fs; #[derive(Clone, Deserialize)] pub struct ServerConfig { @@ -28,12 +29,12 @@ const fn default_server_port() -> u16 { #[derive(Clone, Deserialize)] pub struct OAuthConfig { - pub issuer_url: String, - pub client_id: String, - pub client_secret: String, + pub issuer_url: NonEmptyString, + pub client_id: NonEmptyString, + pub client_secret: NonEmptyString, #[serde(default)] pub insecure: bool, - pub admin_group: String, + pub admin_group: NonEmptyString, } #[derive(Clone, Deserialize)] @@ -43,16 +44,16 @@ pub struct AutheliaConfig { #[derive(Clone, Deserialize)] pub struct PostgresqlConfig { - pub user: String, - pub password: String, - pub host: String, + pub user: NonEmptyString, + pub password: NonEmptyString, + pub host: NonEmptyString, pub port: u16, - pub database: String, + pub database: NonEmptyString, } #[derive(Clone, Deserialize)] pub struct RedisConfig { - pub host: String, + pub host: NonEmptyString, pub port: u16, #[serde(default)] pub database: u8, @@ -67,11 +68,12 @@ pub struct Config { pub redis: RedisConfig, } -impl TryFrom<&PathBuf> for Config { - type Error = Box; - - fn try_from(path: &PathBuf) -> Result { - let contents = fs::read_to_string(path)?; +impl Config { + pub async fn from_path
<P>
(path: P) -> Result> + where + P: AsRef, + { + let contents = fs::read_to_string(path).await?; let config = serde_yaml::from_str(&contents)?; Ok(config) } diff --git a/src/main.rs b/src/main.rs index 33460b2..c2e9b6c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -11,37 +11,34 @@ use axum::serve; use clap::Parser; use log::info; use log4rs::config::Deserializers; -use std::{error::Error, net::SocketAddr}; +use std::net::SocketAddr; use tokio::net::TcpListener; +use utils::shutdown_signal; -use config::{Args, Config}; +use config::Args; use state::State; #[tokio::main] async fn main() { let args = Args::parse(); - log4rs::init_file(args.log_config, Deserializers::default()).unwrap(); + log4rs::init_file(&args.log_config, Deserializers::default()).unwrap(); - let config = Config::try_from(&args.config).unwrap(); - let state = State::from_config(config.clone()).await; + let state = State::from_args(args).await.unwrap(); - init(&state).await.unwrap(); - - let routes = routes::routes(state); - let app = axum::Router::new().nest(&format!("{}/api", config.server.subpath), routes); - - let addr = SocketAddr::from((config.server.address, config.server.port)); - let listener = TcpListener::bind(addr).await.unwrap(); - - info!("Listening on {}", listener.local_addr().unwrap()); - serve(listener, app).await.unwrap(); -} - -async fn init(state: &State) -> Result<(), Box> { sqlx::migrate!("./migrations") .run(&state.pg_pool) .await .expect("Failed to run migrations"); - Ok(()) + let routes = routes::routes(state.clone()); + let app = axum::Router::new().nest(&format!("{}/api", state.config.server.subpath), routes); + + let addr = SocketAddr::from((state.config.server.address, state.config.server.port)); + let listener = TcpListener::bind(addr).await.unwrap(); + + info!("Listening on {}", listener.local_addr().unwrap()); + serve(listener, app) + .with_graceful_shutdown(shutdown_signal()) + .await + .unwrap(); } diff --git a/src/models/authelia.rs b/src/models/authelia.rs index a96f1f5..22d543c 100644 --- a/src/models/authelia.rs +++ b/src/models/authelia.rs @@ -1,24 +1,76 @@ +use email_address::EmailAddress; +use non_empty_string::NonEmptyString; use serde::{Deserialize, Serialize}; -use serde_json::Value; +use serde_yaml::Value; +use tokio::fs; +use url::Url; -use std::collections::HashMap; +use std::{ + collections::{HashMap, hash_map}, + error::Error, + ops::{Deref, DerefMut}, + path::Path, +}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct User { + pub displayname: NonEmptyString, + pub password: NonEmptyString, + pub email: Option, + pub picture: Option, + #[serde(default)] + pub disabled: bool, + #[serde(default)] + pub groups: Vec, + + #[serde(flatten)] + pub extra: HashMap, +} #[derive(Debug, Clone, Serialize, Deserialize)] pub struct UsersFile { - pub users: HashMap, - - #[serde(flatten)] - pub extra: Option>, + pub users: HashMap, } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct UserFile { - pub displayname: String, - pub password: String, - pub email: Option, - pub disabled: Option, - pub groups: Option>, +impl Deref for UsersFile { + type Target = HashMap; - #[serde(flatten)] - pub extra: Option>, + fn deref(&self) -> &Self::Target { + &self.users + } +} + +impl DerefMut for UsersFile { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.users + } +} + +impl IntoIterator for UsersFile { + type Item = (NonEmptyString, User); + type IntoIter = hash_map::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.users.into_iter() + } +} + +impl UsersFile { 
+ pub async fn load<P>
(path: &P) -> Result> + where + P: AsRef + Send + Sync, + { + let content = fs::read_to_string(path.as_ref()).await?; + serde_yaml::from_str(&content) + .map_err(|e| format!("Failed to parse users file: {e}").into()) + } + + pub async fn save
<P>
(&self, path: &P) -> Result<(), Box> + where + P: AsRef + Send + Sync, + { + let content = serde_yaml::to_string(self)?; + fs::write(path.as_ref(), content).await?; + Ok(()) + } } diff --git a/src/models/groups.rs b/src/models/groups.rs deleted file mode 100644 index a88cbe9..0000000 --- a/src/models/groups.rs +++ /dev/null @@ -1,143 +0,0 @@ -use std::error::Error; - -use serde::{Deserialize, Serialize}; -use sqlx::{PgPool, prelude::FromRow, query, query_as}; - -#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] -pub struct Group { - pub name: String, -} - -impl Group { - pub async fn select_by_name( - pool: &PgPool, - name: &str, - ) -> Result, Box> { - let group = query_as!( - Group, - r#" - SELECT name - FROM groups - WHERE name = $1 - "#, - name - ) - .fetch_optional(pool) - .await?; - - Ok(group) - } - - pub async fn delete_by_name( - pool: &PgPool, - name: &str, - ) -> Result<(), Box> { - query!( - r#" - DELETE FROM groups - WHERE name = $1 - "#, - name - ) - .execute(pool) - .await?; - - Ok(()) - } - - pub async fn all_exist_by_names( - pool: &PgPool, - names: &[String], - ) -> Result> { - let row = query!( - r#" - SELECT COUNT(*) AS "count!" - FROM groups - WHERE name = ANY($1) - "#, - names - ) - .fetch_one(pool) - .await?; - - Ok(row.count == i64::try_from(names.len()).unwrap()) - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct GroupWithUsers { - pub name: String, - pub users: Vec, -} - -impl GroupWithUsers { - pub async fn select(pool: &PgPool) -> Result, Box> { - let groups = query_as!( - GroupWithUsers, - r#" - SELECT - g.name, - COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS "users!" - FROM groups g - LEFT JOIN users_groups ug ON g.name = ug.group_name - GROUP BY g.name - "# - ) - .fetch_all(pool) - .await?; - - Ok(groups) - } - - pub async fn select_by_name( - pool: &PgPool, - name: &str, - ) -> Result, Box> { - let group = query_as!( - GroupWithUsers, - r#" - SELECT - g.name, - COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS "users!" 
- FROM groups g - LEFT JOIN users_groups ug ON g.name = ug.group_name - WHERE g.name = $1 - GROUP BY g.name - "#, - name - ) - .fetch_optional(pool) - .await?; - - Ok(group) - } - - pub async fn insert( - pool: &PgPool, - group_with_users: &Self, - ) -> Result<(), Box> { - let mut tx = pool.begin().await?; - - query!( - r#"INSERT INTO groups (name) VALUES ($1)"#, - group_with_users.name - ) - .execute(&mut *tx) - .await?; - - query!( - r#" - INSERT INTO users_groups (user_name, group_name) - SELECT * FROM UNNEST($1::text[], $2::text[]) - "#, - &group_with_users.users, - &vec![group_with_users.name.clone(); group_with_users.users.len()] - ) - .execute(&mut *tx) - .await?; - - tx.commit().await?; - - Ok(()) - } -} diff --git a/src/models/intersections.rs b/src/models/intersections.rs deleted file mode 100644 index 3bfd64a..0000000 --- a/src/models/intersections.rs +++ /dev/null @@ -1,74 +0,0 @@ -use std::error::Error; - -use serde::{Deserialize, Serialize}; -use sqlx::{FromRow, PgPool, query}; - -#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] -pub struct UsersGroups { - pub user_name: String, - pub group_name: String, -} - -impl UsersGroups { - pub async fn set_users_for_group( - pool: &PgPool, - group_name: &str, - users: &[String], - ) -> Result<(), Box> { - let mut tx = pool.begin().await?; - - query!( - r#" - DELETE FROM users_groups - WHERE group_name = $1 - "#, - group_name - ) - .execute(&mut *tx) - .await?; - - query!( - r#" - INSERT INTO users_groups (user_name, group_name) - SELECT * FROM UNNEST($1::text[], $2::text[]) - "#, - users, - &vec![group_name.to_string(); users.len()] - ) - .execute(&mut *tx) - .await?; - - Ok(()) - } - - pub async fn set_groups_for_user( - pool: &PgPool, - user_name: &str, - groups: &[String], - ) -> Result<(), Box> { - let mut tx = pool.begin().await?; - - query!( - r#" - DELETE FROM users_groups - WHERE user_name = $1 - "#, - user_name - ) - .execute(&mut *tx) - .await?; - - query!( - r#" - INSERT INTO users_groups (user_name, group_name) - SELECT * FROM UNNEST($1::text[], $2::text[]) - "#, - &vec![user_name.to_string(); groups.len()], - groups - ) - .execute(&mut *tx) - .await?; - - Ok(()) - } -} diff --git a/src/models/mod.rs b/src/models/mod.rs index c620df4..d9f4373 100644 --- a/src/models/mod.rs +++ b/src/models/mod.rs @@ -1,5 +1,2 @@ pub mod authelia; -pub mod groups; -pub mod intersections; pub mod invites; -pub mod users; diff --git a/src/models/users.rs b/src/models/users.rs deleted file mode 100644 index 9ebdfac..0000000 --- a/src/models/users.rs +++ /dev/null @@ -1,199 +0,0 @@ -use std::error::Error; - -use serde::{Deserialize, Serialize}; -use sqlx::{FromRow, PgPool, query, query_as}; - -#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] -pub struct User { - pub name: String, - pub display_name: String, - pub password: String, - pub email: String, - #[serde(default)] - pub disabled: bool, - #[serde(default)] - pub image: Option, -} - -impl User { - pub async fn select_by_name( - pool: &PgPool, - name: &str, - ) -> Result, Box> { - let user = query_as!( - User, - r#" - SELECT name, display_name, password, email, disabled, image - FROM users - WHERE name = $1 - "#, - name - ) - .fetch_optional(pool) - .await?; - - Ok(user) - } - - pub async fn upsert(pool: &PgPool, user: &Self) -> Result<(), Box> { - query!( - r#" - INSERT INTO users (name, display_name, password, email, disabled, image) - VALUES ($1, $2, $3, $4, $5, $6) - ON CONFLICT (name) DO UPDATE - SET display_name = EXCLUDED.display_name, - password = EXCLUDED.password, 
- email = EXCLUDED.email, - disabled = EXCLUDED.disabled, - image = EXCLUDED.image - "#, - user.name, - user.display_name, - user.password, - user.email, - user.disabled, - user.image - ) - .execute(pool) - .await?; - - Ok(()) - } - - pub async fn delete_by_name( - pool: &PgPool, - name: &str, - ) -> Result<(), Box> { - query!( - r#" - DELETE FROM users - WHERE name = $1 - "#, - name - ) - .execute(pool) - .await?; - - Ok(()) - } - - pub async fn all_exist_by_names( - pool: &PgPool, - names: &[String], - ) -> Result> { - let row = query!( - r#" - SELECT COUNT(*) AS "count!" - FROM users - WHERE name = ANY($1) - "#, - names - ) - .fetch_one(pool) - .await?; - - Ok(row.count == i64::try_from(names.len()).unwrap()) - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct UserWithGroups { - pub name: String, - pub display_name: String, - pub password: String, - pub email: String, - #[serde(default)] - pub disabled: bool, - #[serde(default)] - pub image: Option, - pub groups: Vec, -} - -impl UserWithGroups { - pub async fn select(pool: &PgPool) -> Result, Box> { - let users = query_as!( - UserWithGroups, - r#" - SELECT - u.name, - u.display_name, - u.password, - u.email, - u.disabled, - u.image, - COALESCE(array_agg(ug.group_name ORDER BY ug.group_name), ARRAY[]::TEXT[]) AS "groups!" - FROM users u - LEFT JOIN users_groups ug ON u.name = ug.user_name - GROUP BY u.name, u.email, u.disabled, u.image - "# - ) - .fetch_all(pool) - .await?; - - Ok(users) - } - - pub async fn select_by_name( - pool: &PgPool, - name: &str, - ) -> Result, Box> { - let user = query_as!( - UserWithGroups, - r#" - SELECT - u.name, - u.display_name, - u.password, - u.email, - u.disabled, - u.image, - COALESCE(array_agg(ug.group_name ORDER BY ug.group_name), ARRAY[]::TEXT[]) AS "groups!" 
- FROM users u - LEFT JOIN users_groups ug ON u.name = ug.user_name - WHERE u.name = $1 - GROUP BY u.name, u.email, u.disabled, u.image - "#, - name - ) - .fetch_optional(pool) - .await?; - - Ok(user) - } - - pub async fn insert( - pool: &PgPool, - user_with_groups: &Self, - ) -> Result<(), Box> { - let mut tx = pool.begin().await?; - - query!( - r#"INSERT INTO users (name, display_name, password, email, disabled, image) - VALUES ($1, $2, $3, $4, $5, $6) - "#, - user_with_groups.name, - user_with_groups.display_name, - user_with_groups.password, - user_with_groups.email, - user_with_groups.disabled, - user_with_groups.image - ) - .execute(&mut *tx) - .await?; - - query!( - r#" - INSERT INTO users_groups (user_name, group_name) - SELECT * FROM UNNEST($1::text[], $2::text[]) - "#, - &user_with_groups.groups, - &vec![user_with_groups.name.clone(); user_with_groups.groups.len()] - ) - .execute(&mut *tx) - .await?; - - tx.commit().await?; - - Ok(()) - } -} diff --git a/src/routes/groups.rs b/src/routes/groups.rs index 80b653a..d6d6264 100644 --- a/src/routes/groups.rs +++ b/src/routes/groups.rs @@ -8,41 +8,37 @@ use axum::{ }; use non_empty_string::NonEmptyString; +use nonempty::NonEmpty; use serde::{Deserialize, Serialize}; -use sqlx::PgPool; -use crate::{ - config::Config, - models::{self, groups::Group}, - routes::auth, - state::State, -}; +use crate::{config::Config, models::authelia, routes::auth, state::State}; #[derive(Debug, Serialize)] struct GroupResponse { - users: Vec, + users: Vec, } -impl From for GroupResponse { - fn from(group: models::groups::GroupWithUsers) -> Self { - Self { users: group.users } - } -} - -type GroupsResponse = HashMap; +type GroupsResponse = HashMap; pub async fn get_all( _: auth::User, - extract::State(pg_pool): extract::State, + extract::State(config): extract::State, ) -> Result { - let groups_with_users = models::groups::GroupWithUsers::select(&pg_pool) + let users = authelia::UsersFile::load(&config.authelia.user_database) .await .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; - let groups_response = groups_with_users - .into_iter() - .map(|group| (group.name.clone(), GroupResponse::from(group))) - .collect::(); + let mut groups_response: GroupsResponse = HashMap::new(); + + for (username, user) in users.iter() { + for group in &user.groups { + let group_response = groups_response + .entry(group.clone()) + .or_insert_with(|| GroupResponse { users: Vec::new() }); + + group_response.users.push(username.clone()); + } + } Ok(Json(groups_response)) } @@ -50,102 +46,114 @@ pub async fn get_all( pub async fn get( _: auth::User, extract::Path(name): extract::Path, - extract::State(pg_pool): extract::State, + extract::State(config): extract::State, ) -> Result { - let group_with_users = models::groups::GroupWithUsers::select_by_name(&pg_pool, name.as_str()) + let users = authelia::UsersFile::load(&config.authelia.user_database) .await - .or(Err(StatusCode::INTERNAL_SERVER_ERROR))? 
-        .ok_or(StatusCode::NOT_FOUND)?;
+        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
 
-    Ok(Json(GroupResponse::from(group_with_users)))
+    let group_users = users
+        .iter()
+        .filter_map(|(username, user)| {
+            if user.groups.contains(&name) {
+                Some(username.clone())
+            } else {
+                None
+            }
+        })
+        .collect::>();
+
+    if group_users.is_empty() {
+        return Err(StatusCode::NOT_FOUND);
+    }
+
+    Ok(Json(GroupResponse { users: group_users }))
 }
 
 #[derive(Debug, Deserialize)]
 pub struct GroupCreate {
     name: NonEmptyString,
-    users: Vec,
+    users: NonEmpty,
 }
 
 pub async fn create(
     _: auth::User,
-    extract::State(pg_pool): extract::State,
+    extract::State(config): extract::State,
     extract::Json(group_create): extract::Json,
 ) -> Result {
-    if models::groups::Group::select_by_name(&pg_pool, group_create.name.as_str())
+    let mut users = authelia::UsersFile::load(&config.authelia.user_database)
         .await
-        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
-        .is_some()
+        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
+
+    if users
+        .iter()
+        .any(|(_, user)| user.groups.contains(&group_create.name))
     {
         return Err(StatusCode::CONFLICT);
     }
 
-    let users = group_create
-        .users
-        .into_iter()
-        .map(|u| u.to_string())
-        .collect::>();
-
-    if !models::users::User::all_exist_by_names(&pg_pool, &users)
-        .await
-        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
+    if !group_create
+        .users
+        .iter()
+        .all(|user| users.contains_key(user.as_str()))
     {
         return Err(StatusCode::NOT_FOUND);
     }
 
-    let group_with_users = models::groups::GroupWithUsers {
-        name: group_create.name.to_string(),
-        users,
-    };
-
-    models::groups::GroupWithUsers::insert(&pg_pool, &group_with_users)
-        .await
-        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
+    for user in group_create.users {
+        users
+            .get_mut(user.as_str())
+            .unwrap()
+            .groups
+            .push(group_create.name.clone());
+    }
+
+    users
+        .save(&config.authelia.user_database)
+        .await
+        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
 
     Ok(())
 }
 
 #[derive(Debug, Deserialize)]
 pub struct GroupUpdate {
-    users: Option>,
+    users: Option>,
 }
 
 pub async fn update(
     session_user: auth::User,
     extract::Path(name): extract::Path,
-    extract::State(pg_pool): extract::State,
     extract::State(config): extract::State,
     extract::Json(group_update): extract::Json,
 ) -> Result {
-    let group = models::groups::Group::select_by_name(&pg_pool, name.as_str())
-        .await
-        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
-        .ok_or(StatusCode::NOT_FOUND)?;
-
-    let mut logout = false;
-
-    if let Some(users) = &group_update.users {
-        let users = users.iter().map(ToString::to_string).collect::>();
-
-        if !models::users::User::all_exist_by_names(&pg_pool, &users)
-            .await
-            .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
- { - return Err(StatusCode::NOT_FOUND); - } - - models::intersections::UsersGroups::set_users_for_group( - &pg_pool, - group.name.as_str(), - &users, - ) + let mut users = authelia::UsersFile::load(&config.authelia.user_database) .await .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; - if name == config.oauth.admin_group && !users.contains(&session_user.username) { - logout = true; + if !users.iter().any(|(_, user)| user.groups.contains(&name)) { + return Err(StatusCode::NOT_FOUND); + } + + let mut logout = false; + if let Some(new_users) = group_update.users { + for (username, user) in users.iter_mut() { + if new_users.contains(username) { + if !user.groups.contains(&name) { + user.groups.push(name.clone()); + } + } else { + user.groups.retain(|g| g != &name); + } + + if *username == *session_user.username + && !user.groups.contains(&config.oauth.admin_group) + { + logout = true; + } } } + users + .save(&config.authelia.user_database) + .await + .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; + if logout { return Ok(Redirect::to("/api/auth/logout").into_response()); } @@ -155,20 +163,27 @@ pub async fn update( pub async fn delete( _: auth::User, - extract::Path(name): extract::Path, - extract::State(pg_pool): extract::State, + extract::Path(name): extract::Path, extract::State(config): extract::State, ) -> Result { if name == config.oauth.admin_group { return Err(StatusCode::FORBIDDEN); } - let group = models::groups::Group::select_by_name(&pg_pool, &name) + let mut users = authelia::UsersFile::load(&config.authelia.user_database) .await - .or(Err(StatusCode::INTERNAL_SERVER_ERROR))? - .ok_or(StatusCode::NOT_FOUND)?; + .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; - Group::delete_by_name(&pg_pool, &group.name) + if !users.iter().any(|(_, user)| user.groups.contains(&name)) { + return Err(StatusCode::NOT_FOUND); + } + + for user in users.values_mut() { + user.groups.retain(|g| g != &name); + } + + users + .save(&config.authelia.user_database) .await .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; diff --git a/src/routes/users.rs b/src/routes/users.rs index f7bc4af..567c32c 100644 --- a/src/routes/users.rs +++ b/src/routes/users.rs @@ -7,49 +7,62 @@ use axum::{ routing, }; +use email_address::EmailAddress; use non_empty_string::NonEmptyString; use serde::{Deserialize, Serialize}; -use sqlx::PgPool; +use serde_json::Value; +use url::Url; use crate::{ - config::Config, models, routes::auth, state::State, - utils::crypto::generate_random_password_hash, + config::Config, + models::authelia, + routes::auth, + state::State, + utils::crypto::{generate_random_password_hash, hash_password}, }; #[derive(Debug, Serialize)] struct UserResponse { - display_name: String, - email: String, + displayname: NonEmptyString, + email: Option, + picture: Option, disabled: bool, - image: Option, - groups: Vec, + groups: Vec, + + #[serde(flatten)] + extra: HashMap, } -impl From for UserResponse { - fn from(user: models::users::UserWithGroups) -> Self { +impl From for UserResponse { + fn from(user: authelia::User) -> Self { Self { - display_name: user.display_name, + displayname: user.displayname, email: user.email, + picture: user.picture, disabled: user.disabled, - image: user.image, groups: user.groups, + extra: user + .extra + .into_iter() + .map(|(k, v)| (k, serde_json::to_value(v).unwrap())) + .collect(), } } } -type UsersResponse = HashMap; +type UsersResponse = HashMap; pub async fn get_all( _: auth::User, - extract::State(pg_pool): extract::State, + extract::State(config): extract::State, ) -> Result { - let 
users_with_groups = models::users::UserWithGroups::select(&pg_pool) + let users = authelia::UsersFile::load(&config.authelia.user_database) .await .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; - let users_response = users_with_groups + let users_response = users .into_iter() - .map(|user| (user.name.clone(), UserResponse::from(user))) + .map(|(name, user)| (name, user.into())) .collect::(); Ok(Json(users_response)) @@ -58,63 +71,67 @@ pub async fn get_all( pub async fn get( _: auth::User, extract::Path(name): extract::Path, - extract::State(pg_pool): extract::State, + extract::State(config): extract::State, ) -> Result { - let user_with_groups = models::users::UserWithGroups::select_by_name(&pg_pool, name.as_str()) + let users = authelia::UsersFile::load(&config.authelia.user_database) .await - .or(Err(StatusCode::INTERNAL_SERVER_ERROR))? - .ok_or(StatusCode::NOT_FOUND)?; + .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; - Ok(Json(UserResponse::from(user_with_groups))) + let user = users.get(name.as_str()).ok_or(StatusCode::NOT_FOUND)?; + + Ok(Json(UserResponse::from(user.clone()))) } #[derive(Debug, Deserialize)] pub struct UserCreate { name: NonEmptyString, displayname: NonEmptyString, - email: NonEmptyString, + email: Option, + password: Option, + picture: Option, + #[serde(default)] disabled: bool, - image: Option, + #[serde(default)] groups: Vec, + + #[serde(flatten)] + extra: HashMap, } pub async fn create( _: auth::User, - extract::State(pg_pool): extract::State, + extract::State(config): extract::State, extract::Json(user_create): extract::Json, ) -> Result { - if models::users::User::select_by_name(&pg_pool, user_create.name.as_str()) + let mut users = authelia::UsersFile::load(&config.authelia.user_database) .await - .or(Err(StatusCode::INTERNAL_SERVER_ERROR))? - .is_some() - { + .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; + + if users.contains_key(user_create.name.as_str()) { return Err(StatusCode::CONFLICT); } - let groups = user_create - .groups - .into_iter() - .map(|g| g.to_string()) - .collect::>(); - - if !models::groups::Group::all_exist_by_names(&pg_pool, &groups) - .await - .or(Err(StatusCode::INTERNAL_SERVER_ERROR))? 
-    {
-        return Err(StatusCode::NOT_FOUND);
-    }
-
-    let user_with_groups = models::users::UserWithGroups {
-        name: user_create.name.to_string(),
-        display_name: user_create.displayname.to_string(),
-        password: generate_random_password_hash(),
-        email: user_create.email.to_string(),
+    let user = authelia::User {
+        displayname: user_create.displayname,
+        password: user_create.password.map_or_else(
+            || NonEmptyString::new(generate_random_password_hash()).unwrap(),
+            |p| NonEmptyString::new(hash_password(p.as_str())).unwrap(),
+        ),
+        email: user_create.email,
+        picture: user_create.picture,
         disabled: user_create.disabled,
-        image: user_create.image.map(|i| i.to_string()),
-        groups,
+        groups: user_create.groups,
+        extra: user_create
+            .extra
+            .into_iter()
+            .map(|(k, v)| (k, serde_json::from_value(v).unwrap()))
+            .collect(),
     };
 
-    models::users::UserWithGroups::insert(&pg_pool, &user_with_groups)
+    users.insert(user_create.name, user);
+
+    users
+        .save(&config.authelia.user_database)
         .await
         .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
@@ -123,70 +140,69 @@
 #[derive(Debug, Deserialize)]
 pub struct UserUpdate {
-    display_name: Option,
-    email: Option,
+    displayname: Option,
+    password: Option,
+    #[serde(default, with = "serde_with::rust::double_option")]
+    #[allow(clippy::option_option)]
+    email: Option>,
+    #[serde(default, with = "serde_with::rust::double_option")]
+    #[allow(clippy::option_option)]
+    picture: Option>,
     disabled: Option,
-    image: Option,
     groups: Option>,
+
+    #[serde(flatten)]
+    extra: HashMap,
 }
 
 pub async fn update(
     session_user: auth::User,
     extract::Path(name): extract::Path,
-    extract::State(pg_pool): extract::State,
     extract::State(config): extract::State,
     extract::Json(user_update): extract::Json,
 ) -> Result {
-    let user = models::users::User::select_by_name(&pg_pool, name.as_str())
-        .await
-        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
-        .ok_or(StatusCode::NOT_FOUND)?;
-
-    let mut logout = false;
-
-    if let Some(groups) = user_update.groups {
-        let groups = groups
-            .into_iter()
-            .map(|g| g.to_string())
-            .collect::>();
-
-        if !models::groups::Group::all_exist_by_names(&pg_pool, &groups)
-            .await
-            .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
- { - return Err(StatusCode::NOT_FOUND); - } - - models::intersections::UsersGroups::set_groups_for_user( - &pg_pool, - user.name.as_str(), - &groups, - ) + let mut users = authelia::UsersFile::load(&config.authelia.user_database) .await .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; - if name == session_user.username.to_string() && !groups.contains(&config.oauth.admin_group) - { - logout = true; - } + let user = users.get_mut(name.as_str()).ok_or(StatusCode::NOT_FOUND)?; + + if let Some(displayname) = user_update.displayname { + user.displayname = displayname; } - let user = models::users::User { - name: user.name, - display_name: user_update - .display_name - .map(|d| d.to_string()) - .unwrap_or(user.display_name), - password: user.password, - email: user_update - .email - .map(|e| e.to_string()) - .unwrap_or(user.email), - disabled: user_update.disabled.unwrap_or(user.disabled), - image: user_update.image.map(|i| i.to_string()).or(user.image), - }; + if let Some(email) = user_update.email { + user.email = email; + } - models::users::User::upsert(&pg_pool, &user) + if let Some(password) = user_update.password { + user.password = NonEmptyString::new(hash_password(password.as_str())).unwrap(); + } + + if let Some(picture) = user_update.picture { + user.picture = picture; + } + + if let Some(disabled) = user_update.disabled { + user.disabled = disabled; + } + + let mut logout = false; + if let Some(groups) = user_update.groups { + if name == *session_user.username && !groups.contains(&config.oauth.admin_group) { + logout = true; + } + + user.groups = groups; + } + + for (k, v) in user_update.extra { + user.extra + .insert(k.clone(), serde_json::from_value(v).unwrap()); + } + + users + .save(&config.authelia.user_database) .await .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; @@ -200,18 +216,24 @@ pub async fn update( pub async fn delete( session_user: auth::User, extract::Path(name): extract::Path, - extract::State(pg_pool): extract::State, + extract::State(config): extract::State, ) -> Result { - if name == session_user.username.to_string() { + if name == *session_user.username { return Err(StatusCode::FORBIDDEN); } - let user = models::users::User::select_by_name(&pg_pool, &name) + let mut users = authelia::UsersFile::load(&config.authelia.user_database) .await - .or(Err(StatusCode::INTERNAL_SERVER_ERROR))? 
- .ok_or(StatusCode::NOT_FOUND)?; + .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; - models::users::User::delete_by_name(&pg_pool, &user.name) + if !users.contains_key(&name) { + return Err(StatusCode::NOT_FOUND); + } + + users.remove(&name); + + users + .save(&config.authelia.user_database) .await .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; diff --git a/src/state.rs b/src/state.rs index 4d13bca..45ff177 100644 --- a/src/state.rs +++ b/src/state.rs @@ -1,3 +1,5 @@ +use std::error::Error; + use async_redis_session::RedisSessionStore; use axum::extract::FromRef; use openidconnect::{ @@ -10,11 +12,9 @@ use openidconnect::{ }, reqwest, }; -use redis::{self, AsyncCommands}; use sqlx::{PgPool, postgres::PgPoolOptions}; -use tokio::spawn; -use crate::config::Config; +use crate::config::{Args, Config}; pub type OAuthClient< HasAuthUrl = EndpointSet, @@ -54,20 +54,22 @@ pub struct State { } impl State { - pub async fn from_config(config: Config) -> Self { - let (oauth_http_client, oauth_client) = oauth_client(&config).await; - let pg_pool = pg_pool(&config).await; - let redis_client = redis_client(&config).await; - let session_store = session_store(&config); + pub async fn from_args(args: Args) -> Result> { + let config = Config::from_path(&args.config).await?; - Self { + let (oauth_http_client, oauth_client) = oauth_client(&config).await?; + let pg_pool = pg_pool(&config).await?; + let redis_client = redis_client(&config).await?; + let session_store = session_store(&config)?; + + Ok(Self { config, oauth_http_client, oauth_client, pg_pool, redis_client, session_store, - } + }) } } @@ -107,38 +109,35 @@ impl FromRef for RedisSessionStore { } } -async fn oauth_client(config: &Config) -> (reqwest::Client, OAuthClient) { +async fn oauth_client( + config: &Config, +) -> Result<(reqwest::Client, OAuthClient), Box> { let oauth_http_client = reqwest::ClientBuilder::new() .redirect(reqwest::redirect::Policy::none()) .danger_accept_invalid_certs(config.oauth.insecure) - .build() - .unwrap(); + .build()?; let provider_metadata = CoreProviderMetadata::discover_async( - IssuerUrl::new(config.oauth.issuer_url.clone()).unwrap(), + IssuerUrl::new(config.oauth.issuer_url.to_string()).unwrap(), &oauth_http_client, ) - .await - .unwrap(); + .await?; let oauth_client = OAuthClient::from_provider_metadata( provider_metadata, - ClientId::new(config.oauth.client_id.clone()), - Some(ClientSecret::new(config.oauth.client_secret.clone())), + ClientId::new(config.oauth.client_id.to_string()), + Some(ClientSecret::new(config.oauth.client_secret.to_string())), ) - .set_redirect_uri( - RedirectUrl::new(format!( - "{}{}/api/auth/callback", - config.server.host, config.server.subpath - )) - .unwrap(), - ); + .set_redirect_uri(RedirectUrl::new(format!( + "{}{}/api/auth/callback", + config.server.host, config.server.subpath + ))?); - (oauth_http_client, oauth_client) + Ok((oauth_http_client, oauth_client)) } -async fn pg_pool(config: &Config) -> PgPool { - PgPoolOptions::new() +async fn pg_pool(config: &Config) -> Result> { + Ok(PgPoolOptions::new() .max_connections(5) .connect(&format!( "postgres://{}:{}@{}:{}/{}", @@ -148,61 +147,26 @@ async fn pg_pool(config: &Config) -> PgPool { config.postgresql.port, config.postgresql.database )) - .await - .unwrap() + .await?) 
} -async fn redis_client(config: &Config) -> redis::aio::MultiplexedConnection { +async fn redis_client( + config: &Config, +) -> Result> { let url = format!( "redis://{}:{}/{}", config.redis.host, config.redis.port, config.redis.database ); - let client = redis::Client::open(url).unwrap(); - let mut connection = client.get_multiplexed_async_connection().await.unwrap(); - - let _: () = redis::cmd("CONFIG") - .arg("SET") - .arg("notify-keyspace-events") - .arg("Ex") - .query_async(&mut connection) - .await - .unwrap(); - - let database = config.redis.database.to_string(); - spawn(async move { - let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel(); - let rconfig = redis::AsyncConnectionConfig::new().set_push_sender(tx); - let mut connection = client - .get_multiplexed_async_connection_with_config(&rconfig) - .await - .unwrap(); - - let channel = format!("__keyevent@{database}__:expired"); - connection.subscribe(&[channel]).await.unwrap(); - - while let Some(msg) = rx.recv().await { - if let Some(msg) = redis::Msg::from_push_info(msg) { - if let Ok(key) = msg.get_payload::() { - if !key.starts_with("invite:") { - continue; - } - - let id = key.trim_start_matches("invite:").to_string(); - let _: i64 = connection.srem("invite:all", id).await.unwrap(); - } - } - } - }); - - connection + let client = redis::Client::open(url)?; + Ok(client.get_multiplexed_async_connection().await?) } -fn session_store(config: &Config) -> RedisSessionStore { +fn session_store(config: &Config) -> Result> { let url = format!( "redis://{}:{}/{}", config.redis.host, config.redis.port, config.redis.database ); - RedisSessionStore::new(url).unwrap().with_prefix("session:") + Ok(RedisSessionStore::new(url)?.with_prefix("session:")) } diff --git a/src/utils/crypto.rs b/src/utils/crypto.rs index 8ac9496..bb001dc 100644 --- a/src/utils/crypto.rs +++ b/src/utils/crypto.rs @@ -33,3 +33,20 @@ pub fn generate_random_password_hash() -> String { password_hash } + +pub fn hash_password(password: &str) -> String { + let salt = SaltString::generate(&mut OsRng); + + let argon2 = Argon2::new( + argon2::Algorithm::Argon2id, + argon2::Version::V0x13, + argon2::Params::new(65536, 3, 4, Some(32)).unwrap(), + ); + + let password_hash = argon2 + .hash_password(password.as_bytes(), &salt) + .unwrap() + .to_string(); + + password_hash +} diff --git a/src/utils/mod.rs b/src/utils/mod.rs index 274f0ed..1b1d211 100644 --- a/src/utils/mod.rs +++ b/src/utils/mod.rs @@ -1 +1,21 @@ +use tokio::{select, signal}; + pub mod crypto; + +pub async fn shutdown_signal() { + let ctrl_c = async { + signal::ctrl_c().await.unwrap(); + }; + + let terminate = async { + signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) + .unwrap() + .recv() + .await; + }; + + select! 
{ + () = ctrl_c => {}, + () = terminate => {}, + } +} diff --git a/support/manifest.yaml b/support/manifest.yaml index 0842778..dc264e5 100644 --- a/support/manifest.yaml +++ b/support/manifest.yaml @@ -5,7 +5,7 @@ metadata: spec: containers: - name: glyph image: registry.karaolidis.com/karaolidis/glyph:latest volumeMounts: - name: glyph-config mountPath: /etc/glyph @@ -21,7 +21,7 @@ spec: ] - name: postgresql image: docker.io/library/postgres:latest env: - name: POSTGRES_DB value: glyph @@ -34,10 +34,10 @@ spec: hostPort: 5432 - name: redis image: docker.io/library/redis:latest - name: authelia image: docker.io/authelia/authelia:latest volumeMounts: - name: authelia-config mountPath: /etc/authelia @@ -53,7 +53,7 @@ spec: ] - name: traefik image: docker.io/library/traefik:latest args: - "--providers.file.directory=/etc/traefik/dynamic" - "--providers.file.watch=true"
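Note: with this change the route handlers treat Authelia's user database file as the single source of truth and follow a load, modify, save round trip through UsersFile. A minimal sketch of that pattern, assuming the crate-internal types from src/models/authelia.rs; the path and username below are hypothetical examples, not values taken from this repository:

use crate::models::authelia::UsersFile;

// Sketch only: mirrors the load/modify/save flow used in src/routes/users.rs.
async fn disable_user(path: &str, name: &str) -> Result<(), Box<dyn std::error::Error>> {
    // Parse the Authelia users YAML into the UsersFile map.
    let mut users = UsersFile::load(&path).await?;

    // Mutate in memory; UsersFile derefs to the inner HashMap of users.
    for (username, user) in users.iter_mut() {
        if username.as_str() == name {
            user.disabled = true;
        }
    }

    // Persist the change back to disk; edits are lost unless save() is called.
    users.save(&path).await?;
    Ok(())
}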