Compare commits

..

4 Commits

Author SHA1 Message Date
ab9f2cbc09 Add fuse callbacks
Signed-off-by: Nikolaos Karaolidis <nick@karaolidis.com>
2025-06-07 11:00:33 +01:00
ab3cb8bd4e Add fuse access checks
Signed-off-by: Nikolaos Karaolidis <nick@karaolidis.com>
2025-06-06 12:46:03 +01:00
3cad7cce61 Update README
Signed-off-by: Nikolaos Karaolidis <nick@karaolidis.com>
2025-06-05 23:37:35 +01:00
d1d49731ea Add fuser implementation
Signed-off-by: Nikolaos Karaolidis <nick@karaolidis.com>
2025-06-05 23:24:26 +01:00
41 changed files with 2702 additions and 447 deletions

View File

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users_groups (user_name, group_name)\n SELECT * FROM UNNEST($1::text[], $2::text[])\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"TextArray",
"TextArray"
]
},
"nullable": []
},
"hash": "019256af8ccf4fc3f1ad6daa0ed3bc945f141020837732c9cf680fbcc438c6a8"
}

View File

@@ -0,0 +1,26 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n g.name,\n ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS \"users!\"\n FROM glyph_groups g\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "users!",
"type_info": "TextArray"
}
],
"parameters": {
"Left": []
},
"nullable": [
false,
null
]
},
"hash": "1493410c6cf4f7a4cadadf5321f42ad265282d3072eded0b9caaa3dc81ab8b45"
}

View File

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users_groups (user_name, group_name)\n SELECT * FROM UNNEST($1::text[], $2::text[])\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"TextArray",
"TextArray"
]
},
"nullable": []
},
"hash": "1bf35e562ef97408038259aca7b36f719b5f5697efbce538bf3f4eefec6b8d16"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_groups\n WHERE name <> ALL($1)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"TextArray"
]
},
"nullable": []
},
"hash": "244c0ca382a0bd8040667c05b457d2b55f15670d696faba7d57f42090b040378"
}

View File

@@ -0,0 +1,19 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (name) DO UPDATE\n SET display_name = EXCLUDED.display_name,\n password = EXCLUDED.password,\n email = EXCLUDED.email,\n disabled = EXCLUDED.disabled,\n picture = EXCLUDED.picture\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Text",
"Text",
"Bool",
"Text"
]
},
"nullable": []
},
"hash": "3613d8c9b991fb1d39ad99ed36778e5ba9933ca3cf0f064ae5f139cee1cdad42"
}

View File

@@ -0,0 +1,19 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (name) DO UPDATE\n SET display_name = EXCLUDED.display_name,\n password = EXCLUDED.password,\n email = EXCLUDED.email,\n disabled = EXCLUDED.disabled,\n picture = EXCLUDED.picture\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Text",
"Text",
"Bool",
"Text"
]
},
"nullable": []
},
"hash": "364208fb0f4ef56a2ea755481a0d994fed588ff998524521936306e772ae8dce"
}

View File

@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT COUNT(*) AS \"count!\"\n FROM glyph_groups\n WHERE name = ANY($1)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count!",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"TextArray"
]
},
"nullable": [
null
]
},
"hash": "3a6bd1951cac5c82e67fa3610aa90984810cb3e5d596ec0f864ae5aa2631816a"
}

View File

@@ -0,0 +1,52 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT name, display_name, password, email, disabled, picture\n FROM glyph_users\n WHERE name = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "display_name",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "password",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "email",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "disabled",
"type_info": "Bool"
},
{
"ordinal": 5,
"name": "picture",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false,
false,
false,
false,
true
]
},
"hash": "538bac60e4dff5453e9d69f8b94dd623844f4ab89b2963c625f6c47e2dca9c02"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_users_groups\n WHERE user_name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "5d457b24e090dd2d3be512de08706387e62e4ec7997f60c3958572cce8985c27"
}

View File

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users_groups (user_name, group_name)\n SELECT * FROM UNNEST($1::text[], $2::text[])\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"TextArray",
"TextArray"
]
},
"nullable": []
},
"hash": "6830e994b974d76fdcf51cf0e540ce0fa79b58f8eaf0c7ecb1a22e6fc1ebf505"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_groups\n WHERE name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "6a626592b97a77a5804125753527fc9451fa39565b363d2e54ee06b2f36b177f"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_users\n WHERE name <> ALL($1)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"TextArray"
]
},
"nullable": []
},
"hash": "8f1394702f150d3642129fcca8417e5e01911c13191be44339dc36a6c4978db2"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_users_groups\n WHERE user_name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "a15ceb9e5c596ce9639a79436e9e642c65c2cac61054b2fdaa7190c0e8acf0d0"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO glyph_groups (name) VALUES ($1)",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "af519bc617842e3d48a976f9aa4b107b2690b0a259ee02b133985e3eb00a4165"
}

View File

@@ -0,0 +1,58 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n u.name,\n u.display_name,\n u.password,\n u.email,\n u.disabled,\n u.picture,\n ARRAY(SELECT ug.group_name FROM glyph_users_groups ug WHERE ug.user_name = u.name) AS \"groups!\"\n FROM glyph_users u\n WHERE u.name = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "display_name",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "password",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "email",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "disabled",
"type_info": "Bool"
},
{
"ordinal": 5,
"name": "picture",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "groups!",
"type_info": "TextArray"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false,
false,
false,
false,
true,
null
]
},
"hash": "b0b3c6daf78b7a04e75b781d997b61a56fbc8005fe1e419febb87b5247f44ade"
}

View File

@@ -0,0 +1,56 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n u.name,\n u.display_name,\n u.password,\n u.email,\n u.disabled,\n u.picture,\n ARRAY(SELECT ug.group_name FROM glyph_users_groups ug WHERE ug.user_name = u.name) AS \"groups!\"\n FROM glyph_users u\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "display_name",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "password",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "email",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "disabled",
"type_info": "Bool"
},
{
"ordinal": 5,
"name": "picture",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "groups!",
"type_info": "TextArray"
}
],
"parameters": {
"Left": []
},
"nullable": [
false,
false,
false,
false,
false,
true,
null
]
},
"hash": "bc5847efd81251c0e4b524b84f2485ebbd0cd0a813d364815858add87d0e07a2"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_users_groups\n WHERE group_name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "cd8831c93f8714f5242bf0b3dae6240a46b37d5d163414bd739fa0025b6de0a5"
}

View File

@@ -0,0 +1,19 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)\n VALUES ($1, $2, $3, $4, $5, $6)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Text",
"Text",
"Bool",
"Text"
]
},
"nullable": []
},
"hash": "ce1fade2aaf62ce5a3b1448d92517d73533dd9b098d4dfb679a0e77e4a42b3e6"
}

View File

@@ -0,0 +1,18 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users (name, display_name, password, email, disabled)\n VALUES ($1, $2, $3, $4, $5)\n ON CONFLICT (name) DO UPDATE\n SET display_name = EXCLUDED.display_name,\n password = EXCLUDED.password,\n email = EXCLUDED.email,\n disabled = EXCLUDED.disabled\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Text",
"Text",
"Bool"
]
},
"nullable": []
},
"hash": "e1f9c5d85717bdf150dd2352196d7477db1f00a8776777702227a3a6ef8a8c4a"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_users\n WHERE name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "e355e946faf174f24ffb3bdb4cf3e6f3047431316b1e6ef752e2c33a0e0a0c07"
}

View File

@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT COUNT(*) AS \"count!\"\n FROM glyph_users\n WHERE name = ANY($1)\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count!",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"TextArray"
]
},
"nullable": [
null
]
},
"hash": "e53bf6042de37af5560c1861dc4056827cf4ea87449f209dac15ba5d6bfc1704"
}

View File

@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT name\n FROM glyph_groups\n WHERE name = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "name",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "ee2e54ee09eb411a441931e5f9070f9216093191fb46f25cb0b237ef95c92c5d"
}

View File

@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n g.name,\n ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS \"users!\"\n FROM glyph_groups g\n WHERE g.name = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "name",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "users!",
"type_info": "TextArray"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
null
]
},
"hash": "f769fe0d94fe52430004b327c1afe839d61eed8b53836bf4f091c9c56fa1cf17"
}

64
Cargo.lock generated
View File

@@ -974,15 +974,6 @@ dependencies = [
"zeroize", "zeroize",
] ]
[[package]]
name = "email_address"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449"
dependencies = [
"serde",
]
[[package]] [[package]]
name = "equivalent" name = "equivalent"
version = "1.0.2" version = "1.0.2"
@@ -1091,6 +1082,22 @@ dependencies = [
"percent-encoding", "percent-encoding",
] ]
[[package]]
name = "fuser"
version = "0.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53274f494609e77794b627b1a3cddfe45d675a6b2e9ba9c0fdc8d8eee2184369"
dependencies = [
"libc",
"log",
"memchr",
"nix",
"page_size",
"pkg-config",
"smallvec",
"zerocopy",
]
[[package]] [[package]]
name = "futures-channel" name = "futures-channel"
version = "0.3.31" version = "0.3.31"
@@ -1242,23 +1249,21 @@ dependencies = [
"axum", "axum",
"axum-extra", "axum-extra",
"clap", "clap",
"email_address", "fuser",
"libc",
"log", "log",
"log4rs", "log4rs",
"non-empty-string", "non-empty-string",
"nonempty",
"openidconnect", "openidconnect",
"parking_lot",
"passwords", "passwords",
"redis 0.31.0", "redis 0.31.0",
"redis-macros", "redis-macros",
"serde", "serde",
"serde_json",
"serde_with",
"serde_yaml", "serde_yaml",
"sqlx", "sqlx",
"time", "time",
"tokio", "tokio",
"url",
"uuid", "uuid",
] ]
@@ -1863,6 +1868,18 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "nix"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
dependencies = [
"bitflags",
"cfg-if 1.0.0",
"cfg_aliases",
"libc",
]
[[package]] [[package]]
name = "non-empty-string" name = "non-empty-string"
version = "0.2.6" version = "0.2.6"
@@ -1873,15 +1890,6 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "nonempty"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "549e471b99ccaf2f89101bec68f4d244457d5a95a9c3d0672e9564124397741d"
dependencies = [
"serde",
]
[[package]] [[package]]
name = "num-bigint" name = "num-bigint"
version = "0.4.6" version = "0.4.6"
@@ -2056,6 +2064,16 @@ dependencies = [
"sha2 0.10.9", "sha2 0.10.9",
] ]
[[package]]
name = "page_size"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30d5b2194ed13191c1999ae0704b7839fb18384fa22e49b57eeaa97d79ce40da"
dependencies = [
"libc",
"winapi",
]
[[package]] [[package]]
name = "parking" name = "parking"
version = "2.2.1" version = "2.2.1"

View File

@@ -20,21 +20,19 @@ async-session = "3.0.0"
axum = { version = "0.8.4", features = ["macros"] } axum = { version = "0.8.4", features = ["macros"] }
axum-extra = { version = "0.10.1", features = ["typed-header"] } axum-extra = { version = "0.10.1", features = ["typed-header"] }
clap = { version = "4.5.39", features = ["derive"] } clap = { version = "4.5.39", features = ["derive"] }
email_address = "0.2.9" fuser = { version = "0.15.1", features = ["abi-7-31"] }
libc = "0.2.172"
log = "0.4.27" log = "0.4.27"
log4rs = "1.3.0" log4rs = "1.3.0"
non-empty-string = { version = "0.2.6", features = ["serde"] } non-empty-string = { version = "0.2.6", features = ["serde"] }
nonempty = { version = "0.11.0", features = ["serialize"] }
openidconnect = { version = "4.0.0", features = ["reqwest"] } openidconnect = { version = "4.0.0", features = ["reqwest"] }
parking_lot = "0.12.4"
passwords = "3.1.16" passwords = "3.1.16"
redis = { version = "0.31.0", features = ["tokio-comp"] } redis = { version = "0.31.0", features = ["tokio-comp"] }
redis-macros = "0.5.4" redis-macros = "0.5.4"
serde = "1.0.219" serde = "1.0.219"
serde_json = "1.0.140"
serde_with = "3.12.0"
serde_yaml = "0.9.34" serde_yaml = "0.9.34"
sqlx = { version = "0.8.6", features = ["runtime-tokio", "postgres", "time", "uuid"] } sqlx = { version = "0.8.6", features = ["runtime-tokio", "postgres", "time", "uuid"] }
time = { version = "0.3.41", features = ["serde"] } time = { version = "0.3.41", features = ["serde"] }
tokio = { version = "1.45.1", features = ["rt-multi-thread", "signal"] } tokio = { version = "1.45.1", features = ["rt-multi-thread", "process", "signal"] }
url = { version = "2.5.4", features = ["serde"] }
uuid = { version = "1.17.0", features = ["serde"] } uuid = { version = "1.17.0", features = ["serde"] }

View File

@@ -1,6 +1,10 @@
# glyph # glyph
*Glyph* is an Authelia user file database manager. Because files are light but unwieldy, and LDAP is convenient but complex. *Glyph* was an Authelia user file database manager. Because files were light but unwieldy, and LDAP was convenient but complex.
Now, it's turned into a diabolical FUSE MITM contraption that should have never been built. It pretends to be a one-file filesystem (ඞ) that can be read and written to by Authelia, but it actually stores user and group data in a PostgreSQL database.
Do not use this under any circumstances. It is not secure, it is not stable, and it is not created in God's image. It is a joke, and an unfunny one at that.
## Development ## Development

View File

@@ -33,6 +33,25 @@
treefmt = inputs.treefmt-nix.lib.evalModule pkgs ./treefmt.nix; treefmt = inputs.treefmt-nix.lib.evalModule pkgs ./treefmt.nix;
in in
{ {
packages.default = pkgs.rustPlatform.buildRustPackage {
pname = "glyph";
version = "0.1.0";
src = ./.;
cargoLock = {
lockFile = ./Cargo.lock;
};
SQLX_OFFLINE = true;
nativeBuildInputs = with pkgs; [
pkg-config
];
buildInputs = with pkgs; [
fuse3
];
};
devShells.default = pkgs.mkShell { devShells.default = pkgs.mkShell {
packages = with pkgs; [ packages = with pkgs; [
cargo cargo
@@ -42,6 +61,8 @@
cargo-udeps cargo-udeps
cargo-outdated cargo-outdated
sqlx-cli sqlx-cli
fuse3
pkg-config
]; ];
}; };

View File

@@ -0,0 +1,56 @@
-- Users mirrored from Authelia's user database.
CREATE TABLE IF NOT EXISTS glyph_users (
    name TEXT PRIMARY KEY,
    display_name TEXT NOT NULL,
    password TEXT NOT NULL,
    email TEXT NOT NULL UNIQUE,
    disabled BOOLEAN NOT NULL,
    picture TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Groups; membership lives in glyph_users_groups.
CREATE TABLE IF NOT EXISTS glyph_groups (
    name TEXT PRIMARY KEY,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Many-to-many user <-> group membership; rows disappear automatically
-- when either side is deleted (ON DELETE CASCADE).
CREATE TABLE IF NOT EXISTS glyph_users_groups (
    user_name TEXT NOT NULL,
    group_name TEXT NOT NULL,
    PRIMARY KEY (user_name, group_name),
    FOREIGN KEY (user_name) REFERENCES glyph_users(name) ON DELETE CASCADE,
    FOREIGN KEY (group_name) REFERENCES glyph_groups(name) ON DELETE CASCADE
);

-- Generic BEFORE UPDATE hook: bump updated_at on the modified row.
CREATE OR REPLACE FUNCTION glyph_update_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE TRIGGER glyph_update_users_timestamp
BEFORE UPDATE ON glyph_users
FOR EACH ROW
EXECUTE FUNCTION glyph_update_timestamp();

CREATE OR REPLACE TRIGGER glyph_update_groups_timestamp
BEFORE UPDATE ON glyph_groups
FOR EACH ROW
EXECUTE FUNCTION glyph_update_timestamp();

-- Membership changes touch both sides' updated_at.
-- BUG FIX: the original referenced NEW.user_name / NEW.group_name
-- unconditionally, but NEW is NULL in row-level DELETE triggers, so
-- membership removals never bumped any timestamp. Branch on TG_OP and
-- use OLD for deletions.
CREATE OR REPLACE FUNCTION glyph_update_users_groups_timestamp()
RETURNS TRIGGER AS $$
DECLARE
    rec RECORD;
BEGIN
    IF TG_OP = 'DELETE' THEN
        rec := OLD;
    ELSE
        rec := NEW;
    END IF;
    UPDATE glyph_users SET updated_at = NOW() WHERE name = rec.user_name;
    UPDATE glyph_groups SET updated_at = NOW() WHERE name = rec.group_name;
    RETURN NULL;
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE TRIGGER glyph_update_users_groups_timestamp
AFTER INSERT OR DELETE ON glyph_users_groups
FOR EACH ROW
EXECUTE FUNCTION glyph_update_users_groups_timestamp();

View File

@@ -1,12 +1,14 @@
use clap::Parser; use clap::Parser;
use non_empty_string::NonEmptyString;
use serde::Deserialize; use serde::Deserialize;
use sqlx::query;
use std::{ use std::{
error::Error, error::Error,
fs,
net::{IpAddr, Ipv4Addr}, net::{IpAddr, Ipv4Addr},
path::{Path, PathBuf}, path::PathBuf,
}; };
use tokio::fs;
use crate::utils::crypto::hash_password;
#[derive(Clone, Deserialize)] #[derive(Clone, Deserialize)]
pub struct ServerConfig { pub struct ServerConfig {
@@ -29,51 +31,86 @@ const fn default_server_port() -> u16 {
#[derive(Clone, Deserialize)] #[derive(Clone, Deserialize)]
pub struct OAuthConfig { pub struct OAuthConfig {
pub issuer_url: NonEmptyString, pub issuer_url: String,
pub client_id: NonEmptyString, pub client_id: String,
pub client_secret: NonEmptyString, pub client_secret: String,
#[serde(default)] #[serde(default)]
pub insecure: bool, pub insecure: bool,
pub admin_group: NonEmptyString, pub admin_group: String,
} }
#[derive(Clone, Deserialize)] #[derive(Clone, Deserialize)]
pub struct AutheliaConfig { pub struct FuseConfig {
pub user_database: PathBuf, pub mount_directory: PathBuf,
pub user_database_name: String,
} }
#[derive(Clone, Deserialize)] #[derive(Clone, Deserialize)]
pub struct PostgresqlConfig { pub struct PostgresqlConfig {
pub user: NonEmptyString, pub user: String,
pub password: NonEmptyString, pub password: String,
pub host: NonEmptyString, pub host: String,
pub port: u16, pub port: u16,
pub database: NonEmptyString, pub database: String,
} }
#[derive(Clone, Deserialize)] #[derive(Clone, Deserialize)]
pub struct RedisConfig { pub struct RedisConfig {
pub host: NonEmptyString, pub host: String,
pub port: u16, pub port: u16,
#[serde(default)] #[serde(default)]
pub database: u8, pub database: u8,
} }
#[derive(Clone, Deserialize)]
pub struct AdminConfig {
pub name: String,
pub display_name: String,
pub password: String,
pub email: String,
}
impl AdminConfig {
pub async fn upsert(&self, pool: &sqlx::PgPool) -> Result<(), Box<dyn Error + Send + Sync>> {
let password = hash_password(&self.password);
query!(
r#"
INSERT INTO glyph_users (name, display_name, password, email, disabled)
VALUES ($1, $2, $3, $4, $5)
ON CONFLICT (name) DO UPDATE
SET display_name = EXCLUDED.display_name,
password = EXCLUDED.password,
email = EXCLUDED.email,
disabled = EXCLUDED.disabled
"#,
self.name,
self.display_name,
password,
self.email,
false
)
.execute(pool)
.await?;
Ok(())
}
}
#[derive(Clone, Deserialize)] #[derive(Clone, Deserialize)]
pub struct Config { pub struct Config {
pub server: ServerConfig, pub server: ServerConfig,
pub oauth: OAuthConfig, pub oauth: OAuthConfig,
pub authelia: AutheliaConfig, pub fuse: FuseConfig,
pub postgresql: PostgresqlConfig, pub postgresql: PostgresqlConfig,
pub redis: RedisConfig, pub redis: RedisConfig,
pub admin: AdminConfig,
} }
impl Config { impl TryFrom<&PathBuf> for Config {
pub async fn from_path<P>(path: P) -> Result<Self, Box<dyn Error + Send + Sync>> type Error = Box<dyn Error + Send + Sync>;
where
P: AsRef<Path>, fn try_from(path: &PathBuf) -> Result<Self, Self::Error> {
{ let contents = fs::read_to_string(path)?;
let contents = fs::read_to_string(path).await?;
let config = serde_yaml::from_str(&contents)?; let config = serde_yaml::from_str(&contents)?;
Ok(config) Ok(config)
} }
@@ -88,4 +125,7 @@ pub struct Args {
/// Path to the log4rs config file /// Path to the log4rs config file
#[arg(short, long, value_name = "FILE", default_value = "log4rs.yaml")] #[arg(short, long, value_name = "FILE", default_value = "log4rs.yaml")]
pub log_config: PathBuf, pub log_config: PathBuf,
/// Additional arguments to pass to Authelia
#[arg(last = true, num_args = 0.., allow_hyphen_values = true)]
pub passthrough: Vec<String>,
} }

1045
src/fuse.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -2,6 +2,7 @@
#![allow(clippy::missing_docs_in_private_items)] #![allow(clippy::missing_docs_in_private_items)]
mod config; mod config;
mod fuse;
mod models; mod models;
mod routes; mod routes;
mod state; mod state;
@@ -13,22 +14,16 @@ use log::info;
use log4rs::config::Deserializers; use log4rs::config::Deserializers;
use std::net::SocketAddr; use std::net::SocketAddr;
use tokio::net::TcpListener; use tokio::net::TcpListener;
use utils::shutdown_signal;
use config::Args; use config::Args;
use state::State; use state::State;
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
let args = Args::parse(); let args: Args = Args::parse();
log4rs::init_file(&args.log_config, Deserializers::default()).unwrap(); log4rs::init_file(args.log_config.clone(), Deserializers::default()).unwrap();
let state = State::from_args(args).await.unwrap(); let state = State::from_args(args).await;
sqlx::migrate!("./migrations")
.run(&state.pg_pool)
.await
.expect("Failed to run migrations");
let routes = routes::routes(state.clone()); let routes = routes::routes(state.clone());
let app = axum::Router::new().nest(&format!("{}/api", state.config.server.subpath), routes); let app = axum::Router::new().nest(&format!("{}/api", state.config.server.subpath), routes);
@@ -37,8 +32,9 @@ async fn main() {
let listener = TcpListener::bind(addr).await.unwrap(); let listener = TcpListener::bind(addr).await.unwrap();
info!("Listening on {}", listener.local_addr().unwrap()); info!("Listening on {}", listener.local_addr().unwrap());
serve(listener, app) serve(listener, app)
.with_graceful_shutdown(shutdown_signal()) .with_graceful_shutdown(utils::shutdown_signal())
.await .await
.unwrap(); .unwrap();
} }

View File

@@ -1,76 +1,102 @@
use email_address::EmailAddress; use log::warn;
use non_empty_string::NonEmptyString;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_yaml::Value; use serde_yaml::Value;
use tokio::fs; use sqlx::PgPool;
use url::Url;
use std::{ use std::{collections::HashMap, error::Error};
collections::{HashMap, hash_map},
error::Error, #[derive(Debug, Clone, Serialize, Deserialize)]
ops::{Deref, DerefMut}, pub struct Users {
path::Path, pub users: HashMap<String, User>,
};
#[serde(flatten)]
pub extra: Option<HashMap<String, Value>>,
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct User { pub struct User {
pub displayname: NonEmptyString, pub displayname: String,
pub password: NonEmptyString, pub password: String,
pub email: Option<EmailAddress>, pub email: Option<String>,
pub picture: Option<Url>, pub disabled: Option<bool>,
#[serde(default)] pub picture: Option<String>,
pub disabled: bool, pub groups: Option<Vec<String>>,
#[serde(default)]
pub groups: Vec<NonEmptyString>,
#[serde(flatten)] #[serde(flatten)]
pub extra: HashMap<NonEmptyString, Value>, pub extra: Option<HashMap<String, Value>>,
} }
#[derive(Debug, Clone, Serialize, Deserialize)] impl TryInto<Vec<super::users::UserWithGroups>> for Users {
pub struct UsersFile { type Error = Box<dyn Error + Send + Sync>;
pub users: HashMap<NonEmptyString, User>,
}
impl Deref for UsersFile { fn try_into(self) -> Result<Vec<super::users::UserWithGroups>, Self::Error> {
type Target = HashMap<NonEmptyString, User>; self.users
.into_iter()
fn deref(&self) -> &Self::Target { .map(|(name, user)| {
&self.users let groups = user.groups.unwrap_or_default();
Ok(super::users::UserWithGroups {
name: name.clone(),
display_name: user.displayname,
password: user.password,
email: user
.email
.ok_or_else(|| format!("User {} is missing an email", &name))?,
disabled: user.disabled.unwrap_or(false),
picture: user.picture,
groups,
})
})
.collect()
} }
} }
impl DerefMut for UsersFile { impl Users {
fn deref_mut(&mut self) -> &mut Self::Target { pub fn from_fuse(pool: &PgPool, contents: &str) {
&mut self.users let Ok(users) = serde_yaml::from_str::<Self>(contents) else {
} warn!("Failed to parse users from JSON.");
} return;
};
impl IntoIterator for UsersFile {
type Item = (NonEmptyString, User); let users_with_groups: Vec<super::users::UserWithGroups> = match users.try_into() {
type IntoIter = hash_map::IntoIter<NonEmptyString, User>; Ok(users) => users,
Err(e) => {
fn into_iter(self) -> Self::IntoIter { warn!("Failed to convert Users to UserWithGroups: {e}");
self.users.into_iter() return;
} }
} };
impl UsersFile { let rt = tokio::runtime::Runtime::new().unwrap();
pub async fn load<P>(path: &P) -> Result<Self, Box<dyn Error + Send + Sync>> rt.block_on(async {
where super::users::UserWithGroups::upsert_many_delete_remaining(pool, &users_with_groups)
P: AsRef<Path> + Send + Sync, .await
{ .unwrap_or_else(|e| warn!("Failed to upsert users: {e}"));
let content = fs::read_to_string(path.as_ref()).await?; });
serde_yaml::from_str(&content) }
.map_err(|e| format!("Failed to parse users file: {e}").into())
} pub async fn to_fuse(pool: &PgPool) -> Result<String, Box<dyn Error + Send + Sync>> {
let users_with_groups = super::users::UserWithGroups::select_all(pool).await?;
pub async fn save<P>(&self, path: &P) -> Result<(), Box<dyn Error + Send + Sync>>
where let users = Self {
P: AsRef<Path> + Send + Sync, users: users_with_groups
{ .into_iter()
let content = serde_yaml::to_string(self)?; .map(|user| {
fs::write(path.as_ref(), content).await?; (
Ok(()) user.name.clone(),
User {
displayname: user.display_name,
password: user.password,
email: Some(user.email),
disabled: Some(user.disabled),
picture: user.picture,
groups: Some(user.groups),
extra: None,
},
)
})
.collect(),
extra: None,
};
Ok(serde_yaml::to_string(&users)?)
} }
} }

137
src/models/groups.rs Normal file
View File

@@ -0,0 +1,137 @@
use std::error::Error;
use serde::{Deserialize, Serialize};
use sqlx::{PgPool, prelude::FromRow, query, query_as};
/// A row from `glyph_groups`: a group identified by its name alone.
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Group {
    pub name: String,
}

impl Group {
    /// Fetches the group named `name`, or `None` if no such group exists.
    pub async fn select(
        pool: &PgPool,
        name: &str,
    ) -> Result<Option<Self>, Box<dyn Error + Send + Sync>> {
        let group = query_as!(
            Group,
            r#"
            SELECT name
            FROM glyph_groups
            WHERE name = $1
            "#,
            name
        )
        .fetch_optional(pool)
        .await?;

        Ok(group)
    }

    /// Deletes the group named `name`. Membership rows are removed with it
    /// via ON DELETE CASCADE (see the migration). Deleting a non-existent
    /// group is not an error.
    pub async fn delete(pool: &PgPool, name: &str) -> Result<(), Box<dyn Error + Send + Sync>> {
        query!(
            r#"
            DELETE FROM glyph_groups
            WHERE name = $1
            "#,
            name
        )
        .execute(pool)
        .await?;

        Ok(())
    }

    /// Returns `true` iff every name in `names` exists in `glyph_groups`.
    ///
    /// BUG FIX: `names` is deduplicated before comparing against the COUNT.
    /// `name` is the primary key, so the query matches each existing group
    /// at most once; comparing against the raw slice length would wrongly
    /// return `false` whenever `names` contains repeats.
    pub async fn all_exist(
        pool: &PgPool,
        names: &[String],
    ) -> Result<bool, Box<dyn Error + Send + Sync>> {
        let unique: std::collections::HashSet<&String> = names.iter().collect();

        let row = query!(
            r#"
            SELECT COUNT(*) AS "count!"
            FROM glyph_groups
            WHERE name = ANY($1)
            "#,
            names
        )
        .fetch_one(pool)
        .await?;

        Ok(row.count == i64::try_from(unique.len()).unwrap())
    }
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GroupWithUsers {
pub name: String,
#[serde(default)]
pub users: Vec<String>,
}
impl GroupWithUsers {
pub async fn select_all(pool: &PgPool) -> Result<Vec<Self>, Box<dyn Error + Send + Sync>> {
let groups = query_as!(
GroupWithUsers,
r#"
SELECT
g.name,
ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS "users!"
FROM glyph_groups g
"#
)
.fetch_all(pool)
.await?;
Ok(groups)
}
pub async fn select(
pool: &PgPool,
name: &str,
) -> Result<Option<Self>, Box<dyn Error + Send + Sync>> {
let group = query_as!(
GroupWithUsers,
r#"
SELECT
g.name,
ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS "users!"
FROM glyph_groups g
WHERE g.name = $1
"#,
name
)
.fetch_optional(pool)
.await?;
Ok(group)
}
pub async fn insert(
pool: &PgPool,
group_with_users: &Self,
) -> Result<(), Box<dyn std::error::Error>> {
let mut tx = pool.begin().await?;
query!(
r#"INSERT INTO glyph_groups (name) VALUES ($1)"#,
group_with_users.name
)
.execute(&mut *tx)
.await?;
query!(
r#"
INSERT INTO glyph_users_groups (user_name, group_name)
SELECT * FROM UNNEST($1::text[], $2::text[])
"#,
&group_with_users.users,
&vec![group_with_users.name.clone(); group_with_users.users.len()]
)
.execute(&mut *tx)
.await?;
tx.commit().await?;
Ok(())
}
}

View File

@@ -0,0 +1,74 @@
use std::error::Error;
use serde::{Deserialize, Serialize};
use sqlx::{FromRow, PgPool, query};
/// A membership row linking one user to one group (`glyph_users_groups`).
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct UsersGroups {
    pub user_name: String,
    pub group_name: String,
}

impl UsersGroups {
    /// Replaces the membership list of `group_name` with exactly `users`.
    ///
    /// Deletes the group's existing rows, then bulk-inserts the new ones via
    /// UNNEST, all inside a single transaction.
    ///
    /// BUG FIX: the transaction was previously dropped without `commit()`,
    /// which rolls it back in sqlx — making the whole function a silent
    /// no-op. The commit is now explicit.
    pub async fn set_users_for_group(
        pool: &PgPool,
        group_name: &str,
        users: &[String],
    ) -> Result<(), Box<dyn Error + Send + Sync>> {
        let mut tx = pool.begin().await?;

        query!(
            r#"
            DELETE FROM glyph_users_groups
            WHERE group_name = $1
            "#,
            group_name
        )
        .execute(&mut *tx)
        .await?;

        query!(
            r#"
            INSERT INTO glyph_users_groups (user_name, group_name)
            SELECT * FROM UNNEST($1::text[], $2::text[])
            "#,
            users,
            &vec![group_name.to_string(); users.len()]
        )
        .execute(&mut *tx)
        .await?;

        tx.commit().await?;

        Ok(())
    }

    /// Replaces the group list of `user_name` with exactly `groups`.
    ///
    /// Mirror image of `set_users_for_group`; same transactional semantics.
    ///
    /// BUG FIX: same missing-commit defect as above — the transaction was
    /// dropped (and therefore rolled back) before this fix.
    pub async fn set_groups_for_user(
        pool: &PgPool,
        user_name: &str,
        groups: &[String],
    ) -> Result<(), Box<dyn Error + Send + Sync>> {
        let mut tx = pool.begin().await?;

        query!(
            r#"
            DELETE FROM glyph_users_groups
            WHERE user_name = $1
            "#,
            user_name
        )
        .execute(&mut *tx)
        .await?;

        query!(
            r#"
            INSERT INTO glyph_users_groups (user_name, group_name)
            SELECT * FROM UNNEST($1::text[], $2::text[])
            "#,
            &vec![user_name.to_string(); groups.len()],
            groups
        )
        .execute(&mut *tx)
        .await?;

        tx.commit().await?;

        Ok(())
    }
}

View File

@@ -1,2 +1,5 @@
pub mod authelia; pub mod authelia;
pub mod groups;
pub mod intersections;
pub mod invites; pub mod invites;
pub mod users;

283
src/models/users.rs Normal file
View File

@@ -0,0 +1,283 @@
use std::{collections::HashSet, error::Error};
use serde::{Deserialize, Serialize};
use sqlx::{FromRow, PgPool, query, query_as};
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct User {
pub name: String,
pub display_name: String,
pub password: String,
pub email: String,
#[serde(default)]
pub disabled: bool,
#[serde(default)]
pub picture: Option<String>,
}
impl User {
pub async fn select(
pool: &PgPool,
name: &str,
) -> Result<Option<Self>, Box<dyn Error + Send + Sync>> {
let user = query_as!(
User,
r#"
SELECT name, display_name, password, email, disabled, picture
FROM glyph_users
WHERE name = $1
"#,
name
)
.fetch_optional(pool)
.await?;
Ok(user)
}
pub async fn upsert(pool: &PgPool, user: &Self) -> Result<(), Box<dyn Error + Send + Sync>> {
query!(
r#"
INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)
VALUES ($1, $2, $3, $4, $5, $6)
ON CONFLICT (name) DO UPDATE
SET display_name = EXCLUDED.display_name,
password = EXCLUDED.password,
email = EXCLUDED.email,
disabled = EXCLUDED.disabled,
picture = EXCLUDED.picture
"#,
user.name,
user.display_name,
user.password,
user.email,
user.disabled,
user.picture
)
.execute(pool)
.await?;
Ok(())
}
pub async fn delete(pool: &PgPool, name: &str) -> Result<(), Box<dyn Error + Send + Sync>> {
query!(
r#"
DELETE FROM glyph_users
WHERE name = $1
"#,
name
)
.execute(pool)
.await?;
Ok(())
}
pub async fn all_exist(
pool: &PgPool,
names: &[String],
) -> Result<bool, Box<dyn Error + Send + Sync>> {
let row = query!(
r#"
SELECT COUNT(*) AS "count!"
FROM glyph_users
WHERE name = ANY($1)
"#,
names
)
.fetch_one(pool)
.await?;
Ok(row.count == i64::try_from(names.len()).unwrap())
}
}
/// A `glyph_users` row joined with the names of all groups the user belongs
/// to (collected from `glyph_users_groups`). Not `FromRow`: it is built by
/// the `ARRAY(...)` subquery selects below.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserWithGroups {
    // Primary key / login name.
    pub name: String,
    pub display_name: String,
    // Stored password value — presumably a hash; TODO confirm against schema.
    pub password: String,
    pub email: String,
    // Missing in serialized input defaults to `false`.
    #[serde(default)]
    pub disabled: bool,
    // Optional picture reference; defaults to `None` when absent.
    #[serde(default)]
    pub picture: Option<String>,
    // Group names this user is a member of; defaults to empty when absent.
    #[serde(default)]
    pub groups: Vec<String>,
}
impl UserWithGroups {
    /// Fetches every user together with the names of the groups each belongs
    /// to (via an `ARRAY(...)` correlated subquery, so users with no groups
    /// get an empty array rather than being dropped by a join).
    ///
    /// # Errors
    /// Returns any database error.
    pub async fn select_all(pool: &PgPool) -> Result<Vec<Self>, Box<dyn Error + Send + Sync>> {
        let users = query_as!(
            UserWithGroups,
            r#"
            SELECT
                u.name,
                u.display_name,
                u.password,
                u.email,
                u.disabled,
                u.picture,
                ARRAY(SELECT ug.group_name FROM glyph_users_groups ug WHERE ug.user_name = u.name) AS "groups!"
            FROM glyph_users u
            "#
        )
        .fetch_all(pool)
        .await?;

        Ok(users)
    }

    /// Fetches one user (with group names) by primary key; `Ok(None)` when
    /// no row matches.
    ///
    /// # Errors
    /// Returns any database error.
    pub async fn select(
        pool: &PgPool,
        name: &str,
    ) -> Result<Option<Self>, Box<dyn Error + Send + Sync>> {
        let user = query_as!(
            UserWithGroups,
            r#"
            SELECT
                u.name,
                u.display_name,
                u.password,
                u.email,
                u.disabled,
                u.picture,
                ARRAY(SELECT ug.group_name FROM glyph_users_groups ug WHERE ug.user_name = u.name) AS "groups!"
            FROM glyph_users u
            WHERE u.name = $1
            "#,
            name
        )
        .fetch_optional(pool)
        .await?;

        Ok(user)
    }

    /// Inserts a brand-new user and its group memberships in one transaction.
    ///
    /// Fails (unique-violation error) if the user already exists; use
    /// `upsert_many_delete_remaining` for idempotent writes. An empty
    /// `groups` vec inserts the user with no memberships (the UNNEST insert
    /// over empty arrays inserts zero rows).
    ///
    /// # Errors
    /// Returns any database error; on error the transaction is rolled back.
    pub async fn insert(
        pool: &PgPool,
        user_with_groups: &Self,
    ) -> Result<(), Box<dyn Error + Send + Sync>> {
        let mut tx = pool.begin().await?;

        query!(
            r#"
            INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)
            VALUES ($1, $2, $3, $4, $5, $6)
            "#,
            user_with_groups.name,
            user_with_groups.display_name,
            user_with_groups.password,
            user_with_groups.email,
            user_with_groups.disabled,
            user_with_groups.picture
        )
        .execute(&mut *tx)
        .await?;

        query!(
            r#"
            INSERT INTO glyph_users_groups (user_name, group_name)
            SELECT * FROM UNNEST($1::text[], $2::text[])
            "#,
            &user_with_groups.groups,
            // $2 repeats the user name so both arrays have equal length.
            &vec![user_with_groups.name.clone(); user_with_groups.groups.len()]
        )
        .execute(&mut *tx)
        .await?;

        tx.commit().await?;

        Ok(())
    }

    /// Synchronizes the database to exactly the given set of users: upserts
    /// each user, replaces its memberships, then deletes every user not in
    /// the slice and every group not referenced by any given user. All in a
    /// single transaction — passing an empty slice wipes all users and
    /// groups, which is the intended "delete remaining" semantics.
    ///
    /// # Errors
    /// Returns any database error; on error the transaction is rolled back
    /// and the database is left unchanged.
    pub async fn upsert_many_delete_remaining(
        pool: &PgPool,
        users_with_groups: &[Self],
    ) -> Result<(), Box<dyn Error + Send + Sync>> {
        let mut tx = pool.begin().await?;

        for user in users_with_groups {
            query!(
                r#"
                INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)
                VALUES ($1, $2, $3, $4, $5, $6)
                ON CONFLICT (name) DO UPDATE
                SET display_name = EXCLUDED.display_name,
                    password = EXCLUDED.password,
                    email = EXCLUDED.email,
                    disabled = EXCLUDED.disabled,
                    picture = EXCLUDED.picture
                "#,
                user.name,
                user.display_name,
                user.password,
                user.email,
                user.disabled,
                user.picture
            )
            .execute(&mut *tx)
            .await?;

            query!(
                r#"
                DELETE FROM glyph_users_groups
                WHERE user_name = $1
                "#,
                user.name
            )
            .execute(&mut *tx)
            .await?;

            if !user.groups.is_empty() {
                query!(
                    r#"
                    INSERT INTO glyph_users_groups (user_name, group_name)
                    SELECT * FROM UNNEST($1::text[], $2::text[])
                    "#,
                    &user.groups,
                    &vec![user.name.clone(); user.groups.len()]
                )
                .execute(&mut *tx)
                .await?;
            }
        }

        let users = users_with_groups
            .iter()
            .map(|user| user.name.clone())
            .collect::<Vec<_>>();

        query!(
            r#"
            DELETE FROM glyph_users
            WHERE name <> ALL($1)
            "#,
            &users
        )
        .execute(&mut *tx)
        .await?;

        // Deduplicate the union of all referenced group names before pruning.
        let groups = users_with_groups
            .iter()
            .flat_map(|user| user.groups.iter().cloned())
            .collect::<HashSet<_>>()
            .into_iter()
            .collect::<Vec<_>>();

        query!(
            r#"
            DELETE FROM glyph_groups
            WHERE name <> ALL($1)
            "#,
            &groups
        )
        // BUG FIX: this statement previously ran on `pool` (a separate
        // connection, outside and concurrent with the open transaction),
        // breaking atomicity and racing the uncommitted membership deletes
        // above. It must run on the transaction like every other statement.
        .execute(&mut *tx)
        .await?;

        tx.commit().await?;

        Ok(())
    }
}

View File

@@ -8,37 +8,41 @@ use axum::{
}; };
use non_empty_string::NonEmptyString; use non_empty_string::NonEmptyString;
use nonempty::NonEmpty;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use crate::{config::Config, models::authelia, routes::auth, state::State}; use crate::{
config::Config,
models::{self, groups::Group},
routes::auth,
state::State,
};
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
struct GroupResponse { struct GroupResponse {
users: Vec<NonEmptyString>, users: Vec<String>,
} }
type GroupsResponse = HashMap<NonEmptyString, GroupResponse>; impl From<models::groups::GroupWithUsers> for GroupResponse {
fn from(group: models::groups::GroupWithUsers) -> Self {
Self { users: group.users }
}
}
type GroupsResponse = HashMap<String, GroupResponse>;
pub async fn get_all( pub async fn get_all(
_: auth::User, _: auth::User,
extract::State(config): extract::State<Config>, extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> { ) -> Result<impl IntoResponse, StatusCode> {
let users = authelia::UsersFile::load(&config.authelia.user_database) let groups_with_users = models::groups::GroupWithUsers::select_all(&pg_pool)
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
let mut groups_response: GroupsResponse = HashMap::new(); let groups_response = groups_with_users
.into_iter()
for (username, user) in users.iter() { .map(|group| (group.name.clone(), GroupResponse::from(group)))
for group in &user.groups { .collect::<GroupsResponse>();
let group_response = groups_response
.entry(group.clone())
.or_insert_with(|| GroupResponse { users: Vec::new() });
group_response.users.push(username.clone());
}
}
Ok(Json(groups_response)) Ok(Json(groups_response))
} }
@@ -46,114 +50,102 @@ pub async fn get_all(
pub async fn get( pub async fn get(
_: auth::User, _: auth::User,
extract::Path(name): extract::Path<NonEmptyString>, extract::Path(name): extract::Path<NonEmptyString>,
extract::State(config): extract::State<Config>, extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> { ) -> Result<impl IntoResponse, StatusCode> {
let users = authelia::UsersFile::load(&config.authelia.user_database) let group_with_users = models::groups::GroupWithUsers::select(&pg_pool, name.as_str())
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.ok_or(StatusCode::NOT_FOUND)?;
let group_users = users Ok(Json(GroupResponse::from(group_with_users)))
.iter()
.filter_map(|(username, user)| {
if user.groups.contains(&name) {
Some(username.clone())
} else {
None
}
})
.collect::<Vec<_>>();
if group_users.is_empty() {
return Err(StatusCode::NOT_FOUND);
}
Ok(Json(GroupResponse { users: group_users }))
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
pub struct GroupCreate { pub struct GroupCreate {
name: NonEmptyString, name: NonEmptyString,
users: NonEmpty<NonEmptyString>, users: Vec<NonEmptyString>,
} }
pub async fn create( pub async fn create(
_: auth::User, _: auth::User,
extract::State(config): extract::State<Config>, extract::State(pg_pool): extract::State<PgPool>,
extract::Json(group_create): extract::Json<GroupCreate>, extract::Json(group_create): extract::Json<GroupCreate>,
) -> Result<impl IntoResponse, StatusCode> { ) -> Result<impl IntoResponse, StatusCode> {
let mut users = authelia::UsersFile::load(&config.authelia.user_database) if models::groups::Group::select(&pg_pool, group_create.name.as_str())
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.is_some()
if users
.iter()
.any(|(_, user)| user.groups.contains(&group_create.name))
{ {
return Err(StatusCode::CONFLICT); return Err(StatusCode::CONFLICT);
} }
if !group_create let users = group_create
.users .users
.iter() .into_iter()
.all(|user| users.contains_key(user.as_str())) .map(|u| u.to_string())
.collect::<Vec<_>>();
if !models::users::User::all_exist(&pg_pool, &users)
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
{ {
return Err(StatusCode::NOT_FOUND); return Err(StatusCode::NOT_FOUND);
} }
for user in group_create.users { let group_with_users = models::groups::GroupWithUsers {
users name: group_create.name.to_string(),
.get_mut(user.as_str()) users,
.unwrap() };
.groups
.push(group_create.name.clone()); models::groups::GroupWithUsers::insert(&pg_pool, &group_with_users)
} .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
Ok(()) Ok(())
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
pub struct GroupUpdate { pub struct GroupUpdate {
users: Option<NonEmpty<NonEmptyString>>, users: Option<Vec<NonEmptyString>>,
} }
pub async fn update( pub async fn update(
session_user: auth::User, session_user: auth::User,
extract::Path(name): extract::Path<NonEmptyString>, extract::Path(name): extract::Path<NonEmptyString>,
extract::State(pg_pool): extract::State<PgPool>,
extract::State(config): extract::State<Config>, extract::State(config): extract::State<Config>,
extract::Json(group_update): extract::Json<GroupUpdate>, extract::Json(group_update): extract::Json<GroupUpdate>,
) -> Result<impl IntoResponse, StatusCode> { ) -> Result<impl IntoResponse, StatusCode> {
let mut users = authelia::UsersFile::load(&config.authelia.user_database) let group = models::groups::Group::select(&pg_pool, name.as_str())
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.ok_or(StatusCode::NOT_FOUND)?;
if !users.iter().any(|(_, user)| user.groups.contains(&name)) {
return Err(StatusCode::NOT_FOUND);
}
let mut logout = false; let mut logout = false;
if let Some(new_users) = group_update.users {
for (username, user) in users.iter_mut() {
if new_users.contains(username) {
if !user.groups.contains(&name) {
user.groups.push(name.clone());
}
} else {
user.groups.retain(|g| g != &name);
}
if *username == *session_user.username if let Some(users) = &group_update.users {
&& !user.groups.contains(&config.oauth.admin_group) let users = users.iter().map(ToString::to_string).collect::<Vec<_>>();
{
logout = true; if !models::users::User::all_exist(&pg_pool, &users)
} .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
{
return Err(StatusCode::NOT_FOUND);
} }
}
users models::intersections::UsersGroups::set_users_for_group(
.save(&config.authelia.user_database) &pg_pool,
group.name.as_str(),
&users,
)
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
if name == config.oauth.admin_group && !users.contains(&session_user.username) {
logout = true;
}
}
if logout { if logout {
return Ok(Redirect::to("/api/auth/logout").into_response()); return Ok(Redirect::to("/api/auth/logout").into_response());
} }
@@ -163,27 +155,20 @@ pub async fn update(
pub async fn delete( pub async fn delete(
_: auth::User, _: auth::User,
extract::Path(name): extract::Path<NonEmptyString>, extract::Path(name): extract::Path<String>,
extract::State(pg_pool): extract::State<PgPool>,
extract::State(config): extract::State<Config>, extract::State(config): extract::State<Config>,
) -> Result<impl IntoResponse, StatusCode> { ) -> Result<impl IntoResponse, StatusCode> {
if name == config.oauth.admin_group { if name == config.oauth.admin_group {
return Err(StatusCode::FORBIDDEN); return Err(StatusCode::FORBIDDEN);
} }
let mut users = authelia::UsersFile::load(&config.authelia.user_database) let group = models::groups::Group::select(&pg_pool, &name)
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.ok_or(StatusCode::NOT_FOUND)?;
if !users.iter().any(|(_, user)| user.groups.contains(&name)) { Group::delete(&pg_pool, &group.name)
return Err(StatusCode::NOT_FOUND);
}
for user in users.values_mut() {
user.groups.retain(|g| g != &name);
}
users
.save(&config.authelia.user_database)
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;

View File

@@ -7,62 +7,49 @@ use axum::{
routing, routing,
}; };
use email_address::EmailAddress;
use non_empty_string::NonEmptyString; use non_empty_string::NonEmptyString;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use sqlx::PgPool;
use url::Url;
use crate::{ use crate::{
config::Config, config::Config, models, routes::auth, state::State,
models::authelia, utils::crypto::generate_random_password_hash,
routes::auth,
state::State,
utils::crypto::{generate_random_password_hash, hash_password},
}; };
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
struct UserResponse { struct UserResponse {
displayname: NonEmptyString, display_name: String,
email: Option<EmailAddress>, email: String,
picture: Option<Url>,
disabled: bool, disabled: bool,
groups: Vec<NonEmptyString>, picture: Option<String>,
groups: Vec<String>,
#[serde(flatten)]
extra: HashMap<NonEmptyString, Value>,
} }
impl From<authelia::User> for UserResponse { impl From<models::users::UserWithGroups> for UserResponse {
fn from(user: authelia::User) -> Self { fn from(user: models::users::UserWithGroups) -> Self {
Self { Self {
displayname: user.displayname, display_name: user.display_name,
email: user.email, email: user.email,
picture: user.picture,
disabled: user.disabled, disabled: user.disabled,
picture: user.picture,
groups: user.groups, groups: user.groups,
extra: user
.extra
.into_iter()
.map(|(k, v)| (k, serde_json::to_value(v).unwrap()))
.collect(),
} }
} }
} }
type UsersResponse = HashMap<NonEmptyString, UserResponse>; type UsersResponse = HashMap<String, UserResponse>;
pub async fn get_all( pub async fn get_all(
_: auth::User, _: auth::User,
extract::State(config): extract::State<Config>, extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> { ) -> Result<impl IntoResponse, StatusCode> {
let users = authelia::UsersFile::load(&config.authelia.user_database) let users_with_groups = models::users::UserWithGroups::select_all(&pg_pool)
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
let users_response = users let users_response = users_with_groups
.into_iter() .into_iter()
.map(|(name, user)| (name, user.into())) .map(|user| (user.name.clone(), UserResponse::from(user)))
.collect::<UsersResponse>(); .collect::<UsersResponse>();
Ok(Json(users_response)) Ok(Json(users_response))
@@ -71,67 +58,63 @@ pub async fn get_all(
pub async fn get( pub async fn get(
_: auth::User, _: auth::User,
extract::Path(name): extract::Path<NonEmptyString>, extract::Path(name): extract::Path<NonEmptyString>,
extract::State(config): extract::State<Config>, extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> { ) -> Result<impl IntoResponse, StatusCode> {
let users = authelia::UsersFile::load(&config.authelia.user_database) let user_with_groups = models::users::UserWithGroups::select(&pg_pool, name.as_str())
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.ok_or(StatusCode::NOT_FOUND)?;
let user = users.get(name.as_str()).ok_or(StatusCode::NOT_FOUND)?; Ok(Json(UserResponse::from(user_with_groups)))
Ok(Json(UserResponse::from(user.clone())))
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
pub struct UserCreate { pub struct UserCreate {
name: NonEmptyString, name: NonEmptyString,
displayname: NonEmptyString, displayname: NonEmptyString,
email: Option<EmailAddress>, email: NonEmptyString,
password: Option<NonEmptyString>,
picture: Option<Url>,
#[serde(default)]
disabled: bool, disabled: bool,
#[serde(default)] picture: Option<NonEmptyString>,
groups: Vec<NonEmptyString>, groups: Vec<NonEmptyString>,
#[serde(flatten)]
extra: HashMap<NonEmptyString, Value>,
} }
pub async fn create( pub async fn create(
_: auth::User, _: auth::User,
extract::State(config): extract::State<Config>, extract::State(pg_pool): extract::State<PgPool>,
extract::Json(user_create): extract::Json<UserCreate>, extract::Json(user_create): extract::Json<UserCreate>,
) -> Result<impl IntoResponse, StatusCode> { ) -> Result<impl IntoResponse, StatusCode> {
let mut users = authelia::UsersFile::load(&config.authelia.user_database) if models::users::User::select(&pg_pool, user_create.name.as_str())
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.is_some()
if users.contains_key(user_create.name.as_str()) { {
return Err(StatusCode::CONFLICT); return Err(StatusCode::CONFLICT);
} }
let user = authelia::User { let groups = user_create
displayname: user_create.displayname, .groups
password: user_create.password.map_or_else( .into_iter()
|| NonEmptyString::new(generate_random_password_hash()).unwrap(), .map(|g| g.to_string())
|p| p, .collect::<Vec<_>>();
),
email: user_create.email, if !models::groups::Group::all_exist(&pg_pool, &groups)
picture: user_create.picture, .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
{
return Err(StatusCode::NOT_FOUND);
}
let user_with_groups = models::users::UserWithGroups {
name: user_create.name.to_string(),
display_name: user_create.displayname.to_string(),
password: generate_random_password_hash(),
email: user_create.email.to_string(),
disabled: user_create.disabled, disabled: user_create.disabled,
groups: user_create.groups, picture: user_create.picture.map(|i| i.to_string()),
extra: user_create groups,
.extra
.into_iter()
.map(|(k, v)| (k, serde_json::from_value(v).unwrap()))
.collect(),
}; };
users.insert(user_create.name, user); models::users::UserWithGroups::insert(&pg_pool, &user_with_groups)
users
.save(&config.authelia.user_database)
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
@@ -140,69 +123,70 @@ pub async fn create(
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
pub struct UserUpdate { pub struct UserUpdate {
displayname: Option<NonEmptyString>, display_name: Option<NonEmptyString>,
password: Option<NonEmptyString>, email: Option<NonEmptyString>,
#[serde(default, with = "serde_with::rust::double_option")]
#[allow(clippy::option_option)]
email: Option<Option<EmailAddress>>,
#[serde(default, with = "serde_with::rust::double_option")]
#[allow(clippy::option_option)]
picture: Option<Option<Url>>,
disabled: Option<bool>, disabled: Option<bool>,
picture: Option<NonEmptyString>,
groups: Option<Vec<NonEmptyString>>, groups: Option<Vec<NonEmptyString>>,
#[serde(flatten)]
extra: HashMap<NonEmptyString, Value>,
} }
pub async fn update( pub async fn update(
session_user: auth::User, session_user: auth::User,
extract::Path(name): extract::Path<NonEmptyString>, extract::Path(name): extract::Path<NonEmptyString>,
extract::State(pg_pool): extract::State<PgPool>,
extract::State(config): extract::State<Config>, extract::State(config): extract::State<Config>,
extract::Json(user_update): extract::Json<UserUpdate>, extract::Json(user_update): extract::Json<UserUpdate>,
) -> Result<impl IntoResponse, StatusCode> { ) -> Result<impl IntoResponse, StatusCode> {
let mut users = authelia::UsersFile::load(&config.authelia.user_database) let user = models::users::User::select(&pg_pool, name.as_str())
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.ok_or(StatusCode::NOT_FOUND)?;
let mut logout = false;
if let Some(groups) = user_update.groups {
let groups = groups
.into_iter()
.map(|g| g.to_string())
.collect::<Vec<_>>();
if !models::groups::Group::all_exist(&pg_pool, &groups)
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
{
return Err(StatusCode::NOT_FOUND);
}
models::intersections::UsersGroups::set_groups_for_user(
&pg_pool,
user.name.as_str(),
&groups,
)
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
let user = users.get_mut(name.as_str()).ok_or(StatusCode::NOT_FOUND)?; if name == session_user.username.to_string() && !groups.contains(&config.oauth.admin_group)
{
if let Some(displayname) = user_update.displayname {
user.displayname = displayname;
}
if let Some(email) = user_update.email {
user.email = email;
}
if let Some(password) = user_update.password {
user.password = NonEmptyString::new(hash_password(password.as_str())).unwrap();
}
if let Some(picture) = user_update.picture {
user.picture = picture;
}
if let Some(disabled) = user_update.disabled {
user.disabled = disabled;
}
let mut logout = false;
if let Some(groups) = user_update.groups {
if name == *session_user.username && !groups.contains(&config.oauth.admin_group) {
logout = true; logout = true;
} }
user.groups = groups;
} }
for (k, v) in user_update.extra { let user = models::users::User {
user.extra name: user.name,
.insert(k.clone(), serde_json::from_value(v).unwrap()); display_name: user_update
} .display_name
.map(|d| d.to_string())
.unwrap_or(user.display_name),
password: user.password,
email: user_update
.email
.map(|e| e.to_string())
.unwrap_or(user.email),
disabled: user_update.disabled.unwrap_or(user.disabled),
picture: user_update.picture.map(|i| i.to_string()).or(user.picture),
};
users models::users::User::upsert(&pg_pool, &user)
.save(&config.authelia.user_database)
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
@@ -216,24 +200,18 @@ pub async fn update(
pub async fn delete( pub async fn delete(
session_user: auth::User, session_user: auth::User,
extract::Path(name): extract::Path<String>, extract::Path(name): extract::Path<String>,
extract::State(config): extract::State<Config>, extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> { ) -> Result<impl IntoResponse, StatusCode> {
if name == *session_user.username { if name == session_user.username.to_string() {
return Err(StatusCode::FORBIDDEN); return Err(StatusCode::FORBIDDEN);
} }
let mut users = authelia::UsersFile::load(&config.authelia.user_database) let user = models::users::User::select(&pg_pool, &name)
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.ok_or(StatusCode::NOT_FOUND)?;
if !users.contains_key(&name) { models::users::User::delete(&pg_pool, &user.name)
return Err(StatusCode::NOT_FOUND);
}
users.remove(&name);
users
.save(&config.authelia.user_database)
.await .await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?; .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;

View File

@@ -1,4 +1,4 @@
use std::error::Error; use std::{sync::Arc, time::Duration};
use async_redis_session::RedisSessionStore; use async_redis_session::RedisSessionStore;
use axum::extract::FromRef; use axum::extract::FromRef;
@@ -13,8 +13,13 @@ use openidconnect::{
reqwest, reqwest,
}; };
use sqlx::{PgPool, postgres::PgPoolOptions}; use sqlx::{PgPool, postgres::PgPoolOptions};
use tokio::{process::Command, spawn, task::JoinHandle, time::sleep};
use crate::config::{Args, Config}; use crate::{
config::{Args, Config},
fuse::AutheliaFS,
models,
};
pub type OAuthClient< pub type OAuthClient<
HasAuthUrl = EndpointSet, HasAuthUrl = EndpointSet,
@@ -46,30 +51,46 @@ pub type OAuthClient<
#[derive(Clone)] #[derive(Clone)]
pub struct State { pub struct State {
pub config: Config, pub config: Config,
pub oauth_http_client: reqwest::Client,
pub oauth_client: OAuthClient,
pub pg_pool: PgPool, pub pg_pool: PgPool,
pub redis_client: redis::aio::MultiplexedConnection, pub redis_client: redis::aio::MultiplexedConnection,
pub filesystem: AutheliaFS,
pub mount: Arc<JoinHandle<()>>,
pub authelia: Arc<JoinHandle<()>>,
pub oauth_http_client: reqwest::Client,
pub oauth_client: OAuthClient,
pub session_store: RedisSessionStore, pub session_store: RedisSessionStore,
} }
impl State { impl State {
pub async fn from_args(args: Args) -> Result<Self, Box<dyn Error + Send + Sync>> { pub async fn from_args(args: Args) -> Self {
let config = Config::from_path(&args.config).await?; let config = Config::try_from(&args.config).unwrap();
let (oauth_http_client, oauth_client) = oauth_client(&config).await?; let pg_pool = pg_pool(&config).await;
let pg_pool = pg_pool(&config).await?; sqlx::migrate!("./migrations").run(&pg_pool).await.unwrap();
let redis_client = redis_client(&config).await?; config.admin.upsert(&pg_pool).await.unwrap();
let session_store = session_store(&config)?;
Ok(Self { let redis_client = redis_client(&config).await;
let (filesystem, mount) = fuse(&config, &pg_pool).await;
let contents = models::authelia::Users::to_fuse(&pg_pool).await.unwrap();
filesystem.store(contents).await.unwrap();
let authelia = authelia(args.passthrough);
let (oauth_http_client, oauth_client) = oauth_client(&config).await;
let session_store = session_store(&config);
Self {
config, config,
oauth_http_client,
oauth_client,
pg_pool, pg_pool,
redis_client, redis_client,
filesystem,
mount,
authelia,
oauth_http_client,
oauth_client,
session_store, session_store,
}) }
} }
} }
@@ -79,18 +100,6 @@ impl FromRef<State> for Config {
} }
} }
impl FromRef<State> for reqwest::Client {
fn from_ref(state: &State) -> Self {
state.oauth_http_client.clone()
}
}
impl FromRef<State> for OAuthClient {
fn from_ref(state: &State) -> Self {
state.oauth_client.clone()
}
}
impl FromRef<State> for PgPool { impl FromRef<State> for PgPool {
fn from_ref(state: &State) -> Self { fn from_ref(state: &State) -> Self {
state.pg_pool.clone() state.pg_pool.clone()
@@ -103,41 +112,32 @@ impl FromRef<State> for redis::aio::MultiplexedConnection {
} }
} }
impl FromRef<State> for AutheliaFS {
fn from_ref(state: &State) -> Self {
state.filesystem.clone()
}
}
impl FromRef<State> for reqwest::Client {
fn from_ref(state: &State) -> Self {
state.oauth_http_client.clone()
}
}
impl FromRef<State> for OAuthClient {
fn from_ref(state: &State) -> Self {
state.oauth_client.clone()
}
}
impl FromRef<State> for RedisSessionStore { impl FromRef<State> for RedisSessionStore {
fn from_ref(state: &State) -> Self { fn from_ref(state: &State) -> Self {
state.session_store.clone() state.session_store.clone()
} }
} }
async fn oauth_client( async fn pg_pool(config: &Config) -> PgPool {
config: &Config, PgPoolOptions::new()
) -> Result<(reqwest::Client, OAuthClient), Box<dyn Error + Send + Sync>> {
let oauth_http_client = reqwest::ClientBuilder::new()
.redirect(reqwest::redirect::Policy::none())
.danger_accept_invalid_certs(config.oauth.insecure)
.build()?;
let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(config.oauth.issuer_url.to_string()).unwrap(),
&oauth_http_client,
)
.await?;
let oauth_client = OAuthClient::from_provider_metadata(
provider_metadata,
ClientId::new(config.oauth.client_id.to_string()),
Some(ClientSecret::new(config.oauth.client_secret.to_string())),
)
.set_redirect_uri(RedirectUrl::new(format!(
"{}{}/api/auth/callback",
config.server.host, config.server.subpath
))?);
Ok((oauth_http_client, oauth_client))
}
async fn pg_pool(config: &Config) -> Result<PgPool, Box<dyn Error + Send + Sync>> {
Ok(PgPoolOptions::new()
.max_connections(5) .max_connections(5)
.connect(&format!( .connect(&format!(
"postgres://{}:{}@{}:{}/{}", "postgres://{}:{}@{}:{}/{}",
@@ -147,26 +147,104 @@ async fn pg_pool(config: &Config) -> Result<PgPool, Box<dyn Error + Send + Sync>
config.postgresql.port, config.postgresql.port,
config.postgresql.database config.postgresql.database
)) ))
.await?) .await
.unwrap()
} }
async fn redis_client( async fn redis_client(config: &Config) -> redis::aio::MultiplexedConnection {
config: &Config,
) -> Result<redis::aio::MultiplexedConnection, Box<dyn Error + Send + Sync>> {
let url = format!( let url = format!(
"redis://{}:{}/{}", "redis://{}:{}/{}",
config.redis.host, config.redis.port, config.redis.database config.redis.host, config.redis.port, config.redis.database
); );
let client = redis::Client::open(url)?; let client = redis::Client::open(url).unwrap();
Ok(client.get_multiplexed_async_connection().await?) client.get_multiplexed_async_connection().await.unwrap()
} }
fn session_store(config: &Config) -> Result<RedisSessionStore, Box<dyn Error + Send + Sync>> { async fn fuse(config: &Config, pg_pool: &PgPool) -> (AutheliaFS, Arc<JoinHandle<()>>) {
let fs = AutheliaFS::new(
config.fuse.clone(),
Some(Box::new(models::authelia::Users::from_fuse)),
pg_pool.clone(),
)
.await;
let fs_clone = fs.clone();
let mount = Arc::new(spawn(async move {
loop {
let _ = fs_clone.clone().run().await;
}
}));
(fs, mount)
}
fn authelia(args: Vec<String>) -> Arc<JoinHandle<()>> {
Arc::new(spawn(async move {
loop {
let _ = Command::new("authelia")
.args(args.clone())
.spawn()
.unwrap()
.wait()
.await;
}
}))
}
async fn oauth_client(config: &Config) -> (reqwest::Client, OAuthClient) {
let oauth_http_client = reqwest::ClientBuilder::new()
.redirect(reqwest::redirect::Policy::none())
.danger_accept_invalid_certs(config.oauth.insecure)
.build()
.unwrap();
let mut provider_metadata = None;
let retries = 10;
let mut backoff = Duration::from_secs(1);
for i in 0..retries {
if let Ok(metadata) = CoreProviderMetadata::discover_async(
IssuerUrl::new(config.oauth.issuer_url.clone()).unwrap(),
&oauth_http_client,
)
.await
{
provider_metadata = Some(metadata);
break;
}
if i == retries - 1 {
break;
}
sleep(backoff).await;
backoff *= 2;
}
let provider_metadata = provider_metadata.unwrap();
let oauth_client = OAuthClient::from_provider_metadata(
provider_metadata,
ClientId::new(config.oauth.client_id.clone()),
Some(ClientSecret::new(config.oauth.client_secret.clone())),
)
.set_redirect_uri(
RedirectUrl::new(format!(
"{}{}/api/auth/callback",
config.server.host, config.server.subpath
))
.unwrap(),
);
(oauth_http_client, oauth_client)
}
fn session_store(config: &Config) -> RedisSessionStore {
let url = format!( let url = format!(
"redis://{}:{}/{}", "redis://{}:{}/{}",
config.redis.host, config.redis.port, config.redis.database config.redis.host, config.redis.port, config.redis.database
); );
Ok(RedisSessionStore::new(url)?.with_prefix("session:")) RedisSessionStore::new(url).unwrap().with_prefix("session:")
} }

View File

@@ -2,7 +2,7 @@ FROM docker.io/library/rust AS builder
ARG BUILD_MODE=debug ARG BUILD_MODE=debug
RUN apt-get update && apt-get clean RUN apt-get update && apt-get install -y fuse3 libfuse3-dev && apt-get clean
WORKDIR /app WORKDIR /app
@@ -18,11 +18,11 @@ COPY .sqlx ./.sqlx
RUN cargo build $(if [ "$BUILD_MODE" = "release" ]; then echo "--release"; else echo ""; fi) RUN cargo build $(if [ "$BUILD_MODE" = "release" ]; then echo "--release"; else echo ""; fi)
RUN mkdir -p build && cp target/$(if [ "$BUILD_MODE" = "release" ]; then echo "release"; else echo "debug"; fi)/glyph build/glyph RUN mkdir -p build && cp target/$(if [ "$BUILD_MODE" = "release" ]; then echo "release"; else echo "debug"; fi)/glyph build/glyph
FROM docker.io/library/debian:bookworm-slim FROM docker.io/authelia/authelia
COPY --from=builder /app/build/glyph /usr/local/bin/glyph COPY --from=builder /app/build/glyph /usr/bin/glyph
COPY --from=builder /usr/lib/x86_64-linux-gnu/libfuse3.so.3 /usr/lib/x86_64-linux-gnu/libfuse3.so.3
COPY --from=builder /usr/lib/x86_64-linux-gnu/libgcc_s.so.1 /usr/lib/x86_64-linux-gnu/libgcc_s.so.1
EXPOSE 8080/tcp ENTRYPOINT ["/usr/bin/glyph"]
ENTRYPOINT ["/usr/local/bin/glyph"]
CMD ["--help"] CMD ["--help"]

View File

@@ -5,12 +5,22 @@ metadata:
spec: spec:
containers: containers:
- name: glyph - name: glyph
picture: registry.karaolidis.com/karaolidis/glyph:latest image: registry.karaolidis.com/karaolidis/glyph:latest
securityContext:
privileged: true
capabilities:
add:
- SYS_ADMIN
resources:
limits:
podman.io/device=/dev/fuse: 1
volumeMounts: volumeMounts:
- name: glyph-config - name: glyph-config
mountPath: /etc/glyph mountPath: /etc/glyph
- name: authelia-users - name: authelia-config
mountPath: /etc/authelia/users mountPath: /etc/authelia/config
- name: authelia-storage
mountPath: /var/lib/authelia
command: command:
[ [
"glyph", "glyph",
@@ -18,10 +28,13 @@ spec:
"/etc/glyph/default.yml", "/etc/glyph/default.yml",
--log-config, --log-config,
"/etc/glyph/log4rs.yml", "/etc/glyph/log4rs.yml",
"--",
"--config",
"/etc/authelia/config/configuration.yml",
] ]
- name: postgresql - name: postgresql
picture: docker.io/library/postgres:latest image: docker.io/library/postgres:latest
env: env:
- name: POSTGRES_DB - name: POSTGRES_DB
value: glyph value: glyph
@@ -34,26 +47,10 @@ spec:
hostPort: 5432 hostPort: 5432
- name: redis - name: redis
picture: docker.io/library/redis:latest image: docker.io/library/redis:latest
- name: authelia
picture: docker.io/authelia/authelia:latest
volumeMounts:
- name: authelia-config
mountPath: /etc/authelia
- name: authelia-users
mountPath: /etc/authelia/users
- name: authelia-storage
mountPath: /var/lib/authelia
command:
[
"/bin/sh",
"-c",
"cp /etc/authelia/users.yml /etc/authelia/users/users.yml && exec authelia --config /etc/authelia/configuration.yml",
]
- name: traefik - name: traefik
picture: docker.io/library/traefik:latest image: docker.io/library/traefik:latest
args: args:
- "--providers.file.directory=/etc/traefik/dynamic" - "--providers.file.directory=/etc/traefik/dynamic"
- "--providers.file.watch=true" - "--providers.file.watch=true"
@@ -72,13 +69,11 @@ spec:
- name: authelia-config - name: authelia-config
configMap: configMap:
name: authelia-config name: authelia-config
- name: authelia-users
emptyDir: {}
- name: authelia-storage
emptyDir: {}
- name: traefik-config - name: traefik-config
configMap: configMap:
name: traefik-config name: traefik-config
- name: authelia-storage
emptyDir: {}
--- ---
apiVersion: v1 apiVersion: v1
kind: ConfigMap kind: ConfigMap
@@ -89,13 +84,6 @@ data:
server: server:
host: https://app.glyph.local host: https://app.glyph.local
database:
host: postgresql
port: 5432
user: glyph
password: glyph
database: glyph
oauth: oauth:
issuer_url: https://id.glyph.local issuer_url: https://id.glyph.local
client_id: glyph client_id: glyph
@@ -103,13 +91,27 @@ data:
admin_group: admins admin_group: admins
insecure: true insecure: true
authelia: fuse:
user_database: /etc/authelia/users/users.yml mount_directory: /etc/authelia/users
user_database_name: users.yml
postgresql:
host: postgresql
port: 5432
user: glyph
password: glyph
database: glyph
redis: redis:
host: redis host: redis
port: 6379 port: 6379
admin:
name: glyph
display_name: Glyph
password: glyph
email: glyph@karaolidis.com
log4rs.yml: | log4rs.yml: |
appenders: appenders:
stdout: stdout:
@@ -198,13 +200,6 @@ data:
redirect_uris: redirect_uris:
- "https://app.glyph.local/api/auth/callback" - "https://app.glyph.local/api/auth/callback"
authorization_policy: "one_factor" authorization_policy: "one_factor"
users.yml: |
users:
glyph:
displayname: "glyph"
password: "$argon2id$v=19$m=65536,t=3,p=4$lobLBhv2SKyVZZZCl+e8Lg$VzPmcTksXBNlJfeztMUqMDgdU47qT5bB1Gk+QHigASQ" # The digest of 'glyph'.
groups:
- "admins"
--- ---
apiVersion: v1 apiVersion: v1
kind: ConfigMap kind: ConfigMap
@@ -232,7 +227,7 @@ data:
authelia-service: authelia-service:
loadBalancer: loadBalancer:
servers: servers:
- url: "http://authelia:9091" - url: "http://glyph:9091"
glyph-service: glyph-service:
loadBalancer: loadBalancer: